branch_name
stringclasses
149 values
text
stringlengths
23
89.3M
directory_id
stringlengths
40
40
languages
listlengths
1
19
num_files
int64
1
11.8k
repo_language
stringclasses
38 values
repo_name
stringlengths
6
114
revision_id
stringlengths
40
40
snapshot_id
stringlengths
40
40
refs/heads/master
<repo_name>blakesmith/piezo<file_sep>/README.md # Piezo From the greek word _piezein_, meaning to squeeze or press ## What is it? In its simplest form, Piezo is a simple tool to schedule repeating http requests. It provides a simple http api to schedule and unschedule recurring requests. Some things I use it for: - HTTP pinger and status checker - Load or stress tester ## Building [Install Go](http://golang.org/doc/install) then: ``` go get go build ``` If you want to build for a different architecture or OS, see the instructions for [installing a cross-compiler toolchain](http://code.google.com/p/go-wiki/wiki/WindowsCrossCompiling). To build on Linux, run: ``` CGO_ENABLED=0 GOOS=linux GOARCH=386 go build ``` ## Usage Piezo includes the following options: ``` -connect-timeout=5000: HTTP connect timeout for polling in milliseconds -enable-kestrel=false: Register kestrel as a request receiver -kestrel-host="localhost:22133": Kestrel host:port address -port="9001": Port to run the http server on -request-timeout=10000: HTTP request timeout for polling in milliseconds -worker-count=10: Number of request workers ``` To start piezo with 20 concurrent request workers that dump request stats to Kestrel, run: ``` ./piezo -worker-count 20 -enable-kestrel -kestrel-host localhost:22133 ``` By default, piezo's http API listens on port 9001 for commands. ``` /add - Add a repeating request Query params: - url: URL escaped url of the url you want to request - interval: How often the request should be made, in milliseconds - id: Unique id of the repeating request. Needed to remove. Example: Make a request to google.com every 30 seconds: http://localhost:9001/add?interval=30000&url=http://google.com&id=google ``` ``` /remove - Remove a repeating request Query Params: - id: Unique id of the repeating request to remove. Example: Remove a repeating request by id: http://localhost:9001/remove?id=google ``` ## Stats Piezo outputs tracks request time and some other request metadata. 
If you're sending this data to Kestrel, you'll receive a JSON message that looks like this: ``` { "Url":"http://localhost:9000/", "Status":200, "Error":null, "ResponseTime":250, "StartTime":"2012-07-22T14:34:00.823126-04:00", "RepeatingRequestId":"myrequest" } ``` ## About Piezo is written by <NAME>. <file_sep>/piezo.go package main import ( "encoding/json" "errors" "flag" "fmt" "github.com/bradfitz/gomemcache/memcache" "log" "net" "net/http" "net/url" "strconv" "time" ) type RequestStat struct { Url string Status int Error error ResponseTime time.Duration StartTime time.Time RepeatingRequestId string } type RepeatingRequest struct { Id string Url string Interval time.Duration Ticker *time.Ticker } type Request struct { Url string Method string RepeatingRequestId string } type Options struct { Port *string ConnectTimeout *int RequestTimeout *int WorkerCount *int KestrelHost *string EnableKestrel *bool KestrelQueue *string } type Agent struct { RepeatingRequests map[string]*RepeatingRequest RequestChannel chan *Request Receivers []Receiver Opts Options } type Receiver interface { Queue(stat *RequestStat) error } type RequestParams url.Values type KestrelClient struct { Cache *memcache.Client QueueName string } type AddHandler struct { Agent *Agent } type RemoveHandler struct { Agent *Agent } func (agent *Agent) ParseOpts() { agent.Opts.Port = flag.String("port", "9001", "Port to run the http server on") agent.Opts.ConnectTimeout = flag.Int("connect-timeout", 5000, "HTTP connect timeout for polling in milliseconds") agent.Opts.RequestTimeout = flag.Int("request-timeout", 10000, "HTTP request timeout for polling in milliseconds") agent.Opts.WorkerCount = flag.Int("worker-count", 10, "Number of request workers") agent.Opts.KestrelHost = flag.String("kestrel-host", "localhost:22133", "Kestrel host:port address") agent.Opts.EnableKestrel = flag.Bool("enable-kestrel", false, "Register kestrel as a request receiver") agent.Opts.KestrelQueue = flag.String("kestrel-queue", 
"stats", "Name of the kestrel queue") flag.Parse() } func (agent *Agent) Setup() { agent.Receivers = make([]Receiver, 0) if *agent.Opts.EnableKestrel { kestrel := new(KestrelClient) kestrel.Cache = memcache.New(*agent.Opts.KestrelHost) kestrel.QueueName = *agent.Opts.KestrelQueue agent.RegisterReceiver(kestrel) } agent.RepeatingRequests = make(map[string]*RepeatingRequest) agent.RequestChannel = make(chan *Request) } func (agent *Agent) Start() { cs := make(chan *RequestStat) log.Println("Spawning collector") go agent.StartCollect(cs) for i := 0; i < *agent.Opts.WorkerCount; i++ { log.Printf("Spawning client %d\n", i) go agent.StartClient(agent.RequestChannel, cs) } } func (agent *Agent) StartClient(rcs chan *Request, scs chan *RequestStat) { ct := time.Duration(*agent.Opts.ConnectTimeout) * time.Millisecond rt := time.Duration(*agent.Opts.RequestTimeout) * time.Millisecond client := buildHttpClient(ct, rt) for { select { case r := <-rcs: scs <- r.Do(client) } } } func (agent *Agent) StartCollect(cs chan *RequestStat) { for { select { case stat := <-cs: for _, rec := range agent.Receivers { rec.Queue(stat) } log.Println(stat) } } } func (agent *Agent) StopRepeatingRequest(id string) { if rr, ok := agent.RepeatingRequests[id]; ok { rr.Stop() } } func (agent *Agent) AddRepeatingRequest(id, url string, interval time.Duration) *RepeatingRequest { r := new(RepeatingRequest) r.Id = id r.Url = url r.Interval = interval r.Ticker = time.NewTicker(interval) go r.Start(agent.RequestChannel) agent.RepeatingRequests[id] = r return r } func (agent *Agent) RegisterReceiver(rec Receiver) { agent.Receivers = append(agent.Receivers, rec) } func (k *KestrelClient) Queue(stat *RequestStat) error { statMessage, err := json.Marshal(stat) if err != nil { log.Printf("Failed to parse %s", stat) return err } else { item := &memcache.Item{Key: k.QueueName, Value: statMessage} err := k.Cache.Set(item) if err != nil { log.Printf("Failed to queue stat: %s", err) return err } } return nil } func 
buildHttpClient(dialTimeout, timeout time.Duration) *http.Client { transport := &http.Transport{Dial: func(netw, addr string) (net.Conn, error) { deadline := time.Now().Add(timeout) c, err := net.DialTimeout(netw, addr, dialTimeout) if err != nil { return nil, err } c.SetDeadline(deadline) return c, nil }} client := &http.Client{Transport: transport} return client } func (req *Request) Do(client *http.Client) *RequestStat { stat := new(RequestStat) stat.Url = req.Url stat.RepeatingRequestId = req.RepeatingRequestId httpReq, _ := http.NewRequest(req.Method, req.Url, nil) start := time.Now() resp, err := client.Do(httpReq) stat.ResponseTime = time.Now().Sub(start) stat.StartTime = start if err != nil { log.Printf("Failed to fetch %s", req.Url) stat.Error = err } else { defer resp.Body.Close() stat.Status = resp.StatusCode } return stat } func (r *RepeatingRequest) Start(requestChannel chan *Request) { for { select { case <-r.Ticker.C: req := new(Request) req.Url = r.Url req.RepeatingRequestId = r.Id req.Method = "GET" requestChannel <- req } } } func (r *RepeatingRequest) Stop() { r.Ticker.Stop() } func (form RequestParams) RequiredParams(fields ...string) (map[string]string, error) { params := make(map[string]string) for _, v := range fields { if val, ok := form[v]; ok { params[v] = val[0] } else { return nil, errors.New(fmt.Sprintf("%s is required", v)) } } return params, nil } func (h AddHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { r.ParseForm() params, err := RequestParams(r.Form).RequiredParams("url", "interval", "id") if err != nil { http.Error(w, err.Error(), 400) return } id := params["id"] url := params["url"] interval, _ := strconv.Atoi(params["interval"]) h.Agent.StopRepeatingRequest(id) h.Agent.AddRepeatingRequest(id, url, time.Duration(interval)*time.Millisecond) msg := fmt.Sprintf("Added %s\n", id) log.Println(msg) fmt.Fprintf(w, msg) } func (h RemoveHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { r.ParseForm() params, err 
:= RequestParams(r.Form).RequiredParams("id") if err != nil { http.Error(w, err.Error(), 400) return } id := params["id"] h.Agent.StopRepeatingRequest(id) msg := fmt.Sprintf("Stopped %s\n", id) log.Println(msg) fmt.Fprintf(w, msg) } func main() { agent := new(Agent) agent.ParseOpts() flag.Args() agent.Setup() agent.Start() add := new(AddHandler) remove := new(RemoveHandler) add.Agent = agent remove.Agent = agent http.Handle("/add", add) http.Handle("/remove", remove) log.Printf("Running http server on port %s\n", *agent.Opts.Port) err := http.ListenAndServe(fmt.Sprintf(":%s", *agent.Opts.Port), nil) if err != nil { log.Printf(err.Error()) } }
9a862eadb78b7a42c8a260822602983671510cce
[ "Markdown", "Go" ]
2
Markdown
blakesmith/piezo
e549ec486b1ce58bbf3b2d518590899b92e9f0cb
a6e0d581b7e1fef94139f1929fb6eb76de6e5e88
refs/heads/master
<repo_name>DongL/magrittr<file_sep>/man/lambda.Rd % Generated by roxygen2 (4.0.1): do not edit by hand \name{compose} \alias{compose} \alias{l} \alias{lambda} \title{compose a lambda function or chain of functions.} \usage{ compose(..., .args = NULL) lambda(..., .args = NULL) l(..., .args = NULL) } \arguments{ \item{...}{A special kind of expressions for the anonymous function(s). The syntax is \code{symbol ~ expression}, see the examples.} \item{.args}{a list of the form \code{list(a, b = c)} to specify the remainder of the signature of the composed function.} } \value{ a function. } \description{ This is an alternative syntax for generating anonymous functions. When used in pipelines, the call should be enclosed in parentheses to force evaluation of the function generation before the left-hand side is inserted. If multiple lambda expressions are given they will be nested in a single global function, and evaluated as a chain. Here each input will be available to the next lambda expression (with its initial value). } \details{ \code{lambda}s have a special syntax, where the expression is defined as \code{symbol ~ expression}. The alias \code{l} is shorthand for \code{lambda}. Previous versions used symbol -> expression syntax, but this caused problems with compiling packages. There is currently a warning if the old syntax is used. } \examples{ compose(x ~ x^2 + 2*x) sapply(1:10, compose(x ~ x^2)) Filter(compose(x ~ x > 0), rnorm(100)) iris \%>\% (compose(dfr ~ rbind(dfr \%>\% head, dfr \%>\% tail))) 1:10 \%>\% sin \%>\% (compose(x ~ { d <- abs(x) > 0.5 x*d })) } <file_sep>/man/compose.Rd % Generated by roxygen2 (4.0.1): do not edit by hand \name{print.composite} \alias{print.composite} \title{Print method for composite functions.} \usage{ \method{print}{composite}(x, ...) } \arguments{ \item{x}{a composite function} \item{...}{not used.} } \description{ Generic method for printing of composite functions generated with either \code{\%,\%} or \code{compose}. 
} <file_sep>/R/lambda.R #' compose a lambda function or chain of functions. #' #' This is an alternative syntax for generating anonymous functions. #' When used in pipelines, the call should be enclosed in parentheses to #' force evaluation of the function generation before the left-hand side #' is inserted. If multiple lambda expressions are given they will be #' nested in a single global function, and evaluated as a chain. Here #' each input will be available to the next lambda expression (with its #' initial value). #' #' \code{lambda}s have a special syntax, where the expression is defined as #' \code{symbol ~ expression}. The alias \code{l} is shorthand for \code{lambda}. #' Previous versions used symbol -> expression syntax, but this caused #' problems with compiling packages. There is currently a warning if the #' old syntax is used. #' #' @param ... A special kind of expressions for the anonymous function(s). #' The syntax is \code{symbol ~ expression}, see the examples. #' @param .args a list of the form \code{list(a, b = c)} to #' specify the remainder of the signature of the composed function. #' @return a function. 
#' @rdname lambda #' @export #' @examples #' compose(x ~ x^2 + 2*x) #' #' sapply(1:10, compose(x ~ x^2)) #' #' Filter(compose(x ~ x > 0), rnorm(100)) #' #' iris %>% #' (compose(dfr ~ rbind(dfr %>% head, dfr %>% tail))) #' #' 1:10 %>% #' sin %>% #' (compose(x ~ { #' d <- abs(x) > 0.5 #' x*d #' })) compose <- function(..., .args = NULL) { dots <- lapply(as.list(substitute(list(...))[-1]), function(dot) { if (is.symbol(dot)) { dot <- call(as.character(dot), quote(.)) } if (is.call(dot) && !identical(dot[[1]], quote(`~`))) { dot <- call("~", quote(.), dot) } dot }) .args <- substitute(.args) if (!is.null(.args)) { if (!is.call(.args) || (is.call(.args) && !identical(.args[[1]], quote(list)))) stop(".args should be a list") .args <- as.list(.args[-1]) .args <- sapply(1:length(.args), function(i) { if (is.null(names(.args[i]))) { setNames(list(quote(expr = )), .args[i]) } else { eval(.args[i], parent.frame(), parent.frame()) } }) } # Utility function to generate the different function expressions. generate <- function(expr, rhs = NULL, parens = FALSE, wrap = FALSE, .args = NULL) { # Check that lambdas are of the right form x ~ expression_of(x) if (!is.call(expr) || !identical(expr[[1]], quote(`~`))) { stop("Malformed expression. Expected format is symbol ~ expression.", call. = FALSE) } if (!is.symbol(expr[[2]])) { stop("Malformed expression. Expecting one variable name on LHS", call. = FALSE) } # Construct the function inputs arg_name <- as.character(expr[[2]]) args <- c(setNames(list(quote(expr = )), arg_name), .args) body <- expr[[3]] # Construct a function with or without wrapper/parens cl <- if (wrap) { inner.cl <- call("%>%", expr[[2]], call("(", call("function", as.pairlist(args), body))) if (!is.null(rhs)) inner.cl <- call("%>%", inner.cl, rhs) inner.cl } else { call("function", as.pairlist(args), body) } if (parens) call("(", cl) else cl } if (length(dots) == 1) { # If only a single lambda is provided; regular lambda function. 
cl <- generate(dots[[1]], .args = .args) } else { # Multiple lambdas are given. Nest them, and create a single overall lambda. cl <- Reduce(function(l, r) { substitute(a ~ b, list( a = l[[2]], b = generate(l, wrap = TRUE, rhs = generate(r, parens = TRUE)))) }, dots, right = TRUE) cl <- generate(cl, .args = .args) } # Evaluate the final function, and return. composite <- eval(cl, parent.frame(), parent.frame()) attr(composite, "parts") <- dots class(composite) <- c("composite", "function") composite } #' Composition operator. #' #' The composition operator combined lhs and rhs using the compose function. #' If several expressions are composed, e.g. a \%,\% b \%,\% c, then #' the result will be \code{compose(a, b, c)}. #' #' @usage lhs \%,\% rhs #' #' @param lhs a function/expression #' @param rhs a function/expression #' #' @rdname composition #' #' @return a composite function #' @export `%,%` <- function(lhs, rhs) { # Capture inputs lhs <- substitute(lhs) rhs <- substitute(rhs) # Utility function to split the call chain. call2list <- function(cl) { if (is.call(cl) && identical(cl[[1]], quote(`%,%`))) { lapply(as.list(cl)[-1], call2list) } else { cl } } parts <- lapply(c(unlist(call2list(lhs)), rhs), function(part) { if (is.call(part) && identical(part[[1]], quote(`(`))) part[[-1]] else part }) cl <- do.call(compose, parts) eval(cl, parent.frame(), parent.frame()) } #' @rdname lambda #' @export lambda <- compose #' @rdname lambda #' @export l <- lambda #' Print method for composite functions. #' #' Generic method for printing of composite functions generated with #' either \code{\%,\%} or \code{compose}. #' #' @param x a composite function #' @param ... not used. #' #' @rdname compose #' @export print.composite <- function(x, ...) 
{ cat("Function composed of the following parts:\n\n") lapply(attr(x, "parts"), function(p) cat(deparse(p), "\n")) invisible(x) } <file_sep>/man/composition.Rd % Generated by roxygen2 (4.0.1): do not edit by hand \name{\%,\%} \alias{\%,\%} \title{Composition operator.} \usage{ lhs \%,\% rhs } \arguments{ \item{lhs}{a function/expression} \item{rhs}{a function/expression} } \value{ a composite function } \description{ The composition operator combined lhs and rhs using the compose function. If several expressions are composed, e.g. a \%,\% b \%,\% c, then the result will be \code{compose(a, b, c)}. }
438c2617e5158864bf740a96c8f355dfdae147de
[ "R" ]
4
R
DongL/magrittr
2d9da55e0a5becb2a28e37cf42ff3a64d77b1e50
4fae826ba8f7f41153c00b385f58cffa0b7f0d3f
refs/heads/master
<file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using Medallion; namespace mlp_net_ga_cs { class Matrix { public double[,] matrix; public void RandomMatrix(ref Random rnd,in int rows, in int cols) { this.matrix = new double[rows, cols]; for (int i = 0; i < rows; i++) { for (int j = 0; j < cols; j++) { this.matrix[i, j] = rnd.NextGaussian(); } } } public void EmptyMatrix(in int rows,in int cols) { this.matrix = new double[rows, cols]; for (int i = 0; i < rows; i++) { for (int j = 0; j < cols; j++) { this.matrix[i, j] = 0.0; } } } public Matrix DotProduct(in Matrix mat) { int aRows = this.matrix.GetLength(0); int aCols = this.matrix.GetLength(1); int bRows = mat.matrix.GetLength(0); int bCols = mat.matrix.GetLength(1); if (aCols != bRows) { throw new System.ArgumentException("A:Cols: " + aCols + " did not match B:Rows " + bRows + ".", ""); } double[,] C = new double[aRows, bCols]; for (int i = 0; i < aRows; i++) { for (int j = 0; j < bCols; j++) { C[i, j] = 0.0; } } for (int i = 0; i < aRows; i++) { for (int j = 0; j < bCols; j++) { for (int k = 0; k < aCols; k++) { C[i, j] += this.matrix[i, k] * mat.matrix[k, j]; } } } Matrix product = new Matrix(); product.matrix = C; return product; } public Matrix Sigmoid() { for (int i = 0; i < this.matrix.GetLength(0); i++) { for (int j = 0; j < this.matrix.GetLength(1); j++) { this.matrix[i, j] = sigmoid(this.matrix[i, j]); } } return this; } private static double sigmoid(double x) { return (1.0 / (1.0 + Math.Exp(-x))); } public static void printMatrix(Matrix mat) { int rowLength = mat.matrix.GetLength(0); int colLength = mat.matrix.GetLength(1); for (int i = 0; i < rowLength; i++) { for (int j = 0; j < colLength; j++) { Console.Write(string.Format("{0} ", mat.matrix[i, j])); } Console.Write(Environment.NewLine); } } public int Rows() { return this.matrix.GetLength(0); } public int Cols() { return this.matrix.GetLength(1); } } } <file_sep># 
mlpnet-ga-cs A 3 layer MLP Network, using Genetic Algorithm as a trainer, to solve XOR problem I first wrote this program in golang, now rewrote it to learn C#. [Original Go version](https://github.com/ViktorHura/mlpnet-ga-go) <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using Medallion; namespace mlp_net_ga_cs { class Trainer { int popsize, elite, cutoff, tournamentsize, inputs, hiddens, outputs; double mutationrate; Organism[] population; public Trainer(in Network net,in int pops,in int el,in int cutf,in int tournsize,in double mutrate) { population = new Organism[pops]; popsize = pops; elite = el; cutoff = cutf; tournamentsize = tournsize; mutationrate = mutrate; inputs = net.inputs; hiddens = net.hiddens; outputs = net.outputs; } public void Train(ref Random rnd,ref Network net,in int maxgen,in Boolean reset) { if (reset == true) { this.CreateInitialPopulation(ref rnd); } for (int generation = 0; generation < maxgen; generation++) { for (int i = 0; i < popsize; i++) { population[i].CalcFitness(ref net); } this.SortPopulation(); Console.Write("Gen: " + generation + " Bfit: " + population[0].fitness); Console.Write(Environment.NewLine); this.NaturalSelection(ref rnd); } this.SortPopulation(); net.inputWeights = population[0].inputWeights; net.hiddenWeights = population[0].hiddenWeights; } public void CreateInitialPopulation(ref Random rnd) { for (int i = 0; i < popsize; i++) { Organism o = new Organism(inputs, hiddens, outputs, ref rnd); population[i] = o; } } public void NaturalSelection(ref Random rnd) { Organism[] nextPopulation = new Organism[popsize]; for (int i = 0; i < popsize; i++) { if (i < elite) { nextPopulation[i] = population[i]; continue; } else { Organism a = this.TournamentSelection(ref rnd); Organism b = this.TournamentSelection(ref rnd); Organism child = a.Crossover(ref rnd,in b); child.Mutate(ref rnd, in mutationrate); child.fitness = 0.0; nextPopulation[i] = 
child; } } population = nextPopulation; } public void SortPopulation() { Array.Sort(population, delegate (Organism o1, Organism o2) { return o2.fitness.CompareTo(o1.fitness); // element 0 highest fitness }); } public Organism TournamentSelection(ref Random rnd) { Organism[] tournament = new Organism[tournamentsize]; for (int n = 0; n < tournamentsize; n++) { int i = (int)(rnd.NextDouble(1) * (popsize - cutoff)); tournament[n] = population[i]; } int bestint = 0; double bestfit = tournament[0].fitness; for (int n = 0; n < tournamentsize; n++) { if (tournament[n].fitness > bestfit) { bestint = n; bestfit = tournament[n].fitness; } } return tournament[bestint]; } } } <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using Medallion; namespace mlp_net_ga_cs { class Organism { public int inputs, hiddens, outputs; public double fitness; public Matrix inputWeights, hiddenWeights; public Organism(int inputs, int hiddens, int outputs, ref Random rnd) { this.inputs = inputs; this.hiddens = hiddens; this.outputs = outputs; fitness = 0.0; inputWeights = new Matrix(); hiddenWeights = new Matrix(); inputWeights.RandomMatrix(ref rnd, hiddens, inputs); hiddenWeights.RandomMatrix(ref rnd, outputs, hiddens); } public Organism(int inputs, int hiddens, int outputs, Boolean child) { this.inputs = inputs; this.hiddens = hiddens; this.outputs = outputs; fitness = 0.0; inputWeights = new Matrix(); hiddenWeights = new Matrix(); inputWeights.EmptyMatrix(hiddens, inputs); hiddenWeights.EmptyMatrix(outputs, hiddens); } public void CalcFitness(ref Network net) { if (fitness != 0.0) { return; } this.fitness = 0.0; net.inputWeights = this.inputWeights; net.hiddenWeights = this.hiddenWeights; double right = 0.0; double[] inputone = { 0.0, 0.0 }; Matrix resultone = net.Predict(in inputone); this.fitness += resultone.matrix[0,0]; if (resultone.matrix[0,0] > 0.5) { right += 1.0; } double[] inputtwo = { 1.0, 0.0 }; Matrix 
resulttwo = net.Predict(in inputtwo); this.fitness += (1 - resulttwo.matrix[0,0]); if (resulttwo.matrix[0,0] < 0.5) { right += 1.0; } double[] inputthree = { 0.0, 1.0 }; Matrix resultthree = net.Predict(in inputthree); this.fitness += (1 - resultthree.matrix[0,0]); if (resultthree.matrix[0,0] < 0.5) { right += 1.0; } double[] inputfour = { 1.0, 1.0 }; Matrix resultfour = net.Predict(in inputfour); this.fitness += resultfour.matrix[0,0]; if (resultfour.matrix[0,0] > 0.5) { right += 1.0; } this.fitness += right; } public Organism Crossover(ref Random rnd,in Organism b) { Organism child = new Organism(this.inputs, this.hiddens, this.outputs, true); int midinput = (int)(rnd.NextDouble(1) * (child.inputWeights.Rows() * child.inputWeights.Cols())); for (int i = 0; i < child.inputWeights.Rows(); i++) { for (int j = 0; j < child.inputWeights.Cols(); j++) { if (i * j < midinput) { child.inputWeights.matrix[i,j] = this.inputWeights.matrix[i,j]; } else { child.inputWeights.matrix[i,j] = b.inputWeights.matrix[i,j]; } } } int midhidden = (int)(rnd.NextDouble(1) * (child.hiddenWeights.Rows() * child.hiddenWeights.Cols())); for (int i = 0; i < child.hiddenWeights.Rows(); i++) { for (int j = 0; j < child.hiddenWeights.Cols(); j++) { if (i * j < midhidden) { child.hiddenWeights.matrix[i,j] = this.hiddenWeights.matrix[i,j]; } else { child.hiddenWeights.matrix[i,j] = b.hiddenWeights.matrix[i,j]; } } } return child; } public void Mutate(ref Random rnd, in double mutationrate) { for (int i = 0; i < this.inputWeights.Rows(); i++) { for (int j = 0; j < this.inputWeights.Cols(); j++) { if ((rnd.NextDouble(1) * 100.0) < mutationrate) { this.inputWeights.matrix[i,j] = rnd.NextGaussian(); } } } for (int i = 0; i < this.hiddenWeights.Rows(); i++) { for (int j = 0; j < this.hiddenWeights.Cols(); j++) { if ((rnd.NextDouble(1) * 100.0) < mutationrate) { this.hiddenWeights.matrix[i,j] = rnd.NextGaussian(); } } } } } } <file_sep>using System; using System.Collections.Generic; using System.Linq; 
using System.Text; using System.Threading.Tasks; namespace mlp_net_ga_cs { class Network { public int inputs, hiddens, outputs; public Matrix inputWeights, hiddenWeights; Network(ref Random rnd,in int input, in int hidden, in int output) { this.inputs = input; this.hiddens = hidden; this.outputs = output; this.inputWeights = new Matrix(); this.hiddenWeights = new Matrix(); inputWeights.RandomMatrix(ref rnd, in hidden, in input); hiddenWeights.RandomMatrix(ref rnd, in output, in hidden); } public Matrix Predict(in double[] inputData) { Matrix inputs = new Matrix(); int l = inputData.Length; inputs.EmptyMatrix(l, 1); for (int i = 0; i < l; i++) { inputs.matrix[i, 0] = inputData[i]; } Matrix hiddenInputs = inputWeights.DotProduct(in inputs); Matrix hiddenOutputs = hiddenInputs.Sigmoid(); Matrix finalInputs = hiddenWeights.DotProduct(in hiddenOutputs); Matrix finalOutputs = finalInputs.Sigmoid(); return finalOutputs; } static void Main(string[] args) { Random rnd = new Random(); Console.Write("Creating Network"); Console.Write(Environment.NewLine); Network net = new Network(ref rnd,2,4,1); Console.Write("Initialising Trainer"); Trainer trainer = new Trainer(in net, 500, 1, 200, 3, 5); Console.Write("Training Network"); trainer.Train(ref rnd,ref net, 10000, true); double[] inputone = { 0.0, 0.0 }; Matrix.printMatrix(net.Predict(in inputone)); double[] inputtwo = { 1.0, 0.0 }; Matrix.printMatrix(net.Predict(in inputtwo)); double[] inputthree = { 0.0, 1.0 }; Matrix.printMatrix(net.Predict(in inputthree)); double[] inputfour = { 1.0, 1.0 }; Matrix.printMatrix(net.Predict(in inputfour)); Console.ReadKey(); } } }
4e755a21faa97a7ae48cc8c7d0694d3d9730ce3e
[ "Markdown", "C#" ]
5
C#
ViktorHura/mlpnet-ga-cs
2e681c1993f34955cfe976bc596cd120e3e27760
61b563dd69d5c8f21a59723ad3df0feb062f85a6
refs/heads/master
<file_sep>class Solution { public int[] maxSlidingWindow(int[] nums, int k) { int start = 0 ; int end = 0; List<Integer> list = new ArrayList<>(); int max = Integer.MIN_VALUE; while(end < nums.length){ int currMax = nums[end]; if(currMax > max){ max = currMax; } if((end-start) == k-1){ start += 1; end = start-1; list.add(max); max = Integer.MIN_VALUE; } end++; } int[] arr = new int[list.size()]; for(int i = 0 ;i<list.size();i++){ arr[i] = list.get(i); } return arr; } } <file_sep>class inorder_tree_iterator { Stack<BinaryTreeNode> stack = new Stack<>(); public inorder_tree_iterator(BinaryTreeNode root) { stack.push(root); BinaryTreeNode temp = stack.peek(); while(temp.left != null){ stack.push(temp.left); temp = temp.left; } } public boolean hasNext() { if(!stack.isEmpty()){ return true; } return false; } public BinaryTreeNode getNext() { if(stack.isEmpty()){ return null; } BinaryTreeNode temp = stack.pop(); BinaryTreeNode rTemp = temp.right; while(rTemp != null){ stack.push(rTemp); rTemp = rTemp.left; } return temp; } //Don't Change anything down below public String inorder_using_iterator(BinaryTreeNode root) { inorder_tree_iterator iter = new inorder_tree_iterator(root); String result = ""; while (iter.hasNext()) { result += iter.getNext().data + ","; } return result; } } <file_sep>class Solution { public int uniquePathsWithObstacles(int[][] obstacleGrid) { int m = obstacleGrid.length; if(m == 0){ return 0; } int n = obstacleGrid[0].length; if(n == 0){ return 1; } if(obstacleGrid[0][0] == 1 || obstacleGrid[m-1][n-1] == 1){ return 0; } int[][] matrix = new int[m][n]; matrix[0][0] = 1; for(int i=1; i<m;i++){ if(obstacleGrid[i][0] == 1){ matrix[i][0] = 0; }else{ matrix[i][0] = matrix[i-1][0]; } } for(int i=1; i<n; i++){ if(obstacleGrid[0][i] == 1){ matrix[0][i] = 0; }else{ matrix[0][i] = matrix[0][i-1]; } } for(int i=1; i<m; i++){ for(int j=1; j<n; j++){ if(obstacleGrid[i][j] == 1){ matrix[i][j] = 0; }else{ matrix[i][j] = matrix[i-1][j] + matrix[i][j-1]; } } } return 
matrix[m-1][n-1]; } } <file_sep>class Solution { public void gameOfLife(int[][] board) { int m = board.length; int n= board[0].length; for(int i=0; i<m; i++){ for(int j=0; j<n; j++){ int liveNeighbors = checkNeighbors(board,i,j); if(board[i][j] == 0 && liveNeighbors == 3){ board[i][j] = 2; } if(board[i][j] == 1 && (liveNeighbors < 2 || liveNeighbors > 3)){ board[i][j] = -1; } } } for(int i=0; i<m; i++){ for(int j=0; j<n; j++){ if(board[i][j] == -1){ board[i][j] = 0; } if(board[i][j] == 2){ board[i][j] = 1; } } } } public int checkNeighbors(int[][] board, int row, int col){ int liveNeighbors = 0; int m = board.length; int n = board[0].length; for(int i = row-1; i<= row+1;i++){ for(int j = col-1; j<= col+1;j++){ if(i == row && j == col){ continue; }else if( (i>=0 && i<m) && (j>=0 && j<n)){ if(board[i][j] == 1 || board[i][j] == -1){ liveNeighbors++; } } } } return liveNeighbors; } } <file_sep>class Solution { public int numIslands(char[][] grid) { if(grid == null || grid.length == 0){ return 0; } int numRows = grid.length; int numCols = grid[0].length; int numberOfIslands = 0; for(int i=0; i<numRows; i++){ for(int j=0; j<numCols; j++){ if(grid[i][j] == '1'){ numberOfIslands++; dfs(grid,i,j); } } } return numberOfIslands; } public void dfs(char[][] grid, int row, int col){ int numRows = grid.length; int numCols = grid[0].length; if(row < 0 || col < 0 || row >= numRows || col >= numCols || grid[row][col] == '0'){ return; } grid[row][col] = '0'; dfs(grid,row-1,col); dfs(grid,row+1,col); dfs(grid,row,col+1); dfs(grid,row,col-1); } } <file_sep>class TicTacToe { int[][] board; /** Initialize your data structure here. */ public TicTacToe(int n) { board = new int[n][n]; } /** Player {player} makes a move at ({row}, {col}). @param row The row of the board. @param col The column of the board. @param player The player, can be either 1 or 2. @return The current winning condition, can be either: 0: No one wins. 1: Player 1 wins. 2: Player 2 wins. 
*/ public int move(int row, int col, int player) { Boolean win = true; board[row][col] = player; //check row for(int i=0; i<board.length;i++){ if(board[row][i] != player){ win = false; break; } } if(win) return player; win = true; //check col for(int i=0; i<board.length;i++){ if(board[i][col] != player){ win = false; break; } } if(win) return player; win = true; // check diagonal for(int i=0; i<board.length;i++){ if(board[i][i] != player){ win = false; break; } } if(win) return player; win = true; for(int i=0;i<board.length;i++){ if(board[i][board.length - i -1] != player){ win = false; break; } } if(win) return player; return 0; } } /** * Your TicTacToe object will be instantiated and called as such: * TicTacToe obj = new TicTacToe(n); * int param_1 = obj.move(row,col,player); */ <file_sep>class Solution { public double myPow(double x, int n) { long N = n; if(N < 0 ){ x = 1/x; N = -N; } return calPow(x,N); } public double calPow(double x, long n){ if(n == 0){ return 1.0; } double pow = calPow(x,n/2); if(n % 2 == 0) return pow * pow; else return x * pow * pow; } } <file_sep>class Solution { public int pivotIndex(int[] nums) { int totalSum = 0; int leftSum = 0; for(int i : nums){ totalSum += i; } for(int i=0;i<nums.length;i++){ if((totalSum -leftSum - nums[i]) == leftSum){ return i; } leftSum += nums[i]; } return -1; } } <file_sep>class Solution { public int[][] generateMatrix(int n) { int[][] matrix = new int[n][n]; int topRow = 0; int rightCol = n-1; int bottomRow = n-1; int leftCol = 0; int dir = 0; int N=1; while(topRow <= bottomRow && leftCol <= rightCol){ if(dir == 0){ for(int i=leftCol; i<=rightCol; i++){ matrix[topRow][i] = N; N++; } topRow++; } if(dir == 1){ for(int i=topRow; i<= bottomRow; i++){ matrix[i][rightCol] = N; N++; } rightCol--; } if(dir == 2){ for(int i=rightCol; i>= leftCol; i--){ matrix[bottomRow][i] = N; N++; } bottomRow--; } if(dir == 3){ for(int i=bottomRow; i>= topRow;i--){ matrix[i][leftCol] = N; N++; } leftCol++; } dir = (dir+1)%4; } 
return matrix; } } <file_sep>"# LeetcodeSolutions" <file_sep>class Solution { List<List<Integer>> result = new ArrayList<>(); public List<List<Integer>> subsets(int[] nums) { listSubsets(nums,0,nums.length,new ArrayList<Integer>()); return result; } public void listSubsets(int[] nums, int start, int end, List<Integer> set){ result.add(new ArrayList(set)); for(int i=start;i < end;i++){ set.add(nums[i]); listSubsets(nums,i+1,end,set); set.remove(set.size()-1); } } } <file_sep>/** * Definition for singly-linked list. * public class ListNode { * int val; * ListNode next; * ListNode(int x) { val = x; } * } */ class Solution { public ListNode addTwoNumbers(ListNode l1, ListNode l2) { int sum = 0; int carry = 0; ListNode temp1 = l1; ListNode temp2 = l2; ListNode head = new ListNode(0); ListNode temp = head; while(temp1 != null || temp2 != null){ int x = (temp1 != null)? temp1.val : 0; int y = (temp2 != null)? temp2.val : 0; sum = x + y + carry; carry = sum/10; temp.next = new ListNode(sum%10); temp = temp.next; if(temp1 != null) temp1 = temp1.next; if(temp2 != null) temp2 = temp2.next; } if(carry != 0){ temp.next = new ListNode(carry); } return head.next; } } <file_sep>/** * Definition for a binary tree node. 
* public class TreeNode { * int val; * TreeNode left; * TreeNode right; * TreeNode(int x) { val = x; } * } */ class Solution { public List<List<Integer>> levelOrder(TreeNode root) { if(root == null){ return new ArrayList<>(); } Queue<TreeNode> queue = new LinkedList<>(); List<List<Integer>> levelOrderList = new ArrayList<>(); queue.add(root); levelOrderList.add(new ArrayList<>(Arrays.asList(root.val))); while(!queue.isEmpty()){ int level = queue.size(); List<Integer> tempList = new ArrayList<>(); while(level > 0){ TreeNode temp = queue.poll(); if(temp.left != null){ tempList.add(temp.left.val); queue.add(temp.left); } if(temp.right != null){ tempList.add(temp.right.val); queue.add(temp.right); } level--; } if(tempList.size() != 0) levelOrderList.add(tempList); } return levelOrderList; } }<file_sep>class Solution { public List<List<String>> groupAnagrams(String[] strs) { HashMap<String, ArrayList<String>> map = new HashMap<>(); for(String s : strs){ char[] temp = s.toCharArray(); Arrays.sort(temp); if(!map.containsKey(new String(temp))){ ArrayList<String> arr = new ArrayList<>(); arr.add(s); map.put(new String(temp),arr); }else{ ArrayList<String> arr = map.get(new String(temp)); arr.add(s); map.put(new String(temp),arr); } } List<List<String>> anagrams = new ArrayList<>(); for(Map.Entry<String, ArrayList<String>> entry : map.entrySet()){ anagrams.add(entry.getValue()); } return anagrams; } } <file_sep>class Solution { public List<String> fizzBuzz(int n) { List<String> list = new ArrayList<>(); HashMap<Integer,String> map = new HashMap<>(); map.put(3,"Fizz"); map.put(5,"Buzz"); for(int num=1; num<=n ;num++){ String temp = ""; for(Integer key : map.keySet()){ if(num % key == 0){ temp += map.get(key); } } if(temp.equals("")){ temp += Integer.toString(num); } list.add(temp); } return list; } } <file_sep>/** * Definition for singly-linked list. 
* public class ListNode { * int val; * ListNode next; * ListNode(int x) { val = x; } * } */ class Solution { public boolean isPalindrome(ListNode head) { int length = 0; ListNode temp1 = head; ListNode temp2 = head; while(temp1 != null){ length++; temp1 = temp1.next; } temp1 = head; int n = length/2; while(n > 0){ temp2 = temp2.next; n--; } temp2 = reverseList(temp2); while(temp2 != null){ if(temp1.val != temp2.val){ return false; } temp1 = temp1.next; temp2 = temp2.next; } return true; } public ListNode reverseList(ListNode head){ if(head == null || head.next == null){ return head; } ListNode prev = head; ListNode curr = prev.next; ListNode temp = null; while(curr != null){ temp = curr.next; curr.next = prev; if(prev == head){ prev.next = null; } prev = curr; curr = temp; } return prev; } } <file_sep>class Solution { public int singleNumber(int[] nums) { int singleNumber = 0; for(int n : nums){ singleNumber ^= n; } return singleNumber; } } <file_sep>class Solution { public int search(int[] nums, int target) { if(nums.length == 0){ return -1; } int pivot = findPivot(nums,0, (nums.length-1)); // System.out.println(pivot); if(nums[pivot] == target){ return pivot; }else if(target > nums[pivot] && target <= nums[nums.length-1]){ return binarySearch(nums, pivot+1, nums.length-1,target); }else{ return binarySearch(nums,0,pivot-1,target); } } public int binarySearch(int[] nums, int low, int high,int target){ while(low <= high){ int mid = (low+high)/2; if(nums[mid] == target){ return mid; }else if(target < nums[mid]){ high = mid-1; }else{ low = mid+1; } } return -1; } public int findPivot(int[] nums, int low, int high){ if(nums[low] < nums[high]){ return 0; } while(low <= high){ int mid = (low+high)/2; if(low == high){ return low; } if(nums[mid] < nums[mid+1] && nums[mid] < nums[mid-1]){ return mid; }else if(nums[mid] < nums[nums.length-1]){ high = mid-1; }else if(nums[mid] > nums[nums.length-1]){ low = mid+1; } } return 0; } } <file_sep>class inorderIterative{ static 
String inorder_iterative(BinaryTreeNode root) { Stack<BinaryTreeNode> stack = new Stack<>(); StringBuilder s = new StringBuilder(); stack.push(root); BinaryTreeNode temp = stack.peek(); while(!stack.isEmpty()){ while(temp.left != null){ stack.push(temp.left); temp = temp.left; } BinaryTreeNode node = stack.pop(); s.append(node.data + ","); if(node.right != null){ stack.push(node.right); temp = node.right; } } return s.toString(); } }
69a70436de4866cabaeadde2b719ed9f26670678
[ "Markdown", "Java" ]
19
Java
nikitabairagi/LeetcodeSolutions
012e03545ae68a5c6f57e2629d38b67ac02f8422
a67564c825e40431918aedcd57b248392dab0337
refs/heads/main
<file_sep>cross_words = [ ['A', 'V', 'E', 'T', 'O'], ['A', 'B', 'C', 'D', 'E'], ['C', 'B', 'C', 'D', 'E'], ['O', 'B', 'C', 'D', 'E'], ['R', 'B', 'C', 'D', 'E'], ['N', 'B', 'C', 'D', 'E'], ['O', 'B', 'C', 'D', 'E'], ['A', 'B', 'C', 'D', 'E'], ['A', 'B', 'C', 'D', 'E'], ['A', 'B', 'C', 'D', 'E'], ] search = 'VETO' for i in range(9): # 9 linhas row = cross_words[i] # pegar a linha row_string = ''.join(row) # esse metodo junta toda a linha em uma string string_position = row_string.find(search) # esse metodo pega a string row string e ve se dentro dela tem a string 'search', # se tiver retorna a posição dela, se não tiver retorna -1 if string_position > -1: # maior que -1 quer dizer que achou print(f"A busca '{search}' foi achada na posição cross_words[{i}][{string_position}]") for j in range(5): # 5 colunas column = [] # inicializar o array dessa coluna for i in range(9): column.append(cross_words[i][j]) # adicionar os valores ao array da coluna inicializado acima column_string = ''.join(column) # juntar a coluna em uma unica string, por exemplo a coluna 0 vai ficar 'AACORNOAAA' string_position = column_string.find(search) # buscar nessa string da coluna se o 'search' está dentro dela if string_position > -1: print(f"A busca '{search}' foi achada na posição cross_words[{string_position}][{j}]")
7d479531aa2085e56611beb8881c20af432fb9dd
[ "Python" ]
1
Python
p-schlickmann/word-finder
17229e623e8235bac0455c200d6acba17465aee5
8649e19e17d4b6414bed9473f1cacad6dd60a44c
refs/heads/master
<file_sep># smart-cctv A Smart Surveillance System using an advanced digital Image processing technique alongside the mix of computer vision and unsupervised machine learning techniques. Abnormal human activity detection has wide applications that span across monitoring in public spaces to personal health rehabilitation. This principally concentrates on automation of video surveillance in ATM machines and recognize any sort of potential criminal exercises and alert the concerned authorities. # Screenshots <file_sep>import numpy as np import cv2 import time face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml') eye_cascade = cv2.CascadeClassifier('haarcascade_eye.xml') cap = cv2.VideoCapture(0) Sec = 0 Min = 0 Check = 1 Counter = 1 while 1: ret, img = cap.read() gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) #if ret is True: # gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) #else: # continue faces = face_cascade.detectMultiScale(gray, 1.3, 5) for (x,y,w,h) in faces: cv2.rectangle(img,(x,y),(x+w,y+h),(255,0,0),2) roi_gray = gray[y:y+h, x:x+w] roi_color = img[y:y+h, x:x+w] eyes = eye_cascade.detectMultiScale(roi_gray) for (ex,ey,ew,eh) in eyes: cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,255,0),2) if len(faces) > 0: Sec += 1 print(str(Min) + " Mins " + str(Sec) + " Sec ") cv2.putText(img, "Time: " + str(Min) + " Mins " + str(Sec) + " Sec ", (0,img.shape[0] -30), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (0,0,255), 1) cv2.putText(img, "Number of faces detected: " + str(faces.shape[0]), (0,img.shape[0] -10), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (0,0,255), 1) time.sleep(1) if Sec == 60: Sec = 0 Min += 1 print(str(Min) + " Minute") if Min == 2: print("Alert") if Check == 1: import http.client conn = http.client.HTTPConnection("api.msg91.com") payload = "{ \"sender\": \"ATMAUT\", \"route\": \"4\", \"country\": \"91\", \"sms\": [ { \"message\": \"Suspicious activity detected inside ATM.\", \"to\": [ \"9677104366\"] } ] }" headers = {'authkey': 
"209349Aqh8iTXUN1Of5accca05",'content-type': "application/json"} conn.request("POST", "/api/v2/sendsms", payload, headers) res = conn.getresponse() data = res.read() print(data.decode("utf-8")) Check += 1 if len(faces) > 2 and Counter == 1: import http.client conn = http.client.HTTPConnection("api.msg91.com") payload = "{ \"sender\": \"SRMVDP\", \"route\": \"4\", \"country\": \"91\", \"sms\": [ { \"message\": \"Suspicious activity detected inside SRM VDP ATM.\", \"to\": [ \"9551631252\"] } ] }" headers = {'authkey': "209349Aqh8iTXUN1Of5accca05",'content-type': "application/json"} conn.request("POST", "/api/v2/sendsms", payload, headers) res = conn.getresponse() data = res.read() print(data.decode("utf-8")) Counter += 1 if len(faces) == 0: print('No face detected') cv2.putText(img, "No face detected ", (0,img.shape[0] -10), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (0,0,255), 1) Sec = 0 Min = 0 cv2.imshow('img',img) k = cv2.waitKey(30) & 0xff if k == 27: break cap.release() cv2.destroyAllWindows()
426ba8e5e51d6467fc52b41cad7cbb2d5b302799
[ "Markdown", "Python" ]
2
Markdown
Praveen-98cs/Abnormal-human-Activity-detection
8979bd412f8e4df456b4b3cbd7860cb1d24e0a53
66f77a9983972f761c92b4cd16ee01660b5cbdc4
refs/heads/master
<repo_name>METABYTECODE/TwilightsEveORPG<file_sep>/game/scripts/vscripts/abilities/druid/archdruid/modifier_teve_druid_archdruid_werebear.lua modifier_teve_druid_archdruid_werebear = class({}) function modifier_teve_druid_archdruid_werebear:OnCreated(kv) if IsServer() then self:GetCaster():SetAttackCapability(DOTA_UNIT_CAP_MELEE_ATTACK) end end function modifier_teve_druid_archdruid_werebear:OnDestroy(kv) if IsServer() then self:GetCaster():SetAttackCapability(DOTA_UNIT_CAP_RANGED_ATTACK) end end function modifier_teve_druid_archdruid_werebear:DeclareFunctions() local funcs = { MODIFIER_PROPERTY_MODEL_CHANGE, MODIFIER_PROPERTY_MOVESPEED_BASE_OVERRIDE, MODIFIER_PROPERTY_STATS_STRENGTH_BONUS, MODIFIER_PROPERTY_STATS_AGILITY_BONUS, MODIFIER_PROPERTY_STATS_INTELLECT_BONUS, MODIFIER_PROPERTY_BASEATTACK_BONUSDAMAGE, MODIFIER_PROPERTY_ATTACK_RANGE_BONUS, MODIFIER_PROPERTY_BASE_ATTACK_TIME_CONSTANT, MODIFIER_PROPERTY_PHYSICAL_ARMOR_BONUS } return funcs end function modifier_teve_druid_archdruid_werebear:GetModifierModelChange() return "models/items/lone_druid/bear/spirit_of_the_atniw/spirit_of_the_atniw.vmdl" end function modifier_teve_druid_archdruid_werebear:GetModifierMoveSpeedOverride(params) return 300 end function modifier_teve_druid_archdruid_werebear:GetModifierBonusStats_Strength(params) return 20 end function modifier_teve_druid_archdruid_werebear:GetModifierBonusStats_Agility(params) return 20 end function modifier_teve_druid_archdruid_werebear:GetModifierBonusStats_Intellect(params) return 20 end function modifier_teve_druid_archdruid_werebear:GetModifierBaseAttack_BonusDamage(params) --ArchDruid has 50 base damage, we want to end up with 800 "base" damage return 800-50 end function modifier_teve_druid_archdruid_werebear:GetModifierAttackRangeBonus(params) --ArchDruid has 500 attack range, all our melee heroes have 100 range. 
return 100-500 end function modifier_teve_druid_archdruid_werebear:GetModifierBaseAttackTimeConstant(params) return 1.6 end function modifier_teve_druid_archdruid_werebear:GetModifierPhysicalArmorBonus(params) --ArchDruid already has 2 armor, we want 25 return 25-2 end<file_sep>/game/scripts/vscripts/abilities/druid/shapeshifter/modifier_teve_druid_shapeshifter_werewolf.lua modifier_teve_druid_shapeshifter_werewolf = class({}) function modifier_teve_druid_shapeshifter_werewolf:OnCreated(kv) if IsServer() then self:GetCaster():SetAttackCapability(DOTA_UNIT_CAP_MELEE_ATTACK) end end function modifier_teve_druid_shapeshifter_werewolf:OnDestroy(kv) if IsServer() then self:GetCaster():SetAttackCapability(DOTA_UNIT_CAP_RANGED_ATTACK) end end function modifier_teve_druid_shapeshifter_werewolf:DeclareFunctions() local funcs = { MODIFIER_PROPERTY_MODEL_CHANGE, MODIFIER_PROPERTY_MOVESPEED_BASE_OVERRIDE, MODIFIER_PROPERTY_STATS_STRENGTH_BONUS, MODIFIER_PROPERTY_STATS_AGILITY_BONUS, MODIFIER_PROPERTY_STATS_INTELLECT_BONUS, MODIFIER_PROPERTY_BASEATTACK_BONUSDAMAGE, MODIFIER_PROPERTY_ATTACK_RANGE_BONUS, MODIFIER_PROPERTY_BASE_ATTACK_TIME_CONSTANT, MODIFIER_PROPERTY_PHYSICAL_ARMOR_BONUS } return funcs end function modifier_teve_druid_shapeshifter_werewolf:GetModifierModelChange() return "models/heroes/lycan/summon_wolves.vmdl" end function modifier_teve_druid_shapeshifter_werewolf:GetModifierMoveSpeedOverride(params) return 420 end function modifier_teve_druid_shapeshifter_werewolf:GetModifierBonusStats_Strength(params) --Shapeshifter has 65, we want 80. therefore add 80-65 return 80-65 end function modifier_teve_druid_shapeshifter_werewolf:GetModifierBonusStats_Agility(params) --Shapeshifter has 65, we want 80. therefore add 80-65 return 80-65 end function modifier_teve_druid_shapeshifter_werewolf:GetModifierBonusStats_Intellect(params) --Shapeshifter has 80, we want 100. 
therefore add 100-80 return 100-80 end function modifier_teve_druid_shapeshifter_werewolf:GetModifierBaseAttack_BonusDamage(params) --Shapeshifter has 210 base damage, we want to end up with 950 "base" damage return 950-210 end function modifier_teve_druid_shapeshifter_werewolf:GetModifierAttackRangeBonus(params) --Shapeshifter has 500 attack range, all our melee heroes have 100 range. return 100-500 end function modifier_teve_druid_shapeshifter_werewolf:GetModifierBaseAttackTimeConstant(params) return 1.3 end function modifier_teve_druid_shapeshifter_werewolf:GetModifierPhysicalArmorBonus(params) --Shapeshifter already has 3 armor, we want 25 return 25-3 end<file_sep>/game/scripts/vscripts/abilities/druid/shaman/LightningStorm.lua teve_druid_shaman_lightning_storm = class({}) LinkLuaModifier( "modifier_teve_druid_shaman_lightning_storm", "abilities/druid/shaman/modifier_teve_druid_shaman_lightning_storm", LUA_MODIFIER_MOTION_NONE ) function teve_druid_shaman_lightning_storm:OnSpellStart() print("Lightning Storm from Lua!") local kv = { duration = 3.3 --1.3s delay + 2.0s damage time } CreateModifierThinker( self:GetCaster(), self, "modifier_teve_druid_shaman_lightning_storm", kv, self:GetCursorPosition(), self:GetCaster():GetTeamNumber(), false ) end function teve_druid_shaman_lightning_storm:GetAOERadius() return 350 end function teve_druid_shaman_lightning_storm:GetCastAnimation() return ACT_DOTA_CAST_ABILITY_4 end function teve_druid_shaman_lightning_storm:GetManaCost(level) if level == -1 then level = self:GetLevel() end return 40 + (30 * level) end<file_sep>/game/scripts/vscripts/abilities/druid/druid/modifier_teve_druid_druid_thorns_aura_buff.lua modifier_teve_druid_druid_thorns_aura_buff = class({}) function modifier_teve_druid_druid_thorns_aura_buff:DeclareFunctions() local funcs = { MODIFIER_EVENT_ON_ATTACK_LANDED } return funcs end function modifier_teve_druid_druid_thorns_aura_buff:OnAttackLanded(params) if params["target"] == self:GetParent() and 
params["damage_type"] == DAMAGE_TYPE_PHYSICAL and not params["attacker"]:IsRangedAttacker() then -- get damage done self.target_armour = params["target"]:GetPhysicalArmorValue() self.armour_constant = 0.06 --constant is 0.06 in dota but 0.01 in teve. change value to 0.01 once teve armour is implemented self.damage_mult = 1 - (self.armour_constant * self.target_armour / (1 + (self.armour_constant * self.target_armour))) self.total_damage = params["damage"] * self.damage_mult -- deal % back to attacker self.return_pct = self:GetAbility():GetLevel() * 0.03 self.return_damage = self.return_pct * self.total_damage local damage = { attacker = params["target"], victim = params["attacker"], damage = self.return_damage, damage_type = DAMAGE_TYPE_PHYSICAL, ability = self:GetAbility() } ApplyDamage( damage ) end end<file_sep>/game/scripts/vscripts/abilities/druid/runemaster/Werewolf.lua teve_druid_runemaster_werewolf = class({}) LinkLuaModifier( "modifier_teve_druid_runemaster_werewolf", "abilities/druid/runemaster/modifier_teve_druid_runemaster_werewolf", LUA_MODIFIER_MOTION_NONE ) function teve_druid_runemaster_werewolf:OnSpellStart() print("Wearwolf from Lua!") if not self:GetCaster():HasModifier("modifier_teve_druid_runemaster_werewolf") then EmitSoundOn("Hero_Lycan.Shapeshift.Cast", self:GetCaster()) end end function teve_druid_runemaster_werewolf:OnChannelFinish(interrupted) if interrupted then --TODO: put more shit here return end if self:GetCaster():HasModifier("modifier_teve_druid_runemaster_werewolf") then --Removing werewolf, going back to human. self:GetCaster():RemoveModifierByName("modifier_teve_druid_runemaster_werewolf") return elseif self:GetCaster():HasModifier("modifier_teve_druid_runemaster_werebear") then --Removing werebear, going to werewolf self:GetCaster():RemoveModifierByName("modifier_teve_druid_runemaster_werebear") end --We might have been human or werebear, who cares we are werewolf now. 
self:GetCaster():AddNewModifier(self:GetCaster(), self, "modifier_teve_druid_runemaster_werewolf", {}) end function teve_druid_runemaster_werewolf:GetChannelTime() return 5.5 - self:GetLevel() end function teve_druid_runemaster_werewolf:GetCastAnimation() return ACT_DOTA_CAST_ABILITY_2 end<file_sep>/game/scripts/vscripts/abilities/druid/shapeshifter/Lycanthrophy.lua teve_druid_shapeshifter_lycanthrophy = class({}) LinkLuaModifier("modifier_teve_druid_shapeshifter_lycanthrophy", "abilities/druid/shapeshifter/modifier_teve_druid_shapeshifter_lycanthrophy.lua", LUA_MODIFIER_MOTION_NONE) function teve_druid_shapeshifter_lycanthrophy:GetIntrinsicModifierName() return 'modifier_teve_druid_shapeshifter_lycanthrophy' end<file_sep>/game/scripts/vscripts/abilities/druid/shaman/modifier_teve_druid_shaman_regeneration.lua modifier_teve_druid_shaman_regeneration = class ({}) function modifier_teve_druid_shaman_regeneration:OnCreated( kv ) local caster = self:GetCaster() local hTarget = self:GetParent():GetAbsOrigin() local nFXIndex = ParticleManager:CreateParticle( "particles/hero/druid/shaman/shamanregeneration.vpcf", PATTACH_POINT_FOLLOW, self:GetParent() ) ParticleManager:SetParticleControlEnt( nFXIndex, 0, self:GetParent(), PATTACH_POINT_FOLLOW, "attach_hitloc", hTarget, true) ParticleManager:SetParticleControlEnt( nFXIndex, 1, self:GetParent(), PATTACH_POINT_FOLLOW, "attach_hitloc", hTarget, true) self:AddParticle( nFXIndex, false, false, -1, false, false) EmitSoundOn("Hero_Juggernaut.HealingWard.Loop", self:GetParent() ) end function modifier_teve_druid_shaman_regeneration:DeclareFunctions() local funcs = { MODIFIER_PROPERTY_MANA_REGEN_CONSTANT, MODIFIER_PROPERTY_HEALTH_REGEN_CONSTANT } return funcs end function modifier_teve_druid_shaman_regeneration:GetModifierConstantManaRegen ( params ) --30 total mana / 15 seconds return (30/15) * self:GetAbility():GetLevel() end function modifier_teve_druid_shaman_regeneration:GetModifierConstantHealthRegen ( params ) --100 total 
health / 15 seconds return (100/15) * self:GetAbility():GetLevel() end function modifier_teve_druid_shaman_regeneration:OnDestroy() EmitSoundOn("Hero_Juggernaut.HealingWard.Stop", self:GetParent() ) StopSoundOn("Hero_Juggernaut.HealingWard.Loop", self:GetParent() ) end <file_sep>/game/scripts/vscripts/abilities/druid/archdruid/modifier_teve_druid_archdruid_thunderstorm_thinker.lua modifier_teve_druid_archdruid_thunderstorm_thinker = class({}) function modifier_teve_druid_archdruid_thunderstorm_thinker:OnCreated( kv ) self.thunderstorm_damage = 20 * self:GetAbility():GetLevel() self.tick_rate = 0.1 self.iter = 0 if IsServer() then self:StartIntervalThink( self.tick_rate ) local nFXIndex = ParticleManager:CreateParticle( "particles/hero/druid/archdruid/archdruidcloudblack.vpcf", PATTACH_WORLDORIGIN, self:GetCaster() ) ParticleManager:SetParticleControl( nFXIndex, 0, self:GetParent():GetOrigin() ) ParticleManager:SetParticleControl( nFXIndex, 1, self:GetParent():GetOrigin() + Vector(0, 0, 600) ) ParticleManager:ReleaseParticleIndex( nFXIndex ) end end function modifier_teve_druid_archdruid_thunderstorm_thinker:OnIntervalThink() if IsServer() then if self.iter > 13 and self.iter % 4 == 0 then --trigger if 1.3 seconds have passed and it is an interval of 0.4 for the damage local enemies = FindUnitsInRadius( self:GetParent():GetTeamNumber(), self:GetParent():GetOrigin(), self:GetParent(), 350, DOTA_UNIT_TARGET_TEAM_ENEMY, DOTA_UNIT_TARGET_HERO + DOTA_UNIT_TARGET_BASIC, 0, 0, false ) if #enemies > 0 then for _,enemy in pairs(enemies) do if enemy ~= nil and ( not enemy:IsMagicImmune() ) and ( not enemy:IsInvulnerable() ) then local damage = { attacker = self:GetCaster(), victim = enemy, damage = self.thunderstorm_damage, damage_type = DAMAGE_TYPE_MAGICAL, ability = self:GetAbility() } ApplyDamage( damage ) --EmitSoundOn () end end end local nFXIndex = ParticleManager:CreateParticle( "particles/hero/druid/archdruid/archdruid_thunderstorm.vpcf", PATTACH_WORLDORIGIN, 
self:GetCaster() ) ParticleManager:SetParticleControl( nFXIndex, 0, self:GetParent():GetOrigin() ) ParticleManager:SetParticleControl( nFXIndex, 1, self:GetParent():GetOrigin() + Vector(0, 0, 550) ) ParticleManager:ReleaseParticleIndex( nFXIndex ) EmitSoundOnLocationWithCaster(self:GetAbility():GetCursorPosition(), "Hero_razor.lightning", self:GetCaster()) end self.iter = self.iter + 1 end end <file_sep>/game/scripts/vscripts/abilities/druid/archdruid/Twister.lua teve_druid_archdruid_twister = class({}) function teve_druid_archdruid_twister:OnSpellStart() print("Twister from Lua!") self.archdruid_twister_speed = 1000 self.archdruid_twister_width_initial = 150 self.archdruid_twister_width_end = 200 self.archdruid_twister_distance = 600 self.archdruid_twister_damage = 100 * self:GetLevel() --Do we want unit targetting??? nope local vPos = nil if self:GetCursorTarget() then vPos = self:GetCursorTarget():GetOrigin() else vPos = self:GetCursorPosition() end local vDirection = vPos - self:GetCaster():GetOrigin() vDirection.z = 0.0 vDirection = vDirection:Normalized() self.archdruid_twister_speed = self.archdruid_twister_speed * ( self.archdruid_twister_distance / ( self.archdruid_twister_distance - self.archdruid_twister_width_initial ) ) local info = { EffectName = "particles/units/heroes/hero_invoker/invoker_tornado.vpcf", Ability = self, vSpawnOrigin = self:GetCaster():GetOrigin(), fStartRadius = self.archdruid_twister_width_initial, fEndRadius = self.archdruid_twister_width_end, vVelocity = vDirection * self.archdruid_twister_speed, fDistance = self.archdruid_twister_distance, Source = self:GetCaster(), iUnitTargetTeam = DOTA_UNIT_TARGET_TEAM_ENEMY, iUnitTargetType = DOTA_UNIT_TARGET_HERO + DOTA_UNIT_TARGET_BASIC } ProjectileManager:CreateLinearProjectile ( info ) EmitSoundOn ( "Hero_Invoker.Tornado.Cast", self:GetCaster() ) end function teve_druid_archdruid_twister:OnProjectileHit ( hTarget, vLocation ) if hTarget ~= nil and ( not hTarget:IsMagicImmune() ) and ( not 
hTarget:IsInvulnerable() ) then local damage = { victim = hTarget, attacker = self:GetCaster(), damage = self.archdruid_twister_damage, damage_type = DAMAGE_TYPE_MAGICAL, ability = self } ApplyDamage( damage ) local vDirection = vLocation - self:GetCaster():GetOrigin() vDirection.z = 0.0 vDirection = vDirection:Normalized() end return false end function teve_druid_archdruid_twister:GetManaCost(level) if level == -1 then level = self:GetLevel() end return 60 + (20 * level ) end function teve_druid_archdruid_twister:GetCastAnimation() return ACT_DOTA_CAST_ABILITY_1 end<file_sep>/game/scripts/vscripts/abilities/druid/shapeshifter/Werebear.lua teve_druid_shapeshifter_werebear = class({}) LinkLuaModifier( "modifier_teve_druid_shapeshifter_werebear", "abilities/druid/shapeshifter/modifier_teve_druid_shapeshifter_werebear", LUA_MODIFIER_MOTION_NONE ) function teve_druid_shapeshifter_werebear:OnSpellStart() print("Wearbear from Lua!") if not self:GetCaster():HasModifier("modifier_teve_druid_shapeshifter_werebear") then EmitSoundOn("Hero_LoneDruid.TrueForm.Cast", self:GetCaster()) end end function teve_druid_shapeshifter_werebear:OnChannelFinish(interrupted) if interrupted then --TODO: put more shit here return end if self:GetCaster():HasModifier("modifier_teve_druid_shapeshifter_werebear") then --Removing werebear, going back to human. self:GetCaster():RemoveModifierByName("modifier_teve_druid_shapeshifter_werebear") return elseif self:GetCaster():HasModifier("modifier_teve_druid_shapeshifter_werewolf") then --Removing werewolf, going to werebear self:GetCaster():RemoveModifierByName("modifier_teve_druid_shapeshifter_werewolf") end --We might have been human or werewolf, who cares we are werebear now. 
self:GetCaster():AddNewModifier(self:GetCaster(), self, "modifier_teve_druid_shapeshifter_werebear", {}) end function teve_druid_shapeshifter_werebear:GetChannelTime() return 6 - self:GetLevel() end function teve_druid_shapeshifter_werebear:GetCastAnimation() return ACT_DOTA_CAST_ABILITY_4 end<file_sep>/game/scripts/vscripts/abilities/druid/summoner/LightningStorm.lua teve_druid_summoner_lightning_storm = class({}) LinkLuaModifier( "modifier_teve_druid_summoner_lightning_storm", "abilities/druid/summoner/modifier_teve_druid_summoner_lightning_storm", LUA_MODIFIER_MOTION_NONE ) function teve_druid_summoner_lightning_storm:OnSpellStart() print("Thunderstorm from Lua!") local kv = { duration = 2.5 --1.3s delay + 1.2s damage time } CreateModifierThinker( self:GetCaster(), self, "modifier_teve_druid_summoner_lightning_storm", kv, self:GetCursorPosition(), self:GetCaster():GetTeamNumber(), false ) end function teve_druid_summoner_lightning_storm:GetAOERadius() return 350 end function teve_druid_summoner_lightning_storm:GetCastAnimation() return ACT_DOTA_CAST_ABILITY_1 end function teve_druid_summoner_lightning_storm:GetManaCost(level) if level == -1 then level = self:GetLevel() end return 200 * level end<file_sep>/game/scripts/vscripts/abilities/druid/druid/FireStorm.lua teve_druid_druid_fire_storm = class({}) LinkLuaModifier( "modifier_teve_druid_druid_fire_storm", "abilities/druid/druid/modifier_teve_druid_druid_fire_storm", LUA_MODIFIER_MOTION_NONE ) function teve_druid_druid_fire_storm:OnSpellStart() print("Fire Storm from Lua!") local kv = { duration = 1.7 --0.7s delay + 1.0s damage time } CreateModifierThinker( self:GetCaster(), self, "modifier_teve_druid_druid_fire_storm", kv, self:GetCursorPosition(), self:GetCaster():GetTeamNumber(), false ) end function teve_druid_druid_fire_storm:OnAbilityPhaseStart() EmitSoundOn("Hero_AbyssalUnderlord.Firestorm.Cast", self:GetCaster()) return true end function teve_druid_druid_fire_storm:GetAOERadius() return 350 end function 
teve_druid_druid_fire_storm:GetCastAnimation() return ACT_DOTA_CAST_ABILITY_2 end function teve_druid_druid_fire_storm:GetManaCost(level) if level == -1 then level = self:GetLevel() end return 60 + (20 * level) end<file_sep>/game/scripts/vscripts/abilities/druid/summoner/modifier_teve_druid_summoner_armageddon.lua modifier_teve_druid_summoner_armageddon = class({}) function modifier_teve_druid_summoner_armageddon:OnCreated( kv ) self.armageddon_damage = (5000 * self:GetAbility():GetLevel()) / 2 --divided by 2 because tick_rate is 0.5 self.tick_rate = 0.5 if IsServer() then self:StartIntervalThink( self.tick_rate ) local hTarget = self:GetParent():GetAbsOrigin() local nFXIndex = ParticleManager:CreateParticle("particles/hero/druid/summoner/summoner_armageddon_burnfire.vpcf", PATTACH_POINT_FOLLOW, self:GetParent() ) ParticleManager:SetParticleControlEnt(nFXIndex, 0, self:GetParent(), PATTACH_POINT_FOLLOW, "attach_hitloc", hTarget, true) self:AddParticle( nFXIndex, false, false, -1, false, false) EmitSoundOn("Hero_WarlockGolem.Spawn_Loop", self:GetParent() ) end end function modifier_teve_druid_summoner_armageddon:OnIntervalThink() if IsServer() then local damage = { attacker = self:GetCaster(), victim = self:GetParent(), damage = self.armageddon_damage, damage_type = DAMAGE_TYPE_MAGICAL, ability = self:GetAbility() } ApplyDamage( damage ) end end function modifier_teve_druid_summoner_armageddon:OnDestroy() StopSoundOn("Hero_WarlockGolem.Spawn_Loop", self:GetParent() ) end<file_sep>/game/scripts/vscripts/abilities/druid/shapeshifter/modifier_teve_druid_shapeshifter_werebear.lua modifier_teve_druid_shapeshifter_werebear = class({}) function modifier_teve_druid_shapeshifter_werebear:OnCreated(kv) if IsServer() then self:GetCaster():SetAttackCapability(DOTA_UNIT_CAP_MELEE_ATTACK) end end function modifier_teve_druid_shapeshifter_werebear:OnDestroy(kv) if IsServer() then self:GetCaster():SetAttackCapability(DOTA_UNIT_CAP_RANGED_ATTACK) end end function 
-- (continuation of a definition begun above this chunk)
-- Werebear form modifier: declares every property the bear shape overrides.
modifier_teve_druid_shapeshifter_werebear:DeclareFunctions()
	local funcs = {
		MODIFIER_PROPERTY_MODEL_CHANGE,
		MODIFIER_PROPERTY_MOVESPEED_BASE_OVERRIDE,
		MODIFIER_PROPERTY_STATS_STRENGTH_BONUS,
		MODIFIER_PROPERTY_STATS_AGILITY_BONUS,
		MODIFIER_PROPERTY_STATS_INTELLECT_BONUS,
		MODIFIER_PROPERTY_BASEATTACK_BONUSDAMAGE,
		MODIFIER_PROPERTY_ATTACK_RANGE_BONUS,
		MODIFIER_PROPERTY_BASE_ATTACK_TIME_CONSTANT,
		MODIFIER_PROPERTY_PHYSICAL_ARMOR_BONUS
	}
	return funcs
end

-- Swap the hero model to the spirit bear while this modifier is active.
function modifier_teve_druid_shapeshifter_werebear:GetModifierModelChange()
	return "models/items/lone_druid/bear/spirit_of_the_atniw/spirit_of_the_atniw.vmdl"
end

-- Fixed move speed while in bear form.
function modifier_teve_druid_shapeshifter_werebear:GetModifierMoveSpeedOverride(params)
	return 340
end

function modifier_teve_druid_shapeshifter_werebear:GetModifierBonusStats_Strength(params)
	--Shapeshifter has 65, we want 80. therefore add 80-65
	return 80-65
end

function modifier_teve_druid_shapeshifter_werebear:GetModifierBonusStats_Agility(params)
	--Shapeshifter has 65, we want 80. therefore add 80-65
	return 80-65
end

function modifier_teve_druid_shapeshifter_werebear:GetModifierBonusStats_Intellect(params)
	--Shapeshifter has 80, we want 100. therefore add 100-80
	return 100-80
end

function modifier_teve_druid_shapeshifter_werebear:GetModifierBaseAttack_BonusDamage(params)
	--Shapeshifter has 210 base damage, we want to end up with 1450 "base" damage
	return 1450-210
end

function modifier_teve_druid_shapeshifter_werebear:GetModifierAttackRangeBonus(params)
	--Shapeshifter has 500 attack range, all our melee heroes have 100 range.
	return 100-500
end

function modifier_teve_druid_shapeshifter_werebear:GetModifierBaseAttackTimeConstant(params)
	return 1.5
end

function modifier_teve_druid_shapeshifter_werebear:GetModifierPhysicalArmorBonus(params)
	--Shapeshifter already has 3 armor, we want 125
	return 125-3
end
<file_sep>/game/scripts/vscripts/abilities/druid/runemaster/modifier_teve_druid_runemaster_feral_rage.lua
-- Feral Rage buff: flat bonus attack damage plus lifesteal on physical attacks.
modifier_teve_druid_runemaster_feral_rage = class ({})

-- Cache the damage bonus and attach the rage particle to the bearer.
function modifier_teve_druid_runemaster_feral_rage:OnCreated( kv )
	self.feral_rage_damage = 500 * self:GetAbility():GetLevel()
	local hTarget = self:GetParent():GetAbsOrigin()
	local nFXIndex = ParticleManager:CreateParticle( "particles/hero/druid/runemaster/runemasterferalrage.vpcf", PATTACH_ABSORIGIN_FOLLOW, self:GetParent() )
	ParticleManager:SetParticleControl( nFXIndex, 0, hTarget)
	self:AddParticle( nFXIndex, false, false, -1, false, false)
end

function modifier_teve_druid_runemaster_feral_rage:OnRefresh ( kv )
	self.feral_rage_damage = 500 * self:GetAbility():GetLevel()
end

function modifier_teve_druid_runemaster_feral_rage:DeclareFunctions()
	local funcs = {
		MODIFIER_PROPERTY_PREATTACK_BONUS_DAMAGE,
		MODIFIER_EVENT_ON_ATTACK_LANDED
	}
	return funcs
end

function modifier_teve_druid_runemaster_feral_rage:GetModifierPreAttack_BonusDamage( params )
	return 500 * self:GetAbility():GetLevel()
end

-- Lifesteal: heal the caster for a fraction of post-armour physical damage dealt.
-- NOTE(review): armour mitigation is re-applied manually here from params["damage"] —
-- confirm params["damage"] is pre-mitigation in this engine version.
function modifier_teve_druid_runemaster_feral_rage:OnAttackLanded(params)
	if params["attacker"] == self:GetCaster() and params["damage_type"] == DAMAGE_TYPE_PHYSICAL then
		-- 5% lifesteal per started block of 10 ability levels.
		self.lifesteal_pct = 0.05 * math.ceil( self:GetAbility():GetLevel() / 10 )
		self.target_armour = params["target"]:GetPhysicalArmorValue()
		self.armour_constant = 0.06 --constant is 0.06 in dota but 0.01 in teve. change value to 0.01 once teve armour is implemented
		self.damage_mult = 1 - (self.armour_constant * self.target_armour / (1 + (self.armour_constant * self.target_armour)))
		self.total_damage = params["damage"] * self.damage_mult
		self.target_hp = params["target"]:GetHealth()
		-- Cap the heal at the target's remaining health so overkill does not over-heal.
		if self.total_damage < self.target_hp then
			self.heal_amount = self.lifesteal_pct * self.total_damage
		else
			self.heal_amount = self.lifesteal_pct * self.target_hp
		end
		print("Lifestealing for " .. self.heal_amount)
		self:GetCaster():Heal(self.heal_amount, self:GetCaster())
		SendOverheadEventMessage(self:GetCaster(), OVERHEAD_ALERT_HEAL, self:GetCaster(), math.floor(self.heal_amount), nil)
	end
end

function modifier_teve_druid_runemaster_feral_rage:GetEffectName()
	return "particles/items2_fx/mask_of_madness.vpcf"
end

function modifier_teve_druid_runemaster_feral_rage:GetEffectAttachType()
	return "follow_hitloc"
end
<file_sep>/game/scripts/vscripts/abilities/attributes.lua
-- Custom attribute system: mirrors the hero's current agi/int into the stack
-- counts of two helper modifiers that rescale Dota's stat bonuses to TEvE values.
teve_attributes = class({})
LinkLuaModifier( "modifier_teve_attributes_agi", "abilities/modifier_teve_attributes_agi", LUA_MODIFIER_MOTION_NONE )
LinkLuaModifier( "modifier_teve_attributes_int", "abilities/modifier_teve_attributes_int", LUA_MODIFIER_MOTION_NONE )

-- Invoked when stat bonuses are recalculated; lazily creates the helper
-- modifiers, then syncs their stack counts with the hero's live attributes.
function teve_attributes:OnHeroCalculateStatBonus()
	--print("Stat bonuses changed?!")
	local caster = self:GetCaster()
	local modifier_agi = caster:FindModifierByName("modifier_teve_attributes_agi")
	if modifier_agi == nil then
		modifier_agi = caster:AddNewModifier( self:GetCaster(), self, "modifier_teve_attributes_agi", nil )
	end
	local modifier_int = caster:FindModifierByName("modifier_teve_attributes_int")
	if modifier_int == nil then
		modifier_int = caster:AddNewModifier( self:GetCaster(), self, "modifier_teve_attributes_int", nil )
	end
	--Check if its different before setting, and if any are different, calculate stat bonus?
	if modifier_agi:GetStackCount() ~= math.floor(caster:GetAgility()) or modifier_int:GetStackCount() ~= math.floor(caster:GetIntellect()) then
		--print("Stack count differs, update!")
		--print("Expected AGI: "..modifier_agi:GetStackCount()..", Actual AGI: "..caster:GetAgility())
		--print("Expected INT: "..modifier_int:GetStackCount()..", Actual INT: "..caster:GetIntellect())
		-- NOTE(review): the comparison floors the attribute but SetStackCount
		-- receives the unfloored value — confirm this mismatch is intended.
		modifier_agi:SetStackCount(caster:GetAgility())
		modifier_int:SetStackCount(caster:GetIntellect())
		self:GetCaster():CalculateStatBonus()
	end
end
<file_sep>/game/scripts/vscripts/abilities/druid/runemaster/modifier_teve_druid_runemaster_ferocity.lua
-- Runemaster Ferocity passive: chance-based critical strike plus flat evasion.
modifier_teve_druid_runemaster_ferocity = class({})

function modifier_teve_druid_runemaster_ferocity:OnCreated(kv)
	self.args = {}
	self.args.chance = self:GetAbility():GetLevel() -- crit chance in percent (level = chance)
	self.args.multiplier = 300                      -- crit damage, in percent
end

function modifier_teve_druid_runemaster_ferocity:OnRefresh(kv)
	self.args.chance = self:GetAbility():GetLevel()
	self.args.multiplier = 300
end

function modifier_teve_druid_runemaster_ferocity:DeclareFunctions()
	local funcs = {
		MODIFIER_PROPERTY_PREATTACK_CRITICALSTRIKE,
		MODIFIER_PROPERTY_EVASION_CONSTANT
	}
	return funcs
end

-- Server-side roll: return the crit multiplier on success, false otherwise.
function modifier_teve_druid_runemaster_ferocity:GetModifierPreAttack_CriticalStrike(params)
	if IsServer() and RandomInt(1, 100) <= self.args.chance then
		return self.args.multiplier
	else
		return false
	end
end

-- +10 evasion per started block of 10 ability levels.
function modifier_teve_druid_runemaster_ferocity:GetModifierEvasion_Constant(params)
	return math.floor((self:GetAbility():GetLevel() + 9) / 10) * 10
end
<file_sep>/README.md
# TwilightsEveORPG
A Dota 2 Rewrite of the Twilight's Eve ORPG map by Green Day Dance
<file_sep>/game/scripts/vscripts/abilities/novice/Frenzy.lua
-- Novice Frenzy: self-buff granting attack/move speed for 10 seconds.
teve_novice_frenzy = class({})
LinkLuaModifier( "modifier_teve_novice_frenzy", "abilities/novice/modifier_teve_novice_frenzy", LUA_MODIFIER_MOTION_NONE )

function teve_novice_frenzy:OnSpellStart()
	self:GetCaster():AddNewModifier(self:GetCaster(), self, "modifier_teve_novice_frenzy", {duration = 10.0})
end
<file_sep>/game/scripts/vscripts/abilities/druid/shapeshifter/Ferocity.lua
-- Shapeshifter Ferocity: passive wrapper that only attaches its intrinsic modifier.
teve_druid_shapeshifter_ferocity = class({})
LinkLuaModifier("modifier_teve_druid_shapeshifter_ferocity", "abilities/druid/shapeshifter/modifier_teve_druid_shapeshifter_ferocity.lua", LUA_MODIFIER_MOTION_NONE)

function teve_druid_shapeshifter_ferocity:GetIntrinsicModifierName()
	return 'modifier_teve_druid_shapeshifter_ferocity'
end
<file_sep>/game/scripts/vscripts/abilities/druid/druid/modifier_teve_druid_druid_thorns_aura.lua
-- Thorns aura carrier: hidden aura applying the thorns buff to nearby allies.
modifier_teve_druid_druid_thorns_aura = class({})

function modifier_teve_druid_druid_thorns_aura:IsAura()
	return true
end

function modifier_teve_druid_druid_thorns_aura:IsHidden()
	return true
end

function modifier_teve_druid_druid_thorns_aura:GetAuraRadius()
	return 500
end

-- Name of the buff modifier granted to units inside the aura.
function modifier_teve_druid_druid_thorns_aura:GetModifierAura()
	return "modifier_teve_druid_druid_thorns_aura_buff"
end

-- function modifier_teve_druid_druid_thorns_aura:GetEffectName()
-- 	return "particles/hero/druid/druid/thorns_aura.vpcf"
-- end

-- function modifier_teve_druid_druid_thorns_aura:GetEffectAttachType()
-- 	return PATTACH_ABSORIGIN_FOLLOW
-- end

function modifier_teve_druid_druid_thorns_aura:GetAuraSearchTeam()
	return DOTA_UNIT_TARGET_TEAM_FRIENDLY
end

function modifier_teve_druid_druid_thorns_aura:GetAuraSearchType()
	return DOTA_UNIT_TARGET_HERO + DOTA_UNIT_TARGET_BASIC
end
<file_sep>/game/scripts/vscripts/addon_game_mode.lua
-- TODO: Move to some util file
require("libs/timers")

-- Recursively pretty-prints a table for debugging; `done` guards against cycles,
-- `indent` is the current tab depth. Keys are printed in sorted order.
function PrintTable(t, indent, done)
	--print ( string.format ('PrintTable type %s', type(keys)) )
	if type(t) ~= "table" then return end
	done = done or {}
	done[t] = true
	indent = indent or 0
	local l = {}
	for k, v in pairs(t) do
		table.insert(l, k)
	end
	table.sort(l)
	for k, v in ipairs(l) do
		-- Ignore FDesc
		if v ~= 'FDesc' then
			local value = t[v]
			if type(value) == "table" and not done[value] then
				done [value] = true
				print(string.rep ("\t", indent)..tostring(v)..":")
				PrintTable (value, indent + 2, done)
			elseif
-- (continuation of PrintTable) userdata values: print, then recurse into the metatable index.
type(value) == "userdata" and not done[value] then
				done [value] = true
				print(string.rep ("\t", indent)..tostring(v)..": "..tostring(value))
				PrintTable ((getmetatable(value) and getmetatable(value).__index) or getmetatable(value), indent + 2, done)
			else
				-- Prefer the FDesc description when one exists for this key.
				if t.FDesc and t.FDesc[v] then
					print(string.rep ("\t", indent)..tostring(t.FDesc[v]))
				else
					print(string.rep ("\t", indent)..tostring(v)..": "..tostring(value))
				end
			end
		end
	end
end

--PrintTable(package.loaded)

if TwilightsEveORPG == nil then
	TwilightsEveORPG = class({})
end

function Precache( context )
	--[[
	Precache things we know we'll use. Possible file types include (but not limited to):
		PrecacheResource( "model", "*.vmdl", context )
		PrecacheResource( "soundfile", "*.vsndevts", context )
		PrecacheResource( "particle", "*.vpcf", context )
		PrecacheResource( "particle_folder", "particles/folder", context )
	]]
end

-- Create the game mode when we activate
function Activate()
	GameRules.TwilightsEveORPG = TwilightsEveORPG()
	GameRules.TwilightsEveORPG:InitGameMode()
end

-- Configure the 300-level custom XP curve and hook the game events we need.
function TwilightsEveORPG:InitGameMode()
	local levels = {}
	for i = 1, 300 do
		levels[i] = getExpForLevel(i)
	end
	GameRules:GetGameModeEntity():SetCustomXPRequiredToReachNextLevel(levels)
	GameRules:GetGameModeEntity():SetUseCustomHeroLevels(true)
	ListenToGameEvent("npc_spawned", Dynamic_Wrap(TwilightsEveORPG, "OnNPCSpawned"), self)
	ListenToGameEvent("dota_player_gained_level", Dynamic_Wrap(TwilightsEveORPG, "OnPlayerGainedLevel"), self)
end

-- First spawn of a real hero: grant starter items and move them to the backpack slots.
function TwilightsEveORPG:OnNPCSpawned(keys)
	local npc = EntIndexToHScript(keys.entindex)
	-- when a hero first spawns
	if npc:IsRealHero() and npc.bFirstSpawned == nil then
		npc:AddItemByName("item_ring_of_protection")
		npc:AddItemByName("item_ring_of_protection")
		npc:AddItemByName("item_ring_of_protection")
		npc:SwapItems(DOTA_ITEM_SLOT_1, DOTA_ITEM_SLOT_7)
		npc:SwapItems(DOTA_ITEM_SLOT_2, DOTA_ITEM_SLOT_8)
		npc:SwapItems(DOTA_ITEM_SLOT_3, DOTA_ITEM_SLOT_9)
	end
end

-- On level-up: recompute ability points so total skill levels never exceed hero level.
function TwilightsEveORPG:OnPlayerGainedLevel( event )
	local player = EntIndexToHScript( event.player )
	local hero = player:GetAssignedHero()
	local currentCount = 0
	local maxCount = 0
	for i = 0, 5 do
		local ability = hero:GetAbilityByIndex(i)
		if (ability ~= nil) then
			currentCount = currentCount + ability:GetLevel()
			maxCount = maxCount + ability:GetMaxLevel()
		end
	end
	-- Never allow more skill levels than the hero's own level.
	if maxCount > hero:GetLevel() then
		maxCount = hero:GetLevel()
	end
	hero:SetAbilityPoints(maxCount - currentCount)
end

-- Cumulative XP needed to reach `level`: 0 / 100 / 450, then a linearly
-- growing increment (base 450 plus 100 per extra level) summed each level.
function getExpForLevel(level)
	if level <= 1 then
		return 0
	elseif level == 2 then
		return 100
	elseif level == 3 then
		return 450
	else
		local base = 450
		local exp = 450
		for i = 0, level - 3 do
			exp = exp + (base+100*i)
		end
		return exp
	end
end
<file_sep>/game/scripts/vscripts/abilities/druid/summoner/ArcticFreeze.lua
-- Arctic Freeze: AoE nuke centred on the caster that damages and chills enemies.
teve_druid_summoner_arctic_freeze = class({})
LinkLuaModifier( "modifier_teve_druid_summoner_arctic_freeze", "abilities/druid/summoner/modifier_teve_druid_summoner_arctic_freeze", LUA_MODIFIER_MOTION_NONE )

function teve_druid_summoner_arctic_freeze:OnSpellStart()
	local arctic_freeze_duration = 3
	local arctic_freeze_damage = self:GetLevel() * 1000
	local enemies = FindUnitsInRadius( self:GetCaster():GetTeamNumber(), self:GetCaster():GetOrigin(), self:GetCaster(), self:GetAOERadius(), DOTA_UNIT_TARGET_TEAM_ENEMY, DOTA_UNIT_TARGET_HERO + DOTA_UNIT_TARGET_BASIC, 0, 0, false )
	if #enemies > 0 then
		for _, enemy in pairs(enemies) do
			if enemy ~= nil and ( not enemy:IsMagicImmune() ) and ( not enemy:IsInvulnerable() ) then
				local damage = {
					victim = enemy,
					attacker = self:GetCaster(),
					damage = arctic_freeze_damage,
					damage_type = DAMAGE_TYPE_MAGICAL,
					ability = self
				}
				ApplyDamage( damage )
				enemy:AddNewModifier( self:GetCaster(), self, "modifier_teve_druid_summoner_arctic_freeze", { duration = arctic_freeze_duration } )
			end
		end
	end
	-- Visuals/audio: frost burst sized to the AoE, centred on the caster.
	self.aoe = self:GetAOERadius()
	self.caster = self:GetCaster():GetOrigin()
	local nFXIndex = ParticleManager:CreateParticle( "particles/hero/druid/summoner/summoner_arctic_freeze.vpcf", PATTACH_ABSORIGIN, self:GetCaster() )
	ParticleManager:SetParticleControl( nFXIndex, 0, self.caster )
	ParticleManager:SetParticleControl( nFXIndex, 1, Vector(self.aoe, 1, 1) )
	EmitSoundOn("Hero_Invoker.DeafeningBlast", self:GetCaster() )
end

function teve_druid_summoner_arctic_freeze:GetAOERadius()
	return 600
end

function teve_druid_summoner_arctic_freeze:GetCastAnimation()
	return ACT_DOTA_CAST_ABILITY_3
end

-- The engine passes -1 when querying the cost at the ability's current level.
function teve_druid_summoner_arctic_freeze:GetManaCost(level)
	if level == -1 then
		level = self:GetLevel()
	end
	return 150 * level
end
<file_sep>/game/scripts/vscripts/abilities/druid/runemaster/Ferocity.lua
-- Runemaster Ferocity: passive wrapper that only attaches its intrinsic modifier.
teve_druid_runemaster_ferocity = class({})
LinkLuaModifier("modifier_teve_druid_runemaster_ferocity", "abilities/druid/runemaster/modifier_teve_druid_runemaster_ferocity.lua", LUA_MODIFIER_MOTION_NONE)

function teve_druid_runemaster_ferocity:GetIntrinsicModifierName()
	return 'modifier_teve_druid_runemaster_ferocity'
end
<file_sep>/game/scripts/vscripts/abilities/druid/shaman/Regeneration.lua
-- Shaman Regeneration: single-target ally heal-over-time lasting 15 seconds.
teve_druid_shaman_regeneration = class({})
LinkLuaModifier( "modifier_teve_druid_shaman_regeneration", "abilities/druid/shaman/modifier_teve_druid_shaman_regeneration", LUA_MODIFIER_MOTION_NONE )

function teve_druid_shaman_regeneration:OnSpellStart()
	print("Regeneration!!")
	local hTarget = self:GetCursorTarget()
	local regeneration_duration = 15
	hTarget:AddNewModifier( self:GetCaster(), self, "modifier_teve_druid_shaman_regeneration", { duration = regeneration_duration } )
	EmitSoundOn("Hero_Juggernaut.HealingWard.Cast", self:GetCaster() )
end

-- Only non-magic-immune friendly heroes are valid targets.
function teve_druid_shaman_regeneration:CastFilterResultTarget( hTarget )
	local nResult = UnitFilter( hTarget, DOTA_UNIT_TARGET_TEAM_FRIENDLY, DOTA_UNIT_TARGET_HERO, DOTA_UNIT_TARGET_FLAG_NOT_MAGIC_IMMUNE_ALLIES, self:GetTeamNumber() )
	if nResult ~= UF_SUCCESS then
		return nResult
	end
	return UF_SUCCESS
end

-- The engine passes -1 when querying the cost at the ability's current level.
function teve_druid_shaman_regeneration:GetManaCost(level)
	if level == -1 then
		level = self:GetLevel()
	end
	return 50 + (25 * level)
end
<file_sep>/game/scripts/vscripts/abilities/druid/shapeshifter/Werewolf.lua
-- Shapeshifter Werewolf: channelled shapeshift that toggles between
-- human / werebear / werewolf forms when the channel completes.
teve_druid_shapeshifter_werewolf = class({})
LinkLuaModifier( "modifier_teve_druid_shapeshifter_werewolf", "abilities/druid/shapeshifter/modifier_teve_druid_shapeshifter_werewolf", LUA_MODIFIER_MOTION_NONE )

function teve_druid_shapeshifter_werewolf:OnSpellStart()
	print("Wearwolf from Lua!")
	-- Only play the transform sound when actually entering wolf form.
	if not self:GetCaster():HasModifier("modifier_teve_druid_shapeshifter_werewolf") then
		EmitSoundOn("Hero_Lycan.Shapeshift.Cast", self:GetCaster())
	end
end

function teve_druid_shapeshifter_werewolf:OnChannelFinish(interrupted)
	if interrupted then
		--TODO: put more shit here
		return
	end
	if self:GetCaster():HasModifier("modifier_teve_druid_shapeshifter_werewolf") then
		--Removing werewolf, going back to human.
		self:GetCaster():RemoveModifierByName("modifier_teve_druid_shapeshifter_werewolf")
		return
	elseif self:GetCaster():HasModifier("modifier_teve_druid_shapeshifter_werebear") then
		--Removing werebear, going to werewolf
		self:GetCaster():RemoveModifierByName("modifier_teve_druid_shapeshifter_werebear")
	end
	--We might have been human or werebear, who cares we are werewolf now.
-- (continuation of teve_druid_shapeshifter_werewolf:OnChannelFinish)
self:GetCaster():AddNewModifier(self:GetCaster(), self, "modifier_teve_druid_shapeshifter_werewolf", {})
end

-- Channel shortens by one second per ability level.
function teve_druid_shapeshifter_werewolf:GetChannelTime()
	return 6 - self:GetLevel()
end

function teve_druid_shapeshifter_werewolf:GetCastAnimation()
	return ACT_DOTA_CAST_ABILITY_4
end
<file_sep>/game/scripts/vscripts/abilities/druid/runemaster/Lycanthrophy.lua
-- Runemaster Lycanthrophy: passive wrapper that only attaches its intrinsic modifier.
teve_druid_runemaster_lycanthrophy = class({})
LinkLuaModifier("modifier_teve_druid_runemaster_lycanthrophy", "abilities/druid/runemaster/modifier_teve_druid_runemaster_lycanthrophy.lua", LUA_MODIFIER_MOTION_NONE)

function teve_druid_runemaster_lycanthrophy:GetIntrinsicModifierName()
	return 'modifier_teve_druid_runemaster_lycanthrophy'
end
<file_sep>/game/scripts/vscripts/abilities/novice/modifier_teve_novice_attributes.lua
-- Novice Attributes passive: +2 to all three stats per ability level.
modifier_teve_novice_attributes = class({})

function modifier_teve_novice_attributes:DeclareFunctions()
	local funcs = {
		MODIFIER_PROPERTY_STATS_STRENGTH_BONUS,
		MODIFIER_PROPERTY_STATS_AGILITY_BONUS,
		MODIFIER_PROPERTY_STATS_INTELLECT_BONUS
	}
	return funcs
end

function modifier_teve_novice_attributes:GetModifierBonusStats_Strength(params)
	return self:GetAbility():GetLevel() * 2
end

function modifier_teve_novice_attributes:GetModifierBonusStats_Agility(params)
	return self:GetAbility():GetLevel() * 2
end

function modifier_teve_novice_attributes:GetModifierBonusStats_Intellect(params)
	return self:GetAbility():GetLevel() * 2
end
<file_sep>/game/scripts/vscripts/abilities/druid/druid/Rejuvenation.lua
-- Druid Rejuvenation: single-target ally heal-over-time (15 seconds).
teve_druid_druid_rejuvenation = class({})
LinkLuaModifier( "modifier_teve_druid_druid_rejuvenation", "abilities/druid/druid/modifier_teve_druid_druid_rejuvenation", LUA_MODIFIER_MOTION_NONE )

function teve_druid_druid_rejuvenation:OnSpellStart()
	print("Rejuvenation!!")
	local hTarget = self:GetCursorTarget()
	local rejuvenation_duration = 15
	hTarget:AddNewModifier( self:GetCaster(), self, "modifier_teve_druid_druid_rejuvenation", { duration = rejuvenation_duration } )
	EmitSoundOn("n_creep_ForestTrollHighPriest.Heal", hTarget)
end

-- Only non-magic-immune friendly heroes are valid targets.
function teve_druid_druid_rejuvenation:CastFilterResultTarget( hTarget )
	local nResult = UnitFilter( hTarget, DOTA_UNIT_TARGET_TEAM_FRIENDLY, DOTA_UNIT_TARGET_HERO, DOTA_UNIT_TARGET_FLAG_NOT_MAGIC_IMMUNE_ALLIES, self:GetTeamNumber() )
	if nResult ~= UF_SUCCESS then
		return nResult
	end
	return UF_SUCCESS
end

function teve_druid_druid_rejuvenation:GetCastAnimation()
	return ACT_DOTA_CAST_ABILITY_3
end

-- The engine passes -1 when querying the cost at the ability's current level.
function teve_druid_druid_rejuvenation:GetManaCost(level)
	if level == -1 then
		level = self:GetLevel()
	end
	return 40 + (20 * level)
end
<file_sep>/game/scripts/vscripts/abilities/novice/modifier_teve_novice_frenzy.lua
-- Novice Frenzy buff: level-scaled attack speed plus a flat +20% move speed.
modifier_teve_novice_frenzy = class({})

function modifier_teve_novice_frenzy:DeclareFunctions()
	local funcs = {
		MODIFIER_PROPERTY_ATTACKSPEED_BONUS_CONSTANT,
		MODIFIER_PROPERTY_MOVESPEED_BONUS_PERCENTAGE
	}
	return funcs
end

function modifier_teve_novice_frenzy:GetEffectName()
	return "particles/items2_fx/mask_of_madness.vpcf"
end

function modifier_teve_novice_frenzy:GetEffectAttachType()
	return "follow_hitloc"
end

function modifier_teve_novice_frenzy:GetModifierAttackSpeedBonus_Constant(params)
	return 20 * (self:GetAbility():GetLevel() + 1)
end

function modifier_teve_novice_frenzy:GetModifierMoveSpeedBonus_Percentage(params)
	return 20
end
<file_sep>/game/scripts/vscripts/abilities/druid/shapeshifter/modifier_teve_druid_shapeshifter_lycanthrophy.lua
-- Shapeshifter Lycanthrophy passive: attack speed per level, move speed per 20 levels.
modifier_teve_druid_shapeshifter_lycanthrophy = class({})

function modifier_teve_druid_shapeshifter_lycanthrophy:DeclareFunctions()
	local funcs = {
		MODIFIER_PROPERTY_MOVESPEED_BONUS_PERCENTAGE,
		MODIFIER_PROPERTY_ATTACKSPEED_BONUS_CONSTANT
	}
	return funcs
end

function modifier_teve_druid_shapeshifter_lycanthrophy:GetModifierAttackSpeedBonus_Constant(params)
	return 3 * self:GetAbility():GetLevel()
end

-- +3% move speed per started block of 20 ability levels.
function modifier_teve_druid_shapeshifter_lycanthrophy:GetModifierMoveSpeedBonus_Percentage(params)
	return math.floor((self:GetAbility():GetLevel() + 19) / 20) * 3
end
<file_sep>/game/scripts/vscripts/abilities/druid/druid/modifier_teve_druid_druid_rejuvenation.lua
-- Rejuvenation HoT: constant health regen totalling 100 HP per level over the 15 s duration.
modifier_teve_druid_druid_rejuvenation = class ({})

function modifier_teve_druid_druid_rejuvenation:DeclareFunctions()
	local funcs = {
		MODIFIER_PROPERTY_HEALTH_REGEN_CONSTANT
	}
	return funcs
end

function modifier_teve_druid_druid_rejuvenation:GetModifierConstantHealthRegen ( params )
	--100 total health / 15 seconds
	return (100 / 15) * self:GetAbility():GetLevel()
end

function modifier_teve_druid_druid_rejuvenation:GetEffectName()
	return "particles/items_fx/healing_tango.vpcf"
end
<file_sep>/game/scripts/vscripts/abilities/druid/runemaster/FeralRage.lua
-- Feral Rage: 10 s self-buff; the roar sound depends on the current shapeshift form.
teve_druid_runemaster_feral_rage = class({})
LinkLuaModifier( "modifier_teve_druid_runemaster_feral_rage", "abilities/druid/runemaster/modifier_teve_druid_runemaster_feral_rage", LUA_MODIFIER_MOTION_NONE )

function teve_druid_runemaster_feral_rage:OnSpellStart()
	local feral_rage_duration = 10
	self:GetCaster():AddNewModifier( self:GetCaster(), self, "modifier_teve_druid_runemaster_feral_rage", { duration = feral_rage_duration } )
	if self:GetCaster():HasModifier("modifier_teve_druid_runemaster_werebear") then
		EmitSoundOn("Hero_LoneDruid.SavageRoar.Cast", self:GetCaster())
	elseif self:GetCaster():HasModifier("modifier_teve_druid_runemaster_werewolf") then
		EmitSoundOn("Hero_Lycan.Howl", self:GetCaster())
	else
		EmitSoundOn("Hero_Beastmaster.Primal_Roar", self:GetCaster())
	end
end

-- The engine passes -1 when querying the cost at the ability's current level.
function teve_druid_runemaster_feral_rage:GetManaCost(level)
	if level == -1 then
		level = self:GetLevel()
	end
	return 200 * level
end
<file_sep>/game/scripts/vscripts/abilities/druid/archdruid/Werewolf.lua
-- ArchDruid Werewolf: channelled shapeshift that toggles between
-- human / werebear / werewolf forms when the channel completes.
teve_druid_archdruid_werewolf = class({})
LinkLuaModifier( "modifier_teve_druid_archdruid_werewolf", "abilities/druid/archdruid/modifier_teve_druid_archdruid_werewolf", LUA_MODIFIER_MOTION_NONE )

function teve_druid_archdruid_werewolf:OnSpellStart()
	print("Wearwolf from Lua!")
	-- Only play the transform sound when actually entering wolf form.
	if not self:GetCaster():HasModifier("modifier_teve_druid_archdruid_werewolf") then
		EmitSoundOn("Hero_Lycan.Shapeshift.Cast", self:GetCaster())
	end
end

function teve_druid_archdruid_werewolf:OnChannelFinish(interrupted)
	if interrupted then
		--TODO: put more shit here
		return
	end
	if self:GetCaster():HasModifier("modifier_teve_druid_archdruid_werewolf") then
		--Removing werewolf, going back to human.
		self:GetCaster():RemoveModifierByName("modifier_teve_druid_archdruid_werewolf")
		return
	elseif self:GetCaster():HasModifier("modifier_teve_druid_archdruid_werebear") then
		--Removing werebear, going to werewolf
		self:GetCaster():RemoveModifierByName("modifier_teve_druid_archdruid_werebear")
	end
	--We might have been human or werebear, who cares we are werewolf now.
	self:GetCaster():AddNewModifier(self:GetCaster(), self, "modifier_teve_druid_archdruid_werewolf", {})
end

-- Channel shortens by one second per ability level.
function teve_druid_archdruid_werewolf:GetChannelTime()
	return 6 - self:GetLevel()
end

function teve_druid_archdruid_werewolf:GetCastAnimation()
	return ACT_DOTA_CAST_ABILITY_2
end
<file_sep>/game/scripts/vscripts/abilities/druid/summoner/Tornado.lua
-- Tornado: point- or unit-targeted linear projectile nuke; damage scales with level.
teve_druid_summoner_tornado = class({})

function teve_druid_summoner_tornado:OnSpellStart()
	print("Tornado from Lua!")
	self.summoner_tornado_speed = 1000
	self.summoner_tornado_width_initial = 150
	self.summoner_tornado_width_end = 200
	self.summoner_tornado_distance = 500
	self.summoner_tornado_damage = 1200 * self:GetLevel()
	--Do we want unit targetting??? nope
	local vPos = nil
	if self:GetCursorTarget() then
		vPos = self:GetCursorTarget():GetOrigin()
	else
		vPos = self:GetCursorPosition()
	end
	local vDirection = vPos - self:GetCaster():GetOrigin()
	vDirection.z = 0.0
	vDirection = vDirection:Normalized()
	-- Scale the speed up so the effective travel time matches the full distance.
	self.summoner_tornado_speed = self.summoner_tornado_speed * ( self.summoner_tornado_distance / ( self.summoner_tornado_distance - self.summoner_tornado_width_initial ) )
	local info = {
		EffectName = "particles/units/heroes/hero_invoker/invoker_tornado.vpcf",
		Ability = self,
		vSpawnOrigin = self:GetCaster():GetOrigin(),
		fStartRadius = self.summoner_tornado_width_initial,
		fEndRadius = self.summoner_tornado_width_end,
		vVelocity = vDirection * self.summoner_tornado_speed,
		fDistance = self.summoner_tornado_distance,
		Source = self:GetCaster(),
		iUnitTargetTeam = DOTA_UNIT_TARGET_TEAM_ENEMY,
		iUnitTargetType = DOTA_UNIT_TARGET_HERO + DOTA_UNIT_TARGET_BASIC
	}
	ProjectileManager:CreateLinearProjectile ( info )
	EmitSoundOn ( "Hero_Invoker.Tornado.Cast", self:GetCaster() )
end

-- Damage each unit the projectile passes through; returning false keeps it travelling.
function teve_druid_summoner_tornado:OnProjectileHit ( hTarget, vLocation )
	if hTarget ~= nil and ( not hTarget:IsMagicImmune() ) and ( not hTarget:IsInvulnerable() ) then
		local damage = {
			victim = hTarget,
			attacker = self:GetCaster(),
			damage = self.summoner_tornado_damage,
			damage_type = DAMAGE_TYPE_MAGICAL,
			ability = self
		}
		ApplyDamage( damage )
		local vDirection = vLocation - self:GetCaster():GetOrigin()
		vDirection.z = 0.0
		vDirection = vDirection:Normalized()
	end
	return false
end

-- The engine passes -1 when querying the cost at the ability's current level.
function teve_druid_summoner_tornado:GetManaCost(level)
	if level == -1 then
		level = self:GetLevel()
	end
	return 200 * level
end

function teve_druid_summoner_tornado:GetCastAnimation()
	return ACT_DOTA_CAST_ABILITY_1
end
<file_sep>/game/scripts/vscripts/abilities/novice/Attributes.lua
-- Novice Attributes: passive wrapper that only attaches its intrinsic modifier.
teve_novice_attributes = class({})
LinkLuaModifier("modifier_teve_novice_attributes", "abilities/novice/modifier_teve_novice_attributes", LUA_MODIFIER_MOTION_NONE)

function
-- (continuation) Intrinsic modifier name for the Novice Attributes passive.
teve_novice_attributes:GetIntrinsicModifierName()
	return "modifier_teve_novice_attributes"
end
<file_sep>/game/scripts/vscripts/abilities/druid/archdruid/Thunderstorm.lua
-- Thunderstorm: ground-targeted AoE whose damage is handled by a thinker modifier.
teve_druid_archdruid_thunderstorm = class({})
LinkLuaModifier( "modifier_teve_druid_archdruid_thunderstorm_thinker", "abilities/druid/archdruid/modifier_teve_druid_archdruid_thunderstorm_thinker", LUA_MODIFIER_MOTION_NONE )

function teve_druid_archdruid_thunderstorm:OnSpellStart()
	print("Thunderstorm from Lua!")
	local kv = {
		duration = 2.5 --1.3s delay + 1.2s damage time
	}
	CreateModifierThinker( self:GetCaster(), self, "modifier_teve_druid_archdruid_thunderstorm_thinker", kv, self:GetCursorPosition(), self:GetCaster():GetTeamNumber(), false )
end

function teve_druid_archdruid_thunderstorm:GetAOERadius()
	return 350
end

function teve_druid_archdruid_thunderstorm:GetCastAnimation()
	return ACT_DOTA_CAST_ABILITY_4
end

-- The engine passes -1 when querying the cost at the ability's current level.
function teve_druid_archdruid_thunderstorm:GetManaCost(level)
	if level == -1 then
		level = self:GetLevel()
	end
	return 50 + (25 * level )
end
<file_sep>/game/scripts/vscripts/abilities/druid/shapeshifter/modifier_teve_druid_shapeshifter_ferocity.lua
-- Shapeshifter Ferocity passive: chance-based crit plus evasion per 20 levels.
modifier_teve_druid_shapeshifter_ferocity = class({})

function modifier_teve_druid_shapeshifter_ferocity:OnCreated(kv)
	self.args = {}
	self.args.chance = self:GetAbility():GetLevel() -- crit chance in percent (level = chance)
	self.args.multiplier = 250                      -- crit damage, in percent
end

function modifier_teve_druid_shapeshifter_ferocity:OnRefresh(kv)
	self.args.chance = self:GetAbility():GetLevel()
	self.args.multiplier = 250
end

function modifier_teve_druid_shapeshifter_ferocity:DeclareFunctions()
	local funcs = {
		MODIFIER_PROPERTY_PREATTACK_CRITICALSTRIKE,
		MODIFIER_PROPERTY_EVASION_CONSTANT
	}
	return funcs
end

-- Server-side roll: return the crit multiplier on success, false otherwise.
function modifier_teve_druid_shapeshifter_ferocity:GetModifierPreAttack_CriticalStrike(params)
	if IsServer() and RandomInt(1, 100) <= self.args.chance then
		return self.args.multiplier
	else
		return false
	end
end

-- +10 evasion per started block of 20 ability levels.
function modifier_teve_druid_shapeshifter_ferocity:GetModifierEvasion_Constant(params)
	return math.floor((self:GetAbility():GetLevel() + 19) / 20) * 10
end
<file_sep>/game/scripts/vscripts/abilities/druid/summoner/Armageddon.lua
-- Armageddon: ground-targeted AoE pure-damage nuke that also applies a 5 s debuff.
teve_druid_summoner_armageddon = class({})
LinkLuaModifier( "modifier_teve_druid_summoner_armageddon", "abilities/druid/summoner/modifier_teve_druid_summoner_armageddon", LUA_MODIFIER_MOTION_NONE )

function teve_druid_summoner_armageddon:OnSpellStart()
	print("ARMAGEDDON!!!!")
	local kv = {
		duration = 5
	}
	local armageddon_damage = self:GetLevel() * 3000
	local enemies = FindUnitsInRadius( self:GetCaster():GetTeamNumber(), self:GetCursorPosition(), self:GetCaster(), self:GetAOERadius(), DOTA_UNIT_TARGET_TEAM_ENEMY, DOTA_UNIT_TARGET_HERO + DOTA_UNIT_TARGET_BASIC, 0, 0, false )
	if #enemies > 0 then
		for _, enemy in pairs(enemies) do
			-- Pure damage: only invulnerability is checked, not magic immunity.
			if enemy ~= nil and ( not enemy:IsInvulnerable() ) then
				local damage = {
					victim = enemy,
					attacker = self:GetCaster(),
					damage = armageddon_damage,
					damage_type = DAMAGE_TYPE_PURE,
					ability = self
				}
				ApplyDamage( damage )
				enemy:AddNewModifier( self:GetCaster(), self, "modifier_teve_druid_summoner_armageddon", kv )
			end
		end
	end
	-- Visuals/audio: one-shot blast sized to the AoE at the target point.
	self.aoe = self:GetAOERadius()
	self.target = self:GetCursorPosition()
	local nFXIndex = ParticleManager:CreateParticle( "particles/hero/druid/summoner/summoner_armageddon.vpcf", PATTACH_CUSTOMORIGIN, nil)
	ParticleManager:SetParticleControl( nFXIndex, 0, self.target )
	ParticleManager:SetParticleControl( nFXIndex, 4, Vector(self.aoe, 1, 1) )
	ParticleManager:ReleaseParticleIndex( nFXIndex )
	EmitSoundOnLocationWithCaster( self.target, "Hero_OgreMagi.Fireblast.Target", self:GetCaster() )
end

function teve_druid_summoner_armageddon:GetAOERadius()
	return 600
end

function teve_druid_summoner_armageddon:GetCastAnimation()
	return ACT_DOTA_CAST_ABILITY_4
end

-- The engine passes -1 when querying the cost at the ability's current level.
function teve_druid_summoner_armageddon:GetManaCost(level)
	if level == -1 then
		level = self:GetLevel()
	end
	return 250 * level
end
<file_sep>/game/scripts/vscripts/abilities/druid/druid/modifier_teve_druid_druid_disabling_roots.lua
-- Disabling Roots debuff: roots the target and ticks magical damage every 0.5 s.
modifier_teve_druid_druid_disabling_roots = class ({})

function modifier_teve_druid_druid_disabling_roots:OnCreated()
	self.disabling_roots_damage = 50 + (10 * self:GetAbility():GetLevel()) / 2 --divided by 2 because tick_rate is 0.5
	self.tick_rate = 0.5
	if IsServer() then
		self:StartIntervalThink( self.tick_rate )
		local caster = self:GetCaster()
		local hTarget = self:GetParent():GetAbsOrigin()
		local nFXIndex = ParticleManager:CreateParticle( "particles/hero/druid/druid/druid_disabling_roots.vpcf", PATTACH_ABSORIGIN, self:GetParent() )
		ParticleManager:SetParticleControlEnt( nFXIndex, 0, self:GetParent(), PATTACH_ABSORIGIN, "attach_origin", hTarget, true)
		ParticleManager:SetParticleControlEnt( nFXIndex, 1, self:GetParent(), PATTACH_ABSORIGIN, "attach_origin", hTarget, true)
		EmitSoundOn( "Hero_Treant.Overgrowth.Target", self:GetParent() )
	end
end

-- Per-tick damage application (server only).
function modifier_teve_druid_druid_disabling_roots:OnIntervalThink()
	if IsServer() then
		local damage = {
			attacker = self:GetCaster(),
			victim = self:GetParent(),
			damage = self.disabling_roots_damage,
			damage_type = DAMAGE_TYPE_MAGICAL,
			ability = self:GetAbility()
		}
		ApplyDamage( damage )
	end
end

function modifier_teve_druid_druid_disabling_roots:IsDebuff()
	return true
end

-- NOTE(review): `state` is assigned globally here (no `local`) — confirm intended.
function modifier_teve_druid_druid_disabling_roots:CheckState()
	state = {
		[MODIFIER_STATE_ROOTED] = true
	}
	return state
end
<file_sep>/game/scripts/vscripts/abilities/modifier_teve_attributes_int.lua
-- Intellect compatibility modifier: rescales Dota's per-int bonuses to TEvE's
-- values; stack count mirrors the hero's intellect (set from attributes.lua).
modifier_teve_attributes_int = class({})

function modifier_teve_attributes_int:OnCreated( kv )
	if IsServer() then
		self:GetParent():CalculateStatBonus()
	end
end

function modifier_teve_attributes_int:OnRefresh( kv )
	if IsServer() then
		self:GetParent():CalculateStatBonus()
	end
end

function modifier_teve_attributes_int:DeclareFunctions()
	local funcs = {
		MODIFIER_PROPERTY_MANA_BONUS,
		MODIFIER_PROPERTY_MANA_REGEN_PERCENTAGE,
		MODIFIER_PROPERTY_SPELL_AMPLIFY_PERCENTAGE
	}
	return funcs
end

function modifier_teve_attributes_int:GetModifierManaBonus(params)
	return 2*self:GetStackCount() --Dota gives 13 mana per int, TEvE gives 15, so two per int will make them compatible.
end

function modifier_teve_attributes_int:GetModifierPercentageManaRegen(params)
	return 0 - (0.03*self:GetStackCount()) -- Dota gives 4% mana regen per level, teve gives 1%, taking 3% per level away should make them compatible.
end

-- Negative spell amp per int to cancel Dota's built-in int-based amplification.
function modifier_teve_attributes_int:GetModifierSpellAmplify_Percentage(params)
	print(0 - (0.0625*self:GetStackCount()) .. " Spell Amplifiy Percentage")
	return 0 - (0.0625*self:GetStackCount())
end
<file_sep>/game/scripts/vscripts/abilities/druid/summoner/modifier_teve_druid_summoner_arctic_freeze.lua
-- Arctic Freeze debuff: slows attack and move speed with a frostbite particle/sound.
modifier_teve_druid_summoner_arctic_freeze = class ({})

function modifier_teve_druid_summoner_arctic_freeze:OnCreated()
	local caster = self:GetCaster()
	local hTarget = self:GetParent():GetAbsOrigin()
	local nFXIndex = ParticleManager:CreateParticle( "particles/units/heroes/hero_crystalmaiden/maiden_frostbite_buff.vpcf", PATTACH_POINT_FOLLOW, self:GetParent() )
	ParticleManager:SetParticleControlEnt( nFXIndex, 0, self:GetParent(), PATTACH_POINT_FOLLOW, "attach_hitloc", hTarget, true)
	self:AddParticle( nFXIndex, false, false, -1, false, false)
	EmitSoundOn("hero_Crystal.frostbite", self:GetParent() )
end

function modifier_teve_druid_summoner_arctic_freeze:DeclareFunctions()
	local funcs = {
		MODIFIER_PROPERTY_MOVESPEED_BONUS_PERCENTAGE,
		MODIFIER_PROPERTY_ATTACKSPEED_BONUS_CONSTANT
	}
	return funcs
end

function modifier_teve_druid_summoner_arctic_freeze:GetModifierAttackSpeedBonus_Constant(params)
	return -10
end

function modifier_teve_druid_summoner_arctic_freeze:GetModifierMoveSpeedBonus_Percentage(params)
	return -20
end

function modifier_teve_druid_summoner_arctic_freeze:IsDebuff()
	return true
end

-- Stop the looping frostbite audio when the debuff expires or is purged.
function modifier_teve_druid_summoner_arctic_freeze:OnDestroy()
	StopSoundOn("hero_Crystal.frostbite", self:GetParent() )
end
<file_sep>/game/scripts/vscripts/abilities/druid/druid/DisablingRoots.lua
-- Disabling Roots: targeted root whose duration grows by one second every 2 levels.
teve_druid_druid_disabling_roots = class({})
LinkLuaModifier( "modifier_teve_druid_druid_disabling_roots", "abilities/druid/druid/modifier_teve_druid_druid_disabling_roots", LUA_MODIFIER_MOTION_NONE )

function teve_druid_druid_disabling_roots:OnSpellStart()
	print("Disabling Roots from Lua!")
	local hTarget = self:GetCursorTarget()
	local kv = {
		duration = math.ceil(self:GetLevel() / 2) -- +1 second every 2 levels
	}
	hTarget:AddNewModifier( self:GetCaster(), self, "modifier_teve_druid_druid_disabling_roots", kv )
end

-- Any enemy hero or creep is a valid target.
function teve_druid_druid_disabling_roots:CastFilterResultTarget( hTarget )
	local nResult = UnitFilter( hTarget, DOTA_UNIT_TARGET_TEAM_ENEMY, DOTA_UNIT_TARGET_HERO + DOTA_UNIT_TARGET_BASIC, 0, self:GetTeamNumber() )
	if nResult ~= UF_SUCCESS then
		return nResult
	end
	return UF_SUCCESS
end

function teve_druid_druid_disabling_roots:GetCastAnimation()
	return ACT_DOTA_CAST_ABILITY_3
end

-- The engine passes -1 when querying the cost at the ability's current level.
function teve_druid_druid_disabling_roots:GetManaCost(level)
	if level == -1 then
		level = self:GetLevel()
	end
	return 40 + (20 * level)
end
<file_sep>/game/scripts/vscripts/abilities/druid/shaman/Entangle.lua
-- Entangle: targeted 3 s root with Treant Overgrowth audio cues.
teve_druid_shaman_entangle = class({})
LinkLuaModifier( "modifier_teve_druid_shaman_entangle", "abilities/druid/shaman/modifier_teve_druid_shaman_entangle", LUA_MODIFIER_MOTION_NONE )

function teve_druid_shaman_entangle:OnSpellStart()
	print("Entangle from Lua!")
	local hTarget = self:GetCursorTarget()
	hTarget:AddNewModifier( self:GetCaster(), self, "modifier_teve_druid_shaman_entangle", { duration = 3 } )
	EmitSoundOnLocationWithCaster( hTarget:GetOrigin(),"Hero_Treant.Overgrowth.Target", self:GetCaster() )
	EmitSoundOnLocationWithCaster( hTarget:GetOrigin(),"Hero_Treant.Overgrowth.Cast", self:GetCaster() )
end

-- Play the wind-up sound; returning true lets the cast proceed.
function teve_druid_shaman_entangle:OnAbilityPhaseStart()
	EmitSoundOn("Hero_Treant.Overgrowth.CastAnim", self:GetCaster() )
	return true
end

-- Any enemy hero or creep is a valid target.
function teve_druid_shaman_entangle:CastFilterResultTarget( hTarget )
	local nResult = UnitFilter( hTarget, DOTA_UNIT_TARGET_TEAM_ENEMY, DOTA_UNIT_TARGET_HERO + DOTA_UNIT_TARGET_BASIC, 0, self:GetTeamNumber() )
	if nResult ~= UF_SUCCESS then
		return nResult
	end
	return UF_SUCCESS
end

function teve_druid_shaman_entangle:GetCastAnimation()
	return ACT_DOTA_CAST_ABILITY_2
end

-- The engine passes -1 when querying the cost at the ability's current level.
function teve_druid_shaman_entangle:GetManaCost(level)
	if level == -1 then
		level = self:GetLevel()
	end
	return 25 + (25 * level)
end
<file_sep>/game/scripts/vscripts/abilities/druid/summoner/modifier_teve_druid_summoner_regeneration.lua
-- Summoner Regeneration buff: large constant HP/mana regen with Chen teleport audio.
modifier_teve_druid_summoner_regeneration = class ({})

function modifier_teve_druid_summoner_regeneration:OnCreated( kv )
	local caster = self:GetCaster()
	local hTarget = self:GetParent():GetAbsOrigin()
	local nFXIndex = ParticleManager:CreateParticle( "particles/hero/druid/summoner/summoner_regeneration.vpcf", PATTACH_POINT_FOLLOW, self:GetParent() )
	ParticleManager:SetParticleControlEnt( nFXIndex, 0, self:GetParent(), PATTACH_POINT_FOLLOW, "attach_hitloc", hTarget, true)
	ParticleManager:SetParticleControlEnt( nFXIndex, 1, self:GetParent(), PATTACH_POINT_FOLLOW, "attach_hitloc", hTarget, true)
	self:AddParticle( nFXIndex, false, false, -1, false, false)
	EmitSoundOn("Hero_Chen.TeleportLoop", self:GetParent() )
end

function modifier_teve_druid_summoner_regeneration:DeclareFunctions()
	local funcs = {
		MODIFIER_PROPERTY_MANA_REGEN_CONSTANT,
		MODIFIER_PROPERTY_HEALTH_REGEN_CONSTANT
	}
	return funcs
end

function modifier_teve_druid_summoner_regeneration:GetModifierConstantManaRegen ( params )
	--600 total mana / 5 seconds
	return (600/5) * self:GetAbility():GetLevel()
end

function modifier_teve_druid_summoner_regeneration:GetModifierConstantHealthRegen ( params )
	--400 total health / 5 seconds
	return (400/5) * self:GetAbility():GetLevel()
end

-- Stop the loop and play the teleport-out cue when the buff ends.
function modifier_teve_druid_summoner_regeneration:OnDestroy()
	EmitSoundOn("Hero_Chen.TeleportOut", self:GetParent() )
	StopSoundOn("Hero_Chen.TeleportLoop", self:GetParent() )
end
<file_sep>/game/scripts/vscripts/abilities/druid/archdruid/modifier_teve_druid_archdruid_werewolf.lua
-- ArchDruid Werewolf form modifier: switches the hero to a melee wolf shape
-- with overridden model, speed, stats, damage, range, BAT and armour.
modifier_teve_druid_archdruid_werewolf = class({})

-- Entering wolf form: force melee attacks (server only).
function modifier_teve_druid_archdruid_werewolf:OnCreated(kv)
	if IsServer() then
		self:GetCaster():SetAttackCapability(DOTA_UNIT_CAP_MELEE_ATTACK)
	end
end

-- Leaving wolf form: restore ranged attacks (server only).
function modifier_teve_druid_archdruid_werewolf:OnDestroy(kv)
	if IsServer() then
		self:GetCaster():SetAttackCapability(DOTA_UNIT_CAP_RANGED_ATTACK)
	end
end

function modifier_teve_druid_archdruid_werewolf:DeclareFunctions()
	local funcs = {
		MODIFIER_PROPERTY_MODEL_CHANGE,
		MODIFIER_PROPERTY_MOVESPEED_BASE_OVERRIDE,
		MODIFIER_PROPERTY_STATS_STRENGTH_BONUS,
		MODIFIER_PROPERTY_STATS_AGILITY_BONUS,
		MODIFIER_PROPERTY_STATS_INTELLECT_BONUS,
		MODIFIER_PROPERTY_BASEATTACK_BONUSDAMAGE,
		MODIFIER_PROPERTY_ATTACK_RANGE_BONUS,
		MODIFIER_PROPERTY_BASE_ATTACK_TIME_CONSTANT,
		MODIFIER_PROPERTY_PHYSICAL_ARMOR_BONUS
	}
	return funcs
end

function modifier_teve_druid_archdruid_werewolf:GetModifierModelChange()
	return "models/items/beastmaster/boar/fotw_wolf/fotw_wolf.vmdl"
end

function modifier_teve_druid_archdruid_werewolf:GetModifierMoveSpeedOverride(params)
	return 380
end

function modifier_teve_druid_archdruid_werewolf:GetModifierBonusStats_Strength(params)
	return 20
end

function modifier_teve_druid_archdruid_werewolf:GetModifierBonusStats_Agility(params)
	return 20
end

function modifier_teve_druid_archdruid_werewolf:GetModifierBonusStats_Intellect(params)
	return 20
end

function modifier_teve_druid_archdruid_werewolf:GetModifierBaseAttack_BonusDamage(params)
	--ArchDruid has 50 base damage, we want to end up with 500 "base" damage
	return 500-50
end

-- (definition continues past this chunk)
function modifier_teve_druid_archdruid_werewolf:GetModifierAttackRangeBonus(params)
	--ArchDruid has 500 attack range, all our melee heroes have 100 range.
return 100-500 end function modifier_teve_druid_archdruid_werewolf:GetModifierBaseAttackTimeConstant(params) return 1.5 end function modifier_teve_druid_archdruid_werewolf:GetModifierPhysicalArmorBonus(params) --ArchDruid already has 2 armor, we want 5 return 5-2 end<file_sep>/game/scripts/vscripts/abilities/druid/druid/ThornsAura.lua teve_druid_druid_thorns_aura = class({}) LinkLuaModifier( "modifier_teve_druid_druid_thorns_aura", "abilities/druid/druid/modifier_teve_druid_druid_thorns_aura", LUA_MODIFIER_MOTION_NONE ) LinkLuaModifier( "modifier_teve_druid_druid_thorns_aura_buff", "abilities/druid/druid/modifier_teve_druid_druid_thorns_aura_buff", LUA_MODIFIER_MOTION_NONE ) function teve_druid_druid_thorns_aura:GetIntrinsicModifierName() return "modifier_teve_druid_druid_thorns_aura" end <file_sep>/game/scripts/vscripts/abilities/druid/shaman/LightningFury.lua teve_druid_shaman_lightning_fury = class({}) LinkLuaModifier( "modifier_teve_druid_shaman_lightning_fury", "abilities/druid/shaman/modifier_teve_druid_shaman_lightning_fury", LUA_MODIFIER_MOTION_NONE ) function teve_druid_shaman_lightning_fury:OnSpellStart() print("Lightning Fury from Lua!") local hTarget = self:GetCursorTarget() local bounces = 4 local bounce_aoe = 500 local bounce_delay = 0.2 self.damage = 130 * self:GetLevel() self.caster = self:GetCaster() -- PARTICLE STUFF -- local attack_attack1 = self.caster:ScriptLookupAttachment("attach_attack1") -- local start_position = self.caster:GetAttachmentOrigin(attach_attack1) local start_position = self.caster:GetAbsOrigin() start_position.z = start_position.z + hTarget:GetBoundingMaxs().z -- END PARTICLE STUFF local next_target_position = self:LightningBounce(hTarget, start_position) local hit_targets = {} hit_targets[hTarget:GetEntityIndex()] = true Timers:CreateTimer(bounce_delay, function() local units = FindUnitsInRadius(self:GetCaster():GetTeamNumber(), next_target_position, hTarget, bounce_aoe, DOTA_UNIT_TARGET_TEAM_ENEMY, DOTA_UNIT_TARGET_HERO + 
DOTA_UNIT_TARGET_BASIC, 0, 0, true) if #units > 0 then local bounce_target for _,unit in pairs(units) do local entIdx = unit:GetEntityIndex() if not hit_targets[entIdx] then bounce_target = unit hit_targets[entIdx] = true break end end if bounce_target then next_target_position = self:LightningBounce(bounce_target, next_target_position) bounces = bounces - 1 if bounces > 0 then return bounce_delay end end end end) end function teve_druid_shaman_lightning_fury:CastFilterResultTarget( hTarget ) local nResult = UnitFilter( hTarget, DOTA_UNIT_TARGET_TEAM_ENEMY, DOTA_UNIT_TARGET_HERO + DOTA_UNIT_TARGET_BASIC, 0, self:GetTeamNumber() ) if nResult ~= UF_SUCCESS then return nResult end return UF_SUCCESS end function teve_druid_shaman_lightning_fury:GetCastAnimation() return ACT_DOTA_CAST_ABILITY_2 end function teve_druid_shaman_lightning_fury:GetManaCost(level) if level == -1 then level = self:GetLevel() end return 50 + (30 * level) end function teve_druid_shaman_lightning_fury:LightningBounce(target, start_position) -- PARTICLE STUFF local attach_hitloc = target:ScriptLookupAttachment("attach_hitloc") local target_position = target:GetAttachmentOrigin(attach_hitloc) local particle = ParticleManager:CreateParticle("particles/items_fx/chain_lightning.vpcf", PATTACH_CUSTOMORIGIN, self.caster) ParticleManager:SetParticleControl(particle, 0, start_position) ParticleManager:SetParticleControl(particle, 1, target_position) -- END PARTICLE STUFF EmitSoundOn("Hero_Zuus.ArcLightning.Target", target ) damage_table = { victim = target, attacker = self:GetCaster(), damage = self.damage, ability = self, damage_type = DAMAGE_TYPE_MAGICAL } ApplyDamage(damage_table) return target_position end <file_sep>/game/scripts/vscripts/abilities/druid/runemaster/modifier_teve_druid_runemaster_werewolf.lua modifier_teve_druid_runemaster_werewolf = class({}) function modifier_teve_druid_runemaster_werewolf:OnCreated(kv) if IsServer() then 
self:GetCaster():SetAttackCapability(DOTA_UNIT_CAP_MELEE_ATTACK) end end function modifier_teve_druid_runemaster_werewolf:OnDestroy(kv) if IsServer() then self:GetCaster():SetAttackCapability(DOTA_UNIT_CAP_RANGED_ATTACK) end end function modifier_teve_druid_runemaster_werewolf:DeclareFunctions() local funcs = { MODIFIER_PROPERTY_MODEL_CHANGE, MODIFIER_PROPERTY_MOVESPEED_BASE_OVERRIDE, MODIFIER_PROPERTY_STATS_STRENGTH_BONUS, MODIFIER_PROPERTY_STATS_AGILITY_BONUS, MODIFIER_PROPERTY_STATS_INTELLECT_BONUS, MODIFIER_PROPERTY_BASEATTACK_BONUSDAMAGE, MODIFIER_PROPERTY_ATTACK_RANGE_BONUS, MODIFIER_PROPERTY_BASE_ATTACK_TIME_CONSTANT, MODIFIER_PROPERTY_PHYSICAL_ARMOR_BONUS } return funcs end function modifier_teve_druid_runemaster_werewolf:GetModifierModelChange() return "models/heroes/lycan/lycan_wolf.vmdl" end function modifier_teve_druid_runemaster_werewolf:GetModifierMoveSpeedOverride(params) return 460 end function modifier_teve_druid_runemaster_werewolf:GetModifierBonusStats_Strength(params) --Runemaster has 80, we want 120. therefore add 120-80 return 120-80 end function modifier_teve_druid_runemaster_werewolf:GetModifierBonusStats_Agility(params) --Runemaster has 80, we want 120. therefore add 120-80 return 120-80 end function modifier_teve_druid_runemaster_werewolf:GetModifierBonusStats_Intellect(params) --Runemaster has 100, we want 150. therefore add 150-100 return 150-100 end function modifier_teve_druid_runemaster_werewolf:GetModifierBaseAttack_BonusDamage(params) --Runemaster has 635 base damage, we want to end up with 16000 "base" damage return 16000-635 end function modifier_teve_druid_runemaster_werewolf:GetModifierAttackRangeBonus(params) --Runemaster has 500 attack range, all our melee heroes have 100 range. 
return 100-500 end function modifier_teve_druid_runemaster_werewolf:GetModifierBaseAttackTimeConstant(params) return 0.8 end function modifier_teve_druid_runemaster_werewolf:GetModifierPhysicalArmorBonus(params) --Runemaster already has 5 armor, we want 1000 return 1000-5 end<file_sep>/game/scripts/vscripts/abilities/druid/archdruid/Werebear.lua teve_druid_archdruid_werebear = class({}) LinkLuaModifier( "modifier_teve_druid_archdruid_werebear", "abilities/druid/archdruid/modifier_teve_druid_archdruid_werebear", LUA_MODIFIER_MOTION_NONE ) function teve_druid_archdruid_werebear:OnSpellStart() print("Wearbear from Lua!") if not self:GetCaster():HasModifier("modifier_teve_druid_archdruid_werebear") then EmitSoundOn("Hero_LoneDruid.TrueForm.Cast", self:GetCaster()) end end function teve_druid_archdruid_werebear:OnChannelFinish(interrupted) if interrupted then --TODO: put more shit here return end if self:GetCaster():HasModifier("modifier_teve_druid_archdruid_werebear") then --Removing werebear, going back to human. self:GetCaster():RemoveModifierByName("modifier_teve_druid_archdruid_werebear") return elseif self:GetCaster():HasModifier("modifier_teve_druid_archdruid_werewolf") then --Removing werewolf, going to werebear self:GetCaster():RemoveModifierByName("modifier_teve_druid_archdruid_werewolf") end --We might have been human or werewolf, who cares we are werebear now. 
self:GetCaster():AddNewModifier(self:GetCaster(), self, "modifier_teve_druid_archdruid_werebear", {}) end function teve_druid_archdruid_werebear:GetChannelTime() return 6 - self:GetLevel() end function teve_druid_archdruid_werebear:GetCastAnimation() return ACT_DOTA_CAST_ABILITY_2 end<file_sep>/game/scripts/vscripts/abilities/modifier_teve_attributes_agi.lua modifier_teve_attributes_agi = class({}) function modifier_teve_attributes_agi:OnCreated( kv ) if IsServer() then self:GetParent():CalculateStatBonus() end end function modifier_teve_attributes_agi:OnRefresh( kv ) if IsServer() then self:GetParent():CalculateStatBonus() end end function modifier_teve_attributes_agi:DeclareFunctions() local funcs = { MODIFIER_PROPERTY_PHYSICAL_ARMOR_BONUS, MODIFIER_PROPERTY_ATTACKSPEED_BONUS_CONSTANT } return funcs end function modifier_teve_attributes_agi:GetModifierAttackSpeedBonus_Constant(params) return 0 - self:GetStackCount() --Dota gives 1 attackspeed per agi, TEvE doesn't so we subtract 1 per agi. end function modifier_teve_attributes_agi:GetModifierPhysicalArmorBonus(params) return (self:GetStackCount() / 12) - (self:GetStackCount() / 7) --Dota gives 1 armor per 7, we give 1 armor per 12, so we subtract dotas, and add ours. 
end<file_sep>/game/scripts/vscripts/abilities/druid/druid/modifier_teve_druid_druid_fire_storm.lua modifier_teve_druid_druid_fire_storm = class({}) function modifier_teve_druid_druid_fire_storm:OnCreated( kv ) self.fire_storm_damage = 30 * self:GetAbility():GetLevel() self.tick_rate = 0.1 self.iter = 0 if IsServer() then self:StartIntervalThink( self.tick_rate ) end end function modifier_teve_druid_druid_fire_storm:OnIntervalThink() if IsServer() then if self.iter > 7 and self.iter % 4 == 0 then --trigger if 0.7 seconds have passed and it is an interval of 0.4 for the damage local enemies = FindUnitsInRadius( self:GetParent():GetTeamNumber(), self:GetParent():GetOrigin(), self:GetParent(), 350, DOTA_UNIT_TARGET_TEAM_ENEMY, DOTA_UNIT_TARGET_HERO + DOTA_UNIT_TARGET_BASIC, 0, 0, false ) if #enemies > 0 then for _,enemy in pairs(enemies) do if enemy ~= nil and ( not enemy:IsMagicImmune() ) and ( not enemy:IsInvulnerable() ) then local damage = { attacker = self:GetCaster(), victim = enemy, damage = self.fire_storm_damage, damage_type = DAMAGE_TYPE_MAGICAL, ability = self:GetAbility() } ApplyDamage(damage) end end end local aoe = self:GetAbility():GetAOERadius() local target = self:GetParent():GetAbsOrigin() local nFXIndex = ParticleManager:CreateParticle( "particles/hero/druid/druid/druid_fire_storm.vpcf", PATTACH_CUSTOMORIGIN, nil) ParticleManager:SetParticleControl( nFXIndex, 0, target ) ParticleManager:SetParticleControl( nFXIndex, 4, Vector(aoe, 1, 1) ) ParticleManager:ReleaseParticleIndex( nFXIndex ) EmitSoundOnLocationWithCaster(self:GetAbility():GetCursorPosition(), "Hero_AbyssalUnderlord.Firestorm", self:GetCaster()) end self.iter = self.iter + 1 end end <file_sep>/game/scripts/vscripts/abilities/druid/runemaster/modifier_teve_druid_runemaster_lycanthrophy.lua modifier_teve_druid_runemaster_lycanthrophy = class({}) function modifier_teve_druid_runemaster_lycanthrophy:DeclareFunctions() local funcs = { MODIFIER_PROPERTY_MOVESPEED_BONUS_PERCENTAGE, 
MODIFIER_PROPERTY_ATTACKSPEED_BONUS_CONSTANT } return funcs end function modifier_teve_druid_runemaster_lycanthrophy:GetModifierAttackSpeedBonus_Constant(params) return 4 * self:GetAbility():GetLevel() end function modifier_teve_druid_runemaster_lycanthrophy:GetModifierMoveSpeedBonus_Percentage(params) return math.floor((self:GetAbility():GetLevel() + 9) / 10) * 4 end
c3ab8968d9f6ecc72d4ad11e15e5ebde00128227
[ "Markdown", "Lua" ]
53
Lua
METABYTECODE/TwilightsEveORPG
8d410376aac542f43f4ccf3ecd0d5158b98d3891
f287f6b9376520bc6047c7bccce4592dea6e8310
refs/heads/master
<file_sep>#!/usr/bin/env sh set -e TOOLS=./build/tools $TOOLS/caffe train \ --solver=examples/cifar10/cifar10_full_solver_cpu.prototxt $@ # reduce learning rate by factor of 10 $TOOLS/caffe train \ --solver=examples/cifar10/cifar10_full_solver_lr1_cpu.prototxt \ --snapshot=examples/cifar10/cifar10_full_cpu_iter_60000.solverstate $@ # reduce learning rate by factor of 10 $TOOLS/caffe train \ --solver=examples/cifar10/cifar10_full_solver_lr2_cpu.prototxt \ --snapshot=examples/cifar10/cifar10_full_cpu_iter_65000.solverstate $@ <file_sep>#!/usr/bin/env sh set -e TOOLS=./build/tools $TOOLS/caffe train \ --solver=examples/cifar10/cifar10_full_solver_gpu_winograd.prototxt $@ # reduce learning rate by factor of 10 $TOOLS/caffe train \ --solver=examples/cifar10/cifar10_full_solver_lr1_gpu_winograd.prototxt \ --snapshot=examples/cifar10/cifar10_full_gpu_winograd_iter_60000.solverstate $@ # reduce learning rate by factor of 10 $TOOLS/caffe train \ --solver=examples/cifar10/cifar10_full_solver_lr2_gpu_winograd.prototxt \ --snapshot=examples/cifar10/cifar10_full_gpu_winograd_iter_65000.solverstate $@ <file_sep>#!/usr/bin/env sh set -e TOOLS=./build/tools $TOOLS/caffe train \ --solver=examples/cifar10/cifar10_full_solver_gpu.prototxt $@ # reduce learning rate by factor of 10 $TOOLS/caffe train \ --solver=examples/cifar10/cifar10_full_solver_lr1_gpu.prototxt \ --snapshot=examples/cifar10/cifar10_full_gpu_iter_60000.solverstate $@ # reduce learning rate by factor of 10 $TOOLS/caffe train \ --solver=examples/cifar10/cifar10_full_solver_lr2_gpu.prototxt \ --snapshot=examples/cifar10/cifar10_full_gpu_iter_65000.solverstate $@ <file_sep>#!/bin/sh HERE=$(cd $(dirname $0); pwd -P) ECHO=$(which echo) FILE=cp2k-perf.txt RUNS="23_23_23 4_6_9 13_5_7 24_3_36" if [ "" != "$1" ]; then FILE=$1 shift fi if [ "" != "$1" ]; then SIZE=$1 shift else SIZE=0 fi cat /dev/null > ${FILE} NRUN=1 NMAX=$(${ECHO} ${RUNS} | wc -w) for RUN in ${RUNS} ; do MVALUE=$(${ECHO} ${RUN} | cut -d_ -f1) NVALUE=$(${ECHO} 
${RUN} | cut -d_ -f2) KVALUE=$(${ECHO} ${RUN} | cut -d_ -f3) >&2 ${ECHO} -n "${NRUN} of ${NMAX} (M=${MVALUE} N=${NVALUE} K=${KVALUE})... " ERROR=$({ CHECK=1 ${HERE}/cp2k.sh ${MVALUE} ${SIZE} 0 ${NVALUE} ${KVALUE} >> ${FILE}; } 2>&1) RESULT=$? if [ 0 != ${RESULT} ]; then ${ECHO} "FAILED(${RESULT}) ${ERROR}" exit 1 else ${ECHO} "OK ${ERROR}" fi ${ECHO} >> ${FILE} NRUN=$((NRUN+1)) done <file_sep>/****************************************************************************** ** Copyright (c) 2016-2017, Intel Corporation ** ** All rights reserved. ** ** ** ** Redistribution and use in source and binary forms, with or without ** ** modification, are permitted provided that the following conditions ** ** are met: ** ** 1. Redistributions of source code must retain the above copyright ** ** notice, this list of conditions and the following disclaimer. ** ** 2. Redistributions in binary form must reproduce the above copyright ** ** notice, this list of conditions and the following disclaimer in the ** ** documentation and/or other materials provided with the distribution. ** ** 3. Neither the name of the copyright holder nor the names of its ** ** contributors may be used to endorse or promote products derived ** ** from this software without specific prior written permission. ** ** ** ** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ** ** "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT ** ** LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ** ** A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT ** ** HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, ** ** SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED ** ** TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR ** ** PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF ** ** LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING ** ** NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS ** ** SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ** ******************************************************************************/ /* <NAME> (Intel Corp.) ******************************************************************************/ #ifndef LIBXSMM_SPMV_HPP #define LIBXSMM_SPMV_HPP #include <libxsmm_macros.h> #include <libxsmm_spmdm.h> typedef struct libxsmm_spmv_handle { /* The following are the matrix multiply dimensions: A (sparse): m X k, x (dense): k X 1, Output y (dense): m X 1 */ /* In general, the convention is calling the dimension of A as m x n but we use m x k convention instead to be consistent with spmdm */ int m; int k; /* The block sizes for A, B and C. */ /* Here we fix A to be divided into 128 X 128 blocks, B/C to be 128 X 48 for HSW/BDW and 128 X 96 for SKX */ int bm; int bk; /* The number of blocks for the m, n and k dimensions */ int mb; int kb; libxsmm_spmdm_datatype datatype; char * base_ptr_scratch_A; } libxsmm_spmv_handle; LIBXSMM_API void libxsmm_spmv_init( int M, int K, int max_threads, libxsmm_spmv_handle* handle, libxsmm_CSR_sparseslice** libxsmm_output_csr); LIBXSMM_API void libxsmm_spmv_destroy( libxsmm_spmv_handle * handle); /* Don't need libxsmm_spmv_get_num_*_blocks functions like spmdm because we assume a simple * 1-D blocking along rows of A. 
*/ /*LIBXSMM_API int libxsmm_spmv_get_num_createSparseSlice_blocks( const libxsmm_spmv_handle* handle); LIBXSMM_API int libxsmm_spmv_get_num_compute_blocks( const libxsmm_spmv_handle* handle);*/ /** This converts a dense representation of the sparse matrix to 2D array of sparse slices. */ LIBXSMM_API void libxsmm_spmv_createSparseSlice_fp32_thread( const libxsmm_spmv_handle* handle, char transA, const float * A, libxsmm_CSR_sparseslice* libxsmm_output_csr_a, int block_id, int tid, int nthreads); /*LIBXSMM_API void libxsmm_spmv_createSparseSlice_bfloat16_thread( const libxsmm_spmv_handle* handle, char transA, const uint16_t * A, libxsmm_CSR_sparseslice* libxsmm_output_csr_a, int block_id, int tid, int nthreads);*/ /** NOTE: This code currently ignores alpha input to the matrix multiply */ LIBXSMM_API void libxsmm_spmv_compute_fp32_thread( const libxsmm_spmv_handle* handle, char transA, const float *alpha, libxsmm_CSR_sparseslice* A_sparse, const float *B, const float *beta, float* C, int block_id, int tid, int nthreads); /** NOTE: This code currently ignores alpha input to the matrix multiply */ /*LIBXSMM_API void libxsmm_spmv_compute_bfloat16_thread( const libxsmm_spmv_handle* handle, char transA, const uint16_t *alpha, libxsmm_CSR_sparseslice* A_sparse, const uint16_t *B, const uint16_t *beta, float* C, int block_id, int tid, int nthreads);*/ #endif /*LIBXSMM_SPMV_HPP*/ <file_sep>#ifndef MATHLAB_H #define MATHLAB_H #define USE_MKL 1 #define USE_OPENBLAS 0 #if USE_MKL #include <mkl.h> #elif USE_OPENBLAS #include <cblas.h> #endif #endif<file_sep>#!/usr/bin/env sh set -e TOOLS=./build/tools $TOOLS/caffe train \ --solver=examples/cifar10/cifar10_full_solver_cpu_winograd.prototxt $@ # reduce learning rate by factor of 10 $TOOLS/caffe train \ --solver=examples/cifar10/cifar10_full_solver_lr1_cpu_winograd.prototxt \ --snapshot=examples/cifar10/cifar10_full_cpu_winograd_iter_60000.solverstate $@ # reduce learning rate by factor of 10 $TOOLS/caffe train \ 
--solver=examples/cifar10/cifar10_full_solver_lr2_cpu_winograd.prototxt \ --snapshot=examples/cifar10/cifar10_full_cpu_winograd_iter_65000.solverstate $@
32ab95a7880167d7af478cf8c7b69332c1a012be
[ "C++", "Shell" ]
7
Shell
Swas99/caffe
07dc14eafa0041a29685bd8cd6d1d6ca7915f786
0e920ee3fb87116c0412360187aa4b5f393f2c1f
refs/heads/master
<repo_name>morenotos/Monopoly<file_sep>/main.py from random import randint from time import sleep #import sys #from cards import card_chooser players_number = [] players_names = [] players_money = [] players_position = [] property_names = ['Go', 'Mediterranean Av', 'Community Chest', 'Baltic Av', 'Income Tax', 'Reading Railroad', 'Oriental Av', 'Chance', 'Vermont Av', 'Conneticut Av', 'Jail', 'St Charles Place', 'Electric Company', 'States Av', 'Virginia Av', 'Pennsylvania Railroad', 'St James Place', 'Community Chest', 'Tennesse Av', 'New York Av', 'Free Parking', 'Kentucky Av', 'Chance', 'Indiana Av', 'Illinois Av', 'B&O Railroad', 'Atlantic Av', 'Ventnor Av', 'Water Works', 'Marvin Gardens', 'Go to Jail!', 'Pacific Av', 'North Carolina Av', 'Community Chest', 'Pennsylvania Av', 'Short Line', 'Chance', 'Park Place', 'Luxury Tax', 'Boardwalk' ] property_price = [0, 60, 0, 60, 0, 200, 100, 0, 100, 120, 0, 140, 150, 140, 160, 200, 180, 0, 180, 200, 0,220, 0, 220, 240, 200, 260, 260, 150, 280, 0, 300, 300, 0, 320, 200, 0, 350, 0, 400 ] property_position = [1 , 2 , 3, 4, 5, 6, 7, 8, 9, 10,11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40] property_owner = ['No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner', 'No owner'] property_rent = [0, 2, 0, 4, 200, 25, 6, 0, 6, 8, 0, 10, 28, 10, 12, 25, 14, 0, 14, 16, 0, 18, 0, 18, 20, 25, 22, 22, 28, 24, 0, 26, 26, 0, 28, 25, 0, 35, 75, 50] property_has_home = ['No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 
'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No'] property_has_hotel = ['No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No', 'No'] print('------------------------') print('----- MONOPOLY -------') print('------------------------') number_of_players = int(input('How many players will play? ')) #creates the list with players' number for player in range(1, (number_of_players + 1)): players_number.append(player) #creates the lists with players' name, initial money and initial position for player in players_number: player_name = input('Player choose your name: ') players_names.append(player_name) players_money.append(1500) players_position.append(0) print(players_names) print(players_money) print(players_position) game_on = True turn = 0 #variable that stores wich player's turn it is money_in_free_parking = 0 #checks if the player has money left after he has payed for something def player_has_money(): if players_money[turn] < 0: print('You are broke, the game has ended for you') game_on = False else: print('You have now $' + str(players_money[turn]) + ' available') #This function runs when a player lands on Go, income tax, Jail, Free Parking, Go to Jail or luxury tax def special_properties(): global money_in_free_parking if players_position[turn] == 0: print('Your position is now ' + str(property_names[players_position[turn]])) elif players_position[turn] == 4: print('You have to pay income tax of $200 or 10% of your total money (the highest)') income_tax_10 = 0.1 * players_money[turn] if income_tax_10 > 200: players_money[turn] -= income_tax_10 print('You paid $' + str(income_tax_10) + ' of income tax') money_in_free_parking += income_tax_10 print('This money you paid will go to Free 
Parking.') print('The total money in Free Parking is $' + str(money_in_free_parking)) player_has_money() else: players_money[turn] -= 200 print('You paid $200 of income tax') money_in_free_parking += 200 print('This money you paid will go to Free Parking.') print('The total money in Free Parking is $' + str(money_in_free_parking)) player_has_money() elif players_position[turn] == 10: print('You are in ' + str(property_names[players_position[turn]]) + ', but you are just visiting' ) elif players_position[turn] == 20: print('Your position is now ' + str(property_names[players_position[turn]])) print('Congratulations! You earn $' + str(money_in_free_parking) + ' that was saved here') players_money[turn] += money_in_free_parking money_in_free_parking = 0 elif players_position[turn] == 30: print(str(property_names[players_position[turn]]) + ' You will not collect $200 and will go back straight to jail.') players_position[turn] = 10 elif players_position[turn] == 38: print('Luxury tax: you have to pay $75') players_money[turn] -= 75 money_in_free_parking += 75 print('This money you paid will go to Free Parking.') print('The total money in Free Parking is $' + str(money_in_free_parking)) player_has_money() #choses a card when a player lands on community chest or chance def card_chooser(x): number = turn #community chest cards #card that advances player to GO and gives him $200 def one(): print('You advance directly to GO and collect $200') players_position[number] = 0 players_money[number] += 200 player_has_money() #gives player $100 def two(): print('You inherit $100') players_money[number] += 100 player_has_money() #player collects $50 from every player. 
def three(): print('Collect $50 from every player') payment_from_players = 50 * (len(players_number) - 1) players_money[number] += payment_from_players for name in players_names: if name != players_names[number]: index = players_names.index(name) players_money[index] -= 50 print('You received a total of $' + str(payment_from_players)) player_has_money() #player receives $25 def four(): print ('You receive $25 for services') players_money[number] += 25 player_has_money() #sents player to Jail def five(): print('You go directly to Jail without passing GO and without collecting $200') players_position[number] = 10 player_has_money() #player receives $100 def six(): print('Xmas funds matures, you receive $100') players_money[number] += 100 player_has_money() #NOT COMPLETE YET!!! player recieves get-out-of-jail-free card def seven(): print('You now have a card to get out of jail for free') print('You can use it wenever you want or sell it') player_has_money() #player receives $10 def eight(): print('You have won 2nd place in a beauty contest, collect $10') players_money[number] += 10 player_has_money() #player receives $45 def nine(): print('From sale of stock you get $45') players_money[number] += 45 player_has_money() #players pays $100 for the hospital def ten(): global money_in_free_parking print('Pay $100 for hospital expenses') players_money[number] -= 100 money_in_free_parking += 100 print('This money you paid will go to Free Parking.') print('The total money in Free Parking is $' + str(money_in_free_parking)) player_has_money() #player receives $20 def eleven(): print('Income tax refund, you receive $20') players_money[number] += 20 player_has_money() #player pays $50 def twelve(): global money_in_free_parking print('For Doctor\'s fee you pay $50') players_money[number] -= 50 money_in_free_parking += 50 print('This money you paid will go to Free Parking.') print('The total money in Free Parking is $' + str(money_in_free_parking)) player_has_money() #player 
receives $200 def thirteen(): print('Bank error in your favor, you receive $200') players_money[number] += 200 player_has_money() #player receives $100 def fourteen(): print('Life insurance matures, you receive $100') players_money[number] += 100 player_has_money() #player pays $150 def fifteen(): global money_in_free_parking print('School tax, you pay $150') players_money[number] -= 150 money_in_free_parking += 150 print('This money you paid will go to Free Parking.') print('The total money in Free Parking is $' + str(money_in_free_parking)) player_has_money() #NOT COMPLETE YET!!! player pays $40 for each house and $115 for each hotel def sixteen(): print('For street repairs you pay $40 for each house and $115 for each hotel') player_has_money() #chance cards #player pays $50 to each payer def seventeen(): print('You are elected Chairman of the Board, pay each player $50') payment_to_players = 50 * (len(players_number) - 1) players_money[number] -= payment_to_players for name in players_names: if name != players_names[number]: index = players_names.index(name) players_money[index] += 50 print('You payed a total of $' + str(payment_to_players)) player_has_money() #player goes back 3 spaces def eighteen(): print('Go back three spaces') players_position[number] -= 3 print('You are now in ' + str(players_position[number])) player_has_money() #player advances to St Charles Place and collects $200 if he passes GO def nineteen(): print('You advance to ' + property_names[11]) if players_position[number] > 11: print('As you passed Go you receive $200') players_money[number] += 200 players_position[number] = 11 player_has_money() else: players_position[number] = 11 player_has_money() #player pays $15 def twenty(): global money_in_free_parking print('You pay a poor tax of $15') players_money[number] -= 15 money_in_free_parking += 15 print('This money you paid will go to Free Parking.') print('The total money in Free Parking is $' + str(money_in_free_parking)) 
player_has_money() #Player advances to Boardwalk def twentyone(): print('Take a walk on the board walk, you advance to Boardwalk') players_position[number] = 39 player_has_money() #advances player to the nearest railroad. def twentytwo(): if players_position[number] == 7: print('You will advance to ' + property_names[15]) players_position[number] == 15 player_has_money() elif players_position[number] == 22: print('You will advance to ' + property_names[25]) players_position[number] == 25 player_has_money() elif players_position[number] == 36: print('You will advance to ' + property_names[5]) players_position[number] == 5 player_has_money() #card that advances player to GO and gives him $200 def twentythree(): print('You advance directly to GO and collect $200') players_position[number] = 0 players_money[number] += 200 player_has_money() #NOT COMPLETE YET!!! player pays $25 for each house and $110 for each hotel def twentyfour(): print('Make repairs to you properties, you pay $25 for each house and $110 for each hotel') player_has_money() #sents player to Jail def twentyfive(): print('You go directly to Jail without passing GO and without collecting $200') players_position[number] = 10 player_has_money() #Player advances to Illinois Av def twentysix(): print('You advance to ' + property_names[24]) players_position[number] = 24 player_has_money() #NOT COMPLETE YET!!! 
player recieves get-out-of-jail-free card def twentyseven(): print('You now have a card to get out of jail for free') print('You can use it wenever you want or sell it') player_has_money() #player receives $50 def twentyeight(): print('Your building and loan matures, you receive $50') players_money[number] += 50 player_has_money() #player receives $50 def twentynine(): print('The bank pays you a dividend of $50') players_money[number] += 50 player_has_money() #player goes to Reading railroad and if he passes GO collects $200 def thirty(): print('Take a ride on the ' + property_names[5] + '.') players_position[number] = 5 print('As you passed GO, you will receive $200') players_money[number] += 200 player_has_money() #player advances to the nearest utility def thirtyone(): print('You advance to the nearest utility'); if players_position[number] == 7: print('You will advance to ' + property_names[12]) players_position[number] == 12 player_has_money() elif players_position[number] == 22: print('You will advance to ' + property_names[28]) players_position[number] == 28 player_has_money() elif players_position[number] == 36: print('You will advance to ' + property_names[12]) players_position[number] == 12 player_has_money() #'switch' statement to run the selected card switcher = { 1: one, 2: two, 3: three, 4: four, 5: five, 6: six, 7: seven, 8: eight, 9: nine, 10: ten, 11: eleven, 12: twelve, 13: thirteen, 14: fourteen, 15: fifteen, 16: sixteen, 17: seventeen, 18: eighteen, 19: nineteen, 20: twenty, 21: twentyone, 22: twentytwo, 23: twentythree, 24: twentyfour, 25: twentyfive, 26: twentysix, 27: twentyseven, 28: twentyeight, 29: twentynine, 30: thirty, 31: thirtyone } # Get the function from switcher dictionary func = switcher.get(x) # Execute the function return func() #rolls the dice def roll_dice(): return randint(1,6) + randint(1,6) #game code while game_on: print('------------------------------------') print(players_names[turn] + ' you are in ' + 
str(property_names[players_position[turn]])) sleep(3) print(players_names[turn] + ' Roll the dice') print('Rolling the dice...') sleep(3) player1_dice = roll_dice() print('The dice turned ' + str(player1_dice)) if players_position[turn] + player1_dice > 39: players_money[turn] += 200 print('You completed another lap and got $200. You now have $' + str(players_money[turn])) players_position[turn] = players_position[turn] + player1_dice - 40 else: players_position[turn] = players_position[turn] + player1_dice print('Your position is now ' + str(property_names[players_position[turn]])) if players_position[turn] == 2 or players_position[turn] == 17 or players_position[turn] == 33: comm_card_number = randint(1,16) print(comm_card_number) card_chooser(comm_card_number) elif players_position[turn] == 7 or players_position[turn] == 22 or players_position[turn] == 36: chance_card_number = randint(17,31) print(chance_card_number) card_chooser(chance_card_number) elif players_position[turn] == 0 or players_position[turn] == 4 or players_position[turn] == 10 or players_position[turn] == 20 or players_position[turn] == 30 or players_position[turn] == 38: special_properties() else: if property_owner[players_position[turn]] == 'No owner': print(str(property_names[players_position[turn]]) + ' has no owner!') print('You can buy it for $' + str(property_price[players_position[turn]]) + ' and get a rent of $'+ str(property_rent[players_position[turn]])) print('You have $' + str(players_money[turn]) + ' available') print('Choose a) to buy it or b) Not buy it') player_choice = input('What do you want to do?') if player_choice == 'a': print('Processing your request...') sleep(5) if players_money[turn] >= property_price[players_position[turn]]: print('Congratulations! 
You are now the owner of ' + property_names[players_position[turn]]) property_owner[players_position[turn]] = players_names[turn] players_money[turn] -= property_price[players_position[turn]] print('You have now $' + str(players_money[turn]) + ' available') else: print('You do not have enough money to buy this property') else: print('Processing your request...') sleep(5) print('You did not buy ' + property_names[players_position[turn]]) elif property_owner[players_position[turn]] == players_names[turn]: print('You are the owner of ' + property_names[players_position[turn]] + '. Welcome! Enjoy your stay.') else: print('This property is owned by ' + property_owner[players_position[turn]] + ' and you will have to pay the owner $' + str(property_rent[players_position[turn]]) + ' for your stay') players_money[turn] -= property_rent[players_position[turn]] landlord = players_names.index(property_owner[players_position[turn]]) players_money[landlord] += property_rent[players_position[turn]] player_has_money() print('It is now the turn for the next player') sleep(3) if turn < len(players_number) - 1: turn += 1 else: turn = 0 <file_sep>/play_vs_comp.py # This file runs when there is only 1 player. # The player plays vs the computer
9a808cbbeb387a1950187ac3a146e4599eececab
[ "Python" ]
2
Python
morenotos/Monopoly
160093930f5fd4d73b1a9b0287434847ee461ada
7dfafc4369c93fe918d942a9781178e46d9e82fb
refs/heads/master
<file_sep>#!/usr/bin/env python2 # Script used to download all repos from a specific Github user or organisation # Based on a gist: https://gist.github.com/matthewgrossman/0ec47718511ea62853c5e32286f1c54ac import sys import os import requests def get_repos(group, name, access_token): repos = {} page = 1 while True: url = 'https://api.github.com/{0}/{1}/repos?per_page=100&access_token={2}&page={3}' r = requests.get(url.format(group, name, access_token, page)) if r.status_code == 200: rdata = r.json() for repo in rdata: repos[repo['name']] = repo['ssh_url'] if (len(rdata) >= 100): page = page + 1 else: print('Found {0} repos.'.format(len(repos))) break else: print(r) return False return repos def clone_repos(repos): print('Running...') counter = 1 total = str(len(repos)) for repo_name, repo_ssh_url in repos.iteritems(): print ('\nRepository {0} of {1}: {2}\n' ).format(str(counter), total, repo_name) repo_dir = './' + repo_name if os.path.isdir(repo_dir): print 'Already cloned, pulling new changes...' os.system('git --git-dir={0}/.git --work-tree={0} reset HEAD --hard'.format(repo_dir)) os.system('git --git-dir={0}/.git --work-tree={0} pull --rebase'.format(repo_dir)) else: os.system('git clone ' + repo_ssh_url) counter = counter + 1 if __name__ == '__main__': if len(sys.argv) > 2: repos = get_repos(sys.argv[1], sys.argv[2], sys.argv[3]) if repos: clone_repos(repos) else: print('Usage: python [users or orgs] [username or organisation name] [access token]') <file_sep>#!/usr/bin/env bash # # Colors. # ESC_SEQ="\x1b[" COL_RESET=$ESC_SEQ"39;49;00m" COL_RED=$ESC_SEQ"31;01m" COL_GREEN=$ESC_SEQ"32;01m" COL_YELLOW=$ESC_SEQ"33;01m" COL_BLUE=$ESC_SEQ"34;01m" COL_MAGENTA=$ESC_SEQ"35;01m" COL_CYAN=$ESC_SEQ"36;01m" # # Messages. 
# function ok() { echo -e "$COL_GREEN[ok]$COL_RESET "$1 } function bot() { echo -e "\n$COL_GREEN\[._.]/$COL_RESET - "$1 } function running() { echo -en "$COL_YELLOW ⇒ $COL_RESET"$1": " } function action() { echo -e "\n$COL_YELLOW[action]:$COL_RESET\n ⇒ $1..." } function warn() { echo -e "$COL_YELLOW[warning]$COL_RESET "$1 } function error() { echo -e "$COL_RED[error]$COL_RESET "$1 } # # Utilities. # audio-dl() { youtube-dl -f bestaudio -o '%(title)s' $1 --exec "ffmpeg -i {} -codec:a libmp3lame -qscale:a 0 {}.mp3 && rm {} " } <file_sep>#!/bin/bash # Utility to SSH into a running Docker container # Based on a SO answer: https://askubuntu.com/a/759459 # Docker id/name might be given as a parameter. DID=$1 if [[ "$DID" == "" ]]; then # If no id given simply just connect to the first running instance. DID=$(docker ps --filter 'status=running' --format "{{.Names}}" | head -1) fi docker exec -it $DID /bin/bash <file_sep># This is a custom .bash_profile file which will be executed on the top of the ~/.bash_profile file. # This file is executed every time when user sing in to OS. CURRENT_DIR=`dirname "$BASH_SOURCE"` export PATH="$PATH:$CURRENT_DIR/bin" export CLICOLOR=1 # See http://geoff.greer.fm/lscolors/ export LSCOLORS=ExFxBxDxCxegedabagacad # Symlink Cask apps to main apps directory. export HOMEBREW_CASK_OPTS="--appdir=/Applications" # Load the shell dotfiles, and then some: # * ~/.path can be used to extend `$PATH`. # * ~/.extra can be used for other settings you don’t want to commit. 
for file in "$CURRENT_DIR/."{path,bash_prompt,exports,aliases,functions,extra}; do [ -r "$file" ] && [ -f "$file" ] && source "$file"; done; unset file;<file_sep>#!/usr/bin/env bash # Close any open System Preferences panes, to prevent them from overriding # settings we’re about to change osascript -e 'tell application "System Preferences" to quit' # Ask for the administrator password upfront sudo -v # Keep-alive: update existing `sudo` time stamp until `.macos` has finished while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null & ############################################################################### # General UI/UX ############################################################################### # Turn off AutoCorrect defaults write NSGlobalDomain WebAutomaticSpellingCorrectionEnabled -bool false defaults write NSGlobalDomain NSAutomaticSpellingCorrectionEnabled -bool false # Increase contrast defaults write com.apple.universalaccess increaseContrast 1 # Disable the Boot Sound (or “Startup Chime”) on a Mac. sudo nvram SystemAudioVolume=%80 # Show full file path in Finder. defaults write com.apple.finder _FXShowPosixPathInTitle -bool YES; killall Finder # Don't automatically pair Mac with bluetooth audio devices (very painful with Google Home) defaults write /Library/Preferences/com.apple.Bluetooth.plist DontPageAudioDevices 1 ############################################################################### # Dock ############################################################################### # Make the Icon of Any Hidden App in the Dock Translucent defaults write com.apple.Dock showhidden -bool TRUE # Use Scroll Gestures (Expose) defaults write com.apple.dock scroll-to-open -bool TRUE # Lock the Dock size defaults write com.apple.dock size-immutable -bool true # Make hidden apps “hidden” in Dock. 
defaults write com.apple.Dock showhidden -bool TRUE # Add Custom Stacks for Recent, Documents & More defaults write com.apple.dock persistent-others -array-add '{"tile-data" = {"list-type" = 1;}; "tile-type" = "recents-tile";}' # Disable the drag window to top of screen Mission Control / Spaces feature in El Capitan # See: http://apple.stackexchange.com/questions/214301/can-we-disable-the-drag-window-to-top-of-screen-mission-control-spaces-feature defaults write com.apple.spaces spans-displays -bool TRUE # Restart dock to make changes visible killall Dock ############################################################################### # SSD-specific tweaks ############################################################################### # Disable hibernation (speeds up entering sleep mode) sudo pmset -a hibernatemode 0 <file_sep>#!/usr/bin/env bash # This file is based on Leevi's Graham script: https://gist.github.com/leevigraham/922bcd86ebd30dd861d0 # Include my library helpers for colorized echo and error messages, etc source ./.functions # Ask for the administrator password upfront. sudo -v # Homebrew setup. ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" brew doctor brew tap homebrew/dupes brew tap homebrew/versions brew tap homebrew/php brew tap caskroom/versions export HOMEBREW_CASK_OPTS="--appdir=/Applications" # # Brew packages. # action "Installing Homebrew packages." brew install wget brew install python brew install php56 brew install php56-xdebug brew install phpsh brew install phpunit brew install phpmd brew install behat brew install composer brew install node brew install git brew install git-extras brew install git-flow brew install tree brew install terminal-notifier brew install drush brew install ruby brew install docker-compose brew install rmtrash brew install mono brew install homebrew/php/php-code-sniffer brew install jq brew install youtube-dl brew install osquery # # Cask apps. 
# action "Installing Cask apps." brew cask install java brew cask install xquartz # X11 is required by many apps. brew cask install google-chrome brew cask install skype brew cask install jdownloader brew cask install spotify brew cask install steam brew cask install firefox brew cask install opera brew cask install torbrowser brew cask install sublime-text # Plugins and settings needs to be manually imported. brew cask install virtualbox brew cask install vagrant brew cask install sourcetree brew cask install google-drive brew cask install silverlight brew cask install flash-player brew cask install tuxera-ntfs brew cask install airdroid brew cask install android-file-transfer brew cask install android-studio # Settings needs to be manually imported. brew cask install bettertouchtool # Settings needs to be manually imported. brew cask install controllermate # Settings needs to be manually imported. brew cask install genymotion brew cask install istumbler brew cask install libreoffice brew cask install mplayerx brew cask install smcfancontrol brew cask install the-unarchiver brew cask install wireshark brew cask install wineskin-winery brew cask install avocode brew cask install filezilla brew cask install intellij-idea-ce brew cask install rambox brew cask install vlc brew cask install phpstorm brew cask install unified-remote brew cask install slate # To arrange windows between monitors. brew cask install numi brew cask install atom brew cask install harmony brew cask install xamarin brew cask install xamarin-studio # # NPM modules. # action "Installing Node.js modules." npm install -g bower npm install -g grunt npm install -g express npm install -g jade npm install -g stylus npm install -g nib npm install -g coffee-script # # Gems # action "Installing Ruby gems." gem install sass gem install compass # # Python packages # action "Installing Python packages." pip2 install awscli pip2 install requests # # Install SDKs with SDKMAN. # action "Installing SDKs." 
curl -s "https://get.sdkman.io" | bash source "$HOME/.sdkman/bin/sdkman-init.sh" sdk install groovy # # Run personalisation. # action "Personalising OSX." source .macos <file_sep>#!/usr/bin/env bash # Directories alias ..="cd .." alias ...='cd ../..' alias ....='cd ../../..' # Shortcuts alias g="git" # Vagrant alias vs="vagrant ssh" alias vu="vagrant up" alias vr="vagrant reload" alias vp="vagrant provision" alias vh="vagrant halt" alias vd="vagrant destroy" alias vup="vagrant up --provision" alias vrp="vagrant reload --provision" # Drupal alias dcc="drush cc all" alias drs="drush rs /" # Detect which `ls` flavor is in use if ls --color > /dev/null 2>&1; then # GNU `ls` colorflag="--color" else # OS X `ls` colorflag="-G" fi # Directory listing alias ll="ls -lF ${colorflag}" alias la="ls -laF ${colorflag}" alias lsd="ls -lF ${colorflag} | grep --color=never '^d'" # Utilities alias ip="dig +short myip.opendns.com @resolver1.opendns.com" alias timestamp="date +'%s'" alias awake="caffeinate" alias dotfiles="atom `dirname "$BASH_SOURCE"`" # Apps alias st="open -a SourceTree $1" alias webstorm="open -a WebStorm $1" alias phpstorm="open -a PhpStorm $1" # Safe delete alias trash="rmtrash" alias del="rmtrash"
7336a766d24d922f2b6684adc7c83bbf2dfe65d8
[ "Python", "Shell" ]
7
Python
rafalenden/dotfiles
3dc91f4fcad095b7855cd2341a00e0b5c0b92599
0ed1f80de2a82736ce7effa1983ec49b5f886ddb
refs/heads/master
<repo_name>sunshinelina1213/sunshinelina1213.github.io<file_sep>/yiyibushe/js/zhuce.js $(function(){ var telyanzheng=false; var pwdyanzheng=false; var pwdyanzheng2=false; var yzyanzheng=false; //手机验证方式 $(".checktel input").focusout(function(){ var telnum=/^1\d{10}$/; if(telnum.test($(this).val())){ $(".checktel span").text("✔").css({"color":"#f69","display":"block"}); findTel(); telyanzheng = true; }else{ $(".checktel span").text("请输入正确的11位手机号码").css({"color":"#f00","display":"block"}); } }) //密码验证,6~20位字符 $(".checkpsd1 .psd1").focusout(function(){ var reg= /^[a-z0-9_]{6,20}$/; if(reg.test($(this).val())){ $(".checkpsd1 span").text("✔").css({"color":"#f69","display":"block"}); pwdyanzheng = true; }else{ $(".checkpsd1 span").text("请输入6~20位的字母数字或者下划线组合").css({"color":"#f00","display":"block"}); } }) //第二次密码验证 $(".checkpsd2 .psd2").focusout(function(){ if($(this).val()==$(".checkpsd1 .psd1").val()){ $(".checkpsd2 .span").text("✔").css({"color":"#f69","display":"block"}); pwdyanzheng2 = true; }else{ $(".checkpsd2 span").text("两次密码不一致").css({"color":"#f00","display":"block"}); } }) //验证码 $(".checkcheck input").focus(function(){ if($(this).val()=="验证码"){ $(this).val(""); } }) $(".checkcheck input").focusout(function(){ if($(this).val()==""){ $(this).val("验证码"); } if($(this).val().toLowerCase()==$(".checkcheck .checkma2").text().toLowerCase()){ $(".checkcheck span").text("✔").css({"color":"#f69","display":"block"}); yzyanzheng =true; }else{ $(".checkcheck span").text("请输入正确的验证码").css({"color":"#f00","display":"block","line-height":"36px"}); changeCheck(); } }) $(".huan").click(function(event){ event.preventDefault(); changeCheck(); }) changeCheck(); function changeCheck(){ var arr=['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z','A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y','Z','0','1','2','3','4','5','6','7','8','9']; var str=""; for(var i=0; i<4; i++){ var 
index=parseInt(Math.random()*62); str=str+arr[index]; } $(".checkcheck .checkma2").text(str); } $(".btn").click(function(){ if(telyanzheng==true && pwdyanzheng==true && pwdyanzheng2 == true && yzyanzheng==true){ window.open("zhucesuccess.html"); } }) }) //本地存储信息 function savelocalData(){ //alert("11"); var data = new Object; data.tel = document.getElementById("tel").value; data.pwd = document.getElementById("pwd").value; var str = JSON.stringify(data); var time = new Date().getTime(); var key = time.toString(); localStorage.setItem(time,str); } function findTel(){ //alert(1); var oTel = document.getElementById("tel").value; //遍历所有在localStorage中的对象 for (var i = 0; i < localStorage.length; i++) { var key = localStorage.key(i); var data = JSON.parse(localStorage.getItem(key)); //遍历对象中的属性 for (var pTel in data) { //console.log(data); var tel = data.tel; if(oTel == tel){ alert("该用户已经注册过了"); document.getElementById("tel").value=""; return ; } } } } <file_sep>/yiyibushe/js/qqdenglu.js $(function(){ //tab切换 $(".tab_deng .dengli1").click(function(){ $(".tab_deng .dengli1").css({"borderBottom":"3px solid #f69","color":"#333333"}).siblings().css({"color":"#999999","borderBottom":"none"}); $(".qq_wrap .kuaisu").css({"display":"block"}).siblings().css({"display":"none"}); }) $(".tab_deng .dengli2").click(function(){ $(".tab_deng .dengli2").css({"borderBottom":"3px solid #f69","color":"#333333"}).siblings().css({"color":"#999999","borderBottom":"none"}); $(".qq_wrap .zhanghao").css("display","block").siblings().css("display","none") }) }) <file_sep>/yiyibushe/js/denglu.js // JavaScript Document $(function(){ // $(".denguser input").focus(function(){ // if($(this).val()=="手机号"){ // $(this).val(""); // } // }) $(".denguser input").focusout(function(){ // if($(this).val()==""){ // $(this).val("手机号"); // } // var telnum=/^1\d{10}$/; if(telnum.test($(this).val())){ findTel(); // $(".denguser span").text("✔").css({"color":"#f69","display":"block"}); // // }else{ // $(".denguser 
span").text("请输入正确的11位手机号码").css({"color":"#f00","display":"block","line-height":"20px"}) } }) // $(".dengpsd input").focusout(function(){ checklogpass(); }) }) function findTel(){ var oName = document.getElementById("tel").value; // var data = new Object; // data.tel = document.getElementById("tel").value; // data.pwd = document.getElementById("pwd").value; for (var i = 0; i < localStorage.length; i++) { var key = localStorage.key(i); var data = JSON.parse(localStorage.getItem(key)); var name = data.tel; var isreg=false; if(oName != name){ continue; } else{ $(".noreg").hide(); return data.pwd; isreg=true; } } if(!isreg){ $(".noreg").show(); } } function checklogpass(){ //alert("11"); var mima = findTel(mima); var oMima = $("#pwd").val(); if(oMima!= mima){ $(".wrong").show(); }else{ $(".wrong").hide(); location.href="index.html"; } } <file_sep>/yiyibushe/js/mydingdan.js $(function(){ //用户信息 $(".userlie").mouseover(function(){ $(".userinfo").css("display","block"); }) $(".userlie").mouseout(function(){ $(".userinfo").css("display","none") }) //关闭提醒 $(".guanbi").click(function(){ $(".tixing").css("display","none"); }) //tab切换 $(".ddright_top .listall").click(function(){ $(".ddcontain .ddall").css("display","block").siblings().css("display","none"); $(".listall").css("border-bottom","2px solid #f69").siblings().css("border","none"); }) $(".ddright_top .listzhifu").click(function(){ $(".ddcontain .daizhifu").css("display","block").siblings().css("display","none"); $(".listzhifu").css("border-bottom","2px solid #f69").siblings().css("border","none"); }) $(".ddright_top .listfahuo").click(function(){ $(".ddcontain .daifahuo").css("display","block").siblings().css("display","none"); $(".listfahuo").css("border-bottom","2px solid #f69").siblings().css("border","none"); }) $(".ddright_top .listshouhuo").click(function(){ $(".ddcontain .daishouhuo").css("display","block").siblings().css("display","none"); $(".listshouhuo").css("border-bottom","2px solid 
#f69").siblings().css("border","none"); }) $(".ddright_top .listpingjia").click(function(){ $(".ddcontain .daipingjia").css("display","block").siblings().css("display","none"); $(".listpingjia").css("border-bottom","2px solid #f69").siblings().css("border","none"); }) //取消订单 $(".quxiaodingdan").click(function(){ var conf=confirm("亲,您确定要取消订单吗?"); if(conf){ $(this).parent().parent().remove(); $("thead").remove(); }else{ alert("亲,订单陪你度过漫长岁月"); } }) }) <file_sep>/yiyibushe/js/index.js // JavaScript Document $(function(){ //清空文本框 $("#txt").focus(function(){ $("#txt").val(""); }) }) $(function(){ //回到顶部 $("#totop").click(function(){ $("body,html").animate({ scrollTop:0 },500); return false; }) }) //吸顶效果 $(window).scroll(function(){ var _top=$(window).scrollTop(); if(_top>200){ $("#nav").addClass("guding"); }else{ $("#nav").removeClass("guding"); } }) //hi范儿 window.onload = function(){ var oumei = document.getElementById("oumei"); var yuanchuang = document.getElementById("yuanchuang"); var rihan = document.getElementById("rihan"); var tianmei = document.getElementById("tianmei"); oumei.onmouseover = function(){ document.getElementById("oumei_div").style.display = "block"; document.getElementById("yuanchuang_div").style.display = "none"; document.getElementById("rihan_div").style.display = "none"; document.getElementById("tianmei_div").style.display = "none"; } yuanchuang.onmouseover = function(){ document.getElementById("oumei_div").style.display = "none"; document.getElementById("yuanchuang_div").style.display = "block"; document.getElementById("rihan_div").style.display = "none"; document.getElementById("tianmei_div").style.display = "none"; } rihan.onmouseover = function(){ document.getElementById("oumei_div").style.display = "none"; document.getElementById("yuanchuang_div").style.display = "none"; document.getElementById("rihan_div").style.display = "block"; document.getElementById("tianmei_div").style.display = "none"; } tianmei.onmouseover = function(){ 
document.getElementById("oumei_div").style.display = "none"; document.getElementById("yuanchuang_div").style.display = "none"; document.getElementById("rihan_div").style.display = "none"; document.getElementById("tianmei_div").style.display = "block"; } } //banner轮播图 $(function(){ var num=$("bannerNum li").length; var page=0; var time1; var flag=true; time1=setInterval(Shuffling,2000); function Shuffling(){ page++; if(page>4){ page=0 } Lunbo(); flag=false; } function reduce(){ page--; if(page<0){ page=4; } Lunbo(); flag=false; } function Lunbo(){//轮播 $(".bannerLunbo div").eq(page).stop().siblings().fadeOut(500,function(){ $(".bannerLunbo div").eq(page).stop().fadeIn(0,function(){ $(".bannerNum li").eq(page).addClass("firstNum").siblings().removeClass("firstNum"); flag=true;// }); }); } $(".bannerNum li").mouseover(function(){ var page = $(this).index(); $(".bannerNum li").eq(page).addClass("firstNum").siblings().removeClass("firstNum"); $(".bannerLunbo div").eq(page).stop().fadeIn(500).siblings().fadeOut(500); }) $(".mingxing img").mouseover(function(){ $(".mxhidden").css("display","block"); }) $(".mingxing img").mouseout(function(){ $(".mxhidden").css("display","none"); }) }) <file_sep>/yiyibushe/js/querendingdan.js $(function(){ //模态窗口的显示与关闭 $(".tianaddress").click(function(){ $(".addbox").css("display","block"); }) $(".addclose").click(function(){ $(".addbox").css("display","none"); }) //删除地址 $(".address").on("click",".shanchu", function(){ var conf=confirm("亲,您确定要删除吗?"); if(conf==true){ $(this).parent().parent().remove(); alert("已经成功删除"); }else{ alert("已经取消"); } }) //模态取消 $(".address_btn2").click(function(){ $(".addbox").css("display","none"); }) //创建元素 function baocundizhi(event){ $(".addbox").css("display","none"); var oTr = $("<tr></tr>").appendTo(".address"); $("<td></td>").appendTo(oTr).text($(".address_name input").val()); $("<td></td>").appendTo(oTr).text($(".address_address select").eq(0).val()+$(".address_address 
select").eq(1).val()+$(".address_address select").eq(2).val()+$(".address_xiangxi textarea").val()); $("<td></td>").appendTo(oTr).text($(".address_phone input").val()); $("<td></td>").appendTo(oTr).html('<a class="shanchu" href="#">删除</a>'); //清空内容 $('.address_name input').val(""); $('.address_phone input').val(""); $('.address_address input').val(""); $('.address_xiangxi textarea').val(""); //清空span $(".address_name td span").text(""); $(".address_phone span").text(""); $(".address_address span").text(""); $(".address_xiangxi span").text(""); } //名字验证 $(".address_name input").focusout(function(){ if($(this).val()==""){ $(".address_name td span").text("收货人姓名不能为空").css("color","red"); }else{ $(".address_name td span").text("√").css("color","red"); } }) //手机号验证 $(".address_phone input").focusout(function(){ var phonenum=/[0-9]{11}/; if(phonenum.test($(this).val())){ $(".address_phone td span").text("√").css({"color":"red"}) }else{ $(".address_phone td span").text("请输入正确的11位手机号码").css({"color":"red"}) } }) $(".address_btn").click(function(event){ event.preventDefault(); if($(".address_name input").val()==""){ $(".address_name td span").text("姓名不能为空").css({"color":"#f00"}); }else if($(".address_phone input").val()==""){ $(".address_phone td span").text("请输入正确的11位手机号码").css({"color":"#f00"}); }else{ baocundizhi(); } }) //提交订单 $(".tjdd").click(function(){ window.open("dingdansuccess.html"); }) }) <file_sep>/yiyibushe/js/guanlilogin.js //验证管理员的账号密码 $(function(){ $("#guanlibtn").click(function(){ if($("#gltxt").val()=="lina"&&$("#glpsd").val()==123456) { window.open("guanli.html"); }else{ alert("账号或密码有误请重新输入"); } }) })
c783cc79cad21adb3dc6bd437481d11612fec1fc
[ "JavaScript" ]
7
JavaScript
sunshinelina1213/sunshinelina1213.github.io
337a9dd39eda4c69e6138444db16b272f1e22a4d
bf1a384dc6c8d12d2e02537cb36517c19df7c0f8
refs/heads/master
<file_sep>""" Author: <NAME>, <EMAIL> Last Modified: 04/14/2014 This is a recreation of the Battleship board game where players place ships on a 10x10 grid, then proceed to try and sink the enemie's 5 ships battleshipAI.py will contain functions for the computer populating a grid with pieces and determining which location to try and hit. """ import sys import random import battleshipAI from collections import OrderedDict ### Player Class ### class Player(): """ This class will contain the player and game grids used to play the game. Contains functions used to create, modify, update, and print the grids during play. """ def __init__(self, pieceLocations): """ Takes the piece locations array as a parameter. Initializes the player instance by: creating locations and shipLocations dictionary, init's player piece grid and game grid, and updates dictionary with locations of each ship. """ self.locations = pieceLocations # Need to keep track of ships and their locations self.shipLocations = OrderedDict([ ("Aircraft Carrier (5)", []), ("Battleship (4)", []), ("Submarine (3)", []), ("Cruiser (3)", []), ("Patrol Boat (2)", []) ]) # Need variable to keep track of how many times a player is hit self.hits = 0 # Need variable to keep track of hit data for the last hit # Format: [boatName, ([hitLocations])] # e.g.: ["Aircraft Carrier (5)", (["E2", "E3"])] self.hitData = OrderedDict([ ("Aircraft Carrier (5)", []), ("Battleship (4)", []), ("Submarine (3)", []), ("Cruiser (3)", []), ("Patrol Boat (2)", []) ]) # Variable to hold string of last player's ship that was hit self.lastShipHit = "" # Need variable to keep track of how many hits a boat has self.shipHits = OrderedDict([ ("Aircraft Carrier (5)", 5), ("Battleship (4)", 4), ("Submarine (3)", 3), ("Cruiser (3)", 3), ("<NAME> (2)", 2) ]) # Initialize Player grid and game grid to all zeros self.myGrid = [ ["O" for i in range(10)] for j in range(10)] self.gameGrid = [ ["O" for i in range(10)] for j in range(10)] # Initialize 
shipLocations self.setShipLocations(self.locations) # Now set up the grid with the gridSet method for num in range(0, len(self.locations), 2): self.gridSet(self.locations[num], self.locations[num + 1], self.myGrid) # Sets up initial grid with piece locations def gridSet(self, start, end, grid): """ gridSet function takes the start, end coordinates, and the grid. It determines if the piece is to be placed horizontally or vertically, then updates the player grid""" # Horizontal piece layout if start[:1] is end[:1]: for num in range(int(start[1:]) - 1, int(end[1:])): grid[ord(start[:1]) % 65][num] = "S" # Vertical piece layout elif start[1:] is end[1:]: for num in range(ord(start[:1]) % 65, ord(end[:1]) % 65 + 1): grid[num][int(start[1:]) - 1] = "S" else: print "Looks like you have invalid coordinates, unable to set the grid" # Update player's game grid with Hit (H) or Miss (M) def gameGridUpdate(self, loc, hitMiss): """ gameGridUpdate function takes a coordinate value, and a string (hitMiss), then updates the gameGrid appropriately to be displayed by the player""" self.gameGrid[ord(loc[:1]) % 65][int(loc[1:]) - 1] = hitMiss # Player tries a location (e.g. "F5") on the grid of another player def move(self, opponent, loc): # If other player's grid @ location is occupied if opponent.myGrid[ord(loc[:1]) % 65][int(loc[1:]) - 1] is "S": print "HIT - %s - %s!!!!" % (loc, opponent.getShipName(loc)) self.gameGridUpdate(loc, "H") self.lastShipHit = opponent.getShipName(loc) opponent.hits += 1 # If other player's grid @ location is NOT occupied elif opponent.myGrid[ord(loc[:1]) % 65][int(loc[1:]) - 1] is "O": print "MISS on %s!!!!" 
% (loc) self.gameGridUpdate(loc, "M") # Update the shipLocations dictionary to store the grid locations of the ship def setShipLocations(self, coordinates): """ setShipLocations function takes in an array of start and end piece coordinates, creates an array of all coordinates of each ship, then assigns those coordinates to the shipLocations dictionary. This assumes that the param coordinates are in the format: [acStart, acEnd, batStart, batEnd, subStart, subEnd, cruiseStart, cruiseEnd, patrolStart, patrolEnd] """ start = "" end = "" # Hold keys for dictionary to use for assignment later, count variable will assist keys = self.shipLocations.keys() count = 0 for num in range(0, len(coordinates), 2): start = coordinates[num] end = coordinates[num + 1] length = 0 chrStart = 0 positions = [] # Same row if start[:1] is end[:1]: length = int(end[1:]) - int(start[1:]) + 1 for number in range(int(start[1:]), int(end[1:]) + 1): positions += [str(start[:1] + str(number))] # Same Column elif start[1:] is end[1:]: length = (ord(end[:1]) % 65) - (ord(start[:1]) % 65) + 1 chrStart = ord(start[:1]) for number in range(chrStart, chrStart + length): positions += [str(chr(number) + start[1:])] # Assign positions to shipPositions array self.shipLocations[keys[count]] = positions count += 1 # Get ship name from the shipLocations dictionary def getShipName(self, loc): """ Returns the name of the ship that was hit""" for boat in self.shipLocations: if loc in self.shipLocations[boat]: # Subtract 1 hit from the boat self.shipHits[boat] -= 1 # If the boat is down, return the boat name and that it's down # Otherwise return the boat value if self.shipDown(boat) is True: #Change game grid to reflect sunken ship self.sunkenShipUpdate(boat) return "%s is down!" 
% (boat) else: return boat def shipDown(self, boat): """ Returns True if the boat's hit value is at zero, meaning it is sunk""" if self.shipHits[boat] is 0: return True else: return False # Need function to return True if the player has a boat which was hit on the board def hasHits(self): """ Returns True if player has a ship on the board which has been hit and is not sunk.""" count = 0 for item in self.hitData: if len(self.hitData[item]) != 0 and len(self.hitData[item]) != int(self.hitData[item][-2]): count += 1 if count != 0: return True else: return False # Prints the Player grid with ships placed def printPlayerGrid(self): """ Prints the player grid with appropriate labels""" chrStart = 65 labelNums = " 1 2 3 4 5 6 7 8 9 10 " print labelNums for line in self.myGrid: print chr(chrStart), line chrStart += 1 # Prints Player's game grid with Hits and Misses def printGameGrid(self): """ Prints the player's game grid with appropriate labels""" chrStart = 65 labelNums = " 1 2 3 4 5 6 7 8 9 10 " print labelNums for line in self.gameGrid: print chr(chrStart), line chrStart += 1 # Updates the game grid to relfect a sunken ship with 'X' def sunkenShipUpdate(self, boat): coords = self.hitData[boat] for coord in coords: self.gameGrid[ord(coord[:1]) % 65][int(coord[1:])] = 'X' def printShipHits(self): """ Print the shipHits dictionary, used for testing""" return self.shipHits def printShipLocations(self): """ Print the shipLocations dictionary, used for testing""" return self.shipLocations # Piece locations # An array storing the string locations of all pieces # [acStart, acEnd, batStart, batEnd, subStart, subEnd, cruiseStart, cruiseEnd, patrolStart, patrolEnd] p1Pieces = battleshipAI.createShipArray() p2Pieces = battleshipAI.createShipArray() ## Game State ## # Create player instances p1 = Player(p1Pieces) p2 = Player(p2Pieces) # Variables to hold the last hit location # [boatName, hitLocation, timesHit] p1LastHit = [] p2LastHit = [] # Arrays to hold all coordinates that a 
player has tried p1allTries = [] p2allTries = [] # Variable to determine who's turn it is p1turn = True while p1.hits < 18 and p2.hits < 18: print p1.hits, p2.hits # If Player 1 has max hits, player 2 wins if p1.hits is 17: print "Player 2 Wins!!" break # If Player 2 has max hits, player 1 wins elif p2.hits is 17: print "Player 1 Wins!!" break # Continue playing the game else: if p1turn: # If it's player 1's turn p1.printGameGrid() hitLoc = raw_input("Player 1 - Enter hit location: ") while hitLoc in p1allTries: print "Already tried that coord, choose another" hitLoc = raw_input("Player 1 - Enter hit location: ") p1allTries.append(hitLoc) # Add to tries array p1.move(p2, hitLoc) p1turn = False else: # Player 2 turn (comp) print "Computer's Turn" if p1.hits < 1 and p1.hasHits() == False: # Need a random coord hitLoc = battleshipAI.randCoord() while hitLoc in p1allTries: # No duplicates hitLoc = battleshipAI.randCoord() p2allTries.append(hitLoc) #Add to p2 Tries array else: potentials = battleshipAI.potentialHits(p1.gameGrid, p1.shipHits, p1.hitData) hitLoc = potentials[random.randrange(0, len(potentials))] p2allTries.append(hitLoc) print "Computer tries %s" % (hitLoc) p2.move(p1, hitLoc) p1turn = True <file_sep>pyBattleship ============ Python Battleship game Need to Update the main file so that you can play the computer. AI is all set (battleshipAI.py). Main functions for the AI are: createShipArray() -> Creates a random array of coordinates for ship placement potentialHits(params) -> Given a gameGrid, shipHits, and hitData: Returns a list of potential coords where a ship could be located. Initially it just returns all coordinates that are hitsLeft distance away from the current hit coordinates. Calls on potentialTrim to do a smart Trim of the potential coords. potentialTrim(params) -> Given a gameGrid, potential coords, and hitsLeft: Trim down potential coords to exclude those who have been hit/missed already, and those that are blocked by a hit/miss. 
<file_sep>import random # Array to hold all coordinates already used allCoords = [] # Need function to create an array of ship locations # Returns ship array in format: # [acStart, acEnd, batStart, batEnd, subStart, subEnd, cruiseStart, cruiseEnd, patrolStart, patrolEnd] def createShipArray(): """ Returns a ship array of randoms piece coordinates to place ships on the grid.""" # Array to hold string coordinates of ships # Returned at the end shipArray = [] # Array of integers holding the max length of the 5 different ships shipLengths = [5, 4, 3, 3, 2] # Variable to hold a tempCoordinate tempCoord = [] for num in range(0, len(shipLengths)): #Reset All Coords in case it's run multiple times if num == 0: allCoords = [] # Determine which direction to place a piece - # 0 = Horizontal, 1 = Vertical direction = random.randrange(0,2) tempCoord = directionCoords(direction, shipLengths[num]) # If these coordinates are in use, try again while coordsTaken(getAllCoords(tempCoord[0], tempCoord[1])) is True: tempCoord = directionCoords(direction, shipLengths[num]) # Append to shipArray shipArray.append(tempCoord[0]) shipArray.append(tempCoord[1]) # Update the Coordinates in the main allCoords list updateCoords(tempCoord[0], tempCoord[1]) # Now return the final array of 10 string values return shipArray # Function to return potential hit locations which haven't already been touched def potentialHits(grid, shipHits, hitData): """ Returns all potential hit coordinates based on the hitData and shipHits of that ship. 
First it makes sure the boat is not sunk and has at least 1 hit, then based on dirction of the boat (or if it's just a single coord), get all possible coordinates where the ship can be.""" # Array to return later pHits = [] # Array to hold all hit locations of ships alive allHits = [] # Variables to hold number counts highNum = 0 lowNum = 0 startNum = 0 # Variables to hold letter decimal values startLetter = 65 endLetter = 74 letter = 65 # Populate array for item in hitData: if hitData[item] != [] and shipHits[item] != 0: allHits.extend(hitData[item]) for boat in shipHits: # Make sure the ship is not sunk already and hasn't been hit if shipHits[boat] != 0 and shipHits[boat] != int(boat[-2]): hitsLeft = shipHits[boat] coordsHit = hitData[boat] coordsHit.sort() if len(coordsHit) >= 1: if direction(coordsHit) is 0: # Horizontal # Get high and low numbers values highNum = int(coordsHit[len(coordsHit) - 1][1:]) lowNum = int(coordsHit[0][1:]) # Get potential hits for a in range(1, lowNum): if lowNum - hitsLeft <= a < lowNum: pHits.append(coordsHit[0][:1] + str(a)) for b in range(lowNum, highNum + 1): if lowNum < b < highNum: pHits.append(coordsHit[0][:1] + str(b)) for c in range(highNum, 11): if highNum < c <= highNum + hitsLeft: pHits.append(coordsHit[0][:1] + str(c)) elif direction(coordsHit) is 1: # Vertical # Get Start and end letter values startLetter = ord(coordsHit[0][:1]) endLetter = ord(coordsHit[len(coordsHit) - 1][:1]) # Get potential hits for a in range(65, startLetter): if startLetter - hitsLeft <= a < startLetter: pHits.append(chr(a) + coordsHit[0][1:]) for b in range(startLetter, endLetter + 1): if startLetter < b < endLetter: pHits.append(chr(b) + coordsHit[0][1:]) for c in range(endLetter, 75): if endLetter < c <= endLetter + hitsLeft: pHits.append(chr(c) + coordsHit[0][1:]) else: # Case - Single hit coord print coordsHit startNum = int(coordsHit[0][1:]) letter = ord(coordsHit[0][:1]) # Left coords for a in range(1, startNum): if startNum - hitsLeft <= a 
< startNum: pHits.append(coordsHit[0][:1] + str(a)) # Right coords for b in range(startNum, 11): if startNum < b <= startNum + hitsLeft: pHits.append(coordsHit[0][:1] + str(b)) # Coords above for c in range(65, letter): if letter - hitsLeft <= c < letter: pHits.append(chr(c) + coordsHit[0][1:]) # Coords below for d in range(letter, 75): if letter < d <= letter + hitsLeft: pHits.append(chr(d) + coordsHit[0][1:]) else: # If boat has no hits, do nothing pass # Trim results in case there is a miss or sunken shit at location return potentialTrim(grid, pHits, hitsLeft) # Function to only return potential coords that haven't been touched # Also trims coords that are blocked by misses and other boats def potentialTrim(grid, potentials, hits): """Returns only the coordinates that most likely have a ship on them. It trims coordinates based on where the ship has already been hit and how many hits it has left.""" # Variable to return later trimmed = [] # Variables to hold distance counts leftHits = hits rightHits = hits # Variables to hold location of first miss or ship horizontal leftLoc = 0 rightLoc = 0 # Variables to hold location of first miss/ship Vertical topLoc = 65 botLoc = 74 # Variables to hold array of game grid values gridPotentials = [] gridPotentials = gridCoordsTranslate(potentials, grid) # Now trim based on direction if direction(potentials) is 2: # single hit coord only # Variables to break up the grid and hits allGrids = [[], []] allHits = [[], []] # Set up separated grids allHits[0].extend(potentials[:len(potentials)/2]) allHits[1].extend(potentials[len(potentials)/2:]) allGrids[0].extend(grid[:len(potentials)/2]) allGrids[1].extend(grid[len(potentials)/2:]) # Now loop through both separated grids and trim for item, grids in zip(allHits, allGrids): # trim left for b in range(hits - 1, -1, -1): if grids[b] != 'O': leftHits -= 1 if leftHits == hits - 1: # First hit location leftLoc = b if leftHits < hits: trimmed.extend(item[leftLoc + 1:hits]) else: # No 
coords blocking trimmed.extend(item[:hits]) # trim right for c in range(hits, len(item)): if grids[c] != 'O': rightHits -= 1 if rightHits == hits - 1: # First hit location rightLoc = c if rightHits < hits: # trimmed.extend(item[hits:rightLoc]) else: # No coords blocking trimmed.extend(item[hits:]) # Reset hits count leftHits = hits rightHits = hits # Returned trimmed return trimmed elif direction(potentials) == 0 or direction(potentials) == 1: # Horiz/Vert # Trim Left for a in range(hits - 1, -1, -1): if gridPotentials[a] != 'O': leftHits -= 1 if leftHits == hits - 1: # First hit location leftLoc = a if leftHits < hits: # trimmed.extend(potentials[leftLoc + 1:hits]) else: # No coords blocking trimmed.extend(potentials[:hits]) # Trim Right for b in range(hits, len(potentials)): if gridPotentials[b] != 'O': rightHits -= 1 if rightHits == hits - 1: # First hit location rightLoc = b if rightHits < hits: # trimmed.extend(potentials[hits:rightLoc]) else: # No coords blocking trimmed.extend(potentials[hits:]) return trimmed # Function to return an translate array of coords to their grid values def gridCoordsTranslate(poten, gameGrid): """Returns an array that is the gameGrid coordinate equivalent of the potential hits. 
For example, it should look like: ["O", "O", "M", "O]""" grid = [] for coord in poten: grid.append(gameGrid[ord(coord[:1]) % 65][int(coord[1:])]) return grid # Function to get a start and end coord set based on direction and # Length of the ship to be placed def directionCoords(direc, length): """ Returns coordinates for a ship based on which direction and the size of the ship """ # to be returned later startEnd = [] # Holds the decimal value of a letter from 'A' to 'J' letterCode = 65 #Maximum letter code for looping, chr(74) = 'J' # Set to 75 so I can use randrange(65, maxChar) and only get ints from 65-74 maxChar = 75 # Variables to hold the int for making coordinates numHold = 0 # Holds array of letters already used horizontalLetters = [] if direc is 0: # Horizontal Placement # Pick random letter between 'A' and maxChar ('J') letterCode = random.randrange(65, maxChar) # Need a number starting position between 1 and (10 minus ship length) numHold = random.randrange(1, 10 - length) # Append coordinate value for start position startEnd.append(chr(letterCode) + str(numHold)) # Now the end coord startEnd.append(chr(letterCode) + str(numHold + length - 1)) return startEnd else: # Vertical Placement # Random letter between A and J(74)-length letterCode = random.randrange(65, maxChar - length) # Pick a random number numHold = random.randrange(1, 10) # Append to return array startEnd.append(chr(letterCode) + str(numHold)) startEnd.append(chr(letterCode + length - 1) + str(numHold)) return startEnd # Need a function to return a random hit location def randCoord(): """ Returns a random coordinate""" # Random value between 65 and 74 (need to put 75) letterCode = random.randrange(65, 75) # Random number between 1 and 10 num = random.randrange(1, 11) return chr(letterCode) + str(num) # Function to get full coordinates based on start and end values, then store them in the # allCoords array def updateCoords(start, end): """ Used to update the allCoords global list in creating 
a ship Array""" positions = [] chrStart = 0 length = 0 # Same row if start[:1] is end[:1]: length = int(end[1:]) - int(start[1:]) + 1 for number in range(int(start[1:]), int(end[1:]) + 1): positions += [str(start[:1] + str(number))] # Same Column elif start[1:] is end[1:]: length = (ord(end[:1]) % 65) - (ord(start[:1]) % 65) + 1 chrStart = ord(start[:1]) for number in range(chrStart, chrStart + length): positions += [str(chr(number) + start[1:])] allCoords.append(positions) # Function to return list coordinates between start and end values def getAllCoords(start, end): """ Returns all coordinates between start and end positions""" positions = [] chrStart = 0 length = 0 # Same row if start[:1] is end[:1]: length = int(end[1:]) - int(start[1:]) + 1 for number in range(int(start[1:]), int(end[1:]) + 1): positions.append(str(start[:1] + str(number))) # Same Column elif start[1:] is end[1:]: length = (ord(end[:1]) % 65) - (ord(start[:1]) % 65) + 1 chrStart = ord(start[:1]) for number in range(chrStart, chrStart + length): positions.append(str(chr(number) + start[1:])) return positions # Returns False if the coordinates aren't in use def coordsTaken(arr): """ Checks to see if any of the coordinates in arr exist in allCoords, returns False if not. 
""" for item in arr: for store in allCoords: if item in store: return True return False # Need a function shipDown (same as in pyBattleship class) def shipDown(shipHits, boat): """ Returns True if the boat's hit value is at zero, meaning it is sunk""" if shipHits[boat] is 0: return True else: return False # Function to figure out direction based on array of hits def direction(hits): """Given an array of hit coordinates, return value based on direction""" if len(hits) < 2: return 2 # Single Element Array else: if hits[0][:1] is hits[1][:1]: return 0 # Horizontal elif hits[0][1:] is hits [1][1:]: return 1 # Vertical # Function returns True if location has a miss def missed(grid, coord): """Return True if the coord in the grid is a miss. """ if grid[ord(coord[:1]) % 65][int(coord[1:])] is 'M': return True else: return False
8bde4beb5cb18ce0d4b4528c119b12d0de2e468f
[ "Markdown", "Python" ]
3
Python
javproj/pyBattleship
f815cc6ebdb4aeb386446fecf42b7485399a36e1
16f4d01eacfb5ffb45dace54ac16a625e1ac14f2
refs/heads/master
<repo_name>eapenzacharias/enumerable<file_sep>/README.md # Advanced Building Blocks - Enumerables This is the first project of the Main Ruby curriculum at [Microverse](https://www.microverse.org/) - @microverseinc * The goal was learn about sorting algorithims and blocks. * The project was completed using ruby. #### [Assignment link](https://www.theodinproject.com/courses/ruby-programming/lessons/advanced-building-blocks) #### Authors * [@eapenzacharias](https://github.com/eapenzacharias) <file_sep>/blocks.rb module Enumerable def my_each a=self.length for i in 0...a do yield(self[i]) end end def my_each_with_index a=self.length for i in 0...a do yield(self[i], i) end end def my_select result = [] self.my_each do |x| result << x if yield(x) end result end def my_all? x = true self.my_each do |value| unless yield(value) x = false end end return x end def my_any? x = false self.my_each do |value| if yield(value) x = true end end return x end def my_none? x = true self.my_each do |value| if yield(value) x = false end end return x end def my_count(obj=nil) count = 0 if block_given? self.my_each do |x| count += 1 if yield(x) end elsif obj self.my_each do |x| count += 1 if x == obj end else count = self.length end count end def my_map result = [] self.my_each do |x| result << proc.call(x) end result end def my_inject(a=0) total = a self.my_each do |value| total = yield(total, value) end total end end =begin -----------------For Testing------------------ =end x = [1, 2, 4, 2,7,2] puts "My Each" x.my_each {|a| puts a} puts "My Each with index" x.my_each_with_index {|a,i| puts "#{i}: #{a}"} puts "My select" puts x.my_select{ |x| x%2 == 0} puts "My All" puts x.my_all?{ |x| x%2 == 0} puts "My Any" puts x.my_any?{ |x| x%2 == 0} puts "My All" puts x.my_none?{ |x| x%2 == 0} puts "My Count" puts x.my_count {|a| a} puts "My Map" puts x.my_map {|a| a*=2} puts "My Inject" puts x.my_inject(0) {|a,b| a+=b}
c2dfae836c07312655210d48c08f524f4fe0f2b5
[ "Markdown", "Ruby" ]
2
Markdown
eapenzacharias/enumerable
3d462e115f048ea601ab2f926e8633565a684f7a
11b1b9eadd3b83cb28a66028da6e9ea1f863b4a8
refs/heads/master
<file_sep>#!/bin/bash # # Base Praxis Command # @see /usr/lib/portage/bin/etc-update # @todo check for LANG and LC_COLLATE set -o errexit # set -o nounset # Colors and reset Screen # CLEAR="" N="\033[0;39m" # Reset Terminal Defaults R="\033[1;31m" # R: Failure or error message G="\033[1;32m" # G: Commands Y="\033[1;33m" # Y: Options B="\033[1;34m" # B: Paths # MAGENTA: Found devices or drivers # MAGENTA="" # CYAN: Questions # CYAN="" W="\033[1;37m" # white # X="\033[1;38m" # white export EMERGE_DEFAULT_OPTS="--alphabetical --nospinner --misspell-suggestions=n" export NOCOLOR="true" export PORTAGE_NICENESS=15 thiscmd=$( readlink -f $0 ) thisdir=$( dirname "$thiscmd" ) basedir=$( dirname "$thisdir" ) opt_action= opt_force=0 opt_fetch=0 # # Check the System Health # function praxis_check() { echo "* Pending Updates:" ( emerge --update --deep --newuse --pretend @world | grep '\[' ) || true echo "* emaint checks" emaint --check all if [ -x /usr/bin/revdep-rebuild ]; then echo "* Reverse Dependency Check (revdep-rebuild):" revdep-rebuild --pretend --ignore --quiet | egrep '^ broken|^\[ebuild' fi if [ -x /usr/bin/python-updater ]; then echo "* Python Updates" python-updater -- --getbinpkg --oneshot --pretend --verbose fi if [ -x /usr/sbin/module-rebuild ]; then echo "* Modules to Rebuild" module-rebuild -- --getbinpkg --oneshot --pretend --verbose else echo "Missing: sys-kernel/module-rebuild" fi # Packages that can be removed old slots echo "* Packages to Clean [ --clean ]" (emerge --clean --pretend) || true # Prune echo "* Packages to Prune [ --prune ]" (emerge --prune --nodeps --pretend | egrep '^ \w+|^ +selected') || true # Cleans the system by removing packages that are not associated with explicitly merged packages echo "* Unused Packages [ --depclean ]" (emerge --depclean --pretend | egrep '^ \w+|^ +selected') || true # @todo do we want this in here? 
chk=$(eselect news count all) if [[ "x$chk" != "x0" ]]; then echo "News:" eselect news list fi # eclean distfiles --pretend # eclean packages --pretend # Check GLSA echo "* Gentoo Linux Security Announcements:" for x in $( nice glsa-check --test affected 2>&1 |grep '^[0-9]' ); do nice glsa-check --print $x 2>/dev/null|awk '($0=="") && (p=="") {exit}{print;p=$0}'|sed '/^$/d' echo -n "Updates: " nice glsa-check --pretend $x 2>/dev/null|awk '/^ /{print $1 $2}'|tr '\n' ',' echo done # @todo grep /etc/locale.gen for entires x=$(grep -v -e '^#' -e '^$' -e '^[[:space:]]*$' /etc/locale.gen) if [[ -z "$x" ]]; then echo "!! /etc/locale.gen needs to be updated" fi # Remove nox11 from /etc/pam.d/system-login # session optional pam_ck_connector.so nox11 } # Dependency Check function praxis_check_depend_on() { atom=$1 # Show Packages Depending on $atom echo "Packages Depending On: $atom" equery --quiet depends $atom qdepends --query $atom | tr '\n' ' ' echo # What does $atom depend on? echo "$atom Depends On:" qdepends --all $atom } function praxis_clean() { eselect news purge rm /var/log/emerge.log rm /var/log/emerge-fetch.log rm /var/log/portage/elog/summary.log eclean distfiles eclean packages } # # Display Help # function praxis_help() { cat <<EOF -d|--debug) Enable Debug Mode -h|--help) This Help -V|--version) Display Version +fetch) Force Fetching +force) Force Command (like sync/update) check) Check the System including GLSA clean) Removes Cruft, Logs, &c depends) praxis_check_depend_on XX size) Determine Size of Installed Packages update) Update this Host (sync, -pvuDN world) verify) Verify is not Handled Yet, what should we do? verify=*) Verify is not Handled Yet, what should we do? 
EOF } # # check for root or die # function praxis_need_root() { if [[ "x$UID" != "x0" ]]; then echo -e "\n You've got to be root to run this thing\n" exit 1; fi } # Package Size Output function praxis_package_size() { equery size '*' \ | awk '{ print $4 " " $1 }' \ | sed 's/size//' | sed 's/[()]//g' \ | sort --numeric-sort --reverse } # # Updates this system from the Portage Source # Does a little house-keepting too function praxis_update() { # Get the Latest Portage Tree from Gentoo # if [ "x$opt_force" == "x1" ]; then # # @todo use $PORT_DIR # set # exit # rm -fr /usr/portage/metadata/timestamp.chk # fi emerge --sync > /dev/null # Print List of Updates emerge --deep --newuse --update @world echo "Gentoo News:" eselect news list # Clean up eclean distfiles > /dev/null eclean packages > /dev/null revdep-rebuild --ignore # @todo eselect stuff here echo "Removable Packages" (emerge --depclean --pretend | grep "All selected packages" | cut -d: -f2) || true } # # main() # # Nothing or Help? if [[ $# == 0 ]] || [[ "$1" == "-h" ]] || [[ "$1" == "--help" ]]; then praxis_help; exit; fi # Loop Options while [[ $# > 0 ]] ; do case $1 in -d|--debug) set -x ;; -h|--help) praxis_help ;; -V|--version) echo 'Edoceo Praxis v1800'; exit 0 ;; +fetch) opt_fetch="x1" ;; +force) opt_force="x1" ;; binhost_build emerge --sync emerge -vuDN @world ;; binhost_publish) if [ -z "$praxis_binhost_publish" ]; then echo " ${R}*${N} Please set praxis_binhost_publish in make.conf" exit fi eclean disfiles eclean packages rsync \ --archive \ --delete-before \ /usr/portage/packages/ \ $praxis_binhost_publish ;; check) echo "Check is not Handled Yet, what should we do?" ;; depends) shift; praxis_check_depend_on $1 ;; size) praxis_package_size ;; stage4-pack) praxis_need_root praxis_stage4_pack ;; update) praxis_need_root praxis_update ;; verify) echo "Verify is not Handled Yet, what should we do?" ;; verify=*) echo "Verify is not Handled Yet, what should we do?" 
;; *) set +o xtrace set -o errtrace set -o functrace set -o monitor echo "The command or option '$1' was not recognized" praxis_help exit ;; esac shift done <file_sep>#!/bin/bash # :mode=shellscript: # # This script updates our portage, builds packages, pushes to host # set -o errexit set -o nounset export EMERGE_DEFAULT_OPTS="--alphabetical --nospinner --misspell-suggestions=n" export NOCOLOR="true" export PORTAGE_NICENESS=15 # Check for the binhost to push to binhost=$(eval source /etc/make.conf; echo $praxis_binhost) if [[ -z "$binhost" ]]; then echo "praxis_binhost is not set" echo "Update /etc/make.conf" echo " praxis_binhost=\"http://cdn.edoceo.com/praxis/x64\"" exit; fi log=$(mktemp) # pre-clean rm -f /var/log/emerge.log rm -f /var/log/emerge-fetch.log rm -f /var/log/portage/elog/summary.log # Sync echo "* --sync" emerge --sync >$log # Update echo "* --update" emerge --deep --newuse --update @world >>$log # Check if PERL was updated & do perl-cleaner echo "* perl-cleaner" perl-cleaner --all >>$log emerge --oneshot perl-core/ExtUtils-ParseXS >>$log # Check if Python was update & do python-updater echo "* python-updater" python-updater >>$log # Check if Ruby was updated # # Security Check echo "* glsa-check" # /opt/edoceo/sbin/element.sh glsa glsa-check --test all # Check for some revdep-rebuild stuff echo "* revdep-rebuild" revdep-rebuild --ignore --pretend --nocolor --no-progress --quiet # # Cleanup echo "* --depclean" emerge --depclean --pretend # (emerge --depclean --pretend | grep "All selected packages" | cut -d: -f2) || true # # Prune echo "* Packages to Prune [ --prune ]" emerge --prune --pretend # (emerge --prune --pretend | egrep '^ \w+|^ +selected') || true # # eclean stuff eclean distfiles eclean packages # # Show Logs #if [ -f /var/log/portage/elog/summary.log ]; then # cat /var/log/portage/elog/summary.log #fi # Log Package Size equery size '*' \ | awk '{ print $4 " " $1 }' \ | sed 's/size//' | sed 's/[()]//g' \ | sort --numeric-sort --reverse \ 
> /usr/portage/packages/package.size # Log Package Details emerge --emptytree --pretend --verbose world \ | grep '^\[ebuild' \ | cut -c17- \ | sort \ > /usr/portage/packages/package.info # Push my Packages Up # It's up to the binhost to prune itself rsync --archive --verbose /usr/portage/packages/ $binhost/ <file_sep># Edoceo Gentoo/Praxis This is our overlay for Gentoo which provides three profiles ## Praxis This is our baseline profile, for all system ## Praxis ## Proton ## Nucleus <file_sep>#!/bin/bash # # makes an Element Package # pushes to Lithium # cmd=$(readlink -f "$0") mwd=$(dirname "$cmd") tmp=$(mktemp -d) # rev=$(svn info $mwd@HEAD | awk ' /^Revision/ { print $2 }') rev=$(date +%Y.%W) tgz=element-$rev.tgz svn export --force https://carbon.edoceo.com/svn/element/sysroot $tmp tar -vzcf $tgz -C $tmp/ ./ rm -fr $tmp # scp $tgz <EMAIL>:/var/www/cdn.edoceo.com/element/ rm $tgz # make ebuild in our tree for me? # sed 's/^SRC_URI.*/SRC_URI=http://cdn.edoceo.com/ # cp "$mwd/element.ebuild" "$mwd/../portage/edoceo/element/element-$rev.ebuild" pushd $(dirname $mwd)/sysroot/ tar -zcf ../praxis-2012.31.tgz \ ./opt/edoceo/sbin/praxis.sh \ ./opt/edoceo/sbin/praxis-portage-mirror.sh \ ./opt/edoceo/sbin/praxis-binhost-update.sh popd scp ./praxis-2012.31.tgz <EMAIL>:/var/www/cdn.edoceo.com/praxis/ rm ./praxis-2012.31.tgz<file_sep>#!/bin/bash # @file # @brief Updates this Praxis Mirror set -o errexit # @todo detect default profile? 
gentoo_profile="default/linux/amd64/13.0" praxis_profile="edoceo/praxis/x64" # Stop Others from pulling from us while we update # /etc/init.d/rsyncd --nocolor stop >/dev/null # Ensure that our sync will get latest, block downstream rm -f /usr/portage/metadata/timestamp rm -f /usr/portage/metadata/timestamp.chk rm -f /usr/portage/metadata/timestamp.x # Synx to Gentoo export SYNC="rsync://rsync.gentoo.org/gentoo-portage" eselect profile set $gentoo_profile emerge --sync >/dev/null || true # (can be in /etc/portage/postsync.d/ ) # Runs after the emerge --sync to automatically merge in the Element # set > /tmp/postsync.env # # add my stuffs to portage log_svn=$(mktemp) svn export --force https://edoceo.com/svn/praxis/portage/ /usr/portage/ >$log_svn || true # # purge Praxis distfiles so we start fresh rm -fr /usr/portage/distfiles/praxis* >/dev/null # # add edoceo to the categories grep -q edoceo /usr/portage/profiles/categories || echo 'edoceo' >> /usr/portage/profiles/categories # # Add profiles to the profile list grep -q 'edoceo' /usr/portage/profiles/profiles.desc || ( cat >> /usr/portage/profiles/profiles.desc <<EOS # These lines added by Edoceo # @deprecated, leave till all 32bits are gone x86 edoceo/praxis/x32 stable x86 edoceo/praxis/x32/gui stable amd64 edoceo/nucleus stable amd64 edoceo/praxis stable amd64 edoceo/praxis/gui stable EOS ) # Rebuild Digests for f in $(awk '/\.ebuild$/ { print $2 }' $log_svn); do ebuild $f digest >/dev/null || true done rm -fr $log_svn # Update Portage Timestamp # date --utc +'%a %b %e %H:%M:%S %Z %Y' > /usr/portage/metadata/timestamp # date --utc +'%a, %d %b %Y %H:%M:%S %z' > /usr/portage/metadata/timestamp.chk # date --utc +'%s %a %b %e %H:%M:%S %Y %Z' > /usr/portage/metadata/timestamp.x # Restore Profile eselect profile set $praxis_profile # Trim our Portage Binhost Root # find /var/www/cdn.edoceo.com/element/ -type f -mtime +1095 -exec rm -fv {} + # /etc/init.d/rsyncd --nocolor start >/dev/null
ca6cdf4b8219cae58ab146e51c71cd3380997775
[ "Markdown", "Shell" ]
5
Shell
edoceo/praxis
17f11f8a989ea42d85f808af948997f299c4c6e8
adb00dd2b99b8d779c9fa2cb0b8f3b424b7433fa
refs/heads/master
<repo_name>wangbin2016/netty_learn<file_sep>/src/main/java/com/wb/netty/ch07/client/SubReqClient.java package com.wb.netty.ch07.client; import io.netty.bootstrap.Bootstrap; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelOption; import io.netty.channel.EventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.channel.socket.SocketChannel; import io.netty.channel.socket.nio.NioSocketChannel; import io.netty.handler.codec.serialization.ClassResolvers; import io.netty.handler.codec.serialization.ObjectDecoder; import io.netty.handler.codec.serialization.ObjectEncoder; public class SubReqClient { public void req(int port) { EventLoopGroup workerGroup = new NioEventLoopGroup(); try { Bootstrap b = new Bootstrap(); b.group(workerGroup); b.channel(NioSocketChannel.class); b.option(ChannelOption.SO_KEEPALIVE, true); b.handler(new ChannelInitializer<SocketChannel>() { @Override protected void initChannel(SocketChannel ch) throws Exception { ch.pipeline().addLast(new ObjectDecoder(1024 * 1024,ClassResolvers.weakCachingConcurrentResolver(this.getClass().getClassLoader()))); ch.pipeline().addLast(new ObjectEncoder()); ch.pipeline().addLast(new SubReqClientHandle()); } }); // Start the client. 
ChannelFuture f; try { f = b.connect("127.0.0.1", port).sync(); f.channel().closeFuture().sync(); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } finally { workerGroup.shutdownGracefully(); } } public static void main(String[] args) { new SubReqClient().req(8083); } } <file_sep>/src/main/java/GetImage.java import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.InetSocketAddress; import java.net.Proxy; import java.net.SocketAddress; import java.net.URL; import java.net.URLConnection; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.zip.GZIPInputStream; public class GetImage { /** * 测试 * @param args */ public static void main(String[] args) { getHtml(); } public static void getHtml(){ //System.out.println(getUrlByChrome("http://m.nanrenvip.net/tianhaiyi/2017/RBD-821.html","utf-8")); getImage(); } public static void getImage(){ //http://img3.nanrenvip.net/uploads/2017/04/rbd00821pl.jpg String url = "http://img3.nanrenvip.net/uploads/2017/04/rbd00821pl.jpg"; byte[] btImg = getImageFromNetByUrl(url); if(null != btImg && btImg.length > 0){ System.out.println("读取到:" + btImg.length + " 字节"); String fileName = "rbd00821pl.jpg"; writeImageToDisk(btImg, fileName); }else{ System.out.println("没有从该连接获得内容"); } } /** * 将图片写入到磁盘 * @param img 图片数据流 * @param fileName 文件保存时的名称 */ public static void writeImageToDisk(byte[] img, String fileName){ try { File file = new File("e:\\" + fileName); FileOutputStream fops = new FileOutputStream(file); fops.write(img); fops.flush(); fops.close(); System.out.println("图片已经写入到C盘"); } catch (Exception e) { e.printStackTrace(); } } /** * 根据地址获得数据的字节流 * @param strUrl 网络连接地址 * @return */ public static byte[] getImageFromNetByUrl(String strUrl){ try { URL url = new 
URL(strUrl); HttpURLConnection conn = (HttpURLConnection)url.openConnection(); conn.setRequestProperty("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8"); conn.setRequestProperty("Accept-Encoding", "gzip, deflate, sdch"); conn.setRequestProperty("Accept-Language", "zh-CN,zh;q=0.8"); conn.setRequestProperty("Cache-Control:", "max-age=0"); conn.setRequestProperty("Connection", "keep-alive"); conn.setRequestProperty("Upgrade-Insecure-Requests", "1"); conn.setRequestProperty("User-Agent", "Mozilla/5.0 (iPhone; CPU iPhone OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B143 Safari/601.1"); conn.setRequestProperty("Referer", strUrl); conn.setRequestMethod("GET"); conn.setConnectTimeout(5 * 1000); InputStream inStream = conn.getInputStream();//通过输入流获取图片数据 byte[] btImg = readInputStream(inStream);//得到图片的二进制数据 return btImg; } catch (Exception e) { e.printStackTrace(); } return null; } /** * 从输入流中获取数据 * @param inStream 输入流 * @return * @throws Exception */ public static byte[] readInputStream(InputStream inStream) throws Exception{ ByteArrayOutputStream outStream = new ByteArrayOutputStream(); byte[] buffer = new byte[1024]; int len = 0; while( (len=inStream.read(buffer)) != -1 ){ outStream.write(buffer, 0, len); } inStream.close(); return outStream.toByteArray(); } /** * 模拟chrome发送请求 */ @SuppressWarnings("unused") static public String getUrlByChrome(String urlString,String charsetCode) { URL url = null; URLConnection connection = null; InputStream in = null; if (urlString != null && !urlString.trim().startsWith("http:")) { return ""; } try { url = new URL(urlString); //匹配url为500的 就用代理 Pattern pattern500 = Pattern.compile("\\.(500|500wan)\\."); Matcher matcher500 = pattern500.matcher(urlString); //匹配url为ydniu的 就用代理 Pattern patternYdniu = Pattern.compile("\\.(ydniu)\\."); Matcher matcherYdniu = patternYdniu.matcher(urlString); if(false && matcher500.find()){ SocketAddress addr = new 
InetSocketAddress("172.16.31.10",80);//代理地址 Proxy typeProxy = new Proxy(Proxy.Type.HTTP, addr); connection = url.openConnection(typeProxy); } else if(false && matcherYdniu.find()){ SocketAddress addr = new InetSocketAddress("192.168.3.11",8000);//代理地址 Proxy typeProxy = new Proxy(Proxy.Type.HTTP, addr); connection = url.openConnection(typeProxy); }else{ connection=url.openConnection(); } connection.setConnectTimeout(60000); connection.setReadTimeout(60000); connection.setRequestProperty("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8"); connection.setRequestProperty("Accept-Encoding", "gzip, deflate, sdch"); connection.setRequestProperty("Accept-Language", "zh-CN,zh;q=0.8"); connection.setRequestProperty("Cache-Control:", "max-age=0"); connection.setRequestProperty("Connection", "keep-alive"); connection.setRequestProperty("Upgrade-Insecure-Requests", "1"); connection.setRequestProperty("User-Agent", "Mozilla/5.0 (iPhone; CPU iPhone OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B143 Safari/601.1"); connection.setRequestProperty("Referer", urlString); String charset = charsetCode; // default IE charset String encoding = ""; if (connection instanceof HttpURLConnection) { HttpURLConnection http = (HttpURLConnection) connection; encoding = http.getContentEncoding(); } // com.cailele.lottery.tools.LogUtil.out(connection.getContentEncoding()); @SuppressWarnings("rawtypes") Map headers = connection.getHeaderFields(); if (headers.size() > 0) { String response = headers.get(null).toString(); if (response.indexOf("200 OK") < 0) { throw new Exception("读取地址:" + url + " 错误:" + response); } /* * com.cailele.lottery.tools.LogUtil.out(headers.keySet().toArray().length); * for(Object o:headers.keySet().toArray()) { * com.cailele.lottery.tools.LogUtil.out(o==null?"":o.toString() * +"="+headers.get(o)); } // */ // * try { String contentType = headers.get("Content-Type").toString().replaceAll("\\[|\\]|\\\"", 
""); String ct[] = contentType.split(";"); if (ct.length > 1) { String[] cs = ct[1].split("="); if (cs.length > 1) { charset = cs[1]; } } } catch (Exception e) { } // */ } if (("gzip").equals(encoding)) { in = new GZIPInputStream(connection.getInputStream()); } else { in = connection.getInputStream(); } BufferedReader reader = new BufferedReader(new InputStreamReader(in, charset)); StringBuffer sb = new StringBuffer(); String temp = ""; while ((temp = reader.readLine()) != null) { sb.append(temp + "\r\n"); } return sb.toString(); } catch (Exception e) { e.printStackTrace(); } finally { try { if (in != null) in.close(); } catch (Exception e) { } } return null; } }
65536593748c7d20ec21bdb18baf43c43571b954
[ "Java" ]
2
Java
wangbin2016/netty_learn
8137a66f2f1f6fab1882c420ca6b314e79290624
1560872eed95a59719c6dfafe6f9fa18eaf93e06
refs/heads/master
<repo_name>ratchawutk55/web<file_sep>/student-page.php <?php require "config.php"; session_start(); if(!isset($_SESSION['login_user'])) { header('Location: login.php'); exit; } $sql = "SELECT Name,pic_url,email,Faculty FROM comsystem.students WHERE Student_ID = '".$_SESSION['login_user']."'"; $query = mysql_query($sql) or die(mysql_error()); $rows = mysql_num_rows($query) or die("1234"); if($rows == 1){ while($r1=mysql_fetch_array($query)) { $name = $r1["Name"] ; $pic_url = $r1["pic_url"]; $email = $r1["email"]; $faculty = $r1["Faculty"]; } } else; ?> <html> <head> <meta charset = "utf-8"> <title>Computer System.</title> <link rel="stylesheet" type="text/css" href="css/Header.css" /> <link rel="stylesheet" type="text/css" href="css/student-page.css" /> <STYLE> A:link { color: #F7B810; text-decoration:none} A:visited {color: #F7B810; text-decoration: none} A:hover {color: #F7B810} </STYLE> </head> <body> <div class = "titlepage"> <div class = "subtitle"> <img src="images/web.png" width="100%" height="100%"> </div> <div class = "subtext-title" align = "center"> <font size ="6" color="#EEEEEE" style="text-shadow: 1px 1px #2c3e50;"> <b>ภาควิชาวิศวกรรมไฟฟ้าและคอมพิวเตอร์ </font><br> <font size ="5" color="#EEEEEE" style="text-shadow: 1px 1px #2c3e50;"> Department of Electrical and Computer Engineering.<br> คณะวิศวกรรมศาสตร์ มหาวิทยาลัยนเรศวร</b></font> </div> </div> <div class = "menubar" align="center"> <a href="#"><div class = "menu1" align = "center"> <b class = "small">หน้าหลัก</b> </div> </a> <a href="appform.php"><div class = "menu2" align = "center"> <b class = "small">ฟอร์มโครงงาน</b> </div></a> <a href="#"><div class = "menu3" align = "center"> <b class = "small">ติดตามโครงงาน</b> </div></a> <a href="logout.php"><div class = "menu4" align = "center"> <b class = "small">ออกจากระบบ</b> </div></a> </div> <!-- Show User Login. 
<div class = "show-user" align = "center"> Welcome: คุณ สหกรณ์ </div>--> <!--==================================== Main page ========================================== --> <div class = "mainpage"> <div class = "profile"> <br> <div class = "picture"> <img src="<?php echo $pic_url;?>" width="100%" height="100%" style ="border-radius:15px;"> </div> <div class = "profile-info"> <table style="width:100%; color:#2980b9;" align ="left"> <tr align ="left"> <th align ="left" >ID:</th> <td align ="left" ><?php echo $_SESSION['login_user']; ?></td> </tr> <tr> <th align ="left">Name:</th> <td align ="left"><?php echo $name; ?></td> </tr> <tr> <th align ="left">Email:</th> <td align ="left"><?php echo $email; ?></td> </tr> <tr> <th align ="left">Faculty:</th> <td align ="left"><?php echo $faculty; ?></td> </tr> </table> </div> <div class = "profile-info" align = "center"> <input type = "button" class = "button_" value="Edit Profile"/> <input type = "button" class = "button_" value="Edit Profile"/> </div> </div> <div class = "submain" align = "center"> <br><br> <a href="download/manual.pdf"><div class ="botton-div"> <table style="width:100%; color:#2980b9;" align ="left"> <tr align ="left"> <td align ="left" style="width:50px;"><img src="images/pdf.png" width="50px" height="50px"></td> <td align = "center">คู่มือการใช้งาน</td> </tr> </table> </div></a> <div class ="botton-div"> <table style="width:100%; color:#2980b9;" align ="left"> <tr align ="left"> <td align ="left" style="width:50px;"><img src="images/clock.png" width="50px" height="50px"></td> <td align = "center">แจ้งเตือน</td> </tr> </table> </div> <div class ="botton-div"> <table style="width:100%; color:#2980b9;" align ="left"> <tr align ="left"> <td align ="left" style="width:50px;"><img src="images/tracking.png" width="50px" height="50px"></td> <td align = "center">ติดตามโครงงาน</td> </tr> </table> </div> <a href = "calendar.php"><div class ="botton-div"> <table style="width:100%; color:#2980b9;" align ="left"> <tr 
align ="left"> <td align ="left" style="width:50px;"><img src="images/calendar.png" width="50px" height="50px"></td> <td align = "center">ปฏิทิน</td> </tr> </table> </div></a> </div> </div> <div class = "fooster"> <center><font face = "ThaiSans Neue" size = "+1.5" color = "#446CB3"><b>ภาควิชาวิศวกรรมไฟฟ้าและคอมพิวเตอร์ คณะวิศวกรรมศาสตร์ มหาวิทยาลัยนเรศวร ตำบลท่าโพธิ์ อำเภอเมือง จังหวัดพิษณุโลก 65000<b></font></center> <center><font face = "ThaiSans Neue" size = "+1.5" color = "#446CB3"><b>โทร 0559-6437-3,0559-6437-1 แฟกซ์ 0559-6400-5 อีเมล์</font> <a href = "mailto:<EMAIL>" ><font class = "link" ><EMAIL> </font></a> <b></center> </div> </body> </html> <file_sep>/cpe01.php <?php require "config.php"; require "layout.php"; session_start(); if(!isset($_SESSION['login_user'])) { header('Location: login.php'); exit; } $id = $_SESSION['login_user']; $sth = $pdo->prepare("SELECT * FROM students WHERE Student_ID = :id"); $sth->bindParam(':id', $id, PDO::PARAM_STR); $sth->execute(); while ($row = $sth->fetch(PDO::FETCH_ASSOC)) { $name = $row['Name']; $email = $row['email']; $phone = $row['phone']; } ?> <html> <meta charset = "utf-8"> <head> <title>CPE01</title> <?php res() ?> </head> <body> <div class="container"> <!-- --> <div class="jumbotron"> <p class="text-center">แบบเสนอหัวข้อโครงงานวิศวกรรมคอมพิวเตอร์</p> <div class="panel panel-primary"> <div class="panel-heading">ชื่อโครงงาน</div> <div class="panel-body"> <div class="col-lg-6"> <label for="i-focused" class="control-label"> ชื่อภาษาไทย </label> <input type="text" value="This is focused..." class="form-control"> </div> <div class="col-lg-6"> <label for="i-focused" class="control-label"> ชื่อภาษาอังกฤษ </label> <input type="text" value="This is focused..." 
class="form-control"> </div> </div> </div> <div class="panel panel-primary"> <div class="panel-heading">รายชื่อนิสิตผู้ทำโครงงาน</div> <div class="panel-body"> <div class="row" > <div class="col-sm-1" align="center"> <label>ลำดับที่</label> <p><b> 1 </b></p> </div> <div class="col-lg-2"> <label>รหัสนิสิต</label> <div> <?php echo $id ?> </div> </div> <div class="col-sm-3"> <label>ชื่อ - สกุล</label> <div> <?php echo $name ?> </div> </div> <div class="col-sm-2"> <label>เบอร์โทรศัพท์</label> <div> <?php echo $phone ?> </div> </div> <div class="col-sm-2"> <label>อีเมลล์</label> <div> <?php echo $email ?></div> </div> <div class="col-sm-2"> <br> <!-- <button class="btn btn-danger btn-sm">ลบ</button> --> </div> </div> <hr> </div> <div class="row"> <div class="col-lg-1"> <label>ลำดับที่</label> <h5 class="text-center">2</h5> </div> <div class="col-lg-2"> <label>รหัสนิสิต</label> <asp:TextBox ID="text_id2" runat="server" CssClass="form-control" ReadOnly="True"></asp:TextBox> </div> <div class="col-lg-3"> <label>ชื่อ-นามสกุล</label> <asp:TextBox ID="text_name2" runat="server" CssClass="form-control" ReadOnly="True"></asp:TextBox> </div> <div class="col-lg-2"> <label>เบอร์โทร</label> <asp:TextBox ID="text_tel2" runat="server" CssClass="form-control" ReadOnly="True"></asp:TextBox> </div> <div class="col-lg-4"> <label>อีเมล์</label> <div class="input-group"> <asp:TextBox ID="text_email2" runat="server" CssClass="form-control" ReadOnly="True"></asp:TextBox> <span class="input-group-btn"> <asp:Button ID="btnRemove" runat="server" Text="ลบ" class="btn btn-danger" OnClick="remove" /> </span> </div> </div> </div> <div class="row"> <div class="container"> <div class="col-sm-3"> <label>กรอกรหัสนิสิต</label> <div class="input-group date"> <input type="text" class="form-control" style="height: 28px;"> <span class="input-group-btn"> <button type="button" class="btn btn-default"> <em class="fa fa-fw fa-search "></em> </button> </span> </div> </div> <div class="col-sm-3"> <br> 
<div><label> ชื่อที่พบ </label></div> </div> <div class="col-sm-2"> <br> <div><label> เบอร์ที่พบ </label></div> </div> <div class="col-sm-2"> <br> <div><label> อีเมลล์ที่พบ </label></div> </div> <div class="col-sm-2"> <br> <button class="btn btn-primary btn-sm">เพิ่ม</button> <button class="btn btn-danger btn-sm">ยกเลิก</button> </div> </div> </div> <br> </div> <!-- เพิ่ม สมาชิก --> <div id="myModal" class="modal fade" role="dialog"> <div class="modal-dialog"> <!-- Modal content--> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal">&times;</button> <h5 class="modal-title">เพิ่มสมาชิก</h5> </div> <div class="modal-body"> <label>กรอกรหัสนิสิต</label><br> <div class="col-sm-4"> <div class="input-group date"> <input type="text" class="form-control" style="height: 28px;"> <span class="input-group-btn"> <button type="button" class="btn btn-default"> <em class="fa fa-fw fa-search hidden-xs"></em> </button> </span> </div> </div> <br><br> </div> <div class="modal-footer"> <button type="button" class="btn btn-default" data-dismiss="modal">Close</button> </div> </div> </div> </div> <div class="panel panel-primary"> <div class="panel-heading">อาจารย์ที่ปรึกษาและกรรมการ</div> <div class="panel-body"> <div class="col-sm-4"><label>1</label> <select class="form-control" > <option value="volvo">Volvo</option> <option value="saab">Saab</option> <option value="mercedes">Mercedes</option> <option value="audi">Audi</option> </select> </div> <div class="col-sm-4"><label>2</label> <select class="form-control" > <option value="volvo">Volvo</option> <option value="saab">Saab</option> <option value="mercedes">Mercedes</option> <option value="audi">Audi</option> </select> </div> <div class="col-sm-4"><label>3</label> <select class="form-control" > <option value="volvo">Volvo</option> <option value="saab">Saab</option> <option value="mercedes">Mercedes</option> <option value="audi">Audi</option> </select> </div> </div> </div> <div 
class="row" align="center"> <div class="col-sm-12"><button type="button" class="btn btn-primary">บันทึก</button> <button type="button" class="btn btn-success disabled">บันทึกและส่งแบบฟอร์ม</button> </div> </div> </div> </div> </div> <form> </form> <script data-cfasync="false" src="http://alexgorbatchev.com/pub/sh/current/scripts/shCore.js"></script> <script data-cfasync="false" src="http://alexgorbatchev.com/pub/sh/current/scripts/shBrushXml.js"></script> <script data-cfasync="false" src="http://alexgorbatchev.com/pub/sh/current/scripts/shBrushJScript.js"></script> <script data-cfasync="false" data-main="js/release.min" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.1.17/require.min.js"></script> </body> </html><file_sep>/process.php <meta charset = "utf-8"> <?php require "config.php"; function login($usr,$pass,$type,$pdo) { if($type == 0) { $login_type = "admin"; $from = "adminregis"; } else if($type==1) { $login_type = "Student_ID"; $from = "studentregis"; } else { $login_type = "Projressor_ID"; $from = "profressorregis"; } $sth = $pdo->prepare("SELECT * FROM $from WHERE $login_type = :usr and password = :<PASSWORD>"); $sth->bindParam(':usr', $usr, PDO::PARAM_STR); $sth->bindParam(':psswrd', $pass, PDO::PARAM_STR); $sth->execute(); while ($row = $sth->fetch(PDO::FETCH_ASSOC)) { return true; } return false; } function goback($message,$url) { echo " <script type='text/javascript'> alert('$message'); window.location.href='$url' </script> "; } function success($user,$url) { session_start(); $_SESSION['login_user']= $user; // Initializing Session $status = "Success"; header("location: $url");// Redirecting To Other Page } ?><file_sep>/cpe01-active.php <?php require "config.php"; $name_thai = $_POST["name_thai"]; $name_eng = $_POST["name_eng"]; $std1 = $_POST["id_std1"]; $std2 = $_POST["id_std2"]; $std3 = $_POST["id_std3"]; $pro1 = $_POST["pro1"]; $pro2 = $_POST["pro2"]; $pro3 = $_POST["pro3"]; if($name_thai == null || $name_eng == null || $std1 == null || 
$pro1 == null ||$pro2 == null || $pro3 == null ) { header("Refresh:0.1; url=cpe01.php"); }else{ $sql_check = "SELECT ID FROM comsystem.project_status WHERE ID = '".$std1."' or ID = '".$std2."' or ID = '".$std3."'"; $query_check = mysql_query($sql_check) or die(mysql_error()); $rows_check = mysql_num_rows($query_check); if($rows_check > 0){ while($rs = mysql_fetch_array($query_check) ){ echo "<font size ='5' color='#2c3e50'> สมาชิกรหัสนิสิต ".$rs['ID']." <br>"; } echo "มีชื่อร่วมโครงงานในระบบ <a href='cpe01.php'><button>BACK.</button></a><hr>"; } else{ $sql = "INSERT INTO comsystem.createproject(nameThai,nameEng,std1,std2,std3,pro1,pro2,pro3) VALUES('$name_thai','$name_eng','$std1','$std2','$std3','$pro1','$pro2','$pro3')"; $sql2 = "INSERT INTO comsystem.project_status(ID,status_ID,status_title) VALUES('$std1','1','CPE01')"; $sql3 = "INSERT INTO comsystem.project_status(ID,status_ID,status_title) VALUES('$std2','1','CPE01')"; $sql4 = "INSERT INTO comsystem.project_status(ID,status_ID,status_title) VALUES('$std3','1','CPE01')"; if($std1 !=null) { mysql_query($sql2); } if($std2 != null) { mysql_query($sql3); } if($std3!=null) { mysql_query($sql4); } if(mysql_query($sql)){ header("Refresh:0.1; url=appform.php"); } else { echo mysql_error(); } } } ?> <file_sep>/README.md # WebSite-Comsys This project created from Computer system Engineering class. This Using php based and mysql for database #Installation - 1. Install Appserve [http://www.appservnetwork.com/] ,When Appserve installed you can access [http://localhost/] to browser<br/> - 2. You put the project to the folder [C:\AppServ\www], Example -> c:\Appserv\www\WebSite-Comsys<br/> - 3. Then [access http://localhost/WebSite-Comsys]<br/> - 4. 
Go to DB , you will see the Database.sql please import to phpMyadmin for use database #Author - <NAME> - Email : <EMAIL> - <NAME> - Email : <EMAIL> - <NAME> - Email : <EMAIL> - <NAME> - Email : <EMAIL> - <NAME> - Email : <EMAIL><br/> - This Project Created 2014-Juior[Third year 2nd]
f835d74163be7d34aa8cd1acda2ada1031cf3e17
[ "Markdown", "PHP" ]
5
PHP
ratchawutk55/web
066ad456eef8bfa855f9cca089b9814b7f4195d0
9a6180faaf4bf7544ecb3675026c85168fdcbf91
refs/heads/master
<file_sep>//global Variables var countdownTimer = { time: 30, reset: function() { this.time = 30; } } var correctAnswers = 0; var wrongAnswers = 0; var index = 0; var timer = setInterval(decrement(), 1000); function decrement() { $("#timer").html("<h2>" + countdownTimer.time + "</h2>") countdownTimer.time--; }; var audioElement = document.createElement("audio"); audioElement.setAttribute("src", "cheers.wav"); //global funtions function stop(){ clearInterval(timer); } function loadQuestionTwo(){ clearInterval(timer) countdownTimer.reset(); $(".question").text(q2.question); $("#bigHarden").remove(); $(".answerchoices").show(); $("#buttonA").text(q2.possibleAnswers[0]); $("#buttonB").text(q2.possibleAnswers[1]); $("#buttonC").text(q2.possibleAnswers[2]); $("#buttonD").text(q2.possibleAnswers[3]); $("#timer").show() timer = setInterval(decrement, 1000); } function loadQuestionThree(){ clearInterval(timer) countdownTimer.reset(); $(".question").text(q3.question); $("#mikejordan").remove(); $(".answerchoices1").show(); $("#buttonA").text(q3.possibleAnswers[0]); $("#buttonB").text(q3.possibleAnswers[1]); $("#buttonC").text(q3.possibleAnswers[2]); $("#buttonD").text(q3.possibleAnswers[3]); $("#timer").show() timer = setInterval(decrement, 1000); } function loadQuestionFour(){ clearInterval(timer) countdownTimer.reset(); $(".question").text(q4.question); $("#iverson").remove(); $(".answerchoices2").show(); $("#buttonA").text(q4.possibleAnswers[0]); $("#buttonB").text(q4.possibleAnswers[1]); $("#buttonC").text(q4.possibleAnswers[2]); $("#buttonD").text(q4.possibleAnswers[3]); $("#timer").show() timer = setInterval(decrement, 1000); } function loadQuestionFive(){ clearInterval(timer) countdownTimer.reset(); $(".question").text(q5.question); $("#durant").remove(); $(".answerchoices3").show(); $("#buttonA").text(q5.possibleAnswers[0]); $("#buttonB").text(q5.possibleAnswers[1]); $("#buttonC").text(q5.possibleAnswers[2]); $("#buttonD").text(q5.possibleAnswers[3]); 
$("#timer").show() timer = setInterval(decrement, 1000); } function loadQuestionSix(){ clearInterval(timer) countdownTimer.reset(); $(".question").text(q6.question); $("#russ").remove(); $(".answerchoices4").show(); $("#buttonA").text(q6.possibleAnswers[0]); $("#buttonB").text(q6.possibleAnswers[1]); $("#buttonC").text(q6.possibleAnswers[2]); $("#buttonD").text(q6.possibleAnswers[3]); $("#timer").show() timer = setInterval(decrement, 1000); } function loadQuestionSeven(){ clearInterval(timer) countdownTimer.reset(); $(".question").text(q7.question); $("#vince").remove(); $(".answerchoices5").show(); $("#buttonA").text(q7.possibleAnswers[0]); $("#buttonB").text(q7.possibleAnswers[1]); $("#buttonC").text(q7.possibleAnswers[2]); $("#buttonD").text(q7.possibleAnswers[3]); $("#timer").show(); timer = setInterval(decrement, 1000); } function loadQuestionEight(){ clearInterval(timer) countdownTimer.reset(); $(".question").text(q8.question); $("#lebron").remove(); $(".answerchoices6").show(); $("#buttonA").text(q8.possibleAnswers[0]); $("#buttonB").text(q8.possibleAnswers[1]); $("#buttonC").text(q8.possibleAnswers[2]); $("#buttonD").text(q8.possibleAnswers[3]); $("#timer").show(); timer = setInterval(decrement, 1000); } function loadQuestionNine(){ clearInterval(timer) countdownTimer.reset(); $(".question").text(q9.question); $("#giannis").remove(); $(".answerchoices7").show(); $("#buttonA").text(q9.possibleAnswers[0]); $("#buttonB").text(q9.possibleAnswers[1]); $("#buttonC").text(q9.possibleAnswers[2]); $("#buttonD").text(q9.possibleAnswers[3]); $("#timer").show(); timer = setInterval(decrement, 1000); } function loadQuestionTen(){ clearInterval(timer) countdownTimer.reset(); $(".question").text(q10.question); $("#kobe").remove(); $(".answerchoices8").show(); $("#buttonA").text(q10.possibleAnswers[0]); $("#buttonB").text(q10.possibleAnswers[1]); $("#buttonC").text(q10.possibleAnswers[2]); $("#buttonD").text(q10.possibleAnswers[3]); $("#timer").show(); timer = 
setInterval(decrement, 1000); } function scoreCard (){ $(".question").html("<p>Correct: " + correctAnswers + "<br> Incorrect: " + wrongAnswers + "</P>" ); } $(document).ready(function(){ $(".answerchoice").hide() $("#timer").hide() }); var q1 = { question : 'What Houston Rockets Superstar wears jersey number 13?', possibleAnswers : ['A. <NAME>', 'B. <NAME>', 'C. <NAME>', 'D. <NAME>'], key : [false, false, true, false], answer : 'C. <NAME>' }; var q2 = { question: 'What NBA legend won 6 championships with the Chicago Bulls?', possibleAnswers: ['A. <NAME>', '<NAME>', '<NAME>', 'D. <NAME>'], key : [false, true, false, false], answer : 'B. <NAME>' }; var q3 = { question : 'Which NBA player was the first overall pick in the 1996 NBA draft', possibleAnswers : ['A. <NAME>', '<NAME>', 'C. <NAME>', 'D. <NAME>'], key : [false, true, false, false], answer : '<NAME>' }; var q4 = { question : 'Which 2017-2018 Golden State Warrior played college basketball at the University of Texas?', possibleAnswers : ['A. <NAME>', 'B. <NAME>', '<NAME>', 'D. <NAME>'], key : [true, false, false, false], answer : 'A. <NAME>' }; var q5 = { question : 'What NBA player averaged a triple-double in the 2016-2017 NBA season', possibleAnswers : ['A. <NAME>', '<NAME>', 'C. <NAME>', 'D. <NAME>'], key : [false, true, false, false], answer : 'B. <NAME>' }; var q6 = { question : 'Who won the 2000 NBA all-star weekend slam dunk contest?', possibleAnswers : ['A. <NAME>', 'B. <NAME>', '<NAME>', 'D. <NAME>'], key : [true, false, false, false], answer : 'A. <NAME>' }; var q7 = { question : 'Which player wore number 23 and helped his team defeat the Golden State Warriors in the NBA finals?', possibleAnswers : ['<NAME>', '<NAME>', '<NAME>', 'D. <NAME>'], key : [false, false, true, false], answer : '<NAME>' }; var q8 = { question : 'Which NBA player is known as the "Greek Freak"?', possibleAnswers : ['A. <NAME>', 'B. <NAME>', 'C. <NAME>', 'D. 
<NAME>'], key : [false, true, false, false], answer : '<NAME>' }; var q9 = { question : 'Which NBA great scored 81 points in one game', possibleAnswers : ['A. <NAME>', '<NAME>', '<NAME>', 'D. <NAME>'], key : [false, false, false, true], answer : 'D. <NAME>' }; var q10 = { question : "Which NBA player left his dad's team to join the Boston Celtics", possibleAnswers : ['A. <NAME>', '<NAME>', 'C. <NAME>', 'D. <NAME>'], key : [false, true, false, false], answer : '<NAME>' } var questionArray = [q1, q2, q3, q4, q5, q6, q7, q8, q9, q10]; function question1correct(){ $(".question").text("Correct!") $(".answerchoice").html("<img src='./assets/images/hardenonfire.jpg' id='bigHarden'>") $("#bigHarden").css({height: "500px", width: "300px"}) audioElement.play() correctAnswers++; } function question1wrong(){ $(".question").text("Thats wrong! The correct answer is <NAME>!"); $(".answerchoice").html("<img src='./assets/images/hardenonfire.jpg' id='bigHarden'>"); $("#bigHarden").css({height: "500px", width: "300px"}); wrongAnswers++; } function question2correct(){ $(".question").text("Correct!") $(".answerchoices").html("<img src='./assets/images/kingjordan.jpg' id='mikeJordan'>") $("#mikejordan").css({height: "500px", width: "300px"}) correctAnswers++; } function question2wrong(){ $(".question").text("Thats wrong! The correct answer is <NAME>!"); $(".answerchoices").html("<img src='./assets/images/kingjordan.jpg' id='mikeJordan'>"); $("#mikejordan").css({height: "500px", width: "300px"}); wrongAnswers++; } function question3correct(){ $(".question").text("Correct!") $(".answerchoices1").html("<img src='../images/iverson.jpg' id='iverson'>") $("#iverson").css({height: "500px", width: "300px"}) correctAnswers++; } function question3wrong(){ $(".question").text("Thats wrong! 
The correct answer is <NAME>!"); $(".answerchoices1").html("<img src='./assets/images/iverson.jpg' id='iverson'>"); $("#iverson").css({height: "500px", width: "300px"}); wrongAnswers++; } function question4correct(){ $(".question").text("Correct!") $(".answerchoices2").html("<img src='./assets/images/durant.jpg' id='durant'>") $("#durant").css({height: "500px", width: "300px"}) correctAnswers++; } function question4wrong(){ $(".question").text("Thats wrong! The correct answer is <NAME>!"); $(".answerchoices2").html("<img src='./assets/images/durant.jpg' id='durant'>"); $("#durant").css({height: "500px", width: "300px"}); wrongAnswers++; } function question5correct(){ $(".question").text("Correct!") $(".answerchoices3").html("<img src='./assets/images/russ.jpg' id='russ'>") $("#russ").css({height: "500px", width: "300px"}) correctAnswers++; } function question5wrong(){ $(".question").text("Thats wrong! The correct answer is <NAME>!"); $(".answerchoices3").html("<img src='./assets/images/russ.jpg' id='russ'>"); $("#russ").css({height: "500px", width: "300px"}); wrongAnswers++; } function question6correct(){ $(".question").text("Correct!") $(".answerchoices4").html("<img src='./assets/images/vince.jpg' id='vince'>") $("#vince").css({height: "500px", width: "300px"}) correctAnswers++; } function question6wrong(){ $(".question").text("Thats wrong! The correct answer is <NAME>!"); $(".answerchoices4").html("<img src='./assets/images/vince.jpg' id='vince'>"); $("#vince").css({height: "500px", width: "300px"}); wrongAnswers++; } function question7correct(){ $(".question").text("Correct!") $(".answerchoices5").html("<img src='./assets/images/kingjames.jpg' id='lebron'>") $("#lebron").css({height: "500px", width: "300px"}) correctAnswers++; } function question7wrong(){ $(".question").text("Thats wrong! 
The correct answer is <NAME>!"); $(".answerchoices5").html("<img src='./assets/images/kingjames.jpg' id='lebron'>"); $("#lebron").css({height: "500px", width: "300px"}); wrongAnswers++; } function question8correct(){ $(".question").text("Correct!") $(".answerchoices6").html("<img src='./assets/images/giannis.jpg' id='giannis'>") $("#giannis").css({height: "500px", width: "300px"}) correctAnswers++; } function question8wrong(){ $(".question").text("Thats wrong! The correct answer is <NAME>!"); $(".answerchoices6").html("<img src='./assets/images/giannis.jpg' id='giannis'>"); $("#giannis").css({height: "500px", width: "300px"}); wrongAnswers++; } function question9correct(){ $(".question").text("Correct!") $(".answerchoices7").html("<img src='./assets/images/kobe.jpg' id='kobe'>") $("#kobe").css({height: "500px", width: "300px"}) correctAnswers++; } function question9wrong(){ $(".question").text("Thats wrong! The correct answer is <NAME>!"); $(".answerchoices7").html("<img src='./assets/images/kobe.jpg' id='kobe'>"); $("#kobe").css({height: "500px", width: "300px"}); wrongAnswers++; } function question10correct(){ $(".question").text("Correct!") $(".answerchoices8").html("<img src='./assets/images/kyrie.jpg' id='kyrie'>") $("#kyrie").css({height: "500px", width: "300px"}) correctAnswers++; } function question10wrong(){ $(".question").text("Thats wrong! 
The correct answer is <NAME>!"); $(".answerchoices8").html("<img src='./assets/images/kyrie.jpg' id='kyrie'>"); $("#kyrie").css({height: "500px", width: "300px"}); wrongAnswers++; } function newGame(){ $("#startGame").show(); $(".answerchoices8").attr("class", "answerchoice") $(".answerchoice").hide() $("#timer").hide() } $("#startGame").on("click", function(){ $("#startGame").hide(); $(".question").text(q1.question); $(".answerchoice").show(); $("#buttonA").text(q1.possibleAnswers[0]); $("#buttonB").text(q1.possibleAnswers[1]); $("#buttonC").text(q1.possibleAnswers[2]); $("#buttonD").text(q1.possibleAnswers[3]); $("#timer").show() timer = setInterval(decrement, 1000); $(".answerchoice").on("click", function(){ if(this.id == 'buttonA') { var answerChosen = 'A'; } else if(this.id == 'buttonB') { answerChosen = 'B'; } else if (this.id == 'buttonC') { answerChosen = 'C'; } else if (this.id == 'buttonD') { answerChosen = 'D'; } if ((answerChosen == 'A') && (q1.key[0] == true)) { question1correct(); } else if (answerChosen == 'A') { question1wrong(); } else if ((answerChosen == 'B') && (q1.key[1] == true)) { setTimeout(question1correct(), 3000) } else if (answerChosen == 'B') { question1wrong(); } else if ((answerChosen == 'C') && (q1.key[2] == true)) { setTimeout(question1correct(), 3000) } else if (answerChosen == 'C') { question1wrong(); } else if ((answerChosen == 'D') && (q1.key[3] == true)) { question1correct() } else if (answerChosen == 'D') { question1wrong(); } clearInterval(timer) $(".answerchoice").attr("class", "answerchoices") //timer then on to the next question setTimeout(loadQuestionTwo, 3000); $(".answerchoices").on("click", function(){ console.log(this); if(this.id == 'buttonA') { var answerChosen = 'A'; } else if(this.id == 'buttonB') { answerChosen = 'B'; } else if (this.id == 'buttonC') { answerChosen = 'C'; } else if (this.id == 'buttonD') { answerChosen = 'D'; } if ((answerChosen == 'A') && (q2.key[0] == true)) { question2correct(); } else if 
(answerChosen == 'A') { question2wrong(); } if ((answerChosen == 'B') && (q2.key[1] == true)) { question2correct(); } else if (answerChosen == 'B') { question2wrong(); } if ((answerChosen == 'C') && (q2.key[2] == true)) { question2correct(); } else if (answerChosen == 'C') { question2wrong(); } if ((answerChosen == 'D') && (q2.key[3] == true)) { question2correct() } else if (answerChosen == 'D') { question2wrong(); } stop(); //timer then on to the next question $(".answerchoices").attr("class", "answerchoices1") setTimeout(loadQuestionThree, 3000); $(".answerchoices1").on("click", function(){ console.log(this); if(this.id == 'buttonA') { var answerChosen = 'A'; } else if(this.id == 'buttonB') { answerChosen = 'B'; } else if (this.id == 'buttonC') { answerChosen = 'C'; } else if (this.id == 'buttonD') { answerChosen = 'D'; } if ((answerChosen == 'A') && (q3.key[0] == true)) { question3correct(); } else if (answerChosen == 'A') { question3wrong(); } if ((answerChosen == 'B') && (q3.key[1] == true)) { question3correct(); } else if (answerChosen == 'B') { question3wrong(); } if ((answerChosen == 'C') && (q3.key[2] == true)) { question3correct(); } else if (answerChosen == 'C') { question3wrong(); } if ((answerChosen == 'D') && (q3.key[3] == true)) { question3correct() } else if (answerChosen == 'D') { question3wrong(); } stop(); //timer then on to the next question $(".answerchoices1").attr("class", "answerchoices2") setTimeout(loadQuestionFour, 3000); $(".answerchoices2").on("click", function(){ console.log(this); if(this.id == 'buttonA') { var answerChosen = 'A'; } else if(this.id == 'buttonB') { answerChosen = 'B'; } else if (this.id == 'buttonC') { answerChosen = 'C'; } else if (this.id == 'buttonD') { answerChosen = 'D'; } if ((answerChosen == 'A') && (q4.key[0] == true)) { question4correct(); } else if (answerChosen == 'A') { question4wrong(); } if ((answerChosen == 'B') && (q4.key[1] == true)) { question4correct(); } else if (answerChosen == 'B') { 
question4wrong(); } if ((answerChosen == 'C') && (q4.key[2] == true)) { question4correct(); } else if (answerChosen == 'C') { question4wrong(); } if ((answerChosen == 'D') && (q4.key[3] == true)) { question4correct() } else if (answerChosen == 'D') { question4wrong(); } stop(); //timer then on to the next question $(".answerchoices2").attr("class", "answerchoices3") setTimeout(loadQuestionFive, 3000); $(".answerchoices3").on("click", function(){ console.log(this); if(this.id == 'buttonA') { var answerChosen = 'A'; } else if(this.id == 'buttonB') { answerChosen = 'B'; } else if (this.id == 'buttonC') { answerChosen = 'C'; } else if (this.id == 'buttonD') { answerChosen = 'D'; } if ((answerChosen == 'A') && (q5.key[0] == true)) { question5correct(); } else if (answerChosen == 'A') { question5wrong(); } if ((answerChosen == 'B') && (q5.key[1] == true)) { question5correct(); } else if (answerChosen == 'B') { question5wrong(); } if ((answerChosen == 'C') && (q5.key[2] == true)) { question5correct(); } else if (answerChosen == 'C') { question5wrong(); } if ((answerChosen == 'D') && (q5.key[3] == true)) { question5correct() } else if (answerChosen == 'D') { question5wrong(); } stop(); //timer then on to the next question $(".answerchoices3").attr("class", "answerchoices4") setTimeout(loadQuestionSix, 3000); $(".answerchoices4").on("click", function(){ console.log(this); if(this.id == 'buttonA') { var answerChosen = 'A'; } else if(this.id == 'buttonB') { answerChosen = 'B'; } else if (this.id == 'buttonC') { answerChosen = 'C'; } else if (this.id == 'buttonD') { answerChosen = 'D'; } if ((answerChosen == 'A') && (q6.key[0] == true)) { question6correct(); } else if (answerChosen == 'A') { question6wrong(); } if ((answerChosen == 'B') && (q6.key[1] == true)) { question6correct(); } else if (answerChosen == 'B') { question6wrong(); } if ((answerChosen == 'C') && (q6.key[2] == true)) { question6correct(); } else if (answerChosen == 'C') { question6wrong(); } if ((answerChosen 
== 'D') && (q6.key[3] == true)) { question6correct() } else if (answerChosen == 'D') { question6wrong(); } stop(); //timer then on to the next question $(".answerchoices4").attr("class", "answerchoices5") setTimeout(loadQuestionSeven, 3000); $(".answerchoices5").on("click", function(){ console.log(this); if(this.id == 'buttonA') { var answerChosen = 'A'; } else if(this.id == 'buttonB') { answerChosen = 'B'; } else if (this.id == 'buttonC') { answerChosen = 'C'; } else if (this.id == 'buttonD') { answerChosen = 'D'; } if ((answerChosen == 'A') && (q7.key[0] == true)) { question7correct(); } else if (answerChosen == 'A') { question7wrong(); } if ((answerChosen == 'B') && (q7.key[1] == true)) { question7correct(); } else if (answerChosen == 'B') { question7wrong(); } if ((answerChosen == 'C') && (q7.key[2] == true)) { question7correct(); } else if (answerChosen == 'C') { question7wrong(); } if ((answerChosen == 'D') && (q7.key[3] == true)) { question7correct() } else if (answerChosen == 'D') { question7wrong(); } stop(); //timer then on to the next question $(".answerchoices5").attr("class", "answerchoices6") setTimeout(loadQuestionEight, 3000); $(".answerchoices6").on("click", function(){ console.log(this); if(this.id == 'buttonA') { var answerChosen = 'A'; } else if(this.id == 'buttonB') { answerChosen = 'B'; } else if (this.id == 'buttonC') { answerChosen = 'C'; } else if (this.id == 'buttonD') { answerChosen = 'D'; } if ((answerChosen == 'A') && (q8.key[0] == true)) { question8correct(); } else if (answerChosen == 'A') { question8wrong(); } if ((answerChosen == 'B') && (q8.key[1] == true)) { question8correct(); } else if (answerChosen == 'B') { question8wrong(); } if ((answerChosen == 'C') && (q8.key[2] == true)) { question8correct(); } else if (answerChosen == 'C') { question8wrong(); } if ((answerChosen == 'D') && (q8.key[3] == true)) { question8correct() } else if (answerChosen == 'D') { question8wrong(); } stop(); //timer then on to the next question 
$(".answerchoices6").attr("class", "answerchoices7") setTimeout(loadQuestionNine, 3000); $(".answerchoices7").on("click", function(){ console.log(this); if(this.id == 'buttonA') { var answerChosen = 'A'; } else if(this.id == 'buttonB') { answerChosen = 'B'; } else if (this.id == 'buttonC') { answerChosen = 'C'; } else if (this.id == 'buttonD') { answerChosen = 'D'; } if ((answerChosen == 'A') && (q9.key[0] == true)) { question9correct(); } else if (answerChosen == 'A') { question9wrong(); } if ((answerChosen == 'B') && (q9.key[1] == true)) { question9correct(); } else if (answerChosen == 'B') { question9wrong(); } if ((answerChosen == 'C') && (q9.key[2] == true)) { question9correct(); } else if (answerChosen == 'C') { question9wrong(); } if ((answerChosen == 'D') && (q9.key[3] == true)) { question9correct() } else if (answerChosen == 'D') { question9wrong(); } stop(); //timer then on to the next question $(".answerchoices7").attr("class", "answerchoices8") setTimeout(loadQuestionTen, 3000); $(".answerchoices8").on("click", function(){ console.log(this); if(this.id == 'buttonA') { var answerChosen = 'A'; } else if(this.id == 'buttonB') { answerChosen = 'B'; } else if (this.id == 'buttonC') { answerChosen = 'C'; } else if (this.id == 'buttonD') { answerChosen = 'D'; } if ((answerChosen == 'A') && (q10.key[0] == true)) { question10correct(); } else if (answerChosen == 'A') { question10wrong(); } if ((answerChosen == 'B') && (q10.key[1] == true)) { question10correct(); } else if (answerChosen == 'B') { question10wrong(); } if ((answerChosen == 'C') && (q10.key[2] == true)) { question10correct(); } else if (answerChosen == 'C') { question10wrong(); } if ((answerChosen == 'D') && (q10.key[3] == true)) { question10correct() } else if (answerChosen == 'D') { question10wrong(); } stop(); //timer then on to the next question setTimeout(scoreCard(), 3000); function newGame(){ clearInterval(timer); $("#startGame").show(); $(".answerchoices8").attr("class", "answerchoice") 
$(".answerchoice").hide() $("#timer").hide() } } );})})})})})})})})})})
9802462a3cc6a666a3e1cdf9e750e0d49f7bf4d9
[ "JavaScript" ]
1
JavaScript
prestonedwards3/TriviaGame
59b98f6e21e9b5683577e584ff2f4e18fefceed7
321a4a73a08ca067d002cd02631997c2b78fc95a
refs/heads/main
<repo_name>AndrewPhilbin/D3-Scatter-Plot<file_sep>/script.js let margin = { top: 20, right: 50, bottom: 20, left: 50 }; let height = 600 - margin.left - margin.right; let width = 1000 - margin.top - margin.bottom; let svgBuild = d3 .select('.container') .append('svg') .attr('width', width + margin.left + margin.right) .attr('height', height + margin.top + margin.bottom) .append('g') .attr('transform', 'translate(' + margin.left + ',' + margin.top + ')'); d3.json( 'https://raw.githubusercontent.com/freeCodeCamp/ProjectReferenceData/master/cyclist-data.json' ).then((data) => { console.log(data); let times = []; data.forEach((item) => { times.push(item.Time); }); let parsedTimes = times.map((time) => time.split(':')); let yDates = parsedTimes.map((time) => { return new Date(0, 0, 0, 0, time[0], time[1]); }); let years = []; data.forEach((item) => years.push(item.Year)); let xScale = d3 .scaleLinear() .domain([d3.min(years) - 1, d3.max(years) + 1]) .range([0, width]); let xAxis = d3.axisBottom(xScale).tickFormat(d3.format('d')); svgBuild .append('g') .attr('id', 'x-axis') .attr('transform', 'translate(0,' + height + ')') .call(xAxis); let yParser = d3.timeFormat('%M:%S'); let yScale = d3.scaleTime().domain(d3.extent(yDates)).range([0, height]); let yAxis = d3.axisLeft(yScale).tickFormat(yParser); svgBuild.append('g').attr('id', 'y-axis').call(yAxis); let tooltip = d3 .select('.container') .append('div') .attr('id', 'tooltip') .style('opacity', 0); let mouseover = function (d) { tooltip.style('opacity', 1).attr('data-year', d.Year); d3.select(this).style('stroke', 'black').style('opacity', 1); }; let mousemove = function (d) { tooltip .html(`${d.Name} <br> ${d.Year} ${d.Time} <br> ${d.Doping}`) .style('left', d3.mouse(this)[0] + 100 + 'px') .style('top', d3.mouse(this)[1] + 10 + 'px'); }; let mouseleave = function (d) { tooltip.style('opacity', 0); d3.select(this).style('stroke', 'none').style('opacity', 0.8); }; svgBuild .append('g') .selectAll('.dot') .data(data) 
.enter() .append('circle') .attr('class', 'dot') .attr('cx', (d) => xScale(d.Year)) .attr('cy', (d, i) => yScale(yDates[i])) .attr('r', 5) .attr('data-xvalue', (d, i) => d.Year) .attr('data-yvalue', (d, i) => yDates[i]) .attr('fill', (d) => { if (d.Doping === '') { return 'green'; } else return 'red'; }) .on('mouseover', mouseover) .on('mousemove', mousemove) .on('mouseleave', mouseleave); });
8f788873bcb5a179d6c83ddd38bb08aa5419eb70
[ "JavaScript" ]
1
JavaScript
AndrewPhilbin/D3-Scatter-Plot
8e500447c0ee1fa4083f034ca17819d19e7928bc
9c47ed55abc46fb7557f2a11a531d2cab61727c1
refs/heads/master
<repo_name>Powerman-code/goit-react-hw-04-movies<file_sep>/src/App.js import { Switch, Route } from 'react-router-dom'; import './App.css'; // import Navigation from './Navigation/Navigation'; import AppBar from './AppBar/AppBar'; import Container from './Container/Container'; // import NotFoundView from './views/NotFoundView/NotFoundView'; import HomePage from './views/HomePage/HomePage'; // import MoviesPage from './views/MoviesPage/MoviesPage'; import MovieDetailsPage from './views/MovieDetailsPage/MovieDetailsPage'; // import Cast from './views/Cast/Cast'; // import Reviews from './views/Reviews/Reviews'; // <KEY> function App() { // const params = useParams(); return ( <Container> <AppBar /> <Switch> <Route path="/" exact> <HomePage /> </Route> {/* <Route path="/movies" exact> <MoviesPage></MoviesPage> </Route> */} <Route path="/movies/:movieId"> <MovieDetailsPage></MovieDetailsPage> </Route> {/* <Route path="movies/:movieId/cast"> <Cast></Cast> </Route> */} {/* <Route path="movies/:movieId/reviews"> <Reviews></Reviews> </Route> */} {/* <Route path="/"> <NotFoundView></NotFoundView> </Route> <Route path="/"> <FilmPendingView></FilmPendingView> </Route> <Route path="/"> <FilmErrorView></FilmErrorView> </Route> */} </Switch> </Container> // <div className="App"> // <header className="App-header"></header> // </div> ); } export default App; <file_sep>/src/views/MoviesPage/MoviesPage.js import { useState, useEffect } from 'react'; import { Link, useRouteMatch } from 'react-router-dom'; import api from '../../services/movies-api'; export default function MoviesPage() { const [movies, setMovies] = useState(null); const { url } = useRouteMatch(); useEffect(() => { api.fetchMovies().then(setMovies); }, []); return ( <> {movies && ( <ul> {movies.map(movie => ( <li key={movie.id}> <Link to={`${url}/${movies.id}`}>{movie.title}</Link>тут будет карточка фильма/список фильмов </li> ))} </ul> )} </> ); } <file_sep>/src/Navigation/Navigation.js import { NavLink } from 
'react-router-dom'; import s from './Navigation.module.css'; const Navigation = () => { return ( <nav> <NavLink exact to="/" className={s.link} activeClassName={s.activeLink}> Главная </NavLink> <NavLink to="/movies" className={s.link} activeClassName={s.activeLink}> Фильмы </NavLink> {/* <NavLink exact to="/movies/:movieId" className={s.link} activeClassName={s.activeLink} > Информация </NavLink> <NavLink to="/movies/:movieId/cast" className={s.link} activeClassName={s.activeLink} > Актерский состав </NavLink> <NavLink to="/movies/:movieId/reviews" className={s.link} activeClassName={s.activeLink} > Обзоры </NavLink> */} </nav> ); }; export default Navigation; <file_sep>/src/views/NotFoundView/NotFoundView.js export default function NotFoundView() { <h1>Error 404 Movie not found</h1>; } <file_sep>/src/views/FilmErrorView/FilmErrorView.js export default function FilmErrorView() { <h1>Movie not found</h1>; } <file_sep>/src/views/FilmGalleryView/FilmGalleryView.js import { Link, useRouteMatch } from 'react-router-dom'; export default function FilmGalleryView({ movies }) { const { url } = useRouteMatch(); console.log(url); return ( <ul> {movies.map(movie => ( <li key={movie.id}> <Link to={`${url}/${movie.id}`}>{movie.name}</Link> <Link to={`${url}/${movie.id}`}>{movie.original_title}</Link> </li> ))} </ul> ); }
39d377dffbf48fc5a8fb4addab4d1c87bce56c91
[ "JavaScript" ]
6
JavaScript
Powerman-code/goit-react-hw-04-movies
79c33f718049ec7d3e4fe070bf0bf8225d0cf237
a86c7d0a8a0aa6a5215ee9b49b49efce241fe9ac
refs/heads/main
<repo_name>ttmgs/Todo-List<file_sep>/README.md # Todo-List ## Description Post and delete notes within the mongodb database ## URL is private <file_sep>/index.js const express = require('express'); const ejs = require('ejs'); const app = express(); const mongoose = require("mongoose"); var _ = require('lodash'); app.use(express.urlencoded({extented: true})); app.use(express.json()); app.set("view engine", "ejs"); app.use(express.static("public")); // mongodb connection mongoose.connect("mongodb+srv://ttmgs:Windsor2000!!@cluster0.a9rki.mongodb.net/toDB", {useNewUrlParser: true}) // schemas const listSchema = mongoose.Schema({ item: String }) const listedSchema = ({ name: String, items: [listSchema] }) const Title = mongoose.model("Title", listedSchema) // model const List = mongoose.model("List", listSchema) const first = new List({ item: "first Item" }); const defaultItems = [first] app.get('/', (req, res) => { List.find({}, function(err, lists) { if (lists.length === 0) { List.insertMany(defaultItems, function(err) { if (err) { console.log(err) } else { console.log("successfully saved items to DB") } }); res.redirect("/"); } else { res.render("list", {listtitle: "Today", items: lists}) } }); }); app.post("/", (req, res) => { const itemInput = req.body.input const listname = req.body.list const newlist = new List({ item: itemInput }) if (listname === "Today") { newlist.save(); res.redirect("/") } else { Title.findOne({name: listname}, function(err, foundlist) { foundlist.items.push(newlist) foundlist.save(); res.redirect("/" + listname) }) } }); app.post("/delete", (req, res) => { const checkedItemId = req.body.checkbox var list = req.body.listname if (list === "Today") { List.findByIdAndRemove(checkedItemId, function(err) { if (err) { console.log("error removing list item") } else { res.redirect("/") } }) } else { Title.findOneAndUpdate({name: list}, {$pull: {items: {_id: checkedItemId}}}, function(err, foundlist){ if (!err) { res.redirect("/" + list) } }) } }) 
app.get("/:custom", (req, res) => { const title = _.capitalize(req.params.custom) Title.findOne({name: title}, function(err, foundlist) { if (!err) { if (!foundlist) { // create new list const newItem = new Title({ name: title, items: first }) newItem.save(); res.redirect("/" + title) } else { // show list res.render("list", {listtitle: foundlist.name, items: foundlist.items}) } } }) }) let port = process.env.PORT; if (port == null || port == "") { port = 3000; } app.listen(port, function() { console.log('app is has started successfully') });
c91db25031fcc6a84246b68727fa1b1244eaec33
[ "Markdown", "JavaScript" ]
2
Markdown
ttmgs/Todo-List
ae0b9e6054ceea4f35cc38e59c1a4db187c6e011
da6f49734c882f2573bafbb01d15f95980123b47
refs/heads/master
<file_sep>package alekseev.market.dto; import java.util.List; public class CategoryWithProductDTO extends CategoryDTO { private List<ProductWithoutCategoryDTO> products; public CategoryWithProductDTO() { super(); } public CategoryWithProductDTO(int categoryId, String nameCategory, List<ProductWithoutCategoryDTO> products) { super(categoryId, nameCategory); this.products = products; } public List<ProductWithoutCategoryDTO> getProducts() { return products; } public void setProducts(List<ProductWithoutCategoryDTO> products) { this.products = products; } } <file_sep>package alekseev.market.controller; import alekseev.market.dto.ClientDTO; import alekseev.market.dto.ProductWithoutCategoryDTO; import alekseev.market.service.ClientService; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.*; import java.util.List; @RestController @RequestMapping("/api/clients") public class ClientController { private final ClientService clientService; public ClientController(ClientService clientService) { this.clientService = clientService; } @GetMapping() public ResponseEntity<List<ClientDTO>> getClient() { List<ClientDTO> categories = clientService.getAllClients(); return new ResponseEntity<>(categories, HttpStatus.OK); } @GetMapping("/{id}") public ResponseEntity<ClientDTO> getClient(@PathVariable int id) { ClientDTO clientDTO = clientService.getClient(id); if (clientDTO == null) { return new ResponseEntity<>(HttpStatus.BAD_REQUEST); } return new ResponseEntity<>(clientDTO, HttpStatus.OK); } @GetMapping("/product") public ResponseEntity<List<ClientDTO>> getClientByProductForInterval(@RequestBody ProductWithoutCategoryDTO product, @RequestParam(value = "from", required = false) String from, @RequestParam(value = "to", required = false) String to) { List<ClientDTO> clients = clientService.getByProductForInterval(product, from, to); return new ResponseEntity<>(clients, HttpStatus.OK); } @PostMapping() 
public ResponseEntity<ClientDTO> createClient(@RequestBody ClientDTO clientDTO) { clientService.saveClient(clientDTO); return new ResponseEntity<>(HttpStatus.CREATED); } @PutMapping("/{id}") public ResponseEntity<ClientDTO> updateClient(@RequestBody ClientDTO clientDTO, @PathVariable int id) { clientService.updateClient(id, clientDTO); return new ResponseEntity<>(HttpStatus.OK); } @DeleteMapping("/{id}") public ResponseEntity<ClientDTO> deleteClient(@PathVariable int id) { clientService.deleteClient(id); return new ResponseEntity<>(HttpStatus.OK); } } <file_sep>package alekseev.market.dto; public class ClientDTO { private int clientId; private String nameClient; public ClientDTO() { } public ClientDTO(String nameClient) { this.nameClient = nameClient; } public ClientDTO(int clientId, String nameClient) { this.clientId = clientId; this.nameClient = nameClient; } public int getClientId() { return clientId; } public void setClientId(int clientId) { this.clientId = clientId; } public String getNameClient() { return nameClient; } public void setNameClient(String nameClient) { this.nameClient = nameClient; } } <file_sep>package alekseev.market.service; import alekseev.market.dao.CategoryDAO; import alekseev.market.dao.ProductDAO; import alekseev.market.dto.CategoryDTO; import alekseev.market.dto.ProductDTO; import org.springframework.stereotype.Service; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import java.util.NoSuchElementException; @Service public class ProductServiceImpl implements ProductService { private final ProductDAO productDAO; private final CategoryDAO categoryDAO; public ProductServiceImpl(ProductDAO productDAO, CategoryDAO categoryDAO) { this.productDAO = productDAO; this.categoryDAO = categoryDAO; } @Override public int saveProduct(ProductDTO productDTO) { try { productDAO.save(productDTO); int productId = productDAO.findIdByTitleAndPrice(productDTO); List<Integer> categoryIds = new ArrayList<>(); for (CategoryDTO 
category: productDTO.getCategories()) { categoryIds.add(categoryDAO.findIdByNameCategory(category.getNameCategory())); } productDAO.saveProductWithCategory(productId, categoryIds); return 1; } catch (SQLException | NoSuchElementException e) { return 0; } } @Override public ProductDTO getProduct(int id) { try { return productDAO.findById(id); } catch (NoSuchElementException e) { return null; } } @Override public List<ProductDTO> getAllProducts() { return productDAO.findAll(); } @Override public int update(int id, ProductDTO productDTO) { try { productDAO.updateById(id, productDTO); return 1; } catch (SQLException e) { return 0; } } @Override public int delete(int id) { try { productDAO.deleteById(id); return 1; } catch (SQLException e) { return 0; } } } <file_sep>package alekseev.market.dao; import alekseev.market.dto.CategoryDTO; import alekseev.market.dto.CategoryWithProductDTO; import alekseev.market.dto.ProductWithoutCategoryDTO; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.BeanPropertyRowMapper; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.stereotype.Repository; import java.sql.SQLException; import java.util.List; @Repository public class CategoryDAO implements DAO<CategoryDTO> { private final JdbcTemplate jdbcTemplate; @Autowired public CategoryDAO(JdbcTemplate jdbcTemplate) { this.jdbcTemplate = jdbcTemplate; } @Override public void save(CategoryDTO category) throws SQLException { String sql = "INSERT INTO category (name_category) VALUES (?)"; if(jdbcTemplate.update(sql, category.getNameCategory()) != 1) { throw new SQLException(); } } @Override public CategoryWithProductDTO findById(int id) { String sql = "SELECT category_id, name_category FROM category WHERE category_id=?"; CategoryWithProductDTO category = jdbcTemplate.query(sql, new BeanPropertyRowMapper<>(CategoryWithProductDTO.class), id) .stream().findAny().orElseThrow(); category.setProducts(getProducts(category)); 
return category; } private List<ProductWithoutCategoryDTO> getProducts(CategoryDTO category) { String sql = "SELECT p.product_id, title, price\n" + "FROM category\n" + " INNER JOIN product_category pc on category.category_id = pc.category_id\n" + " INNER JOIN product p on pc.product_id = p.product_id\n" + "WHERE name_category=?"; return jdbcTemplate.query(sql, new BeanPropertyRowMapper<>(ProductWithoutCategoryDTO.class), category.getNameCategory()); } @Override public List<CategoryWithProductDTO> findAll() { String sql = "SELECT category_id, name_category FROM category"; List<CategoryWithProductDTO> categories = jdbcTemplate.query(sql, new BeanPropertyRowMapper<>(CategoryWithProductDTO.class)); for (CategoryWithProductDTO category : categories) { category.setProducts(getProducts(category)); } return categories; } public int findIdByNameCategory(String nameCategory) { String sql = "SELECT category_id FROM category WHERE name_category=?"; return jdbcTemplate.query(sql, (rs, rowNum) -> rs.getInt("category_id"), nameCategory) .stream().findAny().orElseThrow(); } @Override public void updateById(int id, CategoryDTO category) throws SQLException { String sql = "UPDATE category SET name_category=? 
WHERE category_id=?"; if (jdbcTemplate.update(sql, category.getNameCategory(), id) != 1) { throw new SQLException(); } } @Override public void deleteById(int id) throws SQLException { String sql1 = "DELETE FROM product_category WHERE category_id=?"; String sq2 = "DELETE FROM category WHERE category_id=?"; jdbcTemplate.update(sql1, id); if (jdbcTemplate.update(sq2, id) != 1) { throw new SQLException(); } } } <file_sep>SELECT DISTINCT title, name_category, price FROM product INNER JOIN product_category pc on product.product_id = pc.product_id INNER JOIN category c on c.category_id = pc.category_id; SELECT DISTINCT title, price FROM product WHERE product_id=1; SELECT name_category FROM category INNER JOIN product_category pc on category.category_id = pc.category_id INNER JOIN product p on p.product_id = pc.product_id WHERE title='Бутсы мужские Nike Vapor 14'; SELECT name_category FROM category INNER JOIN product_category ON category.category_id =product_category.category_id WHERE name_category = 'Бутсы мужские Nike Vapor 14'; SELECT title FROM product INNER JOIN product_category pc on product.product_id = pc.product_id INNER JOIN category c on c.category_id = pc.category_id WHERE pc.product_id = 2; INSERT INTO product (title, price) VALUES ('Мужская футболка Nike', 1199.99); SELECT category_id FROM category WHERE name_category='Мужское'; DELETE FROM product WHERE product_id=20; DELETE FROM product_category WHERE product_id=20; SELECT p.product_id, title FROM category INNER JOIN product_category pc on category.category_id = pc.category_id INNER JOIN product p on pc.product_id = p.product_id WHERE name_category='Мужское'; <file_sep>CREATE TABLE product ( product_id SERIAL PRIMARY KEY, title VARCHAR(100) NOT NULL, price DECIMAL NOT NULL ); CREATE TABLE category ( category_id SERIAL PRIMARY KEY, name_category VARCHAR(100) NOT NULL ); CREATE TABLE product_category ( product_category_id SERIAL PRIMARY KEY, product_id INTEGER NOT NULL, category_id INTEGER NOT NULL, FOREIGN 
KEY(product_id) REFERENCES product(product_id), FOREIGN KEY(category_id) REFERENCES category(category_id) ); CREATE TABLE client ( client_id SERIAL PRIMARY KEY, name_client VARCHAR(100) NOT NULL ); CREATE TABLE reservation ( reservation_id SERIAL PRIMARY KEY, client_id INTEGER NOT NULL, reservation_date DATE NOT NULL, FOREIGN KEY (client_id) REFERENCES client (client_id) ); CREATE TABLE reservation_product ( reservation_product_id SERIAL PRIMARY KEY, reservation_id INTEGER NOT NULL, product_id INTEGER NOT NULL, FOREIGN KEY (reservation_id) REFERENCES reservation (reservation_id), FOREIGN KEY (product_id) REFERENCES product (product_id) ); <file_sep>package alekseev.market.service; import alekseev.market.dao.CategoryDAO; import alekseev.market.dto.CategoryDTO; import alekseev.market.dto.CategoryWithProductDTO; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import java.sql.SQLException; import java.util.List; import java.util.NoSuchElementException; @Service public class CategoryServiceImpl implements CategoryService { private final CategoryDAO categoryDAO; @Autowired public CategoryServiceImpl(CategoryDAO categoryDAO) { this.categoryDAO = categoryDAO; } @Override public int saveCategory(CategoryDTO category) { try { categoryDAO.save(category); } catch (SQLException e) { return 0; } return 1; } @Override public CategoryWithProductDTO getCategory(int id) { try { return categoryDAO.findById(id); } catch (NoSuchElementException e) { return null; } } @Override public List<CategoryWithProductDTO> getAllCategories() { return categoryDAO.findAll(); } @Override public int updateCategory(int id, CategoryDTO category) { try { categoryDAO.updateById(id, category); } catch (SQLException e) { return 0; } return 1; } @Override public int deleteCategory(int id) { try { categoryDAO.deleteById(id); } catch (SQLException e) { return 0; } return 1; } } <file_sep>INSERT INTO category (name_category) VALUES ('Мужское'); 
INSERT INTO category (name_category) VALUES ('Женское'); INSERT INTO category (name_category) VALUES ('Детское'); INSERT INTO product (title, price) VALUES ('Мужская футболка Nike', 1199.99); INSERT INTO product_category (product_id, category_id) VALUES (1, 1); SELECT title, name_category, price FROM product_category INNER JOIN product ON product_category.product_id = product.product_id INNER JOIN category ON product_category.category_id = category.category_id; INSERT INTO product_category (product_id, category_id) VALUES (2, 1); INSERT INTO product_category (product_id, category_id) VALUES (2, 4); SELECT title, name_category, price FROM product_category INNER JOIN product ON product_category.product_id = product.product_id INNER JOIN category ON product_category.category_id = category.category_id WHERE name_category = 'Мужское'; INSERT INTO reservation(client_id, reservation_date) VALUES (1, '2020-07-16');<file_sep>package alekseev.market.dto; import java.time.LocalDateTime; import java.util.List; public class ReservationDTO { private int reservationId; private ClientDTO client; private LocalDateTime reservationDate; private List<ProductDTO> products; public ReservationDTO() { this.reservationDate = LocalDateTime.now(); } public ReservationDTO(ClientDTO client, List<ProductDTO> products) { this.client = client; this.products = products; this.reservationDate = LocalDateTime.now(); } public ReservationDTO(int reservation_id, ClientDTO client, List<ProductDTO> products) { this.reservationId = reservation_id; this.client = client; this.products = products; this.reservationDate = LocalDateTime.now(); } public int getReservationId() { return reservationId; } public void setReservationId(int reservation_id) { this.reservationId = reservation_id; } public ClientDTO getClient() { return client; } public void setClient(ClientDTO client) { this.client = client; } public List<ProductDTO> getProducts() { return products; } public void setProducts(List<ProductDTO> products) { 
this.products = products; } public void setReservationDate(LocalDateTime reservationDate) { this.reservationDate = reservationDate; } public LocalDateTime getReservationDate() { return reservationDate; } } <file_sep>package alekseev.market.dto; import java.util.List; public class ProductDTO extends ProductWithoutCategoryDTO { private List<CategoryDTO> categories; public ProductDTO() { } public ProductDTO(List<CategoryDTO> categories) { this.categories = categories; } public List<CategoryDTO> getCategories() { return categories; } public void setCategories(List<CategoryDTO> categories) { this.categories = categories; } } <file_sep>package alekseev.market.dao; import java.sql.SQLException; import java.util.List; public interface DAO <T> { void save(T t) throws SQLException; T findById(int id); <S extends T> List<S> findAll(); void updateById(int id, T t) throws SQLException; void deleteById(int id) throws SQLException; } <file_sep>package alekseev.market.controller; import alekseev.market.dto.ClientDTO; import alekseev.market.dto.ReservationDTO; import alekseev.market.service.ReservationService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.*; import java.util.List; @RestController @RequestMapping("/api/reservations") public class ReservationController { private final ReservationService reservationService; @Autowired public ReservationController(ReservationService reservationService) { this.reservationService = reservationService; } @GetMapping("") public ResponseEntity<List<ReservationDTO>> getReservation() { List<ReservationDTO> reservations = reservationService.getAllReservations(); if (reservations.isEmpty()) { return new ResponseEntity<>(reservations, HttpStatus.INTERNAL_SERVER_ERROR); } return new ResponseEntity<>(reservations, HttpStatus.OK); } @GetMapping("/{id}") public ResponseEntity<ReservationDTO> 
getReservation(@PathVariable int id) { ReservationDTO reservation = reservationService.getReservation(id); if (reservation == null) { return new ResponseEntity<>(HttpStatus.BAD_REQUEST); } return new ResponseEntity<>(reservation, HttpStatus.OK); } @PostMapping("") public ResponseEntity<ReservationDTO> createReservation(@RequestBody ReservationDTO reservation) { int status = reservationService.createReservation(reservation); if (status != 1) { return new ResponseEntity<>(HttpStatus.BAD_REQUEST); } return new ResponseEntity<>(HttpStatus.CREATED); } @DeleteMapping("/{id}") public ResponseEntity<ReservationDTO> deleteReservation(@PathVariable int id) { int status = reservationService.deleteReservation(id); if (status != 1) { return new ResponseEntity<>(HttpStatus.BAD_REQUEST); } return new ResponseEntity<>(HttpStatus.OK); } @GetMapping("/client") public ResponseEntity<List<ReservationDTO>> getClientReservation(@RequestBody ClientDTO client, @RequestParam(value = "from", required = false) String from, @RequestParam(value = "to", required = false) String to) { List<ReservationDTO> reservations = reservationService.getReservationsByDate(client.getNameClient(), from, to); return new ResponseEntity<>(reservations, HttpStatus.OK); } } <file_sep>INSERT INTO reservation(client_id, reservation_date) VALUES (1, '2020-07-16'); INSERT INTO reservation_product(reservation_id, product_id) VALUES (1, 1); SELECT reservation.reservation_id, name_client, title, name_category, reservation_date FROM client INNER JOIN reservation ON client.client_id = reservation.client_id INNER JOIN reservation_product ON reservation.reservation_id = reservation_product.reservation_id INNER JOIN product ON reservation_product.product_id = product.product_id INNER JOIN product_category on product.product_id = product_category.product_id INNER JOIN category on category.category_id = product_category.category_id WHERE client.client_id = (SELECT client_id FROM client WHERE name_client = 'admin') AND 
reservation_date BETWEEN '2020-07-11' AND '2020-07-16'; SELECT name_client FROM client INNER JOIN reservation ON client.client_id = reservation.client_id INNER JOIN reservation_product ON reservation.reservation_id = reservation_product.reservation_id INNER JOIN product ON reservation_product.product_id = product.product_id WHERE title = 'Мужская футболка Nike' AND reservation_date BETWEEN '2020-07-10' AND '2020-07-20'; INSERT INTO reservation(client_id, reservation_date) SELECT client_id, '2020-07-11' FROM client WHERE name_client='admin'; SELECT reservation.reservation_id, reservation_date, name_client, title, name_category FROM client INNER JOIN reservation ON client.client_id = reservation.client_id INNER JOIN reservation_product ON reservation.reservation_id = reservation_product.reservation_id INNER JOIN product ON reservation_product.product_id = product.product_id INNER JOIN product_category on product.product_id = product_category.product_id INNER JOIN category on category.category_id = product_category.category_id WHERE client.client_id = (SELECT client_id FROM client WHERE name_client = 'admin') AND reservation_date BETWEEN '2020-07-11' AND '2020-07-16';<file_sep>package alekseev.market.dao; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import static org.junit.jupiter.api.Assertions.*; @SpringBootTest class ProductDAOTest { @Autowired private ProductDAO productDAO; @Test void findByIdTest() { assertTrue(productDAO.findById(1).getTitle().equals("Мужская футболка Nike")); } @Test void findAllByIdTest() { productDAO.findAll().forEach(System.out::println); } }<file_sep>package alekseev.market.dto; import java.util.List; public class ProductWithoutCategoryDTO { private int productId; private String title; private double price; public ProductWithoutCategoryDTO() { } public ProductWithoutCategoryDTO(String title, double price) { this.title = title; 
this.price = price; } public ProductWithoutCategoryDTO(int id, String title, double price) { this.productId = id; this.title = title; this.price = price; } public int getProductId() { return productId; } public void setProductId(int productId) { this.productId = productId; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public double getPrice() { return price; } public void setPrice(double price) { this.price = price; } } <file_sep>package alekseev.market.dao; import alekseev.market.dto.CategoryDTO; import alekseev.market.dto.ClientDTO; import alekseev.market.dto.ProductDTO; import alekseev.market.dto.ReservationDTO; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.BeanPropertyRowMapper; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.stereotype.Repository; import java.sql.SQLException; import java.time.LocalDate; import java.util.ArrayList; import java.util.List; @Repository public class ReservationDAO implements DAO<ReservationDTO> { private final JdbcTemplate jdbcTemplate; @Autowired public ReservationDAO(JdbcTemplate jdbcTemplate) { this.jdbcTemplate = jdbcTemplate; } @Override public void save(ReservationDTO reservation) throws SQLException { String sql = "INSERT INTO reservation(client_id, reservation_date) VALUES ((SELECT client_id FROM client WHERE name_client=?), ?)"; if (jdbcTemplate.update(sql, reservation.getClient().getNameClient(), reservation.getReservationDate()) != 1) { throw new SQLException(); } String sql2 = "SELECT reservation_id FROM reservation WHERE reservation.client_id = (SELECT client_id FROM client WHERE name_client=?) 
" + "AND reservation_date=?"; int reservationId = jdbcTemplate.query(sql2, (rs, rowNum) -> rs.getInt("reservation_id"), reservation.getClient().getNameClient(), reservation.getReservationDate()).stream().findAny().orElseThrow(); String sql3 = "INSERT INTO reservation_product(reservation_id, product_id)\n" + "VALUES (?, (SELECT product_id FROM product WHERE title=? AND price=?))"; for (ProductDTO product : reservation.getProducts()) { if (jdbcTemplate.update(sql3, reservationId, product.getTitle(), product.getPrice()) != 1) { throw new SQLException(); } } } @Override public ReservationDTO findById(int id) { String sql1 = "SELECT reservation.reservation_id, reservation_date\n" + "FROM client\n" + " INNER JOIN reservation ON client.client_id = reservation.client_id\n" + "WHERE reservation.reservation_id=?"; ReservationDTO reservation = jdbcTemplate.query(sql1, new BeanPropertyRowMapper<>(ReservationDTO.class), id).stream().findAny().orElseThrow(); String sql2 = "SELECT client.client_id, name_client\n" + "FROM client\n" + " INNER JOIN reservation ON client.client_id = reservation.client_id\n" + "WHERE reservation.reservation_id=?"; ClientDTO client = jdbcTemplate.query(sql2, new BeanPropertyRowMapper<>(ClientDTO.class), id).stream().findAny().orElseThrow(); String sql3 = "SELECT product.product_id, title, price\n" + "FROM client\n" + " INNER JOIN reservation ON client.client_id = reservation.client_id\n" + " INNER JOIN reservation_product ON reservation.reservation_id = reservation_product.reservation_id\n" + " INNER JOIN product ON reservation_product.product_id = product.product_id\n" + "WHERE reservation.reservation_id=?"; List<ProductDTO> products = jdbcTemplate.query(sql3, new BeanPropertyRowMapper<>(ProductDTO.class), id); String sql4 = "SELECT category.category_id, name_category\n" + "FROM product\n" + " INNER JOIN product_category on product.product_id = product_category.product_id\n" + " INNER JOIN category on category.category_id = 
product_category.category_id\n" + "WHERE product.product_id=?"; for (ProductDTO product : products) { List<CategoryDTO> categories = jdbcTemplate.query(sql4, new BeanPropertyRowMapper<>(CategoryDTO.class),product.getProductId()); product.setCategories(categories); } reservation.setClient(client); reservation.setProducts(products); return reservation; } @Override public List<ReservationDTO> findAll() { String sql = "SELECT reservation.reservation_id\n" + "FROM client\n" + " INNER JOIN reservation ON client.client_id = reservation.client_id"; List<Integer> reservationIds = jdbcTemplate.query(sql, (rs, rowNum) -> { return rs.getInt("reservation_id"); }); List<ReservationDTO> reservations = new ArrayList<>(); for (int reservationId : reservationIds) { reservations.add(findById(reservationId)); } return reservations; } public List<ReservationDTO> findByDate (String nameClient, LocalDate from, LocalDate to) { String sql = "SELECT reservation.reservation_id\n" + "FROM client\n" + " INNER JOIN reservation ON client.client_id = reservation.client_id\n" + "WHERE client.client_id = (SELECT client_id FROM client WHERE name_client = ?)\n" + " AND reservation_date BETWEEN ? 
AND ?"; List<Integer> reservationIds = jdbcTemplate.query(sql, (rs, rowNum) -> { return rs.getInt("reservation_id"); }, nameClient, from, to); List<ReservationDTO> reservations = new ArrayList<>(); for (int reservationId : reservationIds) { reservations.add(findById(reservationId)); } return reservations; } @Override public void updateById(int id, ReservationDTO reservationDTO) { // to do } @Override public void deleteById(int id) throws SQLException { String sql = "DELETE FROM reservation_product WHERE reservation_id=?"; String sql2 = "DELETE FROM reservation WHERE reservation_id=?"; jdbcTemplate.update(sql, id); if (jdbcTemplate.update(sql2, id) != 1) { throw new SQLException(); } } } <file_sep>package alekseev.market.service; import alekseev.market.dto.CategoryDTO; import alekseev.market.dto.CategoryWithProductDTO; import java.util.List; public interface CategoryService { int saveCategory(CategoryDTO category); CategoryWithProductDTO getCategory(int id); List<CategoryWithProductDTO> getAllCategories(); int updateCategory(int id, CategoryDTO category); int deleteCategory(int id); } <file_sep>package alekseev.market.controller; import alekseev.market.dto.ProductDTO; import alekseev.market.service.ProductService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.*; import java.util.List; @RestController @RequestMapping("/api/products") public class ProductController { private final ProductService productService; @Autowired public ProductController(ProductService productService) { this.productService = productService; } @GetMapping("") public ResponseEntity<List<ProductDTO>> getProduct() { List<ProductDTO> products = productService.getAllProducts(); return new ResponseEntity<>(products, HttpStatus.OK); } @GetMapping("/{id}") public ResponseEntity<ProductDTO> getProduct(@PathVariable int id) { ProductDTO product = 
productService.getProduct(id); if (product == null) { return new ResponseEntity<>(HttpStatus.BAD_REQUEST); } return new ResponseEntity<>(product, HttpStatus.OK); } @PostMapping() public ResponseEntity<ProductDTO> createProduct(@RequestBody ProductDTO productDTO) { if (productDTO.getCategories().isEmpty()) { return new ResponseEntity<>(HttpStatus.BAD_REQUEST); } if (productService.saveProduct(productDTO) != 1) { return new ResponseEntity<>(HttpStatus.BAD_REQUEST); } return new ResponseEntity<>(HttpStatus.CREATED); } @PutMapping("/{id}") public ResponseEntity<ProductDTO> updateProduct(@RequestBody ProductDTO productDTO, @PathVariable int id) { int status = productService.update(id, productDTO); if (status == 0) { return new ResponseEntity<>(HttpStatus.BAD_REQUEST); } return new ResponseEntity<>(HttpStatus.OK); } @DeleteMapping("/{id}") public ResponseEntity<ProductDTO> deleteProduct(@PathVariable int id) { int status = productService.delete(id); if (status != 1) { return new ResponseEntity<>(HttpStatus.BAD_REQUEST); } return new ResponseEntity<>(HttpStatus.OK); } }
1d58592f049ce70910d28208d6f8721d4497e243
[ "Java", "SQL" ]
19
Java
AlekseevSergey16/Market-Service
ce2f14394c1d6cd4bed7e1e88e5c10673d3a2f32
90063858404f49eb21298e4f9fb8b9c1e4d0cbe5
refs/heads/master
<file_sep>require 'bundler/setup' Bundler.require get '/' do erb :index end post '/user/properties/sort' do "Your property order is updated." end <file_sep>== About ChittyChittyBangBang is a jQuery and AJAX exercise. To run app locally: 1. Clone down 2. Bundle install 3. Type in command line: $ruby server.rb 4. Go to localhost:4567
930414696ef793a2d911e1ee6527d27f784a3ec5
[ "RDoc", "Ruby" ]
2
Ruby
periwinklepath/ChittyChittyBangBang
6e295221e43bf1e4a9661249a60a2a293e84863b
46682794429922b9928c4fd45817225d6f8a7585
refs/heads/main
<repo_name>ArturQuirino/super-calculadora<file_sep>/README.md # super-calculadora Projeto didático para UFV <file_sep>/src/super-calculadora/src/store/actions.js export const ADDICIONAR_NUMERO = 'ADDICIONAR_NUMERO'; export const addNumeroHistorico = (numero) => ({ type: ADDICIONAR_NUMERO, payload: numero, }); <file_sep>/src/super-calculadora/src/store/reducers.js import { ADDICIONAR_NUMERO } from './actions' const initialState = { historico: [ 0 ] } const calculadoraReducer = (state = initialState, action) => { switch (action.type) { case ADDICIONAR_NUMERO: const novoHistorico = state.historico; novoHistorico.push(action.payload); return { historico: novoHistorico } default: return state } } export default calculadoraReducer;<file_sep>/src/super-calculadora/src/components/header/header.jsx import React from 'react'; import './header.css'; import { Link } from 'react-router-dom'; const Header = (props) => { return ( <header className="header"> Esta é a super calculadora de {props.autor} <nav className="header__navigation-container"> <Link to="/" className="header__navigation-container__link"> Calculadora </Link> <Link to="/sobre" className="header__navigation-container__link"> Sobre </Link> </nav> </header> ); }; export default Header; <file_sep>/src/super-calculadora/src/components/calculadora/displayCalculo/displayCalculo.jsx import React from 'react'; const DisplayCalculo = (props) => { const { numero1, numero2, descricao, calculo } = props; return ( <div> {descricao} : {calculo(+numero1, +numero2)} </div> ); }; export default DisplayCalculo; <file_sep>/src/super-calculadora/src/components/sobre/sobre.jsx import React, { Component } from 'react'; import { connect } from 'react-redux'; class Sobre extends Component { render() { return ( <article> Esta é uma supercalculadora desenvovlida pelos alunos da UFV <div>{this.props.historico}</div> </article> ); } } const mapStateToProps = (state) => { return { historico: state.historico, }; }; export default 
connect(mapStateToProps)(Sobre); <file_sep>/src/super-calculadora-backend/routes/index.js const express = require('express'); const server = express(); const port = process.env.PORT || 5000; server.use(express.json()); const lista = []; server.get('/nome', (req, res) => { res.send({ nome: 'Andressa' }); }); server.get('/lista', (req, res) => { return res.json(lista); }); server.post('/lista', checkKeyExists, (req, res) => { const { name } = req.body; lista.push(name); return res.json(lista); }); server.put('/lista/:index', checkKeyExists, (req, res) => { const { index } = req.params; const { name } = req.body; lista[index] = name; return res.json(lista); }); server.delete('/lista/:index', (req, res) => { const { index } = req.params; lista.splice(index, 1); return res.send("Deletado"); }); function checkKeyExists(req, res, next) { if (!req.body.name) { return res.status(400).json({ error: 'O parâmetro name é necessário' }); } return next(); } server.listen(port);
42746e2f02c19f503a59e28b075cee50da9054e2
[ "Markdown", "JavaScript" ]
7
Markdown
ArturQuirino/super-calculadora
8539aa55d5bbee04549a5597b014e26dc8e55f6b
1510023ed47b3c25e6ea80c1f0fb1ca4003227e4
refs/heads/main
<file_sep>/* * Copyright 2021 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.androiddevchallenge import android.os.Bundle import android.os.CountDownTimer import androidx.activity.compose.setContent import androidx.appcompat.app.AppCompatActivity import androidx.compose.foundation.layout.Arrangement import androidx.compose.foundation.layout.Box import androidx.compose.foundation.layout.Column import androidx.compose.foundation.layout.Row import androidx.compose.foundation.layout.fillMaxHeight import androidx.compose.foundation.layout.fillMaxWidth import androidx.compose.foundation.layout.height import androidx.compose.foundation.layout.padding import androidx.compose.foundation.layout.size import androidx.compose.foundation.layout.width import androidx.compose.foundation.shape.RoundedCornerShape import androidx.compose.foundation.text.KeyboardOptions import androidx.compose.material.Card import androidx.compose.material.CircularProgressIndicator import androidx.compose.material.Icon import androidx.compose.material.IconButton import androidx.compose.material.MaterialTheme import androidx.compose.material.Surface import androidx.compose.material.Text import androidx.compose.material.TextField import androidx.compose.material.icons.Icons import androidx.compose.material.icons.filled.Check import androidx.compose.material.icons.filled.Pause import androidx.compose.material.icons.filled.PlayArrow import 
androidx.compose.material.icons.filled.Replay import androidx.compose.material.icons.filled.Settings import androidx.compose.runtime.Composable import androidx.compose.runtime.mutableStateOf import androidx.compose.runtime.remember import androidx.compose.ui.Alignment import androidx.compose.ui.Modifier import androidx.compose.ui.graphics.Color import androidx.compose.ui.text.TextStyle import androidx.compose.ui.text.font.FontFamily import androidx.compose.ui.text.font.FontWeight import androidx.compose.ui.text.input.KeyboardType import androidx.compose.ui.text.input.TextFieldValue import androidx.compose.ui.text.style.TextAlign import androidx.compose.ui.tooling.preview.Preview import androidx.compose.ui.unit.dp import com.example.androiddevchallenge.ui.theme.MyTheme import java.util.concurrent.TimeUnit class MainActivity : AppCompatActivity() { override fun onCreate(savedInstanceState: Bundle?) { super.onCreate(savedInstanceState) setContent { MyTheme { MyApp() } } } } // Start building your app here! @Composable fun MyApp() { val totalTime: Long val inputMinute = remember { mutableStateOf(TextFieldValue()) } val inputSecond = remember { mutableStateOf(TextFieldValue()) } var minute: Long = TimeUnit.MINUTES.toMillis(0) var second: Long = TimeUnit.SECONDS.toMillis(0) if (inputMinute.value.text.isNotEmpty()) { minute = TimeUnit.MINUTES.toMillis(inputMinute.value.text.toLong()) } if (inputSecond.value.text.isNotEmpty()) { second = TimeUnit.SECONDS.toMillis(inputSecond.value.text.toLong()) } totalTime = minute + second val progress = remember { mutableStateOf(1F) } val timeLeft = remember { mutableStateOf(totalTime) } val startTimer = remember { mutableStateOf(totalTime) } val isTimerRunning = remember { mutableStateOf(false) } var countDownTimer: CountDownTimer? 
= object : CountDownTimer(startTimer.value, 10) { override fun onTick(millisUntilFinished: Long) { val progressPercent: Float = millisUntilFinished.toFloat() / totalTime.toFloat() progress.value = progressPercent timeLeft.value = millisUntilFinished } override fun onFinish() { cancel() isTimerRunning.value = false progress.value = 1F timeLeft.value = 0 } } Surface( color = MaterialTheme.colors.surface, modifier = Modifier .fillMaxWidth() .fillMaxHeight() ) { Column( modifier = Modifier.padding(24.dp), ) { Row( verticalAlignment = Alignment.CenterVertically ) { Text( text = "Countdown Timer", style = MaterialTheme.typography.h5, fontWeight = FontWeight.Bold, color = MaterialTheme.colors.primary, fontFamily = FontFamily.Serif, modifier = Modifier.weight(1f) ) IconButton(onClick = { }) { Icon( imageVector = Icons.Default.Settings, contentDescription = "Setting", modifier = Modifier.size(24.dp), tint = MaterialTheme.colors.primary ) } } Row(verticalAlignment = Alignment.CenterVertically) { TextField( value = inputMinute.value, onValueChange = { inputMinute.value = it }, keyboardOptions = KeyboardOptions( keyboardType = KeyboardType.Number, ), placeholder = { Text(text = "min") }, textStyle = TextStyle( fontFamily = FontFamily.SansSerif, color = Color.White ), modifier = Modifier .weight(1f) .padding(16.dp) ) TextField( value = inputSecond.value, onValueChange = { inputSecond.value = it }, keyboardOptions = KeyboardOptions( keyboardType = KeyboardType.Number, ), placeholder = { Text(text = "sec") }, textStyle = TextStyle( fontFamily = FontFamily.SansSerif, color = Color.White ), modifier = Modifier .weight(1f) .padding(16.dp) ) /*Set*/ Card( shape = RoundedCornerShape(24.dp), backgroundColor = Color.White, elevation = 4.dp, ) { IconButton( onClick = { countDownTimer?.cancel() isTimerRunning.value = false startTimer.value = minute + second timeLeft.value = minute + second progress.value = 1F countDownTimer = null countDownTimer = object : CountDownTimer(startTimer.value, 
10) { override fun onTick(millisUntilFinished: Long) { val progressPercent: Float = millisUntilFinished.toFloat() / totalTime.toFloat() progress.value = progressPercent timeLeft.value = millisUntilFinished } override fun onFinish() { cancel() isTimerRunning.value = false progress.value = 1F timeLeft.value = 0 } } }, modifier = Modifier.padding(horizontal = 24.dp) ) { Icon( imageVector = Icons.Default.Check, contentDescription = "Back Icon", modifier = Modifier.size(36.dp), tint = MaterialTheme.colors.primary, ) } } } } /* Circular Progress Bar*/ CircularProgressBar(progressValue = progress.value, millis = timeLeft.value) /* Buttons */ Row( modifier = Modifier .padding(24.dp), verticalAlignment = Alignment.Bottom, horizontalArrangement = Arrangement.Center ) { /* Restart Button */ Card( shape = RoundedCornerShape(24.dp), backgroundColor = Color.White, elevation = 4.dp, modifier = Modifier.padding(horizontal = 24.dp) ) { IconButton( onClick = { countDownTimer?.cancel() isTimerRunning.value = false startTimer.value = totalTime timeLeft.value = totalTime progress.value = 1F countDownTimer = null countDownTimer = object : CountDownTimer(startTimer.value, 10) { override fun onTick(millisUntilFinished: Long) { val progressPercent: Float = millisUntilFinished.toFloat() / totalTime.toFloat() progress.value = progressPercent timeLeft.value = millisUntilFinished } override fun onFinish() { cancel() isTimerRunning.value = false progress.value = 1F timeLeft.value = 0 } } }, modifier = Modifier.padding(horizontal = 24.dp) ) { Icon( imageVector = Icons.Default.Replay, contentDescription = "Back Icon", modifier = Modifier.size(36.dp), tint = MaterialTheme.colors.primary, ) } } /* Start Button*/ Card( shape = RoundedCornerShape(24.dp), backgroundColor = Color.White, elevation = 4.dp, modifier = Modifier.padding(horizontal = 24.dp) ) { IconButton( onClick = { if (isTimerRunning.value) { countDownTimer?.cancel() startTimer.value = timeLeft.value isTimerRunning.value = false } else { 
countDownTimer?.start() isTimerRunning.value = true } }, modifier = Modifier.padding(horizontal = 24.dp) ) { Icon( imageVector = if (isTimerRunning.value) Icons.Default.Pause else Icons.Default.PlayArrow, contentDescription = "Back Icon", modifier = Modifier.size(36.dp), tint = MaterialTheme.colors.primary ) } } } } } @Composable fun CircularProgressBar( progressValue: Float, millis: Long ) { Box( modifier = Modifier.padding(16.dp), contentAlignment = Alignment.Center ) { CircularProgressIndicator( strokeWidth = 6.dp, color = MaterialTheme.colors.primary, progress = 1F, modifier = Modifier .height(300.dp) .width(300.dp) ) CircularProgressIndicator( strokeWidth = 6.dp, color = MaterialTheme.colors.primaryVariant, progress = progressValue, modifier = Modifier .height(300.dp) .width(300.dp) ) Row( modifier = Modifier .height(280.dp) .width(280.dp) .padding(16.dp), verticalAlignment = Alignment.CenterVertically, horizontalArrangement = Arrangement.Center ) { Text( text = timeFormat(millis = millis), style = MaterialTheme.typography.h3, fontWeight = FontWeight.Bold, modifier = Modifier.weight(1f), textAlign = TextAlign.Center, color = MaterialTheme.colors.primary, fontFamily = FontFamily.Serif ) } } } fun timeFormat(millis: Long): String { return String.format( "%02d:%02d:%02d", TimeUnit.MILLISECONDS.toMinutes(millis), TimeUnit.MILLISECONDS.toSeconds(millis) % TimeUnit.MINUTES.toSeconds(1), TimeUnit.MILLISECONDS.toMillis(millis) % TimeUnit.SECONDS.toMillis(1) / 10 ) } @Preview("Light Theme", widthDp = 360, heightDp = 640) @Composable fun LightPreview() { MyTheme { MyApp() } } @Preview("Dark Theme", widthDp = 360, heightDp = 640) @Composable fun DarkPreview() { MyTheme(darkTheme = true) { MyApp() } }
481cffd837ef255185fbba544ad6423ec1537bcf
[ "Kotlin" ]
1
Kotlin
mohd-naushaaad/CountdownTimer
2af5ac64b3216af2576c55ce21e058cdc9325878
051d7403dca4f06df0903d7651703141b6ce0f4e
refs/heads/main
<file_sep>package com.logicbig.example; public class LoggerLines { private String title = ""; private String fileName = ""; private String loggerName = ""; private String loggerLevel = ""; private String appenderRef = ""; public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public String getFileName() { return fileName; } public void setFileName(String fileName) { this.fileName = fileName; } public String getLoggerName() { return loggerName; } public void setLoggerName(String loggerName) { this.loggerName = loggerName; } public String getLoggerLevel() { return loggerLevel; } public void setLoggerLevel(String loggerLevel) { this.loggerLevel = loggerLevel; } public String getAppenderRef() { return appenderRef; } public void setAppenderRef(String appenderRef) { this.appenderRef = appenderRef; } @Override public String toString() { return "LoggerLines [title=" + title + ", fileName=" + fileName + ", loggerName=" + loggerName + ", loggerLevel=" + loggerLevel + ", appenderRef=" + appenderRef + "]"; } } <file_sep>package com.logicbig.example; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.Writer; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class MyClass { private static Logger LOGGER = LoggerFactory.getLogger(MyClass.class); public static void writeSB() { try { // LOGGER = LoggerFactory.getLogger("Batch.EDMDocUpload"); List<String> inputLoggerList = Files.readAllLines(Paths.get("C:/temp/myfile.txt")); for (String line : inputLoggerList) { if (line == null || line.isEmpty()) { System.out.println("log4j2Logger.append(System.lineSeparator());"); } else { System.out.println("log4j2Logger.append(\"" + line + "\");"); } System.out.println("log4j2Logger.append(System.lineSeparator());"); } } catch 
(IOException e) { e.printStackTrace(); } } public static void checkDelimitterLines(){ StringBuilder log4j2Logger = new StringBuilder(); for (int i = 0; i < 6; i++) { log4j2Logger.append("sample text "+i); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("sample text "+i+1); log4j2Logger.append(System.lineSeparator()); } String content = log4j2Logger.toString(); System.out.println(content); String path = "C:\\temp\\out.txt"; try { Files.write( Paths.get(path), content.getBytes()); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } public static String getTemplate() { StringBuilder log4j2Logger = new StringBuilder(); log4j2Logger.append("appender.%s.type = RollingFile"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("appender.%s.name = %s"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("appender.%s.fileName = ${%s}"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("appender.%s.filePattern = ${%s}.%d{yyyy-MM-dd}"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("appender.%s.layout.type = PatternLayout"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("appender.%s.layout.pattern = %-10.30X{server} %-4.4X{user} %d{ISO8601} %p %m%n"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("appender.%s.policies.type = Policies"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("appender.%s.policies.time.type = TimeBasedTriggeringPolicy"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("appender.%s.policies.time.interval = 1"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("appender.%s.policies.time.modulate = true"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("appender.%s.strategy.type = DefaultRolloverStrategy"); log4j2Logger.append(System.lineSeparator()); 
log4j2Logger.append("appender.%s.strategy.max = 15"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("logger.%s.name = Batch.CovgTransformation"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("logger.%s.level = ${%s.level}"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("logger.%s.additivity = ${additivity}"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("logger.%s.appenderRefs = %s,stdout"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("logger.%s.appenderRefs.level = ${%s.level}"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("logger.%s.appenderRef.%s.ref = %s"); log4j2Logger.append(System.lineSeparator()); log4j2Logger.append("logger.%s.appenderRef.stdout.ref = STDOUT"); log4j2Logger.append(System.lineSeparator()); return log4j2Logger.toString(); } public static void appendSyslogLog4j2() { LOGGER.info("mtd name = log4j1Tologg4j2"); String inputFileName = "C:/temp/log4j2.properties"; String outputFileName = "C:/temp/log4j-TEST.properties"; try { List<String> inputLoggerList = Files.readAllLines(Paths.get(inputFileName)); List<String> inputLoggerList2 = new ArrayList<>(); StringBuilder data = new StringBuilder(); for (String line : inputLoggerList) { if (line.contains(",stdout")) { line = line + ",syslog"; } if (line.contains(".appenderRef.stdout.ref = STDOUT")) { String syslogAppenderRef = line.replace(".appenderRef.stdout.ref = STDOUT", ".appenderRef.syslog.ref = syslog"); line = line + System.lineSeparator() + syslogAppenderRef; } inputLoggerList2.add(line); data.append(line); } File outputFile = new File(outputFileName); if (outputFile.createNewFile()) { LOGGER.info("mtd name = log4j1Tologg4j2 " + outputFile.getName() + " was created."); } else { outputFile.delete(); LOGGER.info("mtd name = log4j1Tologg4j2 " + outputFile.getName() + " was deleted."); 
boolean b = outputFile.createNewFile(); LOGGER.info("mtd name = log4j1Tologg4j2 " + outputFile.getName() + " was created. " + b); } // FileUtils.writeLines(new File("output.txt"), encoding, list); // writToFile(outputFileName,data.toString()); writToFile(outputFileName, inputLoggerList2); } catch (IOException e) { e.printStackTrace(); } } public static void writToFile(String fileName, String data) { try { Files.write(Paths.get(fileName), data.getBytes()); } catch (IOException e) { e.printStackTrace(); } } public static void writToFile(String fileName, List<String> list) { try { FileWriter writer = new FileWriter(fileName); for (String str : list) { writer.write(str + System.lineSeparator()); } writer.close(); } catch (IOException e) { e.printStackTrace(); } } public static void log4j1Tologg4j2() { LOGGER.info("mtd name = log4j1Tologg4j2"); String log4j2Logger = getTemplate(); // String inputFileName = "C:/temp/input.txt"; // String inputFileName = "C:/temp/input_minimal.txt"; String inputFileName = "C:/temp/inputs.txt"; try { List<String> inputLoggerList = Files.readAllLines(Paths.get(inputFileName)); int count = 1; List<LoggerLines> loggerLineList = new ArrayList<LoggerLines>(); LoggerLines loggerLine = null; for (String line : inputLoggerList) { if (line != null && !line.isEmpty()) { if (count == 1) { loggerLine = new LoggerLines(); } else if (count % 7 == 0) { loggerLineList.add(loggerLine); loggerLine = new LoggerLines(); } if (line.contains("######################")) { loggerLine.setTitle(line); } else if (line.contains("log4j.category.")) { String s = "log4j.category."; String loggerName = line.substring(s.length(), line.indexOf("=")); String loggerLevel = line.substring(line.indexOf("=") + 1, line.indexOf(",")); String appenderRef = line.substring(line.indexOf(",") + 1); loggerLine.setLoggerName(loggerName.trim()); loggerLine.setLoggerLevel(loggerLevel.trim()); loggerLine.setAppenderRef(appenderRef.trim()); } else if (line.contains("/ECOS_NAS/")) { String 
fileName = line.substring(line.indexOf("/ECOS_NAS/")); fileName = fileName.replace("${app.environment}", "${appEnv}"); fileName = fileName.replace("${jboss.jvm.name}", "${jbossJVMname}"); loggerLine.setFileName(fileName); } count++; } } System.out.println("loggerLines size = " + loggerLineList.size()); List<Integer> filters = new ArrayList<Integer>(Arrays.asList(5)); for (int i = 1; i <= 5; i++) { if (filters.contains(i)) { log4j1Tologg4j2Processor(i, loggerLineList, log4j2Logger); } // System.out.println("i = " + i + " filters = " + filters); } } catch (IOException e) { e.printStackTrace(); } } private static void log4j1Tologg4j2Processor(int option, List<LoggerLines> loggerLineList, String log4j2Logger) { StringBuilder sb = new StringBuilder(); String formattedString; switch (option) { case 1: for (LoggerLines loggerLines : loggerLineList) { sb.append(loggerLines.getTitle()); sb.append(System.lineSeparator()); formattedString = log4j2Logger.replace("%s", loggerLines.getAppenderRef()); formattedString = formattedString.replace("Batch.CovgTransformation", loggerLines.getLoggerName()); sb.append(formattedString); sb.append(System.lineSeparator()); } break; case 2: sb = new StringBuilder(); for (LoggerLines loggerLines : loggerLineList) { formattedString = String.format("property.%s.level = %s", loggerLines.getAppenderRef(), loggerLines.getLoggerLevel()); sb.append(formattedString); sb.append(System.lineSeparator()); } break; case 3: sb = new StringBuilder(); for (LoggerLines loggerLines : loggerLineList) { formattedString = String.format("property.%s = %s", loggerLines.getAppenderRef(), loggerLines.getFileName()); sb.append(formattedString); sb.append(System.lineSeparator()); } break; case 4: for (LoggerLines loggerLines : loggerLineList) { sb.append(","); sb.append(loggerLines.getAppenderRef()); } break; case 5: for (LoggerLines loggerLines : loggerLineList) { LOGGER = LoggerFactory.getLogger(loggerLines.getLoggerName()); String LoggerName = 
loggerLines.getLoggerName(); if (LOGGER.isInfoEnabled()) { LOGGER.info("Sample info message "+LoggerName); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Sample debug message "+LoggerName); } if (LOGGER.isErrorEnabled()) { LOGGER.error("Sample error message "+LoggerName); } if (LOGGER.isTraceEnabled()) { LOGGER.trace("Sample trace message "+LoggerName); } // System.out.println(loggerLines.toString()); } break; default: break; } System.out.println(sb.toString()); } }<file_sep>package com.logicbig.example; public class ExampleMain { public static void main(String[] args) { MyClass.log4j1Tologg4j2(); //MyClass.checkDelimitterLines(); //MyClass.writeSB(); //MyClass.appendSyslogLog4j2(); } }
810502a98aac9bec44a4c74ac2d532a300b645dd
[ "Java" ]
3
Java
Saravana1992-java/Log4j2_Slf4j
f6bd1b411c323be4251944286749c31a9462aac8
43aba7eca9da5d4696e6380b272d360df36e0cae
refs/heads/master
<file_sep>#-*- coding: utf-8 -*- import mysql.connector import database1 import json import time import datetime def messagelog(beg,end,group): conn = mysql.connector.connect( host=database1.host, port=database1.port, user=database1.user, passwd=database1.passwd, db=database1.db, charset="utf8", buffered = True ) jsonStr ='' try: cur2 = conn.cursor() sel="select min(ID) from groups where name='%s'"% group cur2.execute(sel) groupid=cur2.fetchone() if len(end)<13: end=end+' 23:59:59' sel1 = "select distinct message.content,message.Time,members.NickName,message.type,message.url from message,members where message.time>='%s' and message.time<='%s' and message.group_ID='%d' and message.member_ID=members.ID order by message.time desc"%(beg,end,int(groupid[0])) cur2.execute(sel1) results = cur2.fetchall() users = [] for r in results: user = {} user['content'] = r[0] user['Time'] = r[1].strftime("%Y-%m-%d %H:%M:%S") user['NickName'] = r[2] user['type'] = r[3] user['url'] = r[4] users.append(user) jsonStr = json.dumps(users) except Exception as e: pass finally: cur2.close() conn.close() return jsonStr<file_sep>/* Navicat MySQL Data Transfer Source Server : yiyi Source Server Version : 50638 Source Host : 172.16.31.10:3300 Source Database : wechat Target Server Type : MYSQL Target Server Version : 50638 File Encoding : 65001 Date: 2017-12-28 20:31:49 */ SET FOREIGN_KEY_CHECKS=0; -- ---------------------------- -- Table structure for badinfo -- ---------------------------- DROP TABLE IF EXISTS `badinfo`; CREATE TABLE `badinfo` ( `ID` int(10) unsigned NOT NULL AUTO_INCREMENT, `Content` varchar(255) DEFAULT NULL, PRIMARY KEY (`ID`) ) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for expression -- ---------------------------- DROP TABLE IF EXISTS `expression`; CREATE TABLE `expression` ( `ID` int(10) unsigned NOT NULL AUTO_INCREMENT, `Expression` varchar(255) DEFAULT NULL, PRIMARY KEY (`ID`) ) ENGINE=InnoDB DEFAULT 
CHARSET=utf8; -- ---------------------------- -- Table structure for groups -- ---------------------------- DROP TABLE IF EXISTS `groups`; CREATE TABLE `groups` ( `ID` int(10) unsigned NOT NULL AUTO_INCREMENT, `Name` varchar(255) DEFAULT NULL, `Remark` varchar(255) DEFAULT NULL, `Manager_ID` int(10) DEFAULT NULL, PRIMARY KEY (`ID`) ) ENGINE=InnoDB AUTO_INCREMENT=131 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for keywords -- ---------------------------- DROP TABLE IF EXISTS `keywords`; CREATE TABLE `keywords` ( `ID` int(10) unsigned NOT NULL AUTO_INCREMENT, `Words` varchar(255) DEFAULT NULL, `Reply_ID` int(10) DEFAULT NULL, PRIMARY KEY (`ID`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for manager -- ---------------------------- DROP TABLE IF EXISTS `manager`; CREATE TABLE `manager` ( `ID` int(10) unsigned NOT NULL AUTO_INCREMENT, `Name` varchar(255) DEFAULT NULL, `Member_ID` int(10) DEFAULT NULL, `Permissions` int(2) DEFAULT NULL, PRIMARY KEY (`ID`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for members -- ---------------------------- DROP TABLE IF EXISTS `members`; CREATE TABLE `members` ( `ID` int(10) unsigned NOT NULL AUTO_INCREMENT, `Gender` bit(1) DEFAULT NULL, `NickName` varchar(255) DEFAULT NULL, `Group_ID` int(10) DEFAULT NULL, `SpeakTimes` int(10) DEFAULT NULL, `JionTime` datetime DEFAULT NULL, `City` varchar(25) DEFAULT NULL, PRIMARY KEY (`ID`) ) ENGINE=InnoDB AUTO_INCREMENT=33 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for message -- ---------------------------- DROP TABLE IF EXISTS `message`; CREATE TABLE `message` ( `ID` int(10) unsigned NOT NULL AUTO_INCREMENT, `Content` text CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci, `url` text, `Time` datetime DEFAULT NULL, `Group_ID` int(10) DEFAULT NULL, `Member_ID` int(10) DEFAULT NULL, `Type` varchar(50) DEFAULT NULL, PRIMARY KEY (`ID`) ) ENGINE=InnoDB 
AUTO_INCREMENT=251 DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for reply -- ---------------------------- DROP TABLE IF EXISTS `reply`; CREATE TABLE `reply` ( `ID` int(10) unsigned NOT NULL AUTO_INCREMENT, `Message` text, PRIMARY KEY (`ID`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; <file_sep>import mysql.connector import database1 def readGname(name): conn = mysql.connector.connect( host=database1.host, port=database1.port, user=database1.user, passwd=database1.passwd, db=database1.db, charset="utf8", buffered = True ) groupnames=[] try: cur1 = conn.cursor() sel1 = "select groups.name from groups,manager,group2manager where manager.Name='%s' and group2manager.manager_ID=manager.ID and groups.ID=group2manager.group_ID"% name cur1.execute(sel1) results = cur1.fetchall() groupnames=[] for r in results: gname=r[0] groupnames.append(gname) conn.commit() except Exception as e: pass finally: cur1.close() conn.close() return groupnames<file_sep># host='172.16.17.32' # port=3300 # user='root' # passwd='!<PASSWORD>' # db='wechat' host='localhost' port=3306 user='root' passwd='<PASSWORD>' db='wechat' <file_sep>#-*- coding: utf-8 -*- import mysql.connector import database1 def addmessage(dic): conn = mysql.connector.connect( host=database1.host, port=database1.port, user=database1.user, passwd=database1.passwd, db=database1.db, charset="utf8mb4", buffered = True ) try: cur1 = conn.cursor() cur2 = conn.cursor() cur3 = conn.cursor() sel="select id from members where nickname='%s'" % dic['rename'] cur1.execute(sel) memberid =cur1.fetchone() sel1 = "select ID from groups where name='%s'" % dic['gname'] cur2.execute(sel1) groupid = cur2.fetchone() if dic['type']=='Sharing': url=dic['url'] else: url='' if groupid==None: pass else: if memberid == None: inse = "insert into members (nickname)values('%s')" % (dic['rename']) cur1.execute(inse) sel = "select id from members where nickname='%s'" % dic['rename'] cur1.execute(sel) memberid = cur1.fetchone() 
inser="insert into group2member (group_ID,member_ID)values('%s','%s')" % (groupid[0],memberid[0]) cur1.execute(inser) # str="insert into message (Content,group_id,member_id,time,Type,url) values('%s','%d','%d','%s','%s','%s')"%(dic['info'], int(groupid[0]) ,int(memberid[0]), dic['time'],dic['type'],url) # cur3.execute(str) print(dic['info'], int(groupid[0]), int(memberid[0]), dic['time'], dic['type'], url) str = "insert into message (Content,group_id,member_id,time,Type,url) values(%s,%s,%s,%s,%s,%s)" cur3.execute(str, (dic['info'], groupid[0], memberid[0], dic['time'], dic['type'], url)) conn.commit() except Exception as e: print(e) pass finally: cur1.close() cur2.close() cur3.close() conn.close()<file_sep>import mysql.connector import database1 def recordGname(gname,manager): conn = mysql.connector.connect( host=database1.host, port=database1.port, user=database1.user, passwd=database1.passwd, db=database1.db, charset="utf8", buffered=True ) try: cur2 = conn.cursor() cur1 = conn.cursor() sel = "select ID from members where NickName='%s'"% manager cur1.execute(sel) managerID=cur1.fetchone() if managerID==None: inse="insert into members (nickname) values('%s')"% manager cur2.execute(inse) cur1.execute(sel) managerID = cur1.fetchone() sel = "select ID from manager where Name='%s'" % manager cur1.execute(sel) manID = cur1.fetchone() if manID == None: inse = "insert into manager (name,member_ID) values('%s','%s')" % (manager,int(managerID[0])) cur2.execute(inse) cur1.execute(sel) manID = cur1.fetchone() sel1 = "delete from group2manager where manager_ID='%s'"% int(manID[0]) cur1.execute(sel1) for name in gname: sel = "select ID from groups where Name='%s'" % name cur1.execute(sel) ID = cur1.fetchone() if ID == None: inse = "insert into groups (name) values('%s')" % name cur2.execute(inse) cur1.execute(sel) ID = cur1.fetchone() ins="insert into group2manager (group_ID,manager_ID) values('%s','%s')" % (int(ID[0]),int(manID[0])) cur1.execute(ins) conn.commit() except 
Exception as e: print(e) pass finally: cur1.close() conn.close() <file_sep>var groupfiles=[]; var base64group=[]; $(function(){ $(document).unbind('click').bind('click',function(){ $('.emoijbox').css('display','none'); $('#emoijdivgroup').css('display','none'); }); $("#emoij").unbind("click").bind("click",function(e) { e.stopPropagation(); $.ajax({ type: "post", url: "/emoijtitle", dataType: "json", async:false, success: function (data) { $("#titleul").empty(); var list=data.titlelist; var content=''; for(var i=0;i<list.length;i++) { if(list[i]=="guanfang") { content=content+'<li id="'+list[i]+'" onclick="changetitle(this,event)"><span>官方表情</span></li>'; break; } } for(var i=0;i<list.length;i++) { if(list[i]=="shoucang") { content=content+'<li id="'+list[i]+'" onclick="changetitle(this,event)"><span>收藏表情</span></li>'; break; } } for(var i=0;i<list.length;i++) { if(list[i]!="shoucang"&& list[i]!="guanfang") { content=content+'<li id="'+list[i]+'" onclick="changetitle(this,event)"><span>'+list[i]+'</span></li>'; } } $("#titleul").append(content); } }); $(".emoijbox").show(); var width=0; var ligroup=$("#titleul").find("li"); for(var i=0;i<ligroup.length;i++) { width=width+$(ligroup[i]).width(); } $("#emoijtitle2").width(width+100); $("#titleul li:eq(0)").addClass("liactive"); $.ajax({ type: "post", url: "/emoij", dataType: "json", success: function (data) { $("#emoijul").empty(); var list=data.list; var content=""; for(var i=0;i<list.length;i++) { content=content+'<li><img onclick="sendguanfangemoij(this,event)" src="'+list[i]+'"/></li>'; } $("#emoijul").append(content); } }); }) $("#emoijgroup").unbind("click").bind("click",function(e) { e.stopPropagation(); $.ajax({ type: "post", url: "/emoijtitle", dataType: "json", async:false, success: function (data) { $("#titleulgroup").empty(); var list=data.titlelist; var content=''; for(var i=0;i<list.length;i++) { if(list[i]=="guanfang") { content=content+'<li id="'+list[i]+'" 
onclick="changetitlegroup(this,event)"><span>官方表情</span></li>'; break; } } for(var i=0;i<list.length;i++) { if(list[i]=="shoucang") { content=content+'<li id="'+list[i]+'" onclick="changetitlegroup(this,event)"><span>收藏表情</span></li>'; break; } } for(var i=0;i<list.length;i++) { if(list[i]!="shoucang"&& list[i]!="guanfang") { content=content+'<li id="'+list[i]+'" onclick="changetitlegroup(this,event)"><span>'+list[i]+'</span></li>'; } } $("#titleulgroup").append(content); } }); $("#emoijdivgroup").show(); var width=0; var ligroup=$("#titleulgroup").find("li"); for(var i=0;i<ligroup.length;i++) { width=width+$(ligroup[i]).width(); } $("#emoijtitle2group").width(width+100); $("#titleulgroup li:eq(0)").addClass("liactive"); $.ajax({ type: "post", url: "/emoij", dataType: "json", success: function (data) { $("#emoijulgroup").empty(); var list=data.list; var content=""; for(var i=0;i<list.length;i++) { content=content+'<li><img onclick="sendguanfangemoij(this,event)" src="'+list[i]+'"/></li>'; } $("#emoijulgroup").append(content); } }); }) $("#close").unbind("click").bind("click",function(){ $(".emoijbox").hide(); }) $("#closegroup").unbind("click").bind("click",function(){ $("#emoijdivgroup").hide(); }) $("#importemoij").unbind("click").bind("click",function(e){ $("#importemoijfile").click(); e.stopPropagation(); }) $("#emoijimportclose").unbind("click").bind("click",function(){ $("#emoijimport").hide(); }) $("#sendinport").unbind("click").bind("click",function(){ var value=$("#title").val(); if(value=="") { alert("请输入表情包的名称"); $("#title").css("background-color","yellow"); return; } if(groupfiles.length==0) { alert("没有表情需要导入"); return; } var imgdata = new FormData(); for(var i=0;i<groupfiles.length;i++) { imgdata.append("importlist"+i,groupfiles[i]); } imgdata.append("title",$("#title").val()); imgdata.append("length",groupfiles.length); $.ajax({ type: "post", url: "/impemoij", data: imgdata, cache: false, processData: false, contentType: false, success: function 
(data) { alert("导入成功"); $("#emoijimport").css("display",'none'); } }); }) $("#title").bind("change",function(e){ $.ajax({ type: "post", url: "/emoijtitle", dataType: "json", async:false, success: function (data) { var list=data.titlelist; for(var i=0;i<list.length;i++ ) { if( $("#title").val()==list[i]) { $("#title").css("background-color","yellow"); return; } } $("#title").css("background-color","white"); } }); }) }) function changetitle(evt,event) { event.stopPropagation(); var $title=$(evt); if(!$title.hasClass("liactive")) { $("#titleul li.liactive").removeClass("liactive"); var id=$title.attr("id"); $title.addClass("liactive"); $.ajax({ type: "post", url: "/emoijone", data: {id:id}, dataType: "json", success: function (data) { $("#emoijul").empty(); var list=data.list; var content=""; if(id=="guanfang") { for(var i=0;i<list.length;i++) { content=content+'<li><img onclick="sendguanfangemoij(this,event)" src="'+list[i]+'"/></li>'; } }else { for(var i=0;i<list.length;i++) { content=content+'<li><img onclick="sendemoij(this,event)" src="'+list[i]+'"/></li>'; } } $("#emoijul").append(content) } }); } } function changetitlegroup(evt,event) { event.stopPropagation(); var $title=$(evt); if(!$title.hasClass("liactive")) { $("#titleulgroup li.liactive").removeClass("liactive"); var id=$title.attr("id"); $title.addClass("liactive"); $.ajax({ type: "post", url: "/emoijone", data: {id:id}, dataType: "json", success: function (data) { $("#emoijulgroup").empty(); var list=data.list; var content=""; if(id=="guanfang") { for(var i=0;i<list.length;i++) { content=content+'<li><img onclick="sendguanfangemoijgroup(this,event)" src="'+list[i]+'"/></li>'; } }else { for(var i=0;i<list.length;i++) { content=content+'<li><img onclick="sendemoijgroup(this,event)" src="'+list[i]+'"/></li>'; } } $("#emoijulgroup").append(content) } }); } } function handleFiles(e) { groupfiles=e.target.files; base64group=[]; $("#emoijimportul").empty(); for(var i=0;i<groupfiles.length;i++) { var reader = 
new FileReader(); reader.onload = (function (file) { return function (e) { $("#emoijimportul").append('<li><img src="'+this.result+'"/><i onclick="deleteli(this)" class="fa fa-window-close closeli"></i></li>') }; })(groupfiles[i]); reader.readAsDataURL(groupfiles[i]); } $("#emoijimport").css('display','block'); } function deleteli(evt) { var $ele=$(evt).parent(); var index = $ele.index(); groupfiles1=[]; for(var i=0;i<index;i++) { groupfiles1[i]=groupfiles[i]; } for(var i=index;i<groupfiles.length-1;i++) { groupfiles1[i]=groupfiles[i+1]; } groupfiles=groupfiles1; $ele.remove(); } function sendemoij(evt,event) { event.stopPropagation(); var $emoij=$(evt); var src=$emoij.attr("src"); var groups = new Array(); var groupnames = new Array(); var count = 0; var sendid; $("#groups-ul").find("li").each(function(index, e){ if ($(this).hasClass("active")) { var aitecount = parseInt($(this).attr("data-aite")); var aites = ""; for (var i = 0; i < aitecount; i++) { aites += "@"; } sendid = $(this).find(".group-name").attr("data-content"); groups[count] = aites + $(this).find(".group-name").attr("data-content"); groupnames[count++] = $(this).find(".group-name").attr("title"); } return count; }); $.ajax({ type: "post", url: "/sendemoij", data:{ groups:groups, message:src }, dataType: "json", success: function (data) { var today = new Date(); var sendtime = today.toLocaleTimeString(); var pic = $("#mypic").attr("src"); var filepath = data.success.split("/"); var length = filepath.length; var message ='<img src="'+ data.success +'" class="emoji-pic" onclick="ShowOptions(this)"><div class="picture-func"><i class="fa fa-search-plus" title="查看" onclick="ShowPic(this)"></i><i class="fa fa-plus" title="添加表情"></i></div>'; var li = '<li class="message"><img class="pic-right" src="'+ pic +'"/>' + '<span class="message-box-right"><span class="message-user">' + '<p class="NickName-right"><span>'+ sendtime +'</span><span> 我</span></p>' + '</span><span class="message-content"><i 
class="angle-right"></i>' + '<span class="text-right">'+ message +'</span></span></span></li>'; $("#" + sendid).append(li); } }); } function sendguanfangemoij(evt,event) { event.stopPropagation(); var $emoij=$(evt); var src=$emoij.attr("src"); var value=src.split("/"); var value1=value[value.length-1]; var message=""; for(var key in face){ if(face[key]==value1){ message=key; break; } } // var groups = new Array(); // var groupnames = new Array(); // var count = 0; // var sendid; // $("#groups-ul").find("li").each(function(index, e){ // if ($(this).hasClass("active")) { // var aitecount = parseInt($(this).attr("data-aite")); // var aites = ""; // for (var i = 0; i < aitecount; i++) { // aites += "@"; // } // sendid = $(this).find(".group-name").attr("data-content"); // groups[count] = aites + $(this).find(".group-name").attr("data-content"); // groupnames[count++] = $(this).find(".group-name").attr("title"); // } // return count; // }); $(evt).parents(".emoijbox").prev().find("textarea").val($(evt).parents(".emoijbox").prev().find("textarea").val() + message); /*$.ajax({ type: "post", url: "/sendguangfangemoij", data:{ message:message, src:src, groups:groups }, dataType: "json", success: function (data) { var today = new Date(); var sendtime = today.toLocaleTimeString(); var pic = $("#mypic").attr("src"); var filepath = data.success.split("/"); var length = filepath.length; var message ='<img src="'+ data.success +'" class="emoji-pic" onclick="ShowOptions(this)"><div class="picture-func"><i class="fa fa-search-plus" title="查看" onclick="ShowPic(this)"></i><i class="fa fa-plus" title="添加表情"></i></div>'; var li = '<li class="message"><img class="pic-right" src="'+ pic +'"/>' + '<span class="message-box-right"><span class="message-user">' + '<p class="NickName-right"><span>'+ sendtime +'</span><span> 我</span></p>' + '</span><span class="message-content"><i class="angle-right"></i>' + '<span class="text-right">'+ message +'</span></span></span></li>'; $("#" + 
// Send a custom emoji image to every group ticked in the multi-group panel
// (POST /sendemoij), then echo the sent image into the chat view of the
// last selected group and confirm with an alert.
function sendemoijgroup(evt, event) {
    event.stopPropagation();
    var src = $(evt).attr("src");
    var groups = [];
    // Fixed: `sendid` was an accidental implicit global (no `var`).
    var sendid;
    // Collect the @-prefixed ids of every checked group; data-aite holds
    // how many leading "@" markers the backend expects for the group.
    $("#multigroups-ul").find("li").each(function (index, e) {
        if ($(this).find("i").hasClass("fa-check-square")) {
            var aitecount = parseInt($(this).attr("data-aite"));
            var aites = "";
            for (var i = 0; i < aitecount; i++) {
                aites += "@";
            }
            sendid = $(this).find(".group-name").attr("data-content");
            groups.push(aites + sendid);
        }
    });
    $.ajax({
        type: "post",
        url: "/sendemoij",
        data: {
            groups: groups,
            message: src
        },
        dataType: "json",
        success: function (data) {
            var sendtime = new Date().toLocaleTimeString();
            var pic = $("#mypic").attr("src");
            var message = '<img src="' + data.success + '" class="emoji-pic" onclick="ShowOptions(this)"><div class="picture-func"><i class="fa fa-search-plus" title="查看" onclick="ShowPic(this)"></i><i class="fa fa-plus" title="添加表情"></i></div>';
            var li = '<li class="message"><img class="pic-right" src="' + pic + '"/>' +
                '<span class="message-box-right"><span class="message-user">' +
                '<p class="NickName-right"><span>' + sendtime + '</span><span>&nbsp;&nbsp;我</span></p>' +
                '</span><span class="message-content"><i class="angle-right"></i>' +
                '<span class="text-right">' + message + '</span></span></span></li>';
            $("#" + sendid).append(li);
            alert("发送成功!");
        }
    });
}
def getactivite(gname):
    """Collect per-member message-activity statistics for the group *gname*.

    Returns a list of dicts, one per member that has ever posted in the
    group, with keys:
        name       - member nickname (from the ``members`` table)
        allcount   - total messages sent in the group
        monthcount - messages in the last 30 days
        weekcount  - messages in the last 7 days
    Returns an empty list on failure (best-effort, errors are swallowed).
    """
    conn = mysql.connector.connect(
        host=database1.host,
        port=database1.port,
        user=database1.user,
        passwd=database1.passwd,
        db=database1.db,
        charset="utf8",
        buffered=True,
    )
    # Create the cursor outside the try so the finally-clause close() can
    # never hit an unbound name (the old code created it inside the try).
    cur = conn.cursor()
    activity = []
    try:
        # Parameterized queries: the previous ``%`` string formatting was
        # open to SQL injection through the group name.
        base_sql = (
            "select distinct member_ID,content,time,group_ID "
            "from message,groups "
            "where group_ID=groups.ID and groups.name=%s"
        )
        cur.execute(base_sql, (gname,))
        all_ids = [row[0] for row in cur.fetchall()]

        def _ids_since(days):
            # Member ids of messages newer than ``days`` days ago.
            since = (datetime.datetime.now() - datetime.timedelta(days=days)
                     ).strftime("%Y-%m-%d %H:%M:%S")
            cur.execute(base_sql + " and message.Time >= %s", (gname, since))
            return [row[0] for row in cur.fetchall()]

        week_ids = _ids_since(7)
        month_ids = _ids_since(30)

        # dict.fromkeys deduplicates while keeping first-seen order, matching
        # the original {}.fromkeys(...).keys() behaviour (a set would not).
        for member_id in dict.fromkeys(all_ids):
            cur.execute("select NickName from members where ID=%s", (member_id,))
            name = cur.fetchone()
            activity.append({
                'name': name[0],
                'allcount': all_ids.count(member_id),
                'monthcount': month_ids.count(member_id),
                'weekcount': week_ids.count(member_id),
            })
        conn.commit()
    except Exception:
        # Best-effort: fall through and return whatever was collected so far.
        pass
    finally:
        cur.close()
        conn.close()
    return activity
charset="utf8" ) cur2 = conn.cursor() sel1 = "select id,Name from groups" cur2.execute(sel1) results = cur2.fetchall() users = [] for r in results: user = {} user['id'] = r[0] user['name'] = r[1] users.append(user) jsonStr = json.dumps(users) cur2.close() conn.close() return jsonStr<file_sep>#! /usr/bin/python #-*- coding: utf-8 -*- #coding=utf8 import itchat import time from itchat.content import * import base64 import os import itchat.config import types import random import re from addmessage import * from readGname import * from badinfo import * picDir = '/Users/apple/Documents/mavenProject/WechatManager/static/qr/QR.png' class ChatRun(object): def __init__(self,socketio, *args, **kw): self.__friends = [] self.socketio=socketio # 获取自己 self.__mySelf = {} # 获取所有群 self.__groups = [] # @回复内容 self.__atContent = '' # 关键字回复字典 self.__keyWordReponse = {} # 关键字广告检测 self.__keywordAdd=[] # 待管理的群 self.__needGroups = [] self.__gid = [] # 二维码 self.__fileQr = None self.__QR = '' self.__QrTrue = False # 登录成功 self.__successLogin = False # 收到消息数量 self.__newMsgCount = 0 # 新消息缓存 self.__newMsgList = [] def run(self): itchat.login(self.socketio) #itchat.auto_login(enableCmdQR=True) self.__friends = itchat.get_friends(update=True)[0:] #获取自己 self.__mySelf = self.__friends[0] # 获取所有群 self.__groups = itchat.get_chatrooms() # @回复内容 self.__atContent = '' # 关键字回复字典 self.__keyWordReponse = {} # 待管理的群 self.setNeedGroupByName(readGname(self.__mySelf['NickName'])) # 广告关键字 self.setAddKey() #gid self.updateGid() # for gs in self.__groups: # print gs['NickName'] # print itchat.search_chatrooms(u'项目测试') # 群处理 # 文本 @itchat.msg_register([TEXT, MAP, CARD, NOTE, SHARING], isGroupChat=True) def reply_group(msg): memberList = itchat.update_chatroom(msg['FromUserName'], detailedMember=True) realNickName="" #print("memberList: ", memberList.get('MemberList',[])) for member in memberList.get('MemberList',[]): if msg['ActualUserName'] == member.get('UserName'): realNickName = member.get('NickName') break 
#print("realName:", realNickName) if self.getmySelfID() == msg['FromUserName']: if msg['Type'] == 'Sharing': self.sendMsg(msg['Text'], self.getmySelfName(), self.getGroupNameById(msg['ToUserName']), time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg['CreateTime'])), self.getmySelfID(), msg['ToUserName'], msg['Type'], url = msg['Url'],rename=self.getmySelfName()) else: self.sendMsg(msg['Text'], self.getmySelfName(), self.getGroupNameById(msg['ToUserName']),time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(msg['CreateTime'])), msg['FromUserName'],msg['ToUserName'],msg['Type'],rename=self.getmySelfName()) self.updateGid() gid = self.__gid #print 'need:',self.__needGroups, 'gid:', len(gid) # print gid for gs in gid: if msg['FromUserName'] in gs: text = msg['Text'] if msg['Type'] is "Note": who = '' invite1 = re.compile(r'^\w+ invited (\S*) to the group chat$') m = invite1.match(text) invite2 = re.compile(r'^"?([^"]*)"? joined the group chat via the QR Code shared by "?\S*"?.$') m2 = invite2.match(text) invite3 = re.compile(u'^"?\S*"?邀请"?([^"]*)"?加入了群聊 *$') m3 = invite3.match(text) invite4 = re.compile(u'^"?([^"]*)"?通过扫描"?\S*"?分享的二维码加入群聊( )*( 撤销)?$') m4 = invite4.match(text) if m: itchat.send(u"欢迎" + m.group(1) + u"加入本群 @" + m.group(1), msg['FromUserName']) elif m2: itchat.send(u"欢迎" + m2.group(1) + u"加入本群 @" + m2.group(1), msg['FromUserName']) elif m3: itchat.send(u"欢迎" + m3.group(1) + u"加入本群 @" + m3.group(1), msg['FromUserName']) elif m4: itchat.send(u"欢迎" + m4.group(1) + u"加入本群 @" + m4.group(1), msg['FromUserName']) flag = False if not self.getmySelfID() == msg['FromUserName']: infomation = msg['Text'] phone1 = re.compile( r'([\S\s])*((\+86)|(86))?([^\d]{0,5})?(\d[^\d]{0,5}){11}([^\d]|\b)([\S\s])*') phone2 = re.compile(r'([\S\s])*(\d[^\d]{0,5}){8,10}([^\d]|\b)([\S\s])*') link = re.compile( r'([\S\s])*((http|ftp|https)://)(([a-zA-Z0-9\._-]+\.[a-zA-Z]{2,6})|([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}))(:[0-9]{1,4})*(/[a-zA-Z0-9\&%_\./-~-]*)?') if msg['Type'] == 
'Card': flag = True self.sendMsg(infomation.get('NickName'), msg['ActualNickName'], gs[msg['FromUserName']], time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg['CreateTime'])), msg['ActualUserName'], msg['FromUserName'], msg['Type'], rename=realNickName, addType='card') elif link.match(infomation): flag=True self.sendMsg(infomation, msg['ActualNickName'], gs[msg['FromUserName']], time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg['CreateTime'])), msg['ActualUserName'], msg['FromUserName'], msg['Type'], rename=realNickName,addType='link') elif phone1.match(infomation) or phone2.match(infomation): flag = True self.sendMsg(infomation, msg['ActualNickName'], gs[msg['FromUserName']], time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg['CreateTime'])), msg['ActualUserName'], msg['FromUserName'], msg['Type'], rename=realNickName, addType='phone') elif msg['Type'] == 'Sharing': flag = True if infomation == '': infomation = u'不支持预览,请在手机上查看。' self.sendMsg(infomation, msg['ActualNickName'], self.getGroupNameById(msg['FromUserName']), time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg['CreateTime'])), msg['ActualUserName'], msg['FromUserName'], msg['Type'], url=msg['Url'], rename=realNickName, addType='sharing') else: for key in self.__keywordAdd: if infomation.find(key) > -1: flag=True self.sendMsg(infomation, msg['ActualNickName'], gs[msg['FromUserName']], time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg['CreateTime'])), msg['ActualUserName'], msg['FromUserName'], msg['Type'], rename=realNickName, addType='keyword', addkeyword=key) break; if not flag: text = msg['Text'] self.sendMsg(text,msg['ActualNickName'],gs[msg['FromUserName']], time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(msg['CreateTime'])), msg['ActualUserName'],msg['FromUserName'],msg['Type'],rename=realNickName) # print msg, 'type:', type(msg),'\n' if msg['isAt']: gName = gs[msg['FromUserName']] self.saveText(text, gName, msg['ActualNickName']) for key in self.__keyWordReponse: if text.find(key) 
> -1: time.sleep(3) value = self.__keyWordReponse[key] if not value == 'auto': itchat.send('%s @%s ' % (self.__keyWordReponse[key], msg['ActualNickName']), msg['FromUserName']) self.sendMsg(self.__keyWordReponse[key], self.__mySelf['NickName'], gs[msg['FromUserName']], time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg['CreateTime'])), self.__mySelf['UserName'], msg['FromUserName'], msg['Type'], rename=self.__mySelf['NickName']) else: itchat.send(u'收到消息: %s @%s ' % (text, msg['ActualNickName']), msg['FromUserName']) self.sendMsg(text, self.__mySelf['NickName'], gs[msg['FromUserName']], time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg['CreateTime'])), self.__mySelf['UserName'], msg['FromUserName'], msg['Type'], rename=self.__mySelf['NickName']) break elif msg['isAt']: time.sleep(3) itchat.send(self.__atContent, msg['FromUserName']) break break @itchat.msg_register([PICTURE,RECORDING,ATTACHMENT,VIDEO], isGroupChat=True) def reply_files(msg): memberList = itchat.update_chatroom(msg['FromUserName'], detailedMember=True) realNickName = "" for member in memberList.get('MemberList', []): if msg['ActualUserName'] == member.get('UserName'): realNickName = member.get('NickName') break basepath = os.path.dirname(__file__) ran = (int)(time.time()) + random.randint(0,(int)(time.time() / 10000)) upload_path = os.path.join(basepath, 'static/picture',(str)(ran)+"-"+msg['FileName']) print(upload_path) msg['Text'](upload_path) fsize = os.path.getsize(upload_path) if self.getmySelfID() == msg['FromUserName']: if not fsize==0: self.sendMsg('static/picture/'+(str)(ran) + "-" + msg['FileName'], self.getmySelfName(), self.getGroupNameById(msg['ToUserName']),time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(msg['CreateTime'])), msg['FromUserName'],msg['ToUserName'],msg['Type'],rename=self.getmySelfName()) else: self.sendMsg('static/img/nonepic.jpg', self.getmySelfName(), self.getGroupNameById(msg['ToUserName']), time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg['CreateTime'])), 
msg['FromUserName'], msg['ToUserName'], msg['Type'], rename=self.getmySelfName()) self.updateGid() gid = self.__gid for gs in gid: if msg['FromUserName'] in gs: text = 'static/picture/'+(str)(ran) + "-" + msg['FileName'] if msg['Type'] == 'Video': self.sendMsg(text, msg['ActualNickName'], gs[msg['FromUserName']], time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg['CreateTime'])), msg['ActualUserName'], msg['FromUserName'], msg['Type'], rename=realNickName, addType='Viedo') else: if not fsize == 0: self.sendMsg(text, msg['ActualNickName'], gs[msg['FromUserName']], time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg['CreateTime'])), msg['ActualUserName'], msg['FromUserName'],msg['Type'],rename=realNickName) else: self.sendMsg('static/img/nonepic.jpg', msg['ActualNickName'], gs[msg['FromUserName']], time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg['CreateTime'])), msg['ActualUserName'], msg['FromUserName'],msg['Type'],rename=realNickName) break #其他处理 self.__successLogin = True self.socketio.emit('loginsuccess', {'nickname': self.__mySelf['NickName'], 'pic': bytes.decode(base64.b64encode(self.getMypic()))},json=True, namespace='/login') itchat.run() # 保存群中@自己的文本 # @param text 待保存的文本 # @param group 来自的群的名字 # @param user 发送者的NickName def saveText(self, text, group, user): now = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())) str = u'%s 收到来自于群%s中用户%s的@消息: %s' % (now[5:], group, user, text) # 设置关键回复字字典 def setKeyWordResponse(self, map): self.__keyWordReponse = map # 新增关键回复字及其内容 def addKeyWordResponse(self, map): key = map['key'] val = map['val'] self.__keyWordReponse[key] = val # 删除指定关键回复字 def deleteKeyWordResponse(self, *args): for k in args: if k in self.__keyWordReponse: self.__keyWordReponse.pop(k) # 获取关键字字典 def getKeyWorkMap(self): return self.__keyWordReponse # 获取自身全部信息 def getMyself(self): return self.__mySelf # 获取自身ID def getmySelfID(self): return self.__mySelf['UserName'] # 获取自身NickName def getmySelfName(self): return 
self.__mySelf['NickName'] # 获取自身头像 def getMypic(self): return itchat.get_head_img(userName=self.__mySelf['UserName']) #获取对话头像 def getheadpic(self,ID,GID): return itchat.get_head_img(userName=ID,chatroomUserName=GID) #获取群头像 def getgrouppic(self,gid): list2 = [] # img = itchat.get_head_img(x['UserName']) img = itchat.get_head_img(chatroomUserName=gid) try: # buffer = BytesIO(img) # buffer2 = BytesIO() # image = Image.open(buffer) # image.save(buffer2, format="JPEG") img_str = base64.b64encode(img) img_str=bytes.decode(img_str) except: img_str="" return img_str # 跟新需要管理的群的GroupId def updateGid(self): self.__gid = [] self.updateGroup() for group in self.__groups: if group['UserName'] in self.__needGroups: self.__gid.append({group['UserName']: group['NickName']}) def updateGroup(self): self.__groups = [] self.__groups = itchat.get_chatrooms(update=True) # 根据NickName获取群ID def getGroupIdByName(self,name): for group in self.__groups: if name == group['NickName']: return group['UserName'] # 根据id获取群名 def getGroupNameById(self,id): self.updateGroup() for group in self.__groups: if id == group['UserName']: return group['NickName'] # 群发消息 # @param info 消息 # @param group 需要发送的群,默认为None,如果为None则向所有管理的群发送 def group_information(self, info, group = None): if group == None: list = self.__gid group = [] for l in list: for x in l: group.append(x) if len(group) == 1: itchat.send(info, group[0]) else: for g in group: ran = random.randint(2,7) time.sleep(3 + ran) itchat.send(info, g) for gi in group: gname=self.getGroupNameById(gi) if gname==None: continue else: msg={} msg['info']=info msg['rename']=self.__mySelf['NickName'] msg['gname']=gname msg['time']=time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time())) msg['type']='Text' addmessage(msg) # 更新群 def updateGroup(self): self.__groups = itchat.get_chatrooms() # 获取所有群的成员列表 def getAllGroupMember(self): list = [] for x in self.__groups: memberList = itchat.update_chatroom(x['UserName'], detailedMember=True) list.append({'name': 
x['NickName'], 'id': x['UserName'], 'memberList': memberList}) return list # 获取所有群名称 def getAllGroup(self): self.updateGroup() list = [] for x in self.__groups: img = itchat.get_head_img(chatroomUserName=x['UserName']) try: # buffer = BytesIO(img) # buffer2 = BytesIO() # image = Image.open(buffer) # image.save(buffer2, format="JPEG") img_str = base64.b64encode(img) img_str=bytes.decode(img_str) except: img_str="" list.append({'name': x['NickName'], 'id': x['UserName'], 'need': x['UserName'] in self.__needGroups,'grouppic':img_str}) return list # 获取指定群所有成员 def getGroupMemberInner(self, group): memberList = itchat.update_chatroom(group, detailedMember=True) return memberList # 将群成员封装成所需对象 def getGroupMember(self, group): memberlist = self.getGroupMemberInner(group) list = [] for member in memberlist['MemberList']: list.append({'id': member['UserName'], 'name': member['NickName']}) return list # 获取QR def getQR(self): while(not self.__QrTrue): pass with open(self.__fileQr, 'wb') as f: self.__QR = base64.urlsafe_b64encode(f.read()) return self.__QR def setfileQr(self, f): self.__fileQr = f def setQrTrue(self, b): self.__QrTrue = b def loginSuccess(self): return self.__successLogin def setNeedGroup(self,idlist): self.__needGroups = idlist def setNeedGroupByName(self,namelist): self.updateGroup() for group in self.__groups: if group['NickName'] in namelist: self.__needGroups.append(group['UserName']) def getNeedGroup(self): return self.__needGroups # 收到消息就发送到前台 def sendMsg(self, info, name, gname,time, uid, gid, t, **kw): text = {} text['info'] = info text['name'] = name text['gname'] = gname text['time'] = time text['uid'] = uid text['gid'] = gid text['type'] = t for k, v in kw.items(): text[k] = v self.__newMsgList.append(text) self.__newMsgCount += 1 # 判断后台是否有新消息 def hasNewMsg(self): return len(self.__newMsgList) > 0 def realSend(self): first = self.__newMsgList[0] self.__newMsgList.pop(0) return first # 群发文件 # @param info 图片 # @param group 
需要发送的群,默认为None,如果为None则向所有管理的群发送 def group_file(self, info,mes, ftype='Picture', group=None): if group == None: list = self.__gid group = [] for l in list: for x in l: group.append(x) if len(group) == 1: itchat.send('@%s@%s' % ({'Picture': 'img', 'Video': 'vid'}.get(ftype, 'fil'), info), group[0]) else: for g in group: ran = random.randint(2, 7) time.sleep(3+ran) itchat.send('@%s@%s' % ({'Picture': 'img', 'Video': 'vid'}.get(ftype, 'fil'), info), g) for gi in group: gname = self.getGroupNameById(gi) if gname == None: continue else: msg = {} msg['info'] = mes msg['rename'] = self.__mySelf['NickName'] msg['gname'] = gname msg['time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())) msg['type'] = ftype addmessage(msg) # 增加add关键字 def addkeyadd(self, *args): for key in args: self.__keywordAdd.append(key) recordbadinfo(key) #加入数据库 # 获取广告关键字 def getAddKey(self): return self.__keywordAdd # 读取数据库广告关键字 def setAddKey(self): user = self.getmySelfName() keys=readbadinfo() for key in keys: self.__keywordAdd.append(key) # 删除关键字getMypic def deleteAddKey(self,*args): for key in args: if key in self.__keywordAdd: self.__keywordAdd.remove(key) deletebadinfo(key) # 数据库删除 def deleteMember(self, groupid, memberid): g = itchat.search_chatrooms(userName=groupid) memberlist = [{'UserName': memberid}] itchat.delete_member_from_chatroom(g, memberlist) # memberlist = itchat.update_chatroom(groupid, detailedMember=True)['MemberList'] # itchat.delete_member_from_chatroom(g, memberlist) # for member in memberlist: # if member.get('UserName') == memberid: # itchat.delete_member_from_chatroom(g, member) # break; #注销登录 def logout(self): itchat.logout(self.socketio) self.__friends = [] self.__mySelf = {} self.__groups = [] self.__atContent = '' self.__keyWordReponse = {} self.__keywordAdd = [] self.__needGroups = [] self.__gid = [] self.__fileQr = None self.__QR = '' self.__QrTrue = False self.__successLogin = False self.__newMsgCount = 0 self.__newMsgList = [] #注销chat def 
logout2(self): self.__friends = [] self.__mySelf = {} self.__groups = [] self.__atContent = '' self.__keyWordReponse = {} self.__keywordAdd = [] self.__needGroups = [] self.__gid = [] self.__fileQr = None self.__QR = '' self.__QrTrue = False self.__successLogin = False self.__newMsgCount = 0 self.__newMsgList = []<file_sep>from flask import Flask from flask import send_file from flask import request import json from threading import Lock from flask import render_template from MainChat import ChatRun import threading import base64 from flask_socketio import SocketIO, emit, join_room, leave_room, \ close_room, rooms, disconnect import time import os from werkzeug.utils import secure_filename from addmessage import * from SelectForGroup import * from messagelog import * from recordGname import * import re import types from template import * from memberActivite import * app = Flask(__name__) basepath = os.path.dirname(__file__) async_mode = None app.config['SECRET_KEY'] = 'secret!' socketio = SocketIO(app, async_mode=async_mode) thread = None thread_lock = Lock() chat = ChatRun(socketio) loginstate=False loginedstate=False qrcode=None def background_thread(): global chat while True: if chat.hasNewMsg(): m=chat.realSend() addmessage(m) try: #buffer = BytesIO(chat.getheadpic(m['uid'],m['gid'])) # buffer2 = BytesIO() # image = Image.open(buffer) # image.save(buffer2, format="JPEG") img_str = base64.b64encode(chat.getheadpic(m['uid'],m['gid'])) img_str= bytes.decode(img_str) except: img_str = "" listbase= chat.getgrouppic(m['gid']) dic = {} for k, v in m.items(): dic[k] = v; dic['grouppic'] = listbase dic['pic'] = img_str socketio.emit('msg', dic, json=True,namespace='/test') @socketio.on('connect', namespace='/test') def sendNewMsg(): global thread with thread_lock: if thread is None: thread = socketio.start_background_task(target=background_thread) #emit('msg', {'info': 'jy', 'name': '1', 'gname': '2'}, json=True) @socketio.on('disconnect', namespace='/test') def 
test_disconnect(): print('Client disconnected', request.sid) @app.route('/socket') def tosocket(): return render_template('sockettest.html',async_mode=socketio.async_mode) @app.route('/ToIndex') def toindex(): return render_template('wechat.html',async_mode=socketio.async_mode) @app.route('/') def towechat(): return send_file("templates/login.html") @app.route('/logg/<gname>') def tologg(gname): return render_template("messagelogging.html",gname=gname) # # @app.route('/shit') # def hello_shit(): # return 'hello shit' @app.route('/postpara',methods=['POST']) def post_para(): name = request.form.get('name') print(name) print(request.form.get('age')) # return jsonify({'ok': True}) return '123' @app.route('/testpara/<name>') def testpara(name): print(name) return '' @app.route('/testtemplate') def testtemplate(): return render_template('user.html',name='shabi') @app.route('/getQR', methods=["POST"]) def getQR(): exit_code = os.system('ping www.baidu.com') if exit_code: return json.dumps({'success': 'nonet'}) else: global chat global loginstate global qrcode global socketio global loginedstate if not loginstate: t = threading.Thread(target=lambda: chat.run(), name="chat") t.start() loginstate = True if not loginedstate: return json.dumps({'success': True}) else: chat.logout() loginstate = False loginedstate = False return json.dumps({'success': 'drop'}) else: if not loginedstate: socketio.emit('qrinfo', qrcode, namespace='/login') return json.dumps({'success': True}) else: return json.dumps({'success': 'logined'}) @app.route('/getGroupSelect', methods=["POST"]) def getGroupSelect(): select=SelectForGroup() return select @app.route('/login') def toLogin(): return send_file("templates/login.html") @app.route('/update', methods=['POST']) def updatepage(): exit_code = os.system('ping www.baidu.com') global chat if exit_code: return json.dumps({'refresh': 'false'}) else: try: #buffer = BytesIO(chat.getMypic()) # buffer2 = BytesIO() # image = Image.open(buffer) # 
image.save(buffer2, format="JPEG") img_str = base64.b64encode(chat.getMypic()) img_str = bytes.decode(img_str) except: img_str = "" return json.dumps({'groups': chat.getAllGroup(), 'user': chat.getMyself(), 'userpic': img_str, 'addKey': chat.getAddKey(),'templategroup':readtemplate(chat.getmySelfName()),'refresh': 'true'}) @app.route('/test',methods=['POST']) def test(): global chat return chat.getmySelfID() @app.route('/info', methods=['POST']) def groupInfor(): global chat str = request.form.get('message') group = request.form.getlist('groups[]') if len(group) == 0: group = None chat.group_information(str, group) return json.dumps({'success':True}) @app.route('/activite', methods=['POST']) def activite(): gname = request.form.get('gname') activite=getactivite(gname) return json.dumps(activite) @app.route('/msglogging', methods=['POST']) def msglogging1(): begin = request.form.get('begin') end = request.form.get('end') groupid=request.form.get('group') msg=messagelog(begin,end,groupid) return msg @app.route('/picture', methods=['POST']) def groupPic(): global chat group = request.form.getlist('groups') groups = group[0].split(','); file = request.files['image'] if len(group) == 0: group = None if file: basepath = os.path.dirname(__file__) upload_path = os.path.join(basepath, 'static\\sendFile', secure_filename( time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime(time.time()))+file.filename)) file.save(upload_path) rpath = 'static\\sendFile' + secure_filename(time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime(time.time())) + file.filename) m = re.compile(r'[^\.]+(\.[\d\w]+)+').match(file.filename) types = m.groups()[-1] print(types) dic = {'.bmp':'Picture','.gif':'Picture','.jpeg':'Picture','.png':'Picture','.pcx':'Picture','.tiff':'Picture','.tga':'Picture','.jpg':'Picture','.avi':'Video','.rmvb':'Video','.rm':'Video','.asf':'Video','.divx':'Video','.mpg':'Video','.mpeg':'Video','.mpe':'Video','.wmv':'Video','.mp4':'Video','.mkv':'Video','.vob':'Video'} 
chat.group_file(upload_path,rpath,group=groups,ftype=dic.get(types,'file')) return json.dumps({'success': rpath}) return json.dumps(({'success': False})) @app.route('/p') def stop(): return send_file("templates/picture.html") @app.route('/getGroupMember', methods=['POST']) def getGroupMember(): global chat group = request.form.get('groupID') return json.dumps(chat.getGroupMember(group)) @app.route('/getKeyWord', methods=['POST']) def getKeyWord(): global chat map = request.form.getlist('') return json.dumps({'success':True}) @app.route('/setDefault', methods=['POST']) def setDefaultGroup(): global chat namelist = request.form.getlist("groups[]") gname = [] for item in namelist: name = chat.getGroupNameById(item) gname.append(name) manager=chat.getmySelfName() recordGname(gname,manager) chat.setNeedGroup(namelist) return json.dumps({'success':True}) @app.route('/addKeyWord', methods=['POST']) def addKeyWord(): global chat key = request.form.get('keyword') val = request.form.get('reply') chat.addKeyWordResponse({'key': key, 'val': val}) return json.dumps({'success':True}) @app.route('/addAddkey', methods=['POST']) def addAddkey(): global char key = request.form.get('key') chat.addkeyadd(key) return json.dumps({"success": True}) @app.route('/deleteAddKey', methods=['POST']) def deleteAddkey(): global chat keys = request.form.getlist('keys[]') chat.deleteAddKey(*keys) return json.dumps({"success": True}) @app.route('/emoij', methods=["POST"]) def getemoij(): list=[] for root, dirs, files in os.walk(basepath+"/static/img/emoij/guanfang"): for file in files: if os.path.splitext(file)[1] == '.jpeg' or '.gif' or '.png': list.append("/static/img/emoij/"+"guanfang"+"/"+file) return json.dumps({'list':list}) @app.route('/emoijtitle', methods=["POST"]) def getemoijtitle(): list=[] jsonlist={} for root, dirs, files in os.walk(basepath+"/static/img/emoij"): for file in dirs: list.append(file) jsonlist["titlelist"]=list return json.dumps(jsonlist) @app.route('/emoijone', 
methods=["POST"]) def getemoijone(): list = [] id=request.form.get("id") for root, dirs, files in os.walk(basepath+"/static/img/emoij/"+id): for file in files: if os.path.splitext(file)[1] == '.jpeg' or '.gif' or '.png': list.append("static/img/emoij/"+id+"/"+file) return json.dumps({'list': list}) @app.route('/impemoij', methods=["POST"]) def importemoij(): list = [] id="shoucang" lenth = int(request.form.get("length")) for i in range(0,lenth): file = request.files['importlist'+str(i)] # print(id); filename = file.filename.split('.')[0] + '_new.' + file.filename.split('.')[-1] if os.path.exists(basepath+"/static/img/emoij/"+id): file.save(basepath+"/static/img/emoij/"+id+"/"+ time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime(time.time()))+filename) else: os.makedirs(basepath+"/static/img/emoij/"+id) file.save(basepath+"/static/img/emoij/" + id + "/" + time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime(time.time())) + filename) return "success" @app.route('/sendemoij', methods=['POST']) def groupemoij(): global chat group = request.form.getlist("groups[]") filepath=request.form.get("message"); filepath=basepath+ '/' + filepath; if len(group) == 0: group = None rpath = filepath chat.group_file(filepath,request.form.get("message"),group=group) return json.dumps({'success': request.form.get("message")}) @app.route('/sendguangfangemoij', methods=['POST']) def groupguangfangemoij(): global chat str = request.form.get('message') src= request.form.get('src') group = request.form.getlist('groups[]') if len(group) == 0: group = None chat.group_information(str, group) return json.dumps({'success': src}) @app.route('/addemoij', methods=['POST']) def addemoij(): global chat str = request.form.get('message') filename=str.split("/"); with open(basepath+"/"+str,'rb') as f: s=f.read() with open(basepath+"/static/img/emoij/shoucang/"+ time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime(time.time()))+filename[len(filename)-1], 'wb') as f: f.write(s) return json.dumps({'success': 
"success"}) @app.route('/removeUser', methods=['POST']) def removeMember(): global chat gid = request.form.get('gid') uid = request.form.get('uid') chat.deleteMember(gid,uid) return json.dumps({'success':True}) @app.route('/logout', methods=['POST']) def logoutcommand(): global chat global loginstate global loginedstate chat.logout() loginstate = False loginedstate=False return json.dumps({'success': True}) @app.route('/RELOGIN') def tologinpage(): return render_template('login.html',async_mode=socketio.async_mode) @app.route('/addtemplate',methods=['POST']) def addtemplate(): global chat template = request.form.get('template') template = template.replace('\n', '<br/>') recordtemplate(template,chat.getmySelfName()) templategroup=readtemplate(chat.getmySelfName()) return json.dumps({'group': templategroup}) @app.route('/deltemplate',methods=['POST']) def deltemplate(): global chat template = request.form.get('template') template = template.replace('<br>', '<br/>') deletetemplate(template,chat.getmySelfName()) templategroup=readtemplate(chat.getmySelfName()) return json.dumps({'group': templategroup}) @app.route('/deleteemoij',methods=['POST']) def deleteemoij(): global chat str = request.form.get('message') os.remove(basepath+"/"+str) return json.dumps({'success': True}) @app.route('/setQR',methods=['POST']) def setqr(): global chat global qrcode qr = request.form.get('qr') qrcode=qr return json.dumps({'success': True}) @app.route('/setloginedstate',methods=['POST']) def setloginedstate(): global chat global loginedstate loginedstate = True return json.dumps({'success': True}) @app.route('/cancellogin',methods=['POST']) def cancellogin(): global chat global loginstate loginstate = False chat.logout2() return json.dumps({'success': True}) @app.route('/cancellogin2',methods=['POST']) def cancellogin2(): global chat global loginstate global loginedstate loginedstate=False loginstate = False chat.logout2() return json.dumps({'success': True}) if __name__ == '__main__': 
#app.run(debug=True) socketio.run(app=app,debug=True)<file_sep>namespace = '/test'; var socket = io.connect(location.protocol + '//' + document.domain + ':' + location.port + namespace); $(document).ready(function () { AdjustPage(); var UserInfo = getBasicInfo(); MessageSync(UserInfo.NickName); PageInit(); }) window.onresize = function() { AdjustPage(); } function PageInit() { $("#contact").hide(); $("#func").hide(); $(".menulist").find("li").each(function(){ $(this).unbind("click").bind("click",function(){ if ($(this).find("a").hasClass("menu-a")) { $(".menulist").find("li").each(function(){ if ($(this).find("a").hasClass("menu-a-active")) { $(this).find("a").removeClass("menu-a-active").addClass("menu-a"); var contentclass = $(this).find("a").attr("data-content"); $(".menu-content").find("." + contentclass).hide(); } }); $(this).find("a").removeClass("menu-a").addClass("menu-a-active"); var contentclass = $(this).find("a").attr("data-content"); $(".menu-content").find("." + contentclass).show(); } }); }); $("#config-lists").find("li").each(function(){ $(this).unbind("click").bind("click",function(){ if (!($(this).hasClass("active"))) { $("#config-lists").find("li").each(function(){ if ($(this).hasClass("active")) { $(this).removeClass("active"); var contentid = $(this).find("span").attr("data-content"); $("#" + contentid).hide(); } }); $(this).addClass("active"); var contentid = $(this).find("span").attr("data-content"); $("#" + contentid).show(); } }); }); $(".dialog-func-tab").find("a").each(function(){ $(this).unbind("click").bind("click",function(){ if (!($(this).find("span").hasClass("tab-active"))) { $(".dialog-func-tab").find("a").each(function(){ if ($(this).find("span").hasClass("tab-active")) { $(this).find("span").removeClass("tab-active"); var contentclass = $(this).attr("data-content"); $(this).parent().parent().find("." 
+ contentclass).hide(); } }); $(this).find("span").addClass("tab-active"); var contentclass = $(this).attr("data-content"); $(this).parent().parent().find("." + contentclass).show(); } }); }); $(".contact-tab").find("a").each(function(){ $(this).unbind("click").bind("click",function(){ if (!($(this).find("span").hasClass("tab-active"))) { $(".contact-tab").find("a").each(function(){ if ($(this).find("span").hasClass("tab-active")) { $(this).find("span").removeClass("tab-active"); var contentclass = $(this).attr("data-content"); $(this).parent().parent().find("." + contentclass).hide(); } }); $(this).find("span").addClass("tab-active"); var contentclass = $(this).attr("data-content"); $(this).parent().parent().find("." + contentclass).show(); } }); }); $("#send").unbind("click").bind("click",function(){ var message = $("#message-text").val(); var sendid; $("#message-text").val(""); if (message != "") { var groups = new Array(); var groupnames = new Array(); var count = 0; $("#groups-ul").find("li").each(function(index, e){ if ($(this).hasClass("active")) { var aitecount = parseInt($(this).attr("data-aite")); var aites = ""; for (var i = 0; i < aitecount; i++) { aites += "@"; } sendid = $(this).find(".group-name").attr("data-content"); groups[count] = aites + $(this).find(".group-name").attr("data-content"); groupnames[count++] = $(this).find(".group-name").attr("title"); } return count; }); $.ajax({ type: "post", async: false, url: "/info", data:{ groups:groups, message:message }, dataType: "json", success: function (data) { var today = new Date(); var sendtime = today.toLocaleTimeString(); var pic = $("#mypic").attr("src"); var li = '<li class="message"><img class="pic-right" src="'+ pic +'"/>' + '<span class="message-box-right"><span class="message-user">' + '<p class="NickName-right"><span>'+ sendtime +'</span><span> 我</span></p>' + '</span><span class="message-content"><i class="angle-right"></i>' + '<span class="text-right">'+ message 
+'</span></span></span></li>'; $("#" + sendid).append(li); } }); }else{ alert("消息不能为空"); } }); $("#multisend").unbind("click").bind("click",function(){ var message = $("#multimessage-text").val(); var sendid; $("#multimessage-text").val(""); if (message != "") { var groups = new Array(); var groupnames = new Array(); var count = 0; $("#multigroups-ul").find("li").each(function(index, e){ if ($(this).find("i").hasClass("fa-check-square")) { var aitecount = parseInt($(this).attr("data-aite")); var aites = ""; for (var i = 0; i < aitecount; i++) { aites += "@"; } sendid = $(this).find(".group-name").attr("data-content"); groups[count] = aites + $(this).find(".group-name").attr("data-content"); //alert(groups[count]); groupnames[count++] = $(this).find(".group-name").attr("title"); } return count; }); $.ajax({ type: "post", url: "/info", data:{ groups:groups, message:message }, dataType: "json", success: function (data) { /*var today = new Date(); var sendtime = today.toLocaleTimeString(); var pic = $("#mypic").attr("src"); var li = '<li class="message"><img class="pic-right" src="'+ pic +'"/>' + '<span class="message-box-right"><span class="message-user">' + '<p class="NickName-right"><span>'+ sendtime +'</span><span> 我</span></p>' + '</span><span class="message-content"><i class="angle-right"></i>' + '<span class="text-right">'+ message +'</span></span></span></li>'; $("#" + sendid).append(li);*/ //alert("success"); } }); }else{ alert("消息不能为空"); } }); $("#choose-file").unbind("click").bind("click",function(){ $("#send-file").click(); $("#send-file").unbind("change").on("change",function(e){ var data=new FormData() data.append('image',$("#send-file")[0].files[0]); var groups = new Array(); var groupnames = new Array(); var count = 0; $("#groups-ul").find("li").each(function(index, e){ if ($(this).hasClass("active")) { var aitecount = parseInt($(this).attr("data-aite")); var aites = ""; for (var i = 0; i < aitecount; i++) { aites += "@"; } sendid = 
$(this).find(".group-name").attr("data-content"); groups[count] = aites + $(this).find(".group-name").attr("data-content"); groupnames[count++] = $(this).find(".group-name").attr("title"); } return count; }); data.append('groups',groups); $.ajax({ type: "post", url: "/picture", data:data, async: true, dataType: "json", contentType: false, processData: false, success: function (data) { var today = new Date(); var sendtime = today.toLocaleTimeString(); var pic = $("#mypic").attr("src"); var filepath = data.success.split("\\"); var length = filepath.length; var message = '<span>'+ filepath[length - 1] +'</span><a href="'+ data.success +'" download><i class="fa fa-download fa-fw"></i></a>'; var li = '<li class="message"><img class="pic-right" src="'+ pic +'"/>' + '<span class="message-box-right"><span class="message-user">' + '<p class="NickName-right"><span>'+ sendtime +'</span><span> 我</span></p>' + '</span><span class="message-content"><i class="angle-right"></i>' + '<span class="text-right">'+ message +'</span></span></span></li>'; $("#" + sendid).append(li); } }); }); }); $("#multichoose-file").unbind("click").bind("click",function(){ $("#multisend-file").click(); $("#multisend-file").unbind("change").on("change",function(e){ var data=new FormData() data.append('image',$("#multisend-file")[0].files[0]); var groups = new Array(); var groupnames = new Array(); var count = 0; $("#multigroups-ul").find("li").each(function(index, e){ if ($(this).find("i").hasClass("fa-check-square")) { var aitecount = parseInt($(this).attr("data-aite")); var aites = ""; for (var i = 0; i < aitecount; i++) { aites += "@"; } sendid = $(this).find(".group-name").attr("data-content"); groups[count] = aites + $(this).find(".group-name").attr("data-content"); groupnames[count++] = $(this).find(".group-name").attr("title"); } return count; }); data.append('groups',groups); $.ajax({ type: "post", url: "/picture", data:data, async: true, dataType: "json", contentType: false, processData: false, 
success: function (data) { } }); }); }); $("#setdefaultgroups").unbind("click").bind("click",function(){ $("#modal-loading").show(); var groups = new Array(); var groupnames = new Array(); var count = 0; $("#allgroups-ul").find("li").each(function(index, e){ if ($(this).find("i").hasClass("fa-check-square")) { var aitecount = parseInt($(this).attr("data-aite")); var aites = ""; for (var i = 0; i < aitecount; i++) { aites += "@"; } sendid = $(this).find(".group-name").attr("data-content"); groups[count] = aites + $(this).find(".group-name").attr("data-content"); groupnames[count++] = $(this).find(".group-name").attr("title"); } return count; }); $.ajax({ type: "post", url: "/setDefault", data:{ groups:groups, groupnames:groupnames }, dataType: "json", success: function (data) { getBasicInfo(); PageInit(); $("#modal-loading").hide(); } }); }); $("#deletekewords").unbind("click").bind("click",function(){ if ($(this).html() == "删除") { $(this).next().find(".selected-group").each(function(){ var del = '<i class="delete-selected-group fa fa-close" title="删除" onclick="deletekeyword(this)"></i>'; $(this).append(del); }); $(this).html("取消"); }else{ $(this).next().find(".selected-group").each(function(){ $(this).find("i").remove(); }); $(this).html("删除"); } }); $("#savekeyword").unbind("click").bind("click",function(){ $("#modal-loading").show(); var keyword = $("#newkeyword").val(); $.ajax({ type: "post", url: "/addAddkey", async: false, data:{ key:keyword }, dataType: "json", success: function (data) { var keywordsarea = $(".keywords"); var p = '<p class="selected-group"><span>'+ keyword +'</span></p>'; keywordsarea.append(p); $("#newkeyword").val(""); $("#modal-loading").hide(); } }); }); $('#message-text').keydown(function(event){ var keynum = (event.keyCode ? 
event.keyCode : event.which); if(keynum == '13'){ $("#send").click(); return false; } }); $("body").bind("click").bind("click",function(){ $(".template-area").hide(); $(".multitemplate-area").hide(); }); $("#word-template").unbind("click").bind("click",function(e){ e.stopPropagation(); $("#emoijdiv").hide(); $(".template-area").show(); }); $("#multiword-template").unbind("click").bind("click",function(e){ e.stopPropagation(); $("#emoijdivgroup").hide(); $(".multitemplate-area").show(); }); $("#messages-history").unbind("click").bind("click",function(){ var gname = $("#dialog-name").html(); window.open("/logg/" + gname); }); $("#modal-pic").unbind("click").bind("click",function(){ $(this).hide(); }); $("#larger").unbind("click").bind("click",function(e){ e.stopPropagation(); var picwidth = $(this).parents(".showfunc").next().find("img").width(); $(this).parents(".showfunc").next().find("img").width(picwidth + 50); }); $("#smaller").unbind("click").bind("click",function(e){ e.stopPropagation(); var picwidth = $(this).parents(".showfunc").next().find("img").width(); $(this).parents(".showfunc").next().find("img").width(picwidth - 50); }); $("#savetemplate").unbind("click").bind("click",function(){ $("#modal-loading").show(); var template = $("#newtemplate").val(); $.ajax({ type: "post", url: "/addtemplate", async: false, data:{ template:template }, dataType: "json", success: function (data) { $("#newtemplate").val(""); var templates = data.group; var templatearea = $(".template-lists"); templatearea.each(function(){ $(this).html(""); for (var i = 0; i < templates.length; i++) { var li = '<li class="single-template" onclick="choosetemplate(this)">'+ templates[i] +'</li>'; $(this).append(li); } }); var templatemanagelist = $("#template-manage-list"); templatemanagelist.html(""); for (var i = 0; i < templates.length; i++) { var li = '<li class="single-allsee-list"><span class="templatemessage-content">'+ templates[i] +'</span><i onclick="deletetemplate(this)" class="fa 
fa-close closetemplate"></i></li>'; templatemanagelist.append(li); } $("#modal-loading").hide(); } }); }); $("#static-groups").find("li").each(function(){ $(this).unbind("click").bind("click",function(){ var gname = $(this).find(".group-name").attr("title"); $("#statistics-groupname").html(gname); if ($("#statistics-groupname").next().length > 0) { $("#statistics-groupname").next().remove(); } var button = '<div><button type="button" class="form-btn pull-right" onclick="getHistoryMessage(this)">历史消息</button></div>'; $("#statistics-groupname").after(button); var StatisticsTable = $("#statistics-table"); StatisticsTable.html(""); var table = '<thead><tr><th>用户昵称</th><th><span class="sortedth">周活跃度</span></th>' + '<th><span class="sortedth">月活跃度</span></th>' + '<th><span class="sortedth">历史活跃度</span></th></tr></thead><tbody>' + '<tr><td colspan="4" style="text-align:center;font-size:30px;"><i class="fa fa-spinner fa-spin fa-fw"></i></td></tr></tbody>'; StatisticsTable.append(table); $("#static-groups").find("li").each(function(){ if ($(this).hasClass("active")) { $(this).removeClass("active"); } }); $(this).addClass("active"); var gname = $(this).find(".group-name").attr("title"); $.ajax({ type: "post", url: "/activite", async: true, data:{ gname:gname }, dataType: "json", success: function (data) { var StatisticsTable = $("#statistics-table"); StatisticsTable.html(""); var table = '<thead><tr><th>用户昵称</th><th><span class="sortedth">周活跃度</span></th>' + '<th><span class="sortedth">月活跃度</span></th>' + '<th><span class="sortedth">历史活跃度</span></th></tr></thead><tbody>'; for (var i = 0; i < data.length; i++) { var tr = '<tr><td title="'+ data[i].name +'">'+ data[i].name +'</td><td>'+ data[i].weekcount +'</td><td>'+ data[i].monthcount +'</td><td>'+ data[i].allcount +'</td></tr>'; table += tr; } table += '</tbody>'; StatisticsTable.append(table); SortTable(StatisticsTable); } }); }); }); $(".search-text").bind("input propertychange",function(){ var searchstr = $(this).val(); 
if (searchstr != "") { $(this).parent().next().find("li").each(function(){ var gname = $(this).find(".group-name").attr("title"); if (gname.search(searchstr) >= 0) { $(this).show(); }else{ $(this).hide(); } }); }else{ $(this).parent().next().find("li").each(function(){ $(this).show(); }); } }); $("#mypic").unbind("click").bind("click",function(e){ e.stopPropagation(); var logoutspan = '<div class="logoutdiv"><button class="btn" type="button" onclick="Logout()">注销</button></div>'; $(this).parent().append(logoutspan); var top = $(this).offset().top; var left = $(this).offset().left + 50; $(this).parent().find(".logoutdiv").offset({top : top, left: left}); }); $(document).unbind('click').bind('click',function() { $(".logoutdiv").remove(); }); socket.on('logout', function (msg) { window.location.href="RELOGIN"; $.ajax({ type: "post", url: "/cancellogin2", async: true, dataType: "json", success: function (data) { } }); }); $(".menu-content").show(); $(".menu").show(); } function AdjustPage(argument) { windowheight = $(window).height(); windowwidth = $(window).width(); actualheight = windowheight > 700 ? windowheight : 700; actualwidth = windowwidth > 1200 ? windowwidth : 1200; $(".content").height(actualheight); $(".menu").height(actualheight); $(".menu-content").height(actualheight); $(".menu-content").width(actualwidth - 60); $(".scroll-content").height(actualheight - 53); $(".loadarea").height(actualheight); $(".loadarea").width(actualwidth - 60); } function MessageSync(UserName) { socket.on('msg', function (msg) { var dialogs = $("#dialogs"); var groupname = msg["gname"] == null ? "" : msg["gname"]; var picsrc = msg["pic"] == "" ? "static/img/nonepic.png" : ("data:image/jpg;base64," + msg["pic"]); var uid = msg["uid"]; var gid = msg["gid"]; if (groupname != "") { var c = "@"; var regex = new RegExp(c, 'g'); var result = msg["gid"].match(regex); var count = !result ? 
0 : result.length; var groupid = msg["gid"].substring(count , msg["gid"].length); var GroupList = $("#groups-ul"); var flag = 0; $("#none-dialog").hide(); GroupList.find("li").each(function(index,e){ contentID = $(this).find(".group-name").attr("data-content"); if (groupid == contentID) { flag = 1; var name = msg["name"]; var sendtime = " " + msg["time"].split(" ")[1] + " "; switch(msg["type"]){ case "Text": var message = msg["info"].replace(/\n/g, "<br>"); break; case "Video": var message = '<video class="message-video" src="'+ msg["info"] +'" controls="controls">your browser does not support the video tag</video>'; break; case "Picture": var message = '<img src="'+ msg["info"] +'" class="emoji-pic" onclick="ShowOptions(this)"><div class="picture-func"><i class="fa fa-search-plus" title="查看" onclick="ShowPic(this)"></i><i onclick="addemoij(this)" class="fa fa-plus" title="添加表情"></i></div>'; break; case "Recording": var message = '<audio src="'+ msg["info"] +'" controls="controls">your browser does not support the audio tag</audio>'; break; case "Sharing": var u = msg["url"] == "" ? "javascript:;" : msg["url"]; var info = msg["info"] == "" ? 
"不支持预览,请在手机上查看。" : msg["info"]; var message = '<a class="tab-a" href="'+ u +'" target="_blank">链接:'+ info +'</a>'; break; case "Card": var message = '名片:' + msg["info"]; break; case "Note": var message = msg["info"]; break; case "Attachment": var filepath = msg["info"].split("\\"); var length = filepath.length; var message = '<span>'+ filepath[length - 1] +'</span><a href="'+ msg["info"] +'" download><i class="fa fa-download fa-fw"></i></a>'; break; default: var message = "不支持预览,请在手机上查看。"; } if (msg.type == "Note") { var li = '<li class="system-message"><span class="system-info">'+ message +'</span></li>'; }else{ if (name == UserName) { var li = '<li class="message"><img class="pic-right" src="'+ picsrc +'"/>' + '<span class="message-box-right"><span class="message-user">' + '<p class="NickName-right"><span>'+ sendtime +'</span><span>我</span></p>' + '</span><span class="message-content"><i class="angle-right"></i>' + '<span class="text-right">'+ message +'</span></span></span></li>'; }else{ var li = '<li class="message unread"><img class="pic-left" src="'+ picsrc +'"/>' + '<span class="message-box-left"><span class="message-user">' + '<p class="NickName-left"><span>'+ name +'</span><span>'+ sendtime +'</span></p>' + '</span><span class="message-content"><i class="angle-left"></i>' + '<span class="text-left">'+ message +'</span></span></span></li>'; var addType = msg["addType"]; if (addType != undefined) { switch(addType){ case "phone": var type = "电话消息"; var warningmessage = '<li class="warning-message"><p class="warning-name">'+ type +'</p>' + '<p class="warning-content">'+ '来自:' + '<span class="highlight">'+ groupname +'</span>' + ',' //+ '<a class="kickmember" href="javascript:;" onclick="kick(this)">'+ name + name + '<input type="hidden" value="'+ uid +'"><input type="hidden" value="'+ gid +'"></a>' + ',' + sendtime + ',' + message + '</p>' + '<i class="fa fa-close close" onclick="deletewarning(this)"></i></li>'; break; case "link": var type = "链接消息"; var 
warningmessage = '<li class="warning-message"><p class="warning-name">'+ type +'</p>' + '<p class="warning-content">'+ '来自:' + '<span class="highlight">'+ groupname +'</span>' + ',' //+ '<a class="kickmember" href="javascript:;" onclick="kick(this)">'+ name + name + '<input type="hidden" value="'+ uid +'"><input type="hidden" value="'+ gid +'"></a>' + ',' + sendtime + ',' + message + '</p>' + '<i class="fa fa-close close" onclick="deletewarning(this)"></i></li>'; break; case "sharing": var type = "分享消息"; var warningmessage = '<li class="warning-message"><p class="warning-name">'+ type +'</p>' + '<p class="warning-content">'+ '来自:' + '<span class="highlight">'+ groupname +'</span>' + ',' //+ '<a class="kickmember" href="javascript:;" onclick="kick(this)">'+ name + name + '<input type="hidden" value="'+ uid +'"><input type="hidden" value="'+ gid +'"></a>' + ',' + sendtime + ',' + message + '</p>' + '<i class="fa fa-close close" onclick="deletewarning(this)"></i></li>'; break; case "keyword": var type = "关键词消息:" + msg["addkeyword"]; var warningmessage = '<li class="warning-message"><p class="warning-name">'+ type +'</p>' + '<p class="warning-content">'+ '来自:' + '<span class="highlight">'+ groupname +'</span>' + ',' //+ '<a class="kickmember" href="javascript:;" onclick="kick(this)">'+ name + name + '<input type="hidden" value="'+ uid +'"><input type="hidden" value="'+ gid +'"></a>' + ',' + sendtime + ',' + message + '</p>' + '<i class="fa fa-close close" onclick="deletewarning(this)"></i></li>'; break; case "card": var type = "名片消息:"; var warningmessage = '<li class="warning-message"><p class="warning-name">'+ type +'</p>' + '<p class="warning-content">'+ '来自:' + '<span class="highlight">'+ groupname +'</span>' + ',' //+ '<a class="kickmember" href="javascript:;" onclick="kick(this)">'+ name + name + '<input type="hidden" value="'+ uid +'"><input type="hidden" value="'+ gid +'"></a>' + ',' + sendtime + ',' + message + '</p>' + '<i class="fa fa-close close" 
onclick="deletewarning(this)"></i></li>'; break; case "Viedo": var type = "视频消息:"; var warningmessage = '<li class="warning-message"><p class="warning-name">'+ type +'</p>' + '<p class="warning-content">'+ '来自:' + '<span class="highlight">'+ groupname +'</span>' + ',' //+ '<a class="kickmember" href="javascript:;" onclick="kick(this)">'+ name + name + '<input type="hidden" value="'+ uid +'"><input type="hidden" value="'+ gid +'"></a>' + ',' + sendtime + ',' + '视频' + '</p>' + '<i class="fa fa-close close" onclick="deletewarning(this)"></i></li>'; break; default: } $(".warningmessage-content").append(warningmessage); } } } $("#" + contentID).append(li); RefreshDialogList(); } return flag; }); if (flag == 0) { var name = msg["name"]; var sendtime = " " + msg.time.split(" ")[1]; switch(msg.type){ case "Text": var message = msg["info"].replace(/\n/g, "<br>"); break; case "Video": var message = '<video class="message-video" src="'+ msg["info"] +'" controls="controls">your browser does not support the video tag</video>'; break; case "Picture": var message = '<img src="'+ msg["info"] +'" class="emoji-pic" onclick="ShowOptions(this)"><div class="picture-func"><i class="fa fa-search-plus" title="查看" onclick="ShowPic(this)"></i><i onclick="addemoij(this)" class="fa fa-plus" title="添加表情"></i></div>'; break; case "Recording": var message = '<audio src="'+ msg["info"] +'" controls="controls">your browser does not support the audio tag</audio>'; break; case "Sharing": var u = msg["url"] == "" ? "javascript:;" : msg["url"]; var info = msg["info"] == "" ? 
"不支持预览,请在手机上查看。" : msg["info"]; var message = '<a class="tab-a" href="'+ u +'" target="_blank">链接:'+ info +'</a>'; break; case "Card": var message = '名片:' + msg["info"]; break; case "Note": var message = msg["info"]; break; case "Attachment": var filepath = msg["info"].split("\\"); var length = filepath.length; var message = '<span>'+ filepath[length - 1] +'</span><a href="'+ msg["info"] +'" download><i class="fa fa-download fa-fw"></i></a>'; break; default: var message = "不支持预览,请在手机上查看。"; } if (msg.type == "Note") { var li = '<li class="system-message"><span class="system-info">'+ message +'</span></li>'; }else{ if (name == UserName) { var li = '<li class="message"><img class="pic-right" src="'+ picsrc +'"/>' + '<span class="message-box-right"><span class="message-user">' + '<p class="NickName-right"><span>'+ sendtime +'</span><span>我</span></p>' + '</span><span class="message-content"><i class="angle-right"></i>' + '<span class="text-right">'+ message +'</span></span></span></li>'; }else{ var li = '<li class="message unread"><img class="pic-left" src="'+ picsrc +'"/>' + '<span class="message-box-left"><span class="message-user">' + '<p class="NickName-left"><span>'+ name +'</span><span>'+ sendtime +'</span></p>' + '</span><span class="message-content"><i class="angle-left"></i>' + '<span class="text-left">'+ message +'</span></span></span></li>'; var addType = msg["addType"]; if (addType != undefined) { switch(addType){ case "phone": var type = "电话消息"; var warningmessage = '<li class="warning-message"><p class="warning-name">'+ type +'</p>' + '<p class="warning-content">'+ '来自:' + '<span class="highlight">'+ groupname +'</span>' + ',' //+ '<a class="kickmember" href="javascript:;" onclick="kick(this)">'+ name + name + '<input type="hidden" value="'+ uid +'"><input type="hidden" value="'+ gid +'"></a>' + ',' + sendtime + ',' + message + '</p>' + '<i class="fa fa-close close" onclick="deletewarning(this)"></i></li>'; break; case "link": var type = "链接消息"; var 
warningmessage = '<li class="warning-message"><p class="warning-name">'+ type +'</p>' + '<p class="warning-content">'+ '来自:' + '<span class="highlight">'+ groupname +'</span>' + ',' //+ '<a class="kickmember" href="javascript:;" onclick="kick(this)">'+ name + name + '<input type="hidden" value="'+ uid +'"><input type="hidden" value="'+ gid +'"></a>' + ',' + sendtime + ',' + message + '</p>' + '<i class="fa fa-close close" onclick="deletewarning(this)"></i></li>'; break; case "sharing": var type = "分享消息"; var warningmessage = '<li class="warning-message"><p class="warning-name">'+ type +'</p>' + '<p class="warning-content">'+ '来自:' + '<span class="highlight">'+ groupname +'</span>' + ',' //+ '<a class="kickmember" href="javascript:;" onclick="kick(this)">'+ name + name + '<input type="hidden" value="'+ uid +'"><input type="hidden" value="'+ gid +'"></a>' + ',' + sendtime + ',' + message + '</p>' + '<i class="fa fa-close close" onclick="deletewarning(this)"></i></li>'; break; case "keyword": var type = "关键词消息:" + msg["addkeyword"]; var warningmessage = '<li class="warning-message"><p class="warning-name">'+ type +'</p>' + '<p class="warning-content">'+ '来自:' + '<span class="highlight">'+ groupname +'</span>' + ',' //+ '<a class="kickmember" href="javascript:;" onclick="kick(this)">'+ name + name + '<input type="hidden" value="'+ uid +'"><input type="hidden" value="'+ gid +'"></a>' + ',' + sendtime + ',' + message + '</p>' + '<i class="fa fa-close close" onclick="deletewarning(this)"></i></li>'; break; case "card": var type = "名片消息:"; var warningmessage = '<li class="warning-message"><p class="warning-name">'+ type +'</p>' + '<p class="warning-content">'+ '来自:' + '<span class="highlight">'+ groupname +'</span>' + ',' //+ '<a class="kickmember" href="javascript:;" onclick="kick(this)">'+ name + name + '<input type="hidden" value="'+ uid +'"><input type="hidden" value="'+ gid +'"></a>' + ',' + sendtime + ',' + message + '</p>' + '<i class="fa fa-close close" 
onclick="deletewarning(this)"></i></li>'; break; case "Viedo": var type = "视频消息:"; var warningmessage = '<li class="warning-message"><p class="warning-name">'+ type +'</p>' + '<p class="warning-content">'+ '来自:' + '<span class="highlight">'+ groupname +'</span>' + ',' //+ '<a class="kickmember" href="javascript:;" onclick="kick(this)">'+ name + name + '<input type="hidden" value="'+ uid +'"><input type="hidden" value="'+ gid +'"></a>' + ',' + sendtime + ',' + '视频' + '</p>' + '<i class="fa fa-close close" onclick="deletewarning(this)"></i></li>'; break; default: } $(".warningmessage-content").append(warningmessage); } } } var grouppic = '<img class="message-pic" src="data:image/jpg;base64,'+ msg["grouppic"] +'"/>' var groupli = '<li class="single-message" data-aite="'+ count +'"><span class="group-pic">'+ grouppic +'</span>' + '<span class="group-name" title="'+ groupname +'" data-content="'+ groupid +'">'+ groupname +'</span></li>'; var dialogul = '<ul id="'+ groupid +'" class="conversation-area">'+ li +'</ul>'; GroupList.append(groupli); dialogs.append(dialogul); GroupList.find("li").each(function(index,e){ $(this).unbind("click").bind("click",function(){ $(".contact-menu").css({'border-right':'0px'}); $("#contact").show(); $("#func").show(); $("#dialog-name").html($(this).find(".group-name").attr("title")); GroupList.find("li").each(function(index,e){ if ($(this).hasClass("active")) { $(this).removeClass("active"); var ulid = $(this).find(".group-name").attr("data-content"); dialogs.find("ul").each(function(index,e){ if ($(this).hasClass("current-area")) { $(this).removeClass("current-area"); } }); } }); var ulid = $(this).find(".group-name").attr("data-content"); $("#" + ulid).addClass("current-area"); $(this).addClass("active"); $(this).find("li").each(function(index,e){ if ($(this).hasClass("unread")) { $(this).removeClass("unread"); } }); RefreshDialogList(); }); }); RefreshDialogList(); } } //$("#msgul").append("<li>" + msg.info + " " + msg.name + " " + 
msg.gname + "</li>"); })//div.scrollTop = div.scrollHeight; } function RefreshDialogList() {// var GroupList = $("#groups-ul"); var total = 0; GroupList.find("li").each(function(index,e){ if ($(this).find(".unreadnum").length > 0) { $(this).find(".unreadnum").remove(); } if ($(this).hasClass("active") && !($(".dialog-content").is(":hidden"))) { var ulid = $(this).find(".group-name").attr("data-content"); $("#dialogs").scrollTop($("#dialogs")[0].scrollHeight); $("#" + ulid).find("li").each(function(index,e){ if ($(this).hasClass("unread")) { $(this).removeClass("unread"); } }); }else{ var ulid = $(this).find(".group-name").attr("data-content"); $("#dialogs").scrollTop($("#dialogs")[0].scrollHeight); var count = 0; $("#" + ulid).find("li").each(function(index,e){ if ($(this).hasClass("unread")) { count ++; } return count; }); if (count > 0) { total += count; var showcount = count > 99 ? "…" : count; var span = '<span class="unreadnum">'+ showcount +'</span>'; $(this).append(span); } } return total; }); if ($("#at").find(".unreadtotal").length > 0) { $("#at").find(".unreadtotal").remove(); } if (total > 0) { var showtotal = total > 99 ? 
"…" : total; var totalspan = '<span class="unreadtotal">'+ showtotal +'</span>'; $("#at").append(totalspan); } } function getBasicInfo() { var UserInfo; $.ajax({ type: "post", url: "/update", async: false, dataType: "json", success: function (data) { if(data.refresh=="true") { UserInfo = data.user; userpho = data.userpic; var addAddkey = data.addKey; $("#mypic").attr("src","data:image/jpg;base64," + userpho); //关键词 var keywordsarea = $(".keywords"); keywordsarea.html(""); for (var i = 0; i < addAddkey.length; i++) { var p = '<p class="selected-group"><span>'+ addAddkey[i] +'</span></p>'; keywordsarea.append(p); } //模板 var templates = data.templategroup; var templatearea = $(".template-lists"); templatearea.each(function(){ $(this).html(""); if (templates.length > 0) { for (var i = 0; i < templates.length; i++) { var li = '<li class="single-template" onclick="choosetemplate(this)">'+ templates[i] +'</li>'; $(this).append(li); } }else{ var li = '<li class="single-template">'+ '暂无消息模板' +'</li>'; $(this).append(li); } }); var templatemanagelist = $("#template-manage-list"); templatemanagelist.html(""); for (var i = 0; i < templates.length; i++) { var li = '<li class="single-allsee-list"><span class="templatemessage-content">'+ templates[i] +'</span><i onclick="deletetemplate(this)" class="fa fa-close closetemplate"></i></li>'; templatemanagelist.append(li); } //群组 var Allgroups = $(".allgroups-content"); var Allcontact = $(".allcontact-content"); //var Manager = $(".manager-content"); var StaticGroups = $("#static-groups"); Allgroups.html(""); Allcontact.html(""); //Manager.html(""); StaticGroups.html(""); $(".allgroups-contacts").find(".selected-group").each(function(){ $(this).remove(); }); var c = "@"; var regex = new RegExp(c, 'g'); for (var i = 0; i < data.groups.length; i++) { if (data.groups[i].name != "") { var result = data.groups[i].id.match(regex); var count = !result ? 
0 : result.length; var groupid = data.groups[i].id.substring(count , data.groups[i].id.length); var need = (data.groups[i].need == true) ? 'fa fa-check-square' : 'fa fa-square-o'; var grouppic = '<img class="message-pic" src="data:image/jpg;base64,'+ data.groups[i].grouppic +'"/>'; var groupli = '<li class="single-message" data-aite="'+ count +'"><span class="group-pic">'+ grouppic +'</span>' + '<span class="group-name" title="'+ data.groups[i].name +'" data-content="'+ groupid +'">'+ data.groups[i].name +'</span>' + '<span class="choosebox pull-right"><i class="'+ need +'"></i></span></li>'; Allgroups.append(groupli); if (need == 'fa fa-check-square') { var groupli = '<li class="single-message" data-aite="'+ count +'"><span class="group-pic">'+ grouppic +'</span>' + '<span class="group-name" title="'+ data.groups[i].name +'" data-content="'+ groupid +'">'+ data.groups[i].name +'</span></li>'; //Manager.append(groupli); StaticGroups.append(groupli); var groupp = '<p class="selected-group"><span title="'+ data.groups[i].name +'">'+ data.groups[i].name +' </span><i class="delete-selected-group fa fa-close" title="删除"></i></p>'; $(".allgroups-contacts").append(groupp); } } } DeleteSelectedGroup(); $(".choosebox").unbind("click").bind("click",function(){ var name = $(this).prev().attr("title"); var allgroups = $(this).parents("ul").parent().parent().next().find(".allgroups-contacts"); if ($(this).find("i").hasClass("fa-check-square")) { $(this).find("i").removeClass("fa-check-square"); $(this).find("i").addClass("fa-square-o"); allgroups.find(".selected-group").each(function(){ if ($(this).find("span").attr("title") == name) { $(this).remove(); } }); }else{ $(this).find("i").removeClass("fa-square-o"); $(this).find("i").addClass("fa-check-square"); var groupp = '<p class="selected-group"><span title="'+ name +'">'+ name +' </span><i class="fa fa-close" title="删除"></i></p>'; allgroups.append(groupp); } DeleteSelectedGroup(); }); }else{ window.location.href="RELOGIN"; 
$.ajax({ type: "post", url: "/cancellogin", async: true, dataType: "json", success: function (data) { } }); } } }); return UserInfo; } function ShowOptions(e) { if ($(e).next().css("display") == "none") { $(e).next().show(); }else{ $(e).next().hide(); } } function ShowPic(e) { var src = $(e).parent().prev().attr("src"); $("#modal-pic").find("img").attr("src",src); $("#modal-pic").find("img").css("width","800px"); $("#modal-pic").show(); } function findChoseGroup(){ $("#ChoseGroup-area").html(""); $(".allgroups-content").find("li").each(function(){ if ($(this).find("i").hasClass("fa-check-square")) { var name = $(this).find(".group-name").attr("title"); var p = '<p class="chosegroup-p">'+ name +'</p>'; $("#ChoseGroup-area").append(p); } }); } function DeleteSelectedGroup() { $(".selected-group").each(function(){ $(this).find("i").unbind("click").bind("click",function(){ var name = $(this).prev().attr("title"); $(this).parent().remove(); $(".allgroups-content").find(".group-name").each(function(){ if ($(this).attr("title") == name) { $(this).next().find("i").removeClass("fa-check-square").addClass("fa-square-o"); } }); }); }); } function addemoij(e) { var $e=$(e); var src=$e.parent().prev().attr("src"); $.ajax({ type: "post", url: "/addemoij", data:{ message:src }, dataType: "json", success: function (data) { alert("添加成功!"); } }); } function deletekeyword(e) { var keyword = new Array(); keyword[0] = $(e).prev().html(); $.ajax({ type: "post", async: false, url: "/deleteAddKey", data:{ keys:keyword }, dataType: "json", success: function (data) { alert("删除成功!"); $(e).parent().remove(); } }); } function deletewarning(e) { $(e).parent().remove(); } function kick(e,uid,gid) { var kickspan = '<div class="kickdiv">踢除此人?<button class="btn" type="button" onclick="deletemember(this)">确定</button></div>'; $(e).parent().append(kickspan); var top = $(e).offset().top + 15; var left = $(e).offset().left; $(e).parent().find(".kickdiv").offset({top : top, left: left}); } function 
deletemember(e) { var uid,gid; $(e).parent().parent().find("input").each(function(index,e){ if (index == 0) { uid = $(this).val(); } if (index == 1) { gid = $(this).val(); } }); $.ajax({ type: "post", async: true, url: "/removeUser", data:{ uid : uid, gid : gid }, dataType: "json", success: function (data) { alert("踢除成功!"); } }); $(e).parent().remove(); } function closethis(e) { $(e).parents().find(".close-area").hide(); } function choosetemplate(e){ var content = $(e).html(); content=content.replace(/<br>/g,'\r\n'); $(e).parent().parent().parent().prev().prev().find("textarea").val(content); } function deletetemplate(e){ $("#modal-loading").show(); var template=$(e).prev().html(); $.ajax({ type: "post", url: "/deltemplate", async: false, data:{ template:template }, dataType: "json", success: function (data) { var templates = data.group; var templatearea = $(".template-lists"); templatearea.each(function(){ $(this).html(""); for (var i = 0; i < templates.length; i++) { var li = '<li class="single-template" onclick="choosetemplate(this)">'+ templates[i] +'</li>'; $(this).append(li); } }); var templatemanagelist = $("#template-manage-list"); templatemanagelist.html(""); for (var i = 0; i < templates.length; i++) { var li = '<li class="single-allsee-list"><span class="templatemessage-content">'+ templates[i] +'</span><i onclick="deletetemplate(this)" class="fa fa-close closetemplate"></i></li>'; templatemanagelist.append(li); } $("#modal-loading").hide(); } }); } function SortTable(e) { var tableObject = e; var tbHead = tableObject.children('thead'); var tbHeadTh = tbHead.find('tr th'); var tbBody = tableObject.children('tbody'); var tbBodyTr = tbBody.find('tr'); tbHeadTh.each(function () { var clickIndex = tbHeadTh.index($(this)); if (clickIndex > 0) { $(this).unbind("click").bind("click",{clickIndex:clickIndex},function(e){ tbHeadTh.each(function () { if ($(this).find("i").length > 0) { $(this).find("i").remove(); } }); var icon = '<i class="fa 
fa-sort-numeric-desc"></i>'; $(this).append(icon); var trsValue = new Array(); var trsHtml = new Array(); var count = 0; var row = e.data.clickIndex; tbBodyTr.each(function(index,e) { var trValue = parseInt($(this).find('td').eq(row).html()); var trHtml = $(this).html(); trsValue[count] = trValue; trsHtml[count ++] = trHtml; }); for (var i = 0; i < trsValue.length; i++) { for (var j = 0; j < trsValue.length - 1 - i; j++) { if(trsValue[j] < trsValue[j + 1]){ var temp = trsValue[j]; var temphtml = trsHtml[j]; trsValue[j] = trsValue[j + 1]; trsHtml[j] = trsHtml[j + 1]; trsValue[j + 1] = temp; trsHtml[j + 1] = temphtml; } } } var singlenum = 10; PagingTable(tableObject,trsHtml,singlenum); $("#changepagenum").unbind("click").bind("click",{table:tableObject,trsHtml:trsHtml},function(e){ var singlenum = $("#singlenum").val(); PagingTable(e.data.table,e.data.trsHtml,singlenum); }); }); if (clickIndex == 1) { $(this).click(); } } }); } function PagingTable(table,trsHtml,singlenum) { //table.next().remove(); table.children('tbody').html(""); var pagesnum = Math.ceil(trsHtml.length / singlenum); for (var i = 0; i < trsHtml.length; i++) { var page = parseInt(i / singlenum) + 1; var pageclass = "page_" + page; if (i < singlenum) { var tr = '<tr class="'+ pageclass +'">'+ trsHtml[i] +'</tr>'; }else{ var tr = '<tr class="hide '+ pageclass +'">'+ trsHtml[i] +'</tr>'; } table.children('tbody').append(tr); } if (table.next().length == 0) { var paging = '<div class="paging-area"><span class="page-span first"><i class="fa fa-angle-double-left"></i></span>' +'<span class="page-span prev"><i class="fa fa-angle-left"></i></span><span class="page-num">' +'</span><span class="page-span next"><i class="fa fa-angle-right"></i></span>' +'<span class="page-span last"><i class="fa fa-angle-double-right"></i></span><span>共'+ trsHtml.length +'条,每页</span>' +'<input id="singlenum" class="form-num" type="text" value="'+ singlenum +'">' +'<span>条</span><button id="changepagenum" class="form-btn" 
type="button">确定</button></div>'; table.parent().append(paging); } var pages = ''; var changepageitem = '<span>共'+ trsHtml.length +'条,每页</span><input id="singlenum" class="form-num" type="text" value="'+ singlenum +'"><span>条</span>'; $(".page-num").html(""); $("#changepagenum").prev().prev().prev().remove(); $("#changepagenum").prev().prev().remove(); $("#changepagenum").prev().remove(); $("#changepagenum").before(changepageitem); for (var i = 1; i <= pagesnum; i++) { if (i == 1) { var p = '<span class="page-span current">1</span>'; }else{ var p = '<span class="page-span">'+ i +'</span>'; } pages += p; } $(".page-num").append(pages); $(".page-num").find(".page-span").each(function(){ $(this).unbind("click").bind("click",function(){ var currentnum = parseInt($(this).text()); $(this).parent().find(".page-span").each(function(){ if ($(this).hasClass("current")) { $(this).removeClass("current"); } }); $(this).addClass("current"); $(this).parents(".paging-area").prev().find("tbody").find("tr").each(function(){ if ($(this).hasClass("page_" + currentnum)) { if ($(this).hasClass("hide")) { $(this).removeClass("hide"); } }else{ if (!($(this).hasClass("hide"))) { $(this).addClass("hide"); } } }); }); }); $(".first").unbind("click").bind("click",function(){ $(this).next().next().find(".page-span").first().click(); }); $(".prev").unbind("click").bind("click",function(){ $(this).next().find(".page-span").each(function(){ if ($(this).hasClass("current")) { if ($(this).prev().length > 0) { $(this).prev().click(); return false; } } }); }); $(".next").unbind("click").bind("click",function(){ $(this).prev().find(".page-span").each(function(){ if ($(this).hasClass("current")) { if ($(this).next().length > 0) { $(this).next().click(); return false; } } }); }); $(".last").unbind("click").bind("click",function(){ $(this).prev().prev().find(".page-span").last().click(); }); } function getHistoryMessage(e) { var gname = $(e).parent().prev().html(); window.open("/logg/" + gname); } 
function Logout() { $.ajax({ type: "post", url: "/logout", async: false, dataType: "json", success: function (data) { } }); }<file_sep>import mysql.connector import database1 def recordtemplate(word,name): conn = mysql.connector.connect( host=database1.host, port=database1.port, user=database1.user, passwd=<PASSWORD>1.<PASSWORD>, db=database1.db, charset="utf8" ) try: cur1 = conn.cursor() sel="select ID from manager where Name='%s'" % name cur1.execute(sel) memberid = cur1.fetchone() ins="insert into template (message,manager_ID) values('%s','%s')" % (word,int(memberid[0])) cur1.execute(ins) conn.commit() except Exception as e: pass finally: cur1.close() conn.close() def readtemplate(name): conn = mysql.connector.connect( host=database1.host, port=database1.port, user=database1.user, passwd=<PASSWORD>, db=database1.db, charset="utf8" ) template=[] try: cur1 = conn.cursor() sel = "select ID from manager where Name='%s'" % name cur1.execute(sel) memberid = cur1.fetchone() ins="select message from template where manager_ID='%s' "% int(memberid[0]) cur1.execute(ins) results = cur1.fetchall() template = [] for r in results: badin = r[0] template.append(badin) conn.commit() except Exception as e: pass finally: cur1.close() conn.close() return template def deletetemplate(words,name): conn = mysql.connector.connect( host=database1.host, port=database1.port, user=database1.user, passwd=<PASSWORD>, db=database1.db, charset="utf8" ) try: cur1 = conn.cursor() sel = "select ID from manager where Name='%s'" % name cur1.execute(sel) memberid = cur1.fetchone() ins="delete from template where message='%s' and manager_ID ='%s'"% (words,int(memberid[0])) cur1.execute(ins) conn.commit() except Exception as e: pass finally: cur1.close() conn.close()<file_sep>import mysql.connector import database1 # def recordbadinfo(word): conn = mysql.connector.connect( host=database1.host, port=database1.port, user=database1.user, passwd=<PASSWORD>, db=database1.db, charset="utf8", buffered = True ) 
try: cur1 = conn.cursor() ins="insert into badinfo (Content) values('%s')" % (word) cur1.execute(ins) conn.commit() except Exception as e: pass finally: cur1.close() conn.close() def readbadinfo(): conn = mysql.connector.connect( host=database1.host, port=database1.port, user=database1.user, passwd=<PASSWORD>, db=database1.db, charset="utf8" ) badinfo=[] try: cur1 = conn.cursor() ins="select Content from badinfo " cur1.execute(ins) results = cur1.fetchall() badinfo = [] for r in results: badin = r[0] badinfo.append(badin) conn.commit() except Exception as e: pass finally: cur1.close() conn.close() return badinfo def deletebadinfo(words): conn = mysql.connector.connect( host=database1.host, port=database1.port, user=database1.user, passwd=<PASSWORD>, db=database1.db, charset="utf8" ) try: cur1 = conn.cursor() ins="delete from badinfo where content='%s'"%words cur1.execute(ins) conn.commit() except Exception as e: pass finally: cur1.close() conn.close()<file_sep>/* */ var face = { '[微笑]': 'weixiao.gif', '[撇嘴]': 'pizui.gif', '[色]': 'se.gif', '[发呆]': 'fadai.gif', '[得意]': 'deyi.gif', '[流泪]': 'liulei.gif', '[害羞]': 'haixiu.gif', '[闭嘴]': 'bizui.gif', '[睡觉]': 'shuijiao.gif', '[大哭]': 'daku.gif', '[尴尬]': 'gangga.gif', '[发怒]': 'danu.gif', '[调皮]': 'tiaopi.gif', '[呲牙]': 'ciya.gif', '[惊讶]': 'jingya.gif', '[难过]': 'nanguo.gif', '[酷]': 'ku.gif', '[冷汗]': 'lenghan.gif', '[抓狂]': 'zhuakuang.gif', '[吐]': 'tu.gif', '[偷笑]': 'touxiao.gif', '[可爱]': 'keai.gif', '[白眼]': 'baiyan.gif', '[傲慢]': 'aoman.gif', '[饥饿]': 'er.gif', '[困]': 'kun.gif', '[惊恐]': 'jingkong.gif', '[流汗]': 'liuhan.gif', '[憨笑]': 'haha.gif', '[悠闲]': 'dabing.gif', '[奋斗]': 'fendou.gif', '[咒骂]': 'ma.gif', '[疑问]': 'wen.gif', '[嘘]': 'xu.gif', '[晕]': 'yun.gif', '[折磨]': 'zhemo.gif', '[衰]': 'shuai.gif', '[骷髅]': 'kulou.gif', '[敲打]': 'da.gif', '[再见]': 'zaijian.gif', '[擦汗]': 'cahan.gif', '[挖鼻]': 'wabi.gif', '[鼓掌]': 'guzhang.gif', '[糗大了]': 'qioudale.gif', '[坏笑]': 'huaixiao.gif', '[左哼哼]': 'zuohengheng.gif', '[右哼哼]': 'youhengheng.gif', '[哈欠]': 
'haqian.gif', '[鄙视]': 'bishi.gif', '[委屈]': 'weiqu.gif', '[哭了]': 'ku.gif', '[快哭了]': 'kuaikule.gif', '[阴险]': 'yinxian.gif', '[亲亲]': 'qinqin.gif', '[亲吻]': 'kiss.gif', '[吓]': 'xia.gif', '[可怜]': 'kelian.gif', '[菜刀]': 'caidao.gif', '[西瓜]': 'xigua.gif', '[啤酒]': 'pijiu.gif', '[篮球]': 'lanqiu.gif', '[乒乓]': 'pingpang.gif', '[咖啡]': 'kafei.gif', '[饭]': 'fan.gif', '[猪头]': 'zhutou.gif', '[玫瑰]': 'hua.gif', '[凋谢]': 'diaoxie.gif', '[爱心]': 'love.gif', '[心碎]': 'xinsui.gif', '[蛋糕]': 'dangao.gif', '[闪电]': 'shandian.gif', '[地雷]': 'zhadan.gif', '[刀]': 'dao.gif', '[足球]': 'qiu.gif', '[虫]': 'chong.gif', '[便便]': 'dabian.gif', '[月亮]': 'yueliang.gif', '[太阳]': 'taiyang.gif', '[礼物]': 'liwu.gif', '[拥抱]': 'yongbao.gif', '[强]': 'qiang.gif', '[弱]': 'ruo.gif', '[握手]': 'woshou.gif', '[胜利]': 'shengli.gif', '[佩服]': 'peifu.gif', '[勾引]': 'gouyin.gif', '[拳头]': 'quantou.gif', '[差劲]': 'chajin.gif', '[干杯]': 'cheer.gif', '[NO]': 'no.gif', '[OK]': 'ok.gif', '[给力]': 'geili.gif', '[飞吻]': 'feiwen.gif', '[跳跳]': 'tiao.gif', '[发抖]': 'fadou.gif', '[怄火]': 'dajiao.gif', '[转圈]': 'zhuanquan.gif', '[磕头]': 'ketou.gif', '[回头]': 'huitou.gif', '[跳绳]': 'tiaosheng.gif', '[挥手]': 'huishou.gif', '[激动]': 'jidong.gif', '[街舞]': 'tiaowu.gif', '[献吻]': 'xianwen.gif', '[左太极]': 'youtaiji.gif', '[右太极]': 'zuotaiji.gif', '[足球]':'zuqiu.gif' }<file_sep>$(document).ready(function () { $(".content").height($(window).height() - 10); $(".content").width($(window).width() * 0.85); $(".menu").height($(window).height() - 9); $(".submenu").height($(window).height() - 9); $(".contact-list").height($(window).height() - 97); $(".thirdmenu").height($(window).height() - 9); $(".content-menu").height($(window).height() - 9); $(".extramenu").height($(window).height() - 9); $(".dialogue-send").height($(window).height() * 0.35 - 72); $(".send-area").height($(".dialogue-send").height() - 68); $(".keyword-send").height($(window).height() * 0.35 - 60); $(".thirdmenu").width($(".content").width() - $(".menu").width() - $(".submenu").width() - 254); 
$(".content-menu").width($(".content").width() - $(".menu").width() - $(".submenu").width() - 4); //$(".modal").show(); }) $(function(){ getBasicInfo("/getQR"); $(".menu-list").each(function(index,e){ $(this).unbind("click").bind("click",{index:index},function(e){ $(".menu-list").each(function(index,e){ if ($(this).find("a").hasClass("menu-a-active")) { $(this).find("a").removeClass("menu-a-active").addClass("menu-a"); } }); $(this).find("a").removeClass("menu-a").addClass("menu-a-active"); $(".groupsend").hide(); $(".config").hide(); $(".collection").hide(); switch(e.data.index){ case 0: $(".groupsend").show(); break; case 1: $(".config").show(); break; case 2: $(".collection").show(); break; } }); }); $("#send").unbind("click").bind("click",function(){ var message = $("#message-text").val(); $("#message-text").val(""); if (message != "") { var groups = new Array(); var groupnames = new Array(); var count = 0; $("#groups-ul").find("li").each(function(index, e){ if ($(this).find("i").hasClass("fa-check-square")) { groups[count] = $(this).find("span:first").attr("id"); groupnames[count++] = $(this).find("span:first").attr("title"); } return count; }); $.ajax({ type: "post", url: "/info", data:{ groups:groups, message:message }, dataType: "json", success: function (data) { var messagearea = $(".conversation-area"); var g = ""; for (var i = 0; i < groupnames.length; i++) { g += groupnames[i]; if (groupnames.length - 1 != i) { g += "、"; } } var li = '<li class="message"><img class="pic-right" src="static/img/pic.jpg">' + '<span class="message-box-right"><span class="message-content">' + '<i class="angle-right"></i><span class="text-right">' + '<p class="group-names-p" title="' + g + '">' + g + '</p>' + '<p class="group-text-p">' + message + '</p>' + '</span></span></span></li>'; messagearea.append(li); } }); }else{ alert("消息不能为空"); } }); $("#setdefaultgroups").unbind("click").bind("click",function(){ var groups = new Array(); var groupnames = new Array(); var count = 
0; $("#groups-ul").find("li").each(function(index, e){ if ($(this).find("i").hasClass("fa-check-square")) { groups[count] = $(this).find("span:first").attr("id"); groupnames[count++] = $(this).find("span:first").attr("title"); } return count; }); $.ajax({ type: "post", url: "/setDefault", data:{ groups:groups, groupnames:groupnames }, dataType: "json", success: function (data) { getBasicInfo("/update"); } }); }); $("#keyword-save").unbind("click").bind("click",function(){ var keyword = $("#keyword").val(); $("#keyword").val(""); var reply = $("#reply").val(); $("#reply").val(""); if (keyword != "" && reply != "") { var groups = new Array(); var groupnames = new Array(); var count = 0; $("#groups-ul").find("li").each(function(index, e){ if ($(this).find("i").hasClass("fa-check-square")) { groups[count] = $(this).find("span:first").attr("id"); groupnames[count++] = $(this).find("span:first").attr("title"); } return count; }); $.ajax({ type: "post", url: "/addKeyWord", data:{ groups:groups, keyword:keyword, reply:reply }, dataType: "json", success: function (data) { var keywordreplyul = $("#keyword-reply-ul"); var g = ""; for (var i = 0; i < groupnames.length; i++) { g += groupnames[i]; if (groupnames.length - 1 != i) { g += "、"; } } var li = '<li class="message"><span class="message-box-left">' + '<span class="message-content"><i class="angle-left"></i>' + '<span class="text-left"><p class="group-names-p" title="' + g + '">'+ g +'</p>' + '<p class="group-text-p">关键词:' + keyword + '</p>' + '<p class="group-text-p">回复:' + reply + '</p>' + '</span></span></span></li>'; keywordreplyul.append(li); } }); }else{ if (keyword == "") { alert("请输入关键词"); return false; }else{ alert("请输入回复"); return false; } } }); }); function findChoseGroup(){ $("#ChoseGroup-area").html(""); $(".single-group").each(function(){ if ($(this).find("i").hasClass("fa-check-square")) { var name = $(this).find("span:first").attr("title"); var p = '<p class="chosegroup-p">'+ name +'</p>'; 
$("#ChoseGroup-area").append(p); } }); } function getBasicInfo(url){ $.ajax({ type: "post", url: url, dataType: "json", success: function (data) { var ul = $("#groups-ul"); ul.html(""); for (var i = 0; i < data.length; i++) { var need = (data[i].need == true) ? 'fa fa-check-square' : 'fa fa-square-o'; var manage = (data[i].need == true) ? '(已管理)' : ''; var li = '<li class="single-group"><img class="message-pic" src="static/img/weixin.jpg">' + '<span id="'+ data[i].id +'" class="group-name" title="'+ data[i].name + manage +'">'+ data[i].name + '</span><span class="choose-box"><i class="' + need + '"></i></span></li>'; ul.append(li); } findChoseGroup(); $(".choose-box").unbind("click").bind("click",function(){ if ($(this).find("i").hasClass("fa-check-square")) { $(this).find("i").removeClass("fa-check-square"); $(this).find("i").addClass("fa-square-o"); }else{ $(this).find("i").removeClass("fa-square-o"); $(this).find("i").addClass("fa-check-square"); } findChoseGroup(); }); } }); }
409152cf4650895eb45da10e1851efaf0d76a90c
[ "JavaScript", "SQL", "Python" ]
18
Python
LittleChenry/WechatManager
487622bcccb1445f646a3000320b54e94565751f
8cee693ec9c1650370aa35460e91de161319c640
refs/heads/master
<file_sep><?php ini_set('display_errors', 1); header('Access-Control-Allow-Origin: *'); header('Access-Control-Expose-Headers: Location'); if (isset($_GET["u"])) { $url = $_GET["u"]; $url = str_replace(" ", "+", $url); echo file_get_contents($url); } <file_sep>$(document).ready(function () { var $popupButton = $(".popup__button"); var $finishLink = $("#finish-link"); $popupButton.on("click", function () { var count = chrome.extension.getBackgroundPage().count; $finishLink.text(count); }); }); <file_sep><?php ini_set( 'display_errors', 1 ); $url = $_GET["url"]; $url = str_replace(" ", "+", $url); $header = get_headers($url, 1); $location = $header["Location"]; if ($location) { echo $location; }
b9a717113aab03e8ac7600d5a3151c11ee04f383
[ "JavaScript", "PHP" ]
3
PHP
maechabin/redirect-link-checker
63fc9e5602fddc4ee6eb1ad1a1d2b53a1f87208b
11f26004aafa647e574f3c413fa5849bf0dd02de
refs/heads/master
<file_sep>using System; using System.Collections.Specialized; namespace WebServerProj { public abstract class AbsHttpHeaderEvaluator { public abstract string HeaderName { get; } public abstract string ErrorCode { get; } public bool Evaluate(NameValueCollection headers) { bool noContains = true; foreach (var header in headers) { if (header.ToString() == "Accept") { return noContains; } } throw new Exception($"{ErrorCode} - {HeaderName}"); } } }<file_sep>namespace WebServerProj { public interface ICustomBuilder { string Build(object data); string PageNotFound(object msg); } }
e884e94503739b19eda6699c492cbe073624570a
[ "C#" ]
2
C#
Kedech/MCSD-2018II
d7fd1ddbcb2c3ee43bef445e9220e951b7f02ee5
22a2b7ded11e21cfcd88fc1554ca24426d55e446
refs/heads/master
<repo_name>iv-mexx/TelegramUITestRequestServer<file_sep>/get_last_message.sh #!/bin/bash # # This script searches for the newest message from `USER_NAME` and returns it # It is necessary that the telegram-cli tool (https://github.com/vysheng/tg) is installed and configured # # Edit the `USER_NAME` variable to contain the username from which the message should be read. # Attention: whitespaces in the username have to be substituted with underscores # Edit the `TELEGRAM_PATH` variable to point to your telegram-cli client and the key USER_NAME='M_C' TELEGRAM_PATH='~/code/tg' TELEGRAM="$TELEGRAM_PATH/bin/telegram-cli -k $TELEGRAM_PATH/server.pub" # Due to a bug in the telegram-cli it is necessary to request the contact list once and then perform the actual command (sleep 1; echo "contact_list"; sleep 1; echo "history $USER_NAME 1") | eval "$TELEGRAM" | egrep "\[[0-9]*:[0-9]*\]" | tail -n 1 | sed 's/.*» //' <file_sep>/send_message.sh #!/bin/bash # # This script sends a message to `USER_NAME` # The first argument to this script will be sent as the message # It is necessary that the telegram-cli tool (https://github.com/vysheng/tg) is installed and configured # # Edit the `USER_NAME` variable to contain the username from which the message should be read. # Attention: whitespaces in the username have to be substituted with underscores # Edit the `TELEGRAM_PATH` variable to point to your telegram-cli client and the key USER_NAME='M_C' TELEGRAM_PATH='~/code/tg' MESSAGE=$1 TELEGRAM="$TELEGRAM_PATH/bin/telegram-cli -k $TELEGRAM_PATH/server.pub" # Due to a bug in the telegram-cli it is necessary to request the contact list once and then perform the actual command (sleep 1; echo "contact_list"; sleep 1; echo "msg $USER_NAME $MESSAGE") | eval "$TELEGRAM" <file_sep>/README.md # Telegram UI-Test Request Server This project was built to enable communication of Unit Testcases (especially on iOS) with the Telegram Backend. 
The corresponding iOS Project with Testcases implemented [can be found here](https://github.com/iv-mexx/Telegram-UI-Tests) ## Setup First, install the dependencies via `npm install`. You will need the [Telegram CLI](https://github.com/vysheng/tg) installed and configured. (Make sure to use the `--recursive` option when cloning and on OSX you may need to follow [this comment](https://github.com/vysheng/tg/issues/811#issuecomment-157707009)). ``` brew update brew upgrade brew install libconfig readline lua python libevent jansson export CFLAGS="-I/usr/local/include -I/usr/local/Cellar/readline/6.3.8/include" export CPPFLAGS="-I/usr/local/opt/openssl/include" export LDFLAGS="-L/usr/local/opt/openssl/lib -L/usr/local/lib -L/usr/local/Cellar/readline/6.3.8/lib" ./configure && make ``` Start up the Telegram CLI at least once and setup your account. You should use the same phone number that you will use in your testcases so that the confirmation code is sent to your CLI client when requested from the Testcase. Create a new contact with your own number (This can only be done in the CLI and is necessary for the testcases to work properly, the testcases will send messages to yourself, otherwise you would need a second CLI user). * In the telegram CLI: `add_contact <phone> <first name> <last name>` Finally, edit the [get_confirmation_code.sh](get_confirmation_code.sh), [get_last_message.sh](get_last_message.sh) and [send_message.sh](send_message.sh): * `TELEGRAM_PATH` has to point to the directory of your telegram installation (assuming you have cloned the repo and built it yourself) * `USER_NAME` is the name of the user to which you want to send messages / read the last message from. Keep in mind that you have to substitute whitespaces between first- and last name with underscores Finally, start up the server with `npm start`. 
You can now send HTTP requests to `0.0.0.0:3000` ## Routes Available * `GET confirmationCode`, returns the latest confirmation code * `GET lastMessage`, returns the last message received from `USER_NAME` * `POST message`, sends a message to `USER_NAME`. The body of the request has to be a JSON object `{ message: <your_message> }` The GET routes take some time to process and return, you may need to increase the timeout of your network calls for these. ## Troubleshooting * Telegram CLI prints `Assertion failed: (0), function print_media, file interface.c, line 3446.` You need to apply [this patch](https://github.com/vysheng/tg/pull/920/files) to telegram (unless this PR is already merged by this time)<file_sep>/get_confirmation_code.sh #!/bin/bash # # This script searches for the newest message from `Telegram` and extracts the confirmation code. # It is necessary that the telegram-cli tool (https://github.com/vysheng/tg) is installed and configured # # Edit the `TELEGRAM_PATH` variable to point to your telegram-cli client and the key TELEGRAM_PATH='~/code/tg' TELEGRAM="$TELEGRAM_PATH/bin/telegram-cli -k $TELEGRAM_PATH/server.pub" echo -e "search Telegram code\nsearch Telegram code\n" | eval "$TELEGRAM" | egrep "Your login code: ([0-9]*)" | tail -n 1 | sed 's/.*code: //' <file_sep>/app.js var express = require('express') var app = express() var bodyParser = require('body-parser') var jsonParser = bodyParser.json() app.use(bodyParser.json()) // Spawn Bash Scripts const spawn = require('child_process').spawn; // Routes app.get('/confirmationCode', function (req, res) { process.stdout.write("Request Confirmation Code\n"); const ls = spawn('bash', ['get_confirmation_code.sh']); ls.stdout.on('data', (data) => { res.json({"code": `${data}`.trim()}); }); }); app.get('/lastMessage', function (req, res) { process.stdout.write("Request Last Message\n"); const ls = spawn('bash', ['get_last_message.sh']); ls.stdout.on('data', (data) => { res.json({"message": 
`${data}`.trim().replace(/\033\[[0-9;]*m/,"")}); }); }); app.post('/message', function (req, res) { const msg = req.body.message; process.stdout.write("Post message '" + msg + "'\n"); const ls = spawn('bash', ['send_message.sh', msg]); res.status(201).json({"message": msg}) }); app.listen(3000)
21627e51b0739dc03c9843a0c6a7b6fe814848b6
[ "Markdown", "JavaScript", "Shell" ]
5
Shell
iv-mexx/TelegramUITestRequestServer
7a601aff04953a37a923518b8dcbf62dca46287f
30b3610f6622140312796478091b10f2646ce10c
refs/heads/master
<file_sep>#include <bits/stdc++.h> using namespace std; int cnum(string X,string Y) { string XY=X.append(Y); string YX=Y.append(X); return XY.compare(YX)>0 ? 1:0; }; int main() { int t; vector<string> n; string s; string res=""; cin>>t; while(t--) { vector<string> n; int r; cin>>r; for(int j=0;j<r;j++) { cin>>s; n.push_back(s); } sort(n.begin(),n.end(),cnum); for(int j=0;j<n.size();j++) { res=res.append(n[j]); } cout << res << endl; res=""; } }
d8d74e961a8707485ba19d2d766b8e895e499625
[ "C++" ]
1
C++
PraneethChandra18/lab-7
c59a90ea6fd8ecb0b241f58f9988404682958976
95a504edc0e379dfcdda082aa6ff8758cf8356ab
refs/heads/master
<file_sep># coding: utf-8 """ Copyright (c) 2011, <NAME> <fn7.bitbucket at gmail.com> All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
""" import sys, traceback from lxml import html from time import sleep import re class Input(): def __init__(self, form_element, name): self.form_element = form_element self.name = name def input(self, value): """ webdriver用の関数名と引数を返す getattr関数を用いて、関数名からメソッドを取得し実行するためのもの。 """ return [['send_keys', self.xpath, value]] class ChoiceInput(Input): def __init__(self, form_element, name, input_list): Input.__init__(self, form_element, name) self.input_list = input_list def choice(self, n): return self.input_list[n] class Form(): class Textfield(Input): @staticmethod def xpath(): return '//input[@type="text"]' def __init__(self, form_element, name): self.xpath = '//input[@type="text" and @name="%s"]' % name Input.__init__(self, form_element, name) class Textarea(Input): @staticmethod def xpath(): return '//textarea' def __init__(self, form_element, name): self.xpath = '//textarea[@name="%s"]' % name Input.__init__(self, form_element, name) class Radiobutton(ChoiceInput): @staticmethod def xpath(): return '//input[@type="radio"]' def __init__(self, form_element, name): self.xpath = '//input[@type="radio" and @name="%s" and @value="%%s"]' % (name) input_list = [] for elem in form_element.xpath('//input[@type="radio" and @name="%s"]' % name): if len(elem.get('value')) > 0: input_list.append( elem.get('value') ) if len(input_list) < 1: raise Exception, "input_list is empty" ChoiceInput.__init__(self, form_element, name, input_list) def input(self, value): return [['click', self.xpath % value, None]] class Checkbox(ChoiceInput): @staticmethod def xpath(): return '//input[@type="checkbox"]' def __init__(self, form_element, name): self.xpath = '//input[@type="checkbox" and @name="%s" and @value="%%s"]' % (name) input_list = [] for elem in form_element.xpath('//input[@type="checkbox" and @name="%s"]' % name): if len(elem.get('value')) > 0: input_list.append( elem.get('value') ) if len(input_list) < 1: raise Exception, "input_list is empty" ChoiceInput.__init__(self, form_element, 
name, input_list) def input(self, value): v = value if isinstance(value, list) or isinstance(value, tuple) else [value] r = [] for i in v: r.append(['click', self.xpath % i, None]) return r class Pulldown(ChoiceInput): @staticmethod def xpath(): return '//select' def __init__(self, form_element, name): self.xpath = '//select[@name="%s"]/option[@value="%%s"]' % (name) input_list = [] for elem in form_element.xpath('//select[@name="%s"]/option' % name): if len(elem.get('value')) > 0: input_list.append( elem.get('value') ) if len(input_list) < 1: raise Exception, "input_list is empty: %s" % self.xpath ChoiceInput.__init__(self, form_element, name, input_list) def input(self, value): return [['click', self.xpath % value, None]] def __init__(self, browser, xpath): self.xpath = xpath self.browser = browser try: self.elem = html.fromstring(browser.page_source).xpath(xpath)[0]; except Exception, e: traceback.print_exc() self.browser.close() exit() # 入力項目の取得 self.inputs = {} for elem in self.elem.xpath(Form.Textfield.xpath()): name = elem.get('name') if 0 < len(name): self.inputs[name] = Form.Textfield(self.elem, name) for elem in self.elem.xpath(Form.Textarea.xpath()): name = elem.get('name') if 0 < len(name): self.inputs[name] = Form.Textarea(self.elem, name) for elem in self.elem.xpath(Form.Radiobutton.xpath()): name = elem.get('name') if 0 < len(name): self.inputs[name] = Form.Radiobutton(self.elem, name) for elem in self.elem.xpath(Form.Checkbox.xpath()): name = elem.get('name') if 0 < len(name): self.inputs[name] = Form.Checkbox(self.elem, name) for elem in self.elem.xpath(Form.Pulldown.xpath()): name = elem.get('name') if 0 < len(name): self.inputs[name] = Form.Pulldown(self.elem, name) def fill(self, params): try: keys = sorted(self.inputs.keys()) for k in keys: if not params.has_key(k): continue val = params[k] cmds = self.inputs[k].input(val) for cmd in cmds: print >> sys.stderr, "%s: %s" % (cmd[1], val) exec_input = 
getattr(self.browser.find_element_by_xpath(cmd[1]), cmd[0]) if cmd[2] or cmd[2] == 0 or cmd[2] == '': exec_input(cmd[2]) else: exec_input() sleep(0.05) # waitいれとかないとちゃんと入力してくれない except Exception, e: traceback.print_exc()
0a726f7903560ffab1f18e0fca64682edf2faae6
[ "Python" ]
1
Python
fn7/form.py
4b117d034f16a55630400b875505e8b8b88d637a
0a6729abfbd7a482a5b5b26ffe9c698b456ed633
refs/heads/main
<repo_name>tmct-web/sconfig_tmct<file_sep>/README.md # sconfig_tmct Ported Intel(ALTERA) SRunner to Raspberry Pi. This tool is a port of a tool called "SRunner" published by Intel (ALTERA) to the Raspberry Pi. このツールはIntel(ALTERA)が公開している「SRunner」というツールをRaspberry Piに移植したものです。 ※日本語の解説文は英文解説の後にあります。 --- ## What is SRunner? SRunner is a tool to connect a PC's parallel port to Intel(ALTERA) FPGA configuration ROM(EPCS series) for writing, but it is almost worthless nowadays for the following reasons. - **It is impossible to get a new PC which has parallel port and works properly.** USB-to-parallel converter is also not compatible and useless. - **Supported OS up to Windows XP.** ...However, the documentation for making a tool to write to the configuration ROM is very scarce, so I've ported it to the Raspberry Pi and published it as "SCONFIG". The I/O drive part uses the standard Raspberian GPIO driver, but by rewriting this part to just drive I/O, it can easily be ported to general microcontrollers. ## Port Assignment The port assignment is as follows. This assignment is defined in bb_gpio.c, so it can be rewritten here to assign it to a different port. | Pin No. | Pin name | Input/output direction | Signal name | Remarks | | :-- | :-- | :-- | :-- | :-- | | 7 | GPIO4;GPIO_GCLK | OUT | EPCS_ASDI | ASDI output ...connect to ASDI of EPCS. | | 11 | GPIO17;GPIO_GEN0 | OUT | EPCS_nCE | nCE output ...connect to nCE of FPGA | | 12 | GPIO18;GPIO_GEN1 | OUT | EPCS_nCS | nCS output ... Connect to EPCS nCS. | | 13 | GPIO27;GPIO_GEN2 | OUT | EPCS_nCONFIG | nCONFIG. Connect to nCONFIG of FPGA. | | 15 | GPIO22;GPIO_GEN3 | OUT | EPCS_DCLK | DCLK output; Connect to DCLK of EPCS. | | 16 | GPIO23;GPIO_GEN4 | IN | EPCS_CONFDONE | CONFDONE input; Connect to CONFDONE of FPGA. | | 18 | GPIO24;GPIO_GEN5 | IN | EPCS_DATAO | DATAO input: Connect to EPCS DATA. | ## How to install All you need to do is to put the program file itself into an appropriate folder. 
No special settings are required, but if GPIO resources are being used for other tasks, they cannot be used. The GPIO control uses the standard Raspberian device driver, so nothing else is needed. ## How to use This is a command line only tool. Parameters cannot be omitted. . /mt_sconfig -[command] -[EPCS density] [filename]. | Parameter | | | :-- | :-- | | [command] | **Specifies the command to be executed for EPCS.**<br/>program ... Write the specified RPD file<br/>read ... Save the contents of the EPCS to the specified RPD file<br/>verify ... Verify that the contents of the specified RPD file match the contents of the EPCS<br/>erase ... Erase EPCS (function not available in SRunner) | | [EPCS density] | **Specify the size of the EPCS.**<br/>4 for EPCS4, 16 for EPCS16. | | [filename] | **Specifies the RPD file to write target.**<br/>RPD files are not generated by default, so they are generated by converting from SOF files with Quartus' file converter. | ## License The license terms are governed by the terms of SRunner. You may use it freely for commercial or private use as long as you do not remove the copyright notice, but you do so at your own risk. You can find more information on the following page. (*Japanese only) [tmct web-site: SCONFIG](https://ss1.xrea.com/tmct.s1009.xrea.com/doc/ta-ja-7e5g02.html) --- ## SRunnerとは? SRunnerはパソコンのパラレルポートとIntel(ALTERA)のFPGAコンフィグレーションROM(EPCSシリーズ)を接続して書き込みを行うためのツールですが、下記の理由があり今となってはほぼ利用価値がないツールです。 - **パラレルポートが付いていて正常稼働するパソコンは新品での入手は不可。** USB-パラレル変換もソフト的な互換がなく使えない。 - **対応OSがWindows XPまで。** …とはいえコンフィグレーションROMに書き込むツールを製作するための資料は非常に乏しく、技術資料としての価値は高いので、試しにRaspberry Piに移植してみたものを「SCONFIG」として公開します。 I/O駆動部分はRaspberianのGPIO標準ドライバを利用していますが、この部分をただのI/O駆動に書き換えることで一般的なマイコンなどにも容易にポーティングが可能です。 ## ポートアサイン 以下のようなポートアサインになっています。 このアサインは bb_gpio.c に定義しているので、ここを書き換えると別のポートにアサインすることもできます。 | Pin No. 
| ピン名 | 入出力方向 | 信号名 | 備考 | | :-- | :-- | :-- | :-- | :-- | | 7 | GPIO4;GPIO_GCLK | OUT | EPCS_ASDI | ASDI出力‥EPCSのASDIに接続 | | 11 | GPIO17;GPIO_GEN0 | OUT | EPCS_nCE | nCE出力‥FPGAのnCEに接続 | | 12 | GPIO18;GPIO_GEN1 | OUT | EPCS_nCS | nCS出力‥EPCSのnCSに接続 | | 13 | GPIO27;GPIO_GEN2 | OUT | EPCS_nCONFIG | nCONFIG‥FPGAのnCONFIGに接続 | | 15 | GPIO22;GPIO_GEN3 | OUT | EPCS_DCLK | DCLK出力‥EPCSのDCLKに接続 | | 16 | GPIO23;GPIO_GEN4 | IN | EPCS_CONFDONE | CONFDONE入力‥FPGAのCONFDONEに接続 | | 18 | GPIO24;GPIO_GEN5 | IN | EPCS_DATAO | DATAO入力‥EPCSのDATAに接続 | ## インストール方法 適当なフォルダにプログラムファイル本体を入れるだけです。とくに設定などは不要ですが、他のタスクにGPIOリソースを使われている場合は使用できません。 GPIO制御はRaspberian標準のデバイスドライバを使用しており、ほかには何も必要ありません。 ## 使い方 コマンドライン専用ツールです。パラメータを省略することはできません。 ./mt_sconfig -[command] -[EPCS density] [filename] | Parameter | | | :-- | :-- | | [command] | **EPCSに対して実行するコマンドを指定します。**<br/>program ... 指定されたRPDファイルを書き込む<br/>read ... EPCSの内容を指定されたRPDファイルに保存する<br/>verify ... 指定されたRPDファイルの内容とEPCSの内容が一致することを確認する<br/>erase ... EPCSをイレースする(本家SRunnerにはない機能) | | [EPCS density] | **EPCSのサイズを指定します。**<br/>EPCS4なら4、EPCS16なら16を指定します。 | | [filename] | **書き込みターゲットとなるRPDファイルを指定します。**<br/>RPDファイルは標準では生成されないので、SOFファイルからQuartusのファイルコンバータで変換することで生成します。 | ## ライセンス ライセンス条項はSRunnerの条項に準拠します。著作権表示を消さない限り商用・私用問わず自由に利用して頂いて構いませんが、すべて自己責任にてご利用ください。 下記のページにもう少し詳しく書いてあります。 [tmct web-site: SCONFIG](https://ss1.xrea.com/tmct.s1009.xrea.com/doc/ta-ja-7e5g02.html) <file_sep>/src/main.c /****************************************************************************/ /* */ /* Module: main.c (SRunner) */ /* */ /* Copyright (C) Altera Corporation 2004 */ /* */ /* Descriptions: Main source file that manages SRunner-User interface */ /* to execute program or read routine in SRunner */ /* */ /* Revisions: 1.0 09/30/04 Khai Liang Aw */ /* 1.1 06/05/05 Khai Liang Aw - EPCS64 Support */ /* 1.2 06/11/08 <NAME> - EPCS128 Support */ /* */ /* */ /* */ /****************************************************************************/ #include <stdio.h> #include <string.h> #include 
<stdlib.h> #include "user.h" #include "as.h" /* Version Number */ const char VERSION[4] = "1.0"; const char SRCVERSION[4] = "1.2"; /********************************************************************************/ /* Name: Main */ /* */ /* Parameters: int argc, char* argv[] */ /* - number of argument. */ /* - argument character pointer. */ /* */ /* Return Value: None. */ /* */ /* Descriptions: Open programming file and initialize driver if required */ /* (WINDOWS NT). */ /* */ /********************************************************************************/ int main( int argc, char** argv) { int status = 0; int epcsDensity = 0; /* Introduction */ fprintf(stdout,"\n=======================================================\n"); fprintf(stdout," Mi-ke neko Tools :: SCONFIG Version %s\n", VERSION); fprintf(stdout," Copyright (c) 2015 Mi-ke neko Tools.\n"); fprintf(stdout," Based on SRunner(Windows) Version %s\n", SRCVERSION); fprintf(stdout," Copyright (c) 1995-2015 Altera Corporation.\n"); fprintf(stdout,"=======================================================\n"); if(argc != 4) //Syntax check { fprintf( stdout, " \nError: Invalid Number of Arguments\n"); fprintf( stdout, " \n Command\t\t\t\t\t\tDescription\n"); fprintf( stdout, " ============\t\t\t\t\t\t===============\n"); fprintf( stdout, " sconfig -program -<EPCS density in Mb> <file.rpd>\t=> Program EPCS\n"); fprintf( stdout, " sconfig -read -<EPCS density in Mb> <file.rpd>\t\t=> Read EPCS data to file\n"); fprintf( stdout, " sconfig -verify -<EPCS density in Mb> <file.rpd>\t=> Verify EPCS data with file\n"); fprintf( stdout, " sconfig -erase -<EPCS density in Mb> \"\"\t\t=> Erase EPCS data\n"); fprintf( stdout, "\nExample of command -> srunner -program -64 Mydesign.rpd <-\n"); status = CB_INVALID_NUMBER_OF_ARGUMENTS; } else if ( argv[1][1] != 'p' && argv[1][1] != 'r' && argv[1][1] != 'v' && argv[1][1] != 'e') { fprintf( stdout, " \nError: Invalid Command\n"); fprintf( stdout, " \n 
Command\t\t\t\t\t\tDescription\n"); fprintf( stdout, " ============\t\t\t\t\t\t===============\n"); fprintf( stdout, " sconfig -program -<EPCS density in Mb> <file.rpd>\t=> Program EPCS\n"); fprintf( stdout, " sconfig -read -<EPCS density in Mb> <file.rpd>\t\t=> Read EPCS data to file\n"); fprintf( stdout, " sconfig -verify -<EPCS density in Mb> <file.rpd>\t=> Verify EPCS data with file\n"); fprintf( stdout, " sconfig -erase -<EPCS density in Mb> \"\"\t\t=> Erase EPCS data\n"); fprintf( stdout, "\nExample of command -> srunner -program -64 Mydesign.rpd <-\n"); status = CB_INVALID_COMMAND; } else if ((strcmp(&argv[2][1],"1")!=0) && (strcmp(&argv[2][1],"4")!=0) && (strcmp(&argv[2][1],"16")!=0) && (strcmp(&argv[2][1],"64")!=0) && (strcmp(&argv[2][1],"128")!=0)) { fprintf( stdout, "\nError: Invalid EPCS density\n"); fprintf( stdout, "\nValid choices are 1, 4, 16, 64, or 128\n"); fprintf( stdout, "\nExample of command -> srunner -program -64 Mydesign.rpd <-\n"); status = CB_INVALID_EPCS_DENSITY; } else { epcsDensity = atoi(&argv[2][1]); if ( argv[1][1] == 'p') { fprintf( stdout, "\nOperation: Programming EPCS\n"); status = as_program( &argv[3][0], epcsDensity ); //Execute programming function } else if ( argv[1][1] == 'r') { fprintf( stdout, "\nOperation: Reading EPCS Data\n"); status = as_read( &argv[3][0], epcsDensity ); //Execute reading function } else if ( argv[1][1] == 'v') { fprintf( stdout, "\nOperation: Verifying EPCS Data\n"); status = as_ver( &argv[3][0], epcsDensity ); //Execute verify function } else if ( argv[1][1] == 'e') { fprintf( stdout, "\nOperation: Erase EPCS Data\n"); status = as_erase( epcsDensity ); //Execute erase fu nction } } if(status != CB_OK) fprintf( stdout, "\nError code: %d\n\n", status ); else fprintf( stdout, "\nOperation Completed!!!\n\n" ); return status; } <file_sep>/src/user.h /* Error Code Start */ #define CB_OK 0 #define CB_FS_OPEN_FILE_ERROR -1 #define CB_FS_CLOSE_FILE_ERROR -2 #define CB_FS_SIZE_EOF_NOT_FOUND -3 #define 
CB_FS_READ_ERROR -4 #define CB_BB_OPEN_ERROR_OPEN_PORT -5 #define CB_BB_OPEN_VERSION_INCOMPATIBLE -6 #define CB_BB_OPEN_DRIVER_INCOMPATIBLE -7 #define CB_BB_OPEN_DEVICEIOCONTROL_FAIL -8 #define CB_BB_CLOSE_BYTEBLASTER_NOT_OPEN -9 #define CB_BB_FLUSH_ERROR -10 #define CB_BB_VERIFY_BYTEBLASTER_NOT_FOUND -11 #define CB_BB_LPTREAD_ERROR -12 #define CB_BB_LPTWRITE_ERROR -13 #define CB_PS_CONF_NSTATUS_LOW -14 #define CB_PS_CONF_CONFDONE_LOW -15 #define CB_PS_INIT_NSTATUS_LOW -16 #define CB_PS_INIT_CONFDONE_LOW -17 #define CB_AS_VERIFY_FAIL -18 #define CB_AS_UNSUPPORTED_DEVICE -19 #define CB_AS_WRONG_RPD_FILE -20 #define CB_INVALID_NUMBER_OF_ARGUMENTS -21 #define CB_INVALID_COMMAND -22 #define CB_INVALID_EPCS_DENSITY -23 /* Error Code END */ <file_sep>/src/as.h /*////////////////////*/ /* Global Definitions */ /*////////////////////*/ #define CHECK_EVERY_X_BYTE 10240 #define INIT_CYCLE 200 /*///////////////////////*/ /* AS Instruction Set */ /*///////////////////////*/ #define AS_WRITE_ENABLE 0x06 #define AS_WRITE_DISABLE 0x04 #define AS_READ_STATUS 0x05 #define AS_WRITE_STATUS 0x01 #define AS_READ_BYTES 0x03 #define AS_FAST_READ_BYTES 0x0B #define AS_PAGE_PROGRAM 0x02 #define AS_ERASE_SECTOR 0xD8 #define AS_ERASE_BULK 0xC7 #define AS_READ_SILICON_ID 0xAB #define AS_CHECK_SILICON_ID 0x9F /*///////////////////////*/ /* Silicon ID for EPCS */ /*///////////////////////*/ #define EPCS1_ID 0x10 #define EPCS4_ID 0x12 #define EPCS16_ID 0x14 #define EPCS64_ID 0x16 #define EPCS128_ID 0x18 /*///////////////////////*/ /* EPCS device */ /*///////////////////////*/ #define EPCS1 1 #define EPCS4 4 #define EPCS16 16 #define EPCS64 64 #define EPCS128 128 #define DEV_READBACK 0xFF //Special bypass indicator during EPCS data readback /*///////////////////////*/ /* Functions Prototyping */ /*///////////////////////*/ int as_program( char*, int); int as_read( char*, int ); int as_erase( int ); int as_ver( char *, int ); int as_open( char*, int*, long int* ); int as_close( int ); int 
as_program_start( void ); int as_program_done(void); int as_bulk_erase( void ); int as_prog( int, int ); int as_silicon_id(int, int); int as_program_byte_lsb( int ); int as_read_byte_lsb( int* ); int as_program_byte_msb( int ); int as_read_byte_msb( int* ); int as_readback(int); int as_verify( int, int); void as_lsb_to_msb( int *, int *); <file_sep>/src/bb.c /****************************************************************************/ /* */ /* Module: mb_io.c (MicroBlaster) */ /* */ /* Copyright (C) Altera Corporation 2001 */ /* */ /* Descriptions: Defines all IO control functions. operating system */ /* is defined here. Functions are operating system */ /* dependant. */ /* */ /* Revisions: 1.0 12/10/01 <NAME> */ /* Supports Altera ByteBlaster hardware download cable */ /* on Windows NT. */ /* */ /****************************************************************************/ #include <stdio.h> #include "user.h" #include "bb.h" /*////////////////////*/ /* Global Definitions */ /*////////////////////*/ #define PGDC_IOCTL_GET_DEVICE_INFO_PP 0x00166A00L #define PGDC_IOCTL_READ_PORT_PP 0x00166A04L #define PGDC_IOCTL_WRITE_PORT_PP 0x0016AA08L #define PGDC_IOCTL_PROCESS_LIST_PP 0x0016AA1CL #define PGDC_WRITE_PORT 0x0a82 #define PGDC_HDLC_NTDRIVER_VERSION 2 #define PORT_IO_BUFFER_SIZE 256 /*//////////////////*/ /* Global Variables */ /*//////////////////*/ HANDLE nt_device_handle = INVALID_HANDLE_VALUE; int port_io_buffer_count = 0; struct PORT_IO_LIST_STRUCT { USHORT command; USHORT data; } port_io_buffer[PORT_IO_BUFFER_SIZE]; int bb_type = 0; /* port_data holds the current values of signals for every port. By default, they hold the values in */ /* reset mode (PM_RESET_<ByteBlaster used>). */ /* port_data[Z], where Z - port number, holds the value of the port. */ int cur_data = 0x42;/* Initial value for Port 0, 1 and 2 */ /********************************************************************************/ /* Name: InitNtDriver */ /* */ /* Parameters: None. 
*/ /* */ /* Return Value: None. */ /* */ /* Descriptions: Initiallize Windows NT Driver for ByteBlasterMV. */ /* */ /********************************************************************************/ int bb_open( void ) { int init_ok = 0; /* Initialization OK */ int status = 0; ULONG buffer[1]; ULONG returned_length = 0; char nt_lpt_str[] = { '\\', '\\', '.', '\\', 'A', 'L', 'T', 'L', 'P', 'T', '1', '\0' }; nt_device_handle = CreateFile( nt_lpt_str, GENERIC_READ | GENERIC_WRITE, 0, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL ); if ( nt_device_handle == INVALID_HANDLE_VALUE ) { fprintf( stderr, "I/O Error: Cannot open device \"%s\"\n", nt_lpt_str ); status = CB_BB_OPEN_ERROR_OPEN_PORT; } else { if ( DeviceIoControl( nt_device_handle, PGDC_IOCTL_GET_DEVICE_INFO_PP, (ULONG *) NULL, 0, &buffer, sizeof(ULONG), &returned_length, NULL )) { if ( returned_length == sizeof( ULONG ) ) { if (buffer[0] == PGDC_HDLC_NTDRIVER_VERSION) { init_ok = 1; fprintf( stdout, "Info: Port \"%s\" opened.\n", nt_lpt_str ); } else { fprintf(stderr, "I/O Error: device driver %s is not compatible!\n(Driver version is %lu, expected version %lu.\n", nt_lpt_str, (unsigned long) buffer[0], (unsigned long) PGDC_HDLC_NTDRIVER_VERSION ); status = CB_BB_OPEN_VERSION_INCOMPATIBLE; } } else fprintf(stderr, "I/O Error: device driver %s is not compatible!\n", nt_lpt_str); status = CB_BB_OPEN_DRIVER_INCOMPATIBLE; } if ( !init_ok ) { fprintf( stderr, "I/O Error: DeviceIoControl not successful!" 
); CloseHandle( nt_device_handle ); nt_device_handle = INVALID_HANDLE_VALUE; status = CB_BB_OPEN_DEVICEIOCONTROL_FAIL; } } if ( !init_ok ) { fprintf( stderr, "Error: Driver initialization fail!\n" ); CloseHandle( nt_device_handle ); nt_device_handle = INVALID_HANDLE_VALUE; return status; } else { status = bb_verify( &bb_type ); if ( status != CB_OK ) { CloseHandle( nt_device_handle ); nt_device_handle = INVALID_HANDLE_VALUE; return status; } else { if ( bb_type == 1 ) status = bb_reset( BBMV_CONFIG_MODE ); else if ( bb_type == 2) status = bb_reset( BBII_CONFIG_MODE ); return status; } } } /********************************************************************************/ /* Name: CloseNtDriver */ /* */ /* Parameters: None. */ /* */ /* Return Value: None. */ /* */ /* Descriptions: Close Windows NT Driver. */ /* */ /********************************************************************************/ int bb_close( void ) { int status = 0; if ( bb_type == BBNONE ) { fprintf(stderr, "ByteBlaster not opened!"); return CB_BB_CLOSE_BYTEBLASTER_NOT_OPEN; } else { if ( bb_type == BBMV ) status = bb_reset( BBMV_USER_MODE ); else if ( bb_type == BBII) status = bb_reset( BBII_USER_MODE ); if ( status == CB_OK ) bb_type = BBNONE; return status; } CloseHandle( nt_device_handle ); nt_device_handle = INVALID_HANDLE_VALUE; } /********************************************************************************/ /* Name: flush_ports */ /* */ /* Parameters: None. */ /* */ /* Return Value: None. */ /* */ /* Descriptions: Flush processes in [port_io_buffer] and reset buffer */ /* size to 0. 
*/ /* */ /********************************************************************************/ int bb_flush( void ) { ULONG n_writes = 0L; BOOL status; status = DeviceIoControl( nt_device_handle, /* handle to device */ PGDC_IOCTL_PROCESS_LIST_PP, /* IO control code */ (LPVOID)port_io_buffer, /* IN buffer (list buffer) */ port_io_buffer_count * sizeof(struct PORT_IO_LIST_STRUCT),/* length of IN buffer in bytes */ (LPVOID)port_io_buffer, /* OUT buffer (list buffer) */ port_io_buffer_count * sizeof(struct PORT_IO_LIST_STRUCT),/* length of OUT buffer in bytes */ &n_writes, /* number of writes performed */ 0); /* wait for operation to complete */ if ((!status) || ((port_io_buffer_count * sizeof(struct PORT_IO_LIST_STRUCT)) != n_writes)) { fprintf(stderr, "I/O Error: Cannot flush ByteBlaster hardware!\n"); return CB_BB_FLUSH_ERROR; } else { port_io_buffer_count = 0; return CB_OK; } } /******************************************************************/ /* Name: VerifyBBII (ByteBlaster II) */ /* */ /* Parameters: None. */ /* */ /* Return Value: '0' if verification is successful;'1' if not. */ /* */ /* Descriptions: Verify if ByteBlaster II is properly attached to */ /* the parallel port. */ /* */ /******************************************************************/ int bb_verify( int *types ) { int status = 0; int type = 0; int test_count = 0; int read_data = 0; int error = 0; int i = 0; for ( type = 0; type < 2; type++ ) { int vector = (type) ? 0x10 : 0xA0; int expect = (type) ? 0x40 : 0x60; int vtemp; for ( test_count = 0; test_count < 2; test_count++ ) { /* Write '0' to Pin 6 (Data4) for the first test and '1' for the second test */ vtemp = (test_count) ? (vector & 0xff) : 0x00;/* 0001 0000:0000 0000... 
drive to Port0 */ status = bb_lptwrite( LPT_DATA, vtemp, 1 ); if ( status != CB_OK ) return status; //delay for (i=0;i<1500;i++); /* Expect '0' at Pin 10 (Ack) and Pin 15 (Error) for the first test */ /* and '1' at Pin 10 (Ack) and '0' Pin 15 (Error) for the second test */ status = bb_lptread( LPT_STATUS, &read_data ); if ( status != CB_OK ) return status; read_data = read_data & (expect & 0xff); /* If no ByteBlaster II detected, error = 1 */ if (test_count==0) { if(read_data==0x00) error=0; else error=1; } if (test_count==1) { if(read_data == (expect & 0xff)) error=error|0; else error=1; } } if ( !error ) break; } if (!type) { fprintf( stdout, "Info: Verifying hardware: ByteBlasterMV found.\n" ); *types = BBMV; return CB_OK; } else { if (!error) { fprintf( stdout, "Info: Verifying hardware: ByteBlaster II found.\n" ); *types = BBII; return CB_OK; } else { fprintf( stderr, "Error: Verifying hardware: ByteBlaster not found or not installed properly!\n" ); return CB_BB_VERIFY_BYTEBLASTER_NOT_FOUND; } } } /********************************************************************************/ /* Name: ReadByteBlaster */ /* */ /* Parameters: int port */ /* - port number 0, 1, or 2. Index to parallel port base */ /* address. */ /* */ /* Return Value: Integer, value of the port. */ /* */ /* Descriptions: Read the value of the port registers. */ /* */ /********************************************************************************/ int bb_lptread( int port, int *data ) { int temp = 0; int status = 0; int returned_length = 0; status = DeviceIoControl( nt_device_handle, /* Handle to device */ PGDC_IOCTL_READ_PORT_PP, /* IO Control code for Read */ (ULONG *)&port, /* Buffer to driver. */ sizeof(int), /* Length of buffer in bytes. */ (ULONG *)&temp, /* Buffer from driver. */ sizeof(int), /* Length of buffer in bytes. */ (ULONG *)&returned_length, /* Bytes placed in data_buffer. 
*/ NULL); /* Wait for operation to complete */ if ((!status) || (returned_length != sizeof(int))) { fprintf(stderr, "I/O error: Cannot read from ByteBlaster hardware!\n"); return CB_BB_LPTREAD_ERROR; } else { *data = temp & 0xff; return CB_OK; } } /********************************************************************************/ /* Name: WriteByteBlaster */ /* */ /* Parameters: int port, int data, int test */ /* - port number 0, 1, or 2. Index to parallel port base */ /* address. */ /* - value to written to port registers. */ /* - purpose of write. */ /* */ /* Return Value: None */ /* */ /* Descriptions: Write [data] to [port] registers. When dump to Port0, if */ /* [test] = '0', processes in [port_io_buffer] are dumped */ /* when [PORT_IO_BUFFER_SIZE] is reached. If [test] = '1', */ /* [data] is dumped immediately to Port0. */ /* */ /********************************************************************************/ int bb_lptwrite( int port, int data, int nbuffering ) { int status = 0; int returned_length = 0; int buffer[2]; /* Collect up to [PORT_IO_BUFFER_SIZE] data for Port0, then flush them */ /* if nbuffering = 1 or Port = 1 or Port = 2, writing to the ports are done immediately */ if (port == 0 && nbuffering == 0) { port_io_buffer[port_io_buffer_count].data = (USHORT) data; port_io_buffer[port_io_buffer_count].command = PGDC_WRITE_PORT; ++port_io_buffer_count; if (port_io_buffer_count >= PORT_IO_BUFFER_SIZE) bb_flush(); } else { buffer[0] = port; buffer[1] = data; status = DeviceIoControl( nt_device_handle, /* Handle to device */ PGDC_IOCTL_WRITE_PORT_PP, /* IO Control code for write */ (ULONG *)&buffer, /* Buffer to driver. */ 2 * sizeof(int), /* Length of buffer in bytes. */ (ULONG *)NULL, /* Buffer from driver. Not used. */ 0, /* Length of buffer in bytes. */ (ULONG *)&returned_length, /* Bytes returned. Should be zero. 
*/ NULL); /* Wait for operation to complete */ if ( !status ) { fprintf(stderr, "I/O error: Cannot write to ByteBlaster hardware!\n"); return CB_BB_LPTWRITE_ERROR; } } return CB_OK; } /********************************************************************************/ /* Name: CheckSignal */ /* */ /* Parameters: int signal */ /* - name of the signal (SIG_*). */ /* */ /* Return Value: Integer, the value of the signal. '0' is returned if the */ /* value of the signal is LOW, if not, the signal is HIGH. */ /* */ /* Descriptions: Return the value of the signal. */ /* */ /********************************************************************************/ int bb_read( int signal, int *data ) { int temp = 0; int status = 0; status = bb_lptread( LPT_STATUS, &temp ); if ( status == CB_OK ) *data = (temp ^ 0x80) & signal; return status; } /********************************************************************************/ /* Name: Dump2Port */ /* */ /* Parameters: int signal, int data, int clk */ /* - name of the signal (SIG_*). */ /* - value to be dumped to the signal. */ /* - assert a LOW to HIGH transition to SIG_DCLK togther with */ /* [signal]. */ /* */ /* Return Value: None. */ /* */ /* Descriptions: Dump [data] to [signal]. If [clk] is '1', a clock pulse is */ /* generated after the [data] is dumped to [signal]. */ /* */ /********************************************************************************/ int bb_write( int signal, int data ) { int status = 0; /* AND signal bit with '0', then OR with [data] */ int mask = ~signal; cur_data = ( cur_data & mask ) | ( data * signal ); status = bb_lptwrite( LPT_DATA, cur_data, 0 ); return status; } /********************************************************************************/ /* Name: SetPortMode */ /* */ /* Parameters: int mode */ /* - The mode of the port (PM_*) */ /* */ /* Return Value: None. */ /* */ /* Descriptions: Set the parallel port registers to particular values. 
*/ /* */ /********************************************************************************/ int bb_reset( int mode ) { int status = 0; /* write to Port 0 and Port 2 with predefined values */ int control = mode ? 0x0C : 0x0E; cur_data = 0x42; status = bb_lptwrite( LPT_DATA, cur_data, 1 ); if ( status == CB_OK ) status = bb_lptwrite( LPT_CONTROL, control, 1 ); return status; }<file_sep>/src/makefile mt_sconfig: main.o fs.o bb_gpio.o as.o gcc -Wall -O2 -o mt_sconfig main.o fs.o bb_gpio.o as.o main.o: gcc -c main.c fs.o: gcc -c fs.c bb_gpio.o: gcc -c bb_gpio.c as.o: gcc -c as.c clean: rm -f mt_sconfig main.o fs.o bb_gpio.o as.o <file_sep>/src/as.c #include <stdio.h> #include "user.h" #include "as.h" #include "fs.h" #include "bb_gpio.h" int EPCS_device = 0; int RPD_file_size = 0; /********************************************************************************/ /* Name: as_program */ /* */ /* Parameters: FILE* finputid */ /* - programming file pointer. */ /* */ /* Return Value: Error Code */ /* */ /* Descriptions: Get programming file size, parse through every single byte */ /* and dump to parallel port. */ /* */ /* FPGA access to the EPCS is disable when the programming */ /* starts. 
*/ /* */ /* */ /********************************************************************************/ int as_program( char *file_path, int epcsDensity ) { int status = 0; int file_id = 0; long int file_size = 0; /* Open RPD file for programming */ status = as_open( file_path, &file_id, &file_size ); if ( status != CB_OK ) return status; /* Disable FPGA access to EPCS */ status = as_program_start(); if ( status != CB_OK ) return status; /* Read EPCS silicon ID */ status = as_silicon_id(file_size, epcsDensity); if ( status != CB_OK ) return status; /* EPCS Bulk Erase */ status = as_bulk_erase( ); if ( status != CB_OK ) return status; /* Start EPCS Programming */ status = as_prog( file_id, file_size ); if ( status != CB_OK ) return status; /* Start EPCS Verifying */ //status = as_verify( file_id, file_size ); //if ( status != CB_OK ) // return status; /* Enable FPGA access to EPCS */ status = as_program_done(); if ( status != CB_OK ) return status; status = as_close( file_id ); if ( status != CB_OK ) return status; return CB_OK; } /********************************************************************************/ /* Name: as_ver */ /* */ /* Parameters: FILE* finputid */ /* - programming file pointer. */ /* */ /* Return Value: Error Code */ /* */ /* Descriptions: Verify EPCS data */ /* */ /* */ /* FPGA access to the EPCS is disable when the programming */ /* starts. 
*/ /* */ /* */ /********************************************************************************/ int as_ver( char *file_path, int epcsDensity) { int status = 0; int file_id = 0; long int file_size = 0; /* Open RPD file for verify */ status = as_open( file_path, &file_id, &file_size ); if ( status != CB_OK ) return status; /* Disable FPGA access to EPCS */ status = as_program_start(); if ( status != CB_OK ) return status; /* Read EPCS silicon ID */ status = as_silicon_id(file_size, epcsDensity); if ( status != CB_OK ) return status; /* Start EPCS Verifying */ status = as_verify( file_id, file_size ); if ( status != CB_OK ) return status; /* Enable FPGA access to EPCS */ status = as_program_done(); if ( status != CB_OK ) return status; status = as_close( file_id ); if ( status != CB_OK ) return status; return CB_OK; } /********************************************************************************/ /* Name: as_read */ /* */ /* Parameters: FILE* finputid */ /* - programming file pointer. */ /* */ /* Return Value: Error Code */ /* */ /* Descriptions: Get EPCS data and save in a file */ /* */ /* */ /* FPGA access to the EPCS is disable when the reading */ /* starts. 
*/ /* */ /* */ /********************************************************************************/ int as_read( char *file_path, int epcsDensity ) { int status = 0; int file_id = 0; long int file_size = 0; status = bb_open(); if ( status != CB_OK ) return status; /* Open RPD file for to store EPCS data */ status = fs_open( file_path, "w+b", &file_id ); if ( status != CB_OK ) return status; /* Disable FPGA access to EPCS */ status = as_program_start(); if ( status != CB_OK ) return status; /* Read EPCS silicon ID */ status = as_silicon_id(DEV_READBACK, epcsDensity); if ( status != CB_OK ) return status; /* Start EPCS Readback */ status = as_readback( file_id); if ( status != CB_OK ) return status; /* Enable FPGA access to EPCS */ status = as_program_done(); if ( status != CB_OK ) return status; status = as_close( file_id ); if ( status != CB_OK ) return status; return CB_OK; } int as_erase( int epcsDensity ) { int status = 0; status = bb_open(); if ( status != CB_OK ) return status; /* Disable FPGA access to EPCS */ status = as_program_start(); if ( status != CB_OK ) return status; /* Read EPCS silicon ID */ status = as_silicon_id(DEV_READBACK, epcsDensity); if ( status != CB_OK ) return status; /* EPCS Bulk Erase */ status = as_bulk_erase( ); if ( status != CB_OK ) return status; /* Enable FPGA access to EPCS */ status = as_program_done(); if ( status != CB_OK ) return status; bb_close(); return CB_OK; } int as_program_start(void) { int status = 0; // Drive NCONFIG to reset FPGA before programming EPCS status = bb_write( NCONFIG, 0 ); if ( status != CB_OK ) return status; // Drive NCE to disable FPGA from accessing EPCS status = bb_write( NCE, 1 ); if ( status != CB_OK ) return status; // Drive NCS to high when not acessing EPCS status = bb_write( NCS, 1 ); if ( status != CB_OK ) return status; //status = bb_flush(); //if ( status != CB_OK ) // return status; return CB_OK; } int as_program_done(void) { int status; // Drive NCE to enable FPGA status = bb_write( NCE, 
0 ); if ( status != CB_OK ) return status; // Drive NCONFIG from low to high to reset FPGA status = bb_write( NCONFIG, 1 ); if ( status != CB_OK ) return status; // Drive NCS to high when not acessing EPCS status = bb_write( NCS, 1 ); if ( status != CB_OK ) return status; //status = bb_flush(); //if ( status != CB_OK ) // return status; return CB_OK; } int as_open( char *file_path, int *file_id, long int *file_size ) { int status = 0; status = fs_open( file_path, "rb", file_id ); if ( status != CB_OK ) return status; status = bb_open(); if ( status != CB_OK ) return status; status = fs_size( *file_id, file_size ); if ( status != CB_OK ) return status; return CB_OK; } int as_close( int file_id ) { int status = 0; bb_close(); //status = bb_close(); //if ( status != CB_OK ) // return status; status = fs_close( file_id ); if ( status != CB_OK ) return status; return CB_OK; } /********************************************************************************/ /* Name: as_prog */ /* */ /* Parameters: int file_size */ /* - file size to check for the correct programming file. */ /* int file_id */ /* - to refer to the RPD file. */ /* */ /* Return Value: status. 
*/ /* */ /* Descriptions: program the data in the EPCS */ /* */ /********************************************************************************/ int as_prog( int file_id, int file_size ) { int page = 0; int one_byte = 0; int EPCS_Address =0; int StatusReg =0; int i,j; int status = 0; int bal_byte = 0; int byte_per_page = 256; fprintf( stdout, "\nInfo: Start programming process.\n" ); page = file_size/256; bal_byte = file_size%256; if(bal_byte) //if there is balance after divide, program the balance in the next page { page++; } //=========== Page Program command Start=========// status = bb_write( NCS, 0 ); if ( status != CB_OK ) return status; status = as_program_byte_msb( AS_WRITE_ENABLE ); if ( status != CB_OK ) return status; status = bb_write( NCS, 1 ); if ( status != CB_OK ) return status; //status = bb_flush(); //if ( status != CB_OK ) // return status; // page program fprintf( stdout, "\nInfo: Programming. Please wait few minutes...\n"); for(i=0; i<page; i++ ) { status = bb_write( NCS, 0 ); if ( status != CB_OK ) return status; status = as_program_byte_msb( AS_WRITE_ENABLE ); if ( status != CB_OK ) return status; status = bb_write( NCS, 1 ); if ( status != CB_OK ) return status; //status = bb_flush(); //if ( status != CB_OK ) // return status; status = bb_write( NCS, 0 ); if ( status != CB_OK ) return status; status = as_program_byte_msb( AS_PAGE_PROGRAM ); if ( status != CB_OK ) return status; EPCS_Address = i*256; status = as_program_byte_msb( ((EPCS_Address & 0xFF0000)>>16)); status = as_program_byte_msb( ((EPCS_Address & 0x00FF00)>>8) ); status = as_program_byte_msb( EPCS_Address & 0xFF); //status = bb_flush(); //if ( status != CB_OK ) // return status; if((i == (page - 1)) && (bal_byte != 0)) //if the last page has has been truncated less than 256 byte_per_page = bal_byte; for(j=0; j<byte_per_page; j++) { // read one byte status = fs_read( file_id, &one_byte ); if ( status != CB_OK ) return status; // Progaram a byte status = as_program_byte_lsb( 
one_byte ); if ( status != CB_OK ) return status; } status = bb_write( NCS, 1 ); if ( status != CB_OK ) return status; //status = bb_flush(); //if ( status != CB_OK ) // return status; //Program in proress status = bb_write( NCS, 0 ); if ( status != CB_OK ) return status; status = as_program_byte_msb( AS_READ_STATUS ); if ( status != CB_OK ) return status; //status = bb_flush(); //if ( status != CB_OK ) // return status; status = as_read_byte_msb(&StatusReg); if ( status != CB_OK ) return status; while((StatusReg & 0x01)) { status = as_read_byte_msb(&StatusReg); if ( status != CB_OK ) return status; } status = bb_write( NCS, 1 ); if ( status != CB_OK ) return status; //status = bb_flush(); //if ( status != CB_OK ) // return status; //Program End } fprintf( stdout, "Info: Programming successful.\n" ); //=========== Page Program command End==========// return CB_OK; } /********************************************************************************/ /* Name: as_bulk_erase */ /* */ /* Parameters: int file_size */ /* - file size to check for the correct programming file. */ /* int file_id */ /* - to refer to the RPD file. */ /* */ /* Return Value: status. 
*/ /* */ /* Descriptions: program the data in the EPCS */ /* */ /********************************************************************************/ int as_bulk_erase( void ) { int status =0; int StatusReg =0; //=========== Bulk erase command Start ===========// status = bb_write( NCS, 0 ); if ( status != CB_OK ) return status; status = as_program_byte_msb( AS_WRITE_ENABLE ); if ( status != CB_OK ) return status; status = bb_write( NCS, 1 ); if ( status != CB_OK ) return status; //status = bb_flush(); //if ( status != CB_OK ) // return status; status = bb_write( NCS, 0 ); if ( status != CB_OK ) return status; status = as_program_byte_msb( AS_ERASE_BULK ); if ( status != CB_OK ) return status; status = bb_write( NCS, 1 ); if ( status != CB_OK ) return status; //status = bb_flush(); //if ( status != CB_OK ) // return status; status = bb_write( NCS, 0 ); if ( status != CB_OK ) return status; status = as_program_byte_msb( AS_READ_STATUS ); if ( status != CB_OK ) return status; //status = bb_flush(); //if ( status != CB_OK ) // return status; //Erase in proress fprintf( stdout, "Info: Erasing. Please wait few minutes...\n" ); status = as_read_byte_msb(&StatusReg); if ( status != CB_OK ) return status; while((StatusReg & 0x01)) //Keep on polling if the WIP is high { status = as_read_byte_msb(&StatusReg); if ( status != CB_OK ) return status; } fprintf( stdout, "Info: Erase Done." ); //Erase End status = bb_write( NCS, 1 ); if ( status != CB_OK ) return status; //status = bb_flush(); //if ( status != CB_OK ) // return status; //=========== Bulk erase command End ============// return CB_OK; } /********************************************************************************/ /* Name: as_readback */ /* */ /* Parameters: none */ /* */ /* */ /* */ /* */ /* Return Value: status. 
*/ /* */ /* Descriptions: read the content of the EPCS devices and store in RPD file */ /* */ /********************************************************************************/ int as_readback( int file_id ) { //=========== Readback Program command Start=========// int status; int i; int read_byte; fprintf( stdout, "Info: Reading. Please wait few minutes...\n" ); status = bb_write( NCS, 0 ); if ( status != CB_OK ) return status; status = as_program_byte_msb( AS_READ_BYTES ); if ( status != CB_OK ) return status; status = as_program_byte_msb(0x00); status = as_program_byte_msb(0x00); status = as_program_byte_msb(0x00); //status = bb_flush(); //if ( status != CB_OK ) // return status; for(i=0; i<RPD_file_size; i++) { status = as_read_byte_lsb(&read_byte); if ( status != CB_OK ) return status; fs_write(file_id, read_byte); } status = bb_write( NCS, 0 ); if ( status != CB_OK ) return status; //status = bb_flush(); //if ( status != CB_OK ) // return status; fprintf( stdout, "Info: Read successful.\n" ); //=========== Readback Program command End==========// return CB_OK; } /********************************************************************************/ /* Name: as_verify */ /* */ /* Parameters: int file_size */ /* - file size to check for the correct programming file. */ /* int file_id */ /* - to refer to the RPD file. */ /* */ /* Return Value: None. */ /* */ /* Descriptions: verify the all the programmed data matach the data in the */ /* data in RPD file. */ /* */ /********************************************************************************/ int as_verify( int file_id, int file_size ) { //=========== Readback Program command Start=========// int status; int i; int read_byte =0; int one_byte = 0; fprintf( stdout, "Info: Verifying. 
Please wait few minutes...\n" ); fs_rewind(file_id); //reposition the file pointer of the RPD file status = bb_write( NCS, 0 ); if ( status != CB_OK ) return status; status = as_program_byte_msb( AS_READ_BYTES ); if ( status != CB_OK ) return status; status = as_program_byte_msb(0x00); status = as_program_byte_msb(0x00); status = as_program_byte_msb(0x00); //status = bb_flush(); //if ( status != CB_OK ) // return status; for(i=0; i<file_size; i++) { // read one byte from the EPCS status = as_read_byte_lsb(&read_byte); if ( status != CB_OK ) return status; // read one byte from RPD file status = fs_read( file_id, &one_byte ); if ( status != CB_OK ) return status; if(one_byte != read_byte) { status = CB_AS_VERIFY_FAIL; return status; } } fprintf( stdout, "Info: Verify completed.\n" ); status = bb_write( NCS, 1 ); if ( status != CB_OK ) return status; //status = bb_flush(); //if ( status != CB_OK ) // return status; //=========== Readback Program command End==========// return CB_OK; } /********************************************************************************/ /* Name: as_silicon_id */ /* */ /* Parameters: int file_size */ /* - file size to check for the correct programming file. */ /* */ /* Return Value: None. */ /* */ /* Descriptions: check silicon id to determine: */ /* EPCS devices. */ /* RPD file size. 
*/ /* */ /********************************************************************************/ int as_silicon_id(int file_size, int epcsDensity) { //=========== Read silicon id command Start=========// int status; int silicon_ID = 0; status = bb_write( NCS, 0 ); if ( status != CB_OK ) return status; if (epcsDensity != 128) //for EPCS1, EPCS4, EPCS16, EPCS64 { status = as_program_byte_msb( AS_READ_SILICON_ID ); if ( status != CB_OK ) return status; status = as_program_byte_msb(0x00); //3 Dummy bytes status = as_program_byte_msb(0x00); status = as_program_byte_msb(0x00); } else // for EPCS128 { status = as_program_byte_msb( AS_CHECK_SILICON_ID ); if ( status != CB_OK ) return status; status = as_program_byte_msb(0x00); //2 Dummy bytes status = as_program_byte_msb(0x00); } //status = bb_flush(); //if ( status != CB_OK ) // return status; // read silicon byte from the EPCS status = as_read_byte_msb(&silicon_ID); if ( status != CB_OK ) return status; // determine the required RPD file size and EPCS devices if(silicon_ID == EPCS1_ID) { EPCS_device = EPCS1; } else if(silicon_ID == EPCS4_ID) { EPCS_device = EPCS4; } else if(silicon_ID == EPCS16_ID) { EPCS_device = EPCS16; } else if(silicon_ID == EPCS64_ID) { EPCS_device = EPCS64; } else if(silicon_ID == EPCS128_ID) { EPCS_device = EPCS128; } else { fprintf( stdout, "\nError: Unsupported Device"); status = CB_AS_UNSUPPORTED_DEVICE; return status; } fprintf( stdout, "\nInfo: Silicon ID - 0x%x \n", silicon_ID); fprintf( stdout, "Info: Serial Configuration Device - EPCS%d\n", EPCS_device); RPD_file_size = EPCS_device * 131072; //To calculate the maximum file size for the EPCS if(file_size > RPD_file_size && file_size != DEV_READBACK) { fprintf( stdout, "\nError: Wrong programming file"); return CB_AS_WRONG_RPD_FILE; } status = bb_write( NCS, 1 ); if ( status != CB_OK ) return status; //status = bb_flush(); //if ( status != CB_OK ) // return status; //=========== Readback Program command End==========// return CB_OK; } 
/********************************************************************************/ /* Name: as_program_byte_lsb */ /* */ /* Parameters: int one_byte */ /* - The byte to dump. */ /* */ /* Return Value: None. */ /* */ /* Descriptions: Dump to parallel port bit by bit, from least significant */ /* bit to most significant bit. A positive edge clock pulse */ /* is also asserted. */ /* */ /********************************************************************************/ int as_program_byte_lsb( int one_byte ) { int bit = 0; int i = 0; int status = 0; // write from LSB to MSB for ( i = 0; i < 8; i++ ) { bit = one_byte >> i; bit = bit & 0x1; // Dump to DATA0 and insert a positive edge pulse at the same time status = bb_write( DCLK, 0 ); if ( status != CB_OK ) return status; //status = bb_write( DATA0, bit ); status = bb_write( ASDI, bit ); if ( status != CB_OK ) return status; status = bb_write( DCLK, 1 ); if ( status != CB_OK ) return status; } return CB_OK; } /********************************************************************************/ /* Name: as_program_byte_msb */ /* */ /* Parameters: int one_byte */ /* - The byte to dump. */ /* */ /* Return Value: None. */ /* */ /* Descriptions: Convert MSB to LSB and Dump to parallel port bit by bit, */ /* from most significant bit to least significant bit. */ /* A positive edge clock pulse is also asserted. */ /* */ /********************************************************************************/ int as_program_byte_msb( int one_byte ) { int status = 0; int data_byte = 0; //Convert MSB to LSB before programming as_lsb_to_msb(&one_byte, &data_byte); //After conversion, MSB will goes out first status = as_program_byte_lsb(data_byte); return CB_OK; } /********************************************************************************/ /* Name: as_read_byte_lsb */ /* */ /* Parameters: int one_byte */ /* - The byte to read. */ /* */ /* Return Value: None. 
*/ /* */ /* Descriptions: read to parallel port bit by bit, from least significant */ /* bit to most significant bit. A positive edge clock pulse */ /* is also asserted. (read during positive edge) */ /* */ /********************************************************************************/ int as_read_byte_lsb( int *one_byte ) { int bit = 0; int mask = 0x01; int i; int status = 0; *one_byte = 0; // Flush out the remaining data in Port0 before reading //status = bb_flush(); //if ( status != CB_OK ) // return status; // read from from LSB to MSB for ( i = 0; i < 8; i++ ) { // Dump to DATA0 and insert a positive edge pulse at the same time status = bb_write( DCLK, 0 ); if ( status != CB_OK ) return status; status = bb_write( DCLK, 1 ); if ( status != CB_OK ) return status; // Flush the positive clk before reading //status = bb_flush(); //if ( status != CB_OK ) // return status; status = bb_read( DATAOUT, &bit ); if ( status != CB_OK ) return status; if (bit!=0) //if bit is true *one_byte |= (mask << i); } return CB_OK; } /********************************************************************************/ /* Name: as_read_byte_msb */ /* */ /* Parameters: int one_byte */ /* - The byte to read. */ /* */ /* Return Value: None. */ /* */ /* Descriptions: read from parallel port bit by bit, from most significant */ /* bit to least significant bit. A positive edge clock pulse */ /* is also asserted. (read during positive edge) */ /* */ /********************************************************************************/ int as_read_byte_msb( int *one_byte ) { int status = 0; int data_byte = 0; status = as_read_byte_lsb(&data_byte); if ( status != CB_OK ) return status; //After conversion, MSB will come in first as_lsb_to_msb(&data_byte, one_byte); return CB_OK; } /********************************************************************************/ /* Name: as_lsb_to_msb */ /* */ /* Parameters: int *in_byte */ /* - The byte to convert. 
*/ /* int *out_byte */ /* - The converted byte */ /* */ /* Return Value: None. */ /* */ /* Descriptions: Convert LSB to MSB */ /* */ /* */ /* */ /********************************************************************************/ void as_lsb_to_msb( int *in_byte, int *out_byte) { int mask; int i; int temp; *out_byte = 0x00; for ( i = 0; i < 8; i++ ) { temp = *in_byte >> i; mask = 0x80 >> i; if(temp & 0x01) //if lsb is set inbyte, set msb for outbyte { *out_byte |= mask; } } } <file_sep>/src/bb_gpio.c /****************************************************************************/ /* */ /* Module: mb_io.c (MicroBlaster) */ /* */ /* Copyright (C) Altera Corporation 2001 */ /* */ /* Descriptions: Defines all IO control functions. operating system */ /* is defined here. Functions are operating system */ /* dependant. */ /* */ /* Revisions: 1.0 12/10/01 <NAME> */ /* Supports Altera ByteBlaster hardware download cable */ /* on Windows NT. */ /* */ /****************************************************************************/ #include <stdio.h> #include <fcntl.h> #include <stdlib.h> #include "user.h" #include "bb_gpio.h" #define GPIO_PORT_ASDI "4" #define GPIO_PORT_nCE "17" #define GPIO_PORT_nCS "18" #define GPIO_PORT_nCONFIG "27" #define GPIO_PORT_DCLK "22" #define GPIO_PORT_CONFDONE "23" #define GPIO_PORT_DATA0 "24" #define DRIVER_GPIO_EXPORT "/sys/class/gpio/export" #define DRIVER_GPIO_UNEXPORT "/sys/class/gpio/unexport" #define DRIVER_GPIO_ASDI_DIRECTION "/sys/class/gpio/gpio4/direction" #define DRIVER_GPIO_nCE_DIRECTION "/sys/class/gpio/gpio17/direction" #define DRIVER_GPIO_nCS_DIRECTION "/sys/class/gpio/gpio18/direction" #define DRIVER_GPIO_nCONFIG_DIRECTION "/sys/class/gpio/gpio27/direction" #define DRIVER_GPIO_DCLK_DIRECTION "/sys/class/gpio/gpio22/direction" #define DRIVER_GPIO_CONFDONE_DIRECTION "/sys/class/gpio/gpio23/direction" #define DRIVER_GPIO_DATA0_DIRECTION "/sys/class/gpio/gpio24/direction" #define DRIVER_GPIO_ASDI_VALUE "/sys/class/gpio/gpio4/value" 
#define DRIVER_GPIO_nCE_VALUE "/sys/class/gpio/gpio17/value" #define DRIVER_GPIO_nCS_VALUE "/sys/class/gpio/gpio18/value" #define DRIVER_GPIO_nCONFIG_VALUE "/sys/class/gpio/gpio27/value" #define DRIVER_GPIO_DCLK_VALUE "/sys/class/gpio/gpio22/value" #define DRIVER_GPIO_CONFDONE_VALUE "/sys/class/gpio/gpio23/value" #define DRIVER_GPIO_DATA0_VALUE "/sys/class/gpio/gpio24/value" int fhandle_ASDI = 0; int fhandle_nCE = 0; int fhandle_nCS = 0; int fhandle_nCONFIG = 0; int fhandle_DCLK = 0; int fhandle_CONFDONE = 0; int fhandle_DATA0 = 0; int bb_open( void ) { volatile int init_ok = 0; /* Initialization OK */ /*-------------------------------------------------------*/ // GPIO Reserve /*-------------------------------------------------------*/ init_ok = DriverWrite(DRIVER_GPIO_EXPORT, GPIO_PORT_ASDI, sizeof(GPIO_PORT_ASDI)); init_ok += DriverWrite(DRIVER_GPIO_EXPORT, GPIO_PORT_nCE, sizeof(GPIO_PORT_nCE)); init_ok += DriverWrite(DRIVER_GPIO_EXPORT, GPIO_PORT_nCS, sizeof(GPIO_PORT_nCS)); init_ok += DriverWrite(DRIVER_GPIO_EXPORT, GPIO_PORT_nCONFIG, sizeof(GPIO_PORT_nCONFIG)); init_ok += DriverWrite(DRIVER_GPIO_EXPORT, GPIO_PORT_DCLK, sizeof(GPIO_PORT_DCLK)); init_ok += DriverWrite(DRIVER_GPIO_EXPORT, GPIO_PORT_CONFDONE, sizeof(GPIO_PORT_CONFDONE)); init_ok += DriverWrite(DRIVER_GPIO_EXPORT, GPIO_PORT_DATA0, sizeof(GPIO_PORT_DATA0)); if (init_ok != 0) { fprintf( stderr, "I/O Error: Cannot open device.\n" ); bb_close(); return (CB_BB_OPEN_ERROR_OPEN_PORT); } /*-------------------------------------------------------*/ // GPIO Direction setting /*-------------------------------------------------------*/ usleep(100000); init_ok = DriverWrite(DRIVER_GPIO_ASDI_DIRECTION, "out", 4); init_ok += DriverWrite(DRIVER_GPIO_nCE_DIRECTION, "out", 4); init_ok += DriverWrite(DRIVER_GPIO_nCS_DIRECTION, "out", 4); init_ok += DriverWrite(DRIVER_GPIO_nCONFIG_DIRECTION, "out", 4); init_ok += DriverWrite(DRIVER_GPIO_DCLK_DIRECTION, "out", 4); init_ok += DriverWrite(DRIVER_GPIO_CONFDONE_DIRECTION, 
"in", 3); init_ok += DriverWrite(DRIVER_GPIO_DATA0_DIRECTION, "in", 3); if (init_ok != 0) { fprintf( stderr, "Error: Driver initialization fail.\n" ); bb_close(); return (CB_BB_OPEN_DEVICEIOCONTROL_FAIL); } /*-------------------------------------------------------*/ // Open GPIO value handle /*-------------------------------------------------------*/ fhandle_ASDI = open(DRIVER_GPIO_ASDI_VALUE, O_WRONLY); fhandle_nCE = open(DRIVER_GPIO_nCE_VALUE, O_WRONLY); fhandle_nCS = open(DRIVER_GPIO_nCS_VALUE, O_WRONLY); fhandle_nCONFIG = open(DRIVER_GPIO_nCONFIG_VALUE, O_WRONLY); fhandle_DCLK = open(DRIVER_GPIO_DCLK_VALUE, O_WRONLY); fhandle_CONFDONE = open(DRIVER_GPIO_CONFDONE_VALUE, O_RDONLY); fhandle_DATA0 = open(DRIVER_GPIO_DATA0_VALUE, O_RDONLY); if ((fhandle_ASDI < 0) || (fhandle_nCE < 0) || (fhandle_nCS < 0) || (fhandle_nCONFIG < 0) || (fhandle_DCLK < 0) || (fhandle_CONFDONE < 0) || (fhandle_DATA0 < 0)) { fprintf( stderr, "Error: Driver handling fail.\n" ); bb_close(); return (CB_BB_OPEN_DEVICEIOCONTROL_FAIL); } fprintf( stdout, "Info: Driver opened.\n" ); return (CB_OK); } void bb_close( void ) { volatile int result = 0; result = DriverWrite(DRIVER_GPIO_UNEXPORT, GPIO_PORT_ASDI, sizeof(GPIO_PORT_ASDI)); result = DriverWrite(DRIVER_GPIO_UNEXPORT, GPIO_PORT_nCE, sizeof(GPIO_PORT_nCE)); result = DriverWrite(DRIVER_GPIO_UNEXPORT, GPIO_PORT_nCS, sizeof(GPIO_PORT_nCS)); result = DriverWrite(DRIVER_GPIO_UNEXPORT, GPIO_PORT_nCONFIG, sizeof(GPIO_PORT_nCONFIG)); result = DriverWrite(DRIVER_GPIO_UNEXPORT, GPIO_PORT_DCLK, sizeof(GPIO_PORT_DCLK)); result = DriverWrite(DRIVER_GPIO_UNEXPORT, GPIO_PORT_CONFDONE, sizeof(GPIO_PORT_CONFDONE)); result = DriverWrite(DRIVER_GPIO_UNEXPORT, GPIO_PORT_DATA0, sizeof(GPIO_PORT_DATA0)); close(fhandle_ASDI); close(fhandle_nCE); close(fhandle_nCS); close(fhandle_nCONFIG); close(fhandle_DCLK); close(fhandle_CONFDONE); close(fhandle_DATA0); fprintf( stdout, "\nInfo: Driver closed.\n" ); } int bb_read ( int sig, int *data ) { char rdata = 0; if (sig 
== CONF_DONE) { lseek(fhandle_CONFDONE, 0, SEEK_SET); read(fhandle_CONFDONE, &rdata, 1); if (rdata == '0') *data = 0x80; else *data = 0x00; } else { lseek(fhandle_DATA0, 0, SEEK_SET); read(fhandle_DATA0, &rdata, 1); if (rdata == '0') *data = 0x00; else *data = 0x10; } return (CB_OK); } int bb_write ( int sig, int data ) { if (sig == DCLK) { if (data == 0) write(fhandle_DCLK, "0", 2); else write(fhandle_DCLK, "1", 2); } else if (sig == NCONFIG) { if (data == 0) write(fhandle_nCONFIG, "0", 2); else write(fhandle_nCONFIG, "1", 2); } else if (sig == NCS) { if (data == 0) write(fhandle_nCS, "0", 2); else write(fhandle_nCS, "1", 2); } else if (sig == NCE) { if (data == 0) write(fhandle_nCE, "0", 2); else write(fhandle_nCE, "1", 2); } else { if (data == 0) write(fhandle_ASDI, "0", 2); else write(fhandle_ASDI, "1", 2); } return (CB_OK); } /*---------------------------------------------------------*/ // Driver write function /*---------------------------------------------------------*/ int DriverWrite(char *desc, char *param, int len) { int fhandle = 0; fhandle = open(desc, O_WRONLY); if (fhandle < 0) return(1); else { write(fhandle, param, len); close(fhandle); return(0); } } <file_sep>/src/bb_gpio.h /*////////////////////*/ /* Global Definitions */ /*////////////////////*/ #define LPT_DATA 0 #define LPT_STATUS 1 #define LPT_CONTROL 2 #define DCLK 0x01 #define NCONFIG 0x02 #define DATA0 0x40 #define CONF_DONE 0x80 #define NSTATUS 0x10 #define NCS 0x04 #define NCE 0x08 #define ASDI 0x40 #define DATAOUT 0x10 #define TCK 0x01 #define TMS 0x02 #define TDI 0x40 #define TDO 0x80 #define BBNONE 0 #define BBMV 1 #define BBII 2 /* Port Mode for ByteBlaster II Cable */ #define BBII_CONFIG_MODE 1 /* Reset */ #define BBII_USER_MODE 0 /* User */ /* Port Mode for ByteBlasterMV Cable */ #define BBMV_CONFIG_MODE 0 /* Reset */ #define BBMV_USER_MODE 1 /* User */ int bb_open ( void ); void bb_close ( void ); //int bb_flush ( void ); //int bb_verify ( int* ); //int bb_lptread ( int, int* ); 
//int bb_lptwrite ( int, int, int ); int bb_read ( int, int* ); int bb_write ( int, int ); //int bb_reset ( int ); int DriverWrite(char *desc, char *param, int len); <file_sep>/src/fs.c #include <stdio.h> #include "user.h" #include "fs.h" /********************************************************************************/ /* Name: fs_open */ /* */ /* Descriptions: Open programming file */ /********************************************************************************/ int fs_open( char argv[], char* mode, int *file_id ) { FILE* fid; fid = fopen( argv, mode ); if ( fid == NULL ) { fprintf( stderr, "Error: Could not open file: \"%s\"!\n", argv ); return CB_FS_OPEN_FILE_ERROR; } else { *file_id = (int) fid; fprintf( stdout, "Info: Programming file: \"%s\" opened.\n", argv ); return CB_OK; } } /********************************************************************************/ /* Name: fs_close */ /* */ /* Descriptions: Close file */ /********************************************************************************/ int fs_close( int file_id ) { int status = 0; status = fclose( (FILE*) file_id); if ( status ) { fprintf( stderr, "Error: Could not close file!\n"); return CB_FS_CLOSE_FILE_ERROR; } else { return CB_OK; } } /********************************************************************************/ /* Name: fs_size */ /* */ /* Descriptions: check file size */ /********************************************************************************/ int fs_size( int file_id, long int *size ) { int status = 0; status = fseek( (FILE*) file_id, 0, S_END ); if ( status ) { fprintf( stderr, "Error: End of file could not be located!" 
); return CB_FS_SIZE_EOF_NOT_FOUND; } else { *size = ftell( (FILE*) file_id ); fseek( (FILE*) file_id, 0, S_SET ); fprintf( stdout, "Info: File size: %ld bytes.\n", *size ); return CB_OK; } } /********************************************************************************/ /* Name: fs_read */ /* */ /* Descriptions: read a byte from file */ /********************************************************************************/ int fs_read( int file_id, int *data ) { int status = 0; status = fgetc( (FILE*) file_id ); if ( status == EOF ) { fprintf( stderr, "Error: Could not read data from file!" ); return CB_FS_READ_ERROR; } else { *data = status; return CB_OK; } } /********************************************************************************/ /* Name: fs_write */ /* */ /* Descriptions: write a byte to file */ /********************************************************************************/ void fs_write(int file_id, int data) { fputc(data, (FILE*) file_id ); } /********************************************************************************/ /* Name: fs_rewind */ /* */ /* Descriptions: Repositions the file pointer to the beginning of a file */ /********************************************************************************/ void fs_rewind( int file_id) { rewind( (FILE*) file_id ); } <file_sep>/src/fs.h /*////////////////////*/ /* Global Definitions */ /*////////////////////*/ #define S_CUR 1 /* SEEK_CUR */ #define S_END 2 /* SEEK_END */ #define S_SET 0 /* SEEK_SET */ /*///////////////////////*/ /* Functions Prototyping */ /*///////////////////////*/ int fs_open( char[], char*, int* ); int fs_close( int ); int fs_size( int, long int* ); int fs_read( int, int* ); int fs_open_log(void); //Srunner 4827 void fs_write(int, int); //Srunner 4827 void fs_rewind( int file_id); //Srunner 4827
6852dd7c286451b21a68b643afb17975cf9c0731
[ "Markdown", "C", "Makefile" ]
11
Markdown
tmct-web/sconfig_tmct
43120ac20dd15d278b2708de8f8c61dd2e602ae8
344449c654db7d9d0a38b3161b909d2ad40cd44a
refs/heads/master
<repo_name>themrinalsinha/stock_screener<file_sep>/main.py from turtle import forward import models import yfinance as yf from fastapi import FastAPI, Request, Depends, BackgroundTasks from fastapi.templating import Jinja2Templates from sqlalchemy.orm import Session from database import SessionLocal, engine from pydantic import BaseModel from models import Stock models.Base.metadata.create_all(bind=engine) app = FastAPI() templates = Jinja2Templates(directory="templates") class StockRequest(BaseModel): symbol: str def get_db(): """ this function helps us to get the database session """ try: db = SessionLocal() yield db finally: db.close() @app.get("/") def home(request: Request, db: Session = Depends(get_db)): """ Displays the stock screener homepage """ stocks = db.query(Stock) _filters = request.query_params # TODO: exact filter as passed in query_params # stocks = stocks.filter_by(**dict(_filters)) _ma_50 = _filters.get("ma_50") _ma_200 = _filters.get("ma_200") _forward_pe = _filters.get("forward_pe") _dividend_yield = _filters.get("dividend_yield") if _ma_50: stocks = stocks.filter(Stock.price > Stock.ma50) if _ma_200: stocks = stocks.filter(Stock.price > Stock.ma200) if _forward_pe: stocks = stocks.filter(Stock.forward_pe < _forward_pe) if _dividend_yield: stocks = stocks.filter(Stock.dividend_yield > _dividend_yield) return templates.TemplateResponse("home.html", { 'request': request, 'stocks' : stocks.all(), **_filters, }) def fetch_stock_data(id: int): db = SessionLocal() stock = db.query(Stock).filter(Stock.id == id).first() yf_data = yf.Ticker(stock.symbol) stock.ma50 = yf_data.info.get('fiftyDayAverage') stock.ma200 = yf_data.info.get('twoHundredDayAverage') stock.price = yf_data.info.get('previousClose') stock.forward_pe = yf_data.info.get('forwardPE') stock.forward_eps = yf_data.info.get('forwardEps') stock.dividend_yield = yf_data.info.get('dividendYield') db.add(stock) db.commit() @app.post("/stock") def create_stock(stock_request: StockRequest, 
background_tasks: BackgroundTasks, db: Session = Depends(get_db)): """ Created a stock and store it in the database """ stock = db.query(Stock).filter(Stock.symbol == stock_request.symbol).first() if not stock: stock = Stock() stock.symbol = stock_request.symbol db.add(stock) db.commit() background_tasks.add_task(fetch_stock_data, stock.id) return { "code": "success", "message": "stock created successfully", } <file_sep>/docker-compose.yml version: "3.7" services: backend: build: . working_dir: /app volumes: - .:/app ports: - 8000:8000 command: "uvicorn main:app --host 0.0.0.0 --port 8000 --reload" <file_sep>/requirements.txt lxml jinja2 fastapi uvicorn yfinance sqlalchemy async-exit-stack async-generator <file_sep>/README.md # stock_screener Building a stock screener using FastAPI <file_sep>/models.py from sqlalchemy import Column, Numeric, String, Integer from database import Base class Stock(Base): __tablename__ = "stocks" id = Column(Integer, primary_key=True, index=True) symbol = Column(String, unique=True, index=True) price = Column(Numeric(10, 2)) forward_pe = Column(Numeric(10, 2)) forward_eps = Column(Numeric(10, 2)) dividend_yield = Column(Numeric(10, 2)) ma50 = Column(Numeric(10, 2)) ma200 = Column(Numeric(10, 2)) <file_sep>/Dockerfile FROM python:3.6.12-slim-buster WORKDIR /app COPY requirements.txt . RUN pip install -r requirements.txt
f1a901088f76bb1f0fafd5b87851810f5e4cbed5
[ "YAML", "Markdown", "Python", "Text", "Dockerfile" ]
6
Python
themrinalsinha/stock_screener
6597790f2ef9e4e64142ed47ca8ef2d098dd5a6d
35c4c4effa98de19eb172bde57ee08eaf48af8fe
refs/heads/master
<repo_name>js-tasks-ru/js-20210309-2_andrew277<file_sep>/03-objects-arrays-intro-to-testing/1-create-getter/index.js /** * createGetter - creates function getter which allows select value from object * @param {string} path - the strings path separated by dot * @returns {function} - function-getter which allow get value from object by set path */ export function createGetter(field) { const arr = field.split('.'); return function (product) { return arr.reduce((acum, item) => { if (!acum) { return; } else if (acum[item]) { acum = acum[item]; return acum; } else { return acum[item]; } }, {...product}); } }
dd69f19d695bffe608e38f66d9d9c38898d0400a
[ "JavaScript" ]
1
JavaScript
js-tasks-ru/js-20210309-2_andrew277
1f1d6e410848b964ff65ec21617824fc2833f087
e94c421265f7cd79e06cd6f3e8a635bce719be83
refs/heads/master
<file_sep>let title = document.createElement('title'); title.innerHTML = 'Page'; document.head.appendChild(title); let metaUtf8 = document.createElement('meta'); metaUtf8.setAttribute ('charset', 'UTF-8'); document.head.appendChild(metaUtf8); document.querySelector('html').getAttribute('lang', 'en'); //??? let h1 = document.createElement ('h1'); h1.classList.add('header'); h1.innerHTML = 'Choose Your Option'; document.body.appendChild(h1); let p1 = document.createElement ('p'); p1.innerHTML = 'But I must explain to you how all this mistaken idea of denouncing'; p1.classList.add('discription') document.body.appendChild(p1); let div4 = document.createElement('div'); div4.classList.add('header_information'); document.body.appendChild(div4); div4.appendChild(h1); div4.appendChild(p1); let p2 = document.createElement ('p'); p2.innerHTML = 'FREELANCER'; p2.classList.add('information') document.body.appendChild(p2); let h2_1 = document.createElement ('h2'); h2_1.classList.add('inf_title'); h2_1.innerHTML = 'Initially <br> designed to '; document.body.appendChild(h2_1); let p3 = document.createElement ('p'); p3.innerHTML = 'But I must explain to you how all this <br> mistaken idea of denouncing '; p3.classList.add('discription') document.body.appendChild(p3); let button1 = document.createElement ('button'); button1.classList.add('btn'); button1.innerHTML = 'Start here'; document.body.appendChild(button1); let div1 = document.createElement('div'); div1.classList.add('block_information1'); document.body.appendChild (div1); div1.appendChild(p2); div1.appendChild(h2_1); div1.appendChild(p3); div1.appendChild(button1); let p4 = document.createElement ('p'); p4.innerHTML = 'STUDIO'; p4.classList.add('information'); document.body.appendChild(p4); let h2_2 = document.createElement ('h2'); h2_2.classList.add('inf_title'); h2_2.innerHTML = 'Initially <br> designed to '; document.body.appendChild(h2_2); let p5 = document.createElement ('p'); p5.innerHTML = 'But I must explain to you 
how all this <br> mistaken idea of denouncing '; p5.classList.add('discription'); document.body.appendChild(p5); let button2 = document.createElement ('button'); button2.classList.add('btn'); button2.innerHTML = 'Start here'; document.body.appendChild(button2); let div2 = document.createElement('div'); div2.classList.add('block_information2'); document.body.appendChild (div2); div2.appendChild(p4); div2.appendChild(h2_2); div2.appendChild(p5); div2.appendChild(button2); let div3 = document.createElement('div'); div3.classList.add('block_information'); document.body.appendChild (div3); div3.appendChild(div1); div3.appendChild(div2); document.body.classList.add('container'); let style = document.createElement('style'); style.innerHTML =` *{ padding: 0; margin: 0; } .container{ max-width: 1280px; width: 80%; margin: 0 auto; height: 900px; } body{ font-family: 'Arvo', serif; } .header_information{ display: flex; justify-content: center; flex-direction: column; align-items: center; height: 300px; } .header{ font-size: 44px; line-height: 48px; font-weight: normal; } .discription{ font-size: 14px; line-height: 26px; margin-top: 35px; color: #9FA3A7; } .block_information{ display: flex; border: 1px solid #9FA3A7; border-radius: 5px; height: 500px; width: 800px; margin: 0 auto; } .block_information1, .block_information2{ display: flex; justify-content: center; align-items: center; flex-direction: column; width: 100%; } .block_information2{ background-color: #8F75BE; } .information{ font-weight: bold; font-size: 12px; line-height: 15px; text-transform: uppercase; margin-bottom: 20px; letter-spacing: 2.4px; } .block_information1 .information{ color: #9FA3A7; } .block_information2 .information{ color: #FFC80A; } .inf_title{ font-size: 36px; line-height: 46px; font-style: normal; font-weight: normal; margin-bottom: 30px; text-align: center; } .block_information2 .inf_title, .block_information2 .discription{ color: #fff; } .block_information .discription{ text-align: center; } 
.btn{ font-family: 'Arvo', serif; margin-top: 60px; width: 176px; font-style: normal; font-weight: bold; font-size: 12px; line-height: 15px; text-transform: uppercase; background-color: white; border: 4px solid #FFC80A; padding: 25px 0; border-radius: 55px; letter-spacing: 2.4px; } .block_information2 .btn{ color: white; background-color: #8F75BE; } .btn:hover{ opacity: 0.5; cursor: pointer; } `; document.head.appendChild(style);
63f0d3b874e8576b816edcd96b69768ba006c264
[ "JavaScript" ]
1
JavaScript
NatashaRosinskaya/hw-js-8
f35f796e4ee505a98dce634098d96ea70cb5950e
a96336572cc02e286885f7de3e518da0b82bc617
refs/heads/master
<repo_name>mrclsu/TokinGame<file_sep>/core/src/hu/tokin/game1/CarActor.java package hu.tokin.game1; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.graphics.Texture; import com.badlogic.gdx.graphics.g2d.Batch; import com.badlogic.gdx.graphics.g2d.Sprite; import com.badlogic.gdx.scenes.scene2d.Actor; import com.badlogic.gdx.scenes.scene2d.InputEvent; import com.badlogic.gdx.scenes.scene2d.utils.ClickListener; /** * Created by -Dinamo- on 2016.10.01.. */ public class CarActor extends Actor{ Sprite car; Texture img; static float carWidth; float carHeight; float proportionality; float carHereX; float carHereY; static float carThereX; static float carThereY; float carSpeed=RoadActor.roadSpeed; static boolean goX=false; static boolean goY=false; static boolean GO=false; public CarActor(){ img=new Texture("car.png"); car=new Sprite(img); carWidth=car.getWidth(); carHeight=car.getHeight(); addListener(new ClickListener() { @Override public void clicked(InputEvent event, float x, float y) { GO=!GO; } } ); } @Override public void act(float delta) { super.act(delta); proportionality=carWidth/carHeight; carWidth=TokinGame.wigth/(float)2.3; carHeight=carWidth/proportionality; car.setSize(carWidth,carHeight); car.setPosition(carHereX,carHereY); move(); } @Override public void draw(Batch batch, float parentAlpha) { super.draw(batch, parentAlpha); car.draw(batch); } @Override public void setSize(float width, float height) { super.setSize(width, height); car.setSize(width,height); } @Override public void setPosition(float x, float y) { super.setPosition(x, y); car.setPosition(x,y); } public void move() { if(goX) { if (carThereX > carHereX) carHereX +=Gdx.graphics.getDeltaTime()* carSpeed; else carHereX -=Gdx.graphics.getDeltaTime()* carSpeed; } if(goY) { if (carThereY > carHereY) carHereY += Gdx.graphics.getDeltaTime() * carSpeed; else carHereY -=Gdx.graphics.getDeltaTime() * carSpeed; } if(carThereX==carHereX) goX=false; if(carThereY==carHereY) goY=false; if(GO) carHereX = 
carHereX + Gdx.graphics.getDeltaTime() * carSpeed; car.setPosition(carHereX,carHereY); super.setPosition(carHereX,carHereY); } } <file_sep>/core/src/hu/tokin/game1/RoadActor.java package hu.tokin.game1; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.graphics.Texture; import com.badlogic.gdx.graphics.g2d.Batch; import com.badlogic.gdx.graphics.g2d.Sprite; import com.badlogic.gdx.scenes.scene2d.Actor; import com.badlogic.gdx.scenes.scene2d.InputEvent; import com.badlogic.gdx.scenes.scene2d.utils.ClickListener; /** * Created by -Dinamo- on 2016.10.01.. */ public class RoadActor extends Actor{ Sprite road1; Sprite road2; Texture img; float delta; float x1=0; float x2=TokinGame.wigth; static float roadSpeed=300; public RoadActor(){ debug(); img=new Texture("road.png"); road1=new Sprite(img); road2=new Sprite(img); addListener(new ClickListener() { @Override public void clicked(InputEvent event, float x, float y) { CarActor.carThereX=Gdx.input.getX()-(int)CarActor.carWidth; CarActor.carThereY=Gdx.input.getY(); float m=Gdx.input.getY()/TokinGame.height; float b=1-m; CarActor.carThereY=TokinGame.height*b-10; CarActor.goX=true; CarActor.goY=true; CarActor.GO=false; } } ); } @Override public void act(float delta) { super.act(delta); delta=Gdx.graphics.getDeltaTime(); road1.setSize(TokinGame.wigth,TokinGame.height); road2.setSize(TokinGame.wigth,TokinGame.height); super.setSize(TokinGame.wigth,TokinGame.height); setPos(); move(); } @Override public void setSize(float width, float height) { super.setSize(width, height); road1.setSize(width,height); road2.setSize(width,height); } public void setPos() { super.setPosition(0, 0); road1.setPosition(x1,0); road2.setPosition(x2,0); } @Override public void draw(Batch batch, float parentAlpha) { super.draw(batch, parentAlpha); road1.draw(batch); road2.draw(batch); } public void move(){ x1=x1-(Gdx.graphics.getDeltaTime()*roadSpeed); x2=x2-(Gdx.graphics.getDeltaTime()*roadSpeed); setPos(); if(x2<=0){ x1=0; x2=TokinGame.wigth; } } }
03353ee91f9dc2773cf92c628b499434d743193b
[ "Java" ]
2
Java
mrclsu/TokinGame
1061f6af42a752e86df0ae453ffa68a2d57ff576
73c0cf0bf0ed578ed5a894050122be961119b324
refs/heads/master
<repo_name>irahavoi/jpaExam<file_sep>/sandbox/src/main/java/com/rahavoi/entity/listener/EmployeeDebugListener.java package com.rahavoi.entity.listener; import javax.persistence.PostLoad; import com.rahavoi.entity.Employee; public class EmployeeDebugListener { @PostLoad public void preLoad(Employee emp){ System.out.println("Listener says: Employee " + emp.getName() + " was loaded!"); } } <file_sep>/sandbox/src/main/java/com/rahavoi/sanbox/CriteriaAPI.java package com.rahavoi.sanbox; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Root; import com.rahavoi.entity.Employee; public class CriteriaAPI { public static void main(String[] args){ EntityManagerFactory emf = Persistence.createEntityManagerFactory("EmployeeFactory"); EntityManager em = emf.createEntityManager(); CriteriaBuilder cb = em.getCriteriaBuilder(); CriteriaQuery<Employee> c = cb.createQuery(Employee.class); Root<Employee> emp = c.from(Employee.class); c.select(emp).where(cb.equal(emp.get("name"), "Joe")); } } <file_sep>/sandbox/src/main/java/com/rahavoi/type/PhoneType.java package com.rahavoi.type; public enum PhoneType { HOME, MOBILE, WORK } <file_sep>/sandbox/src/main/java/com/rahavoi/sanbox/service/EmployeeService.java package com.rahavoi.sanbox.service; import java.util.ArrayList; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.Tuple; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Join; import javax.persistence.criteria.JoinType; import javax.persistence.criteria.ParameterExpression; import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import com.rahavoi.entity.Department; import 
com.rahavoi.entity.Employee; import com.rahavoi.entity.Project; public class EmployeeService { private EntityManager em; public EmployeeService(EntityManager em) { this.em = em; } public Employee createEmployee(String name, Long salary) { Employee emp = new Employee(); emp.setName(name); emp.setSalary(salary); em.persist(emp); return emp; } public void removeEmployee(int id) { Employee emp = findEmployee(id); if (emp != null) { em.remove(emp); } } public Employee findEmployee(int id) { return em.find(Employee.class, id); } public Employee raiseEmployeeSalary(int id, long raise) { Employee emp = em.find(Employee.class, id); if (emp != null) { emp.setSalary(emp.getSalary() + raise); } return emp; } public List<Employee> findAllEmployees() { TypedQuery<Employee> query = em.createQuery("SELECT e FROM Employee e", Employee.class); return query.getResultList(); } public List<Employee> findEmployees(String name, String deptName, String projectName, String city) { CriteriaBuilder cb = em.getCriteriaBuilder(); CriteriaQuery<Employee> c = cb.createQuery(Employee.class); Root<Employee> emp = c.from(Employee.class); c.select(emp); c.distinct(true); Join<Employee, Project> project = emp.join("projects", JoinType.LEFT); List<Predicate> criteria = new ArrayList<Predicate>(); if (name != null) { ParameterExpression<String> p = cb.parameter(String.class, "name"); criteria.add(cb.equal(emp.get("name"), p)); } if (deptName != null) { ParameterExpression<String> p = cb.parameter(String.class, "dept"); criteria.add(cb.equal(emp.get("dept").get("name"), p)); } if (projectName != null) { ParameterExpression<String> p = cb.parameter(String.class, "project"); criteria.add(cb.equal(project.get("name"), p)); } if (city != null) { ParameterExpression<String> p = cb.parameter(String.class, "city"); criteria.add(cb.equal(emp.get("address").get("city"), p)); } if (criteria.size() == 0) { throw new RuntimeException("no criteria"); } else if (criteria.size() == 1) { c.where(criteria.get(0)); } 
else { c.where(cb.and(criteria.toArray(new Predicate[0]))); } TypedQuery<Employee> q = em.createQuery(c); if (name != null) { q.setParameter("name", name); } if (deptName != null) { q.setParameter("dept", deptName); } if (projectName != null) { q.setParameter("project", projectName); } if (city != null) { q.setParameter("city", city); } return q.getResultList(); } public void tupleTest(){ CriteriaBuilder cb = em.getCriteriaBuilder(); CriteriaQuery<Tuple> c = cb.createTupleQuery(); Root<Employee> emp = c.from(Employee.class); c.select(cb.tuple(emp.get("id"), emp.get("name"))); em.createQuery(c).getResultList(); } } <file_sep>/sandbox/src/main/java/com/rahavoi/sanbox/EmployeeManager.java package com.rahavoi.sanbox; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; import javax.persistence.TypedQuery; import com.rahavoi.entity.Employee; public class EmployeeManager { private EntityManagerFactory emf; private EntityManager em; public EmployeeManager() { this.init(); } private void init() { emf = Persistence.createEntityManagerFactory("EmployeeFactory"); em = emf.createEntityManager(); } /** * Creates an employee Entity * * @param id * @param name * @param salary * @return */ public Employee createEmployee(int id, String name, long salary) { Employee emp = new Employee(id); emp.setName(name); emp.setSalary(salary); em.persist(emp); return emp; } public Employee findEmployee(int id) { return em.find(Employee.class, id); } public void removeEmployee(int id) { Employee emp = em.find(Employee.class, id); if (emp != null) { em.remove(emp); } } public Employee raiseEmployeeSalary(int id, long raise) { Employee emp = em.find(Employee.class, id); if (emp != null) { emp.setSalary(emp.getSalary() + raise); } return emp; } public List<Employee> findAllEmployees() { TypedQuery<Employee> query = em.createQuery("SELECT e FROM Employee e", Employee.class); return query.getResultList(); } 
} <file_sep>/notes/notes.txt em.getReference() <-- will return the proxy of the asked entity containing info about the key only. Relationship maintenance is the responsibility of the application. If the entity to be removed is the target of foreign keys in other tables, those foreign keys must be cleared for the remove to succeed: Employee emp = em.find(Employee.class, empId); ParkingSpace ps = emp.getParkingSpace(); emp.setParkingSpace(null); em.remove(ps); Cascading Operations: JPA provides a mechanism to define when operations such as persist should be automatically cascaded across relationships. The "cascade" attribute in all the logical relationship annotations(@OneToOne, @OneToMany, @ManyToOne, and @ManyToMany), defines the list of entity manager operations to be cascaded: @ManyToOne(cascade = CascadeType.ALL) CascadeType.DETACH CascadeType.MERGE CascadeType.PERSIST CascadeType.REFRESH CascadeType.REMOVE Cascade Persist: @Entity public class Employee { // ... @ManyToOne(cascade=CascadeType.PERSIST) Address address; // ... } As the entity manager encounters the Employee instance and adds it to the persistence context, it will navigate across the address relationship looking for a new Address entity to manage as well. This frees us from having to persist the Address separately. Cascade settings are unidirectional. In the “Persisting an Entity” section, we mentioned that the entity instance is ignored if it is already persisted. This is true, but the entity manager will still honor the PERSIST cascade in this situation. For example, consider our Employee entity again. If the Employee instance is already managed, and a new Address instance is set in it, invoking persist() again on the Employee instance will cause the Address instance to become managed. No changes will be made to the Employee instance because it is already managed. 
Cascade Remove: There are only 2 cases when cascading remove operation is appropriate: one-to-one and one-to-many relationships, in which there is a clear parent-child relationship. It can’t be blindly applied to all one-to-one and one-tomany relationships because the target entities might also be participating in other relationships or might make sense as stand-alone entities. Listing cascade operations: @OneToOne(cascade={CascadeType.PERSIST, CascadeType.REMOVE}) Clearing the persistence Context: This is usually required only for application-managed and extended persistence contexts that are long-lived and have grown too large. For example, consider an application-managed entity manager that issues a query returning several hundred entity instances. If you don’t want to close the persistence context, you need to be able to clear out the managed entities, or else the persistence context will continue to grow over time. EntityManager.clear(); This is similar to transaction rollback. It detaches all managed entities. But if this is done in the middle of transaction commit and some changes have been written to the database, they will not be rollbacked. Synchronization with the database. (page 185 of 538) flush() <-- manual flushing of the persistence context. Most persistence providers defer SQL generation to the last possible moment for performance reasons, but this is not guaranteed. A flush basically consists of three components: new entities that need to be persisted, changed entities that need to be updated, and removed entities that need to be deleted from the database. If an entity points to another entity that is not managed or has been removed, an exception can be thrown. it is always safer to update relationships pointing to entities that will be removed before carrying out the remove() operation. 
A flush can occur at any time, so invoking remove() on an entity without clearing any relationships that point to the removed entity could result in an unexpected IllegalStateException exception if the provider decides to flush the persistence context before you get around to updating the relationships Detachment and Merging: detached entity is one that is no longer associated with a persistence context. It was managed at one point, but the persistence context might have ended or the entity might have been transformed so that it has lost its association with the persistence context that used to manage it. The persistence context, if there still is one, is no longer tracking the entity. Any changes made to the entity won’t be persisted to the database, but all the state that was there on the entity when it was detached can still be used by the application. The opposite of detachment is merging. Merging is the process by which an entity manager integrates detached entity state into a persistence context. Any changes to entity state that were made on the detached entity overwrite the current values in the persistence context. When the transaction commits, those changes will be persisted. Merging allows entities to be changed “offline” and then have those changes incorporated later on. Entities become detached when: * transaction commits; * if the persistence context is closed; * if the stateful session bean with the extended context is removed; * if clear() method is called on the entity manager; * if the detach method is called on the entity manager (detaches the passed entity. 
cascading might be applied if DETACH or ALL option is set) * if transaction rollback occurs; * when entity is serialized, it's serialized form becomes detached; @ManyToOne and @OneToOne are eagerly loaded by default; @oneToMany is lazy loaded by default; If the entity becomes detached and it's lazy-loaded children have not been accessed, we have a problem: the behavior of accessing an unloaded attribute when the entity is detached is not defined. Some vendors might attempt to resolve the relationship, while others might simply throw an exception or leave the attribute uninitialized. Merging detached Entities: The merge() operation is used to merge the state of a detached entity into a persistence context. changes made to the instance while it was detached will be written to the database. WRONG: public void updateEmployee(Employee emp) { em.merge(emp); //<-- Only changes made to the emp object prior to merging will be persisted emp.setLastAccessTime(new Date()); //<--- THIS CHANGE WILL NOT BE PERISTED. THE ARGUMENT TO merge() DOES NOT BECOME MANAGED. //A DIFFERENT MANAGED ENTITY IS UPDATED TO MATCH THE ARGUMENT. AND THEN THIS ENTITY IS RETURNED FROMM merge() } RIGHT: public void updateEmployee(Employee emp) { Employee managedEmp = em.merge(emp); managedEmp.setLastAccessTime(new Date()); } When merge() is invoked on a new entity, it behaves similarly to the persist() operation. It adds the entity to the persistence context, but instead of adding the original entity instance, it creates a new copy and manages that instance instead. The copy that is created by the merge() operation is persisted as if the persist() method were invoked on it. If the entity has a relationship to an object that has no persistent identity, the outcome of the merge operation is undefined. The merge() operation can be optionally cascaded in these cases to prevent an exception from occurring. Merging detached entities with relationships can be a tricky operation. 
Ideally, we want to merge the root of an object graph and have all related entities get merged in the process. This can work, but only if the MERGE cascade setting has been applied to all relationships in the graph. If it hasn’t, you must merge each instance that is the target of a non-cascaded relationship one at a time. Working with Detached Entities (page 192) Real-life example: There is an Employee entity that has a collection of Departments. EJB fetches all employees and passes them to the Servlet. Servlet sets employees as http request attribute and passes it to the jsp. Jsp tries to display department names. Here the problem occurs, since Employees are already detached at the moment when jsp gets them and departments are lazy-loaded. Possible solutions: 1. Eager loading. (mark departments as eagerly loaded) 2. While employees are still not detached, call getDepartments() method in order to trigger lazy-loading of the departments (ejb level). 3. Avoiding detachment. a. Either do not work with entities in your jsp at all (copy entity data into some other structure - Transfer Object pattern. Or use projection queries to retrieve the state of the entity you need). b. Or keep persistence context open while the jsp renders so that lazy-loaded relationships could be resolved (does not work with remote clients or when entities have to be serialized). In order to keep the transaction-scoped persistence context open, the transaction must be started in the web layer and then re-used at the business layer and not committed until jsp is rendered. This pattern is called Transaction View. @Resource UserTransaction tx; @EJB EmployeeService bean; try { tx.begin(); List emps = bean.findAll(); request.setAttribute("employees", emps); getServletContext().getRequestDispatcher("/listEmployees.jsp") .forward(request, response); } finally { tx.commit(); } // ... The downside of this approach is that the servlet must now manage transaction lifecycle. 
+ this logic must be duplicated across different servlets. Another option might be to use extended entity manager which lives as long as it's stateful session bean exists. Merging Strategies: Usually, when user wants to update some information, he makes changes to the detached entities that are later merged with the existing persistence context: em.merge(myEntity) merge() is used to add changes to the existing managed entity. Web tier commonly collects changes into detached entities or transfer objects and passes the changed state back to session beans to be merged and written to the database. <file_sep>/README.md Notes and code examples for JPA 2.0 exam<file_sep>/sandbox/src/main/java/com/rahavoi/sanbox/QueriesExamples.java package com.rahavoi.sanbox; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; import com.rahavoi.entity.Employee; public class QueriesExamples { private EntityManagerFactory emf = Persistence.createEntityManagerFactory("EmployeeFactory"); private EntityManager em = emf.createEntityManager(); public static void main(String[] args){ QueriesExamples qe = new QueriesExamples(); String pokahontas = "Pokahontas"; Long pokahontasSalary = qe.insecureInefficientQuery(pokahontas); String joe = "Joe"; Long joeSalary = qe.namedParametersQuery(joe); System.out.println(pokahontas + ": " + pokahontasSalary); System.out.println(joe + ": " + joeSalary); List<Employee> allEmployees = qe.namedQuery(); for(Employee e : allEmployees){ System.out.println("-------------------------------------------"); System.out.println("Employee: " + e.getName()); System.out.println("Salary: " + e.getSalary()); System.out.println("Department: " + e.getDepartment().getName()); System.out.println("-------------------------------------------"); } } /** * An example of a dynamic query having performance and security problems: * Because names are concatenated into sString instead of 
parameter binding, * a ne query is created each time (cannot cache). * It is also vulnerable to sql injection attacks. * @param em */ public Long insecureInefficientQuery(String name){ String query = "SELECT e.salary FROM Employee e " + "WHERE e.name = '" + name + "'"; return em.createQuery(query, Long.class).getSingleResult(); } /** * Still inefficiient but secure query. * The parameters ar marshalled using jdbc api and handled directly by database. * The text of the parameter is properly escaped. * @param name * @return */ public Long namedParametersQuery(String name){ String query = "SELECT e.salary FROM Employee e " + "WHERE e.name = :empName"; return em.createQuery(query, Long.class) .setParameter("empName", name) .getSingleResult(); } /** * Uses static named query "Employee.findAll" defined in the Employee class * using @NamedQuery annotation. * @return */ public List<Employee> namedQuery(){ em.getTransaction().begin(); List<Employee> result = em.createNamedQuery("Employee.findAll", Employee.class) .getResultList(); //Returned entities are managed. If any changes are made on them //before transaction commits, these changes will be persisted. for(Employee e : result){ //e.setName(e.getName() + "test tx"); } em.getTransaction().commit(); return result; } }
f7c9a6778d41241ccd8e89523e1e912e54577c5c
[ "Markdown", "Java", "Text" ]
8
Java
irahavoi/jpaExam
2a32571828ce366c531459cfaac77ba3d97a3a7e
dc9a307e66b17f363757c7b203b767066dbc1500
refs/heads/master
<file_sep># INF 221 Trabalho Final <file_sep>const g = require("graphql-import"); const schema = g.importSchema("./src/config/schema.graphql"); module.exports = schema; <file_sep>import client from "src/config/ApolloClient"; class CacheManager { public login = () => { if (client && client.cache) { client.cache.writeData({ data: { logged: true } }); } }; public logoff = () => { if (client && client.cache) { client.cache.writeData({ data: { logged: false } }); } }; } export default new CacheManager(); <file_sep>import { GraphQLResolveInfo, GraphQLSchema } from 'graphql' import { IResolvers } from 'graphql-tools/dist/Interfaces' import { Options } from 'graphql-binding' import { makePrismaBindingClass, BasePrismaOptions } from 'prisma-binding' export interface Query { users: <T = User[]>(args: { where?: UserWhereInput, orderBy?: UserOrderByInput, skip?: Int, after?: String, before?: String, first?: Int, last?: Int }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , reviewVoteses: <T = ReviewVotes[]>(args: { where?: ReviewVotesWhereInput, orderBy?: ReviewVotesOrderByInput, skip?: Int, after?: String, before?: String, first?: Int, last?: Int }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , reviews: <T = Review[]>(args: { where?: ReviewWhereInput, orderBy?: ReviewOrderByInput, skip?: Int, after?: String, before?: String, first?: Int, last?: Int }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , ufvClasses: <T = UfvClass[]>(args: { where?: UfvClassWhereInput, orderBy?: UfvClassOrderByInput, skip?: Int, after?: String, before?: String, first?: Int, last?: Int }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , user: <T = User | null>(args: { where: UserWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , reviewVotes: <T = ReviewVotes | null>(args: { where: ReviewVotesWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => 
Promise<T> , review: <T = Review | null>(args: { where: ReviewWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , ufvClass: <T = UfvClass | null>(args: { where: UfvClassWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , usersConnection: <T = UserConnection>(args: { where?: UserWhereInput, orderBy?: UserOrderByInput, skip?: Int, after?: String, before?: String, first?: Int, last?: Int }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , reviewVotesesConnection: <T = ReviewVotesConnection>(args: { where?: ReviewVotesWhereInput, orderBy?: ReviewVotesOrderByInput, skip?: Int, after?: String, before?: String, first?: Int, last?: Int }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , reviewsConnection: <T = ReviewConnection>(args: { where?: ReviewWhereInput, orderBy?: ReviewOrderByInput, skip?: Int, after?: String, before?: String, first?: Int, last?: Int }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , ufvClassesConnection: <T = UfvClassConnection>(args: { where?: UfvClassWhereInput, orderBy?: UfvClassOrderByInput, skip?: Int, after?: String, before?: String, first?: Int, last?: Int }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , node: <T = Node | null>(args: { id: ID_Output }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> } export interface Mutation { createUser: <T = User>(args: { data: UserCreateInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , createReviewVotes: <T = ReviewVotes>(args: { data: ReviewVotesCreateInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , createReview: <T = Review>(args: { data: ReviewCreateInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , createUfvClass: <T = UfvClass>(args: { data: UfvClassCreateInput }, info?: GraphQLResolveInfo | string, options?: Options) => 
Promise<T> , updateUser: <T = User | null>(args: { data: UserUpdateInput, where: UserWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , updateReviewVotes: <T = ReviewVotes | null>(args: { data: ReviewVotesUpdateInput, where: ReviewVotesWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , updateReview: <T = Review | null>(args: { data: ReviewUpdateInput, where: ReviewWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , updateUfvClass: <T = UfvClass | null>(args: { data: UfvClassUpdateInput, where: UfvClassWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , deleteUser: <T = User | null>(args: { where: UserWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , deleteReviewVotes: <T = ReviewVotes | null>(args: { where: ReviewVotesWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , deleteReview: <T = Review | null>(args: { where: ReviewWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , deleteUfvClass: <T = UfvClass | null>(args: { where: UfvClassWhereUniqueInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , upsertUser: <T = User>(args: { where: UserWhereUniqueInput, create: UserCreateInput, update: UserUpdateInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , upsertReviewVotes: <T = ReviewVotes>(args: { where: ReviewVotesWhereUniqueInput, create: ReviewVotesCreateInput, update: ReviewVotesUpdateInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , upsertReview: <T = Review>(args: { where: ReviewWhereUniqueInput, create: ReviewCreateInput, update: ReviewUpdateInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , upsertUfvClass: <T = UfvClass>(args: { where: UfvClassWhereUniqueInput, create: 
UfvClassCreateInput, update: UfvClassUpdateInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , updateManyUsers: <T = BatchPayload>(args: { data: UserUpdateInput, where?: UserWhereInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , updateManyReviewVoteses: <T = BatchPayload>(args: { data: ReviewVotesUpdateInput, where?: ReviewVotesWhereInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , updateManyReviews: <T = BatchPayload>(args: { data: ReviewUpdateInput, where?: ReviewWhereInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , updateManyUfvClasses: <T = BatchPayload>(args: { data: UfvClassUpdateInput, where?: UfvClassWhereInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , deleteManyUsers: <T = BatchPayload>(args: { where?: UserWhereInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , deleteManyReviewVoteses: <T = BatchPayload>(args: { where?: ReviewVotesWhereInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , deleteManyReviews: <T = BatchPayload>(args: { where?: ReviewWhereInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> , deleteManyUfvClasses: <T = BatchPayload>(args: { where?: UfvClassWhereInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<T> } export interface Subscription { user: <T = UserSubscriptionPayload | null>(args: { where?: UserSubscriptionWhereInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<AsyncIterator<T>> , reviewVotes: <T = ReviewVotesSubscriptionPayload | null>(args: { where?: ReviewVotesSubscriptionWhereInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<AsyncIterator<T>> , review: <T = ReviewSubscriptionPayload | null>(args: { where?: ReviewSubscriptionWhereInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<AsyncIterator<T>> , 
ufvClass: <T = UfvClassSubscriptionPayload | null>(args: { where?: UfvClassSubscriptionWhereInput }, info?: GraphQLResolveInfo | string, options?: Options) => Promise<AsyncIterator<T>> } export interface Exists { User: (where?: UserWhereInput) => Promise<boolean> ReviewVotes: (where?: ReviewVotesWhereInput) => Promise<boolean> Review: (where?: ReviewWhereInput) => Promise<boolean> UfvClass: (where?: UfvClassWhereInput) => Promise<boolean> } export interface Prisma { query: Query mutation: Mutation subscription: Subscription exists: Exists request: <T = any>(query: string, variables?: {[key: string]: any}) => Promise<T> delegate(operation: 'query' | 'mutation', fieldName: string, args: { [key: string]: any; }, infoOrQuery?: GraphQLResolveInfo | string, options?: Options): Promise<any>; delegateSubscription(fieldName: string, args?: { [key: string]: any; }, infoOrQuery?: GraphQLResolveInfo | string, options?: Options): Promise<AsyncIterator<any>>; getAbstractResolvers(filterSchema?: GraphQLSchema | string): IResolvers; } export interface BindingConstructor<T> { new(options: BasePrismaOptions): T } /** * Type Defs */ const typeDefs = `type AggregateReview { count: Int! } type AggregateReviewVotes { count: Int! } type AggregateUfvClass { count: Int! } type AggregateUser { count: Int! } type BatchPayload { """The number of nodes that have been affected by the Batch operation.""" count: Long! 
} scalar DateTime enum Department { Depto__de_Economia_Rural Depto__de_Engenharia_Agricola Depto__de_Engenharia_Florestal Depto__de_Fitopatologia Depto__de_Fitotecnia Depto__de_Solos Depto__de_Zootecnia Depto__de_Biologia_Animal Depto__de_Biologia_Geral Depto__de_Biologia_Vegetal Depto__de_Bioquimica_e_Biologia_Molecular Depto__de_Educacao_Fisica Depto__de_Entomologia Depto__de_Microbiologia Depto__de_Medicina_e_Enfermagem Depto__de_Nutricaoo_e_Saude Depto__de_Veterinaria Depto__de_Arquitetura_e_Urbanismo Depto__de_Engenharia_Civil Depto__de_Engenharia_Eletrica Depto__de_Engenharia_de_Producao_e_Mecanica Depto__de_Estatistica Depto__de_Fisica Depto__de_Informatica Depto__de_Matematica Depto__de_Quimica Depto__de_Tecnologia_de_Alimentos Depto__de_Administracao_e_Contabilidade Depto__de_Artes_e_Humanidades Depto__de_Ciencias_Sociais Depto__de_Comunicacao_Social Depto__de_Direito Depto__de_Economia Depto__de_Economia_Domestica Depto__de_Educacao Depto__de_Geografia Depto__de_Historia Depto__de_Letras } """ The \`Long\` scalar type represents non-fractional signed whole numeric values. Long can represent values between -(2^63) and 2^63 - 1. """ scalar Long type Mutation { createUser(data: UserCreateInput!): User! createReviewVotes(data: ReviewVotesCreateInput!): ReviewVotes! createReview(data: ReviewCreateInput!): Review! createUfvClass(data: UfvClassCreateInput!): UfvClass! 
updateUser(data: UserUpdateInput!, where: UserWhereUniqueInput!): User updateReviewVotes(data: ReviewVotesUpdateInput!, where: ReviewVotesWhereUniqueInput!): ReviewVotes updateReview(data: ReviewUpdateInput!, where: ReviewWhereUniqueInput!): Review updateUfvClass(data: UfvClassUpdateInput!, where: UfvClassWhereUniqueInput!): UfvClass deleteUser(where: UserWhereUniqueInput!): User deleteReviewVotes(where: ReviewVotesWhereUniqueInput!): ReviewVotes deleteReview(where: ReviewWhereUniqueInput!): Review deleteUfvClass(where: UfvClassWhereUniqueInput!): UfvClass upsertUser(where: UserWhereUniqueInput!, create: UserCreateInput!, update: UserUpdateInput!): User! upsertReviewVotes(where: ReviewVotesWhereUniqueInput!, create: ReviewVotesCreateInput!, update: ReviewVotesUpdateInput!): ReviewVotes! upsertReview(where: ReviewWhereUniqueInput!, create: ReviewCreateInput!, update: ReviewUpdateInput!): Review! upsertUfvClass(where: UfvClassWhereUniqueInput!, create: UfvClassCreateInput!, update: UfvClassUpdateInput!): UfvClass! updateManyUsers(data: UserUpdateInput!, where: UserWhereInput): BatchPayload! updateManyReviewVoteses(data: ReviewVotesUpdateInput!, where: ReviewVotesWhereInput): BatchPayload! updateManyReviews(data: ReviewUpdateInput!, where: ReviewWhereInput): BatchPayload! updateManyUfvClasses(data: UfvClassUpdateInput!, where: UfvClassWhereInput): BatchPayload! deleteManyUsers(where: UserWhereInput): BatchPayload! deleteManyReviewVoteses(where: ReviewVotesWhereInput): BatchPayload! deleteManyReviews(where: ReviewWhereInput): BatchPayload! deleteManyUfvClasses(where: UfvClassWhereInput): BatchPayload! } enum MutationType { CREATED UPDATED DELETED } """An object with an ID""" interface Node { """The id of the object.""" id: ID! } """Information about pagination in a connection.""" type PageInfo { """When paginating forwards, are there more items?""" hasNextPage: Boolean! """When paginating backwards, are there more items?""" hasPreviousPage: Boolean! 
"""When paginating backwards, the cursor to continue.""" startCursor: String """When paginating forwards, the cursor to continue.""" endCursor: String } type Query { users(where: UserWhereInput, orderBy: UserOrderByInput, skip: Int, after: String, before: String, first: Int, last: Int): [User]! reviewVoteses(where: ReviewVotesWhereInput, orderBy: ReviewVotesOrderByInput, skip: Int, after: String, before: String, first: Int, last: Int): [ReviewVotes]! reviews(where: ReviewWhereInput, orderBy: ReviewOrderByInput, skip: Int, after: String, before: String, first: Int, last: Int): [Review]! ufvClasses(where: UfvClassWhereInput, orderBy: UfvClassOrderByInput, skip: Int, after: String, before: String, first: Int, last: Int): [UfvClass]! user(where: UserWhereUniqueInput!): User reviewVotes(where: ReviewVotesWhereUniqueInput!): ReviewVotes review(where: ReviewWhereUniqueInput!): Review ufvClass(where: UfvClassWhereUniqueInput!): UfvClass usersConnection(where: UserWhereInput, orderBy: UserOrderByInput, skip: Int, after: String, before: String, first: Int, last: Int): UserConnection! reviewVotesesConnection(where: ReviewVotesWhereInput, orderBy: ReviewVotesOrderByInput, skip: Int, after: String, before: String, first: Int, last: Int): ReviewVotesConnection! reviewsConnection(where: ReviewWhereInput, orderBy: ReviewOrderByInput, skip: Int, after: String, before: String, first: Int, last: Int): ReviewConnection! ufvClassesConnection(where: UfvClassWhereInput, orderBy: UfvClassOrderByInput, skip: Int, after: String, before: String, first: Int, last: Int): UfvClassConnection! """Fetches an object given its ID""" node( """The ID of an object""" id: ID! ): Node } type Review implements Node { id: ID! useful: ReviewUseful! easy: ReviewEasy! description: String! anonymous: Boolean! recommended: Boolean! teacher: String! score: Int! classReviewed(where: UfvClassWhereInput): UfvClass! reviewer(where: UserWhereInput): User! 
votes(where: ReviewVotesWhereInput, orderBy: ReviewVotesOrderByInput, skip: Int, after: String, before: String, first: Int, last: Int): [ReviewVotes!] createdAt: DateTime! updatedAt: DateTime! } """A connection to a list of items.""" type ReviewConnection { """Information to aid in pagination.""" pageInfo: PageInfo! """A list of edges.""" edges: [ReviewEdge]! aggregate: AggregateReview! } input ReviewCreateInput { useful: ReviewUseful! easy: ReviewEasy! description: String! anonymous: Boolean! recommended: Boolean! teacher: String! score: Int classReviewed: UfvClassCreateOneWithoutReviewsInput! reviewer: UserCreateOneWithoutReviewsInput! votes: ReviewVotesCreateManyWithoutReviewInput } input ReviewCreateManyWithoutClassReviewedInput { create: [ReviewCreateWithoutClassReviewedInput!] connect: [ReviewWhereUniqueInput!] } input ReviewCreateManyWithoutReviewerInput { create: [ReviewCreateWithoutReviewerInput!] connect: [ReviewWhereUniqueInput!] } input ReviewCreateOneWithoutVotesInput { create: ReviewCreateWithoutVotesInput connect: ReviewWhereUniqueInput } input ReviewCreateWithoutClassReviewedInput { useful: ReviewUseful! easy: ReviewEasy! description: String! anonymous: Boolean! recommended: Boolean! teacher: String! score: Int reviewer: UserCreateOneWithoutReviewsInput! votes: ReviewVotesCreateManyWithoutReviewInput } input ReviewCreateWithoutReviewerInput { useful: ReviewUseful! easy: ReviewEasy! description: String! anonymous: Boolean! recommended: Boolean! teacher: String! score: Int classReviewed: UfvClassCreateOneWithoutReviewsInput! votes: ReviewVotesCreateManyWithoutReviewInput } input ReviewCreateWithoutVotesInput { useful: ReviewUseful! easy: ReviewEasy! description: String! anonymous: Boolean! recommended: Boolean! teacher: String! score: Int classReviewed: UfvClassCreateOneWithoutReviewsInput! reviewer: UserCreateOneWithoutReviewsInput! 
} enum ReviewEasy { E0 E1 E2 E3 E4 E5 } """An edge in a connection.""" type ReviewEdge { """The item at the end of the edge.""" node: Review! """A cursor for use in pagination.""" cursor: String! } enum ReviewOrderByInput { id_ASC id_DESC useful_ASC useful_DESC easy_ASC easy_DESC description_ASC description_DESC anonymous_ASC anonymous_DESC recommended_ASC recommended_DESC teacher_ASC teacher_DESC score_ASC score_DESC createdAt_ASC createdAt_DESC updatedAt_ASC updatedAt_DESC } type ReviewPreviousValues { id: ID! useful: ReviewUseful! easy: ReviewEasy! description: String! anonymous: Boolean! recommended: Boolean! teacher: String! score: Int! createdAt: DateTime! updatedAt: DateTime! } type ReviewSubscriptionPayload { mutation: MutationType! node: Review updatedFields: [String!] previousValues: ReviewPreviousValues } input ReviewSubscriptionWhereInput { """Logical AND on all given filters.""" AND: [ReviewSubscriptionWhereInput!] """Logical OR on all given filters.""" OR: [ReviewSubscriptionWhereInput!] """Logical NOT on all given filters combined by AND.""" NOT: [ReviewSubscriptionWhereInput!] """ The subscription event gets dispatched when it's listed in mutation_in """ mutation_in: [MutationType!] """ The subscription event gets only dispatched when one of the updated fields names is included in this list """ updatedFields_contains: String """ The subscription event gets only dispatched when all of the field names included in this list have been updated """ updatedFields_contains_every: [String!] """ The subscription event gets only dispatched when some of the field names included in this list have been updated """ updatedFields_contains_some: [String!] 
node: ReviewWhereInput } input ReviewUpdateInput { useful: ReviewUseful easy: ReviewEasy description: String anonymous: Boolean recommended: Boolean teacher: String score: Int classReviewed: UfvClassUpdateOneWithoutReviewsInput reviewer: UserUpdateOneWithoutReviewsInput votes: ReviewVotesUpdateManyWithoutReviewInput } input ReviewUpdateManyWithoutClassReviewedInput { create: [ReviewCreateWithoutClassReviewedInput!] connect: [ReviewWhereUniqueInput!] disconnect: [ReviewWhereUniqueInput!] delete: [ReviewWhereUniqueInput!] update: [ReviewUpdateWithWhereUniqueWithoutClassReviewedInput!] upsert: [ReviewUpsertWithWhereUniqueWithoutClassReviewedInput!] } input ReviewUpdateManyWithoutReviewerInput { create: [ReviewCreateWithoutReviewerInput!] connect: [ReviewWhereUniqueInput!] disconnect: [ReviewWhereUniqueInput!] delete: [ReviewWhereUniqueInput!] update: [ReviewUpdateWithWhereUniqueWithoutReviewerInput!] upsert: [ReviewUpsertWithWhereUniqueWithoutReviewerInput!] } input ReviewUpdateOneWithoutVotesInput { create: ReviewCreateWithoutVotesInput connect: ReviewWhereUniqueInput delete: Boolean update: ReviewUpdateWithoutVotesDataInput upsert: ReviewUpsertWithoutVotesInput } input ReviewUpdateWithoutClassReviewedDataInput { useful: ReviewUseful easy: ReviewEasy description: String anonymous: Boolean recommended: Boolean teacher: String score: Int reviewer: UserUpdateOneWithoutReviewsInput votes: ReviewVotesUpdateManyWithoutReviewInput } input ReviewUpdateWithoutReviewerDataInput { useful: ReviewUseful easy: ReviewEasy description: String anonymous: Boolean recommended: Boolean teacher: String score: Int classReviewed: UfvClassUpdateOneWithoutReviewsInput votes: ReviewVotesUpdateManyWithoutReviewInput } input ReviewUpdateWithoutVotesDataInput { useful: ReviewUseful easy: ReviewEasy description: String anonymous: Boolean recommended: Boolean teacher: String score: Int classReviewed: UfvClassUpdateOneWithoutReviewsInput reviewer: UserUpdateOneWithoutReviewsInput } input 
ReviewUpdateWithWhereUniqueWithoutClassReviewedInput { where: ReviewWhereUniqueInput! data: ReviewUpdateWithoutClassReviewedDataInput! } input ReviewUpdateWithWhereUniqueWithoutReviewerInput { where: ReviewWhereUniqueInput! data: ReviewUpdateWithoutReviewerDataInput! } input ReviewUpsertWithoutVotesInput { update: ReviewUpdateWithoutVotesDataInput! create: ReviewCreateWithoutVotesInput! } input ReviewUpsertWithWhereUniqueWithoutClassReviewedInput { where: ReviewWhereUniqueInput! update: ReviewUpdateWithoutClassReviewedDataInput! create: ReviewCreateWithoutClassReviewedInput! } input ReviewUpsertWithWhereUniqueWithoutReviewerInput { where: ReviewWhereUniqueInput! update: ReviewUpdateWithoutReviewerDataInput! create: ReviewCreateWithoutReviewerInput! } enum ReviewUseful { U0 U1 U2 U3 U4 U5 } type ReviewVotes implements Node { id: ID! review(where: ReviewWhereInput): Review! user(where: UserWhereInput): User! type: ReviewVotesTypes! } """A connection to a list of items.""" type ReviewVotesConnection { """Information to aid in pagination.""" pageInfo: PageInfo! """A list of edges.""" edges: [ReviewVotesEdge]! aggregate: AggregateReviewVotes! } input ReviewVotesCreateInput { type: ReviewVotesTypes! review: ReviewCreateOneWithoutVotesInput! user: UserCreateOneWithoutVotesInput! } input ReviewVotesCreateManyWithoutReviewInput { create: [ReviewVotesCreateWithoutReviewInput!] connect: [ReviewVotesWhereUniqueInput!] } input ReviewVotesCreateManyWithoutUserInput { create: [ReviewVotesCreateWithoutUserInput!] connect: [ReviewVotesWhereUniqueInput!] } input ReviewVotesCreateWithoutReviewInput { type: ReviewVotesTypes! user: UserCreateOneWithoutVotesInput! } input ReviewVotesCreateWithoutUserInput { type: ReviewVotesTypes! review: ReviewCreateOneWithoutVotesInput! } """An edge in a connection.""" type ReviewVotesEdge { """The item at the end of the edge.""" node: ReviewVotes! """A cursor for use in pagination.""" cursor: String! 
} enum ReviewVotesOrderByInput { id_ASC id_DESC type_ASC type_DESC updatedAt_ASC updatedAt_DESC createdAt_ASC createdAt_DESC } type ReviewVotesPreviousValues { id: ID! type: ReviewVotesTypes! } type ReviewVotesSubscriptionPayload { mutation: MutationType! node: ReviewVotes updatedFields: [String!] previousValues: ReviewVotesPreviousValues } input ReviewVotesSubscriptionWhereInput { """Logical AND on all given filters.""" AND: [ReviewVotesSubscriptionWhereInput!] """Logical OR on all given filters.""" OR: [ReviewVotesSubscriptionWhereInput!] """Logical NOT on all given filters combined by AND.""" NOT: [ReviewVotesSubscriptionWhereInput!] """ The subscription event gets dispatched when it's listed in mutation_in """ mutation_in: [MutationType!] """ The subscription event gets only dispatched when one of the updated fields names is included in this list """ updatedFields_contains: String """ The subscription event gets only dispatched when all of the field names included in this list have been updated """ updatedFields_contains_every: [String!] """ The subscription event gets only dispatched when some of the field names included in this list have been updated """ updatedFields_contains_some: [String!] node: ReviewVotesWhereInput } enum ReviewVotesTypes { Agree Disagree } input ReviewVotesUpdateInput { type: ReviewVotesTypes review: ReviewUpdateOneWithoutVotesInput user: UserUpdateOneWithoutVotesInput } input ReviewVotesUpdateManyWithoutReviewInput { create: [ReviewVotesCreateWithoutReviewInput!] connect: [ReviewVotesWhereUniqueInput!] disconnect: [ReviewVotesWhereUniqueInput!] delete: [ReviewVotesWhereUniqueInput!] update: [ReviewVotesUpdateWithWhereUniqueWithoutReviewInput!] upsert: [ReviewVotesUpsertWithWhereUniqueWithoutReviewInput!] } input ReviewVotesUpdateManyWithoutUserInput { create: [ReviewVotesCreateWithoutUserInput!] connect: [ReviewVotesWhereUniqueInput!] disconnect: [ReviewVotesWhereUniqueInput!] delete: [ReviewVotesWhereUniqueInput!] 
update: [ReviewVotesUpdateWithWhereUniqueWithoutUserInput!] upsert: [ReviewVotesUpsertWithWhereUniqueWithoutUserInput!] } input ReviewVotesUpdateWithoutReviewDataInput { type: ReviewVotesTypes user: UserUpdateOneWithoutVotesInput } input ReviewVotesUpdateWithoutUserDataInput { type: ReviewVotesTypes review: ReviewUpdateOneWithoutVotesInput } input ReviewVotesUpdateWithWhereUniqueWithoutReviewInput { where: ReviewVotesWhereUniqueInput! data: ReviewVotesUpdateWithoutReviewDataInput! } input ReviewVotesUpdateWithWhereUniqueWithoutUserInput { where: ReviewVotesWhereUniqueInput! data: ReviewVotesUpdateWithoutUserDataInput! } input ReviewVotesUpsertWithWhereUniqueWithoutReviewInput { where: ReviewVotesWhereUniqueInput! update: ReviewVotesUpdateWithoutReviewDataInput! create: ReviewVotesCreateWithoutReviewInput! } input ReviewVotesUpsertWithWhereUniqueWithoutUserInput { where: ReviewVotesWhereUniqueInput! update: ReviewVotesUpdateWithoutUserDataInput! create: ReviewVotesCreateWithoutUserInput! } input ReviewVotesWhereInput { """Logical AND on all given filters.""" AND: [ReviewVotesWhereInput!] """Logical OR on all given filters.""" OR: [ReviewVotesWhereInput!] """Logical NOT on all given filters combined by AND.""" NOT: [ReviewVotesWhereInput!] id: ID """All values that are not equal to given value.""" id_not: ID """All values that are contained in given list.""" id_in: [ID!] """All values that are not contained in given list.""" id_not_in: [ID!] 
"""All values less than the given value.""" id_lt: ID """All values less than or equal the given value.""" id_lte: ID """All values greater than the given value.""" id_gt: ID """All values greater than or equal the given value.""" id_gte: ID """All values containing the given string.""" id_contains: ID """All values not containing the given string.""" id_not_contains: ID """All values starting with the given string.""" id_starts_with: ID """All values not starting with the given string.""" id_not_starts_with: ID """All values ending with the given string.""" id_ends_with: ID """All values not ending with the given string.""" id_not_ends_with: ID type: ReviewVotesTypes """All values that are not equal to given value.""" type_not: ReviewVotesTypes """All values that are contained in given list.""" type_in: [ReviewVotesTypes!] """All values that are not contained in given list.""" type_not_in: [ReviewVotesTypes!] review: ReviewWhereInput user: UserWhereInput } input ReviewVotesWhereUniqueInput { id: ID } input ReviewWhereInput { """Logical AND on all given filters.""" AND: [ReviewWhereInput!] """Logical OR on all given filters.""" OR: [ReviewWhereInput!] """Logical NOT on all given filters combined by AND.""" NOT: [ReviewWhereInput!] id: ID """All values that are not equal to given value.""" id_not: ID """All values that are contained in given list.""" id_in: [ID!] """All values that are not contained in given list.""" id_not_in: [ID!] 
"""All values less than the given value.""" id_lt: ID """All values less than or equal the given value.""" id_lte: ID """All values greater than the given value.""" id_gt: ID """All values greater than or equal the given value.""" id_gte: ID """All values containing the given string.""" id_contains: ID """All values not containing the given string.""" id_not_contains: ID """All values starting with the given string.""" id_starts_with: ID """All values not starting with the given string.""" id_not_starts_with: ID """All values ending with the given string.""" id_ends_with: ID """All values not ending with the given string.""" id_not_ends_with: ID useful: ReviewUseful """All values that are not equal to given value.""" useful_not: ReviewUseful """All values that are contained in given list.""" useful_in: [ReviewUseful!] """All values that are not contained in given list.""" useful_not_in: [ReviewUseful!] easy: ReviewEasy """All values that are not equal to given value.""" easy_not: ReviewEasy """All values that are contained in given list.""" easy_in: [ReviewEasy!] """All values that are not contained in given list.""" easy_not_in: [ReviewEasy!] description: String """All values that are not equal to given value.""" description_not: String """All values that are contained in given list.""" description_in: [String!] """All values that are not contained in given list.""" description_not_in: [String!] 
"""All values less than the given value.""" description_lt: String """All values less than or equal the given value.""" description_lte: String """All values greater than the given value.""" description_gt: String """All values greater than or equal the given value.""" description_gte: String """All values containing the given string.""" description_contains: String """All values not containing the given string.""" description_not_contains: String """All values starting with the given string.""" description_starts_with: String """All values not starting with the given string.""" description_not_starts_with: String """All values ending with the given string.""" description_ends_with: String """All values not ending with the given string.""" description_not_ends_with: String anonymous: Boolean """All values that are not equal to given value.""" anonymous_not: Boolean recommended: Boolean """All values that are not equal to given value.""" recommended_not: Boolean teacher: String """All values that are not equal to given value.""" teacher_not: String """All values that are contained in given list.""" teacher_in: [String!] """All values that are not contained in given list.""" teacher_not_in: [String!] 
"""All values less than the given value.""" teacher_lt: String """All values less than or equal the given value.""" teacher_lte: String """All values greater than the given value.""" teacher_gt: String """All values greater than or equal the given value.""" teacher_gte: String """All values containing the given string.""" teacher_contains: String """All values not containing the given string.""" teacher_not_contains: String """All values starting with the given string.""" teacher_starts_with: String """All values not starting with the given string.""" teacher_not_starts_with: String """All values ending with the given string.""" teacher_ends_with: String """All values not ending with the given string.""" teacher_not_ends_with: String score: Int """All values that are not equal to given value.""" score_not: Int """All values that are contained in given list.""" score_in: [Int!] """All values that are not contained in given list.""" score_not_in: [Int!] """All values less than the given value.""" score_lt: Int """All values less than or equal the given value.""" score_lte: Int """All values greater than the given value.""" score_gt: Int """All values greater than or equal the given value.""" score_gte: Int createdAt: DateTime """All values that are not equal to given value.""" createdAt_not: DateTime """All values that are contained in given list.""" createdAt_in: [DateTime!] """All values that are not contained in given list.""" createdAt_not_in: [DateTime!] """All values less than the given value.""" createdAt_lt: DateTime """All values less than or equal the given value.""" createdAt_lte: DateTime """All values greater than the given value.""" createdAt_gt: DateTime """All values greater than or equal the given value.""" createdAt_gte: DateTime updatedAt: DateTime """All values that are not equal to given value.""" updatedAt_not: DateTime """All values that are contained in given list.""" updatedAt_in: [DateTime!] 
"""All values that are not contained in given list.""" updatedAt_not_in: [DateTime!] """All values less than the given value.""" updatedAt_lt: DateTime """All values less than or equal the given value.""" updatedAt_lte: DateTime """All values greater than the given value.""" updatedAt_gt: DateTime """All values greater than or equal the given value.""" updatedAt_gte: DateTime classReviewed: UfvClassWhereInput reviewer: UserWhereInput votes_every: ReviewVotesWhereInput votes_some: ReviewVotesWhereInput votes_none: ReviewVotesWhereInput } input ReviewWhereUniqueInput { id: ID } type Subscription { user(where: UserSubscriptionWhereInput): UserSubscriptionPayload reviewVotes(where: ReviewVotesSubscriptionWhereInput): ReviewVotesSubscriptionPayload review(where: ReviewSubscriptionWhereInput): ReviewSubscriptionPayload ufvClass(where: UfvClassSubscriptionWhereInput): UfvClassSubscriptionPayload } type UfvClass implements Node { id: ID! cod: String! name: String! optional: Boolean! department: Department! useful: Float! easy: Float! recommended: Int! reviews(where: ReviewWhereInput, orderBy: ReviewOrderByInput, skip: Int, after: String, before: String, first: Int, last: Int): [Review!] } """A connection to a list of items.""" type UfvClassConnection { """Information to aid in pagination.""" pageInfo: PageInfo! """A list of edges.""" edges: [UfvClassEdge]! aggregate: AggregateUfvClass! } input UfvClassCreateInput { cod: String! name: String! optional: Boolean! department: Department! useful: Float easy: Float recommended: Int reviews: ReviewCreateManyWithoutClassReviewedInput } input UfvClassCreateOneWithoutReviewsInput { create: UfvClassCreateWithoutReviewsInput connect: UfvClassWhereUniqueInput } input UfvClassCreateWithoutReviewsInput { cod: String! name: String! optional: Boolean! department: Department! useful: Float easy: Float recommended: Int } """An edge in a connection.""" type UfvClassEdge { """The item at the end of the edge.""" node: UfvClass! 
"""A cursor for use in pagination.""" cursor: String! } enum UfvClassOrderByInput { id_ASC id_DESC cod_ASC cod_DESC name_ASC name_DESC optional_ASC optional_DESC department_ASC department_DESC useful_ASC useful_DESC easy_ASC easy_DESC recommended_ASC recommended_DESC updatedAt_ASC updatedAt_DESC createdAt_ASC createdAt_DESC } type UfvClassPreviousValues { id: ID! cod: String! name: String! optional: Boolean! department: Department! useful: Float! easy: Float! recommended: Int! } type UfvClassSubscriptionPayload { mutation: MutationType! node: UfvClass updatedFields: [String!] previousValues: UfvClassPreviousValues } input UfvClassSubscriptionWhereInput { """Logical AND on all given filters.""" AND: [UfvClassSubscriptionWhereInput!] """Logical OR on all given filters.""" OR: [UfvClassSubscriptionWhereInput!] """Logical NOT on all given filters combined by AND.""" NOT: [UfvClassSubscriptionWhereInput!] """ The subscription event gets dispatched when it's listed in mutation_in """ mutation_in: [MutationType!] """ The subscription event gets only dispatched when one of the updated fields names is included in this list """ updatedFields_contains: String """ The subscription event gets only dispatched when all of the field names included in this list have been updated """ updatedFields_contains_every: [String!] """ The subscription event gets only dispatched when some of the field names included in this list have been updated """ updatedFields_contains_some: [String!] 
node: UfvClassWhereInput } input UfvClassUpdateInput { cod: String name: String optional: Boolean department: Department useful: Float easy: Float recommended: Int reviews: ReviewUpdateManyWithoutClassReviewedInput } input UfvClassUpdateOneWithoutReviewsInput { create: UfvClassCreateWithoutReviewsInput connect: UfvClassWhereUniqueInput delete: Boolean update: UfvClassUpdateWithoutReviewsDataInput upsert: UfvClassUpsertWithoutReviewsInput } input UfvClassUpdateWithoutReviewsDataInput { cod: String name: String optional: Boolean department: Department useful: Float easy: Float recommended: Int } input UfvClassUpsertWithoutReviewsInput { update: UfvClassUpdateWithoutReviewsDataInput! create: UfvClassCreateWithoutReviewsInput! } input UfvClassWhereInput { """Logical AND on all given filters.""" AND: [UfvClassWhereInput!] """Logical OR on all given filters.""" OR: [UfvClassWhereInput!] """Logical NOT on all given filters combined by AND.""" NOT: [UfvClassWhereInput!] id: ID """All values that are not equal to given value.""" id_not: ID """All values that are contained in given list.""" id_in: [ID!] """All values that are not contained in given list.""" id_not_in: [ID!] """All values less than the given value.""" id_lt: ID """All values less than or equal the given value.""" id_lte: ID """All values greater than the given value.""" id_gt: ID """All values greater than or equal the given value.""" id_gte: ID """All values containing the given string.""" id_contains: ID """All values not containing the given string.""" id_not_contains: ID """All values starting with the given string.""" id_starts_with: ID """All values not starting with the given string.""" id_not_starts_with: ID """All values ending with the given string.""" id_ends_with: ID """All values not ending with the given string.""" id_not_ends_with: ID cod: String """All values that are not equal to given value.""" cod_not: String """All values that are contained in given list.""" cod_in: [String!] 
"""All values that are not contained in given list.""" cod_not_in: [String!] """All values less than the given value.""" cod_lt: String """All values less than or equal the given value.""" cod_lte: String """All values greater than the given value.""" cod_gt: String """All values greater than or equal the given value.""" cod_gte: String """All values containing the given string.""" cod_contains: String """All values not containing the given string.""" cod_not_contains: String """All values starting with the given string.""" cod_starts_with: String """All values not starting with the given string.""" cod_not_starts_with: String """All values ending with the given string.""" cod_ends_with: String """All values not ending with the given string.""" cod_not_ends_with: String name: String """All values that are not equal to given value.""" name_not: String """All values that are contained in given list.""" name_in: [String!] """All values that are not contained in given list.""" name_not_in: [String!] """All values less than the given value.""" name_lt: String """All values less than or equal the given value.""" name_lte: String """All values greater than the given value.""" name_gt: String """All values greater than or equal the given value.""" name_gte: String """All values containing the given string.""" name_contains: String """All values not containing the given string.""" name_not_contains: String """All values starting with the given string.""" name_starts_with: String """All values not starting with the given string.""" name_not_starts_with: String """All values ending with the given string.""" name_ends_with: String """All values not ending with the given string.""" name_not_ends_with: String optional: Boolean """All values that are not equal to given value.""" optional_not: Boolean department: Department """All values that are not equal to given value.""" department_not: Department """All values that are contained in given list.""" department_in: [Department!] 
"""All values that are not contained in given list.""" department_not_in: [Department!] useful: Float """All values that are not equal to given value.""" useful_not: Float """All values that are contained in given list.""" useful_in: [Float!] """All values that are not contained in given list.""" useful_not_in: [Float!] """All values less than the given value.""" useful_lt: Float """All values less than or equal the given value.""" useful_lte: Float """All values greater than the given value.""" useful_gt: Float """All values greater than or equal the given value.""" useful_gte: Float easy: Float """All values that are not equal to given value.""" easy_not: Float """All values that are contained in given list.""" easy_in: [Float!] """All values that are not contained in given list.""" easy_not_in: [Float!] """All values less than the given value.""" easy_lt: Float """All values less than or equal the given value.""" easy_lte: Float """All values greater than the given value.""" easy_gt: Float """All values greater than or equal the given value.""" easy_gte: Float recommended: Int """All values that are not equal to given value.""" recommended_not: Int """All values that are contained in given list.""" recommended_in: [Int!] """All values that are not contained in given list.""" recommended_not_in: [Int!] 
"""All values less than the given value.""" recommended_lt: Int """All values less than or equal the given value.""" recommended_lte: Int """All values greater than the given value.""" recommended_gt: Int """All values greater than or equal the given value.""" recommended_gte: Int reviews_every: ReviewWhereInput reviews_some: ReviewWhereInput reviews_none: ReviewWhereInput } input UfvClassWhereUniqueInput { id: ID cod: String } enum UfvCourses { Agronegocio Agronomia Cooperativismo Engenharia_Agricola_e_Ambiental Engenharia_Florestal Zootecnia Bioquimica Ciencias_Biologicas__Bacharelado_Licenciatura_ Educacao_Fisica__Bacharelado_Licenciatura_ Enfermagem Licenciatura_em_Ciencias_Biologicas__Noturno_ Medicina Medicina_Veterinaria Nutricao Arquitetura_e_Urbanismo Ciencia_da_Computacaoo Ciencia_e_Tecnologia_de_Laticinios Engenharia_Ambiental Engenharia_Civil Engenharia_de_Agrimensura_e_Cartografica Engenharia_de_Alimentos Engenharia_de_Producao Engenharia_Eletrica Engenharia_Mecanica Engenharia_Quimica Fisica__Bacharelado_Licenciatura_ Licenciatura_em_Fisica Licenciatura_em_Matematica Licenciatura_em_Quimica Matematica__Bacharelado_Licenciatura_ Quimica__Bacharelado_Licenciatura_ Administracao Ciencias_Contabeis Ciencias_Economicas Ciencias_Sociais__Bacharelado_Licenciatura_ Comunicacao_Social___Jornalismo Danca__Bacharelado_Licenciatura_ Direito Economia_Domestica Educacao_do_Campo Educacao_Infantil Geografia__Bacharelado_Licenciatura_ Historia__Bacharelado_Licenciatura_ Letras Pedagogia Secretariado_Executivo_Trilingue___Portugues__Frances_e_Ingles Servico_Social } enum UfvYears { Y19201 Y19211 Y19221 Y19231 Y19241 Y19251 Y19261 Y19271 Y19281 Y19291 Y19301 Y19311 Y19321 Y19331 Y19341 Y19351 Y19361 Y19371 Y19381 Y19391 Y19401 Y19411 Y19421 Y19431 Y19441 Y19451 Y19461 Y19471 Y19481 Y19491 Y19501 Y19511 Y19521 Y19531 Y19541 Y19551 Y19561 Y19571 Y19581 Y19591 Y19601 Y19611 Y19621 Y19631 Y19641 Y19651 Y19661 Y19671 Y19681 Y19691 Y19701 Y19711 Y19721 Y19731 Y19741 Y19751 
Y19761 Y19771 Y19781 Y19791 Y19801 Y19811 Y19821 Y19831 Y19841 Y19851 Y19861 Y19871 Y19881 Y19891 Y19901 Y19911 Y19921 Y19931 Y19941 Y19951 Y19961 Y19971 Y19981 Y19991 Y20001 Y20011 Y20021 Y20031 Y20041 Y20051 Y20061 Y20071 Y20081 Y20091 Y20101 Y20111 Y20121 Y20131 Y20141 Y20151 Y20161 Y20171 Y20181 } type User implements Node { id: ID! facebookId: String! name: String! course: UfvCourses! year: UfvYears! rate: UserRate! reviews(where: ReviewWhereInput, orderBy: ReviewOrderByInput, skip: Int, after: String, before: String, first: Int, last: Int): [Review!] votes(where: ReviewVotesWhereInput, orderBy: ReviewVotesOrderByInput, skip: Int, after: String, before: String, first: Int, last: Int): [ReviewVotes!] createdAt: DateTime! updatedAt: DateTime! } """A connection to a list of items.""" type UserConnection { """Information to aid in pagination.""" pageInfo: PageInfo! """A list of edges.""" edges: [UserEdge]! aggregate: AggregateUser! } input UserCreateInput { facebookId: String! name: String! course: UfvCourses! year: UfvYears! rate: UserRate reviews: ReviewCreateManyWithoutReviewerInput votes: ReviewVotesCreateManyWithoutUserInput } input UserCreateOneWithoutReviewsInput { create: UserCreateWithoutReviewsInput connect: UserWhereUniqueInput } input UserCreateOneWithoutVotesInput { create: UserCreateWithoutVotesInput connect: UserWhereUniqueInput } input UserCreateWithoutReviewsInput { facebookId: String! name: String! course: UfvCourses! year: UfvYears! rate: UserRate votes: ReviewVotesCreateManyWithoutUserInput } input UserCreateWithoutVotesInput { facebookId: String! name: String! course: UfvCourses! year: UfvYears! rate: UserRate reviews: ReviewCreateManyWithoutReviewerInput } """An edge in a connection.""" type UserEdge { """The item at the end of the edge.""" node: User! """A cursor for use in pagination.""" cursor: String! 
} enum UserOrderByInput { id_ASC id_DESC facebookId_ASC facebookId_DESC name_ASC name_DESC course_ASC course_DESC year_ASC year_DESC rate_ASC rate_DESC createdAt_ASC createdAt_DESC updatedAt_ASC updatedAt_DESC } type UserPreviousValues { id: ID! facebookId: String! name: String! course: UfvCourses! year: UfvYears! rate: UserRate! createdAt: DateTime! updatedAt: DateTime! } enum UserRate { Iniciante Confiavel } type UserSubscriptionPayload { mutation: MutationType! node: User updatedFields: [String!] previousValues: UserPreviousValues } input UserSubscriptionWhereInput { """Logical AND on all given filters.""" AND: [UserSubscriptionWhereInput!] """Logical OR on all given filters.""" OR: [UserSubscriptionWhereInput!] """Logical NOT on all given filters combined by AND.""" NOT: [UserSubscriptionWhereInput!] """ The subscription event gets dispatched when it's listed in mutation_in """ mutation_in: [MutationType!] """ The subscription event gets only dispatched when one of the updated fields names is included in this list """ updatedFields_contains: String """ The subscription event gets only dispatched when all of the field names included in this list have been updated """ updatedFields_contains_every: [String!] """ The subscription event gets only dispatched when some of the field names included in this list have been updated """ updatedFields_contains_some: [String!] 
node: UserWhereInput } input UserUpdateInput { facebookId: String name: String course: UfvCourses year: UfvYears rate: UserRate reviews: ReviewUpdateManyWithoutReviewerInput votes: ReviewVotesUpdateManyWithoutUserInput } input UserUpdateOneWithoutReviewsInput { create: UserCreateWithoutReviewsInput connect: UserWhereUniqueInput delete: Boolean update: UserUpdateWithoutReviewsDataInput upsert: UserUpsertWithoutReviewsInput } input UserUpdateOneWithoutVotesInput { create: UserCreateWithoutVotesInput connect: UserWhereUniqueInput delete: Boolean update: UserUpdateWithoutVotesDataInput upsert: UserUpsertWithoutVotesInput } input UserUpdateWithoutReviewsDataInput { facebookId: String name: String course: UfvCourses year: UfvYears rate: UserRate votes: ReviewVotesUpdateManyWithoutUserInput } input UserUpdateWithoutVotesDataInput { facebookId: String name: String course: UfvCourses year: UfvYears rate: UserRate reviews: ReviewUpdateManyWithoutReviewerInput } input UserUpsertWithoutReviewsInput { update: UserUpdateWithoutReviewsDataInput! create: UserCreateWithoutReviewsInput! } input UserUpsertWithoutVotesInput { update: UserUpdateWithoutVotesDataInput! create: UserCreateWithoutVotesInput! } input UserWhereInput { """Logical AND on all given filters.""" AND: [UserWhereInput!] """Logical OR on all given filters.""" OR: [UserWhereInput!] """Logical NOT on all given filters combined by AND.""" NOT: [UserWhereInput!] id: ID """All values that are not equal to given value.""" id_not: ID """All values that are contained in given list.""" id_in: [ID!] """All values that are not contained in given list.""" id_not_in: [ID!] 
"""All values less than the given value.""" id_lt: ID """All values less than or equal the given value.""" id_lte: ID """All values greater than the given value.""" id_gt: ID """All values greater than or equal the given value.""" id_gte: ID """All values containing the given string.""" id_contains: ID """All values not containing the given string.""" id_not_contains: ID """All values starting with the given string.""" id_starts_with: ID """All values not starting with the given string.""" id_not_starts_with: ID """All values ending with the given string.""" id_ends_with: ID """All values not ending with the given string.""" id_not_ends_with: ID facebookId: String """All values that are not equal to given value.""" facebookId_not: String """All values that are contained in given list.""" facebookId_in: [String!] """All values that are not contained in given list.""" facebookId_not_in: [String!] """All values less than the given value.""" facebookId_lt: String """All values less than or equal the given value.""" facebookId_lte: String """All values greater than the given value.""" facebookId_gt: String """All values greater than or equal the given value.""" facebookId_gte: String """All values containing the given string.""" facebookId_contains: String """All values not containing the given string.""" facebookId_not_contains: String """All values starting with the given string.""" facebookId_starts_with: String """All values not starting with the given string.""" facebookId_not_starts_with: String """All values ending with the given string.""" facebookId_ends_with: String """All values not ending with the given string.""" facebookId_not_ends_with: String name: String """All values that are not equal to given value.""" name_not: String """All values that are contained in given list.""" name_in: [String!] """All values that are not contained in given list.""" name_not_in: [String!] 
"""All values less than the given value.""" name_lt: String """All values less than or equal the given value.""" name_lte: String """All values greater than the given value.""" name_gt: String """All values greater than or equal the given value.""" name_gte: String """All values containing the given string.""" name_contains: String """All values not containing the given string.""" name_not_contains: String """All values starting with the given string.""" name_starts_with: String """All values not starting with the given string.""" name_not_starts_with: String """All values ending with the given string.""" name_ends_with: String """All values not ending with the given string.""" name_not_ends_with: String course: UfvCourses """All values that are not equal to given value.""" course_not: UfvCourses """All values that are contained in given list.""" course_in: [UfvCourses!] """All values that are not contained in given list.""" course_not_in: [UfvCourses!] year: UfvYears """All values that are not equal to given value.""" year_not: UfvYears """All values that are contained in given list.""" year_in: [UfvYears!] """All values that are not contained in given list.""" year_not_in: [UfvYears!] rate: UserRate """All values that are not equal to given value.""" rate_not: UserRate """All values that are contained in given list.""" rate_in: [UserRate!] """All values that are not contained in given list.""" rate_not_in: [UserRate!] createdAt: DateTime """All values that are not equal to given value.""" createdAt_not: DateTime """All values that are contained in given list.""" createdAt_in: [DateTime!] """All values that are not contained in given list.""" createdAt_not_in: [DateTime!] 
"""All values less than the given value.""" createdAt_lt: DateTime """All values less than or equal the given value.""" createdAt_lte: DateTime """All values greater than the given value.""" createdAt_gt: DateTime """All values greater than or equal the given value.""" createdAt_gte: DateTime updatedAt: DateTime """All values that are not equal to given value.""" updatedAt_not: DateTime """All values that are contained in given list.""" updatedAt_in: [DateTime!] """All values that are not contained in given list.""" updatedAt_not_in: [DateTime!] """All values less than the given value.""" updatedAt_lt: DateTime """All values less than or equal the given value.""" updatedAt_lte: DateTime """All values greater than the given value.""" updatedAt_gt: DateTime """All values greater than or equal the given value.""" updatedAt_gte: DateTime reviews_every: ReviewWhereInput reviews_some: ReviewWhereInput reviews_none: ReviewWhereInput votes_every: ReviewVotesWhereInput votes_some: ReviewVotesWhereInput votes_none: ReviewVotesWhereInput } input UserWhereUniqueInput { id: ID facebookId: String } `
// NOTE(review): this file appears to be auto-generated by prisma-binding —
// `makePrismaBindingClass` is wrapped around the GraphQL SDL embedded in the
// `typeDefs` template literal above, and the TS aliases/interfaces below
// mirror that schema. Regenerate rather than hand-editing so the runtime
// schema string and the static types stay in sync.
export const Prisma = makePrismaBindingClass<BindingConstructor<Prisma>>({typeDefs})

/**
 * Types
 */

// Sort orders accepted by ReviewVotes list queries: one ASC/DESC pair per scalar field.
export type ReviewVotesOrderByInput = 'id_ASC' | 'id_DESC' | 'type_ASC' | 'type_DESC' | 'updatedAt_ASC' | 'updatedAt_DESC' | 'createdAt_ASC' | 'createdAt_DESC'

// UFV departments (enum mirrored from the GraphQL schema; continues on the next lines).
// NOTE(review): 'Depto__de_Nutricaoo_e_Saude' looks misspelled, but it matches the
// embedded schema string — fix in the datamodel and regenerate, not here.
export type Department = 'Depto__de_Economia_Rural' | 'Depto__de_Engenharia_Agricola' | 'Depto__de_Engenharia_Florestal' | 'Depto__de_Fitopatologia' | 'Depto__de_Fitotecnia' | 'Depto__de_Solos' | 'Depto__de_Zootecnia' | 'Depto__de_Biologia_Animal' | 'Depto__de_Biologia_Geral' | 'Depto__de_Biologia_Vegetal' | 'Depto__de_Bioquimica_e_Biologia_Molecular' | 'Depto__de_Educacao_Fisica' | 'Depto__de_Entomologia' | 'Depto__de_Microbiologia' | 'Depto__de_Medicina_e_Enfermagem' | 'Depto__de_Nutricaoo_e_Saude' | 'Depto__de_Veterinaria' | 'Depto__de_Arquitetura_e_Urbanismo' | 'Depto__de_Engenharia_Civil' | 'Depto__de_Engenharia_Eletrica'
| 'Depto__de_Engenharia_de_Producao_e_Mecanica' | 'Depto__de_Estatistica' | 'Depto__de_Fisica' | 'Depto__de_Informatica' | 'Depto__de_Matematica' | 'Depto__de_Quimica' | 'Depto__de_Tecnologia_de_Alimentos' | 'Depto__de_Administracao_e_Contabilidade' | 'Depto__de_Artes_e_Humanidades' | 'Depto__de_Ciencias_Sociais' | 'Depto__de_Comunicacao_Social' | 'Depto__de_Direito' | 'Depto__de_Economia' | 'Depto__de_Economia_Domestica' | 'Depto__de_Educacao' | 'Depto__de_Geografia' | 'Depto__de_Historia' | 'Depto__de_Letras' export type ReviewUseful = 'U0' | 'U1' | 'U2' | 'U3' | 'U4' | 'U5' export type UfvCourses = 'Agronegocio' | 'Agronomia' | 'Cooperativismo' | 'Engenharia_Agricola_e_Ambiental' | 'Engenharia_Florestal' | 'Zootecnia' | 'Bioquimica' | 'Ciencias_Biologicas__Bacharelado_Licenciatura_' | 'Educacao_Fisica__Bacharelado_Licenciatura_' | 'Enfermagem' | 'Licenciatura_em_Ciencias_Biologicas__Noturno_' | 'Medicina' | 'Medicina_Veterinaria' | 'Nutricao' | 'Arquitetura_e_Urbanismo' | 'Ciencia_da_Computacaoo' | 'Ciencia_e_Tecnologia_de_Laticinios' | 'Engenharia_Ambiental' | 'Engenharia_Civil' | 'Engenharia_de_Agrimensura_e_Cartografica' | 'Engenharia_de_Alimentos' | 'Engenharia_de_Producao' | 'Engenharia_Eletrica' | 'Engenharia_Mecanica' | 'Engenharia_Quimica' | 'Fisica__Bacharelado_Licenciatura_' | 'Licenciatura_em_Fisica' | 'Licenciatura_em_Matematica' | 'Licenciatura_em_Quimica' | 'Matematica__Bacharelado_Licenciatura_' | 'Quimica__Bacharelado_Licenciatura_' | 'Administracao' | 'Ciencias_Contabeis' | 'Ciencias_Economicas' | 'Ciencias_Sociais__Bacharelado_Licenciatura_' | 'Comunicacao_Social___Jornalismo' | 'Danca__Bacharelado_Licenciatura_' | 'Direito' | 'Economia_Domestica' | 'Educacao_do_Campo' | 'Educacao_Infantil' | 'Geografia__Bacharelado_Licenciatura_' | 'Historia__Bacharelado_Licenciatura_' | 'Letras' | 'Pedagogia' | 'Secretariado_Executivo_Trilingue___Portugues__Frances_e_Ingles' | 'Servico_Social' export type ReviewVotesTypes = 'Agree' | 'Disagree' export type 
UserOrderByInput = 'id_ASC' | 'id_DESC' | 'facebookId_ASC' | 'facebookId_DESC' | 'name_ASC' | 'name_DESC' | 'course_ASC' | 'course_DESC' | 'year_ASC' | 'year_DESC' | 'rate_ASC' | 'rate_DESC' | 'createdAt_ASC' | 'createdAt_DESC' | 'updatedAt_ASC' | 'updatedAt_DESC' export type ReviewOrderByInput = 'id_ASC' | 'id_DESC' | 'useful_ASC' | 'useful_DESC' | 'easy_ASC' | 'easy_DESC' | 'description_ASC' | 'description_DESC' | 'anonymous_ASC' | 'anonymous_DESC' | 'recommended_ASC' | 'recommended_DESC' | 'teacher_ASC' | 'teacher_DESC' | 'score_ASC' | 'score_DESC' | 'createdAt_ASC' | 'createdAt_DESC' | 'updatedAt_ASC' | 'updatedAt_DESC' export type ReviewEasy = 'E0' | 'E1' | 'E2' | 'E3' | 'E4' | 'E5' export type UfvYears = 'Y19201' | 'Y19211' | 'Y19221' | 'Y19231' | 'Y19241' | 'Y19251' | 'Y19261' | 'Y19271' | 'Y19281' | 'Y19291' | 'Y19301' | 'Y19311' | 'Y19321' | 'Y19331' | 'Y19341' | 'Y19351' | 'Y19361' | 'Y19371' | 'Y19381' | 'Y19391' | 'Y19401' | 'Y19411' | 'Y19421' | 'Y19431' | 'Y19441' | 'Y19451' | 'Y19461' | 'Y19471' | 'Y19481' | 'Y19491' | 'Y19501' | 'Y19511' | 'Y19521' | 'Y19531' | 'Y19541' | 'Y19551' | 'Y19561' | 'Y19571' | 'Y19581' | 'Y19591' | 'Y19601' | 'Y19611' | 'Y19621' | 'Y19631' | 'Y19641' | 'Y19651' | 'Y19661' | 'Y19671' | 'Y19681' | 'Y19691' | 'Y19701' | 'Y19711' | 'Y19721' | 'Y19731' | 'Y19741' | 'Y19751' | 'Y19761' | 'Y19771' | 'Y19781' | 'Y19791' | 'Y19801' | 'Y19811' | 'Y19821' | 'Y19831' | 'Y19841' | 'Y19851' | 'Y19861' | 'Y19871' | 'Y19881' | 'Y19891' | 'Y19901' | 'Y19911' | 'Y19921' | 'Y19931' | 'Y19941' | 'Y19951' | 'Y19961' | 'Y19971' | 'Y19981' | 'Y19991' | 'Y20001' | 'Y20011' | 'Y20021' | 'Y20031' | 'Y20041' | 'Y20051' | 'Y20061' | 'Y20071' | 'Y20081' | 'Y20091' | 'Y20101' | 'Y20111' | 'Y20121' | 'Y20131' | 'Y20141' | 'Y20151' | 'Y20161' | 'Y20171' | 'Y20181' export type MutationType = 'CREATED' | 'UPDATED' | 'DELETED' export type UfvClassOrderByInput = 'id_ASC' | 'id_DESC' | 'cod_ASC' | 'cod_DESC' | 'name_ASC' | 'name_DESC' | 'optional_ASC' | 
'optional_DESC' | 'department_ASC' | 'department_DESC' | 'useful_ASC' | 'useful_DESC' | 'easy_ASC' | 'easy_DESC' | 'recommended_ASC' | 'recommended_DESC' | 'updatedAt_ASC' | 'updatedAt_DESC' | 'createdAt_ASC' | 'createdAt_DESC' export type UserRate = 'Iniciante' | 'Confiavel' export interface ReviewCreateWithoutVotesInput { useful: ReviewUseful easy: ReviewEasy description: String anonymous: Boolean recommended: Boolean teacher: String score?: Int classReviewed: UfvClassCreateOneWithoutReviewsInput reviewer: UserCreateOneWithoutReviewsInput } export interface UserWhereInput { AND?: UserWhereInput[] | UserWhereInput OR?: UserWhereInput[] | UserWhereInput NOT?: UserWhereInput[] | UserWhereInput id?: ID_Input id_not?: ID_Input id_in?: ID_Input[] | ID_Input id_not_in?: ID_Input[] | ID_Input id_lt?: ID_Input id_lte?: ID_Input id_gt?: ID_Input id_gte?: ID_Input id_contains?: ID_Input id_not_contains?: ID_Input id_starts_with?: ID_Input id_not_starts_with?: ID_Input id_ends_with?: ID_Input id_not_ends_with?: ID_Input facebookId?: String facebookId_not?: String facebookId_in?: String[] | String facebookId_not_in?: String[] | String facebookId_lt?: String facebookId_lte?: String facebookId_gt?: String facebookId_gte?: String facebookId_contains?: String facebookId_not_contains?: String facebookId_starts_with?: String facebookId_not_starts_with?: String facebookId_ends_with?: String facebookId_not_ends_with?: String name?: String name_not?: String name_in?: String[] | String name_not_in?: String[] | String name_lt?: String name_lte?: String name_gt?: String name_gte?: String name_contains?: String name_not_contains?: String name_starts_with?: String name_not_starts_with?: String name_ends_with?: String name_not_ends_with?: String course?: UfvCourses course_not?: UfvCourses course_in?: UfvCourses[] | UfvCourses course_not_in?: UfvCourses[] | UfvCourses year?: UfvYears year_not?: UfvYears year_in?: UfvYears[] | UfvYears year_not_in?: UfvYears[] | UfvYears rate?: UserRate 
rate_not?: UserRate rate_in?: UserRate[] | UserRate rate_not_in?: UserRate[] | UserRate createdAt?: DateTime createdAt_not?: DateTime createdAt_in?: DateTime[] | DateTime createdAt_not_in?: DateTime[] | DateTime createdAt_lt?: DateTime createdAt_lte?: DateTime createdAt_gt?: DateTime createdAt_gte?: DateTime updatedAt?: DateTime updatedAt_not?: DateTime updatedAt_in?: DateTime[] | DateTime updatedAt_not_in?: DateTime[] | DateTime updatedAt_lt?: DateTime updatedAt_lte?: DateTime updatedAt_gt?: DateTime updatedAt_gte?: DateTime reviews_every?: ReviewWhereInput reviews_some?: ReviewWhereInput reviews_none?: ReviewWhereInput votes_every?: ReviewVotesWhereInput votes_some?: ReviewVotesWhereInput votes_none?: ReviewVotesWhereInput } export interface UfvClassCreateOneWithoutReviewsInput { create?: UfvClassCreateWithoutReviewsInput connect?: UfvClassWhereUniqueInput } export interface ReviewUpsertWithWhereUniqueWithoutReviewerInput { where: ReviewWhereUniqueInput update: ReviewUpdateWithoutReviewerDataInput create: ReviewCreateWithoutReviewerInput } export interface UfvClassCreateWithoutReviewsInput { cod: String name: String optional: Boolean department: Department useful?: Float easy?: Float recommended?: Int } export interface ReviewCreateWithoutClassReviewedInput { useful: ReviewUseful easy: ReviewEasy description: String anonymous: Boolean recommended: Boolean teacher: String score?: Int reviewer: UserCreateOneWithoutReviewsInput votes?: ReviewVotesCreateManyWithoutReviewInput } export interface ReviewVotesCreateManyWithoutReviewInput { create?: ReviewVotesCreateWithoutReviewInput[] | ReviewVotesCreateWithoutReviewInput connect?: ReviewVotesWhereUniqueInput[] | ReviewVotesWhereUniqueInput } export interface UfvClassSubscriptionWhereInput { AND?: UfvClassSubscriptionWhereInput[] | UfvClassSubscriptionWhereInput OR?: UfvClassSubscriptionWhereInput[] | UfvClassSubscriptionWhereInput NOT?: UfvClassSubscriptionWhereInput[] | UfvClassSubscriptionWhereInput mutation_in?: 
MutationType[] | MutationType updatedFields_contains?: String updatedFields_contains_every?: String[] | String updatedFields_contains_some?: String[] | String node?: UfvClassWhereInput } export interface ReviewVotesCreateWithoutReviewInput { type: ReviewVotesTypes user: UserCreateOneWithoutVotesInput } export interface UserSubscriptionWhereInput { AND?: UserSubscriptionWhereInput[] | UserSubscriptionWhereInput OR?: UserSubscriptionWhereInput[] | UserSubscriptionWhereInput NOT?: UserSubscriptionWhereInput[] | UserSubscriptionWhereInput mutation_in?: MutationType[] | MutationType updatedFields_contains?: String updatedFields_contains_every?: String[] | String updatedFields_contains_some?: String[] | String node?: UserWhereInput } export interface UserCreateOneWithoutVotesInput { create?: UserCreateWithoutVotesInput connect?: UserWhereUniqueInput } export interface ReviewWhereInput { AND?: ReviewWhereInput[] | ReviewWhereInput OR?: ReviewWhereInput[] | ReviewWhereInput NOT?: ReviewWhereInput[] | ReviewWhereInput id?: ID_Input id_not?: ID_Input id_in?: ID_Input[] | ID_Input id_not_in?: ID_Input[] | ID_Input id_lt?: ID_Input id_lte?: ID_Input id_gt?: ID_Input id_gte?: ID_Input id_contains?: ID_Input id_not_contains?: ID_Input id_starts_with?: ID_Input id_not_starts_with?: ID_Input id_ends_with?: ID_Input id_not_ends_with?: ID_Input useful?: ReviewUseful useful_not?: ReviewUseful useful_in?: ReviewUseful[] | ReviewUseful useful_not_in?: ReviewUseful[] | ReviewUseful easy?: ReviewEasy easy_not?: ReviewEasy easy_in?: ReviewEasy[] | ReviewEasy easy_not_in?: ReviewEasy[] | ReviewEasy description?: String description_not?: String description_in?: String[] | String description_not_in?: String[] | String description_lt?: String description_lte?: String description_gt?: String description_gte?: String description_contains?: String description_not_contains?: String description_starts_with?: String description_not_starts_with?: String description_ends_with?: String 
description_not_ends_with?: String anonymous?: Boolean anonymous_not?: Boolean recommended?: Boolean recommended_not?: Boolean teacher?: String teacher_not?: String teacher_in?: String[] | String teacher_not_in?: String[] | String teacher_lt?: String teacher_lte?: String teacher_gt?: String teacher_gte?: String teacher_contains?: String teacher_not_contains?: String teacher_starts_with?: String teacher_not_starts_with?: String teacher_ends_with?: String teacher_not_ends_with?: String score?: Int score_not?: Int score_in?: Int[] | Int score_not_in?: Int[] | Int score_lt?: Int score_lte?: Int score_gt?: Int score_gte?: Int createdAt?: DateTime createdAt_not?: DateTime createdAt_in?: DateTime[] | DateTime createdAt_not_in?: DateTime[] | DateTime createdAt_lt?: DateTime createdAt_lte?: DateTime createdAt_gt?: DateTime createdAt_gte?: DateTime updatedAt?: DateTime updatedAt_not?: DateTime updatedAt_in?: DateTime[] | DateTime updatedAt_not_in?: DateTime[] | DateTime updatedAt_lt?: DateTime updatedAt_lte?: DateTime updatedAt_gt?: DateTime updatedAt_gte?: DateTime classReviewed?: UfvClassWhereInput reviewer?: UserWhereInput votes_every?: ReviewVotesWhereInput votes_some?: ReviewVotesWhereInput votes_none?: ReviewVotesWhereInput } export interface UserCreateWithoutVotesInput { facebookId: String name: String course: UfvCourses year: UfvYears rate?: UserRate reviews?: ReviewCreateManyWithoutReviewerInput } export interface ReviewUpdateWithWhereUniqueWithoutClassReviewedInput { where: ReviewWhereUniqueInput data: ReviewUpdateWithoutClassReviewedDataInput } export interface ReviewVotesCreateManyWithoutUserInput { create?: ReviewVotesCreateWithoutUserInput[] | ReviewVotesCreateWithoutUserInput connect?: ReviewVotesWhereUniqueInput[] | ReviewVotesWhereUniqueInput } export interface ReviewVotesWhereUniqueInput { id?: ID_Input } export interface ReviewVotesCreateWithoutUserInput { type: ReviewVotesTypes review: ReviewCreateOneWithoutVotesInput } export interface 
UfvClassWhereUniqueInput { id?: ID_Input cod?: String } export interface ReviewCreateOneWithoutVotesInput { create?: ReviewCreateWithoutVotesInput connect?: ReviewWhereUniqueInput } export interface UfvClassUpdateInput { cod?: String name?: String optional?: Boolean department?: Department useful?: Float easy?: Float recommended?: Int reviews?: ReviewUpdateManyWithoutClassReviewedInput } export interface ReviewVotesUpdateManyWithoutUserInput { create?: ReviewVotesCreateWithoutUserInput[] | ReviewVotesCreateWithoutUserInput connect?: ReviewVotesWhereUniqueInput[] | ReviewVotesWhereUniqueInput disconnect?: ReviewVotesWhereUniqueInput[] | ReviewVotesWhereUniqueInput delete?: ReviewVotesWhereUniqueInput[] | ReviewVotesWhereUniqueInput update?: ReviewVotesUpdateWithWhereUniqueWithoutUserInput[] | ReviewVotesUpdateWithWhereUniqueWithoutUserInput upsert?: ReviewVotesUpsertWithWhereUniqueWithoutUserInput[] | ReviewVotesUpsertWithWhereUniqueWithoutUserInput } export interface ReviewVotesUpdateInput { type?: ReviewVotesTypes review?: ReviewUpdateOneWithoutVotesInput user?: UserUpdateOneWithoutVotesInput } export interface UserCreateOneWithoutReviewsInput { create?: UserCreateWithoutReviewsInput connect?: UserWhereUniqueInput } export interface ReviewUpsertWithoutVotesInput { update: ReviewUpdateWithoutVotesDataInput create: ReviewCreateWithoutVotesInput } export interface UserCreateWithoutReviewsInput { facebookId: String name: String course: UfvCourses year: UfvYears rate?: UserRate votes?: ReviewVotesCreateManyWithoutUserInput } export interface UserUpdateWithoutReviewsDataInput { facebookId?: String name?: String course?: UfvCourses year?: UfvYears rate?: UserRate votes?: ReviewVotesUpdateManyWithoutUserInput } export interface ReviewVotesCreateInput { type: ReviewVotesTypes review: ReviewCreateOneWithoutVotesInput user: UserCreateOneWithoutVotesInput } export interface ReviewUpdateWithoutVotesDataInput { useful?: ReviewUseful easy?: ReviewEasy description?: String 
anonymous?: Boolean recommended?: Boolean teacher?: String score?: Int classReviewed?: UfvClassUpdateOneWithoutReviewsInput reviewer?: UserUpdateOneWithoutReviewsInput } export interface ReviewCreateInput { useful: ReviewUseful easy: ReviewEasy description: String anonymous: Boolean recommended: Boolean teacher: String score?: Int classReviewed: UfvClassCreateOneWithoutReviewsInput reviewer: UserCreateOneWithoutReviewsInput votes?: ReviewVotesCreateManyWithoutReviewInput } export interface ReviewVotesUpdateWithoutUserDataInput { type?: ReviewVotesTypes review?: ReviewUpdateOneWithoutVotesInput } export interface UfvClassCreateInput { cod: String name: String optional: Boolean department: Department useful?: Float easy?: Float recommended?: Int reviews?: ReviewCreateManyWithoutClassReviewedInput } export interface UserCreateInput { facebookId: String name: String course: UfvCourses year: UfvYears rate?: UserRate reviews?: ReviewCreateManyWithoutReviewerInput votes?: ReviewVotesCreateManyWithoutUserInput } export interface ReviewCreateManyWithoutClassReviewedInput { create?: ReviewCreateWithoutClassReviewedInput[] | ReviewCreateWithoutClassReviewedInput connect?: ReviewWhereUniqueInput[] | ReviewWhereUniqueInput } export interface ReviewCreateWithoutReviewerInput { useful: ReviewUseful easy: ReviewEasy description: String anonymous: Boolean recommended: Boolean teacher: String score?: Int classReviewed: UfvClassCreateOneWithoutReviewsInput votes?: ReviewVotesCreateManyWithoutReviewInput } export interface ReviewVotesWhereInput { AND?: ReviewVotesWhereInput[] | ReviewVotesWhereInput OR?: ReviewVotesWhereInput[] | ReviewVotesWhereInput NOT?: ReviewVotesWhereInput[] | ReviewVotesWhereInput id?: ID_Input id_not?: ID_Input id_in?: ID_Input[] | ID_Input id_not_in?: ID_Input[] | ID_Input id_lt?: ID_Input id_lte?: ID_Input id_gt?: ID_Input id_gte?: ID_Input id_contains?: ID_Input id_not_contains?: ID_Input id_starts_with?: ID_Input id_not_starts_with?: ID_Input 
id_ends_with?: ID_Input id_not_ends_with?: ID_Input type?: ReviewVotesTypes type_not?: ReviewVotesTypes type_in?: ReviewVotesTypes[] | ReviewVotesTypes type_not_in?: ReviewVotesTypes[] | ReviewVotesTypes review?: ReviewWhereInput user?: UserWhereInput } export interface ReviewSubscriptionWhereInput { AND?: ReviewSubscriptionWhereInput[] | ReviewSubscriptionWhereInput OR?: ReviewSubscriptionWhereInput[] | ReviewSubscriptionWhereInput NOT?: ReviewSubscriptionWhereInput[] | ReviewSubscriptionWhereInput mutation_in?: MutationType[] | MutationType updatedFields_contains?: String updatedFields_contains_every?: String[] | String updatedFields_contains_some?: String[] | String node?: ReviewWhereInput } export interface UserUpdateInput { facebookId?: String name?: String course?: UfvCourses year?: UfvYears rate?: UserRate reviews?: ReviewUpdateManyWithoutReviewerInput votes?: ReviewVotesUpdateManyWithoutUserInput } export interface ReviewUpsertWithWhereUniqueWithoutClassReviewedInput { where: ReviewWhereUniqueInput update: ReviewUpdateWithoutClassReviewedDataInput create: ReviewCreateWithoutClassReviewedInput } export interface ReviewUpdateManyWithoutReviewerInput { create?: ReviewCreateWithoutReviewerInput[] | ReviewCreateWithoutReviewerInput connect?: ReviewWhereUniqueInput[] | ReviewWhereUniqueInput disconnect?: ReviewWhereUniqueInput[] | ReviewWhereUniqueInput delete?: ReviewWhereUniqueInput[] | ReviewWhereUniqueInput update?: ReviewUpdateWithWhereUniqueWithoutReviewerInput[] | ReviewUpdateWithWhereUniqueWithoutReviewerInput upsert?: ReviewUpsertWithWhereUniqueWithoutReviewerInput[] | ReviewUpsertWithWhereUniqueWithoutReviewerInput } export interface UserWhereUniqueInput { id?: ID_Input facebookId?: String } export interface ReviewUpdateWithWhereUniqueWithoutReviewerInput { where: ReviewWhereUniqueInput data: ReviewUpdateWithoutReviewerDataInput } export interface ReviewUpdateManyWithoutClassReviewedInput { create?: ReviewCreateWithoutClassReviewedInput[] | 
ReviewCreateWithoutClassReviewedInput connect?: ReviewWhereUniqueInput[] | ReviewWhereUniqueInput disconnect?: ReviewWhereUniqueInput[] | ReviewWhereUniqueInput delete?: ReviewWhereUniqueInput[] | ReviewWhereUniqueInput update?: ReviewUpdateWithWhereUniqueWithoutClassReviewedInput[] | ReviewUpdateWithWhereUniqueWithoutClassReviewedInput upsert?: ReviewUpsertWithWhereUniqueWithoutClassReviewedInput[] | ReviewUpsertWithWhereUniqueWithoutClassReviewedInput } export interface ReviewUpdateWithoutReviewerDataInput { useful?: ReviewUseful easy?: ReviewEasy description?: String anonymous?: Boolean recommended?: Boolean teacher?: String score?: Int classReviewed?: UfvClassUpdateOneWithoutReviewsInput votes?: ReviewVotesUpdateManyWithoutReviewInput } export interface ReviewVotesUpsertWithWhereUniqueWithoutUserInput { where: ReviewVotesWhereUniqueInput update: ReviewVotesUpdateWithoutUserDataInput create: ReviewVotesCreateWithoutUserInput } export interface UfvClassUpdateOneWithoutReviewsInput { create?: UfvClassCreateWithoutReviewsInput connect?: UfvClassWhereUniqueInput delete?: Boolean update?: UfvClassUpdateWithoutReviewsDataInput upsert?: UfvClassUpsertWithoutReviewsInput } export interface UserUpdateOneWithoutReviewsInput { create?: UserCreateWithoutReviewsInput connect?: UserWhereUniqueInput delete?: Boolean update?: UserUpdateWithoutReviewsDataInput upsert?: UserUpsertWithoutReviewsInput } export interface UfvClassUpdateWithoutReviewsDataInput { cod?: String name?: String optional?: Boolean department?: Department useful?: Float easy?: Float recommended?: Int } export interface ReviewVotesUpdateWithWhereUniqueWithoutUserInput { where: ReviewVotesWhereUniqueInput data: ReviewVotesUpdateWithoutUserDataInput } export interface UfvClassUpsertWithoutReviewsInput { update: UfvClassUpdateWithoutReviewsDataInput create: UfvClassCreateWithoutReviewsInput } export interface UfvClassWhereInput { AND?: UfvClassWhereInput[] | UfvClassWhereInput OR?: UfvClassWhereInput[] | 
UfvClassWhereInput NOT?: UfvClassWhereInput[] | UfvClassWhereInput id?: ID_Input id_not?: ID_Input id_in?: ID_Input[] | ID_Input id_not_in?: ID_Input[] | ID_Input id_lt?: ID_Input id_lte?: ID_Input id_gt?: ID_Input id_gte?: ID_Input id_contains?: ID_Input id_not_contains?: ID_Input id_starts_with?: ID_Input id_not_starts_with?: ID_Input id_ends_with?: ID_Input id_not_ends_with?: ID_Input cod?: String cod_not?: String cod_in?: String[] | String cod_not_in?: String[] | String cod_lt?: String cod_lte?: String cod_gt?: String cod_gte?: String cod_contains?: String cod_not_contains?: String cod_starts_with?: String cod_not_starts_with?: String cod_ends_with?: String cod_not_ends_with?: String name?: String name_not?: String name_in?: String[] | String name_not_in?: String[] | String name_lt?: String name_lte?: String name_gt?: String name_gte?: String name_contains?: String name_not_contains?: String name_starts_with?: String name_not_starts_with?: String name_ends_with?: String name_not_ends_with?: String optional?: Boolean optional_not?: Boolean department?: Department department_not?: Department department_in?: Department[] | Department department_not_in?: Department[] | Department useful?: Float useful_not?: Float useful_in?: Float[] | Float useful_not_in?: Float[] | Float useful_lt?: Float useful_lte?: Float useful_gt?: Float useful_gte?: Float easy?: Float easy_not?: Float easy_in?: Float[] | Float easy_not_in?: Float[] | Float easy_lt?: Float easy_lte?: Float easy_gt?: Float easy_gte?: Float recommended?: Int recommended_not?: Int recommended_in?: Int[] | Int recommended_not_in?: Int[] | Int recommended_lt?: Int recommended_lte?: Int recommended_gt?: Int recommended_gte?: Int reviews_every?: ReviewWhereInput reviews_some?: ReviewWhereInput reviews_none?: ReviewWhereInput } export interface ReviewVotesUpdateManyWithoutReviewInput { create?: ReviewVotesCreateWithoutReviewInput[] | ReviewVotesCreateWithoutReviewInput connect?: ReviewVotesWhereUniqueInput[] | 
ReviewVotesWhereUniqueInput disconnect?: ReviewVotesWhereUniqueInput[] | ReviewVotesWhereUniqueInput delete?: ReviewVotesWhereUniqueInput[] | ReviewVotesWhereUniqueInput update?: ReviewVotesUpdateWithWhereUniqueWithoutReviewInput[] | ReviewVotesUpdateWithWhereUniqueWithoutReviewInput upsert?: ReviewVotesUpsertWithWhereUniqueWithoutReviewInput[] | ReviewVotesUpsertWithWhereUniqueWithoutReviewInput } export interface ReviewUpdateWithoutClassReviewedDataInput { useful?: ReviewUseful easy?: ReviewEasy description?: String anonymous?: Boolean recommended?: Boolean teacher?: String score?: Int reviewer?: UserUpdateOneWithoutReviewsInput votes?: ReviewVotesUpdateManyWithoutReviewInput } export interface ReviewVotesUpdateWithWhereUniqueWithoutReviewInput { where: ReviewVotesWhereUniqueInput data: ReviewVotesUpdateWithoutReviewDataInput } export interface ReviewUpdateInput { useful?: ReviewUseful easy?: ReviewEasy description?: String anonymous?: Boolean recommended?: Boolean teacher?: String score?: Int classReviewed?: UfvClassUpdateOneWithoutReviewsInput reviewer?: UserUpdateOneWithoutReviewsInput votes?: ReviewVotesUpdateManyWithoutReviewInput } export interface ReviewVotesUpdateWithoutReviewDataInput { type?: ReviewVotesTypes user?: UserUpdateOneWithoutVotesInput } export interface ReviewUpdateOneWithoutVotesInput { create?: ReviewCreateWithoutVotesInput connect?: ReviewWhereUniqueInput delete?: Boolean update?: ReviewUpdateWithoutVotesDataInput upsert?: ReviewUpsertWithoutVotesInput } export interface ReviewVotesUpsertWithWhereUniqueWithoutReviewInput { where: ReviewVotesWhereUniqueInput update: ReviewVotesUpdateWithoutReviewDataInput create: ReviewVotesCreateWithoutReviewInput } export interface UserUpsertWithoutVotesInput { update: UserUpdateWithoutVotesDataInput create: UserCreateWithoutVotesInput } export interface UserUpdateWithoutVotesDataInput { facebookId?: String name?: String course?: UfvCourses year?: UfvYears rate?: UserRate reviews?: 
ReviewUpdateManyWithoutReviewerInput } export interface UserUpdateOneWithoutVotesInput { create?: UserCreateWithoutVotesInput connect?: UserWhereUniqueInput delete?: Boolean update?: UserUpdateWithoutVotesDataInput upsert?: UserUpsertWithoutVotesInput } export interface ReviewCreateManyWithoutReviewerInput { create?: ReviewCreateWithoutReviewerInput[] | ReviewCreateWithoutReviewerInput connect?: ReviewWhereUniqueInput[] | ReviewWhereUniqueInput } export interface UserUpsertWithoutReviewsInput { update: UserUpdateWithoutReviewsDataInput create: UserCreateWithoutReviewsInput } export interface ReviewWhereUniqueInput { id?: ID_Input } export interface ReviewVotesSubscriptionWhereInput { AND?: ReviewVotesSubscriptionWhereInput[] | ReviewVotesSubscriptionWhereInput OR?: ReviewVotesSubscriptionWhereInput[] | ReviewVotesSubscriptionWhereInput NOT?: ReviewVotesSubscriptionWhereInput[] | ReviewVotesSubscriptionWhereInput mutation_in?: MutationType[] | MutationType updatedFields_contains?: String updatedFields_contains_every?: String[] | String updatedFields_contains_some?: String[] | String node?: ReviewVotesWhereInput } /* * An object with an ID */ export interface Node { id: ID_Output } export interface UfvClassPreviousValues { id: ID_Output cod: String name: String optional: Boolean department: Department useful: Float easy: Float recommended: Int } /* * A connection to a list of items. 
*/ export interface UserConnection { pageInfo: PageInfo edges: UserEdge[] aggregate: AggregateUser } export interface ReviewVotesPreviousValues { id: ID_Output type: ReviewVotesTypes } export interface UfvClassSubscriptionPayload { mutation: MutationType node?: UfvClass updatedFields?: String[] previousValues?: UfvClassPreviousValues } export interface UfvClass extends Node { id: ID_Output cod: String name: String optional: Boolean department: Department useful: Float easy: Float recommended: Int reviews?: Review[] } export interface ReviewVotes extends Node { id: ID_Output review: Review user: User type: ReviewVotesTypes } /* * A connection to a list of items. */ export interface UfvClassConnection { pageInfo: PageInfo edges: UfvClassEdge[] aggregate: AggregateUfvClass } export interface AggregateUfvClass { count: Int } /* * An edge in a connection. */ export interface ReviewEdge { node: Review cursor: String } export interface BatchPayload { count: Long } export interface AggregateReviewVotes { count: Int } export interface ReviewPreviousValues { id: ID_Output useful: ReviewUseful easy: ReviewEasy description: String anonymous: Boolean recommended: Boolean teacher: String score: Int createdAt: DateTime updatedAt: DateTime } /* * A connection to a list of items. */ export interface ReviewVotesConnection { pageInfo: PageInfo edges: ReviewVotesEdge[] aggregate: AggregateReviewVotes } export interface Review extends Node { id: ID_Output useful: ReviewUseful easy: ReviewEasy description: String anonymous: Boolean recommended: Boolean teacher: String score: Int classReviewed: UfvClass reviewer: User votes?: ReviewVotes[] createdAt: DateTime updatedAt: DateTime } /* * An edge in a connection. */ export interface UserEdge { node: User cursor: String } export interface ReviewSubscriptionPayload { mutation: MutationType node?: Review updatedFields?: String[] previousValues?: ReviewPreviousValues } /* * An edge in a connection. 
*/ export interface UfvClassEdge { node: UfvClass cursor: String } /* * A connection to a list of items. */ export interface ReviewConnection { pageInfo: PageInfo edges: ReviewEdge[] aggregate: AggregateReview } export interface ReviewVotesSubscriptionPayload { mutation: MutationType node?: ReviewVotes updatedFields?: String[] previousValues?: ReviewVotesPreviousValues } export interface User extends Node { id: ID_Output facebookId: String name: String course: UfvCourses year: UfvYears rate: UserRate reviews?: Review[] votes?: ReviewVotes[] createdAt: DateTime updatedAt: DateTime } export interface UserPreviousValues { id: ID_Output facebookId: String name: String course: UfvCourses year: UfvYears rate: UserRate createdAt: DateTime updatedAt: DateTime } export interface UserSubscriptionPayload { mutation: MutationType node?: User updatedFields?: String[] previousValues?: UserPreviousValues } /* * An edge in a connection. */ export interface ReviewVotesEdge { node: ReviewVotes cursor: String } export interface AggregateReview { count: Int } /* * Information about pagination in a connection. */ export interface PageInfo { hasNextPage: Boolean hasPreviousPage: Boolean startCursor?: String endCursor?: String } export interface AggregateUser { count: Int } /* The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). */ export type Float = number /* The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID. */ export type ID_Input = string | number export type ID_Output = string export type DateTime = Date | string /* The `Int` scalar type represents non-fractional signed whole numeric values. 
Int can represent values between -(2^31) and 2^31 - 1. */ export type Int = number /* The `Long` scalar type represents non-fractional signed whole numeric values. Long can represent values between -(2^63) and 2^63 - 1. */ export type Long = string /* The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text. */ export type String = string /* The `Boolean` scalar type represents `true` or `false`. */ export type Boolean = boolean<file_sep>class LocalStorageManager { public removeToken = () => { localStorage.removeItem("token"); }; public setToken = (token: string) => { localStorage.setItem("token", token); }; public getToken = () => { try { return localStorage.getItem("token") || ""; } catch { return ""; } }; } export default new LocalStorageManager(); <file_sep>import { Prisma } from "./generated/prisma"; import { ContextParameters } from "graphql-yoga/dist/types"; import fetch from "node-fetch"; export interface Context extends ContextParameters { db: Prisma; } export const getUserData = async (ctx: Context) => { const Authorization = ctx.request.get("Authorization"); if (Authorization) { const token = Authorization.replace("Bearer ", ""); const extra = await fetch( "https://graph.facebook.com/v3.0/me?access_token=" + token ); const data = (await extra.json()) as { id: string; name: string }; if (!data.id) { throw Error("Usuário não logado"); } return data; } throw new AuthError(); }; export class AuthError extends Error { constructor() { super("Not authorized"); } } <file_sep>import client from "src/config/ApolloClient"; import { FbLoginMutation, LogoffMutation } from "src/config/Mutations"; import LocalStorageManager from "src/singletons/LocalStorageManager"; import { IAuthResponse, IKindResponse } from "src/utils/types"; import { IsRegisteredQuery, LocalLoggedQuery } from "../config/Queries"; class FacebookManager { public init(debug: 
/**
 * Opens the Facebook login dialog and forwards the outcome to
 * handleFBAuth.
 *
 * Status mapping (mirrors getLoginStatus):
 *  - "connected":      pass the auth response so the token is stored.
 *  - "not_authorized": pass nothing so any stale local session is cleared.
 *  - "unknown":        do nothing (the user dismissed the dialog).
 *
 * Fix: the previous implementation wrapped the callback result in a
 * `new Promise` and called `reject()` for the "unknown" status without
 * ever attaching a rejection handler, producing an unhandled promise
 * rejection. Handling the response synchronously removes both the dead
 * Promise and the duplicated status-mapping logic.
 */
public login = () => {
  ((window as any).FB as any).login((response: IKindResponse) => {
    if (response.status === "unknown") {
      return;
    }
    const auth =
      response.status === "connected" ? response.authResponse : undefined;
    this.handleFBAuth(auth);
  });
};
"https://graph.facebook.com/v3.0/me?access_token=***" // tslint:disable-next-line:max-classes-per-file declare class FB {} export default new FacebookManager(); <file_sep>import * as fs from "fs"; const file = fs.readFileSync("./optativas.csv", "utf-8"); const lines = file.split("\r\n"); const useful = lines.filter(x => x !== "").filter(x => x !== "\r"); interface K { cod: string; name: string; } let flip = true; const data = useful.reduce( (prev: K[], curr: string) => { if (flip) { const newItem = { cod: curr, name: "" }; flip = false; return [newItem, ...prev]; } else { const [thisItem, ...rest] = [...prev]; const newItem = { ...thisItem, name: curr }; flip = true; return [newItem, ...rest]; } }, [] as K[] ); console.log(data); <file_sep>import { createBrowserHistory } from "history"; import { Cadastro, Home, Login } from "src/utils/routes"; class HistoryManager { private history = createBrowserHistory(); public getHistory = () => this.history; public goHome = () => this.history.push(Home); public goBack = () => this.history.goBack(); public goToLogin = () => this.history.push(this.loginRoute()); public goToUser = (userId: string) => this.history.push(this.usuarioRoute(userId)); public goToClass = (classId: string) => this.history.push(this.disciplinaRoute(classId)); public getRoute = () => this.history.location.pathname; public clearLoginUrl = () => { this.history.push(this.lastRoute()); }; public lastRoute = () => { const oldState = this.history.location.state; if (!oldState) { return Home; } else { if ((oldState.from as string).startsWith("/cadastro")) { return Home; } if ((oldState.from as string).startsWith("/login")) { return Home; } return oldState.from; } }; // sumir com isso daqui public usuarioRoute = (id: string) => "/usuario/" + id; public disciplinaRoute = (id: string) => "/disciplina/" + id; public avaliarRoute = (id: string) => "/avaliardisciplina/" + id; public editarRoute = (id: string) => "/editaravaliacao/" + id; public loginRoute = () => { 
/**
 * Builds the router target for the registration (cadastro) page.
 *
 * `state.from` records where to return after registering. When the user
 * is already inside the auth flow (/login or /cadastro), the previously
 * recorded origin is kept instead, so finishing registration never
 * bounces back into an auth screen.
 */
public cadastroRoute = () => {
  const current = this.getRoute();
  // /login and /cadastro shared two identical branches in the original;
  // they are collapsed into one condition here.
  const from =
    current.startsWith("/login") || current.startsWith("/cadastro")
      ? this.lastRoute()
      : current;
  return { pathname: Cadastro, state: { from } };
};
MinhasReacoes = "/mreacoes"; export const MinhaConta = "/minhaconta"; export const MinhasAvaliacoes = "/mavaliacoes"; export const AvaliarDisciplina = "/avaliardisciplina/:id"; export const EditarAvaliacao = "/editaravaliacao/:id"; export const Logoff = "/logoff"; export const Cadastro = "/cadastro"; export const Login = "/login"; export const Usuario = "/usuario/:id"; export const Disciplina = "/disciplina/:id"; <file_sep>import { MutationResolvers, ReviewVotes } from "../generated/types"; import { Context, getUserData } from "../utils"; const register: MutationResolvers.RegisterResolver = async ( _, { user: { course, year } }, ctx: Context, info ) => { const { name, id } = await getUserData(ctx); return ctx.db.mutation.createUser( { data: { facebookId: id, name, course, year } }, info ); }; const recountReviewScore = async (ctx: Context, reviewId: string) => { const info = ` { type } `; const votes = await ctx.db.query.reviewVoteses( { where: { review: { id: reviewId } } }, info ); const score = votes.reduce((prev, curr) => { const currVal = curr.type === "Agree" ? 
1 : -1; return prev + currVal; }, 0); await ctx.db.mutation.updateReview({ data: { score }, where: { id: reviewId } }); }; const setVote: MutationResolvers.SetVoteResolver = async ( _, { data: { type, reviewId } }, ctx: Context, info ) => { const { id } = await getUserData(ctx); const maybeOldVote = await ctx.db.query .reviewVoteses({ where: { user: { facebookId: id }, review: { id: reviewId } } }) .then(x => x[0]); let toRet: ReviewVotes | null = null; if (maybeOldVote) { toRet = await ctx.db.mutation.updateReviewVotes({ data: { type }, where: { id: maybeOldVote.id } }); } else { toRet = await ctx.db.mutation.createReviewVotes({ data: { type, user: { connect: { facebookId: id } }, review: { connect: { id: reviewId } } } }); } await recountReviewScore(ctx, reviewId); return toRet; }; const deleteAcc: MutationResolvers.DeleteAccResolver = async ( _, __, ctx: Context, info ) => { const { id } = await getUserData(ctx); return ctx.db.mutation.deleteUser({ where: { facebookId: id } }, info); }; const editReview: MutationResolvers.EditReviewResolver = async ( _, { data: { useful, easy, description, anonymous, recommended, id, teacher, cod } }, ctx: Context, info ) => { const infox = ` { id useful easy recommended } `; const allReviews = (await ctx.db.query.reviews( { where: { classReviewed: { cod } } }, infox )) || []; const reviews = allReviews.filter(x => x.id !== id); const length = reviews.length + 1; const usefulSum = reviews.reduce((prev, curr) => { return prev + parseInt(curr.useful[1]); }, 0) + parseInt(useful[1]); const easySum = reviews.reduce((prev, curr) => { return prev + parseInt(curr.easy[1]); }, 0) + parseInt(easy[1]); let recommendedLength = reviews.filter(x => x.recommended).length; if (recommended) { recommendedLength++; } const newUseful = usefulSum / length; const newEasy = easySum / length; await ctx.db.mutation.updateUfvClass({ data: { recommended: recommendedLength, useful: newUseful, easy: newEasy }, where: { cod } }); return 
// Mutation resolver that creates a new Review and refreshes the cached
// aggregate stats (useful/easy averages and recommended count) on the
// reviewed UfvClass. Rejects a second review by the same user on the
// same class.
const createReview: MutationResolvers.CreateReviewResolver = async (
  _,
  { data: { useful, easy, description, anonymous, recommended, cod, teacher } },
  ctx: Context,
  info
) => {
  const { id } = await getUserData(ctx);
  // Selection set for the existing reviews: enough to detect a duplicate
  // by the same user and to recompute the class aggregates.
  const infox = ` { reviewer { facebookId } useful easy recommended } `;
  const allReviews =
    (await ctx.db.query.reviews(
      { where: { classReviewed: { cod } } },
      infox
    )) || [];
  // A user may review a given class at most once.
  const mine = allReviews.find(x => x.reviewer.facebookId === id);
  if (mine) {
    throw Error("Usuário já tem avaliação nessa matéria");
  }
  // Aggregates below include the review being created (+1 / + parseInt).
  const length = allReviews.length + 1;
  // NOTE(review): useful/easy are enum-like strings whose second character
  // is assumed to be the numeric grade (e.g. "U3"[1] === "3") — confirm
  // against the ReviewUseful/ReviewEasy enum values.
  const usefulSum =
    allReviews.reduce((prev, curr) => {
      return prev + parseInt(curr.useful[1]);
    }, 0) + parseInt(useful[1]);
  const easySum =
    allReviews.reduce((prev, curr) => {
      return prev + parseInt(curr.easy[1]);
    }, 0) + parseInt(easy[1]);
  let recommendedLength = allReviews.filter(x => x.recommended).length;
  if (recommended) {
    recommendedLength++;
  }
  const newUseful = usefulSum / length;
  const newEasy = easySum / length;
  // NOTE(review): this read-recompute-write sequence is not transactional;
  // two concurrent createReview calls for the same class can race on the
  // cached aggregates.
  await ctx.db.mutation.updateUfvClass({
    data: { recommended: recommendedLength, useful: newUseful, easy: newEasy },
    where: { cod }
  });
  return ctx.db.mutation.createReview(
    {
      data: {
        teacher,
        useful,
        easy,
        description,
        anonymous,
        recommended,
        classReviewed: { connect: { cod } },
        reviewer: { connect: { facebookId: id } }
      }
    },
    info
  );
};
*/; } export interface Query { searchAll: SearchResult[]; listClasses: UfvClass[]; user?: User | null; ufvClass?: UfvClass | null; reviews: Review[]; review?: Review | null; me?: User | null; myvote?: ReviewVotes | null; myreviews: Review[]; myvotes: ReviewVotes[]; } export interface UfvClass extends Node { id: string; cod: string; name: string; optional: boolean; department: Department; useful: number; easy: number; recommended: number; reviews?: Review[] | null; } export interface Review extends Node { id: string; useful: ReviewUseful; easy: ReviewEasy; description: string; anonymous: boolean; recommended: boolean; teacher: string; score: number; classReviewed: UfvClass; reviewer: User; votes?: ReviewVotes[] | null; createdAt: DateTime; updatedAt: DateTime; } export interface User extends Node { id: string; facebookId: string; name: string; course: UfvCourses; year: UfvYears; rate: UserRate; reviews?: Review[] | null; votes?: ReviewVotes[] | null; createdAt: DateTime; updatedAt: DateTime; } export interface ReviewVotes extends Node { id: string; review: Review; user: User; type: ReviewVotesTypes; } export interface Mutation { register: User; deleteAcc?: User | null; createReview?: Review | null; editReview?: Review | null; setVote?: ReviewVotes | null; } export namespace QueryResolvers { export interface Resolvers { searchAll?: SearchAllResolver; listClasses?: ListClassesResolver; user?: UserResolver; ufvClass?: UfvClassResolver; reviews?: ReviewsResolver; review?: ReviewResolver; me?: MeResolver; myvote?: MyvoteResolver; myreviews?: MyreviewsResolver; myvotes?: MyvotesResolver; } export type SearchAllResolver = Resolver<SearchResult[], SearchAllArgs>; export interface SearchAllArgs { where: SearchInput; } export type ListClassesResolver = Resolver<UfvClass[], ListClassesArgs>; export interface ListClassesArgs { where: UfvListClassesInput; } export type UserResolver = Resolver<User | null, UserArgs>; export interface UserArgs { where: UserInput; } export type 
UfvClassResolver = Resolver<UfvClass | null, UfvClassArgs>; export interface UfvClassArgs { where: UfvClassInput; } export type ReviewsResolver = Resolver<Review[], ReviewsArgs>; export interface ReviewsArgs { where: ReviewsWhereInput; } export type ReviewResolver = Resolver<Review | null, ReviewArgs>; export interface ReviewArgs { where: ReviewWhereInput; } export type MeResolver = Resolver<User | null>; export type MyvoteResolver = Resolver<ReviewVotes | null, MyvoteArgs>; export interface MyvoteArgs { where: VoteWhereInput; } export type MyreviewsResolver = Resolver<Review[]>; export type MyvotesResolver = Resolver<ReviewVotes[]>; } export namespace UfvClassResolvers { export interface Resolvers { id?: IdResolver; cod?: CodResolver; name?: NameResolver; optional?: OptionalResolver; department?: DepartmentResolver; useful?: UsefulResolver; easy?: EasyResolver; recommended?: RecommendedResolver; reviews?: ReviewsResolver; } export type IdResolver = Resolver<string>; export type CodResolver = Resolver<string>; export type NameResolver = Resolver<string>; export type OptionalResolver = Resolver<boolean>; export type DepartmentResolver = Resolver<Department>; export type UsefulResolver = Resolver<number>; export type EasyResolver = Resolver<number>; export type RecommendedResolver = Resolver<number>; export type ReviewsResolver = Resolver<Review[] | null, ReviewsArgs>; export interface ReviewsArgs { where?: ReviewWhereInput | null; orderBy?: ReviewOrderByInput | null; skip?: number | null; after?: string | null; before?: string | null; first?: number | null; last?: number | null; } } export namespace ReviewResolvers { export interface Resolvers { id?: IdResolver; useful?: UsefulResolver; easy?: EasyResolver; description?: DescriptionResolver; anonymous?: AnonymousResolver; recommended?: RecommendedResolver; teacher?: TeacherResolver; score?: ScoreResolver; classReviewed?: ClassReviewedResolver; reviewer?: ReviewerResolver; votes?: VotesResolver; createdAt?: 
CreatedAtResolver; updatedAt?: UpdatedAtResolver; } export type IdResolver = Resolver<string>; export type UsefulResolver = Resolver<ReviewUseful>; export type EasyResolver = Resolver<ReviewEasy>; export type DescriptionResolver = Resolver<string>; export type AnonymousResolver = Resolver<boolean>; export type RecommendedResolver = Resolver<boolean>; export type TeacherResolver = Resolver<string>; export type ScoreResolver = Resolver<number>; export type ClassReviewedResolver = Resolver<UfvClass, ClassReviewedArgs>; export interface ClassReviewedArgs { where?: UfvClassWhereInput | null; } export type ReviewerResolver = Resolver<User, ReviewerArgs>; export interface ReviewerArgs { where?: UserWhereInput | null; } export type VotesResolver = Resolver<ReviewVotes[] | null, VotesArgs>; export interface VotesArgs { where?: ReviewVotesWhereInput | null; orderBy?: ReviewVotesOrderByInput | null; skip?: number | null; after?: string | null; before?: string | null; first?: number | null; last?: number | null; } export type CreatedAtResolver = Resolver<DateTime>; export type UpdatedAtResolver = Resolver<DateTime>; } export namespace UserResolvers { export interface Resolvers { id?: IdResolver; facebookId?: FacebookIdResolver; name?: NameResolver; course?: CourseResolver; year?: YearResolver; rate?: RateResolver; reviews?: ReviewsResolver; votes?: VotesResolver; createdAt?: CreatedAtResolver; updatedAt?: UpdatedAtResolver; } export type IdResolver = Resolver<string>; export type FacebookIdResolver = Resolver<string>; export type NameResolver = Resolver<string>; export type CourseResolver = Resolver<UfvCourses>; export type YearResolver = Resolver<UfvYears>; export type RateResolver = Resolver<UserRate>; export type ReviewsResolver = Resolver<Review[] | null, ReviewsArgs>; export interface ReviewsArgs { where?: ReviewWhereInput | null; orderBy?: ReviewOrderByInput | null; skip?: number | null; after?: string | null; before?: string | null; first?: number | null; last?: number 
| null; } export type VotesResolver = Resolver<ReviewVotes[] | null, VotesArgs>; export interface VotesArgs { where?: ReviewVotesWhereInput | null; orderBy?: ReviewVotesOrderByInput | null; skip?: number | null; after?: string | null; before?: string | null; first?: number | null; last?: number | null; } export type CreatedAtResolver = Resolver<DateTime>; export type UpdatedAtResolver = Resolver<DateTime>; } export namespace ReviewVotesResolvers { export interface Resolvers { id?: IdResolver; review?: ReviewResolver; user?: UserResolver; type?: TypeResolver; } export type IdResolver = Resolver<string>; export type ReviewResolver = Resolver<Review, ReviewArgs>; export interface ReviewArgs { where?: ReviewWhereInput | null; } export type UserResolver = Resolver<User, UserArgs>; export interface UserArgs { where?: UserWhereInput | null; } export type TypeResolver = Resolver<ReviewVotesTypes>; } export namespace MutationResolvers { export interface Resolvers { register?: RegisterResolver; deleteAcc?: DeleteAccResolver; createReview?: CreateReviewResolver; editReview?: EditReviewResolver; setVote?: SetVoteResolver; } export type RegisterResolver = Resolver<User, RegisterArgs>; export interface RegisterArgs { user: UserRegisterInput; } export type DeleteAccResolver = Resolver<User | null>; export type CreateReviewResolver = Resolver<Review | null, CreateReviewArgs>; export interface CreateReviewArgs { data: CreateReviewData; } export type EditReviewResolver = Resolver<Review | null, EditReviewArgs>; export interface EditReviewArgs { data: EditReviewData; } export type SetVoteResolver = Resolver<ReviewVotes | null, SetVoteArgs>; export interface SetVoteArgs { data: SetVoteData; } } export interface SearchInput { value: string; } export interface ReviewWhereInput { id: string; } export interface UfvClassWhereInput { AND?: UfvClassWhereInput[] | null /** Logical AND on all given filters. */; OR?: UfvClassWhereInput[] | null /** Logical OR on all given filters. 
*/; NOT?: | UfvClassWhereInput[] | null /** Logical NOT on all given filters combined by AND. */; id?: string | null; id_not?: string | null /** All values that are not equal to given value. */; id_in?: string[] | null /** All values that are contained in given list. */; id_not_in?: | string[] | null /** All values that are not contained in given list. */; id_lt?: string | null /** All values less than the given value. */; id_lte?: string | null /** All values less than or equal the given value. */; id_gt?: string | null /** All values greater than the given value. */; id_gte?: | string | null /** All values greater than or equal the given value. */; id_contains?: string | null /** All values containing the given string. */; id_not_contains?: | string | null /** All values not containing the given string. */; id_starts_with?: | string | null /** All values starting with the given string. */; id_not_starts_with?: | string | null /** All values not starting with the given string. */; id_ends_with?: string | null /** All values ending with the given string. */; id_not_ends_with?: | string | null /** All values not ending with the given string. */; cod?: string | null; cod_not?: string | null /** All values that are not equal to given value. */; cod_in?: string[] | null /** All values that are contained in given list. */; cod_not_in?: | string[] | null /** All values that are not contained in given list. */; cod_lt?: string | null /** All values less than the given value. */; cod_lte?: string | null /** All values less than or equal the given value. */; cod_gt?: string | null /** All values greater than the given value. */; cod_gte?: | string | null /** All values greater than or equal the given value. */; cod_contains?: string | null /** All values containing the given string. */; cod_not_contains?: | string | null /** All values not containing the given string. */; cod_starts_with?: | string | null /** All values starting with the given string. 
*/; cod_not_starts_with?: | string | null /** All values not starting with the given string. */; cod_ends_with?: string | null /** All values ending with the given string. */; cod_not_ends_with?: | string | null /** All values not ending with the given string. */; name?: string | null; name_not?: string | null /** All values that are not equal to given value. */; name_in?: string[] | null /** All values that are contained in given list. */; name_not_in?: | string[] | null /** All values that are not contained in given list. */; name_lt?: string | null /** All values less than the given value. */; name_lte?: | string | null /** All values less than or equal the given value. */; name_gt?: string | null /** All values greater than the given value. */; name_gte?: | string | null /** All values greater than or equal the given value. */; name_contains?: string | null /** All values containing the given string. */; name_not_contains?: | string | null /** All values not containing the given string. */; name_starts_with?: | string | null /** All values starting with the given string. */; name_not_starts_with?: | string | null /** All values not starting with the given string. */; name_ends_with?: | string | null /** All values ending with the given string. */; name_not_ends_with?: | string | null /** All values not ending with the given string. */; optional?: boolean | null; optional_not?: | boolean | null /** All values that are not equal to given value. */; department?: Department | null; department_not?: Department | null /** All values that are not equal to given value. */; department_in?: | Department[] | null /** All values that are contained in given list. */; department_not_in?: | Department[] | null /** All values that are not contained in given list. */; useful?: number | null; useful_not?: | number | null /** All values that are not equal to given value. */; useful_in?: | number[] | null /** All values that are contained in given list. 
*/; useful_not_in?: | number[] | null /** All values that are not contained in given list. */; useful_lt?: number | null /** All values less than the given value. */; useful_lte?: | number | null /** All values less than or equal the given value. */; useful_gt?: number | null /** All values greater than the given value. */; useful_gte?: | number | null /** All values greater than or equal the given value. */; easy?: number | null; easy_not?: number | null /** All values that are not equal to given value. */; easy_in?: number[] | null /** All values that are contained in given list. */; easy_not_in?: | number[] | null /** All values that are not contained in given list. */; easy_lt?: number | null /** All values less than the given value. */; easy_lte?: | number | null /** All values less than or equal the given value. */; easy_gt?: number | null /** All values greater than the given value. */; easy_gte?: | number | null /** All values greater than or equal the given value. */; recommended?: number | null; recommended_not?: | number | null /** All values that are not equal to given value. */; recommended_in?: | number[] | null /** All values that are contained in given list. */; recommended_not_in?: | number[] | null /** All values that are not contained in given list. */; recommended_lt?: number | null /** All values less than the given value. */; recommended_lte?: | number | null /** All values less than or equal the given value. */; recommended_gt?: | number | null /** All values greater than the given value. */; recommended_gte?: | number | null /** All values greater than or equal the given value. */; reviews_every?: ReviewWhereInput | null; reviews_some?: ReviewWhereInput | null; reviews_none?: ReviewWhereInput | null; } export interface UserWhereInput { AND?: UserWhereInput[] | null /** Logical AND on all given filters. */; OR?: UserWhereInput[] | null /** Logical OR on all given filters. 
*/; NOT?: | UserWhereInput[] | null /** Logical NOT on all given filters combined by AND. */; id?: string | null; id_not?: string | null /** All values that are not equal to given value. */; id_in?: string[] | null /** All values that are contained in given list. */; id_not_in?: | string[] | null /** All values that are not contained in given list. */; id_lt?: string | null /** All values less than the given value. */; id_lte?: string | null /** All values less than or equal the given value. */; id_gt?: string | null /** All values greater than the given value. */; id_gte?: | string | null /** All values greater than or equal the given value. */; id_contains?: string | null /** All values containing the given string. */; id_not_contains?: | string | null /** All values not containing the given string. */; id_starts_with?: | string | null /** All values starting with the given string. */; id_not_starts_with?: | string | null /** All values not starting with the given string. */; id_ends_with?: string | null /** All values ending with the given string. */; id_not_ends_with?: | string | null /** All values not ending with the given string. */; facebookId?: string | null; facebookId_not?: | string | null /** All values that are not equal to given value. */; facebookId_in?: | string[] | null /** All values that are contained in given list. */; facebookId_not_in?: | string[] | null /** All values that are not contained in given list. */; facebookId_lt?: string | null /** All values less than the given value. */; facebookId_lte?: | string | null /** All values less than or equal the given value. */; facebookId_gt?: string | null /** All values greater than the given value. */; facebookId_gte?: | string | null /** All values greater than or equal the given value. */; facebookId_contains?: | string | null /** All values containing the given string. */; facebookId_not_contains?: | string | null /** All values not containing the given string. 
*/; facebookId_starts_with?: | string | null /** All values starting with the given string. */; facebookId_not_starts_with?: | string | null /** All values not starting with the given string. */; facebookId_ends_with?: | string | null /** All values ending with the given string. */; facebookId_not_ends_with?: | string | null /** All values not ending with the given string. */; name?: string | null; name_not?: string | null /** All values that are not equal to given value. */; name_in?: string[] | null /** All values that are contained in given list. */; name_not_in?: | string[] | null /** All values that are not contained in given list. */; name_lt?: string | null /** All values less than the given value. */; name_lte?: | string | null /** All values less than or equal the given value. */; name_gt?: string | null /** All values greater than the given value. */; name_gte?: | string | null /** All values greater than or equal the given value. */; name_contains?: string | null /** All values containing the given string. */; name_not_contains?: | string | null /** All values not containing the given string. */; name_starts_with?: | string | null /** All values starting with the given string. */; name_not_starts_with?: | string | null /** All values not starting with the given string. */; name_ends_with?: | string | null /** All values ending with the given string. */; name_not_ends_with?: | string | null /** All values not ending with the given string. */; course?: UfvCourses | null; course_not?: UfvCourses | null /** All values that are not equal to given value. */; course_in?: | UfvCourses[] | null /** All values that are contained in given list. */; course_not_in?: | UfvCourses[] | null /** All values that are not contained in given list. */; year?: UfvYears | null; year_not?: UfvYears | null /** All values that are not equal to given value. */; year_in?: | UfvYears[] | null /** All values that are contained in given list. 
*/; year_not_in?: | UfvYears[] | null /** All values that are not contained in given list. */; rate?: UserRate | null; rate_not?: UserRate | null /** All values that are not equal to given value. */; rate_in?: | UserRate[] | null /** All values that are contained in given list. */; rate_not_in?: | UserRate[] | null /** All values that are not contained in given list. */; createdAt?: DateTime | null; createdAt_not?: DateTime | null /** All values that are not equal to given value. */; createdAt_in?: | DateTime[] | null /** All values that are contained in given list. */; createdAt_not_in?: | DateTime[] | null /** All values that are not contained in given list. */; createdAt_lt?: DateTime | null /** All values less than the given value. */; createdAt_lte?: DateTime | null /** All values less than or equal the given value. */; createdAt_gt?: DateTime | null /** All values greater than the given value. */; createdAt_gte?: DateTime | null /** All values greater than or equal the given value. */; updatedAt?: DateTime | null; updatedAt_not?: DateTime | null /** All values that are not equal to given value. */; updatedAt_in?: | DateTime[] | null /** All values that are contained in given list. */; updatedAt_not_in?: | DateTime[] | null /** All values that are not contained in given list. */; updatedAt_lt?: DateTime | null /** All values less than the given value. */; updatedAt_lte?: DateTime | null /** All values less than or equal the given value. */; updatedAt_gt?: DateTime | null /** All values greater than the given value. */; updatedAt_gte?: DateTime | null /** All values greater than or equal the given value. 
*/; reviews_every?: ReviewWhereInput | null; reviews_some?: ReviewWhereInput | null; reviews_none?: ReviewWhereInput | null; votes_every?: ReviewVotesWhereInput | null; votes_some?: ReviewVotesWhereInput | null; votes_none?: ReviewVotesWhereInput | null; } export interface ReviewVotesWhereInput { AND?: ReviewVotesWhereInput[] | null /** Logical AND on all given filters. */; OR?: ReviewVotesWhereInput[] | null /** Logical OR on all given filters. */; NOT?: | ReviewVotesWhereInput[] | null /** Logical NOT on all given filters combined by AND. */; id?: string | null; id_not?: string | null /** All values that are not equal to given value. */; id_in?: string[] | null /** All values that are contained in given list. */; id_not_in?: | string[] | null /** All values that are not contained in given list. */; id_lt?: string | null /** All values less than the given value. */; id_lte?: string | null /** All values less than or equal the given value. */; id_gt?: string | null /** All values greater than the given value. */; id_gte?: | string | null /** All values greater than or equal the given value. */; id_contains?: string | null /** All values containing the given string. */; id_not_contains?: | string | null /** All values not containing the given string. */; id_starts_with?: | string | null /** All values starting with the given string. */; id_not_starts_with?: | string | null /** All values not starting with the given string. */; id_ends_with?: string | null /** All values ending with the given string. */; id_not_ends_with?: | string | null /** All values not ending with the given string. */; type?: ReviewVotesTypes | null; type_not?: ReviewVotesTypes | null /** All values that are not equal to given value. */; type_in?: | ReviewVotesTypes[] | null /** All values that are contained in given list. */; type_not_in?: | ReviewVotesTypes[] | null /** All values that are not contained in given list. 
*/; review?: ReviewWhereInput | null; user?: UserWhereInput | null; } export interface UfvListClassesInput { sort: ClassesRanks; department?: Department | null; optional?: boolean | null; } export interface UserInput { id: string; } export interface UfvClassInput { id?: string | null; cod?: string | null; } export interface ReviewsWhereInput { userId: string; first: number; } export interface VoteWhereInput { reviewId: string; } export interface UserRegisterInput { course: UfvCourses; year: UfvYears; } export interface CreateReviewData { cod: string; teacher: string; useful: ReviewUseful; easy: ReviewEasy; description: string; anonymous: boolean; recommended: boolean; } export interface EditReviewData { id: string; cod: string; teacher: string; useful: ReviewUseful; easy: ReviewEasy; description: string; anonymous: boolean; recommended: boolean; } export interface SetVoteData { reviewId: string; type: ReviewVotesTypes; } export interface SearchAllQueryArgs { where: SearchInput; } export interface ListClassesQueryArgs { where: UfvListClassesInput; } export interface UserQueryArgs { where: UserInput; } export interface UfvClassQueryArgs { where: UfvClassInput; } export interface ReviewsQueryArgs { where: ReviewsWhereInput; } export interface ReviewQueryArgs { where: ReviewWhereInput; } export interface MyvoteQueryArgs { where: VoteWhereInput; } export interface ReviewsUfvClassArgs { where?: ReviewWhereInput | null; orderBy?: ReviewOrderByInput | null; skip?: number | null; after?: string | null; before?: string | null; first?: number | null; last?: number | null; } export interface ClassReviewedReviewArgs { where?: UfvClassWhereInput | null; } export interface ReviewerReviewArgs { where?: UserWhereInput | null; } export interface VotesReviewArgs { where?: ReviewVotesWhereInput | null; orderBy?: ReviewVotesOrderByInput | null; skip?: number | null; after?: string | null; before?: string | null; first?: number | null; last?: number | null; } export interface 
ReviewsUserArgs { where?: ReviewWhereInput | null; orderBy?: ReviewOrderByInput | null; skip?: number | null; after?: string | null; before?: string | null; first?: number | null; last?: number | null; } export interface VotesUserArgs { where?: ReviewVotesWhereInput | null; orderBy?: ReviewVotesOrderByInput | null; skip?: number | null; after?: string | null; before?: string | null; first?: number | null; last?: number | null; } export interface ReviewReviewVotesArgs { where?: ReviewWhereInput | null; } export interface UserReviewVotesArgs { where?: UserWhereInput | null; } export interface RegisterMutationArgs { user: UserRegisterInput; } export interface CreateReviewMutationArgs { data: CreateReviewData; } export interface EditReviewMutationArgs { data: EditReviewData; } export interface SetVoteMutationArgs { data: SetVoteData; } export type Department = | "Depto__de_Economia_Rural" | "Depto__de_Engenharia_Agricola" | "Depto__de_Engenharia_Florestal" | "Depto__de_Fitopatologia" | "Depto__de_Fitotecnia" | "Depto__de_Solos" | "Depto__de_Zootecnia" | "Depto__de_Biologia_Animal" | "Depto__de_Biologia_Geral" | "Depto__de_Biologia_Vegetal" | "Depto__de_Bioquimica_e_Biologia_Molecular" | "Depto__de_Educacao_Fisica" | "Depto__de_Entomologia" | "Depto__de_Microbiologia" | "Depto__de_Medicina_e_Enfermagem" | "Depto__de_Nutricaoo_e_Saude" | "Depto__de_Veterinaria" | "Depto__de_Arquitetura_e_Urbanismo" | "Depto__de_Engenharia_Civil" | "Depto__de_Engenharia_Eletrica" | "Depto__de_Engenharia_de_Producao_e_Mecanica" | "Depto__de_Estatistica" | "Depto__de_Fisica" | "Depto__de_Informatica" | "Depto__de_Matematica" | "Depto__de_Quimica" | "Depto__de_Tecnologia_de_Alimentos" | "Depto__de_Administracao_e_Contabilidade" | "Depto__de_Artes_e_Humanidades" | "Depto__de_Ciencias_Sociais" | "Depto__de_Comunicacao_Social" | "Depto__de_Direito" | "Depto__de_Economia" | "Depto__de_Economia_Domestica" | "Depto__de_Educacao" | "Depto__de_Geografia" | "Depto__de_Historia" | "Depto__de_Letras"; 
export type ReviewOrderByInput = | "id_ASC" | "id_DESC" | "useful_ASC" | "useful_DESC" | "easy_ASC" | "easy_DESC" | "description_ASC" | "description_DESC" | "anonymous_ASC" | "anonymous_DESC" | "recommended_ASC" | "recommended_DESC" | "teacher_ASC" | "teacher_DESC" | "score_ASC" | "score_DESC" | "createdAt_ASC" | "createdAt_DESC" | "updatedAt_ASC" | "updatedAt_DESC"; export type ReviewUseful = "U0" | "U1" | "U2" | "U3" | "U4" | "U5"; export type ReviewEasy = "E0" | "E1" | "E2" | "E3" | "E4" | "E5"; export type UfvCourses = | "Agronegocio" | "Agronomia" | "Cooperativismo" | "Engenharia_Agricola_e_Ambiental" | "Engenharia_Florestal" | "Zootecnia" | "Bioquimica" | "Ciencias_Biologicas__Bacharelado_Licenciatura_" | "Educacao_Fisica__Bacharelado_Licenciatura_" | "Enfermagem" | "Licenciatura_em_Ciencias_Biologicas__Noturno_" | "Medicina" | "Medicina_Veterinaria" | "Nutricao" | "Arquitetura_e_Urbanismo" | "Ciencia_da_Computacaoo" | "Ciencia_e_Tecnologia_de_Laticinios" | "Engenharia_Ambiental" | "Engenharia_Civil" | "Engenharia_de_Agrimensura_e_Cartografica" | "Engenharia_de_Alimentos" | "Engenharia_de_Producao" | "Engenharia_Eletrica" | "Engenharia_Mecanica" | "Engenharia_Quimica" | "Fisica__Bacharelado_Licenciatura_" | "Licenciatura_em_Fisica" | "Licenciatura_em_Matematica" | "Licenciatura_em_Quimica" | "Matematica__Bacharelado_Licenciatura_" | "Quimica__Bacharelado_Licenciatura_" | "Administracao" | "Ciencias_Contabeis" | "Ciencias_Economicas" | "Ciencias_Sociais__Bacharelado_Licenciatura_" | "Comunicacao_Social___Jornalismo" | "Danca__Bacharelado_Licenciatura_" | "Direito" | "Economia_Domestica" | "Educacao_do_Campo" | "Educacao_Infantil" | "Geografia__Bacharelado_Licenciatura_" | "Historia__Bacharelado_Licenciatura_" | "Letras" | "Pedagogia" | "Secretariado_Executivo_Trilingue___Portugues__Frances_e_Ingles" | "Servico_Social"; export type UfvYears = | "Y19201" | "Y19211" | "Y19221" | "Y19231" | "Y19241" | "Y19251" | "Y19261" | "Y19271" | "Y19281" | "Y19291" | "Y19301" 
| "Y19311" | "Y19321" | "Y19331" | "Y19341" | "Y19351" | "Y19361" | "Y19371" | "Y19381" | "Y19391" | "Y19401" | "Y19411" | "Y19421" | "Y19431" | "Y19441" | "Y19451" | "Y19461" | "Y19471" | "Y19481" | "Y19491" | "Y19501" | "Y19511" | "Y19521" | "Y19531" | "Y19541" | "Y19551" | "Y19561" | "Y19571" | "Y19581" | "Y19591" | "Y19601" | "Y19611" | "Y19621" | "Y19631" | "Y19641" | "Y19651" | "Y19661" | "Y19671" | "Y19681" | "Y19691" | "Y19701" | "Y19711" | "Y19721" | "Y19731" | "Y19741" | "Y19751" | "Y19761" | "Y19771" | "Y19781" | "Y19791" | "Y19801" | "Y19811" | "Y19821" | "Y19831" | "Y19841" | "Y19851" | "Y19861" | "Y19871" | "Y19881" | "Y19891" | "Y19901" | "Y19911" | "Y19921" | "Y19931" | "Y19941" | "Y19951" | "Y19961" | "Y19971" | "Y19981" | "Y19991" | "Y20001" | "Y20011" | "Y20021" | "Y20031" | "Y20041" | "Y20051" | "Y20061" | "Y20071" | "Y20081" | "Y20091" | "Y20101" | "Y20111" | "Y20121" | "Y20131" | "Y20141" | "Y20151" | "Y20161" | "Y20171" | "Y20181"; export type UserRate = "Iniciante" | "Confiavel"; export type ReviewVotesTypes = "Agree" | "Disagree"; export type ReviewVotesOrderByInput = | "id_ASC" | "id_DESC" | "type_ASC" | "type_DESC" | "updatedAt_ASC" | "updatedAt_DESC" | "createdAt_ASC" | "createdAt_DESC"; export type ClassesRanks = "Useful" | "Easy" | "Recommended"; export type SearchResult = UfvClass | User; <file_sep>import { Context, getUserData } from "../utils"; import { Query as QueryType, ListClassesQueryArgs, QueryResolvers, User } from "../generated/types"; import Fuse from "fuse.js"; import { UfvClass } from "../generated/prisma"; export const FUSE_OPT = { shouldSort: true, includeScore: true, threshold: 0.6, location: 0, distance: 100, maxPatternLength: 32, minMatchCharLength: 1 }; const myvote: QueryResolvers.MyvoteResolver = async ( _, { where: { reviewId } }, ctx: Context, info ) => { try { const { id } = await getUserData(ctx); const response = await ctx.db.query.reviewVoteses( { where: { review: { id: reviewId }, user: { facebookId: id } } 
}, info ); return response[0]; } catch { return null; } }; const myvotes: QueryResolvers.MyvotesResolver = async ( _, __, ctx: Context, info ) => { try { const { id } = await getUserData(ctx); return ctx.db.query.reviewVoteses( { where: { user: { facebookId: id } } }, info ); } catch { return []; } }; const reviews: QueryResolvers.ReviewsResolver = async ( _, { where: { userId, first } }, ctx: Context, info ) => { return ctx.db.query.reviews( { first, where: { reviewer: { id: userId } } }, info ); }; const myreviews: QueryResolvers.MyreviewsResolver = async ( _, __, ctx: Context, info ) => { const { id } = await getUserData(ctx); return ctx.db.query.reviews( { where: { reviewer: { facebookId: id } } }, info ); }; const ufvClass: QueryResolvers.UfvClassResolver = async ( _, { where: { id, cod } }, ctx: Context, info ) => { return ctx.db.query.ufvClass( { where: { id: id || undefined, cod: cod || undefined } }, info ); }; const user: QueryResolvers.UserResolver = async ( _, { where: { id } }, ctx: Context, info ) => { return ctx.db.query.user({ where: { id: id } }, info); }; const review: QueryResolvers.ReviewResolver = async ( _, { where: { id } }, ctx: Context, info ) => { return ctx.db.query.review({ where: { id: id } }, info); }; const me: QueryResolvers.MeResolver = async (_, __, ctx: Context, info) => { try { const { id } = await getUserData(ctx); return ctx.db.query.user({ where: { facebookId: id } }, info); } catch { return null; } }; const searchAll: QueryResolvers.SearchAllResolver = async ( _, args, ctx: Context, info ) => { const { where: { value } } = args; const usersResponse = await ctx.db.query.users( { where: { name_contains: value }, first: 5 }, info ); const usersFuse = new Fuse( usersResponse.map(x => ({ ...x, __typename: "User" })), { ...FUSE_OPT, keys: ["name"] } ); const ufvClassesResponse = await ctx.db.query.ufvClasses( { where: { OR: [{ name_contains: value }, { cod_contains: value }] }, first: 5 }, info ); const ufvClassesFuse = new Fuse( 
ufvClassesResponse.map(x => ({ ...x, __typename: "UfvClass" })), { ...FUSE_OPT, keys: ["name", "cod"] } ); const users = usersFuse.search(value) as { item: User; score: number }[]; const classes = ufvClassesFuse.search(value) as { item: UfvClass; score: number; }[]; const results = [...classes, ...users]; const sorted = results.sort((a, b) => { return a.score - b.score; }); const res = sorted.map(x => x.item); return res; }; export const Query = { review, myvote, myreviews, myvotes, reviews, ufvClass, searchAll, me, user, listClasses( parent: never, args: ListClassesQueryArgs, ctx: Context, info: never ): Promise<QueryType["listClasses"]> { const { sort, optional, department } = args.where; const where = { optional: optional === null ? undefined : optional, department: department === null ? undefined : department }; const first = 15; if (sort === "Easy") { return ctx.db.query.ufvClasses( { orderBy: "easy_DESC", where, first }, info ); } if (sort === "Useful") { return ctx.db.query.ufvClasses( { orderBy: "useful_DESC", where, first }, info ); } if (sort === "Recommended") { return ctx.db.query.ufvClasses( { orderBy: "recommended_DESC", where, first }, info ); } throw Error("Não implementado"); } }; <file_sep>export function debounce(func: () => void, wait = 50) { let h: number; return () => { clearTimeout(h); h = setTimeout(() => func(), wait) as any; }; } <file_sep># Como rodar 1. Insale o yarn (qualquer versão), node (versão 8), npm (qualquer versão compatível com node 8) 1. Copie esta pasta 1. Vá até a raiz desta pasta com o terminal 1. $ yarn (para instalar as dependencias) 1. $ yarn start (para iniciar o servidor) 1. acesse localhost:3000
10172762610a103ad05ba2c03d7a92a217f25cc2
[ "Markdown", "TypeScript", "JavaScript" ]
18
Markdown
lucasavila00/INF221TF
d39994eccb8bbc72c7fb15df3eba3b3c6b27bead
0e85416f82e30f9bcd677404ae4897ee8e61069e
refs/heads/master
<file_sep>from enum import Enum from typing import Dict, List, Tuple import bpy from bpy.props import ( StringProperty, IntProperty, BoolProperty, FloatProperty, FloatVectorProperty, CollectionProperty, EnumProperty, PointerProperty, IntVectorProperty ) from .operator_func.texture_generator import ( list_mask_types_as_blender_enum, UvMaskTypes, list_mix_mask_modes_as_blender_enum) from .operator_func.common import MeshType # UV-mask stripe properties class OBJECT_NusiqMcblendStripeProperties(bpy.types.PropertyGroup): '''Properties of a UV-mask stripe.''' width: IntProperty( # type: ignore name='Width', default=1) width_relative: FloatProperty( # type: ignore name='Width', min=0.0, max=1.0, default=0.1) strength: FloatProperty( # type: ignore name='Strength', min=0.0, max=1.0, default=1.0) def json(self, relative: bool) -> Dict: ''' :returns: JSON representation of this object. ''' result = {'strength': self.strength} if relative: result['width'] = round(self.width_relative, 5) else: result['width'] = self.width return result # UV-mask color properties class OBJECT_NusiqMcblendColorProperties(bpy.types.PropertyGroup): '''Properties of a UV-mask color.''' color: FloatVectorProperty( # type: ignore name='Color', subtype='COLOR', min=0, max=1, step=1000, default=(1.0, 1.0, 1.0)) def json(self) -> List[float]: ''' :returns: JSON representation of this object ''' # 1/256 = 0.00390625 (8 digits precision) return [round(i, 8) for i in self.color] # UV-mask properties class OBJECT_NusiqMcblendUvMaskProperties(bpy.types.PropertyGroup): '''Properties of UV-mask.''' ui_hidden: BoolProperty( # type: ignore name='Hide', default=False) ui_collapsed: BoolProperty( # type: ignore name='Collapse', default=False) mask_type: EnumProperty( # type: ignore items=list_mask_types_as_blender_enum, name='Mask type') # mode: str # MixMask mode: EnumProperty( # type: ignore items=list_mix_mask_modes_as_blender_enum, name='Mix mode') # MixMask children: IntProperty( # type: ignore name='Number 
of children', min=1, default=2) # colors: List[Color] # ColorPaletteMask colors: CollectionProperty( # type: ignore type=OBJECT_NusiqMcblendColorProperties, name='Colors') # interpolate: bool # ColorPaletteMask interpolate: BoolProperty( # type: ignore name='Interpolate') # normalize: bool # ColorPaletteMask normalize: BoolProperty( # type: ignore name='Normalize') # p1: Tuple[float, float] # GradientMask EllipseMask RectangleMask p1_relative: FloatVectorProperty( # type: ignore name='Point A', min=0.0, max=1.0, default=(0.0, 0.0), size=2) # p2: Tuple[float, float] # GradientMask EllipseMask RectangleMask p2_relative: FloatVectorProperty( # type: ignore name='Point B', min=0.0, max=1.0, default=(0.0, 0.0), size=2) # p1: Tuple[float, float] # GradientMask EllipseMask RectangleMask p1: IntVectorProperty( # type: ignore name='Point A', default=(0.1, 0.1), size=2) # p2: Tuple[float, float] # GradientMask EllipseMask RectangleMask p2: IntVectorProperty( # type: ignore name='Point B', default=(0.9, 0.9), size=2) # stripes: List[Stripe] # GradientMask StripesMask stripes: CollectionProperty( # type: ignore type=OBJECT_NusiqMcblendStripeProperties, name='Stripes') # relative_boundaries: bool # GradientMask EllipseMask RectangleMask # StripesMask relative_boundaries: BoolProperty( # type: ignore name='Relative boundaries') # expotent: float # GradientMask EllipseMask RectangleMask RandomMask # MixMask expotent: FloatProperty( # type: ignore name='Expotent', default=1.0, soft_min=-10.0, soft_max=10.0) # strength: Tuple[float, float] # EllipseMask RectangleMask RandomMask # MixMask strength: FloatVectorProperty( # type: ignore min=0.0, max=1.0, default=(0.0, 1.0), size=2) # hard_edge: bool # EllipseMask RectangleMask hard_edge: BoolProperty( # type: ignore name='Hard edge') # horizontal: bool # StripesMask horizontal: BoolProperty( # type: ignore name='Horizontal') # seed: Optional[int] # RandomMask use_seed: BoolProperty( # type: ignore name='Use seed') seed: IntProperty( # 
type: ignore name='Seed') # color: Tuple[float, float, float] # ColorMask color: PointerProperty( # type: ignore type=OBJECT_NusiqMcblendColorProperties, name='Color') def json(self) -> Dict: ''' :returns: JSON represetnation of this object. ''' result = { "mask_type": self.mask_type } if self.mask_type == UvMaskTypes.MIX_MASK.value: result['mode'] = self.mode result['children'] = self.children if self.mask_type == UvMaskTypes.COLOR_PALLETTE_MASK.value: result['colors'] = [color.json() for color in self.colors] result['interpolate'] = self.interpolate result['normalize'] = self.normalize if self.mask_type in [ UvMaskTypes.GRADIENT_MASK.value, UvMaskTypes.ELLIPSE_MASK.value, UvMaskTypes.RECTANGLE_MASK.value]: if self.relative_boundaries: result['p1'] = [round(i, 5) for i in self.p1_relative] result['p2'] = [round(i, 5) for i in self.p2_relative] else: result['p1'] = list(self.p1) result['p2'] = list(self.p2) if self.mask_type in [ UvMaskTypes.GRADIENT_MASK.value, UvMaskTypes.STRIPES_MASK.value]: result['stripes'] = [ stripe.json(self.relative_boundaries) for stripe in self.stripes] if self.mask_type in [ UvMaskTypes.GRADIENT_MASK.value, UvMaskTypes.ELLIPSE_MASK.value, UvMaskTypes.RECTANGLE_MASK.value, UvMaskTypes.STRIPES_MASK.value]: result['relative_boundaries'] = self.relative_boundaries if self.mask_type in [ UvMaskTypes.GRADIENT_MASK.value, UvMaskTypes.ELLIPSE_MASK.value, UvMaskTypes.RECTANGLE_MASK.value, UvMaskTypes.MIX_MASK.value, UvMaskTypes.RANDOM_MASK.value]: result['expotent'] = round(self.expotent, 5) if self.mask_type in [ UvMaskTypes.ELLIPSE_MASK.value, UvMaskTypes.RECTANGLE_MASK.value, UvMaskTypes.MIX_MASK.value, UvMaskTypes.RANDOM_MASK.value]: result['strength'] = [round(i, 5) for i in self.strength] if self.mask_type in [ UvMaskTypes.ELLIPSE_MASK.value, UvMaskTypes.RECTANGLE_MASK.value]: result['hard_edge'] = self.hard_edge if self.mask_type == UvMaskTypes.STRIPES_MASK.value: result['horizontal'] = self.horizontal if self.mask_type == 
UvMaskTypes.RANDOM_MASK.value: result['use_seed'] = self.use_seed result['seed'] = self.seed if self.mask_type == UvMaskTypes.COLOR_MASK.value: result['color'] = self.color.json() return result # UV-group properties def get_unused_uv_group_name(base_name: str, i=1): ''' Gets the name of UV-group which is not used by any other UV-group. Uses the base name and adds number at the end of it to find unique name with pattern :code:`{base_name}.{number:04}`. ''' uv_groups = bpy.context.scene.nusiq_mcblend_uv_groups name = base_name # f'{base_name}.{i:04}' while name in uv_groups.keys(): name = f'{base_name}.{i:04}' i += 1 return name def _update_uv_group_name(uv_group, new_name: str, update_references: bool): # Update the names of all of the meshes if update_references: for obj in bpy.data.objects: if obj.type == "MESH": obj_props = obj.nusiq_mcblend_object_properties if obj_props.uv_group == uv_group.name: obj_props.uv_group = new_name # Update the name of the UV group uv_group['name'] = new_name def _set_uv_group_name(self, value): groups = bpy.context.scene.nusiq_mcblend_uv_groups # Empty name is no allowed if value == '': return # Objects use '' as the UV-group name when they have no uv-group. # The '' is also the default value of the UV-group (but it's instantly # changed to something else on creation). This prevents assigning all # of the object without an UV group to newly added UV-group. 
update_references = 'name' in self # If name already in use rename the other uv group for other_group in groups: if ( # Change the of the duplicate if there is one other_group.path_from_id() != self.path_from_id() and other_group.name == value): # Get starting name index i = 1 base_name = value split_name = value.split('.') try: prev_i = int(split_name[-1]) i = i if prev_i <= 0 else prev_i base_name = '.'.join(split_name[:-1]) except ValueError: pass other_new_name = get_unused_uv_group_name(base_name, i) _update_uv_group_name(other_group, other_new_name) break _update_uv_group_name(self, value, update_references) def _get_uv_group_name(self): if 'name' not in self: return '' return self['name'] class OBJECT_NusiqMcblendUvGroupProperties(bpy.types.PropertyGroup): '''Properties of UV-group.''' name: StringProperty( # type: ignore name="Name", description='The name of the UV group.', # The Add operator overwrites default value on creation to trigger the # update function default='', maxlen=1024, set=_set_uv_group_name, get=_get_uv_group_name) side1: CollectionProperty( # type: ignore type=OBJECT_NusiqMcblendUvMaskProperties, description='Collection of the filters for side1 of the cuboid.') side2: CollectionProperty( # type: ignore type=OBJECT_NusiqMcblendUvMaskProperties, description='Collection of the filters for side2 of the cuboid.') side3: CollectionProperty( # type: ignore type=OBJECT_NusiqMcblendUvMaskProperties, description='Collection of the filters for side3 of the cuboid.') side4: CollectionProperty( # type: ignore type=OBJECT_NusiqMcblendUvMaskProperties, description='Collection of the filters for side4 of the cuboid.') side5: CollectionProperty( # type: ignore type=OBJECT_NusiqMcblendUvMaskProperties, description='Collection of the filters for side5 of the cuboid.') side6: CollectionProperty( # type: ignore type=OBJECT_NusiqMcblendUvMaskProperties, description='Collection of the filters for side6 of the cuboid.') def json(self) -> Dict: ''' :returns: JSON 
representation of this object. ''' return { 'version': 1, 'name': self.name, 'side1': [ mask.json() for mask in self.side1 ], 'side2': [ mask.json() for mask in self.side2 ], 'side3': [ mask.json() for mask in self.side3 ], 'side4': [ mask.json() for mask in self.side4 ], 'side5': [ mask.json() for mask in self.side5 ], 'side6': [ mask.json() for mask in self.side6 ] } # Model object properties def list_mesh_types_as_blender_enum(self, context): '''List mesh types for EnumProperty.''' # pylint: disable=unused-argument return [(i.value, i.value, i.value) for i in MeshType] class OBJECT_NusiqMcblendObjectProperties(bpy.types.PropertyGroup): '''Custom properties of an object.''' mirror: BoolProperty( # type: ignore name="Mirror", description="Defines how to layout the UV during UV generation.", default=False, ) uv_group: StringProperty( # type: ignore name="UV group", description=( "Objects with the same UV group can be mapped to the same spot on " "the texture if they have the same dimensions. Empty string means " "that the object doesn't belong to any UV group."), default="", maxlen=1024 ) is_bone: BoolProperty( # type: ignore name="Export as bone", description=( "If true than this object will be exported as minecraft bone."), default=False, ) inflate: FloatProperty( # type: ignore name="Inflate", description="The inflate value of this object.", default=0.0 ) mesh_type: EnumProperty( # type: ignore items=list_mesh_types_as_blender_enum, name='Mesh type') # Animation sound and particle effects class EffectTypes(Enum): ''' EffectTypes types of the effects in the event. ''' SOUND_EFFECT='Sound Effect' PARTICLE_EFFECT='Particle Effect' def list_effect_types_as_blender_enum(self, context): ''' List effect types for EnumProperty. 
''' # pylint: disable=unused-argument return [(i.value, i.value, i.value) for i in EffectTypes] class OBJECT_NusiqMcblendEffectProperties(bpy.types.PropertyGroup): ''' An effect of an event (sound or particles) ''' effect_type: EnumProperty( # type: ignore items=list_effect_types_as_blender_enum, name='Effect type') effect: StringProperty( # type: ignore name="Effect", description='The identifier of the sound effect.', default='', maxlen=1024) locator: StringProperty( # type: ignore name="Locator", description='The identifier of the locator effect.', default='', maxlen=1024) pre_effect_script: StringProperty( # type: ignore name="Locator", description='A Molang script that will be run when the particle emitter is initialized.', default='', maxlen=2048) bind_to_actor: BoolProperty( # type: ignore name="Bind to actor", description="Whether the should be spawned in the world without being bound to an actor.", default=True) def get_unused_event_name(base_name: str, i=1): ''' Gets the name of event which is not used by any other event in the animation. Uses the base name and adds number at the end of it to find unique name with pattern :code:`{base_name}.{number:04}`. This function assumes there is an active event and active animation. It will throw errors without asserting these conditions. 
''' events = bpy.context.scene.nusiq_mcblend_events name = base_name while name in events.keys(): name = f'{base_name}.{i:04}' i += 1 return name def _update_event_name(event, new_name: str): # Update the names of all of the meshes pass # TODO - update names of existing timeline markers # Update the name of the UV group event['name'] = new_name def _set_event_name(self, value): events = bpy.context.scene.nusiq_mcblend_events # Empty name is no allowed if value == '': return # If name already in use rename the other uv group for other_event in events: if ( # Change the of the duplicate if there is one other_event.path_from_id() != self.path_from_id() and other_event.name == value): # Get starting name index i = 1 base_name = value split_name = value.split('.') try: prev_i = int(split_name[-1]) i = i if prev_i <= 0 else prev_i base_name = '.'.join(split_name[:-1]) except ValueError: pass other_new_name = get_unused_event_name(base_name, i) _update_event_name(other_event, other_new_name) break _update_event_name(self, value) def _get_event_name(self): if 'name' not in self: return '' return self['name'] class OBJECT_NusiqMcblendEventProperties(bpy.types.PropertyGroup): ''' A collection of sound and particle events. ''' name: StringProperty( # type: ignore name="Name", description=( "The name of the of the event. Also used to identify timeline " "markers that trigger this event."), # The Add operator overwrites default value on creation to trigger the # update function default='', maxlen=1024, set=_set_event_name, get=_get_event_name) effects: CollectionProperty( # type: ignore type=OBJECT_NusiqMcblendEffectProperties, description='Collection of effects triggered of this event.', name='Sound effects') def get_effects_dict(self) -> Tuple[List[Dict], List[Dict]]: ''' Returns tuple of two lists (sound effects, particle effects). 
''' sound_effects: List[Dict] = [] particle_effects: List[Dict] = [] for effect in self.effects: if effect.effect_type == EffectTypes.PARTICLE_EFFECT.value: result = {"effect": effect.effect} if effect.locator != '': result["locator"] = effect.locator if effect.pre_effect_script != '': result["pre_effect_script"] = ( effect.pre_effect_script) if not effect.bind_to_actor: result["bind_to_actor"] = effect.bind_to_actor particle_effects.append(result) elif effect.effect_type == EffectTypes.SOUND_EFFECT.value: sound_effects.append({"effect": effect.effect}) else: raise ValueError('Unknown effect type.') return sound_effects, particle_effects # Animation properties class OBJECT_NusiqMcblendTimelineMarkerProperties(bpy.types.PropertyGroup): '''Saves the data about a timeline marker.''' name: StringProperty( # type: ignore name="Name", description="Name of the timeline marker.", default="marker", maxlen=1024 ) frame: IntProperty( # type: ignore name="Frame", description="The frame of the timeline marker.", default=0 ) class OBJECT_NusiqMcblendAnimationProperties(bpy.types.PropertyGroup): '''Properties of an animation template.''' name: StringProperty( # type: ignore name="Name", description="Name of the animation.", default="animation", maxlen=1024 ) single_frame: BoolProperty( # type: ignore name="Single frame", description="Exports current pose as single frame animation", default=False, ) skip_rest_poses: BoolProperty( # type: ignore name="Skip rest poses", description=( "Whether bone transformations that represent a rest position " "throughout the whole animation should be ignored."), default=False, ) anim_time_update: StringProperty( # type: ignore name="anim_time_update", description="Adds anim_time_update value unless is left empty", default="", maxlen=1024 ) loop: BoolProperty( # type: ignore name="Loop", description="Decides if animation should be looped", default=True, ) frame_start: IntProperty( # type: ignore name="Frame start", description="The first frame of 
the animation.", default=0, min=0 ) frame_current: IntProperty( # type: ignore name="Frame current", description="The current frame of the animation.", default=100, min=0 ) frame_end: IntProperty( # type: ignore name="Frame end", description="The last frame of the animation.", default=100, min=0 ) timeline_markers: CollectionProperty( # type: ignore type=OBJECT_NusiqMcblendTimelineMarkerProperties, name='Timeline Markers', description='Timeline markers related to this animation.' ) # Mcblend properties class OBJECT_NusiqMcblendExporterProperties(bpy.types.PropertyGroup): '''Global properties of Mcblend.''' model_name: StringProperty( # type: ignore name="", description="Name of the model", default="model", maxlen=1024 ) visible_bounds_offset: FloatVectorProperty( # type: ignore name="Visible bounds offset", description="visible_bounds_offset of the model", default=(0.0, 0.0, 0.0) ) visible_bounds_width: FloatProperty( # type: ignore name="Visible bounds width", description="visible_bounds_width of the model", default=1.0 ) visible_bounds_height: FloatProperty( # type: ignore name="Visible bounds height", description="visible_bounds_height of the model", default=1.0 ) texture_width: IntProperty( # type: ignore name="", description="Minecraft UV parameter width.", default=64, min=1 ) texture_height: IntProperty( # type: ignore name="", description=( "Minecraft UV parameter height. If you set it to 0 than the height" " of the texture will be picked automatically for you." ), default=64, min=1 ) texture_template_resolution: IntProperty( # type: ignore name="Template texture resolution", description=( 'Sets the resolution of the template texture.' 'describes how many pixels on the image is represented by one ' 'texture_width or texture_height unit in model definition. ' 'The value of 1 gives the standard minecraft texture ' 'resolution.' 
), default=1, min=1, soft_max=5, ) allow_expanding: BoolProperty( # type: ignore name="Allow Texture Expanding", description="Allows expanding texture during texture generation.", default=True, ) generate_texture: BoolProperty( # type: ignore name="Generate texture", description="Generates texture during UV mapping.", default=True, )<file_sep># Mcblend ![CI](https://github.com/Nusiq/mcblend/workflows/CI/badge.svg) Blender 2.83 addon for working with Minecraft models. ### User guides User guides and installation instructions can be found on the project site: https://nusiq.github.io/mcblend/ ## Features - Exporting models for Minecraft bedrock edition. Custom Minecraft model properties like `mirror`, `inflate` or `locator` are supported. - Exporting animations for Minecraft bedrock edition. Support for Minecraft property `anim_time_update`. - Importing models from Minecraft bedrock edition. - Generating the UV maps and the template textures. - Easy way to create animations that would be really tedious to do thanks to physics simulation and rigging. ## Planned features - Planned features can be found in the ["Issues"](https://github.com/Nusiq/Blender-Export-MC-Bedrock-Model/issues) tab of this project on Github. <file_sep># Overview A Blender addon for creating and animating Minecraft Bedrock Edition models. ## Features - Importing Minecraft Bedrock Edition models. - Tools that support Minecraft models: - Generating Minecraft style UV-mapping (default and per-face UV-mapping), - Generating textures. - Exporting keyframe animations. - Exporting poses of models as a single frame looped animations. - Mcblend supports models that use Armatures so it's possible to use features such as inverse kinematics to your advantage to create animating easier. You can also use baked animations with physics. ## Planned features The improvements planned for this Blender plugin are often listed as ["issues"](https://github.com/Nusiq/mcblend/issues) on the Github project page. 
## Installation
1. Download and install [Blender](https://www.blender.org/download/).
2. Download the latest version (zip file) from the project page on Github:
[https://github.com/Nusiq/mcblend/releases](https://github.com/Nusiq/mcblend/releases)
3. In Blender go to: `Edit -> Preferences -> Add-ons -> Install...`
![](./img/blender_addons.png)
4. Select the zip file in the file explorer.
![](./img/blender_addons_filechooser.png)
5. Search for "Mcblend" in Add-ons and select the check box to enable the add-on.
![](./img/blender_addons_checkbox.png)
<file_sep># Animating effects
The sound effects and particle effects are animated with the use of events. You can
define events in the _Mcblend events_ menu in
[scene properties](../gui_changes/#scene-properties). One event can contain multiple
particle and sound effects.

The effects are not visible in the preview of the animation in Blender. They only add
some information to the exported animation.

Effects can be attached to the animation by adding markers to the timeline with the
name of the event. You can trigger the same event multiple times in the animation by
adding multiple timeline markers with the same name. Timeline markers that do not
have a matching event name are ignored when exporting the animation and serve the
same purpose as any other timeline marker in Blender.

A timeline with timeline markers for events:

![](../img/effect_animation.png)<file_sep>'''
This is a testing script for the model importer. It imports a file, exports the
imported content and then compares the exported file with the original.
''' import os import subprocess import json from pathlib import Path import typing as tp import pytest import shutil from .common import assert_is_model, blender_run_script, make_comparable_json def make_comparison_files( source: str, tmp: str, use_empties: bool ) -> tp.Tuple[tp.Dict, tp.Dict, str]: ''' Loads model from source to blender using nusiq_mcblend_import_operator Exports this model to tmp (to a file with the same name as source file). Returns two dictionaries and a string: - the original model - the exported model. - path to exported model temporary file ''' source = os.path.abspath(source) tmp = os.path.abspath(tmp) target = os.path.join(tmp, os.path.split(source)[1]) script = os.path.abspath('./blender_scripts/import_export.py') # Windows uses wierd path separators source = source.replace('\\', '/') tmp = tmp.replace('\\', '/') target = target.replace('\\', '/') script = script.replace('\\', '/') # Create tmp if not exists Path(tmp).mkdir(parents=True, exist_ok=True) # Run blender actions if use_empties: blender_run_script(script, source, target, "use_empties") else: blender_run_script(script, source, target) # Validate results with open(source, 'r') as f: source_dict = json.load(f) with open(target, 'r') as f: target_dict = json.load(f) return ( source_dict, target_dict, target ) # PYTEST FUNCTIONS MODEL_FILES = [ # Import empties ("cube_translated.geo.json", True), ("cube_with_offset_pivot.geo.json", True), ("cube.geo.json", True), ("single_bone_rotated_x.geo.json", True), ("single_bone_rotated_xyz.geo.json", True), ("single_bone_rotated_y.geo.json", True), ("single_bone_rotated_z.geo.json", True), ("single_bone_translated.geo.json", True), ("single_bone.geo.json", True), ("three_bones_rotated_x.geo.json", True), ("three_bones.geo.json", True), ("two_bones.geo.json", True), ("battle_mech.geo.json", True), # Import bones ("cube_translated.geo.json", True), ("cube_with_offset_pivot.geo.json", True), ("cube.geo.json", True), # 
("single_bone_rotated_x.geo.json", True), # Single bones not supported by export # ("single_bone_rotated_xyz.geo.json", True), # ("single_bone_rotated_y.geo.json", True), # ("single_bone_rotated_z.geo.json", True), # ("single_bone_translated.geo.json", True), ("single_bone.geo.json", True), ("three_bones_rotated_x.geo.json", True), ("three_bones.geo.json", True), ("two_bones.geo.json", True), ("battle_mech.geo.json", True), ] def setup_module(module): '''Runs before tests''' tmp_path = "./.tmp/test_importer" if os.path.exists(tmp_path): shutil.rmtree(tmp_path) @pytest.fixture(params=MODEL_FILES) def import_properties(request): return request.param # TESTS def test_importer(import_properties): model_file = os.path.join('./tests/data/test_importer/models/', import_properties[0]) use_empties = import_properties[1] source_dict, target_dict, _ = make_comparison_files( model_file, "./.tmp/test_importer", use_empties ) assert_is_model(target_dict) set_paths = { ("minecraft:geometry"), ("minecraft:geometry", 0, "bones"), ("minecraft:geometry", 0, "bones", 0, "cubes"), } source_comparable = make_comparable_json(source_dict, set_paths) target_comparable = make_comparable_json(target_dict, set_paths) assert source_comparable == target_comparable <file_sep>''' Functions used directly by the blender operators. ''' from __future__ import annotations from typing import Dict, Optional import numpy as np import bpy import bpy_types from .uv import UvMapper, CoordinatesConverter from .animation import AnimationExport from .model import ModelExport from .common import MINECRAFT_SCALE_FACTOR, McblendObjectGroup from .importer import ImportGeometry, ModelLoader def export_model(context: bpy_types.Context) -> Dict: ''' Creates a Minecraft model JSON dict from selected objects. Raises NameConflictException if name conflicts in some bones are detected. :param context: the context of running the operator. :returns: JSON dict with Minecraft model. 
    '''
    object_properties = McblendObjectGroup(context)

    # Global scene settings (texture size, visible bounds, model name) come
    # from the Mcblend properties panel
    model = ModelExport(
        texture_width=context.scene.nusiq_mcblend.texture_width,
        texture_height=context.scene.nusiq_mcblend.texture_height,
        visible_bounds_offset=tuple(  # type: ignore
            context.scene.nusiq_mcblend.visible_bounds_offset),
        visible_bounds_width=context.scene.nusiq_mcblend.visible_bounds_width,
        visible_bounds_height=context.scene.nusiq_mcblend.visible_bounds_height,
        model_name=context.scene.nusiq_mcblend.model_name,
    )
    model.load(object_properties, context)
    return model.json()

def export_animation(
        context: bpy_types.Context, old_dict: Optional[Dict]
    ) -> Dict:
    '''
    Creates a Minecraft animation (dictionary) from selected objects.
    Raises NameConflictException if name conflicts in some bones are
    duplicated.

    :param context: the context of running the operator.
    :param old_dict: optional - JSON dict with animation to write into.
    :returns: JSON dict of Minecraft animations.
    '''
    # Check and create object properties
    object_properties = McblendObjectGroup(context)
    # The currently selected animation template from the animations panel
    anim_data = context.scene.nusiq_mcblend_animations[
        context.scene.nusiq_mcblend_active_animation]

    animation = AnimationExport(
        name=anim_data.name,
        # NOTE(review): length uses frame_end-1 - presumably because frames
        # are 1-indexed in the scene; confirm against AnimationExport
        length=(context.scene.frame_end-1)/context.scene.render.fps,
        loop_animation=anim_data.loop,
        single_frame=anim_data.single_frame,
        anim_time_update=anim_data.anim_time_update,
        fps=context.scene.render.fps,
        # Map of event name -> (sound effects, particle effects) used to
        # resolve timeline markers into Minecraft effect keyframes
        effect_events={
            event.name: event.get_effects_dict()
            for event in context.scene.nusiq_mcblend_events
        }
    )
    animation.load_poses(object_properties, context)
    return animation.json(
        old_json=old_dict, skip_rest_poses=anim_data.skip_rest_poses)

def set_uvs(context: bpy_types.Context):
    '''
    Maps the UV for selected objects.

    Raises NotEnoughTextureSpace when the texture width and height
    wasn't big enough.
    Raises NameConflictException if name conflicts in some bones
    are detected.

    :param context: the execution context.
    '''
    width = context.scene.nusiq_mcblend.texture_width
    height = context.scene.nusiq_mcblend.texture_height
    allow_expanding = context.scene.nusiq_mcblend.allow_expanding
    generate_texture = context.scene.nusiq_mcblend.generate_texture
    resolution = context.scene.nusiq_mcblend.texture_template_resolution

    object_properties = McblendObjectGroup(context)
    mapper = UvMapper(width, height)
    mapper.load_uv_boxes(object_properties, context)
    mapper.plan_uv(allow_expanding)

    # Replace old mappings
    for objprop in mapper:
        objprop.clear_uv_layers()

    # Update height and width
    if allow_expanding:
        # Grow the declared texture size to the bounding box of all planned
        # UV boxes, then write the new size back to the scene properties
        widths = [width]
        heights = [height]
        for box in mapper.uv_boxes:
            widths.append(box.uv[0] + box.size[0])
            heights.append(box.uv[1] + box.size[1])
        height = max(heights)
        width = max(widths)

        context.scene.nusiq_mcblend.texture_height = height
        context.scene.nusiq_mcblend.texture_width = width

    if generate_texture:
        old_image = None
        if "template" in bpy.data.images:
            old_image = bpy.data.images['template']
        image = bpy.data.images.new(
            "template", width*resolution, height*resolution, alpha=True
        )
        if old_image is not None:
            # If exists remap users of old image and remove it
            old_image.user_remap(image)
            bpy.data.images.remove(old_image)
            image.name = "template"

        # This array represents new texture
        # DIM0:up axis DIM1:right axis DIM2:rgba axis
        arr = np.zeros([image.size[1], image.size[0], 4])

        for uv_cube in mapper.uv_boxes:
            uv_cube.paint_texture(arr, resolution)
        image.pixels = arr.ravel()  # Apply texture pixels values

    # Set blender UVs
    # Convert from Minecraft texture coordinates (pixels, origin top-left)
    # to Blender UV coordinates (0..1, origin bottom-left)
    converter = CoordinatesConverter(
        np.array([[0, width], [0, height]]),
        np.array([[0, 1], [1, 0]])
    )
    for curr_uv in mapper.uv_boxes:
        curr_uv.new_uv_layer()
        curr_uv.set_blender_uv(converter)

def round_dimensions(context: bpy_types.Context) -> int:
    '''
    Rounds dimensions of selected objects in such way that they'll be
    integers in exported Minecraft model.

    :param context: the context of running the operator.
    :returns: the number of edited objects.
    '''
    counter = 0
    for obj in context.selected_objects:
        if obj.type == 'MESH':
            # Clear parent from children for a moment so that resizing the
            # parent doesn't also transform them
            children = obj.children
            for child in children:
                old_matrix = child.matrix_world.copy()
                child.parent = None
                child.matrix_world = old_matrix

            # Set new dimensions
            dimensions = np.array(obj.dimensions)

            # Remove the inflate margin before rounding, round in Minecraft
            # units, then re-apply the margin
            if obj.nusiq_mcblend_object_properties.inflate != 0.0:
                dimensions -= (
                    obj.nusiq_mcblend_object_properties.inflate * 2 /
                    MINECRAFT_SCALE_FACTOR
                )
            dimensions = np.array(
                dimensions * MINECRAFT_SCALE_FACTOR
            ).round() / MINECRAFT_SCALE_FACTOR
            if obj.nusiq_mcblend_object_properties.inflate != 0.0:
                dimensions += (
                    obj.nusiq_mcblend_object_properties.inflate * 2 /
                    MINECRAFT_SCALE_FACTOR
                )
            obj.dimensions = dimensions
            context.view_layer.update()

            # Add children back and set their previous transformations
            for child in children:
                child.parent = obj
                child.matrix_parent_inverse = obj.matrix_world.inverted()

            counter += 1
    return counter

def import_model(
        data: Dict, geometry_name: str, replace_bones_with_empties: bool,
        context: bpy_types.Context
    ):
    '''
    Import and build model from JSON dict.

    :param data: JSON dict with minecraft model.
    :param geometry_name: the name of the geometry to load from the model.
    :param replace_bones_with_empties: Whether to import bones as empties
        (True) or as armature and bones (False).
    :param context: the context of running the operator.
''' geometry = ImportGeometry(ModelLoader(data, geometry_name)) if replace_bones_with_empties: geometry.build_with_empties(context) else: geometry.build_with_armature(context) context.scene.nusiq_mcblend.texture_width = geometry.texture_width context.scene.nusiq_mcblend.texture_height = geometry.texture_height context.scene.nusiq_mcblend.visible_bounds_offset = geometry.visible_bounds_offset context.scene.nusiq_mcblend.visible_bounds_width = geometry.visible_bounds_width context.scene.nusiq_mcblend.visible_bounds_height = geometry.visible_bounds_height if geometry.identifier.startswith('geometry.'): context.scene.nusiq_mcblend.model_name = geometry.identifier[9:] else: context.scene.nusiq_mcblend.model_name = geometry.identifier <file_sep>''' This module contains all of the operators. ''' # don't import future annotations Blender needs that import json from json.decoder import JSONDecodeError from typing import Any, List, Optional, Dict from numpy.lib.arraysetops import isin import bpy_types import bpy from bpy.props import ( StringProperty, FloatProperty, EnumProperty, BoolProperty, IntProperty) from bpy_extras.io_utils import ExportHelper, ImportHelper from .custom_properties import ( get_unused_event_name, list_effect_types_as_blender_enum) from .operator_func.common import inflate_objects from .operator_func import ( export_model, export_animation, set_uvs, round_dimensions, import_model) from .operator_func.json_tools import CompactEncoder from .operator_func.exception import ( NameConflictException, NotEnoughTextureSpace,) from .operator_func.jsonc_decoder import JSONCDecoder from .operator_func.texture_generator import ( list_mask_types_as_blender_enum, UvMaskTypes, MixMaskMode) from .custom_properties import get_unused_uv_group_name # Model exporter class OBJECT_OT_NusiqMcblendExportModelOperator( bpy.types.Operator, ExportHelper): '''Operator used for exporting minecraft models from blender.''' # pylint: disable=unused-argument, no-member bl_idname = 
"object.nusiq_mcblend_export_operator" bl_label = "Export model" bl_options = {'REGISTER'} bl_description = "Exports selected objects from scene to bedrock model." filename_ext = '.geo.json' filter_glob: StringProperty( # type: ignore default='*.json', options={'HIDDEN'}, maxlen=1000 ) @classmethod def poll(cls, context: bpy_types.Context): if context.mode != 'OBJECT': return False if len(context.selected_objects) < 1: return False return True def execute(self, context): try: result = export_model(context) except NameConflictException as e: self.report({'WARNING'}, str(e)) return {'FINISHED'} with open(self.filepath, 'w') as f: json.dump(result, f, cls=CompactEncoder) self.report({'INFO'}, f'Model saved in {self.filepath}.') return {'FINISHED'} def menu_func_nusiq_mcblend_export_model(self, context): '''Registers ExportModel operator to the F3 menu.''' # pylint: disable=unused-argument self.layout.operator( OBJECT_OT_NusiqMcblendExportModelOperator.bl_idname, text="Mcblend: Export model" ) # Animation exporter class OBJECT_OT_NusiqMcblendExportAnimationOperator( bpy.types.Operator, ExportHelper): '''Operator used for exporting Minecraft animations from blender.''' # pylint: disable=unused-argument, no-member bl_idname = "object.nusiq_mcblend_export_animation_operator" bl_label = "Export animation" bl_options = {'REGISTER'} bl_description = ( "Exports animation of selected objects to bedrock entity animation " "format." 
) filename_ext = '.animation.json' filter_glob: StringProperty( # type: ignore default='*.json', options={'HIDDEN'}, maxlen=1000 ) @classmethod def poll(cls, context: bpy_types.Context): if context.mode != 'OBJECT': return False if len(context.selected_objects) < 1: return False len_anims = len(context.scene.nusiq_mcblend_animations) curr_anim_id = context.scene.nusiq_mcblend_active_animation if 0 > curr_anim_id >= len_anims: return False return True def execute(self, context): # Read and validate old animation file old_dict: Optional[Dict] = None try: with open(self.filepath, 'r') as f: old_dict = json.load(f, cls=JSONCDecoder) except (json.JSONDecodeError, OSError): pass try: animation_dict = export_animation(context, old_dict) except NameConflictException as e: self.report({'WARNING'}, str(e)) return {'FINISHED'} # Save file and finish with open(self.filepath, 'w') as f: json.dump(animation_dict, f, cls=CompactEncoder) self.report({'INFO'}, f'Animation saved in {self.filepath}.') return {'FINISHED'} def menu_func_nusiq_mcblend_export_animation(self, context): '''Registers ExportAnimation operator to the F3 menu.''' # pylint: disable=unused-argument self.layout.operator( OBJECT_OT_NusiqMcblendExportAnimationOperator.bl_idname, text="Mcblend: Export animation" ) # UV mapper class OBJECT_OT_NusiqMcblendMapUvOperator(bpy.types.Operator): ''' Operator used for creating UV-mapping and optionally the tamplate texture for Minecraft model. ''' # pylint: disable=unused-argument, no-member bl_idname = "object.nusiq_mcblend_map_uv_operator" bl_label = "Map uv for bedrock model." bl_options = {'REGISTER', 'UNDO'} bl_description = ( "Set UV-mapping for minecraft objects." 
) @classmethod def poll(cls, context: bpy_types.Context): if context.mode != 'OBJECT': return False if len(context.selected_objects) < 1: return False return True def execute(self, context): try: set_uvs(context) except NotEnoughTextureSpace: self.report({'ERROR'}, "Unable to create UV-mapping.") return {'FINISHED'} except NameConflictException as e: self.report({'WARNING'}, str(e)) return {'FINISHED'} width = context.scene.nusiq_mcblend.texture_width height = context.scene.nusiq_mcblend.texture_height self.report( {'INFO'}, f'UV map created successfully for {width}x{height} texture.' ) return {'FINISHED'} # UV grouping class OBJECT_OT_NusiqMcblendUvGroupOperator(bpy.types.Operator): '''Operator used for adding selected objects to an UV-group''' # pylint: disable=unused-argument, no-member bl_idname = "object.nusiq_mcblend_uv_group_operator" bl_label = "Set uv_group for object." bl_options = {'UNDO'} bl_description = ( "Set uv_group for bedrock model. Objects that have the same width, " "depth and height and are in the same uv_group are mapped to the " "same spot on the texture" ) def _list_uv_groups(self, context): items = [ (x.name, x.name, x.name) for x in bpy.context.scene.nusiq_mcblend_uv_groups] return items uv_groups_enum: bpy.props.EnumProperty( # type: ignore items=_list_uv_groups, name="UV Groups") @classmethod def poll(cls, context: bpy_types.Context): if context.mode != 'OBJECT': return False if len(context.selected_objects) < 1: return False if len(bpy.context.scene.nusiq_mcblend_uv_groups) == 0: return False return True def invoke(self, context, event): return context.window_manager.invoke_props_dialog(self) def execute(self, context): for obj in context.selected_objects: if obj.type == 'MESH': obj.nusiq_mcblend_object_properties.uv_group = ( self.uv_groups_enum) self.report( {'INFO'}, f'Set UV group of selected objects to {self.uv_groups_enum}.') # The object properties display the property edited by this operator # redraw it. 
for area in context.screen.areas: if area.type == 'PROPERTIES': area.tag_redraw() return {'FINISHED'} class OBJECT_OT_NusiqMcblendClearUvGroupOperator(bpy.types.Operator): '''Operator used for removing selected objects from their UV-groups''' # pylint: disable=unused-argument, no-member bl_idname = "object.nusiq_mcblend_clear_uv_group_operator" bl_label = "Clear uv_group for object." bl_options = {'UNDO'} bl_description = 'Clears the UV group from an object.' @classmethod def poll(cls, context: bpy_types.Context): if context.mode != 'OBJECT': return False if len(context.selected_objects) < 1: return False if len(bpy.context.scene.nusiq_mcblend_uv_groups) == 0: return False return True def execute(self, context): for obj in context.selected_objects: if obj.type == 'MESH': obj.nusiq_mcblend_object_properties.uv_group = '' self.report({'INFO'}, 'Cleared UV group of selected objects.') # The object properties display the property edited by this operator # redraw it. for area in context.screen.areas: if area.type == 'PROPERTIES': area.tag_redraw() return {'FINISHED'} # Mirror property class OBJECT_OT_NusiqMcblendToggleMirrorOperator(bpy.types.Operator): ''' Operator used for toggling custom "mirror" propert of selected objects. ''' # pylint: disable=unused-argument, no-member bl_idname = "object.nusiq_mcblend_toggle_mirror_operator" bl_label = "Toggle mirror for selected objects." bl_options = {'UNDO'} bl_description = ( "Toggle mirror for selected objects. 
Adds or removes mirror " "property from a cube in minecraft model" ) @classmethod def poll(cls, context: bpy_types.Context): if context.mode != 'OBJECT': return False if len(context.selected_objects) < 1: return False return True def execute(self, context): is_clearing = False for obj in context.selected_objects: if obj.type == "MESH": if obj.nusiq_mcblend_object_properties.mirror: is_clearing = True break if is_clearing: for obj in context.selected_objects: if obj.type == "MESH": (obj.nusiq_mcblend_object_properties ).mirror = False self.report({'INFO'}, 'Disabled the mirror for generating UV for ' 'selected objects.') else: for obj in context.selected_objects: if obj.type == "MESH": (obj.nusiq_mcblend_object_properties ).mirror = True self.report({'INFO'}, 'Enabled the mirror for generating UV for ' 'selected objects.') # The object properties display the property edited by this operator # redraw it. for area in context.screen.areas: if area.type == 'PROPERTIES': area.tag_redraw() return {'FINISHED'} # is_bone property class OBJECT_OT_NusiqMcblendToggleIsBoneOperator(bpy.types.Operator): ''' Operator used for toggling custom "is_bone" property of selected objects. ''' # pylint: disable=unused-argument, no-member bl_idname = "object.nusiq_mcblend_toggle_is_bone_operator" bl_label = "Toggle is_bone for selected objects." bl_options = {'UNDO'} bl_description = ( "Toggles is_bone for selected objects. 
Setting is_bone property " "to 1 ensures that the object will be converted to a bone in minecraft" " model" ) @classmethod def poll(cls, context: bpy_types.Context): if context.mode != 'OBJECT': return False if len(context.selected_objects) < 1: return False for obj in context.selected_objects: if obj.type == "MESH" or obj.type == "EMPTY": return True return False def execute(self, context): is_clearing = False for obj in context.selected_objects: if obj.type == "MESH": if obj.nusiq_mcblend_object_properties.is_bone: is_clearing = True break if is_clearing: for obj in context.selected_objects: if obj.type == "MESH" or obj.type == "EMPTY": obj.nusiq_mcblend_object_properties.is_bone = False self.report( {'INFO'}, 'Objects are not market to export as bones anymore.') else: for obj in context.selected_objects: if obj.type == "MESH" or obj.type == "EMPTY": obj.nusiq_mcblend_object_properties.is_bone = True self.report({'INFO'}, 'Marked selected objects to export as bones') # The object properties display the property edited by this operator # redraw it. for area in context.screen.areas: if area.type == 'PROPERTIES': area.tag_redraw() return {'FINISHED'} # Inflate property class OBJECT_OT_NusiqMcblendSetInflateOperator(bpy.types.Operator): ''' Operator used for setting the inflate value of selected objects. ''' # pylint: disable=unused-argument, no-member bl_idname = "object.nusiq_mcblend_set_inflate_operator" bl_label = "Set inflate" bl_options = {'REGISTER', 'UNDO'} bl_description = ( "Set the inflate vale for selected objects and change their " "dimensions to fit the inflate values." 
) inflate_value: FloatProperty(default=0) # type: ignore mode: EnumProperty( # type: ignore items=( ('RELATIVE', 'Relative', 'Add or remove to current inflate value'), ('ABSOLUTE', 'Absolute', 'Set the inflate value'), ), name='Mode' ) @classmethod def poll(cls, context: bpy_types.Context): if context.mode != 'OBJECT': return False if len(context.selected_objects) < 1: return False return True def invoke(self, context, event): self.inflate_value = 0 self.mode = 'RELATIVE' return {'FINISHED'} def execute(self, context): inflate_objects( context, context.selected_objects, self.inflate_value, self.mode) return {'FINISHED'} # Rounding dimensions class OBJECT_OT_NusiqMcblendRoundDimensionsOperator(bpy.types.Operator): ''' Operator used for rounding the width, depth and height of selected objects in such way that they'll have integer dimensions in exported Minecraft model file. ''' # pylint: disable=unused-argument, R0201, no-member bl_idname = "object.nusiq_mcblend_round_dimensions_operator" bl_label = "Round dimensions" bl_options = {'UNDO'} bl_description = ( "Round the dimensions of selected object to integers." ) @classmethod def poll(cls, context: bpy_types.Context): if context.mode != 'OBJECT': return False if len(context.selected_objects) < 1: return False return True def execute(self, context): round_dimensions( # Returns number of edited objects context ) return {'FINISHED'} # Model Importer class OBJECT_OT_NusiqMcblendImport(bpy.types.Operator, ImportHelper): '''Operator used for importing Minecraft models to Blender.''' # pylint: disable=unused-argument, no-member bl_idname = "object.nusiq_mcblend_import_operator" bl_label = "Import model" bl_options = {'REGISTER'} bl_description = "Import model from json file." 
# ImportHelper mixin class uses this filename_ext = ".json" filter_glob: StringProperty( # type: ignore default="*.json", options={'HIDDEN'}, maxlen=1000, ) geometry_name: StringProperty( # type: ignore default='', maxlen=500, name='Geometry name' ) replace_bones_with_empties: BoolProperty( # type: ignore default=False, description='Creates empties instead of armature and bones', name='Replace bones with empties' ) def execute(self, context): # Save file and finish with open(self.filepath, 'r') as f: data = json.load(f, cls=JSONCDecoder) try: import_model( data, self.geometry_name, self.replace_bones_with_empties, context) except AssertionError as e: self.report( {'ERROR'}, f'Invalid model: {e}' ) except ValueError as e: self.report( {'ERROR'}, f'{e}' ) return {'FINISHED'} # Animation (GUI) def menu_func_nusiq_mcblend_import(self, context): '''Registers Import operator to the F3 menu.''' # pylint: disable=unused-argument self.layout.operator( OBJECT_OT_NusiqMcblendImport.bl_idname, text="Mcblend: Import model" ) def save_animation_properties(animation, context): ''' Saves animation properties from context to OBJECT_NusiqMcblendAnimationProperties object. ''' animation.frame_start = context.scene.frame_start animation.frame_end = context.scene.frame_end animation.frame_current = context.scene.frame_current animation.timeline_markers.clear() for timeline_marker in context.scene.timeline_markers: anim_timeline_marker = animation.timeline_markers.add() anim_timeline_marker.name = timeline_marker.name anim_timeline_marker.frame = timeline_marker.frame def load_animation_properties(animation, context): ''' Saves animation properties from OBJECT_NusiqMcblendAnimationProperties object to the context. 
''' context.scene.frame_start = animation.frame_start context.scene.frame_end = animation.frame_end context.scene.frame_current = animation.frame_current context.scene.timeline_markers.clear() for anim_timeline_marker in animation.timeline_markers: context.scene.timeline_markers.new( anim_timeline_marker.name, frame=anim_timeline_marker.frame) class OBJECT_OT_NusiqMcblendListAnimations(bpy.types.Operator): ''' Operator used for listing the animations for GUI. ''' bl_idname = "object.nusiq_mcblend_list_animations" bl_label = "List animations and save them to Enum to display them in GUI" def _list_animations(self, context): # pylint: disable=unused-argument items = [ (str(i), x.name, x.name) for i, x in enumerate(bpy.context.scene.nusiq_mcblend_animations)] return items animations_enum: bpy.props.EnumProperty( # type: ignore items=_list_animations, name="Animations") # @classmethod # def poll(cls, context): # return context.mode == 'OBJECT' def execute(self, context): ''' Runs when user picks an item from the dropdown menu in animations panel. Sets the active animation. 
''' # If OK than save old animation state len_anims = len(context.scene.nusiq_mcblend_animations) curr_anim_id = context.scene.nusiq_mcblend_active_animation if 0 <= curr_anim_id < len_anims: save_animation_properties( context.scene.nusiq_mcblend_animations[curr_anim_id], context) # Set new animation and load its state new_anim_id=int(self.animations_enum) context.scene.nusiq_mcblend_active_animation=new_anim_id load_animation_properties( context.scene.nusiq_mcblend_animations[new_anim_id], context) return {'FINISHED'} class OBJECT_OT_NusiqMcblendAddAnimation(bpy.types.Operator): '''Operator used creating animation settings templates.''' bl_idname = "object.nusiq_mcblend_add_animation" bl_label = '''Adds new animation to the list.''' bl_options = {'UNDO'} def execute(self, context): # If OK save old animation len_anims = len(context.scene.nusiq_mcblend_animations) curr_anim_id = context.scene.nusiq_mcblend_active_animation if 0 <= curr_anim_id < len_anims: save_animation_properties( context.scene.nusiq_mcblend_animations[curr_anim_id], context) context.scene.timeline_markers.clear() # Add new animation and set its properties animation_new = context.scene.nusiq_mcblend_animations.add() len_anims = len(context.scene.nusiq_mcblend_animations) context.scene.nusiq_mcblend_active_animation=len_anims-1 animation_new.name = f'animation{len_anims}' # The object properties display the property edited by this operator # redraw it. for area in context.screen.areas: if area.type == 'PROPERTIES': area.tag_redraw() return {'FINISHED'} class OBJECT_OT_NusiqMcblendRemoveAnimation(bpy.types.Operator): ''' Operator used for loading saved animation templates to the context. ''' bl_idname = "object.nusiq_mcblend_remove_animation" bl_label = "Remove current animation from the list." 
bl_options = {'UNDO'} @classmethod def poll(cls, context): return len(context.scene.nusiq_mcblend_animations) > 0 def execute(self, context): # Remove animation context.scene.nusiq_mcblend_animations.remove( context.scene.nusiq_mcblend_active_animation) # Set new active animation last_active=context.scene.nusiq_mcblend_active_animation len_anims=len(context.scene.nusiq_mcblend_animations) if last_active > 0: context.scene.nusiq_mcblend_active_animation=last_active-1 # Load data from new active animation curr_anim_id=context.scene.nusiq_mcblend_active_animation if 0 <= curr_anim_id < len_anims: load_animation_properties( context.scene.nusiq_mcblend_animations[curr_anim_id], context) # The object properties display the property edited by this operator # redraw it. for area in context.screen.areas: if area.type == 'PROPERTIES': area.tag_redraw() return {'FINISHED'} # UV group (GUI) class OBJECT_OT_NusiqMcblendListUvGroups(bpy.types.Operator): ''' Operator that used for listing the UV-groups for GUI. ''' bl_idname = "object.nusiq_mcblend_list_uv_groups" bl_label = "List UV groups and save them to Enum to display them in GUI" def _list_uv_groups(self, context): # pylint: disable=unused-argument items = [ (str(i), x.name, x.name) for i, x in enumerate(bpy.context.scene.nusiq_mcblend_uv_groups)] return items uv_groups_enum: bpy.props.EnumProperty( # type: ignore items=_list_uv_groups, name="UV Groups") def execute(self, context): ''' Runs when user picks an item from the dropdown menu in uv_groups panel. Sets the active uv_group. 
''' # Set new uv_group and load its state new_uv_group_id=int(self.uv_groups_enum) context.scene.nusiq_mcblend_active_uv_group=new_uv_group_id return {'FINISHED'} class OBJECT_OT_NusiqMcblendAddUvGroup(bpy.types.Operator): '''Operator used for creating new UV-groups.''' bl_idname = "object.nusiq_mcblend_add_uv_group" bl_label = '''Adds new uv_group to the list.''' bl_options = {'UNDO'} def execute(self, context): # If OK save old uv_group len_groups = len(context.scene.nusiq_mcblend_uv_groups) # Add new uv_group and set its properties uv_group_new = context.scene.nusiq_mcblend_uv_groups.add() len_groups = len(context.scene.nusiq_mcblend_uv_groups) context.scene.nusiq_mcblend_active_uv_group=len_groups-1 uv_group_new.name = get_unused_uv_group_name('uv_group') sides = [ uv_group_new.side1, uv_group_new.side2, uv_group_new.side3, uv_group_new.side4, uv_group_new.side5, uv_group_new.side6] colors = [ (0, 0.15, 0), (0.15, 0, 0.15), (0.15, 0, 0), (0, 0.15, 0.15), (0, 0, 0.15), (0.15, 0.15, 0)] for color, side in zip(colors, sides): mask = side.add() mask.mask_type = UvMaskTypes.COLOR_MASK.value mask.color.color = color mask.colors.add() mask.stripes.add() return {'FINISHED'} class OBJECT_OT_NusiqMcblendRemoveUvGroup(bpy.types.Operator): '''Operator useful for removing UV-groups.''' bl_idname = "object.nusiq_mcblend_remove_uv_group" bl_label = "Remove current uv_group from the list." 
bl_options = {'UNDO'} @classmethod def poll(cls, context): return len(context.scene.nusiq_mcblend_uv_groups) > 0 def execute(self, context): group_id = context.scene.nusiq_mcblend_active_uv_group group_name = context.scene.nusiq_mcblend_uv_groups[group_id].name # Remove uv_group context.scene.nusiq_mcblend_uv_groups.remove(group_id) # Update the names of all of the meshes for obj in bpy.data.objects: if obj.type == "MESH": obj_props = obj.nusiq_mcblend_object_properties if obj_props.uv_group == group_name: obj_props.uv_group = '' # Set new active uv_group if group_id > 0: context.scene.nusiq_mcblend_active_uv_group=group_id-1 return {'FINISHED'} class OBJECT_OT_NusiqMcblendCopyUvGroupSide(bpy.types.Operator): '''Operator used for copying sides of UV-groups.''' bl_idname = "object.nusiq_mcblend_copy_uv_group_side" bl_label = 'Copy active UV group side other to UV group' bl_options = {'UNDO'} def _list_uv_groups(self, context): # pylint: disable=unused-argument items = [ (str(i), x.name, x.name) for i, x in enumerate(bpy.context.scene.nusiq_mcblend_uv_groups)] return items uv_groups_enum: bpy.props.EnumProperty( # type: ignore items=_list_uv_groups, name="UV Groups") side1: BoolProperty(name='side1') # type: ignore side2: BoolProperty(name='side2') # type: ignore side3: BoolProperty(name='side3') # type: ignore side4: BoolProperty(name='side4') # type: ignore side5: BoolProperty(name='side5') # type: ignore side6: BoolProperty(name='side6') # type: ignore def invoke(self, context, event): return context.window_manager.invoke_props_dialog(self) @classmethod def poll(cls, context): return len(context.scene.nusiq_mcblend_uv_groups) >= 1 def _copy_side( self, context, source_group_id: int, source_side_id: int, target_group_id: int, target_side_id: int): if ( source_group_id == target_group_id and source_side_id == target_side_id ): return # If source and target is the same don't do anything # Get source source_group = 
context.scene.nusiq_mcblend_uv_groups[source_group_id] source_sides = [ source_group.side1, source_group.side2, source_group.side3, source_group.side4, source_group.side5, source_group.side6] source_masks = source_sides[source_side_id] # Get target target_group = context.scene.nusiq_mcblend_uv_groups[target_group_id] target_sides = [ target_group.side1, target_group.side2, target_group.side3, target_group.side4, target_group.side5, target_group.side6] target_masks = target_sides[target_side_id] # Clear target target_masks.clear() # Copy from source from target for mask in source_masks: new_mask = target_masks.add() new_mask.mask_type = mask.mask_type for color in mask.colors: new_color = new_mask.colors.add() new_color.color = color.color new_mask.interpolate = mask.interpolate new_mask.normalize = mask.normalize new_mask.p1_relative = mask.p1_relative new_mask.p2_relative = mask.p2_relative new_mask.p1 = mask.p1 new_mask.p2 = mask.p2 for stripe in mask.stripes: new_stripe = new_mask.stripes.add() new_stripe.width = stripe.width new_stripe.strength = stripe.strength new_mask.relative_boundaries = mask.relative_boundaries new_mask.expotent = mask.expotent new_mask.strength = mask.strength new_mask.hard_edge = mask.hard_edge new_mask.horizontal = mask.horizontal new_mask.use_seed = mask.use_seed new_mask.seed = mask.seed new_mask.color.color = mask.color.color # pointer property new_mask.mode = mask.mode new_mask.children = mask.children def execute(self, context): # Get source masks source_group_id = context.scene.nusiq_mcblend_active_uv_group source_side_id = int(context.scene.nusiq_mcblend_active_uv_groups_side) # Get target UV group target_group_id = int(self.uv_groups_enum) if self.side1: self._copy_side( context, source_group_id, source_side_id, target_group_id, 0) if self.side2: self._copy_side( context, source_group_id, source_side_id, target_group_id, 1) if self.side3: self._copy_side( context, source_group_id, source_side_id, target_group_id, 2) if 
self.side4: self._copy_side( context, source_group_id, source_side_id, target_group_id, 3) if self.side5: self._copy_side( context, source_group_id, source_side_id, target_group_id, 4) if self.side6: self._copy_side( context, source_group_id, source_side_id, target_group_id, 5) self.report({'INFO'}, 'Successfully copied UV face.') return {'FINISHED'} # UV Mask (GUI) def get_active_masks(context): '''Returns active masks of active UV Group from context.''' curr_group_id = context.scene.nusiq_mcblend_active_uv_group curr_group = context.scene.nusiq_mcblend_uv_groups[curr_group_id] sides = [ curr_group.side1, curr_group.side2, curr_group.side3, curr_group.side4, curr_group.side5, curr_group.side6 ] masks = sides[int(context.scene.nusiq_mcblend_active_uv_groups_side)] return masks class OBJECT_OT_NusiqMcblendAddUvMask(bpy.types.Operator): '''Operator used for adding UV-masks to UV groups.''' bl_idname = "object.nusiq_mcblend_add_uv_mask" bl_label = '''Adds new mask to active uv group at active face.''' bl_options = {'UNDO'} mask_type: EnumProperty( # type: ignore items=list_mask_types_as_blender_enum, name='Mask type' ) @classmethod def poll(cls, context: bpy_types.Context): if len(context.scene.nusiq_mcblend_uv_groups) < 1: return False return True def execute(self, context): masks = get_active_masks(context) new_mask = masks.add() new_mask.mask_type = self.mask_type new_mask.colors.add() new_mask.stripes.add() return {'FINISHED'} class OBJECT_OT_NusiqMcblendRemoveUvMask(bpy.types.Operator): '''Operator used for removing UV-masks from UV-groups.''' bl_idname = "object.nusiq_mcblend_remove_uv_mask" bl_label = '''Removes mask from active face of active uv group.''' bl_options = {'UNDO'} target: IntProperty() # type: ignore @classmethod def poll(cls, context: bpy_types.Context): if len(context.scene.nusiq_mcblend_uv_groups) < 1: return False return True def execute(self, context): masks = get_active_masks(context) masks.remove(self.target) return {'FINISHED'} class 
OBJECT_OT_NusiqMcblendMoveUvMask(bpy.types.Operator): '''Operator used for changing the order of UV-masks in UV groups.''' bl_idname = "object.nusiq_mcblend_move_uv_mask" bl_label = ( 'Moves mask in active face of active uv group to different place on ' 'the list.') bl_options = {'UNDO'} move_from: IntProperty() # type: ignore move_to: IntProperty() # type: ignore @classmethod def poll(cls, context: bpy_types.Context): if len(context.scene.nusiq_mcblend_uv_groups) < 1: return False return True def execute(self, context): masks = get_active_masks(context) masks.move(self.move_from, self.move_to) return {'FINISHED'} # UV Mask side colors (GUI) class OBJECT_OT_NusiqMcblendAddUvMaskColor(bpy.types.Operator): '''Operator used for adding colors to UV-masks.''' bl_idname = "object.nusiq_mcblend_add_uv_mask_color" bl_label = '''Adds new color to a mask.''' bl_options = {'UNDO'} mask_index: IntProperty() # type: ignore @classmethod def poll(cls, context: bpy_types.Context): if len(context.scene.nusiq_mcblend_uv_groups) < 1: return False return True def execute(self, context): masks = get_active_masks(context) mask = masks[self.mask_index] mask.colors.add() return {'FINISHED'} class OBJECT_OT_NusiqMcblendRemoveUvMaskColor(bpy.types.Operator): '''Operator used for removing colors from UV-masks.''' bl_idname = "object.nusiq_mcblend_remove_uv_mask_color" bl_label = 'Removes color from colors of active face of active uv group.' 
bl_options = {'UNDO'} mask_index: IntProperty() # type: ignore color_index: IntProperty() # type: ignore @classmethod def poll(cls, context: bpy_types.Context): if len(context.scene.nusiq_mcblend_uv_groups) < 1: return False return True def execute(self, context): masks = get_active_masks(context) mask = masks[self.mask_index] mask.colors.remove(self.color_index) return {'FINISHED'} class OBJECT_OT_NusiqMcblendMoveUvMaskColor(bpy.types.Operator): '''Operator used for changing the order of the colors in UV-masks.''' bl_idname = "object.nusiq_mcblend_move_uv_mask_color" bl_label = ( 'Moves color in active mask of active face of active uv group to' 'different place on the list.') bl_options = {'UNDO'} mask_index: IntProperty() # type: ignore move_from: IntProperty() # type: ignore move_to: IntProperty() # type: ignore @classmethod def poll(cls, context: bpy_types.Context): if len(context.scene.nusiq_mcblend_uv_groups) < 1: return False return True def execute(self, context): masks = get_active_masks(context) mask = masks[self.mask_index] mask.colors.move(self.move_from, self.move_to) return {'FINISHED'} # UV Mask side stripes (GUI) class OBJECT_OT_NusiqMcblendAddUvMaskStripe(bpy.types.Operator): '''Operator used for adding stripes to UV-masks.''' bl_idname = "object.nusiq_mcblend_add_uv_mask_stripe" bl_label = '''Adds new color to a mask.''' bl_options = {'UNDO'} mask_index: IntProperty() # type: ignore @classmethod def poll(cls, context: bpy_types.Context): if len(context.scene.nusiq_mcblend_uv_groups) < 1: return False return True def execute(self, context): masks = get_active_masks(context) mask = masks[self.mask_index] mask.stripes.add() return {'FINISHED'} class OBJECT_OT_NusiqMcblendRemoveUvMaskStripe(bpy.types.Operator): '''Operator used for removing UV-masks from UV-groups.''' bl_idname = "object.nusiq_mcblend_remove_uv_mask_stripe" bl_label = 'Removes color from colors of active face of active uv group.' 
bl_options = {'UNDO'} mask_index: IntProperty() # type: ignore stripe_index: IntProperty() # type: ignore @classmethod def poll(cls, context: bpy_types.Context): if len(context.scene.nusiq_mcblend_uv_groups) < 1: return False return True def execute(self, context): masks = get_active_masks(context) mask = masks[self.mask_index] mask.stripes.remove(self.stripe_index) return {'FINISHED'} class OBJECT_OT_NusiqMcblendMoveUvMaskStripe(bpy.types.Operator): '''Operator used for changing the order of the stripes in UV-groups.''' bl_idname = "object.nusiq_mcblend_move_uv_mask_stripe" bl_label = ( 'Moves color in active mask of active face of active uv group to' 'different place on the list.') bl_options = {'UNDO'} mask_index: IntProperty() # type: ignore move_from: IntProperty() # type: ignore move_to: IntProperty() # type: ignore @classmethod def poll(cls, context: bpy_types.Context): if len(context.scene.nusiq_mcblend_uv_groups) < 1: return False return True def execute(self, context): masks = get_active_masks(context) mask = masks[self.mask_index] mask.stripes.move(self.move_from, self.move_to) return {'FINISHED'} # UV Mask exporter class OBJECT_OT_NusiqMcblendExportUvGroupOperator( bpy.types.Operator, ExportHelper): '''Operator used for exporting active UV-group from Blender.''' # pylint: disable=unused-argument, no-member bl_idname = "object.nusiq_mcblend_export_uv_group_operator" bl_label = "Export UV-group" bl_options = {'REGISTER'} bl_description = "Exports active UV-group" filename_ext = '.uvgroup.json' filter_glob: StringProperty( # type: ignore default='*.uvgroup.json', options={'HIDDEN'}, maxlen=1000 ) @classmethod def poll(cls, context: bpy_types.Context): return len(context.scene.nusiq_mcblend_uv_groups) > 0 def execute(self, context): group_id = context.scene.nusiq_mcblend_active_uv_group uv_group = context.scene.nusiq_mcblend_uv_groups[group_id] with open(self.filepath, 'w') as f: json.dump(uv_group.json(), f, cls=CompactEncoder) self.report({'INFO'}, 
f'UV-group saved in {self.filepath}.') return {'FINISHED'} # UV Mask exporter class OBJECT_OT_NusiqMcblendImportUvGroupOperator(bpy.types.Operator, ImportHelper): '''Operator used for importing Minecraft models to Blender.''' # pylint: disable=unused-argument, no-member bl_idname = "object.nusiq_mcblend_import_uv_group_operator" bl_label = "Import UV-group" bl_options = {'REGISTER'} bl_description = "Import UV-group from JSON file." # ImportHelper mixin class uses this filename_ext = ".json" filter_glob: StringProperty( # type: ignore default="*.json", options={'HIDDEN'}, maxlen=1000, ) def _load_mask_data(self, mask_data, side) -> Optional[str]: loading_warning: Optional[str] = None if "mask_type" not in mask_data: return ( "Some of the masks are missing the 'mask_type' definition.") mask_type = mask_data["mask_type"] if not isinstance(mask_type, str): return ( f"Mask type property must be a string not a {type(mask_type)}") if mask_type not in [m.value for m in UvMaskTypes]: return f'Unknown mask type: {mask_type}' mask=side.add() mask.mask_type = mask_type # Loading properties of the mask # Loading relative_boundries first because they affect other properties relative_boundaries: bool = False if mask_type in [ UvMaskTypes.GRADIENT_MASK.value, UvMaskTypes.ELLIPSE_MASK.value, UvMaskTypes.RECTANGLE_MASK.value, UvMaskTypes.STRIPES_MASK.value]: if 'relative_boundaries' in mask_data: if isinstance(mask_data['relative_boundaries'], bool): relative_boundaries = mask_data['relative_boundaries'] mask.relative_boundaries = relative_boundaries else: loading_warning = ( '"relative_boundaries" property must be a boolean') if mask_type == UvMaskTypes.MIX_MASK.value: if 'mode' in mask_data: mode = mask_data['mode'] if mode not in [m.value for m in MixMaskMode]: loading_warning=f'Unknown mode {mode}' else: mask.mode = mode if 'children' in mask_data: children = mask_data['children'] if not isinstance(children, int): loading_warning = f'Children property must be an integer' else: 
mask.children = mask_data['children'] if mask_type == UvMaskTypes.COLOR_PALLETTE_MASK.value: if 'colors' in mask_data: colors = mask_data['colors'] if not isinstance(colors, list): loading_warning = ( 'Colors property must be a list of lists of floats') else: for color_data in colors: if ( not isinstance(color_data, list) or len(color_data) != 3): loading_warning = ( 'Every color on colors list should be ' 'a list of floats.') continue is_color = True for value_data in color_data: if not isinstance(value_data, (float, int)): is_color = False loading_warning =( 'All values of color must be ' 'floats in range 0.0-1.0') break if is_color: color = mask.colors.add() color.color = color_data if 'interpolate' in mask_data: interpolate = mask_data['interpolate'] if not isinstance(interpolate, bool): loading_warning = 'Interpolate property must be a boolean' else: mask.interpolate = interpolate if 'normalize' in mask_data: normalize = mask_data['normalize'] if not isinstance(normalize, bool): loading_warning = 'Normalize property must be a boolean' else: mask.normalize = normalize if mask_type in [ UvMaskTypes.GRADIENT_MASK.value, UvMaskTypes.ELLIPSE_MASK.value, UvMaskTypes.RECTANGLE_MASK.value]: if relative_boundaries: if 'p1' in mask_data: if ( isinstance(mask_data['p1'], list) and len(mask_data['p1']) == 2 and isinstance(mask_data['p1'][0], (float, int)) and isinstance(mask_data['p1'][1], (float, int)) and 0.0 <= mask_data['p1'][0] <= 1.0 and 0.0 <= mask_data['p1'][1] <= 1.0): mask.p1_relative = mask_data['p1'] else: loading_warning = ( '"p1" property must be a float in range 0.0 to ' '1.0 if "relative_boundaries" are True') if 'p2' in mask_data: if ( isinstance(mask_data['p2'], list) and len(mask_data['p2']) == 2 and isinstance(mask_data['p2'][0], (float, int)) and isinstance(mask_data['p2'][1], (float, int)) and 0.0 <= mask_data['p2'][0] <= 1.0 and 0.0 <= mask_data['p2'][1] <= 1.0): mask.p2_relative = mask_data['p2'] else: loading_warning = ( '"p2" property must be a 
float in range 0.0 to ' '1.0 if "relative_boundaries" are True') else: if 'p1' in mask_data: if ( isinstance(mask_data['p1'], list) and len(mask_data['p1']) == 2 and isinstance(mask_data['p1'][0], int) and isinstance(mask_data['p1'][1], int)): mask.p1 = mask_data['p1'] else: loading_warning = ( '"p1" property must be an integer if ' '"relative_boundaries" are False') if 'p2' in mask_data: if ( isinstance(mask_data['p2'], list) and len(mask_data['p2']) == 2 and isinstance(mask_data['p2'][0], int) and isinstance(mask_data['p2'][1], int)): mask.p2 = mask_data['p2'] else: loading_warning = ( '"p2" property must be an integer if ' '"relative_boundaries" are False') if mask_type in [ UvMaskTypes.GRADIENT_MASK.value, UvMaskTypes.STRIPES_MASK.value]: if 'stripes' in mask_data: stripes = mask_data['stripes'] if not isinstance(stripes, list): loading_warning = '"stripes" property must be a list.' else: for stripe_data in stripes: if not isinstance(stripe_data, dict): loading_warning = ( 'Every stripe in the stripes list must be an ' 'object') continue stripe = mask.stripes.add() if 'width' in stripe_data: width = stripe_data['width'] if relative_boundaries: if ( isinstance(width, (float, int)) and 0.0 <= width <= 1.0): stripe.width_relative = width else: loading_warning = ( "Stripe width must be a float in " "range 0.0 to 1.0 if " "relative_boundaries is True") else: if isinstance(width, int): stripe.width = width else: loading_warning = ( "Stripe width must be an integer if " "relative_boundaries is True") if 'strength' in stripe_data: strength = stripe_data['strength'] if isinstance(strength, (float, int)): stripe.strength = strength else: loading_warning = ( 'Stripe strength must be a float.') if mask_type in [ UvMaskTypes.GRADIENT_MASK.value, UvMaskTypes.ELLIPSE_MASK.value, UvMaskTypes.RECTANGLE_MASK.value, UvMaskTypes.MIX_MASK.value, UvMaskTypes.RANDOM_MASK.value]: if 'expotent' in mask_data: expotent = mask_data['expotent'] if isinstance(expotent, (float, int)): 
mask.expotent = mask_data['expotent'] else: loading_warning = 'Expotent property must be a float.' if mask_type in [ UvMaskTypes.ELLIPSE_MASK.value, UvMaskTypes.RECTANGLE_MASK.value, UvMaskTypes.MIX_MASK.value, UvMaskTypes.RANDOM_MASK.value]: if 'strength' in mask_data: strength = mask_data['strength'] if ( isinstance(strength, list) and len(strength) == 2 and isinstance(strength[0], (float, int)) and isinstance(strength[1], (float, int)) and 0.0 <= strength[0] <= 1.0 and 0.0 <= strength[1] <= 1.0): mask.strength = mask_data['strength'] else: loading_warning = ( '"strength" property must be a list of ' 'two floats in range 0.0 to 1.0.') if mask_type in [ UvMaskTypes.ELLIPSE_MASK.value, UvMaskTypes.RECTANGLE_MASK.value]: if 'hard_edge' in mask_data: if isinstance(mask_data['hard_edge'], bool): hard_edge = mask_data['hard_edge'] mask.hard_edge = hard_edge else: loading_warning = '"hard_edge" property must be a boolean' if mask_type == UvMaskTypes.STRIPES_MASK.value: if 'horizontal' in mask_data: if isinstance(mask_data['horizontal'], bool): horizontal = mask_data['horizontal'] mask.horizontal = horizontal else: loading_warning = '"horizontal" property must be a boolean' if mask_type == UvMaskTypes.RANDOM_MASK.value: if 'use_seed' in mask_data: if isinstance(mask_data['use_seed'], bool): use_seed = mask_data['use_seed'] mask.use_seed = use_seed else: loading_warning = '"use_seed" property must be a boolean' if 'seed' in mask_data: seed = mask_data['seed'] if isinstance(seed, int): mask.seed = mask_data['seed'] else: loading_warning = '"seed" property must be an interger.' 
if mask_type == UvMaskTypes.COLOR_MASK.value: if 'color' in mask_data: color_data = mask_data['color'] if ( not isinstance(color_data, list) or len(color_data) != 3): loading_warning = ( 'Every color on colors list should be ' 'a list of floats.') else: is_color = True for value_data in color_data: if not isinstance(value_data, (float, int)): is_color = False loading_warning =( 'All values of color must be ' 'floats in range 0.0-1.0') break if is_color: mask.color.color = color_data return loading_warning def _load_side(self, side: Any, side_data: List) -> Optional[str]: loading_warning = None for mask_data in side_data: loading_warning = self._load_mask_data(mask_data, side) return loading_warning def execute(self, context): name: str = get_unused_uv_group_name('uv_group') # Save file and finish try: with open(self.filepath, 'r') as f: data = json.load(f, cls=JSONCDecoder) version = data['version'] if version != 1: self.report({'ERROR'}, "Unknown UV-group version.") return {'CANCELLED'} except (KeyError, TypeError, JSONDecodeError) as e: self.report({'ERROR'}, "Unable to to read the UV-group data.") return {'CANCELLED'} # Create new UV-group len_groups = len(context.scene.nusiq_mcblend_uv_groups) # Add new uv_group and set its properties uv_group_new = context.scene.nusiq_mcblend_uv_groups.add() len_groups = len(context.scene.nusiq_mcblend_uv_groups) context.scene.nusiq_mcblend_active_uv_group=len_groups-1 # Currently only version 1 is supported if 'name' in data and isinstance(data['name'], str): name = get_unused_uv_group_name(data['name']) uv_group_new.name = name # Used for showing warnings about loading process (the loader shows # only one warning at a time) loading_warning: Optional[str] = None if 'side1' in data and isinstance(data['side1'], list): loading_warning=self._load_side(uv_group_new.side1, data['side1']) if 'side2' in data and isinstance(data['side2'], list): loading_warning=self._load_side(uv_group_new.side2, data['side2']) if 'side3' in data and 
isinstance(data['side3'], list): loading_warning=self._load_side(uv_group_new.side3, data['side3']) if 'side4' in data and isinstance(data['side4'], list): loading_warning=self._load_side(uv_group_new.side4, data['side4']) if 'side5' in data and isinstance(data['side5'], list): loading_warning=self._load_side(uv_group_new.side5, data['side5']) if 'side6' in data and isinstance(data['side6'], list): loading_warning=self._load_side(uv_group_new.side6, data['side6']) # If something didn't load propertly also display a warning if loading_warning is not None: self.report({'WARNING'}, loading_warning) if context.area is not None: # There is no area when running from CLI context.area.tag_redraw() return {'FINISHED'} # Events class OBJECT_OT_NusiqMcblendAddEvent(bpy.types.Operator): '''Operator used for adding events to scene.''' bl_idname = "object.nusiq_mcblend_add_event" bl_label = '''Adds new event to scene.''' bl_options = {'UNDO'} def execute(self, context): event_new = bpy.context.scene.nusiq_mcblend_events.add() event_new.name = get_unused_event_name('event') return {'FINISHED'} class OBJECT_OT_NusiqMcblendRemoveEvent(bpy.types.Operator): '''Operator used for removing events.''' bl_idname = "object.nusiq_mcblend_remove_event" bl_label = '''Removes event from scene.''' bl_options = {'UNDO'} @classmethod def poll(cls, context: bpy_types.Context): events = bpy.context.scene.nusiq_mcblend_events active_event_id = bpy.context.scene.nusiq_mcblend_active_event if not (0 <= active_event_id < len(events)): return False return True def execute(self, context): active_event_id = bpy.context.scene.nusiq_mcblend_active_event # Remove animation bpy.context.scene.nusiq_mcblend_events.remove( active_event_id) # Set new active event if active_event_id > 0: bpy.context.scene.nusiq_mcblend_active_event=active_event_id-1 return {'FINISHED'} class OBJECT_OT_NusiqMcblendAddEffect(bpy.types.Operator): '''Operator used for adding effects to events.''' bl_idname = 
"object.nusiq_mcblend_add_effect" bl_label = '''Adds new effect to active event.''' bl_options = {'UNDO'} effect_type: EnumProperty( # type: ignore items=list_effect_types_as_blender_enum, name='Effect type' ) @classmethod def poll(cls, context: bpy_types.Context): events = bpy.context.scene.nusiq_mcblend_events active_event_id = bpy.context.scene.nusiq_mcblend_active_event if not (0 <= active_event_id < len(events)): return False return True def execute(self, context): events = bpy.context.scene.nusiq_mcblend_events active_event_id = bpy.context.scene.nusiq_mcblend_active_event event = events[active_event_id] effect = event.effects.add() effect.effect_type = self.effect_type return {'FINISHED'} class OBJECT_OT_NusiqMcblendRemoveEffect(bpy.types.Operator): '''Operator used for removeing effects effects from events.''' bl_idname = "object.nusiq_mcblend_remove_effect" bl_label = '''Remove effect from active event.''' bl_options = {'UNDO'} effect_index: IntProperty() # type: ignore @classmethod def poll(cls, context: bpy_types.Context): events = bpy.context.scene.nusiq_mcblend_events active_event_id = bpy.context.scene.nusiq_mcblend_active_event if not (0 <= active_event_id < len(events)): return False event = events[active_event_id] effects = event.effects if len(effects) <= 0: return False return True def execute(self, context): events = bpy.context.scene.nusiq_mcblend_events active_event_id = bpy.context.scene.nusiq_mcblend_active_event event = events[active_event_id] event.effects.remove(self.effect_index) return {'FINISHED'} <file_sep>''' This is a testing script for UV-group importer and exporter. It imports UV-group, exports it and compares if the result is the same as the original. 
''' # pylint: disable=missing-docstring import os import json from pathlib import Path import typing as tp import shutil import pytest from .common import assert_is_model, blender_run_script, make_comparable_json def make_comparison_files( source: str, tmp: str) -> tp.Tuple[tp.Dict, tp.Dict, str]: ''' Loads UV-group from JSON to Blender using nusiq_mcblend_import_uv_group_operator. Exports this model to tmp (to a file with the same name as source file). Returns two dictionaries: - the original UV-group - the exported UV-group ''' source = os.path.abspath(source) tmp = os.path.abspath(tmp) target = os.path.join(tmp, os.path.split(source)[1]) script = os.path.abspath('./blender_scripts/import_export_uv_group.py') # Windows uses weird path separators source = source.replace('\\', '/') tmp = tmp.replace('\\', '/') target = target.replace('\\', '/') script = script.replace('\\', '/') # Create tmp if not exists Path(tmp).mkdir(parents=True, exist_ok=True) # Run blender actions blender_run_script(script, source, target) # Get the results with open(source, 'r') as f: source_dict = json.load(f) with open(target, 'r') as f: target_dict = json.load(f) return source_dict, target_dict # PYTEST FUNCTIONS UV_GROUP_FILES = [ # Import empties # TODO - The names of the test files here "test.uvgroup.json" ] def setup_module(module): '''Runs before tests''' # pylint: disable=unused-argument tmp_path = "./.tmp/test_importer" if os.path.exists(tmp_path): shutil.rmtree(tmp_path) @pytest.fixture(params=UV_GROUP_FILES) def import_properties(request): return request.param # TESTS def test_importer(import_properties: str): # pylint: disable=redefined-outer-name model_file = os.path.join( './tests/data/test_uv_group/import_export', import_properties) source_dict, target_dict = make_comparison_files( model_file, "./.tmp/test_uv_group_import_export" ) # JSON paths to lists where order doesn't matter. 
set_paths = {} # The order always matter in this case source_comparable = make_comparable_json(source_dict, set_paths) target_comparable = make_comparable_json(target_dict, set_paths) assert source_comparable == target_comparable <file_sep># GUI Changes Mcblend adds new panels to Blender GUI: - _Mcblend_ tab on [sidebar](#sidebar), - _Mcblend UV groups_ tab in the [Scene Properties](#scene-properties), - _Mcblend events_ tab in the [Scene Properties](#scene-properties), - _Mcblend object properties_ in the [Object Properties](#object-properties), - _Mcblend: Export model_ and _Mcblend: Export animation_ in the export menu, - _Mcblend: Import model_ in the import menu. ## Sidebar The sidebar gives access to most of the Mcblend functionality. It contains 5 panels: - [Export bedrock animation](../basic_operators/#exporting-animations), - [Export bedrock model](../basic_operators/#exporting-models), - [Set bedrock UVs](../basic_operators/#uv-mapping), - [Operators](../basic_operators/#modifying-mcblend-properties), - [Import bedrock model](../basic_operators/#importing-models). ![](../img/mcblend_gui_image.png) ## Scene properties Scene properties tab has two new panels: - _Mcblend UV groups_ - used for [creating](../uv_groups) and [customizing](../texture_customization) UV-groups. - _Mcblend events_ - used for adding events for creating [sound- and particle-effects animations](../animating_effects). ![](../img/uv_groups_panel.png) ## Object properties Object properties tab has new panel - _Mcblend object properties_. It lets you view and edit [custom object properties](../basic_operators/#modifying-mcblend-properties) used by Mcblend. ![](../img/object_properties.png) !!! note Editing the Inflate value through this panel does not change the dimensions of the object, but it does change the inflate value in the exported object. 
If you want to inflate/deflate the object you should use the inflate operator from the sidebar.<file_sep>''' Import UV-group from source_path and export it to target_path. This script is used for testing the import and export UV-group operators. ''' # TODO - implement import sys import bpy # Collect arguments after "--" argv = sys.argv argv = argv[argv.index("--") + 1:] def main(source_path: str, target_path: str): # Load model from source file bpy.ops.object.nusiq_mcblend_import_uv_group_operator( filepath=source_path) # Save model to target file bpy.ops.object.nusiq_mcblend_export_uv_group_operator( filepath=target_path) if __name__ == "__main__": main(argv[0], argv[1]) <file_sep>''' Custom mcblend exceptions. ''' from __future__ import annotations from typing import List class NameConflictException(Exception): '''Raise when two bones in Minecraft model have the same name.''' class NotEnoughTextureSpace(Exception): '''Raise when there is no enough UV space for uv-mapping.''' class InvalidDictPathException(LookupError): ''' Raise when using using function for quick access to dictionary path fails. ''' class NoCubePolygonsException(Exception): ''' Raise when trying to get CubePolygons from McblendObject but some data is missing. ''' class NotAStandardUvException(Exception): ''' Raised by StandardCubeUvExport class when the UV of an object doesn't have standard Minecraft UV mapping shape ''' class FileIsNotAModelException(Exception): ''' Raised in importer when the loaded file is not a model. ''' class ImportingNotImplementedError(NotImplementedError): ''' Raised by imported when given property is valid but there is no implementation for loading it into blender. ''' def __init__(self, what: str, path: List): super().__init__( f'{path}:: importing {what} is not implemented in this version of' ' mcblend.') <file_sep>''' Utility functions for working with JSON and dictionaries. 
''' from __future__ import annotations import json from typing import Iterable, List, Optional, Union, Dict, Any from collections import UserDict, UserList from .exception import InvalidDictPathException def get_vect_json(arr: Iterable) -> List[float]: ''' Changes the iterable of numbers into basic python list of floats. Values from the original iterable are rounded to the 3rd deimal digit. :param arr: an iterable of numbers. ''' result = [round(i, 3) for i in arr] for i, _ in enumerate(result): if result[i] == -0.0: result[i] = 0.0 return result def get_path( jsonable: Optional[Union[Dict, List, str, float, int, bool]], path: List[Union[str, int]] ) -> Optional[Any]: ''' Returns the object from given JSON path. Raises InvalidDictPathException when path is invalid. :param jsonable: An object which can be saved in JSON format :param path: a path to target object :returns: the target object. ''' curr_obj = jsonable for path_item in path: try: curr_obj = curr_obj[path_item] # type: ignore except (LookupError, TypeError) as e: raise InvalidDictPathException() from e return curr_obj class CompactEncoder(json.JSONEncoder): ''' JSONEncoder which can encode JSON in compact yet still readable form. Additionally it can encode UserDict and UserList from collections. ''' def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.indent = -1 self.respect_indent = True @staticmethod def _is_primitive(obj): return isinstance(obj, (int, bool, str, float)) def encode(self, obj): ''' Return a JSON string representation of a Python data structure. .. code-block:: python >>> CompactEncoder().encode({"foo": ["bar", "baz"]}) '{\\n\\t"foo": ["bar", "baz"]\\n}' ''' # pylint: disable=arguments-differ return ''.join(self.iterencode(obj)) def iterencode(self, obj): # pylint: disable=W0221, R0912 ''' Encode the given object and yield each string representation line by line. .. 
code-block:: python >>> item = {"foo": ["bar", "baz"]} >>> ''.join(list(CompactEncoder().iterencode(item))) == \\ ... CompactEncoder().encode(item) True ''' self.indent += 1 if self.respect_indent: ind = self.indent*'\t' else: ind = '' if isinstance(obj, (dict, UserDict)): if not obj: # if empty yield f"{ind}{{}}" else: body = [] for k, v in obj.items(): body.extend([ f'{j[:self.indent]}"{k}": {j[self.indent:]}' for j in self.iterencode(v) ]) body_str = ",\n".join(body) yield ( f'{ind}{{\n' f'{body_str}\n' f'{ind}}}' ) elif isinstance(obj, (list, tuple, UserList)): primitive_list = True for i in obj: if not self._is_primitive(i): primitive_list = False break if primitive_list: body = [] self.respect_indent = False for i in obj: body.extend(self.iterencode(i)) self.respect_indent = True yield f'{ind}[{", ".join(body)}]' else: body = [] for i in obj: body.extend(self.iterencode(i)) body_str = ",\n".join(body) yield ( f'{ind}[\n' f'{body_str}\n' f'{ind}]' ) elif self._is_primitive(obj): if isinstance(obj, str): yield f'{ind}"{str(obj)}"' elif isinstance(obj, float) and obj.is_integer(): yield f'{ind}{str(int(obj))}' else: yield f'{ind}{str(obj).lower()}' elif obj is None: yield f'{ind}null' else: raise TypeError('Object of type set is not JSON serializable') self.indent -= 1 <file_sep>''' Functions related to creating UV map. ''' from __future__ import annotations from typing import ( Dict, Tuple, List, Iterator, Collection, NamedTuple, Sequence) from enum import Enum from dataclasses import dataclass, field from itertools import filterfalse import numpy as np import bpy import bpy_types from .texture_generator import Mask from .exception import NotEnoughTextureSpace from .json_tools import get_vect_json from .common import ( MINECRAFT_SCALE_FACTOR, McblendObject, McblendObjectGroup, CubePolygon) class CoordinatesConverter: ''' An object which allows conversion of coordinates defined by space_a to space_b (passed in the constructor). 
    Example: [[1, 2], [3, 4], [5, 6]] is a 3D space first dimension in range
    from 1 to 2, second from 3 to 4 and third from 5 to 6. Both spaces should
    have the same number of dimensions.

    :param space_a: The space to convert from.
    :param space_b: The space to convert to.
    '''
    def __init__(self, space_a: np.ndarray, space_b: np.ndarray):
        # Transposed copies: after .T, row 0 holds the lower bounds and
        # row 1 the upper bounds of every dimension.
        self.space_a = np.copy(space_a.T)
        self.space_b = np.copy(space_b.T)
        # Per-dimension extents of both spaces.
        self.scale_a = self.space_a[1] - self.space_a[0]
        self.scale_b = self.space_b[1] - self.space_b[0]

    def convert(self, x: Collection[float]) -> Collection[float]:
        '''
        Performs a conversion on coordinates passed to the function with x
        argument (from space_a to space_b).

        :param x: the vector with coordinates.
        :returns: converted vector.
        '''
        x = np.array(x).T
        # Normalize into the 0-1 range of space_a, then rescale into space_b.
        return tuple(  # type: ignore
            (((x-self.space_a[0])/self.scale_a)*self.scale_b)+self.space_b[0]
        )

# (U, V) - 0, 0 = top left
class UvCorner(Enum):
    '''
    Used by the Suggestion object to point at corner of a UvBox.
    '''
    TOP_RIGHT = 'TOP_RIGHT'
    TOP_LEFT = 'TOP_LEFT'
    BOTTOM_RIGHT = 'BOTTOM_RIGHT'
    BOTTOM_LEFT = 'BOTTOM_LEFT'

class Suggestion(NamedTuple):
    '''
    A class used by UvBoxes to suggest free spaces on the texture during
    UV-mapping.

    :prop position: Position that other UvBox should touch with its corner.
    :prop corner: Which corner should touch the position.
    '''
    position: Tuple[int, int]
    corner: UvCorner

class UvBox:
    '''Rectangular space on the texture.'''
    def __init__(
            self, size: Tuple[int, int], uv: Tuple[int, int] = None
        ):
        # A box created without UV coordinates is considered unmapped until
        # the mapper assigns a position to it.
        if uv is None:
            uv = (0, 0)
            self.is_mapped = False
        else:
            self.is_mapped = True

        self.size: Tuple[int, int] = size
        self.uv: Tuple[int, int] = uv

    def collides(self, other: UvBox):
        '''
        Returns True if this UvBox is colliding with other UvBox. Otherwise
        returns False.

        :param other: The other UvBox to test the collision.
        :returns: True if there is a collision.
        '''
        # Axis-aligned rectangle overlap test; each pair is (min, max).
        self_x = (self.uv[0], self.uv[0] + self.size[0])
        self_y = (self.uv[1], self.uv[1] + self.size[1])
        other_x = (other.uv[0], other.uv[0] + other.size[0])
        other_y = (other.uv[1], other.uv[1] + other.size[1])
        return (
            self_x[0] < other_x[1] and
            other_x[0] < self_x[1] and
            self_y[0] < other_y[1] and
            other_y[0] < self_y[1]
        )

    def suggest_positions(self) -> List[Suggestion]:
        '''
        Returns list of positions touching this UvBox for other UvBox without
        overlapping.

        :returns: list of suggestions for other UV-box to try while looking
            for empty space on the texture.
        '''
        # Walk clockwise around the box, one suggestion per corner-adjacent
        # spot outside of it.
        size = (self.size[0]-1, self.size[1]-1)
        uv = self.uv
        # (near which wall?, which side of the wall?)
        return [
            # U, V-1 BOTTOM_LEFT (top left)
            Suggestion((uv[0], uv[1] - 1), UvCorner.BOTTOM_LEFT),
            # U+S, V-1 BOTTOM_RIGHT (top right)
            Suggestion((uv[0] + size[0], uv[1] - 1), UvCorner.BOTTOM_RIGHT),
            # U+S+1, V TOP_LEFT (right top)
            Suggestion((uv[0] + size[0] + 1, uv[1]), UvCorner.TOP_LEFT),
            # U+S+1, V+S BOTTOM_LEFT (right bottom)
            Suggestion(
                (uv[0] + size[0] + 1, uv[1] + size[1]), UvCorner.BOTTOM_LEFT
            ),
            # U+S, V+S+1 TOP_RIGHT (bottom right)
            Suggestion(
                (uv[0] + size[0], uv[1] + size[1] + 1), UvCorner.TOP_RIGHT
            ),
            # U, V+S+1 TOP_LEFT (bottom left)
            Suggestion((uv[0], uv[1] + size[1] + 1), UvCorner.TOP_LEFT),
            # U-1, V+S BOTTOM_RIGHT (left bottom)
            Suggestion((uv[0] - 1, uv[1] + size[1]), UvCorner.BOTTOM_RIGHT),
            # U-1,V TOP_RIGHT (left top)
            Suggestion((uv[0] - 1, uv[1]), UvCorner.TOP_RIGHT),
        ]

    def apply_suggestion(self, suggestion: Suggestion):
        '''
        Uses a suggestion to set the UV for this UvBox.

        :param suggestion: the suggestion.
        '''
        # Shift own UV so that the named corner lands on suggestion.position.
        size = (self.size[0]-1, self.size[1]-1)
        if suggestion.corner == UvCorner.TOP_LEFT:
            self.uv = suggestion.position
        elif suggestion.corner == UvCorner.TOP_RIGHT:
            self.uv = (
                suggestion.position[0] - size[0], suggestion.position[1]
            )
        elif suggestion.corner == UvCorner.BOTTOM_LEFT:
            self.uv = (
                suggestion.position[0], suggestion.position[1] - size[1]
            )
        elif suggestion.corner == UvCorner.BOTTOM_RIGHT:
            self.uv = (
                suggestion.position[0] - size[0],
                suggestion.position[1] - size[1]
            )

    def paint_texture(
            self, arr: np.ndarray, resolution: int = 1
        ):
        '''
        Paints the UvBox on the texture represented by the numpy array.

        :param arr: the texture array.
        :param resolution: the resolution of the Minecraft texture. Where 1
            is standard Minecraft texture resolution (16 pixels for one
            block).
        '''
        # Texture rows grow downwards while the V axis grows upwards,
        # hence the shape[0] - ... flip.
        min1 = int(arr.shape[0]/resolution)-int(self.uv[1]+self.size[1])
        max1 = int(arr.shape[0]/resolution)-int(self.uv[1])
        min2, max2 = int(self.uv[0]), int(self.uv[0]+self.size[0])
        min1 = min1 * resolution
        min2 = min2 * resolution
        max1 = max1 * resolution
        max2 = max2 * resolution

        # Always paint white
        texture_part = arr[min1:max1, min2:max2]
        texture_part[...] = 1  # Set RGBA white

class McblendObjUvBox(UvBox):
    '''
    An UvBox that holds reference to an McblendObject and provides a method
    to set it's UV.
    '''
    def new_uv_layer(self):
        '''Adds new UV-layer to contained McblendObject.'''
        raise NotImplementedError()

    def set_blender_uv(self, converter: CoordinatesConverter):
        '''
        Sets the UV of a blender object.

        :param converter: The coordinates converter used to convert from
            Minecraft UV coordinates (used internally by this object) to
            Blender UV coordinates.
        '''
        raise NotImplementedError()

    def clear_uv_layers(self):
        '''
        Clears the uv layers from the wrapped McblendObject.
        '''
        raise NotImplementedError()

class UvMcCubeFace(UvBox):
    '''
    A single face in the UvBox.
    '''
    def __init__(
            self, cube: UvMcCube, cube_polygon: CubePolygon,
            size: Tuple[int, int],
            masks: Sequence[Mask],
            uv: Tuple[int, int]=None):
        super().__init__(size, uv=uv)
        # The parent cube and the mesh polygon this face maps to.
        self.cube = cube
        self.cube_polygon = cube_polygon
        # Texture-generator masks applied when painting this face.
        self.masks = masks

    def set_blender_uv(self, converter: CoordinatesConverter):
        '''
        Sets the UV of a blender object.

        :param converter: the coordinates converter used to convert from
            Minecraft UV coordinates (used internally by this object) to
            Blender UV coordinates.
        '''
        # Order of the faces for: left_down, right_down, right_up, left_up
        # Cube polygon data
        cp_loop_indices = self.cube_polygon.side.loop_indices
        cp_order = self.cube_polygon.order

        left_down = cp_loop_indices[cp_order[0]]
        right_down = cp_loop_indices[cp_order[1]]
        right_up = cp_loop_indices[cp_order[2]]
        left_up = cp_loop_indices[cp_order[3]]

        # Write the four converted corner coordinates into the active
        # UV-layer of the underlying mesh.
        uv_data = self.cube.thisobj.obj_data.uv_layers.active.data
        uv_data[left_down].uv = converter.convert(
            (self.uv[0], self.uv[1] + self.size[1]))
        uv_data[right_down].uv = converter.convert(
            (self.uv[0] + self.size[0], self.uv[1] + self.size[1]))
        uv_data[right_up].uv = converter.convert(
            (self.uv[0] + self.size[0], self.uv[1]))
        uv_data[left_up].uv = converter.convert(self.uv)

    def paint_texture(
            self, arr: np.ndarray, resolution: int = 1
        ):
        '''
        Paints the UvBox on the texture.

        :param arr: the texture array.
        :param resolution: the resolution of the Minecraft texture. Where 1
            is standard Minecraft texture resolution (16 pixels for one
            block).
        '''
        # Flip V axis: texture rows grow downwards, V grows upwards.
        min1 = int(arr.shape[0]/resolution)-int(self.uv[1]+self.size[1])
        max1 = int(arr.shape[0]/resolution)-int(self.uv[1])
        min2, max2 = int(self.uv[0]), int(self.uv[0]+self.size[0])
        min1 = min1 * resolution
        min2 = min2 * resolution
        max1 = max1 * resolution
        max2 = max2 * resolution

        # Always paint white
        texture_part = arr[min1:max1, min2:max2]
        texture_part[...] = 1.0  # Set RGBA white
        texture_part = texture_part[..., :3]  # No alpha channel filters yet
        # Apply every mask of this face on top of the white base.
        for mask in self.masks:
            mask.apply(texture_part)

class UvMcCube(McblendObjUvBox):
    '''
    Class that combines six UvMcCubeFaces grouped together to represent
    space on the texture needed for UV mapping of single cube in Minecraft
    model.
    '''
    def __init__(
            self, width: int, depth: int, height: int,
            thisobj: McblendObject):
        # Total footprint of the standard Minecraft cube UV layout.
        size = (
            2*depth + 2*width,
            height + depth
        )
        self.depth = depth
        self.height = height
        self.width = width
        self.thisobj = thisobj

        cube_polygons = self.thisobj.cube_polygons()
        # Mirrored cubes swap the east and west faces.
        if self.thisobj.mirror:
            cp1, cp3 = cube_polygons.west, cube_polygons.east
        else:
            cp1, cp3 = cube_polygons.east, cube_polygons.west
        # right/left
        self.side1 = UvMcCubeFace(
            self, cp1, (depth, height), thisobj.side1_uv_masks,
            uv=(0, depth))
        # front
        self.side2 = UvMcCubeFace(
            self, cube_polygons.north, (width, height),
            thisobj.side2_uv_masks, uv=(depth, depth))
        # left/right
        self.side3 = UvMcCubeFace(
            self, cp3, (depth, height), thisobj.side3_uv_masks,
            uv=(depth + width, depth))
        # back
        self.side4 = UvMcCubeFace(
            self, cube_polygons.south, (width, height),
            thisobj.side4_uv_masks, uv=(2*depth + width, depth))
        # top
        self.side5 = UvMcCubeFace(
            self, cube_polygons.up, (width, depth),
            thisobj.side5_uv_masks, uv=(depth, 0))
        # bottom
        self.side6 = UvMcCubeFace(
            self, cube_polygons.down, (width, depth),
            thisobj.side6_uv_masks, uv=(depth + width, 0))
        super().__init__(size, None)

    @property  # type: ignore
    def uv(self) -> Tuple[int, int]:  # type: ignore
        '''UV of the object.'''
        return self._uv

    @uv.setter
    def uv(self, uv: Tuple[int, int]):
        # Moving the cube moves all six faces to their fixed offsets
        # inside the cube layout.
        self._uv = uv
        self.side1.uv = (uv[0], uv[1] + self.depth)
        self.side2.uv = (uv[0] + self.depth, uv[1] + self.depth)
        self.side3.uv = (uv[0] + self.depth + self.width, uv[1] + self.depth)
        self.side4.uv = (uv[0] + 2*self.depth + self.width, uv[1] + self.depth)
        self.side5.uv = (uv[0] + self.depth, uv[1])
        self.side6.uv = (uv[0] + self.depth + self.width, uv[1])

    def collides(self, other: UvBox):
        # Collision with any of the six faces counts as collision with the
        # cube (the layout is not a full rectangle).
        for i in [
                self.side1, self.side2, self.side3, self.side4, self.side5,
                self.side6
            ]:
            if i.collides(other):
                return True
        return False

    def suggest_positions(self) -> List[Suggestion]:
        '''
        Returns list of positions next to this UV box that can be used
        by other UV box to set the UV that doesn't overlap this object.

        :returns: list of suggestions for other UV-box to try while looking
            for empty space on the texture.
        '''
        # 0. (top left) 1. (top right) 2. (right top) 3. (right bottom)
        # 4. (bottom right) 5. (bottom left) 6. (left bottom) 7. (left top)
        # Only the suggestions on the outline of the cube layout are kept.
        result = []
        result.extend([
            s for i, s in enumerate(self.side1.suggest_positions())
            if i in [0, 5, 6]
        ])
        result.extend([
            s for i, s in enumerate(self.side5.suggest_positions())
            if i in [0, 6, 7]
        ])
        result.extend([
            s for i, s in enumerate(self.side6.suggest_positions())
            if i in [1, 2, 3]
        ])
        result.extend([
            s for i, s in enumerate(self.side4.suggest_positions())
            if i in [1, 3, 4]
        ])
        return result

    def set_blender_uv(self, converter: CoordinatesConverter):
        self.side1.set_blender_uv(converter)
        self.side2.set_blender_uv(converter)
        self.side3.set_blender_uv(converter)
        self.side4.set_blender_uv(converter)
        self.side5.set_blender_uv(converter)
        self.side6.set_blender_uv(converter)

    def clear_uv_layers(self):
        while len(self.thisobj.obj_data.uv_layers) > 0:
            self.thisobj.obj_data.uv_layers.remove(
                self.thisobj.obj_data.uv_layers[0]
            )

    def paint_texture(
            self, arr: np.ndarray, resolution: int = 1
        ):
        self.side1.paint_texture(arr, resolution)
        self.side2.paint_texture(arr, resolution)
        self.side3.paint_texture(arr, resolution)
        self.side4.paint_texture(arr, resolution)
        self.side5.paint_texture(arr, resolution)
        self.side6.paint_texture(arr, resolution)

    def new_uv_layer(self):
        self.thisobj.obj_data.uv_layers.new()

class UvGroup(McblendObjUvBox):
    '''
    A collection of McblendObjUvBoxes that have the same UV mapping.
    Internally all of the properties are read from the first box on the
    list.
The set_blender_uv function applies changes to all of the objects. ''' def __init__(self, main_object: McblendObjUvBox): # pylint: disable=super-init-not-called self._objects: List[McblendObjUvBox] = [main_object] def append(self, obj: McblendObjUvBox): '''Adds another McblendObjUvBox to this group.''' obj.uv = self.uv # type: ignore obj.is_mapped = self.is_mapped # type: ignore obj.size = self.size self._objects.append(obj) @property # type: ignore def uv(self) -> Tuple[int, int]: # type: ignore '''Uv of the object.''' return self._objects[0].uv @uv.setter def uv(self, uv: Tuple[int, int]): for obj in self._objects: obj.uv = uv @property # type: ignore def size(self) -> Tuple[int, int]: # type: ignore '''Size of the object.''' return self._objects[0].size @size.setter def size(self, size: Tuple[int, int]): for obj in self._objects: obj.size = size @property # type: ignore def is_mapped(self) -> bool: # type: ignore '''Returns whether the object has assigned UV-mapping.''' return self._objects[0].is_mapped @is_mapped.setter def is_mapped(self, val: bool): for obj in self._objects: obj.is_mapped = val def collides(self, other: UvBox) -> bool: return self._objects[0].collides(other) def suggest_positions( self ) -> List[Suggestion]: return self._objects[0].suggest_positions() def apply_suggestion( self, suggestion: Suggestion ): for obj in self._objects: obj.apply_suggestion(suggestion) def set_blender_uv(self, converter: CoordinatesConverter): for obj in self._objects: obj.set_blender_uv(converter) def clear_uv_layers(self): for obj in self._objects: obj.clear_uv_layers() def paint_texture(self, arr: np.ndarray, resolution: int = 1): # They mapped to one place (paint only one) # for obj in self._objects: if len(self._objects) > 0 : self._objects[0].paint_texture(arr, resolution) def new_uv_layer(self): for obj in self._objects: obj.new_uv_layer() @dataclass class UvMapper: ''' A class that helps with UV-mapping. 
    '''
    # Texture dimensions used as bounds for the packing algorithm.
    width: int
    height: int
    uv_boxes: List[McblendObjUvBox] = field(default_factory=list)

    def load_uv_boxes(
            self, object_properties: McblendObjectGroup,
            context: bpy_types.Context
        ):
        # pylint: disable=duplicate-code
        '''
        Populates the uv_boxes dictionary.

        # Properties:
        :prop object_properties: The properties of all of the Minecraft
            cubes and bones.
        :prop context: The context of running the operator.
        '''
        # Cube dimensions are sampled at frame 0; restore the original
        # frame afterwards.
        bpy.ops.screen.animation_cancel()
        original_frame = context.scene.frame_current
        try:
            context.scene.frame_set(0)

            # Dictionary identified by width, depth, height, group name
            cube_uv_groups: Dict[Tuple[int, int, int, str], UvGroup] = {}

            objprop: McblendObject
            for objprop in object_properties.values():
                if objprop.obj_type != 'MESH':
                    continue

                scale = (
                    objprop.mcube_size *
                    # scale
                    np.array(objprop.obj_matrix_world.decompose()[2].xzy) *
                    MINECRAFT_SCALE_FACTOR
                )
                if objprop.inflate != 0:
                    # Inflate is applied on both sides of every dimension.
                    scale = scale - objprop.inflate * 2

                # width, height, depth - rounded down to int
                # first round with get_json_vect to avoid numerical errors
                # and than round down to int (like minecraft does).
                width, height, depth = [
                    int(i) for i in get_vect_json(scale)]

                if objprop.uv_group != '':
                    # Cubes with identical dimensions in the same UV-group
                    # share one mapping.
                    curr_key = (width, depth, height, objprop.uv_group)
                    if curr_key in cube_uv_groups:
                        cube_uv_groups[curr_key].append(
                            UvMcCube(width, depth, height, objprop)
                        )
                    else:
                        cube_uv_groups[curr_key] = UvGroup(
                            UvMcCube(width, depth, height, objprop)
                        )
                        self.uv_boxes.append(cube_uv_groups[curr_key])
                else:
                    self.uv_boxes.append(
                        UvMcCube(width, depth, height, objprop)
                    )
        finally:
            context.scene.frame_set(original_frame)

    def plan_uv(self, allow_expanding: bool):
        '''
        Plans UVs for all of the boxes on the list. Uses self.width and
        self.height to limit the area unless the allow_expanding is set to
        True. Raises NotEnoughTextureSpace when the texture width and height
        wasn't big enough to map all of the boxes.

        :param allow_expanding: Whether the texture space can be expanded
            to fit all of the objects in it.
        '''
        # Greedy packing: place widest boxes first, each box tries the
        # accumulated corner suggestions of already-placed boxes.
        self.uv_boxes.sort(key=lambda box: box.size[0], reverse=True)
        if allow_expanding and len(self.uv_boxes) > 0:
            self.width = max([self.width, self.uv_boxes[0].size[0]])

        suggestions: List[Suggestion] = [
            Suggestion((0, 0), UvCorner.TOP_LEFT)]

        authors: List[UvBox] = []  # authors of the suggestions
        mapped_boxes = []
        unmapped_boxes = []
        for box in self.uv_boxes:
            if box.is_mapped:
                mapped_boxes.append(box)
            else:
                unmapped_boxes.append(box)

        def _is_out_of_bounds(uv, size=(0, 0)):
            # Height bound is ignored when the texture may grow downwards.
            return (
                uv[0] < 0 or uv[1] < 0 or uv[0] + size[0] > self.width or
                (not allow_expanding and uv[1] + size[1] > self.height)
            )

        # pylint: disable=too-many-nested-blocks
        for box in unmapped_boxes:
            suggestion_i = -1
            while len(suggestions) > suggestion_i + 1:
                suggestion_i += 1
                # Apply suggestion
                box.apply_suggestion(suggestions[suggestion_i])

                # Test if box in texture space
                if not _is_out_of_bounds(box.uv, box.size):
                    # Test if suggestion doesn't collide
                    for other_box in mapped_boxes:
                        if box.collides(other_box):  # Bad suggestion. Find more
                            if other_box not in authors:
                                authors.append(other_box)
                                suggestions.extend(filterfalse(
                                    lambda x: _is_out_of_bounds(x.position),
                                    other_box.suggest_positions()
                                ))
                            break
                    else:  # didn't found collisions. Good suggestion, break the loop
                        box.is_mapped = True
                        mapped_boxes.append(box)
                        suggestions.extend(filterfalse(
                            lambda x: _is_out_of_bounds(x.position),
                            box.suggest_positions()
                        ))
                        del suggestions[suggestion_i]
                        break
            else:  # No good suggestion found for current box.
                box.uv = (0, 0)
                raise NotEnoughTextureSpace()

    def __iter__(self) -> Iterator[McblendObjUvBox]:
        for i in self.uv_boxes:
            yield i
<file_sep>'''
Common functions for tests.

This module uses additional file called config.py (located in the tests/
directory) which defines the BLENDER_EXEC_PATH - a string with a path to
the blender executable. If BLENDER_EXEC_PATH is not specified the test
script tries to run blender with `blender` command.
The config.py file is blacklisted in .gitignore because it can be different on different devices. ''' import typing as tp import subprocess try: from .config import BLENDER_EXEC_PATH except: BLENDER_EXEC_PATH = 'blender' def blender_run_script( script, *args, blend_file_path: tp.Optional[str] = None ): ''' Run blender script with *args arguments. You can pass optional argument blend_file_path if the scrupt should be executed in certain file path. ''' if not blend_file_path: command = [BLENDER_EXEC_PATH, '-b', '--python', script, '--', *args] else: command = [ BLENDER_EXEC_PATH, blend_file_path, '-b', '--python', script, '--', *args ] subprocess.call(command) def assert_is_vector(vect: tp. Any, length: int, types: tp.Tuple): assert type(vect) is list assert len(vect) == length assert all([isinstance(i, types) for i in vect]) def assert_is_model(a: tp.Dict): '''Check if the input is a valid model''' assert type(a) is dict assert set(a.keys()) == {'format_version', 'minecraft:geometry'} assert a['format_version'] == "1.12.0" geometries = a['minecraft:geometry'] assert type(geometries) is list assert len(geometries) > 0 # minecraft:geometry for geometry in geometries: assert type(geometry) is dict assert set(geometry.keys()) == {'description', 'bones'} desc = geometry['description'] bones = geometry['bones'] # minecraft:geometry -> description assert type(desc) is dict assert set(desc.keys()) == { 'identifier', 'texture_width', 'texture_height', 'visible_bounds_width', 'visible_bounds_height', 'visible_bounds_offset' } assert type(desc['identifier']) is str assert type(desc['texture_width']) is int assert type(desc['texture_height']) is int assert isinstance(desc['visible_bounds_width'], (float, int)) assert isinstance(desc['visible_bounds_height'], (float, int)) assert_is_vector(desc['visible_bounds_offset'], 3, (int, float)) # minecraft:geometry -> bones assert type(bones) is list for bone in bones: assert type(bone) is dict assert set(bone.keys()) <= { # acceptable 
keys 'name', 'cubes', 'pivot', 'rotation', 'parent', 'locators' } assert set(bone.keys()) >= { # obligatory keys 'name', 'pivot', 'rotation' } assert type(bone['name']) is str assert_is_vector(bone['pivot'], 3, (int, float)) assert_is_vector(bone['rotation'], 3, (int, float)) if 'parent' in bone: assert type(bone['parent']) is str # minecraft:geometry -> bones -> locators if 'locators' in bone: assert type(bone['locators']) is dict for locator_name, locator in bone['locators'].items(): assert type(locator_name) is str assert_is_vector(locator, 3, (int, float)) # minecraft:geometry -> bones -> cubes if 'cubes' in bone: assert type(bone['cubes']) is list for cube in bone['cubes']: assert type(cube) is dict assert set(cube.keys()) <= { # acceptable keys 'uv', 'size', 'origin', 'pivot', 'rotation', 'mirror' } assert set(cube.keys()) >= { # obligatory keys 'uv', 'size', 'origin', 'pivot', 'rotation' } assert_is_vector(cube['uv'], 2, (int, float)) assert_is_vector(cube['size'], 3, (int, float)) assert_is_vector(cube['origin'], 3, (int, float)) assert_is_vector(cube['pivot'], 3, (int, float)) assert_is_vector(cube['rotation'], 3, (int, float)) if 'mirror' in cube: assert type(cube['mirror']) is bool def make_comparable_json( jsonable: tp.Any, set_paths: tp.Set[tp.Tuple], curr_path=None): ''' Replaces some of the lists in JSON with frozen sets so the objects can be safely compared and the order doesn't matter. Dictionaries are replaced with tuples of key value pairs because dictionaries are mutable and can't be part of a frozenset. 
''' if curr_path is None: curr_path = [] if isinstance(jsonable, dict): result = [ (k, make_comparable_json(v, set_paths, curr_path+[k])) for k, v in jsonable.items()] return frozenset(result) if isinstance(jsonable, list): result = [ make_comparable_json(i, set_paths, curr_path+[0]) for i in jsonable] if tuple(curr_path) in set_paths: return frozenset(result) return tuple(result) if isinstance(jsonable, (type(None), bool, int, float, str)): return jsonable <file_sep>[tox] envlist = py37 skipsdist = true [testenv] deps = pytest mypy pylint mkdocs numpy mkdocs-material fake-bpy-module-2.83 commands = ; run tests pytest {posargs} ; run style tests python -m mypy ./mcblend pylint ./mcblend <file_sep>''' Functions related to exporting models. ''' from __future__ import annotations from typing import List, Dict, Tuple, Any, Optional from dataclasses import dataclass, field import mathutils import numpy as np import bpy import bpy_types from .common import ( MINECRAFT_SCALE_FACTOR, McblendObject, McblendObjectGroup, MCObjType, CubePolygons, CubePolygon, MeshType ) from .json_tools import get_vect_json from .exception import NoCubePolygonsException, NotAStandardUvException from .uv import CoordinatesConverter from copy import copy @dataclass class ModelExport: ''' Object that represents model during export. :param model_name: name of the model :param texture_width: Minecraft model property - texture_width. :param texture_height: Minecraft model property - texture_height. :param visible_bounds_offset: Minecraft model property - visible_bounds_offset. :param visible_bounds_width: Minecraft model property - visible_bounds_width. :param visible_bounds_height: Minecraft model property - visible_bounds_height. :param bones: Optional - list of :class:`BoneExport` objects that represent the bones of this model. 
''' model_name: str texture_width: int texture_height: int visible_bounds_offset: Tuple[float, float, float] visible_bounds_width: float visible_bounds_height: float bones: List[BoneExport] = field(default_factory=list) def load( self, object_properties: McblendObjectGroup, context: bpy_types.Context ): ''' Populates the self.bones dictionary. :param object_properties: Group of mcblend objects. :param context: The context of running the operator. ''' bpy.ops.screen.animation_cancel() original_frame = context.scene.frame_current try: context.scene.frame_set(0) for _, objprop in object_properties.items(): if objprop.mctype in [MCObjType.BONE, MCObjType.BOTH]: self.bones.append(BoneExport(objprop, self)) finally: context.scene.frame_set(original_frame) def json(self) -> Dict: ''' Creates a dict that represents the Minecraft model JSON file. :returns: Minecraft model JSON dict. ''' result: Dict = { "format_version": "1.12.0", "minecraft:geometry": [ { "description": { "identifier": f"geometry.{self.model_name}", "visible_bounds_width": round(self.visible_bounds_width, 3), "visible_bounds_height": round(self.visible_bounds_height, 3), "visible_bounds_offset": get_vect_json(self.visible_bounds_offset) }, "bones": [bone.json() for bone in self.bones] } ] } if self.texture_width > 0: # Don't export invalid values result["minecraft:geometry"][0]["description"][ "texture_width"] = self.texture_width if self.texture_height > 0: # Don't export invalid values result["minecraft:geometry"][0]["description"][ "texture_height"] = self.texture_height return result class BoneExport: ''' Object that represents a Bone during model export. # Properties - `model: ModelExport` - a model that contains this bone. - `name: str` - the name of the bone. - `parent: Optional[str]` - the name of a parent of this bone - `rotation: np.ndarray` - rotation of the bone. - `pivot: np.ndarray` - pivot of the bone. - `cubes: List[CubeExport]` - list of cubes to export. 
- `locators: Dict[str, LocatorExport]` - list of locators to export. (if exists) or None ''' def __init__(self, bone: McblendObject, model: ModelExport): ''' Creates BoneExport. If the input value of BONE or BOTH McObjectType than ValueError is raised. ''' self.model = model # Test if bone is valid input object if bone.mctype not in [MCObjType.BONE, MCObjType.BOTH]: raise ValueError('Input object is not a bone.') # Create cubes and locators list cubes: List[McblendObject] = [] if bone.mctype == MCObjType.BOTH: # Else MCObjType == BOTH cubes.append(bone) locators: List[McblendObject] = [] # Add children cubes if they are MCObjType.CUBE type for child in bone.children: if child.mctype is MCObjType.CUBE: cubes.append(child) elif child.mctype is MCObjType.LOCATOR: locators.append(child) self.name: str = bone.obj_name self.parent: Optional[str] = ( None if bone.parent is None else bone.parent.obj_name) self.rotation: np.ndarray = bone.get_mcrotation(bone.parent) self.pivot: np.ndarray = bone.mcpivot * MINECRAFT_SCALE_FACTOR self.cubes: List[CubeExport] = [] self.poly_mesh: PolyMesh = PolyMesh() self.locators: Dict[str, LocatorExport] = {} self.load(bone, cubes, locators) def load( self, thisobj: McblendObject, cube_objs: List[McblendObject], locator_objs: List[McblendObject]): ''' Used in constructor to cubes and locators. 
''' uv_factory = UvExportFactory( (self.model.texture_width, self.model.texture_height) ) def _scale(objprop: McblendObject) -> np.ndarray: '''Scale of a bone''' _, _, scale = objprop.obj_matrix_world.decompose() return np.array(scale.xzy) # Set locators for locatorprop in locator_objs: _l_scale = _scale(locatorprop) l_pivot = locatorprop.mcpivot * MINECRAFT_SCALE_FACTOR l_origin = l_pivot + ( locatorprop.mccube_position * _l_scale * MINECRAFT_SCALE_FACTOR ) self.locators[locatorprop.obj_name] = LocatorExport(l_origin) # Set cubes for cubeprop in cube_objs: if cubeprop.mesh_type is MeshType.CUBE: _c_scale = _scale(cubeprop) c_size = cubeprop.mcube_size * _c_scale * MINECRAFT_SCALE_FACTOR c_pivot = cubeprop.mcpivot * MINECRAFT_SCALE_FACTOR c_origin = c_pivot + ( cubeprop.mccube_position * _c_scale * MINECRAFT_SCALE_FACTOR ) c_rot = cubeprop.get_mcrotation(thisobj) if cubeprop.inflate != 0: c_size = c_size - cubeprop.inflate*2 c_origin = c_origin + cubeprop.inflate uv = uv_factory.get_uv_export(cubeprop, c_size) cube = CubeExport( size=c_size, pivot=c_pivot, origin=c_origin, rotation=c_rot, inflate=cubeprop.inflate, uv=uv) self.cubes.append(cube) elif cubeprop.mesh_type is MeshType.POLY_MESH: cubeprop.obj_data.calc_normals_split() polygons = cubeprop.obj_data.polygons # loop ids and vertices vertices = cubeprop.obj_data.vertices # crds loops = cubeprop.obj_data.loops # normals uv_data = cubeprop.obj_data.uv_layers.active.data # uv inv_bone_matrix = cubeprop.get_local_matrix(thisobj) positions: List[List[float]] = [] normals: List[List[float]] = [] polys: List[List[Tuple[int, int, int]]] = [] uvs: List[List[int]] = [list(i.uv) for i in uv_data] for vertex in vertices: transformed_vertex = inv_bone_matrix @ vertex.co transformed_vertex = ( np.array(transformed_vertex) * MINECRAFT_SCALE_FACTOR * np.array(thisobj.obj_matrix_world.to_scale()) )[[0, 2, 1]] + self.pivot positions.append(list(transformed_vertex)) for loop in loops: transformed_normal = mathutils.Vector( 
np.array(loop.normal)[[0, 2, 1]] ).normalized() normals.append(list(transformed_normal)) for poly in polygons: # vertex data -> List[(positions, normals, uvs)] curr_poly: List[Tuple[int, int, int]] = [] for loop_id, vertex_id in zip( poly.loop_indices, poly.vertices): curr_poly.append((vertex_id, loop_id, loop_id)) if len(curr_poly) == 3: curr_poly.append(copy(curr_poly[2])) polys.append(curr_poly) self.poly_mesh.extend_mesh_data(positions, normals, polys, uvs) def json(self) -> Dict: ''' Returns the dictionary that represents a single mcbone in json file of model. # Returns: `Dict` - the single bone from Minecraft model. ''' # Basic bone properties mcbone: Dict = {'name': self.name} if self.parent is not None: mcbone['parent'] = self.parent mcbone['pivot'] = get_vect_json(self.pivot) mcbone['rotation'] = get_vect_json(self.rotation) # Locators if len(self.locators) > 0: mcbone['locators'] = {} for name, locator in self.locators.items(): mcbone['locators'][name] = locator.json() # Cubess if len(self.cubes) > 0: mcbone['cubes'] = [] for cube in self.cubes: mcbone['cubes'].append(cube.json()) if not self.poly_mesh.is_empty: mcbone['poly_mesh'] = self.poly_mesh.json() return mcbone @dataclass class LocatorExport: '''Object that represents a Locator during model export.''' origin: np.ndarray def json(self): '''Returns JSON representation of this object''' return get_vect_json(self.origin) @dataclass class CubeExport: '''Object that represents a cube during model export.''' size: np.ndarray pivot: np.ndarray origin: np.ndarray rotation: np.ndarray inflate: float uv: UvExport def json(self): '''Returns JSON representation of this object.''' cube_dict = { 'uv': self.uv.json(), 'size': get_vect_json(self.size), 'origin': get_vect_json(self.origin), 'pivot': get_vect_json(self.pivot), 'rotation': [ # Change -180 in rotations to 180 i if i != -180 else 180 for i in get_vect_json(self.rotation) ] } if self.inflate != 0: cube_dict['inflate'] = round(self.inflate, 3) if 
self.uv.mirror: cube_dict['mirror'] = True return cube_dict class PolyMesh: '''Object that represents a poly_mesh of a bone.''' def __init__(self): self.positions: List[List[float]] = [] self.normals: List[List[float]] = [] self.uvs: List[List[int]] = [] self.polys: List[List[List[int]]] = [] self.normalized_uvs: bool = True def extend_mesh_data( self, positions: List[List[float]], normals: List[List[float]], polys: List[List[Tuple[int, int, int]]], uvs: List[List[int]]): vertex_id_offset = len(self.positions) normal_id_offset = len(self.normals) loop_id_offset = len(self.uvs) self.positions.extend(positions) self.normals.extend(normals) self.uvs.extend(uvs) for poly in polys: curr_poly: List[List[int]] = [] for vertex_data in poly: curr_poly.append([ vertex_data[0] + vertex_id_offset, # position id vertex_data[1] + normal_id_offset, # normal id vertex_data[2] + loop_id_offset, # uv id ]) self.polys.append(curr_poly) @property def is_empty(self) -> bool: return len(self.polys) == 0 def json(self): poly_mesh = { 'normalized_uvs': self.normalized_uvs, 'positions': [get_vect_json(i) for i in self.positions], 'normals': [get_vect_json(i) for i in self.normals], 'uvs': [get_vect_json(i) for i in self.uvs], 'polys': self.polys, } return poly_mesh class UvExport: ''' Base class for objects that represent the UV of exported cube. ''' def __init__(self): # Mirror is used only for StandardCubeUvExport but any other UV has to # be able to return False when asked about mirror property self.mirror = False def json(self) -> Any: '''Returns JSON representation of this object.''' # pylint: disable=no-self-use return [0, 0] class PerFaceUvExport(UvExport): ''' Object that represents the UV of a cube during export in per-face UV-mapping style. 
''' def __init__( self, cube_polygons: CubePolygons, uv_layer: bpy.types.MeshUVLoopLayer, blend_to_mc_converter: CoordinatesConverter): super().__init__() self.cube_polygons = cube_polygons self.uv_layer = uv_layer self.converter = blend_to_mc_converter def json(self): result = {} if not self._is_face_uv_outside(self.cube_polygons.north): result["north"] = self._one_face_uv( self.cube_polygons.north, '--+', '+--') if not self._is_face_uv_outside(self.cube_polygons.east): result["east"] = self._one_face_uv( self.cube_polygons.east, '-++', '---') if not self._is_face_uv_outside(self.cube_polygons.south): result["south"] = self._one_face_uv( self.cube_polygons.south, '+++', '-+-') if not self._is_face_uv_outside(self.cube_polygons.west): result["west"] = self._one_face_uv( self.cube_polygons.west, '+-+', '++-') if not self._is_face_uv_outside(self.cube_polygons.up): result["up"] = self._one_face_uv( self.cube_polygons.up, '-++', '+-+') if not self._is_face_uv_outside(self.cube_polygons.down): result["down"] = self._one_face_uv( self.cube_polygons.down, '+--', '-+-') return result def _is_face_uv_outside(self, cube_polygon): '''Tests if UV face is completely outside of the texture''' face: bpy_types.MeshPolygon = cube_polygon.side for loop_index in face.loop_indices: curr_loop = np.array(self.uv_layer.data[loop_index].uv) if not ((curr_loop < 0).any() or (curr_loop > 1).any()): return False # Something isn't outside return True # Went through the loop (everything is outside) def _one_face_uv( self, cube_polygon: CubePolygon, corner1_name: str, corner2_name: str) -> Dict: face: bpy_types.MeshPolygon = cube_polygon.side corner1_index = cube_polygon.orientation.index(corner1_name) corner2_index = cube_polygon.orientation.index(corner2_name) corner1_crds = np.array(self.converter.convert( self.uv_layer.data[face.loop_indices[corner1_index]].uv )) corner2_crds = np.array(self.converter.convert( self.uv_layer.data[face.loop_indices[corner2_index]].uv )) uv = corner1_crds 
uv_size = corner2_crds-corner1_crds return { "uv": [round(i, 3) for i in uv], "uv_size": [round(i, 3) for i in uv_size], } class StandardCubeUvExport(UvExport): ''' Object that represents the UV of a cube during export in default Minecraft UV-mapping style - defined by a vector with two values (the shape of the faces is implicitly determined by the dimensions of the cube). ''' def __init__( self, cube_polygons: CubePolygons, uv_layer: bpy.types.MeshUVLoopLayer, cube_size: np.array, blend_to_mc_converter: CoordinatesConverter): super().__init__() self.cube_size = cube_size self.cube_polygons = cube_polygons self.uv_layer = uv_layer self.converter = blend_to_mc_converter # test if the shape of the UV is the standard Minecraft shape self.assert_standard_uv_shape() def _uv_from_name( self, cube_polygon: CubePolygon, name: str) -> np.ndarray: ''' Helper function used to get certain UV coordinates from a face by its name. ''' face: bpy_types.MeshPolygon = cube_polygon.side name_index = cube_polygon.orientation.index(name) uv_layer_data_index = face.loop_indices[name_index] return self.converter.convert( np.array(self.uv_layer.data[uv_layer_data_index].uv) ) def assert_standard_uv_shape(self): ''' Asserts that this object has a UV-shape that conforms to standard Minecraft UV-mapping shape. If not than NotAStandardUvException is risen. ''' # Get min and max value of he loop coordinates loop_crds_list: List[np.array] = [] for loop in self.uv_layer.data: loop_crds_list.append( self.converter.convert(np.array(loop.uv)) ) loop_crds_arr: np.ndarray = np.vstack(loop_crds_list) min_loop_crds = loop_crds_arr.min(0) # max_loop_crds = loop_crds_arr.max(0) # Depth width height # first round with get_json_vect to avoid numerical errors and than # round down to int (like minecraft does). 
w, h, d = [ int(i) for i in get_vect_json(self.cube_size)] expected_shape = np.array([ [d, d + h], # north/front LD 0 [d + w, d + h], # north/front RD 1 [d + w, d], # north/front RU 2 [d, d], # north/front LU 3 [0, d + h], # east/right LD 4 [d, d + h], # east/right RD 5 [d, d], # east/right RU 6 [0, d], # east/right LU 7 [2 * d + w, d + h], # south/back LD 8 [2 * d + 2 * w, d + h], # south/back RD 9 [2 * d + 2 * w, d], # south/back RU 10 [2 * d + w, d], # south/back LU 11 [d + w, d + h], # west/left LD 12 [2 * d + w, d + h], # west/left RD 13 [2 * d + w, d], # west/left RU 14 [d + w, d], # west/left LU 15 [d, d], # up/up LD 16 [d + w, d], # up/up RD 17 [d + w, 0], # up/up RU 18 [d, 0], # up/up LU 19 [d + w, d], # down/down LD 20 [d + 2 * w, d], # down/down RD 21 [d + 2 * w, 0], # down/down RU 22 [d + w, 0], # down/down LU 23 ], dtype=np.float64) # Shift the expected values so they start from the minimal point # instead of 0 expected_shape += min_loop_crds expected_shape_mirror = expected_shape[[ 1, 0, 3, 2, # Mirror front 13, 12, 15, 14, # Mirror left (and swap with right) 9, 8, 11, 10, # Mirror back 5, 4, 7, 6, # Mirror right (and swap with left) 17, 16, 19, 18, # Mirror up 21, 20, 23, 22, # Mirror down ]] real_shape = np.array([ self._uv_from_name(self.cube_polygons.north, '---'), # north/front LD self._uv_from_name(self.cube_polygons.north, '+--'), # north/front RD self._uv_from_name(self.cube_polygons.north, '+-+'), # north/front RU self._uv_from_name(self.cube_polygons.north, '--+'), # north/front LU self._uv_from_name(self.cube_polygons.east, '-+-'), # east/right LD self._uv_from_name(self.cube_polygons.east, '---'), # east/right RD self._uv_from_name(self.cube_polygons.east, '--+'), # east/right RU self._uv_from_name(self.cube_polygons.east, '-++'), # east/right LU self._uv_from_name(self.cube_polygons.south, '++-'), # south/back LD self._uv_from_name(self.cube_polygons.south, '-+-'), # south/back RD self._uv_from_name(self.cube_polygons.south, '-++'), # 
south/back RU self._uv_from_name(self.cube_polygons.south, '+++'), # south/back LU self._uv_from_name(self.cube_polygons.west, '+--'), # west/left LD self._uv_from_name(self.cube_polygons.west, '++-'), # west/left RD self._uv_from_name(self.cube_polygons.west, '+++'), # west/left RU self._uv_from_name(self.cube_polygons.west, '+-+'), # west/left LU self._uv_from_name(self.cube_polygons.up, '--+'), # up/up LD self._uv_from_name(self.cube_polygons.up, '+-+'), # up/up RD self._uv_from_name(self.cube_polygons.up, '+++'), # up/up RU self._uv_from_name(self.cube_polygons.up, '-++'), # up/up LU self._uv_from_name(self.cube_polygons.down, '---'), # down/down LD self._uv_from_name(self.cube_polygons.down, '+--'), # down/down RD self._uv_from_name(self.cube_polygons.down, '++-'), # down/down RU self._uv_from_name(self.cube_polygons.down, '-+-'), # down/down LU ], dtype=np.float64) if not np.isclose(expected_shape, real_shape).all(): if not np.isclose(expected_shape_mirror, real_shape).all(): raise NotAStandardUvException() self.mirror = True def json(self): loop_crds_list: List[np.array] = [] for loop in self.uv_layer.data: loop_crds_list.append( self.converter.convert(np.array(loop.uv)) ) loop_crds_arr: np.ndarray = np.vstack(loop_crds_list) min_loop_crds = loop_crds_arr.min(0) return [round(i, 3) for i in min_loop_crds] class UvExportFactory: ''' Object used for creating the UvExport objects. Decides which subtype of the UvExport object should be used. ''' def __init__(self, texture_size: Tuple[int, int]): self.blend_to_mc_converter = CoordinatesConverter( np.array([[0, 1], [1, 0]]), np.array([[0, texture_size[0]], [0, texture_size[1]]]) ) self.mc_to_blend_converter = CoordinatesConverter( np.array([[0, texture_size[0]], [0, texture_size[1]]]), np.array([[0, 1], [1, 0]]) ) def get_uv_export( self, mcobj: McblendObject, cube_size: np.ndarray) -> UvExport: ''' Creates UvExport object for given McblendObject. :param mcobj: Object that needs UvExport. 
:param cube_size: Size of the cube expressed in Minecraft coordinates system. ''' layer: Optional[bpy.types.MeshUVLoopLayer] = ( mcobj.obj_data.uv_layers.active) if layer is None: # Make sure that UV exists return UvExport() try: polygons = mcobj.cube_polygons() except NoCubePolygonsException: return UvExport() try: return StandardCubeUvExport( polygons, layer, cube_size, self.blend_to_mc_converter) except NotAStandardUvException: return PerFaceUvExport(polygons, layer, self.blend_to_mc_converter) <file_sep>''' Functions and objects related to importing Minecraft models to Blender. ''' from __future__ import annotations import math from typing import Dict, List, Optional, Any, Tuple, Set import numpy as np import bpy_types import mathutils import bpy from .common import ( MINECRAFT_SCALE_FACTOR, CubePolygons, CubePolygon) from .uv import CoordinatesConverter from .exception import FileIsNotAModelException, ImportingNotImplementedError def _assert(expr: bool, msg: str = ''): '''Used in this module to raise exceptions based on condition.''' if not expr: raise FileIsNotAModelException(msg) def _assert_is_vector( name: str, obj: Any, length: int, types: Tuple, json_path: List ) -> None: ''' Asserts that object is an array of specific length with specific type of items. 
''' _assert(isinstance(obj, list), f'{json_path}::{name} is not a list') _assert( len(obj) == length, f'{json_path}::{name} has invalid length {len(obj)} != {length}') _assert( all([isinstance(i, types) for i in obj]), f'{json_path}::{name} is not instance of List[{types}]') def _assert_has_required_keys( what: str, has_keys: Set, required_keys: Set, json_path: List): '''Asserts that object has required keys.''' missing_keys = required_keys - has_keys if len(missing_keys) != 0: raise FileIsNotAModelException( f'{json_path}::{what} is missing properties: {missing_keys}') def _assert_has_accepted_keys_only( what: str, has_keys: Set, accepted_keys: Set, json_path: List): '''Asserts that object has only keys from accepted set.''' additional_keys = has_keys - accepted_keys if len(additional_keys) != 0: raise FileIsNotAModelException( f'{json_path}::{what} has unexpected properties: {additional_keys}') def _assert_is_type( name: str, obj: Any, types: Tuple, json_path: List): '''Asserts that object is instance of specific type''' if not isinstance(obj, types): raise FileIsNotAModelException( f'{json_path}::{name} is not an instance of {types}') def pick_version_parser(parsers: Tuple[str, ...], version: str): ''' Picks the earliest possible format_version greater or equal to the known version ot of list of parser names for different format versions. :param parsers: The list of format_versions that identify the parser to use for parsing an object. 
''' def to_tuple(version: str) -> Tuple[int]: try: return tuple( # type: ignore map(int, version.split('.'))) except Exception as e: raise FileIsNotAModelException( f'Unable to parse format version number: {version}') from e t_parsers = [to_tuple(parser) for parser in parsers] t_parsers.sort(reverse=True) t_version = to_tuple(version) best_choice = None for t_parser in t_parsers: if t_parser <= t_version: best_choice = t_parser break if best_choice is None: raise FileIsNotAModelException( f'Unsupported format version: {version}') return '.'.join([str(i) for i in best_choice]) class ModelLoader: ''' Interface loads model from a JSON dict with Minecraft model. Fills missing, optional data with default values. :param data: The JSON dict with models file. :param geometry_name: Optional - the name of the geometry to load. ''' def __init__(self, data: Dict, geometry_name: str = ""): self.data = data self.format_version = self._load_format_version(data) geometry, geometry_path = self._load_geometry( geometry_name, self.data) self.description: Dict = self._load_description( geometry, geometry_path) self.bones: List = self._load_bones( geometry['bones'], geometry_path + ['bones']) def _load_format_version(self, data: Dict) -> str: ''' Returns the version of the model from JSON file loaded into data. :param data: JSON dict with model file. ''' # pylint: disable=no-self-use _assert_has_required_keys( 'model file', set(data.keys()), {'format_version'}, []) parser_version = pick_version_parser( ('1.12.0', '1.8.0'), data['format_version']) if parser_version == '1.12.0': _assert_has_required_keys( 'model file', set(data.keys()), {'minecraft:geometry', 'format_version'}, []) _assert_has_accepted_keys_only( 'model file', set(data.keys()), {'minecraft:geometry', 'format_version', 'cape'}, []) if 'cape' in data.keys(): raise ImportingNotImplementedError('cape', []) return data['format_version'] if parser_version == '1.8.0': # All geometries must start with geometry. 
            for k in data.keys():
                # key must be string because its from json
                # NOTE(review): the error message below is missing a closing
                # ')' after "geometry." — runtime string, left unchanged here.
                _assert(
                    (
                        k.startswith('geometry.') or
                        k in ['debug', 'format_version']
                    ),
                    f'{[]}::{k} is invalid geometry name (it should start '
                    'with "geometry."'
                )
            if 'debug' in data.keys():
                raise ImportingNotImplementedError('debug', [])
            return data['format_version']
        raise FileIsNotAModelException('Unsupported format version')

    def _load_geometry(
            self, geometry_name: str, data: Any) -> Tuple[Dict, List]:
        '''
        Finds and returns geometry with specific name from list of geometries
        from JSON dict with models. Returns the geometry dict with all of the
        missing default values added and the JSON path to the geometry.

        An empty geometry_name matches the first geometry found.

        :param geometry_name: The name of geometry
        :param data: Root object of the JSON.
        '''
        parser_version = pick_version_parser(
            ('1.12.0', '1.8.0'), self.format_version)
        if parser_version == '1.12.0':
            # 1.12.0 format: geometries are a list under "minecraft:geometry"
            geometries = data['minecraft:geometry']
            path: List = ['minecraft:geometry']
            _assert_is_type('geometries', geometries, (list,), path)
            for i, geometry in enumerate(geometries):
                path = ['minecraft:geometry', i]
                _assert_is_type('geometry', geometry, (dict,), path)
                _assert_has_required_keys(
                    'geometry', set(geometry.keys()),
                    {'description', 'bones'}, path)
                _assert_has_accepted_keys_only(
                    'geometry', set(geometry.keys()),
                    {'description', 'bones'}, path)
                desc = geometry['description']
                if 'identifier' not in desc:
                    raise FileIsNotAModelException(
                        f'{path}::description is missing identifier')
                identifier = desc['identifier']
                # '' matches anything (first geometry wins)
                if geometry_name in (identifier, ''):
                    return geometry, path
            raise ValueError(
                f'Unable to find geometry called geometry.{geometry_name}')
        if parser_version == '1.8.0':
            # 1.8.0 format: geometries are top-level keys of the root object
            geometries = data
            path = []
            _assert_is_type('geometries', geometries, (dict,), path)
            for k, geometry in geometries.items():
                # skip the non-geometry root keys
                if k in ['format_version', 'debug']:
                    continue
                path = [k]
                _assert_is_type('geometry', geometry, (dict,), path)
                _assert_has_accepted_keys_only(
                    'geometry', set(geometry.keys()),
                    {
                        "debug", "visible_bounds_width",
                        "visible_bounds_height", "visible_bounds_offset",
                        "texturewidth", "textureheight", "cape", "bones"
                    },
                    path)
                # in 1.8.0 the geometry key itself is the identifier
                identifier = k
                if geometry_name in (identifier, ''):
                    return geometry, path
            raise ValueError(
                f'Unable to find geometry called geometry.{geometry_name}')
        raise FileIsNotAModelException(
            f'Unsupported format version: {self.format_version}')

    def _load_description(self, geometry: Any, geometry_path: List) -> Dict:
        '''
        Returns the description of the geometry.

        :param geometry: The geometry with description.
        :param geometry_path: The JSON path to the geometry (used for error
            messages)
        '''
        # Defaults used when the model omits a property
        result = {
            "texture_width" : 64,
            "texture_height" : 64,
            "visible_bounds_offset" : [0, 0, 0],
            "visible_bounds_width" : 1,
            "visible_bounds_height": 1
        }
        parser_version = pick_version_parser(
            ('1.12.0', '1.8.0'), self.format_version)
        if parser_version == '1.12.0':
            desc = geometry['description']
            path = geometry_path + ['description']
            _assert_has_required_keys(
                'description', set(desc.keys()), {'identifier'}, path)
            acceptable_keys = {
                'identifier', 'texture_width', 'texture_height',
                'visible_bounds_offset', 'visible_bounds_width',
                'visible_bounds_height'}
            _assert_has_accepted_keys_only(
                'description', set(desc.keys()), acceptable_keys, path)
            _assert_is_type(
                'identifier', desc['identifier'], (str,),
                geometry_path + ['identifier'])
            result['identifier'] = desc['identifier']
            if 'texture_width' in desc:
                _assert_is_type(
                    'texture_width', desc['texture_width'], (int, float),
                    geometry_path + ['texture_width'])
                result['texture_width'] = int(desc['texture_width'])
            if 'texture_height' in desc:
                _assert_is_type(
                    'texture_height', desc['texture_height'], (int, float),
                    geometry_path + ['texture_height'])
                result['texture_height'] = int(desc['texture_height'])
            if 'visible_bounds_offset' in desc:
                _assert_is_vector(
                    'visible_bounds_offset', desc['visible_bounds_offset'], 3,
                    (int, float), geometry_path + ['visible_bounds_offset'])
                result['visible_bounds_offset'] = desc['visible_bounds_offset']
            if 'visible_bounds_width' in desc:
                _assert_is_type(
                    'visible_bounds_width', desc['visible_bounds_width'],
                    (int, float), geometry_path + ['visible_bounds_width'])
                result['visible_bounds_width'] = desc['visible_bounds_width']
            if 'visible_bounds_height' in desc:
                _assert_is_type(
                    'visible_bounds_height', desc['visible_bounds_height'],
                    (int, float), geometry_path + ['visible_bounds_height'])
                result['visible_bounds_height'] = desc['visible_bounds_height']
            return result
        if parser_version == '1.8.0':
            # 1.8.0 has no nested "description" object; the geometry object
            # itself carries the description properties
            desc = geometry
            path = geometry_path
            acceptable_keys = {
                "debug", "visible_bounds_width", "visible_bounds_height",
                "visible_bounds_offset", "texturewidth", "textureheight",
                "cape", "bones"}
            _assert_has_accepted_keys_only(
                'geometry', set(desc.keys()), acceptable_keys, path)
            # the last path element is the geometry key == identifier
            _assert_is_type(
                'identifier', path[-1], (str,),
                geometry_path + ['identifier'])
            result['identifier'] = path[-1]
            if 'debug' in desc:
                _assert_is_type(
                    'debug', desc['debug'], (bool,),
                    geometry_path + ['debug'])
                raise ImportingNotImplementedError('debug', path + ['debug'])
            if 'texturewidth' in desc:
                _assert_is_type(
                    'texturewidth', desc['texturewidth'], (int, float),
                    geometry_path + ['texturewidth'])
                # result key is texture_width, JSON key is texturewidth
                # (1.8.0 spelling) - not a bug!!!
                result['texture_width'] = int(desc['texturewidth'])
            if 'textureheight' in desc:
                _assert_is_type(
                    'textureheight', desc['textureheight'], (int, float),
                    geometry_path + ['textureheight'])
                # texture_height not textureheight (not a bug!!!)
                result['texture_height'] = int(desc['textureheight'])
            if 'visible_bounds_offset' in desc:
                _assert_is_vector(
                    'visible_bounds_offset', desc['visible_bounds_offset'], 3,
                    (int, float), geometry_path + ['visible_bounds_offset'])
                result['visible_bounds_offset'] = desc['visible_bounds_offset']
            if 'visible_bounds_width' in desc:
                _assert_is_type(
                    'visible_bounds_width', desc['visible_bounds_width'],
                    (int, float), geometry_path + ['visible_bounds_width'])
                result['visible_bounds_width'] = desc['visible_bounds_width']
            if 'visible_bounds_height' in desc:
                _assert_is_type(
                    'visible_bounds_height', desc['visible_bounds_height'],
                    (int, float), geometry_path + ['visible_bounds_height'])
                result['visible_bounds_height'] = desc['visible_bounds_height']
            return result
        raise FileIsNotAModelException('Unsupported format version')

    def _load_bones(
            self, bones: Any, bones_path: List) -> List[Dict[str, Any]]:
        '''
        Returns the bones from a list of bones, adds missing default values.

        :param bones: List of bones.
        :param bones_path: Path to the bones list (used for error messages).
        '''
        result: List = []
        parser_version = pick_version_parser(
            ('1.12.0', '1.8.0'), self.format_version)
        # both formats use the same list-of-bones layout
        if parser_version in ('1.12.0', '1.8.0'):
            _assert_is_type('bones property', bones, (list,), bones_path)
            for i, bone in enumerate(bones):
                bone_path = bones_path + [i]
                result.append(self._load_bone(bone, bone_path))
            return result
        raise FileIsNotAModelException('Unsupported format version')

    def _load_bone(self, bone: Any, bone_path: List) -> Dict[str, Any]:
        '''
        Returns a bone, adds all of the missing default values of the
        properties.

        :param bone: Part of the json file that has the information about the
            bone.
        :param bone_path: Path to the bone (used for error messages).
        '''
        # Defaults for every optional bone property
        result: Dict[str, Any] = {
            "parent": None,  # str
            "pivot" : [0, 0, 0],  # List[float] len=3
            "rotation" : [0, 0, 0],  # List[float] len=3
            "mirror" : False,  # bool
            "inflate": 0.0,  # float
            "debug": False,  # bool
            "render_group_id": 0,  # int >= 0
            "cubes" : [],  # List[Dict]
            "locators": {},  # Dict[...]
            "poly_mesh": None,  # Dict
            "texture_meshes": []  # List[Dict]
        }
        parser_version = pick_version_parser(
            ('1.12.0', '1.8.0'), self.format_version)
        if parser_version == '1.12.0':
            _assert_is_type('bone', bone, (dict,), bone_path)
            _assert_has_required_keys(
                'bone', set(bone.keys()), {'name'}, bone_path)
            acceptable_keys = {
                'name', 'parent', 'pivot', 'rotation', 'mirror', 'inflate',
                'debug', 'render_group_id', 'cubes', 'locators', 'poly_mesh',
                'texture_meshes'}
            _assert_has_accepted_keys_only(
                'bone', set(bone.keys()), acceptable_keys, bone_path)
            if 'name' in bone:
                _assert_is_type(
                    'name', bone['name'], (str,), bone_path + ['name'])
                result['name'] = bone['name']
            if 'parent' in bone:
                _assert_is_type(
                    'parent', bone['parent'], (str,), bone_path + ['parent'])
                result['parent'] = bone['parent']
            if 'pivot' in bone:
                _assert_is_vector(
                    'pivot', bone['pivot'], 3, (int, float),
                    bone_path + ['pivot'])
                result['pivot'] = bone['pivot']
            if 'rotation' in bone:
                _assert_is_vector(
                    'rotation', bone['rotation'], 3, (int, float),
                    bone_path + ['rotation'])
                result['rotation'] = bone['rotation']
            if 'mirror' in bone:
                _assert_is_type(
                    'mirror', bone['mirror'], (bool,), bone_path + ['mirror'])
                result['mirror'] = bone['mirror']
            if 'inflate' in bone:
                _assert_is_type(
                    'inflate', bone['inflate'], (float, int),
                    bone_path + ['inflate'])
                result['inflate'] = bone['inflate']
            if 'debug' in bone:
                # validated but importing it is not supported
                _assert_is_type(
                    'debug', bone['debug'], (bool,), bone_path + ['debug'])
                raise ImportingNotImplementedError(
                    'debug', bone_path + ['debug'])
            if 'render_group_id' in bone:
                # validated but importing it is not supported
                _assert_is_type(
                    'render_group_id', bone['render_group_id'], (int, float),
                    bone_path + ['render_group_id'])  # int >= 0
                raise ImportingNotImplementedError(
                    'render_group_id', bone_path + ['render_group_id'])
            if 'cubes' in bone:
                # default mirror for cube is the bones mirror property
                result['cubes'] = self._load_cubes(
                    bone['cubes'], bone_path + ['cubes'], result['mirror'],
                    result['inflate'])
            if 'locators' in bone:
                result['locators'] = self._load_locators(
                    bone['locators'], bone_path + ['locators'])
            if 'poly_mesh' in bone:
                result['poly_mesh'] = self._load_poly_mesh(
                    bone['poly_mesh'], bone_path + ['poly_mesh'])
            if 'texture_meshes' in bone:  # type: list
                raise ImportingNotImplementedError(
                    'texture_meshes', bone_path + ['texture_meshes'])
            return result
        if parser_version == '1.8.0':
            _assert_is_type('bone', bone, (dict,), bone_path)
            _assert_has_required_keys(
                'bone', set(bone.keys()), {'name'}, bone_path)
            # 1.8.0 adds reset/neverRender/bind_pose_rotation keys
            acceptable_keys = {
                'name', 'reset', 'neverRender', 'parent', 'pivot',
                'rotation', 'bind_pose_rotation', 'mirror', 'inflate',
                'debug', 'render_group_id', 'cubes', 'locators', 'poly_mesh',
                'texture_meshes'}
            _assert_has_accepted_keys_only(
                'bone', set(bone.keys()), acceptable_keys, bone_path)
            if 'name' in bone:
                _assert_is_type(
                    'name', bone['name'], (str,), bone_path + ['name'])
                result['name'] = bone['name']
            if 'reset' in bone:
                # validated but importing it is not supported
                _assert_is_type(
                    'reset', bone['reset'], (bool,), bone_path + ['reset'])
                raise ImportingNotImplementedError(
                    'reset', bone_path + ['reset'])
            if 'neverRender' in bone:
                # validated but importing it is not supported
                _assert_is_type(
                    'neverRender', bone['neverRender'], (bool,),
                    bone_path + ['neverRender'])
                raise ImportingNotImplementedError(
                    'neverRender', bone_path + ['neverRender'])
            if 'parent' in bone:
                _assert_is_type(
                    'parent', bone['parent'], (str,), bone_path + ['parent'])
                result['parent'] = bone['parent']
            if 'pivot' in bone:
                _assert_is_vector(
                    'pivot', bone['pivot'], 3, (int, float),
                    bone_path + ['pivot'])
                result['pivot'] = bone['pivot']
            if 'rotation' in bone:
                _assert_is_vector(
                    'rotation', bone['rotation'], 3, (int, float),
                    bone_path + ['rotation'])
                result['rotation'] = bone['rotation']
            if 'bind_pose_rotation' in bone:
                # validated but importing it is not supported
                _assert_is_vector(
                    'bind_pose_rotation', bone['bind_pose_rotation'], 3,
                    (int, float), bone_path + ['bind_pose_rotation'])
                raise ImportingNotImplementedError(
                    'bind_pose_rotation', bone_path + ['bind_pose_rotation'])
            if 'mirror' in bone:
                _assert_is_type(
                    'mirror', bone['mirror'], (bool,), bone_path + ['mirror'])
                result['mirror'] = bone['mirror']
            if 'inflate' in bone:
                _assert_is_type(
                    'inflate', bone['inflate'], (float, int),
                    bone_path + ['inflate'])
                result['inflate'] = bone['inflate']
            if 'debug' in bone:
                _assert_is_type(
                    'debug', bone['debug'], (bool,), bone_path + ['debug'])
                raise ImportingNotImplementedError(
                    'debug', bone_path + ['debug'])
            if 'render_group_id' in bone:
                _assert_is_type(
                    'render_group_id', bone['render_group_id'], (int, float),
                    bone_path + ['render_group_id'])  # int >= 0
                raise ImportingNotImplementedError(
                    'render_group_id', bone_path + ['render_group_id'])
            if 'cubes' in bone:
                # default mirror for cube is the bones mirror property
                result['cubes'] = self._load_cubes(
                    bone['cubes'], bone_path + ['cubes'], result['mirror'],
                    result['inflate'])
            if 'locators' in bone:
                result['locators'] = self._load_locators(
                    bone['locators'], bone_path + ['locators'])
            if 'poly_mesh' in bone:
                result['poly_mesh'] = self._load_poly_mesh(
                    bone['poly_mesh'], bone_path + ['poly_mesh'])
            if 'texture_meshes' in bone:  # type: list
                raise ImportingNotImplementedError(
                    'texture_meshes', bone_path + ['texture_meshes'])
            return result
        raise FileIsNotAModelException('Unsupported format version')

    def _load_cubes(
            self, cubes: Any, cubes_path: List[Any],
            default_mirror: bool,
            default_inflate: float) -> List[Dict[str, Any]]:
        '''
        Returns the cubes from the list of cubes, add missing default values.

        :param cubes: List of cubes.
        :param cubes_path: Path to the cubes list (used for error messages).
        :param default_mirror: Mirror value of a bone that owns this list
            of cubes.
        :param default_inflate: Inflate value of a bone that owns this list
            of cubes.
''' result = [] parser_version = pick_version_parser( ('1.12.0', '1.8.0'), self.format_version) if parser_version in ('1.12.0', '1.8.0'): _assert_is_type('cubes property', cubes, (list,), cubes_path) for i, cube in enumerate(cubes): cube_path = cubes_path + [i] result.append( self._load_cube( cube, cube_path, default_mirror, default_inflate)) return result raise FileIsNotAModelException('Unsupported format version') def _create_default_uv( self, size: Tuple[float, float, float], mirror: bool, uv: Tuple[float, float] = (0.0, 0.0)) -> Dict: ''' Creates default UV dictionary (in per-face UV-mapping format) based on some other properties of a cube. :param size: The size of the cube. :param mirror: The mirror property of the cube. :param uv: Optional - the UV property of the cube (if the cube uses the standard Minecraft UV-mapping format). ''' # pylint: disable=no-self-use width, height, depth = (int(i) for i in size) def _face(size: Tuple[float, float], uv: Tuple[float, float]): return {"uv_size": size, "uv": uv, "material_instance": ""} face1 = _face((depth, height), (uv[0], uv[1] + depth)) face2 = _face((width, height), (uv[0]+depth, uv[1] + depth)) face3 = _face((depth, height), (uv[0]+depth + width, uv[1] + depth)) face4 = _face((width, height), (uv[0]+2*depth + width, uv[1] + depth)) face5 = _face((width, depth), (uv[0]+depth, uv[1])) face6 = _face((width, depth), (uv[0]+depth + width, uv[1])) if mirror: face_west, face_east = face1, face3 else: face_east, face_west = face1, face3 # No mirror: | # Mirror: # 5 6 | # 5 6 # 1 2 3 4 | # 3 2 1 4 result: Dict = { "north": face2, "south": face4, "east": face_east, "west": face_west, "up": face5, "down": face6} return result def _load_cube( self, cube: Any, cube_path: List, default_mirror: bool, default_inflate: float) -> Dict[str, Any]: ''' Returns a cube with added all of the missing default values of the properties. :param cube: Part of the JSON dict that has the inforation about the cube. 
:param cube_path: JSON path to the cube (used for error messages). :param default_mirror: Mirror value of a bone that owns this cube. ''' result = { "origin" : (0, 0, 0), # Listfloat] len=3 "size" : (0, 0, 0), # Listfloat] len=3 "rotation" : (0, 0, 0), # Listfloat] len=3 "pivot" : (0, 0, 0), # Listfloat] len=3 "inflate" : default_inflate, # float "mirror" : default_mirror, # mirror # Default UV value is based on the size and mirror of the cube # before return statement "uv": None } parser_version = pick_version_parser( ('1.12.0', '1.8.0'), self.format_version) if parser_version == '1.12.0': _assert_is_type('cube', cube, (dict,), cube_path) # There is no required keys {} is a valid cube acceptable_keys = { "mirror", "inflate", "pivot", "rotation", "origin", "size", "uv"} _assert_has_accepted_keys_only( 'cube', set(cube.keys()), acceptable_keys, cube_path) if 'origin' in cube: _assert_is_vector( 'origin', cube['origin'], 3, (int, float), cube_path + ['origin']) result['origin'] = cube['origin'] if 'size' in cube: _assert_is_vector( 'size', cube['size'], 3, (int, float), cube_path + ['size']) result['size'] = cube['size'] if 'rotation' in cube: _assert_is_vector( 'rotation', cube['rotation'], 3, (int, float), cube_path + ['rotation']) result['rotation'] = cube['rotation'] if 'pivot' in cube: _assert_is_vector( 'pivot', cube['pivot'], 3, (int, float), cube_path + ['pivot']) result['pivot'] = cube['pivot'] if 'inflate' in cube: _assert_is_type( 'inflate', cube['inflate'], (int, float), cube_path + ['inflate']) result['inflate'] = cube['inflate'] if 'mirror' in cube: _assert_is_type( 'mirror', cube['mirror'], (bool,), cube_path + ['mirror']) result['mirror'] = cube['mirror'] if 'uv' in cube: _assert_is_type( 'uv', cube['uv'], (list, dict), cube_path + ['uv']) if isinstance(cube['uv'], dict): result['uv'] = self._load_uv( cube['uv'], cube_path + ['uv'], tuple(result['size']) # type: ignore ) elif isinstance(cube['uv'], list): _assert_is_vector( 'uv', cube['uv'], 2, (int, 
float), cube_path + ['uv']) result['uv'] = self._create_default_uv( tuple(result['size']), # type: ignore result['mirror'], # type: ignore tuple(cube['uv'])) # type: ignore else: raise FileIsNotAModelException( f'{cube_path + ["uv"]}::{"uv"} is not an ' f'instance of {(list, dict)}') # Create default UV based on size and mirror if result['uv'] is None: result['uv'] = result['uv'] = self._create_default_uv( tuple(result['size']), # type: ignore result['mirror']) # type: ignore return result if parser_version == '1.8.0': _assert_is_type('cube', cube, (dict,), cube_path) # There is no required keys {} is a valid cube acceptable_keys = {"origin", "size", "uv", "inflate", "mirror"} _assert_has_accepted_keys_only( 'cube', set(cube.keys()), acceptable_keys, cube_path) if 'origin' in cube: _assert_is_vector( 'origin', cube['origin'], 3, (int, float), cube_path + ['origin']) result['origin'] = cube['origin'] if 'size' in cube: _assert_is_vector( 'size', cube['size'], 3, (int, float), cube_path + ['size']) result['size'] = cube['size'] if 'inflate' in cube: _assert_is_type( 'inflate', cube['inflate'], (int, float), cube_path + ['inflate']) result['inflate'] = cube['inflate'] if 'mirror' in cube: _assert_is_type( 'mirror', cube['mirror'], (bool,), cube_path + ['mirror']) result['mirror'] = cube['mirror'] if 'uv' in cube: _assert_is_type( 'uv', cube['uv'], (list,), cube_path + ['uv']) _assert_is_vector( 'uv', cube['uv'], 2, (int, float), cube_path + ['uv']) result['uv'] = self._create_default_uv( tuple(result['size']), # type: ignore result['mirror'], # type: ignore tuple(cube['uv'])) # type: ignore # Create default UV based on size and mirror if result['uv'] is None: result['uv'] = result['uv'] = self._create_default_uv( tuple(result['size']), # type: ignore result['mirror']) # type: ignore return result raise FileIsNotAModelException('Unsupported format version') def _load_poly_mesh( self, poly_mesh: Any, poly_mesh_path: List) -> Dict[str, Any]: ''' Returns a cube with added 
all of the missing default values of the properties. :param cube: Part of the JSON dict that has the inforation about the cube. :param cube_path: JSON path to the cube (used for error messages). :param default_mirror: Mirror value of a bone that owns this cube. ''' result = { 'normalized_uvs': False, 'positions': [], 'normals': [], 'uvs': [], 'polys': [], # 'tri_list' or 'quad_list" or list with data } parser_version = pick_version_parser( ('1.12.0', '1.8.0'), self.format_version) if parser_version in ['1.12.0', '1.8.0']: _assert_is_type('poly_mesh', poly_mesh, (dict,), poly_mesh_path) # There is no required keys {} is a valid poly_mesh _assert_has_required_keys( 'poly_mesh', set(poly_mesh.keys()), {'polys'}, poly_mesh_path) acceptable_keys = { "normalized_uvs", "positions", "normals", "uvs", "polys"} _assert_has_accepted_keys_only( 'poly_mesh', set(poly_mesh.keys()), acceptable_keys, poly_mesh_path) # Acceptable keys if 'normalized_uvs' in poly_mesh: _assert_is_type( 'normalized_uvs', poly_mesh['normalized_uvs'], (bool,), poly_mesh_path + ['normalized_uvs']) result['normalized_uvs'] = poly_mesh['normalized_uvs'] if 'positions' in poly_mesh: positions = poly_mesh['positions'] positions_path = poly_mesh_path + ['position'] _assert_is_type('positions', positions, (list,), positions_path) for position_id, position in enumerate(positions): _assert_is_vector( 'position', position, 3, (float, int,), positions_path + [position_id]) result['positions'].append(tuple(position)) # type: ignore if 'normals' in poly_mesh: normals = poly_mesh['normals'] normals_path = poly_mesh_path + ['normal'] _assert_is_type('normals', normals, (list,), normals_path) for normal_id, normal in enumerate(normals): _assert_is_vector( 'normal', normal, 3, (float, int,), normals_path + [normal_id]) result['normals'].append(tuple(normal)) # type: ignore if 'uvs' in poly_mesh: uvs = poly_mesh['uvs'] uvs_path = poly_mesh_path + ['uv'] _assert_is_type('uvs', uvs, (list,), uvs_path) for uv_id, uv in 
enumerate(uvs): _assert_is_vector( 'uv', uv, 2, (float, int,), uvs_path + [uv_id]) result['uvs'].append(tuple(uv)) # type: ignore # Required keys _assert_is_type( 'polys', poly_mesh['polys'], (str, list), poly_mesh_path + ['polys']) if isinstance(poly_mesh['polys'], str): result['polys'] = self._create_default_polys( poly_mesh['polys'], result['positions'], # type: ignore result['normals'], # type: ignore result['uvs'], # type: ignore poly_mesh_path) elif isinstance(poly_mesh['polys'], list): polys_path = poly_mesh_path + ['polys'] for poly_id, poly in enumerate(poly_mesh['polys']): curr_result_poly: List[Tuple[int, int, int]] = [] result['polys'].append(curr_result_poly) # type: ignore poly_path = polys_path + [poly_id] _assert_is_type( 'poly', poly, (list,), poly_path) for poly_vertex_id, poly_vertex in enumerate(poly): _assert_is_vector( 'vertex', poly_vertex, 3, (int,), poly_path + [poly_vertex_id]) curr_result_poly.append( tuple(poly_vertex)) # type: ignore else: raise FileIsNotAModelException( f'{poly_mesh_path + ["polys"]}::{"polys"} is not an ' f'instance of {(str, list)}') return result raise FileIsNotAModelException('Unsupported format version') def _create_default_polys( self, grouping_mode: str, positions: List[List[float]], normals: List[List[float]], uvs: List[List[float]], poly_mesh_path: List[str] ) -> List[List[List[int]]]: ''' Creates default "polys" property of a polymesh for "tri_list" or "quad_list" mode. Checks if positions, normals and uv are the same length and can be divided by 3 (for tri_list mode) or 4 (for quad_list mode). Rises an exception if the creating default polys list is impossible with input data. :param grouping_mode: a string with grouping mode. It should be either 'tri_list' or 'quad_list' otherwise an exception is risen. :param positions: list of positions of the vertices. :param normals: list of normals of the loops. :param uvs: list of the uv coordinates of the loops. 
:parma poly_mesh_path: the JSON path to the poly_mesh that contains this polys property. ''' # Get polygon group size (three or four items) if grouping_mode == 'tri_list': group_size = 3 elif grouping_mode == 'quad_list': group_size = 4 else: raise FileIsNotAModelException( f'{poly_mesh_path + ["polys"]}::{"polys"} is not an a list of polys or a ' 'literal string "quad_list" or "tri_list"') # Check if positions, normals and uvs are the same lengts pos_length = len(positions) if not (pos_length == len(normals) == len(uvs)): raise FileIsNotAModelException( f'{poly_mesh_path}::"positions", "normals" and "uvs" are not ' 'the same lengths. They must be the same lengths in "tri_list"' ' and "quad_list" polys grouping mode.') # Check if list length is divisible by the group_size if not (pos_length % group_size == 0): raise FileIsNotAModelException( f'{poly_mesh_path}::"positions" list length must be ' f'divisible by {group_size} in {grouping_mode}.') # Build default polys property in list format result = np.repeat( range(pos_length), 3 ).reshape( -1, group_size, 3 ).to_list() return result def _load_uv( self, uv: Any, uv_path: List, cube_size: Tuple[float, float, float]) -> Dict[str, Any]: ''' Returns UV and adds all of the missing default values of its properties. :param uv: Part of the JSON dict that has the inforation about the uv. :param uv_path: Path to the UV (used for error messages). :param cube_size: Size of the cube which is being mapped (used for getting default UV values). 
        '''
        width, height, depth = cube_size

        def _face(size: Tuple[float, float], uv: Tuple[float, float]):
            # a single per-face UV entry
            return {"uv_size": size, "uv": uv, "material_instance": ""}

        result = {
            # Faces outside of the texture are invisible and should be
            # skipped on export
            "north": _face((0, 0), (0, -1)),
            "south": _face((0, 0), (0, -1)),
            "east": _face((0, 0), (0, -1)),
            "west": _face((0, 0), (0, -1)),
            "up": _face((0, 0), (0, -1)),
            "down": _face((0, 0), (0, -1))
        }
        # per-face UV-mapping exists only in the 1.12.0 format
        parser_version = pick_version_parser(
            ('1.12.0',), self.format_version)
        if parser_version == '1.12.0':
            _assert_is_type('uv', uv, (dict,), uv_path)
            # There is no required keys {} is a valid UV
            acceptable_keys = {"north", "south", "east", "west", "up", "down"}
            _assert_has_accepted_keys_only(
                'uv', set(uv.keys()), acceptable_keys, uv_path)
            # default face sizes follow the cube dimensions of each face
            if "north" in uv:
                _assert_is_type(
                    'north', uv['north'], (dict,), uv_path + ['north'])
                result["north"] = self._load_uv_face(
                    uv["north"], uv_path + ["north"], (depth, height))
            if "south" in uv:
                _assert_is_type(
                    'south', uv['south'], (dict,), uv_path + ['south'])
                result["south"] = self._load_uv_face(
                    uv["south"], uv_path + ["south"], (width, height))
            if "east" in uv:
                _assert_is_type(
                    'east', uv['east'], (dict,), uv_path + ['east'])
                result["east"] = self._load_uv_face(
                    uv["east"], uv_path + ["east"], (depth, height))
            if "west" in uv:
                _assert_is_type(
                    'west', uv['west'], (dict,), uv_path + ['west'])
                result["west"] = self._load_uv_face(
                    uv["west"], uv_path + ["west"], (width, height))
            if "up" in uv:
                _assert_is_type(
                    'up', uv['up'], (dict,), uv_path + ['up'])
                result["up"] = self._load_uv_face(
                    uv["up"], uv_path + ["up"], (width, depth))
            if "down" in uv:
                _assert_is_type(
                    'down', uv['down'], (dict,), uv_path + ['down'])
                result["down"] = self._load_uv_face(
                    uv["down"], uv_path + ["down"], (width, depth))
            return result
        raise FileIsNotAModelException('Unsupported format version')

    def _load_uv_face(
            self, uv_face: Any, uv_face_path: List,
            default_size: Tuple[float, float]) -> Dict[str, Any]:
        '''
        Returns UV
        and adds all of the missing default values of its properties.

        :param uv_face: Part of the JSON dict that has the information about
            the uv face.
        :param uv_face_path: Path to the uv face (used for error messages).
        :param default_size: Default size of the UV face.
        '''
        result = {
            "uv_size": default_size,
            "uv": [0, 0],
            "material_instance": ""
        }
        parser_version = pick_version_parser(
            ('1.12.0',), self.format_version)
        if parser_version == '1.12.0':
            _assert_is_type('uv_face', uv_face, (dict,), uv_face_path)
            _assert_has_required_keys(
                'uv', set(uv_face.keys()), {'uv'}, uv_face_path)
            _assert_has_accepted_keys_only(
                'uv_face', set(uv_face.keys()),
                {"uv", "uv_size", "material_instance"}, uv_face_path)
            _assert_is_vector(
                'uv', uv_face['uv'], 2, (int, float), uv_face_path + ['uv'])
            result["uv"] = uv_face["uv"]
            if "uv_size" in uv_face:
                _assert_is_vector(
                    'uv_size', uv_face['uv_size'], 2, (int, float),
                    uv_face_path + ['uv_size'])
                result["uv_size"] = uv_face["uv_size"]
            if "material_instance" in uv_face:
                # validated but importing it is not supported
                _assert_is_type(
                    'material_instance', uv_face['material_instance'], (str,),
                    uv_face_path + ['material_instance'])
                raise ImportingNotImplementedError(
                    'material_instance', uv_face_path + ['material_instance'])
            return result
        raise FileIsNotAModelException('Unsupported format version')

    def _load_locators(
            self, locators: Any, locators_path: List) -> Dict[str, Any]:
        '''
        Returns the locators from the list of locators with added missing
        default values.

        :param locators: List of the locators.
        :param locators_path: Path to the locators list (used for error
            messages)
        '''
        result = {}
        parser_version = pick_version_parser(
            ('1.12.0', '1.8.0'), self.format_version)
        if parser_version in ['1.12.0', '1.8.0']:
            # locators are a mapping: locator name -> locator data
            _assert_is_type(
                'locators property', locators, (dict,), locators_path)
            for i, locator in locators.items():
                locator_path = locators_path + [i]
                result[i] = self._load_locator(locator, locator_path)
            return result
        raise FileIsNotAModelException('Unsupported format version')

    def _load_locator(self, locator: Any, locator_path: List) -> Any:
        '''
        Returns the locator with added missing default values.

        :param locator: The locator
        :param locator_path: Path to the locator
        '''
        parser_version = pick_version_parser(
            ('1.12.0', '1.8.0'), self.format_version)
        if parser_version == '1.12.0':
            # 1.12.0 allows a plain [x, y, z] list or a dict form
            # (the dict form is not supported by the importer)
            if isinstance(locator, list):
                _assert_is_vector(
                    'locator', locator, 3, (int, float), locator_path)
                return locator
            if isinstance(locator, dict):
                raise ImportingNotImplementedError('locator', locator_path)
            raise FileIsNotAModelException(
                f'{locator_path + ["locator"]}::{"locator"} is not an '
                f'instance of {(list, dict)}')
        if parser_version == '1.8.0':
            # 1.8.0 allows only the [x, y, z] list form
            _assert_is_type('locator', locator, (list,), locator_path)
            _assert_is_vector(
                'locator', locator, 3, (int, float), locator_path)
            return locator
        raise FileIsNotAModelException('Unsupported format version')


class ImportLocator:
    '''
    Represents Minecraft locator during import operation.

    :param name: Name of the locator.
    :param position: The position of the locator.
    '''
    def __init__(self, name: str, position: Tuple[float, float, float]):
        self.name = name
        self.position = position

        # the Blender empty created for this locator (set during build)
        self.blend_empty: Optional[bpy.types.Object] = None


class ImportCube:
    '''
    Represents minecraft cube during import operation.

    :param data: The part of the Minecraft model JSON dict that represents
        this cube.
    '''
    def __init__(
            self, data: Dict):
        '''
        Creates ImportCube object created from a dictionary (part of the
        JSON) file in the model.
        # Arguments:
        - `data: Dict` - the part of the Minecraft model JSON file that
          represents the cube.
        '''
        # the Blender mesh object created for this cube (set during build)
        self.blend_cube: Optional[bpy.types.Object] = None

        self.uv: Dict = data['uv']
        self.mirror: bool = data['mirror']
        # inflate is a float value (annotation corrected from bool)
        self.inflate: float = data['inflate']
        self.origin: Tuple[float, float, float] = tuple(  # type: ignore
            data['origin'])
        self.pivot: Tuple[float, float, float] = tuple(  # type: ignore
            data['pivot'])
        self.size: Tuple[float, float, float] = tuple(  # type: ignore
            data['size'])
        self.rotation: Tuple[float, float, float] = tuple(  # type: ignore
            data['rotation'])


class ImportPolyMesh:
    '''
    Represents Minecraft poly_mesh during import operation.

    :param data: The part of the Minecraft model JSON dict that represents
        this poly_mesh.
    '''
    def __init__(
            self, data: Dict):
        '''
        Creates ImportPolyMesh object created from a dictionary (part of the
        JSON) file in the model.

        :param data: The part of the Minecraft model JSON file that
            represents the poly_mesh.
        '''
        # the Blender mesh object created for this poly_mesh (set during
        # build)
        self.blend_object: Optional[bpy.types.Object] = None

        self.normalized_uvs: bool = data['normalized_uvs']
        self.positions: List[Tuple[float, float, float]] = data['positions']
        self.normals: List[Tuple[float, float, float]] = data['normals']
        self.uvs: List[Tuple[float, float]] = data['uvs']
        self.polys: List[List[Tuple[int, int, int]]] = data['polys']

    def unpack_data(self):
        '''
        Unpacks the data about polymesh to a format more useful in blender.
        The data is not converted to minecraft format.
        '''
        # Mapping of Minecraft poly_mesh data onto Blender mesh data:
        # positions -> vertices
        # polys -> [loops]
        #     vertex ID
        #     loop normal ID
        #     loop uv ID
        # normals -> normals (coordinates)
        # uvs -> uvs (coordinates)

        # vertex IDs to create polygons
        blender_polygons: List[List[int]] = []
        # List of vectors with normals
        blender_normals: List[Tuple[float, float, float]] = []
        # List of vectors with UVs
        blender_uvs: List[Tuple[float, float]] = []

        # TODO - this function or earlier data processing should make sure
        # the indices doesn't go out of bounds
        for poly in self.polys:
            curr_polygon: List[int] = []
            for vertex_id, normal_id, uv_id in poly:
                curr_polygon.append(vertex_id)
                # normals and uvs are per-loop, collected in loop order
                blender_normals.append(self.normals[normal_id])
                blender_uvs.append(self.uvs[uv_id])
            blender_polygons.append(curr_polygon)
        return blender_polygons, self.positions, blender_normals, blender_uvs


class ImportBone:
    '''
    Represents Minecraft bone during import operation.

    :param data: The part of the Minecraft model JSON dict that represents
        the bone.
    '''
    def __init__(self, data: Dict):
        # the Blender empty created for this bone (set during build)
        self.blend_empty: Optional[bpy.types.Object] = None

        # Locators
        locators: List[ImportLocator] = []
        for k, v in data['locators'].items():
            locators.append(ImportLocator(k, tuple(v)))  # type: ignore
        # Cubes
        import_cubes: List[ImportCube] = []
        for cube in data['cubes']:
            import_cubes.append(ImportCube(cube))

        self.name: str = data['name']
        self.parent: str = data['parent']
        self.cubes = import_cubes
        self.poly_mesh: Optional[ImportPolyMesh] = None
        if data['poly_mesh'] is not None:
            self.poly_mesh = ImportPolyMesh(data['poly_mesh'])
        self.locators = locators
        self.pivot: Tuple[float, float, float] = tuple(  # type: ignore
            data['pivot'])
        self.rotation: Tuple[float, float, float] = tuple(  # type: ignore
            data['rotation'])
        self.mirror = data['mirror']


class ImportGeometry:
    '''
    Represents whole Minecraft geometry during import operation.

    :param loader: Loader object with all of the required model properties.
    '''
    def __init__(self, loader: ModelLoader):
        # Set the values
        self.identifier = loader.description['identifier']
        self.texture_width = int(loader.description['texture_width'])
        self.texture_height = int(loader.description['texture_height'])
        self.visible_bounds_offset = loader.description[
            'visible_bounds_offset']
        self.visible_bounds_width = loader.description['visible_bounds_width']
        self.visible_bounds_height = loader.description[
            'visible_bounds_height']
        self.bones: Dict[str, ImportBone] = {}
        # Converter between Minecraft texture pixels and Blender UV space
        # (Blender UVs are normalized and have a flipped vertical axis)
        self.uv_converter = CoordinatesConverter(
            np.array([[0, self.texture_width], [0, self.texture_height]]),
            np.array([[0, 1], [1, 0]])
        )

        # Read bones
        for bone in loader.bones:
            import_bone = ImportBone(bone)
            self.bones[import_bone.name] = import_bone

    def build_with_empties(self, context: bpy_types.Context):
        '''
        Builds the geometry in Blender. Uses empties to represent Minecraft
        bones.

        :param context: The context of running the operator.
        '''
        # Create objects - and set their pivots
        for bone in self.bones.values():
            # 1. Spawn bone (empty)
            bpy.ops.object.empty_add(
                type='SPHERE', location=(0, 0, 0), radius=0.2)
            bone_obj: bpy.types.Object
            bone_obj = bone.blend_empty = context.object
            _mc_pivot(bone_obj, bone.pivot)  # 2. Apply translation
            bone_obj.name = bone.name  # 3. Apply custom properties
            bone_obj.nusiq_mcblend_object_properties.is_bone = True
            for cube in bone.cubes:
                cube_obj: bpy.types.Object
                # 1. Spawn cube
                bpy.ops.mesh.primitive_cube_add(
                    size=1, enter_editmode=False, location=(0, 0, 0)
                )
                cube_obj = cube.blend_cube = context.object

                # 2. Set uv
                # warning! Moving this code below cube transformation would
                # break it because bound_box is not getting updated properly
                # before the end of running of the operator.
                cube_obj.nusiq_mcblend_object_properties.mirror = cube.mirror
                _set_uv(
                    self.uv_converter,
                    CubePolygons.build(cube_obj, cube.mirror),
                    cube.uv, cube_obj.data.uv_layers.active)

                # 3. Set size & inflate
                cube.blend_cube.nusiq_mcblend_object_properties.inflate = (
                    cube.inflate)
                _mc_set_size(cube_obj, cube.size, inflate=cube.inflate)
                _mc_pivot(cube_obj, cube.pivot)  # 4. Move pivot
                # 5. Apply translation
                _mc_translate(cube_obj, cube.origin, cube.size, cube.pivot)
            if bone.poly_mesh is not None:
                # 1. Unpack the data to format suitable for creating Blender
                # mesh
                blender_polygons: List[List[int]] = []
                blender_normals: List[mathutils.Vector] = []
                blender_uvs: List[Tuple[float, float]] = []
                blender_vertices: List[Tuple[float, float, float]] = []
                for vertex in bone.poly_mesh.positions:
                    # Minecraft (x, y, z) -> Blender (x, z, y), scaled
                    blender_vertices.append((
                        vertex[0] / MINECRAFT_SCALE_FACTOR,
                        vertex[2] / MINECRAFT_SCALE_FACTOR,
                        vertex[1] / MINECRAFT_SCALE_FACTOR))
                for poly in bone.poly_mesh.polys:
                    curr_polygon: List[int] = []
                    for vertex_id, normal_id, uv_id in poly:
                        if vertex_id in curr_polygon:
                            # vertex can appear only once per polygon. The
                            # exporter sometimes adds vertex twice to exported
                            # meshes because Minecraft can't handle triangles
                            # properly. A polygon that uses same vertex twice
                            # won't work in Blender.
                            continue
                        curr_polygon.append(vertex_id)
                        curr_normal = bone.poly_mesh.normals[normal_id]
                        blender_normals.append(
                            mathutils.Vector((
                                curr_normal[0], curr_normal[2],
                                curr_normal[1])
                            ).normalized()
                        )
                        blender_uvs.append(bone.poly_mesh.uvs[uv_id])
                    blender_polygons.append(curr_polygon)
                # 2. Create mesh
                mesh = bpy.data.meshes.new(name='poly_mesh')
                mesh.from_pydata(blender_vertices, [], blender_polygons)
                # Mesh.validate() returns True when invalid data was found
                if not mesh.validate():  # Valid geometry
                    # 3. Create an object and connect mesh to it
                    poly_mesh_obj = bpy.data.objects.new('poly_mesh', mesh)
                    context.collection.objects.link(poly_mesh_obj)
                    bone.poly_mesh.blend_object = poly_mesh_obj
                    # 4. Set mesh normals and UVs
                    mesh.create_normals_split()
                    mesh.use_auto_smooth = True
                    mesh.normals_split_custom_set(blender_normals)
                    if mesh.uv_layers.active is None:
                        mesh.uv_layers.new()
                    uv_layer = mesh.uv_layers.active.data  # type: ignore
                    for i, uv in enumerate(blender_uvs):
                        uv_layer[i].uv = uv
                else:
                    del mesh
                    raise FileIsNotAModelException(
                        'Invalid poly_mesh geometry!')
            for locator in bone.locators:
                # 1. Spawn locator (empty)
                locator_obj: bpy.types.Object
                bpy.ops.object.empty_add(
                    type='SPHERE', location=(0, 0, 0), radius=0.1)
                locator_obj = locator.blend_empty = context.object
                # 2. Apply translation
                _mc_pivot(locator_obj, locator.position)
                # 3. Apply custom properties
                locator_obj.name = locator.name

        # Parent objects (keep offset)
        for bone in self.bones.values():
            bone_obj = bone.blend_empty
            # 1. Parent bone keep transform
            if bone.parent is not None and bone.parent in self.bones:
                parent_obj: bpy.types.Object = self.bones[
                    bone.parent
                ].blend_empty
                context.view_layer.update()
                bone_obj.parent = parent_obj
                bone_obj.matrix_parent_inverse = (
                    parent_obj.matrix_world.inverted()
                )
            # 2. Parent cubes keep transform
            for cube in bone.cubes:
                cube_obj = cube.blend_cube
                context.view_layer.update()
                cube_obj.parent = bone_obj
                cube_obj.matrix_parent_inverse = (
                    bone_obj.matrix_world.inverted()
                )
            # 3. Parent poly_mesh keep transform
            if bone.poly_mesh is not None:
                poly_mesh_obj = bone.poly_mesh.blend_object
                context.view_layer.update()
                poly_mesh_obj.parent = bone_obj
                poly_mesh_obj.matrix_parent_inverse = (
                    bone_obj.matrix_world.inverted()
                )
            # 4. Parent locators keep transform
            for locator in bone.locators:
                locator_obj = locator.blend_empty
                context.view_layer.update()
                locator_obj.parent = bone_obj
                locator_obj.matrix_parent_inverse = (
                    bone_obj.matrix_world.inverted()
                )

        # Rotate objects
        for bone in self.bones.values():
            bone_obj = bone.blend_empty
            context.view_layer.update()
            _mc_rotate(bone_obj, bone.rotation)
            for cube in bone.cubes:
                cube_obj = cube.blend_cube
                _mc_rotate(cube_obj, cube.rotation)

    def build_with_armature(self, context: bpy_types.Context):
        '''
        Builds the geometry in Blender. Uses armature and bones to represent
        the Minecraft bones.

        :param context: The context of running the operator.
        '''
        # Build everything using empties
        self.build_with_empties(context)

        # Build armature
        # Create empty armature and enter edit mode:
        bpy.ops.object.armature_add(
            enter_editmode=True, align='WORLD', location=(0, 0, 0))
        bpy.ops.armature.select_all(action='SELECT')
        bpy.ops.armature.delete()

        # Save the armature
        armature = context.object
        edit_bones = armature.data.edit_bones

        # Create bones
        for bone in self.bones.values():
            add_bone(edit_bones, 0.3, bone)

        # Parent bones
        for bone in self.bones.values():
            # 1.
Parent bone keep transform if bone.parent is not None and bone.parent in self.bones: parent_obj: bpy.types.Object = self.bones[ bone.parent ] # context.view_layer.update() edit_bones[bone.name].parent = edit_bones[parent_obj.name] bpy.ops.object.mode_set(mode='OBJECT') def parent_bone_keep_transform( obj: bpy.types.Object, bone: ImportBone): ''' Used for replacing empty parent with new bone parent ''' context.view_layer.update() # Copy matrix_parent_inverse from previous parent # It can be copied because old parent (locator) has the same # transformation as the new one (bone) parent_inverse = ( obj.matrix_parent_inverse.copy() # type:ignore ) obj.parent = armature # type: ignore obj.parent_bone = bone.name # type: ignore obj.parent_type = 'BONE' # type: ignore obj.matrix_parent_inverse = parent_inverse # type: ignore # Correct parenting to tail of the bone instead of head context.view_layer.update() blend_bone = armature.pose.bones[bone.name] # pylint: disable=no-member correction = mathutils.Matrix.Translation( blend_bone.head-blend_bone.tail ) obj.matrix_world = ( # type: ignore correction @ obj.matrix_world # type: ignore ) # Replace empties with bones for bone in self.bones.values(): bone_obj = bone.blend_empty # 2. Parent cubes keep transform for cube in bone.cubes: parent_bone_keep_transform(cube.blend_cube, bone) # 3. Parent poly_mesh keep transform if bone.poly_mesh is not None: parent_bone_keep_transform(bone.poly_mesh.blend_object, bone) # 4. Parent locators keep transform for locator in bone.locators: parent_bone_keep_transform(locator.blend_empty, bone) # remove the locators bpy.data.objects.remove(bone_obj) def _mc_translate( obj: bpy.types.Object, mctranslation: Tuple[float, float, float], mcsize: Tuple[float, float, float], mcpivot: Tuple[float, float, float] ): ''' Translates a Blender object using a translation vector written in Minecraft coordinates system. :param obj: Blender object to transform.. :param mctranslation: Minecraft translation. 
:param mcsize: Minecraft size. :param mcpivot: Minecraft pivot. ''' pivot_offset = mathutils.Vector( np.array(mcpivot)[[0, 2, 1]] / MINECRAFT_SCALE_FACTOR ) size_offset = mathutils.Vector( (np.array(mcsize)[[0, 2, 1]] / 2) / MINECRAFT_SCALE_FACTOR ) translation = mathutils.Vector( np.array(mctranslation)[[0, 2, 1]] / MINECRAFT_SCALE_FACTOR ) for vertex in obj.data.vertices: vertex.co += (translation - pivot_offset + size_offset) def _mc_set_size( obj: bpy.types.Object, mcsize: Tuple[float, float, float], inflate: Optional[float]=None): ''' Scales a Blender object using scale vector written in Minecraft coordinates system. :param obj: Blender object :param mcsize: Minecraft object size. ''' # cube_obj.dimensions = ( # np.array(cube_obj.dimensions) + # (2*-cube.inflate/MINECRAFT_SCALE_FACTOR) # ) effective_inflate: float = 0.0 if inflate is not None: effective_inflate = inflate/MINECRAFT_SCALE_FACTOR pos_delta = ( (np.array(mcsize)[[0, 2, 1]] / 2) / MINECRAFT_SCALE_FACTOR ) pos_delta += effective_inflate data = obj.data # 0. ---; 1. --+; 2. -+-; 3. -++; 4. +--; 5. +-+; 6. ++- 7. +++ data.vertices[0].co = mathutils.Vector(pos_delta * np.array([-1, -1, -1])) data.vertices[1].co = mathutils.Vector(pos_delta * np.array([-1, -1, 1])) data.vertices[2].co = mathutils.Vector(pos_delta * np.array([-1, 1, -1])) data.vertices[3].co = mathutils.Vector(pos_delta * np.array([-1, 1, 1])) data.vertices[4].co = mathutils.Vector(pos_delta * np.array([1, -1, -1])) data.vertices[5].co = mathutils.Vector(pos_delta * np.array([1, -1, 1])) data.vertices[6].co = mathutils.Vector(pos_delta * np.array([1, 1, -1])) data.vertices[7].co = mathutils.Vector(pos_delta * np.array([1, 1, 1])) def _mc_pivot(obj: bpy.types.Object, mcpivot: Tuple[float, float, float]): ''' Moves a pivot of an Blender object using pivot value in Minecraft coordinates system. :param obj: Blender object :param mcpivot: Minecraft object pivot point. 
''' translation = mathutils.Vector( np.array(mcpivot)[[0, 2, 1]] / MINECRAFT_SCALE_FACTOR ) obj.location += translation def _mc_rotate( obj: bpy.types.Object, mcrotation: Tuple[float, float, float] ): ''' Rotates a Blender object using rotation written in Minecraft coordinates system. :param obj: Blender object :param mcrotation: Minecraft object rotation. ''' rotation = mathutils.Euler( # pylint: disable=too-many-function-args (np.array(mcrotation)[[0, 2, 1]] * np.array([1, 1, -1])) * math.pi/180, 'XZY' ) obj.rotation_euler.rotate(rotation) def _set_uv( uv_converter: CoordinatesConverter, cube_polygons: CubePolygons, uv: Dict, uv_layer: bpy.types.MeshUVLoopLayer): ''' Sets the UV of a face of a Blender cube mesh based on some Minecraft properties. :param uv_converter: converter used for converting from Minecraft UV coordinates (dependent on the scale of the texture) to Blender UV coordinates (values from 0 to 1). :param cube_polygons: CubePolygons object created from the mesh. :param uv: UV mapping for each face. :param uv_layer: UV layer of the mesh. 
''' uv_data = uv_layer.data def set_uv( cube_polygon: CubePolygon, size: Tuple[float, float], uv: Tuple[float, float]): cp_loop_indices = cube_polygon.side.loop_indices cp_order = cube_polygon.order left_down = cp_loop_indices[cp_order[0]] right_down = cp_loop_indices[cp_order[1]] right_up = cp_loop_indices[cp_order[2]] left_up = cp_loop_indices[cp_order[3]] uv_data[left_down].uv = uv_converter.convert((uv[0], uv[1] + size[1])) uv_data[right_down].uv = uv_converter.convert( (uv[0] + size[0], uv[1] + size[1])) uv_data[right_up].uv = uv_converter.convert((uv[0] + size[0], uv[1])) uv_data[left_up].uv = uv_converter.convert((uv[0], uv[1])) # right/left set_uv(cube_polygons.east, uv["east"]["uv_size"], uv["east"]["uv"]) # front set_uv(cube_polygons.north, uv["north"]["uv_size"], uv["north"]["uv"]) # left/right set_uv(cube_polygons.west, uv["west"]["uv_size"], uv["west"]["uv"]) # back set_uv(cube_polygons.south, uv["south"]["uv_size"], uv["south"]["uv"]) # top set_uv(cube_polygons.up, uv["up"]["uv_size"], uv["up"]["uv"]) # bottom set_uv(cube_polygons.down, uv["down"]["uv_size"], uv["down"]["uv"]) def add_bone( edit_bones: bpy.types.bpy_prop_collection, length: float, import_bone: ImportBone): ''' :param edit_bones: edit bones of the armature (from armature.data.edit_bones). :param length: length of the bone. :param import_bone: import bone with all of the Minecraft data and the reference to empty object that currently represents the bone. ''' matrix_world: mathutils.Matrix = ( import_bone.blend_empty.matrix_world # type: ignore ) bone = edit_bones.new(import_bone.name) bone.head, bone.tail = (0.0, 0.0, 0.0), (0.0, length, 0.0) bone.matrix = matrix_world <file_sep>''' This is a testing script for exporting animations. Exports animations from blend file and compares them with the expected result. 
''' import os import shutil import json from pathlib import Path import typing as tp import pytest from .common import assert_is_model, blender_run_script, make_comparable_json def make_comparison_files( tmp: str, scene_name: str, blend_file_path: str, ) -> tp.Tuple[tp.Dict, str]: ''' Opens blender file, selects_scene and exports animation from that to given tmp path. Returns the result JSON in a dictionary and the path to newly created file. ''' tmp = os.path.abspath(tmp) target = os.path.join(tmp, f'{scene_name}.animation.json') expected_result_path = ( f'./tests/data/test_animation_export/{scene_name}.animation.json' ) script = os.path.abspath('./blender_scripts/export_animation.py') blend_file_path = os.path.abspath(blend_file_path) # Windows uses wierd path separators tmp = tmp.replace('\\', '/') target = target.replace('\\', '/') script = script.replace('\\', '/') # Create tmp if not exists Path(tmp).mkdir(parents=True, exist_ok=True) # Run blender actions blender_run_script( script, scene_name, target, blend_file_path=blend_file_path ) # Return results with open(target, 'r') as f: target_dict = json.load(f) with open(expected_result_path, 'r') as f: expected_result = json.load(f) return target_dict, target, expected_result # type: ignore # PYTEST FUNCTIONS SCENES = [ 'ObjectAnimation', 'ArmatureAnimation', 'issue71' # 'BattleMech' ] def setup_module(module): '''Runs before tests''' tmp_path = "./.tmp/test_animation_export" if os.path.exists(tmp_path): shutil.rmtree(tmp_path) @pytest.fixture(params=SCENES) def scene(request): return request.param # TESTS def test_animation_export(scene): result_dict, result_path, expected_result = make_comparison_files( "./.tmp/test_animation_export", scene, './tests/data/tests_project.blend' ) assert result_dict == expected_result <file_sep># Tips and tricks This is a list of tips and tricks related to Mcbled. 
Some of them are related to the addon and some of them explain some basics of using Blender which can be useful for working with Mcblend. ## Matching framerate By default blender uses 24FPS framerate. Minecraft uses seconds to define the timestamps of keyframes in animation. It's good to change the framerate setting into something that divides 1 second period into something nice - for example (25FPS or 20FPS). 1/24 is 0.0416666 but 1/25 is 0.04 which looks way better in the animation file. You can find the framerate setting in `Output Properties -> Frame Rate`. ![](../img/framerate_setting.png) ## World unit scale By default 1 meter in your model is equal to one block in Minecraft. One meter in Minecraft model is 16 pixels on the texture of the model. You might want to measure the size in pixels instead of meters. You can change the setting that you can find under `Scene properties -> Unit scale`. Changing the value of this property to 16 will cause that one meter of your Blender model will be converted into 1 pixel in Minecraft. ![](../img/unit_scale_setting.png) ## Creating materials Currently the addon doesn't create materials for your model during UV mapping and generating textures. When you create your UV map and texture template, it is not visible on your model. If you want to make the texture visible you have to create new material in shader editor and assign it to every object in your model. 1. Generate texture template using the [Set bedrock UV panel](../gui). 2. Go to the shading tab and create new material. Based on the image below: ![](../img/simple_shader.png) 3. Enable the transparency in material properties. ![](../img/transparency_setting.png) 4. Assign the material to other objects in your model. Select all of your objects and make sure that the object that uses your material is active. Go to `Material Properties` right click on the material and select "Copy to selected". 
<file_sep># Conversion rules

There are no directly equivalent objects in blender models to Minecraft
models. Mcblend uses a set of rules to decide which parts of the model should
be turned into a bone, locator or cube.

!!! note

    The best way to learn what kinds of objects are converted to what is
    trial and error. You can use the set of rules below if you notice
    something unexpected.

1. An empty or mesh with custom
   [_export as bone_](../basic_operators/#toggle-export-as-bones) property
   always creates a bone or a bone with cube, respectively.
2. A Blender bone is converted into a Minecraft bone unless it has no
   children and no parents. In this case it isn't converted at all. This
   behavior is to prevent the exporting of inverse kinematics bones.
3. An empty becomes a bone unless it has a parent but no children. In this
   case it creates a locator.
4. Mesh without parent becomes a bone with a cube inside. Mesh with a parent
   becomes a cube.

**The conversion rules can also be represented with this table:**

||Export as bone|no parent, no children| parent, no children|no parent,children|parent and children|
|---|---|---|---|---|---|
|__Bone__ |N/A|NONE|bone|bone|bone|
|__Empty__|bone|bone|locator|bone|bone|
|__Mesh__ |bone & cube|bone & cube|cube| bone & cube|cube|
<file_sep>Blender scripts used for testing.

Run in blender with:

```
blender -b --python <script-path> -- <script-args>
```
<file_sep># Texture customization (advanced)

![](../img/customized_uv_groups.png)

You can customize the appearance of a UV-group by applying masks to its faces
in the new custom panel in [Scene Properties](../gui_changes/#scene-properties).
Masks are filters that are applied to the texture when it is generated.

Each UV-group has 6 faces. You can switch between faces to edit with the
"Side:" dropdown menu. The image below shows how the names of the sides
(side1-6) are correlated to their placement on the texture.

![](../img/faces.png)

New masks are added with the "Add mask" dropdown menu.
There are 8 different types of masks. !!! note All masks have an eye icon in the upper right corner that can be used to temporarily disable / enable the mask. ## Color Palette Mask ![](../img/color_palette_mask.png) This mask takes the grayscale image as an input and maps its brightness values to a color image with a palette defined as a list of colors. Properties: - Colors - list of colors in the palette - Interpolate - whether there should be a smooth transition between the colors on the palette. - Normalize - normalizes the input values so that the whole color palette is used. !!! note When input image is not grayscale than it gets converted to grayscale before applying the mask. !!! note Color palette mask is the only mask which can't be used inside the Mix Mask. If you put this mask into mix mask it will have no effect. ## Gradient Mask ![](../img/gradient_mask.png) The Gradient mask creates a grayscale gradient with stripes of varying darkness and width. The direction in which the stripes are drawn is defined with two points on the texture. The grayscale image is than multiplied by the input image. Properties: - Point A - the starting point of drawing gradient stripes. - Point B - the end point of drawing gradient stripes. - Relative boundaries - whether points A and B should be passed as absolute values (number of pixels from the lower left corner of the texture) or as a fraction of the texture size (0.0 lower left corner, 1.0 upper right corner). The absolute values can be negative, meaning they represent the number of pixels from the top right corner (starting at -1). - Stripes - the list of stripes in the gradient their colors (strengths) and their widths. The widths define their placement on a line between points A and B so in most cases the width of the first stripe is 0, which means that this stripe should be drawn at the Point A. - Exponent - the filter image is raised to the power of this value before it is multiplied by the image. 
## Ellipse Mask ![](../img/ellipse_mask.png) Ellipse mask creates a grayscale image of ellipse that fits between Point A and Point B. The grayscale image is than multiplied by the input image. Properties: - Point A and B - boundaries of the ellipse. - Relative boundaries - whether points A and B should be passed as absolute values (number of pixels from the lower left corner of the texture) or as a fraction of the texture size (0.0 lower left corner, 1.0 upper right corner). The absolute values can be negative, meaning they represent the number of pixels from the top right corner (starting at -1). - Exponent - the filter image is raised to the power of this value before it is multiplied by the image. - Strength - the min and max values of brightness of created filter image. - Hard edge - whether the ellipse should have hard edges or the brightness of the inner part of the ellipse should be smoothly interpolated to the edges of the image. ## Rectangle Mask ![](../img/rectangle_mask.png) Rectangle mask creates a grayscale image of rectangle between Point A and Point B. The grayscale image is than multiplied by the input image. Properties: - Point A and B - opposite corners of the rectangle. - Relative boundaries - whether points A and B should be passed as absolute values (number of pixels from the lower left corner of the texture) or as a fraction of the texture size (0.0 lower left corner, 1.0 upper right corner). The absolute values can be negative, meaning they represent the number of pixels from the top right corner (starting at -1). - Exponent - the filter image is raised to the power of this value before it is multiplied by the image. - Strength - the min and max values of brightness of created filter image. - Hard edge - whether the rectangle should have hard edges or the brightness of the inner part of the rectangle should be smoothly interpolated to the edges of the image. 
## Stripes Mask ![](../img/stripes_mask.png) Rectangle mask creates a grayscale image with repeating stripes of certain width and brightness. The grayscale image is than multiplied by the input image. Properties: - Relative boundaries - whether the width of the stripes is expressed as a fraction of the image width / height. - Stripes - the list of the stripes, their width and their brightness. - Horizontal - whether the stripes should be vertical or horizontal. ## Random Mask ![](../img/random_mask.png) Random mask creates a grayscale image with randomly bright pixels. The grayscale image is than multiplied by the input image. - Exponent - the filter image is raised to the power of this value before it is multiplied by the image. - Strength - the min and max brightness values of the pixels on the filter image. - Use seed - allows you to set the seed for the color randomization. ## Color Mask ![](../img/color_mask.png) Color mask multiplies the input mask by a color. ## Mix Mask ![](../img/mix_mask.png) Mix mask lets you mix multiple masks in different way than just default multiplication. Properties: - Exponent - the filter image is raised to the power of this value before it is multiplied by the image. - Strength - the min and max values of the brightness of returned filter image. The filter image brightness values are mapped to fit on the scale between values defined with "strength" - Mix mode - how to mix filter images produced by other masks. There are 4 options: min, max, mean and median. - Number of children - Number of mixed masks. !!! note Mix mask ignores the color palette masks because color palette mask does not create a filter image (it just alters the image from the input).<file_sep>''' Import model from source_path and export it to target_path. This script is used for testing the import and export operators. 
''' import sys import bpy # Collect arguments after "--" argv = sys.argv argv = argv[argv.index("--") + 1:] def main(source_path: str, target_path: str): # Remove all starting objects bpy.ops.object.select_all(action='SELECT') bpy.ops.object.delete(use_global=False) # Load model from source file if "use_empties" in argv: bpy.ops.object.nusiq_mcblend_import_operator( filepath=source_path, replace_bones_with_empties=True) else: bpy.ops.object.nusiq_mcblend_import_operator( filepath=source_path, replace_bones_with_empties=False) # Save model to target file bpy.ops.object.select_all(action='SELECT') bpy.ops.object.nusiq_mcblend_export_operator(filepath=target_path) if __name__ == "__main__": main(argv[0], argv[1]) <file_sep>''' Installs mcblend from a zip path. This script is used in github actions. ''' import sys import bpy # Collect arguments after "--" argv = sys.argv argv = argv[argv.index("--") + 1:] def main(zip_path): ''' - zip_path - absolute path to zip file with mcblend. ''' print(f'Installing the addon from {zip_path}') bpy.ops.preferences.addon_install(filepath=zip_path) print(f'Enabling module "mcblend"') bpy.ops.preferences.addon_enable(module="mcblend") bpy.ops.wm.save_userpref() if __name__ == "__main__": main(argv[0]) <file_sep>''' Functions and objects shared between other modules of Mcblend. ''' from __future__ import annotations import math from enum import Enum from typing import ( Dict, NamedTuple, List, Optional, Tuple, Any, Iterable, Sequence) import numpy as np import bpy_types import bpy import mathutils from .texture_generator import Mask, ColorMask, get_masks_from_side from .exception import NameConflictException, NoCubePolygonsException MINECRAFT_SCALE_FACTOR = 16 '''The scale convertion from blender to minecraft (16 units == 1 meter).''' class MCObjType(Enum): '''The types of Minecraft objects created from blender objects.''' CUBE = 'CUBE' BONE = 'BONE' BOTH = 'BOTH' LOCATOR = 'LOCATOR' class MeshType(Enum): ''' Type of the exported mesh. 
Changes the way of representation of this object in exported model file. ''' CUBE = 'Cube' POLY_MESH = 'Poly Mesh' class ObjectId(NamedTuple): ''' Object that represents Unique ID of blender object (bone, empty or mesh). For meshes and empties: - :code:`bone_name` is just an empty string. - :code:`name` is the name of the object. For bones: - :code:`bone_name` is the name of the bone. - :code:`name` is the name of the armature that owns the bone. ''' name: str bone_name: str class McblendObject: ''' A class that wraps Blender objects (meshes, empties and bones) and provides access to various properties used by Mcblend. :param thisobj_id: The :class:`ObjectId` that identifies this object. :param thisobj: Blender object wrapped inside this object. :param parentobj_id: The :class:`ObjectId` of the parent of this object. :param children_ids: The list of :class:`ObjectId`s of the children of this object. :param mctype: The :class:`MCObjType` of of this object. :param group: The :class:`McblendObjectGroup` that stores all of the :class:`McblendObject`s being processed with this object. ''' def __init__( self, thisobj_id: ObjectId, thisobj: bpy.types.Object, parentobj_id: Optional[ObjectId], children_ids: List[ObjectId], mctype: MCObjType, group: McblendObjectGroup): self.thisobj_id = thisobj_id self.thisobj: bpy.types.Object = thisobj self.parentobj_id: Optional[ObjectId] = parentobj_id self.children_ids: List[ObjectId] = children_ids self.mctype: MCObjType = mctype self.group = group @property def parent(self) -> Optional[McblendObject]: '''Parent of this object.''' try: if self.parentobj_id is None: return None return self.group[self.parentobj_id] except KeyError: return None @property def children(self) -> Tuple[McblendObject]: ''' Children of this object from the :class:`McblendObjectGroup` of this object. 
''' children: List[McblendObject] = [] for child_id in self.children_ids: if child_id in self.group: children.append(self.group[child_id]) return tuple(children) # type: ignore @property def inflate(self) -> float: '''Inflate value of this object''' return self.thisobj.nusiq_mcblend_object_properties.inflate @inflate.setter def inflate(self, inflate: float): self.thisobj.nusiq_mcblend_object_properties.inflate = inflate @property def mesh_type(self) -> MeshType: '''Mesh type of this object''' return MeshType(self.thisobj.nusiq_mcblend_object_properties.mesh_type) @mesh_type.setter def mesh_type(self, mesh_type: MeshType): self.thisobj.nusiq_mcblend_object_properties.mesh_type = mesh_type @property def mirror(self) -> bool: '''Whether the objects UV is mirrored.''' return self.thisobj.nusiq_mcblend_object_properties.mirror @mirror.setter def mirror(self, mirror: bool): self.thisobj.nusiq_mcblend_object_properties.mirror = mirror @property def is_bone(self) -> bool: '''Whether the object should be exported as bone to Minecraft model.''' return self.thisobj.nusiq_mcblend_object_properties.is_bone @is_bone.setter def is_bone(self, is_bone: bool): self.thisobj.nusiq_mcblend_object_properties.is_bone = is_bone @property def uv_group(self) -> str: '''The name of the UV-group of this object.''' return self.thisobj.nusiq_mcblend_object_properties.uv_group @uv_group.setter def uv_group(self, uv_group: str): self.thisobj.nusiq_mcblend_object_properties.uv_group = uv_group @property def obj_data(self) -> Any: ''' The "data" property of the blender object wrapped inside this object. ''' return self.thisobj.data @property def obj_name(self) -> str: '''The name of this object used for exporting to Minecraft model.''' if self.thisobj.type == 'ARMATURE': return self.thisobj.pose.bones[ self.thisobj_id.bone_name ].name return self.thisobj.name.split('.')[0] @property def obj_type(self) -> str: ''' The type of the blender object wrapped inside this object (ARMATURE, MESH or EMPTY). 
''' return self.thisobj.type @property def obj_bound_box(self) -> Any: '''The bound_box of the blender object wrapped inside this object.''' return self.thisobj.bound_box @property def obj_matrix_world(self) -> mathutils.Matrix: ''' The copy of the translation matrix (matrix_world) of the blender wrapped inside this object. ''' if self.thisobj.type == 'ARMATURE': return self.thisobj.matrix_world.copy() @ self.thisobj.pose.bones[ self.thisobj_id.bone_name ].matrix.copy() return self.thisobj.matrix_world.copy() @property def mcube_size(self) -> np.ndarray: ''' The cube size in Minecraft format based on the bounding box of the blender object wrapped inside this object. ''' # 0. ---; 1. --+; 2. -++; 3. -+-; 4. +--; 5. +-+; 6. +++; 7. ++- bound_box = self.obj_bound_box return (np.array(bound_box[6]) - np.array(bound_box[0]))[[0, 2, 1]] @property def mccube_position(self) -> np.ndarray: ''' The cube position in Minecraft format based on the bounding box of the blender object wrapped inside this object. ''' return np.array(self.obj_bound_box[0])[[0, 2, 1]] @property def mcpivot(self) -> np.ndarray: ''' The pivot point of Minecraft object exported using this object. ''' def local_crds( parent: McblendObject, child: McblendObject ) -> mathutils.Vector: '''Local coordinates of child matrix inside parent matrix''' # Applying normalize() function to matrix world of parent and child # suppose to fix some errors with scaling but tests doesn't show any # difference. # It does fix the issue #62 so PLEASE don't change it again! 
return child.get_local_matrix( parent, normalize=True).to_translation() def _get_mcpivot(objprop: McblendObject) -> mathutils.Vector: if objprop.parent is not None: result = local_crds(objprop.parent, objprop) result += _get_mcpivot(objprop.parent) else: result = objprop.obj_matrix_world.to_translation() return result return np.array(_get_mcpivot(self).xzy) def get_local_matrix( self, other: Optional[McblendObject] = None, normalize: bool = False ) -> mathutils.Matrix: ''' Returns translation matrix of this object optionally in translation space of the other :class:`McblendObject`. :param other: Optional - the other :class:`McblendObject` :param normalize: Whether to normalizes parent and child matrixes before calculating the relative matrix. This solves problems related to different scales of parent and child transformations (see github issue #62 and #71) :returns: translation matrix of this object. ''' if other is not None: p_matrix = other.obj_matrix_world else: p_matrix = ( # pylint: disable=no-value-for-parameter mathutils.Matrix() ) c_matrix = self.obj_matrix_world if normalize: p_matrix.normalize() c_matrix.normalize() return p_matrix.inverted() @ c_matrix def get_mcrotation( self, other: Optional[McblendObject] = None ) -> np.ndarray: ''' Returns the Minecraft rotation of this object optionally in relation to the other :class:`McblendObject`. # Arguments: :param other: Optional - the the other :class:`McblendObject`. :returns: numpy array with the rotation of this object in Minecraft format. 
''' def local_rotation( child_matrix: mathutils.Matrix, parent_matrix: mathutils.Matrix ) -> mathutils.Euler: ''' Returns Euler rotation of a child matrix in relation to parent matrix ''' child_matrix = child_matrix.normalized() parent_matrix = parent_matrix.normalized() return ( parent_matrix.inverted() @ child_matrix ).to_quaternion().to_euler('XZY') if other is not None: result_euler = local_rotation( self.obj_matrix_world, other.obj_matrix_world ) else: result_euler = self.obj_matrix_world.to_euler('XZY') result: np.ndarray = np.array(result_euler)[[0, 2, 1]] result = result * np.array([1, -1, 1]) result = result * 180/math.pi # math.degrees() for array return result def cube_polygons(self) -> CubePolygons: ''' Returns the :class:`CubePolygons` of this object (always new copy of the object). ''' return CubePolygons.build(self.thisobj, self.mirror) @property def side1_uv_masks(self) -> Sequence[Mask]: ''' Sequence of masks affecting the texture of side 1 of the cube of this object. ''' if self.uv_group == '': return [ColorMask((0, 1, 0))] uv_group = bpy.context.scene.nusiq_mcblend_uv_groups[self.uv_group] return get_masks_from_side(uv_group.side1) @property def side2_uv_masks(self) -> Sequence[Mask]: ''' Sequence of masks affecting the texture of side 2 of the cube of this object. ''' if self.uv_group == '': return [ColorMask((1, 0, 1))] uv_group = bpy.context.scene.nusiq_mcblend_uv_groups[self.uv_group] return get_masks_from_side(uv_group.side2) @property def side3_uv_masks(self) -> Sequence[Mask]: ''' Sequence of masks affecting the texture of side 3 of the cube of this object. ''' if self.uv_group == '': return [ColorMask((1, 0, 0))] uv_group = bpy.context.scene.nusiq_mcblend_uv_groups[self.uv_group] return get_masks_from_side(uv_group.side3) @property def side4_uv_masks(self) -> Sequence[Mask]: ''' Sequence of masks affecting the texture of side 4 of the cube of this object. 
''' if self.uv_group == '': return [ColorMask((0, 1, 1))] uv_group = bpy.context.scene.nusiq_mcblend_uv_groups[self.uv_group] return get_masks_from_side(uv_group.side4) @property def side5_uv_masks(self) -> Sequence[Mask]: ''' Sequence of masks affecting the texture of side 5 of the cube of this object. ''' if self.uv_group == '': return [ColorMask((0, 0, 1))] uv_group = bpy.context.scene.nusiq_mcblend_uv_groups[self.uv_group] return get_masks_from_side(uv_group.side5) @property def side6_uv_masks(self) -> Sequence[Mask]: ''' Sequence of masks affecting the texture of side 6 of the cube of this object. ''' if self.uv_group == '': return [ColorMask((1, 1, 0))] uv_group = bpy.context.scene.nusiq_mcblend_uv_groups[self.uv_group] masks = get_masks_from_side(uv_group.side6) return masks # key (side, is_mirrored) : value (names of the vertices) # Used in CubePolygons constructor _MC_MAPPING_UV_ORDERS = { ('east', False) :('-+-', '---', '--+', '-++'), ('north', False) :('---', '+--', '+-+', '--+'), ('west', False) :('+--', '++-', '+++', '+-+'), ('south', False) :('++-', '-+-', '-++', '+++'), ('up', False) :('--+', '+-+', '+++', '-++'), ('down', False) :('---', '+--', '++-', '-+-'), ('west', True) :('++-', '+--', '+-+', '+++'), ('north', True) :('+--', '---', '--+', '+-+'), ('east', True) :('---', '-+-', '-++', '--+'), ('south', True) :('-+-', '++-', '+++', '-++'), ('up', True) :('+-+', '--+', '-++', '+++'), ('down', True) :('+--', '---', '-+-', '++-'), } class CubePolygons(NamedTuple): ''' Polygons of blender cube object that correspond to Minecraft cube faces. ''' east: CubePolygon # Cube Right north: CubePolygon # Cube Front west: CubePolygon # Cube Left south: CubePolygon # Cube Back up: CubePolygon # Cube Up down: CubePolygon # Cube Down @staticmethod def build(cube: bpy.types.Object, mirror: bool) -> CubePolygons: ''' Creates :class:`CubePolygons` object for given blender object cube. :param cube: blender cube mesh. 
        :param mirror: Whether the order of vertices in returned
            :class:`CubePolygons` should match Minecraft mirrored mapping
            format or not.
        '''
        def get_order(
            name: str, mirror: bool,
            bound_box_vertices: Tuple[str, str, str, str]
        ) -> Tuple[int, int, int, int]:
            '''Gets the order of vertices for given cube polygon'''
            mc_mapping_uv_order = _MC_MAPPING_UV_ORDERS[(name, mirror)]
            result = []
            for vertex_name in mc_mapping_uv_order:
                # Throws ValueError
                index = bound_box_vertices.index(vertex_name)
                result.append(index)
            return tuple(result)  # type: ignore

        # 1. Check if object has 6 quadrilateral faces
        if len(cube.data.polygons) != 6:
            raise NoCubePolygonsException(
                f"Object {cube.name.split('.')} is not a cube. Number of faces != 6."
            )
        for polygon in cube.data.polygons:
            if len(polygon.vertices) != 4:
                raise NoCubePolygonsException(
                    f"Object {cube.name.split('.')} is not a cube. Not all faces are "
                    "quadrilateral."
                )

        # Blender crds (bounding box):
        # 0. ---; 1. --+; 2. -++; 3. -+-; 4. +--; 5. +-+; 6. +++; 7. ++-
        mmm, mmp, mpp, mpm, pmm, pmp, ppp, ppm = tuple(cube.bound_box)
        # MC: 0+0 top; -00 right; 00- front;
        # Blender: 00+ top; -00 right; 0-0 front
        bb_crds = {
            "---": np.array(mmm), "--+": np.array(mmp),
            "-++": np.array(mpp), "-+-": np.array(mpm),
            "+--": np.array(pmm), "+-+": np.array(pmp),
            "+++": np.array(ppp), "++-": np.array(ppm)
        }

        # Expected cyclic vertex-name sequences for every Minecraft face.
        north: List[str] = ['--+', '+-+', '+--', '---']  # Cube Front
        east: List[str] = ['---', '-+-', '-++', '--+']  # Cube Right
        south: List[str] = ['-+-', '++-', '+++', '-++']  # Cube Back
        west: List[str] = ['+--', '++-', '+++', '+-+']  # Cube Left
        up: List[str] = ['--+', '+-+', '+++', '-++']  # Cube Up
        down: List[str] = ['---', '+--', '++-', '-+-']  # Cube Down

        cube_polygon_builder = {}  # Input for CubePolygons constructor
        for polygon in cube.data.polygons:
            bbv: List[str] = []  # bound box vertices
            for vertex_id in polygon.vertices:
                vertex_crds = np.array(
                    cube.data.vertices[vertex_id].co
                )
                # Find the closest point of bounding box (key from bb_crds)
                shortest_distance: Optional[float] = None
                closest_bb_point = '---'
                for k, v in bb_crds.items():
                    curr_distance = np.linalg.norm(v-vertex_crds)
                    if shortest_distance is None:
                        shortest_distance = curr_distance
                        closest_bb_point = k
                    elif curr_distance < shortest_distance:
                        shortest_distance = curr_distance
                        closest_bb_point = k
                bbv.append(closest_bb_point)

            # Im not sure which order of vertices is correct so I just check
            # original and reversed
            rbbv = list(reversed(bbv))
            if cyclic_equiv(north, bbv) or cyclic_equiv(north, rbbv):
                t_bbv: Tuple[str, str, str, str] = tuple(bbv)  # type: ignore
                cube_polygon_builder['north'] = CubePolygon(
                    polygon, t_bbv, get_order('north', mirror, t_bbv)
                )
            elif cyclic_equiv(east, bbv) or cyclic_equiv(east, rbbv):
                t_bbv: Tuple[str, str, str, str] = tuple(bbv)  # type: ignore
                cube_polygon_builder['east'] = CubePolygon(
                    polygon, t_bbv, get_order('east', mirror, t_bbv)
                )
            elif cyclic_equiv(south, bbv) or cyclic_equiv(south, rbbv):
                t_bbv: Tuple[str, str, str, str] = tuple(bbv)  # type: ignore
                cube_polygon_builder['south'] = CubePolygon(
                    polygon, t_bbv, get_order('south', mirror, t_bbv)
                )
            elif cyclic_equiv(west, bbv) or cyclic_equiv(west, rbbv):
                t_bbv: Tuple[str, str, str, str] = tuple(bbv)  # type: ignore
                cube_polygon_builder['west'] = CubePolygon(
                    polygon, t_bbv, get_order('west', mirror, t_bbv)
                )
            elif cyclic_equiv(up, bbv) or cyclic_equiv(up, rbbv):
                t_bbv: Tuple[str, str, str, str] = tuple(bbv)  # type: ignore
                cube_polygon_builder['up'] = CubePolygon(
                    polygon, t_bbv, get_order('up', mirror, t_bbv)
                )
            elif cyclic_equiv(down, bbv) or cyclic_equiv(down, rbbv):
                t_bbv: Tuple[str, str, str, str] = tuple(bbv)  # type: ignore
                cube_polygon_builder['down'] = CubePolygon(
                    polygon, t_bbv, get_order('down', mirror, t_bbv)
                )
        try:
            return CubePolygons(**cube_polygon_builder)
        except TypeError as e:  # Missing argument
            raise NoCubePolygonsException(
                f"Object {cube.name.split('.')} is not filling a bounding box "
                "good enough to approximate its shape to a cube."
            ) from e

class CubePolygon(NamedTuple):
    '''
    Single face in :class:`CubePolygons`.

    :param side: :class:`bpy_types.MeshPolygon` object from blender mesh.
    :param orientation: The names of the vertices of the Mesh polygon.
        Vertices are named with 3-character-string (using only '+' and '-').
        Where each character symbolizes whether the vertex is on increasing
        (+) or decreasing (-) side of the corresponding axis (XYZ) in local
        space of the object.
    :param order: Stores the order (values from 0 to 4) in which the loops
        of the face should be rearranged to this order 0 left bottom corner,
        1 right bottom corner, 2 right top corner, 3 left top corner.
    '''
    side: bpy_types.MeshPolygon
    orientation: Tuple[str, str, str, str]
    order: Tuple[int, int, int, int]

class McblendObjectGroup:
    '''
    A group of :class:`McblendObject`s often used as a main datasource for
    operations executed by Mcblend. The objects can be accessed with
    ObjectId with __getitem__ method like from a dict.

    :param context: the context of running an operator.
    '''
    def __init__(self, context: bpy_types.Context):
        self.data: Dict[ObjectId, McblendObject] = {}
        '''the content of the group.'''
        self._load_objects(context)
        self._check_name_conflicts()

    def __len__(self):
        return len(self.data)

    def __getitem__(self, key: ObjectId) -> McblendObject:
        return self.data[key]

    def __contains__(self, item):
        return item in self.data

    def __iter__(self):
        return self.data.__iter__()

    def values(self):
        '''Lists values of this group (the :class:`McblendObject`s).'''
        return self.data.values()

    def keys(self):
        '''Lists valid keys to use in this object.'''
        return self.data.keys()

    def items(self):
        '''Iterator going through pairs of keys and values of this group.'''
        return self.data.items()

    def _load_objects(self, context: bpy_types.Context):
        '''
        Loops through selected objects and creates
        :class:`McblendObjects` for this group. Used by constructor.

        :param context: the context of running an operator.
        '''
        for obj_id, obj in self._loop_objects(context.selected_objects):
            curr_obj_mc_type: MCObjType
            curr_obj_mc_parent: Optional[ObjectId] = None
            if obj.type == 'EMPTY':
                curr_obj_mc_type = MCObjType.BONE
                # A childless, parented empty not explicitly marked as a bone
                # is exported as a locator.
                if (obj.parent is not None and len(obj.children) == 0
                        and not obj.nusiq_mcblend_object_properties.is_bone):
                    curr_obj_mc_type = MCObjType.LOCATOR
                if obj.parent is not None:
                    curr_obj_mc_parent = self._get_parent_mc_bone(obj)
            elif obj.type == 'MESH':
                if (obj.parent is None or
                        obj.nusiq_mcblend_object_properties.is_bone):
                    curr_obj_mc_type = MCObjType.BOTH
                else:
                    curr_obj_mc_type = MCObjType.CUBE
                # If parent is none than it will return none
                curr_obj_mc_parent = self._get_parent_mc_bone(obj)
            elif obj.type == 'ARMATURE':
                bone = obj.data.bones[obj_id.bone_name]
                if (
                        bone.parent is None and len(bone.children) == 0
                        and len([
                            # Children of a bone which are not other bones.
c for c in obj.children if c.parent_bone == bone.name ]) == 0 ): continue # Skip empty bones curr_obj_mc_type = MCObjType.BONE if bone.parent is not None: curr_obj_mc_parent = ObjectId(obj.name, bone.parent.name) else: # Handle only empty, meshes and armatures continue self.data[obj_id] = McblendObject( obj_id, obj, curr_obj_mc_parent, [], curr_obj_mc_type, self ) # Fill the children property. Must be in separate loop to reverse the # effect of _get_parent_mc_bone() function. for objid, objprop in self.data.items(): if objprop.parentobj_id is not None and objprop.parentobj_id in self.data: self.data[objprop.parentobj_id].children_ids.append(objid) def _check_name_conflicts(self): ''' Looks through the dictionary of :class:`McblendObject`s of this object and tries to find the names conflicts in the names of the objects. Raises NameConflictException if name conflicts in some bones are detected. Used in constructor. ''' names: List[str] = [] for objprop in self.values(): if objprop.mctype not in [MCObjType.BONE, MCObjType.BOTH]: continue # Only bone names conflicts count if objprop.obj_name in names: raise NameConflictException( f'Name conflict "{objprop.obj_name}". Please rename theobject."' ) names.append(objprop.obj_name) @staticmethod def _loop_objects(objects: List) -> Iterable[Tuple[ObjectId, Any]]: ''' Loops over the empties, meshes and armature objects from the list and yields them and their ids. If object is an armature than it also loops over every bone and yields the pair of armature and the id of the bone. Used in the constructor. :param objects: The list of blender objects. :returns: Iterable that goes through objects and bones. 
        '''
        for obj in objects:
            if obj.type in ['MESH', 'EMPTY']:
                yield ObjectId(obj.name, ''), obj
            elif obj.type == 'ARMATURE':
                # Armatures are yielded once per bone, always paired with
                # the armature object itself.
                for bone in obj.data.bones:
                    yield ObjectId(obj.name, bone.name), obj

    @staticmethod
    def _get_parent_mc_bone(obj: bpy.types.Object) -> Optional[ObjectId]:
        '''
        Goes up through the ancestors of an :class:`bpy.types.Object` and
        tries to find the object that represents its parent bone in
        Minecraft model. Used in constructor.

        :param obj: Blender object which will be a bone in Minecraft model.
        :returns: Id of the object that represents a parent bone in
            Minecraft model.
        '''
        obj_id = None
        while obj.parent is not None:
            if obj.parent_type == 'BONE':
                # Parented directly to an armature bone - that bone is the
                # Minecraft parent.
                return ObjectId(obj.parent.name, obj.parent_bone)
            if obj.parent_type == 'OBJECT':
                obj = obj.parent
                obj_id = ObjectId(obj.name, '')
                # Empties and meshes flagged as bones become Minecraft
                # bones; keep climbing otherwise.
                if (obj.type == 'EMPTY' or
                        obj.nusiq_mcblend_object_properties.is_bone):
                    return obj_id
            else:
                raise Exception(f'Unsupported parent type {obj.parent_type}')
        return obj_id

def cyclic_equiv(u: List, v: List) -> bool:
    '''
    Compare cyclic equivalency of two lists.

    Source: https://stackoverflow.com/questions/31000591/
    '''
    n, i, j = len(u), 0, 0
    if n != len(v):
        return False
    while i < n and j < n:
        k = 1
        # Advance while the two rotations agree.
        while k <= n and u[(i + k) % n] == v[(j + k) % n]:
            k += 1
        if k > n:
            return True
        # Skip rotation candidates that cannot match.
        if u[(i + k) % n] > v[(j + k) % n]:
            i += k
        else:
            j += k
    return False

def inflate_objects(
        context: bpy_types.Context, objects: List[bpy.types.Object],
        inflate: float, mode: str) -> int:
    '''
    Adds inflate property to objects and changes their dimensions. Returns
    the number of edited objects.

    :param context: Context of running the operator.
    :param objects: List of objects to inflate.
    :param inflate: The inflation value.
    :param mode: Either "RELATIVE" or "ABSOLUTE". If "RELATIVE" then the
        value before applying the operator is taken as a base (0 means that
        no changes should be applied). If "ABSOLUTE" then the inflate value
        passed by the user is passed directly to the inflate value of
        Minecraft model.
    :returns: number of edited objects
    '''
    if mode == 'RELATIVE':
        relative = True
    elif mode == 'ABSOLUTE':
        relative = False
    else:
        raise ValueError(f'Unknown mode for set_inflate operator: {mode}')
    counter = 0
    for obj in objects:
        if obj.type == 'MESH':
            if obj.nusiq_mcblend_object_properties.inflate != 0.0:
                if relative:
                    effective_inflate = (
                        obj.nusiq_mcblend_object_properties.inflate + inflate)
                else:
                    effective_inflate = inflate
                # delta is what must be added to the current dimensions.
                delta_inflate = (
                    effective_inflate -
                    obj.nusiq_mcblend_object_properties.inflate)
                obj.nusiq_mcblend_object_properties.inflate = effective_inflate
            else:
                # Current inflate is 0 so RELATIVE and ABSOLUTE coincide.
                delta_inflate = inflate
                obj.nusiq_mcblend_object_properties.inflate = inflate
            # Clear parent from children for a moment
            children = obj.children
            for child in children:
                old_matrix = child.matrix_world.copy()
                child.parent = None
                child.matrix_world = old_matrix
            dimensions = np.array(obj.dimensions)
            # Set new dimensions
            dimensions = (
                dimensions +
                (2*delta_inflate/MINECRAFT_SCALE_FACTOR)
            )
            obj.dimensions = dimensions
            context.view_layer.update()
            # Add children back and set their previous transformations
            for child in children:
                child.parent = obj
                child.matrix_parent_inverse = obj.matrix_world.inverted()
            counter += 1
    return counter
<file_sep>'''
Functions related to exporting animations.
'''
from __future__ import annotations

from typing import NamedTuple, Dict, Optional, List, Tuple, Set
import math
from dataclasses import dataclass, field
from itertools import cycle, tee, islice

import bpy
import bpy_types
import numpy as np

from .json_tools import get_vect_json
from .common import (
    MINECRAFT_SCALE_FACTOR, MCObjType, McblendObjectGroup
)

def _pick_closest_rotation(
        base: np.ndarray, close_to: np.ndarray,
        original_rotation: Optional[np.ndarray] = None
) -> np.ndarray:
    '''
    Takes two arrays with euler rotations in degrees. Looks for rotations
    that result in same orientation as the base rotation. Picks the vector
    which is the closest to the :code:`close_to` using euclidean distance.
    *The :code:`original_rotation` is added specifically to fix some issues
    with bones rotated before the animation. Issue #25 on Github describes
    the problem in detail.

    :param base: the base rotation. Function is looking for different
        representations of this orientation.
    :param close_to: target rotation. Function returns the result as close
        as possible to this vector.
    :param original_rotation: optional - the original rotation of the object
        before the start of the animation.
    :returns: another euler angle that represents the same rotation as
        the base rotation.
    '''
    if original_rotation is None:
        original_rotation = np.array([0.0, 0.0, 0.0])

    def _pick_closet_location(
            base: np.ndarray, close_to: np.ndarray
    ) -> Tuple[float, np.ndarray]:
        # Greedily shifts one axis at a time by full turns (360 degrees) as
        # long as the shift reduces the euclidean distance to close_to.
        choice: np.ndarray = base
        distance = np.linalg.norm(choice - close_to)
        for i in range(3):  # Adds removes 360 to all 3 axis (picks the best)
            arr = np.zeros(3)
            arr[i] = 360
            while choice[i] < close_to[i]:
                new_choice = choice + arr
                new_distance = np.linalg.norm(new_choice - close_to)
                if new_distance > distance:
                    break
                distance, choice = new_distance, new_choice
            while choice[i] > close_to[i]:
                new_choice = choice - arr
                new_distance = np.linalg.norm(new_choice - close_to)
                if new_distance > distance:
                    break
                distance, choice = new_distance, new_choice
        return distance, choice

    distance1, choice1 = _pick_closet_location(base, close_to)
    distance2, choice2 = _pick_closet_location(
        # Counterintuitive but works
        # NOTE(review): appears to rely on the euler identity
        # (x, y, z) == (x+180, -(y+180), z+180) (mod 360), with the extra
        # original_rotation[1]*2 term compensating pre-rotated bones
        # (issue #25) - confirm against that issue.
        (base + np.array([180, 180 + original_rotation[1] * 2, 180])) *
        np.array([1, -1, 1]),
        close_to
    )
    if distance2 < distance1:
        return choice2
    return choice1

def _get_keyframes(context: bpy_types.Context) -> List[int]:
    '''
    Lists keyframe numbers of the animation from keyframes of NLA tracks and
    actions of selected objects.

    :param context: the context of running the operator.
    :returns: the list of the keyframes for the animation.
    '''
    def get_action_keyframes(action: bpy.types.Action) -> Set[int]:
        '''Gets set of keyframes from an action.'''
        if action.fcurves is None:
            return set()
        result: Set[int] = set()
        for fcurve in action.fcurves:
            if fcurve.keyframe_points is None:
                continue
            for keyframe_point in fcurve.keyframe_points:
                # co[0] is the frame number (may be fractional).
                result.add(round(keyframe_point.co[0]))
        return result

    keyframes: Set[int] = set()
    for obj in context.selected_objects:
        if obj.animation_data is None:
            continue
        if obj.animation_data.action is not None:
            keyframes.update(get_action_keyframes(obj.animation_data.action))
        if obj.animation_data.nla_tracks is None:
            continue
        for nla_track in obj.animation_data.nla_tracks:
            if nla_track.mute:
                continue
            for strip in nla_track.strips:
                if strip.type != 'CLIP':
                    continue
                strip_action_keyframes = get_action_keyframes(strip.action)
                # Scale/strip the action data with the strip
                # transformations
                offset = strip.frame_start
                limit_down = strip.action_frame_start
                limit_up = strip.action_frame_end
                scale = strip.scale
                cycle_length = limit_up - limit_down
                scaled_cycle_length = cycle_length * scale
                repeat = strip.repeat
                transformed_keyframes: Set[int] = set()
                for keyframe in sorted(strip_action_keyframes):
                    # Ignore keyframes outside the strip's action range.
                    if keyframe < limit_down or keyframe > limit_up:
                        continue
                    transformed_keyframe_base = keyframe * scale
                    for i in range(math.ceil(repeat)):
                        transformed_keyframe = (
                            (i * scaled_cycle_length) +
                            transformed_keyframe_base
                        )
                        if transformed_keyframe/scaled_cycle_length > repeat:
                            # Can happen when we've got for example 4th
                            # repeat but we only need 3.5
                            break
                        transformed_keyframe += offset
                        transformed_keyframes.add(
                            min(round(transformed_keyframe), strip.frame_end))
                keyframes.update(transformed_keyframes)
    return sorted(keyframes)  # Sorted list of ints

class PoseBone(NamedTuple):
    '''Properties of a pose of single bone.'''
    name: str
    location: np.array
    rotation: np.array
    scale: np.array
    parent_name: Optional[str] = None

    def relative(self, original: PoseBone) -> PoseBone:
        '''
        Returns :class:`PoseBone` object with
        properties of the bone relative to the original pose.

        :param original: the original pose.
        '''
        return PoseBone(
            name=self.name,
            # Scale combines multiplicatively; location and rotation
            # combine additively.
            scale=self.scale / original.scale,
            location=self.location - original.location,
            rotation=self.rotation - original.rotation,
            parent_name=original.parent_name
        )

class Pose:
    '''A pose in a frame of animation.'''
    def __init__(self):
        self.pose_bones: Dict[str, PoseBone] = {}
        '''dict of bones in a pose keyed by the name of the bones'''

    def load_poses(
            self, object_properties: McblendObjectGroup
    ):
        '''
        Builds :class:`Pose` object from object properties.

        :param object_properties: group of mcblend objects.
        '''
        for objprop in object_properties.values():
            if objprop.mctype in [MCObjType.BONE, MCObjType.BOTH]:
                # Scale
                local_matrix = objprop.get_local_matrix(
                    objprop.parent, normalize=False)
                # Axes reordered XYZ -> XZY (matches the conversion used in
                # get_mcrotation).
                scale = np.array(local_matrix.to_scale())[[0, 2, 1]]
                # Location
                location = np.array(local_matrix.to_translation())
                location = location[[0, 2, 1]] * MINECRAFT_SCALE_FACTOR
                # Rotation
                rotation = objprop.get_mcrotation(objprop.parent)
                if objprop.parent is not None:
                    parent_name=objprop.parent.obj_name
                else:
                    parent_name=None
                self.pose_bones[objprop.obj_name] = PoseBone(
                    name=objprop.obj_name, location=location, scale=scale,
                    rotation=rotation, parent_name=parent_name)

@dataclass
class AnimationExport:
    '''
    Object that represents animation during export.

    :param name: Name of the animation.
    :param length: Length of animation in seconds.
    :param loop_animation: Whether the Minecraft animation should be
        exported with loop property set to true.
    :param anim_time_update: Value of anim_time_update property of
        Minecraft animation.
    :param fps: The FPS setting of the scene.
    :param effect_events: The events of the animation from
        OBJECT_NusiqMcblendEventProperties.
    :param original_pose: Optional - the base pose of the animated object.
        The pose is empty by default after object creation until it's
        loaded.
    :param single_frame: Optional - whether the animation should be
        exported as a single frame pose (True) or as whole animation. False
        by default.
    :param poses: Optional - poses of the animation (keyframes) keyed by
        the number of the frame. This dictionary is empty by default after
        the creation and it gets populated on loading the poses.
    '''
    name: str
    length: float
    loop_animation: bool
    anim_time_update: str
    fps: float
    effect_events: Dict[str, Tuple[List[Dict], List[Dict]]]
    original_pose: Pose = field(default_factory=Pose)
    single_frame: bool = field(default_factory=bool)  # bool() = False
    poses: Dict[int, Pose] = field(default_factory=dict)
    sound_effects: Dict[int, List[Dict]] = field(default_factory=dict)
    particle_effects: Dict[int, List[Dict]] = field(default_factory=dict)

    def load_poses(
            self, object_properties: McblendObjectGroup,
            context: bpy_types.Context
    ):
        '''
        Populates the poses dictionary of this object.

        :param object_properties: group of mcblend objects.
        :param context: the context of running the operator.
        '''
        original_frame = context.scene.frame_current
        bpy.ops.screen.animation_cancel()
        try:
            # Frame 0 provides the base (rest) pose of the model.
            context.scene.frame_set(0)
            self.original_pose.load_poses(object_properties)
            if self.single_frame:
                context.scene.frame_set(original_frame)
                pose = Pose()
                pose.load_poses(object_properties)
                # The frame value in the dictionary key doesn't really
                # matter
                self.poses[original_frame] = pose
            else:
                for keyframe in _get_keyframes(context):
                    if (
                            keyframe < context.scene.frame_start or
                            keyframe > context.scene.frame_end
                    ):
                        continue  # skip frames out of range
                    context.scene.frame_set(keyframe)
                    curr_pose = Pose()
                    curr_pose.load_poses(object_properties)
                    self.poses[keyframe] = curr_pose
                # Load sound effects and particle effects
                for timeline_marker in context.scene.timeline_markers:
                    if timeline_marker.name not in self.effect_events:
                        continue
                    sound, particle = self.effect_events[
                        timeline_marker.name]
                    if len(sound) > 0:
                        self.sound_effects[timeline_marker.frame] = sound
                    if len(particle) > 0:
                        self.particle_effects[timeline_marker.frame] = particle
        finally:
            # Always restore the frame the user was on.
            context.scene.frame_set(original_frame)

    def json(
            self, old_json: Optional[Dict]=None,
            skip_rest_poses: bool=True) -> Dict:
        '''
        Returns the JSON dict with Minecraft animation. If JSON dict with
        valid animation file is passed to the function the function
        modifies its content.

        :param old_json: The original animation file to write into.
        :param skip_rest_poses: If true the exported animation won't
            contain information about bones that remain in the rest pose.
        :returns: JSON dict with Minecraft animation.
        '''
        # Create result dict
        result: Dict = {"format_version": "1.8.0", "animations": {}}
        try:
            if isinstance(old_json['animations'], dict):  # type: ignore
                result: Dict = old_json  # type: ignore
        except (TypeError, LookupError):
            # old_json is None or lacks a valid 'animations' dict - start
            # from scratch.
            pass
        bones: Dict = {}
        for bone_name in self.original_pose.pose_bones:
            bone = self._json_bone(bone_name, skip_rest_poses)
            if bone != {}:  # Nothing to export
                bones[bone_name] = bone
        if self.single_frame:  # Other properties don't apply
            result["animations"][f"animation.{self.name}"] = {
                "bones": bones, "loop": True
            }
        else:
            result["animations"][f"animation.{self.name}"] = {
                "animation_length": self.length, "bones": bones
            }
            if len(self.particle_effects) > 0:
                particle_effects = {}
                for key_frame, value in self.particle_effects.items():
                    # Frame 1 corresponds to timestamp 0 in Minecraft.
                    timestamp = str(round((key_frame-1) / self.fps, 4))
                    particle_effects[timestamp] = value
                result["animations"][f"animation.{self.name}"][
                    'particle_effects'] = particle_effects
            if len(self.sound_effects) > 0:
                sound_effects = {}
                for key_frame, value in self.sound_effects.items():
                    timestamp = str(round((key_frame-1) / self.fps, 4))
                    sound_effects[timestamp] = value
                result["animations"][f"animation.{self.name}"][
                    'sound_effects'] = sound_effects
            data = result["animations"][f"animation.{self.name}"]
            if self.loop_animation:
                data['loop'] = True
            if self.anim_time_update != "":
                data['anim_time_update'] = self.anim_time_update
        return result

    def _json_bone(self, bone_name: str, skip_rest_pose: bool) -> Dict:
        '''
        Returns optimized JSON dict with an animation of single bone.

        :param bone_name: the name of the bone.
        :param skip_rest_pose: whether the properties of the bone being in
            its rest pose should be skipped.
        :returns: the part of animation with animation of a single bone.
        '''
        # t, rot, loc, scale
        poses: List[Dict] = []
        # Neutral previous pose used to minimize the rotation of the first
        # keyframe.
        prev_pose_bone = PoseBone(
            name=bone_name, scale=np.zeros(3), location=np.zeros(3),
            rotation=np.zeros(3),
        )
        for key_frame in self.poses:
            # Get relative PoseBone with minimized rotation
            original_pose_bone = self.original_pose.pose_bones[bone_name]
            parent_name = original_pose_bone.parent_name
            # Get original parent scale. Scaling the location with original
            # parent scale allows to have issue #71 fixed and also being
            # able to use scale in animations (issue #76) which was
            # impossible to do after commit
            # 19ef865943da7fde039bba7b7f50d1fa69a140b6 (the one which
            # closed issue #71).
            if parent_name in self.original_pose.pose_bones:
                original_parent_pose_bone = self.original_pose.pose_bones[
                    parent_name]
                original_parent_scale = original_parent_pose_bone.scale
            else:
                original_parent_scale = np.ones(3)  # no parent, no scaling
            pose_bone = self.poses[key_frame].pose_bones[bone_name].relative(
                original_pose_bone)
            pose_bone = PoseBone(
                name=pose_bone.name, scale=pose_bone.scale,
                location=pose_bone.location * original_parent_scale,
                rotation=_pick_closest_rotation(
                    pose_bone.rotation, prev_pose_bone.rotation,
                    original_pose_bone.rotation)
            )
            # Frame 1 corresponds to timestamp 0 in Minecraft.
            timestamp = str(round((key_frame-1) / self.fps, 4))
            poses.append({
                't': timestamp,
                'loc': get_vect_json(pose_bone.location),
                'scl': get_vect_json(pose_bone.scale),
                'rot': get_vect_json(pose_bone.rotation),
            })
            # Update prev pose
            prev_pose_bone = pose_bone

        # Filter unnecessary frames and add them to bone
        if not poses:  # If empty return empty animation
            return {'position': {}, 'rotation': {}, 'scale': {}}
        if self.single_frame:  # Returning single frame pose is easier
            result = {}
            loc, rot, scl = poses[0]['loc'], poses[0]['rot'], poses[0]['scl']
            # Filter rest pose positions
            if loc != [0, 0, 0] or not skip_rest_pose:
                result['position'] = poses[0]['loc']
            if rot != [0, 0, 0] or not skip_rest_pose:
                result['rotation'] = poses[0]['rot']
            if scl != [1, 1, 1] or not skip_rest_pose:
                result['scale'] = poses[0]['scl']
            return result
        bone: Dict = {  # dictionary populated with 0 timestamp frame
            'position': {poses[0]['t']: poses[0]['loc']},
            'rotation': {poses[0]['t']: poses[0]['rot']},
            'scale': {poses[0]['t']: poses[0]['scl']},
        }
        # iterate in threes (previous, current , next), remove unnecessary
        # items
        prev, curr, next_ = tee(poses, 3)
        for prv, crr, nxt in zip(
                prev, islice(curr, 1, None), islice(next_, 2, None)
        ):
            # Keep a keyframe only if it differs from one of its
            # neighbours.
            if prv['scl'] != crr['scl'] or crr['scl'] != nxt['scl']:
                bone['scale'][crr['t']] = crr['scl']
            if prv['loc'] != crr['loc'] or crr['loc'] != nxt['loc']:
                bone['position'][crr['t']] = crr['loc']
            if prv['rot'] != crr['rot'] or crr['rot'] != nxt['rot']:
                bone['rotation'][crr['t']] = crr['rot']
        # Add last element unless there is only one (in which case it's
        # already added)
        if len(poses) > 1:
            bone['rotation'][poses[-1]['t']] = poses[-1]['rot']
            bone['position'][poses[-1]['t']] = poses[-1]['loc']
            bone['scale'][poses[-1]['t']] = poses[-1]['scl']
        # Filter rest pose positions
        if skip_rest_pose:
            for v in bone['position'].values():
                if v != [0, 0, 0]:
                    break  # found non-rest pose item
            else:  # this is rest pose
                del bone['position']

            for v in bone['rotation'].values():
                if v != [0, 0, 0]:
                    break  # found non-rest pose item
            else:  # this is rest pose
                del bone['rotation']

            for v in bone['scale'].values():
                if v != [1, 1, 1]:
                    break  # found non-rest pose item
            else:  # this is rest pose
                del bone['scale']
        return bone
<file_sep>'''
Set of various image filters used for generating textures for models.

Uses numpy arrays with colors with colors encoded with values in range 0-1.
'''
# pylint: disable=invalid-name
from __future__ import annotations

from itertools import cycle, accumulate
from typing import Tuple, Iterable, NamedTuple, List, Optional, Sequence
from abc import ABC, abstractmethod
from enum import Enum

import numpy as np

class UvMaskTypes(Enum):
    '''
    UvMaskTypes are used for selecting one of the available masks types in
    dropdown lists.
''' COLOR_PALLETTE_MASK='Color Palette Mask' GRADIENT_MASK='Gradient Mask' ELLIPSE_MASK='Ellipse Mask' RECTANGLE_MASK='Rectangle Mask' STRIPES_MASK='Stripes Mask' RANDOM_MASK='Random Mask' COLOR_MASK='Color Mask' MIX_MASK='Mix Mask' def list_mask_types_as_blender_enum(self, context): ''' Passing list itself to some operators/panels didn't work. This function is a workaround that uses alternative definition for EnumProperty. https://docs.blender.org/api/current/bpy.props.html#bpy.props.EnumProperty ''' # pylint: disable=unused-argument return [(i.value, i.value, i.value) for i in UvMaskTypes] class MixMaskMode(Enum): '''MixMaskMode is used to define the behavior of the MixMask''' mean='mean' min='min' max='max' median='median' def list_mix_mask_modes_as_blender_enum(self, context): ''' Returns list of tuples for creating EnumProperties with MixMaskMode enum. ''' # pylint: disable=unused-argument return [(i.value, i.value, i.value) for i in MixMaskMode] class Mask(ABC): '''Abstract class, parent of all Filters.''' @abstractmethod def apply(self, image: np.ndarray): ''' Applies the image to the image. :param image: The image filtered by the mask. ''' class Color(NamedTuple): '''Color palette color.''' r: float g: float b: float @staticmethod def create_from_hex(color: str): '''Creates color object from hex string e.g. "ffffff"''' if len(color) != 6: raise Exception( 'The color should be passed as 6 digit a hex number with ' 'format "rrggbb"' ) return Color( int(color[:2], 16)/255.0, int(color[2:4], 16)/255.0, int(color[4:], 16)/255.0 ) class ColorPaletteMask(Mask): ''' ColorPaletteMask is a mask that maps values (0 to 1) from the image to colors from the color palette. 
    '''
    def __init__(
            self, colors: List[Color], *,
            interpolate: bool = False,
            normalize: bool = False):
        self.colors = colors
        self.interpolate = interpolate
        self.normalize = normalize

    def apply(self, image: np.ndarray):
        # xp and fp for np.interp
        if self.interpolate:
            # Smooth gradient: one control point per palette color.
            fp_r = [c.r for c in self.colors]
            fp_g = [c.g for c in self.colors]
            fp_b = [c.b for c in self.colors]
            xp = np.array(list(range(len(self.colors))))
            xp = xp/(len(self.colors)-1)
        else:
            def repeated_list(iterable):
                # Yield every item twice to build step-function control
                # points (flat color bands instead of a gradient).
                for i in iterable:
                    yield i
                    yield i
            fp_r = [c.r for c in repeated_list(self.colors)]
            fp_g = [c.g for c in repeated_list(self.colors)]
            fp_b = [c.b for c in repeated_list(self.colors)]
            xp = np.array(list(range(len(self.colors))))
            xp = xp/len(self.colors)
            unpacked_xp = [0.0]
            for xpi in repeated_list(xp[1:]):
                unpacked_xp.append(xpi)
            unpacked_xp.append(1.0)
            xp = np.array(unpacked_xp)

        # Input image must be converted to grayscale
        gray = np.mean(image, axis=2)
        if self.normalize:
            # Stretch the grayscale values to the full 0-1 range.
            gray = np.interp(
                gray, [np.min(gray), np.max(gray)], [0, 1]
            )
        image[:,:,:] = np.stack([gray for _ in range(3)], axis=2)
        # Apply filters
        image[:,:,0] = np.interp(image[:,:,0], xp, fp_r)
        image[:,:,1] = np.interp(image[:,:,1], xp, fp_g)
        image[:,:,2] = np.interp(image[:,:,2], xp, fp_b)

class MultiplicativeMask(Mask):
    '''
    A mask which can return a matrix which can be multiplied element-wise
    by the image matrix to get the result of applying the mask.
    '''
    def apply(self, image: np.ndarray):
        mask = self.get_mask(image)
        image[:,:,:] = image*mask

    @abstractmethod
    def get_mask(self, image: np.array) -> np.array:
        '''Returns 2D matrix with the filter array.'''

class DummyMask(MultiplicativeMask):
    '''
    A multiplicative mask that always return a white image.
    '''
    def get_mask(self, image):
        w, h, _ = image.shape
        return np.ones((w, h))[:,:, np.newaxis]

class Stripe(NamedTuple):
    '''
    Stripes are used in StripesMask and ColorPaletteMask mask in a
    collection to define width and the value of the items that compose the
    mask.
    '''
    width: float
    strength: float

class TwoPointSurfaceMask(MultiplicativeMask):
    '''
    Abstract class for masks that require two points on the textures to
    define which area should be affected.
    '''
    def __init__(
            self, p1: Tuple[float, float], p2: Tuple[float, float],
            *, relative_boundaries: bool=True):
        self.p1 = p1
        self.p2 = p2
        self.relative_boundaries = relative_boundaries

    def get_surface_properties(
            self, image: np.ndarray,
            sort_points=False) -> Tuple[int, int, int, int, int, int]:
        '''
        Uses points passed in the constructor and the image to return the
        coordinates of the points that define which area of the texture
        should be affected by the mask.

        :param sort_points: whether the returned points should be sorted by
            the coordinates (minx, miny), (maxx, maxy).
        '''
        w, h, _ = image.shape
        wh = np.array([w, h])
        # Get highlighted area indices
        if self.relative_boundaries:
            # The values from relative boundaries should always be between
            # 0 and 1.
            # The result values are clipped to range 0 to size-1
            p1 = np.clip(
                np.array(np.array(self.p1)*wh, dtype=int),
                (0, 0), (max(0, w-1), max(0, h-1))
            )
            p2 = np.clip(
                np.array(np.array(self.p2)*wh, dtype=int),
                (0, 0), (max(0, w-1), max(0, h-1))
            )
        else:
            p1 = np.array(
                self.p1, dtype=int)
            p2 = np.array(
                self.p2, dtype=int)
        # Wrap absolute coordinates around the image size (so negative
        # values index from the far edge).
        u1, v1 = p1%wh
        u2, v2 = p2%wh
        if sort_points:
            u1, u2 = min(u1, u2), max(u1, u2)
            v1, v2 = min(v1, v2), max(v1, v2)
        return w, h, u1, u2, v1, v2

class GradientMask(TwoPointSurfaceMask):
    '''
    Uses stripes with different widths and strengths to create a grayscale
    gradient between two points.
    '''
    def __init__(
            self, p1: Tuple[float, float], p2: Tuple[float, float],
            *, stripes: Iterable[Stripe]=(
                Stripe(0.0, 0.0),
                Stripe(1.0, 1.0)
            ),
            relative_boundaries: bool=True,
            expotent: float=1.0):
        super().__init__(p1, p2, relative_boundaries=relative_boundaries)
        self.stripe_strength: List[float] = []
        stripe_width = []
        for i in stripes:
            if i.width < 0:
                raise Exception(
                    'All stripe width must be greater or equal 0')
            stripe_width.append(i.width)
            self.stripe_strength.append(i.strength)
        # Normalize the widths so they sum to 1.
        self.stripe_width = np.array(stripe_width)/np.sum(stripe_width)
        self.expotent=expotent

    def get_mask(self, image):
        w, h, u1, u2, v1, v2 = self.get_surface_properties(
            image, sort_points=False)

        def split_complex(c):
            return (c.real, c.imag)

        a = np.array((u1, v1))
        b = np.array((u2, v2))
        # Rotate b around a 90 degrees (multiplication by 1j rotates a
        # complex number by 90 degrees).
        b_prime = np.array(split_complex(complex(*b-a)*1j))+a
        # Get the line that connects a and b_prime
        if a[0] == b_prime[0]:
            # Vertical line x = a[0] in standard form A*x + B*y + C = 0.
            abc = np.array([1, 0, -a[0]])
        else:
            slope = (a[1]-b_prime[1])/(a[0]-b_prime[0])
            # Point slope form: (y-b[1])=slope*(x-b[0])
            # Standard form parameters: 0 = A*x + B*y + C
            abc = np.array([slope, -1, -slope*b_prime[0] + b_prime[1]])
        crds = np.indices((w, h), dtype=float)
        # Add one more dimension for C (assign "1" for every pixel)
        crds = np.stack([crds[0], crds[1], np.ones((w, h))], axis=2)
        # https://en.wikipedia.org/wiki/Distance_from_a_point_to_a_line
        mask = np.abs(
            np.sum(abc*crds, axis=2))/((np.sum(abc[:2]**2))**0.5
        )
        # Map the distance to strengths along the a->b direction using the
        # cumulative stripe widths as interpolation break points.
        interp_len = np.linalg.norm(b-a)
        xp = list(accumulate(self.stripe_width*interp_len))
        fp = self.stripe_strength
        mask = np.interp(
            mask, xp, fp)
        mask=mask**self.expotent
        return mask[:, :, np.newaxis]

class EllipseMask(TwoPointSurfaceMask):
    '''
    Creates ellipse in-between two points.
''' def __init__( self, p1: Tuple[float, float], p2: Tuple[float, float], *, strength: Tuple[float, float]=(0.0, 1.0), relative_boundaries: bool=True, hard_edge: bool=False, expotent: float=1.0): super().__init__( p1, p2, relative_boundaries=relative_boundaries) self.strength = strength self.hard_edge = hard_edge self.expotent=expotent def get_mask(self, image): w, h, u1, u2, v1, v2 = self.get_surface_properties(image) # img = np.ones((w, h, 3), dtype=np.float) a = (u2-u1)/2 b = (v2-v1)/2 a = a if a >= 1 else 1 b = b if b >= 1 else 1 offset_x = np.mean([u1, u2]) offset_y = np.mean([v1, v2]) crds = np.indices((w, h), dtype=float)+0.5 crds[0] -= offset_x crds[1] -= offset_y mask = crds[0]**2/a**2 + crds[1]**2/b**2 inside = mask <= 1 outside = mask > 1 if self.hard_edge: mask[outside] = self.strength[1] mask[inside] = self.strength[0] else: mask[inside] = self.strength[1] try: mask = np.interp(mask, [np.min(mask[outside]), np.max(mask[outside])], self.strength ) except ValueError: # when mask[outside] or is empty pass mask=mask**self.expotent return mask[:, :, np.newaxis] class RectangleMask(TwoPointSurfaceMask): ''' Creates a rectangle in-between two points. 
    '''
    def __init__(
            self, p1: Tuple[float, float],
            p2: Tuple[float, float], *,
            strength: Tuple[float, float]=(0.0, 1.0),
            relative_boundaries: bool=True,
            hard_edge: bool=False,
            expotent: float=1.0):
        super().__init__(
            p1, p2, relative_boundaries=relative_boundaries)
        self.strength = strength
        self.expotent = expotent
        self.hard_edge = hard_edge

    def get_mask(self, image: np.array):
        '''Returns a (w, h, 1) grayscale rectangle mask for the image.'''
        w, h, u1, u2, v1, v2 = self.get_surface_properties(image)

        # Create basic mask array
        mask = np.zeros((w, h))

        # Hard edge (or a rectangle covering the whole image): paint the
        # rectangle with strength[0] on a strength[1] background and return.
        if self.hard_edge or (u1 == 0 and v1 == 0 and w == u2+1 and h == v2+1):
            mask[:,:] = self.strength[1]
            mask[u1:u2+1, v1:v2+1] = self.strength[0]
            return mask[:, :, np.newaxis]
        # Else:
        # Soft edge: fill each of the 9 segments around/inside the rectangle
        # with the Euclidean distance to the rectangle edge (flipud/fliplr
        # make the index counters grow away from the rectangle), then
        # rescale everything into the strength range.
        # Set values of 9 segments
        # Left top
        segment_shape = mask[:u1+1,:v1+1].shape
        idx1, idx2 = np.indices(segment_shape)
        dist = np.array([np.flipud(idx1), np.fliplr(idx2)])
        mask[:u1+1,:v1+1] = np.linalg.norm(dist, axis=0)
        # Top
        segment_shape = mask[:u1+1,v1:v2+1].shape
        idx1, idx2 = np.indices(segment_shape)
        dist = np.array([np.flipud(idx1), np.zeros(segment_shape)])
        mask[:u1+1,v1:v2+1] = np.linalg.norm(dist, axis=0)
        # Right top
        segment_shape = mask[:u1+1,v2:].shape
        idx1, idx2 = np.indices(segment_shape)
        dist = np.array([np.flipud(idx1), idx2])
        mask[:u1+1,v2:] = np.linalg.norm(dist, axis=0)
        # # Left mid
        segment_shape = mask[u1:u2+1,:v1+1].shape
        idx1, idx2 = np.indices(segment_shape)
        dist = np.array([np.zeros(segment_shape), np.fliplr(idx2)])
        mask[u1:u2+1,:v1+1] = np.linalg.norm(dist, axis=0)
        # # Mid
        # # Already filled with zeros
        # Right mid
        segment_shape = mask[u1:u2+1,v2:].shape
        idx1, idx2 = np.indices(segment_shape)
        dist = np.array([np.zeros(segment_shape), idx2])
        mask[u1:u2+1,v2:] = np.linalg.norm(dist, axis=0)
        # Left bottom
        segment_shape = mask[u2:,:v1+1].shape
        idx1, idx2 = np.indices(segment_shape)
        dist = np.array([idx1, np.fliplr(idx2)])
        mask[u2:,:v1+1] = np.linalg.norm(dist, axis=0)
        # Bottom
        segment_shape = mask[u2:,v1:v2+1].shape
        idx1, idx2 = np.indices(segment_shape)
        dist = np.array([idx1, np.zeros(segment_shape)])
        mask[u2:,v1:v2+1] = np.linalg.norm(dist, axis=0)
        # Right bottom
        segment_shape = mask[u2:,v2:].shape
        idx1, idx2 = np.indices(segment_shape)
        dist = np.array([idx1, idx2])
        mask[u2:,v2:] = np.linalg.norm(dist, axis=0)

        # Rescale the distances into the strength range and apply
        # the expotent.
        mask = np.interp(mask, (mask.min(), mask.max()), self.strength)
        mask = mask**self.expotent
        return mask[:, :, np.newaxis]


class StripesMask(MultiplicativeMask):
    '''
    Creates horizontal or vertical grayscale stripes.
    '''
    def __init__(
            self, stripes: List[Stripe], *,
            horizontal: bool=True,
            relative_boundaries: bool=True):
        self.stripe_width: List[float] = []
        self.stripe_strength: List[float] = []
        for i in stripes:
            if i.width <= 0:
                raise Exception('All stripe widths must be greater than 0')
            self.stripe_width.append(i.width)
            self.stripe_strength.append(i.strength)
        self.horizontal = horizontal
        self.relative_boundaries = relative_boundaries

    def get_mask(self, image: np.array) -> np.array:
        '''Returns a (w, h, 1) grayscale stripe pattern for the image.'''
        w, h, _ = image.shape
        mask = np.ones((w, h))

        stripe_width = np.array(self.stripe_width)
        if self.relative_boundaries:
            # Relative widths are fractions of the image size along the
            # stripe axis.
            stripe_width *= w if self.horizontal else h
        # One pixel is minimal stripe width
        stripe_width[stripe_width < 1] = 1

        stripes_limit = w if self.horizontal else h
        prev_index = 0
        # The stripe pattern repeats until the whole image is covered;
        # cycle() makes the zipped iterators infinite and the loop breaks
        # once the accumulated width reaches the limit.
        # infinite loop
        for i, strength in zip(
                accumulate(cycle(stripe_width)),
                cycle(self.stripe_strength)
        ):
            curr_index = int(i)
            if self.horizontal:
                mask[prev_index:curr_index,:] = strength
            else:
                mask[:, prev_index:curr_index] = strength
            prev_index = curr_index
            if curr_index >= stripes_limit:
                break
        return mask[:, :, np.newaxis]


class RandomMask(MultiplicativeMask):
    '''
    Creates randomly colored grayscale pixels.
    '''
    def __init__(
            self, *,
            strength: Tuple[float, float]=(0.0, 1.0),
            expotent: float=1.0,
            seed: Optional[int]=None):
        self.strength = strength
        self.expotent = expotent
        self.seed = seed

    def get_mask(self, image):
        '''Returns a (w, h, 1) array of random grayscale pixels.'''
        # Get the shape of the image
        w, h, _ = image.shape

        # Seeds the global numpy RNG; None means an unpredictable seed.
        np.random.seed(self.seed)
        mask = np.random.rand(w, h)
        mask = np.interp(mask, (0.0, 1.0), self.strength)
        mask = mask**self.expotent
        return mask[:,:,np.newaxis]


class ColorMask(MultiplicativeMask):
    '''
    Filters the image with a color.
    '''
    def __init__(self, color: Tuple[float, float, float]):
        self.r, self.g, self.b = color

    def get_mask(self, image):
        '''Returns a (w, h, 3) array filled with the constant color.'''
        # Get the shape of the image
        w, h, _ = image.shape
        mask = np.zeros((w, h, 3))
        mask[:,:,0] = self.r
        mask[:,:,1] = self.g
        mask[:,:,2] = self.b
        return mask[:,:,:]


class MixMask(MultiplicativeMask):
    '''
    Mixes multiple masks by calculating their pixelwise mean, min, max or
    median values.
    '''
    def __init__(
            self, masks: Iterable[MultiplicativeMask], *,
            strength: Tuple[float, float]=(0.0, 1.0),
            expotent: float=1.0,
            mode='mean'):
        self.strength = strength
        self.expotent = expotent
        self.masks = masks
        self.mode = mode

    def get_mask(self, image):
        '''Returns the combined mask array of all of the submasks.'''
        # Get the shape of the image
        w, h, _ = image.shape

        # If there is no masks on the list than return blank mask
        if len(self.masks) == 0:
            mask = np.ones((w, h))
            return mask[:,:,np.newaxis]

        # If any submask is RGB, grayscale submasks are promoted to RGB so
        # they can be combined together.
        # NOTE(review): the masks above always return 3-D arrays
        # ((w, h, 1) via np.newaxis), so the 2-D promotion branch below may
        # never trigger - verify against ColorPaletteMask and other
        # Mask implementations.
        is_rgb = False
        for m in self.masks:
            if len(m.get_mask(image).shape) == 3:
                is_rgb = True
                break
        mask_arrays = []
        for m in self.masks:
            mask_array = m.get_mask(image)
            if is_rgb:
                if len(mask_array.shape) == 2:
                    # Convert grayscale to RGB
                    mask_array = np.stack(
                        [mask_array for _ in range(3)], axis=2)
            mask_arrays.append(mask_array)
        if self.mode == 'mean':
            mask = np.mean(mask_arrays, axis=0)
        elif self.mode == 'min':
            mask = np.min(mask_arrays, axis=0)
        elif self.mode == 'max':
            mask = np.max(mask_arrays, axis=0)
        elif self.mode == 'median':
            mask = np.median(mask_arrays, axis=0)
        else:
            raise Exception(f"Unknown mix mode! {self.mode}")
        mask = np.interp(mask, (0.0, 1.0), self.strength)
        mask = mask**self.expotent
        return mask


def _get_color_from_gui_color(color) -> Color:
    '''
    Returns Color object from definition created with the GUI.
    (OBJECT_NusiqMcblendColorProperties)
    '''
    #pylint: disable=singleton-comparison
    # convert linear rgb to srgb
    # (piecewise sRGB transfer function: the linear segment is used below
    # the 0.0031308 threshold, the gamma segment above it)
    rgb = np.array(color.color)
    selector = rgb < 0.0031308
    rgb[selector] *= 12.92
    rgb[selector == False] = 1.055 * rgb[selector == False]**(1/2.4) - 0.055
    return Color(*rgb)


def get_masks_from_side(side) -> Sequence[Mask]:
    '''
    Returns tuple of Masks from one masks side definition created in GUI.
    '''
    def _get_masks_from_side(side: Iterable, n_steps: int) -> Sequence[Mask]:
        # Builds Mask objects from at most n_steps GUI property items.
        result: List[Mask] = []
        mask: Mask
        # n_steps limits maximal number of consumed items
        # side is an iterator shared by all nested iterations
        # (a MIX_MASK consumes its children from the same iterator via
        # the recursive call below)
        for _, s_props in zip(range(n_steps), side):
            if s_props.mask_type == UvMaskTypes.COLOR_PALLETTE_MASK.value:
                mask = ColorPaletteMask(
                    [_get_color_from_gui_color(c) for c in s_props.colors],
                    interpolate=s_props.interpolate,
                    normalize=s_props.normalize)
            elif s_props.mask_type == UvMaskTypes.GRADIENT_MASK.value:
                if s_props.relative_boundaries:
                    p1 = tuple(s_props.p1_relative)
                    p2 = tuple(s_props.p2_relative)
                else:
                    p1 = tuple(s_props.p1)
                    p2 = tuple(s_props.p2)
                mask = GradientMask(
                    p1, p2,  # type: ignore
                    # Gradient mask never uses relative width for stripes
                    stripes=[
                        Stripe(s.width, s.strength)
                        for s in s_props.stripes],
                    relative_boundaries=s_props.relative_boundaries,
                    expotent=s_props.expotent)
            elif s_props.mask_type == UvMaskTypes.ELLIPSE_MASK.value:
                if s_props.relative_boundaries:
                    p1 = tuple(s_props.p1_relative)
                    p2 = tuple(s_props.p2_relative)
                else:
                    p1 = tuple(s_props.p1)
                    p2 = tuple(s_props.p2)
                mask = EllipseMask(
                    p1, p2,  # type: ignore
                    strength=tuple(s_props.strength),  #type: ignore
                    relative_boundaries=s_props.relative_boundaries,
                    hard_edge=s_props.hard_edge,
                    expotent=s_props.expotent)
            elif s_props.mask_type == UvMaskTypes.RECTANGLE_MASK.value:
                if s_props.relative_boundaries:
                    p1 = tuple(s_props.p1_relative)
                    p2 = tuple(s_props.p2_relative)
                else:
                    p1 = tuple(s_props.p1)
                    p2 = tuple(s_props.p2)
                mask = RectangleMask(
                    p1, p2,  # type: ignore
                    strength=tuple(s_props.strength),  #type: ignore
                    relative_boundaries=s_props.relative_boundaries,
                    hard_edge=s_props.hard_edge,
                    expotent=s_props.expotent)
            elif s_props.mask_type == UvMaskTypes.STRIPES_MASK.value:
                if s_props.relative_boundaries:
                    stripes = [
                        Stripe(s.width_relative, s.strength)
                        for s in s_props.stripes]
                else:
                    stripes = [
                        Stripe(s.width, s.strength)
                        for s in s_props.stripes]
                mask = StripesMask(
                    stripes,
                    horizontal=s_props.horizontal,
                    relative_boundaries=s_props.relative_boundaries)
            elif s_props.mask_type == UvMaskTypes.RANDOM_MASK.value:
                seed: Optional[int] = None
                if s_props.use_seed:
                    seed = s_props.seed
                mask = RandomMask(
                    strength=tuple(s_props.strength),  # type: ignore
                    expotent=s_props.expotent,
                    seed=seed)
            elif s_props.mask_type == UvMaskTypes.COLOR_MASK.value:
                mask = ColorMask(_get_color_from_gui_color(s_props.color))
            elif s_props.mask_type == UvMaskTypes.MIX_MASK.value:
                mask = MixMask(
                    masks=[
                        # Non multiplicative masks are ignored
                        submask for submask in
                        _get_masks_from_side(side, n_steps=s_props.children)
                        if isinstance(submask, MultiplicativeMask)
                    ],
                    strength=s_props.strength,
                    expotent=s_props.expotent,
                    mode=s_props.mode)
            else:
                raise ValueError('Unknown mask type')
            # Hidden masks are replaced with no-op DummyMasks so that the
            # indices of the remaining masks stay stable.
            if s_props.ui_hidden:
                result.append(DummyMask())
            else:
                result.append(mask)
        return tuple(result)
    return _get_masks_from_side(iter(side), n_steps=len(side))
<file_sep>'''
This module contains all of the panels for mcblend GUI.
'''
# Don't import `from __future__ import annotations` - Blender needs the
# real annotations.
from typing import List, Optional
from dataclasses import dataclass

from .custom_properties import EffectTypes

import bpy
from bpy.props import (
    StringProperty, IntProperty, BoolProperty,
    FloatProperty, FloatVectorProperty, CollectionProperty,
    EnumProperty, PointerProperty, IntVectorProperty
)

from .operator_func.texture_generator import (
    list_mask_types_as_blender_enum, UvMaskTypes,
    list_mix_mask_modes_as_blender_enum)


# GUI
# UV-groups names list
class OBJECT_UL_NusiqMcblendUVGroupList(bpy.types.UIList):
    '''GUI item used for drawing list of names of UV-groups.'''
    def draw_item(
            self, context, layout, data, item, icon, active_data,
            active_propname):
        '''
        Drawing OBJECT_NusiqMcblendUvGroupProperties in a list.

        :param context: the contexts of operator
        :param layout: layout in which the object is drawn
        :param data: the RNA object containing the collection
        :param item: the item currently drawn in the collection
        :param icon: not used - "the "computed" icon for the item" (?)
        :param active_data: the RNA object containing the active property for
          the collection.
        :param active_propname: the name of the active property.

        For more info see the UI Template called: "UI List Simple".
        '''
        # pylint: disable=arguments-differ, unused-argument
        if self.layout_type in {'DEFAULT', 'COMPACT', 'CENTER'}:
            # No rename functionality:
            # layout.label(text=item.name, translate=False)

            # With rename functionality:
            layout.prop(item, "name", text="", emboss=False)


# UV-group panel
@dataclass
class _UIStackItem():
    '''
    Object used in OBJECT_PT_NusiqMcblendUVGroupPanel for saving the
    information about nested UV-groups in stack data structure.
    '''
    ui: Optional[bpy.types.UILayout]  # None if parent is collapsed
    depth: int


class OBJECT_PT_NusiqMcblendUVGroupPanel(bpy.types.Panel):
    '''Panel used for editing UV-groups.'''
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = 'scene'
    bl_label = "Mcblend UV groups"

    def draw_colors(self, mask, mask_index: int, col: bpy.types.UILayout):
        '''Draws colors of UV-mask.'''
        box = col.box()
        row = box.row()
        row.label(text='Colors')
        # Add-color button
        op_props = row.operator(
            "object.nusiq_mcblend_add_uv_mask_color", text="",
            icon='ADD')
        op_props.mask_index = mask_index

        colors_len = len(mask.colors)
        for color_index, color in enumerate(mask.colors):
            row = box.row()
            row.prop(color, "color", text="")
            up_down_row = row.row(align=True)
            # Move down
            if color_index - 1 >= 0:
                op_props = up_down_row.operator(
                    "object.nusiq_mcblend_move_uv_mask_color",
                    icon='TRIA_UP', text='')
                op_props.mask_index = mask_index
                op_props.move_from = color_index
                op_props.move_to = color_index - 1
            # Move up
            if color_index + 1 < colors_len:
                op_props = up_down_row.operator(
                    "object.nusiq_mcblend_move_uv_mask_color",
                    icon='TRIA_DOWN', text='')
                op_props.mask_index = mask_index
                op_props.move_from = color_index
                op_props.move_to = color_index + 1
            # Delete button
            op_props = row.operator(
                "object.nusiq_mcblend_remove_uv_mask_color",
                icon='X', text='')
            op_props.mask_index = mask_index
            op_props.color_index = color_index

    def draw_stripes(self, mask, mask_index: int, col: bpy.types.UILayout):
        '''Draws stripes of UV-mask.'''
        box = col.box()
        row = box.row()
        row.label(text='Stripes')
        # Add-stripe button
        op_props = row.operator(
            "object.nusiq_mcblend_add_uv_mask_stripe", text="",
            icon='ADD')
        op_props.mask_index = mask_index

        stripes_len = len(mask.stripes)
        for stripe_index, stripe in enumerate(mask.stripes):
            row = box.row()
            if (
                    mask.relative_boundaries and
                    mask.mask_type != UvMaskTypes.GRADIENT_MASK.value):
                # Gradient mask always uses absolute values
                row.prop(stripe, "width_relative")
            else:
                row.prop(stripe, "width")
            row.prop(stripe, "strength")

            up_down_row = row.row(align=True)
            # Move down
            if stripe_index - 1 >= 0:
                op_props = up_down_row.operator(
                    "object.nusiq_mcblend_move_uv_mask_stripe",
                    icon='TRIA_UP', text='')
                op_props.mask_index = mask_index
                op_props.move_from = stripe_index
                op_props.move_to = stripe_index - 1
            # Move up
            if stripe_index + 1 < stripes_len:
                op_props = up_down_row.operator(
                    "object.nusiq_mcblend_move_uv_mask_stripe",
                    icon='TRIA_DOWN', text='')
                op_props.mask_index = mask_index
                op_props.move_from = stripe_index
                op_props.move_to = stripe_index + 1
            # Delete button
            op_props = row.operator(
                "object.nusiq_mcblend_remove_uv_mask_stripe",
                icon='X', text='')
            op_props.mask_index = mask_index
            op_props.stripe_index = stripe_index

    def draw_mask_properties(
            self, mask, index: int, col: bpy.types.UILayout, *,
            colors=False, interpolate=False,
            normalize=False, p1p2=False, stripes=False,
            relative_boundaries=False, expotent=False, strength=False,
            hard_edge=False, horizontal=False, seed=False, color=False,
            children=False, mode=False):
        '''
        Draws properties of UV-mask. The keyword flags select which
        properties are shown for the given mask type.
        '''
        if colors:
            self.draw_colors(mask, index, col)  # colors
        if interpolate:
            col.prop(mask, "interpolate")
        if normalize:
            col.prop(mask, "normalize")
        if p1p2:
            row = col.row()
            if mask.relative_boundaries:
                row.prop(mask, "p1_relative")
                row = col.row()
                row.prop(mask, "p2_relative")
            else:
                row.prop(mask, "p1")
                row = col.row()
                row.prop(mask, "p2")
        if relative_boundaries:
            col.prop(mask, "relative_boundaries")
        if stripes:
            self.draw_stripes(mask, index, col)  # stripes
        if expotent:
            col.prop(mask, "expotent")
        if strength:
            col.row().prop(mask, "strength")
        if hard_edge:
            col.prop(mask, "hard_edge")
        if horizontal:
            col.prop(mask, "horizontal")
        if seed:
            row = col.row()
            row.prop(mask, "use_seed")
            if mask.use_seed:
                row.prop(mask, "seed")
        if color:
            col.prop(mask.color, "color")
        if mode:
            col.prop(mask, "mode")
        if children:
            col.prop(mask, "children")

    def draw_mask(
            self, mask, index: int, masks_len: int,
            ui_stack: List[_UIStackItem]):
        '''
        Draws whole UV-mask gui with additional GUI items for navigation
        between masks like buttons for moving and removing masks.
        '''
        col = None
        # If parent is collapsed don't draw anything
        if ui_stack[-1].ui is not None:
            col = ui_stack[-1].ui
            box = col.box()
            # box.scale_x = True
            col = box.column()
            row = col.row()

            # Expand/collapse triangle
            if mask.ui_collapsed:
                row.prop(
                    mask, "ui_collapsed", text="",
                    icon='DISCLOSURE_TRI_RIGHT', emboss=False)
            else:
                row.prop(
                    mask, "ui_collapsed", text="",
                    icon='DISCLOSURE_TRI_DOWN', emboss=False)
            row.label(text=f'{mask.mask_type}')
            up_down_row = row.row(align=True)
            # Move down
            if index - 1 >= 0:
                op_props = up_down_row.operator(
                    "object.nusiq_mcblend_move_uv_mask",
                    icon='TRIA_UP', text='')
                op_props.move_from = index
                op_props.move_to = index - 1
            # Move up
            if index + 1 < masks_len:
                op_props = up_down_row.operator(
                    "object.nusiq_mcblend_move_uv_mask",
                    icon='TRIA_DOWN', text='')
                op_props.move_from = index
                op_props.move_to = index + 1
            # Hide button
            if mask.ui_hidden:
                row.prop(
                    mask, "ui_hidden", text="", icon='HIDE_ON',
                    emboss=False)
            else:
                row.prop(
                    mask, "ui_hidden", text="", icon='HIDE_OFF',
                    emboss=False)
            # Delete button
            op_props = row.operator(
                "object.nusiq_mcblend_remove_uv_mask", icon='X', text='')
            op_props.target = index
            # Drawing the mask itself unless collapsed
            if not mask.ui_collapsed:
                if mask.mask_type == UvMaskTypes.COLOR_PALLETTE_MASK.value:
                    if len(ui_stack) > 1:
                        # Color palette mask is only valid at the top level.
                        col.label(
                            text="This mask can't be put inside mix mask",
                            icon='ERROR')
                    else:
                        self.draw_mask_properties(
                            mask, index, col,
                            colors=True, interpolate=True, normalize=True)
                if mask.mask_type == UvMaskTypes.GRADIENT_MASK.value:
                    self.draw_mask_properties(
                        mask, index, col,
                        p1p2=True, stripes=True, relative_boundaries=True,
                        expotent=True)
                if mask.mask_type == UvMaskTypes.ELLIPSE_MASK.value:
                    self.draw_mask_properties(
                        mask, index, col,
                        p1p2=True, relative_boundaries=True, expotent=True,
                        strength=True, hard_edge=True)
                if mask.mask_type == UvMaskTypes.RECTANGLE_MASK.value:
                    self.draw_mask_properties(
                        mask, index, col,
                        p1p2=True, relative_boundaries=True, expotent=True,
                        strength=True, hard_edge=True)
                if mask.mask_type == UvMaskTypes.STRIPES_MASK.value:
                    self.draw_mask_properties(
                        mask, index, col,
                        stripes=True, relative_boundaries=True,
                        horizontal=True)
                if mask.mask_type == UvMaskTypes.RANDOM_MASK.value:
                    self.draw_mask_properties(
                        mask, index, col,
                        strength=True, expotent=True, seed=True)
                if mask.mask_type == UvMaskTypes.COLOR_MASK.value:
                    self.draw_mask_properties(mask, index, col, color=True)
                if mask.mask_type == UvMaskTypes.MIX_MASK.value:
                    self.draw_mask_properties(
                        mask, index, col,
                        children=True, strength=True, expotent=True,
                        mode=True)

        # A mix mask pushes a new UI container onto the stack so the next
        # `children` masks are drawn nested inside it.
        if mask.mask_type == UvMaskTypes.MIX_MASK.value and col is not None:
            # mask.children+1 because it counts itself as a member
            if not mask.ui_collapsed:
                ui_stack.append(_UIStackItem(
                    col.box(), mask.children+1))
            else:
                ui_stack.append(_UIStackItem(
                    None, mask.children+1))

    def draw(self, context):
        '''Draws whole UV-group panel.'''
        col = self.layout.column(align=True)

        # Add group
        row = col.row()
        row.operator(
            "object.nusiq_mcblend_add_uv_group", text="New UV group",
            icon='ADD'
        )
        row_import_export = col.row()
        row_import_export.operator(
            "object.nusiq_mcblend_import_uv_group_operator",
            text="Import UV group",
            icon='IMPORT'
        )

        active_uv_group_id = bpy.context.scene.nusiq_mcblend_active_uv_group
        uv_groups = bpy.context.scene.nusiq_mcblend_uv_groups
        col.template_list(
            listtype_name="OBJECT_UL_NusiqMcblendUVGroupList",
            list_id="", dataptr=context.scene,
            propname="nusiq_mcblend_uv_groups",
            active_dataptr=context.scene,
            active_propname="nusiq_mcblend_active_uv_group")
        if active_uv_group_id < len(uv_groups):
            active_uv_group = uv_groups[active_uv_group_id]
            # Delete group
            row.operator(
                "object.nusiq_mcblend_remove_uv_group",
                text="Delete this UV group", icon='X')
            row_import_export.operator(
                "object.nusiq_mcblend_export_uv_group_operator",
                text="Export UV group",
                icon='EXPORT'
            )
            # Select side
            row = col.row()
            row.label(text='Side:')
            row.prop(
                context.scene, "nusiq_mcblend_active_uv_groups_side",
                text="")
            col.separator()
            col.operator(
                'object.nusiq_mcblend_copy_uv_group_side',
                text='Copy current UV face', icon='DUPLICATE')

            # Add mask
            col.operator_menu_enum(
                "object.nusiq_mcblend_add_uv_mask", "mask_type",
                text="Add mask", icon="ADD")

            # Draw selected side
            sides = [
                active_uv_group.side1, active_uv_group.side2,
                active_uv_group.side3, active_uv_group.side4,
                active_uv_group.side5, active_uv_group.side6
            ]
            masks = sides[
                int(context.scene.nusiq_mcblend_active_uv_groups_side)]
            # Stack of UI items to draw in
            ui_stack: List[_UIStackItem] = [
                _UIStackItem(col, 0)]
            for i, mask in enumerate(masks):
                col.separator(factor=0.5)
                self.draw_mask(mask, i, len(masks), ui_stack)
                # Remove empty ui containers from top of ui_stack
                while len(ui_stack) > 1:  # Except the first one
                    ui_stack[-1].depth -= 1
                    if ui_stack[-1].depth <= 0:
                        ui_stack.pop()
                    else:
                        break


# Event group panel
class OBJECT_UL_NusiqMcblendEventsList(bpy.types.UIList):
    '''GUI item used for drawing list of names of events.'''
    def draw_item(
            self, context, layout, data, item, icon, active_data,
            active_propname):
        '''
        Drawing OBJECT_NusiqMcblendEventGroupProperties in a list.

        :param context: the contexts of operator
        :param layout: layout in which the object is drawn
        :param data: the RNA object containing the collection
        :param item: the item currently drawn in the collection
        :param icon: not used - "the "computed" icon for the item" (?)
        :param active_data: the RNA object containing the active property for
          the collection.
        :param active_propname: the name of the active property.
        '''
        # pylint: disable=arguments-differ, unused-argument
        if self.layout_type in {'DEFAULT', 'COMPACT', 'CENTER'}:
            # No rename functionality:
            # layout.label(text=item.name, translate=False)

            # With rename functionality:
            layout.prop(item, "name", text="", emboss=False)


class OBJECT_PT_NusiqMcblendEventsPanel(bpy.types.Panel):
    '''Panel used for editing events.'''
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = 'scene'
    bl_label = "Mcblend events"

    def draw_effect(self, effect, index: int, col: bpy.types.UILayout):
        '''Draw single effect in the event'''
        # If parent is collapsed don't draw anything
        box = col.box()
        col = box.column()
        row = col.row()
        row.label(text=f'{effect.effect_type}')

        # Delete button
        op_props = row.operator(
            "object.nusiq_mcblend_remove_effect", icon='X', text='')
        op_props.effect_index = index

        # Particle effects expose more properties than sound effects.
        if effect.effect_type == EffectTypes.PARTICLE_EFFECT.value:
            col.prop(effect, "effect", text="Effect")
            col.prop(effect, "locator", text="Locator")
            col.prop(effect, "pre_effect_script", text="Pre effect script")
            col.prop(effect, "bind_to_actor", text="Bind to actor")
        elif effect.effect_type == EffectTypes.SOUND_EFFECT.value:
            col.prop(effect, "effect", text="Effect")

    def draw(self, context):
        '''Draws whole event group panel.'''
        col = self.layout.column(align=True)
        row = col.row()

        events = bpy.context.scene.nusiq_mcblend_events
        active_event_id = bpy.context.scene.nusiq_mcblend_active_event
        col.template_list(
            listtype_name="OBJECT_UL_NusiqMcblendEventsList",
            list_id="", dataptr=bpy.context.scene,
            propname="nusiq_mcblend_events",
            active_dataptr=bpy.context.scene,
            active_propname="nusiq_mcblend_active_event")
        row.operator(
            "object.nusiq_mcblend_add_event", text="New event",
            icon='ADD')
        if 0 <= active_event_id < len(events):
            # NOTE(review): the button label says "UV group" but this
            # operator removes an event - probably a copy-paste leftover
            # from the UV group panel.
            row.operator(
                "object.nusiq_mcblend_remove_event",
                text="Delete this UV group", icon='X')
            event = events[active_event_id]
            effects = event.effects
            col.operator_menu_enum(
                "object.nusiq_mcblend_add_effect", "effect_type",
                text="Add effect", icon="ADD")
            if len(effects) > 0:
                for i, effect in enumerate(effects):
                    col.separator(factor=0.5)
                    self.draw_effect(effect, i, col)


# Custom object properties panel
class OBJECT_PT_NusiqMcblendObjectPropertiesPanel(bpy.types.Panel):
    '''Panel used for editing custom model object properties.'''
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = 'object'
    bl_label = "Mcblend object properties"

    @classmethod
    def poll(cls, context):
        # The panel is shown only for mesh and empty objects.
        if context.active_object:
            return (
                context.active_object.type == "MESH" or
                context.active_object.type == "EMPTY")
        return False

    def draw(self, context):
        '''Draws the object properties panel.'''
        col = self.layout.column(align=True)

        if context.mode == "OBJECT" and context.object is not None:
            object_properties = context.object.nusiq_mcblend_object_properties
            col.prop(object_properties, "is_bone", text="Export as bone")
            col.prop(object_properties, "mesh_type", text="")
            # Mesh-only properties.
            if context.object.type == 'MESH':
                col.prop(object_properties, "mirror", text="Mirror")
                if object_properties.uv_group != '':
                    col.label(
                        text=f'UV Group: {object_properties.uv_group}')
                else:
                    col.label(text="This object doesn't have a UV group")
                col.prop(object_properties, "inflate", text="Inflate")


# Model export panel
class OBJECT_PT_NusiqMcblendExportPanel(bpy.types.Panel):
    '''
    Panel used for launching the model export operator and changing its
    settings.
    '''
    # pylint: disable=unused-argument
    bl_label = "Export bedrock model"
    bl_category = "Mcblend"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"

    def draw(self, context):
        '''Draws the model export panel.'''
        col = self.layout.column(align=True)
        # col.prop(context.scene.nusiq_mcblend, "path", text="")
        col.prop(
            context.scene.nusiq_mcblend, "model_name", text="Name"
        )
        col.prop(
            context.scene.nusiq_mcblend, "visible_bounds_width",
            text="Visible bounds width"
        )
        col.prop(
            context.scene.nusiq_mcblend, "visible_bounds_height",
            text="Visible bounds height"
        )
        col.prop(
            context.scene.nusiq_mcblend, "visible_bounds_offset",
            text="Visible bounds offset"
        )

        self.layout.row().operator(
            "object.nusiq_mcblend_export_operator", text="Export model"
        )


# Model import panel
class OBJECT_PT_NusiqMcblendImportPanel(bpy.types.Panel):
    '''Panel used for launching the model import operator.'''
    # pylint: disable=unused-argument
    bl_label = "Import bedrock model"
    bl_category = "Mcblend"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"

    def draw(self, context):
        '''Draws the model import panel (a single button).'''
        self.layout.row().operator(
            "object.nusiq_mcblend_import_operator", text="Import model"
        )


# Animation export panel
class OBJECT_PT_NusiqMcblendExportAnimationPanel(bpy.types.Panel):
    '''
    Panel used launching the animation export operator and changing its
    settings.
    '''
    # pylint: disable=unused-argument
    bl_label = "Export bedrock animation"
    bl_category = "Mcblend"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"

    def draw(self, context):
        '''Draws the animation export panel.'''
        col = self.layout.column(align=True)
        row = col.row()
        row.operator(
            "object.nusiq_mcblend_add_animation", text="New animation"
        )

        active_anim_id = bpy.context.scene.nusiq_mcblend_active_animation
        anims = bpy.context.scene.nusiq_mcblend_animations
        # Most of the panel is shown only when an animation exists.
        if active_anim_id < len(anims):
            row.operator(
                "object.nusiq_mcblend_remove_animation",
                text="Remove this animation"
            )
            col.operator_menu_enum(
                "object.nusiq_mcblend_list_animations", "animations_enum",
                text="Select animation"
            )

            active_anim = anims[active_anim_id]
            col.prop(active_anim, "name", text="Name")
            col.prop(active_anim, "skip_rest_poses", text="Skip rest poses")
            col.prop(active_anim, "single_frame", text="Export as pose")
            if active_anim.single_frame:
                # Pose export uses a single frame.
                col.prop(bpy.context.scene, "frame_current", text="Frame")
            else:
                col.prop(active_anim, "loop", text="Loop")
                col.prop(
                    active_anim, "anim_time_update",
                    text="anim_time_update")
                col.prop(
                    bpy.context.scene, "frame_start", text="Frame start")
                col.prop(
                    bpy.context.scene, "frame_end", text="Frame end")

            col.operator(
                "object.nusiq_mcblend_export_animation_operator",
                text="Export animation")


# UV-mapper panel
class OBJECT_PT_NusiqMcblendSetUvsPanel(bpy.types.Panel):
    '''
    Panel used for launching the UV-mapping operator and changing its
    settings.
    '''
    # pylint: disable=unused-argument
    bl_label = "Set bedrock UVs"
    bl_category = "Mcblend"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"

    def draw(self, context):
        '''Draws the UV-mapping panel.'''
        col = self.layout.column(align=True)
        col.prop(
            context.scene.nusiq_mcblend, "texture_width",
            text="Texture width")
        col.prop(
            context.scene.nusiq_mcblend, "texture_height",
            text="Texture height")
        col.prop(
            context.scene.nusiq_mcblend, "allow_expanding",
            text="Allow texture expanding")
        col.prop(
            context.scene.nusiq_mcblend, "generate_texture",
            text="Generate Texture")
        # Template resolution only matters when a texture is generated.
        if context.scene.nusiq_mcblend.generate_texture:
            col.prop(
                context.scene.nusiq_mcblend, "texture_template_resolution",
                text="Template resolution")

        self.layout.row().operator(
            "object.nusiq_mcblend_map_uv_operator",
            text="Set minecraft UVs")


# "Other" operators panel
class OBJECT_PT_NusiqMcblendOperatorsPanel(bpy.types.Panel):
    '''
    Panel that gives the user access to various operators used by
    Mcblend.
    '''
    # pylint: disable=unused-argument
    bl_label = "Operators"
    bl_category = "Mcblend"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"

    def draw(self, context):
        '''Draws the panel with buttons for miscellaneous operators.'''
        col = self.layout.column()
        col.operator(
            "object.nusiq_mcblend_toggle_mirror_operator",
            text="Toggle mirror for UV mapping"
        )
        col.operator(
            "object.nusiq_mcblend_uv_group_operator",
            text="Set the UV group"
        )
        col.operator(
            "object.nusiq_mcblend_clear_uv_group_operator",
            text="Clear UV group"
        )
        col.operator(
            "object.nusiq_mcblend_toggle_is_bone_operator",
            text="Toggle export as bones"
        )
        col.operator(
            "object.nusiq_mcblend_set_inflate_operator",
            text="Inflate"
        )
        col.operator(
            "object.nusiq_mcblend_round_dimensions_operator",
            text="Round dimensions"
        )
<file_sep># Basic operators

All of the basic operators are in the sidebar.

## Importing models

This panel is used for importing bedrock models from JSON files.

![](../img/import_model_panel.png)

__Usage__

1. Press the "Import model" button.
2. Set the import properties on the right side of the file explorer.
Blender lets you implement various dependencies between animated objects with constraints and rigging. This means that animating one object can affect movement of another object even if the second object doesn't have any key frames. That is why the key frames in Blender animations aren't directly translated to key frames in Minecraft animations. Mcblend collects the time stamps of every keyframe of every selected object and goes through that list looking for movements. Every movement of every object between two frames is saved to the exported file.
1. Create a new animation with the "New animation" button (some parts of the panel won't be visible until you have an animation). You may have as many animation settings saved as you want and you can switch between them with the "Select animation" dropdown menu.
2. Select the objects to export in the 3d viewport.
3. Fill in the form with the information about the animation.
    - **Name** - the name of the animation.
    - **Skip rest poses** - whether the bones in rest poses should be exported as a part of animation. A rest pose is a pose with default location, rotation and scale. This setting typically significantly reduces the size of the exported animation.
    - **Export as pose** - exports current frame as a looped animation with a pose instead of exporting full animation.
    - **Loop** - whether the animation should be looped.
    - **anim_time_update** - the content of this text field is copied to the "anim_time_update" property of the Minecraft animation. If you leave it blank, the animation won't use the anim_time_update.
- **Generate Texture** - whether the operator should generate a texture ( the name of the created texture is always "template") - **Template resolution** - Sets the resolution of the template texture. This value describes how many pixels on the image is represented by one texture_widht or texture_height unit in the model definition. The value 1 gives the standard Minecraft texture resolution. Higher values can be used to create "HD textures". You can adjust the UV-mapping using [UV-groups](../uv_groups/) to get best. !!! note After the UV mapping, you can still go to the UV editor and move everything to your liking. This operator tries to arrange the UVs of the selected objects on the texture space using the basic non-per-face Minecraft UV-mapping. If you move the UV in such a way that it cannot be mapped in standard Minecraft UV-mapping way than mcblend will detect that and uses per-face UV mapping. Don't move individual vertices of the faces on the UV unless you know what you're doing. The faces on the UV must remain rectangles, or the UV of the exported model may have unexpected shapes. ## Modifying Mcblend properties The "Operators" panel gives access to operators that modify various properties used by Mcblend. ![](../img/operators_panel.png) ### Toggle mirror for UV mapping Toggles the mirror property of selected objects. The mirror property affects the process of UV-mapping. Objects with this property are mapped as if they had the Minecraft mirror property. Using this operator doesn't remap the UV. If you want to change the UV of the object, you must also use the operator from the [Set bedrock UVs](#uv-mapping) panel to update the UVs. ### Set the UV group Adds selected objects to one of the existing [UV-groups](../uv_groups). ### Clear the UV group Removes selected objects from UV-groups. ### Toggle export as bones Toggles the export bone property of selected objects. Objects with this property are always exported as bones. 
Mcblend usually tries to export most meshes as cubes and groups them together with some parent object that becomes a bone. If you want to mark a certain cube as an independent bone, you need to use this operator. !!! note The best way of using Mcblend is to have one rig for the model and parenting the meshes to bones of that rig. This allows you to take the advantage of features like inverse kinematics and also creates a model in which it's really easy to distinguish which Blender object becames what in Minecraft model. Bones will be translated into Minecraft bones and meshes into cubes. If you add some empties without children they will become the locators. ### Inflate Inflates the selected object using Minecraft inflate property. Running this operator opens a panel in the bottom left corner of the 3D viewport. You can use this panel to adjust the "inflate" value. ![](../img/inflate_redo_panel.png) ### Round dimensions Rounds the dimensions of the object in such way that they become integers in exported Minecraft model. <file_sep>''' Select scene passed in commandline arguments, select all, export animation to path passed in arguments. This script should be executed after opening testing file with a model that have animation. ''' import sys import bpy # Collect arguments after "--" argv = sys.argv argv = argv[argv.index("--") + 1:] def main(scene_name: str, target_path: str): '''Main function.''' bpy.context.window.scene = bpy.data.scenes[scene_name] bpy.ops.object.select_all(action='SELECT') bpy.ops.object.nusiq_mcblend_export_operator(filepath=target_path) if __name__ == "__main__": main(argv[0], argv[1]) <file_sep>''' This is a testing script for exporting models. Exports model from blend file and compares them with the expected result. 
''' import os import shutil import json from pathlib import Path import typing as tp import pytest from .common import assert_is_model, blender_run_script, make_comparable_json def make_comparison_files( tmp: str, scene_name: str, blend_file_path: str, ) -> tp.Tuple[tp.Dict, tp.Dict]: ''' Opens blender file, selects_scene and exports model from that to given tmp path. Returns the result JSON in a dictionary and the path to newly created file. ''' tmp = os.path.abspath(tmp) target = os.path.join(tmp, f'{scene_name}.geo.json') expected_result_path = ( f'./tests/data/test_model_export/{scene_name}.geo.json' ) script = os.path.abspath('./blender_scripts/export_model.py') blend_file_path = os.path.abspath(blend_file_path) # Windows uses wierd path separators tmp = tmp.replace('\\', '/') target = target.replace('\\', '/') script = script.replace('\\', '/') # Create tmp if not exists Path(tmp).mkdir(parents=True, exist_ok=True) # Run blender actions blender_run_script( script, scene_name, target, blend_file_path=blend_file_path ) # Return results with open(target, 'r') as f: target_dict = json.load(f) with open(expected_result_path, 'r') as f: expected_result = json.load(f) return target_dict, expected_result # type: ignore # PYTEST FUNCTIONS SCENES = [ 'issue62' ] def setup_module(module): '''Runs before tests''' tmp_path = "./.tmp/test_model_export" if os.path.exists(tmp_path): shutil.rmtree(tmp_path) @pytest.fixture(params=SCENES) def scene(request): return request.param # TESTS def test_importer(scene): result, expected_result = make_comparison_files( "./.tmp/test_model_export", scene, './tests/data/tests_project.blend' ) assert_is_model(result) set_paths = { ("minecraft:geometry"), ("minecraft:geometry", 0, "bones"), ("minecraft:geometry", 0, "bones", 0, "cubes"), ("minecraft:geometry", 0, "bones", 0, "cubes", 0, "locators"), } expected_result_comparable = make_comparable_json( expected_result, set_paths) result_comparable = make_comparable_json(result, set_paths) 
assert expected_result_comparable == result_comparable <file_sep>''' Builds project and creates the release file. Release file name follows the naming convension pattern: 'mcblend_VERSION.zip' ''' import zipfile import os import re SRC_PATH = 'mcblend' def main(): '''Main function.''' # Get version number init_path = 'mcblend/__init__.py' version = None with open(init_path) as f: for line in f: line = line.strip() if line.startswith('"version"'): version = '.'.join( re.findall('\\d+', line.split(':')[-1]) ) break if version is None: raise Exception("Unable to read project version.") # Create mapping SOURCE:TARGET (in zip file) file_mapping = {'LICENSE': 'LICENSE'} for dir_name, _, files in os.walk(SRC_PATH): for file_name in files: match = False if re.fullmatch(r'\w+\.py', file_name): match = True if match: file_mapping[os.path.join(dir_name, file_name)] = ( os.path.join( 'mcblend', dir_name[len(SRC_PATH) + 1:], file_name ) ) # Create the zip file output_path = f'mcblend_{version}.zip' with zipfile.ZipFile(output_path, 'w', zipfile.ZIP_DEFLATED) as zipf: for k, v in file_mapping.items(): zipf.write(k, v) # This output is caputured while tesing print(f'Project build in {output_path}') if __name__ == "__main__": main() <file_sep>''' This module is used by Blender to register/unregister the plugin. 
''' # don't import future annotations Blender needs that import bpy from bpy.props import ( PointerProperty, BoolProperty, FloatVectorProperty, CollectionProperty, IntProperty, EnumProperty) from .operator import ( OBJECT_OT_NusiqMcblendExportModelOperator, OBJECT_OT_NusiqMcblendExportAnimationOperator, OBJECT_OT_NusiqMcblendMapUvOperator, OBJECT_OT_NusiqMcblendUvGroupOperator, OBJECT_OT_NusiqMcblendClearUvGroupOperator, OBJECT_OT_NusiqMcblendToggleIsBoneOperator, OBJECT_OT_NusiqMcblendToggleMirrorOperator, OBJECT_OT_NusiqMcblendSetInflateOperator, menu_func_nusiq_mcblend_export_model, menu_func_nusiq_mcblend_export_animation, OBJECT_OT_NusiqMcblendRoundDimensionsOperator, OBJECT_OT_NusiqMcblendImport, menu_func_nusiq_mcblend_import, OBJECT_OT_NusiqMcblendListAnimations, OBJECT_OT_NusiqMcblendAddAnimation, OBJECT_OT_NusiqMcblendRemoveAnimation, OBJECT_OT_NusiqMcblendListUvGroups, OBJECT_OT_NusiqMcblendAddUvGroup, OBJECT_OT_NusiqMcblendRemoveUvGroup, OBJECT_OT_NusiqMcblendAddUvMask, OBJECT_OT_NusiqMcblendRemoveUvMask, OBJECT_OT_NusiqMcblendMoveUvMask, OBJECT_OT_NusiqMcblendCopyUvGroupSide, OBJECT_OT_NusiqMcblendAddUvMaskColor, OBJECT_OT_NusiqMcblendRemoveUvMaskColor, OBJECT_OT_NusiqMcblendMoveUvMaskColor, OBJECT_OT_NusiqMcblendAddUvMaskStripe, OBJECT_OT_NusiqMcblendRemoveUvMaskStripe, OBJECT_OT_NusiqMcblendMoveUvMaskStripe, OBJECT_OT_NusiqMcblendExportUvGroupOperator, OBJECT_OT_NusiqMcblendImportUvGroupOperator, OBJECT_OT_NusiqMcblendAddEvent, OBJECT_OT_NusiqMcblendRemoveEvent, OBJECT_OT_NusiqMcblendAddEffect, OBJECT_OT_NusiqMcblendRemoveEffect, ) from .custom_properties import ( OBJECT_NusiqMcblendExporterProperties, OBJECT_NusiqMcblendTimelineMarkerProperties, OBJECT_NusiqMcblendAnimationProperties, OBJECT_NusiqMcblendObjectProperties, OBJECT_NusiqMcblendStripeProperties, OBJECT_NusiqMcblendColorProperties, OBJECT_NusiqMcblendUvMaskProperties, OBJECT_NusiqMcblendUvGroupProperties, OBJECT_NusiqMcblendEffectProperties, OBJECT_NusiqMcblendEventProperties, ) from 
.panel import ( OBJECT_PT_NusiqMcblendObjectPropertiesPanel, OBJECT_PT_NusiqMcblendExportPanel, OBJECT_PT_NusiqMcblendExportAnimationPanel, OBJECT_PT_NusiqMcblendSetUvsPanel, OBJECT_PT_NusiqMcblendOperatorsPanel, OBJECT_PT_NusiqMcblendImportPanel, OBJECT_PT_NusiqMcblendUVGroupPanel, OBJECT_UL_NusiqMcblendUVGroupList, OBJECT_PT_NusiqMcblendEventsPanel, OBJECT_UL_NusiqMcblendEventsList, ) bl_info = { "name": "Mcblend", "author": "Artur", "description": "", "blender": (2, 80, 0), "version": (6, 2, 0), # COMPATIBILITY BREAKING CHANGE, NEW FEATURE, BUGFIX "location": "", "warning": "", "category": "Generic" } classes = ( OBJECT_NusiqMcblendExporterProperties, OBJECT_NusiqMcblendEffectProperties, OBJECT_NusiqMcblendEventProperties, OBJECT_NusiqMcblendTimelineMarkerProperties, OBJECT_NusiqMcblendAnimationProperties, OBJECT_PT_NusiqMcblendObjectPropertiesPanel, OBJECT_OT_NusiqMcblendExportModelOperator, OBJECT_OT_NusiqMcblendExportAnimationOperator, OBJECT_PT_NusiqMcblendExportAnimationPanel, OBJECT_PT_NusiqMcblendExportPanel, OBJECT_OT_NusiqMcblendMapUvOperator, OBJECT_PT_NusiqMcblendSetUvsPanel, OBJECT_OT_NusiqMcblendUvGroupOperator, OBJECT_OT_NusiqMcblendClearUvGroupOperator, OBJECT_OT_NusiqMcblendToggleIsBoneOperator, OBJECT_OT_NusiqMcblendToggleMirrorOperator, OBJECT_PT_NusiqMcblendOperatorsPanel, OBJECT_OT_NusiqMcblendSetInflateOperator, OBJECT_OT_NusiqMcblendRoundDimensionsOperator, OBJECT_OT_NusiqMcblendImport, OBJECT_PT_NusiqMcblendImportPanel, OBJECT_PT_NusiqMcblendUVGroupPanel, OBJECT_UL_NusiqMcblendUVGroupList, OBJECT_OT_NusiqMcblendAddEvent, OBJECT_OT_NusiqMcblendRemoveEvent, OBJECT_OT_NusiqMcblendAddEffect, OBJECT_OT_NusiqMcblendRemoveEffect, OBJECT_PT_NusiqMcblendEventsPanel, OBJECT_UL_NusiqMcblendEventsList, OBJECT_OT_NusiqMcblendListAnimations, OBJECT_OT_NusiqMcblendAddAnimation, OBJECT_OT_NusiqMcblendRemoveAnimation, OBJECT_OT_NusiqMcblendListUvGroups, OBJECT_OT_NusiqMcblendAddUvGroup, OBJECT_OT_NusiqMcblendRemoveUvGroup, OBJECT_OT_NusiqMcblendAddUvMask, 
OBJECT_OT_NusiqMcblendRemoveUvMask, OBJECT_OT_NusiqMcblendMoveUvMask, OBJECT_OT_NusiqMcblendCopyUvGroupSide, OBJECT_OT_NusiqMcblendAddUvMaskColor, OBJECT_OT_NusiqMcblendRemoveUvMaskColor, OBJECT_OT_NusiqMcblendMoveUvMaskColor, OBJECT_OT_NusiqMcblendAddUvMaskStripe, OBJECT_OT_NusiqMcblendRemoveUvMaskStripe, OBJECT_OT_NusiqMcblendMoveUvMaskStripe, OBJECT_NusiqMcblendObjectProperties, OBJECT_NusiqMcblendStripeProperties, OBJECT_NusiqMcblendColorProperties, OBJECT_NusiqMcblendUvMaskProperties, # must be before UvGroupProperties OBJECT_NusiqMcblendUvGroupProperties, OBJECT_OT_NusiqMcblendExportUvGroupOperator, OBJECT_OT_NusiqMcblendImportUvGroupOperator, ) def register(): '''Registers the plugin''' # pylint: disable=assignment-from-no-return, no-member for _class in classes: bpy.utils.register_class(_class) # Model export properties (the scope is the whole scene) bpy.types.Scene.nusiq_mcblend = PointerProperty( type=OBJECT_NusiqMcblendExporterProperties) # Animation properties bpy.types.Scene.nusiq_mcblend_active_animation = IntProperty( default=0) bpy.types.Scene.nusiq_mcblend_animations = CollectionProperty( type=OBJECT_NusiqMcblendAnimationProperties) # Events bpy.types.Scene.nusiq_mcblend_events = CollectionProperty( type=OBJECT_NusiqMcblendEventProperties) bpy.types.Scene.nusiq_mcblend_active_event = IntProperty( default=0) # UV Groups bpy.types.Scene.nusiq_mcblend_active_uv_group = IntProperty( default=0) bpy.types.Scene.nusiq_mcblend_uv_groups = CollectionProperty( type=OBJECT_NusiqMcblendUvGroupProperties) sides = [(str(i), f'side{i+1}', f'side{i+1}') for i in range(6)] bpy.types.Scene.nusiq_mcblend_active_uv_groups_side = EnumProperty( items=sides, name="Face") # Object properties bpy.types.Object.nusiq_mcblend_object_properties = PointerProperty( type=OBJECT_NusiqMcblendObjectProperties) # Append operators to the F3 menu bpy.types.TOPBAR_MT_file_export.append( menu_func_nusiq_mcblend_export_model ) bpy.types.TOPBAR_MT_file_export.append( 
menu_func_nusiq_mcblend_export_animation ) bpy.types.TOPBAR_MT_file_import.append( menu_func_nusiq_mcblend_import ) def unregister(): '''Unregisters the plugin''' # pylint: disable=no-member for _class in reversed(classes): bpy.utils.unregister_class(_class) del bpy.types.Scene.nusiq_mcblend bpy.types.TOPBAR_MT_file_export.remove( menu_func_nusiq_mcblend_export_model ) bpy.types.TOPBAR_MT_file_export.remove( menu_func_nusiq_mcblend_export_animation ) bpy.types.TOPBAR_MT_file_import.remove( menu_func_nusiq_mcblend_import ) <file_sep># Limitations ## Cuboids only Minecraft Bedrock Edition models must only be made of cuboids (unless you are using an experimental feature that is not yet supported). You cannot use other shapes in your model. Each cube in the model must be a separate mesh. The mesh must be exactly the same shape as its bounding box. If the model shape is different from the bounding box, Mcblend may fail on export, and if not, the exported model will only be an approximation of what you see in the viewport. !!! note The best way to avoid problems with invalid meshes is to always use the "Object mode" for editing the model and always scaling the cuboids in their local space. Additionally, you can enable drawing of an object boundary in: `Object properties -> Viewport display -> Bounds` ## No wide angles in animations There must be no more than 180° rotation between two keyframes. This issue is caused by the way Mcblend computes Minecraft's rotations internally. Blender supports multiple rotation modes and uses different rotation types for different kinds of objects. For example, bone rotations in armatures use quaternions, but meshes use Euler angles. Additionally, users can choose different rotation modes for each object. Minecraft uses Euler angles, but the axes are set differently. Mcblend can export models and animations regardless of the rotation modes used, but internally everything is converted to quaternions / translation matrices. 
The design decision for the internal use of quaternions was motivated by the fact that quaternions help avoid some calculation errors. Unfortunately, the quaternion number system has only one unique representation for each rotation orientation, so one cannot distinguish full rotation from no rotation (360° == 0°). Therefore, you cannot use angles greater than 180° between two keyframes. !!! note A quick fix to this problem is adding additional keyframes for wide rotations. ## No dots in names names The add-on does not allow the use of names that have periods. Anything after the first dot in the object name is ignored during the conversion. You can use dots in the names of the objects that aren't converted to bones in exported Minecraft model. The conversion rules are described in the [next](../conversion_rules/) section of the user documentation. <file_sep># UV-groups UV-groups influence the process of UV-mapping. The main purpose of UV-groups is to group cubes together so that they are mapped to the same spot on the texture. Using UV groups can help reduce the texture size when the model has many identical parts. UV-groups also let you can change the appearance of generated textures to your preference. This can be useful for setting some distinguishable look for UV-group members. You can create some fairly advanced textures with it, but it's usually better to create textures in a usual way. ## Creating and removing UV-groups UV-groups are added / removed using the "Mcblend UV groups" panel from the [Scene Properties](../gui_changes/#scene-properties) with "New UV group" and "Delete this UV group" buttons. You can also export and import UV groups by using the "Export UV group" and "Import UV group" buttons respectively. ## Adding objects to UV-groups Adding object do UV-groups is done with [Set the UV group](../basic_operators/#set-the-uv-group) button on sidebar. You must create at least one UV-group in order to add objects to it.
d74cb2a81b9bfce3befffd50fddc2e641b998ae5
[ "Markdown", "Python", "INI" ]
35
Python
Nusiq/Blender-Export-MC-Bedrock-Model
1374e7193c87976ed5f7010354673a8b4530c69d
fe417b58ebbac83e386cb7c13205361e90b6ade4
refs/heads/master
<repo_name>moura232/Student-homeassistant-config<file_sep>/custom_components/gpodder/__init__.py """ Component to integrate with gPodder. For more details about this component, please refer to https://github.com/custom-components/gpodder """ import logging import os from datetime import timedelta from urllib.request import Request, urlopen import homeassistant.helpers.config_validation as cv import podcastparser import voluptuous as vol from homeassistant.helpers import discovery from homeassistant.util import Throttle from mygpoclient import api from custom_components.gpodder.const import ( CONF_DEVICE, CONF_ENABLED, CONF_NAME, CONF_PASSWORD, CONF_SENSOR, CONF_USERNAME, DEFAULT_NAME, DOMAIN, DOMAIN_DATA, PLATFORMS, REQUEST_HEADERS, REQUIRED_FILES, STARTUP, ) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=30) _LOGGER = logging.getLogger(__name__) SENSOR_SCHEMA = vol.Schema( { vol.Optional(CONF_ENABLED, default=True): cv.boolean, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_DEVICE, default="homeassistant"): cv.string, } ) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_SENSOR, default=[{}]): vol.All( cv.ensure_list, [SENSOR_SCHEMA] ), } ) }, extra=vol.ALLOW_EXTRA, ) def setup(hass, config): """Set up this component.""" # Print startup message _LOGGER.info(STARTUP) # Check that all required files are present file_check = check_files(hass) if not file_check: return False # Create DATA dict hass.data[DOMAIN_DATA] = {} hass.data[DOMAIN] = {} component_config = config[DOMAIN] # Create gPodder client hass.data[DOMAIN]["client"] = api.MygPodderClient( component_config[CONF_USERNAME], component_config[CONF_PASSWORD] ) # Load platforms for platform in PLATFORMS: # Get platform specific configuration platform_config = component_config.get(platform, {}) if not platform_config: continue for entry in platform_config: entry_config = entry # If entry 
is not enabled, skip. if not entry_config[CONF_ENABLED]: continue hass.async_create_task( discovery.async_load_platform( hass, platform, DOMAIN, entry_config, config ) ) return True @Throttle(MIN_TIME_BETWEEN_UPDATES) def update_data(hass, device): """Update data.""" try: urls = hass.data[DOMAIN]["client"].get_subscriptions(device) _LOGGER.debug(f"{len(urls)} urls for device '{device}'") hass.data[DOMAIN_DATA] = update_using_feedservice(urls) except Exception as error: # pylint: disable=broad-except _LOGGER.error(f"Could not update data for device '{device}' - {error}") def parse_entry(entry): download_url = entry["enclosures"][0]["url"] return { "title": entry["title"], "description": entry.get("description", ""), "url": download_url, "mime_type": entry["enclosures"][0]["mime_type"], "guid": entry.get("guid", download_url), "link": entry.get("link", ""), "published": entry.get("published", 0), "total_time": entry.get("total_time", 0), } def update_using_feedservice(urls): podcasts = [] for url in urls: try: feed = podcastparser.parse( url, urlopen(Request(url, headers=REQUEST_HEADERS)), 5 ) except Exception as error: # pylint: disable=broad-except _LOGGER.error("Could not update %s - %s", url, error) feed = None if feed is None: _LOGGER.info("Feed not updated: %s", url) continue # Handle permanent redirects if feed.get("new_location", False): new_url = feed["new_location"] _LOGGER.info("Redirect %s => %s", url, new_url) url = new_url # Error handling if feed.get("errors", False): _LOGGER.error("Error parsing feed: %s", repr(feed["errors"])) continue # Update per-podcast metadata podcast = { "title": feed.get("title", ""), "link": feed.get("link", url), "description": feed.get("description", ""), "cover_url": feed.get("logo", ""), "episodes": [parse_entry(entry) for entry in feed["episodes"]], } podcasts.append(podcast) _LOGGER.debug(f"Podcasts: {podcasts}") return podcasts def check_files(hass): """Return bool that indicates if all files are present.""" # Verify 
that the user downloaded all files. base = f"{hass.config.path()}/custom_components/{DOMAIN}/" missing = [] for file in REQUIRED_FILES: fullpath = f"{base}{file}" if not os.path.exists(fullpath): missing.append(file) if missing: _LOGGER.critical(f"The following files are missing: {str(missing)}") returnvalue = False else: returnvalue = True return returnvalue <file_sep>/custom_components/gpodder/sensor.py """Sensor platform for gPodder.""" from homeassistant.helpers.entity import Entity from custom_components.gpodder import update_data from custom_components.gpodder.const import DOMAIN_DATA, ICON, CONF_NAME, CONF_DEVICE def setup_platform( hass, config, add_entities, discovery_info=None ): # pylint: disable=unused-argument """Setup sensor platform.""" add_entities([GpodderSensor(hass, discovery_info)], True) class GpodderSensor(Entity): """gPodder Sensor class.""" def __init__(self, hass, config): self.hass = hass self.attr = {} self._state = None self._name = config[CONF_NAME] self._device = config[CONF_DEVICE] def update(self): """Update the sensor.""" # Send update "signal" to the component update_data(self.hass, self._device) # Get new data (if any) updated = self.hass.data[DOMAIN_DATA] self._state = len(updated) # Set/update attributes self.attr["podcasts"] = updated @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state @property def icon(self): """Return the icon of the sensor.""" return ICON @property def device_state_attributes(self): """Return the state attributes.""" return self.attr
12435753d35c943faeb1079ed74e96ba1a21a564
[ "Python" ]
2
Python
moura232/Student-homeassistant-config
c71a44a9d93212dd1cbc302376e287a825df71e5
67bd71e6d46ae26b8141447415d363c2a1f61114
refs/heads/main
<file_sep>package uz.pdp.daggeruser.module import dagger.Module import dagger.Provides import okhttp3.OkHttpClient import okhttp3.logging.HttpLoggingInterceptor import retrofit2.Retrofit import retrofit2.converter.gson.GsonConverterFactory import uz.pdp.daggeruser.retrofit.ApiService import javax.inject.Singleton @Module class NetworkModule { var BASE_URL = "https://jsonplaceholder.typicode.com/" @Singleton @Provides fun getRetrofit():ApiService{ return Retrofit.Builder() .addConverterFactory(GsonConverterFactory.create()) .baseUrl(BASE_URL) .build() .create(ApiService::class.java) } @Provides fun getOkHttpClient():OkHttpClient{ val httpLoggingInterceptor = HttpLoggingInterceptor() httpLoggingInterceptor.level = HttpLoggingInterceptor.Level.BODY val okHttpClient = OkHttpClient.Builder() okHttpClient.addInterceptor(httpLoggingInterceptor) return okHttpClient.build() } }<file_sep>include ':app' rootProject.name = "Dagger User"<file_sep>package uz.pdp.daggeruser import androidx.appcompat.app.AppCompatActivity import android.os.Bundle import android.util.Log import androidx.lifecycle.Observer import kotlinx.android.synthetic.main.activity_main.* import uz.pdp.daggeruser.adapters.RvAdapter import uz.pdp.daggeruser.models.User import uz.pdp.daggeruser.repository.UserViewModel import javax.inject.Inject class MainActivity : AppCompatActivity() { @Inject lateinit var userViewModel: UserViewModel override fun onCreate(savedInstanceState: Bundle?) 
{ (application as App).applicationComponent.inject(this) super.onCreate(savedInstanceState) setContentView(R.layout.activity_main) userViewModel.getUserRepository().observe(this, Observer { recycler.adapter = RvAdapter(it as ArrayList<User>) }) } }<file_sep>package uz.pdp.daggeruser import dagger.Component import uz.pdp.daggeruser.module.NetworkModule import uz.pdp.daggeruser.repository.UserViewModel import javax.inject.Singleton @Singleton @Component(modules = [NetworkModule::class]) interface ApplicationComponent { fun inject(mainActivity: MainActivity) fun inject(userViewModel: UserViewModel) }<file_sep>package uz.pdp.daggeruser.adapters import android.view.LayoutInflater import android.view.View import android.view.ViewGroup import androidx.recyclerview.widget.RecyclerView import kotlinx.android.synthetic.main.item_user.view.* import uz.pdp.daggeruser.R import uz.pdp.daggeruser.models.User class RvAdapter(val userList:ArrayList<User>):RecyclerView.Adapter<RvAdapter.Vh>() { inner class Vh(itemView: View):RecyclerView.ViewHolder(itemView){ fun onBind(user: User){ itemView.user_name.text = user.name itemView.user_email.text = user.email } } override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): Vh { return Vh(LayoutInflater.from(parent.context).inflate(R.layout.item_user,parent,false)) } override fun onBindViewHolder(holder: Vh, position: Int) { holder.onBind(userList[position]) } override fun getItemCount(): Int = userList.size }<file_sep>package uz.pdp.daggeruser.repository import androidx.lifecycle.MutableLiveData import uz.pdp.daggeruser.models.User import javax.inject.Inject class UserViewModel @Inject constructor(private val userRepository: UserRepository) { fun getUserRepository():MutableLiveData<List<User>>{ return userRepository.getAllUser() } }<file_sep>package uz.pdp.daggeruser.retrofit import retrofit2.http.GET import uz.pdp.daggeruser.models.User interface ApiService { @GET("users") suspend fun getUser():List<User> }<file_sep>package 
uz.pdp.daggeruser.repository import uz.pdp.daggeruser.retrofit.ApiService import javax.inject.Inject class UserRemoteDataSource @Inject constructor(private var apiService: ApiService) { suspend fun getUsers() = apiService.getUser() }<file_sep>package uz.pdp.daggeruser.models data class Geo( val lat: String, val lng: String )<file_sep>package uz.pdp.daggeruser.repository import androidx.lifecycle.MutableLiveData import kotlinx.coroutines.GlobalScope import kotlinx.coroutines.launch import uz.pdp.daggeruser.models.User import javax.inject.Inject import javax.inject.Singleton @Singleton class UserRepository @Inject constructor(private val userRemoteDataSource: UserRemoteDataSource) { val mutableLiveData = MutableLiveData<List<User>>() init { loadUser() } private fun loadUser() { GlobalScope.launch { mutableLiveData.postValue(userRemoteDataSource.getUsers()) } } fun getAllUser():MutableLiveData<List<User>>{ return mutableLiveData } }
4982286537d9270cf85b89bf40af76a926fe74fa
[ "Kotlin", "Gradle" ]
10
Kotlin
AAkmalDev/DaggerUser
f4c032b0c062b8425d0efaa3cee3ad1ea60ce54a
7e3df4024409511c3ff954afaf95544d7beab798
refs/heads/trunk
<file_sep>/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.meecrowave; import org.apache.cxf.helpers.FileUtils; import org.apache.meecrowave.io.IO; import org.apache.meecrowave.runner.cli.CliOption; import org.junit.Test; import org.superbiz.app.Bounced; import org.superbiz.app.Endpoint; import org.superbiz.app.RsApp; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Writer; import java.net.URL; import java.util.Properties; import java.util.stream.Stream; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class MeecrowaveTest { @Test public void configBinding() { final MyConfig config = new Meecrowave.Builder() .property("my-prefix-port", "1234") .property("my-prefix-another-port", "5678") .property("my-prefix-a-last-port-value", "9632") .property("my-prefix-passthrough", "any value") .property("my-prefix-bool", "true") .bind(new MyConfig()); assertNotNull(config); assertEquals(1234, config.port); assertEquals(5678, config.anotherPort); assertEquals(9632, 
config.aLastPortValue); assertEquals("any value", config.passthrough); assertTrue(config.bool); } @Test public void valueTransformationMainConfig() { assertEquals(1234, new Meecrowave.Builder() {{ loadFromProperties(new Properties() {{ setProperty("http", "decode:Static3DES:+yYyC7Lb5+k="); }}); }}.getHttpPort()); } @Test public void valueTransformationExtension() { assertEquals(1234, new Meecrowave.Builder() .property("my-prefix-port", "decode:Static3DES:+yYyC7Lb5+k=") .bind(new MyConfig()).port); } @Test public void simpleWebapp() { final File root = new File("target/MeecrowaveTest/simpleWebapp/app"); FileUtils.mkDir(root); Stream.of(Endpoint.class, RsApp.class).forEach(type -> { final String target = type.getName().replace(".", "/"); File targetFile = new File(root, "WEB-INF/classes/" + target + ".class"); FileUtils.mkDir(targetFile.getParentFile()); try (final InputStream from = Thread.currentThread().getContextClassLoader().getResourceAsStream(target + ".class"); final OutputStream to = new FileOutputStream(targetFile)) { IO.copy(from, to); } catch (final IOException e) { fail(); } }); Classes.dump(new File(root, "WEB-INF/classes/")); try (final Writer indexHtml = new FileWriter(new File(root, "index.html"))) { indexHtml.write("hello"); } catch (final IOException e) { fail(e.getMessage()); } try (final Meecrowave meecrowave = new Meecrowave(new Meecrowave.Builder().randomHttpPort().includePackages("org.superbiz.app")).start()) { meecrowave.deployWebapp("", root); assertEquals("hello", slurp(new URL("http://localhost:" + meecrowave.getConfiguration().getHttpPort() + "/index.html"))); assertEquals("simple", slurp(new URL("http://localhost:" + meecrowave.getConfiguration().getHttpPort() + "/api/test"))); assertEquals("simplepathinfo", slurp(new URL("http://localhost:" + meecrowave.getConfiguration().getHttpPort() + "/api/test?checkcustom=pathinfo#is=fine"))); assertEquals("simple", slurp(new URL("http://localhost:" + meecrowave.getConfiguration().getHttpPort() + 
"/api/other"))); assertEquals("simplefiltertrue", slurp(new URL("http://localhost:" + meecrowave.getConfiguration().getHttpPort() + "/filter"))); assertEquals("filtertrue", slurp(new URL("http://localhost:" + meecrowave.getConfiguration().getHttpPort() + "/other"))); } catch (final IOException e) { fail(e.getMessage()); } } @Test public void classpath() { try (final Meecrowave meecrowave = new Meecrowave(new Meecrowave.Builder().randomHttpPort().includePackages("org.superbiz.app")).bake()) { assertEquals("simple", slurp(new URL("http://localhost:" + meecrowave.getConfiguration().getHttpPort() + "/api/test"))); assertEquals("simplefiltertrue", slurp(new URL("http://localhost:" + meecrowave.getConfiguration().getHttpPort() + "/filter"))); assertEquals( "sci:" + Bounced.class.getName() + Endpoint.class.getName() + RsApp.class.getName(), slurp(new URL("http://localhost:" + meecrowave.getConfiguration().getHttpPort() + "/sci"))); } catch (final IOException e) { fail(e.getMessage()); } } @Test public void json() { try (final Meecrowave meecrowave = new Meecrowave(new Meecrowave.Builder().randomHttpPort().includePackages("org.superbiz.app")).bake()) { assertEquals("{\"name\":\"test\"}", slurp(new URL("http://localhost:" + meecrowave.getConfiguration().getHttpPort() + "/api/test/json"))); } catch (final IOException e) { fail(e.getMessage()); } } private String slurp(final URL url) { try (final InputStream is = url.openStream()) { return IO.toString(is); } catch (final IOException e) { fail(e.getMessage()); } return null; } public static class MyConfig { @CliOption(name = "my-prefix-port", description = "") private int port; @CliOption(name = "my-prefix-another-port", description = "") private int anotherPort; @CliOption(name = "my-prefix-a-last-port-value", description = "") private int aLastPortValue; @CliOption(name = "my-prefix-passthrough", description = "") private String passthrough; @CliOption(name = "my-prefix-bool", description = "") private boolean bool; } } 
<file_sep>= Meecrowave Gradle :jbake-date: 2016-10-24 :jbake-type: page :jbake-status: published :jbake-meecrowavepdf: :jbake-meecrowavetitleicon: icon icon_puzzle_alt :jbake-meecrowavecolor: body-purple :icons: font [source,groovy] ---- buildscript { repositories { mavenCentral() } dependencies { classpath "org.apache.meecrowave:meecrowave-gradle-plugin:${meecrowave.version}" } } group 'com.app' version '1.0-SNAPSHOT' apply plugin: 'java' apply plugin: 'org.apache.meecrowave.meecrowave' meecrowave { httpPort = 9090 // most of the meecrowave core configuration } ---- More coming soon, for now use gradle IDE integration or configuration documentation please. <file_sep>= Quick start :jbake-date: 2016-10-24 :jbake-type: page :jbake-status: published :jbake-meecrowavepdf: :jbake-meecrowavecolor: body-green :icons: font == Your first application Meecrowave relies on JAX-RS and CDI so to start you just need to write a JAX-RS endpoint: [source,java] ---- @Path("kitchen") @ApplicationScoped public class HelloKitchen { @GET public String getMenu() { return "good things"; } } ---- Then booting Meecrowave is as easy as launching: [source,java] ---- try (final Meecrowave meecrowave = new Meecrowave().bake()) { new Scanner(System.in).nextLine(); } ---- You should get some output containing: [source] ---- [19:54:55.397][INFO][main][.meecrowave.cxf.CxfCdiAutoSetup] REST Application: / -> org.apache.cxf.cdi.DefaultApplication [19:54:55.399][INFO][main][.meecrowave.cxf.CxfCdiAutoSetup] Service URI: /kitchen -> org.app.HelloKitchen [19:54:55.401][INFO][main][.meecrowave.cxf.CxfCdiAutoSetup] GET /kitchen/ -> String getMenu() ---- And you can check it works doing: [source] ---- curl http://localhost:8080/kitchen ---- == You're in a hurry? Use groovy! IMPORTANT: this feature is supported starting from version 0.3.0 only. 
Create a file called `hello.groovy`: [source,java] ---- @Grab('org.apache.meecrowave:meecrowave-core:0.3.0') import org.apache.meecrowave.Meecrowave import javax.ws.rs.GET import javax.ws.rs.Path import javax.enterprise.context.ApplicationScoped @Path("hello") @ApplicationScoped class Hello { @GET hi() { "hi" } } new Meecrowave().bake().await() ---- then [source,bash] ---- groovy hello.groovy ---- Finally you can test it: [source,bash] ---- curl http://localhost:8080/hello ---- == Sample https://github.com/apache/openwebbeans-meecrowave-examples contains ready to use examples using meecrowave.
b90241c8fda3401fa5930520b8ad7b70277cf0e5
[ "Java", "AsciiDoc" ]
3
Java
Fork-World/meecrowave
9e8228bdd55d56ec67fa3dabd66ce24d4a721764
4bfa8f2380beade3dba07b06fdd4ecc0b0a63b06
refs/heads/master
<repo_name>machu/rpaproxy-node<file_sep>/models/db_connect.js exports = mongoose = require('mongoose'); mongoose.connect(process.env.MONGOHQ_URL); exports = Schema = mongoose.Schema; <file_sep>/web.js /** * Module dependencies. */ var express = require('express'); var redis = require('redis-url').createClient(process.env.REDISTOGO_URL); var app = express.createServer(express.logger()); var Proxy = require('./models/proxy.js'); var async = require('async'); var http = require('http'); var url = require('url'); var util = require('util'); // Configuration app.configure(function(){ app.set('views', __dirname + '/views'); app.set('view engine', 'ejs'); app.use(app.router); app.use(express.static(__dirname + '/public')); }); app.configure('development', function(){ app.use(express.errorHandler({ dumpExceptions: true, showStack: true })); }); app.configure('production', function(){ app.use(express.errorHandler()); }); // Routes app.param('locale', function(req, res, next, locale) { if (/^[a-z]{2}$/.test(locale)) { req.locale = locale; req.query = url.parse(req.url).query; next(); } else { res.send("Bad Request", 400); } }); app.get('/rpaproxy/:locale', function(req, res) { // TODO: 適切なラウンドロビン Proxy .find({ locales: req.locale }) .run(function(err, proxies) { if (err) { return res.send("Error", 500); } // proxy実行&チェック&成功・失敗記録 async.until( // ループ継続判定 function() { console.log("check"); return (proxies.length == 0 || res.statusCode == 302); }, // ループ処理 function(callback) { console.log("do"); var proxy = proxies.shift(); var endpoint = url.parse(proxy.endpoint); var options = { host: endpoint.host, port: endpoint.port || 80, path: endpoint.pathname + req.locale + '/?' 
+ req.query }; console.log("connecting to: " + options.host + options.path); http.get(options, function(proxy_res) { if (proxy_res.headers.location && proxy_res.statusCode == 302) { // レスポンスヘッダ設定 res.statusCode = 302; res.setHeader('location', proxy_res.headers.location); // TODO: 成功回数を増分(非同期処理) } else { // TODO: 失敗回数を増分(非同期処理) } callback(); }).on('error', function(err) { console.log("http proxy error: "); console.log(util.inspect(err)); // TODO: 失敗回数を増分 callback(); }); }, // 後処理 function(err) { if (err) { // 例外処理 console.log(util.inspect(err)); return res.send(err.message, 500); } if (res.statusCode != 302) { return res.send("not found available proxy", 503) } // 正常終了 console.log("finished"); res.statusCode = 200; // for debug res.send(res.getHeader('location')); // for debug } ); }); }); app.get('/create_proxy', function(req, res) { var proxy = new Proxy(); proxy.name = "test"; proxy.endpoint = "http://www.machu.jp/amazon_proxy/"; proxy.locales = ["ja", "de", "en"]; proxy.save(); res.send("create proxy"); /* redis.get('counter', function(err, counter) { console.log('counter is: ' + counter); counter = counter + 1; redis.set('counter', counter); res.send('Hello World! ' + counter); }); */ }); var port = process.env.PORT || 3000; app.listen(port, function() { console.log("Express server listening on port %d in %s mode", app.address().port, app.settings.env); });
eaa519d797492418c8aaf569e950d96e3600339b
[ "JavaScript" ]
2
JavaScript
machu/rpaproxy-node
1efd75cab7f5a67d42e47acf7f1ad3d1c68c88d0
6366ed9876d2d1b93c788c8291057c02fb0020d7
refs/heads/master
<file_sep>/****************************************************************************** @file simple_peripheral.c @brief This file contains the Simple BLE Peripheral sample application for use with the CC2650 Bluetooth Low Energy Protocol Stack. Group: WCS, BTS Target Device: CC2650, CC2640 ****************************************************************************** Copyright (c) 2013-2018, Texas Instruments Incorporated All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Texas Instruments Incorporated nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
****************************************************************************** Release Name: ble_sdk_2_02_02_25 Release Date: 2018-04-02 18:03:35 *****************************************************************************/ /********************************************************************* * INCLUDES */ #include <string.h> #include <ti/sysbios/knl/Task.h> #include <ti/sysbios/knl/Clock.h> #include <ti/sysbios/knl/Semaphore.h> #include <ti/sysbios/knl/Queue.h> #include "hci_tl.h" #include "gatt.h" #include "linkdb.h" #include "gapgattserver.h" #include "gattservapp.h" #include "devinfoservice.h" #include "evrs_gatt_profile.h" #include "peripheral.h" #include "gapbondmgr.h" #include "osal_snv.h" #include "icall_apimsg.h" #include "util.h" #ifdef USE_RCOSC #include "rcosc_calibration.h" #endif //USE_RCOSC #include "board_key.h" #include "board_led.h" #include "board_display.h" #include "board.h" #include "evrs_tx_main.h" /********************************************************************* * CONSTANTS */ // Advertising interval when device is discoverable (units of 625us, 160=100ms) #define DEFAULT_ADVERTISING_INTERVAL 160 // Limited discoverable mode advertises for 30.72s, and then stops // General discoverable mode advertises indefinitely #define DEFAULT_DISCOVERABLE_MODE GAP_ADTYPE_FLAGS_GENERAL // Minimum connection interval (units of 1.25ms, 80=100ms) if automatic // parameter update request is enabled #define DEFAULT_DESIRED_MIN_CONN_INTERVAL 16 // Maximum connection interval (units of 1.25ms, 800=1000ms) if automatic // parameter update request is enabled #define DEFAULT_DESIRED_MAX_CONN_INTERVAL 400 // Slave latency to use if automatic parameter update request is enabled #define DEFAULT_DESIRED_SLAVE_LATENCY 0 // Supervision timeout value (units of 10ms, 1000=10s) if automatic parameter // update request is enabled #define DEFAULT_DESIRED_CONN_TIMEOUT 1000 // Whether to enable automatic parameter update request when a connection is // formed #define 
DEFAULT_ENABLE_UPDATE_REQUEST GAPROLE_LINK_PARAM_UPDATE_INITIATE_BOTH_PARAMS // Connection Pause Peripheral time value (in seconds) #define CONN_PAUSE_PERIPHERAL 6 // How often to perform periodic event (in msec) #define ETX_PERIODIC_EVT_PERIOD 5000 // Task configuration #define ETX_TASK_PRIORITY 1 #ifndef ETX_TASK_STACK_SIZE #define ETX_TASK_STACK_SIZE 1024 #endif #define ETX_ADTYPE_DEST 0xAF #define ETX_ADTYPE_DEVID 0xAE // Application state typedef enum { APP_STATE_INIT, APP_STATE_IDLE, APP_STATE_ADVERT } appStates_t; // Internal Events for RTOS application #define ETX_STATE_CHANGE_EVT 0x0001 #define ETX_CHAR_CHANGE_EVT 0x0002 #define ETX_PERIODIC_EVT 0x0004 #define ETX_CONN_EVT_END_EVT 0x0008 #define ETX_KEY_CHANGE_EVT 0x0010 #define ETX_DEVID_LEN 4 #define ETX_DEVID_NV_ID 0x80 #define ETX_DEVID_PREFIX 0x95 /********************************************************************* * TYPEDEFS */ // App event passed from profiles. typedef struct { appEvtHdr_t hdr; // event header. } sbpEvt_t; /********************************************************************* * GLOBAL VARIABLES */ /********************************************************************* * LOCAL VARIABLES */ // Entity ID globally used to check for source and/or destination of messages static ICall_EntityID selfEntity; // Semaphore globally used to post events to the application thread static ICall_Semaphore sem; // Clock instances for internal periodic events. //static Clock_Struct periodicClock; // Queue object used for app messages static Queue_Struct appMsg; static Queue_Handle appMsgQueue; // events flag for internal application events. 
static uint16_t events = 0; // Task configuration Task_Struct sbpTask; Char sbpTaskStack[ETX_TASK_STACK_SIZE]; // Profile state and parameters static appStates_t appState = APP_STATE_INIT; // GAP - Advertisement data (max size = 31 bytes, though this is // best kept short to conserve power while advertisting) static uint8_t advertData[] = { // Flags; this sets the device to use limited discoverable // mode (advertises for 30 seconds at a time) instead of general // discoverable mode (advertises indefinitely) 0x02,// length of this data GAP_ADTYPE_FLAGS, DEFAULT_DISCOVERABLE_MODE | GAP_ADTYPE_FLAGS_BREDR_NOT_SUPPORTED, // service UUID, to notify central devices what services are included // in this peripheral 0x03,// length of this data GAP_ADTYPE_16BIT_MORE, // some of the UUID's, but not all LO_UINT16(EVRSPROFILE_SERV_UUID), HI_UINT16(EVRSPROFILE_SERV_UUID), 0x02, ETX_ADTYPE_DEST, 0x00 }; // GAP - SCAN RSP data (max size = 31 bytes) static uint8_t scanRspData[15] = { // complete name //0x14,// length of this data //GAP_ADTYPE_LOCAL_NAME_COMPLETE, 'S', 'i', 'm', 'p', 'l', 'e', 'B', 'L', //'E', 'P', 'e', 'r', 'i', 'p', 'h', 'e', 'r', 'a', 'l', // connection interval range 0x05,// length of this data GAP_ADTYPE_SLAVE_CONN_INTERVAL_RANGE, LO_UINT16(DEFAULT_DESIRED_MIN_CONN_INTERVAL), // 100ms HI_UINT16(DEFAULT_DESIRED_MIN_CONN_INTERVAL), LO_UINT16(DEFAULT_DESIRED_MAX_CONN_INTERVAL), // 1s HI_UINT16(DEFAULT_DESIRED_MAX_CONN_INTERVAL), // Tx power level 0x02, // length of this data GAP_ADTYPE_POWER_LEVEL, 0x00, // 0dBm // Device ID rsp 0x05, ETX_ADTYPE_DEVID, 0x00, 0x00, 0x00, 0x00 }; // GAP GATT Attributes static const uint8_t attDeviceName[GAP_DEVICE_NAME_LEN] = "EVRS Transmitter"; // Globals used for ATT Response retransmission static gattMsgEvent_t *pAttRsp = NULL; static uint8_t rspTxRetry = 0; // Destiny base station ID static uint8_t destBsID = 0x00; // device ID params about Flash static uint8_t devID[ETX_DEVID_LEN] = {0}; 
/********************************************************************* * LOCAL FUNCTIONS */ static void ETX_init(void); static void ETX_taskFxn(UArg a0, UArg a1); static uint8_t ETX_processStackMsg(ICall_Hdr *pMsg); static uint8_t ETX_processGATTMsg(gattMsgEvent_t *pMsg); static void ETX_processAppMsg(sbpEvt_t *pMsg); static void ETX_processStateChangeEvt(gaprole_States_t newState); static void ETX_processCharValueChangeEvt(uint8_t paramID); //static void ETX_performPeriodicTask(void); //static void ETX_clockHandler(UArg arg); static void ETX_sendAttRsp(void); static void ETX_freeAttRsp(uint8_t status); static void ETX_stateChangeCB(gaprole_States_t newState); #ifndef FEATURE_OAD_ONCHIP static void ETX_charValueChangeCB(uint8_t paramID); #endif //!FEATURE_OAD_ONCHIP static void ETX_enqueueMsg(uint8_t event, uint8_t state); void ETX_keyChangeHandler(uint8_t keys); static void ETX_handleKeys(uint8_t shift, uint8_t keys); //device id static void ETX_DevId_Find(uint8_t* nvBuf); static void ETX_DevId_Refresh(uint8_t IdPrefix, uint8_t* nvBuf); static void ETX_ScanRsp_UpdateDeviceID(); static void ETX_Advert_UpdateDestinyBS(); /********************************************************************* * EXTERN FUNCTIONS */ extern void AssertHandler(uint8 assertCause, uint8 assertSubcause); /********************************************************************* * PROFILE CALLBACKS */ // GAP Role Callbacks static gapRolesCBs_t ETX_gapRoleCBs = { ETX_stateChangeCB // Profile State Change Callbacks }; // GAP Bond Manager Callbacks static gapBondCBs_t ETX_BondMgrCBs = { NULL, // Passcode callback (not used by application) NULL // Pairing / Bonding state Callback (not used by application) }; // Simple GATT Profile Callbacks static EVRSProfileCBs_t ETX_EVRSProfileCBs = { ETX_charValueChangeCB // Characteristic value change callback }; /********************************************************************* * PUBLIC FUNCTIONS */ 
/********************************************************************* * @fn ETX_createTask * * @brief Task creation function for the Simple BLE Peripheral. * * @param None. * * @return None. */ void ETX_createTask(void) { Task_Params taskParams; // Configure task Task_Params_init(&taskParams); taskParams.stack = sbpTaskStack; taskParams.stackSize = ETX_TASK_STACK_SIZE; taskParams.priority = ETX_TASK_PRIORITY; Task_construct(&sbpTask, ETX_taskFxn, &taskParams, NULL); } /********************************************************************* * @fn ETX_init * * @brief Called during initialization and contains application * specific initialization (ie. hardware initialization/setup, * table initialization, power up notification, etc), and * profile initialization/setup. * * @param None. * * @return None. */ static void ETX_init(void) { // ****************************************************************** // N0 STACK API CALLS CAN OCCUR BEFORE THIS CALL TO ICall_registerApp // ****************************************************************** // Register the current thread as an ICall dispatcher application // so that the application can send and receive messages. ICall_registerApp(&selfEntity, &sem); #ifdef USE_RCOSC RCOSC_enableCalibration(); #endif // USE_RCOSC // Create an RTOS queue for message from profile to be sent to app. 
appMsgQueue = Util_constructQueue(&appMsg); Board_initKeys(ETX_keyChangeHandler); Board_initLEDs(); Board_Display_Init(); // Device ID check { ETX_DevId_Find(devID); if (devID[3] != ETX_DEVID_PREFIX) // no valid device id found ETX_DevId_Refresh(ETX_DEVID_PREFIX, devID); ETX_ScanRsp_UpdateDeviceID(); } // Setup the GAP GAP_SetParamValue(TGAP_CONN_PAUSE_PERIPHERAL, CONN_PAUSE_PERIPHERAL); // Setup the GAP Peripheral Role Profile { // For all hardware platforms, device starts advertising upon initialization uint8_t initialAdvertEnable = TRUE; // By setting this to zero, the device will go into the waiting state after // being discoverable for 30.72 second, and will not being advertising again // until the enabler is set back to TRUE uint16_t advertOffTime = 0; uint8_t enableUpdateRequest = DEFAULT_ENABLE_UPDATE_REQUEST; uint16_t desiredMinInterval = DEFAULT_DESIRED_MIN_CONN_INTERVAL; uint16_t desiredMaxInterval = DEFAULT_DESIRED_MAX_CONN_INTERVAL; uint16_t desiredSlaveLatency = DEFAULT_DESIRED_SLAVE_LATENCY; uint16_t desiredConnTimeout = DEFAULT_DESIRED_CONN_TIMEOUT; // Set the GAP Role Parameters GAPRole_SetParameter(GAPROLE_ADVERT_ENABLED, sizeof(uint8_t), &initialAdvertEnable); GAPRole_SetParameter(GAPROLE_ADVERT_OFF_TIME, sizeof(uint16_t), &advertOffTime); GAPRole_SetParameter(GAPROLE_SCAN_RSP_DATA, sizeof(scanRspData), scanRspData); GAPRole_SetParameter(GAPROLE_ADVERT_DATA, sizeof(advertData), advertData); GAPRole_SetParameter(GAPROLE_PARAM_UPDATE_ENABLE, sizeof(uint8_t), &enableUpdateRequest); GAPRole_SetParameter(GAPROLE_MIN_CONN_INTERVAL, sizeof(uint16_t), &desiredMinInterval); GAPRole_SetParameter(GAPROLE_MAX_CONN_INTERVAL, sizeof(uint16_t), &desiredMaxInterval); GAPRole_SetParameter(GAPROLE_SLAVE_LATENCY, sizeof(uint16_t), &desiredSlaveLatency); GAPRole_SetParameter(GAPROLE_TIMEOUT_MULTIPLIER, sizeof(uint16_t), &desiredConnTimeout); } // Set the GAP Characteristics GGS_SetParameter(GGS_DEVICE_NAME_ATT, GAP_DEVICE_NAME_LEN, (void*)attDeviceName); // Set 
advertising interval { uint16_t advInt = DEFAULT_ADVERTISING_INTERVAL; GAP_SetParamValue(TGAP_LIM_DISC_ADV_INT_MIN, advInt); GAP_SetParamValue(TGAP_LIM_DISC_ADV_INT_MAX, advInt); GAP_SetParamValue(TGAP_GEN_DISC_ADV_INT_MIN, advInt); GAP_SetParamValue(TGAP_GEN_DISC_ADV_INT_MAX, advInt); } // Setup the GAP Bond Manager { uint32_t passkey = 0; // passkey "000000" uint8_t pairMode = GAPBOND_PAIRING_MODE_NO_PAIRING; uint8_t mitm = FALSE; uint8_t ioCap = GAPBOND_IO_CAP_NO_INPUT_NO_OUTPUT; uint8_t bonding = FALSE; GAPBondMgr_SetParameter(GAPBOND_DEFAULT_PASSCODE, sizeof(uint32_t), &passkey); GAPBondMgr_SetParameter(GAPBOND_PAIRING_MODE, sizeof(uint8_t), &pairMode); GAPBondMgr_SetParameter(GAPBOND_MITM_PROTECTION, sizeof(uint8_t), &mitm); GAPBondMgr_SetParameter(GAPBOND_IO_CAPABILITIES, sizeof(uint8_t), &ioCap); GAPBondMgr_SetParameter(GAPBOND_BONDING_ENABLED, sizeof(uint8_t), &bonding); } // Initialize GATT attributes GGS_AddService(GATT_ALL_SERVICES); // GAP GATTServApp_AddService(GATT_ALL_SERVICES); // GATT attributes DevInfo_AddService(); // Device Information Service EVRSProfile_AddService(GATT_ALL_SERVICES); // EVRS GATT Profile // Setup the EVRSProfile Characteristic Values { uint8_t sysIdVal = 0xA1; uint8_t devIdVal = 0xA2; uint8_t cmdVal = 0xA3; uint8_t dataVal = 0xA4; EVRSProfile_SetParameter(EVRSPROFILE_SYSID, sizeof(sysIdVal), &sysIdVal); EVRSProfile_SetParameter(EVRSPROFILE_DEVID, sizeof(devIdVal), &devIdVal); EVRSProfile_SetParameter(EVRSPROFILE_CMD, sizeof(cmdVal), &cmdVal); EVRSProfile_SetParameter(EVRSPROFILE_DATA, sizeof(dataVal), &dataVal); } // Register callback with SimpleGATTprofile EVRSProfile_RegisterAppCBs(&ETX_EVRSProfileCBs); // Start the Device VOID GAPRole_StartDevice(&ETX_gapRoleCBs); // Start Bond Manager VOID GAPBondMgr_Register(&ETX_BondMgrCBs); // Register with GAP for HCI/Host messages GAP_RegisterForMsgs(selfEntity); // Register for GATT local events and ATT Responses pending for transmission GATT_RegisterForMsgs(selfEntity); 
HCI_LE_ReadMaxDataLenCmd(); uout0("EVRS TX initialized"); Board_ledControl(BOARD_LED_ID_G, BOARD_LED_STATE_FLASH, 300); //Board_ledControl(BOARD_LED_ID_R, BOARD_LED_STATE_ON, 0); } /********************************************************************* * @fn ETX_taskFxn * * @brief Application task entry point for the Simple BLE Peripheral. * * @param a0, a1 - not used. * * @return None. */ static void ETX_taskFxn(UArg a0, UArg a1) { // Initialize application ETX_init(); // Application main loop for (;;) { // Waits for a signal to the semaphore associated with the calling thread. // Note that the semaphore associated with a thread is signaled when a // message is queued to the message receive queue of the thread or when // ICall_signal() function is called onto the semaphore. ICall_Errno errno = ICall_wait(ICALL_TIMEOUT_FOREVER); if (errno == ICALL_ERRNO_SUCCESS) { ICall_EntityID dest; ICall_ServiceEnum src; ICall_HciExtEvt *pMsg = NULL; if (ICall_fetchServiceMsg(&src, &dest, (void **) &pMsg) == ICALL_ERRNO_SUCCESS) { uint8 safeToDealloc = TRUE; if ((src == ICALL_SERVICE_CLASS_BLE) && (dest == selfEntity)) { ICall_Stack_Event *pEvt = (ICall_Stack_Event *) pMsg; // Check for BLE stack events first if (pEvt->signature == 0xffff) { if (pEvt->event_flag & ETX_CONN_EVT_END_EVT) { // Try to retransmit pending ATT Response (if any) ETX_sendAttRsp(); } } else { // Process inter-task message safeToDealloc = ETX_processStackMsg( (ICall_Hdr *) pMsg); } } if (pMsg && safeToDealloc) { ICall_freeMsg(pMsg); } } // If RTOS queue is not empty, process app message. while (!Queue_empty(appMsgQueue)) { sbpEvt_t *pMsg = (sbpEvt_t *) Util_dequeueMsg(appMsgQueue); if (pMsg) { // Process message. ETX_processAppMsg(pMsg); // Free the space from the message. 
ICall_free(pMsg); } } } if (events & ETX_PERIODIC_EVT) { events &= ~ETX_PERIODIC_EVT; //Util_startClock(&periodicClock); // Perform periodic application task //ETX_performPeriodicTask(); } } } /********************************************************************* * @fn ETX_processStackMsg * * @brief Process an incoming stack message. * * @param pMsg - message to process * * @return TRUE if safe to deallocate incoming message, FALSE otherwise. */ static uint8_t ETX_processStackMsg(ICall_Hdr *pMsg) { uint8_t safeToDealloc = TRUE; switch (pMsg->event) { case GATT_MSG_EVENT: // Process GATT message safeToDealloc = ETX_processGATTMsg( (gattMsgEvent_t *) pMsg); break; case HCI_GAP_EVENT_EVENT: { // Process HCI message switch (pMsg->status) { case HCI_COMMAND_COMPLETE_EVENT_CODE: // Process HCI Command Complete Event break; case HCI_BLE_HARDWARE_ERROR_EVENT_CODE: { AssertHandler(HAL_ASSERT_CAUSE_HARDWARE_ERROR, 0); } break; default: break; } } break; default: // do nothing break; } return (safeToDealloc); } /********************************************************************* * @fn ETX_processGATTMsg * * @brief Process GATT messages and events. * * @return TRUE if safe to deallocate incoming message, FALSE otherwise. */ static uint8_t ETX_processGATTMsg(gattMsgEvent_t *pMsg) { // See if GATT server was unable to transmit an ATT response if (pMsg->hdr.status == blePending) { // No HCI buffer was available. Let's try to retransmit the response // on the next connection event. if (HCI_EXT_ConnEventNoticeCmd(pMsg->connHandle, selfEntity, ETX_CONN_EVT_END_EVT) == SUCCESS) { // First free any pending response ETX_freeAttRsp(FAILURE); // Hold on to the response message for retransmission pAttRsp = pMsg; // Don't free the response message yet return (FALSE); } } else if (pMsg->method == ATT_FLOW_CTRL_VIOLATED_EVENT) { // ATT request-response or indication-confirmation flow control is // violated. All subsequent ATT requests or indications will be dropped. 
// The app is informed in case it wants to drop the connection. // Display the opcode of the message that caused the violation. uout1("FC Violated: %d", pMsg->msg.flowCtrlEvt.opcode); } else if (pMsg->method == ATT_MTU_UPDATED_EVENT) { // MTU size updated uout1("MTU Size: $d", pMsg->msg.mtuEvt.MTU); } // Free message payload. Needed only for ATT Protocol messages GATT_bm_free(&pMsg->msg, pMsg->method); // It's safe to free the incoming message return (TRUE); } /********************************************************************* * @fn ETX_sendAttRsp * * @brief Send a pending ATT response message. * * @param none * * @return none */ static void ETX_sendAttRsp(void) { // See if there's a pending ATT Response to be transmitted if (pAttRsp != NULL) { uint8_t status; // Increment retransmission count rspTxRetry++; // Try to retransmit ATT response till either we're successful or // the ATT Client times out (after 30s) and drops the connection. status = GATT_SendRsp(pAttRsp->connHandle, pAttRsp->method, &(pAttRsp->msg)); if ((status != blePending) && (status != MSG_BUFFER_NOT_AVAIL)) { // Disable connection event end notice HCI_EXT_ConnEventNoticeCmd(pAttRsp->connHandle, selfEntity, 0); // We're done with the response message ETX_freeAttRsp(status); } else { // Continue retrying uout1("Rsp send retry: %d", rspTxRetry); } } } /********************************************************************* * @fn ETX_freeAttRsp * * @brief Free ATT response message. 
* * @param status - response transmit status * * @return none */ static void ETX_freeAttRsp(uint8_t status) { // See if there's a pending ATT response message if (pAttRsp != NULL) { // See if the response was sent out successfully if (status == SUCCESS) { uout1("Rsp sent retry: %d", rspTxRetry); } else { // Free response payload GATT_bm_free(&pAttRsp->msg, pAttRsp->method); uout1("Rsp retry failed: %d", rspTxRetry); } // Free response message ICall_freeMsg(pAttRsp); // Reset our globals pAttRsp = NULL; rspTxRetry = 0; } } /********************************************************************* * @fn ETX_processAppMsg * * @brief Process an incoming callback from a profile. * * @param pMsg - message to process * * @return None. */ static void ETX_processAppMsg(sbpEvt_t *pMsg) { switch (pMsg->hdr.event) { case ETX_STATE_CHANGE_EVT: ETX_processStateChangeEvt( (gaprole_States_t) pMsg->hdr.state); break; case ETX_CHAR_CHANGE_EVT: ETX_processCharValueChangeEvt(pMsg->hdr.state); break; case ETX_KEY_CHANGE_EVT: ETX_handleKeys(0, pMsg->hdr.state); default: // Do nothing. break; } } /********************************************************************* * @fn ETX_stateChangeCB * * @brief Callback from GAP Role indicating a role state change. * * @param newState - new state * * @return None. */ static void ETX_stateChangeCB(gaprole_States_t newState) { ETX_enqueueMsg(ETX_STATE_CHANGE_EVT, newState); } /********************************************************************* * @fn ETX_processStateChangeEvt * * @brief Process a pending GAP Role state change event. * * @param newState - new state * * @return None. 
*/ static void ETX_processStateChangeEvt(gaprole_States_t newState) { #ifdef PLUS_BROADCASTER static bool firstConnFlag = false; #endif // PLUS_BROADCASTER switch (newState) { case GAPROLE_STARTED: { uint8_t ownAddress[B_ADDR_LEN]; uint8_t systemId[DEVINFO_SYSTEM_ID_LEN]; GAPRole_GetParameter(GAPROLE_BD_ADDR, ownAddress); // use 6 bytes of device address for 8 bytes of system ID value systemId[0] = ownAddress[0]; systemId[1] = ownAddress[1]; systemId[2] = ownAddress[2]; // set middle bytes to zero systemId[4] = 0x00; systemId[3] = 0x00; // shift three bytes up systemId[7] = ownAddress[5]; systemId[6] = ownAddress[4]; systemId[5] = ownAddress[3]; DevInfo_SetParameter(DEVINFO_SYSTEM_ID, DEVINFO_SYSTEM_ID_LEN, systemId); appState = APP_STATE_IDLE; // Display device address uout0(Util_convertBdAddr2Str(ownAddress)); uout0("Initialized"); Board_ledControl(BOARD_LED_ID_R, BOARD_LED_STATE_OFF, 0); } break; case GAPROLE_ADVERTISING: appState = APP_STATE_IDLE; uout0("Advertising"); Board_ledControl(BOARD_LED_ID_R, BOARD_LED_STATE_FLASH, 100); break; #ifdef PLUS_BROADCASTER /* After a connection is dropped a device in PLUS_BROADCASTER will continue * sending non-connectable advertisements and shall sending this change of * state to the application. These are then disabled here so that sending * connectable advertisements can resume. */ case GAPROLE_ADVERTISING_NONCONN: { uint8_t advertEnabled = FALSE; // Disable non-connectable advertising. GAPRole_SetParameter(GAPROLE_ADV_NONCONN_ENABLED, sizeof(uint8_t), &advertEnabled); advertEnabled = TRUE; // Enabled connectable advertising. GAPRole_SetParameter(GAPROLE_ADVERT_ENABLED, sizeof(uint8_t), &advertEnabled); // Reset flag for next connection. 
firstConnFlag = false; ETX_freeAttRsp(bleNotConnected); } break; #endif //PLUS_BROADCASTER case GAPROLE_CONNECTED: { linkDBInfo_t linkInfo; uint8_t numActive = 0; //Util_startClock(&periodicClock); numActive = linkDB_NumActive(); // Use numActive to determine the connection handle of the last // connection if (linkDB_GetInfo(numActive - 1, &linkInfo) == SUCCESS) { uout1("Num Conns: %d", (uint16_t )numActive); uout0(Util_convertBdAddr2Str(linkInfo.addr)); } else { uint8_t peerAddress[B_ADDR_LEN]; GAPRole_GetParameter(GAPROLE_CONN_BD_ADDR, peerAddress); uout0("Connected"); uout0(Util_convertBdAddr2Str(peerAddress)); } Board_ledControl(BOARD_LED_ID_R, BOARD_LED_STATE_FLASH, 500); #ifdef PLUS_BROADCASTER // Only turn advertising on for this state when we first connect // otherwise, when we go from connected_advertising back to this state // we will be turning advertising back on. if (firstConnFlag == false) { uint8_t advertEnabled = FALSE; // Turn on Advertising // Disable connectable advertising. GAPRole_SetParameter(GAPROLE_ADVERT_ENABLED, sizeof(uint8_t), &advertEnabled); // Set to true for non-connectabel advertising. advertEnabled = TRUE; // Enable non-connectable advertising. GAPRole_SetParameter(GAPROLE_ADV_NONCONN_ENABLED, sizeof(uint8_t), &advertEnabled); firstConnFlag = true; } #endif // PLUS_BROADCASTER } break; case GAPROLE_CONNECTED_ADV: uout0("Connected Advertising"); break; case GAPROLE_WAITING: //Util_stopClock(&periodicClock); ETX_freeAttRsp(bleNotConnected); uout0("Disconnected"); Board_ledControl(BOARD_LED_ID_R, BOARD_LED_STATE_OFF, 0); // Clear remaining lines //Display_clearLines(dispHandle, 3, 5); break; case GAPROLE_WAITING_AFTER_TIMEOUT: ETX_freeAttRsp(bleNotConnected); uout0("Timed Out"); // Clear remaining lines //Display_clearLines(dispHandle, 3, 5); #ifdef PLUS_BROADCASTER // Reset flag for next connection. 
firstConnFlag = false; #endif //#ifdef (PLUS_BROADCASTER) break; case GAPROLE_ERROR: uout0("Error"); break; default: //Display_clearLine(dispHandle, 2); break; } } /********************************************************************* * @fn ETX_charValueChangeCB * * @brief Callback from Simple Profile indicating a characteristic * value change. * * @param paramID - parameter ID of the value that was changed. * * @return None. */ static void ETX_charValueChangeCB(uint8_t paramID) { ETX_enqueueMsg(ETX_CHAR_CHANGE_EVT, paramID); } /********************************************************************* * @fn ETX_processCharValueChangeEvt * * @brief Process a pending Simple Profile characteristic value change * event. * * @param paramID - parameter ID of the value that was changed. * * @return None. */ static void ETX_processCharValueChangeEvt(uint8_t paramID) { uint8_t newValue; switch (paramID) { case EVRSPROFILE_DEVID: EVRSProfile_GetParameter(EVRSPROFILE_DEVID, &newValue); uout1("Device Id: 0x%02x", (uint8_t )newValue); break; case EVRSPROFILE_CMD: EVRSProfile_GetParameter(EVRSPROFILE_CMD, &newValue); uout1("BS Command: 0x%02x", (uint8_t )newValue); break; case EVRSPROFILE_DATA: EVRSProfile_GetParameter(EVRSPROFILE_DATA, &newValue); uout1("User Data: 0x%02x", (uint8_t )newValue); break; default: // should not reach here! break; } } /********************************************************************* * @fn ETX_enqueueMsg * * @brief Creates a message and puts the message in RTOS queue. * * @param event - message event. * @param state - message state. * * @return None. */ static void ETX_enqueueMsg(uint8_t event, uint8_t state) { sbpEvt_t *pMsg; // Create dynamic pointer to message. if ((pMsg = ICall_malloc(sizeof(sbpEvt_t)))) { pMsg->hdr.event = event; pMsg->hdr.state = state; // Enqueue the message. 
Util_enqueueMsg(appMsgQueue, sem, (uint8*) pMsg); } } /********************************************************************* * @fn ETX_keyChangeHandler * * @brief Key event handler function * * @param a0 - ignored * * @return none */ void ETX_keyChangeHandler(uint8_t keys) { ETX_enqueueMsg(ETX_KEY_CHANGE_EVT, keys); } /********************************************************************* * @fn ETX_handleKeys * * @brief Handles all key events for this device. * * @param shift - true if in shift/alt. * @param keys - bit field for key events. Valid entries: * HAL_KEY_SW_2 * HAL_KEY_SW_1 * * @return none */ static void ETX_handleKeys(uint8_t shift, uint8_t keys) { //uout0("handleKey() called"); uint8_t advertEnable = FALSE; switch (appState) { case APP_STATE_INIT: break; case APP_STATE_IDLE: if (keys & KEY_RIGHT) { ETX_Advert_UpdateDestinyBS(); bStatus_t rtn = 0; rtn = GAPRole_SetParameter(GAPROLE_ADVERT_DATA, sizeof(advertData), advertData); if (rtn == SUCCESS) uout1("BS set to 0x%02x", destBsID); appState = APP_STATE_ADVERT; } break; case APP_STATE_ADVERT: if (keys & KEY_LEFT) { advertEnable = FALSE; GAPRole_SetParameter(GAPROLE_ADVERT_ENABLED, sizeof(uint8_t), &advertEnable); appState = APP_STATE_IDLE; } if (keys & KEY_RIGHT) { uint32 newValue = 0; EVRSProfile_GetParameter(EVRSPROFILE_DATA, &newValue); newValue += 1; EVRSProfile_SetParameter(EVRSPROFILE_DATA, sizeof(uint32), &newValue ); appState = APP_STATE_IDLE; } break; default: break; } } static void ETX_DevId_Find(uint8_t* nvBuf) { uint8_t rtn = osal_snv_read(ETX_DEVID_NV_ID, ETX_DEVID_LEN, (uint8 *)nvBuf); if (rtn == SUCCESS) uout1("Device ID found: 0x%08x", BUILD_UINT32(nvBuf[0], nvBuf[1], nvBuf[2], nvBuf[3])); return; } static void ETX_DevId_Refresh(uint8_t IdPrefix, uint8_t* nvBuf) { uint32_t rnd = Util_GetTRNG() % 0xFFFFFF; nvBuf[0] = rnd % 0xFF ; nvBuf[1] = (rnd >> 8) % 0xFF; nvBuf[2] = (rnd >> 16) % 0xFF; nvBuf[3] = IdPrefix; uint8_t rtn = osal_snv_write(ETX_DEVID_NV_ID, ETX_DEVID_LEN, (uint8 *)nvBuf); 
if (rtn == SUCCESS) { rtn = osal_snv_read(ETX_DEVID_NV_ID, ETX_DEVID_LEN, (uint8 *)nvBuf); if (rtn == SUCCESS) uout1("Device ID refreshed: 0x%08x", BUILD_UINT32(nvBuf[0], nvBuf[1], nvBuf[2], nvBuf[3])); } return; } static void ETX_ScanRsp_UpdateDeviceID() { scanRspData[11] = devID[0]; scanRspData[12] = devID[1]; scanRspData[13] = devID[2]; scanRspData[14] = devID[3]; } static void ETX_Advert_UpdateDestinyBS() { destBsID = 0x02; advertData[9] = destBsID; } /********************************************************************* *********************************************************************/ <file_sep>################################################################################ # Automatically-generated file. Do not edit! ################################################################################ USER_OBJS := LIBS := -llibc.a -l"C:/ti/tirtos_cc13xx_cc26xx_2_21_01_08/products/cc26xxware_2_24_03_17272/driverlib/bin/ccs/driverlib.lib" -l"C:/ti/ble_sdk_2_02_02_25/src/rom/common_rom_releases/03282014/common_rom.symbols" <file_sep>/***************************************************************************** file evrs_bs_rssi.c brief This file contain operations about polling rssi data proj EVRS date 0527pm 15 Aug 2018 author Ziyi *****************************************************************************/ #include "evrs_bs_rssi.h" #include "linkdb.h" static readRssi_t *EBS_RssiAlloc(uint16_t connHandle); static void EBS_RssiFree(uint16_t connHandle); void EBS_readRssiHandler(UArg a0); /********************************************************************* * @fn EBS_StartRssi * * @brief Start periodic RSSI reads on a link. 
 * @param   connHandle - connection handle of link
 * @param   period - RSSI read period in ms
 *
 * @return  SUCCESS: RSSI reads started (doc fix: previously said
 *                   "Terminate started", a copy-paste error)
 *          bleIncorrectMode: No link
 *          bleNoResources: No resources
 */
bStatus_t EBS_StartRssi(uint16_t connHandle, uint16_t period)
{
    readRssi_t *pRssi;

    // Verify link is up
    if (!linkDB_Up(connHandle))
    {
        return bleIncorrectMode;
    }

    // If already allocated: just change the period. Stop the running
    // timer first; it is restarted below with the new period.
    if ((pRssi = EBS_RssiFind(connHandle)) != NULL)
    {
        // Stop timer
        Util_stopClock(pRssi->pClock);

        pRssi->period = period;
    }
    // Allocate structure (also constructs the per-entry clock)
    else if ((pRssi = EBS_RssiAlloc(connHandle)) != NULL)
    {
        pRssi->period = period;
    }
    // Allocate failed (no free slot in readRssi[])
    else
    {
        return bleNoResources;
    }

    // Start timer; the clock callback (EBS_readRssiHandler) re-arms it
    // each period via the app message loop.
    Util_restartClock(pRssi->pClock, period);

    return SUCCESS;
}

/*********************************************************************
 * @fn      EBS_CancelRssi
 *
 * @brief   Cancel periodic RSSI reads on a link.
 *
 * @param   connHandle - connection handle of link
 *
 * @return  SUCCESS: Operation successful
 *          bleIncorrectMode: No link
 */
bStatus_t EBS_CancelRssi(uint16_t connHandle)
{
    readRssi_t *pRssi;

    if ((pRssi = EBS_RssiFind(connHandle)) != NULL)
    {
        // Stop timer
        Util_stopClock(pRssi->pClock);

        // Free RSSI structure (destructs and frees the clock, and marks
        // the slot free again with GAP_CONNHANDLE_ALL)
        EBS_RssiFree(connHandle);

        return SUCCESS;
    }

    // Not found
    return bleIncorrectMode;
}

/*********************************************************************
 * @fn      gapCentralRole_RssiAlloc
 *
 * @brief   Allocate an RSSI structure.
 *
 * @param   connHandle - Connection handle
 *
 * @return  pointer to structure or NULL if allocation failed.
*/ static readRssi_t *EBS_RssiAlloc(uint16_t connHandle) { uint8_t i; // Find free RSSI structure for (i = 0; i < MAX_NUM_BLE_CONNS; i++) { if (readRssi[i].connHandle == GAP_CONNHANDLE_ALL) { readRssi_t *pRssi = &readRssi[i]; pRssi->pClock = (Clock_Struct *) ICall_malloc(sizeof(Clock_Struct)); if (pRssi->pClock) { Util_constructClock(pRssi->pClock, EBS_readRssiHandler, 0, 0, false, i); pRssi->connHandle = connHandle; return pRssi; } } } // No free structure found return NULL; } /********************************************************************* * @fn gapCentralRole_RssiFind * * @brief Find an RSSI structure. * * @param connHandle - Connection handle * * @return pointer to structure or NULL if not found. */ readRssi_t *EBS_RssiFind(uint16_t connHandle) { uint8_t i; // Find free RSSI structure for (i = 0; i < MAX_NUM_BLE_CONNS; i++) { if (readRssi[i].connHandle == connHandle) { return &readRssi[i]; } } // Not found return NULL; } /********************************************************************* * @fn gapCentralRole_RssiFree * * @brief Free an RSSI structure. * * @param connHandle - Connection handle * * @return none */ static void EBS_RssiFree(uint16_t connHandle) { uint8_t i; // Find RSSI structure for (i = 0; i < MAX_NUM_BLE_CONNS; i++) { if (readRssi[i].connHandle == connHandle) { readRssi_t *pRssi = &readRssi[i]; if (pRssi->pClock) { Clock_destruct(pRssi->pClock); // Free clock struct ICall_free(pRssi->pClock); pRssi->pClock = NULL; } pRssi->connHandle = GAP_CONNHANDLE_ALL; break; } } } /********************************************************************* * @fn EBS_readRssiHandler * * @brief Read RSSI handler function * * @param a0 - read RSSI index * * @return none */ void EBS_readRssiHandler(UArg a0) { EBS_enqueueMsg(EBS_RSSI_READ_EVT, SUCCESS, (uint8_t *) &readRssi[a0]); } <file_sep># EVRS_BLE_FIRMWARE (moved to new repos, EBS and ETX) The CCS project for Transmitter and Basestation prototype in EVRS, using launchxl-cc2650. 
The Electronic Voting Response System (EVRS) is our capstone project topic in UoM, it mainly contains multiple transmitter(TX) using CC2650 and a base station(BS) using Raspberry PI 3B+. The wireless communication protocol would be chosen from BLE, Zigbee and 6LowPAN. This project is the source code for the transmitter and basestation prototype using launchxl-cc2650 evaluation board. Development would be based on the TI-RTOS real time system. <file_sep>/**************************************** * * @filename evrs_bs_main.c * * @project evrs_bs_cc2650lp_app * * @brief main functionality of base station * * @date 22 Aug. 2018 * * @author <EMAIL> * ****************************************/ /********************************************************************* * INCLUDES */ #include <string.h> #include <ti/sysbios/knl/Task.h> #include <ti/sysbios/knl/Clock.h> #include <ti/sysbios/knl/Semaphore.h> #include <ti/sysbios/knl/Queue.h> #include "bcomdef.h" #include "hci_tl.h" #include "linkdb.h" #include "gatt.h" #include "gapgattserver.h" #include "gattservapp.h" #include "central.h" #include "gapbondmgr.h" //#include "simple_gatt_profile.h" #include "osal_snv.h" #include "icall_apimsg.h" #include "evrs_bs_typedefs.h" #include "util.h" #include "board_key.h" #include "board_led.h" #include "board_display.h" #include "evrs_bs_rssi.h" //#include <ti/mw/display/Display.h> #include "board.h" #include "ble_user_config.h" /********************************************************************* * MACROS */ /********************************************************************* * CONSTANTS */ // Maximum number of scan responses #define MAX_SCAN_RES 20 // Scan duration in ms #define DEFAULT_SCAN_DURATION 10000 // Discovery mode (limited, general, all) #define DEFAULT_DISCOVERY_MODE DEVDISC_MODE_ALL // TRUE to use active scan #define DEFAULT_DISCOVERY_ACTIVE_SCAN TRUE // TRUE to use white list during discovery #define DEFAULT_DISCOVERY_WHITE_LIST FALSE // TRUE to use high scan duty cycle 
when creating link #define LINK_HIGH_DUTY_CYCLE TRUE // TRUE to use white list when creating link #define LINK_WHITE_LIST FALSE // Initial minimum connection interval (units of 1.25 ms.) #define INITIAL_MIN_CONN_INTERVAL 16 // Initial minimum connection interval (units of 1.25 ms.) #define INITIAL_MAX_CONN_INTERVAL 400 // Initial slave latency #define INITIAL_SLAVE_LATENCY 0 // Initial supervision timeout (units of 1.25 ms) #define INITIAL_CONN_TIMEOUT 700 // Default RSSI polling period in ms #define DEFAULT_RSSI_PERIOD 1000 // Whether to enable automatic parameter update request when a connection is // formed #define DEFAULT_ENABLE_UPDATE_REQUEST FALSE // Minimum connection interval (units of 1.25ms) if automatic parameter update // request is enabled #define DEFAULT_UPDATE_MIN_CONN_INTERVAL 80 // Maximum connection interval (units of 1.25ms) if automatic parameter update // request is enabled #define DEFAULT_UPDATE_MAX_CONN_INTERVAL 160 // Slave latency to use if automatic parameter update request is enabled #define DEFAULT_UPDATE_SLAVE_LATENCY 0 // Supervision timeout value (units of 10ms) if automatic parameter update // request is enabled #define DEFAULT_UPDATE_CONN_TIMEOUT 600 // Default service discovery timer delay in ms #define SVC_DISCOVERY_DELAY 500 // TRUE to filter discovery results on desired service UUID #define DEV_DISC_BY_SVC_UUID TRUE // Length of bd addr as a string #define B_ADDR_STR_LEN 15 // Task configuration #define EBS_TASK_PRIORITY 1 #ifndef EBS_TASK_STACK_SIZE #define EBS_TASK_STACK_SIZE 864 #endif // GATT Params // EVRS Profile Service UUID #define EVRSPROFILE_SERV_UUID 0xAFF0 #define EVRSPROFILE_SYSID_UUID 0xAFF2 #define EVRSPROFILE_DEVID_UUID 0xAFF4 #define EVRSPROFILE_CMD_UUID 0xAFF8 #define EVRSPROFILE_DATA_UUID 0xAFFE #define ETX_ADTYPE_DEST 0xAF #define ETX_ADTYPE_DEVID 0xAE #define ETX_DEVID_LEN 4 #define ETX_DEVID_PREFIX 0x95 // Application states typedef enum { EBS_STATE_INIT, EBS_STATE_DISCOVERY, EBS_STATE_UPLOAD, 
EBS_STATE_POLLING } EbsState_t; // Discovery states typedef enum { EBS_DISC_STATE_IDLE, // Idle EBS_DISC_STATE_MTU, // Exchange ATT MTU size EBS_DISC_STATE_SVC, // Service discovery EBS_DISC_STATE_CHAR // Characteristic discovery } EbsDiscState_t; // Polling states typedef enum { EBS_POLL_STATE_IDLE, EBS_POLL_STATE_CONNECT, EBS_POLL_STATE_READ, EBS_POLL_STATE_WRITE, EBS_POLL_STATE_TERMINATE } EbsPollState_t; typedef enum { EVRSPROFILE_SYSID, EVRSPROFILE_DEVID, EVRSPROFILE_CMD, EVRSPROFILE_DATA } ProfileId_t; /********************************************************************* * TYPEDEFS */ // App event passed from profiles. typedef struct { appEvtHdr_t hdr; // event header uint8_t *pData; // event data } EbsEvt_t; /** * Type of device discovery (Scan) to perform. */ typedef struct { //char localName[20]; //!< Device's Name uint8_t addrType; //!< Address Type: @ref ADDRTYPE_DEFINES uint8_t addr[B_ADDR_LEN]; //!< Device's Address uint8_t txDevID[ETX_DEVID_LEN]; // Tx Id } DevRecInfo_t; typedef struct { uint8_t addrType; //!< Address Type: @ref ADDRTYPE_DEFINES uint8_t addr[B_ADDR_LEN]; //!< Device's Address uint8_t txDevID[ETX_DEVID_LEN]; // Tx Id uint16_t connHdl; // connection handle EbsPollState_t state; // connection state } TargetInfo_t; /********************************************************************* * GLOBAL VARIABLES */ // Display Interface // Display_Handle dispHdl = NULL; /********************************************************************* * EXTERNAL VARIABLES */ /********************************************************************* * LOCAL VARIABLES */ // Entity ID globally used to check for source and/or destination of messages static ICall_EntityID selfEntity; // Semaphore globally used to post events to the application thread static ICall_Semaphore sem; // Clock object used to signal timeout static Clock_Struct startDiscClock; // Clock object used to timeout connection static Clock_Struct connectingClock; // Queue object used for app messages 
static Queue_Struct appMsg; static Queue_Handle appMsgQueue; // Task pending events static uint16_t events = 0; // Task configuration Task_Struct ebsTask; Char ebsTaskStack[EBS_TASK_STACK_SIZE]; // GAP GATT Attributes static const uint8_t attDeviceName[GAP_DEVICE_NAME_LEN] = "EVRS BaseStation"; // Number of scan results and scan result index static uint8_t scanRes; static uint8_t scanIdx; // Scan result list static DevRecInfo_t discTxList[MAX_SCAN_RES]; // Scanning state static bool scanningStarted = FALSE; // Connection handle of current connection //static uint16_t connHandleList[MAX_NUM_BLE_CONNS] = GAP_CONNHANDLE_INIT; // Application state static EbsState_t ebsState = EBS_STATE_INIT; //static bleState_t state = BLE_STATE_IDLE; // Discovery state static EbsDiscState_t discState = EBS_DISC_STATE_IDLE; // Discovered service start and end handle static uint16_t svcStartHdl = 0; static uint16_t svcEndHdl = 0; // Discovered characteristic handle static uint16_t charHdl[4] = {0}; // GATT read/write procedure state static bool procedureInProgress = FALSE; // Maximum PDU size (default = 27 octets) static uint16_t maxPduSize; // Array of RSSI read structures readRssi_t readRssi[MAX_NUM_BLE_CONNS]; // counter for profile found int profileCounter = 0; // Base Station Identifier uint8_t baseStationID = 0x02; // Target Tx List TargetInfo_t targetList[MAX_NUM_BLE_CONNS]; TargetInfo_t* pVacantSlot = targetList; TargetInfo_t* pConnectingSlot = NULL; Semaphore_Handle targetConnSem; // test int tcounter = 0; /********************************************************************* * LOCAL FUNCTIONS */ static void EBS_init(void); static void EBS_taskFxn(UArg a0, UArg a1); static void EBS_processGATTMsg(gattMsgEvent_t *pMsg); static void EBS_handleKeys(uint8_t shift, uint8_t keys); static void EBS_processStackMsg(ICall_Hdr *pMsg); static void EBS_processAppMsg(EbsEvt_t *pMsg); static void EBS_processRoleEvent(gapCentralRoleEvent_t *pEvent); static void 
EBS_processGATTDiscEvent(gattMsgEvent_t *pMsg); static uint8_t EBS_writeCharbyHandle(uint16_t connHandle, ProfileId_t charHdlId, uint8_t* pData, uint8_t len); static uint8_t EBS_readCharbyHandle(uint16_t connHandle, ProfileId_t charHdlId); static void EBS_startDiscovery(void); static bool EBS_findSvcUuid(uint16_t uuid, uint8_t *pData, uint8_t dataLen); static void EBS_discoverDevices(void); void EBS_timeoutConnecting(UArg arg0); static bool EBS_checkBSId(uint8_t bsID, uint8_t *pEvtData, uint8_t dataLen); static void EBS_addDeviceInfo(uint8_t *pAddr, uint8_t addrType); // static bool EBS_findLocalName(uint8_t *pEvtData, uint8_t dataLen); static void EBS_addDeviceID(uint8_t index, uint8_t *pEvtData, uint8_t dataLen); static void EBS_processPairState(uint8_t pairState, uint8_t status); //static void EBS_processPasscode(uint16_t connectionHandle, // uint8_t uiOutputs); static void EBS_processCmdCompleteEvt(hciEvt_CmdComplete_t *pMsg); static uint8_t EBS_eventCB(gapCentralRoleEvent_t *pEvent); //static void EBS_passcodeCB(uint8_t *deviceAddr, // uint16_t connHandle, uint8_t uiInputs, uint8_t uiOutputs); static void EBS_pairStateCB(uint16_t connHandle, uint8_t pairState, uint8_t status); void EBS_startDiscHandler(UArg a0); void EBS_keyChangeHandler(uint8_t keys); static void EBS_updateEbsState(EbsState_t newState); static void EBS_stateChange(EbsState_t newState); static void EBS_updatePollState(uint8_t targetIndex, EbsPollState_t newState); static void EBS_updateTargetList(uint8_t* txID); static uint32_t EBS_parseDevID(uint8_t* devID); /********************************************************************* * PROFILE CALLBACKS */ // GAP Role Callbacks static gapCentralRoleCB_t EBS_roleCB = { EBS_eventCB // Event callback }; // Bond Manager Callbacks static gapBondCBs_t EBS_bondCB = { NULL, // Passcode callback EBS_pairStateCB // Pairing state callback }; /********************************************************************* * PUBLIC FUNCTIONS */ 
/********************************************************************* * @fn SimpleBLEPeripheral_createTask * * @brief Task creation function for the Simple BLE Peripheral. * * @param none * * @return none */ void EBS_createTask(void) { Task_Params taskParams; // Configure task Task_Params_init(&taskParams); taskParams.stack = ebsTaskStack; taskParams.stackSize = EBS_TASK_STACK_SIZE; taskParams.priority = EBS_TASK_PRIORITY; Task_construct(&ebsTask, EBS_taskFxn, &taskParams, NULL); } /********************************************************************* * @fn EBS_Init * * @brief Initialization function for the Simple BLE Central App Task. * This is called during initialization and should contain * any application specific initialization (ie. hardware * initialization/setup, table initialization, power up * notification). * * @param none * * @return none */ static void EBS_init(void) { uint8_t i; // ****************************************************************** // N0 STACK API CALLS CAN OCCUR BEFORE THIS CALL TO ICall_registerApp // ****************************************************************** // Register the current thread as an ICall dispatcher application // so that the application can send and receive messages. ICall_registerApp(&selfEntity, &sem); // Create an RTOS queue for message from profile to be sent to app. 
appMsgQueue = Util_constructQueue(&appMsg); // Setup discovery delay as a one-shot timer Util_constructClock(&startDiscClock, EBS_startDiscHandler, SVC_DISCOVERY_DELAY, 0, false, 0); // Set initial connection parameter values GAP_SetParamValue(TGAP_CONN_EST_INT_MIN, INITIAL_MIN_CONN_INTERVAL); GAP_SetParamValue(TGAP_CONN_EST_INT_MAX, INITIAL_MAX_CONN_INTERVAL); GAP_SetParamValue(TGAP_CONN_EST_SUPERV_TIMEOUT, INITIAL_CONN_TIMEOUT); GAP_SetParamValue(TGAP_CONN_EST_LATENCY, INITIAL_SLAVE_LATENCY); // Construct clock for connecting timeout Util_constructClock(&connectingClock, EBS_timeoutConnecting, DEFAULT_SCAN_DURATION, 0, false, 0); Board_initKeys(EBS_keyChangeHandler); Board_initLEDs(); Board_Display_Init(); // Initialize internal data for (i = 0; i < MAX_NUM_BLE_CONNS; i++) { readRssi[i].connHandle = GAP_CONNHANDLE_ALL; readRssi[i].pClock = NULL; } // Setup Central Profile { uint8_t maxScanRes = MAX_SCAN_RES; GAPCentralRole_SetParameter(GAPCENTRALROLE_MAX_SCAN_RES, sizeof(uint8_t), &maxScanRes); } // Setup GAP GAP_SetParamValue(TGAP_GEN_DISC_SCAN, DEFAULT_SCAN_DURATION); GAP_SetParamValue(TGAP_LIM_DISC_SCAN, DEFAULT_SCAN_DURATION); GGS_SetParameter(GGS_DEVICE_NAME_ATT, GAP_DEVICE_NAME_LEN, (void *) attDeviceName); // Setup the GAP Bond Manager { uint32_t passkey = 0; // passkey "000000" uint8_t pairMode = GAPBOND_PAIRING_MODE_WAIT_FOR_REQ; uint8_t mitm = FALSE; uint8_t ioCap = GAPBOND_IO_CAP_NO_INPUT_NO_OUTPUT; uint8_t bonding = FALSE; GAPBondMgr_SetParameter(GAPBOND_DEFAULT_PASSCODE, sizeof(uint32_t), &passkey); GAPBondMgr_SetParameter(GAPBOND_PAIRING_MODE, sizeof(uint8_t), &pairMode); GAPBondMgr_SetParameter(GAPBOND_MITM_PROTECTION, sizeof(uint8_t), &mitm); GAPBondMgr_SetParameter(GAPBOND_IO_CAPABILITIES, sizeof(uint8_t), &ioCap); GAPBondMgr_SetParameter(GAPBOND_BONDING_ENABLED, sizeof(uint8_t), &bonding); } // Initialize GATT Client VOID GATT_InitClient(); // Register to receive incoming ATT Indications/Notifications GATT_RegisterForInd(selfEntity); // 
Initialize GATT attributes GGS_AddService(GATT_ALL_SERVICES); // GAP GATTServApp_AddService(GATT_ALL_SERVICES); // GATT attributes // Start the Device VOID GAPCentralRole_StartDevice(&EBS_roleCB); // Register with bond manager after starting device GAPBondMgr_Register(&EBS_bondCB); // Register with GAP for HCI/Host messages (for RSSI) GAP_RegisterForMsgs(selfEntity); // Register for GATT local events and ATT Responses pending for transmission GATT_RegisterForMsgs(selfEntity); targetConnSem = Semaphore_create(0, NULL, NULL); Board_ledControl(BOARD_LED_ID_G, BOARD_LED_STATE_FLASH, 300); } /********************************************************************* * @fn EBS_taskFxn * * @brief Application task entry point for the Simple BLE Central. * * @param none * * @return events not processed */ static void EBS_taskFxn(UArg a0, UArg a1) { // Initialize application EBS_init(); // Application main loop for (;;) { // Waits for a signal to the semaphore associated with the calling thread. // Note that the semaphore associated with a thread is signaled when a // message is queued to the message receive queue of the thread or when // ICall_signal() function is called onto the semaphore. 
ICall_Errno errno = ICall_wait(ICALL_TIMEOUT_FOREVER); if (errno == ICALL_ERRNO_SUCCESS) { ICall_EntityID dest; ICall_ServiceEnum src; ICall_HciExtEvt *pMsg = NULL; if (ICall_fetchServiceMsg(&src, &dest, (void **) &pMsg) == ICALL_ERRNO_SUCCESS) { if ((src == ICALL_SERVICE_CLASS_BLE) && (dest == selfEntity)) { // Process inter-task message EBS_processStackMsg((ICall_Hdr *) pMsg); } if (pMsg) { ICall_freeMsg(pMsg); } } } // If RTOS queue is not empty, process app message while (!Queue_empty(appMsgQueue)) { EbsEvt_t *pMsg = (EbsEvt_t *) Util_dequeueMsg(appMsgQueue); if (pMsg) { // Process message EBS_processAppMsg(pMsg); // Free the space from the message ICall_free(pMsg); } } if (events & EBS_START_DISCOVERY_EVT) { events &= ~EBS_START_DISCOVERY_EVT; EBS_startDiscovery(); } } } /********************************************************************* * @fn EBS_processStackMsg * * @brief Process an incoming task message. * * @param pMsg - message to process * * @return none */ static void EBS_processStackMsg(ICall_Hdr *pMsg) { switch (pMsg->event) { case GAP_MSG_EVENT: EBS_processRoleEvent((gapCentralRoleEvent_t *) pMsg); break; case GATT_MSG_EVENT: EBS_processGATTMsg((gattMsgEvent_t *) pMsg); break; case HCI_GAP_EVENT_EVENT: { // Process HCI message switch (pMsg->status) { case HCI_COMMAND_COMPLETE_EVENT_CODE: EBS_processCmdCompleteEvt( (hciEvt_CmdComplete_t *) pMsg); break; default: break; } } break; default: break; } } /********************************************************************* * @fn EBS_processAppMsg * * @brief Central application event processing function. 
* * @param pMsg - pointer to event structure * * @return none */ static void EBS_processAppMsg(EbsEvt_t *pMsg) { switch (pMsg->hdr.event) { case EBS_STACK_MSG_EVT: EBS_processStackMsg((ICall_Hdr *) pMsg->pData); // Free the stack message ICall_freeMsg(pMsg->pData); break; case EBS_STATE_CHANGE_EVT: EBS_stateChange((EbsState_t)pMsg->hdr.state); break; case EBS_KEY_CHANGE_EVT: EBS_handleKeys(0, pMsg->hdr.state); break; case EBS_RSSI_READ_EVT: { readRssi_t *pRssi = (readRssi_t *) pMsg->pData; // If link is up and RSSI reads active if (pRssi->connHandle != GAP_CONNHANDLE_ALL && linkDB_Up(pRssi->connHandle)) { // Restart timer Util_restartClock(pRssi->pClock, pRssi->period); // Read RSSI VOID HCI_ReadRssiCmd(pRssi->connHandle); } } break; // Pairing event case EBS_PAIRING_STATE_EVT: { EBS_processPairState(pMsg->hdr.state, *pMsg->pData); ICall_free(pMsg->pData); break; } /* Passcode event case EBS_PASSCODE_NEEDED_EVT: { EBS_processPasscode(connHandle, *pMsg->pData); ICall_free(pMsg->pData); break; } */ // Connecting to device timed out case EBS_CONNECTING_TIMEOUT_EVT: { GAPCentralRole_TerminateLink(pConnectingSlot->connHdl); } default: // Do nothing. break; } } /********************************************************************* * @fn EBS_processRoleEvent * * @brief Central role event processing function. 
 *
 * @param   pEvent - pointer to event structure
 *
 * @return  none
 */
static void EBS_processRoleEvent(gapCentralRoleEvent_t *pEvent)
{
    switch (pEvent->gap.opcode)
    {
    case GAP_DEVICE_INIT_DONE_EVENT:
    {
        // Stack initialized: cache the max PDU size and announce identity.
        maxPduSize = pEvent->initDone.dataPktLen;
        uout0("EVRS BS initialized");
        uout0(Util_convertBdAddr2Str(pEvent->initDone.devAddr));
        uout1("BS ID: 0x%02x", baseStationID);
    }
        break;

    case GAP_DEVICE_INFO_EVENT:
    {
        //Find tx device address by UUID
        // Only record the advertiser if it carries the EVRS service UUID
        // AND addresses this base station's ID.
        if (EBS_findSvcUuid(EVRSPROFILE_SERV_UUID,
                pEvent->deviceInfo.pEvtData, pEvent->deviceInfo.dataLen)
                && EBS_checkBSId(baseStationID, pEvent->deviceInfo.pEvtData,
                        pEvent->deviceInfo.dataLen))
        {
            EBS_addDeviceInfo(pEvent->deviceInfo.addr,
                    pEvent->deviceInfo.addrType);
        }

        // Check if the discovered device is already in scan results
        uint8_t index;
        for (index = 0; index < scanRes; index++)
        {
            if (memcmp(pEvent->deviceInfo.addr, discTxList[index].addr,
                    B_ADDR_LEN) == 0)
            {
                //Update deviceInfo entry with the name
                EBS_addDeviceID(index, pEvent->deviceInfo.pEvtData,
                        pEvent->deviceInfo.dataLen);
            }
        }
    }
        break;

    case GAP_DEVICE_DISCOVERY_EVENT:
    {
        // discovery complete
        scanningStarted = FALSE;

        // initialize scan index to first
        scanIdx = 0;

        uout1("%d Device(s) found", scanRes);
        EBS_updateEbsState(EBS_STATE_UPLOAD);
    }
        break;

    case GAP_LINK_ESTABLISHED_EVENT:
    {
        if (pEvent->gap.hdr.status == SUCCESS)
        {
            //Connect to selected device
            //state = BLE_STATE_CONNECTED;
            //connHandle = pEvent->linkCmpl.connectionHandle;
            procedureInProgress = TRUE;

            // If service discovery not performed initiate service discovery
            if (charHdl[0] == 0)
            {
                Util_startClock(&startDiscClock);
            }

            //Find device ID in discTxList struct
            uint8_t i;
            for (i = 0; i < scanRes; i++)
            {
                // NOTE(review): memcmp returns int; "== NULL" happens to work
                // (NULL == 0) but "== 0" is the intended idiom.
                if (memcmp(pEvent->linkCmpl.devAddr, targetList[i].addr,
                        B_ADDR_LEN) == NULL)
                {
                    break;
                }
            }
            // NOTE(review): if no entry matched, i == scanRes here and the
            // writes below index past the populated entries — TODO confirm
            // the peer address is always guaranteed to be in targetList.
            targetList[i].connHdl = pEvent->linkCmpl.connectionHandle;
            uout1("Tx ID 0x%08x Connected",
                    EBS_parseDevID(targetList[i].txDevID));
            uout1("Tx Addr %s",
                    Util_convertBdAddr2Str(pEvent->linkCmpl.devAddr));
        }
        else
        {
            //connHandle = GAP_CONNHANDLE_INIT;
            discState = EBS_DISC_STATE_IDLE;

            uout1("Connect Failed: 0x%02x", pEvent->gap.hdr.status);
        }
    }
        break;

    case GAP_LINK_TERMINATED_EVENT:
    {
        //state = BLE_STATE_IDLE;
        //connHandle = GAP_CONNHANDLE_INIT;
        discState = EBS_DISC_STATE_IDLE;
        // Reset cached characteristic handles and discovery bookkeeping.
        memset(charHdl, 0x00, 4);
        profileCounter = 0;
        procedureInProgress = FALSE;
        // NOTE(review): target index 1 is hard-coded here — TODO confirm
        // which polling slot should be reset on disconnect.
        EBS_updatePollState(1, EBS_POLL_STATE_IDLE);

        // Cancel RSSI reads
        EBS_CancelRssi(pEvent->linkTerminate.connectionHandle);

        //Clear screen and display disconnect reason
        uout1("Disconnected: 0x%02x", pEvent->linkTerminate.reason);
    }
        break;

        /*
         case GAP_LINK_PARAM_UPDATE_EVENT:
         {
         if (state == BLE_STATE_CONNECTED)
         {
         if (pEvent->linkUpdate.status == SUCCESS)
         {
         Display_print1(dispHdl, ROW_FOUR, 0, "ParUpd: %d ms",
         pEvent->linkUpdate.connInterval * 1.25);
         }
         else
         {
         Display_print1(dispHdl, ROW_FOUR, 0, "Param error: %d",
         pEvent->linkUpdate.status);
         }
         }
         }
         break;
         */

    default:
        break;
    }
}

/*********************************************************************
 * @fn      EBS_processGATTMsg
 *
 * @brief   Process GATT messages and events.
 *
 * @return  none
 */
static void EBS_processGATTMsg(gattMsgEvent_t *pMsg)
{
    // GATT traffic is only acted upon while actively polling transmitters.
    if (ebsState == EBS_STATE_POLLING)
    {
        // See if GATT server was unable to transmit an ATT response
        if (pMsg->hdr.status == blePending)
        {
            // No HCI buffer was available. App can try to retransmit the response
            // on the next connection event. Drop it for now.
            uout1("ATT Rsp drped %d", pMsg->method);
        }
        else if ((pMsg->method == ATT_READ_RSP)
                || ((pMsg->method == ATT_ERROR_RSP)
                        && (pMsg->msg.errorRsp.reqOpcode == ATT_READ_REQ)))
        {
            if (pMsg->method == ATT_ERROR_RSP)
            {
                uout1("Read Error 0x%02x", pMsg->msg.errorRsp.errCode);
            }
            else
            {
                // After a successful read, display the read value
                uout1("Read rsp: 0x%02x", pMsg->msg.readRsp.pValue[0]);
                // A completed read advances the poll state machine to WRITE.
                EBS_updatePollState(0, EBS_POLL_STATE_WRITE);
            }

            procedureInProgress = FALSE;
        }
        else if ((pMsg->method == ATT_WRITE_RSP)
                || ((pMsg->method == ATT_ERROR_RSP)
                        && (pMsg->msg.errorRsp.reqOpcode == ATT_WRITE_REQ)))
        {
            if (pMsg->method == ATT_ERROR_RSP)
            {
                uout1("Write Error 0x%02x", pMsg->msg.errorRsp.errCode);
            }
            else
            {
                // After a successful write, display the value that was written and
                // increment value
                uout0("Write done");
                // A completed write advances the poll state machine to TERMINATE.
                EBS_updatePollState(0, EBS_POLL_STATE_TERMINATE);
            }

            procedureInProgress = FALSE;
        }
        else if (pMsg->method == ATT_FLOW_CTRL_VIOLATED_EVENT)
        {
            // ATT request-response or indication-confirmation flow control is
            // violated. All subsequent ATT requests or indications will be dropped.
            // The app is informed in case it wants to drop the connection.

            // Display the opcode of the message that caused the violation.
            uout1("FC Violated: %d", pMsg->msg.flowCtrlEvt.opcode);
        }
        else if (pMsg->method == ATT_MTU_UPDATED_EVENT)
        {
            // MTU size updated
            uout1("MTU Size: %d", pMsg->msg.mtuEvt.MTU);
        }
        else if (discState != EBS_DISC_STATE_IDLE)
        {
            // Anything else during discovery belongs to the discovery FSM.
            EBS_processGATTDiscEvent(pMsg);
        }
    } // else - in case a GATT message came after a connection has dropped, ignore it.

    // Needed only for ATT Protocol messages
    GATT_bm_free(&pMsg->msg, pMsg->method);
}

/*********************************************************************
 * @fn      EBS_processCmdCompleteEvt
 *
 * @brief   Process an incoming OSAL HCI Command Complete Event.
 *
 * @param   pMsg - message to process
 *
 * @return  none
 */
static void EBS_processCmdCompleteEvt(hciEvt_CmdComplete_t *pMsg)
{
    switch (pMsg->cmdOpcode)
    {
    case HCI_READ_RSSI:
    {
        if (ebsState == EBS_STATE_POLLING)
        {
            // Return-parameter byte 3 carries the signed RSSI reading;
            // it is negated for display as a positive "-dB" figure.
            int8 rssi = (int8) pMsg->pReturnParam[3];

            uout1("RSSI -dB: %d", (uint32_t )(-rssi));
        }
    }
        break;

    default:
        break;
    }
}

/*********************************************************************
 * @fn      EBS_processPairState
 *
 * @brief   Process the new paring state.
 *
 * @return  none
 */
static void EBS_processPairState(uint8_t pairState, uint8_t status)
{
    if (pairState == GAPBOND_PAIRING_STATE_STARTED)
    {
        uout0("Pairing started");
    }
    else if (pairState == GAPBOND_PAIRING_STATE_COMPLETE)
    {
        if (status == SUCCESS)
        {
            uout0("Pairing success");
        }
        else
        {
            uout1("Pairing fail: %d", status);
        }
    }
    else if (pairState == GAPBOND_PAIRING_STATE_BONDED)
    {
        if (status == SUCCESS)
        {
            uout0("Bonding success");
        }
    }
    else if (pairState == GAPBOND_PAIRING_STATE_BOND_SAVED)
    {
        if (status == SUCCESS)
        {
            uout0("Bond save succ");
        }
        else
        {
            uout1("Bond save fail: %d", status);
        }
    }
}

/*********************************************************************
 * @fn      EBS_processPasscode
 *
 * @brief   Process the Passcode request.
 *
 * @return  none
 **************************
 static void EBS_processPasscode(uint16_t connectionHandle, uint8_t uiOutputs)
 {
 uint32_t passcode;

 // Create random passcode
 passcode = Util_GetTRNG();
 passcode %= 1000000;

 // Display passcode to user
 if (uiOutputs != 0)
 {
 Display_print0(dispHdl, ROW_FOUR, 0, "Passcode:");
 Display_print1(dispHdl, ROW_FIVE, 0, "%d", passcode);
 }

 // Send passcode response
 GAPBondMgr_PasscodeRsp(connectionHandle, SUCCESS, passcode);
 }
 */

/*********************************************************************
 * @fn      EBS_startDiscovery
 *
 * @brief   Start service discovery.
 *
 * @return  none
 */
static void EBS_startDiscovery(void)
{
    // NOTE(review): only referenced by the commented-out MTU exchange below;
    // currently unused.
    attExchangeMTUReq_t req;

    // Initialize cached handles
    svcStartHdl = svcEndHdl = 0;
    memset(charHdl, 0x00, 4);

    discState = EBS_DISC_STATE_SVC;

    // Discovery simple BLE service
    uint8_t uuid[ATT_BT_UUID_SIZE] = { LO_UINT16(EVRSPROFILE_SERV_UUID),
            HI_UINT16(EVRSPROFILE_SERV_UUID) };
    VOID GATT_DiscPrimaryServiceByUUID(pConnectingSlot->connHdl, uuid,
            ATT_BT_UUID_SIZE, selfEntity);

    // Discover GATT Server's Rx MTU size
    //req.clientRxMTU = maxPduSize - L2CAP_HDR_SIZE;

    // ATT MTU size should be set to the minimum of the Client Rx MTU
    // and Server Rx MTU values
    //VOID GATT_ExchangeMTU(connHandle, &req, selfEntity);
}

/*********************************************************************
 * @fn      EBS_processGATTDiscEvent
 *
 * @brief   Process GATT discovery event
 *
 * @return  none
 */
static void EBS_processGATTDiscEvent(gattMsgEvent_t *pMsg)
{
    if (discState == EBS_DISC_STATE_SVC)
    {
        // Service found, store handles
        if (pMsg->method == ATT_FIND_BY_TYPE_VALUE_RSP
                && pMsg->msg.findByTypeValueRsp.numInfo > 0)
        {
            svcStartHdl = ATT_ATTR_HANDLE(
                    pMsg->msg.findByTypeValueRsp.pHandlesInfo, 0);
            svcEndHdl = ATT_GRP_END_HANDLE(
                    pMsg->msg.findByTypeValueRsp.pHandlesInfo, 0);
        }

        // If procedure complete
        if (((pMsg->method == ATT_FIND_BY_TYPE_VALUE_RSP)
                && (pMsg->hdr.status == bleProcedureComplete))
                || (pMsg->method == ATT_ERROR_RSP))
        {
            if (svcStartHdl != 0)
            {
                // Discover characteristic
                VOID GATT_DiscAllChars(pConnectingSlot->connHdl, svcStartHdl,
                        svcEndHdl, selfEntity);
                discState = EBS_DISC_STATE_CHAR;
            }
        }
    }
    else if (discState == EBS_DISC_STATE_CHAR)
    {
        // Characteristic found, store handle
        if ((pMsg->method == ATT_READ_BY_TYPE_RSP)
                && (pMsg->msg.readByTypeRsp.numPairs > 0))
        {
            // Each record in pDataList is stepped through with a stride of 7;
            // offsets 3-4 are read as the value handle and offset 5 as the
            // low byte of the 16-bit characteristic UUID — presumably
            // decl handle (2) + properties (1) + value handle (2) + UUID (2).
            // TODO confirm against the stack's ATT_ReadByTypeRsp format.
            for (int counter = 0; counter < pMsg->msg.readByTypeRsp.numPairs;
                    counter++)
            {
                switch (*(pMsg->msg.readByTypeRsp.pDataList + counter*7 + 5))
                {
                case LO_UINT16(EVRSPROFILE_SYSID_UUID):
                    charHdl[EVRSPROFILE_SYSID] = BUILD_UINT16(
                            *(pMsg->msg.readByTypeRsp.pDataList + counter*7
                                    + 3),
                            *(pMsg->msg.readByTypeRsp.pDataList + counter*7
                                    + 4));
                    profileCounter++;
                    break;
                case LO_UINT16(EVRSPROFILE_DEVID_UUID):
                    charHdl[EVRSPROFILE_DEVID] = BUILD_UINT16(
                            *(pMsg->msg.readByTypeRsp.pDataList + counter*7
                                    + 3),
                            *(pMsg->msg.readByTypeRsp.pDataList + counter*7
                                    + 4));
                    profileCounter++;
                    break;
                case LO_UINT16(EVRSPROFILE_CMD_UUID):
                    charHdl[EVRSPROFILE_CMD] = BUILD_UINT16(
                            *(pMsg->msg.readByTypeRsp.pDataList + counter*7
                                    + 3),
                            *(pMsg->msg.readByTypeRsp.pDataList + counter*7
                                    + 4));
                    profileCounter++;
                    break;
                case LO_UINT16(EVRSPROFILE_DATA_UUID):
                    charHdl[EVRSPROFILE_DATA] = BUILD_UINT16(
                            *(pMsg->msg.readByTypeRsp.pDataList + counter*7
                                    + 3),
                            *(pMsg->msg.readByTypeRsp.pDataList + counter*7
                                    + 4));
                    profileCounter++;
                    break;
                }
            }
        }
        // NOTE(review): relies on && binding tighter than || — the grouping
        // matches the parenthesized form used in the SVC branch above.
        else if ((pMsg->method == ATT_READ_BY_TYPE_RSP)
                && (pMsg->hdr.status == bleProcedureComplete)
                || (pMsg->method == ATT_ERROR_RSP))
        {
            uout1("%d Profile(s) Found ", profileCounter);
            procedureInProgress = FALSE;
            discState = EBS_DISC_STATE_IDLE;
            // Discovery finished: start the poll cycle with a read.
            EBS_updatePollState(0, EBS_POLL_STATE_READ);
        }
    }
}

/*********************************************************************
 * @fn      EBS_findSvcUuid
 *
 * @brief   Find a given UUID in an advertiser's service UUID list.
* * @return TRUE if service UUID found */ static bool EBS_findSvcUuid(uint16_t uuid, uint8_t *pData, uint8_t dataLen) { uint8_t adLen; uint8_t adType; uint8_t *pEnd; pEnd = pData + dataLen - 1; // While end of data not reached while (pData < pEnd) { // Get length of next AD item adLen = *pData++; if (adLen > 0) { adType = *pData; // If AD type is for 16-bit service UUID if ((adType == GAP_ADTYPE_16BIT_MORE) || (adType == GAP_ADTYPE_16BIT_COMPLETE)) { pData++; adLen--; // For each UUID in list while (adLen >= 2 && pData < pEnd) { // Check for match if ((pData[0] == LO_UINT16(uuid)) && (pData[1] == HI_UINT16(uuid))) { // Match found return TRUE; } // Go to next AD item pData += 2; adLen -= 2; } // Handle possible erroneous extra byte in UUID list if (adLen == 1) { pData++; } } else { // Go to next AD item pData += adLen; } } } // Match not found return FALSE; } /********************************************************************* * @fn EBS_discoverDevices * * @brief Scan to discover devices. * * @return none */ static void EBS_discoverDevices(void) { if (!scanningStarted) { scanningStarted = TRUE; //Clear old scan results scanRes = 0; memset(discTxList, NULL, sizeof(discTxList[0]) * MAX_SCAN_RES); uout0("Discovering..."); GAPCentralRole_StartDiscovery(DEFAULT_DISCOVERY_MODE, DEFAULT_DISCOVERY_ACTIVE_SCAN, DEFAULT_DISCOVERY_WHITE_LIST); } else { GAPCentralRole_CancelDiscovery(); } } /********************************************************************** * @fn EBS_timeoutConnecting * * @brief Post event if connecting is timed out. 
 *
 * @return  none
 */
Void EBS_timeoutConnecting(UArg arg0)
{
    // Clock callback: runs outside the task context, so only enqueue an
    // application event; the actual link teardown happens in the task.
    if (pConnectingSlot != NULL)
    {
        EBS_enqueueMsg(EBS_CONNECTING_TIMEOUT_EVT, 0, NULL);
    }
}

// Scan the advertising payload for an ETX_ADTYPE_DEST AD item and report
// whether its first data byte matches this base station's ID.
static bool EBS_checkBSId(uint8_t bsID, uint8_t *pEvtData, uint8_t dataLen)
{
    uint8_t adLen;
    uint8_t adType;
    uint8_t *pEnd;
    int ii = 0;

    pEnd = pEvtData + dataLen - 1;

    //Display_print5(dispHdl, 9, 0,"len %d, 0x%02x, 0x%02x, 0x%02x, 0x%02x",
    //  dataLen,pEvtData[8],pEvtData[9],pEvtData[10],pEvtData[11]);

    // While end of data not reached
    while (pEvtData < pEnd)
    {
        // Get length of next data item
        adLen = *pEvtData++;
        if (adLen > 0)
        {
            adType = *pEvtData;
            //Display_print1(dispHdl, ii+9, 0, "0x%02x",adType);

            // If AD type is for local name
            if (adType == ETX_ADTYPE_DEST)
            {
                pEvtData++;

                // For base station identifier in the advert data
                return (*pEvtData == bsID);
            }
            else
            {
                // Go to next item
                pEvtData += adLen;
                ii++;
            }
        }
    }

    // No name found
    return FALSE;
}

/*********************************************************************
 * @fn      EBS_addDeviceInfo
 *
 * @brief   Add a device to the device discovery result list
 *          (deduplicated by Bluetooth address, capped at MAX_SCAN_RES).
 *
 * @return  none
 */
static void EBS_addDeviceInfo(uint8_t *pAddr, uint8_t addrType)
{
    uint8_t i;

    // If result count not at max
    if (scanRes < MAX_SCAN_RES)
    {
        // Check if device is already in scan results
        for (i = 0; i < scanRes; i++)
        {
            if (memcmp(pAddr, discTxList[i].addr, B_ADDR_LEN) == 0)
            {
                return;
            }
        }

        // Add addr to scan result list
        memcpy(discTxList[scanRes].addr, pAddr, B_ADDR_LEN);
        discTxList[scanRes].addrType = addrType;

        // Increment scan result count
        scanRes++;
    }
}

/*********************************************************************
 * @fn      EBS_findLocalName
 *
 * @brief   Check if pEvtData contains a device local name
 *
 * @return  TRUE if local name found
 *******************************************************
 static bool EBS_findLocalName(uint8_t *pEvtData, uint8_t dataLen)
 {
 uint8_t adLen;
 uint8_t adType;
 uint8_t *pEnd;

 pEnd = pEvtData + dataLen - 1;

 // While end of data not reached
 while (pEvtData < pEnd)
 {
 // Get length of
next data item adLen = *pEvtData++; if (adLen > 0) { adType = *pEvtData; // If AD type is for local name if ((adType == GAP_ADTYPE_LOCAL_NAME_SHORT) || (adType == GAP_ADTYPE_LOCAL_NAME_COMPLETE)) { pEvtData++; adLen--; // For each local name in list if (adLen >= 2 && pEvtData < pEnd) { return TRUE; } // Handle possible erroneous extra byte in advertisement data if (adLen == 1) { pEvtData++; } } else { // Go to next item pEvtData += adLen; } } } // No name found return FALSE; } */ /********************************************************************* * @fn EBS_addDeviceName * * @brief Add a name to an existing device in the scan result list * * @return none */ static void EBS_addDeviceID(uint8_t index, uint8_t *pEvtData, uint8_t dataLen) { uint8_t scanRspLen; uint8_t scanRspType; uint8_t *pEnd; pEnd = pEvtData + dataLen - 1; // While end of data not reached while (pEvtData < pEnd) { // Get length of next scan response item scanRspLen = *pEvtData++; if (scanRspLen > 0) { scanRspType = *pEvtData; // If scan response type is for local name if (scanRspType == ETX_ADTYPE_DEVID) { //Set name length in the device struct. pEvtData++; //Copy device id from the scan response data for (int j = 0; j < ETX_DEVID_LEN; j++) discTxList[index].txDevID[j] = *pEvtData++; } } else { // Go to next scan response item pEvtData += scanRspLen; } } } /********************************************************************* * @fn EBS_eventCB * * @brief Central event callback function. * * @param pEvent - pointer to event structure * * @return TRUE if safe to deallocate event message, FALSE otherwise. 
 */
static uint8_t EBS_eventCB(gapCentralRoleEvent_t *pEvent)
{
    // Forward the role event to the application
    if (EBS_enqueueMsg(EBS_STACK_MSG_EVT, SUCCESS, (uint8_t *) pEvent))
    {
        // App will process and free the event
        return FALSE;
    }

    // Caller should free the event
    return TRUE;
}

/*********************************************************************
 * @fn      EBS_pairStateCB
 *
 * @brief   Pairing state callback. Copies the status onto the heap and
 *          queues it to the application task.
 *
 * @return  none
 */
static void EBS_pairStateCB(uint16_t connHandle, uint8_t pairState,
        uint8_t status)
{
    uint8_t *pData;

    // Allocate space for the event data.
    if ((pData = ICall_malloc(sizeof(uint8_t))))
    {
        *pData = status;

        // Queue the event.
        EBS_enqueueMsg(EBS_PAIRING_STATE_EVT, pairState, pData);
    }
}

/*********************************************************************
 * @fn      EBS_passcodeCB
 *
 * @brief   Passcode callback.
 *
 * @return  none
 *********************************************************
 static void EBS_passcodeCB(uint8_t *deviceAddr, uint16_t connHandle,
 uint8_t uiInputs, uint8_t uiOutputs)
 {
 uint8_t *pData;

 // Allocate space for the passcode event.
 if ((pData = ICall_malloc(sizeof(uint8_t))))
 {
 *pData = uiOutputs;

 // Enqueue the event.
 EBS_enqueueMsg(EBS_PASSCODE_NEEDED_EVT, 0, pData);
 }
 }
 */

/*********************************************************************
 * @fn      EBS_startDiscHandler
 *
 * @brief   Clock handler function: flags the discovery event and wakes
 *          the application task.
 *
 * @param   a0 - ignored
 *
 * @return  none
 */
void EBS_startDiscHandler(UArg a0)
{
    events |= EBS_START_DISCOVERY_EVT;

    // Wake up the application thread when it waits for clock event
    Semaphore_post(sem);
}

/*********************************************************************
 * @fn      EBS_keyChangeHandler
 *
 * @brief   Key event handler function: defers key processing to the
 *          application task via the message queue.
 *
 * @param   a0 - ignored
 *
 * @return  none
 */
void EBS_keyChangeHandler(uint8_t keys)
{
    EBS_enqueueMsg(EBS_KEY_CHANGE_EVT, keys, NULL);
}

/*********************************************************************
 * @fn      EBS_enqueueMsg
 *
 * @brief   Creates a message and puts the message in RTOS queue.
 *
 * @param   event - message event.
 * @param   state - message state.
 * @param   pData - message data pointer.
 *
 * @return  TRUE or FALSE
 */
uint8_t EBS_enqueueMsg(uint8_t event, uint8_t status, uint8_t *pData)
{
    // Create dynamic pointer to message.
    EbsEvt_t *pMsg = ICall_malloc(sizeof(EbsEvt_t));

    if (pMsg)
    {
        pMsg->hdr.event = event;
        pMsg->hdr.state = status;
        pMsg->pData = pData;

        // Enqueue the message.
        return Util_enqueueMsg(appMsgQueue, sem, (uint8_t *) pMsg);
    }

    return FALSE;
}

// Pack the 4-byte transmitter device ID into a uint32 (byte 0 = LSB).
static uint32_t EBS_parseDevID(uint8_t* devID)
{
    return BUILD_UINT32(devID[0], devID[1], devID[2], devID[3]);
}

// Write `len` bytes to the characteristic whose value handle is cached at
// charHdl[charHdlId]. Returns SUCCESS, FAILURE (len too large) or a BLE
// error code from the stack.
static uint8_t EBS_writeCharbyHandle(uint16_t connHandle, ProfileId_t charHdlId,
        uint8_t* pData, uint8_t len)
{
    // NOTE(review): 23 matches the default ATT_MTU, but the usable write
    // payload is ATT_MTU - 3 = 20 bytes — TODO confirm the intended limit.
    if (len > 23)
        return FAILURE;
    // Do a write using char handle
    attWriteReq_t req;
    uint8_t status;
    req.pValue = GATT_bm_alloc(connHandle, ATT_WRITE_REQ, len, NULL);
    if (req.pValue != NULL)
    {
        req.handle = charHdl[charHdlId];
        req.len = len;
        //memcpy(req.pValue, pData, len);
        for (int i = 0; i < len; i++)
            req.pValue[i] = pData[i];
        req.sig = 0;
        req.cmd = 0;
        status = GATT_WriteCharValue(connHandle, &req, selfEntity);
        //Display_print2(dispHandle,ROW_SIX,0,"Write req sent [%d,0x%02x]", req.len, *(req.pValue));
        // On failure the stack did not take ownership of the buffer.
        if (status != SUCCESS)
            GATT_bm_free((gattMsg_t *) &req, ATT_WRITE_REQ);
    }
    else
    {
        status = bleMemAllocError;
    }
    return status;
}

// Issue a GATT read for the characteristic cached at charHdl[charHdlId];
// the response arrives later via EBS_processGATTMsg.
static uint8_t EBS_readCharbyHandle(uint16_t connHandle, ProfileId_t charHdlId)
{
    // Do a read
    attReadReq_t req;
    uint8_t status;
    req.handle = charHdl[charHdlId];
    status = GATT_ReadCharValue(connHandle, &req, selfEntity);
    return status;
}

// Record the new application state and queue it for handling in the
// task context (see EBS_stateChange).
static void EBS_updateEbsState(EbsState_t newState)
{
    ebsState = newState;
    EBS_enqueueMsg(EBS_STATE_CHANGE_EVT, newState, NULL);
}

// React to an application state transition (runs in the task context).
static void EBS_stateChange(EbsState_t newState)
{
    switch (newState)
    {
    case EBS_STATE_INIT:
        uout0("ebsState = EBS_STATE_INIT");
        break;
    case EBS_STATE_DISCOVERY:
        uout0("ebsState = EBS_STATE_DISCOVERY");
        EBS_discoverDevices();
        break;
    case EBS_STATE_UPLOAD:
        //TODO: send the discTxList to BC using UART
        uout0("ebsState = EBS_STATE_UPLOAD");
        break;
    case EBS_STATE_POLLING:
        uout0("ebsState = EBS_STATE_POLLING");
        Semaphore_post(targetConnSem); // enable target connect
        break;
    default:
        break;
    }
}

// Per-target polling state machine:
// CONNECT -> (service discovery) -> READ -> WRITE -> TERMINATE.
static void EBS_updatePollState(uint8_t targetIndex, EbsPollState_t newState)
{
    if (ebsState != EBS_STATE_POLLING)
        return;
    targetList[targetIndex].state = newState;
    uint8_t rsp = 0xFF;
    switch (newState)
    {
    case EBS_POLL_STATE_IDLE:
        break;
    case EBS_POLL_STATE_CONNECT:
        // TODO: need a lookup process if using parallel connections
        // NOTE(review): -1 as the timeout presumably means wait-forever
        // (BIOS_WAIT_FOREVER) — confirm against the TI-RTOS Semaphore API.
        Semaphore_pend(targetConnSem, -1); // waiting for a vacant conn slot
        // findNextVacantSlot(pVacantSlot)
        // TODO: find a vacant target connection slot
        pConnectingSlot = targetList + targetIndex;
        Util_startClock(&connectingClock);
        GAPCentralRole_EstablishLink(LINK_HIGH_DUTY_CYCLE, LINK_WHITE_LIST,
                targetList[targetIndex].addrType, targetList[targetIndex].addr);
        break;
    case EBS_POLL_STATE_READ:
        Semaphore_post(targetConnSem); // release the sem to allow next connect
        pConnectingSlot = NULL;
        EBS_readCharbyHandle(targetList[targetIndex].connHdl,
                EVRSPROFILE_DATA);
        // TODO: upload the data to EBC
        break;
    case EBS_POLL_STATE_WRITE: // finish read
        uout0("into write process");
        EBS_writeCharbyHandle(targetList[targetIndex].connHdl,
                EVRSPROFILE_DATA, &rsp, 1);
        break;
    case EBS_POLL_STATE_TERMINATE: // finish write
        GAPCentralRole_TerminateLink(targetList[targetIndex].connHdl);
        break;
    default:
        break;
    }
}

/*********************************************************************
 * @fn      EBS_handleKeys
 *
 * @brief   Handles all key events for this device.
 *
 * @param   shift - true if in shift/alt.
 * @param   keys - bit field for key events.
Valid entries: * HAL_KEY_SW_2 * HAL_KEY_SW_1 * * @return none */ static void EBS_handleKeys(uint8_t shift, uint8_t keys) { switch (ebsState) { case EBS_STATE_INIT: // TODO: pretend to receive a uart_ack if (keys & KEY_RIGHT) EBS_updateEbsState(EBS_STATE_DISCOVERY); break; //case EBS_STATE_DISCOVERY: case EBS_STATE_UPLOAD: // TODO: pretend to receive a uart_ack if (keys & KEY_LEFT) { EBS_updateTargetList(discTxList[0].txDevID); EBS_updateEbsState(EBS_STATE_POLLING); } break; case EBS_STATE_POLLING: if (keys & KEY_RIGHT) { EBS_updatePollState(0,EBS_POLL_STATE_CONNECT); } else if (keys & KEY_LEFT) { EBS_updateTargetList(discTxList[1].txDevID); EBS_updatePollState(0,EBS_POLL_STATE_CONNECT); } } } static void EBS_updateTargetList(uint8_t* txID) { uint8_t index; for (index = 0; index < scanRes; index++) if (memcmp(discTxList[index].txDevID, txID, ETX_DEVID_LEN) == NULL) break; memcpy(pVacantSlot->addr, discTxList[index].addr, B_ADDR_LEN); memcpy(pVacantSlot->txDevID, discTxList[index].txDevID, ETX_DEVID_LEN); pVacantSlot->addrType = discTxList[index].addrType; // TODO: need a targetList manager to find next vacant } /* switch (state) { case BLE_STATE_IDLE: //Display_print0(dispHdl, ROW_STATE, 0, "BLE_STATE_IDLE"); if (keys & KEY_RIGHT) { // Discover devices EBS_discoverDevices(); } //If LEFT is pressed, nothing happens. 
break; case BLE_STATE_DISCOVERED: //Display_print0(dispHdl, ROW_STATE, 0, "BLE_STATE_DISCOVERED"); if (keys & KEY_LEFT) { //Display Discovery Results if (!scanningStarted && scanRes > 0) { if (scanIdx >= scanRes) { Display_clearLines(dispHdl, ROW_TWO, ROW_SEVEN); Display_print0(dispHdl, ROW_SIX, 0, "<LEFT to browse"); Display_print0(dispHdl, ROW_SEVEN, 0, ">RIGHT to scan"); state = BLE_STATE_BROWSING; scanIdx = 0; } else { Display_print1(dispHdl, ROW_ONE, 0, "Device %d", (scanIdx + 1)); Display_print0(dispHdl, ROW_TWO, 0, Util_convertBdAddr2Str(discTxList[scanIdx].addr)); Display_print1(dispHdl, ROW_THREE, 0, "Tx ID 0x%08x", EBS_parseDevID(discTxList[scanIdx].txDevID)); Display_print0(dispHdl, ROW_SEVEN, 0, ">RIGHT to connect"); state = BLE_STATE_BROWSING; scanIdx++; } } return; } else if (keys & KEY_RIGHT) { //Start scanning EBS_discoverDevices(); } break; case BLE_STATE_BROWSING: //Display_print0(dispHdl, ROW_STATE, 0, "BLE_STATE_BROWSING"); if (keys & KEY_LEFT) { //Navigate through discovery results if (!scanningStarted && scanRes > 0) { if (scanIdx >= scanRes) { //Display the scan option Display_clearLines(dispHdl, ROW_ONE, ROW_SEVEN); Display_print1(dispHdl, ROW_ONE, 0, "Devices found %d", scanRes); Display_print0(dispHdl, ROW_SIX, 0, "<LEFT to browse"); Display_print0(dispHdl, ROW_SEVEN, 0, ">RIGHT to scan"); state = BLE_STATE_BROWSING; scanIdx = 0; } else { //Display next device Display_print1(dispHdl, ROW_ONE, 0, "Device %d", (scanIdx + 1)); Display_print0(dispHdl, ROW_TWO, 0, Util_convertBdAddr2Str(discTxList[scanIdx].addr)); Display_print1(dispHdl, ROW_THREE, 0, "Tx ID 0x%08x", EBS_parseDevID(discTxList[scanIdx].txDevID)); Display_print0(dispHdl, ROW_SEVEN, 0, ">RIGHT to connect"); state = BLE_STATE_BROWSING; scanIdx++; } } } else if (keys & KEY_RIGHT) { //Scan for devices if the scan option is displayed if (scanIdx == 0) { EBS_discoverDevices(); } //Connect to displayed device else { uint8_t addrType; uint8_t *peerAddr; if (scanRes > 0 && state == 
BLE_STATE_BROWSING) { // connect to current device in scan result peerAddr = discTxList[scanIdx - 1].addr; addrType = discTxList[scanIdx - 1].addrType; state = BLE_STATE_CONNECTING; Util_startClock(&connectingClock); GAPCentralRole_EstablishLink(LINK_HIGH_DUTY_CYCLE, DEFAULT_LINK_WHITE_LIST, addrType, peerAddr); Display_clearLines(dispHdl, ROW_FOUR, ROW_SEVEN); Display_print0(dispHdl, ROW_TWO, 0, Util_convertBdAddr2Str(peerAddr)); Display_print0(dispHdl, ROW_FOUR, 0, "Connecting"); } } } break; case BLE_STATE_CONNECTING: //Display_print0(dispHdl, ROW_STATE, 0, "BLE_STATE_CONNECTING"); //Nothing happens if buttons are pressed while the device is connecting. break; case BLE_STATE_CONNECTED: //Display_print0(dispHdl, ROW_STATE, 0, "BLE_STATE_CONNECTED"); if (keys & KEY_LEFT) //Navigate though menu. { //Iterate through rows switch (selectedMenuItem) { case MENU_ITEM_CONN_PARAM_UPDATE: selectedMenuItem = MENU_ITEM_RSSI; if (EBS_RssiFind(connHandle) == NULL) { Display_print0(dispHdl, ROW_SEVEN, 0, ">Start RSSI poll"); } else { Display_print0(dispHdl, ROW_SEVEN, 0, ">Stop RSSI poll"); } break; case MENU_ITEM_RSSI: selectedMenuItem = MENU_ITEM_READ_WRITE; Display_print0(dispHdl, ROW_SEVEN, 0, ">Read/write req"); break; case MENU_ITEM_READ_WRITE: selectedMenuItem = MENU_ITEM_DISCONNECT; Display_print0(dispHdl, ROW_SEVEN, 0, ">Disconnect"); break; case MENU_ITEM_DISCONNECT: selectedMenuItem = MENU_ITEM_CONN_PARAM_UPDATE; Display_print0(dispHdl, ROW_SEVEN, 0, ">Param upd req"); break; } } if (keys & KEY_RIGHT) { switch (selectedMenuItem) { case MENU_ITEM_CONN_PARAM_UPDATE: //Connection Parameter Update Display_print0(dispHdl, ROW_FOUR, 0, "Param upd req"); switch (currentConnectionParameter) { case INITIAL_PARAMETERS: GAPCentralRole_UpdateLink(connHandle, DEFAULT_UPDATE_MIN_CONN_INTERVAL, DEFAULT_UPDATE_MAX_CONN_INTERVAL, DEFAULT_UPDATE_SLAVE_LATENCY, DEFAULT_UPDATE_CONN_TIMEOUT); currentConnectionParameter = DEFAULT_UPDATE_PARAMETERS; break; case DEFAULT_UPDATE_PARAMETERS: 
GAPCentralRole_UpdateLink(connHandle, INITIAL_MIN_CONN_INTERVAL, INITIAL_MAX_CONN_INTERVAL, INITIAL_SLAVE_LATENCY, INITIAL_CONN_TIMEOUT); currentConnectionParameter = INITIAL_PARAMETERS; break; } break; case MENU_ITEM_RSSI: // Start or cancel RSSI polling if (EBS_RssiFind(connHandle) == NULL) { Display_clearLine(dispHdl, ROW_FIVE); EBS_StartRssi(connHandle, DEFAULT_RSSI_PERIOD); Display_print0(dispHdl, ROW_SEVEN, 0, ">Stop RSSI poll"); } else { EBS_CancelRssi(connHandle); Display_print0(dispHdl, ROW_FIVE, 0, "RSSI Cancelled"); if (selectedMenuItem == MENU_ITEM_RSSI) { Display_print0(dispHdl, ROW_SEVEN, 0, ">Start RSSI poll"); } } break; case MENU_ITEM_READ_WRITE: if (state == BLE_STATE_CONNECTED&& charHdl != 0 && procedureInProgress == FALSE) { uint8_t status; // Do a read or write as long as no other read or write is in progress if (doWrite) { // Do a write attWriteReq_t req; req.pValue = GATT_bm_alloc(connHandle, ATT_WRITE_REQ, 1, NULL); if (req.pValue != NULL) { Display_print0(dispHdl, ROW_SIX, 0, "Write req sent"); req.handle = charHdl[EVRSPROFILE_DATA]; req.len = 1; req.pValue[0] = 0xaf; req.sig = 0; req.cmd = 0; status = GATT_WriteCharValue(connHandle, &req, selfEntity); //Display_print1(dispHdl, 9, 0, "0x%04x",req.handle); if (status != SUCCESS) { GATT_bm_free((gattMsg_t *) &req, ATT_WRITE_REQ); } } else { status = bleMemAllocError; } } else { // Do a read attReadReq_t req; req.handle = charHdl[EVRSPROFILE_DATA]; status = GATT_ReadCharValue(connHandle, &req, selfEntity); Display_print0(dispHdl, ROW_SIX, 0, "Read req sent"); } if (status == SUCCESS) { procedureInProgress = TRUE; doWrite = !doWrite; } } break; case MENU_ITEM_DISCONNECT: GAPCentralRole_TerminateLink(connHandle); state = BLE_STATE_DISCONNECTING; Display_clearLines(dispHdl, ROW_ONE, ROW_SEVEN); Display_print0(dispHdl, ROW_ONE, 0, "Disconnecting"); break; } } } return; } */ <file_sep>################################################################################ # Automatically-generated file. 
Do not edit! ################################################################################ USER_OBJS := LIBS := -llibc.a -l"C:/ti/ble_sdk_2_02_02_25/examples/cc2650lp/simple_peripheral/ccs/config/lib_linker.cmd" -l"C:/ti/ble_sdk_2_02_02_25/src/rom/ble_rom_releases/04242014/ble_rom_patch.symbols" -l"C:/ti/tirtos_cc13xx_cc26xx_2_21_01_08/products/cc26xxware_2_24_03_17272/driverlib/bin/ccs/driverlib.lib" <file_sep>################################################################################ # Automatically-generated file. Do not edit! ################################################################################ USER_OBJS := LIBS := -l"C:/ti/ble_sdk_2_02_02_25/ble_examples/examples/cc2650lp/simple_central/ccs/config/lib_linker.cmd" -l"C:/ti/ble_sdk_2_02_02_25/src/rom/enc_lib/cc26xx_ecc_rom_api.a" -l"C:/ti/ble_sdk_2_02_02_25/src/rom/ble_rom_releases/04242014/ble_rom_patch.symbols" -l"C:/ti/tirtos_cc13xx_cc26xx_2_21_01_08/products/cc26xxware_2_24_03_17272/driverlib/bin/ccs/driverlib.lib" -l"C:/ti/ble_sdk_2_02_02_25/src/rom/enc_lib/cc26xx_ecc_rom_api.a" -llibc.a <file_sep>/**************************************** * * @filename evrs_bs_typedefs.h * * @project evrs_bs_cc2650lp_app * * @brief global typedefs and macros * * @date 22 Aug. 
 2018
 *
 * @author  <EMAIL>
 *
 ****************************************/

#ifndef EVRS_BS_TYPEDEFS_H_
#define EVRS_BS_TYPEDEFS_H_

#include "Util.h"

// RSSI read data structure: one instance per periodic RSSI poll.
typedef struct
{
    Clock_Struct *pClock; // pointer to clock struct
    uint16_t period;      // how often to read RSSI
    uint16_t connHandle;  // connection handle
} readRssi_t;

// Simple BLE Central Task Events
// (bit flags used in the application `events` word and message headers)
#define EBS_START_DISCOVERY_EVT               0x0001
#define EBS_PAIRING_STATE_EVT                 0x0002
// #define EBS_PASSCODE_NEEDED_EVT               0x0004
#define EBS_RSSI_READ_EVT                     0x0008
#define EBS_KEY_CHANGE_EVT                    0x0010
#define EBS_STATE_CHANGE_EVT                  0x0020
#define EBS_CONNECTING_TIMEOUT_EVT            0x0040
#define EBS_STACK_MSG_EVT                     0x0080

#endif /* EVRS_BS_TYPEDEFS_H_ */
<file_sep>################################################################################
# Automatically-generated file. Do not edit!
################################################################################
USER_OBJS :=
LIBS := -l"C:/ti/ble_sdk_2_02_02_25/ble_examples/examples/cc2650lp/simple_central/ccs/config/ccs_linker_defines.cmd" -l"C:/ti/ble_sdk_2_02_02_25/src/common/cc26xx/ccs/cc26xx_app.cmd" -l"C:/ti/tirtos_cc13xx_cc26xx_2_21_01_08/products/cc26xxware_2_24_03_17272/driverlib/bin/ccs/driverlib.lib" -l"C:/ti/ble_sdk_2_02_02_25/src/rom/common_rom_releases/03282014/common_rom.symbols" -llibc.a
<file_sep>/*****************************************************************************
 file    evrs_gatt_profile.c

 brief   This is the source file of gatt profile configuration of the
         transmitter using cc2650lp.
 proj    EVRS

 date    0351pm 14 Aug 2018

 author  Ziyi
 *****************************************************************************/

/*********************************************************************
 * INCLUDES
 */
#include <string.h>

#include "bcomdef.h"
#include "osal.h"
#include "linkdb.h"
#include "att.h"
#include "gatt.h"
#include "gatt_uuid.h"
#include "gattservapp.h"
#include "gapbondmgr.h"

#include "evrs_gatt_profile.h"

/*********************************************************************
 * MACROS
 */

/*********************************************************************
 * CONSTANTS
 */
// NOTE(review): the attribute table below initializes 13 entries
// (1 service declaration + 4 characteristics x 3 attributes each),
// not 17 — TODO confirm this count before the service is registered.
#define SERVAPP_NUM_ATTR_SUPPORTED        17

/*********************************************************************
 * TYPEDEFS
 */

/*********************************************************************
 * GLOBAL VARIABLES
 */
// EVRS GATT Profile Service UUID: 0xAFF0
CONST uint8 EVRSProfileServUUID[ATT_BT_UUID_SIZE] = { LO_UINT16(
        EVRSPROFILE_SERV_UUID), HI_UINT16(EVRSPROFILE_SERV_UUID) };

// System serial number UUID: 0xAFF1
CONST uint8 EVRSProfileSysIdUUID[ATT_BT_UUID_SIZE] = { LO_UINT16(
        EVRSPROFILE_SYSID_UUID), HI_UINT16(EVRSPROFILE_SYSID_UUID) };

// Device serial number UUID: 0xAFF2
CONST uint8 EVRSProfileDevIdUUID[ATT_BT_UUID_SIZE] = { LO_UINT16(
        EVRSPROFILE_DEVID_UUID), HI_UINT16(EVRSPROFILE_DEVID_UUID) };

// Command number UUID: 0xAFF4
CONST uint8 EVRSProfileCmdUUID[ATT_BT_UUID_SIZE] = { LO_UINT16(
        EVRSPROFILE_CMD_UUID), HI_UINT16(EVRSPROFILE_CMD_UUID) };

// User data UUID: 0xAFF8
CONST uint8 EVRSProfileDataUUID[ATT_BT_UUID_SIZE] = { LO_UINT16(
        EVRSPROFILE_DATA_UUID), HI_UINT16(EVRSPROFILE_DATA_UUID) };

/*********************************************************************
 * EXTERNAL VARIABLES
 */

/*********************************************************************
 * EXTERNAL FUNCTIONS
 */

/*********************************************************************
 * LOCAL VARIABLES
 */
// Application callback set registered by the application layer.
static EVRSProfileCBs_t *EVRSProfile_AppCBs = NULL;

/*********************************************************************
 * Profile Attributes - variables
 */

// EVRS Profile Service attribute
static CONST gattAttrType_t EVRSProfileService = { ATT_BT_UUID_SIZE,
        EVRSProfileServUUID };

// EVRS Profile System Id Properties
static uint8 EVRSProfileSysIdProps = GATT_PROP_READ;

// System Id Value
static uint8 EVRSProfileSysId = 0;

// EVRS Profile System Id User Description
static uint8 EVRSProfileSysIdUserDesp[10] = "System Id";

// EVRS Profile Device Id Properties
static uint8 EVRSProfileDevIdProps = GATT_PROP_READ | GATT_PROP_WRITE;

// Device Id Value
static uint8 EVRSProfileDevId = 0;

// EVRS Profile Device Id User Description
static uint8 EVRSProfileDevIdUserDesp[10] = "Device Id";

// EVRS Profile Command BS Properties
static uint8 EVRSProfileCmdProps = GATT_PROP_READ | GATT_PROP_WRITE;

// Command BS Value
static uint8 EVRSProfileCmd = 0;

// EVRS Profile BS Command User Description
static uint8 EVRSProfileCmdUserDesp[11] = "BS Command";

// EVRS Profile User Data Properties
static uint8 EVRSProfileDataProps = GATT_PROP_READ | GATT_PROP_WRITE;

// User Data Value
static uint8 EVRSProfileData = 0;

// EVRS Profile User Data User Description
static uint8 EVRSProfileDataUserDesp[10] = "User Data";

/*********************************************************************
 * Profile Attributes - Table
 * Layout per characteristic: declaration, value, user description.
 */
static gattAttribute_t EVRSProfileAttrTbl[SERVAPP_NUM_ATTR_SUPPORTED] = {
        // EVRS Profile Service
        { { ATT_BT_UUID_SIZE, primaryServiceUUID }, /* type */
        GATT_PERMIT_READ, /* permissions */
        0, /* handle */
        (uint8 *) &EVRSProfileService /* pValue */
        },

        // System Id Declaration
        { { ATT_BT_UUID_SIZE, characterUUID },
        GATT_PERMIT_READ,
        0,
        &EVRSProfileSysIdProps },

        // System Id Value
        { { ATT_BT_UUID_SIZE, EVRSProfileSysIdUUID },
        GATT_PERMIT_READ,
        0,
        &EVRSProfileSysId },

        // System Id User Description
        { { ATT_BT_UUID_SIZE, charUserDescUUID },
        GATT_PERMIT_READ,
        0,
        EVRSProfileSysIdUserDesp },

        // Device Id Declaration
        { { ATT_BT_UUID_SIZE, characterUUID },
        GATT_PERMIT_READ,
        0,
        &EVRSProfileDevIdProps },

        // Device Id Value
        { { ATT_BT_UUID_SIZE, EVRSProfileDevIdUUID },
        GATT_PERMIT_READ | GATT_PERMIT_WRITE,
        0,
        &EVRSProfileDevId },

        // Device Id User Description
        { { ATT_BT_UUID_SIZE, charUserDescUUID },
        GATT_PERMIT_READ,
        0,
        EVRSProfileDevIdUserDesp },

        // BS Command Declaration
        { { ATT_BT_UUID_SIZE, characterUUID },
        GATT_PERMIT_READ,
        0,
        &EVRSProfileCmdProps },

        // BS Command Value
        { { ATT_BT_UUID_SIZE, EVRSProfileCmdUUID },
        GATT_PERMIT_READ | GATT_PERMIT_WRITE,
        0,
        &EVRSProfileCmd },

        // BS Command User Description
        { { ATT_BT_UUID_SIZE, charUserDescUUID },
        GATT_PERMIT_READ,
        0,
        EVRSProfileCmdUserDesp },

        // User Data Declaration
        { { ATT_BT_UUID_SIZE, characterUUID },
        GATT_PERMIT_READ,
        0,
        &EVRSProfileDataProps },

        // User Data Value
        { { ATT_BT_UUID_SIZE, EVRSProfileDataUUID },
        GATT_PERMIT_READ | GATT_PERMIT_WRITE,
        0,
        &EVRSProfileData },

        // User Data User Description
        { { ATT_BT_UUID_SIZE, charUserDescUUID },
        GATT_PERMIT_READ,
        0,
        EVRSProfileDataUserDesp },
};

/*********************************************************************
 * LOCAL FUNCTIONS
 */
static bStatus_t EVRSProfile_ReadAttrCB(uint16_t connHandle,
        gattAttribute_t *pAttr, uint8_t *pValue, uint16_t *pLen,
        uint16_t offset, uint16_t maxLen, uint8_t method);
static bStatus_t EVRSProfile_WriteAttrCB(uint16_t connHandle,
        gattAttribute_t *pAttr, uint8_t *pValue, uint16_t len, uint16_t offset,
        uint8_t method);

/*********************************************************************
 * PROFILE CALLBACKS
 */
// EVRS Profile Service Callbacks
// Note: When an operation on a characteristic requires authorization and
// pfnAuthorizeAttrCB is not defined for that characteristic's service, the
// Stack will report a status of ATT_ERR_UNLIKELY to the client.
When an // operation on a characteristic requires authorization the Stack will call // pfnAuthorizeAttrCB to check a client's authorization prior to calling // pfnReadAttrCB or pfnWriteAttrCB, so no checks for authorization need to be // made within these functions. CONST gattServiceCBs_t EVRSProfileCBs = { EVRSProfile_ReadAttrCB, // Read callback function pointer EVRSProfile_WriteAttrCB, // Write callback function pointer NULL // Authorization callback function pointer }; /********************************************************************* * PUBLIC FUNCTIONS */ /********************************************************************* * @fn EVRSProfile_AddService * * @brief Initializes the EVRS Profile service by registering * GATT attributes with the GATT server. * * @param services - services to add. This is a bit map and can * contain more than one service. * * @return Success or Failure */ bStatus_t EVRSProfile_AddService(uint32 services) { uint8 status; // Allocate Client Characteristic Configuration table // Initialize Client Characteristic Configuration attributes //GATTServApp_InitCharCfg( INVALID_CONNHANDLE, EVRSProfileDataConfig ); if (services & EVRSPROFILE_SERVICE) { // Register GATT attribute list and CBs with GATT Server App status = GATTServApp_RegisterService(EVRSProfileAttrTbl, GATT_NUM_ATTRS(EVRSProfileAttrTbl), GATT_MAX_ENCRYPT_KEY_SIZE, &EVRSProfileCBs); } else { status = SUCCESS; } return (status); } /********************************************************************* * @fn EVRSProfile_RegisterAppCBs * * @brief Registers the application callback function. Only call * this function once. * * @param callbacks - pointer to application callbacks. 
* * @return SUCCESS or bleAlreadyInRequestedMode */ bStatus_t EVRSProfile_RegisterAppCBs(EVRSProfileCBs_t *appCallbacks) { if (appCallbacks) { EVRSProfile_AppCBs = appCallbacks; return ( SUCCESS); } else { return ( bleAlreadyInRequestedMode); } } /********************************************************************* * @fn EVRSProfile_SetParameter * * @brief Set a EVRS Profile parameter. * * @param param - Profile parameter ID * @param len - length of data to write * @param value - pointer to data to write. This is dependent on * the parameter ID and WILL be cast to the appropriate * data type (example: data type of uint16 will be cast to * uint16 pointer). * * @return bStatus_t */ bStatus_t EVRSProfile_SetParameter(uint8 param, uint8 len, void *value) { bStatus_t ret = SUCCESS; switch (param) { case EVRSPROFILE_SYSID: if (len == sizeof(uint8)) { EVRSProfileSysId = *((uint8*) value); } else { ret = bleInvalidRange; } break; case EVRSPROFILE_DEVID: if (len == sizeof(uint8)) { EVRSProfileDevId = *((uint8*) value); } else { ret = bleInvalidRange; } break; case EVRSPROFILE_CMD: if (len == sizeof(uint8)) { EVRSProfileCmd = *((uint8*) value); } else { ret = bleInvalidRange; } break; case EVRSPROFILE_DATA: if (len == sizeof(uint8)) { EVRSProfileData = *((uint8*) value); } else { ret = bleInvalidRange; } break; default: ret = INVALIDPARAMETER; break; } return (ret); } /********************************************************************* * @fn EVRSProfile_GetParameter * * @brief Get a EVRS Profile parameter. * * @param param - Profile parameter ID * @param value - pointer to data to put. This is dependent on * the parameter ID and WILL be cast to the appropriate * data type (example: data type of uint16 will be cast to * uint16 pointer). 
* * @return bStatus_t */ bStatus_t EVRSProfile_GetParameter(uint8 param, void *value) { bStatus_t ret = SUCCESS; switch (param) { case EVRSPROFILE_SYSID: *((uint8*) value) = EVRSProfileSysId; break; case EVRSPROFILE_DEVID: *((uint8*) value) = EVRSProfileDevId; break; case EVRSPROFILE_CMD: *((uint8*) value) = EVRSProfileCmd; break; case EVRSPROFILE_DATA: *((uint8*) value) = EVRSProfileData; break; default: ret = INVALIDPARAMETER; break; } return (ret); } /********************************************************************* * @fn EVRSProfile_ReadAttrCB * * @brief Read an attribute. * * @param connHandle - connection message was received on * @param pAttr - pointer to attribute * @param pValue - pointer to data to be read * @param pLen - length of data to be read * @param offset - offset of the first octet to be read * @param maxLen - maximum length of data to be read * @param method - type of read message * * @return SUCCESS, blePending or Failure */ static bStatus_t EVRSProfile_ReadAttrCB(uint16_t connHandle, gattAttribute_t *pAttr, uint8_t *pValue, uint16_t *pLen, uint16_t offset, uint16_t maxLen, uint8_t method) { bStatus_t status = SUCCESS; // Make sure it's not a blob operation (no attributes in the profile are long) if (offset > 0) { return ( ATT_ERR_ATTR_NOT_LONG); } if (pAttr->type.len == ATT_BT_UUID_SIZE) { // 16-bit UUID uint16 uuid = BUILD_UINT16(pAttr->type.uuid[0], pAttr->type.uuid[1]); switch (uuid) { // No need for "GATT_SERVICE_UUID" or "GATT_CLIENT_CHAR_CFG_UUID" cases; // gattserverapp handles those reads case EVRSPROFILE_SYSID_UUID: case EVRSPROFILE_DEVID_UUID: case EVRSPROFILE_CMD_UUID: case EVRSPROFILE_DATA_UUID: *pLen = 1; pValue[0] = *pAttr->pValue; break; default: // Should never get here! 
(characteristics 3 and 4 do not have read permissions) *pLen = 0; status = ATT_ERR_ATTR_NOT_FOUND; break; } } else { // 128-bit UUID *pLen = 0; status = ATT_ERR_INVALID_HANDLE; } return (status); } /********************************************************************* * @fn EVRSProfile_WriteAttrCB * * @brief Validate attribute data prior to a write operation * * @param connHandle - connection message was received on * @param pAttr - pointer to attribute * @param pValue - pointer to data to be written * @param len - length of data * @param offset - offset of the first octet to be written * @param method - type of write message * * @return SUCCESS, blePending or Failure */ static bStatus_t EVRSProfile_WriteAttrCB(uint16_t connHandle, gattAttribute_t *pAttr, uint8_t *pValue, uint16_t len, uint16_t offset, uint8_t method) { bStatus_t status = SUCCESS; uint8 notifyApp = 0xFF; if (pAttr->type.len == ATT_BT_UUID_SIZE) { // 16-bit UUID uint16 uuid = BUILD_UINT16(pAttr->type.uuid[0], pAttr->type.uuid[1]); switch (uuid) { case EVRSPROFILE_DEVID_UUID: case EVRSPROFILE_CMD_UUID: case EVRSPROFILE_DATA_UUID: //Validate the value // Make sure it's not a blob oper if (offset == 0) { if (len != 1) { status = ATT_ERR_INVALID_VALUE_SIZE; } } else { status = ATT_ERR_ATTR_NOT_LONG; } //Write the value if (status == SUCCESS) { uint8 *pCurValue = (uint8 *)pAttr->pValue; *pCurValue = pValue[0]; if (pAttr->pValue == &EVRSProfileDevId) notifyApp = EVRSPROFILE_DEVID; else if (pAttr->pValue == &EVRSProfileCmd) notifyApp = EVRSPROFILE_CMD; else notifyApp = EVRSPROFILE_DATA; } break; default: // Should never get here! 
(characteristics 2 and 4 do not have write permissions) status = ATT_ERR_ATTR_NOT_FOUND; break; } } else { // 128-bit UUID status = ATT_ERR_INVALID_HANDLE; } // If a characteristic value changed then callback function to notify application of change if ((notifyApp != 0xFF) && EVRSProfile_AppCBs && EVRSProfile_AppCBs->pfnEVRSProfileChange) { EVRSProfile_AppCBs->pfnEVRSProfileChange(notifyApp); } return (status); } /********************************************************************* *********************************************************************/ <file_sep>/***************************************************************************** file evrs_bs_rssi.h brief This file contain definitions and prototypes about polling rssi data proj EVRS date 0527pm 15 Aug 2018 author Ziyi *****************************************************************************/ #ifndef EVRS_BS_RSSI_H_ #define EVRS_BS_RSSI_H_ #include "bcomdef.h" #include "gap.h" #include "evrs_bs_typedefs.h" #include "util.h" #include "ble_user_config.h" #include "evrs_bs_main.h" extern readRssi_t readRssi[MAX_NUM_BLE_CONNS]; extern bStatus_t EBS_StartRssi(uint16_t connHandle, uint16_t period); extern bStatus_t EBS_CancelRssi(uint16_t connHandle); extern readRssi_t *EBS_RssiFind(uint16_t connHandle); #endif /* EVRS_BS_RSSI_H_ */ <file_sep>################################################################################ # Automatically-generated file. Do not edit! 
################################################################################ SHELL = cmd.exe # Add inputs and outputs from these tool invocations to the build variables C_SRCS += \ ../drv/board_display.c \ ../drv/board_led.c \ ../drv/file_header.c C_DEPS += \ ./drv/board_display.d \ ./drv/board_led.d \ ./drv/file_header.d OBJS += \ ./drv/board_display.obj \ ./drv/board_led.obj \ ./drv/file_header.obj OBJS__QUOTED += \ "drv\board_display.obj" \ "drv\board_led.obj" \ "drv\file_header.obj" C_DEPS__QUOTED += \ "drv\board_display.d" \ "drv\board_led.d" \ "drv\file_header.d" C_SRCS__QUOTED += \ "../drv/board_display.c" \ "../drv/board_led.c" \ "../drv/file_header.c" <file_sep>################################################################################ # Automatically-generated file. Do not edit! ################################################################################ SHELL = cmd.exe # Add inputs and outputs from these tool invocations to the build variables CMD_SRCS += \ C:/ti/ble_sdk_2_02_02_25/ble_examples/examples/cc2650lp/simple_central/ccs/config/lib_linker.cmd
381de90061a57c3b646476e4a78a2fa30808708f
[ "Markdown", "C", "Makefile" ]
13
C
ZiyiLikeIt/EVRS_BLE_Firmware
9d6e752af880c71bf3ec2f62ab11f310a5ed5ceb
204ec55cc7be85220e6a885a3a7e4f736eaab9e0
refs/heads/main
<repo_name>Atcsy/BookLibraryAPI<file_sep>/controllers/auth.js const ErrorResponse = require('../utils/errorResponse'); const asyncHandler = require('../middleware/asyncHandler'); const User = require('../models/User'); // POST api/v1/register exports.registerUser = asyncHandler(async (req, res, next) => { const { name, email, password, phone } = req.body; const user = await User.create({ name, email, password, phone, }); const token = user.getJwt(); res.status(201).json({ success: true, token }); }); // POST api/v1/login exports.loginUser = asyncHandler(async (req, res, next) => { const { email, password } = req.body; if (!email || !password) { return next(new ErrorResponse('Please provide email and password', 400)); } const user = await User.findOne({ email }).select('password'); if (!user) { return next(new ErrorResponse('Invalid Credentials', 401)); } // Check for matching passwords const isMatch = await user.matchPasswords(password); if (!isMatch) { return next(new ErrorResponse('Invalid Credentials', 401)); } const token = user.getJwt(); res.status(201).json({ success: true, token }); }); // GET api/v1/me auth exports.getMe = asyncHandler(async (req, res, next) => { const user = await User.findById(req.user.id); res.status(201).json({ success: true, data: user }); }); // POST api/v1/registeradmin //ONLY IN DEV! DELETE THIS IN PROD! 
exports.registerAdmin = asyncHandler(async (req, res, next) => { const { name, email, password, phone } = req.body; const user = await User.create({ name, email, password, role: 'admin', phone, }); const token = user.getJwt(); res.status(201).json({ success: true, token }); }); <file_sep>/routes/auth.js const express = require('express'); const { registerUser, loginUser, getMe, registerAdmin, } = require('../controllers/auth'); const router = express.Router(); const { auth } = require('../middleware/auth'); router.route('/register').post(registerUser); router.route('/login').post(loginUser); router.route('/me').get(auth, getMe); router.route('/register/admin').post(registerAdmin); module.exports = router; <file_sep>/routes/rentals.js const express = require('express'); const { rentBook, returnBook, getOverDueRentals, } = require('../controllers/rentals'); const { auth } = require('../middleware/auth'); const router = express.Router(); router.route('/overdue/').get(auth, getOverDueRentals); router.route('/rent/').post(auth, rentBook); router.route('/return/').post(auth, returnBook); module.exports = router; <file_sep>/models/Rental.js const mongoose = require('mongoose'); const RentalSchema = new mongoose.Schema({ book: { type: mongoose.Schema.Types.ObjectId, ref: 'Book', }, issueDate: { type: Date, default: Date.now(), }, returnDate: { type: Date }, user: { type: mongoose.Schema.Types.ObjectId, ref: 'User', }, }); module.exports = mongoose.model('Rental', RentalSchema); <file_sep>/middleware/auth.js const jwt = require('jsonwebtoken'); const asyncHandler = require('./asyncHandler'); const ErrorResponse = require('../utils/errorResponse'); const User = require('../models/User'); exports.auth = asyncHandler(async (req, res, next) => { let token; if ( req.headers.authorization && req.headers.authorization.split(' ')[0] === 'Bearer' ) { token = req.headers.authorization.split(' ')[1]; } // Check for token if (!token) { return next(new ErrorResponse('No authorized 
access for this route', 401)); } try { //verify token const decoded = jwt.verify(token, process.env.JWT_SECRET); req.user = await User.findById(decoded.id); next(); } catch (err) { return next(new ErrorResponse('No authorized access for this route', 401)); } }); exports.admin = asyncHandler(async (req, res, next) => { if (req.user.role !== 'admin') { return next(new ErrorResponse('Not authorized to access this route', 401)); } next(); }); <file_sep>/controllers/rentals.js const moment = require('moment'); const ErrorResponse = require('../utils/errorResponse'); const asyncHandler = require('../middleware/asyncHandler'); const Book = require('../models/Book'); const User = require('../models/User'); const Rental = require('../models/Rental'); //POST api/v1/rentals/rent/ auth exports.rentBook = asyncHandler(async (req, res, next) => { const user = await User.findById(req.user); const book = await Book.findById(req.body.bookId); const rental = await Rental.find({ user: req.user }); //If we dont have user or book respond error if (!user || !book) { return next(new ErrorResponse('Resource not found', 404)); } if (rental.length >= 5) { return next(new ErrorResponse('Please return a book to rent another', 400)); } //Get the DueBooks from rental collection const dueBooks = await Rental.find({ user: { $eq: req.user, }, returnDate: { $lt: Date.now(), }, }); if (dueBooks.length >= 1) { return next(new ErrorResponse('Please return overdue book(s) ', 400)); } // check book is in stock if (book.inStock <= 0) { return next( new ErrorResponse(`Sorry, We dont have ${book.title} in stock`, 200) ); } await Rental.create({ book: book._id, issueDate: Date.now(), returnDate: moment().add(14, 'days').toDate(), user: req.user, }); // decrement inStock by 1 await book.updateOne({ $inc: { inStock: -1 } }, { runValidators: true }); res .status(200) .json({ succes: true, message: `succefully rented ${book.title}` }); }); //POST api/v1/rentals/return/ auth exports.returnBook = 
asyncHandler(async (req, res, next) => { const user = await User.findById(req.user); const book = await Book.findById(req.body.bookId); const rental = await Rental.find({ user: req.user, book: req.body.bookId, }); //If we dont have user, book or any rental respond error if (!user || !book || rental.length === 0) { return next(new ErrorResponse('Resource not found', 404)); } await Rental.findOneAndDelete({ user: req.user, book: req.body.bookId, }); // increment inStock by 1 await book.updateOne({ $inc: { inStock: +1 } }, { runValidators: true }); res.status(200).json({ succes: true, message: 'Thank you for returning' }); }); //GET api/v1/rentals/overdue/ auth exports.getOverDueRentals = asyncHandler(async (req, res, next) => { const overDuerentals = await Rental.find({ returnDate: { $lt: Date.now(), }, }) .populate({ path: 'book', select: 'title author', }) .populate({ path: 'user', select: 'name phone email', }); if (overDuerentals.length === 0) { return next(new ErrorResponse('No overdue rentals found', 400)); } res.status(200).json({ succes: true, message: overDuerentals }); }); <file_sep>/README.md # Library Management System API This is a backend API application that exposes routes for a library. This API allows new users to register and authenticate by using [JSON Web Tokens](https://jwt.io/). Registered users are able to rent a maximum of 5 books and return it within 2 weeks. Admins have additional CRUD functionality on users and on books and can view rental that are overdue. Non registered visitors only can view the books # Table of Contents - [Getting Started](#getting-started) - [Technology Stack](#technology-stack) - [Installation](#installation) - [Documentation](#documentation) ## Getting Started This is a server-side javascript application built with [Express JS](https://expressjs.com/). The database is [MongoDB](https://www.mongodb.com/) with [Mongoose](https://www.mongoose.com/) ODM. ## Technology Stack **Server Side** 1. ExpressJs 2. 
MongoDB ## Installation 1. Install [**Node JS**](https://nodejs.org/en/) 2. Install [Mongo Database](https://docs.mongodb.com/compass/master/install/) 3. Clone the [**repository here**](https://github.com/Atcsy/BookLibraryAPI.git) 4. [**cd**] into the root of the **project directory**. 5. Run `npm run install` on the terminal to install project dependecies 6. Create an config.env file in the root folder and define variables from config.env-sample. MONGO_URI and JWT_SECRET variables are required to start application 7. Run `$ node seeder.js -i` to seed the database or `-d` to destroy it 8. Run `$ npm run dev` to start the application 9. Navigate to `http://localhost:5000/api/v1` on your preferred browser ## Documentation API DOCUMENTATION (https://documenter.getpostman.com/view/16916239/TzskEihh) <file_sep>/controllers/books.js const ErrorResponse = require("../utils/errorResponse"); const asyncHandler = require("../middleware/asyncHandler"); const Book = require("../models/Book"); // GET api/v1/books //filter by title ?name=string exports.getBooks = asyncHandler(async (req, res, next) => { let regExObj = {}; // Only search with regexp if we have query for name otherwise get all books if (req.query.name) { const searchString = req.query.name.toString(); regExObj = { title: { $regex: searchString, $options: "i" } }; } const books = await Book.find(regExObj); res.status(200).json({ status: "success", data: books, }); }); // GET api/v1/books/:id exports.getBook = asyncHandler(async (req, res, next) => { const book = await Book.findById(req.params.id); if (!book) { return next(new ErrorResponse("Resource not found", 404)); } res.status(200).json({ succes: true, data: book }); }); // POST api/v1/books auth exports.createBook = asyncHandler(async (req, res, next) => { const { title, author, year, pages, language, country } = req.body; const book = await Book.create({ title, author, year, pages, language, country, }); res.status(201).json({ success: true, data: book }); 
}); // PUT api/v1/books/:id auth exports.updateBook = asyncHandler(async (req, res, next) => { const book = await Book.findByIdAndUpdate(req.params.id, req.body, { new: true, runValidators: true, }); if (!book) { return next(new ErrorResponse("Resource not found", 404)); } res.status(200).json({ succes: true, data: book }); }); // DELETE api/v1/books/:id auth exports.deleteBook = asyncHandler(async (req, res, next) => { const book = await Book.findByIdAndDelete(req.params.id); if (!book) { return next(new ErrorResponse("Resource not found", 404)); } res.status(200).json({ success: true }); });
1f3c594516d51d50f8c24d017ed825088ec6e021
[ "JavaScript", "Markdown" ]
8
JavaScript
Atcsy/BookLibraryAPI
8abf5797efa6eb65ded7d56b08ae4ac90ab0b2ae
aef5b87c6940254176f4bd822227c0eb89510bb1
refs/heads/master
<repo_name>nikse28/t_mob<file_sep>/src/data/RecordData.js const recordData = [ { sourceId: 1, recordId: 1, recordName: "Record 1" }, { sourceId: 1, recordId: 2, recordName: "Record 2" }, { sourceId: 2, recordId: 3, recordName: "Record 3" }, { sourceId: 3, recordId: 4, recordName: "Record 4" } ]; export default recordData;<file_sep>/src/data/SourceData.js const sourceData = [ { "sourceId": 1, "sourceName": "Source 1" }, { "sourceId": 2, "sourceName": "Source 2" }, ] export default sourceData;<file_sep>/src/data/EntityData.js const entityData = [ { recordId:1, entityId:1, entityData:"Entity 1" }, { recordId:1, entityId:2, entityData:"Entity 2" }, { recordId:2, entityId:3, entityData:"Entity 3" } ]; export default entityData;<file_sep>/src/actions/Constants.js /* All Action Part goes here */<file_sep>/src/components/Recon/MapTable.js import React, { Component } from 'react'; import { Table,Icon, Menu,Dropdown,Input, Button, Popconfirm, Form ,Select} from 'antd'; const EditableContext = React.createContext(); const { Option } = Select; const EditableRow = ({ form, index, ...props }) => ( <EditableContext.Provider value={form}> <tr {...props} /> </EditableContext.Provider> ); const EditableFormRow = Form.create()(EditableRow); class EditableCell extends React.Component { state = { editing: false, }; toggleEdit = () => { const editing = !this.state.editing; this.setState({ editing }, () => { if (editing) { this.input.focus(); } }); }; save = e => { const { record, handleSave } = this.props; this.form.validateFields((error, values) => { if (error && error[e.currentTarget.id]) { return; } this.toggleEdit(); handleSave({ ...record, ...values }); }); }; renderCell = form => { this.form = form; const { children, dataIndex, record, title } = this.props; const { editing } = this.state; const menu = ( <Menu> <Menu.Item key="1">1st menu item</Menu.Item> <Menu.Item key="2">2nd menu item</Menu.Item> <Menu.Item key="3">3rd menu item</Menu.Item> </Menu> ); return editing ? 
( <Form.Item style={{ margin: 0 }}> {form.getFieldDecorator(dataIndex, { rules: [ { required: true, message: `${title} is required.`, }, ], initialValue: record[dataIndex], })( <Dropdown overlay={menu}> <span style={{ userSelect: "none" }}>hover on Me</span> </Dropdown> )} </Form.Item> ) : ( <div className="editable-cell-value-wrap" style={{ paddingRight: 24 }} onClick={this.toggleEdit} > {children} </div> ); }; render() { const { editable, dataIndex, title, record, index, handleSave, children, ...restProps } = this.props; return ( <td {...restProps}> {editable ? ( <EditableContext.Consumer>{this.renderCell}</EditableContext.Consumer> ) : ( children )} </td> ); } } class EditableTable extends React.Component { handleFieldChange(e){ console.log('e',e); console.log('e val',e); } onSaveTransactionRow(e,key) { console.log('EE',e.value); console.log('Key',key); } onDeleteTransactionRow(e,keya) { console.log('key',keya); console.log(e) this.setState({}) } constructor(props) { super(props); console.log('PROP PROP',props); this.columns = [ { title: 'Parameter', dataIndex: 'name', width: '30%', render:()=>{ return( <Select style={{width:120}} onChange={this.handleFieldChange}> <Option value={"val1"}> SAKDL </Option> <Option value={'val2'}> QWNKSJA </Option> </Select> ) } }, { title: 'Operator', dataIndex: 'age', render:()=>{ return( <Select style={{width:120}}> <Option value="EQUAL">{"="}</Option> <Option value="NOTEQUAL">{"!="}</Option> <Option value="LTE">{"<="}</Option> <Option value="GTE">{">="}</Option> <Option value="LT">{"<"}</Option> </Select> ) } }, { title: 'Value', dataIndex: 'address', render:()=>{ return( <Input/> ) } }, { title: 'Notes', dataIndex: 'Notes', render:()=>{ return( <Input/> ) } }, { title: 'operation', dataIndex: 'operation', render: (text, record) => { return( <div> <Icon style={{fontSize:18,color:"black"}} type="save" /> | <Icon style={{fontSize:18,color:"black"}} onClick={e => this.onDeleteTransactionRow(e, record.key)} type="delete"/> </div> ) 
} }, ]; this.state = { dataSource: [ { key: '0', name: '<NAME>', age: '32', address: 'London, Park Lane no. 0', }, ], count: 1, }; } handleDelete = key => { const dataSource = [...this.state.dataSource]; this.setState({ dataSource: dataSource.filter(item => item.key !== key) }); }; handleAdd = () => { const { count, dataSource } = this.state; const newData = { key: count, name: `<NAME> ${count}`, age: 32, address: `London, Park Lane no. ${count}`, render:()=>{ return( <Select><Option>AHSDasnd</Option> </Select> ) } }; this.setState({ dataSource: [...dataSource, newData], count: count + 1, }); }; handleSave = row => { const newData = [...this.state.dataSource]; const index = newData.findIndex(item => row.key === item.key); const item = newData[index]; newData.splice(index, 1, { ...item, ...row, }); this.setState({ dataSource: newData }); }; render() { const { dataSource } = this.state; const components = { body: { row: EditableFormRow, cell: EditableCell, }, }; const columns = this.columns.map(col => { if (!col.editable) { return col; } return { ...col, onCell: record => ({ record, editable: col.editable, dataIndex: col.dataIndex, title: col.title, handleSave: this.handleSave, }), }; }); return ( <div> <Button onClick={this.handleAdd} type="primary" style={{ marginBottom: 16 }}> Add a row </Button> <Table components={components} rowClassName={() => 'editable-row'} bordered dataSource={dataSource} columns={columns} /> </div> ); } } export default EditableTable;<file_sep>/src/reducers/index.js const reducer=(currentState,action)=>{ switch (action.type) { case "ADD": const newState = currentState + action.payload; return newState; break; default: return currentState; break; } } export default reducer;
c24911e47a89c073021b4812a037e28ca3f11ac4
[ "JavaScript" ]
6
JavaScript
nikse28/t_mob
56189d5cd3fc6accc5e3eddd066d1bba1a4f3016
81565e578a7a2d4a45f6c40e7c735bfbcfe33379
refs/heads/master
<repo_name>miguelureta/webdevtfp<file_sep>/app/Http/Controllers/EventController.php
<?php

namespace App\Http\Controllers;

use Illuminate\Http\Request;
use App\Event;

/**
 * CRUD controller for events: listing, creation, editing and deletion.
 */
class EventController extends Controller
{
    /**
     * Display a listing of all events.
     *
     * @return \Illuminate\Http\Response
     */
    public function index()
    {
        $listing = Event::all();
        return view('events.list', compact('listing'));
    }

    /**
     * Show the form for creating a new event.
     *
     * @return \Illuminate\Http\Response
     */
    public function create()
    {
        return view('events.create');
    }

    /**
     * Validate and store a newly created event.
     *
     * @param  \Illuminate\Http\Request  $request
     * @return \Illuminate\Http\Response
     */
    public function store(Request $request)
    {
        $this->validate($request, [
            'eventname'   => 'required',
            'venue'       => 'required',
            'sdate'       => 'required',
            'edate'       => 'required',
            'description' => 'required',
            'eimage'      => 'nullable',
        ]);

        $event = new Event;
        $event->EventName   = $request->eventname;
        $event->Description = $request->description;
        $event->Venue       = $request->venue;
        $event->Startdate   = $request->sdate;
        $event->Enddate     = $request->edate;
        $event->Image       = $request->eimage;
        $event->save();

        return redirect(route('event_list'))->with('success', 'Event Added');
    }

    /**
     * Display the specified resource (not implemented).
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function show($id)
    {
        //
    }

    /**
     * Show the form for editing the specified event.
     *
     * Uses firstOrFail() so an unknown id renders a 404 page instead of
     * passing null to the view (the previous first() could return null).
     *
     * @param  int  $EventsID
     * @return \Illuminate\Http\Response
     */
    public function edit($EventsID)
    {
        $listing = Event::where('EventsID', $EventsID)->firstOrFail();
        return view('events.edit', compact('listing'));
    }

    /**
     * Validate and update the specified event.
     *
     * @param  \Illuminate\Http\Request  $request
     * @param  int  $EventsID
     * @return \Illuminate\Http\Response
     */
    public function update(Request $request, $EventsID)
    {
        $this->validate($request, [
            'eventname'   => 'required',
            'venue'       => 'required',
            'sdate'       => 'required',
            'edate'       => 'required',
            'description' => 'required',
            'eimage'      => 'nullable',
        ]);

        // 404 on unknown id instead of calling save() on null.
        $event = Event::where('EventsID', $EventsID)->firstOrFail();
        $event->EventName   = $request->eventname;
        $event->Description = $request->description;
        $event->Venue       = $request->venue;
        $event->Startdate   = $request->sdate;
        $event->Enddate     = $request->edate;
        $event->Image       = $request->eimage;
        $event->save();

        return redirect(route('event_list'))->with('success', 'Event Updated');
    }

    /**
     * Remove the specified resource from storage.
     *
     * Unused; the DELETE route is bound to delete() below.
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function destroy($id)
    {
        //
    }

    /**
     * Delete the specified event (bound to the DELETE /events/delete route).
     *
     * @param  int  $EventsID
     * @return \Illuminate\Http\Response
     */
    public function delete($EventsID)
    {
        // 404 on unknown id instead of calling delete() on null.
        $listing = Event::where('EventsID', $EventsID)->firstOrFail();
        $listing->delete();

        return redirect(route('event_list'))->with('success', 'Event Deleted');
    }
}
<file_sep>/routes/web.php
<?php

use Illuminate\Support\Facades\Route;

/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/

Route::get('/', function () {
    return view('welcome');
});

// Event CRUD routes (EventController).
Route::get('/events', 'EventController@index')->name('event_list');
Route::get('/events/create', 'EventController@create')->name('event_create');
Route::post('/events/create', 'EventController@store')->name('event_store');
Route::get('/events/edit/{EventsID}', 'EventController@edit')->name('event_edit');
Route::post('/events/edit/{EventsID}', 'EventController@update')->name('event_update');
Route::delete('/events/delete/{EventsID}', 'EventController@delete')->name('event_delete');

// Gallery routes (ImageController).
Route::get('/gallery/view', 'ImageController@index');
Route::post('/gallery/view', 'ImageController@store')->name('upload');
5c168e727f7a7961d62212297920743705ca5202
[ "PHP" ]
2
PHP
miguelureta/webdevtfp
8973767463dbc7ba7c6d4c4559bd67e52e1c474f
be0d22233feb4a53173c345e9db0e4fa681fd39d
refs/heads/master
<repo_name>6unz/cpe_utils<file_sep>/docs/source/index.rst .. cpe_utils documentation master file, created by sphinx-quickstart on Wed Dec 14 11:42:49 2016. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. cpe_utils ========= Getting Started ^^^^^^^^^^^^^^^ ``Common Platform Enumeration`` (CPE) is considered to be an industry standard that is used to provide a uniform way to show information on operating systems, hardward and software. This tool is a collection of CPE-related utilities. cpe_utils handles both CPE 1.0 and CPE 2.3 formats, provides functions for comparing cpes, determining if they match, and expanding a CPE that contains wildcards. Installation ^^^^^^^^^^^^ cpe_utils can be installed from the command line as follows: .. code-block:: python pip install cpe_utils Once installed users can use the tool using the following methods. Human Readable Representation ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The Human Readable method translates a ''Common Platform Enumeration'' (CPE) string to readable text. One arguement is required as shown in the example below: .. code-block:: python import cpe_utils cpe_str = "cpe:/o:microsoft:windows_8:::x64" cpe = cpe_utils.CPE(cpe_str) >>> cpe.human() Once a CPE is created the ''get_human'' method returns a single string containing the readable value. The following translation is performed: * Underscore character is replaced with a space * The first letter of each section is capitalized Returns .. code-block:: python >>> Microsoft Windows 8 x64 CPE Matching ^^^^^^^^^^^^ CPE matching can be used as follows: * matches(self, cpe) - Compare if a CPE object exactly matches the provided cpe_obj * has_wildcards(self) - Compare if a CPE with wildcards matches another cpe(using a provided list of reference CPEs) * expand_cpe(cpe_str, cpe_list) - Test a cpe against a list of CPEs ``matches()`` .. 
code-block:: python import cpe_utils cpe_str1 = "cpe:/windows:microsoft:version:edition" cpe1 = cpe_utils.CPE(cpe_str1) cpe_str2 = "cpe:/linux:ubuntu:version:edition" cpe2 = cpe_utils.CPE(cpe_str2) cpe1.matches(cpe2) Returns .. code-block:: python False ``has_wildcards()`` .. code-block:: python import cpe_utils cpe_str1 = "cpe:/*:vendor:product:version:update:edition" cpe1 = cpe_utils.CPE(cpe_str1) cpe1.has_wildcards() Results .. code-block:: python True ``expand_cpe(cpe_str, cpe_list)`` .. code-block:: python import cpe_utils cpe_list = ["cpe:/o:microsoft:windows_7:::x64", "cpe:/a:mozilla:firefox:38.1.0", "cpe:/a:mozilla:firefox:38.3.0", "cpe:/a:adobe:shockwave_player:11.6.5.635", "cpe:/a:adobe:reader:11.0.10"] cpe_utils.expand_cpe("cpe:/a:adobe", cpe_list) Results .. code-block:: python ['cpe:/a:adobe:shockwave_player:11.6.5.635', 'cpe:/a:adobe:reader:11.0.10'] to_json() and to_dict() ^^^^^^^^^^^^^^^^^^^^^^^ ``JavaScript Object Notation`` (JSON), is a lightweight data interchange format inspired by JavaScript object literal syntax. The ``to_json()`` method takes a cpe string which is then translated into json syntax by using the following: * Data is represented in name/value pairs * Curly braces hold objects and each name is followed by ':'(colon), the name/value paris are sperated by , (commma) * Square brackets hold arrays and values are separted by , (comma) .. code-block:: python import cpe_utils cpe_str = "cpe:/a:something:something:" cpe = cpe_utils.CPE(cpe_str) cpe.json() Returns .. code-block:: python {"product": "something", "vendor": "something", "version": " ", "update": "", "edition": "", "part": "a"} The ``to_dict()`` method creates a dictionary from a cpe string. .. code-block:: python cpe_str = "cpe:/a:something:something" cpe = cpe_utils.CPE(cpe_str) cpe.to_dict() Returns .. code-block:: python {'product': 'something', 'vendor': 'something', 'version': '', 'update': '', 'edition': '', 'part': 'a'} Contents ######### .. toctree:: :maxdepth: 2 .. 
automodule:: cpe_utils :members: Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` <file_sep>/cpe_utils/__init__.py #!/usr/bin/env python # -*- coding: utf-8 -*- """ CPE UTILS TODO: Add a good description of this module """ from collections import deque import json import re import fnmatch class CPEException(Exception): pass class CPE(object): """A container class for parsed cpe strings. """ attrs = ["part", "vendor", "product", "version", "update", "edition"] def __init__(self, cpe_str): """Create a new CPE object that represents the cpe_str :param str cpe_str: The cpe string """ self.part = "" self.vendor = "" self.product = "" self.version = "" self.update = "" self.edition = "" if cpe_str.startswith("cpe:/"): cpe_str = cpe_str.replace("cpe:/", "") elif cpe_str.startswith("cpe:2.3:"): cpe_str = cpe_str.replace("cpe:2.3:", "") else: raise CPEException("Invalid cpe string {!r}".format(cpe_str)) parts = deque(cpe_str.split(":")) to_set = deque(self.attrs) while len(parts) > 0 and len(to_set) > 0: next_attr = to_set.popleft() setattr(self, next_attr, parts.popleft()) def has_wildcards(self): """Return true or false if any of this cpe's fields contain wildcards """ if ("*") in self.part: return True if ("?") in self.part: return True elif ("?") in self.vendor: return True elif ("*") in self.vendor: return True elif ("*") in self.product: return True elif ("?") in self.product: return True elif ("?") in self.version: return True elif ("*") in self.version: return True elif ("*") in self.update: return True elif ("?") in self.update: return True elif ("?") in self.edition: return True elif ("*") in self.edition: return True else: return False def get_human(self, attr): val = getattr(self, attr) val = val.replace("_", " ") product_mapping = { "ie": "Internet Explorer" } if attr == "product" and val in product_mapping: val = product_mapping[val] # if there's lowercase letters in the value, make it a title # (if there'FAILEDs not, 
leave it alone - e.g. SP3) if re.search('[a-z]', val) is not None: val = val.title() if val.upper() in ["SP0", "SP1", "SP2", "SP3", "SP4", "SP5", "SP6"]: val = val.upper() if val.lower() in ["x86", "x64"]: val = val.lower() return val def __eq__(self, cpe_obj): """Return true or false if this CPE object matches the provided cpe_obj EXACTLY :param CPE cpe_obj: The cpe object to compare against """ # TODO see issue #1 # test each of these fields on self and cpe_obj: # part, vendor, product, version, update, edition if self.part != cpe_obj.part: return False if self.vendor != cpe_obj.vendor: return False if self.product != cpe_obj.product: return False if self.version != cpe_obj.version: return False if self.update != cpe_obj.update: return False if self.edition != cpe_obj.edition: return False return True def matches(self, cpe): """Return true or false if this CPE object matches the provided cpe_str, using wildcards. :param cpe: The cpe to compare against """ # TODO see issue #3 if self.vendor and not fnmatch.fnmatch(cpe.vendor, self.vendor): print ("vendor was false") return False elif self.product and not fnmatch.fnmatch(cpe.product, self.product): print ("product was false") return False elif self.version and not fnmatch.fnmatch(cpe.version, self.version): print ("version was false") return False elif self.update and not fnmatch.fnmatch(cpe.update, self.update): print ("update was false") return False elif self.edition and not fnmatch.fnmatch(cpe.edition, self.edition): print ("edition was false") return False elif self.part and not fnmatch.fnmatch(cpe.part, self.part): print ("part was false") return False else: return True def human(self): """Makes cpe version of it user friendly""" res = [] if self.vendor != "": res.append(self.get_human("vendor")) if self.product != "": res.append(self.get_human("product")) if self.version != "": res.append(self.get_human("version")) if self.update != "": res.append(self.get_human("update")) if self.edition != "": 
res.append(self.get_human("edition")) return " ".join(res) #return self.get_human("vendor")+" "+self.get_human("product")+ " "+self.get_human("version")+" "+self.get_human("update")+" "+self.get_human("edition") def to_dict(self): return {"part": self.part, "vendor": self.vendor, "product": self.product, "version": self.version, "update": self.update, "edition": self.edition} def to_json(self): cpe1_json = json.dumps(self.to_dict()) return cpe1_json def expand_cpe(cpe_str, cpe_list): """Expand the provided cpe_str into any matching CPEs in the provided cpe list :param str cpe_str: The cpe string to expand :param list cpe_list: A list of cpe strings """ cpe = CPE(cpe_str) result_cpe = [] for other_cpe_str in cpe_list: other_cpe = CPE(other_cpe_str) if cpe.matches(other_cpe): result_cpe.append(other_cpe_str) return result_cpe <file_sep>/tests/test_basic.py #!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys import unittest import json sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) import cpe_utils class TestBasic(unittest.TestCase): """Test the basic functionality of cpe_utils """ def setUp(self): pass def tearDown(self): pass def test_cpe_parsing(self): cpe_str = "cpe:/part:vendor:product:version:update:edition" cpe_obj = cpe_utils.CPE(cpe_str) self.assertEqual(cpe_obj.part, "part") self.assertEqual(cpe_obj.vendor, "vendor") self.assertEqual(cpe_obj.product, "product") self.assertEqual(cpe_obj.version, "version") self.assertEqual(cpe_obj.update, "update") self.assertEqual(cpe_obj.edition, "edition") # see issue #5 # TODO Test vendor # TODO Test product # TODO Test version # TODO Test update # TODO Test edition def test_matches(self): tests = [ ["cpe:/a:vendor:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/X:vendor:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:X:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:X:1.1:sp3:x86", 
"cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:X:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.1:X:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.1:sp3:X", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vandor:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:ndor:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:dor:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:or:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:r:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vbndo:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vand:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:ven:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:ve:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:v:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vbndor:produc:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:produ:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vcndor:prod:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vindor:pro:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vondor:pr:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vundor:p:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vondor::1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.0:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product::sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], 
["cpe:/a:vendor:product:1.1:sp:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.1:s:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.1::x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:product:1.1:sp3:x8", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.1:sp3:x", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.1:sp3:", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vndor:poduct:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vedor:prduct:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:venor:prouct:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendr:prodct:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendo:produt:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:produc:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:space:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:space:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.10:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.11:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.12:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.13:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.14:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.15:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.16:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.17:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.18:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], ["cpe:/a:vendor:product:1.19:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", False], 
["cpe:/a:vendor:product:1.1:sp3:*", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:product:1.1:*:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:product:*:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:*:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:*:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/*:vendor:product:1.1:sp3:x8?", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:product:1.1:sp3:x?6", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:product:1.1:sp3:?86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:product:1.1:sp?:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:product:1.1:s?3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:product:1.1:?p3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:product:1.?:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:product:1?1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:product:?.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:produc?:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:produ?t:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:prod?ct:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:pro?uct:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:pr?duct:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:p?oduct:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendor:?roduct:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vendo?:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:vend?r:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:ven?or:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:ve?dor:product:1.1:sp3:x86", 
"cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:v?ndor:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/a:?endor:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ["cpe:/?:vendor:product:1.1:sp3:x86", "cpe:/a:vendor:product:1.1:sp3:x86", True], ] count = 0 for test_info in tests: count += 1 cpe_str1, cpe_str2, match_result = test_info cpe1 = cpe_utils.CPE(cpe_str1) cpe2 = cpe_utils.CPE(cpe_str2) self.assertTrue(cpe1.matches(cpe2) == match_result, "[{}] {}.match({}) was not {}".format( count, cpe_str1, cpe_str2, match_result )) def test_cpe_parsing_23(self): cpe_str = "cpe:2.3:o:vendor:product:version:update:edition" cpe_obj = cpe_utils.CPE(cpe_str) self.assertEqual(cpe_obj.part, "o") self.assertEqual(cpe_obj.vendor, "vendor") self.assertEqual(cpe_obj.product, "product") self.assertEqual(cpe_obj.version, "version") self.assertEqual(cpe_obj.update, "update") self.assertEqual(cpe_obj.edition, "edition") # see issue #5 # TODO Test vendor # TODO Test product # TODO Test version # TODO Test update # TODO Test edition def test_cpe_exception(self): with self.assertRaises(cpe_utils.CPEException): cpe_utils.CPE("cpe:::::") def test_human(self): tests = [ ["cpe:/" "a:vendor:product:1.1:sp3:x86", "Vendor Product 1.1 SP3 x86"], ["cpe:/a:vendor_name:product:1.1:sp3:x86", "Vendor Name Product 1.1 SP3 x86"], ["cpe:/a:vendor:product::sp3:x86", "Vendor Product SP3 x86"], ["cpe:/a:vendor:::sp3:x86", "Vendor SP3 x86"], ["cpe:/a:vendor::::", "Vendor"], ["cpe:/a::::sp3:x86", "SP3 x86"], ["cpe:/a:vendor:product:1.1::", "Vendor Product 1.1"], ["cpe:/a:::::", ""], ["cpe:/a::product:::", "Product"], ["cpe:/a:::1.1::", "1.1"], ["cpe:/a::::sp3:", "SP3"], ["cpe:/a:::::x86", "x86"], ["cpe:/a:vendor:product:::", "Vendor Product"], ["cpe:/a:vendor:product:1.1:sp3:", "Vendor Product 1.1 SP3"], ["cpe:/a:vendor_name::::x86", "Vendor Name x86"], ["cpe:/a:vendor_name:::sp3:", "Vendor Name SP3"], ["cpe:/a:vendor_name:product:1.1::", "Vendor Name Product 
1.1"], ["cpe:/a:vendor_name::::", "Vendor Name"], ["cpe:/a:vendor::::x86", "Vendor x86"], ["cpe:/a:vendor:::sp3:", "Vendor SP3"], ] for test_info in tests: cpe_string = test_info[0] correct_human = test_info[1] cpe = cpe_utils.CPE(cpe_string) self.assertEqual(cpe.human(), correct_human, "{!r} was not {!r} (for cpe {})".format( cpe.human(), correct_human, cpe_string )) def test_to_json(self): tests = [ ["cpe:/a:vendor:product:1.1:sp3:x86",{ "part": "a", "vendor": "vendor", "product": "product", "version": "1.1", "update": "sp3", "edition": "x86" }], ["cpe:/a::product:1.1:sp3:x86",{ "part": "a", "vendor": "", "product": "product", "version": "1.1", "update": "sp3", "edition": "x86" }], ["cpe:/a:vendor::1.1:sp3:x86",{ "part": "a", "vendor": "vendor", "product": "", "version": "1.1", "update": "sp3", "edition": "x86" }], ["cpe:/a:vendor:product::sp3:x86",{ "part": "a", "vendor": "vendor", "product": "product", "version": "", "update": "sp3", "edition": "x86" }], ["cpe:/a:vendor:product:1.1::x86",{ "part": "a", "vendor": "vendor", "product": "product", "version": "1.1", "update": "", "edition": "x86" }], ["cpe:/a:vendor:product:1.1:sp3",{ "part": "a", "vendor": "vendor", "product": "product", "version": "1.1", "update": "sp3", "edition": "" }], ] for test_info in tests: cpe_string = test_info[0] correct_dict = test_info[1] cpe = cpe_utils.CPE(cpe_string) assert isinstance(cpe_string, object) self.assertEqual(cpe.to_json(), json.dumps(correct_dict), "{!r} was not {!r} (for cpe {})".format( cpe.to_json(), correct_dict, cpe_string )) def test_cpe_obj_equals(self): orig_cpe = "cpe:/o:vendor:product:version:update:edition" cpe_obj1 = cpe_utils.CPE(orig_cpe) cpe_obj2 = cpe_utils.CPE(orig_cpe) false_cpes = [ "cpe:/a:vendor:product:version:update:edition", "cpe:/o:vendor1:product:version:update:edition", "cpe:/o:vendor:product1:version:update:edition", "cpe:/o:vendor:product:version1:update:edition", "cpe:/o:vendor:product:version:update1:edition", 
"cpe:/o:vendor:product:version:update:edition1", ] for false_cpe in false_cpes: false_cpe_obj = cpe_utils.CPE(false_cpe) self.assertFalse(cpe_obj1 == false_cpe_obj, "{} is not equal to {}".format( false_cpe, orig_cpe )) def test_has_wildcards(self): cpe_tests = [ "cpe:/*:vendor:product:version:update:edition", "cpe:/?:vendor:product:version:update:edition", "cpe:/o:v*ndor:product:version:update:edition", "cpe:/o:v?ndor:product:version:update:edition", "cpe:/o:vendor:pr*duct:version:update:edition", "cpe:/o:vendor:pr?duct:version:update:edition", "cpe:/o:vendor:product:vers*on:update:edition", "cpe:/o:vendor:product:vers?on:update:edition", "cpe:/o:vendor:product:version:upda*e:edition", "cpe:/o:vendor:product:version:upda?e:edition", "cpe:/o:vendor:product:version:update:ed*tion", "cpe:/o:vendor:product:version:update:ed?tion", ] for cpe_str in cpe_tests: cpe_obj = cpe_utils.CPE(cpe_str) self.assertTrue(cpe_obj.has_wildcards()) no_wildcards = cpe_utils.CPE("cpe:/o:vendor:product:version:update:edition") self.assertFalse(no_wildcards.has_wildcards()) if __name__ == "__main__": unittest.main() <file_sep>/README.md cpe_utils ========= Getting Started ^^^^^^^^^^^^^^^ **Common Platform Enumeration** (CPE) is considered to be an industry standard that is used to provide a uniform way to show information on operating systems, hardward and software. This tool is a collection of CPE-related utilities. cpe_utils handles both CPE 1.0 and CPE 2.3 formats, provides functions for comparing cpes, determining if they match, and expanding a CPE that contains wildcards. Installation ^^^^^^^^^^^^ cpe_utils can be installed from the command line as follows: ```python pip install cpe_utils ``` Once installed users can use the tool using the following methods. Human Readable Representation ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The Human Readable method translates a Common Platform Enumeration (CPE) string to readable text. 
One arguement is required as shown in the example below: ```python import cpe_utils cpe_str = "cpe:/o:microsoft:windows_8:::x64" cpe = cpe_utils.CPE(cpe_str) cpe.human() ``` Once a CPE is created the get_human method returns a single string containing the readable value. The following translation is performed: * Underscore character is replaced with a space * The first letter of each section is capitalized Returns ```python Microsoft Windows 8 x64 ``` CPE Matching ^^^^^^^^^^^^ CPE matching can be used as follows: * matches(self, cpe) - Compare if a CPE object exactly matches the provided cpe_obj * has_wildcards(self) - Compare if a CPE with wildcards matches another cpe(using a provided list of reference CPEs) * expand_cpe(cpe_str, cpe_list) - Test a cpe against a list of CPEs **matches()** ```python import cpe_utils cpe_str1 = "cpe:/windows:microsoft:version:edition" cpe1 = cpe_utils.CPE(cpe_str1) cpe_str2 = "cpe:/linux:ubuntu:version:edition" cpe2 = cpe_utils.CPE(cpe_str2) cpe1.matches(cpe2) ``` Returns ``` python False ``` **has_wildcards()** ```python import cpe_utils cpe_str1 = "cpe:/*:vendor:product:version:update:edition" cpe1 = cpe_utils.CPE(cpe_str1) cpe1.has_wildcards() ``` Results ```python True ``` **expand_cpe(cpe_str, cpe_list)** ``` python import cpe_utils cpe_list = ["cpe:/o:microsoft:windows_7:::x64", "cpe:/a:mozilla:firefox:38.1.0", "cpe:/a:mozilla:firefox:38.3.0", "cpe:/a:adobe:shockwave_player:11.6.5.635", "cpe:/a:adobe:reader:11.0.10"] cpe_utils.expand_cpe("cpe:/a:adobe", cpe_list) ``` Results ``` python ['cpe:/a:adobe:shockwave_player:11.6.5.635', 'cpe:/a:adobe:reader:11.0.10'] ``` to_json() and to_dict() ^^^^^^^^^^^^^^^^^^^^^^^ **JavaScript Object Notation** (JSON), is a lightweight data interchange format inspired by JavaScript object literal syntax. 
The to_json() method takes a cpe string which is then translated into json syntax by using the following: * Data is represented in name/value pairs * Curly braces hold objects and each name is followed by ':'(colon), the name/value paris are sperated by , (commma) * Square brackets hold arrays and values are separted by , (comma) ``` import cpe_utils cpe_str = "cpe:/a:something:something:" cpe = cpe_utils.CPE(cpe_str) cpe.json() ``` Returns ```python {"product": "something", "vendor": "something", "version": " ", "update": "", "edition": "", "part": "a"} ``` The ``to_dict()`` method creates a dictionary from a cpe string. ```python cpe_str = "cpe:/a:something:something" cpe = cpe_utils.CPE(cpe_str) cpe.to_dict() ``` Returns ```python {'product': 'something', 'vendor': 'something', 'version': '', 'update': '', 'edition': '', 'part': 'a'} ```
1c36f75d914d15f025eff0768a8f3870d951cc34
[ "Markdown", "Python", "reStructuredText" ]
4
reStructuredText
6unz/cpe_utils
945e967e63b5310907e4cffb5ba28eeda0927c99
f52d6f3b1eea9a29871bdde180b402195adaa772
refs/heads/master
<repo_name>lila/altbeacon-transmitter-android<file_sep>/src/org/altbeacon/beacon/BeaconTransmitter.java package org.altbeacon.beacon; import java.nio.ByteBuffer; import java.util.UUID; import org.altbeacon.beacon.Beacon; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothManager; import android.bluetooth.le.AdvertiseCallback; import android.bluetooth.le.AdvertiseSettings; import android.bluetooth.le.AdvertisementData; import android.bluetooth.le.BluetoothLeAdvertiser; import android.content.Context; import android.util.Log; public class BeaconTransmitter { private static final String TAG = "BeaconTransmitter"; private BluetoothAdapter mBluetoothAdapter; private BluetoothLeAdvertiser mBluetoothLeAdvertiser; private int mAdvertiseMode = AdvertiseSettings.ADVERTISE_MODE_LOW_POWER; private int mAdvertiseTxPowerLevel = AdvertiseSettings.ADVERTISE_TX_POWER_HIGH; private Beacon mBeacon; public BeaconTransmitter(Context context, Beacon beacon) { BluetoothManager bluetoothManager = (BluetoothManager) context.getSystemService(Context.BLUETOOTH_SERVICE); mBluetoothAdapter = bluetoothManager.getAdapter(); mBluetoothLeAdvertiser = mBluetoothAdapter.getBluetoothLeAdvertiser(); if (beacon == null) { throw new NullPointerException("Beacon cannot be null"); } mBeacon = beacon; } public int getAdvertiseMode() { return mAdvertiseMode; } /** * AdvertiseSettings.ADVERTISE_MODE_BALANCED 3 Hz * AdvertiseSettings.ADVERTISE_MODE_LOW_LATENCY 1 Hz * AdvertiseSettings.ADVERTISE_MODE_LOW_POWER 10 Hz * @param mAdvertiseMode */ public void setAdvertiseMode(int mAdvertiseMode) { this.mAdvertiseMode = mAdvertiseMode; } public int getAdvertiseTxPowerLevel() { return mAdvertiseTxPowerLevel; } /** * AdvertiseSettings.ADVERTISE_TX_POWER_HIGH -56 dBm @ 1 meter with Nexus 5 * AdvertiseSettings.ADVERTISE_TX_POWER_LOW -75 dBm @ 1 meter with Nexus 5 * AdvertiseSettings.ADVERTISE_TX_POWER_MEDIUM -66 dBm @ 1 meter with Nexus 5 * AdvertiseSettings.ADVERTISE_TX_POWER_ULTRA_LOW not 
detected with Nexus 5 * @param mAdvertiseTxPowerLevel */ public void setAdvertiseTxPowerLevel(int mAdvertiseTxPowerLevel) { this.mAdvertiseTxPowerLevel = mAdvertiseTxPowerLevel; } /** * Starts this beacon advertising */ public void startAdvertising() { String id1 = mBeacon.getIdentifiers().get(0).toString(); int id2 = Integer.parseInt(mBeacon.getIdentifiers().get(1).toString()); int id3 = Integer.parseInt(mBeacon.getIdentifiers().get(2).toString()); int manufacturerCode = mBeacon.getManufacturer(); byte[] advertisingBytes = getAltBeaconAdvertisementData(mBeacon.getBeaconTypeCode(), mBeacon.getManufacturer(), id1, id2, id3, -59); Log.d(TAG, "Starting advertising with ID1: "+id1+" ID2: "+id2+" ID3: "+id3); try{ AdvertisementData.Builder dataBuilder = new AdvertisementData.Builder(); dataBuilder.setManufacturerData(manufacturerCode, advertisingBytes); AdvertiseSettings.Builder settingsBuilder = new AdvertiseSettings.Builder(); settingsBuilder.setAdvertiseMode(mAdvertiseMode); settingsBuilder.setTxPowerLevel(mAdvertiseTxPowerLevel); settingsBuilder.setType(AdvertiseSettings.ADVERTISE_TYPE_NON_CONNECTABLE); mBluetoothLeAdvertiser.startAdvertising(settingsBuilder.build(), dataBuilder.build(), advertiseCallback); String byteString = ""; for (int i= 0; i < advertisingBytes.length; i++) { byteString += String.format("%02X", advertisingBytes[i]); byteString += " "; } Log.e(TAG, "Started advertising with data: "+byteString); } catch (Exception e){ Log.e(TAG, "Cannot start advetising due to excepton: ",e); } } /** * Stops this beacon from advertising */ public void stopAdvertising() { Log.d(TAG, "Stopping advertising"); mBluetoothLeAdvertiser.stopAdvertising(advertiseCallback); } /** * Get BLE advertisement bytes for an AltBeacon * @param beaconTypeCode a 2 byte beacon type code * (0xbeac for an AltBeacon, a different value for other beacon transmissions) * @param id1 a 16 byte UUID represented as a string * @param id2 a 16 bit number * @param id3 a 16 bit number * @param 
power an 8 byte signed power calibration value * @return the byte array of the advertisement */ private byte[] getAltBeaconAdvertisementData(int beaconTypeCode, int manufacturerId, String id1, int id2, int id3, int power) { byte[] advertisingBytes; advertisingBytes = new byte[26]; advertisingBytes[0] = (byte) (manufacturerId & 0xff); // little endian advertisingBytes[1] = (byte) ((manufacturerId >> 8) & 0xff); advertisingBytes[2] = (byte) ((beaconTypeCode >> 8) & 0xff); // big endian advertisingBytes[3] = (byte) (beaconTypeCode & 0xff); System.arraycopy( uuidToBytes(id1), 0, advertisingBytes, 4, 16 ); System.arraycopy( uint16ToBytes(id2), 0, advertisingBytes, 20, 2 ); System.arraycopy( uint16ToBytes(id3), 0, advertisingBytes, 22, 2 ); advertisingBytes[24] = int8ToByte(power); advertisingBytes[25] = 0; // manufacturer reserved return advertisingBytes; } private AdvertiseCallback advertiseCallback = new AdvertiseCallback() { @Override public void onFailure(int errorCode) { Log.e(TAG,"Advertisement failed."); } @Override public void onSuccess(AdvertiseSettings settingsInEffect) { Log.i(TAG,"Advertisement succeeded."); } }; private byte[] uuidToBytes(String uuidString) { UUID uuid = UUID.fromString(uuidString); ByteBuffer bb = ByteBuffer.wrap(new byte[16]); bb.putLong(uuid.getMostSignificantBits()); bb.putLong(uuid.getLeastSignificantBits()); return bb.array(); } private byte[] uint16ToBytes(int i) { byte[] bytes = new byte[2]; bytes[0] = (byte) (i / 256); bytes[1] = (byte) (i & 0xff); return bytes; } private byte int8ToByte(int i) { if (i < 0) { return (byte) (256+i); } return (byte) (i & 0x7f); } }
ce814686c5b125e05767554b984472ea4a819f97
[ "Java" ]
1
Java
lila/altbeacon-transmitter-android
cad0e381a2eb4c4f766984651f7668437d5e346a
50ab5d5a84057824313ce8d668394dab9dc4f864
refs/heads/master
<file_sep>package defautpackage; import org.testng.annotations.Test; import org.testng.annotations.BeforeTest; import java.util.Random; import org.openqa.selenium.By; import org.openqa.selenium.WebDriver; import org.openqa.selenium.chrome.ChromeDriver; import org.testng.Reporter; import org.testng.annotations.AfterTest; public class SellerRegistration extends Initialization{ int ran; String email; @BeforeTest public void beforeTest() { driver.get("https://prepayqa.swipezoom.com:18443/bpm/signup/"); //Random number generator int max=10000; int min=5000; int diff=max-min; Random rn = new Random(); ran = rn.nextInt(diff+1); ran+=min; System.out.print("The Random Number is " + ran); } @Test(priority=0) public void SignUp() { email = "<EMAIL>"+<EMAIL>"; driver.findElement(By.id("signupBean_userFirstName")).sendKeys("Test"); driver.findElement(By.id("signupBean_userLastName")).sendKeys("Test"); driver.findElement(By.id("signupBean_companyName")).sendKeys("TestComp"); driver.findElement(By.id("signupBean_landlineNumber")).sendKeys("28838388"); driver.findElement(By.id("signupBean_userEmail")).sendKeys(email); driver.findElement(By.id("signupBean_mobileNumber")).sendKeys("2299933"); driver.findElement(By.id("signupBean_userPassword")).sendKeys("<PASSWORD>#"); driver.findElement(By.id("signupBean_userConfirmPassword")).sendKeys("<PASSWORD>#"); driver.findElement(By.xpath("//*[@id='sign_up_form']/div[2]/div[7]/div/input")).click(); Reporter.log("Sign up process completed successfully"); } @Test(priority=1) public void Login() throws InterruptedException { Thread.sleep(3000); driver.findElement(By.xpath(".//*[@id='userBean_userId']")).sendKeys(email); driver.findElement(By.xpath(".//*[@id='userBean_password']")).sendKeys("<PASSWORD>#"); driver.findElement(By.xpath(".//*[@id='login_form']/div/div[5]/div/input")).click();; Reporter.log("User logged in successfully"); } @AfterTest public void afterTest() { //driver.close } }
84d8efa0f5b5a6cd66c6d39bd39ead22efe5f34e
[ "Java" ]
1
Java
awafa28/PrePay-Solutions
7d732e26016c49c93d36ed004797e11b2a15e4ea
f0352c2962a52809af197071d91090b625084234
refs/heads/master
<repo_name>Haseoo/SimpleQuizC<file_sep>/src/headers/quiz_menu.h #ifndef QUIZ_MENU_H_INCLUDED #define QUIZ_MENU_H_INCLUDED #include "players.h" #include "questions.h" #include <allegro5/allegro.h> #include <allegro5/allegro_ttf.h> #include <allegro5/allegro_image.h> #include <allegro5/allegro_native_dialog.h> size_t gameModeMenu( ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display); size_t numberOfPlayersMenu( ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display); int playersInfoMenu(size_t numberOfPlayers, struct player **playersList, ALLEGRO_FONT *font, ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display); int printScore(struct player *playersList, ALLEGRO_FONT *font, size_t numberOfPlayers, ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display); int addScoreToBoard(size_t score,char playerName[BUFFER_SIZE], ALLEGRO_DISPLAY *display); int printHiScore(ALLEGRO_FONT *font, ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display); #endif // QUIZ_MENU_H_INCLUDED <file_sep>/src/playersList.c #include "headers/players.h" int addPlayer (struct player **playersList, const char *playerName) { struct player *newElement = (struct player*)malloc(sizeof(struct player)); if (newElement) { strncpy(newElement->playerName, playerName, BUFFER_SIZE); newElement->score = 0; newElement->isPlaying = true; if (*playersList == NULL) { newElement->next = newElement->prev = newElement; *playersList = newElement; } else { newElement->next = *playersList; newElement->prev = (*playersList)->prev; (*playersList)->prev->next = newElement; (*playersList)->prev = newElement; } } else { fprintf(stderr,"Could't add player to list- couldn't create new element\n"); return -1; } return 0; } void removePlayersList(struct player **playersList) { if (*playersList) { struct player *start = *playersList; do { struct player *tmp = (*playersList)->next; free(*playersList); *playersList = tmp; 
} while (*playersList != start); *playersList = NULL; } } <file_sep>/src/allegroMenu.c #include "headers/quiz_menu.h" static size_t getGameModeChoice(int mouseX, int mouseY) { if (mouseX >= 90 && mouseX <= 635) { if (mouseY >= 135 && mouseY <= 180) return 1; if (mouseY >= 203 && mouseY <= 245) return 2; if (mouseY >= 265 && mouseY <= 305) return 3; if (mouseY >= 325 && mouseY <= 367) return 4; if (mouseY >= 390 && mouseY <= 435) return 5; } else if (mouseX >= 640 && mouseY <= 80) return 6; return 0; } size_t gameModeMenu( ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display) { ALLEGRO_BITMAP *bmp = al_load_bitmap("assets/menu.bmp"); size_t gameModeChoice = 0; if (!bmp) { fprintf(stderr,"Missing file assets/menu.bmp\n"); al_show_native_message_box(display, "An error occurred" , "Missing file", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); return 0; } al_draw_bitmap(bmp,0,0,0); al_flip_display(); while (gameModeChoice == 0) { al_wait_for_event(event_queue, &ev); if (ev.type == ALLEGRO_EVENT_DISPLAY_CLOSE) { al_destroy_bitmap(bmp); return 0; } if (ev.type == ALLEGRO_EVENT_MOUSE_BUTTON_DOWN && ev.mouse.button == 1) gameModeChoice = getGameModeChoice(ev.mouse.x, ev.mouse.y); } al_destroy_bitmap(bmp); return gameModeChoice; } static size_t getNumberOfPlayers (int mouseX, int mouseY) { int begX = 170, barHeight = 35, offset = 50; if (mouseX >= 167 && mouseX <= 560) { for(size_t i = 0; i < 4; i++) { if (mouseY >= begX && mouseY <= begX + barHeight) return i + 1; begX += offset; } } return 0; } size_t numberOfPlayersMenu(ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display) { ALLEGRO_BITMAP *bmp = al_load_bitmap("assets/menu2.bmp"); size_t numberOfPlayers = 0; if (!bmp) { fprintf(stderr,"Missing file assets/menu2.bmp\n"); al_show_native_message_box(display, "An error occurred" , "Missing file", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); return 0; } al_draw_bitmap(bmp,0,0,0); 
al_flip_display(); while (numberOfPlayers == 0) { al_wait_for_event(event_queue, &ev); if (ev.type == ALLEGRO_EVENT_DISPLAY_CLOSE) { al_destroy_bitmap(bmp); return 0; } if (ev.type == ALLEGRO_EVENT_MOUSE_BUTTON_DOWN && ev.mouse.button == 1) numberOfPlayers = getNumberOfPlayers(ev.mouse.x, ev.mouse.y); } al_destroy_bitmap(bmp); return numberOfPlayers; } static int getNick(size_t numberOfPlayers, struct player **playersList, ALLEGRO_FONT *font, ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display) { char nick[BUFFER_SIZE]; ALLEGRO_BITMAP *whiteBar = al_load_bitmap("assets/whitebar.bmp"); if (!whiteBar) { fprintf(stderr, "Missing file assets/white_bar.bmp\n"); al_show_native_message_box(display, "An error occurred" , "Missing file", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); return -1; } al_draw_textf(font, al_map_rgb(0,0,0), 110, 190, ALLEGRO_ALIGN_CENTRE, "Player: %zu",numberOfPlayers); al_flip_display(); for (int i = 0; i < BUFFER_SIZE - 1; i++) { al_wait_for_event(event_queue, &ev); while (ev.type != ALLEGRO_EVENT_KEY_DOWN) { if (ev.type == ALLEGRO_EVENT_DISPLAY_CLOSE) { al_destroy_bitmap(whiteBar); return -2; } al_wait_for_event(event_queue, &ev); } int tmp = ev.keyboard.keycode; if ((tmp < 0 || tmp > 36) && tmp!= 67 && tmp!=63) { i--; continue; } if (tmp == 63) { if (i > 0) { nick[i - 1] = 0; i -= 2; al_draw_bitmap(whiteBar,179,190,0); al_draw_text(font, al_map_rgb(0,0,0), 180, 190,ALLEGRO_ALIGN_LEFT, nick); al_flip_display(); continue; } else { i--; continue; } } char oneChar = 0; if ((tmp >= 27 && tmp <= 36)) oneChar = (char)tmp - 27 + '0'; else oneChar = (char)tmp + 'a' - 1; if (tmp == 67) break; nick[i] = oneChar; nick[i + 1] = 0; al_draw_bitmap(whiteBar,180,188,0); al_draw_text(font, al_map_rgb(0,0,0), 180, 190,ALLEGRO_ALIGN_LEFT, nick); al_flip_display(); } al_destroy_bitmap(whiteBar); return addPlayer(playersList,nick); } int playersInfoMenu(size_t numberOfPlayers, struct player **playersList, 
ALLEGRO_FONT *font, ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display) { for (size_t i = 0; i < numberOfPlayers; i++) { al_wait_for_event(event_queue, &ev); if (ev.type == ALLEGRO_EVENT_DISPLAY_CLOSE) { return -1; } ALLEGRO_BITMAP *bmp = al_load_bitmap("assets/menu3.bmp"); if (!bmp) { fprintf(stderr,"Missing file assets/menu3.bmp\n"); al_show_native_message_box(display, "An error occurred" , "Missing file", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); return -1; } al_draw_bitmap(bmp,0,0,0); al_draw_textf(font, al_map_rgb(0,0,0), 720 / 2, 130, ALLEGRO_ALIGN_CENTER, "Enter your nickname (max %d chars, only small letters and digits)", BUFFER_SIZE - 1); int result = getNick(i + 1, playersList, font, event_queue, ev, display); if (result == -1) { al_destroy_bitmap(bmp); al_show_native_message_box(display, "An error occurredd" , "", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); return -1; } if (result == -2) { al_destroy_bitmap(bmp); return -1; } al_destroy_bitmap(bmp); } return 0; } int printScore(struct player *playersList, ALLEGRO_FONT *font, size_t numberOfPlayers, ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display) { ALLEGRO_BITMAP *bmp = al_load_bitmap("assets/scoreboard.bmp"); if (!bmp) { fprintf(stderr, "Missing file assets/scoreboard.bmp\n"); al_show_native_message_box(display, "An error occurred" , "Missing file", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); return -1; } al_draw_bitmap(bmp,0,0,0); int textX1 = 355, textX2 = 625, textY = 170; al_draw_text(font, al_map_rgb(0,0,0), textX1, textY, ALLEGRO_ALIGN_RIGHT,"Gracz:"); al_draw_text(font, al_map_rgb(0,0,0), textX2, textY, ALLEGRO_ALIGN_RIGHT, "wynik:"); textY += 33; //go to next row for (size_t i = 0; i < numberOfPlayers; i++) { al_draw_text(font, al_map_rgb(0,0,0), textX1, textY, ALLEGRO_ALIGN_RIGHT, playersList->playerName); al_draw_textf(font, al_map_rgb(0,0,0), textX2, textY, 
ALLEGRO_ALIGN_RIGHT, "%zu",playersList->score); textY += 33; //go to next row playersList = playersList->next; } al_flip_display(); do { al_wait_for_event(event_queue, &ev); if (ev.type == ALLEGRO_EVENT_DISPLAY_CLOSE) { al_destroy_bitmap(bmp); return -1; } } while (ev.type != ALLEGRO_EVENT_MOUSE_BUTTON_DOWN); al_destroy_bitmap(bmp); return 0; } <file_sep>/src/main.c #include "headers/players.h" #include "headers/questions.h" #include "headers/quiz_menu.h" #include "headers/quiz_game.h" static int allegroInit(void) { if (!al_init()) { fprintf( stderr, "Failed to initialize Allegro5 library.\n" ); return -1; } if (!al_init_image_addon()) { fprintf( stderr, "Failed to initialize Allegro5 image library.\n" ); return -1; } al_init_font_addon(); if (!al_init_ttf_addon()) { fprintf( stderr, "Failed to initialize Allegro5 font library.\n" ); return -1; } if (!al_install_keyboard()) { fprintf( stderr, "Failed to initialize Allegro5 keyboard.\n" ); return -1; } if (!al_install_mouse()) { fprintf( stderr, "Failed to initialize Allegro5 mouse.\n" ); return -1; } return 0; } int main(void) { FILE *error = freopen("error.log","w",stderr); if (!error) { return EXIT_FAILURE; } if (allegroInit() == - 1) { return EXIT_FAILURE; } ALLEGRO_EVENT_QUEUE *event_queue = NULL; event_queue = al_create_event_queue(); ALLEGRO_EVENT ev; ALLEGRO_BITMAP *icon = al_load_bitmap("assets/icon.png"); ALLEGRO_DISPLAY *gameWindow = al_create_display(720, 450); if (icon) { al_set_display_icon(gameWindow, icon); } ALLEGRO_FONT *font = al_load_ttf_font("assets/arial.ttf", 22, 0); ALLEGRO_FONT *questionFont = al_load_ttf_font("assets/arial.ttf", 20, 0); if (!font || !questionFont) { al_show_native_message_box(gameWindow, "Failed to load Simple Quiz" , "Font error","" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); return EXIT_FAILURE; } al_register_event_source(event_queue, al_get_display_event_source(gameWindow)); al_register_event_source(event_queue, al_get_mouse_event_source()); 
al_register_event_source(event_queue,al_get_keyboard_event_source()); al_set_window_title(gameWindow ,"Simple Quiz"); const size_t NUMBER_OF_CATEGORIES = countNumberOfCategories("assets/categories.txt"); if (NUMBER_OF_CATEGORIES <= 0) { al_show_native_message_box(gameWindow, "Failed to load Simple Quiz" , "An error occurred while initializing questions", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); return EXIT_FAILURE; } struct category *categoriesList = (struct category*)calloc(NUMBER_OF_CATEGORIES, sizeof(struct category)); struct questionsList *questionList = NULL; if (initCategoriesList(categoriesList, "assets/categories.txt", NUMBER_OF_CATEGORIES) == -1) { al_show_native_message_box(gameWindow, "Failed to load Simple Quiz" , "An error occurred while initializing questions", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); return EXIT_FAILURE; } const size_t NUMBER_OF_ALL_QUESTIONS = countMaxQuestion(categoriesList, NUMBER_OF_CATEGORIES); struct player *playersList = NULL; bool end = false; while (!end) { size_t gameModeChoice = 0; size_t numberOfPlayers = 0; setUsedQuestionToZero(categoriesList, NUMBER_OF_CATEGORIES); gameModeChoice = gameModeMenu(event_queue, ev, gameWindow); if (gameModeChoice == 0) { end = true; } if (gameModeChoice != 0 && gameModeChoice != 6) { numberOfPlayers = numberOfPlayersMenu(event_queue, ev, gameWindow); } if (numberOfPlayers == 0 && gameModeChoice != 6) { end = true; } if (numberOfPlayers != 0) { if (playersInfoMenu(numberOfPlayers, &playersList, font, event_queue, ev, gameWindow) == -1) { end = true; } } if (end == true) break; srand(time(0)); switch (gameModeChoice) { case 1 : { if (ask10Questions(playersList, numberOfPlayers, &questionList,categoriesList, NUMBER_OF_CATEGORIES, questionFont, event_queue, ev,gameWindow, false) == -1) { end = true; } break; } case 2 : { if (ask10Questions(playersList, numberOfPlayers, &questionList,categoriesList, NUMBER_OF_CATEGORIES, questionFont, 
event_queue, ev, gameWindow, true) == -1) { end = true; } break; } case 3 : { if (ask10QuestionsWithChoicingCategory(playersList, numberOfPlayers, &questionList, categoriesList, NUMBER_OF_CATEGORIES, questionFont, event_queue, ev, gameWindow) == -1) { end = true; } break; } case 4 : { if (askMaxQuestions(playersList, numberOfPlayers, &questionList, categoriesList, NUMBER_OF_CATEGORIES, NUMBER_OF_ALL_QUESTIONS, questionFont, event_queue, ev, gameWindow, false) == -1) { end = true; } break; } case 5 : { if (askMaxQuestions(playersList, numberOfPlayers, &questionList, categoriesList, NUMBER_OF_CATEGORIES, NUMBER_OF_ALL_QUESTIONS, questionFont, event_queue, ev, gameWindow, true) == -1) { end = true; } break; } case 6 : { if(printHiScore(font, event_queue, ev, gameWindow) == -1) { end = true; } } } if (gameModeChoice == 5 && !end) { for (size_t i = 0; i < numberOfPlayers; i++) { if (addScoreToBoard(playersList->score, playersList->playerName, gameWindow) == -1) { end = true; break; } playersList = playersList->next; } } if (!end && gameModeChoice != 6) { if (printScore(playersList, font, numberOfPlayers, event_queue, ev, gameWindow) == -1) { end = true; } } removePlayersList(&playersList); deleteList(&questionList); } fclose(error); al_destroy_display(gameWindow); if (icon) { al_destroy_bitmap(icon); } al_destroy_event_queue(event_queue); al_destroy_font(font); al_destroy_font(questionFont); removePlayersList(&playersList); deleteList(&questionList); free(categoriesList); return 0; } <file_sep>/readme.txt Deleting the questions currently contained in the game files is not recommended. The game to work requires at least 10 questions. The gamemode "10 questions with choosing category" requires three categories with unused questions for each round. The list of categories can be found in "assets/categories.txt". The question files must be added in a specific way, same as the present files are. 
The files with questions should be organised in a way that present files are and have at least one question. Otherwise, the game may work improperly or may crash. Questions are in polish, I didn't bother to translate them. They're only for demonstration purposes. <file_sep>/Makefile OBJS = src/allegroGameModes.c src/allegroMenu.c src/hiScore.c src/playersList.c src/questionsList.c src/main.c CC = gcc COMPILER_FLAGS = -Wall -Wextra LINKER_FLAGS = -lallegro -lallegro_image -lallegro_ttf -lallegro_font -lallegro_dialog OBJ_NAME = SimpleQuiz all: $(OBJS) $(CC) $(OBJS) $(COMPILER_FLAGS) $(LINKER_FLAGS) -o $(OBJ_NAME) clean: rm $(OBJ_NAME) <file_sep>/src/headers/players.h #ifndef PLAYERS_H_INCLUDED #define PLAYERS_H_INCLUDED #include <stdio.h> #include <string.h> #include <stdlib.h> #include <stdbool.h> #define BUFFER_SIZE 11 struct player { char playerName[BUFFER_SIZE]; size_t score; bool isPlaying; struct player *prev; struct player *next; }; int addPlayer (struct player**, const char *playerName); void removePlayersList(struct player **playersList); #endif // PLAYERS_H_INCLUDED <file_sep>/src/questionsList.c #include "headers/questions.h" extern void endStringProperly (char *string) { char *spot = strstr(string, "\n"); if (spot) { *spot = 0; } } size_t countNumberOfCategories(const char *filePath) { size_t i = 0; FILE *file = fopen(filePath, "r"); if (file) { char tmp [200]; memset(tmp, 0, sizeof(char) * 200); while (strncmp(tmp, "END_OF_LIST", 11) != 0 && !feof(file)){ fgets(tmp, sizeof(tmp)/sizeof(tmp[0]),file); i++; } i -= 2; fclose(file); } else { fprintf(stderr,"Couldn't open file with list of categories: %s\n",filePath); } return i; } static int countNumberOfQuestions (const char *filePath) { int i = 0; FILE *file = fopen(filePath,"r"); if (file) { char tmp[100]; memset(tmp, 0, sizeof(char) * 100); while (strncmp(tmp, "END_OF_LIST", 11) != 0 && !feof(file)) { fgets(tmp, sizeof(tmp)/sizeof(tmp[0]),file); if (strncmp(tmp, "BEGIN_OF_QUESTION", 17) == 0) i++; } 
fclose(file); } else { fprintf(stderr,"Couldn't open file with list of questions: %s \n",filePath); } return i; } void setUsedQuestionToZero (struct category *categoriesList, const size_t NUMBER_OF_CATEGORIES) { for (size_t i = 0; i < NUMBER_OF_CATEGORIES; i++) { categoriesList[i].numberOfUsedQuestions = 0; } } int initCategoriesList (struct category *categoriesList, const char *fileName, const size_t NUMBER_OF_CATEGORIES) { FILE *file = fopen(fileName,"r"); if (file && categoriesList) { memset(categoriesList, 0, sizeof(struct category) * NUMBER_OF_CATEGORIES); char textLine [100]; fgets(textLine, sizeof(textLine)/sizeof(textLine[0]),file); for (size_t i = 0; i < NUMBER_OF_CATEGORIES; i++) { fgets(textLine, sizeof(textLine)/sizeof(textLine[0]),file); endStringProperly (textLine); char *minusKeySpot = strrchr(textLine, '-'); if (minusKeySpot) { strncpy(categoriesList[i].filePath, textLine, minusKeySpot - textLine); strncpy(categoriesList[i].categoryName, minusKeySpot + 1, 50); } else { fprintf(stderr, "Niepoprawne kodowanie w pliku %s", fileName); return -1; } size_t numberOfQuestions = countNumberOfQuestions(categoriesList[i].filePath); if (numberOfQuestions != 0) { categoriesList[i].numberOfQuestions = numberOfQuestions; categoriesList[i].numberOfUsedQuestions = 0; } else { return -1; } } fclose(file); } else { fprintf(stderr,"Couldn't open file with list of categories: %s \n",fileName); return -1; } return 0; } size_t countMaxQuestion (struct category *categoriesList, const size_t NUMBER_OF_CATEGORIES) { size_t numberOfQuestions = 0; if (categoriesList) { for (size_t i = 0; i < NUMBER_OF_CATEGORIES; i++) { numberOfQuestions = numberOfQuestions + categoriesList[i].numberOfQuestions; } } return numberOfQuestions; } void addToList(struct questionsList **front, size_t categoryNumber, size_t questionNumber) { struct questionsList *newElement = (struct questionsList*)malloc(sizeof(struct questionsList)); if (newElement) { newElement->category = categoryNumber; 
newElement->question = questionNumber; if (*front == NULL) { newElement->next = NULL; *front = newElement; } else { newElement->next = *front; *front = newElement; } } else { fprintf(stderr,"Could't add question to list- couldn't create new element\n"); } } void deleteList(struct questionsList **front) { while (*front != NULL) { struct questionsList *tmp = (*front)->next; free (*front); *front = tmp; } } static int searchQuestion(struct questionsList *front, size_t categoryNumber, size_t questonNumber) { if (front == NULL) return 1; while (front != NULL) { if (front->category == categoryNumber && front->question == questonNumber) return 0; front = front->next; } return 1; } static size_t getCategoryNumber(struct category *categoriesList, const size_t NUMBER_OF_CATEGORIES) { size_t randomCategory = 0; while (1) { randomCategory = rand() % NUMBER_OF_CATEGORIES; if (categoriesList[randomCategory].numberOfQuestions > categoriesList[randomCategory].numberOfUsedQuestions) { categoriesList[randomCategory].numberOfUsedQuestions += 1; break; } } return randomCategory; } size_t getQuestionNumber(struct category *categoriesList, struct questionsList *front, size_t categoryNumber) { size_t randomQuestion = 0; while (1) { randomQuestion = rand() % categoriesList[categoryNumber].numberOfQuestions; if (searchQuestion(front,categoryNumber,randomQuestion )) { break; } } return randomQuestion; } void getRandomQuestion(struct questionsList **front, struct category *categoriesList, const size_t NUMBER_OF_CATEGORIES) { size_t categoryNumber = getCategoryNumber(categoriesList, NUMBER_OF_CATEGORIES); size_t questionNumber = getQuestionNumber(categoriesList, *front, categoryNumber); addToList(front, categoryNumber, questionNumber); } <file_sep>/src/headers/questions.h #ifndef QUESTIONS_H_INCLUDED #define QUESTIONS_H_INCLUDED #include <time.h> #include <stdio.h> #include <string.h> #include <stdlib.h> struct category { char filePath[50]; char categoryName[50]; size_t numberOfQuestions; 
size_t numberOfUsedQuestions; }; struct questionsList { size_t category; size_t question; struct questionsList *next; }; extern void endStringProperly (char *string); size_t countNumberOfCategories(const char *fileName); void setUsedQuestionToZero (struct category *categoriesList, const size_t NUMBER_OF_CATEGORIES); size_t countMaxQuestion (struct category *categoriesList, const size_t NUMBER_OF_CATEGORIES); int initCategoriesList (struct category *categoriesList, const char *fileName, const size_t NUMBER_OF_CATEGORIES); void addToList(struct questionsList ** ront, size_t categoryNumber, size_t questionNumber); void deleteList(struct questionsList **front); size_t getQuestionNumber(struct category *categoriesList, struct questionsList *front, size_t categoryNumber); void getRandomQuestion(struct questionsList **front, struct category *categoriesList, const size_t NUMBER_OF_CATEGORIES); #endif // QUESTIONS_H_INCLUDED <file_sep>/src/hiScore.c #include <stdio.h> #include "headers/players.h" #include <allegro5/allegro.h> #include "headers/quiz_menu.h" #include <allegro5/allegro_ttf.h> #include <allegro5/allegro_image.h> #include <allegro5/allegro_native_dialog.h> static int findSpot(size_t score, char scoreTable[20][BUFFER_SIZE]) { int spot = -1; for (int i = 1; i < 20; i += 2) { if ((int)score > atoi(scoreTable[i])) { return spot = i - 1; } } return spot; } static void moveArray(char scoreTable[20][BUFFER_SIZE], int spot) { for(int i = 19; i >= spot; i--) { memcpy(scoreTable[i], scoreTable[i - 2], sizeof(char) * BUFFER_SIZE); } } int addScoreToBoard(size_t score, char playerName[BUFFER_SIZE], ALLEGRO_DISPLAY *display) { char scoreTable[20][BUFFER_SIZE], scoreChar[BUFFER_SIZE]; memset(scoreTable, 0, sizeof(scoreTable)); FILE *hiScoreFile = fopen("assets/hiStoreTable", "rw+"); if (hiScoreFile) { fread(scoreTable, sizeof(char), sizeof(scoreTable) / sizeof(char), hiScoreFile); int spot = findSpot(score, scoreTable); if (spot == -1) { return 0; } moveArray(scoreTable, 
spot); snprintf(scoreChar, BUFFER_SIZE, "%zu", score); memcpy(scoreTable[spot],playerName, sizeof(char) * BUFFER_SIZE); memcpy(scoreTable[spot + 1],scoreChar, sizeof(char) * BUFFER_SIZE); fseek(hiScoreFile, 0, SEEK_SET); fwrite(scoreTable, sizeof(char), sizeof(scoreTable) / sizeof(char), hiScoreFile); fclose(hiScoreFile); } else { al_show_native_message_box(display, "An error occurred" , "Missing file", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); fprintf(stderr,"Couldn't open file with score table: \"assets/hiStoreTable\"."); return -1; } return 0; } int printHiScore(ALLEGRO_FONT *font, ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display) { al_flip_display(); ALLEGRO_BITMAP *bmp = al_load_bitmap("assets/hiScore.bmp"); if (!bmp) { al_show_native_message_box(display, "An error occurred" , "Missing file", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); fprintf(stderr,"Couldn't open bitmap: \"assets/hiScore.bmp\"."); return -1; } al_draw_bitmap(bmp, 0, 0, 0); int begTextY = 60, textX1 = 105, textX2 = 365, textX3 = 628; FILE *hiScoreFile = fopen("assets/hiStoreTable", "rb"); char scoreTable[20][BUFFER_SIZE]; if (hiScoreFile) { fread(scoreTable, sizeof(char), sizeof(scoreTable) / sizeof(char), hiScoreFile); al_draw_textf(font, al_map_rgb(0,0,0), 720 / 2, begTextY - 40, ALLEGRO_ALIGN_CENTER,"The best results for the all questions with falling off mode:"); al_draw_textf(font, al_map_rgb(0,0,0), textX1, begTextY, ALLEGRO_ALIGN_CENTER,"No."); al_draw_textf(font, al_map_rgb(0,0,0), textX2, begTextY, ALLEGRO_ALIGN_RIGHT,"Player:"); al_draw_textf(font, al_map_rgb(0,0,0), textX3, begTextY, ALLEGRO_ALIGN_RIGHT,"Score:"); begTextY += 33; for (size_t i = 0; i < 10; i++) { al_draw_textf(font, al_map_rgb(0,0,0), textX1, begTextY, ALLEGRO_ALIGN_CENTER,"%zu",i + 1); al_draw_textf(font, al_map_rgb(0,0,0), textX2, begTextY, ALLEGRO_ALIGN_RIGHT,"%s", scoreTable[i * 2]); al_draw_textf(font, al_map_rgb(0,0,0), textX3, 
begTextY, ALLEGRO_ALIGN_RIGHT,"%d", atoi(scoreTable[i * 2 + 1])); begTextY += 33; } fclose(hiScoreFile); } else { al_show_native_message_box(display, "An error occurred" , "Missing file", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); fprintf(stderr,"Couldn't open file with score table: \"assets/hiStoreTable\"."); return -1; } al_flip_display(); do { al_wait_for_event(event_queue, &ev); if (ev.type == ALLEGRO_EVENT_DISPLAY_CLOSE) { al_destroy_bitmap(bmp); return -1; } if(ev.type == ALLEGRO_EVENT_MOUSE_BUTTON_DOWN && ev.mouse.x >= 670 && ev.mouse.y <= 50) { hiScoreFile = fopen("assets/hiStoreTable", "wb"); memset(scoreTable, 0, sizeof(char) * 20 * BUFFER_SIZE); fwrite(scoreTable, sizeof(char), sizeof(scoreTable) / sizeof(char), hiScoreFile); fclose(hiScoreFile); printHiScore(font, event_queue, ev, display); } } while (ev.type != ALLEGRO_EVENT_MOUSE_BUTTON_DOWN); al_destroy_bitmap(bmp); return 0; } <file_sep>/src/allegroGameModes.c #include <stdio.h> #include "headers/quiz_game.h" #include "headers/questions.h" static char askQuestionAndGetAnswer(struct questionsList *front, struct category *categoriesList, ALLEGRO_FONT *font) { char correctAnswer = -1; size_t rep = 0; int questionY = 45, answerY = 240; //begin of text FILE *file = fopen(categoriesList[front->category].filePath,"r"); if (file) { char textLine[75]; memset(textLine, 0, sizeof(char) * 75); while ( !feof(file) || strncmp(textLine,"END_OF_LIST",11) != 0){ fgets(textLine, 75, file); if (strncmp(textLine,"BEGIN_OF_QUESTION",17) == 0) rep++; if (rep == front->question + 1) { fgets(textLine,75, file); while ( strncmp(textLine,"END_OF_QUESTION",15) && !feof(file)) { endStringProperly(textLine); al_draw_text(font, al_map_rgb(0,0,0), 720 / 2 ,questionY ,ALLEGRO_ALIGN_CENTER, textLine); questionY += 25; //new textline fgets(textLine,75,file); } for(int i = 0; i < 4; i++) { fgets(textLine,75,file); endStringProperly(textLine); al_draw_text(font, al_map_rgb(0,0,0), 720 / 2 ,answerY 
,ALLEGRO_ALIGN_CENTER, textLine); answerY += 45; //go to next box } if (strncmp(textLine,"END_OF_LIST",11) != 0) correctAnswer=getc(file); else fprintf(stderr, "Failed to read question no.%zu forom file %s\n", front->question + 1, categoriesList[front->category].filePath); fclose(file); return correctAnswer; } } } else { fprintf(stderr,"Couldn't open file with list of categories: %s \n",categoriesList[front->category].filePath); return -1; } return -1; } static size_t getThreeCategories(struct player *playersList, struct category *categoriesList, const size_t NUMBER_OF_CATEGORIES, ALLEGRO_FONT *font, ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display) { size_t choices[3] = {0, 0, 0}; while (1) { choices[0] = rand() % NUMBER_OF_CATEGORIES; if (categoriesList[choices[0]].numberOfQuestions > categoriesList[choices[0]].numberOfUsedQuestions) { break; } } while (1) { choices[1] = rand() % NUMBER_OF_CATEGORIES; if (choices[1] != choices [0] && categoriesList[choices[1]].numberOfQuestions > categoriesList[choices[1]].numberOfUsedQuestions) { break; } } while (1) { choices[2] = rand() % NUMBER_OF_CATEGORIES; if (choices[2] != choices [0] && choices[2] != choices[1] && categoriesList[choices[2]].numberOfQuestions > categoriesList[choices[2]].numberOfUsedQuestions) { break; } } ALLEGRO_BITMAP *bmp = al_load_bitmap("assets/cat_choice.bmp"); if (!bmp) { fprintf(stderr, "Missing file assets/cat_choice.bmp\n"); al_show_native_message_box(display, "An error occurred" , "Missing file", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); return -1; } int begY = 140; int answer = 0; bool gotAnswer = false; al_draw_bitmap(bmp, 0, 0, 0); al_draw_textf(font, al_map_rgb(0,0,0), 720 / 2, begY, ALLEGRO_ALIGN_CENTER, "The category chooser: %s",playersList->playerName); begY += 49; for (size_t i = 0; i < 3; i++) { al_draw_textf(font, al_map_rgb(0,0,0), 720 / 2, begY, ALLEGRO_ALIGN_CENTER, "%zu - %s", i + 1, 
categoriesList[choices[i]].categoryName); begY += 49; } al_flip_display(); do { al_wait_for_event(event_queue, &ev); if (ev.type == ALLEGRO_EVENT_DISPLAY_CLOSE) { al_destroy_bitmap(bmp); return -2; } if (ev.type == ALLEGRO_EVENT_MOUSE_BUTTON_DOWN && ev.mouse.button == 1) { if (ev.mouse.x >= 163 && ev.mouse.x <= 560 && ev.mouse.y >= 185 && ev.mouse.y <= 320) { int begY2 = 185; for (int i = 0; i < 3; i++) { if (ev.mouse.y >= begY2 && ev.mouse.y <= begY2 + 49) { answer = i; gotAnswer = true; } begY2 += 49; } } } } while (!gotAnswer); categoriesList[choices[answer]].numberOfUsedQuestions += 1; al_destroy_bitmap(bmp); return choices[answer]; } int ask10Questions (struct player *playersList, int numberOfPlayers, struct questionsList **front, struct category *categoriesList, const size_t NUMBER_OF_CATEGORIES, ALLEGRO_FONT *font, ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display, bool fallingOut) { char correctAnswer = 0, answer = 0; ALLEGRO_BITMAP *bmp = al_load_bitmap("assets/ask_question.bmp"); int numberOfPlayingPlayers = numberOfPlayers; if (!bmp) { fprintf(stderr, "Missing file assets/ask_question.bmp\n"); al_show_native_message_box(display, "An error occurred" , "Missing file", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); return -1; } bool gotAnswer = false; for (int i = 0; i < 10; i++) { getRandomQuestion(front,categoriesList, NUMBER_OF_CATEGORIES); for (int j = 0; j < numberOfPlayers; j++) { if (playersList->isPlaying) { al_draw_bitmap(bmp,0,0,0); al_draw_textf(font, al_map_rgb(0,0,0), 1, 8, ALLEGRO_ALIGN_LEFT, "Question %d. 
from category %s.", i + 1, categoriesList[(*front)->category].categoryName); al_draw_textf(font, al_map_rgb(0,0,0), 690, 8, ALLEGRO_ALIGN_RIGHT,"Now answering: %s", playersList->playerName); correctAnswer = askQuestionAndGetAnswer(*front, categoriesList, font); al_flip_display(); if (correctAnswer == -1) { al_show_native_message_box(display, "An error occurred" , "", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); al_destroy_bitmap(bmp); return -1; } //getting answer do { al_wait_for_event(event_queue, &ev); if (ev.type == ALLEGRO_EVENT_DISPLAY_CLOSE) { al_destroy_bitmap(bmp); return -1; } if (ev.type == ALLEGRO_EVENT_MOUSE_BUTTON_DOWN && ev.mouse.button == 1) { if (ev.mouse.x >= 35 && ev.mouse.x <= 680 && ev.mouse.y >= 230 && ev.mouse.y <= 400) { int mouseY = 230; //first answer bar's y for (int i = 0; i < 4; i++) { if (ev.mouse.y >= mouseY && ev.mouse.y <= mouseY + 35) { answer = 'A' + i; gotAnswer = true; } mouseY += 45; //adding answer bar size } } } } while (!gotAnswer); gotAnswer = false; if (answer == correctAnswer) { al_draw_text(font, al_map_rgb(25,140,40), 720 / 2 ,412 ,ALLEGRO_ALIGN_CENTER, "Your answer is correct, press any key to continiue..."); al_flip_display(); playersList->score++; } else { if (fallingOut) { al_draw_text(font, al_map_rgb(255,0,0), 720 / 2 ,412 ,ALLEGRO_ALIGN_CENTER, "Your answer is incorrect, you lost, press any key to continiue..."); playersList->isPlaying = false; numberOfPlayingPlayers--; al_flip_display(); } else { al_draw_text(font, al_map_rgb(255,0,0), 720 / 2 ,412 ,ALLEGRO_ALIGN_CENTER, "Your answer is incorrect, press any key to continiue..."); al_flip_display(); } } do { al_wait_for_event(event_queue, &ev); if (ev.type == ALLEGRO_EVENT_DISPLAY_CLOSE) { al_destroy_bitmap(bmp); return -1; } } while (ev.type != ALLEGRO_EVENT_MOUSE_BUTTON_DOWN); answer = 0; } playersList = playersList->next; if (numberOfPlayingPlayers == 0) { al_destroy_bitmap(bmp); return 0; } } } al_destroy_bitmap(bmp); return 0; } int 
ask10QuestionsWithChoicingCategory (struct player *playersList, int numberOfPlayers, struct questionsList **front, struct category *categoriesList, const size_t NUMBER_OF_CATEGORIES, ALLEGRO_FONT *font, ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display) { char correctAnswer = 0, answer = 0; ALLEGRO_BITMAP *bmp = al_load_bitmap("assets/ask_question.bmp"); struct player *choicingList = playersList; if (!bmp) { fprintf(stderr, "Missing file assets/ask_question.bmp\n"); al_show_native_message_box(display, "An error occurred" , "Missing file", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); return -1; } bool gotAnswer = false; for (int i = 0; i < 10; i++) { size_t categoryNum = getThreeCategories(choicingList, categoriesList, NUMBER_OF_CATEGORIES, font, event_queue, ev, display); choicingList = choicingList->next; if ((int) categoryNum == -1) { al_show_native_message_box(display, "An error occurred" , "", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); al_destroy_bitmap(bmp); return -1; } if ((int) categoryNum == -2) { al_destroy_bitmap(bmp); return -1; } size_t questionNum = getQuestionNumber(categoriesList, *front, categoryNum); addToList(front, categoryNum, questionNum); for (int j = 0; j < numberOfPlayers; j++) { al_draw_bitmap(bmp,0,0,0); al_draw_textf(font, al_map_rgb(0,0,0), 1, 8, ALLEGRO_ALIGN_LEFT, "Pytanie %d. 
z kategorii %s.", i + 1, categoriesList[(*front)->category].categoryName); al_draw_textf(font, al_map_rgb(0,0,0), 690, 8, ALLEGRO_ALIGN_RIGHT,"Teraz odpowiada: %s", playersList->playerName); correctAnswer = askQuestionAndGetAnswer(*front, categoriesList, font); al_flip_display(); if (correctAnswer == -1) { al_show_native_message_box(display, "An error occurred" , "", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); al_destroy_bitmap(bmp); return -1; } //getting answer do { al_wait_for_event(event_queue, &ev); if (ev.type == ALLEGRO_EVENT_DISPLAY_CLOSE) { al_destroy_bitmap(bmp); return -1; } if (ev.type == ALLEGRO_EVENT_MOUSE_BUTTON_DOWN && ev.mouse.button == 1) { if (ev.mouse.x >= 35 && ev.mouse.x <= 680 && ev.mouse.y >= 230 && ev.mouse.y <= 400) { int mouseY = 230; //first answer bar's y for (int i = 0; i < 4; i++) { if (ev.mouse.y >= mouseY && ev.mouse.y <= mouseY + 35) { answer = 'A' + i; gotAnswer = true; } mouseY += 45; //adding answer bar size } } } } while (!gotAnswer); gotAnswer = false; if (answer == correctAnswer) { al_draw_text(font, al_map_rgb(25,140,40), 720 / 2 ,412 ,ALLEGRO_ALIGN_CENTER, "Your answer is correct, press any key to continiue..."); al_flip_display(); playersList->score++; } else { al_draw_text(font, al_map_rgb(255,0,0), 720 / 2 ,412 ,ALLEGRO_ALIGN_CENTER, "Your answer is incorrect, press any key to continiue..."); al_flip_display(); } do { al_wait_for_event(event_queue, &ev); if (ev.type == ALLEGRO_EVENT_DISPLAY_CLOSE) { al_destroy_bitmap(bmp); return -1; } } while (ev.type != ALLEGRO_EVENT_MOUSE_BUTTON_DOWN); answer = 0; playersList = playersList->next; } } al_destroy_bitmap(bmp); return 0; } int askMaxQuestions (struct player *playersList, int numberOfPlayers, struct questionsList **front, struct category *categoriesList, const size_t NUMBER_OF_CATEGORIES, const size_t NUMBER_OF_ALL_QUESTIONS, ALLEGRO_FONT *font, ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev,ALLEGRO_DISPLAY *display, bool fallingOut) { char 
correctAnswer = 0, answer = 0; ALLEGRO_BITMAP *bmp = al_load_bitmap("assets/ask_question.bmp"); ALLEGRO_BITMAP *resignButton = al_load_bitmap("assets/resign_button.bmp"); int numberOfPlayingPlayers = numberOfPlayers; if (!bmp || !resignButton) { fprintf(stderr,"Missing file assets/ask_question.bmp or assets/resign_button.bmp\n"); al_show_native_message_box(display, "An error occurred" , "Missing file", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); return -1; } bool gotAnswer = false; for (size_t i = 0; i < NUMBER_OF_ALL_QUESTIONS; i++) { getRandomQuestion(front, categoriesList, NUMBER_OF_CATEGORIES); for (int j = 0; j < numberOfPlayers; j++) { if (playersList->isPlaying) { al_draw_bitmap(bmp,0,0,0); al_draw_bitmap(resignButton,690,0,0); al_draw_textf(font, al_map_rgb(0,0,0), 1, 8, ALLEGRO_ALIGN_LEFT, "Question %zu. from category %s.", i + 1, categoriesList[(*front)->category].categoryName); al_draw_textf(font, al_map_rgb(0,0,0), 690, 8, ALLEGRO_ALIGN_RIGHT,"Now answering: %s", playersList->playerName); correctAnswer = askQuestionAndGetAnswer(*front, categoriesList, font); al_flip_display(); if (correctAnswer == -1) { al_destroy_bitmap(bmp); al_destroy_bitmap(resignButton); al_show_native_message_box(display, "An error occurred" , "", "Check errors.log for more details" ,NULL, ALLEGRO_MESSAGEBOX_ERROR); return -1; } //getting answer do { al_wait_for_event(event_queue, &ev); if (ev.type == ALLEGRO_EVENT_DISPLAY_CLOSE) return -1; if (ev.type == ALLEGRO_EVENT_MOUSE_BUTTON_DOWN && ev.mouse.button == 1) { if (ev.mouse.x >= 35 && ev.mouse.x <= 680 && ev.mouse.y >= 230 && ev.mouse.y <= 400) { int mouseY = 230; //first answer bar's y for (int i = 0; i < 4; i++) { if (ev.mouse.y >= mouseY && ev.mouse.y <= mouseY + 35) { answer = 'A' + i; gotAnswer = true; } mouseY += 45; //adding answer bar size } } if (ev.mouse.x >= 690 && ev.mouse.x <= 720 && ev.mouse.y >= 0 && ev.mouse.y <= 30) { answer = -2; gotAnswer = true; } } } while (!gotAnswer); gotAnswer = 
false; if (answer == correctAnswer) { al_draw_text(font, al_map_rgb(25,140,40), 720 / 2 ,412 ,ALLEGRO_ALIGN_CENTER, "Your answer is correct, press any key to continiue..."); al_flip_display(); playersList->score++; } if (answer != correctAnswer && answer != -2) { if (fallingOut) { al_draw_text(font, al_map_rgb(255,0,0), 720 / 2 ,412 ,ALLEGRO_ALIGN_CENTER, "Twoja odpowiedź jest niepoprawna, odpadasz! Kliknij aby kontynuować..."); playersList->isPlaying = false; numberOfPlayingPlayers--; al_flip_display(); } else { al_draw_text(font, al_map_rgb(255,0,0), 720 / 2 ,412 ,ALLEGRO_ALIGN_CENTER, "Your answer is incorrect, press any key to continiue..."); al_flip_display(); } } if (answer == -2) { al_draw_text(font, al_map_rgb(255,0,0), 720 / 2 ,412 ,ALLEGRO_ALIGN_CENTER, "You've given up, press any key to continiue..."); al_flip_display(); playersList->isPlaying = false; numberOfPlayingPlayers--; } do { al_wait_for_event(event_queue, &ev); if (ev.type == ALLEGRO_EVENT_DISPLAY_CLOSE) { al_destroy_bitmap(bmp); al_destroy_bitmap(resignButton); return -1; } } while (ev.type != ALLEGRO_EVENT_MOUSE_BUTTON_DOWN); answer = 0; } playersList = playersList->next; if (numberOfPlayingPlayers == 0) { al_destroy_bitmap(bmp); al_destroy_bitmap(resignButton); return 0; } } } al_destroy_bitmap(bmp); al_destroy_bitmap(resignButton); return 0; } <file_sep>/src/headers/quiz_game.h #ifndef QUIZ_GAME_H_INCLUDED #define QUIZ_GAME_H_INCLUDED #include "players.h" #include "questions.h" #include <allegro5/allegro.h> #include <allegro5/allegro_ttf.h> #include <allegro5/allegro_image.h> #include <allegro5/allegro_native_dialog.h> int ask10Questions (struct player *playersList, int numberOfPlayers, struct questionsList **front, struct category *categoriesList, const size_t NUMBER_OF_CATEGORIES, ALLEGRO_FONT *font, ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev,ALLEGRO_DISPLAY *display, bool fallingOut); int ask10QuestionsWithChoicingCategory (struct player *playersList, int numberOfPlayers, struct 
questionsList **front, struct category *categoriesList, const size_t NUMBER_OF_CATEGORIES, ALLEGRO_FONT *font, ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display); int askMaxQuestions (struct player *playersList, int numberOfPlayers, struct questionsList **front, struct category *categoriesList, const size_t NUMBER_OF_CATEGORIES, const size_t NUMBER_OF_ALL_QUESTIONS, ALLEGRO_FONT *font, ALLEGRO_EVENT_QUEUE *event_queue, ALLEGRO_EVENT ev, ALLEGRO_DISPLAY *display, bool fallingOut); #endif // QUIZ_GAME_H_INCLUDED
56ea1a9c9e3a20f34f402f897be1f9931d95f254
[ "Makefile", "C", "Text" ]
12
C
Haseoo/SimpleQuizC
bf25ce312ca5720f4c13512bb0e62832b762fb0c
cf911c17999ee9db34a30c1c385816205060c4cf
refs/heads/master
<repo_name>harishbhakuni21/Data-Structures-in-Cpp<file_sep>/README.md # Data-Structures-in-C- here, i'm going to keep data structure's general implementations in c++ which can be used in any program <file_sep>/Graph.cpp /////////////////////////// Graph /////////////////////// #include<iostream> #include<string.h> #include<iterator> #include<algorithm> #include<map> #include<utility> #include<set> #include<queue> #include<stack> #define INF INT_MAX using namespace std; class disjointSet { int n; int *parent,*rank,*size; public: disjointSet(int n){ parent= new int[n]; rank = new int[n]; size = new int[n]; for(int i = 0;i<n;i++) makeSet(i); this->n=n; } void makeSet(int x){ parent[x] = x; rank[x] = 1; size[x] = 1; } int findSet(int x){ if(parent[x]==x) return x; parent[x] = findSet(parent[x]); return parent[x]; } bool unionSet(int x,int y){ if(parent[x]==parent[y]){ return false; } int parX = parent[x]; int parY = parent[y]; if(rank[parX]>=rank[parY]){ parent[parY] = parent[parX]; if(rank[parX]==rank[parY]) rank[parX]++; size[parX]+=size[parY]; } else{ parent[parX] = parent[parY]; size[parY]+=size[parX]; } return true; } }; bool compare( pair<int,pair<int,int> > e1, pair<int,pair<int,int> > e2){ return e1.second.second<e2.second.second; } class Graph{ int v; set<int> vertices; vector<pair<int,pair<int,int> > > edges; int e; public: map<int,map<int,int> > adj; Graph(int v,int e){ this->v = v; this->e = e; } void addEdge(int u,int v,bool bidir){ vertices.insert(u); vertices.insert(v); adj[u][v] = 1; edges.push_back(make_pair(u,make_pair(v,1))); if(bidir){ adj[v][u] = 1; edges.push_back(make_pair(v,make_pair(u,1))); } cout<< u <<" is connected to "<< v<< endl; } void addEdge(int u,int v,int wt,bool bidir){ vertices.insert(u); vertices.insert(v); adj[u][v] = wt; edges.push_back(make_pair(u,make_pair(v,wt))); if(bidir){ adj[v][u] = wt; edges.push_back(make_pair(v,make_pair(u,wt))); } cout<< u <<" is connected to "<< v<< endl; } void dfsUtil(int src, map<int,map<int,int> > 
adj,map<int,bool> &vis){ vis[src] = true; cout<< src<< " "; for(auto it:adj[src]){ //cout<< it.first<< endl; if(!vis[it.first]) dfsUtil(it.first,adj,vis); } } void dfs(){ map<int,bool> visited; for(auto it: vertices){ //cout<< it.first<< endl; visited[it] = false; } for(auto it:vertices) if(!visited[it]) dfsUtil(it,adj,visited); cout<< endl; } void bfs(){ map<int,bool> visited; for(auto it:vertices){ visited[it] = false; } for(auto it:vertices){ if(!visited[it]){ queue<int> q; q.push(it); visited[it] = true; while(!q.empty()){ int curr = q.front(); q.pop(); cout<< curr<< " "; for(auto it:adj[curr]) if(!visited[it.first]){ q.push(it.first); visited[it.first] = true; } } } } cout<< endl; } void topologicalUtil(int src,stack<int> &s,map<int,bool> &visited){ if(visited[src]) return; visited[src] = true; for(auto it:adj[src]){ if(!visited[it.first]){ topologicalUtil(it.first,s,visited); } } s.push(src); } void topologicalSort(){ cout<< endl<< "Topological Sort is : "; stack<int> stk; map<int,bool> visited; for(auto it:vertices){ visited[it] = false; //cout<< it.first<< " "; } //cout<< endl; for(auto it: vertices) topologicalUtil(it,stk,visited); while(!stk.empty()){ cout<< stk.top()<< " "; stk.pop(); } cout<< endl; } void djikstra(int src){ priority_queue<pair<int,int>,vector<pair<int,int> >,greater<pair<int,int> > > q; q.push(make_pair(0,src)); map<int,int> dist,par; for(auto it:vertices){ dist[it] = INF; par[it] = INF; } dist[src]=0; map<int,bool> processed; while(!q.empty()){ bool flag = false; pair<int,int> curr; while(!q.empty()){ curr = q.top(); q.pop(); if(processed.find(curr.second)==processed.end()){ flag = true; break; } } if(!flag) break; int srce = curr.second; int wt = curr.first; processed[curr.second] = true; for(auto it:adj[srce]){ if(dist[it.first]>(wt+it.second)){ dist[it.first] = wt+it.second; q.push(make_pair(dist[it.first],it.first)); par[it.first] = srce; } } } for(auto it:vertices){ cout<< it<< " "<< dist[it]<<" "<< par[it]<< endl; } } void 
prims(){ priority_queue<pair<int,int>,vector<pair<int,int> >,greater<pair<int,int> > > q; map<int,int> par,dist; int src = *(vertices.begin()); q.push(make_pair(0,src)); map<int,bool> processed; vector<pair<int,int> > mst; for(auto it: vertices){ par[it] = INF; dist[it] = INF; } while(!q.empty()){ pair<int,int> curr; bool flag = false; while((!flag) && (!q.empty())){ curr = q.top(); q.pop(); if(processed.find(curr.second) == processed.end()) flag = true; } if(!flag) break; int srce = curr.second; processed[srce] = true; if(srce!=src){ mst.push_back(make_pair(srce,par[srce])); } for(auto it:adj[srce]){ if((processed.find(it.first)==processed.end()) && dist[it.first]>it.second){ dist[it.first] = it.second; par[it.first] = srce; q.push(make_pair(it.second,it.first)); } } } for(auto it:mst){ cout<< it.first<< "->"<< it.second<< endl; } } void bellmanford(int src){ map<int,int> par,dist; for(auto it:vertices){ par[it] = INF; dist[it] = INF; } dist[src] = 0; for(int i = 0;i<vertices.size()-1;i++){ //cout<< "here"; for(auto it:edges){ //cout<< "here"; int u = it.first; int v = it.second.first; //cout<< "arrived here"; if((long long int)dist[v]>((long long int)dist[u]+adj[u][v])){ //cout<< dist[v]<<" "<< dist[u]+adj[u][v]<< endl; dist[v] = dist[u]+adj[u][v]; par[v] = u; } } } bool flag = false; for(auto it:edges){ int u = it.first; int v = it.second.first; if((long long int)dist[v]> (long long int)dist[u]+adj[u][v]){ flag = true; break; } } if(flag){ cout<<"Negative cycle present"<< endl; return; } for(auto it:vertices){ cout<< it<< " "<< dist[it]<< " "<< par[it]<< endl; } } void floydWarshall(){ int dist[v][v],par[v][v]; for(int i =0;i<v;i++) for(int j =0;j<v;j++){ dist[i][j] = INF; par[i][j] = INF; } //memset(dist,INF,sizeof(dist)); //memset(par,INF,sizeof(par)); for(auto it:adj){ for(auto it2:adj[it.first]){ dist[it.first][it2.first] = it2.second; par[it.first][it2.first] = it.first; } } for(auto it:vertices) dist[it][it] = 0; for(int k = 0;k<v;k++){ for(int i = 
0;i<v;i++){ //if(i!=j) for(int j = 0;j<v;j++){ if((dist[i][k]!=INF && dist[k][j]!=INF) && dist[i][j]>(dist[i][k]+dist[k][j])){ dist[i][j] = dist[i][k]+dist[k][j]; par[i][j] = k; } } } } for(int i = 0;i<v;i++,cout<< endl) for(int j = 0;j<v;j++) cout<< dist[i][j]<< " "; for(int i = 0;i<v;i++,cout<< endl) for(int j = 0;j<v;j++) cout<< par[i][j]<< " "; return; } void kruskal(){ disjointSet d(v); sort(edges.begin(),edges.end(),compare); vector<pair<int,pair<int, int> > > MST; for(auto it:edges){ if(d.unionSet(it.first,it.second.first)){ MST.push_back(it); } if(MST.size()>=(v-1)) break; } for(auto it:MST){ cout<< it.first<< "->"<< it.second.first<< "("<< it.second.second<< ")"<< endl; } } bool cycleDetectedUndirected(){ disjointSet d(v); for(int it =0;it<edges.size();it+=2){ //cout<< "checking "<< it.first<< " and "<< it.second.first<< endl; if(!d.unionSet(edges[it].first,edges[it].second.first)) return true; } return false; } bool cycleDetectedUndirected2Util(int src,int par,map<int,bool> &visited){ visited[src] = true; bool cycle = false; for(auto it:adj[src]){ if(visited[it.first] && it.first!=par) return true; if(!visited[it.first]) cycle= cycle||cycleDetectedUndirected2Util(it.first,src,visited); } return cycle; } bool cycleDetectedUndirected2(){ map<int,bool> visited; for(auto it:vertices) visited[it] = false; visited[*vertices.begin()] = true; return cycleDetectedUndirected2Util(*(vertices.begin()),-1,visited); } bool cycleDetectedDirectedUtil(int src,map<int,int> &visited){ visited[src] = 1; bool cycle = false; for(auto it:adj[src]){ if(visited[it.first]==1) return true; if(!visited[it.first]) cycle = cycle || cycleDetectedDirectedUtil(it.first,visited); } visited[src] = 2; return cycle; } bool cycleDetectedDirected(){ map<int,int> visited; for(auto it:vertices) visited[it] = 0; bool ans = false; for(auto it:vertices){ ans = ans || cycleDetectedDirectedUtil(it,visited); } return ans; } }; int main(){ int v; v = 8; int e; e= 8; Graph g(v,e); // g.addEdge(0,1,3,0); 
// g.addEdge(0,3,15,0); // g.addEdge(0,2,6,0); // g.addEdge(1,2,-2,0); // g.addEdge(2,3,2,0); // g.addEdge(3,0,1,0); g.addEdge(0,1,1); g.addEdge(1,2,1); g.addEdge(0,2,1); g.addEdge(3,2,1); g.addEdge(3,4,1); g.addEdge(4,5,1); g.addEdge(4,6,1); g.addEdge(4,7,1); cout<< endl<< "dfs traversal is:"<< endl; g.dfs(); cout<< endl<< "dfs traversal is:"<< endl; g.bfs(); g.topologicalSort(); cout<< endl<< "shortest distance using djikstra: "<< endl; g.djikstra(0); cout<< endl<< "shortest distance using bellmanford: "<< endl; g.bellmanford(0); cout<< endl<< "MST edges are: "<< endl; g.prims(); g.floydWarshall(); g.kruskal(); if(g.cycleDetectedUndirected()) cout<< "Cycle Present"<< endl; else cout<< "No cycle present"<< endl; if(g.cycleDetectedUndirected2()) cout<< "Cycle Present"<< endl; else cout<< "No cycle present"<< endl; return 0; } <file_sep>/trie.cpp /* Program for trie data Structure...*/ #include<bits/stdc++.h> using namespace std; class Trie{ struct trieNode{ trieNode* character[256]; int count; bool isEnd; }; public: trieNode *root=NULL; trieNode* createNode(){ //function to create trie Node trieNode* newN=new trieNode(); newN->isEnd=false; for(int i=0;i<256;i++) newN->character[i]=NULL; newN->count=0; return newN; } trieNode* searchPrefix(string key){ //function to search given prefix in trie trieNode *ptr=root; for(int i=0;i<key.length();i++){ if(!ptr->character[key[i]]) return NULL; ptr=ptr->character[key[i]]; } return ptr; } bool searchWord(string key){ //function to search a word in trie... 
trieNode *ptr=searchPrefix(key); if(!ptr||!ptr->isEnd) return false; return true; } void insertKey(string key){ //function to insert new Word in Trie trieNode *ptr=root; for(int i=0;i<key.length();i++){ if(!ptr->character[key[i]]){ ptr->character[key[i]]=createNode(); ptr->count++; } ptr=ptr->character[key[i]]; } ptr->isEnd=true; } bool deleteKey(trieNode *root,string key){ //Function to delete Word from trie trieNode *ptr=root->character[key[0]]; if(key.size()==1){ if(!ptr->count){ delete(ptr); root->character[key[0]]=NULL; root->count--; return true; } ptr->isEnd=false; return false; } if(deleteKey(ptr,key.substr(1))){ if((!ptr->count)&&(!ptr->isEnd)){ delete(ptr); root->character[key[0]]=NULL; root->count--; } else{ return false; } } } }; int main(){ //driver function for demonstrating trie Trie t; t.root= t.createNode(); t.insertKey("hello"); t.insertKey("hii"); if(t.searchPrefix("ho")) printf("not present\n"); else printf("present\n"); cout<<t.searchWord("hie")<<endl; t.deleteKey(t.root,"hi"); cout<<t.searchWord("hi")<<endl; cout<<t.searchWord("hii")<<endl; return 0; } <file_sep>/bstUtility.cpp //program to demonstrate all utility functions of BST.... #include<bits/stdc++.h> using namespace std; struct node{ //node structure for bst node int data; node *left,*right; }; struct llnode{ //node structure for linked list... 
int data; llnode *next; }; node* createNode(int data){ //function to create a new bst node node* newN=new node(); newN->data=data; newN->left=NULL; newN->right=NULL; return newN; } llnode* createllNode(int data){ //function to create a new linked list node llnode* newN=new llnode(); newN->data=data; newN->next=NULL; return newN; } llnode* insertLL(llnode* head,int data){ //function to insert in a linked list llnode* newN=createllNode(data); newN->next=head; return newN; } bool find(node *root,int data,node *prev,node **par){ //function to find in bst if(!root){ *par=prev; return false; } if(root->data==data){ *par=prev; return true; } else if(root->data<data){ return find(root->right,data,root,par); } else{ return find(root->left,data,root,par); } return false; } node* insert(node *root,int data){ //function to insert in a bst if(!root){ root=createNode(data); return root; } node *ptr=root; while(((ptr->data>data)&&(ptr->left))||((ptr->data<data)&&(ptr->right))){ if(ptr->data>data) ptr=ptr->left; else ptr=ptr->right; } if(ptr->data>data) ptr->left=createNode(data); else if(ptr->data<data) ptr->right=createNode(data); else printf("\n<<<< %d IS DUPLICATE ELEMENT>>>>\n",data); return root; } node* insertR(node *root,int data){ //recursive function to insert in a bst node *par=NULL; if(!root){ root=createNode(data); return root; } if(!find(root,data,NULL,&par)){ if(par->data>data) par->left=createNode(data); else par->right=createNode(data); } else{ printf("\n<<<< %d IS DUPLICATE ELEMENT>>>>\n",data); } return root; } void preorder(node *root){ //preorder traversal if(!root) return; printf("%d ",root->data); preorder(root->left); preorder(root->right); } void inorder(node *root){ // recursive inorder traversal if(!root) return; inorder(root->left); printf("%d ",root->data); inorder(root->right); } void postorder(node *root){ //recursive postorder traversal if(!root) return; postorder(root->left); postorder(root->right); printf("%d ",root->data); } int extractMin(node 
*root){ //returns minimum in bst if(!root) return INT_MAX; node *ptr=root; while(ptr->left){ ptr=ptr->left; } return ptr->data; } int extractMax(node *root){ //this function returns maximum in bst if(!root) return INT_MIN; node *ptr=root; while(ptr->right){ ptr=ptr->right; } return ptr->data; } node* lcar(node *root,node *alpha,node *beta){ //recursive function for lca of two nodes if(((alpha->data>root->data)&&(beta->data<root->data))||((alpha->data<root->data)&&(beta->data>root->data))) return root; else if(alpha->data<root->data){ lcar(root->left,alpha,beta); } else lcar(root->right,alpha,beta); return NULL; } node* lcai(node *root,node *alpha,node *beta){ //iterative function for lca of two nodes if(!root){ return NULL; } node *ptr=root; while(ptr){ if(ptr->data>alpha->data&&ptr->data>beta->data) ptr=ptr->left; else if(ptr->data<alpha->data&&ptr->data<beta->data) ptr=ptr->right; else break; } return ptr; } node* deleteNode(node *root,int data){ //deleting a bst node if(!root){ printf("\n<<<TREE EMPTY>>>\n"); return root; } node *par=NULL; if(!find(root,data,NULL,&par)){ printf("\n<<<ITEM NOT FOUND>>>\n"); return root; } node *ptr=NULL; if(par==NULL){ ptr=root; } else if((par->left)&&data==par->left->data) ptr=par->left; else ptr=par->right; if((!ptr->left)&&(!ptr->right)){ if(!par) root=NULL; else if(par->left==ptr) par->left=NULL; else par->right=NULL; free(ptr); return root; } if(!ptr->left){ if(!par) root=ptr->right; else if(par->left==ptr){ par->left=ptr->right; } else{ par->right=ptr->right; } free(ptr); return root; } else if(!ptr->right){ if(!par){ root=ptr->left; } if(par->left==ptr){ par->left=ptr->left; } else{ par->right=ptr->left; } free(ptr); return root; } else{ int pre=extractMax(ptr->left); ptr->left=deleteNode(ptr->left,pre); ptr->data=pre; return root; } return root; } bool isBST(node *root){ //function to check whether a binary tree is bst or not if(!root) return true; if(root->left&&root->left->data>=root->data) return false; 
if(root->right&&root->right->data<=root->data) return false; return isBST(root->left)&&isBST(root->right); } bool isBST2(node *root,int *prev){ //provided that tree cannot if(!root) // have duplicate values... return true; bool x=isBST2(root->left,prev); bool y=root->data>*prev; *prev=root->data; bool z=isBST2(root->right,prev); return x&&y&&z; } node* bst2dll(node *root,node **t){ //function to convert bst to doubly linked list if(root==NULL){ *t=NULL; return NULL; } node *lh,*lt,*rh,*rt; lh=bst2dll(root->left,&lt); rh=bst2dll(root->right,&rt); root->left=lt; root->right=rh; if(!rt){ *t=root; } else{ rh->left=root; *t=rt; } if(!lh){ return root; } else{ lt->right=root; return lh; } } node* bst2circulardll(node *root){ //function to convert bst to circular doubly linked list if(!root) return NULL; node *tail=NULL; node *head=bst2dll(root,&tail); head->left=tail; tail->right=head; return head; } node* getMedium(node *head){ //function returning medium of the linked list node* ptr=head,*med=head; head->left->right=NULL; while(ptr&&ptr->right){ med=med->right; ptr=ptr->right; if(ptr) ptr=ptr->right; } head->left->right=head; return med; } node* cdll2bst(node *head){ //function to convert circular doubly linked list to bst if(head->left==head){ head->left=NULL; head->right=NULL; return head; } node *med=getMedium(head); if(med==head){ med->left=NULL; med->right->right=NULL; return med; } med->left->right=head; med->right->left=head->left; head->left->right=med->right; head->left=med->left; med->left=cdll2bst(head); med->right=cdll2bst(med->right); return med; } void traversecdll(node *head){ //traversing a circular doubly linked list if(!head) return; head->left->right=NULL; node *ptr=head; while(ptr!=NULL){ printf("%d ",ptr->data); ptr=ptr->right; } head->left->right=head; } node* deleteTree(node *root){ //deleting a bst if(!root) return NULL; deleteTree(root->left); deleteTree(root->right); free(root); return NULL; } node* deletecdll(node *head){ //deleting a 
circular doubly linked list if(!head) return NULL; head->left->right=NULL; node *ptr=head; while(ptr->right!=NULL){ node *temp=ptr; ptr=ptr->right; free(temp); } return NULL; } node* sa2bst(int arr[],int si,int ei){ //function to convert a sorted array to binary search tree if(si>ei) return NULL; if(si==ei){ return createNode(arr[si]); } int med=(si+ei)/2; node *root=createNode(arr[med]); root->left=sa2bst(arr,si,med-1); root->right=sa2bst(arr,med+1,ei); return root; } node* con2bst(llnode** hd,int si,int ei){ llnode *ptr=*hd; if(!(ptr)) return NULL; if(si>ei) return NULL; node* root=NULL; if(si==ei){ root=createNode((*hd)->data); (*hd)=(*hd)->next; return root; } int mid=(si+ei)/2; node* l=con2bst(hd,si,mid-1); root=createNode((*hd)->data); (*hd)=(*hd)->next; root->left=l; root->right=con2bst(hd,mid+1,ei); return root; } node* sll2bst(llnode** hd){ //function to convert sorted linked list to bst llnode *ptr=*hd; int size=0; while(ptr){ size++; ptr=ptr->next; } return con2bst(hd,0,size-1); } int main(){ //driver function to demonstrate all utility functions... node *root=NULL; root=insert(root,6); root=insert(root,2); root=insert(root,1); root=insert(root,8); root=insert(root,3); root=insert(root,4); root=insert(root,5); root=insertR(root,7); preorder(root); printf("\n"); postorder(root); printf("\n"); inorder(root); printf("\n"); root=deleteNode(root,8); preorder(root); printf("\n"); postorder(root); printf("\n"); inorder(root); node *lca=lcai(root,root->left->left,root->left->right); printf("\nlca is %d",lca->data); printf("\nis the given tree a BST? 
:"); int prev=INT_MIN; isBST2(root,&prev)?printf("yes\n"):printf("no\n"); int max=extractMax(root); if(max!=INT_MIN){ printf("\nmaximum element in tree is %d",max); } int min=extractMin(root); if(min!=INT_MAX){ printf("\nminimum element in tree is %d",min); } node *cll=bst2circulardll(root); root=NULL; printf("\ncircular doubly linked list is: "); traversecdll(cll); root=cdll2bst(cll); cll=NULL; printf("\ninorder traversal of tree is: "); inorder(root); cll=deletecdll(cll); root=deleteTree(root); int arr[]={1,2,6,7,9,11,14,15}; root=sa2bst(arr,0,sizeof(arr)/sizeof(int)-1); printf("\ninorder traversal of tree is: "); inorder(root); root=deleteTree(root); llnode* head=NULL; head=insertLL(head,15); head=insertLL(head,14); head=insertLL(head,11); head=insertLL(head,9); head=insertLL(head,7); head=insertLL(head,6); head=insertLL(head,1); root=sll2bst(&head); printf("\ninorder traversal of tree is: "); inorder(root); root=deleteTree(root); return 0; } <file_sep>/btUtilityFunctions.cpp #include<iostream> #include<stack> #include<queue> #include<set> using namespace std; struct node{ int data; node *left,*right; }; node* createNode(int data){ node* newN=new node(); newN->data=data; newN->left=newN->right=NULL; return newN; } void preorderR(node* root){ //Recursive Preorder Traversal if(root==NULL) return ; printf("%d ",root->data); preorderR(root->left); preorderR(root->right); return; } void preorderI(node *root){ //Iterative Preorder Traversal stack<node*> s; s.push(NULL); if(root==NULL) return; printf("%d ",root->data); node* ptr=root; if(ptr->left==NULL) ptr=ptr->right; else{ s.push(ptr->right); ptr=ptr->left; } while(ptr!=NULL){ printf("%d ",ptr->data); if(ptr->left==NULL){ if(ptr->right==NULL){ ptr=s.top(); s.pop(); } else ptr=ptr->right; } else{ if(ptr->right!=NULL){ s.push(ptr->right); } ptr=ptr->left; } } } void preorder(node *root){ //Morris Preorder Traversal ( traversal without recursion and stack) if(!root) return; node *current=root; while(current){ 
if(!current->left){ printf("%d ",current->data); current=current->right; } else{ node *pre=current->left; if(pre->right&&pre->right!=current){ pre=pre->right; } if(!pre->right){ pre->right=current; printf("%d ",current->data); current=current->left; } else{ pre->right=NULL; current=current->right; } } } } void inorderR(node* root){ //Recursive inorder Traversal if(root==NULL) return; inorderR(root->left); printf("%d ",root->data); inorderR(root->right); } void inorderI(node* root){ //Iterative Inorder Traversal stack<node*> stk; while(1){ while(root){ stk.push(root); root=root->left; } if(stk.empty()) return; root=stk.top(); stk.pop(); printf("%d ",root->data); root=root->right; } return; } void inorder(node *root){ //Morris Inorder Traversal if(!root) return; node *current=root; while(current){ if(!current->left){ printf("%d ",current->data); current=current->right; } else{ node *pre=current->left; while((pre->right)&&pre->right!=current){ if(pre->right) pre=pre->right; } if(!pre->right){ pre->right=current; current=current->left; } else{ printf("%d ",current->data); current=current->right; pre->right=NULL; } } } } void postorderR(node* root){ //Recursive Postorder Traversal if(root==NULL) return; postorderR(root->left); postorderR(root->right); cout<<(root->data); void postorderI(node* root){ //Iterative Postorder Traversal static node* prev=NULL; stack<node*> stk; stk.push(NULL); if(root==NULL) return; while(root){ while(root&&(root->left==NULL||root->left!=prev)&&(root->right==NULL||root->right!=prev)){ stk.push(root); prev=root; root=root->left; } if(root==NULL){ root=stk.top(); stk.pop(); } if(root->right==NULL||root->right==prev){ printf("%d ",root->data); prev=root; root=stk.top(); stk.pop(); } else if(root->right!=NULL){ stk.push(root); root=root->right; } } } int levelOrder(node *root){ //Level order Traversal(BFS Traversal) if(root==NULL) return INT_MIN; queue<node*> q; q.push(root); int maximum=root->data; while(!q.empty()){ node *ptr=q.front(); 
printf("%d ",ptr->data); if(ptr->data>maximum) maximum=ptr->data; q.pop(); if(ptr->left) q.push(ptr->left); if(ptr->right) q.push(ptr->right); } return maximum; } int maximum(node *root){ //maximum of a Binary tree if(root==NULL) return INT_MIN; return max(root->data,max(maximum(root->left),maximum(root->right))); } int size(node* root){ //size of a Binary tree if(!root) return 0; return 1+size(root->left)+size(root->right); } void levelOrderReversed(node *root){ //BFS traversal in Reversed order if(root==NULL) return; vector<int> v; queue<node*> q; q.push(root); while(!q.empty()){ node *ptr=q.front(); q.pop(); v.push_back(ptr->data); if(ptr->right) q.push(ptr->right); if(ptr->left) q.push(ptr->left); } for(vector<int>::reverse_iterator it=v.rbegin();it!=v.rend();it++) printf("%d ",*it); } void insertEl(int data,node** root){ //Insertion in a Binary Tree node *newN=createNode(data); if(!*root){ *root= newN; return; } queue<node*> q; q.push(*root); while(!q.empty()){ node *ptr=q.front(); q.pop(); if(ptr->left==NULL){ ptr->left=newN; break; } else q.push(ptr->left); if(ptr->right==NULL){ ptr->right=newN; break; } else q.push(ptr->right); } return; } int heightR(node *root){ //Recursive function for height of a binary tree if(!root) return 0; return 1+max(heightR(root->left),heightR(root->right)); } int heightI(node *root){ //Iterative function for height of a binary tree if(root==NULL) return 0; queue<pair<node*,int> > q; q.push(make_pair(root,1)); pair<node*,int> p; while(!q.empty()){ p=q.front(); q.pop(); if(p.first->left) q.push(make_pair(p.first->left,p.second+1)); if(p.first->right) q.push(make_pair(p.first->right,p.second+1)); } return p.second; } node* deleteTree(node *root){ //Delete a binary tree if(root==NULL) return NULL; deleteTree(root->left); deleteTree(root->right); delete(root); return NULL; } node* deepestNodeI(node *root){ //Iterative function for Deepest node in a binary tree if(root==NULL){ printf("\nEmpty Tree"); return NULL; } node *temp=NULL; 
queue<node*> q; q.push(root); while(!q.empty()){ temp=q.front(); q.pop(); if(temp->right) q.push(temp->right); if(temp->left) q.push(temp->left); } return temp; } void deepestR(node *root,int h,int *max,node** deep){ //utility function for below written function... if(root==NULL) return; if(h>*max){ *max=h; *deep=root; } if(root->left) deepestR(root->left,h+1,max,deep); if(root->right) deepestR(root->right,h+1,max,deep); } node* deepestNodeR(node *root){ //Recursive function for deepest node in a binary tree if(root==NULL) return NULL; int max=1; node *deep=root; deepestR(root,1,&max,&deep); return deep; } bool compareTwoTrees(node* root1,node* root2){ //this function compares whether two binary trees are identical if((!root1)&&(!root2)){ return true; } if((root1&&!root2)||(!root1&&root2)) return false; if(root1->data!=root2->data) return false; return compareTwoTrees(root1->left,root2->left)&&compareTwoTrees(root1->right,root2->right); } int treeDiameter(node* root,int *maxx){ //calculates diameter of a binary tree... if(!root) return 0; int x=0; if(root->left) x=treeDiameter(root->left,maxx)+1; int y=0; if(root->right) y=treeDiameter(root->right,maxx)+1; if((x+y)>*maxx) *maxx=x+y; return (x>y)?x:y; } int maximumSumLevel(node *root){ //this function returns level having maximum sum if(root==NULL) return -1; queue<node*> q; q.push(root); q.push(NULL); int level=1,max=INT_MIN,l=0,sum=0; while(!q.empty()){ node* temp=q.front(); q.pop(); if(temp!=NULL){ sum+=temp->data; if(temp->left) q.push(temp->left); if(temp->right) q.push(temp->right); } else{ if(sum>max){ max=sum; l=level; } level++; sum=0; if(!q.empty()) q.push(NULL); } } return l; } void printingRootToLeaf(node *root,vector<int> &v){ //printing root to leaf paths for all nodes... 
if(root==NULL){ return; } if(root->left==NULL&&root->right==NULL){ printf("\n"); for(int i=0;i<v.size();i++) printf("%d ",v[i]); printf("%d",root->data); } v.push_back(root->data); if(root->left) printingRootToLeaf(root->left,v); if(root->right) printingRootToLeaf(root->right,v); v.pop_back(); } void printRootToLeaf(node *root){ vector<int> v; printingRootToLeaf(root,v); } bool givenSumPathExistence(node* root,int sum){ //this function checks whether path with given sum exists... if(root==NULL) return false; if(!(sum-root->data)) return true; return givenSumPathExistence(root->left,sum-root->data)||givenSumPathExistence(root->right,sum-root->data); } void interchangeToMirror(node *root){ //this function interchanges a tree to its mirror if(!root) return; node* temp=root->left; root->left=root->right; root->right=temp; if(root->left) interchangeToMirror(root->left); if(root->right) interchangeToMirror(root->right); } bool checkMirror(node* root1,node* root2){ //this function checks whether two trees are mirror of each other or not if(!root1&&!root2) return true; if(!root1||!root2) return false; return root1->data==root2->data&&(checkMirror(root1->left,root2->right)&&checkMirror(root1->right,root2->left)); } node* lca(node*root,node *n1,node *n2){ //this function computes lca of two nodes in a tree... if(root==NULL) return root; if(root==n1||root==n2) return root; node* l=lca(root->left,n1,n2); node* r=lca(root->right,n1,n2); if(l&&r) return root; return (l)?l:r; } void zigzag(node *root){ //this function prints zigzag traversal of a binary tree... 
if(!root) return; queue<node*> q; q.push(root); q.push(NULL); int level=1; stack<int> s; while(!q.empty()){ node *nod=q.front(); q.pop(); if(nod==NULL){ if(level%2==0){ while(!s.empty()){ printf("%d ",s.top()); s.pop(); } } level++; if(!q.empty()) q.push(NULL); } else{ if(level%2==0) s.push(nod->data); else printf("%d ",nod->data); if(nod->left) q.push(nod->left); if(nod->right) q.push(nod->right); } } } int main(){ //Driver function for checking utility functions... node *root=NULL,*root1=NULL; printf("enter the number of nodes in tree:"); int n; scanf("%d",&n); while(n--){ int el; scanf("%d",&el); insertEl(el,&root); insertEl(el,&root1); } interchangeToMirror(root1); printf("\nrecursive preorder traversal is: "); preorderR(root); printf("\niterative preorder traversal is: "); preorderI(root); printf("\nmorris preorder traversal is: "); preorder(root); printf("\nrecursive inorder traversal is: "); inorderR(root); printf("\niterative inorder traversal is: "); inorderI(root); printf("\nmorris inorder traversal is: "); inorder(root); printf("\nrecursive postorder traversal is: "); postorderR(root); printf("\niterative postorder traversal is: "); postorderI(root); printf("\nlevel order traversal is: "); printf("\n maximum is %d",levelOrder(root)); printf("\n maximum is %d",maximum(root)); printf("\nsize of tree is %d",size(root)); printf("\nlevel order reversed traversal is: "); levelOrderReversed(root); printf("\n height = %d",heightR(root)); printf("\n height = %d",heightI(root)); node* nn=deepestNodeR(root); if(nn) printf("\ndeepest = %d",nn->data); nn=deepestNodeI(root); if(nn) printf("\ndeepest = %d",nn->data); printf("\n%d",compareTwoTrees(root,root)); int td=0; treeDiameter(root,&td); printf("\ntree diameter is: %d",td); printf("\nmaximum Sum Level is: %d\n",maximumSumLevel(root)); printf("\n printing paths from root to leaf"); printRootToLeaf(root); printf("\n%d",givenSumPathExistence(root,16)); printf("\n%d",checkMirror(root,root)); printf("\nzigzag 
traversal:"); zigzag(root); printf("\nlevel order traversal before interchanging:"); levelOrder(root); interchangeToMirror(root); printf("\nlevel order traversal after interchanging:"); levelOrder(root); root=deleteTree(root); root1=deleteTree(root1); return 0; }
c87805b763b49bc7898ae619abbc13466ff2792f
[ "Markdown", "C++" ]
5
Markdown
harishbhakuni21/Data-Structures-in-Cpp
03326bf2d396030045abe44f5a60570dcf1d8c85
beacb2962e08fb32f37b3859e9f46f46ae0094e2
refs/heads/master
<repo_name>clemnyan/DatabaseSys<file_sep>/DriverDB.java import java.sql.*; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Random; import java.util.Scanner; public class DriverDB { public static final String SERVER = "jdbc:mysql://sunapee.cs.dartmouth.edu/"; public static final String USERNAME="F001743"; public static final String PASSWORD = "<PASSWORD>"; public static final String DATABASE ="F001743_db"; public static String QUERY = "SELECT * FROM Author"; public static Statement stmt = null; public static ResultSet res = null; public static DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); public static LocalDateTime now = LocalDateTime.now(); /**Function for logging in, Welcomes the user and gives him options to login or register * More like a home pages in a more visual app. If the user logs in, he is directed to the function * chooseLogin Mode, if he registers , he is directed to the function choose register mode. * * The function also handles error cases when the user inputs the wrong input */ public static void startOption () { Scanner sc = new Scanner (System.in); System.out.println("\nWELCOME TO THE MANUSCRIPT PUBLISHING DATABASE"); System.out.println("\nWant to Login or register? (TYPE IN RESPONSE AND HIT ENTER)"); String ans=sc.nextLine(); //input entered ans = ans.toLowerCase(); if (ans.equals("login")) { chooseLoginMode(); //methods directs to login mode } else if (ans.equals("register")) { chooseRegisterMode(); // method directs to register mode } else { System.out.println("\nPlease check your entry spelling, and type in a valid response\n"); startOption(); } sc.close(); } /** In this method, the user chooses the mode that he wants to login with. If he is an editor, he * logs in with his editor ID, if an author he logs in with his unique author id and if he is a * reviewer he logs in with his reviewer id. 
* * The function handles error cases when the user inputs the wrong input */ public static void chooseLoginMode () { Scanner sc = new Scanner (System.in); //scanner for author response Scanner m = new Scanner (System.in); //scanner for ID input System.out.println("\n------------------------------------------------------------------------------------------"); System.out.println("\nLogin as one of these options: Editor, Reviewer, Author"); System.out.println("\nType in you choice and hit enter: \n"); System.out.println("------------------------------------------------------------------------------------------"); String ans=sc.nextLine(); //input entered ans = ans.toLowerCase(); if (ans.equals("editor")) { System.out.println("Please enter your Editor ID here, (if not an Editor press 0)! "); //SHOULD GIVE OPTION TO EXIT, IF NOT AUTHOR System.out.println("\n------------------------------------------------------------------------------------------"); try { int numInput = m.nextInt(); if (numInput==0) { startOption(); sc.close(); m.close(); return; } else { EditorLoginMode(numInput); //Login as author sc.close(); m.close(); return; } }catch (Exception e){ System.out.println(e.getMessage()+" error: Please enter a valid number"); } startOption(); } else if (ans.equals("reviewer")) { System.out.println("Please enter your Reviewer ID here, (if not an Reviewer press 0)! "); //SHOULD GIVE OPTION TO EXIT, IF NOT AUTHOR System.out.println("\n------------------------------------------------------------------------------------------"); try { int numInput = m.nextInt(); if (numInput==0) { startOption(); sc.close(); m.close(); return; } else { ReviewerLoginMode(numInput); //Login as author sc.close(); m.close(); return; } }catch (Exception e){ System.out.println(e.getMessage()+" error: Please enter a valid number"); } startOption(); } else if (ans.equals("author")) { System.out.println("Please enter your Author ID here, (if not an Author press 0)! 
"); //SHOULD GIVE OPTION TO EXIT, IF NOT AUTHOR System.out.println("\n------------------------------------------------------------------------------------------"); try { int numInput = m.nextInt(); if (numInput==0) { startOption(); sc.close(); m.close(); return; } else { AuthorloginMode(numInput); //Login as author sc.close(); m.close(); return; } }catch (Exception e){ System.out.println(e.getMessage()+" error: Please enter a valid number"); } startOption(); } else { System.out.println("\nPlease check your entry spelling, and type in a valid response"); System.out.println("DO YOU STILL WANT TO LOGIN ?\n"); ans=sc.nextLine(); ans=ans.toLowerCase(); if (ans.equals("yes")){ chooseLoginMode(); sc.close(); m.close(); return; } else { System.out.println("Thank you, Exiting App.."); sc.close(); m.close(); return; } } } /**Helper method for the chooseLogin function. This functions allows the user to login as an editor after * providing his/her unique ID. * It also handles error cases pretty well * @param editorID */ public static void EditorLoginMode( int editorID) { Scanner sc = new Scanner (System.in); Scanner m = new Scanner (System.in); System.out.println("\nEditor ID Entered is: "+editorID); System.out.println("\n\nConfirm if your Editor ID is correctly entered (YES OR NO):\n"); System.out.println("\n------------------------------------------------------------------------------------------"); String resp = sc.nextLine(); resp = resp.toLowerCase(); if (resp.equals("yes")){ try { Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "SELECT * FROM Editor WHERE idEditor=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, editorID); ResultSet res = stmt.executeQuery(); int check =0; //to check if the user actually has a valid unique id while (res.next()){ System.out.println("WELCOME, EDITOR LOGIN SUCCESSFUL! 
"); System.out.println("\nEDITOR NAME: "+ res.getObject(2)+" "+res.getObject(3)); check=1; //user id is valid } if (check==0) { // user id is not valid System.out.println("Invalid ID, still want to login again (Yes/no)?"); String ans = m.nextLine(); ans=ans.toLowerCase(); if (ans.equals("yes")) { chooseLoginMode(); res.close(); con.close(); stmt.close(); m.close(); return; } else { startOption(); //go to login/register page res.close(); con.close(); stmt.close(); m.close(); return; } } View_All_Manuscripts(editorID); EditorOptions(editorID); res.close(); con.close(); stmt.close(); m.close(); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } else { System.out.println("-------------------------------------------------------------------------------------"); System.out.println("Enter a valid ID: "); chooseLoginMode(); sc.close(); m.close(); return; } } /** Helper Method for function chooseLogin Mode for a reviewer to login with his unique reviewer ID * Handles error cases when the user enters invalid or wrong information * @param reviewerID */ public static void ReviewerLoginMode (int reviewerID) { Scanner sc = new Scanner (System.in); Scanner m = new Scanner (System.in); System.out.println("reviewer ID Entered is: "+reviewerID); System.out.println("\n\nConfirm if your reviewer ID is correctly entered (YES OR NO):\n"); System.out.println("\n------------------------------------------------------------------------------------------"); String resp = sc.nextLine(); resp = resp.toLowerCase(); if (resp.equals("yes")){ try { Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "SELECT * FROM Reviewer WHERE ReviewerID=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, reviewerID); ResultSet res = stmt.executeQuery(); int check =0; //to check if the user actually has a valid unique id while (res.next()){ System.out.println("WELCOME, LOG IN SUCCESSFUL! 
"); System.out.print("REVIEWER NAME: "+res.getObject(4)+ " "+res.getObject(5)); check=1; //user id is valid } if (check==0) { // user id is not valid System.out.println("Invalid ID, still want to login again (Yes/no)?"); String ans = m.nextLine(); ans=ans.toLowerCase(); if (ans.equals("yes")) { chooseLoginMode(); res.close(); con.close(); stmt.close(); m.close(); return; } else { startOption(); //go to login/register page res.close(); con.close(); stmt.close(); m.close(); return; } } ArrayList<Integer> mscripts = View_Assigned_Manuscripts(reviewerID); //get list of all manuscripts for a reviewer Get_Manuscripts(mscripts); ReviewerOptions(reviewerID); res.close(); con.close(); stmt.close(); m.close(); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } else { System.out.println("-------------------------------------------------------------------------------------"); System.out.println("Enter a valid ID: "); chooseLoginMode(); sc.close(); m.close(); return;} } /**Helper method to the method choose login mode for a user who is an author. 
This method handles error cases * pretty well * @param authorID */ public static void AuthorloginMode (int authorID) { Scanner sc = new Scanner (System.in); Scanner m = new Scanner (System.in); System.out.println("Author ID Entered is: "+authorID); System.out.println("\n\nConfirm if your Author ID is correctly entered (YES OR NO):\n"); System.out.println("\n------------------------------------------------------------------------------------------"); String resp = sc.nextLine(); resp = resp.toLowerCase(); if (resp.equals("yes")){ try { Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "SELECT * FROM Author WHERE AuthorID=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, authorID); ResultSet res = stmt.executeQuery(); int check =0; //to check if the user actually has a valid unique id while (res.next()){ System.out.println("AUTHOR LOG IN SUCCESSFUL, WELCOME!\n\n"); System.out.println("FULLNAME: "+ res.getObject(5)+" "+ res.getObject(6)); System.out.println("\nMAILING ADDRESS: "+ res.getObject(2)); check=1; //user id is valid } if (check==1) { ManuscriptStatus(authorID); AuthorOptions(authorID); res.close(); con.close(); m.close(); sc.close(); stmt.close(); return; } else { // user id is not valid System.out.println("Invalid ID, still want to login again (Yes/no)?"); String ans = m.nextLine(); ans=ans.toLowerCase(); if (ans.equals("yes")) { chooseLoginMode(); res.close(); con.close(); sc.close(); m.close(); stmt.close(); return; } else { startOption(); //go to login/register page res.close(); con.close(); sc.close(); m.close(); stmt.close(); return; } } } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } else { System.out.println("-------------------------------------------------------------------------------------"); System.out.println("Enter a valid AuthorID: "); chooseLoginMode(); sc.close(); m.close(); return; } } /** Method to display and provide options for an editor 
once they login in so that he can * 1)view status, 2) assign 3) reject, 4)accept, 5) typeset a manuscript, 6)schedule, 7)publish an issue * Option 8 also allows the user to exit and return to the menu of choices * @param editorID * returns void */ public static void EditorOptions (int editorID) { Scanner sc = new Scanner (System.in); System.out.print("\n\n\nChoose what you want to do from this MENU: "); System.out.print("\n1. VIEW ALL MANUSCRIPT STATUS \n2. ASSIGN MANUSCRIPT TO A REVIEWER \n3. REJECT MANUSCRIPT " + "\n4. ACCEPT MANUSCRIPT"+ "\n5. TYPESET MANUSCRIPT "+"\n6. SCHEDULE ISSUE"+ "\n7. PUBLISH ISSUE" + "\n8. VIEWS"+ "\n9. EXIT\n\n"); String ans = sc.nextLine(); /*If menu option is 1, view all manuscripts*/ if(ans.equals("1")) { View_All_Manuscripts(editorID); EditorOptions(editorID); /*if option is 2, assign manuscript to reviewer*/ }else if (ans.equals("2")){ try { Scanner m = new Scanner (System.in); System.out.println("ENTER THE ID OF THE MANUSCRIPT TO BE ASSIGNED: "); System.out.println("If you have forgotten the ID, press 0 to return to menu and check status: "); int idManuscript = m.nextInt(); if (idManuscript!=0){ // if you didn't forget id Assign_To_Reviewers(idManuscript, editorID); //assign to reviewers } EditorOptions(editorID); } catch (Exception ec){ System.out.println("ERROR: Please Enter valid integer for ID"); }finally { EditorOptions(editorID); } //if option is 3, reject the manuscript (should be submitted or under review status) }else if (ans.equals("3")){ //First, see the reviews of the manuscript, then decide try{ Scanner m1 = new Scanner (System.in); System.out.println("ENTER THE ID OF THE MANUSCRIPT TO REJECT: "); System.out.println("If you have forgotten the ID, press 0 to return to menu and CHECK STATUS: "); int idManuscript = m1.nextInt(); //check to see status of manuscript, if under review, then it can be accepted String status = get_Manuscript_Status(idManuscript, editorID); if (status.equals("Under Review") || 
status.equals("submitted")){ if (idManuscript!=0){ // if you didn't forget id get_Manuscript_Feedback(idManuscript, editorID); Scanner m2 = new Scanner(System.in); System.out.println("\nAre you sure you want to reject (yes/no) ?"); String rep = m2.nextLine(); rep=rep.toLowerCase(); if (rep.equals("yes")){ Update_Status(idManuscript, "Rejected"); Update_Timestamp(idManuscript); } } EditorOptions(editorID); } else { System.out.println("CAN ONLY REJECT IF STATUS IS SUBMITTED OR UNDER REVIEW: "); } } catch (Exception ec){ System.out.println("ERROR: Please Enter valid integer for ID"); }finally{ EditorOptions(editorID); } //If option 4, you can accept the manuscript for publication if it has at least 3 reviews }else if (ans.equals("4")){ //First, see the reviews of the manuscript, and get count try{ Scanner m1 = new Scanner (System.in); System.out.println("ENTER THE ID OF THE MANUSCRIPT TO REVIEW FOR ACCEPTANCE: "); System.out.println("If you have forgotten the ID, press 0 to return to menu and CHECK STATUS: "); int idManuscript = m1.nextInt(); //check to see status of manuscript, if under review, then it can be accepted String status = get_Manuscript_Status(idManuscript, editorID); if (status.equals("Under Review")){ if (idManuscript!=0){ // if you didn't forget id int val=get_Manuscript_Feedback(idManuscript, editorID); if (val>=3){ Scanner m2 = new Scanner(System.in); System.out.println("\nAre you sure you want to accept (yes/no) ?"); String rep = m2.nextLine(); rep=rep.toLowerCase(); if (rep.equals("yes")){ Update_Status(idManuscript, "Accepted"); Update_Timestamp(idManuscript); //update timestamp }else { System.out.println("DECIDED NOT TO ACCEPT AT THE MOMENT"); } } } EditorOptions(editorID); } else { System.out.println("CAN ONLY ACCEPT IF IS UNDER REVIEW FOR ACCEPTANCE: "); } } catch (Exception ec){ System.out.println("ERROR: Please Enter valid integer for ID"); }finally{ EditorOptions(editorID); } //Set status of manuscript to typeset. 
Should be an accepted manuscript }else if (ans.equals("5")){ try{ Scanner m1 = new Scanner (System.in); System.out.println("ENTER THE ID OF THE MANUSCRIPT TO TYPESET: "); System.out.println("If you have forgotten the ID, press 0 to return to menu and CHECK MANUSCRIPT STATUS: "); int idManuscript = m1.nextInt(); if (idManuscript!=0){ Type_Set(idManuscript, editorID); } } catch (Exception ec){ System.out.println("ERROR: Please Enter valid integer for ID"); }finally{ EditorOptions(editorID); } //Schedule issue for publication }else if (ans.equals("6")){ try{ Scanner m1 = new Scanner (System.in); System.out.println("ENTER THE ID OF THE MANUSCRIPT TO SCHEDULED: "); System.out.println("If you have forgotten the ID, press 0 to return to menu and CHECK MANUSCRIPT STATUS: "); int idManuscript = m1.nextInt(); String status = get_Manuscript_Status(idManuscript, editorID); if (status.equals("TypeSet")){ if (idManuscript!=0){ schedule_Manuscript(idManuscript); } } }catch (Exception ec){ System.out.println("ERROR: Please Enter valid integer for ID"); }finally{ EditorOptions(editorID); } }else if (ans.equals("7")){ try{ Scanner m1 = new Scanner (System.in); System.out.println("ENTER THE ID MANUSCRIPT THAT IS LINKED TO SCHEDUELED ISSUE: "); System.out.println("If you have forgotten the ID, press 0 to return to menu and CHECK MANUSCRIPT STATUS: "); int idManuscript = m1.nextInt(); String status = get_Manuscript_Status(idManuscript, editorID); if (status.equals("Scheduled")){ if (idManuscript!=0){ publish_Issue(idManuscript); Update_Status(idManuscript, "published"); } } }catch (Exception ec){ System.out.println("ERROR: Please Enter valid integer for ID"); }finally{ EditorOptions(editorID); } }else if (ans.equals("8")){ Editor_Views(); EditorOptions(editorID); }else if (ans.equals("9")){ startOption(); //return to the starting page }else { EditorOptions(editorID); //repeat call if entry is incorrect } sc.close(); } public static void Author_Views () { try{ Connection con = 
DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "Select * From PublishedIssues"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.execute(); ResultSet res = stmt.executeQuery(); System.out.println("PUBLISHED ISSUE VIEW\n"); System.out.println("----------------------------------------------------------------------------------------"); System.out.printf("%1$-15s %2$-15s %3$-15s\n", "Publ. Year", "Publ. #", "Title"); System.out.println("----------------------------------------------------------------------------------------"); while (res.next()){ System.out.format("%1$-15s %2$-15s %3$-15s\n", res.getObject(1), res.getObject(2), res.getObject(3)); } con.close(); res.close(); stmt.close(); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } public static void Editor_Views () { try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "Select * From LeadAuthorManuscripts"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.execute(); ResultSet res = stmt.executeQuery(); System.out.println("LEAD AUTHOR MANUSCRIPT VIEW\n"); System.out.println("----------------------------------------------------------------------------------------"); System.out.printf("%1$-15s %2$-15s %3$-15s %4$-15s\n", "LastName", "AuthorID", "idManuscript", "Man_Status"); System.out.println("----------------------------------------------------------------------------------------"); while (res.next()){ System.out.format("%1$-15s %2$-15s %3$-15s %4$-15s\n", res.getObject(1), res.getObject(2), res.getObject(3),res.getObject(4)); } QUERY = "Select * From PublishedIssues"; stmt = con.prepareStatement(QUERY); stmt.execute(); res = stmt.executeQuery(); System.out.println("/nPUBLISHED ISSUE VIEW\n"); System.out.println("----------------------------------------------------------------------------------------"); System.out.printf("%1$-15s %2$-15s %3$-15s\n", "Publ. Year", "Publ. 
#", "Title"); System.out.println("----------------------------------------------------------------------------------------"); while (res.next()){ System.out.format("%1$-15s %2$-15s %3$-15s\n", res.getObject(1), res.getObject(2), res.getObject(3)); } QUERY = "Select * From ReviewQueue"; stmt = con.prepareStatement(QUERY); stmt.execute(); res = stmt.executeQuery(); System.out.println("\nREVIEW QUEUE VIEW\n"); System.out.println("----------------------------------------------------------------------------------------"); System.out.printf("%1$-15s %2$-15s %3$-15s %4$-15s\n", "LastName", "Primary AuthorID", "idManuscript", "Reviewer_LastName"); System.out.println("----------------------------------------------------------------------------------------"); while (res.next()){ System.out.format("%1$-15s %2$-15s %3$-15s %4$-15s\n", res.getObject(1), res.getObject(2), res.getObject(3),res.getObject(5)); } QUERY = "Select * From WhatsLeft"; stmt = con.prepareStatement(QUERY); stmt.execute(); res = stmt.executeQuery(); System.out.println("\nWHATS LEFT VIEW\n"); System.out.println("----------------------------------------------------------------------------------------"); System.out.printf("%1$-15s %2$-15s %3$-20s\n", "idManuscript", "Man_Status", "DateReceived"); System.out.println("----------------------------------------------------------------------------------------"); while (res.next()){ System.out.format("%1$-15s %2$-15s %3$-15s\n", res.getObject(1), res.getObject(2), res.getObject(3)); } QUERY = "Select * From ReviewStatus"; stmt = con.prepareStatement(QUERY); stmt.execute(); res = stmt.executeQuery(); System.out.println("\nREVIEW STATUS VIEW\n"); System.out.println("-----------------------------------------------------------------------------------------------------------------------------------------------"); System.out.printf("%1$-23s %2$-15s %3$-25s %4$-15s %5$-15s %6$-15s %7$-15s %8$-15s\n", "Timestamp","idManuscript", "Title", "Appropriateness", "Clarity", 
"Methodology", "Contribution","Recommendation"); System.out.println("-----------------------------------------------------------------------------------------------------------------------------------------------"); while (res.next()){ System.out.format("%1$-23s %2$-15s %3$-25s %4$-15s %5$-15s %6$-15s %7$-15s %8$-15s\n", res.getObject(1), res.getObject(2), res.getObject(3),res.getObject(4), res.getObject(5), res.getObject(6), res.getObject(7),res.getObject(8)); } con.close(); res.close(); stmt.close(); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } /** Method to publish an issue, and insert the date, correct number of pages etc * * @param idManuscript */ public static void publish_Issue (int idManuscript) { //first check if there is a published issue Random rand = new Random(); int od = rand.nextInt(10); int manIssue = get_Largest_Issue_Manuscript(); //largest issue in the manuscript table int unpubID = get_Next_UnPublishedIssue(); //largest possible next value of issue ID int numPages = find_Num_Of_Pages(manIssue); //find total number of pages in largest unpublished issue int pages = find_Total_Pages(idManuscript); //find total number of pages in the to be scheduled manuscript if (manIssue>unpubID-1) { //implying that there might still be space in an unpublished issue if (pages+numPages<100) { Insert_Issue(manIssue, pages+numPages, idManuscript);} else { Insert_Issue(unpubID, pages, idManuscript);} }else { Insert_Issue(unpubID+od, pages, idManuscript);} } /** Helper method to insert an issue, and gather the dates,year etc * * @param issueID * @param numPages */ public static void Insert_Issue (int issueID, int numPages,int idManuscript) { //to get publication date, year and month String date = dtf.format(now); Integer year = Integer.parseInt(date.substring(0, 4)); Integer month= Integer.parseInt(date.substring(5, 7)); Integer period = 0; if (month<=3) { period=1;} //fall } else if (month>3 && month<=6) { period=2 ;} else if 
(month>6 && month <=9) { period = 3; } else {period = 4; } try { Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "INSERT INTO Issue (Publication_Year,Number_Of_Pages,Publication_Number, PrintDate, IssueID)"+"VALUES(?,?,?,?,?)"; PreparedStatement preparedStmt = con.prepareStatement(QUERY); preparedStmt.setInt(1, year); preparedStmt.setInt(2, numPages); preparedStmt.setInt(3,period); preparedStmt.setString(4, date); preparedStmt.setInt(5, issueID); preparedStmt.execute(); System.out.println("--ISSUE PUBLICATION SUCCESSFUL--"); QUERY = "UPDATE Manuscript SET IssueID=? where idManuscript=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, issueID); stmt.setInt(2, idManuscript); stmt.execute(); stmt.close(); con.close(); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } /** Helper method for the editor login mode option number 6 (Schedule). The function updates the * Manuscript status to schedule and inputs its issueID, page number and updates its shhedule to * "scheduled". It also checks to see that each Issue is less than 100 paged * @param idManuscript */ public static void schedule_Manuscript (int idManuscript) { //first check if there is a published issue int manIssue = get_Largest_Issue_Manuscript(); int unpub = get_Next_UnPublishedIssue(); if (manIssue>unpub-1) { //implying that there might still be space in an unpublished issue int numPages = find_Num_Of_Pages(manIssue); int pages = find_Total_Pages(idManuscript); if (pages+numPages>100) { Update_Man_IssueID(unpub, idManuscript, 1, "Scheduled", 1); } else { Update_Man_IssueID(manIssue, idManuscript, 2, "Scheduled", pages); } }else { Update_Man_IssueID(unpub, idManuscript, 1, "Scheduled", 1); } } /** Helper function for the above function. 
The function perfomms the update functions described above * * @param issueID * @param idManuscript * @param order * @param status * @param pageNum */ public static void Update_Man_IssueID (int issueID, int idManuscript, int order, String status,int pageNum) { try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "UPDATE Manuscript SET Man_Status=?,Order_In_Issue=?, Page_Number_In_Issue=? WHERE idManuscript=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setString(1, status); stmt.setInt(2, order); stmt.setInt(3, pageNum); stmt.setInt(4, idManuscript); stmt.execute(); System.out.println("--Manuscript UPDATED --"); stmt.close(); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } /**Algorithm to get the nextID for an unpublished issue in the Database. The algorithm creates the ID by incrementing the * maximum ID from the published issue. This ID is entered in the Manuscript table for a scheduled issue * @return */ public static int get_Next_UnPublishedIssue() { int issue = 0; try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "Select MAX(IssueID) From Issue"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.execute(); ResultSet res = stmt.executeQuery(); while (res.next()){ issue = (int) res.getObject(1); //get the reviewer ID } con.close(); stmt.close(); res.close(); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } return issue+1; } /** Helper method to get largest issueID in manuscripts * * @return id of largest manuscript */ public static int get_Largest_Issue_Manuscript() { int issue = 0; try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "Select MAX(IssueID) From Manuscript"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.execute(); ResultSet res = stmt.executeQuery(); while (res.next()){ issue = (int) res.getObject(1); //get the 
reviewer ID } con.close(); stmt.close(); res.close(); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } return issue; } /**Helper method to find number of pages in a manuscript using IssueID * * @param issueID * @return number of pages */ public static int find_Num_Of_Pages (int issueID){ int sum=0; try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "SELECT Number_of_Pages FROM Manuscript WHERE IssueID = ?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, issueID); ResultSet res = stmt.executeQuery(); while (res.next()){ sum= sum+(int)(res.getObject(1)); } } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } return sum; } /** Helper Method to find total number of pages in a manuscript * * @param manID * @return number of pages */ public static int find_Total_Pages (int manID){ int sum=0; try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "SELECT Number_of_Pages FROM Manuscript WHERE idManuscript = ?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, manID); ResultSet res = stmt.executeQuery(); while (res.next()){ res.getObject(1); } } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } return sum; } /** Helper method for option 5 of editor mode. This method sets the manuscript status to Typeset and the manuscript is * ready for scheduling. It inputs the page numbers as well. * @param idManuscript * @param idEditor */ public static void Type_Set(int idManuscript, int idEditor) { /*To Typeset, the status of the Manuscript should be accepted*/ String status = get_Manuscript_Status(idManuscript, idEditor); if (status.equals("Accepted")){ //ready for Typesetting try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "UPDATE Manuscript SET Man_Status=? 
WHERE idManuscript=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setString(1, "TypeSet" ); stmt.setInt(2, idManuscript); stmt.execute(); Scanner sc = new Scanner (System.in); System.out.println("ENTER FINAL NUMBER OF MANUSCRIPT PAGES: "); int numPages = 200; while (numPages>100){ System.out.println("Number of Pages Should be less thanor equal to 100, Enter page number less than 100!"); numPages=sc.nextInt(); } QUERY = "UPDATE Manuscript SET Number_Of_Pages=? WHERE idManuscript=?"; stmt = con.prepareStatement(QUERY); stmt.setInt(1, numPages ); stmt.setInt(2, idManuscript); stmt.execute(); System.out.println("MANUSCRIPT TYPESET AND PAGES NUMBER INSERTED, READY FOR SCHEDULING"); stmt.close(); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } } /** Helper method to update dates on manuscript * * @param Manuscript */ public static void Update_Timestamp (int idManuscript) { System.out.println("Date Received: "+dtf.format(now)); //e.g 2016-11-16 12:08:43 try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "UPDATE Manuscript SET DateReceived=? WHERE idManuscript=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setString(1,dtf.format(now) ); stmt.setInt(2, idManuscript); stmt.execute(); System.out.println("--TIMESTAMP UPDATED --"); stmt.close(); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } /** Helper Method to get the feedback of a manuscript * * @param idManuscript * @param editorID * @returns the number of reviews that a manuscript has. 
*/ public static int get_Manuscript_Feedback (int idManuscript, int editorID) { int check =0; try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "SELECT * FROM Feedback WHERE Manuscript_idManuscript=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, idManuscript); ResultSet res = stmt.executeQuery(); System.out.println("HERE IS THE FEEDBACK OF THE MANUSCRIPT FROM ALL REVIEWERS\n"); System.out.println("-------------------------------------------------------------------------------------------------------"); System.out.printf("%1$-15s %2$-15s %3$-15s %4$-20s %5$-15s \n", "Approp.", "Clarity", "Methodology","Contribution", "Recommendation"); System.out.println("-------------------------------------------------------------------------------------------------------"); while (res.next()){ System.out.format("%1$-15s %2$-15s %3$-15s %4$-20s %5$-15s \n", res.getObject(1), res.getObject(2), res.getObject(3), res.getObject(4), res.getObject(5)); check=check+1; //manuscripts present in system } if (check==0) { //no manuscripts in system System.out.println("Manuscript not yet reviewed!"); } } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } return check; } /**Helper Method to assign a manuscript to several reviewers under the control of the editor. 
* @param idManuscript * @param idEditor * returns void */ public static void Assign_To_Reviewers(int idManuscript, int idEditor) { /*Get status of manuscript, if the manuscript does not have status 'submitted or under review' then the * manuscript cannot be assigned to a reviewer */ String status = get_Manuscript_Status(idManuscript,idEditor ); if (status.equals("submitted")) { //need to fix case for under review //Get RICode of the manuscript, and use it to identify reviewers to review the manuscript int RICode = get_Manuscript_RICode (idManuscript); //get all reviewers with corresponding RICode ArrayList<Integer> reviewers = get_Reviewers_For_RICode(RICode); System.out.println("\nAvailable ReviewersID List: "+reviewers+"\n"); if (reviewers.size()<3) { System.out.println("Not enough reviewers to review manuscript, need at least 3 reviewers: "); EditorOptions(idEditor); return; } Scanner sc = new Scanner(System.in); int count=0; int peak =reviewers.size(); while (count<peak){ Assign_Manuscript_To_Reviewer(reviewers.get(count), idManuscript); Update_Manuscript_Editor (idManuscript, idEditor); count=count+1;} } else { System.out.println("Manuscript Already Assigned to Reviewers! ");} EditorOptions(idEditor); //return to main menu } /**Helper method to update the Editor who assigned reviewers to the manuscript * @param idManuscript * @param idEditor * return void */ public static void Update_Manuscript_Editor (int idManuscript, int idEditor){ try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "UPDATE Manuscript SET idEditor=? 
WHERE idManuscript=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, idEditor); stmt.setInt(2, idManuscript); stmt.execute(); System.out.println("--MANUSCRIPT EDITOR FIELD UPDATED ---"); stmt.close(); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } /**Helper method to assign a specific manuscript to a reviewer * Inserts the reviewer to Reviewer Group which is directly linked with manuscript * @param reviewerID * @param idManuscript */ public static void Assign_Manuscript_To_Reviewer (int reviewerID, int idManuscript) { System.out.println("Date Received: "+dtf.format(now)); //e.g 2016-11-16 12:08:43 try { Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "INSERT INTO Reviewer_Group (ReviewerID,DateReceived,idManuscript)"+"VALUES(?,?,?)"; PreparedStatement preparedStmt = con.prepareStatement(QUERY); preparedStmt.setInt(1, reviewerID); preparedStmt.setString(2, dtf.format(now)); preparedStmt.setInt(3, idManuscript); preparedStmt.execute(); System.out.println("--REVIEWER ASSIGNMENT SUCCESSFUL--"); /*Get manuscript status, and check to see if it should be updated to received*/ String status =get_Manuscript_Status(idManuscript, reviewerID); if (status.equals("submitted")) { status = "Under Review"; Update_Status(idManuscript, status); //update manuscript to under review } con.close(); preparedStmt.close(); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage());} } /** Helper method to update manuscript status, * @param idManuscript * @param status * returns void */ public static void Update_Status (int idManuscript, String status) { try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "UPDATE Manuscript SET Man_Status=? 
WHERE idManuscript=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setString(1, status); stmt.setInt(2, idManuscript); stmt.execute(); System.out.println("--STATUS UPDATED TO: "+status+"--"); stmt.close(); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage());} } /**Helper method to view all the Manuscripts in the system ordered by status, and the manuscript id * @param editorID * returns void */ public static void View_All_Manuscripts(int editorID) { try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); Scanner out = new Scanner (System.in); QUERY = "SELECT * FROM Manuscript ORDER BY Man_Status, idManuscript"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.execute(); ResultSet res = stmt.executeQuery(); int check = 0; //to check is there any manuscripts in the system System.out.println("HERE ARE ALL THE MANUSCRIPTS IN THE SYSTEM\n\n"); System.out.println("------------------------------------------------------------"); System.out.printf("%1$-15s %2$-20s %3$-15s \n", "ManuscriptID", "TITLE", "STATUS"); System.out.println("------------------------------------------------------------"); while (res.next()){ System.out.format("%1$-15s %2$-20s %3$-15s\n", res.getObject(1), res.getObject(4), res.getObject(3)); check=1; //manuscripts present in system } if (check==0) { //no manuscripts in system System.out.println("No manuscripts in System! 
(Press any key to go back)"); } } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage());} } /** Method to display and provide options to an author so that he can * 1)view status, 2) submit manuscript 3) retract manuscript * Option 4 also allows the author to exit and return to the menu of choices * @param editorID * returns void */ public static void AuthorOptions (int authorID) { Scanner sc = new Scanner (System.in); Scanner out = new Scanner (System.in); Scanner out1 = new Scanner (System.in); //need to put output from status window System.out.print("\n\n\nChoose what you want to do from this MENU: "); System.out.print("\n1. VIEW MANUSCRIPTS' STATUS \n2. SUBMIT MANUSCRIPT \n3. RETRACT MANUSCRIPT \n4. VIEWS \n5. EXIT \n\n"); String ans = sc.nextLine(); if(ans.equals("1")) { ManuscriptStatus(authorID); AuthorOptions(authorID); }else if (ans.equals("2")){ Scanner ss = new Scanner(System.in); System.out.println("Enter manuscript title below: "); String title = out.nextLine(); System.out.println("Enter your current affiliation below: "); String aff = out.nextLine(); System.out.println("Enter the RICode for the manuscript: "); int RICode = out1.nextInt(); System.out.println("Enter the name of co-Author 1: If none input 0"); String auth1 = out.nextLine(); System.out.println("Enter the name of co-Author 2: If none input 0"); String auth2 = out.nextLine(); System.out.println("Enter the name of co-Author 3: If none input 0"); String auth3 = out.nextLine(); System.out.println("Enter the file text: "); String filename = out.nextLine(); System.out.println("The data entered is shown below: "); System.out.print("\nMANUSCRIPT TITLE: "+title+ "\nAFFILIATION: "+aff+"\nRICode: "+RICode+ "\nAuthor2: "+auth1+ "\nAuthor3: "+auth2+ "\nAuthor4: "+auth3+ "\nFILENAME\n\n"+filename); System.out.println("\nIs everything entered correctly"); String reply = ss.nextLine(); reply = reply.toLowerCase(); if (reply.equals("yes")){ int idManuscript= 
Manuscript_Insert(authorID,title, filename,RICode); //insert manuscript, and retain id Author_Group_Insert(authorID, idManuscript, 1); //insert primary author into group System.out.println("\nYour submitted manuscript ID is : "+idManuscript); System.out.println("\nSUBMISSION SUCCESSFUL!"); } AuthorOptions(authorID); ss.close(); }else if (ans.equals("3")){ Scanner s = new Scanner(System.in); System.out.println("\nEnter ID of Manuscript you want to delete?"); int idManuscript = s.nextInt(); String status = get_Manuscript_Status(idManuscript, authorID); if (status==null) { System.out.println("\nMANUSCRIPT DOES NOT EXIST IN SYSTEM"); } else if (status.equals("typeset")){ //dont delete if in typesetting mode System.out.println("\nIN TYPESETTED STATUS, CAN'T BE DELETED!"); } else { Manuscript_Delete(idManuscript,authorID); } AuthorOptions(authorID); s.close(); }else if (ans.equals("4")){ Author_Views(); AuthorOptions(authorID); }else if (ans.equals("5")){ startOption(); //go to main page }else { AuthorOptions(authorID); //repeat call if entry is incorrect } sc.close(); out.close(); out1.close(); } /** Helper method to delete a manuscript. Since author group uses a foreign key, before deleting a * manuscript, the Author_Group should be deleted as well. * * @param idManuscript * @param authorID */ public static void Manuscript_Delete (int idManuscript, int authorID) { try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); /*manuscript is foreign key to Author_Group hence to delete * it we first have to delete in Author_Group */ QUERY = "DELETE FROM Author_Group where idManuscript=? and AuthorID=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, idManuscript); stmt.setInt(2, authorID); stmt.execute(); System.out.println("--AUTHOR_GROUP DELETED--"); stmt.close(); QUERY = "DELETE FROM Manuscript where idManuscript=? 
and PrimaryAuthorID=?"; stmt = con.prepareStatement(QUERY); stmt.setInt(1, idManuscript); stmt.setInt(2, authorID); stmt.execute(); System.out.println("--MANUSCRIPT SUCCESSFULLY DELETED--"); stmt.close(); con.close(); return; } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); }catch (Exception ec){ System.out.println("Error: You can only delete your own manuscripts"); } } /** Helper method to insert a manuscript, and return its ID. USed in authorOptions * * @param pAuthorID * @param title * @param filename * @param RICode * @return */ public static int Manuscript_Insert (int pAuthorID, String title, String filename,int RICode) { int ans = 0; try { Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "INSERT INTO Manuscript (PrimaryAuthorID,Man_Status,Title,Man_Text,RICode)"+"VALUES(?,?,?,?,?)"; PreparedStatement preparedStmt = con.prepareStatement(QUERY); preparedStmt.setInt(1, pAuthorID); preparedStmt.setString(2, "submitted"); preparedStmt.setString(3, title); preparedStmt.setString(4,filename); preparedStmt.setInt(5, RICode); preparedStmt.execute(); System.out.println("--MANUSCRIPT SUBMISSION SUCCESSFUL--"); //retrieve manuscript id QUERY= "SELECT idManuscript FROM Manuscript where PrimaryAuthorID=? and Title=? 
and RICode=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, pAuthorID); stmt.setString(2, title); stmt.setInt(3, RICode); ResultSet res = stmt.executeQuery(); while (res.next()){ ans = (Integer)res.getObject(1); //get the id } con.close(); return ans; } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage());} return ans; } /** Helper method to Insert an author into an author group, which links author ID to Manuscrpt ID and * shows the order of the author * @param authorID * @param idManuscript * @param authorOrder */ public static void Author_Group_Insert (int authorID, int idManuscript, int authorOrder) { try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "INSERT INTO Author_Group (AuthorID, idManuscript,AuthorOrder)"+"VALUES(?,?,?)"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1,authorID); stmt.setInt (2, idManuscript); stmt.setInt(3,authorOrder); stmt.execute(); System.out.println("--AUTHOR INSERTION INTO GROUP SUCCESSFUL--"); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage());} } /** Helper method to get all the reviewers whose interest area is the RICode specified. 
* Input -> RICode * Output -> List with IDs of all reviewers with the specified interest field */ public static ArrayList <Integer> get_Reviewers_For_RICode (int RICode) { ArrayList<Integer> reviewList = new ArrayList<Integer>(); //list of reviewers try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "Select * From Area_of_Interest_Group where RICode=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, RICode); stmt.execute(); ResultSet res = stmt.executeQuery(); while (res.next()){ Integer reviewerID = (Integer) res.getObject(1); //get the reviewer ID reviewList.add(reviewerID); //add to list } con.close(); stmt.close(); return reviewList; } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } return reviewList; } /**Helper method to get RICode for a manuscript. * Input -> Manuscript iD * Output -> RICode (if 0, then manuscript was not found) */ public static int get_Manuscript_RICode (int idManuscript) { int RICode =0; try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "Select * From Manuscript where idManuscript=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, idManuscript); stmt.execute(); ResultSet res = stmt.executeQuery(); int check =0; //to check if manuscript is actually in: while (res.next()){ RICode = (int) res.getObject(12); check=1; //manuscript in } if (check==0) { System.out.println("Manuscript not in System"); stmt.close(); con.close(); return 0; } con.close(); stmt.close(); return RICode; } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } return 0; } /** Helper method to get the status of a Manuscript * */ public static String get_Manuscript_Status (int idManuscript, int authorID) { String status =""; try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "Select * From Manuscript where idManuscript=?"; PreparedStatement 
stmt = con.prepareStatement(QUERY); stmt.setInt(1, idManuscript); stmt.execute(); ResultSet res = stmt.executeQuery(); int check =0; //to check if manuscript is actually in: while (res.next()){ status = (String) res.getObject(3); check=1; //manuscript in } if (check==0) { System.out.println("Manuscript already not in System"); stmt.close(); con.close(); return null; } con.close(); stmt.close(); return status; } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } return null; } /** Helper method to get the status of a manuscript after inserting the authorID * * @param authorID */ public static void ManuscriptStatus(int authorID) { try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); Scanner out = new Scanner (System.in); QUERY = "Select * From Manuscript where PrimaryAuthorID=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, authorID); stmt.execute(); ResultSet res = stmt.executeQuery(); int check =0; //to check if the user actually has a manuscript System.out.println("HERE ARE YOUR MANUSCRIPTS IN THE SYSTEM\n\n"); System.out.println("----------------------------------------------------------------------------------------"); System.out.printf("%1$-15s %2$-20s %3$-15s %4$-20s\n", "ManuscriptID", "TITLE", "STATUS", "NUMBER OF PAGES"); System.out.println("----------------------------------------------------------------------------------------"); while (res.next()){ System.out.format("%1$-15s %2$-20s %3$-15s %4$-20s\n", res.getObject(1), res.getObject(4), res.getObject(3), res.getObject(6)); check=1; //user id is valid } if (check==0) { System.out.println("No manuscripts in System! (Press any key to go back)"); } else { System.out.println("\n\nPress any key to go back to options"); } } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } /** Method to register into the application. 
Gives options to register as an author, editor or a reviewer * */ public static void chooseRegisterMode () { Scanner sc = new Scanner (System.in); System.out.println("\n------------------------------------------------------------------------------------------"); System.out.println("\nRegister as on of these 3 options: Editor, Reviewer, Author"); System.out.println("\nType your in your choice and hit enter: \n"); String ans=sc.nextLine(); //input entered ans = ans.toLowerCase(); if (ans.equals("editor")) { System.out.println("Welcome to Editor mode"); registerEditor(); sc.close(); return; } else if (ans.equals("reviewer")) { System.out.println("Welcome to Reviewer mode"); registerReviewer(); sc.close(); return; } else if (ans.equals("author")) { System.out.println("Welcome to author mode"); registerAuthor(); sc.close(); return; } else { System.out.println("Please check your entry spelling, and type in a valid response\n"); System.out.println("\nDo you still want to register?"); ans=sc.nextLine(); ans=ans.toLowerCase(); if (ans.equals("yes")){ chooseRegisterMode(); sc.close(); return; } else { System.out.println("Thank you, Exiting App.."); sc.close(); return; } } } /** Method to register an editor * */ public static void registerEditor () { try { Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); Scanner sc = new Scanner (System.in); System.out.println("\nEnter ALL your personal details as requested by the prompts below:"); System.out.println("\nEnter your first name: "); String fname = sc.nextLine(); System.out.println("Enter your last name: "); String lname =sc.nextLine(); System.out.println("\nDetails entered: "); System.out.print("fname: "+fname+ "\nlname: "+lname); System.out.println("\n\nConfirm all details have been entered correctly\n"); System.out.println("Type Yes, or No to confirm\n"); String resp = sc.nextLine(); resp = resp.toLowerCase(); if (resp.equals("yes")){ QUERY = "INSERT INTO Editor 
(FirstName,LastName)"+"VALUES(?,?)"; //insert user into editor table //NOTE: I NEED TO CHECK IF ENTERED USER EXITS ALREADY IS IN THE DATABASE, AND FOR NULL CASES PreparedStatement preparedStmt = con.prepareStatement(QUERY); preparedStmt.setString(1, fname); preparedStmt.setString(2, lname); //execute the prepared statement preparedStmt.execute(); System.out.println("--EDITOR REGISTRATION SUCCESSFUL--"); //get id of this editor QUERY= "SELECT idEditor FROM Editor where FirstName=? and LastName=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setString(1, fname); stmt.setString(2, lname); ResultSet res = stmt.executeQuery(); while (res.next()){ System.out.println("\nYour unique editor id is: "); System.out.format("%d", res.getObject(1)); } System.out.println("\nYou can login with your EditorID now: "); chooseLoginMode(); //go to login page sc.close(); con.close(); preparedStmt.close(); stmt.close(); res.close(); return; } else { System.out.println("-------------------------------------------------------------------------------------"); System.out.println("Enter correct details again"); registerEditor(); sc.close(); con.close(); stmt.close(); res.close(); return; } } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } /** Method to register an author * */ public static void registerAuthor () { try { Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); Scanner sc = new Scanner (System.in); System.out.println("\nEnter ALL your personal details as requested by the prompts below:"); System.out.println("\nEnter your first name: "); String fname = sc.nextLine(); System.out.println("Enter your last name: "); String lname =sc.nextLine(); System.out.println("Enter your email: "); String email = sc.nextLine(); System.out.println("Enter your address: "); String address = sc.nextLine(); System.out.println("Enter your current affiliation"); String affiliation = sc.nextLine(); System.out.println("\nDetails 
entered: "); System.out.print("fname: "+fname+ "\nlname: "+lname+"\nemail: "+email+ "\naddress: "+address+ "\naffiliation: "+affiliation); System.out.println("\n\nConfirm all details have been entered correctly\n"); System.out.println("Type Yes, or No to confirm\n"); String resp = sc.nextLine(); resp = resp.toLowerCase(); if (resp.equals("yes")){ QUERY = "INSERT INTO Author (MailingAddress, EmailAddress, Affiliation, FirstName,LastName)"+"VALUES(?,?,?,?,?)"; //insert user into editor table //NOTE: I NEED TO CHECK IF ENTERED USER EXITS ALREADY IS IN THE DATABASE, AND FOR NULL CASES PreparedStatement preparedStmt = con.prepareStatement(QUERY); preparedStmt.setString(1, address); preparedStmt.setString(2, email); preparedStmt.setString(3, affiliation); preparedStmt.setString(4, fname); preparedStmt.setString(5, lname); //execute the prepared statement preparedStmt.execute(); System.out.println("--AUTHOR REGISTRATION SUCCESSFUL--"); //get id of this author QUERY= "SELECT AuthorID FROM Author where EmailAddress=? and FirstName=? 
and LastName=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setString(1, email); stmt.setString(2, fname); stmt.setString(3, lname); ResultSet res = stmt.executeQuery(); while (res.next()){ System.out.println("\nYour unique Author id is: "); System.out.format("%d", res.getObject(1)); } System.out.println("\nYou can login with your AuthorID now: "); chooseLoginMode(); //go to login page sc.close(); con.close(); preparedStmt.close(); stmt.close(); res.close(); return; } else { System.out.println("-------------------------------------------------------------------------------------"); System.out.println("Enter correct details again"); registerEditor(); sc.close(); con.close(); stmt.close(); res.close(); return; } } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } /** Method to register a reviewer * */ public static void registerReviewer() { try { Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); Scanner sc = new Scanner (System.in); Scanner s = new Scanner (System.in); System.out.println("\nEnter ALL your personal details as requested by the prompts below:"); System.out.println("\nEnter your first name: "); String fname = sc.nextLine(); System.out.println("Enter your last name: "); String lname =sc.nextLine(); System.out.println("Enter your first RICode: "); Integer RICode1 = s.nextInt(); System.out.println("Enter your 2nd RICode, if none input 0: "); Integer RICode2 = s.nextInt(); System.out.println("Enter your 3rd RICode, if none input 0: "); Integer RICode3 = s.nextInt(); System.out.println("Enter your email: "); String email = sc.nextLine(); System.out.println("Enter your Affiliation: "); String affiliation = sc.nextLine(); System.out.println("\nDetails entered: "); System.out.print("fname: "+fname+ "\nlname: "+lname+"\nemail: "+email+"\naffiliation: "+affiliation +"\nRICode1: "+RICode1+ "\nRICode2: "+RICode2+ "\nRICode3: "+RICode3); System.out.println("\n\nConfirm all details have 
been entered correctly\n"); System.out.println("Type Yes, or No to confirm\n"); String resp = sc.nextLine(); resp = resp.toLowerCase(); if (resp.equals("yes")){ QUERY = "INSERT INTO Reviewer (Email, Affiliation, FirstName,LastName)"+"VALUES(?,?,?,?)"; //insert user into editor table //NOTE: I NEED TO CHECK IF ENTERED USER EXITS ALREADY IS IN THE DATABASE, AND FOR NULL CASES PreparedStatement preparedStmt = con.prepareStatement(QUERY); preparedStmt.setString(1, email); preparedStmt.setString(2, affiliation); preparedStmt.setString(3, fname); preparedStmt.setString(4, lname); //execute the prepared statement preparedStmt.execute(); //get id of this reviewer QUERY= "SELECT ReviewerID FROM Reviewer where FirstName=? and LastName=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setString(1, fname); stmt.setString(2, lname); ResultSet res = stmt.executeQuery(); int id = 0; while (res.next()){ System.out.println("\nYour unique Reviewer id is: "); id = (int) res.getObject(1); System.out.format("%d", res.getObject(1)); } QUERY = "INSERT INTO Area_of_Interest_Group (ReviewerID, RICode) VALUES(?,?)"; PreparedStatement ps = con.prepareStatement(QUERY); ps.setInt(1, id); ps.setInt(2, RICode1); ps.execute(); if (RICode2 != 0){ QUERY= "INSERT INTO Area_of_Interest_Group (ReviewerID, RICode) VALUES(?,?)"; ps = con.prepareStatement(QUERY); ps.setInt(1, id); ps.setInt(2, RICode2); ps.execute(); } if (RICode3 != 0){ QUERY= "INSERT INTO Area_of_Interest_Group (ReviewerID, RICode) VALUES(?,?)"; ps = con.prepareStatement(QUERY); ps.setInt(1, id); ps.setInt(2, RICode3); ps.execute(); } System.out.println("\n--REGISTRATION SUCCESSFUL!!--"); System.out.println("\nYou can login with your Reviewer ID now: "); chooseLoginMode(); //go to login page sc.close(); s.close(); con.close(); preparedStmt.close(); ps.close(); stmt.close(); res.close(); return; } else { System.out.println("-------------------------------------------------------------------------------------"); 
System.out.println("Enter correct details again"); registerEditor(); sc.close(); s.close(); con.close(); stmt.close(); res.close(); return; } } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } /** Options for the reviewer, once he logs in. He can check status of manuscripts, resign and review manuscripts * * @param reviewerID */ public static void ReviewerOptions (int reviewerID) { Scanner sc = new Scanner (System.in); ArrayList<Integer> mscripts = View_Assigned_Manuscripts(reviewerID); //get list of all manuscripts for a reviewer System.out.print("\n\nChoose what you want to do from this MENU: "); System.out.print("\n1. VIEW YOUR ASSIGNED MANUSCRIPT STATUS \n2. REVIEW MANUSCRIPT (THEN ACCEPT OR REJECT IT)" + "\n3. RESIGN "+"\n4. VIEWS"+"\n5. EXIT\n"); System.out.println("\n------------------------------------------------------------------------------------------"); String ans = sc.nextLine(); /*If menu option is 1, view all manuscripts*/ if(ans.equals("1")) { Get_Manuscripts(mscripts); ReviewerOptions(reviewerID); //if option is 2, review }else if (ans.equals("2")){ Review_Manuscript(reviewerID); //review manuscript ReviewerOptions(reviewerID); //if option is 3, resign }else if (ans.equals("3")){ try { Scanner s = new Scanner (System.in); System.out.println("Enter you ID again inorder to resign?"); int Id = s.nextInt(); if (Id==reviewerID){ Resign_Reviewer(Id); } else { System.out.println("Please enter your actual ID to resign"); } } catch (Exception e) { System.out.println("Error: "+e.getMessage()); }finally { ReviewerOptions(reviewerID); } //if option 4, views }else if (ans.equals("4")){ Reviewer_Views(); ReviewerOptions(reviewerID); }else if (ans.equals("5")){ startOption(); //return to the starting page }else { ReviewerOptions(reviewerID); //repeat call if entry is incorrect } sc.close(); } public static void Reviewer_Views (){ try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = 
"Select * From PublishedIssues"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.execute(); ResultSet res = stmt.executeQuery(); System.out.println("PUBLISHED ISSUE VIEW\n\n"); System.out.println("----------------------------------------------------------------------------------------"); System.out.printf("%1$-15s %2$-15s %3$-15s\n", "Publ. Year", "Publ. #", "Title"); System.out.println("----------------------------------------------------------------------------------------"); while (res.next()){ System.out.format("%1$-15s %2$-15s %3$-15s\n", res.getObject(1), res.getObject(2), res.getObject(3)); } QUERY = "Select * From ReviewQueue"; stmt = con.prepareStatement(QUERY); stmt.execute(); res = stmt.executeQuery(); System.out.println("\nREVIEW QUEUE VIEW\n"); System.out.println("----------------------------------------------------------------------------------------"); System.out.printf("%1$-15s %2$-15s %3$-15s %4$-15s\n", "LastName", "Primary AuthorID", "idManuscript", "Reviewer_LastName"); System.out.println("----------------------------------------------------------------------------------------"); while (res.next()){ System.out.format("%1$-15s %2$-15s %3$-15s %4$-15s\n", res.getObject(1), res.getObject(2), res.getObject(3),res.getObject(5)); } QUERY = "Select * From ReviewStatus"; stmt = con.prepareStatement(QUERY); stmt.execute(); res = stmt.executeQuery(); System.out.println("\nREVIEW STATUS VIEW\n"); System.out.println("-----------------------------------------------------------------------------------------------------------------------------------------------"); System.out.printf("%1$-23s %2$-15s %3$-25s %4$-15s %5$-15s %6$-15s %7$-15s %8$-15s\n", "Timestamp","idManuscript", "Title", "Appropriateness", "Clarity", "Methodology", "Contribution","Recommendation"); System.out.println("-----------------------------------------------------------------------------------------------------------------------------------------------"); while (res.next()){ 
System.out.format("%1$-23s %2$-15s %3$-25s %4$-15s %5$-15s %6$-15s %7$-15s %8$-15s\n", res.getObject(1), res.getObject(2), res.getObject(3),res.getObject(4), res.getObject(5), res.getObject(6), res.getObject(7),res.getObject(8)); } } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } /** Method to delete reviewer when he resigns from the database, deletes instances where it's the foreign key as well * * @param reviewerID */ public static void Resign_Reviewer (int reviewerID) { try{ //insert feedback Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "DELETE FROM Feedback WHERE ReviewerId = ?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1,reviewerID); stmt.execute(); QUERY = "DELETE FROM Reviewer_Group WHERE ReviewerID= ?"; stmt = con.prepareStatement(QUERY); stmt.setInt(1,reviewerID); stmt.execute(); QUERY = "DELETE FROM Area_of_Interest_Group WHERE ReviewerID = ?"; stmt = con.prepareStatement(QUERY); stmt.setInt(1,reviewerID); stmt.execute(); QUERY = "DELETE FROM Reviewer WHERE ReviewerID = ?"; stmt = con.prepareStatement(QUERY); stmt.setInt(1,reviewerID); stmt.execute(); System.out.println("THANK YOU FOR YOUR SERVICE!"); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage());} } /** Method to get list of all manuscripts belonging to a reviewer with a certain ID * * @param reviewerID * @return list of the manuscript IDs */ public static ArrayList<Integer> View_Assigned_Manuscripts(int reviewerID) { ArrayList<Integer> manuscriptIDs = new ArrayList<Integer> (); try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "Select * From Reviewer_Group where ReviewerID=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, reviewerID); stmt.execute(); ResultSet res = stmt.executeQuery(); while (res.next()){ manuscriptIDs.add((Integer)res.getObject(3)); } con.close(); stmt.close(); res.close(); } catch 
(SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } return manuscriptIDs; } /** Helper method to get all the manuscripts assigned to a reviewer. The list of manuscripts contain * The ID, title, status and number of pages * @param mscripts list, reviewerID */ public static void Get_Manuscripts(ArrayList<Integer> mscripts) { if (mscripts.isEmpty()) { System.out.println("\n\nNo manuscripts assigned to you at the moment!"); return; } System.out.println("\nHERE ARE MANUSCRIPTS ASSIGNED TO YOU IN THE SYSTEM\n\n"); System.out.println("----------------------------------------------------------------------------------------"); System.out.printf("%1$-15s %2$-20s %3$-15s %4$-20s\n", "ManuscriptID", "TITLE", "STATUS", "NUMBER OF PAGES"); System.out.println("----------------------------------------------------------------------------------------"); try{ Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); for (int i=0; i<mscripts.size();i++){ int idManuscript = mscripts.get(i); QUERY = "Select * From Manuscript where idManuscript=?"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1, idManuscript); stmt.execute(); ResultSet res = stmt.executeQuery(); while (res.next()){ System.out.format("%1$-15s %2$-20s %3$-15s %4$-20s\n", res.getObject(1), res.getObject(4), res.getObject(3), res.getObject(6)); } } con.close(); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage()); } } /** Method for the reviewer to insert feedback of an unrated manuscript. The method checks to see if the manuscript has already been * rated before. 
* @param reviewerID */ public static void Review_Manuscript (int reviewerID) { try{ Scanner m1 = new Scanner (System.in); System.out.println("ENTER THE ID OF THE MANUSCRIPT YOU WANT TO REVIEW : "); System.out.println("If you have forgotten the ID, press 0 to return to main menu and CHECK STATUS OF YOUR ASSIGNED MANUSCRIPTS: "); int idManuscript = m1.nextInt(); //check to see if this manuscript is part of the reviewer's list of manuscripts ArrayList<Integer> mlist = View_Assigned_Manuscripts(reviewerID); if (!mlist.contains(idManuscript)) { //If there is no manuscripts in list System.out.println("\nMANUSCRIPT NOT IN THE LIST OF YOUR ASSIGNED MANUSCRIPTS"); } else { //check to see if feedback already exists for the manuscript and print it if it does get_Manuscript_Feedback(idManuscript, reviewerID); Scanner s = new Scanner(System.in); System.out.println("\nRATE APPROPRIATENESS: Input number(1=low, 10=high)"); int appr = s.nextInt(); System.out.println("\nRATE CLARITY: Input number (1=low, 10=high)"); int clar = s.nextInt(); System.out.println("\nRATE METHODOLOGY: Input number (1=low, 10=high)"); int met = s.nextInt(); System.out.println("\nRATE CONTRIBUTION TO FIELD: Input number (1=low, 10=high)"); int cont = s.nextInt(); System.out.println("HERE IS YOUR SUMMARY OF YOUR RATINGS\n"); System.out.println("Appropriateness: "+appr); System.out.println("Clarity: "+clar); System.out.println("Methodology: "+met); System.out.println("ContributionToField: "+cont); Insert_Feedback(appr, clar, met, cont, reviewerID, idManuscript); } } catch (Exception ec){ System.out.println("Error: "+ec.getMessage()); }finally{ ReviewerOptions(reviewerID); } } /** Method to insert feedback about manuscript * * @param appr * @param clar * @param met * @param cont * @param reviewerId * @param idManuscript */ public static void Insert_Feedback (int appr, int clar, int met, int cont, int reviewerId, int idManuscript) { String recomm ="accept"; //default decision String date = dtf.format(now); // 
gets the when feedback is made Scanner sc = new Scanner (System.in); int check =0; //checks to see if input is correct while (check==0){ System.out.println("\nDo you recommend to ACCEPT OR REJECT the manuscript(Please Type 'accept' or 'reject' as an answer)"); String ans=sc.nextLine(); ans=ans.toLowerCase(); if (ans.equals("accept") || ans.equals("reject")){ recomm=ans; check=1; } } try{ //insert feedback Connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); QUERY = "INSERT INTO Feedback (Appropriateness,Clarity, Methodology,ContributionToField,Recommendation, Date, ReviewerId,Manuscript_idManuscript)" +"VALUES(?,?,?,?,?,?,?,?)"; PreparedStatement stmt = con.prepareStatement(QUERY); stmt.setInt(1,appr); stmt.setInt(2, clar); stmt.setInt(3,met); stmt.setInt(4, cont); stmt.setString(5, recomm); stmt.setString(6, date); stmt.setInt(7, reviewerId); stmt.setInt(8, idManuscript); stmt.execute(); System.out.println("--FEEDBACK SUBMISION SUCCESSFUL--"); } catch (SQLException e) { System.err.format("SQL Error: %s", e.getMessage());} } /**Program main**/ public static void main(String[] args) { Connection con = null; // attempt to connect to db try { // load mysql driver Class.forName("com.mysql.jdbc.Driver").newInstance(); //establish connection con = DriverManager.getConnection(SERVER+DATABASE, USERNAME, PASSWORD); System.out.println("Connection established.\n"); // Start application startOption(); }catch (SQLException e ) { // catch SQL errors System.err.format("SQL Error: %s", e.getMessage()); }catch (Exception e) { // anything else e.printStackTrace(); }finally { // cleanup try { con.close(); System.out.print("\nConnection terminated.\n"); } catch (Exception e) { /* ignore cleanup errors */ } } } } <file_sep>/BlogAssign/src/Blog.java import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.io.*; import org.bson.Document; import com.mongodb.BasicDBObject; 
import com.mongodb.MongoClient; import com.mongodb.MongoClientURI; import com.mongodb.MongoException; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoDatabase; public class Blog { public static int entryID =0; public static void main(String [] args) throws IOException { try { MongoClientURI uri = new MongoClientURI( "mongodb://Team14:9XJMVkQCGCzxKLjH@cluster0-shard-00-00-ppp7l.mongodb.net:27017,cluster0-shard-00-01-ppp7l.mongodb.net:27017,cluster0-shard-00-02-ppp7l.mongodb.net:27017/Team14DB?ssl=true&replicaSet=Cluster0-shard-0&authSource=admin"); MongoClient mongo = new MongoClient(uri); MongoDatabase db = mongo.getDatabase("Team14DB"); MongoCollection<Document> postCollection = db.getCollection("post"); MongoCollection<Document> commentCollection = db.getCollection("comment"); // empty the database to start over postCollection.drop(); commentCollection.drop(); // create collections postCollection = db.getCollection("post"); commentCollection = db.getCollection("comment"); System.out.println("Connection Successful"); startQueries(postCollection, commentCollection); } catch (MongoException e){ System.out.println("Error Connecting"); } } /** Method accepts user input and starts the queries that the user wants. 
* Some decent error checking is implemented * @param postCollection * @param commentCollection */ public static void startQueries (MongoCollection<Document> postCollection, MongoCollection<Document> commentCollection){ promptCommand(); // create reader from stdin BufferedReader in = new BufferedReader(new InputStreamReader(System.in)); String s; // read from stdin until ctr-z System.out.println("Enter a command"); try { while ((s = in.readLine()) != null && s.length() != 0) { String[] words = s.split("\\s+"); if (words[0].equals("post")) { ArrayList<String> store = parsePost(s); if (store.size()== 3) { String one = store.get(0); String two = store.get(1); String three = store.get(2); words=one.split("\\s+"); postBlog(words[1], words[2], words[3],two,three, postCollection); } else { System.out.println("command not inputted properly"); } System.out.println("enter another command"); } else if (words[0].equals("comment")) { ArrayList<String> store = parsePost(s); if (store.size() == 3) { String one = store.get(0); String two = store.get(1); words=one.split("\\s+"); commentBlog(words[1], Integer.parseInt(words[2]), words[3], two, commentCollection, postCollection); } else { System.out.println("command not inputted properly"); } System.out.println("enter another command"); } else if (words[0].equals("delete")) { if (words.length == 4) { deleteEntry(words[1], Integer.parseInt(words[2]), words[3], commentCollection, postCollection); } else { System.out.println("command not inputted properly"); } System.out.println("enter another command"); } else if (words[0].equals("show")) { if (words.length == 2) { showBlogs (words[1], postCollection, commentCollection); } System.out.println("enter another command"); } else { System.out.println("that is not a proper command"); promptCommand(); } } } catch(Exception ec){ System.out.println ("Input Error"); startQueries(postCollection, commentCollection); } } /** Parses user input to correctly insert the right parameters to * to qeury 
methods * @param s * @return */ public static ArrayList<String> parsePost (String s) { ArrayList<String> store = new ArrayList<>(); int count=0; int tick =0; while (count==0){ tick = tick+1; if (s.charAt(tick)=='"'){ count=1; String m=s.substring(0,tick); store.add(m); break; } } for (int i=0; i<s.length()-1; i++){ if (s.charAt(i)=='"'){ int counter = 0; int end=i-1; while (counter==0){ end=end+1; if (s.charAt(end+1)=='"'){ counter=1; String m=s.substring(i+1,end+1); store.add(m); } } } } if (store.size()==4){ store.remove(2); } if (store.size()==2){ store.add(""); } return store; } public static void promptCommand() { System.out.println(""); System.out.println("Enter one of the following commands in EXACT FORMAT:"); System.out.println(" post blogName userName title \"postBody\" \"tags\""); System.out.println(" comment blogname entryID userName \"commentBody\""); System.out.println(" delete blogname entryID userName"); System.out.println(" show blogName"); System.out.println(""); } public static void postBlog (String blogName, String username, String title, String postBody, String tags, MongoCollection<Document> postCollection) { DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); LocalDateTime now = LocalDateTime.now(); entryID =entryID+1; Document document = new Document(); document.put("_id", entryID); document.put("blogName", blogName); document.put("userName", username); document.put("title", title); document.put("body",postBody ); document.put("date", dtf.format(now)); document.put("tags", tags); postCollection.insertOne(document); } public static void commentBlog (String blogName, int id, String username, String commentBody, MongoCollection<Document> commentCollection, MongoCollection<Document> postCollection) { DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); LocalDateTime now = LocalDateTime.now(); entryID =entryID+1; int order = 1; if (isCommentEntryPresent(id, blogName, postCollection)){ 
System.out.println("entry is present"); order = 1; } else if (isCommentEntryPresent(id, blogName, commentCollection)){ System.out.println("entry is present"); order = getCommentOrder(id, blogName,commentCollection) + 1; } else { System.err.println("Referenced Entry is not present for comment"); startQueries(postCollection, commentCollection); return; } Document document = new Document(); document.put("_id", entryID); document.put("ref", id); document.put("blogName", blogName); document.put("userName", username); document.put("body",commentBody ); document.put("commentOrder",order ); document.put("date", dtf.format(now)); commentCollection.insertOne(document); } public static void deleteEntry (String blogName, int id, String username, MongoCollection<Document> commentCollection, MongoCollection<Document> postCollection) { MongoCollection<Document> collection = postCollection; if (isEntryPresent(id, blogName,username, postCollection)){ System.out.println("entry is present for delete"); collection = postCollection; } else if (isCommentEntryPresent(id, blogName, commentCollection)){ System.out.println("entry is present for delete"); collection = commentCollection; } else { System.err.println("Referenced Entry is not present for delete"); return; } BasicDBObject selectBlog = new BasicDBObject(); selectBlog.put("_id", id); Document current = collection.find(selectBlog).first(); DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); LocalDateTime now = LocalDateTime.now(); Document replacement = new Document(); replacement.put("_id", id); replacement.put("date", dtf.format(now)); replacement.put("blogName", blogName); replacement.put("userName", username); replacement.put("title", "Deleted"); replacement.put("tags", "Deleted"); replacement.put("body","deleted by " + username + " on " + dtf.format(now)); collection.replaceOne(current, replacement); } /** Checks if a comment is present * * @param entryIntID * @param blogName * @param collection * 
@return */ public static boolean isCommentEntryPresent (int entryIntID, String blogName,MongoCollection<Document> collection) { BasicDBObject selectBlog = new BasicDBObject(); selectBlog.put("blogName", blogName); selectBlog.put("_id", entryIntID); return collection.find(selectBlog).first()!=null; } /**Helper method to check is blog is present * * @param entryIntID * @param blogName * @param username * @param collection * @return */ public static boolean isEntryPresent (int entryIntID, String blogName,String username, MongoCollection<Document> collection) { BasicDBObject selectBlog = new BasicDBObject(); selectBlog.put("blogName", blogName); selectBlog.put("_id", entryIntID); selectBlog.put("userName", username); return collection.find(selectBlog).first()!=null; } /**Helper method to find printing order * * @param entryIntID * @param blogName * @param userName * @param collection * @return */ public static int getCommentOrder (int entryIntID, String blogName, MongoCollection<Document> collection) { BasicDBObject selectBlog = new BasicDBObject(); selectBlog.put("blogName", blogName); selectBlog.put("_id", entryIntID); Document doc = collection.find(selectBlog).first(); return (int)doc.get("commentOrder"); } /** Method to get a specified blog, it returns does not exist message if the blog is not found */ public static void showBlogs (String name, MongoCollection<Document> postCollection, MongoCollection<Document> commentCollection) { if (!isBlogPresent(name, postCollection)){ //if blog doesnt exist System.err.println("Referenced Blog is not present"); return; } BasicDBObject selectBlog = new BasicDBObject(); selectBlog.put("blogName", name); System.out.println(name); System.out.println(""); for (Document doc : postCollection.find(selectBlog)) { //iterate through matching documents System.out.println(" " + doc.get("date") + " "); System.out.println(" (" + doc.get("_id") + ") " + doc.get("title")); System.out.println(" " + doc.get("userName")); System.out.println(" " + 
doc.get("body")); System.out.println(" " + doc.get("tags")); System.out.println(""); int entryID = (Integer) doc.get("_id"); findComment(" ", entryID, commentCollection); } } /**Method to find Comment , and linked comments (comments of comments) * * @param entryID * @param commentCollection */ public static void findComment (String gap, int entryID, MongoCollection<Document> commentCollection){ BasicDBObject selectComment= new BasicDBObject(); selectComment.put("ref", entryID); for (Document doc: commentCollection.find(selectComment)){ System.out.println(gap+doc.get("date")); System.out.println(gap+ "(" +doc.get("_id") + ")" + doc.get("userName")); System.out.println(gap+doc.get("body")); System.out.println(""); int newEntryID = (int) doc.get("_id"); findComment(gap + " ", newEntryID, commentCollection); } } /**Method to show if a blog exists or not, * @param name * @param collection * @return true if the specified blog exists, and false if it doesnt */ public static boolean isBlogPresent (String name, MongoCollection<Document> collection) { BasicDBObject selectBlog = new BasicDBObject(); selectBlog.put("blogName", name); return collection.find(selectBlog).first()!=null; } }
b843537bfbe1ebbe659470c7753a7b7cf4f07bbb
[ "Java" ]
2
Java
clemnyan/DatabaseSys
8034223637344abf49160ffffeb5d91b20bad598
9ce2eca52aa588b2eac70638230a94dd5ca5f9de
refs/heads/main
<repo_name>Mak0y-Plaks/trade-bot-set<file_sep>/BTB-manager-telegram/custom_scripts/ratios.sh #!/bin/bash /usr/bin/python3 /home/ubuntu/BTB-manager-telegram/custom_scripts/ratios.py<file_sep>/BTB-manager-telegram/custom_scripts/health_status.sh #!/bin/bash free -m | awk 'NR==2{printf "Memory Usage: %s/%sMB (%.2f%%)\n", $3,$2,$3*100/$2 }' df -h | awk '$NF=="/"{printf "Disk Usage: %d/%dGB (%s)\n", $3,$2,$5}' top -bn1 | grep load | awk '{printf "CPU Load: %.2f\n", $(NF-2)}' vcgencmd measure_temp vcgencmd get_throttled vcgencmd measure_volts vcgencmd get_mem arm vcgencmd get_mem gpu echo "----IP----" ip a s eth0 | egrep -o 'inet [0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' | cut -d' ' -f2<file_sep>/BTB-manager-telegram/custom_scripts/ratios.py import re with open('./../binance-trade-bot/ratios.txt', 'r') as file: f=file.read() f=f.replace(",","\n").replace("{","\n").replace("}","\n").replace(" <","").replace("<","") f=re.sub(':{2}.*:','',f) print (f) <file_sep>/BTB-manager-telegram/custom_scripts/balance_eth_equiv.py class Ticker(object): def __init__(self, symbol, price): self.symbol = symbol self.price = price def __repr__(self): return "[{0},{1}]".format(self.symbol, self.price) def get_ticker(coin): with urllib.request.urlopen("https://api.binance.com/api/v3/ticker/price?symbol="+coin+"USDT") as url: data = json.loads(url.read().decode()) t = Ticker(**data) return t import urllib.request, json import sqlite3 #** DB location ** dbLoc = '/home/ubuntu/binance-trade-bot/data/crypto_trading.db' #***************** #** Reference coin ** refCoin = "ETH" #******************** conn = sqlite3.connect(dbLoc) cursor = conn.execute("SELECT * from coin_value order by id DESC limit 10") s0=[] tkREF = get_ticker(refCoin) acum = 0 for row in cursor: tkTemp = get_ticker(row[1]) if s0.count(row[1])>=1: break if (float(tkTemp.price)*float(row[2])) >= 10 : print (row[1]) print ("Amount -> ", float(row[2])) print ("Ticker -> U$ ", tkTemp.price) print ("Balance -> U$ 
",float(tkTemp.price)*float(row[2])) print (refCoin," equiv -> ",float(tkTemp.price)*float(row[2])/float(tkREF.price)) print ("") acum = acum + float(tkTemp.price)*float(row[2])/float(tkREF.price) s0.append(row[1]) print ("Total ",refCoin," equiv -> ", acum)<file_sep>/BTB-manager-telegram/custom_scripts/balance_eth_equiv.sh #!/bin/bash /usr/bin/python3 /home/ubuntu/BTB-manager-telegram/custom_scripts/balance_eth_equiv.py <file_sep>/BTB-manager-telegram/custom_scripts/custom_progress.py import urllib.request, json import sqlite3 from datetime import datetime dbLoc = '/home/ubuntu/binance-trade-bot/data/crypto_trading.db' conn = sqlite3.connect(dbLoc) sqlstr=''' select trade_history.* from trade_history where trade_history.rowid in (select t2.rowid from trade_history t2 where t2.alt_coin_id = trade_history.alt_coin_id order by datetime desc limit 16 ) AND trade_history.selling=0 order by alt_coin_id, id desc; ''' cursor = conn.execute(sqlstr) for row in cursor: dt = row[9].split(".") created_date = datetime.strptime(dt[0], '%Y-%m-%d %H:%M:%S') d=f'{created_date.day:02d}'+"/"+f'{created_date.month:02d}'+"/"+str(created_date.year) t=f'{created_date.hour:02d}'+":"+f'{created_date.minute:02d}'+":"+f'{created_date.second:02d}' print (row[1],row[6],d,t)<file_sep>/BTB-manager-telegram/custom_scripts/custom_progress.sh #!/bin/bash /usr/bin/python3 /home/ubuntu/BTB-manager-telegram/custom_scripts/custom_progress.py
d79020369fb30bacb572df70d805763641306b0b
[ "Python", "Shell" ]
7
Shell
Mak0y-Plaks/trade-bot-set
3e01aaa1150bf6d037148139cc57a67c72fae311
75b08fa65c2b7e21cd969077ce95c2ba3ae8ead4
refs/heads/master
<repo_name>findshrey/restaurant-website<file_sep>/src/components/Footer.js import React from "react" import { FOOTER_CONTENTS } from "../data/constants" const Footer = () => { return ( <footer className="section-fade-in"> <div className="footer-upper"> <div className="container"> {FOOTER_CONTENTS.map((content, index) => ( <div className="footer-block" key={index}> <div className="block-icon">{content.icon}</div> <h3>{content.title}</h3> <div className="block-content">{content.content}</div> </div> ))} </div> </div> <div className="footer-lower"> <div className="container"> <span>Copyright &copy; <NAME> 2020</span> </div> </div> </footer> ) } export default Footer <file_sep>/src/components/sections/About.js import React from "react" import { Image } from "cloudinary-react" const About = () => { return ( <section id="about" className="about section-fade-in"> <div className="container"> <header className="head-red"> <h3>About Us</h3> <h2>Who we are</h2> </header> <div className="about-content"> <div className="about-block"> <p> Owners and the chef who hail from Emilia Romagna region of Northeastern Italy in settings that make you want to come back for more. <br /> <br /> The experience sparks memories of the Italian owners filled with nostalgia from their childhood as they prepare an entirely authentic and completely home-made Italian cuisine where we follow the tradition of serving an amazing selection of hand-made pasta from scratch every day – prepared in the same way as our mothers, and the mothers of their mothers, have always done it in our home region in Italy. 
</p> <Image cloudName="dg4arvkpw" publicId="la-casetta/about/about_1_rtkmdo.jpg" loading="lazy" srcSet=" https://res.cloudinary.com/dg4arvkpw/image/upload/w_600/la-casetta/about/about_1_rtkmdo.jpg 600w, https://res.cloudinary.com/dg4arvkpw/image/upload/w_1200/la-casetta/about/about_1_rtkmdo.jpg 1200w" sizes="(max-width: 800px) 90vw, 50vw" /> </div> <div className="about-block"> <Image cloudName="dg4arvkpw" publicId="la-casetta/about/about_2_s8yey7.jpg" loading="lazy" srcSet=" https://res.cloudinary.com/dg4arvkpw/image/upload/w_600/la-casetta/about/about_2_s8yey7.jpg 600w, https://res.cloudinary.com/dg4arvkpw/image/upload/w_1200/la-casetta/about/about_2_s8yey7.jpg 1200w" sizes="(max-width: 800px) 90vw, 50vw" /> <p> We pair our food with a long list of exquisite Italian wines and spirits to give you wholesome dining experience. We thrive on bringing people together with traditional Italian food and drinks. <br /> <br /> The ambiance in our dining room is sophisticated but never too formal. Every corner of the restaurant has been lovingly decorated to showcase the original and traditional Italy straight out of our Italian owner’s memories. 
</p> </div> </div> </div> </section> ) } export default About <file_sep>/src/components/sections/Menu.js import React, { useState, useEffect } from "react" import firebase from "../../firebase/firebase" // Filter dishes by category const filterMenu = (menu, filterBy) => { if (Object.keys(menu).length === 0) { return [] } return menu[filterBy] } const Menu = () => { const [menu, setMenu] = useState({}) const [activeBtn, setActiveBtn] = useState(0) const [selectedCategory, setSelectedCategory] = useState("appetizers") // Get menu from firebase and set state useEffect(() => { let data = {} firebase .firestore() .collection("menu") .get() .then((snapshot) => { snapshot.docs.forEach((doc) => { data = { ...data, ...doc.data() } }) setMenu(data) }) .catch((e) => { console.log(e) }) }, []) // Set active button const handleActiveBtn = (btnIndex) => { setActiveBtn(btnIndex) } // Change selected category const handleCategory = (navItem) => { setSelectedCategory(navItem) } // Get available menu categories const menuCategories = Object.keys(menu) // Get dishes to render const filteredItems = filterMenu(menu, selectedCategory) return ( <section id="menu" className="menu section-fade-in"> <div className="container"> <header className="head-white"> <h3>Choose & Taste</h3> <h2>Restaurant Menu</h2> </header> <nav className="menu-nav"> {menuCategories.map((category, index) => ( <button key={index} className={index === activeBtn ? 
"active" : ""} onClick={() => { handleActiveBtn(index) handleCategory(category) }} > {category} </button> ))} </nav> <ul className="menu-items"> {filteredItems.map((item, index) => ( <li className="item" key={index}> <div className="item-title"> <span className="item-name">{item.name}</span> <span className="item-price">{item.price}</span> </div> <div className="item-description">{item.description}</div> </li> ))} </ul> </div> </section> ) } export default Menu <file_sep>/src/firebase/firebase.js import firebase from "firebase/app" import "firebase/firestore" const firebaseConfig = { apiKey: "<KEY>", authDomain: "la-casetta-68948.firebaseapp.com", projectId: "la-casetta-68948", storageBucket: "la-casetta-68948.appspot.com", messagingSenderId: "940684025874", appId: "1:940684025874:web:a76a858184ab9128bb78cb", } firebase.initializeApp(firebaseConfig) export default firebase <file_sep>/README.md # La Casetta - Italian Restaurant An Italian restaurant website, hosted with [Netlify](https://www.netlify.com). ![demo](readme_images/demo-1.png?raw=true) ## Installation & Set Up 1. Install and use the correct version of Node using [NVM](https://github.com/nvm-sh/nvm) To verify that nvm has been installed, do: ```sh command -v nvm ``` 2. Install dependencies ```sh npm install ``` 3. Start the development server ```sh npm run dev ``` Open [http://localhost:8080](http://localhost:8080) with your browser to see the result. You can start editing the page by modifying `src/index.js`. The page auto-updates as you edit the file. ## Building for Production 1. 
Generate a full static production build ```sh npm run build ``` ## LICENSE The [MIT](LICENSE) License <file_sep>/src/components/sections/Features.js import React from "react" import { FEATURES } from "../../data/constants" const Features = () => { return ( <section id="features" className="features section-fade-in"> <div className="container"> {FEATURES.map((feature, index) => ( <div className="feature" key={index}> <div className="feature-icon">{feature.icon}</div> <h3>{feature.title}</h3> <p>{feature.description}</p> </div> ))} </div> </section> ) } export default Features <file_sep>/src/components/sections/Reservation.js import React from "react" const Reservation = () => { return ( <section id="reservation" className="reservation section-fade-in"> <div className="container"> <header className="head-white"> <h3>Book your table</h3> <h2>Reservation</h2> </header> <form name="reservation" className="form" method="POST" // action="/#reservation" > {/* Connects to the HTML form within index.html */} <input type="hidden" name="form-name" value="reservation" /> <div className="form-group"> <input type="text" name="date" placeholder="Date*" required /> <input type="text" name="time" placeholder="Time*" required /> <input type="number" name="party" placeholder="Party" /> </div> <div className="form-group"> <input type="text" name="user-name" placeholder="Name*" required /> <input type="number" name="phone" placeholder="Phone*" required /> <input type="email" name="email" placeholder="Email" /> </div> <textarea rows="13" name="message" placeholder="Message" /> <div className="btn-wrapper"> <button type="submit" className="btn-reserve"> Make Reservation </button> </div> </form> </div> </section> ) } export default Reservation <file_sep>/src/data/constants.js import React from "react" import * as FaIcons from "react-icons/fa" const NAV_LINKS = [ { name: "Home", url: "#home", }, { name: "About", url: "#about", }, { name: "Menu", url: "#menu", }, { name: "Gallery", url: 
"#gallery", }, { name: "Reservation", url: "#reservation", }, ] const FEATURES = [ { icon: <FaIcons.FaAward />, title: ( <> 100% Fresh <br /> Products </> ), description: "Fresh and organic vegetables. Good for your health, and it also tastes better!", }, { icon: <FaIcons.FaUtensils />, title: ( <> Delicious <br /> Food </> ), description: "Food paired with exquisite Italian wines and spirits for wholesome dining experience!", }, { icon: <FaIcons.FaUsers />, title: ( <> Friendly <br /> Staff </> ), description: "Our greatest asset is the customer! We treat each customer as if they are the only one!", }, { icon: <FaIcons.FaThumbsUp />, title: ( <> Relaxing <br /> Atmosphere </> ), description: "Relaxing and soothing atmosphere for you to enjoy your meal to the fullest!", }, ] const FOOTER_CONTENTS = [ { icon: <FaIcons.FaCompass />, title: "LOCATION", content: ( <> Brooklyn <br /> 376 Bjerg Street <br /> Brooklyn, NY — 11231 <br /> (718) 555-1234 </> ), }, { icon: <FaIcons.FaClock />, title: "HOURS", content: ( <> Monday — Thursday <br /> 8am — 11pm <br /> Friday — Sunday <br /> 11am — 11pm </> ), }, { icon: <FaIcons.FaPhone />, title: "CONTACT", content: ( <> No. 120 E 4th Ave, USA <br /> <EMAIL> <br /> +1234 567 789 </> ), }, ] export { NAV_LINKS, FEATURES, FOOTER_CONTENTS }
f5d89a802845473de34344464cee5adaea1643b0
[ "JavaScript", "Markdown" ]
8
JavaScript
findshrey/restaurant-website
97b9dcb3d737cdfec68a00e233bcce3ba4a55d84
d4e0aae7a2fd40cf508c94c5ff2bf7541666147e
refs/heads/master
<file_sep>#pragma once #ifndef STDAFX_H #include <wtypes.h> #include <Psapi.h> // PROCESS_MEMORY_COUNTERS #endif namespace basis { // プロセスのメモリ使用量を取得 class ProcessMemoryStatus : public PROCESS_MEMORY_COUNTERS { public: // 構造体のクラス化 ProcessMemoryStatus(int unit = 1) : m_unit((std::max)(unit, 1)), m_process(0), PROCESS_MEMORY_COUNTERS({ sizeof(PROCESS_MEMORY_COUNTERS) }) {}; ~ProcessMemoryStatus() { m_process && CloseHandle(m_process); } bool update() { if (m_process || (m_process = OpenProcess(PROCESS_QUERY_INFORMATION, 0, GetCurrentProcessId())) != 0) return FALSE != GetProcessMemoryInfo(m_process, this, cb); return false; } size_t usage() { return WorkingSetSize / m_unit; } private: HANDLE m_process; int m_unit; }; // システム全体での使用量を取得 class SystemMemoryStatus { public: SystemMemoryStatus(int unit = 1) : m({ sizeof(m) }), m_unit(unit) {}; bool update() { return FALSE != GlobalMemoryStatusEx(&m); } size_t total() { return static_cast<size_t>(m.ullTotalPhys / m_unit); } size_t avail() { return static_cast<size_t>(m.ullAvailPhys / m_unit); } size_t usage() { return static_cast<size_t>((m.ullTotalPhys - m.ullAvailPhys) / m_unit); } private: MEMORYSTATUSEX m; int m_unit; }; } // namespace <file_sep>#pragma once #ifndef GUID_DAA7C9A6D5F64054916D44959063EFA8 #define GUID_DAA7C9A6D5F64054916D44959063EFA8 #include "window.h" #include "string_buffer.h" namespace basis { /*! DWM provides the way to get actual window rect. Because of that GetWindowRect API returns the rectangle including skelton area on Windows Vista or later system, the rectangle returned by GetWindowRect API would be different from what we could see on the screen. In the case that actual window rect is necessary, IsCompositionEnabled() function tells us whether it should call GetExtendedRect() function instead of GetWindowRect API. 
*/ class Window::DWM { typedef HRESULT(WINAPI *TyIsEnabled)(BOOL*); typedef HRESULT(WINAPI *TyAttribute)(HWND, DWORD, LPCVOID, DWORD); public: /* Checks whether Windows Aero is enabled or not. This helps us know what GetWindowRect() API returns. If IsCompositionEnabled() function returned true, Windows Aero has been enabled. GetWindowRect() API then returns Aero glass area. This means what the API returned is inconsitent with what we could see on the screen. To workaround this, GetExtendedRect() function can be used. When this function returned false, GetWindowRect() API has no matters about that. */ static bool IsCompositionEnabled() { static auto fp = reinterpret_cast<TyIsEnabled> (getProc("DwmIsCompositionEnabled")); BOOL b; return (fp && fp(&b) == S_OK && b != FALSE); } /*! Gets the practical rectangle of the window. This function works only on Windows Vista or later and when IsCompositionEnabled() function returns true. */ static bool GetExtendedRect(const Window *h, RECT* p) { return S_OK != getWindowAttribute(h, DWMWA_EXTENDED_FRAME_BOUNDS, p, sizeof(RECT)); } protected: static HRESULT getWindowAttribute( const Window *h, DWORD dwAttr, LPCVOID pAttr, DWORD cbAttr) { if (!GetFunction()) return -1; return GetFunction()(*h, dwAttr, pAttr, cbAttr); } static inline FARPROC getProc(LPCSTR name) { if (!module()) return nullptr; return GetProcAddress(module(), name); } private: static HMODULE module() { static HMODULE hDwm = load(); return hDwm; } static HMODULE load() { StringBuffer buf(GetSystemDirectory(0, 0)); GetSystemDirectory(buf.data(), static_cast<UINT>(buf.capacity())); return LoadLibrary(buf.append(_T("\\dwmapi.dll")).c_str()); } static TyAttribute GetFunction() { static auto fp = reinterpret_cast<TyAttribute> (getProc("DwmGetWindowAttribute")); return fp; } }; } // namespace #endif<file_sep>/* APIs LoadImage Windows NT 4.0 / 95 以降 Unicode:Windows NT / 2000 は Unicode 版と ANSI 版を実装 LoadCursorFromFile Windows NT/2000:Windows NT 3.5 以降 SetCursor ShowCursor 
Get/SetCursorPos Get/Set/ReleaseCapture Windows NT/2000:Windows NT 3.1 以降 Windows 95/98:Windows 95 以降 ヘッダーファイル:Winuser.h 内で宣言、Windows.h をインクルード インポートライブラリ:User32.lib を使用 */ #include "critical_section.h" #include "singleton.h" #include "cursor.h" namespace basis { class Cursor::Impl { public: Impl() : m_cursor(0) {} ~Impl() { reset(); } bool set(HCURSOR hCursor) { if (!hCursor) return false; CriticalSection cs = m_cs.local(); reset(); m_cursor = SetCursor(hCursor); return true; } void reset() { auto cs = m_cs.local(); if (m_cursor) SetCursor(m_cursor); m_cursor = nullptr; } bool load(ID idCursor) { return set(Load(idCursor)); } bool load(const TCHAR *file) { return set(LoadCursorFromFile(file)); } static HCURSOR Load(ID idCursor) { return static_cast<HCURSOR>(LoadImage(0, MAKEINTRESOURCE(static_cast<WORD>(idCursor)), IMAGE_CURSOR, 0, 0, LR_DEFAULTSIZE | LR_SHARED)); } private: CriticalSection m_cs; HCURSOR m_cursor; }; bool Cursor:: set(HCURSOR hCursor) { return get().set(hCursor); } bool Cursor:: set(ID id) { return get().load(id); } bool Cursor:: set(const TCHAR *fileName) { return get().load(fileName); } void Cursor:: reset() { return get().reset(); } Point Cursor:: pos() { POINT pt{}; GetCursorPos(&pt); return{ pt.x, pt.y }; } bool Cursor:: pos(Point pt) { return 0 != SetCursorPos(pt.x, pt.y); } void Cursor:: capture(HWND hWnd) { SetCapture(hWnd); } void Cursor:: release() { ReleaseCapture(); } bool Cursor:: show() { return ShowCursor(TRUE) >= 0; } bool Cursor:: hide() { return ShowCursor(FALSE) < 0; } Cursor::Impl & Cursor:: get() { return singleton<Impl>::get(); } } // namespace<file_sep>#pragma once #ifndef GUID_E7B2179BBB074D02BEF92772FB25260C #define GUID_E7B2179BBB074D02BEF92772FB25260C // Windows 2000 Professional[desktop apps only] #ifndef STDAFX_H #include <ole2.h> #pragma comment(lib, "ole32.lib") #endif /*! OLE初期化オブジェクト. 
実行スレッドにおけるOLEの初期化と終了処理を行う。 @par OLEはスレッドセーフでないため、スレッドごとに実体を持つぽい。 そのためスレッドごとに初期化をしないといけない。 */ class OleInitializer { public: OleInitializer() : status(OleInitialize(0)) {} ~OleInitializer() { // 初期化時にS_FALSEをかえしたときも終了処理は行う。 if (isValid()) OleUninitialize(); } bool isValid() { return status == S_OK || status == S_FALSE; } private: HRESULT status; }; #endif<file_sep>#pragma once #ifndef GUID_47EFB4BE87AD49519D4171C3CC5BAB09 #define GUID_47EFB4BE87AD49519D4171C3CC5BAB09 #include "key_combination.h" #include "iunit_test.h" namespace basis { UNIT_TEST(CKeyMap) //! Accelerator key container class CKeyMap { public: using tstr = std::basic_string<TCHAR>; using Index = WORD; //! Command Identifier; Hi-WORD would be ignored. using Command = DWORD; CKeyMap() { clear(); } void clear() noexcept; //! Returns a maximum number of key-id entry. size_t capacity() noexcept { return _countof(keyset) - 1; } //! Returns a stored number of key-id entry. size_t size() noexcept { return index[0]; } bool isFull() noexcept { return (size() >= capacity()); } /*! Appends a pair of key and command ID. @param key @param id Command identifier (0, 0xFFFF) */ bool append(CKey key, Command id) noexcept; //! Erases an entry that has specified key. void eraseByKey(CKey key); //! Erases all entry that has spedicified command ID. void eraseByCommand(Command id); //! Erases an entry specified by the index. void erase(Index ix); /*! Returns a command ID related to the key. If bCompliment set to true, combination keys, Ctrl, Alt, Shift, its states are automatically tested. */ DWORD getCommand(CKey key, bool bCompliment = true); //! Returns n-th candidate related to the command ID. CKey getKey(Command id, int n); private: /*! Returns index of an entry related to the key. If identical entry is not exist, It returns one that have the same vkey. If identical or vkey-identical entry is not exist, then it returns null key. */ Index search(CKey key, Index start_ix); //! 
Container using Item = std::pair<CKey, WORD>; Item keyset[512]; //! index[vkey] will have an index of keyset, at where vkey first appears. //! index[0] have the number of stored entry. WORD index[256]; }; } // namespace #endif<file_sep>#ifndef _STDAFX #include <algorithm> #endif #include "stdfnc.h" #include "draw_list.h" #include "profile.h" #include "window.h" #include "menu.h" #include "filer.h" #include "loader.h" #include "ids.h" namespace { /*! Draws a string onto the screen. @return Output rectangle. @param pt Position of left-top corner of the text. @param bOut Whether put out the text or not. */ basis::Rect TextOutRect(HDC h, basis::Point pt, const TCHAR *str, bool bOut) { // It seems that TextOut function couldn't recognize SJIS strings, // so SJIS strings drawn by TextOut function corrupted. // To deal with this, we should call UNICODE version of it. auto wstr = basis::StringBuffer(0, str).toUTF16(); SIZE s{}; GetTextExtentPoint32W(h, wstr.c_str(), static_cast<int>(wstr.size()), &s); basis::Rect rc = { pt.x, pt.y, pt.x + s.cx, pt.y + s.cy }; if (bOut) TextOutW(h, pt.x, pt.y, wstr.c_str(), static_cast<int>(wstr.size())); return rc; } } // namespace namespace image_viewer { CImageViewer::CDrawList:: CDrawList(CImageViewer &parent_) : parent(parent_), m_enable(false) { sNoFileInfo = parent.profile->getTranslatedString(ID::LIST_EMPTY); m_offset.setBase({ 5, 5 }); m_offset.resetPos(); LOGFONT f; f.lfHeight = 0; f.lfWidth = 0; f.lfEscapement = 0; f.lfOrientation = 0; f.lfWeight = FW_BOLD; f.lfItalic = 0; f.lfUnderline = 0; f.lfStrikeOut = 0; f.lfCharSet = ANSI_CHARSET; f.lfOutPrecision = OUT_DEFAULT_PRECIS | OUT_TT_PRECIS; f.lfClipPrecision = CLIP_DEFAULT_PRECIS; f.lfQuality = DRAFT_QUALITY; f.lfPitchAndFamily = VARIABLE_PITCH | FF_DONTCARE; f.lfFaceName[0] = '\0'; boldFont = CreateFontIndirect(&f); } CImageViewer::CDrawList::~CDrawList() { DeleteObject(boldFont); } void CImageViewer::CDrawList:: invalidate() { parent.invalidate(m_offset.rect()); 
parent.invalidate(drawList(&parent.m_backbuffer, false)); } //! リストの出力範囲を返す /*! bDraw = true : リストを出力し、描画範囲を更新する bDraw = false: 描画範囲をかえす(更新はしない) */ basis::Rect CImageViewer::CDrawList:: drawList(Surface *surface, bool bDraw) { Rect rc; if (parent.filer->isEmpty()) { surface->setFont(boldFont); if (bDraw) SetTextColor(*surface, Colors[not_read]); rc = TextOutRect(*surface, m_offset.pos(), sNoFileInfo.c_str(), bDraw); } else if (m_enable) { rc = do_drawList(surface, bDraw); } if (bDraw) { m_offset.width((std::max)(rc.right - m_offset.pt().x, 0)); m_offset.height((std::max)(rc.bottom - m_offset.pt().y, 0)); } return rc; } basis::Rect CImageViewer::CDrawList:: do_drawList(Surface *surface, bool b) { Rect rc, total; int pitch = surface->getFontHeight(); int client_height = parent.getClientRect().height(); iterator iEnd = parent.filer->end(); if (b) m_pos.clear(); basis::Point pt = m_offset.pos(); ColorType color; for (iterator item = parent.filer->begin(); item != iEnd; ++item) { if (pt.y >= client_height) break; if (pt.y <= -pitch) { pt.y += pitch; continue; } if (item == parent.filer->current()) { surface->setFont(boldFont); color = current_file; } else { surface->resetFont(); if (!b || item->get()->isLoaded()) color = loaded_image; else if (parent.loader->isLoading(item)) color = now_loading; else color = not_read; } if (b) SetTextColor(*surface, Colors[color]); rc = TextOutRect(*surface, pt, item->get()->fileName(), b); total.unite(rc); if (b) m_pos.push_back({ item, rc }); pt.y += (color == current_file) ? 
surface->getFontHeight() : pitch; } surface->resetFont(); return total; } CImageViewer::iterator CImageViewer::CDrawList:: itemFromPt(basis::Point pt) { for (auto &i : m_pos) { if (i.second.isInclusive(pt)) return i.first; } return parent.filer->end(); } bool CImageViewer::CDrawList::isInclusive(basis::Point pt) { return itemFromPt(pt) != parent.filer->end(); } } // namespace<file_sep>#pragma once #ifndef CUnitDispenser_h #define CUnitDispenser_h #ifndef STDAFX_H #endif // 数値が一定の単位に達すると1または-1をかえし、余剰分は保存する // マウスホイールやカーソルの移動量を、回数分のコマンドに変換するとき使う template<class T> class CUnitDispenser { private: T value; T unit; public: CUnitDispenser() : value(0), unit(0) {}; CUnitDispenser(const T& Value, const T& Unit) { value = Value; unit = Unit; } ~CUnitDispenser() = default; void setUnit(const T& newUnit) { unit = newUnit; } void reset(const T& newValue) { value = newValue; } T get() { if (value >= unit) { value -= unit; return unit; } else if (value <= -unit) { value += unit; return -unit; } return{}; } bool add(const T& addition) { value += addition; return (over() != 0); } bool over() { return value >= unit || value <= -unit; } }; #endif<file_sep>#include "surface.h" #include "window.h" #include "exception.h" #include "stdfnc.h" #ifndef ARG_POS_SIZE #define ARG_POS_SIZE(rc) rc.left, rc.top, rc.right - rc.left, rc.bottom - rc.top #endif namespace basis { void Surface::reset(HDC hdc) noexcept { resetFont(); if (m_default && m_default != reinterpret_cast<HBITMAP>(-1)) { DeleteObject(swapBitmap(m_default)); m_default = 0; } if (m_h && m_default != reinterpret_cast<HBITMAP>(-1)) { DeleteDC(m_h); } m_size.reset(); m_h = hdc; } void Surface:: create(HDC src, Size s) { HDC hdc = src ? 
src : GetDC(0); reset(CreateCompatibleDC(hdc)); setBitmap(CreateCompatibleBitmap(hdc, s.x, s.y)); if (!src) ReleaseDC(0, hdc); } bool Surface:: isCompatible(HDC hdc) { return GetDeviceCaps(m_h, BITSPIXEL) == GetDeviceCaps(hdc, BITSPIXEL); } bool Surface:: compatible(HDC hdc, Size s) { if (!isCompatible(hdc) || size().x < s.x || m_size.y < s.y ) { create(hdc, s); return true; } return false; } HPEN Surface:: pen(HPEN h) const noexcept { // Null if error return static_cast<HPEN>(SelectObject(m_h, h)); } HPEN Surface:: pen(HGDIOBJ h) const noexcept { return pen(static_cast<HPEN>(h)); } HBRUSH Surface:: brush(HBRUSH h) const noexcept { return static_cast<HBRUSH>(SelectObject(m_h, h)); } HBRUSH Surface:: brush(HGDIOBJ h) const noexcept { return brush(static_cast<HBRUSH>(h)); } bool Surface:: rectangle(const Rect & rc) const noexcept { return 0 != Rectangle(m_h, rc.left, rc.top, rc.right, rc.bottom); } HFONT Surface:: setFont(HFONT f) { f = static_cast<HFONT>(SelectObject(m_h, f)); if (!m_font) { m_font = f; return 0; } return f; } HFONT Surface:: resetFont() { HFONT h; if (m_font) { h = setFont(m_font); m_font = 0; return h; } return 0; } int Surface:: getFontHeight() { return GetFontHeight(m_h, 0, 0); } Size Surface:: size() const { if (m_size.x) return m_size; return GetSize(static_cast<HBITMAP>( GetCurrentObject(m_h, OBJ_BITMAP))); // 0 if error } size_t Surface:: usage() const { return GetDeviceCaps(m_h, BITSPIXEL) * GetDeviceCaps(m_h, PLANES) / 8 * size().x * m_size.y; } Size Surface:: GetSize(HBITMAP h) { BITMAP bmp; if (!GetObject(h, sizeof bmp, &bmp)) throw std::runtime_error(LOCATION); return{ static_cast<int>(bmp.bmWidth), static_cast<int>(bmp.bmHeight) }; } bool Surface:: setBitmap(HBITMAP hBmp) { if (!hBmp) return false; Size s = GetSize(hBmp); if (!m_h) create(0, s); HBITMAP ret = swapBitmap(hBmp); if (ret == hBmp) return false; if (m_default) DeleteObject(ret); else m_default = ret; m_size = s; return ret != hBmp; } HBITMAP Surface:: swapBitmap(HBITMAP 
hBmp) { return static_cast<HBITMAP>(SelectObject(m_h, hBmp)); } bool Surface:: transfer(HDC hdc, const Rect &dest, const Rect& src) const { if (!m_h) return false; if (dest.size() == src.size()) { return (0 != BitBlt(hdc, ARG_POS_SIZE(dest), m_h, src.left, src.top, SRCCOPY)); } int prevMode = SetStretchBltMode(hdc, HALFTONE); if (prevMode == 0 || prevMode == ERROR_INVALID_PARAMETER) return false; // HALFTONEに設定した場合は必ず再設定 SetBrushOrgEx(hdc, 0, 0, nullptr); return (0 != StretchBlt(hdc, ARG_POS_SIZE(dest), m_h, ARG_POS_SIZE(src), SRCCOPY)); } } // namespace<file_sep>#ifndef STDAFX_H #include <assert.h> #include <vector> #endif #include "key_combination.h" #include "popup_menu.h" #include "profile.h" #include "filer.h" #include "ids.h" #include "menu.h" namespace image_viewer { class CImageViewer::ContextMenu::Impl { public: using CPopupMenu = basis::CPopupMenu; Impl(ContextMenu& parent_) : parent(parent_) {} ~Impl() = default; void create(); void access(bool bSave); std::basic_string<TCHAR> getAcceleratorString(ID id); ContextMenu &parent; basis::CPopupMenu m_menu; std::vector<std::pair<int, int>> m_radio; }; void CImageViewer::ContextMenu::Impl:: create() { // メニュー・サブメニューのスタック std::vector<CPopupMenu*> menus; std::unique_ptr<CPopupMenu> child; menus.push_back(new CPopupMenu); if (parent.parent.m_lastPath.exist()) { menus.back()->insert(0, static_cast<int>(ID::LAST_PATH), parent.parent.m_lastPath.getFileName().c_str()); } for (ID id = ID::MENU_BEGIN; id != ID::MENU_END; id = next(id)) { switch (id) { case ID::SORT_BEGIN: case ID::VIEW_BEGIN: menus.push_back(new CPopupMenu); break; case ID::SORT_END: case ID::VIEW_END: // Finalize creating child child.reset(menus.back()); menus.pop_back(); break; case ID::USE_PROFILE: case ID::VIEW_CENTER: case ID::WINDOW_CLOSE: menus.back()->insertSeparator(0); break; }// switch(id) std::basic_string<TCHAR> menu_rabel = parent.parent.profile->getTranslatedString(id); if (!menu_rabel.empty()) { menu_rabel += getAcceleratorString(id); if 
(child) { menus.back()->insert(0, *child, menu_rabel.c_str()); child.reset(); } else { menus.back()->insert(0, static_cast<int>(id), menu_rabel.c_str()); } } assert(child == nullptr); } assert(menus.size() == 1); m_menu = menus[0]->release(); delete menus[0]; m_radio.push_back({ static_cast<int>(ID::SORT_BEGIN) + 1, static_cast<int>(ID::SORT_END) - 1 }); } std::basic_string<TCHAR> CImageViewer::ContextMenu::Impl:: getAcceleratorString(ID id) { std::basic_string<TCHAR> str; basis::CKey accelerator_key; for (int i = 0;; i++) { accelerator_key = parent.parent.getKey(id, i); if (!accelerator_key) break; str += (i == 0) ? TEXT("\t") : TEXT(", "); str += accelerator_key.toStr(); } return str; } void CImageViewer::ContextMenu::Impl:: access(bool bSave) { parent.select(ID::USE_PROFILE); auto &p = parent.parent.profile->menu(); const ID ids[] = { ID::VIEW_POPUP, ID::VIEW_FILENAME, ID::VIEW_FILELIST, ID::VIEW_UPSCALE, ID::VIEW_DOWNSCALE, ID::VIEW_CENTER }; for (auto i : ids) { if (bSave) p.saveBoolean(i, parent.isSelected(i)); else if (p.loadBoolean(i, 0) != 0) parent.select(i); else parent.clear(i); } if (bSave) { int n = static_cast<int>(parent.getSortWay()) - static_cast<int>(ID::SORT_BEGIN); p.save(ID::SORT_END, n); return; } else { int id = p.load(ID::SORT_END, 0) + static_cast<int>(ID::SORT_BEGIN); parent.changeStatus(static_cast<ID>(id)); } } CImageViewer::ContextMenu::ContextMenu(CImageViewer & parent_) : parent(parent_), impl(new Impl(*this)) {} CImageViewer::ContextMenu::~ContextMenu() = default; void CImageViewer::ContextMenu:: initialize() { impl->create(); if (parent.profile->isEnable()) impl->access(false); updateStatus(); } void CImageViewer::ContextMenu:: saveSettings() { impl->access(true); } bool CImageViewer::ContextMenu:: changeStatus(ID id) { // 2State Button switch(id) { case ID::USE_PROFILE: case ID::VIEW_FILELIST: case ID::VIEW_FILENAME: case ID::VIEW_UPSCALE: case ID::VIEW_DOWNSCALE: case ID::VIEW_CENTER: case ID::VIEW_POPUP: 
impl->m_menu.invert(static_cast<int>(id)); return true; } // Radio button for (auto &i : impl->m_radio) { if (i.first <= static_cast<int>(id) && static_cast<int>(id) <= i.second) { impl->m_menu.radio(i.first, i.second, static_cast<int>(id)); return true; } } return false; } ID CImageViewer::ContextMenu:: getSortWay() { for (ID id = next(ID::SORT_BEGIN); id != ID::SORT_END; id = next(id)) { if (isSelected(id)) return id; } return next(ID::SORT_BEGIN); } void CImageViewer::ContextMenu:: disable(ID id) { impl->m_menu.disable(static_cast<int>(id)); } void CImageViewer::ContextMenu:: enable(ID id) { impl->m_menu.enable(static_cast<int>(id)); } void CImageViewer::ContextMenu:: select(ID id) { impl->m_menu.select(static_cast<int>(id)); } bool CImageViewer::ContextMenu:: isSelected(ID id) { return impl->m_menu.isSelected(static_cast<int>(id)); } void CImageViewer::ContextMenu:: clear(ID id) { impl->m_menu.clear(static_cast<int>(id)); } bool CImageViewer::ContextMenu::updateStatus() { const auto &f = parent.filer; const auto iCurrent = f->current(); const bool bInvalidFile = f->isEmpty() || iCurrent == f->cend(); ID ids[] = { ID::SHOW_PROPERTY, ID::FILE_RELOAD, ID::FILE_DELETE }; for (auto i : ids) { if (bInvalidFile) disable(i); else enable(i); } // Disable if it is showing a first item. if (bInvalidFile || iCurrent == f->cbegin()) { disable(ID::FILE_BACK); disable(ID::FILE_FIRST); } else { enable(ID::FILE_BACK); enable(ID::FILE_FIRST); } // Disable if it is showing a last item. if (bInvalidFile || iCurrent == f->clast()) { disable(ID::FILE_NEXT); disable(ID::FILE_LAST); } else { enable(ID::FILE_NEXT); enable(ID::FILE_LAST); } return true; } int CImageViewer::ContextMenu:: track(basis::Point pt) const { impl->m_menu.redraw(parent); return impl->m_menu.track(parent, pt); } } // namespace<file_sep>#pragma once #ifndef GUID_92604BE061ED490DB4ED5ACF19CCBF4E #define GUID_92604BE061ED490DB4ED5ACF19CCBF4E #ifndef STDAFX_H #include <exception> #include <string> #endif //! 
送出したい例外クラスのコンストラクタに渡す文字列 #define LOCATION ::basis::MakeLocation(__func__, __FILE__, __LINE__) //! APIがエラーのときに投げる例外 #define api_runtime_error() \ std::runtime_error(::basis::GetErrorMessage(__func__, __FILE__, __LINE__)) namespace basis { /*! 例外 @file std::exceptionの派生クラスを基底クラスとすると、 virtual指定がないためにstd::exceptionとしてcatchすることはできない。 そのためC++標準の例外クラスのみを使うことにする。 例1)送出したい例外クラスにLOCATIONマクロの文字列を渡す throw std::invalid_argument(LOCATION); 例2)APIエラーの場合は専用マクロでruntime_error例外を作る。  このとき、GetLastError()の説明文が追加される。 throw api_runtime_error(); ※考察 標準例外はマクロによりファイル名などを含めることができるが、 メソッドの引数にわたされたbasic_string<TCHAR>を扱えない。 一長一短であるので、ユーザ定義例外クラスを作るときは 例外発生場所を知らせるchar型の文字列と、 引数やエラー内容を知らせるTCHAR型の文字列を持たせ、 MessageBoxA() と MessageBox()の2段階で通知する手もある */ //! LOCATION マクロの実体 std::string MakeLocation(const char *func, const char *file, int line); //! api_runtime_error 用メッセージ std::string GetErrorMessage(const char *func, const char *file, int line); } // namespace #endif // exception_h<file_sep>#pragma once #ifndef GUID_69EDC1CC13F64FE99777A48167DBC900 #define GUID_69EDC1CC13F64FE99777A48167DBC900 #include "thread.h" namespace basis { /*! 複合スレッドクラス. 指定した数のスレッドを管理下に置く。デフォルトは1スレッド。 タスク(関数)を渡すと空いているスレッドで処理を行う。 */ class CThreadUnity { public: //! 管理スレッド数の上限 static size_t constexpr MAX_THREADS = 255; CThreadUnity(); CThreadUnity(CThreadUnity&) = delete; CThreadUnity(CThreadUnity&&) = default; ~CThreadUnity(); CThreadUnity& operator=(CThreadUnity&) = delete; CThreadUnity& operator=(CThreadUnity&&) = default; //! 管理下のスレッド数を返す int threadCount(); /*! スレッド数の設定. @param nThreads (0, MAX_THREADS ] */ bool setThreadCount(int nThreads); /*! 実行タスクを追加. @param task 引数なし、戻り値なしの関数、ファンクタ、またはラムダ式 */ void addTask(basis::CThread::TaskTy task); /*! スレッドが待機状態になるのを待つ. 
@param bWaitAll いずれかのスレッドが待機状態になるのを待つときは false, 全てのスレッドを待つときは true を指定する。 @param waitMilliSeconds 待機時間の上限。ミリ秒単位。負数を指定すると上限なし @return 指定時間内にスレッドが待機状態になった場合はその時点で 処理を復帰し、true を返す。 依然稼働中なら false を返す。 */ bool wait(bool bWaitAll, int waitMilliSeconds); private: class Impl; Impl *impl; }; } // namespace #endif<file_sep>#ifndef STDAFX_H #include <algorithm> #endif #include "stdfnc.h" #include "window_impl.h" namespace basis { Window * Window::Impl::ConstructingInstance = nullptr; CriticalSection Window::Impl::m_cs; bool Window::Impl:: create() { if (m_thread.isBusy()) return false; m_cs.enter(); ConstructingInstance = parent; m_thread.addTask([this] { std::basic_string<TCHAR> className = TEXT("Address"); className.append(ToStr(reinterpret_cast<size_t>(this))); ATOM atom{ createWindowAtom(className.c_str(), &Dispatch) }; CreateWindowEx(0, reinterpret_cast<TCHAR*>(atom), 0, WS_OVERLAPPEDWINDOW, CW_USEDEFAULT, 0, CW_USEDEFAULT, 0, 0, 0, 0, 0); }); m_thread.wait(-1); m_cs.leave(); if (!m_h) return false; m_thread.addTask([this] { parent->run(); }); return true; } bool Window::Impl:: destroy() { if (m_h && !DestroyWindow(m_h)) return false; m_h = nullptr; return true; } void Window::Impl:: waitToEnd() { m_thread.wait(-1); } void Window::Impl:: join() { m_thread.join(); } void Window::Impl:: hook(IEventHandler * p) { if (p && !exist(p)) m_hook.push_front({ p,{} }); } void Window::Impl:: hook(Listener f) { m_hook.push_front({ nullptr, std::move(f) }); } void Window::Impl:: unhook(IEventHandler *p) { if (!p) return; for (auto i = m_hook.begin(); i != m_hook.end(); ++i) { if (i->first == p) { i = m_hook.erase(i); break; } } } int Window::Impl:: broadcast(Message msg, WPARAM wp, LPARAM lp) { int ret; for (auto &i : m_hook) { if (i.first) ret = i.first->onEvent(parent, msg, wp, lp); else ret = i.second(parent, msg, wp, lp); if (ret) return ret; } return 0; } int Window::Impl:: dispatch(Window *win, Message msg, WPARAM wp, LPARAM lp) const { return static_cast<int>(Dispatch(*win, 
static_cast<UINT>(msg), wp, lp)); } bool Window::Impl:: exist(const IEventHandler *p) { return p && std::any_of(m_hook.cbegin(), m_hook.cend(), [p](const Element& e) { return (e.first == p); } ); } ATOM Window::Impl:: createWindowAtom(const TCHAR *identifier, WNDPROC procedure) { WNDCLASSEX wc{ sizeof(WNDCLASSEX) }; // wc.cbClsExtra = // wc.hInstance = // wc.lpszMenuName = // wc.hIconSm = wc.lpfnWndProc = procedure; wc.cbWndExtra = sizeof(INT_PTR); // Window*を格納する wc.hIcon = static_cast<HICON>(LoadImage(0, IDI_APPLICATION, IMAGE_ICON, 0, 0, LR_DEFAULTSIZE | LR_SHARED)); wc.hCursor = static_cast<HCURSOR>(LoadImage(0, IDC_ARROW, IMAGE_CURSOR, 0, 0, LR_DEFAULTSIZE | LR_SHARED)); wc.style = CS_HREDRAW | CS_VREDRAW | CS_DBLCLKS; wc.hbrBackground = static_cast<HBRUSH>(GetStockObject(WHITE_BRUSH)); wc.lpszClassName = identifier; ATOM atom{ RegisterClassEx(&wc) }; if (atom == 0) { DWORD const error_class_atom_conflicted = 0x582; assert(GetLastError() != error_class_atom_conflicted); } return atom; } // システムコールバック。インスタンスにフックされたリスナに配信する LRESULT Window::Impl:: Dispatch(HWND hWnd, UINT msg, WPARAM wp, LPARAM lp) { Window *p{ GetInstance(hWnd) }; int ret = p ? p->impl->broadcast(static_cast<Message>(msg), wp, lp) : 0; return ret ? ret : DefWindowProc(hWnd, msg, wp, lp); } Window * Window::Impl:: GetInstance(HWND hWnd) { // ハンドルからインスタンスを取得 Window *pInst = reinterpret_cast<Window*>(GetWindowLongPtr(hWnd, GWLP_USERDATA)); // 生成時の最初の配信で初期設定 if (!pInst && ConstructingInstance) { pInst = ConstructingInstance; ConstructingInstance = nullptr; pInst->impl->m_h = hWnd; // インスタンスのアドレスを仕込む SetWindowLongPtr(hWnd, GWLP_USERDATA, reinterpret_cast<LONG_PTR>(pInst)); } return pInst; } } // namespace <file_sep>#pragma once #ifndef GUID_6B9CCE0D683A4A7BBBD72E7BFCEF70E0 #define GUID_6B9CCE0D683A4A7BBBD72E7BFCEF70E0 namespace basis { /*! Enumlator template class interface. 
*/
template<typename T> class IEnumlator {
public:
	//! Returns the current element.
	virtual T& get() = 0;
	//! Advances to the next element; false when exhausted.
	virtual bool next() = 0;
	//! True when there is nothing to enumerate.
	virtual bool empty() const = 0;
	virtual ~IEnumlator() {}
};

} // namespace
#endif
<file_sep>#ifndef STDAFX_H
#include <algorithm>
#endif
#include "memory_status.h"
#include "ids.h"
#include "loader.h"
#include "profile.h"
#include "filer.h"
#include "list_item.h"

namespace image_viewer {

//! Reads the preload limits and memory cap from the profile and sizes the
//! worker pool to the CPU count (capped at 32 threads).
CImageViewer::Loader::Loader(CImageViewer& parent_) :
	parent(parent_),
	m_maxPreload(parent.profile->general().load(ID::LOADER_RANGE_MAX, 20)),
	m_minPreload(parent.profile->load(ID::LOADER_RANGE_MIN, 4)),
	m_memoryCapMegaBytes(parent.profile->load(ID::LOADER_MEMORY_CAP, 0))
{
	SYSTEM_INFO si;
	GetSystemInfo(&si);
	if (0 < si.dwNumberOfProcessors) {
		m_threads.setThreadCount((std::min<int>)(si.dwNumberOfProcessors, 32));
	}
}

// bWait = true  : load the image synchronously; returns a failure status on a load error.
// bWait = false : start preloading in the background; returns a failure status on a load error.
// (Despite the old comment mentioning "false", the result is a Status enum.)
CImageViewer::Loader::Status CImageViewer::Loader::
loadImage(iterator itr, bool bWait)
{
	assert(itr != parent.filer->end());

	if (bWait) {
		// Block until any in-flight load of this item settles.
		waitIfLoading(itr);
	}
	else {
		if (isLoading(itr)) return Status::Loading;
	}
	if (itr->get()->isLoaded()) return Status::Finished;
	if (itr->get()->isLoadingFailed()) return Status::Failed;

	if (beginLoad(itr) == false)
		return Status::Failed;
	if (!bWait)
		return Status::Loading;

	// Synchronous path: wait for the queued task we just started.
	waitIfLoading(itr);
	if (itr->get()->isLoaded()) return Status::Finished;
	if (itr->get()->isLoadingFailed()) return Status::Failed;
	return Status::CannotOpen;
}

// Queues a load task to run on a worker thread.
// A manual-reset event is registered in m_loading and signaled when the task
// completes; the viewer is then notified via a LOADER_IMAGE_LOADED command.
bool CImageViewer::Loader::
beginLoad(iterator itr) try
{
	m_cs.enter();
	m_loading.emplace_back(itr, CreateEvent(0, 1, 0, 0));
	m_cs.leave();

	m_threads.addTask([itr, this]() {
		itr->get()->loadImage(parent.m_dir + itr->get()->fileName());
		// Signal the event paired with this iterator, if still registered.
		m_cs.enter();
		for (auto &&i : m_loading) {
			if (i.first == itr) {
				SetEvent(i.second);
				break;
			}
		}
		m_cs.leave();
		parent.post(WM::COMMAND,
			static_cast<int>(ID::LOADER_IMAGE_LOADED), 0);
	});
	return true;
}
catch (std::exception &e) {
	MessageBoxA(0, e.what(), 0, 0);
	return false;
}
bool CImageViewer::Loader:: waitIfLoading(iterator itr, int time) { // m_loadingの要素を削除する責任がある // 待機完了したら削除すること DWORD t = time < 0 ? INFINITE : time; auto cs = m_cs.local(); for (auto &&i = m_loading.begin(); i != m_loading.end(); ++i) { if (i->first != itr) continue; cs.leave(); if (WaitForSingleObject(i->second, t) == WAIT_TIMEOUT) return false; cs.enter(); CloseHandle(i->second); m_loading.erase(i); break; } return true; } bool CImageViewer::Loader:: waitIfAnyImageIsLoading() { // waitIfLoadingを呼び出さないとロードタスクは更新されない。 auto cs = m_cs.local(); while (m_loading.size() > 0) { iterator itr = m_loading.front().first; cs.leave(); if (waitIfLoading(itr, 3000) == false) return false; cs.enter(); } return true; } void CImageViewer::Loader:: preloadAround(iterator iter) { constexpr int megabytes = 1024 * 1024; int avail; if (m_memoryCapMegaBytes) { basis::ProcessMemoryStatus pms(megabytes); pms.update(); avail = m_memoryCapMegaBytes - static_cast<int>(pms.usage()); } else { basis::SystemMemoryStatus sms(megabytes); sms.update(); avail = static_cast<int>(sms.avail()) - 200; // 200MBは残す } auto &f = parent.filer; iterator iForward = iter; iterator iBackward = iter; iterator *ref = nullptr; // 優先度順にロード for (int i = 0; i < m_maxPreload * 2; ++i) { ref = (i % 2 == 0) ? &iForward : &iBackward; iter = f->move(*ref, (i % 2 == 0) ? 
1 : -1); if (iter == *ref) continue; if (iter->get()->isLoadingFailed()) { f->erase(iter); continue; } if (avail < 10 && i >= m_minPreload * 2) break; *ref = iter; // インクリメント if (!iter->get()->isLoaded()) { loadImage(iter, false); avail -= (std::max)(iter->get()->weight / megabytes, 20); } } } void CImageViewer::Loader:: helper_release(iterator itr, bool bPerform, bool bMark) { assert(itr != parent.filer->end()); iterator iEnd = std::next(parent.filer->move(itr, m_maxPreload)); for (iterator i = parent.filer->move(itr, -m_maxPreload); i != iEnd; ++i) { bool *bUnload = &i->get()->bUnload; if (bPerform) { if (*bUnload) releaseIfOk(i); } else { *bUnload = bMark; } } } void CImageViewer::Loader:: releaseIfOk(iterator itr) { assert(itr != parent.filer->end()); waitIfLoading(itr); itr->get()->unload(); } } // namespace<file_sep>#pragma once #ifndef GUID_26B3AE867F134EBEBDD64CF3A85974B6 #define GUID_26B3AE867F134EBEBDD64CF3A85974B6 #ifndef STDAFX_H #include <wtypes.h> #endif namespace basis { class Window; enum class Message : int; /*! ウィンドウメッセージのリスナインタフェース. ウィンドウクラスの各インスタンス、イベントリスナはこれを継承・実装する。 */ class IEventHandler { public: virtual ~IEventHandler() = default; virtual int onEvent(Window *, Message, WPARAM, LPARAM) = 0; }; } // namespace #endif <file_sep>#pragma once #ifndef GUID_E7FBC9207E264659B8BAC33A417692F2 #define GUID_E7FBC9207E264659B8BAC33A417692F2 #ifndef STDAFX_H #include <wtypes.h> #include <assert.h> #include <memory> #endif namespace basis { /*! Critical section for class members. If this object was declared as a static class member, using enter and leave function makes codes accessing other static members to be thread-safe. If this object is a non-static member, other non-static members can be thread-safe using this. @code class foo { public: foo() : var(0) {} int get() { // Standard way to enter and leave. m_cs.enter(); return m_var++; m_cs.leave(); } int resetIfGreaterThan(int n) { // Simple way to enter and leave automatically. 
auto cs = m_cs.local(); if (m_var <= n) return get(); return m_var = 0; } private: CriticalSection m_cs; int m_var; }; @endcode local() function create another CriticalSection object that shares core instance with origin. This scope-local CriticalSection created by local() function has independent counter for enter/leave. Thus local object leave critical section rightly as it scoped out. Making m_var chaged to static member needs that both of m_var and m_cs declared as static. So, all foo objects can share m_var resource thread-safely. @note When a class is copied or moved, CriticalSection objects of the class should be still unmoved. */ class CriticalSection { public: //! Default constructor //! @exception std::bad_alloc CriticalSection() : nEnter(0), bAutoLeave(false), m_p(new Instance){} CriticalSection(CriticalSection&) = delete; CriticalSection&operator=(CriticalSection&) = delete; CriticalSection(CriticalSection&& rhs) noexcept = default; CriticalSection&operator=(CriticalSection&&rhs) noexcept = default; /*! Destructor. If object is created by local, it makes leave() automatically while counter is greater than 0. Despite above case, we'd get to be claimed by assert() if not made leave() manually. Assertion code will claim if all of these are true. @li Valid object, not to be moved. @li Not an object created by local(). @li Counter is not 0. */ ~CriticalSection() noexcept { if (!m_p) return; // After moved. if (bAutoLeave) { // Made by local() while (nEnter > 0) leave(); } if (nEnter != 0) // Check our fault. assert(nEnter == 0);// <Set break point to debug. } /*! Enter critical section. There's a chance to be waited before entering. This function increments inner counter. */ CriticalSection& enter() noexcept { if (m_p) { EnterCriticalSection(&m_p->cs); ++nEnter; } return *this; } /*! Provides critical section object that is local to the scope. Created object has a independet inner counter that is entered 1 time and will make its leave as it scoped out. 
We should catch a returned value not to be soonly destructed because created object is temporal. */ CriticalSection local() const noexcept { CriticalSection cs(this->m_p); cs.enter(); return cs; } /*! Leaves critical section. This function decrements inner counter. */ void leave() noexcept { if (m_p) { LeaveCriticalSection(&m_p->cs); --nEnter; } } /*! Enter critical section if possible. It works as enter() if it is immediately possible and returns true.<br /> If not, this function returns false without waiting. */ bool tryEnter() noexcept { if (!m_p || TryEnterCriticalSection(&m_p->cs) == 0) return false; ++nEnter; return true; } private: //! Critical section core instance. struct Instance { CRITICAL_SECTION cs; Instance() noexcept { InitializeCriticalSection(&cs); } ~Instance() { DeleteCriticalSection(&cs); } }; // for local() function CriticalSection(std::shared_ptr<Instance> p) noexcept : m_p(p), nEnter(0), bAutoLeave(true) {} //! Refference to instance. std::shared_ptr<Instance> m_p; //! Enter/Leave counter int nEnter; /*! Whether it should leave automaticaly. Only instance created by local() function have it of true. */ bool bAutoLeave; }; } // namespace #endif <file_sep>#include "movable.h" namespace basis { CMovable::CMovable(CMovable *parent) : CMovable(parent, {}, {}) {} CMovable::CMovable(CMovable *parent, Point pos, Size size) : m_parent(parent), m_pos(pos), m_size(size) {} CMovable::~CMovable() {} CMovable::CMovable(const Rect& rc) : CMovable(nullptr, rc.lefttop(), rc.size()) {} CMovable& CMovable::operator=(const Rect &rc) { moveTo(rc.lefttop()); width(rc.width()); height(rc.height()); return *this; } //! 絶対座標を返す。 Point CMovable::pt() { auto cs = m_cs.local(); if (!m_parent) return m_pos; return m_parent->pt() + m_pos; } //! 相対座標に移動する。 bool CMovable::setPos(Point pos) { if (!isMovableTo(pos)) return false; forceSetPos(pos); return true; } //! 現在位置から相対移動する。 bool CMovable::move(basis::Size amount) { return setPos(m_pos + amount); } //! 絶対座標に移動する。 /*! 
@return 位置変更に成功したかどうか。 */ bool CMovable::moveTo(Point pt) { return setPos(posFromPt(pt)); } //! 指定座標に移動可能かどうかを返す。 /*! この関数をオーバーライドすることで移動範囲を制限する。 */ bool CMovable::isMovableTo(Point pos) { (void)pos; return true; } //! 4頂点の絶対座標を返す Rect CMovable::rect() { Point p(pt()); return{ p.x, p.y, p.x + m_size.x, p.y + m_size.y }; } //! 絶対座標を相対座標に変換 Point CMovable::posFromPt(Point pt) { auto cs = m_cs.local(); return (m_parent) ? pt - m_parent->pt() : pt; } //! 相対座標を絶対座標に変換 Point CMovable::ptFromPos(Point pos) { auto cs = m_cs.local(); return (m_parent) ? m_parent->pt() + m_pos : pos; } } // namespace<file_sep>#pragma once #ifndef GUID_84F71DC04FF2434A8E59D7163851E713 #define GUID_84F71DC04FF2434A8E59D7163851E713 #ifndef STDAFX_H #include <string> #endif #include "imemory.h" #include "iunit_test.h" namespace basis { UNIT_TEST(StringBuffer) /*! String class is a buffer aimed to cooperate with APIs. It can contain null terminated string. Although it needs to count its size in every functions, it is capable to return a non-const pointer to data. This object can contain a pointer to string both of dynamic or static. */ class StringBuffer { public: //! Maximum size of a buffer, in count of TCHAR. static const size_t CharLimit; //! Indicates an error while finding elements. static constexpr size_t npos = static_cast<size_t>(-1); /*! Default Constructor. It will allocate a buffer that has specified size. If max_buf is 0, then 1 will be used instead. Allocated buffer is guaranteed that first character is null character. */ StringBuffer(size_t max_buf = 1) : StringBuffer(max_buf, nullptr) {} /*! Copies a string or created as reference to a static string. If src is a nullptr, this constructor works as the same as StringBuffer(size_t max_buf). If max_buf is 0 and src is not nullptr, this object behave like alias of src. In that case, it creates no buffer and unable to modify the string. Otherwise, it creates buffers and copies string as possible as it can have. 
*/ StringBuffer(size_t max_buf, const TCHAR *src); //! Copies a string. StringBuffer(const TCHAR *src) : StringBuffer(length(src) + 1, src) {} ~StringBuffer() = default; //! @exception std::bad_alloc StringBuffer(const StringBuffer &src) : StringBuffer(src.capacity(), src.c_str()) {} //! @exception std::bad_alloc StringBuffer&operator=(const StringBuffer &src) { alloc(src.capacity()); append(src.c_str()); return *this; } StringBuffer(StringBuffer&&) = default; StringBuffer&operator=(StringBuffer&&) = default; //! @exception std::bad_alloc //! @exception std::runtime_error StringBuffer&operator=(const TCHAR *src); bool operator==(const TCHAR *rhs) const noexcept; bool operator==(const StringBuffer &rhs) const noexcept { return *this == rhs.c_str(); } bool operator!=(const TCHAR *rhs) const noexcept { return !(*this == rhs); } bool operator!=(const StringBuffer &rhs) const noexcept { return !(*this == rhs.c_str()); } //! Gets to be a reference to a static string. const TCHAR *refer(const TCHAR*str); //! Returns a const pointer to the string. const TCHAR *c_str() const noexcept { return m_p ? m_p : TEXT(""); } /*! Returns a non-const pointer to the string. One can read and write using returned pointer. @exception std::logic_error This is a static string. */ TCHAR *data(); /*! Returns capacity of the dynamic string. If string is static, this function returns 0. Otherwise, it indicates maximum size that the buffer can contain, including null terminator. */ size_t capacity() const noexcept { return m_buf.capacity() / sizeof(TCHAR); } /*! Returns size of string despite null terminator. */ size_t getSize() const noexcept; //! Shrinks buffer size to fit a contained string. //! If string is static, this function returns false. bool shrinkToFit() noexcept; //! ReAllocates a buffer and copies a string. //! May throw if any error rose. void realloc(size_t s); /* Releases a string and allocate another buffer. 
If a parameter was set to be 0, this function makes buffer size doubled. In this case, it may throw std::logic_error if the string is static, its capacity is 0 in other words. Also, this function may throw std::bad_alloc if any error rose in allocating a new buffer. */ void resize(size_t s = 0); /* Reallocate a buffer if capacity is shorter than one desired. It makes no change to contained string. This function may throw while reallocating. */ void let(size_t desired_capacity); //! Return whether buffer is empty or not. /*! Return true when buffer is mis-allocated as well as its size is equal to 0. */ bool empty() const noexcept; /*! Fill each field of bytes with 0. Return false if buffer didn't have a dynamic string. */ void flush() noexcept; /*! Compare to strings. @param p To which compared. @param n Number of char to be comapred. @param pos Offset of this string. @return Return true if both of them contains the same char, that of specified number. */ bool compare(const TCHAR *p, size_t n = 0, size_t pos = 0) const noexcept; /*! Finds up a char first appears in this string. @param c char to be searched. @param pos starting position [0, size()] @return Index of a char. @pre pos <= size() */ size_t find(TCHAR c, size_t pos = 0) const { return find(c, pos, true); }; /*! Finds up a char last appears in this string. @param c char to be searched. @param pos starting position [0, size()] @return Index of a char. */ size_t rfind(TCHAR c, size_t pos = 0) const { return find(c, pos, false); } /*! Finds up a chunk of chars first appear. @param p null terminated string that includes chars to be searched. @param pos stating position [0, size()] @param n number of chars to be searched. If this is 0, length of p string will be used instead. */ size_t find(const TCHAR *p, size_t pos = 0, size_t n = 0) const { return find(p, pos, n, true); } /*! Finds up a chunk of chars last appear. @param p a string that includes chars to be searched. 
@param pos starting position [0, size()] @param n number of chars to be searched. @pre p is null terminated or (n != 0 && n <= length(p)) */ size_t rfind(const TCHAR *p, size_t pos = 0, size_t n = 0) const { return find(p, pos, n, false); } StringBuffer substr(size_t pos, size_t n) const; /*! Adds a string. */ StringBuffer& append(const TCHAR *str) { return write(getSize(), str, length(str)); } StringBuffer& append(const TCHAR *str, size_t n) { return write(getSize(), str, n); } /*! Overwrite by a string. If buffers are too short, this function call let() to expand buffers. @param pos position to start. [0, size()] @param n number of char to be transfered. */ StringBuffer& write(size_t pos, const TCHAR *str, size_t n); /*! Translate the string to wide char string. If _UNICODE is defined, this will simply copy the string. */ std::wstring toUTF16() const; /*! Count of chars to reach the index. This function is for SJIS, single and multibyte character mixed string. */ size_t count() const { return count(npos); } size_t count(size_t cb) const; /*! Returns size of a string terminated with null char. This is a wrapper function of _tcslen and _tcsnlen. @return If buf is nullptr then this function returns 0. Otherwise, size of a string in count of TCHAR. */ static size_t length(const TCHAR *buf, size_t cap = 0) noexcept; private: //! @exception std::bad_alloc void alloc(size_t size); //! Finds up a char first or last appears. size_t find(TCHAR c, size_t pos, bool first) const; //! Finds up a chunk of chars first or last appears. 
size_t find(const TCHAR *p, size_t pos, size_t n, bool first) const; static bool copy(TCHAR *buf, size_t max_buf, const TCHAR *str, size_t max_str) noexcept; TCHAR *m_p; CMemory m_buf; mutable size_t m_size; // npos if it's not known yet }; } // namespace #endif <file_sep>#include "window_hook.h" #include "ievent_handler.h" #ifndef STDAFX_H #include <algorithm> #endif namespace basis { void WindowHook:: push(IEventHandler * p) { if (p && !exist(p)) listeners.push_front({ p, {} }); } void WindowHook:: push(Function f) { listeners.push_front({ nullptr, std::move(f) }); } void WindowHook:: unhook(IEventHandler *p) { if (!p) return; auto i = listeners.begin(); while (i != listeners.end()) { if (i->first == p) i = listeners.erase(i); else ++i; } } void WindowHook:: clear() { listeners.clear(); } int WindowHook:: dispatch(Window *win, Message msg, WPARAM wp, LPARAM lp) const { int ret; for (auto &i : listeners) { if (i.first) ret = i.first->onEvent(win, msg, wp, lp); else ret = i.second(win, msg, wp, lp); if (ret) return ret; } return 0; } bool WindowHook:: exist(const IEventHandler *p) const { return p && std::any_of(listeners.cbegin(), listeners.cend(), [p](const Element& e) { return (e.first == p); } ); } } // namespace <file_sep>/*!///////////////////////////////////////////////////////////////////////////// @file Defines WinMain function, entry point of the program. This function will lanch the application class. Unit tests would be performed when WinMain is called if it was compiled in debug mode(_DEBUG is defined). Unit tests are declaired in their headers, xxxx.h, and defined in test_xxxx.cc files. 
//////////////////////////////////////////////////////////////////////////////*/ #include "imemory.h" #include "singleton.h" #include "image_viewer.h" namespace { void CheckLeaks() { #ifdef _DEBUG auto i = basis::HeapMemory::TotalAmount(); // Images auto j = basis::CMemory::TotalAmount(); // Others if (i || j) throw 0; #endif } void PerformUnitTest() { #ifdef _DEBUG for (auto e = IUnitTest::GetEnumlator(); e.next(); ) { assert(e.get()->test()); CheckLeaks(); } #endif } } // namespace int WINAPI WinMain(HINSTANCE, HINSTANCE, LPSTR, int nShow) { // Declairation to perform leak-checking when the program ends. _CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF); PerformUnitTest(); image_viewer::CImageViewer().create().show(nShow).waitToEnd(); CheckLeaks(); // Finalizes singleton objects not to be caught by CrtDbg. basis::SingletonFinalizers::Finalize(); return 0; } <file_sep>#include "cursor.h" namespace basis { UNIT_TEST_FUNC(Cursor) { Cursor s; if (s.set(Cursor::ID::CROSS) == false) throw 0; auto pos = s.pos(); if (s.pos(pos) == false) throw 0; s.reset(); return true; } } // namespace<file_sep>#pragma once #ifndef GUID_02A9C3213D894D5198B860AA07BEC6D7 #define GUID_02A9C3213D894D5198B860AA07BEC6D7 #ifndef STDAFX_H #pragma comment( lib, "Gdiplus.lib" ) #pragma warning(push) #pragma warning(disable:4458) #include <Gdiplus.h> #pragma warning(pop) #endif #include "singleton.h" namespace basis { /*! Impl. 生成・解体時にGDI+の初期化・終了処理を行うクラス GdiplusInit 上記クラスにシングルトンパターンでアクセスする。 最初に生成したときに初期化を、 シングルトンの解体時(アプリ終了時)に終了の処理を行う。 GDI+を利用するアプリは、GdiplusInitを生成すること。 実体はシングルトンオブジェクトとして残るので、 すぐに解体して構わない。 */ class GdiplusInit { class Impl; using Instance = singleton<Impl>; public: GdiplusInit() { Instance::get(); } ~GdiplusInit() {} bool available() { return Instance::get().available(); } void uninit() { Instance::get().uninit(); } private: //! 
隠蔽されたシングルトンな内部実装 class Impl { public: Impl() { init(); } ~Impl() { uninit(); } void uninit() { if (m_token) return; Gdiplus::GdiplusShutdown(m_token); m_token = 0; } bool available() { return result_startup == Gdiplus::Ok; } private: void init() { Gdiplus::GdiplusStartupInput input; // コンストラクタ付き構造体 result_startup = Gdiplus::GdiplusStartup(&m_token, &input, &m_startup); } Gdiplus::Status result_startup; ULONG_PTR m_token; Gdiplus::GdiplusStartupOutput m_startup; }; }; } // basis #endif<file_sep>#pragma once #ifndef GUID_E47B5B4E32ED48FF9396F82D9615F6B1 #define GUID_E47B5B4E32ED48FF9396F82D9615F6B1 #ifndef STDAFX_H #include <wtypes.h> #include <vector> #endif #include "types.h" namespace basis { // メニュー。 // 解体時に所有権のあるメニュー・サブメニューは解体される。 class CPopupMenu { public: CPopupMenu() : CPopupMenu(CreatePopupMenu()) {} virtual ~CPopupMenu(); CPopupMenu(const CPopupMenu&) = default; CPopupMenu& operator=(const CPopupMenu&) = default; CPopupMenu(CPopupMenu&&rhs) noexcept { *this = std::move(rhs); } CPopupMenu& operator=(CPopupMenu&&rhs) noexcept; CPopupMenu(HMENU h); // 項目をpositionの前に挿入 // 末尾なら0または(UINT)-1にすること void insert(int position, int id, const TCHAR *title); // セパレータを挿入 void insertSeparator(int position); // 子メニューをpositionの前に挿入 // メニューハンドルは親メニューに委譲される void insert(int position, CPopupMenu& submenu, const TCHAR *title); // 指定されたidを持つ項目を取り除く void erace(int id) noexcept; // すべての項目、サブメニューを切り離す(破壊されない) void detach() noexcept; // メニューの所有権を放棄し、ハンドルをかえす HMENU release() noexcept; // 変更を反映する。track前に必ず実行すること void redraw(HWND) const; /*! ショートカットメニューを表示する. 表示中は処理がブロックされる。 @param position 表示するスクリーン座標 @return item ID, if user selected. Otherwise, 0. 
*/ int track(HWND hWnd, basis::Point pos) const; // first~lastのうちidだけを選択状態にする void radio(int first, int last, int id) const; // チェックマーク void select(int id) const; void clear(int id) const; bool isSelected(int id) const; bool isRadioButton(int id) const; bool invert(int id) const; // 戻り値はisSelected // 有効化/無効化 void enable(int id) const; void disable(int id) const; bool isEnable(int id) const; bool invertEnable(int id) const; // 戻り値はisEnable protected: int state(int id, int stat = -1) const; int stateHelper(int id, int flag, int modify) const; HMENU m_h; MENUITEMINFO *mi; }; } // namespace #endif <file_sep>#ifndef STDAFX_H #include <string> #include <tchar.h> #endif #include "imemory.h" namespace { bool wrong_capa(std::unique_ptr<basis::IMemory>& p, size_t desired) { if (p->capacity() < desired) return true; if (p->capacity() >= desired + sizeof(intptr_t)) return true; return false; } } namespace basis { UNIT_TEST_FUNC(IMemory) { auto prev_usage_cmemory = CMemory::TotalAmount(); auto prev_usage_hmemory = HeapMemory::TotalAmount(); decltype(prev_usage_cmemory) usage; std::basic_string<TCHAR> sz{ TEXT("abcdefghijklmnopqrstuvwxyz") }; for (int type = 0; type < 3; type++) { std::unique_ptr<IMemory> p; switch (type) { case 0: p.reset(new GlobalMemory); break; case 1: p.reset(new HeapMemory); break; case 2: p.reset(new CMemory); break; } // 空メモリの動作チェック if (p->capacity() != 0) throw 0; p->reset(); // do nothing try { p->address(); // must throw return false; } catch(...) 
{} // 割り当て size_t size = sizeof(TCHAR) * (sz.size() + 1); if (!p->alloc(size)) throw 0; if (wrong_capa(p, size)) throw 0; auto ptr = static_cast<TCHAR*>(p->address()); if (ptr == nullptr) throw 0; // 書き込み *ptr = 0; _tcscpy_s(ptr, p->capacity() / sizeof(TCHAR), sz.c_str()); if (_tcslen(ptr) != sz.size()) throw 0; if (sz != ptr) throw 0; ptr = static_cast<TCHAR*>(p->address()); // 拡張 size = sizeof(TCHAR) * 50; if (p->realloc(size) == false) throw 0; ptr = static_cast<TCHAR*>(p->address()); if (wrong_capa(p, size)) throw 0; if (_tcslen(ptr) != sz.size()) throw 0; if (sz != ptr) throw 0; // 縮小 size = sizeof(TCHAR) * (sz.size() + 1); if (p->realloc(size) == false) throw 0; ptr = static_cast<TCHAR*>(p->address()); if (wrong_capa(p, size)) throw 0; if (sz != ptr) throw 0; // 再確保 size = sizeof(TCHAR) * (sz.size() * 2 + 1); if (p->alloc(size) == false) throw 0; if (wrong_capa(p, size)) throw 0; if (p->address() == nullptr) throw 0; // リークチェック用サイズの動作確認 p->reset(); usage = HeapMemory::TotalAmount(); if (usage != prev_usage_hmemory) throw 0; usage = CMemory::TotalAmount(); if (usage != prev_usage_cmemory) throw 0; } return true; } } // namespace<file_sep>#ifndef STDAFX_H #endif #include "popup_menu.h" #include "exception.h" namespace basis { CPopupMenu::CPopupMenu(HMENU h) : m_h(h), mi(new MENUITEMINFO{ sizeof(MENUITEMINFO) }) { if (h == 0) throw std::runtime_error(LOCATION); } CPopupMenu::~CPopupMenu() { if (m_h) DestroyMenu(m_h); delete mi; } CPopupMenu& CPopupMenu::operator=(CPopupMenu&&rhs) noexcept { std::swap(m_h, rhs.m_h); return *this; } void CPopupMenu::insert(int id_pos, int id, const TCHAR *str) { UINT const flag = (str ? 
MF_STRING : MF_SEPARATOR); // 0のままだと順番がおかしくなるので確実に末尾にする if (!id_pos) id_pos = -1; BOOL const success = InsertMenu(m_h, static_cast<UINT>(id_pos), flag, static_cast<UINT>(id), str); if (!success) { throw api_runtime_error(); } } void CPopupMenu::insertSeparator(int id_pos) { insert(id_pos, 0, 0); } void CPopupMenu::insert(int id_pos, CPopupMenu& submenu, const TCHAR *str) { mi->fMask = MIIM_STRING | MIIM_SUBMENU; mi->dwTypeData = const_cast<TCHAR*>(str); mi->hSubMenu = submenu.m_h; // 0のままだと順番がおかしくなるので確実に末尾にする if (!id_pos) id_pos = -1; BOOL success = InsertMenuItem(m_h, static_cast<UINT>(id_pos), FALSE, mi); if (!success) { throw api_runtime_error(); } submenu.m_h = nullptr; // ハンドルを渡した } int CPopupMenu::track(HWND hWnd, basis::Point pos) const { UINT flag = TPM_RIGHTALIGN | TPM_TOPALIGN | TPM_RETURNCMD; int id = TrackPopupMenu(m_h, flag, pos.x, pos.y, 0, hWnd, nullptr); if (id) { PostMessage(hWnd, WM_COMMAND, id, 0); } return id; } bool CPopupMenu::isRadioButton(int id) const { mi->fMask = MIIM_FTYPE; mi->fType = MFT_RADIOCHECK; if (GetMenuItemInfo(m_h, static_cast<UINT>(id), FALSE, mi) == FALSE) throw 0; return (mi->fType & MFT_RADIOCHECK) != 0; } void CPopupMenu::select(int id) const { stateHelper(id, MFS_CHECKED, 1); } void CPopupMenu::clear(int id) const { stateHelper(id, MFS_CHECKED, 0); } bool CPopupMenu::isSelected(int id) const { return (stateHelper(id, MFS_CHECKED, 2) == 1); } bool CPopupMenu::invert(int id) const { return (stateHelper(id, MFS_CHECKED, -1) == 1); } void CPopupMenu::enable(int id) const { stateHelper(id, MFS_DISABLED, 0); } void CPopupMenu::disable(int id) const { stateHelper(id, MFS_DISABLED, 1); } bool CPopupMenu::isEnable(int id) const { return (stateHelper(id, MFS_DISABLED, 2) == 0); } bool CPopupMenu::invertEnable(int id) const { return (stateHelper(id, MFS_DISABLED, -1) == 0); } void CPopupMenu::radio(int first, int last, int select) const { BOOL const success = CheckMenuRadioItem(m_h, static_cast<UINT>(first), 
static_cast<UINT>(last), static_cast<UINT>(select), MF_BYCOMMAND); if (!success) throw api_runtime_error(); } void CPopupMenu::erace(int id) noexcept { RemoveMenu(m_h, static_cast<UINT>(id), 0U); } void CPopupMenu::detach() noexcept { if (m_h) { while (RemoveMenu(m_h, 0U, MF_BYPOSITION)); } } HMENU CPopupMenu::release() noexcept { HMENU const hMenu = m_h; m_h = nullptr; return hMenu; } void CPopupMenu::redraw(HWND hWnd) const { if (DrawMenuBar(hWnd) == FALSE) throw api_runtime_error(); } //--- private // stateを設定/取得(-1)する // 戻り値は新しいstate int CPopupMenu::state(int id, int stat) const { mi->fMask = MIIM_STATE; mi->fState = stat; BOOL success; if (stat == -1) { success = GetMenuItemInfo(m_h, static_cast<UINT>(id), FALSE, mi); } else { success = SetMenuItemInfo(m_h, static_cast<UINT>(id), FALSE, mi); } if (!success) { throw api_runtime_error(); } return mi->fState; } // チェック状態のOFF/ONを設定( 0/1 ) // 反転( -1 ) または取得する( 2 ) // 戻り値:新しいステート int CPopupMenu::stateHelper(int id, int flag, int modify) const { // 実行時のステート UINT stat = state(id); // ステートを変更 if (modify != 2) { UINT operation; if (modify == 0) operation = 0; // flagをoff else if (modify == -1) operation = ~stat; // flagを反転 else operation = flag; // flagをon stat = ((stat & ~flag) | (operation & flag)); state(id, stat); } // ステートのフラグ状態をかえす return (int)((stat & flag) != 0); } } // namespace<file_sep>#ifndef STDAFX_H #include <process.h> #include <algorithm> #include <list> #endif #include "exception.h" #include "critical_section.h" #include "thread.h" namespace basis { //! Private Implimentation of CThread. class CThread::Impl { public: Impl(); ~Impl() { release(); } //! Indices of array of handle. enum stat : BYTE { added, //!< Event; Tasks were added. tarminate, //!< Event; Destructor was called. ended, //!< Event; Thread finished tasks on the list. thread //!< Thread handle. }; //! Handles that is explained by stat. HANDLE h[4]; //! Adds a task into the list. 
void addTask(TaskTy f) { auto cs = m_cs.local(); m_tasks.push_back(std::move(f)); res(ended); set(added); } //! Waits specified event to be signaled. bool wait(stat s, DWORD t = INFINITE) const noexcept { return WaitForSingleObject(h[s], t) != WAIT_TIMEOUT; } private: static unsigned __stdcall ThreadEntryPoint(void *p) { while (reinterpret_cast<Impl*>(p)->threadProcedure()) ; return EXITCODE; } bool threadProcedure(); //! Set event to be signaled. void set(stat s) { SetEvent(h[s]); } //! Reset event. void res(stat s) { ResetEvent(h[s]); } void release() { for (;;) { // Waits all tasks finished. m_cs.enter(); if (!m_tasks.size()) break; m_cs.leave(); wait(ended); } set(tarminate); m_cs.leave(); h[thread] && wait(thread); for (auto i : h) { i && CloseHandle(i); } } //! for re/setting events and tasks. CriticalSection m_cs; //! FIFO list of tasks. std::list<TaskTy> m_tasks; }; CThread::Impl:: Impl() { for (int i = 0; i <= ended; i++) h[i] = CreateEvent(0, 1, (i <= tarminate) ? 0 : 1, 0); h[thread] = reinterpret_cast<HANDLE>(_beginthreadex(0, 0, ThreadEntryPoint, this, 0, 0)); // Check validity. if (std::any_of(h, &h[_countof(h)], [](HANDLE i) { return i == 0; })) { release(); throw std::runtime_error(LOCATION); } } //! TaskPtr will be called back in ThreadEntryPoint() bool CThread::Impl:: threadProcedure() { //! Wait for added and tarminate WaitForMultipleObjects(2, &h[added], false, INFINITE); if (wait(tarminate, 0)) return false; // Break to tarminate. m_tasks.front().operator()(); // This thread will still be running if there's a next task. CriticalSection cs = m_cs.local(); m_tasks.pop_front(); if (m_tasks.empty()) { set(ended); res(added); } return true; // Continue looping. 
} // -------------------- CThread -------------------- // CThread::CThread() : impl(new Impl) {} CThread::~CThread() { delete impl; } void CThread::join() { delete impl; impl = nullptr; } void CThread::addTask(TaskTy f) { if (!f) throw std::invalid_argument(LOCATION); if (impl) impl->addTask(std::move(f)); } bool CThread::wait(int time) const noexcept { DWORD t = time < 0 ? INFINITE : static_cast<DWORD>(time); return impl ? impl->wait(impl->ended, t) : true; } HANDLE CThread::getWaitHandle() { return impl ? impl->h[impl->ended] : nullptr; } HANDLE CThread::getThreadHandle() { return impl ? impl->h[impl->thread] : nullptr; } } // namespace<file_sep>#include "mouse_drag.h" #include "window_message.h" #ifndef GET_X_LPARAM // windowsx.h #define GET_X_LPARAM(lp) ((int)(short)LOWORD(lp)) #endif #ifndef GET_Y_LPARAM #define GET_Y_LPARAM(lp) ((int)(short)HIWORD(lp)) #endif namespace { inline bool isStillPressing(WPARAM wp, UINT vkey) { if (vkey == VK_LBUTTON && (wp & MK_LBUTTON)) return true; if (vkey == VK_RBUTTON && (wp & MK_RBUTTON)) return true; return false; } inline basis::Point GetMousePosition(const LPARAM lp) { basis::Point p; p.x = GET_X_LPARAM(lp); p.y = GET_Y_LPARAM(lp); return p; } } // namespace namespace basis { CMouseDrag::CMouseDrag() : m_keys(0), m_vkey(0), m_state(STATE::BUTTON_UP) {} // BUTTONDOWN時にstate = MD_BUTTON_DOWN // MOUSEMOVE時にstate |= MD_MOUSE_MOVE // スレッショルドを越えてたら |= MD_MOUSE_DRAG // BUTTONUP時にstate |= MD_BUTTON_UP bool CMouseDrag::proc(HWND hWnd, basis::Message msg, WPARAM wp, LPARAM lp) { // ドラッグ検出 using WM = basis::Message; if (msg == WM::MOUSEMOVE) { updatePos(hWnd, lp); if (m_state & STATE::BUTTON_DOWN) { // 離された瞬間を検出できていない場合に備える if (!isStillPressing(wp, m_vkey)) { m_state = (m_state & ~STATE::BUTTON_DOWN) | STATE::BUTTON_UP; return false; } // すでにドラッグ中 if (isDragged()) return true; // スレッショルドチェック m_state |= STATE::MOUSE_MOVE; if (isOveringThreshold(m_pos)) { m_state |= STATE::MOUSE_DRAG; return true; } } return false; } // モニタ開始 if (msg 
== WM::LBUTTONDOWN || msg == WM::RBUTTONDOWN) { m_state = STATE::BUTTON_DOWN; m_start = m_prev = m_pos = GetMousePosition(lp); m_vkey = (msg == WM::LBUTTONDOWN) ? VK_LBUTTON : VK_RBUTTON; m_keys = static_cast<UINT>(wp); if (GetKeyState(VK_MENU)) { m_keys |= MK_MENU; } return true; } // モニタ終了 if (msg == WM::LBUTTONUP || msg == WM::RBUTTONUP) { // 対象のキーでなければ通知しない if (m_vkey == VK_LBUTTON && msg != WM::LBUTTONUP) return false; if (m_vkey == VK_RBUTTON && msg != WM::RBUTTONUP) return false; m_state = (m_state & ~STATE::BUTTON_DOWN) | STATE::BUTTON_UP; return true; } return false; } // m_prev, m_posを更新する // マルチモニタ環境での負数の座標値にLOWORDマクロは使えない Point& CMouseDrag::updatePos(HWND hWnd, const LPARAM lp) { m_prev = m_pos; m_pos = GetMousePosition(lp); // 絶対座標の保存 m_prev_scr = m_scr; POINT pt{ m_pos.x, m_pos.y }; ClientToScreen(hWnd, &pt); m_scr = static_cast<Point>(pt); return m_pos; } // スレッショルドを(すでに)越えたかどうか bool CMouseDrag::isOveringThreshold(Point p) { p.x -= m_start.x; p.y -= m_start.y; if (m_threshold.x > 0) { if (p.x >= m_threshold.x || p.x <= -m_threshold.x) return true; } if (m_threshold.y > 0) return (p.y >= m_threshold.y || p.y <= -m_threshold.y); return false; } int CMouseDrag::dx() { return isMoved() ? m_pos.x - m_prev.x : 0; } int CMouseDrag::dy() { return isMoved() ? m_pos.y - m_prev.y : 0; } basis::Size CMouseDrag::getDifference() { if (isMoved()) return static_cast<Size>(m_pos - m_prev); else return{}; } basis::Size CMouseDrag::getTravel() { if (isMoved()) return static_cast<Size>(m_pos - m_start); else return{}; } int CMouseDrag:: dx_abs() { return isMoved() ? m_scr.x - m_prev_scr.x : 0; } int CMouseDrag:: dy_abs() { return isMoved() ? m_scr.y - m_prev_scr.y : 0; } Size CMouseDrag::threshold(int dx, int dy) { if (dx != -1) { if (dy == -1) dy = dx; m_threshold.x = dx; m_threshold.y = (dy != -1) ? 
dy : dx; } return m_threshold; } } // namespace<file_sep>#pragma once #ifndef GUID_5D731DDEDE3F412C9EFF88A7D5DECB83 #define GUID_5D731DDEDE3F412C9EFF88A7D5DECB83 #ifndef STDAFX_H #include <wtypes.h> #include <memory> #endif /*! @file ハンドルのデリータの短縮エイリアスを定義する。 */ //! SignalHandle のデリータ struct DSignalHandle { void operator()(HANDLE h) const { if (h != INVALID_HANDLE_VALUE) { WaitForSingleObject(h, INFINITE); CloseHandle(h); } } }; //! 解体時に待機、終了処理を行うハンドル。主にイベントオブジェクト。 typedef std::unique_ptr <std::remove_pointer<HANDLE>::type, DSignalHandle> SignalHandle; //! ClosableHandle のデリータ struct DCloseHandle { void operator()(HANDLE h) const { if (h != INVALID_HANDLE_VALUE) CloseHandle(h); } }; //! 解体時に終了処理を行うハンドル。イベント等。 typedef std::unique_ptr <std::remove_pointer<HANDLE>::type, DCloseHandle> ClosableHandle; #endif<file_sep>#pragma once #ifndef GUID_65F96D9BE4D14C599276F442B3038637 #define GUID_65F96D9BE4D14C599276F442B3038637 #ifndef STDAFX_H #include <wtypes.h> #include <list> #include <functional> #endif namespace basis { class IEventHandler; class Window; enum class Message : int; //! ウィンドウフック /*! <p>メッセ―ジを受け取るイベントリスナをフックできるクラス。 </p> <p>ウィンドウクラスで使われ、ウィンドウ生成直後は ウィンドウ自身のリスナのみを格納することになる。 */ class WindowHook { public: using Function = std::function<int(Window*, Message, WPARAM, LPARAM)>; using Element = std::pair<IEventHandler*, Function>; using Container = std::list<Element>; WindowHook() = default; virtual ~WindowHook() = default; WindowHook(const WindowHook &) = default; WindowHook&operator=(const WindowHook &) = default; WindowHook(WindowHook &&) = default; WindowHook&operator=(WindowHook &&) = default; //! ウィンドウのリスナを登録 void push(IEventHandler *p); //! ウィンドウのリスナを登録 void push(Function f); //! リスナを除去する void unhook(IEventHandler *p); /*! メッセージを各リスナに(新しいほうから)配信する. @return 非0を返すものに出会ったらその値を返す。 すべてのリスナが0を返したとき、戻り値は0。 */ int dispatch(Window *win, Message msg, WPARAM wp, LPARAM lp) const; protected: //! 全消去 void clear(); //! 指定したリスナが登録されているか? 
bool exist(const IEventHandler *p) const; private: // IEventHandlerインタフェースを継承するクラスへのポインタ、 // もしくは関数オブジェクトとして、リスナをリスト化して管理 Container listeners; }; } // namespace #endif <file_sep>#pragma once #ifndef GUID_88E6B0C9D24C48378AB9ED3883518A73 #define GUID_88E6B0C9D24C48378AB9ED3883518A73 #ifndef STDAFX_H #include <functional> #endif #include "private_profile.h" #include "image_viewer.h" namespace image_viewer { enum class ID : int; class CImageViewer::Profile { public: Profile(); Profile(Profile&) = delete; bool exist(); bool isEnable() { return m_enable; } bool enable() { return setEnable(true); } bool disable() { return setEnable(false); } //! セクション指定 Profile& general() { return section(GENERAL); } //! セクション指定 Profile& menu() { return section(MENU); } //! セクション指定 Profile& window() { return section(WINDOW); } //! セクション指定 Profile& control() { return section(CONTROL); } /*! セクション内のすべての項目に関数オブジェクトを適用. 渡されるのは項目のIDと、それに対応する値文字列へのポインタ。 この関数はControlクラスからキーコマンドの割り付けに使用される。 */ void applyToAllItemInTheSection(std::function<void(ID, const TCHAR *)> f); //! キー名を返す const TCHAR * getKeyString(ID id); //! 言語ファイルにある対訳を返す。なければキー名を返す const TCHAR * getTranslatedString(ID id); //! ブーリアン型プロファイルを読む bool loadBoolean(ID id, bool bDefault); //! ブーリアン型プロファイルを書く bool saveBoolean(ID id, bool b); //! 整数型プロファイルを読む int load(ID id, int nDefault); //! 整数型プロファイルを書く bool save(ID id, int n); /*! 文字列型プロファイルを読む. なければsDefaultのコピーを返し、nullptrが返ることはない。 sDefaultはnullptrの場合、空文字列として扱われる。 */ const TCHAR * load(ID id, const TCHAR *sDefault); //! 
文字列型プロファイルを書く bool save(ID id, const TCHAR * value); private: bool setEnable(bool bEnable); Profile& section(const TCHAR *p) { if (!p) throw 0; m_prof.section(p); return *this; } const TCHAR * ToStr(bool b); struct Pair { ID id; const TCHAR *key; }; static Pair m_profile_ids[]; static constexpr TCHAR * const WINDOW = TEXT("Window"); static constexpr TCHAR * const CONTROL = TEXT("Control"); static constexpr TCHAR * const MENU = TEXT("Menu"); static constexpr TCHAR * const GENERAL = TEXT("General"); static constexpr TCHAR * const LANGUAGE = TEXT("Language"); static constexpr TCHAR * const kSettingFile = TEXT("setting.ini"); static constexpr TCHAR * const kLanguageFile = TEXT("language.ini"); bool m_enable; basis::CPrivateProfile m_prof, m_lang; }; } // namespace #endif <file_sep>/*! @file Defines the element of the container which CImageViewer class uses. CListItem class, the element, is only created from WIN32_FIND_DATA structure. See also CFindFile class that returns the structure. */ #pragma once #ifndef GUID_66F5CCBBF45C4752AB6FD2D76BD57BFF #define GUID_66F5CCBBF45C4752AB6FD2D76BD57BFF #ifndef STDAFX_H #include <string> #include <memory> #endif #include "iunit_test.h" #include "types.h" namespace basis { class CFilePath; } namespace image_viewer { UNIT_TEST(CListItem) //! List element of CImageViewer class CListItem { public: CListItem(const WIN32_FIND_DATA&); ~CListItem(); CListItem(CListItem &&) = default; CListItem&operator=(CListItem &&) = default; enum class Status : BYTE { Undefined = 0, NotExist = 1, CannotOpen = 2, SizeError = 4, MemoryError = 8, TypeError = 16, LoadError = 32, Loaded = 64, Release = 128, }; /*! 画像をロードする。 ファイルが開けなかった場合はStatus::CannotOpenを返す。 それ以外の理由で失敗した場合、m_typeをTYPE::Errorに設定し、 対応するエラーステータスを返す。 成功した場合はStatus::Loadedを返す。 */ Status loadImage(::basis::CFilePath path); //! Returns filename, not including directory's. const TCHAR * fileName() const; /*! Whether the file can be loaded. 
This function will return false after loading a file failed, except for the file was not accessible at the time. It is because of that the file might be loadable next. */ bool isLoadingFailed(); void unload(); bool isLoaded() const; //! ロード済みであれば対象のデバイスコンテキストに描画する bool draw(HDC hdc, const RECT& destination, const RECT& source); basis::Size size() const; //!< 画像サイズを返す basis::Rect rect() const; //!< 始点0,0の画像矩形を返す FILETIME ftAccess() const; FILETIME ftCreate() const; FILETIME ftWrite() const; // 外部使用。リスト内インデックス int index; // 概算メモリ使用量。ロード前のみファイルサイズで、ロードするたび更新 int weight; // 外部使用。ビットマップを解放するかどうかの判定に使う bool bUnload; private: class Impl; std::unique_ptr<Impl> impl; }; // class } // namespace #endif<file_sep>/* APIs Get/WritePrivateProfileString Windows NT 3.1 / 95 以降 (Winbase.h), Kernel32.lib Unicode:Windows NT/2000 は Unicode 版と ANSI 版を実装 */ #include "private_profile.h" namespace basis { CPrivateProfile:: CPrivateProfile(tstr path) : m_path(std::move(path)) {} CPrivateProfile& CPrivateProfile:: path(tstr path) { m_path = std::move(path); return *this; } CPrivateProfile& CPrivateProfile:: section(tstr sectionname) { m_section = std::move(sectionname); return *this; } bool CPrivateProfile:: getAllKeyNames() noexcept { if (m_path.empty() || m_section.empty()) return false; return do_read(m_section.c_str(), nullptr, nullptr); } bool CPrivateProfile:: getAllSectionNames() noexcept { if (m_path.empty()) return false; return do_read(nullptr, nullptr, nullptr); } const TCHAR *CPrivateProfile:: read(const TCHAR *name, const TCHAR *sDefault) { if (isInvalid(name)) { m_buf = sDefault; return m_buf.c_str(); } if (do_read(m_section.c_str(), name, sDefault) == false) { m_buf = StringBuffer(sDefault); } return m_buf.c_str(); } bool CPrivateProfile:: do_read(const TCHAR *section, const TCHAR *name, const TCHAR *def) { for(DWORD capacity, nRead; ;m_buf.resize()) { capacity = static_cast<DWORD>(m_buf.capacity()); nRead = GetPrivateProfileString(section, name, def, m_buf.data(), capacity, 
m_path.c_str()); if (nRead < capacity - 2) { if (nRead != 0) return true; // getAll~ functionでひとつも見つからなかった場合、 // 終端文字は最後のひとつしか付加されないので、補う。 if (capacity >= 2) { m_buf.data()[1] = TEXT('\0'); return true; } } } } bool CPrivateProfile:: write(const TCHAR *name, const TCHAR *value) { return (!isInvalid(name) && do_write(name, value)); } bool CPrivateProfile:: eraseKey(const TCHAR *name) { return (!isInvalid(name) && do_write(name, nullptr)); } bool CPrivateProfile:: clearSection() { if (m_path.empty() || m_section.empty()) return false; return do_write(nullptr, nullptr); } bool CPrivateProfile:: do_write(const TCHAR *name, const TCHAR *value) { return (FALSE != WritePrivateProfileString(m_section.c_str(), name, value, m_path.c_str())); } // IO時に使用するパラメータをテストし、無効なものがあればtrueを返す bool CPrivateProfile:: isInvalid(const TCHAR *name) { return (m_path.empty() || m_section.empty() || name == nullptr || *name == TEXT('\0')); } } // namespace<file_sep>#include "singleton.h" namespace basis { std::vector<SingletonFinalizers::Finalizer> SingletonFinalizers::maFinalizer; void SingletonFinalizers::Finalize() { while (!maFinalizer.empty()) { maFinalizer.back()(); maFinalizer.pop_back(); } } } // namespace<file_sep>#pragma once #ifndef GUID_34D3C18605834843B468B02CA31C7E47 #define GUID_34D3C18605834843B468B02CA31C7E47 #ifndef STDAFX_H #include <memory> #endif #include "types.h" namespace basis { /* Lapper-class for Device Contexts and those APIs associated with. */ class Surface { public: Surface() : Surface(0) {} Surface(HDC hdc) : m_h(hdc), m_default(hdc ? 
reinterpret_cast<HBITMAP>(-1) : 0), m_font(0) {} ~Surface() { reset(); } Surface(const Surface &s) = delete; Surface&operator=(const Surface &s) = delete; Surface(Surface &&s) { *this = std::move(s); } Surface&operator=(Surface &&s) { reset(s.m_h); m_default = s.m_default; m_font = s.m_font; s.m_h = 0; s.m_default = 0; s.m_font = 0; return *this; } explicit operator bool() const { return m_h != 0; } operator HDC() const { return m_h; } void reset() noexcept { reset(0); } /*! Creates itself as memory device context. */ void create(HDC src, Size s); bool isCompatible(HDC hdc); /*! Ensures compatibility and minimum size. Unless this object has the same src hdc AND adequate size, this function will call create(). @return If surface recreated, return value is true. @note If src was not the same hdc that has been specified when current bitmap had been created, this function creates new one even if two HDCs were actually compatible with each other. My personal test showed that the hdc returned by GetDC or BeginPaint was not changed while owner display was still the same one. */ bool compatible(HDC hdc, Size s); //! Selects GDI Pen object as current. //! @return Previous HPEN. HPEN pen(HPEN h) const noexcept; //! Selects GDI Pen object as current. //! @return Previous HPEN. HPEN pen(HGDIOBJ h) const noexcept; //! Selects GDI Brush object as current. //! @return Previous HBRUSH. HBRUSH brush(HBRUSH h) const noexcept; //! Selects GDI Brush object as current. //! @return Previous HBRUSH. HBRUSH brush(HGDIOBJ h) const noexcept; bool rectangle(const Rect& rc) const noexcept; HFONT setFont(HFONT); HFONT resetFont(); int getFontHeight(); //! Returns width and height of the Device Context Bitmap. Size size() const; //! Returns estimated memory usage in bytes. size_t usage() const; /*! Sets new bitmap as current. Previous bitmap would be released. 
*/ bool setBitmap(HBITMAP hBmp); bool transfer(HDC hdc, const Rect&dest, const Rect& src) const; static Size GetSize(HBITMAP h); protected: HBITMAP swapBitmap(HBITMAP hBmp); void reset(HDC hdc) noexcept; private: HDC m_h; mutable Size m_size; // Default bitmap. It must be set back before m_h deleted not to be leaked. // Also, If it is -1, m_h is not necessary to be deleted. HBITMAP m_default; HFONT m_font; }; } // namespace #endif<file_sep>#pragma once #ifndef GUID_B933196D89114B67BBF2F379F809436F #define GUID_B933196D89114B67BBF2F379F809436F #include "image_viewer.h" #include "thread_unity.h" #include "critical_section.h" namespace image_viewer { class CImageViewer::Loader { public: Loader(CImageViewer &parent_); ~Loader() = default; /*! 画像展開 @param bWait 展開終了まで待つかどうか @return 展開済み、または終了した場合 2 展開中の場合 1 読み込めなかった場合 0 ファイルにアクセスできない場合 -1 */ enum class Status : int { Finished = 2, Loading = 1, CannotOpen = -1, Failed = 0 }; Status loadImage(iterator itr, bool bWait); //! 範囲内の画像を事前展開開始 void preloadAround(iterator itr); //! 終了待ち bool isLoading(iterator itr) { return !waitIfLoading(itr, 0); } bool waitIfLoading(iterator itr) { return waitIfLoading(itr, -1); } bool waitIfLoading(iterator itr, int time); //! ロードタスクが0になるのを待つ bool waitIfAnyImageIsLoading(); //! キャッシュ解放 void releaseIfOk(iterator itr); void markToReleaseAround(iterator itr) { helper_release(itr, false, true); } void unmarkAround(iterator itr) { helper_release(itr, false, false); } void performReleaseAround(iterator itr) { helper_release(itr, true, true); } private: bool beginLoad(iterator itr); void helper_release(iterator itr, bool bPerform, bool bMark); CImageViewer &parent; const int m_maxPreload; const int m_minPreload; const int m_memoryCapMegaBytes; basis::CThreadUnity m_threads; basis::CriticalSection m_cs; std::list<std::pair<iterator, HANDLE>> m_loading; }; } // namespace #endif<file_sep>#pragma once #ifndef GUID_7D79D07FF2F14AA7BF2BDD0A976FCC45 #define GUID_7D79D07FF2F14AA7BF2BDD0A976FCC45 /*! 
@file CColor class definition. */ #include "types.h" #ifndef STDAFX_H #include <algorithm> #endif #include "iunit_test.h" UNIT_TEST(CColor) /*! Color expressed by A-RGB or A-HSV. Alpha express (1 - transparency). */ class CColor { double m_r, m_g, m_b, m_a; public: CColor() : CColor(0, 0, 0) {} CColor(double r, double g, double b, double a = 255.0) : m_r(r), m_g(g), m_b(b), m_a(a) {} CColor(int r, int g, int b, int a = 255) : m_r(static_cast<double>(r)), m_g(static_cast<double>(g)), m_b(static_cast<double>(b)), m_a(static_cast<double>(a)) {} operator COLORREF() { return RGB(static_cast<DWORD>(m_r), static_cast<DWORD>(m_g), static_cast<DWORD>(m_b)); } //! Red val. double red() { return m_r; } //! Green val. double green() { return m_g; } //! Blue val. double blue() { return m_b; } //! Red val. unsigned char r() { return static_cast<unsigned char>(m_r); } //! Green val. unsigned char g() { return static_cast<unsigned char>(m_g); } //! Blue val. unsigned char b() { return static_cast<unsigned char>(m_b); } //! Multiply each RGB members. CColor operator*(double d) { return { d * m_r, d * m_g, d * m_b , m_a }; } /*! Get the color if rhs is underneath. */ CColor underLay(CColor rhs) { if (m_a > 255 - 1) return *this; CColor temp{ *this * (m_a / 255) }; rhs = rhs * ((255 - m_a) / 255); temp.m_r += rhs.m_r; temp.m_g += rhs.m_g; temp.m_b += rhs.m_b; temp.m_a = m_a + (255 - m_a) * (rhs.m_a / 255); return temp; } //! Get hue, the direction of color. 色相 double hue() // [0, 360) { auto mm = std::minmax({ m_r, m_g, m_b }); auto &min = mm.first; auto &max = mm.second; double hue{ 60.0 }; if (m_r == m_g && m_g == m_b) { return .0; } if (m_r > m_b && m_r > m_g) { hue *= (m_g - m_b) / (max - min); } else if (m_g > m_b) { hue *= (m_b - m_r) / (max - min); hue += 120; } else { hue *= (m_r - m_g) / (max - min); hue += 240; } if (hue < 0) hue += 360; if (hue >= 360) hue -= 360; return hue; } //! Get saturation, the color freshness. 
彩度 double saturation() { auto mm{ std::minmax({m_r, m_g, m_b}) }; return 255.0 * (mm.second - mm.first) / mm.second; } //! Get value, means briteness. 明度 double value() { return (std::max)({ m_r, m_g, m_b }); } //! Get hue int. unsigned int h() { static_cast<unsigned int>(hue()); } //! Get saturation in u-char. unsigned char s() { static_cast<unsigned char>(saturation()); } //! Get value, briteness in u-char. unsigned char v() { static_cast<unsigned char>(value()); } //! Create instance specified by HSV values. static CColor fromHSV(double h, double s, double v) { double min = v - ((s / 255) * v); // v = max if (h < 60) { return{ v, h / 60 * (v - min) + min, min }; } if (h < 120) { return{ (120 - h) / 60 * (v - min) + min, v, min }; } if (h < 180) { return{ min, v, (h - 120) / 60 * (v - min) + min }; } if (h < 240) { return{ min, (240 - h) / 60 * (v - min) + min, v }; } if (h < 300) { return{ (h - 240) / 60 * (v - min) + min, min, v }; } return{ v, min, (360 - h) / 60 * (v - min) + min }; } }; UNIT_TEST_FUNC(CColor) { double max_diff = 0.0; for (int i = 0; i < 255; i++) { unsigned char r, g, b; r = rand() % 255; g = rand() % 255; b = rand() % 255; CColor rgb; rgb = { r, g, b }; double h, s, v; h = rgb.hue(); s = rgb.saturation(); v = rgb.value(); CColor hsv; hsv = CColor::fromHSV(h, s, v); double diff; diff = rgb.red() + rgb.green() + rgb.blue(); diff = abs(diff - hsv.red() - hsv.green() - hsv.blue()); if (diff > max_diff) max_diff = diff; if (diff > 3) throw 0; } return true; } #endif<file_sep>#ifndef STDAFX_H #include <assert.h> #endif #include "exception.h" #include "imemory.h" namespace basis { GlobalMemory:: GlobalMemory() : m_h(0), m_address(0), m_lock_count(0) {} GlobalMemory:: ~GlobalMemory() { reset(); } GlobalMemory& GlobalMemory:: operator=(GlobalMemory &&rhs) noexcept { if (m_h != rhs.m_h) { reset(rhs.m_h, rhs.m_address, rhs.m_lock_count); rhs.reset(); } return *this; } void * GlobalMemory:: alloc(size_t size, bool bZeroFill) { HGLOBAL h = 0; if (size 
!= 0) { UINT flags = (bZeroFill) ? GMEM_MOVEABLE : GHND; h = GlobalAlloc(flags, size); } if (!h) throw std::bad_alloc(); reset(h, 0, 0); return address(); } bool GlobalMemory:: realloc(size_t size) noexcept { HGLOBAL h = GlobalReAlloc(handle(), size, GMEM_MOVEABLE); if (!h) return false; reset(h, 0, 0); return true; } void GlobalMemory:: reset(HGLOBAL h, void* addr, BOOL lock) noexcept { CriticalSection cs = m_cs.local(); // アンロック後、解放。失敗するとm_hはそのまま if (m_h && m_h != h) { m_h = GlobalFree(handle()); m_h = nullptr; // 少なくとも自分でロックした分はアンロックしてる } m_h = h; m_address = addr; m_lock_count = lock; } void* GlobalMemory:: address() const { void *p = lock(); if (p) return p; throw std::runtime_error(LOCATION); } size_t GlobalMemory:: capacity() const { if (!m_h) return 0; size_t cb = static_cast<size_t>(GlobalSize(m_h)); if (cb == 0) throw api_runtime_error(); return cb; } // ハンドルが生きているかをチェックし、 // アンロック済みのハンドルを返す。 // 不可またはエラーで0をかえす HGLOBAL GlobalMemory:: handle() noexcept { CriticalSection cs = m_cs.local(); if (m_h == nullptr) return nullptr; // ハンドルが死んでいないか? UINT uFlags = GlobalFlags(m_h); if (uFlags == GMEM_INVALID_HANDLE) return nullptr; // ロックカウントにかかわらずアドレスを放棄する m_address = nullptr; // アンロックしたメモリのハンドルを返す if (m_lock_count) { // GlobalUnlock() 戻り値は成功 0, 失敗も 0 // ロックカウント減算してなおロックされていれば非 0を返す if (GlobalUnlock(m_h)) { m_lock_count--; return nullptr; } DWORD le = GetLastError(); if (le == NO_ERROR || le == ERROR_NOT_LOCKED) { m_lock_count--; return m_h; } // 減算そのものも失敗すると 戻り値は0で上記以外のError code を吐く return nullptr; } // 自分でロックしてない場合は、アンロックされてるかどうかを返す return ((uFlags & 0xFF) == 0) ? 
m_h : nullptr; } // ロックしてアドレスを返す void* GlobalMemory:: lock() const { CriticalSection cs = m_cs.local(); // すでにロック if (m_lock_count) return m_address; m_address = GlobalLock(m_h); // 失敗0(also:in=0) // 関数失敗 if (m_address == m_h) { m_address = nullptr; std::runtime_error(LOCATION); } ++m_lock_count; return m_address; } // -------------------------------------------------------- // size_t HeapMemory::mTotalAmount = 0; HeapMemory:: HeapMemory() : m_h(HeapCreate(0, 0, 0)), m_address(0), m_size(0) { if (!m_h) throw 0; } HeapMemory:: ~HeapMemory() { reset(); HeapDestroy(m_h); } HeapMemory& HeapMemory::operator=(HeapMemory&& rhs) noexcept { m_cs.enter(); rhs.m_cs.enter(); { std::swap(*this, rhs); } rhs.m_cs.leave(); m_cs.leave(); return *this; } void * HeapMemory:: alloc(size_t size, bool bZeroFill) { void *p = 0; if (m_h && size) p = HeapAlloc(m_h, (bZeroFill) ? HEAP_ZERO_MEMORY : 0, size); if (!p) throw std::bad_alloc(); size_t s = static_cast<size_t>(HeapSize(m_h, 0, p)); if (s == static_cast<size_t>(-1) || s == 0) { HeapFree(m_h, 0, p); throw std::bad_alloc(); } reset(p, s); return p; } bool HeapMemory:: realloc(size_t size) noexcept { if (!size) return false; void *p = HeapReAlloc(m_h, 0, m_address, size); if (!p) return false; if (m_h) { mTotalAmount -= m_size; } mTotalAmount += size; auto cs = m_cs.local(); m_address = p; m_size = size; return true; } void HeapMemory:: reset(void *addr, size_t s) noexcept { m_cs.enter(); if (m_h && m_size) { mTotalAmount -= m_size; HeapFree(m_h, 0, m_address); } mTotalAmount += s; m_address = addr; m_size = s; m_cs.leave(); } void * HeapMemory:: address() const { auto cs = m_cs.local(); if (m_size && m_address) return m_address; throw std::runtime_error(LOCATION); } HANDLE HeapMemory:: handle() const { auto cs = m_cs.local(); if (m_h) return m_h; throw std::runtime_error(LOCATION); } size_t HeapMemory:: capacity() const { return m_size; } // -------------------------------------------------------- // CMemory::CMemory(CMemory && s) 
noexcept : m_buf(0), m_size(0) { *this = std::move(s); } CMemory& CMemory:: operator=(CMemory &&s) noexcept { if (this == &s) return *this; auto cs = m_cs.local(); auto cs2 = s.m_cs.local(); reset(); m_buf = s.m_buf; m_size = s.m_size; s.m_buf = nullptr; s.m_size = 0; return *this; } void * CMemory:: alloc(size_t size, bool zeroFill) { void *p; if (!size) p = 0; else if (zeroFill) p = calloc(size, 1); // 配列版。ゼロクリアされる else p = malloc(size); if (!p) throw std::bad_alloc(); reset(p, size); return p; } bool CMemory:: realloc(size_t size) noexcept { auto cs = m_cs.local(); if (!size) return false; void *p = ::realloc(m_buf, size); if (!p) return false; mTotalAmount -= m_size; mTotalAmount += size; // <- Don't reset the buffer m_buf = p; m_size = size; return true; } void CMemory::reset(void *p, size_t s) noexcept { auto cs = m_cs.local(); if (m_buf) { free(m_buf); mTotalAmount -= m_size; } mTotalAmount += s; m_buf = p; m_size = s; } size_t CMemory::mTotalAmount = 0; } // namespace<file_sep>#pragma once #ifndef GUID_A03BAB06D25C40F99E26A5AE91EC30D8 #define GUID_A03BAB06D25C40F99E26A5AE91EC30D8 #include "types.h" #include "iunit_test.h" namespace basis { UNIT_TEST(Monitor) /*! Monitor class. @par In virtual screen coordinates, its origin point (0, 0) is left-top of a primary window. All windows placed left or above of primary window should have negative coordinates. Note that if a window is of 1024x768 size, the coordinate (1024, 768) is not included, because horizontal coordinates is from 0 to 1023. GDI functions, though, accepts rectangle(0, 0, 1024, 768) and it will paint rectangle at (0, 0, 1023, 767) ignoring right and bottom end of the rectangle. */ class Monitor { public: Monitor(HMONITOR h = 0); Monitor(const Point &pos, bool nearest = false); Monitor(const RECT &rc); Monitor(const Monitor&) = default; Monitor&operator=(const Monitor&) = default; operator HMONITOR() { return m_h; } //! Returns number of monitors alive. static int Count(); //! 
Returns primary monitor. static Monitor GetPrimary(); //! Returns rectangle of virtual screen. static Rect GetVirtualScreen(); bool isPrimary(); /*! Return rectangle of display monitor. If monitor is not selected, rectangle of privary monitor will be returned. */ Rect getRect(); /*! Returns rectangle of work area. Work area, despite task bar and side bar area, is used to maximize application window by Windows. */ Rect getWorkArea(); //! Returns device name. std::basic_string<TCHAR> getName(); private: static BOOL CALLBACK proc( HMONITOR hMonitor, // ディスプレイモニタのハンドル HDC hdcMonitor, // モニタに適したデバイスコンテキストのハンドル LPRECT lprcMonitor, // モニタ上の交差部分を表す長方形領域へのポインタ LPARAM dwData // EnumDisplayMonitors から渡されたデータ ); MONITORINFO getMonitorInfo(); HMONITOR m_h; }; } // namespace #endif <file_sep>#pragma once #ifndef GUID_AF784485DB6A4893BDE4F5A05D37669F #define GUID_AF784485DB6A4893BDE4F5A05D37669F #ifndef STDAFX_H #include <memory> #endif #include "types.h" #include "surface.h" #include "iunit_test.h" namespace basis { UNIT_TEST(CDDBitmap) /*! Device Dependent Bitmap. CDDBitmap provides the way to load bitmap from memory image, loadOleLoad() and loadGdiplus(). Loadable files are<br \> Gdiplus : BMP, ICO, GIF, JPEG, Exif, PNG, TIFF, WMF, and EMF.<br \> OleLoadPicture : BMP, GIF, JPEG, TIFF, WMF It can also tells what kind of image the file is, before decoding image with using getType() function. To render loaded image, calling draw() function will invoke Surface::transfer() function. 
@sa */ class CDDBitmap : public Surface { public: class Test; enum class TYPE : BYTE { Undefined = 0, Error = 1, TIFF = 0x49, // 0b0100'1001 BMP = 'B', // 0b0100'0010 GIF = 'G', // 0b0100'0111 PNG = 0x89, // 0b1000'1001 WMF = 0xD7, // 0b1101'0111 JPEG = 0xff, // 0b1111'1111 }; // ‰æ‘œŒ`Ž®”»’è static constexpr unsigned kTypeCheckBytes = 9; static TYPE getType(IStream *is); static TYPE getType(const BYTE buf[kTypeCheckBytes]); void unload() noexcept { reset(); } bool isLoaded() const noexcept { return *this ? true : false; } bool loadOleLoad(HGLOBAL hg); bool loadGdiplus(void *p, size_t size); }; } // namespace #endif <file_sep>#pragma once #ifndef GUID_B6132F5238C640ACB180B841FEB4AF8B #define GUID_B6132F5238C640ACB180B841FEB4AF8B /*! @file IMemory interface class definition and inherit class definition, GlobalMemory, HeapMemory, CMemory */ #ifndef STDAFX_H #include <memory> #include <wtypes.h> #endif #include "critical_section.h" #include "iunit_test.h" namespace basis { UNIT_TEST(IMemory) /*! Memory class interface. Memory class interface defines buffer I/O. These classes have no way to copy, because buffers are not guarranteed to be initialized. There are a lot of cases that buffers better not to be initialized entirely. So this is why it cannot copies itself. */ class IMemory { public: IMemory() {} virtual ~IMemory() {} IMemory(IMemory&) = delete; IMemory& operator=(IMemory&) = delete; /*! Allocates a new buffer. The old buffer will be released if successful. If failed to allocate a new buffer, it throws exception while keeping the old buffer. @param size The least size of a buffer to be allocated. @param zeroFill Fills all field of a buffer with 0 if it is set to be true. @return A pointer to a new buffer. No chance to be nullptr. @exception bad_alloc */ virtual void* alloc(size_t size, bool zeroFill = false) = 0; /*! Reallocates buffers carrying contents as possible as it can. */ virtual bool realloc(size_t size) = 0; /*! Releases memory buffers. 
Despite for decreasing memory usage, it is not necessary to call explicitly. */ virtual void reset() noexcept = 0; /*! Returns a valid pointer to buffers. @exception std::runtime_error can't return address. @return Valid pointer to buffers. */ virtual void* address() const = 0; /*! Returns size of memory buffers in count of bytes. May return 0 if memory is not allocated or already released. */ virtual size_t capacity() const = 0; }; /*! Global Memory with IMemory interface. It will lock a memory block when to use address, while it will unlock a memory block when to use handle. @note Global memory was designed to use a small memory block. It is said to be recommended to use VirtualAlloc function when one intend to use a memory block bigger than 1 or 2MB, despite certain old functions need global memory handle. */ class GlobalMemory : public IMemory { public: GlobalMemory(); ~GlobalMemory(); GlobalMemory(GlobalMemory&) = delete; GlobalMemory&operator=(GlobalMemory&) = delete; GlobalMemory(GlobalMemory&&s) noexcept { *this = std::move(s); } GlobalMemory&operator=(GlobalMemory&&) noexcept; void* alloc(size_t size, bool zeroFill = false) override; bool realloc(size_t size) noexcept override; void reset() noexcept override { return reset(0, 0, 0); } void* address() const override; size_t capacity() const override; /*! Unlock memory and return its handle. It makes memory address become invalid. @return Handle. It may be 0 if failed. */ HGLOBAL handle() noexcept; private: void reset(HGLOBAL h, void* addr, BOOL lock) noexcept; void* lock() const; CriticalSection m_cs; HGLOBAL m_h; mutable void* m_address; mutable BOOL m_lock_count; }; //! Heap memory with IMemory interface. 
class HeapMemory : public IMemory { public: HeapMemory(); ~HeapMemory(); HeapMemory(HeapMemory&) = delete; HeapMemory&operator=(HeapMemory&) = delete; HeapMemory(HeapMemory&&s) noexcept { *this = std::move(s); } HeapMemory&operator=(HeapMemory&&) noexcept; void* alloc(size_t size, bool bZeroFill = false) override; bool realloc(size_t size) noexcept override; void reset() noexcept override { reset(0, 0); } void* address() const override; size_t capacity() const override; /*! Returns Heap memory handle. @exception std::runtime_error No handles. */ HANDLE handle() const; static size_t TotalAmount() { return mTotalAmount; } private: void reset(void *addr, size_t s) noexcept; static size_t mTotalAmount; CriticalSection m_cs; HANDLE m_h; void* m_address; size_t m_size; }; /*! IMemory interface for the memory malloc function creates. */ class CMemory : public IMemory { public: CMemory() : m_size(0), m_buf(0) {} ~CMemory() { reset(); } CMemory(const CMemory& s) = delete; CMemory&operator=(const CMemory &s) = delete; CMemory(CMemory &&s) noexcept; CMemory&operator=(CMemory &&s) noexcept; void *alloc (size_t size, bool zeroFill) override; bool realloc(size_t size) noexcept override; void reset() noexcept override { reset(0, 0); } void *address() const override { if (m_buf) return m_buf; throw 0; } size_t capacity() const override { return m_size; } static size_t TotalAmount() { return mTotalAmount; } private: void reset(void *p, size_t s) noexcept; static size_t mTotalAmount; CriticalSection m_cs; size_t m_size; void *m_buf; }; } // namespace #endif<file_sep>#include "key_map.h" #include "exception.h" namespace basis { void CKeyMap:: clear() noexcept { ZeroMemory(index, _countof(index)); ZeroMemory(keyset, _countof(keyset)); } bool CKeyMap:: append(CKey key, Command id) noexcept { if (isFull() || !key || id == 0 || id > 0xFFFF) return false; ++index[0]; // インデックスの変更と挿入位置取得 Index ix; if (index[key.vkey()]) { ix = index[key.vkey()]; // 挿入位置より後ろを指していたインデックスをずらす for (auto i = 1; 
i < _countof(index); ++i) { if (index[i] > ix) ++index[i]; } } else { ix = index[0]; index[key.vkey()] = ix; } // 挿入 Item item{ key, static_cast<WORD>(id) }; for (ix; item.first; ++ix) { std::swap(item, keyset[ix]); } return true; } void CKeyMap:: eraseByCommand(Command id) { for (unsigned i = 1; i <= size(); ) { if (keyset[i].second == id) erase(static_cast<Index>(i)); else ++i; } } void CKeyMap:: eraseByKey(CKey key) { WORD ix = index[key.vkey()]; while (0 != (ix = search(key, ix))) { if (key == keyset[ix].first) erase(ix); else ++ix; } return; } void CKeyMap:: erase(Index ix) { if (ix == 0) throw std::invalid_argument(LOCATION); CKey key = keyset[ix].first; // 削除 for (Index i = ix; i <= size(); ++i) { keyset[i] = (i <= size() - 1) ? keyset[i + 1] : Item{ 0, 0 }; } // インデックスとサイズ(index[0])を変更 for (WORD &i : index) { if (i > ix) --i; } // vkeyチャンクが消滅したらインデックスを消去 if (!search(key.vkey(), index[key.vkey()])) index[key.vkey()] = 0; } DWORD CKeyMap:: getCommand(CKey key, bool bCompliment) { if (bCompliment) { if (GetKeyState(VK_SHIFT) < 0) key |= CKey::SHIFT; if (GetKeyState(VK_CONTROL) < 0) key |= CKey::CTRL; if (GetKeyState(VK_MENU) < 0) key |= CKey::ALT; } return keyset[search(key, index[key.vkey()])].second; } CKeyMap::Index CKeyMap:: search(CKey key, Index start_ix) { CKey matched; WORD id = 0; for (auto i = start_ix; i <= size(); ++i) { CKey test; test = keyset[i].first; // 同一vkeyのチャンク終了 if (test.vkey() != key.vkey()) return id; // 完全一致を見つけた if (test == key) { return i; } // もっとも一致するものを保持する // たとえばCtrl+Fがないとき、Fのみのものがあればそれを返す if (matched.flags() <= test.flags()) { matched = test; id = i; } } return id; } CKey CKeyMap:: getKey(Command id, int n) { for (auto &&i : keyset) { if (i.second == id) { if (n) n--; else return i.first; } } return CKey{}; } } // namespace<file_sep>#include "monitor.h" namespace basis { UNIT_TEST_FUNC(Monitor) { Monitor primary{ {}, false }; if (primary.getName() != Monitor::GetPrimary().getName()) throw 0; if (primary.getRect() != 
Monitor::GetPrimary().getRect()) throw 0; return true; } } // namespace<file_sep>/* APIs FormatMessage Windows NT/2000:Windows NT 3.1 以降 Windows 95/98:Windows 95 以降 ヘッダー:Winbase.h 内で宣言、Windows.h をインクルード インポートライブラリ:Kernel32.lib を使用 Unicode:Windows NT/2000 は Unicode 版と ANSI 版を実装 */ #include "exception.h" #ifndef STDAFX_H #include <wtypes.h> #endif namespace { std::string ToStrA(int i) { char sz[16] = { '\0' }; if (sprintf_s(sz, "%d", i) < 0) return std::string(); return std::string(sz); } // APIエラー用の補助関数 // エラーコードをメッセージに変換する std::string TranslateErrorCode(DWORD dwLastError) { const int size = 512; PSTR buf = static_cast<PSTR>(malloc(size)); // ロケールを実行環境に合わせる setlocale(LC_ALL, setlocale(LC_CTYPE, "")); if (!FormatMessageA(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, nullptr, dwLastError, 0, buf, size, nullptr)) return ""; std::string message(buf); free(buf); return message; } } // namespace namespace basis { std::string MakeLocation(const char *func, const char *file, int line) { return std::string(func) + " in " + file + " line:" + ToStrA(line); } // APIエラー用の例外メッセージを成形する std::string GetErrorMessage(const char *func, const char *file, int line) { std::string mes("Error:"); mes += ToStrA(static_cast<int>(GetLastError())); mes += " " + TranslateErrorCode(GetLastError()); mes += MakeLocation(func, file, line); return mes; } } // namespace<file_sep>#include "file_path.h" #include "file_item.h" namespace basis { UNIT_TEST_FUNC(CFilePath) { auto path = CFilePath::GetCurrentDirectory(); if (!path.exist()) throw 0; if (!path.isDirectory()) throw 0; auto file = path + TEXT("temp"); if (file.getFileName() != TEXT("temp")) throw 0; if (file.exist()) if (!file.erase()) throw 0; if (path != file.getDir()) throw 0; if (!file.createAsDirectory()) throw 0; if (!file.erase()) throw 0; // ファイル作成のテストはCFileItemが行う return true; } } // namespace<file_sep>#include "file_path.h" #include "file_item.h" #include "stdfnc.h" #include "cdd_bitmap.h" namespace basis { 
UNIT_TEST_FUNC(CDDBitmap) { using tstr = ::std::basic_string<TCHAR>; auto path = CFilePath::GetBootDirectory() + TEXT("img11.jpg"); CFileItem file = path.open(); if (!file) throw 0; auto size = file.getSize(); if (size.HighPart > 0) return false; GlobalMemory heap; heap.alloc(size.LowPart); file.read(heap.address(), 0, INFINITE); CDDBitmap bitmap; if (bitmap.loadOleLoad(heap.handle()) == false) throw 0; if (bitmap.loadGdiplus(heap.address(), size.LowPart) == false) throw 0; return true; } } // namespace<file_sep>#include "string_buffer.h" // for GetDropFile #include "stdfnc.h" namespace { const TCHAR * const TEXT_TRUE = TEXT("true"); const TCHAR * const TEXT_FALSE = TEXT("false"); } namespace basis { const TCHAR * const TEXT_EMPTY = TEXT(""); const TCHAR * const ASTERISK = TEXT("*"); template<> bool ToInt(const TCHAR *p, const bool &bDefault) { static const tstr text_true = TEXT("true"); static const tstr text_false = TEXT("false"); if (text_true == p) return true; if (text_false == p) return false; return bDefault; } uint32_t ReverseByteOrder(uint32_t src) { uint32_t dest; dest = (src << 24) & 0xFF000000; dest |= (src << 8) & 0x00FF0000; dest |= (src >> 8) & 0x0000FF00; dest |= (src >> 24) & 0x000000FF; return dest; } PTSTR GetTypeFormat(TCHAR type[5], size_t bytes, TCHAR d_x) { TCHAR *p = type + 1; switch (bytes) { case 1: *p = _T('h'); ++p; // fall through case 2: *p = _T('h'); break; case 8: *p = _T('l'); ++p; // fall through case 4: *p = _T('l'); break; default: throw 0; } *type = _T('%'); *++p = d_x; *++p = _T('\0'); return type; } tstr ToStr(int64_t n) { TCHAR sz[17]{ _T('\0') }; // 64bitは15文字+符号+\0 if (_stprintf_s(sz, TEXT("%lld"), n) < 0) throw 0; return tstr(sz); } // コマンドラインのnCmd番目の引数を返す。 // 0番目は実行中のプログラムのパス、以降が引数である。 // 引数が""(ダブルクォート)で囲まれている場合はこれを除去する。 // "path" や -ls など混在した引数でも動作する。 // 戻り値は対象の引数がない場合、空(empty)である。 tstr GetCommandLine(int nCmd) { TCHAR c; // 引数ごとの終端とする文字 tstr str = ::GetCommandLine(); tstr::size_type i = 0, n; while (i <= str.size()) { 
// 括り文字 if (str[i] == _T('"') || str[i] == _T('\'')) { c = str[i]; // 区切り文字 if (++i > str.size()) return{}; } else c = _T(' '); // 区切り文字 if (--nCmd <= -1) // 目的の引数。終端文字まで取り出す return str.substr(i, str.find(c, i) - i); if ((n = str.find(c, i)) == tstr::npos) return{}; // 末尾まで見た。目的の引数なし i = n + 1; if (str[i] == _T(' ')) // "" のあとのスペースを無視 ++i; } return{}; } tstr GetDropFile(WPARAM wp, UINT num) { UINT size = 1 + DragQueryFile((HDROP)wp, num, nullptr, 0); StringBuffer buf(size); DragQueryFile((HDROP)wp, num, buf.data(), size); return tstr(buf.data()); } LONG GetFontHeight( HDC hdc, HFONT lhs, HFONT rhs ) { TEXTMETRIC tm; LONG lhs_height = 0; HFONT prev_font = 0; if ( lhs ) { prev_font = (HFONT)SelectObject( hdc, lhs ); } GetTextMetrics( hdc, &tm ); if ( lhs ) { if ( rhs ) { lhs_height = tm.tmHeight; SelectObject( hdc, rhs ); GetTextMetrics( hdc, &tm ); } SelectObject( hdc, prev_font ); if ( rhs && lhs_height > tm.tmHeight ) return lhs_height; } return tm.tmHeight; } bool ShowProperty(HWND hParentWnd, const tstr& filePath) { SHELLEXECUTEINFO sInfo = { sizeof(SHELLEXECUTEINFO) }; sInfo.hwnd = hParentWnd; sInfo.lpVerb = _T("properties"); sInfo.lpFile = filePath.c_str(); sInfo.fMask = SEE_MASK_INVOKEIDLIST; sInfo.nShow = SW_SHOWNORMAL; BOOL const ret = ShellExecuteEx(&sInfo); return ret != FALSE; } void HimetricFromDP(HDC hdc, SIZE& s) { LONG HIMETRIC_INCH = 2540; // 2540mm(HIMETRIC) / 1inch int x_dpi = GetDeviceCaps( hdc, LOGPIXELSX ); int y_dpi = GetDeviceCaps( hdc, LOGPIXELSY ); // たいがい96dpiをかえす。 // 合計のmmとpixelから正確に算出する方法もあるが // ビットバイビット表示する場合はこちらを使う s.cx = s.cx * HIMETRIC_INCH / x_dpi; s.cy = s.cy * HIMETRIC_INCH / y_dpi; return; } bool ClearBackground(HDC hdc, RECT screen, RECT diff) { SetLastError(0); if (screen.top < diff.top) { // 上側クリア Rectangle(hdc, screen.left, screen.top, screen.right, (diff.top)); } if (screen.bottom > diff.bottom) { // 下側クリア Rectangle(hdc, screen.left, max(diff.bottom, screen.top), screen.right, screen.bottom); } if (screen.left < diff.left) { 
// 残りの左側クリア Rectangle(hdc, screen.left, max(diff.top, screen.top), diff.left, min(diff.bottom, screen.bottom)); } if (screen.right > diff.right) { // 右側クリア Rectangle(hdc, diff.right, max(diff.top, screen.top), screen.right, min(diff.bottom, screen.bottom)); } return GetLastError() == NO_ERROR; } } // namespace<file_sep>#include "types.h" namespace basis { Point::operator Size() const { return Size(x, y); } Rect Size::toRect() { return{ 0, 0, x, y }; } Size::operator Point() const { return{ x, y }; } const Point operator*(const Point &pt, int n) { return Point(pt.x * n, pt.y * n); } const Point operator/(const Point &pt, int n) { return Point(pt.x / n, pt.y / n); } const Point operator+(const Point &pt, const Size &s) { return Point(pt.x + s.x, pt.y + s.y); } const Point operator+(const Size &s, const Point &pt) { return Point(pt.x + s.x, pt.y + s.y); } const Point operator-(const Point &pt, const Size &s) { return Point(pt.x - s.x, pt.y - s.y); } const Point operator-(const Size &s, const Point &pt) { return Point(s.x - pt.x, s.y - pt.y); } const Size operator*(const Size &s, double d) { return{ static_cast<int>(s.x * d), static_cast<int>(s.y * d) }; } const Size operator*(double d, const Size &s) { return s * d; } const Size operator/(const Size &s, double d) { return{ static_cast<int>(s.x / d), static_cast<int>(s.y / d) }; } const Size operator/(double d, const Size &s) { return{ static_cast<int>(d / s.x), static_cast<int>(d / s.y) }; } } // namespace<file_sep>#pragma once #ifndef GUID_E45C9C8A647442EA95F7EB08ABDEF7E4 #define GUID_E45C9C8A647442EA95F7EB08ABDEF7E4 #ifndef STDAFX_H #include <string> #endif #include "iunit_test.h" #include "string_buffer.h" namespace basis { UNIT_TEST(CPrivateProfile) /*! Initialization file accessor. API wrapper for GetPrivateProfileString and others. @note These APIs on WIN95 has bug that rewrites parameters set as default string, because they didn't declaired const. This class provides no countermeasure for that. 
*/ class CPrivateProfile { public: using tstr = std::basic_string<TCHAR>; CPrivateProfile() = default; CPrivateProfile(tstr path); virtual ~CPrivateProfile() = default; CPrivateProfile(const CPrivateProfile&) = default; CPrivateProfile(CPrivateProfile&&) = default; CPrivateProfile&operator=(const CPrivateProfile&) = default; CPrivateProfile&operator=(CPrivateProfile&&) = default; //! Sets the Ini path name. CPrivateProfile& path(tstr path); //! Gets the Ini path name. const tstr& path() const { return m_path; } //! Sets the section name. CPrivateProfile& section(tstr section); //! Gets the section name. const tstr& section() const { return m_section; } /*! Enumlates all section that is in the ini file. This function uses Ini path set by path() function. This function stores all section names into inner buffer. Inner buffer is accessible with using c_str(). Stored string consists of null-separated section names, and last section name is terminated with two nulls. */ bool getAllSectionNames() noexcept; /*! Enumlates all key names in the section. This function uses Ini path and section name set by path() and section() function. This function stores all key names into inner buffer. Inner buffer is accessible with using c_str(). Stored string consists of null-separated key names, and last key name is terminated with two nulls. */ bool getAllKeyNames() noexcept; /*! Returns a string corresponds to the key. This function uses Ini path and section name set by path() and section() function. If there was no corresponded string, sDefault would be copied as return value. Returned value is pointer to inner buffer, so reaccessible with calling c_str() untill other functions of this object overwrites it. */ const TCHAR *read(const TCHAR *name, const TCHAR *sDefault = nullptr); /*! Writes a string corresponds to the key. This function uses Ini path and section name set with path() and section() function. */ bool write(const TCHAR *keyName, const TCHAR *str); /*! 
Returns pointer to inner buffer. getAllSectionNames(), getAllKeyNames(), and read() function uses inner buffer to return strings. Inner buffer keeps still, until another call to these functions is made. */ const TCHAR *c_str() const { return m_buf.c_str(); } /*! Erases a specified key and a corresponded string. This function uses Ini path and section name set with path() and section() function. */ bool eraseKey(const TCHAR *name); /*! Erase all keys and strings in the section. This function uses Ini path and section name set with path() and section() function. */ bool clearSection(); private: bool do_read(const TCHAR *section, const TCHAR *name, const TCHAR *sDefault); bool do_write(const TCHAR *name, const TCHAR *value); bool isInvalid(const TCHAR *name); tstr m_path; // File path tstr m_section; // Section name StringBuffer m_buf; // Inner buffer }; } // namespace #endif <file_sep>#pragma once #ifndef GUID_6982DB1EB5D04F939E5EFCBA3B0D17EA #define GUID_6982DB1EB5D04F939E5EFCBA3B0D17EA /*! @file iunit_test.h contains definition of IUnitTest, unit test class interface. This codes shows how to make unit tests. @par @code // --- Class declaration part that can be divided to header file --- // #include "CBase.h" #include "iunit_test.h" UNIT_TEST(CDerived) class CDerived : public CBase { public: bool derived(); }; // -------------- Class definition -------------- // bool derived(); // -------------- Unit test definition -------------- // UNIT_TEST_FUNC(CDerived) { CDerived obj; if (obj.derived() != true) throw 0; return true; } @endcode Let's see in detail. @code #include "CBase.h" @endcode If CBase also has a unit test defined in "CBase.h", CBase will be certainly tested before CDerived tested. 
@code UNIT_TEST(CDerived) @endcode CDerived is not even declared yet, another class, named TEST_CDerived was defined and created here.<br \> In the constructor of TEST_CDerived, it goes to register pointer to the static list IUnitTest has.<br \> It will succeed unless a pointer of the same class that exists within other translation unit had already registered. @code UNIT_TEST_FUNC(CDerived) { ... } @endcode This is a declaration of TEST_CDerived::test(). One can give it definition in the next. These created tests are available from anywhere after reached a entry point of a program. @code IUnitTest::PerformUnitTest(); @endcode This function executes respective test functions regisered. To be said generally, these gimmicks are... @li Runtime test @li Available in anywhere @li Available with single call @li Ignored if release build. @li Not rebuked by CrtDbg, macros to select memory leaks. @li Capable to be in cpp files so not increase class headers despite MAKE_PATH macro. @li Capable to sort tests by relationships of classes. */ #ifndef STDAFX_H #include <vector> #include <typeinfo> #endif #include "ienumlator.h" /*! Unit test class interface. See also iunit_test.h to know how to make unit tests. */ class IUnitTest { private: class Enumlator; public: virtual ~IUnitTest() {} //! Executes the unit test. virtual bool test() = 0; //! Executes all registered unit tests. static void PerformUnitTest() { #ifdef _DEBUG int nUnitTest = ncodes(0); IUnitTest* p = 0; for (int i = 0; i < nUnitTest; i++) { p = GetUnitTest(i); if (p->test() == false) throw 0; } #endif } //! Adds a pointer of unit test to the list. 
template<class T> static bool PushUnitTest() { static T t; for (int i = ncodes(0); i; i--) { if (GetUnitTest(i) == &t) return false; } GetUnitTest(ncodes(0)) = &t; ncodes(1); return true; } static int Size() { return ncodes(0); } static Enumlator GetEnumlator() { return Enumlator(&GetUnitTest(0)); } private: static IUnitTest*& GetUnitTest(int n) { static IUnitTest* ar[kUnitTestMax]; return ar[n]; } static int ncodes(int add) { static int n = 0; int ret = n; n += add; return ret; } static constexpr int kUnitTestMax = 255; private:class Enumlator : public ::basis::IEnumlator<IUnitTest*> { public: Enumlator(IUnitTest** p) : m_p(p), m_end(0) {} bool next() override { if (!m_end) m_end = m_p + IUnitTest::Size(); if (m_p == m_end) return false; ++m_p; return (m_p != m_end); } bool empty() const override { return m_p == m_end; } IUnitTest*& get() override { return *m_p; } private: IUnitTest **m_p; IUnitTest **m_end; }; // Enumlator }; // IUnitTest #ifdef _DEBUG /*! Gives a test class definition and its substance. The object add itself to a list of test codes as it created. */ #define UNIT_TEST(hoge) struct TEST_##hoge : public IUnitTest {\ bool test() override; };\ static bool TEST_##hoge##_TRIGGER = IUnitTest::PushUnitTest<TEST_##hoge>(); #else #define UNIT_TEST(hoge) namespace {\ struct TEST_##hoge { bool test(); }; } #endif /*! Defines a test function of hoge class. See a sample below. @code UNIT_TEST_FUNC(hoge) { return true; } @endcode This will be expanded to be something like below. @code bool TEST_hoge() { return true; } @endcode UNIT_TEST and UNIT_TEST_FUNC macros allows one to be ignorant which classes provide test functions. */ #define UNIT_TEST_FUNC(hoge) bool TEST_##hoge##::test() /*! @class hoge Dummy name for certain macros. 
*/ #endif<file_sep>#pragma once #ifndef GUID_B58C4C5E7A9343598E1D86A2FE6F8F3A #define GUID_B58C4C5E7A9343598E1D86A2FE6F8F3A #ifndef STDAFX_H #include <list> #endif #include "window.h" #include "thread.h" #include "critical_section.h" namespace basis { class Window::Impl { public: Impl(Window* win) : parent(win) {} ~Impl() = default; HWND handle() { return m_h; } bool create(); bool destroy(); void waitToEnd(); void join(); //! ウィンドウのリスナを登録 void hook(IEventHandler *p); //! ウィンドウのリスナを登録 void hook(Listener f); //! リスナを除去する void unhook(IEventHandler *p); //! フックされたリスナにイベントを渡す int broadcast(Message, WPARAM, LPARAM); //! メッセージを配信する int dispatch(Window*, Message, WPARAM, LPARAM) const; private: bool exist(const IEventHandler *p); //! ウィンドウハンドルに関連付けられたイベントハンドラを取得し委譲する。 static LRESULT CALLBACK Dispatch(HWND hWnd, UINT msg, WPARAM wp, LPARAM lp); //! HWNDに仕込んだポインタを取得する static Window * GetInstance(HWND hWnd); ATOM createWindowAtom(const TCHAR *identifier, WNDPROC proc); //! CreateWindow中のインスタンス static Window *ConstructingInstance; static CriticalSection m_cs; Window* parent; HWND m_h; basis::CThread m_thread; using Element = std::pair<IEventHandler*, Listener>; std::list<Element> m_hook; }; } // namespace #endif <file_sep>#include "key_map.h" #ifndef unless #define unless(a) if(!(a)) #endif namespace basis { UNIT_TEST_FUNC(CKeyMap) { CKeyMap map; // 例外送出する操作 try { if (map.append(CKey(), 3)) throw 0; } catch (std::invalid_argument& e) { (void)e; } try { if (map.append(VK_SHIFT, 0)) throw 0; } catch (std::invalid_argument& e) { (void)e; } unless(map.size() == 0) throw 0; unless(map.capacity() > 0) throw 0; unless(map.append(VK_SHIFT, 5)) throw 0; unless(map.getCommand(VK_SHIFT, false) == 5) throw 0; unless(map.getKey(5, 0) == VK_SHIFT) throw 0; unless(map.size() == 1) throw 0; unless(map.append(CKey::SHIFT + VK_F1, 1)) throw 0; unless(map.append(CKey::CTRL | CKey::SHIFT | VK_F1, 2)) throw 0; // ※1 unless(map.getCommand(CKey::SHIFT + VK_F1, false) == 1) throw 0; 
unless(map.getCommand(CKey::SHIFT + CKey::CTRL + VK_F1, false) == 2) throw 0; unless(map.size() == 3) throw 0; map.eraseByCommand(5); unless(map.size() == 2) throw 0; unless(map.getCommand(VK_SHIFT, false) == 0) throw 0; unless(map.getKey(5, 0) == CKey()) throw 0; // ※1 消去後の整合性チェック unless(map.getCommand(CKey::SHIFT + VK_F1, false) == 1) throw 0; unless(map.getCommand(CKey::SHIFT + CKey::CTRL + VK_F1, false) == 2) throw 0; return true; } } // namespace<file_sep>#pragma once #ifndef GUID_528AE1BB7FB24BC5A32EDEFFF61AFC10 #define GUID_528AE1BB7FB24BC5A32EDEFFF61AFC10 #ifndef STDAFX_H #include <ctype.h> #include <wtypes.h> #endif #include "exception.h" namespace basis { template <class T> class Char_T {}; template<> class Char_T<char> { // for Ascii, Shift-JIS(932) public: Char_T() : m_c(0) {} Char_T(char c) : m_c(c) {} operator char() { return m_c; } bool isNull() const { return m_c == '\0'; } bool isAlpha() const { return isLower() || isUpper(); } bool isLower() const { return 'a' <= m_c && m_c <= 'z'; } bool isUpper() const { return 'A' <= m_c && m_c <= 'Z'; } bool isDigit() const { return '0' <= m_c && m_c <= '9'; } bool isHexDigit() const { return ('A' <= m_c && m_c <= 'F') || ('a' <= m_c && m_c <= 'f') || isDigit(); } /*! Returns whether it can be lead byte of DBC. This function returns whether it can be a 1st byte of Double Byte Char; Sjis for example. Even if this function returns true, it remains a chance that the char is a 2nd byte. Therefore, if this function returned false, the char is definitely a 2nd byte. */ bool isSjisLeadByte() const { return (0x81 <= m_c && m_c <= 0x9f) || (0xe0 <= m_c && m_c <= 0xfc); } int bytes() const { return sizeof(m_c); } int width() const { return (isSjisLeadByte()) ? 
2 : 1; } #ifdef _UNICODE wchar_t tchar() const { char buf[2] = { m_c, '\0' }; wchar_t str[2]; if (!MultiByteToWideChar(CP_THREAD_ACP, 0, buf, _countof(buf), str, _countof(str))) throw api_runtime_error(); return str[0]; } #else char tchar() const { return m_c; } #endif private: char m_c; }; template<> class Char_T<wchar_t> { public: Char_T() : m_c(0) {} Char_T(wchar_t c) : m_c(c) {} operator wchar_t() { return m_c; } bool isNull() const { return m_c == L'\0'; } bool isAlpha() const { return isLower() || isUpper(); } bool isLower() const { return L'a' <= m_c && m_c <= L'z'; } bool isUpper() const { return L'A' <= m_c && m_c <= L'Z'; } bool isDigit() const { return L'0' <= m_c && m_c <= L'9'; } bool isHexDigit() const { return (L'A' <= m_c && m_c <= L'F') || (L'a' <= m_c && m_c <= L'f') || isDigit(); } bool isSjisLeadByte() const { return false; } int bytes() const { return sizeof(m_c); } int width() const { return 1; } #ifdef _UNICODE wchar_t tchar() const { return m_c; } #else char tchar() const { wchar_t str[2] = { m_c, '\0' }; char buf[2]; if (!WideCharToMultiByte(CP_THREAD_ACP, 0, str, _countof(str), buf, _countof(buf), 0, nullptr)) throw api_runtime_error(); return buf[0]; } #endif private: wchar_t m_c; }; } // namespace #endif<file_sep>/*! @file Monitor definition. */ #include "singleton.h" #include "exception.h" #include "monitor.h" #pragma comment(lib, "user32.lib") namespace basis { Monitor::Monitor(HMONITOR h) : m_h(h) {} Monitor::Monitor(const Point &pos, bool nearest) : Monitor(MonitorFromPoint({ pos.x, pos.y }, nearest ? 
MONITOR_DEFAULTTONEAREST : MONITOR_DEFAULTTONULL)) {} Monitor::Monitor(const RECT &rc) : Monitor(MonitorFromRect(&rc, MONITOR_DEFAULTTONULL)) {} int Monitor::Count() // static { return GetSystemMetrics(SM_CMONITORS); } Monitor Monitor::GetPrimary() { return MonitorFromPoint({ 0, 0 }, MONITOR_DEFAULTTOPRIMARY); } Rect Monitor::GetVirtualScreen() // static { // Virtual screen is not available on Win95 RECT rc{ 0, 0, GetSystemMetrics(SM_CXVIRTUALSCREEN) }; if (rc.right) { rc.left = GetSystemMetrics(SM_XVIRTUALSCREEN); rc.right += rc.left; rc.top = GetSystemMetrics(SM_YVIRTUALSCREEN); rc.bottom = GetSystemMetrics(SM_CYVIRTUALSCREEN); rc.bottom += rc.top; } else { rc.right = GetSystemMetrics(SM_CXSCREEN); rc.bottom = GetSystemMetrics(SM_CYSCREEN); } return rc; } bool Monitor::isPrimary() { return (getMonitorInfo().dwFlags & MONITORINFOF_PRIMARY) != 0; } Rect Monitor::getRect() { return getMonitorInfo().rcMonitor; } Rect Monitor::getWorkArea() { return getMonitorInfo().rcWork; } MONITORINFO Monitor::getMonitorInfo() { MONITORINFO mi{ sizeof(mi) }; GetMonitorInfo(m_h ? m_h : GetPrimary(), &mi); return mi; } std::basic_string<TCHAR> Monitor::getName() { MONITORINFOEX mi; mi.cbSize = sizeof(mi); GetMonitorInfo(m_h ? 
m_h : GetPrimary(), &mi); return mi.szDevice; } } // namespace<file_sep>/* Gdiplus Bitmap Class Windows XP, Windows 2000 Professional GDI+ 1.0 Gdiplusheaders.h (include Gdiplus.h), Gdiplus.lib / Gdiplus.dll */ #ifndef STDAFX_H #include <olectl.h> // OleLoadPicture, IPicture(OCIdl.h) #pragma comment(lib, "oleaut32.lib") #include <shlwapi.h> // for SHCreateMemStream #pragma comment( lib, "Shlwapi.lib" ) #endif #include <basetsd.h> // OLE_HANDLEはx64でも32bitのため、 // 各ハンドル型にキャストすることが不可能。 // ヘルパ関数LongToHandleを使う。 #include "gdiplus_init.h" #include "ole_init.h" #include "stdfnc.h" // using SafeRelease #include "cdd_bitmap.h" namespace { // IPicture->Renderによる等倍転送。 // 非等倍転送はHALFTONEを考慮しないため画質が落ちるので、 // IPictureのままクラスに保持していても使えない。 void copyRender(IPicture &ip, HDC dest, basis::Size s) { OLE_XSIZE_HIMETRIC dxSrc; OLE_YSIZE_HIMETRIC dySrc; ip.get_Width(&dxSrc); ip.get_Height(&dySrc); ip.Render(dest, 0, 0, s.x, s.y, 0, dySrc, dxSrc, -dySrc, nullptr); // src側のサイズはHIMETRIC単位。 // さらに基準点は下からで、高さは負の値になる } struct Deleter_IPicture { void operator()(IPicture *ip) { ip->Release(); } }; } namespace basis { bool CDDBitmap:: loadGdiplus(void *p, size_t size) { SafeRelease<IStream> is{ SHCreateMemStream(static_cast<const BYTE*>(p), static_cast<UINT>(size)), [](IStream* p) { p->Release(); } }; using namespace Gdiplus; if (!is || !GdiplusInit().available()) return false; HBITMAP hBitmap = 0; Color background(255, 255, 255); if (Bitmap(is).GetHBITMAP(background, &hBitmap) != Ok) return false; return setBitmap(hBitmap); } bool CDDBitmap:: loadOleLoad(HGLOBAL hg) { OleInitializer ole; if (!ole.isValid()) return false; IStream *is = nullptr; if (!hg || CreateStreamOnHGlobal(hg, FALSE, &is)) return false; // Create ip. IPicture *p = nullptr; // Appears when fail. OleLoadPicture(is, 0, FALSE, IID_IPicture, (LPVOID*)&p); if (!p) return false; std::unique_ptr<IPicture, Deleter_IPicture> ip(p); // Makes ip to have hdc. Surface temp; temp.create(0, { 1, 1 }); HDC prev_dc = 0; // Will never change. 
OLE_HANDLE prev_bmp = 0; if (ip->SelectPicture(temp, &prev_dc, &prev_bmp) != S_OK) return false; // Duplicate bitmap. OLE_HANDLE hOle = 0; if (ip->get_Handle(&hOle) || !hOle) return false; auto s = Surface::GetSize(reinterpret_cast<HBITMAP>(LongToHandle(hOle))); create(temp, s); copyRender(*ip, *this, s); // Release ownership of hOle. temp.setBitmap(reinterpret_cast<HBITMAP>(LongToHandle(prev_bmp))); return true; // Scoping out will make IPicture, hOle and temp released. } CDDBitmap::TYPE CDDBitmap:: getType(IStream *is) { if (!is) return TYPE::Error; BYTE bytes[kTypeCheckBytes]; ULONG cbRead = 0; is->Seek({ 0, 0 }, STREAM_SEEK_SET, 0); is->Read(static_cast<void*>(&bytes), _countof(bytes), &cbRead); is->Seek({ 0, 0 }, STREAM_SEEK_SET, 0); return getType(bytes); } CDDBitmap::TYPE CDDBitmap:: getType(const BYTE p[kTypeCheckBytes]) { const BYTE end { 0x10 }; BYTE formats[][kTypeCheckBytes]{ { 0xff, 0xd8, end }, // JPEG/Exif { 0x89, 'P', 'N', 'G', 0x0d, 0x0a, 0x1a, 0x0a, end }, // PNG { 'B', 'M', end }, // BMP { 'G', 'I', 'F', '8', '7', 'a', end }, // GIF { 'G', 'I', 'F', '8', '9', 'a', end }, // GIF { 1, 0, 0, 0, end }, // EMF { 0, 0, 0, 1, end }, // ICO(.ico) { 0, 0, 0, 2, end }, // ICO(.cur) { 0xD7, 0xcd, 0xc6, 0x9a, 0, 0, end }, // WMF(APM) { 0, 0, end }, // WMF(memory) { 1, 0, end }, // WMF(disk) }; for (auto &&format : formats) { const BYTE *i = p; for (auto &&ch : format) { if (ch == end) return static_cast<TYPE>(format[0]); if (*i != ch) break; ++i; } } // TIFF if ((*p == 0x4d || *p == 0x49) && (*p == p[1])) return TYPE::TIFF; return TYPE::Error; } } // namespace<file_sep>#pragma once #ifndef GUID_096AB9D76F6D4D22B90A865B3CBC101A #define GUID_096AB9D76F6D4D22B90A865B3CBC101A #ifndef STDAFX_H #include <wtypes.h> #include <string> #endif namespace basis { class Size; class Rect; class Point { public: int x, y; Point() : Point(0, 0) {} Point(int cx, int cy) : x(cx), y(cy) {} explicit Point(POINT pt) : x(static_cast<int>(pt.x)), y(static_cast<int>(pt.y)) {} 
explicit operator POINT() const { return{ x, y }; } explicit operator Size() const; const Point operator+(const Point& rhs) { return Point(*this) += rhs; } Point& operator+=(const Point& rhs) { x += rhs.x, y += rhs.y; return *this; } const Point operator-(const Point& rhs) { return Point(*this) -= rhs; } Point& operator-=(const Point& rhs) { x -= rhs.x, y -= rhs.y; return *this; } }; class Size { public: int x, y; Size() : Size(0, 0) {} Size(int dx, int dy) : x(dx), y(dy) {} Size(const SIZE &s) : Size(s.cx, s.cy) {} explicit operator Point() const; operator SIZE() const { return{ x, y }; } bool operator==(const Size &s) const { return x == s.x && y == s.y; } bool operator!=(const Size &s) const { return x != s.x || y != s.y; } Size& operator+=(const Size &s) { x += s.x, y += s.y; return *this; } const Size operator+(const Size &s) const { return Size(*this) += s; } Size& operator-=(const Size &s) { x -= s.x, y -= s.y; return *this; } const Size operator-(const Size &s) const { return Size(*this) -= s; } Size& reset() { x = 0, y = 0; return *this; } // [0, 0]を始点とするRectを生成する Rect toRect(); }; class Rect { public: int left, top, right, bottom; Rect() : Rect(0, 0, 0, 0) {} Rect(int l, int t, int r, int b) : left(l), top(t), right(r), bottom(b) {} Rect(const RECT &rc) : Rect(rc.left, rc.top, rc.right, rc.bottom) {} // Bans rect += int explicit Rect(int l) : Rect(l, 0, 0, 0) {} operator RECT() const { return{ left, top, right, bottom }; } bool operator==(const Rect &rhs) { return left == rhs.left && top == rhs.top && right == rhs.right && bottom == rhs.bottom; } bool operator!=(const Rect &rhs) { return !(*this == rhs); } //! Adds respective members. Rect&operator+=(const Rect &rhs) { left += rhs.left; top += rhs.top; right += rhs.right; bottom += rhs.bottom; return *this; } //! Adds respective members. const Rect operator+(const Rect &rhs) const { Rect rc{ *this }; return rc += rhs; } //! Subtructs respective members. 
Rect&operator-=(const Rect &rhs) { left -= rhs.left; top -= rhs.top; right -= rhs.right; bottom -= rhs.bottom; return *this; } //! Subtructs respective members. const Rect operator-(const Rect &rhs) const { Rect rc{ *this }; return rc -= rhs; } Point lefttop() const { return{ left, top }; } Point rightbottom() const { return{ right, bottom }; } bool isEmpty() const { return left != right && top != bottom; } bool isValid() const { return left <= right && top <= bottom; } Rect& reset() { left = top = right = bottom = 0; return *this; } /*! Swaps members to be validated rectangle. This function makes sure that left <= right && top <= bottom. If it is an empty rectangle, all members set to be 0. @return true if empty rectangle, or false; */ bool validate() { int t; if (validate(left, right, t) && validate(top, bottom, t)) return false; left = top = right = bottom = 0; return true; } bool isInclusive(const Point &pt) const { return left <= pt.x && pt.x < right && top <= pt.y && pt.y < bottom; } bool isInclusive(const Rect &rhs) const { return left <= rhs.left && right >= rhs.right && top <= rhs.top && bottom >= rhs.bottom; } Rect& unite(const Rect& rhs) { Rect rc{ rhs }; if (rc.validate()) { validate(); return *this; } if (validate()) return *this = rc; left = min(left, rc.left); top = min(top, rc.top); right = max(right, rc.right); bottom = max(bottom, rc.bottom); return *this; } Rect& move(int dx, int dy) { left += dx, right += dx; top += dy, bottom += dy; return *this; } Rect& move(const Size &s) { return move(s.x, s.y); } bool empty() const { return Rect(*this).validate(); } Size size() const { return{ right - left, bottom - top }; } int width() const { return right - left; } int height() const { return bottom - top; } private: // Swaps to be l < r. 
returns l != r inline bool validate(int &l, int &r, int &t) { if (l < r) return true; if (l == r) return false; t = l; l = r; r = t; return true; } }; const Size operator*(const Size &s, double d); const Size operator*(double d, const Size &s); const Size operator/(const Size &s, double d); const Size operator/(double d, const Size &s); const Point operator+(const Point &pt, const Size &s); const Point operator+(const Size &s, const Point &pt); const Point operator-(const Point &pt, const Size &s); const Point operator-(const Size &s, const Point &pt); const Point operator*(const Point &pt, int n); const Point operator/(const Point &pt, int n); } // namespace #endif<file_sep>/*! @file アプリケーションクラス CImageViewerの実装. ただし内部クラスの定義・実装は各ファイルにて行う。 イベントハンドラonEvent()内で例外が発生した場合、 その内容を表示したうえで再送出する。 @ToDo イベントハンドラで例外が発生した場合、 イベント自体をキャンセルし、例外再送(強制終了)を避ける @ToDo 既知のバグ、特定のディスプレイから最大化した場合に、 別のディスプレイに属する領域の描画が行われないことの対処。 @ToDo ウィンドウを破棄する場合、win->destroy()を呼び出すだけでいいよう、 Windowクラス側に、WM::DESTROYを処理するonEvent()のラッパーか なにかを噛ませる。あるいはdispatch関数内で処理させる。 とにかく、create()とdestroy()を対照にすることと、 イベントハンドラを書くときにポンプ機構を意識しなくていいように、 PostQuitMessage()をWindowクラス内に隠蔽する。 */ #ifndef STDAFX_H #include <assert.h> #include <algorithm> #endif #include "stdfnc.h" #include "monitor.h" #include "window_message.h" #include "ids.h" #include "list_item.h" #include "profile.h" #include "menu.h" #include "control.h" #include "filer.h" #include "draw_list.h" #include "loader.h" namespace image_viewer { CImageViewer::CImageViewer() : m_exitCode(0), isImageInvalidated(true) { hook(this); profile.reset(new Profile); menu.reset(new ContextMenu(*this)); control.reset(new Control(*this)); filer.reset(new Filer(*this)); list.reset(new CDrawList(*this)); loader.reset(new Loader(*this)); } CImageViewer::~CImageViewer() {} int CImageViewer:: onEvent(Window *win, Message msg, WPARAM wp, LPARAM) try { assert(win == this); switch (msg) { case WM::CREATE: saveload(false); menu->initialize(); if (menu->isSelected(ID::VIEW_FILELIST)) 
list->show(); m_captionConfirmDelete = profile->getTranslatedString(ID::FILE_DELETE); m_textConfirmDelete = profile->getTranslatedString(ID::CONFIRM_DELETE); updateTitleBar(); setPath(::basis::GetCommandLine(1)); DragAcceptFiles(*win, true); return 0; case WM::PAINT: return onPaint(); case WM::COMMAND: return onCommand(wp); case WM::DROPFILES: activate(); setForeground(); setPath(::basis::GetDropFile(wp, 0)); return 1; case WM::SIZE: case WM::SIZING: isImageInvalidated = true; win->update(); return 0; case WM::ERASEBKGND: return 1; case WM::CONTEXTMENU: // Shift + F10 menu->track({}); return 1; case WM::CLOSE: if (profile->isEnable()) { menu->saveSettings(); saveload(true); } destroy(); m_exitCode = 0; return 1; case WM::DESTROY: PostQuitMessage(0); return 0; default: return 0; } } catch (std::exception &e) { MessageBoxA(0, e.what(), "Exception", 0); throw e; } basis::CKey CImageViewer::getKey(ID id, int n) { return control->getKey(id, n); } void CImageViewer::saveload(bool bSave) { profile->general(); if (!bSave) m_lastPath = profile->load(ID::LAST_PATH, nullptr); else if (m_dir.exist()) profile->save(ID::LAST_PATH, m_dir.path().c_str()); profile->window(); if (profile->loadBoolean(ID::WINDOW_REMINDER, true) == false) return; if (profile->loadBoolean(ID::WINDOW_POSITION, false)) { Rect rc = place(); if (bSave) { profile->save(ID::WINDOW_LEFT, rc.left); profile->save(ID::WINDOW_TOP, rc.top); profile->save(ID::WINDOW_RIGHT, rc.right); profile->save(ID::WINDOW_BOTTOM, rc.bottom); } else { rc.left = profile->load(ID::WINDOW_LEFT, rc.left); rc.top = profile->load(ID::WINDOW_TOP, rc.top); rc.right = profile->load(ID::WINDOW_RIGHT, rc.right); rc.bottom = profile->load(ID::WINDOW_BOTTOM, rc.bottom); place(rc); } } if (profile->loadBoolean(ID::WINDOW_ZOOMING, false)) { const ID id = ID::WINDOW_MAXIMIZE; if (bSave) profile->saveBoolean(id, isMaximized()); else if (profile->loadBoolean(id, false)) maximize(); } if (profile->loadBoolean(ID::WINDOW_STYLE, false)) { const ID id 
= ID::VIEW_POPUP; if (bSave) profile->saveBoolean(id, menu->isSelected(id)); else popup(profile->loadBoolean(id, false)); } } // 次候補(iNext)をカレント設定するタイプのループに使う。 // 設定に成功、またはすでに探索末尾(iLimit)である場合はfalseを返す。 // 失敗すると要素を削除したうえでtrueを返すので、 // 再度引数を渡してループさせること。 bool CImageViewer:: helper_show_must_loop(iterator iNext, const_iterator iLimit) { // もう探索できないので終了。 if (iNext == filer->end()) { invalidate(); return false; } // ロード成功したら終了 if (setCurrent(iNext) == true) { return false; } // 失敗したやつ消して、ループ継続 filer->erase(iNext); return true; } void CImageViewer:: showPrev() { if (filer->current() == filer->begin()) return; while (helper_show_must_loop(--filer->current(), filer->begin())) ; // noop } void CImageViewer:: showNext() { if (filer->current() == filer->end()) return; while (helper_show_must_loop(++filer->current(), filer->end())) ; // noop } void CImageViewer:: showFirst() { while (helper_show_must_loop(filer->begin(), filer->end())) ; // noop } void CImageViewer:: showLast() { while (helper_show_must_loop(filer->last(), filer->end())) ; // nop } bool CImageViewer:: setPath(FilePath path) { if (loader->waitIfAnyImageIsLoading() == false) { MessageBox(0, TEXT("Loading thread wouldn't respond." "Operations are annulled."), 0, 0); return false; } if (!path.exist()) return false; m_dir = (path.isDirectory()) ? 
path : path.getDir(); filer->generate(m_dir.path().c_str()); filer->sort(); filer->setCurrent(filer->begin()); if (path.isDirectory()) { showFirst(); return true; } auto filename = path.getFileName(); iterator itr = filer->search([filename](Element &p)->bool { return (filename == p->fileName()); }); setCurrent(itr); return true; } bool CImageViewer:: setCurrent(iterator itr) { if (itr == filer->end()) return false; // 範囲外のキャッシュ削除 loader->markToReleaseAround(filer->current()); loader->unmarkAround(itr); loader->performReleaseAround(filer->current()); if (loader->loadImage(itr, true) != Loader::Status::Finished) return false; // 移動 filer->setCurrent(itr); loader->preloadAround(itr); // 更新 list->invalidate(); updateTitleBar(); m_offset.reset(); invalidate_image(); update(); return true; } void CImageViewer:: reloadCurrent() { if (filer->current() == filer->end()) return; filer->current()->get()->unload(); if (loader->loadImage(filer->current(), false) == Loader::Status::Finished) setCurrent(filer->current()); } int CImageViewer:: onCommand(WPARAM wp) { ID const id = static_cast<ID>(LOWORD(wp)); menu->changeStatus(id); switch (id) { case ID::USE_PROFILE: if (menu->isSelected(id)) profile->enable(); else profile->disable(); return 1; case ID::LAST_PATH: setPath(m_lastPath); break; case ID::FILE_BACK: showPrev(); break; case ID::FILE_NEXT: showNext(); break; case ID::FILE_FIRST: showFirst(); break; case ID::FILE_LAST: showLast(); break; case ID::FILE_RELOAD: reloadCurrent(); break; case ID::FILE_DELETE: case ID::FILE_QUICK_DELETE: case ID::LIST_REMOVE: if (filer->isEmpty()) break; if (id == ID::FILE_DELETE) { if (IDOK != MessageBox(*this, m_textConfirmDelete.c_str(), m_captionConfirmDelete.c_str(), MB_OKCANCEL)) break; } loader->waitIfLoading(filer->current()); if (id != ID::LIST_REMOVE && (m_dir + filer->current()->get()->fileName()).trash() == false) break; filer->erase(filer->current()); setCurrent(filer->current()); break; case ID::VIEW_POPUP: 
popup(menu->isSelected(id)); break; case ID::VIEW_FILENAME: updateTitleBar(); break; case ID::VIEW_FILELIST: if (menu->isSelected(id)) list->show(); else list->hide(); break; case ID::VIEW_UPSCALE: case ID::VIEW_DOWNSCALE: case ID::VIEW_CENTER: m_offset.reset(); invalidate_image(); break; case ID::LOADER_IMAGE_LOADED: list->invalidate(); break; case ID::SCREEN_TOGGLE: toggleScreen(); break; case ID::SORT_LESSER_WRITE: case ID::SORT_GREATER_WRITE: case ID::SORT_LESSER_ACCESS: case ID::SORT_GREATER_ACCESS: case ID::SORT_LESSER_CREATION: case ID::SORT_GREATER_CREATION: filer->sort(); list->invalidate(); break; case ID::WINDOW_CLOSE: post(WM::CLOSE, 0, 0); break; case ID::SHOW_PROPERTY: if (!filer->isEmpty()) { auto path = m_dir + filer->current()->get()->fileName(); ShowProperty(*this, path.path().c_str()); } break; default: return 0; } // switch return 1; } void CImageViewer:: update() const { menu->updateStatus(); Window::update(); updateTitleBar(); } bool CImageViewer:: updateTitleBar() const { tstr title = (filer->isEmpty()) ? NAME_VERSION : filer->current()->get()->fileName(); int index = filer->isEmpty() ? 
0 : filer->indexof(filer->current()) + 1;
	// Append " [index/count]" to the caption.
	title += TEXT(" [") + basis::ToStr(index) + TEXT("/")
		+ basis::ToStr(filer->size()) + TEXT("]");
	return setTitle(title.c_str());
}
// Cycles window state: normal -> maximized -> multi-monitor maximized -> normal.
bool CImageViewer:: toggleScreen() {
	if (!isMaximized()) {
		popup();
		maximize();
	} else if (!isMultiMaximized()) {
		popup();
		maximize_multi();
	} else {
		normalize();
		// Restore the title-bar visibility chosen in the menu.
		popup(menu->isSelected(ID::VIEW_POPUP));
	}
	m_offset.reset();
	invalidate();
	return true;
}
// Sort order currently chosen in the context menu.
ID CImageViewer::getSortWay() const { return menu->getSortWay(); }
// True if the maximized window covers the whole virtual screen
// (all monitors), not just a single monitor.
bool CImageViewer::isMultiMaximized() const {
	if (!isMaximized()) return false;
	Rect vs = basis::Monitor::GetVirtualScreen();
	Rect rc = getRect();
	return (rc.width() >= vs.width() && rc.height() >= vs.height());
}
// WM_PAINT handler: double-buffered rendering of the current image plus the file list.
int CImageViewer:: onPaint() {
	PAINTSTRUCT ps;
	HDC hdc = BeginPaint(*this, &ps);
	// compatible() presumably (re)creates the buffer and returns true when the
	// client size changed -- TODO confirm against Surface::compatible.
	if (m_backbuffer.compatible(hdc, getClientSize())) {
		invalidate(getClientRect());
		SetBkMode(m_backbuffer, TRANSPARENT);
		m_backbuffer.pen(GetStockObject(WHITE_PEN));
		m_backbuffer.brush(GetStockObject(WHITE_BRUSH));
		isImageInvalidated = true;
	}
	if (filer->isEmpty()) {
		// No image to show: just clear the dirty region.
		m_backbuffer.rectangle(ps.rcPaint);
	} else {
		Rect image_rect;
		if (!filer->isEmpty())
			image_rect = filer->current()->get()->rect();
		Size drawing_size = getDrawSize(image_rect.size());
		Rect src = { 0, 0, drawing_size.x, drawing_size.y };
		// Re-render the scaled image into the offscreen buffer only when
		// its size changed or the image was explicitly invalidated.
		if (m_offscreen.compatible(hdc, drawing_size) || isImageInvalidated) {
			isImageInvalidated = false;
			filer->current()->get()->draw(m_offscreen, src, image_rect);
		}
		m_drawingRect = getDrawRect(drawing_size);
		m_offscreen.transfer(m_backbuffer, m_drawingRect, src);
		// Clear the part of the dirty region not covered by the image.
		ClearBackground(m_backbuffer, ps.rcPaint, m_drawingRect);
	}
	list->draw(&m_backbuffer);
	m_backbuffer.transfer(hdc, ps.rcPaint, ps.rcPaint);
	EndPaint(*this, &ps);
	return 1;
}
// Rectangle (client coordinates) where the current image will be drawn.
basis::Rect CImageViewer:: getDrawRect() const {
	if (filer->isEmpty()) return{};
	return getDrawRect(getDrawSize(filer->current()->get()->size()));
}
// Rectangle for an image of `size`, shifted by the user drag offset and
// optionally centered in the client area.
basis::Rect CImageViewer:: getDrawRect(const Size &size) const {
	Rect rc{ 0, 0, size.x, size.y };
	rc.move(m_offset);
	if
(menu->isSelected(ID::VIEW_CENTER)) rc.move((getClientSize() - size) / 2); return rc; } basis::Size CImageViewer:: getDrawSize(const Size &image_size) const { Size size = image_size; const auto client = getClientSize(); if (!client.x || !client.y || !size.x || !size.y) return{}; if (menu->isSelected((client.x > size.x && client.y > size.y) ? ID::VIEW_UPSCALE : ID::VIEW_DOWNSCALE)) { if (static_cast<double>(size.x) / client.x > static_cast<double>(size.y) / client.y) { size.y = static_cast<int>(0.5 + size.y * static_cast<double>(client.x) / size.x); size.x = client.x; } else { size.x = static_cast<int>(0.5 + size.x * static_cast<double>(client.y) / size.y); size.y = client.y; } } return size; } void CImageViewer:: invalidate() const { isImageInvalidated = true; Window::invalidate(); } void CImageViewer:: invalidate_image() const { isImageInvalidated = true; invalidate(m_drawingRect); invalidate(getDrawRect()); } void CImageViewer::move_image(Size diff) { m_offset += diff; invalidate(getDrawRect().unite(m_drawingRect)); } } // namespace <file_sep>[General] ; 設定を有効化します。 bEnableSettings = true ; 画像ファイルを表示前にロードする範囲の最大値を指定します。 ; ロード済みのファイルは画像切替にかかる時間が短縮されます。 ; この値は、ロード済みのキャッシュを保持する範囲としても使われます。 ; キャッシュがメモリを圧迫する場合、事前ロードが抑制されてしまうので、 ; そのような場合はこの値を小さな値に設定することで回避できます。 nPreloadRangeMax = 10; ; 画像ファイルを表示前にロードする範囲の最小値を指定します。 ; この範囲にあるファイルは、メモリ状況に関わらず事前にロードが行われます。 nPreloadRangeMin = 2; ; アプリケーションが使用するメモリ制限の目標値をメガバイト単位で指定します。 ; 使用量がこの値を越えるか、システムメモリの残りが少ない状況では、 ; nPreloadRangeMax で指定した事前ロードは抑制されます。 nMemoryCapMegaBytes = 200; ; 前回表示したフォルダ名が保存されます。 ; コンテキストメニューにショートカットが表示されます。 sLastOpenedFile= [Menu] ; コンテキストメニューの選択状態が保存されます。 bHideTitleBar=false ShowFilename=true ShowList=false UpscaleImage=false DownscaleImage=true Centering=true SortBy =1 [Window] ; このセクション内の設定を有効化します。 bEnableWindowReminder = true ; 前回終了時のウィンドウ位置を復元するかどうかを指定します。 bRemindWindowPosition = true ; 前回終了時のウィンドウ位置が保存されます。 nWindowLeft=200 nWindowTop=200 nWindowRight=840 nWindowBottom=680 ; 
前回終了時に最大化されていた場合、開始時に最大化するかどうかを指定します。 bRemindWindowZooming = true ; 最大化状態が保存されます。 bWindowMaximize =false ; タイトルバーを非表示にしていた場合、開始時に非表示にするか指定します。 bRemindWindowStyle = true ; タイトルバーが非表示であればtrue, 表示であればfalseが保存されます。 bHideTitleBar =false [Control] ; コマンドに対応するアクセラレータキーを指定します。 ; 複数指定する場合はカンマ区切り。 ; Ctrl, Alt, Shiftのうち0-3個と、キーの組み合わせが可能。 ; キーはキー名、または仮想キーコードを10進または16進(0x)で指定。 ; 前のファイルを表示 FileBack = Left, Up, W, A ; 次のファイルを表示 FileNext = Right, Down, S, D ; 先頭のファイルを表示 FileFirst = Home ; 末尾のファイルを表示 FileLast= End ; ファイルをリロード FileReload = Ctrl + R ; キーを押している間、ドラッグで画像の表示位置を移動 GripImage = Space ; ファイルを表示候補から除外 RemoveFromList = Ctrl + K ; ファイルリストの表示・非表示切替 ShowList = Tab ; ウィンドウサイズを通常、最大化、マルチモニタの順に切替 ToggleScreen = Esc ; ファイルをゴミ箱へ移動するかどうかの確認画面を表示 FileDelete = Delete ; ファイルを即座にゴミ箱へ移動 QuickFileDelete = Ctrl + Delete ; ウィンドウを閉じる CloseWindow = Ctrl+W <file_sep>#include "find_file.h" #include "list_item.h" #include "ids.h" #include "filer.h" namespace image_viewer { void CImageViewer::Filer:: generate(std::basic_string<TCHAR> dir) { clear(); basis::CFindFile e((dir += TEXT("\\*")).c_str()); for (int i = 0; e.nextFile(); i++) { m_list.emplace_back(new Content(e.get())); m_list.back()->index = i; } m_current = begin(); } // first != last のとき、lastは含まないことに注意 CImageViewer::iterator CImageViewer::Filer:: erase(iterator first, iterator last) { const iterator iEnd = end(); if (first == iEnd) { return iEnd; } for (iterator i = first; i != iEnd; ++i) { if (i != m_current) { if (i == last) break; continue; } if (first == last && std::next(last) != iEnd) m_current = std::next(last); else if (first != last && last != iEnd) m_current = last; else if (first != begin()) m_current = std::prev(first); else m_current = iEnd; break; } if (first == last) last = m_list.erase(first); else last = m_list.erase(first, last); // ひとつ前からインデックス再作成 first = move(last, -1); giveIndices(first, first == begin() ? 
0 : first->get()->index); return last; } void CImageViewer::Filer:: sort() { ID sort_way = m_parent.getSortWay(); m_list.sort([sort_way, this](const Element&lhs, const Element&rhs) { return compare(lhs, rhs, sort_way); }); giveIndices(m_list.begin(), 0); } bool CImageViewer::Filer:: compare(const Element &lhs, const Element &rhs, ID sortWay) { FILETIME ft1, ft2; bool descending = false; switch (sortWay) { default: case ID::SORT_GREATER_WRITE: descending = true; // fall through case ID::SORT_LESSER_WRITE: ft1 = lhs->ftWrite(); ft2 = rhs->ftWrite(); break; case ID::SORT_GREATER_CREATION: descending = true; // fall through case ID::SORT_LESSER_CREATION: ft1 = lhs->ftCreate(); ft2 = rhs->ftCreate(); break; case ID::SORT_GREATER_ACCESS: descending = true; // fall through case ID::SORT_LESSER_ACCESS: ft1 = lhs->ftAccess(); ft2 = rhs->ftAccess(); break; } return (::CompareFileTime(&ft1, &ft2) == (descending ? 1 : -1)); } int CImageViewer::Filer:: indexof(const_iterator itr) const { return itr == cend() ? 
0 : itr->get()->index; } void CImageViewer::Filer:: giveIndices(iterator iStart, int index) { for (; iStart != end(); ++iStart) { iStart->get()->index = index++; } } CImageViewer::iterator CImageViewer::Filer:: search(std::function<bool(Element&)> func) { assert(func); iterator itr = begin(); for (; itr != end(); ++itr) if (func(*itr)) break; return itr; } CImageViewer::iterator CImageViewer::Filer:: move(iterator itr, int nCount) { iterator iEnd = end(); if (isEmpty()) return iEnd; // search prev for (iterator iFirst = begin(); (nCount < 0 && itr != iFirst); ++nCount) { --itr; } // search next for (--iEnd; nCount > 0 && itr != iEnd; --nCount) { ++itr; } return itr; } } // namespace<file_sep>#include "imemory.h" #include "cdd_bitmap.h" #include "file_path.h" #include "file_item.h" #include "list_item.h" namespace image_viewer { class CListItem::Impl { public: Impl(const WIN32_FIND_DATA &fd) : m_fileName(fd.cFileName), m_access(fd.ftLastAccessTime), m_create(fd.ftCreationTime), m_write(fd.ftLastWriteTime), m_type(TYPE::Undefined) {} using TYPE = basis::CDDBitmap::TYPE; TYPE m_type; basis::Surface m_image; std::basic_string<TCHAR> m_fileName; FILETIME m_access; FILETIME m_create; FILETIME m_write; }; CListItem::CListItem(const WIN32_FIND_DATA &fd) : impl(new Impl(fd)), weight(fd.nFileSizeLow) {} CListItem::~CListItem() = default; CListItem::Status CListItem:: loadImage(basis::CFilePath path) { using CDDBitmap = basis::CDDBitmap; using IMemory = basis::IMemory; auto file = path.open(); if (!file) { if (path.exist()) return Status::CannotOpen; impl->m_type = Impl::TYPE::Error; return Status::NotExist; } LARGE_INTEGER file_size = file.getSize(); if (file_size.HighPart > 0) { impl->m_type = Impl::TYPE::Error; return Status::SizeError; } // Gdiplus or OleLoadPicture bool constexpr kUseGdiplus = true; std::unique_ptr<IMemory> mem(kUseGdiplus ? 
static_cast<IMemory*>(new basis::HeapMemory) :
		static_cast<IMemory*>(new basis::GlobalMemory));
	mem->alloc(file_size.LowPart, false);
	BYTE *p = static_cast<BYTE*>(mem->address());
	if (!p) {
		return Status::MemoryError;
	}
	// Read just enough bytes to sniff the image type.
	if (!file.read(p, CDDBitmap::kTypeCheckBytes, INFINITE))
		throw 0;
	impl->m_type = CDDBitmap::getType(p);
	if (impl->m_type == Impl::TYPE::Error)
		return Status::TypeError;
	// Read the remainder of the file behind the sniffed header.
	// NOTE(review): byte count 0 presumably means "read to end of file"
	// -- confirm against CFileItem::read.
	if (!file.read(static_cast<void*>
		(p + CDDBitmap::kTypeCheckBytes), 0, INFINITE))
		throw 0;
	CDDBitmap bmp;
	// Decode via GDI+ (from raw memory) or OleLoadPicture (from an HGLOBAL),
	// matching the memory kind chosen above.
	bool const succeeded = (kUseGdiplus) ?
		bmp.loadGdiplus(mem->address(), file_size.LowPart) :
		bmp.loadOleLoad(dynamic_cast<basis::GlobalMemory*>
		(mem.get())->handle());
	if (succeeded) {
		// weight is used for cache accounting (bytes the decoded image occupies).
		weight = static_cast<int>(bmp.usage());
		impl->m_image = std::move(bmp);
		return Status::Loaded;
	}
	impl->m_type = Impl::TYPE::Error;
	return Status::LoadError;
}
// File name (without directory) of this item.
const TCHAR * CListItem:: fileName() const { return impl->m_fileName.c_str(); }
// True if a previous load attempt marked this item as broken.
bool CListItem:: isLoadingFailed() { return impl->m_type == Impl::TYPE::Error; }
// Releases the decoded image; file metadata is kept.
void CListItem::unload() { impl->m_image.reset(); }
// True if a decoded image is currently held in memory.
bool CListItem:: isLoaded() const { return impl->m_image.operator bool(); }
// Blits rcSrc of the image into rcDest of dest; returns false when not loaded.
bool CListItem:: draw(HDC dest, const RECT & rcDest, const RECT & rcSrc) { return isLoaded() && impl->m_image.transfer(dest, rcDest, rcSrc); }
// Pixel size of the decoded image.
basis::Size CListItem:: size() const { return impl->m_image.size(); }
// Image rectangle with origin (0, 0).
basis::Rect CListItem:: rect() const { return{ 0, 0, size().x, size().y }; }
// File times captured from WIN32_FIND_DATA at enumeration time.
FILETIME CListItem:: ftAccess() const { return impl->m_access; }
FILETIME CListItem:: ftCreate() const { return impl->m_create; }
FILETIME CListItem:: ftWrite() const { return impl->m_write; }
} // namespace
<file_sep>#include "exception.h"
#include "find_file.h"
namespace basis {
// Begins enumeration of `path` (may contain wildcards);
// a null path yields an already-finished enumerator.
CFindFile::CFindFile(const TCHAR *path) : fd(new WIN32_FIND_DATA) {
	hf = path ?
FindFirstFile(path, fd) : nullptr; if (hf == INVALID_HANDLE_VALUE) hf = nullptr; fd->dwFileAttributes |= kFileAttrFirstFile; } CFindFile::~CFindFile() { close(); delete fd; } CFindFile::CFindFile(CFindFile && s) noexcept : fd(0), hf(0) { *this = std::move(s); } CFindFile& CFindFile:: operator=(CFindFile &&s) noexcept { std::swap(*this, s); return *this; } bool CFindFile:: next() noexcept { // First file was already set. if (fd->dwFileAttributes & kFileAttrFirstFile) { fd->dwFileAttributes &= ~kFileAttrFirstFile; } else { if (hf && FindNextFile(hf, fd) == FALSE) close(); } return hf != nullptr; } bool CFindFile:: nextFile() noexcept { while (next()) { if (!(get().dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY)) return true; } return false; } bool CFindFile:: nextDirectory() noexcept { while (next()) { if (get().dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) return true; } return false; } void CFindFile:: close() noexcept { if (hf) { FindClose(hf); hf = nullptr; } } } // namespace<file_sep>#pragma once #ifndef GUID_2BBA94E8F65D481D904502F7C1B79E19 #define GUID_2BBA94E8F65D481D904502F7C1B79E19 #include "movable.h" #include "list_item.h" #include "image_viewer.h" #include "surface.h" namespace image_viewer { class CImageViewer::CDrawList { public: using Surface = basis::Surface; CDrawList(CImageViewer &parent_); ~CDrawList(); // Shows the list. void show() { m_enable = true; invalidate(); } // Hides the list. void hide() { m_enable = false; invalidate(); } // Invalidate the rectangle to where it draws the list. void invalidate(); //! Returns rectangle that the list was drawn expressed by client coordinate. Rect rect() { return m_offset.rect(); } //! Draws the list. Rect draw(Surface *s) { return drawList(s, true); } //! Moves the list. This will cause WM_PAINT message posted. void move(basis::Size diff) { parent.invalidate(rect()); m_offset.move(diff); invalidate(); } iterator itemFromPt(basis::Point pt); bool isInclusive(basis::Point pt); private: /*! 
Draws the list and returns the rectangle. If bDraw was set to be false then not drawn, this allows getting next area to draw. */ Rect drawList(Surface *s, bool bDraw); Rect do_drawList(Surface *s, bool bDraw); using Position = std::pair<iterator, Rect>; std::vector<Position> m_pos; CImageViewer &parent; bool m_enable; HFONT boldFont; basis::CMovable m_offset; std::basic_string<TCHAR> sNoFileInfo; // Drawn if there was no file to show. enum ColorType { not_read, now_loading, loaded_image, current_file }; static constexpr COLORREF Colors[sizeof(ColorType)] = { 0, RGB(0, 0, 255), RGB(0, 255, 0), RGB(255, 0, 0) }; }; } // namespace #endif<file_sep>#pragma once #ifndef GUID_12D296409004455CB5F690D46983D9E2 #define GUID_12D296409004455CB5F690D46983D9E2 #ifndef STDAFX_H #include <memory> #include <list> #endif #include "types.h" #include "window.h" #include "file_path.h" #include "surface.h" namespace basis { class CKey; } /*! 画像ビューア・アプリケーション. 基本的なふるまいは、継承したWindowクラスに倣う。 CImageViewer().create().show().wait()、で実行可能。 ウィンドウの作成、およびメッセージポンプはWindowクラスの ワーカースレッドが行う。 */ namespace image_viewer { class CListItem; enum class ID : int; class CImageViewer final : private basis::Window { public: using Content = CListItem; using Element = std::shared_ptr<Content>; using ListTy = std::list<Element>; using iterator = ListTy::iterator; using const_iterator = ListTy::const_iterator; using Size = basis::Size; using Rect = basis::Rect; using Window = basis::Window; using Message = basis::Message; using WM = basis::Message; using FilePath = basis::CFilePath; using Surface = basis::Surface; CImageViewer(); ~CImageViewer(); CImageViewer(CImageViewer&) = delete; CImageViewer(CImageViewer&&) = delete; CImageViewer& operator=(CImageViewer&) = delete; CImageViewer& operator=(CImageViewer&&) = delete; static constexpr int VERSION = 1000; static constexpr TCHAR *NAME_VERSION = TEXT("Stella Vista ver1.0"); //! ウィンドウを生成し、メッセージポンプスレッドを稼働させる CImageViewer& create() { Window::create(); return *this; } //! 
ウィンドウを表示 CImageViewer& show(int nShow = SW_SHOW) { Window::show(nShow); return *this; } //! ウィンドウを非表示 CImageViewer& hide() { Window::hide(); return *this; } //! ウィンドウが閉じられるまで待機する void waitToEnd() { Window::waitToEnd(); } //! いまのところダミー関数 int exitCode() { return m_exitCode; } private: virtual int onEvent(Window*, Message, WPARAM, LPARAM) final; int onCommand(WPARAM wp); int onPaint(); bool setPath(FilePath path); bool setCurrent(iterator itr); void showPrev(); void showNext(); void showFirst(); void showLast(); bool helper_show_must_loop(iterator iNext, const_iterator iLimit); //! Gets the key, that was set to be related to the command ID. basis::CKey getKey(ID id, int n); void saveload(bool bSave); ID getSortWay() const; bool isMultiMaximized() const; bool toggleScreen(); Size getDrawSize(const Size &image_size) const; Rect getDrawRect() const; Rect getDrawRect(const Size &drawSize) const; void invalidate() const; void invalidate(const Rect &rc) const { Window::invalidate(rc); } void invalidate_image() const; void move_image(Size diff); void reloadCurrent(); void update() const; bool updateTitleBar() const; Size m_offset; Rect m_drawingRect; //! Indicates whether show or not window titlebar temporally. bool m_bTemporaryShowTitle; FilePath m_dir; FilePath m_lastPath; Surface m_offscreen; Surface m_backbuffer; std::basic_string<TCHAR> m_captionConfirmDelete; std::basic_string<TCHAR> m_textConfirmDelete; int m_exitCode; mutable bool isImageInvalidated; class Profile; std::unique_ptr<Profile> profile; class ContextMenu; std::unique_ptr<ContextMenu> menu; class Control; std::unique_ptr<Control> control; class Filer; std::unique_ptr<Filer> filer; class CDrawList; std::unique_ptr<CDrawList> list; class Loader; std::unique_ptr<Loader> loader; }; } // namesapce #endif <file_sep>/*! @file CFileItem class is desined to asynchronous read and write files. Therefore, CFilePath.open function creates CFileItem object with flag FILE_FLAG_OVERLAPPED. 
*/ #pragma once #ifndef GUID_7E9375E462DA48E4A6A96E298F488D2F #define GUID_7E9375E462DA48E4A6A96E298F488D2F #include "types.h" #include "iunit_test.h" namespace basis { UNIT_TEST(CFileItem) class CFileItem { public: CFileItem(HANDLE h = 0) : m_h(h), m_overlapped(0), m_offset({}) {} ~CFileItem() { close(); } CFileItem(CFileItem&) = delete; CFileItem&operator=(CFileItem&) = delete; CFileItem(CFileItem &&s); CFileItem&operator=(CFileItem &&s); explicit operator bool() { return m_h != nullptr && m_h != INVALID_HANDLE_VALUE; } LARGE_INTEGER getSize() const; LARGE_INTEGER inline offset() const noexcept; void inline seek(LARGE_INTEGER offset) noexcept; bool read(void *dest, DWORD bytes, DWORD timeWait); bool write(const void *source, DWORD bytes, DWORD timeWait); bool finish(DWORD timeWait = INFINITE); bool close(); private: HANDLE m_h; LPOVERLAPPED m_overlapped; LARGE_INTEGER m_offset; }; // -------------- Inline Methods ------------- // LARGE_INTEGER inline CFileItem:: offset() const noexcept { return m_offset; } void inline CFileItem:: seek(LARGE_INTEGER offset) noexcept { m_offset = offset; } } // namespace #endif<file_sep>/*! @file Class definition of CImageViewer::ContextMenu. That class provides the application the way to manage context menu item's status. */ #pragma once #ifndef GUID_56ED3B352A5D42EBA6E9E85670749C89 #define GUID_56ED3B352A5D42EBA6E9E85670749C89 #include "image_viewer.h" namespace image_viewer { enum class ID : int; //! Menu Controls class CImageViewer::ContextMenu { public: ContextMenu(CImageViewer &parent_); ~ContextMenu(); /*! メニュー状態を復元し、アプリケーション設定に反映する. ウィンドウを生成してから呼び出すこと。 */ void initialize(); //! メニュー項目の状態を設定ファイルに保存する void saveSettings(); //! アプリケーションの状態に合わせてメニュー項目の有効/無効を切り替える bool updateStatus(); /*! 項目のチェック状態を反転する. ラジオボタンの場合はONになり、グループ内の他のラジオボタンがOFFになる */ bool changeStatus(ID id); //! メニューを表示し、ユーザの選択を返す。キャンセルは0。 int track(basis::Point pt) const; //! 選択されているソート条件を返す ID getSortWay(); //! 項目を有効化する void enable(ID id); //! 
項目を無効化する void disable(ID id); //! 項目をチェック状態にする void select(ID id); //! 項目がチェック状態かどうか bool isSelected(ID id); //! 項目のチェック状態を解除 void clear(ID id); private: CImageViewer& parent; class Impl; std::unique_ptr<Impl> impl; }; } // namespace #endif<file_sep>#pragma once #ifndef GUID_5718854C303D43FF961DFBE2E38A4EA6 #define GUID_5718854C303D43FF961DFBE2E38A4EA6 #ifndef STDAFX_H #include <string> #include <memory> #include <list> #endif #include "image_viewer.h" namespace image_viewer { //! リスト管理を行う内部クラス class CImageViewer::Filer { public: Filer(CImageViewer &parent_) : m_parent(parent_), m_current(end()) {} ~Filer() = default; //! ディレクトリ内のファイルをリスト化する。 /*! @par 関数実行前に保持していたリストは解放され、 CFileEnumNoDirectories によって列挙されるファイルが格納される。 @par 先頭ファイルがカレントファイルとなる。 @param dir 対象となるディレクトリ */ void generate(std::basic_string<TCHAR> dir); /*! アイテムをリストから削除する。 @details 指定されたアイテムがカレントファイルの場合、 カレントイテレータは次のファイルに移動される。 次のファイルがない場合は前、 それもなければend要素に移動される。 @param itr 対象となるアイテム @return カレントイテレータ */ iterator erase(iterator itr) { return erase(itr, itr); } //! [first, last) の範囲の要素を削除する iterator erase(iterator first, iterator last); /*! リストのすべての要素を削除する。 @details カレントイテレータはend()に移動される。 */ void clear() { m_list.clear(); m_current = end(); } /*! リストをソートする。 @details メニューで指定されたFILETIME比較でソートされる。 */ void sort(); //! カレントイテレータを取得。空の場合はend() iterator current() const { return m_current; } //! カレントイテレータを設定 void setCurrent(iterator itr) { m_current = itr; } const_iterator cbegin() const { return m_list.cbegin(); } const_iterator cend() const { return m_list.cend(); } const_iterator clast() const { return isEmpty() ? cend() : --cend(); } iterator begin() { return m_list.begin(); } iterator end() { return m_list.end(); } iterator last() { return isEmpty() ? 
end() : --end(); } bool isEmpty() const { return m_list.empty(); } size_t size() const { return m_list.size(); } // リスト中のインデックスを返す int indexof(const_iterator itr) const; // 指定位置以降のアイテムに、リスト中のインデックスを与える void giveIndices(iterator iStart, int index); iterator search(std::function<bool(Element&)> func); // 有効なイテレータのみを対象に、nCountぶん移動する iterator move(iterator itr, int nCount); private: bool compare(const Element&, const Element&, ID sortWay); CImageViewer &m_parent; ListTy m_list; iterator m_current; }; } // namespace #endif <file_sep>#pragma once #ifndef GUID_05B31CB4D53047EDAFFA66A83A82742A #define GUID_05B31CB4D53047EDAFFA66A83A82742A #ifndef STDAFX_H #include <memory> #include <functional> #endif #include "types.h" #include "ievent_handler.h" namespace basis { class StringBuffer; enum class Message : int; /*! Window class. @note All coordinates used as parameters and return values is expressed in virtual screen coordinates. */ class Window : public IEventHandler { public: using Listener = std::function<int(Window*, Message, WPARAM, LPARAM)>; Window(); virtual ~Window(); Window(const Window&) = delete; Window&operator=(const Window&) = delete; Window(Window&&) = default; Window&operator=(Window&&) = default; operator HWND() const; const Window& hook(IEventHandler *p) const; const Window& hook(Listener f) const; const Window& unhook(IEventHandler *p) const; //! デフォルトのウィンドウ作成 virtual Window& create(); void waitToEnd() const; virtual int onEvent(Window*, Message, WPARAM, LPARAM) override { return 0; } //! Places a message in window's message queue and returns control immediatly. void post(Message msg, WPARAM wp, LPARAM lp); /*! Returns rectangle of the window. This function returns the rectangle of the window. The rectangle is including non-client area, title bar and border for example. Different from GetWindowRect API function, this function returns it excluding aero glass padding, that is used by Windows Aero on Windows Vista or later. */ Rect getRect() const; /*! 
Sets rectangle of the window. This function retreives a rectangle consits of left-top and right bottom coordinates. This function will calculate aero glass position so that window will fit left-top edge when left-top coordinate is set to be (0, 0). */ void setRect(Rect dest) const; Rect getWindowRect() const; // API wrapper Size getWindowSize() const; /*! Returns client area of the window. Different from GetClientRect API function, returned rectangle is expressed in virtual-screen coordinates. */ Rect getClientRectInScreen() const; Rect getClientRect() const; // API wrapper Size getClientSize() const; // Gets a coordinate where normalized window would be positioned. Rect place() const; //! Sets position for normalized window. bool place(const Rect& target) const; //! Moves the window relatively. bool move(Size s) const; //! Moves the window to specified virtual screen coordinates. bool moveTo(Point pt) const; // Gets window size. Size getSize() const { return getRect().size(); } // Gets window width. int getWidth() const { return getRect().width(); } // Gets window height. int getHeight() const { return getRect().height(); } //! Maximizes the window. void maximize() const; //! Minimizes the window. void minimize() const; //! Makes window back from being maximized or minimized. void normalize() const; //! Returns whether the window has been maximized or not. bool isMaximized() const; //! Returns whether the window has been minimized or not. bool isMinimized() const; //! Maximize the window to fit entire virtual screen. void maximize_multi() const; //! Gets window style. LONG getStyle() const; //! Sets window style. LONG setStyle(LONG windowStyle) const; //! Gets caption's height the window has. int getCaptionHeight() const; /*! Makes the window popuped or un-popuped. If the window had been maximized, window size will not change before and after. If the window size was normal, window size will change, but window's client area will not change or move. 
*/ void popup(bool state = true) const; //! Returns whether the window is popuped or not. bool isPopup() const; /*! Shows the window. It can also retreive the parameter that was handed to entry point to indicate how to show it. */ const Window& show(int nShow = SW_SHOW) const; //! Hides the window. const Window& hide() const; void invalidate() const; void invalidate(const Rect &rc) const; //! Updates the window by executing WM_PAINT message. void update() const; //! Activates the window; makes it to be focused. bool activate() const; //! Makes the window foreground. bool setForeground() const; //! Changes the text of the window title bar. bool setTitle(const TCHAR *p) const; //! Returns a copied string of the window title bar. StringBuffer getTitle() const; HWND addChild(const TCHAR *title, Rect pos, DWORD addStyle); HWND addButton(const TCHAR *title, const Rect& pos); HWND addRadioButton(const TCHAR *title, const Rect& pos); protected: virtual int run() const; void destroy(); bool applyFrame() const; bool setWindowRect(const Rect &rc, UINT flag) const;// API wrapper //! リスナにイベントを渡す int broadcast(Message, WPARAM, LPARAM); private: class DWM; // Desktop Window Manager, Utility class Impl; std::unique_ptr<Impl> impl; }; } // namespace #endif <file_sep>#pragma once #ifndef GUID_A2759F8254E74760A21048340607EAB4 #define GUID_A2759F8254E74760A21048340607EAB4 #include "window_message.h" namespace image_viewer { /*! コントロールID (0, 0x6FFF]. 
100か101から使うのが慣例らしい。 COMMAND_BEGIN 以降はプロファイル可能なコマンドID。 コマンドID に対応する文字列は Profile クラスにて定義される。 MENU_BEGIN 以降はメニュー項目で、このID順に並ぶ。 */ enum class ID : int { UNDEFINED = 0, LAST_PATH = 101, PROFILE_ENABLE, LOADER_RANGE_MIN, LOADER_RANGE_MAX, LOADER_MEMORY_CAP, LOADER_IMAGE_LOADED, LIST_EMPTY, CONFIRM_DELETE, WINDOW_REMINDER, WINDOW_ZOOMING, WINDOW_STYLE, WINDOW_POSITION, WINDOW_LEFT, WINDOW_TOP, WINDOW_RIGHT, WINDOW_BOTTOM, WINDOW_MAXIMIZE, IDM_USER = 150, COMMAND_BEGIN = 200, GRIP_IMAGE, UNGRIP_IMAGE, LIST_REMOVE, FILE_QUICK_DELETE, MENU_BEGIN, FILE_BACK, FILE_NEXT, FILE_FIRST, FILE_LAST, FILE_RELOAD, FILE_DELETE, USE_PROFILE, SCREEN_TOGGLE, VIEW_BEGIN, VIEW_POPUP, VIEW_FILENAME, VIEW_FILELIST, VIEW_CENTER, VIEW_UPSCALE, VIEW_DOWNSCALE, VIEW_END, SORT_BEGIN, SORT_GREATER_WRITE, SORT_LESSER_WRITE, SORT_GREATER_CREATION, SORT_LESSER_CREATION, SORT_GREATER_ACCESS, SORT_LESSER_ACCESS, SORT_END, SHOW_PROPERTY, WINDOW_CLOSE, MENU_END, COMMAND_END, IDM_USER_END, // 終端 }; // enum ID inline next(ID id) { return static_cast<ID>(static_cast<int>(id) + 1); } ID inline prev(ID id) { return static_cast<ID>(static_cast<int>(id) - 1); } } // namespace #endif<file_sep>#include "movable.h" namespace basis { UNIT_TEST_FUNC(CMovable) { CMovable a; if (a.pt().x != 0 || a.pt().y != 0) throw 0; CMovable b(&a); if (a.moveTo({ 10, 20 }) == false) throw 0; if (b.move({ 100, 100 }) == false) throw 0; if (b.pt().x != 110 || b.pt().y != 120) throw 0; return true; } } // namespace<file_sep>#pragma once #ifndef GUID_970E28484B3F45E18EDCA1E4A996BF26 #define GUID_970E28484B3F45E18EDCA1E4A996BF26 #include "image_viewer.h" #include "key_map.h" #include "mouse_drag.h" #include "unit_dispenser.h" #include "ids.h" namespace image_viewer { class CImageViewer::Control final : public basis::IEventHandler { public: Control(CImageViewer &parent_); ~Control() = default; virtual int onEvent(Window*, Message, WPARAM, LPARAM) final; ID getCommand(WPARAM wp) { return 
static_cast<ID>(keymap.getCommand(static_cast<DWORD>(wp), true)); } basis::CKey getKey(ID id, int n) { return keymap.getKey(static_cast<int>(id), n); } private: bool loadKeyCommands(); int onMouseMove(Window*, Message, WPARAM, LPARAM); int onMouseDrag(Window*); int onLButtonClick(); int onRButtonDrag(); int onLButtonDrag(Window*); int delegateCommand(ID command_id) { return parent.broadcast(WM::COMMAND, static_cast<WPARAM>(command_id), 0); } CImageViewer &parent; bool m_bGripImage; basis::CKeyMap keymap; basis::CMouseDrag mouse; CUnitDispenser<short> wheel; // マウスホイールの回転検出 CUnitDispenser<int> hFlip; // 左右ドラッグ CUnitDispenser<int> vFlip; // 上下ドラッグ bool m_bDragList; // Dragging item-list? or image. }; } // namespace #endif <file_sep>#include "key_combination.h" #include "stdfnc.h" #include "exception.h" #include "char_t.h" namespace { int perseFunctionKey(const TCHAR *str, size_t size) { if (!str || *str != TEXT('F')) return 0; ::basis::Char_T<TCHAR> c; if (size < 2 || (c = str[1]) == TEXT('0') || !c.isDigit()) return 0; if (size > 3 || (size == 3 && !(c = str[2]).isDigit())) return 0; return ::basis::ToInt(str + 1, 1) + VK_F1 - 1; } } // namespace namespace basis { std::map<unsigned char, tstr> CKey::Text; CKey::CKey() : m_keys(0) { if (Text.empty()) { Text = { { VK_SHIFT, TEXT("Shift") }, { VK_CONTROL, TEXT("Ctrl") }, { VK_MENU, TEXT("Alt") }, { VK_DELETE, TEXT("Delete") }, { VK_ESCAPE, TEXT("Esc") }, { VK_BACK, TEXT("Back") }, { VK_RETURN, TEXT("Enter") }, { VK_TAB, TEXT("Tab") }, { VK_INSERT, TEXT("Insert") }, { VK_HOME, TEXT("Home") }, { VK_END, TEXT("End") }, { VK_UP, TEXT("Up") }, { VK_DOWN, TEXT("Down") }, { VK_LEFT, TEXT("Left") }, { VK_RIGHT, TEXT("Right") }, { VK_SPACE, TEXT("Space") }, }; } } /*! 
変換コンストラクタ @param key 仮想キーコード。 クラス定数の修飾キーを組み合わせても良い。 下位WORDのみが使われ、上位WORDは無視される。 @par 以下の値はすべて同一のキーに変換される -VK_SHIFT -CKey::Shift -VK_SHIFT | CKey::Shift */ CKey::CKey(DWORD key) : m_keys(static_cast<WORD>(key & 0xFFFF)) { // 修飾キーのVKEYならば、修飾キーとしても使う if (vkey() == VK_SHIFT) m_keys |= SHIFT; else if (vkey() == VK_CONTROL) m_keys |= CTRL; else if (vkey() == VK_MENU) m_keys |= ALT; if (!vkey()) // vkeyがなければ修飾キーを使う descendToVKey(); } CKey& CKey::read(const TCHAR *p) { tstr t = p; tstr sub; size_t size; m_keys = 0; Char_T<TCHAR> c; for (;;) { size = termLength(t.c_str()); if (!size) return *this; sub = t.substr(0, size); t = t.substr(size); c = sub.c_str()[0]; if (c.isDigit()) { m_keys = flags() | (ToInt(sub.c_str(), 0) & 0xFF); return *this; } if (c.isAlpha()) { // ファンクションキー int vkey_f = perseFunctionKey(sub.c_str(), sub.size()); if (vkey_f >= VK_F1 && vkey_f <= VK_F24) { m_keys = flags() | static_cast<WORD>(vkey_f); return *this; } // 文字キー if (sub.size() <= 1) { // _T("v")を'V'に変換 m_keys = flags() | (toupper(toascii(sub.c_str()[0])) & 0xFF); return *this; } // ネームドキー(Enterなど) for (auto i : Text) { if (!sub.compare(i.second)) { CKey key{ i.first }; if (key.isModifier()) { *this |= key; break; } m_keys = flags() | key.vkey(); return *this; } } } } } CKey CKey:: operator&(const CKey& rhs) const { CKey key{ (vkey() == rhs.vkey()) ? 
vkey() : static_cast<DWORD>(0) }; key.m_keys |= flags() & rhs.flags(); if (!key.vkey()) key.descendToVKey(); return key; } CKey CKey::operator|(const CKey& rhs) const { if (!isModifier()) { // 組み合わせられない if (!rhs.isModifier()) throw std::invalid_argument(0); // 右が修飾キー return m_keys | rhs.flags(); } // 両方修飾キー if (rhs.isModifier()) return flags() | rhs.flags(); // 左が修飾キー return flags() | rhs.m_keys; } tstr CKey::toStr() { tstr t; unsigned char flags[3]{ VK_SHIFT, VK_CONTROL, VK_MENU }; // 修飾キー for (auto i : flags) { if (*this & i) { Plus(t, Text[i]); } } if (vkey() >= VK_F1 && vkey() <= VK_F24) { // ファンクションキー Plus(t, TEXT("F")); t += ToStr((vkey() - VK_F1 + 1)); return t; } if (vkey() <= (std::numeric_limits<char>::max)()) { basis::Char_T<char> c{ static_cast<char>(vkey()) }; if (c.isAlpha() || c.isDigit()) { Plus(t, tstr() += c.tchar()); return t; } } try { // ネームドキー Plus(t, Text.at(vkey())); } catch (std::out_of_range &) { // その他、文字キー(数字表現) Plus(t, ToStr(vkey())); } return t; } size_t CKey::termLength(const TCHAR *p, size_t nStart) { size_t n = nStart; if (_tcslen(p) <= n) return 0; // 指定子 bool bHex = (p[n] == _T('0') && p[n + 1] == _T('x')); if (bHex) { n += 2; } // 数値 bool accept_comma = (n == nStart); for (; p[n]; ++n) { if (p[n] == _T('.') && n != nStart && accept_comma) { accept_comma = false; continue; } if (bHex) { if (!Char_T<TCHAR>(p[n]).isHexDigit()) break; } else { if (!Char_T<TCHAR>(p[n]).isDigit()) break; } } if (n != nStart) return n - nStart; // 変数名 for (Char_T<TCHAR> c; ; ++n) { if ((c = p[n]) == _T('\0') || !(c.isAlpha() || c.isDigit())) break; } if (n != nStart) return n - nStart; // スペースとタブ、カンマ for (; p[n]; ++n) { if (p[n] == _T(' ') || p[n] == _T('\t')) continue; if (p[n] == _T(',') || p[n] == _T(' ')) continue; break; } if (n != nStart) return n - nStart; // operator if (p[n] == _T('+') || p[n] == _T('-')) { if (p[n + 1] == p[n] || p[n + 1] == _T('=')) return 2; return 1; } // SJIS文字列 throw 0; // return 0; } void CKey::Plus(tstr& t, const tstr& 
keyname) { if (!t.empty()) { t += TEXT("+"); } t += keyname; } } // namespace<file_sep>/*! @file 補助関数群。 */ #pragma once #ifndef GUID_FE280A758E3643D8A9E8C30E948F6C90 #define GUID_FE280A758E3643D8A9E8C30E948F6C90 #ifndef STDAFX_H #include <wtypes.h> #include <tchar.h> #include <string> #include <functional> // for SafeRelease #endif namespace { using tstr = std::basic_string<TCHAR>; } namespace basis { extern const TCHAR * const TEXT_EMPTY; // スコープアウト時にポインタをどうにかするポインタキャリア // 解体時の動作をラムダ式で受け取る以外はstd::unique_ptrと同じ template<class T> class SafeRelease { using deleter = std::function<void(T*)>; deleter m_f; T *m_p; public: // ポインタとラムダ式を与える SafeRelease(T* p, deleter f) : m_p(p), m_f(f) {} SafeRelease() : m_p(0) {} ~SafeRelease() { if(m_p && m_f) m_f(m_p); } SafeRelease(SafeRelease&) = delete; SafeRelease&operator=(SafeRelease&) = delete; SafeRelease(SafeRelease&&) = default; SafeRelease&operator=(SafeRelease&&) = default; // ポインタとして振る舞う explicit operator bool() { return m_p != nullptr; } operator T*() { return m_p; } T* get() { return m_p; } T& operator*() { return *m_p; } T* operator->() { return m_p; } // ミューテーター T* operator=(const T* p) { m_p = p; } void operator=(deleter f) { m_f = f; } }; //! バイトオーダーを逆にする(4Bytes) uint32_t ReverseByteOrder(uint32_t n); //! 実バイト数に応じて整数型の書式文字列を構成する //! d_x は%d とか %x のdとかxの文字を指定 PTSTR GetTypeFormat(TCHAR type[5], size_t bytes, TCHAR d_x); //! 整数値を文字列型に書き出し tstr ToStr(int64_t n); template<typename T> inline T ToInt(const TCHAR *p, const T &nDefault) { TCHAR d_x = (p[0] == _T('0') && p[1] == _T('x')) ? p += 2, _T('x') // 0x : _T('d'); // Decimal TCHAR type[5]; GetTypeFormat(type, sizeof(T), d_x); T i{ 0 }; if (*type == _T('\0') || _stscanf_s(p, type, &i) != 1) return nDefault; return i; } template<> bool ToInt(const TCHAR *p, const bool &bDefault); //! コマンドラインのnCmd番目の引数を返す。 /*! 
0番目は実行中のプログラムのパス、以降が引数である。 引数が""(ダブルクォート)で囲まれている場合はこれを除去する。 "path" や -ls など混在した引数でも動作する。 戻り値はbasic_string<TCHAR>型で、対象の引数がない場合は空(empty)である。 */ tstr GetCommandLine(int nCmd); /*! WM_DROPFILESメッセージが飛んできたときに num番目のファイル名を取得する */ tstr GetDropFile( WPARAM wp, UINT num = 0 ); //! 指定されたフォントのうち最大の高さをかえす。 /*! 第3引数が0の場合は第2引数の高さを、 第2引数も0の場合はカレントフォントの高さをかえす。 */ LONG GetFontHeight( HDC hdc, HFONT lhs, HFONT rhs ); //! ファイルのプロパティを表示する bool ShowProperty(HWND hParentWnd, const tstr &pszFile); //! OLE用ヘルパー関数。OLE_HIMETRIC単位(mm単位)への変換 void HimetricFromDP(HDC hdc, SIZE& s); //! 矩形 screen から foreground を除いた領域をRectangle()を使って塗りつぶす。 bool ClearBackground(HDC hdc, RECT screen, RECT foreground); } // namespace #endif<file_sep>#pragma once #ifndef GUID_E78E8F5DD24646759E45068A93FEFBCC #define GUID_E78E8F5DD24646759E45068A93FEFBCC #include "iunit_test.h" #include "types.h" namespace basis { UNIT_TEST(Cursor); /*! カーソルハンドルクラス. 関連するAPIのラッパー。 カーソルのハンドルを表すクラスで、 コピーしてもカーソルが2つになるなんてことはない(残念) */ class Cursor { public: enum class ID : int; Cursor() {}; ~Cursor() {}; //! システムカーソルに変更する bool set(ID id); //! ファイルのカーソルに変更 bool set(const TCHAR *fileName); //! ハンドルで指定したカーソルに変更 bool set(HCURSOR hCursor); //! カーソル形状を元に戻す void reset(); //! カーソル位置をかえす Point pos(); //! カーソル位置を指定する bool pos(Point pt); //! マウスをキャプチャする void capture(HWND hWnd); //! マウスキャプチャを解除 void release(); //! カーソル表示をインクリメント bool show(); //! カーソル表示をデクリメント bool hide(); private: class Impl; Impl& get(); }; //! Same as OCR_ numbers defined in winuser.h enum class Cursor::ID : int { APPSTARTING = 32650, //!< Standard arrow and small hourglass ARROW = 32512, //!< Standard arrow CROSS = 32515, //!< Crosshair HAND = 32649, //!< Hand HELP = 32651, //!< Arrow and question mark IBEAM = 32513, //!< I - beam ICON = 32641, //!< Obsolete for applications marked version 4.0 or later. NO = 32648, //!< Slashed circle SIZE = 32640, //!< Obsolete for applications marked version 4.0 or later.Use IDC_SIZEALL. 
SIZEALL = 32646, //!< Four - pointed arrow pointing north, south, east, and west SIZENESW = 32643, //!< Double - pointed arrow pointing northeast and southwest SIZENS = 32645, //!< Double - pointed arrow pointing north and south SIZENWSE = 32642, //!< Double - pointed arrow pointing northwest and southeast SIZEWE = 32644, //!< Double - pointed arrow pointing west and east UPARROW = 32516, //!< Vertical arrow WAIT = 32514, //!< Hourglass }; } // namespace #endif<file_sep>#include "profile.h" #include "private_profile.h" #include "cursor.h" #include "stdfnc.h" #include "ids.h" #include "menu.h" #include "draw_list.h" #include "control.h" namespace image_viewer { CImageViewer::Control::Control(CImageViewer &parent_) : parent(parent_), m_bGripImage(0), m_bDragList(0) { parent.hook(this); wheel.setUnit(WHEEL_DELTA); if (loadKeyCommands() == false) MessageBox(0, TEXT("There's no key settings in INI." "Please set your prefer settings in the file."), TEXT("Failed to load key settings."), MB_OK); } int CImageViewer::Control:: onEvent(Window *win, Message msg, WPARAM wp, LPARAM lp) { switch (msg) { case WM::KEYDOWN: { const ID cmd = getCommand(wp); if (cmd == ID::UNDEFINED) return 0; const bool key_repeat = (lp & 0x40000000) != 0; if (!key_repeat || cmd == ID::FILE_BACK || cmd == ID::FILE_NEXT) return delegateCommand(cmd); return 1; } case WM::KEYUP: if (getCommand(wp) == ID::GRIP_IMAGE) { return delegateCommand(ID::UNGRIP_IMAGE); } return 0; case WM::COMMAND: { switch (static_cast<ID>(LOWORD(wp))) { case ID::GRIP_IMAGE: m_bGripImage = true; return 1; case ID::UNGRIP_IMAGE: m_bGripImage = false; return 1; default: return 0; } } case WM::LBUTTONDBLCLK: mouse.proc(*win, msg, wp, lp); return delegateCommand(ID::SCREEN_TOGGLE); case WM::LBUTTONUP: case WM::RBUTTONUP: ReleaseCapture(); // fall through case WM::RBUTTONDBLCLK: case WM::LBUTTONDOWN: case WM::RBUTTONDOWN: if (!mouse.proc(*win, msg, wp, lp)) return 1; if (msg == WM::LBUTTONUP && !mouse.isDragged()) return 
onLButtonClick(); if (msg == WM::LBUTTONDOWN) { mouse.threshold(5); SetCapture(parent); m_bDragList = parent.list->isInclusive(mouse.start()); return 1; } if (mouse.vkey() == VK_RBUTTON) { if (mouse.state() & mouse.BUTTON_DOWN) { mouse.threshold(30, 30); hFlip.reset(0); hFlip.setUnit(30); vFlip.reset(0); vFlip.setUnit(30); return 1; } if (!mouse.isDragged()) { parent.menu->track(win->getClientRectInScreen().lefttop() + mouse.pos()); return 1; } } return 1; case WM::MOUSEMOVE: return onMouseMove(win, msg, wp, lp); case WM::MOUSEWHEEL: { // 回数分だけカレントを移動する int n; wheel.add(-GET_WHEEL_DELTA_WPARAM(wp)); while ((n = wheel.get()) != 0) { delegateCommand(n < 0 ? ID::FILE_BACK : ID::FILE_NEXT); } return 1; } // case } // switch return 0; } // onEvent bool CImageViewer::Control:: loadKeyCommands() { // コマンド指定のパース parent.profile->control(); parent.profile->applyToAllItemInTheSection([this](ID id, const TCHAR *str) { size_t n; basis::CKey key; basis::StringBuffer s(0, str); while (!s.empty()) { if (key.read(s.c_str())) { if (!keymap.append(key, static_cast<DWORD>(id))) throw 0; } if ((n = s.find(_T(','))) == s.npos) break; // カンマ区切りでリピート s.refer(s.c_str() + n + 1); } }); return keymap.size() != 0; } int CImageViewer::Control:: onMouseDrag(Window *win) { if (mouse.vkey() == VK_LBUTTON) return onLButtonDrag(win); if (mouse.vkey() == VK_RBUTTON) return onRButtonDrag(); return 1; } int CImageViewer::Control:: onLButtonClick() { if (parent.setCurrent(parent.list->itemFromPt(mouse.pos()))) return 1; return 0; } int CImageViewer::Control:: onLButtonDrag(Window *win) { if (m_bDragList) { parent.list->move({ 0, mouse.dy() }); } else if (m_bGripImage || win->isMaximized() || !win->isPopup()) { parent.move_image(mouse.getDifference()); } else { win->move({ mouse.dx_abs(), mouse.dy_abs() }); return 1; // 再描画済み } win->update(); return 1; } int CImageViewer::Control:: onMouseMove(Window *win, Message msg, WPARAM wp, LPARAM lp) { if (mouse.proc(*win, msg, wp, lp) && (mouse.isDragged())) 
return onMouseDrag(win); // 一時的にタイトルバーを表示する int margin = GetSystemMetrics(SM_CYCAPTION); if (mouse.pos().y <= margin && mouse.pos().y >= -margin) { if (win->isPopup()) { parent.m_bTemporaryShowTitle = true; win->popup(false); } return 1; } // 解除 else if (parent.m_bTemporaryShowTitle) { parent.m_bTemporaryShowTitle = false; if (win->isMaximized() || parent.menu->isSelected(ID::VIEW_POPUP)) win->popup(); } return 1; } int CImageViewer::Control:: onRButtonDrag() { // Currently not used. // vFlip.add(mouse.dy()); // 左右にドラッグで表示ファイル変更 hFlip.add(mouse.dx()); while (hFlip.over()) { delegateCommand(hFlip.get() < 0 ? ID::FILE_BACK : ID::FILE_NEXT); vFlip.reset(0); return 1; } return 0; } } // namespace<file_sep>#ifndef STDAFX_H #include <memory> #include <vector> #include <list> #include <assert.h> #endif #include "critical_section.h" #include "thread_unity.h" namespace basis { class CThreadUnity::Impl { public: Impl() { setThreadCount(1); } ~Impl() {} using TaskTy = CThread::TaskTy; void setThreadCount(int nThreads); void addTask(CThread *pThread, TaskTy task); void reloadTask(CThread *pThread); void run(CThread *pThread, TaskTy task); bool wait(bool bWaitAll, DWORD waitMilliSeconds); CThread *getSuspendedThread(); CriticalSection m_cs; std::list<TaskTy> m_tasks; std::vector<HANDLE> m_signals; std::vector<std::unique_ptr<CThread>> m_threads; }; void CThreadUnity::Impl:: setThreadCount(int nThreads) { assert(nThreads >= 0); CriticalSection cs = m_cs.local(); while (m_threads.size() > static_cast<size_t>(nThreads)) { m_threads.back()->wait(INFINITE); m_threads.pop_back(); m_signals.pop_back(); } while (m_threads.size() < static_cast<size_t>(nThreads)) { m_threads.emplace_back(new CThread); m_signals.push_back(m_threads.back()->getWaitHandle()); } } void CThreadUnity::Impl:: addTask(CThread *thread, TaskTy func) { auto cs = m_cs.local(); if (!thread) thread = getSuspendedThread(); if (thread) run(thread, std::move(func)); else m_tasks.push_back(std::move(func)); } bool 
CThreadUnity::Impl:: wait(bool bWaitAll, DWORD dwMilliseconds) { DWORD size = static_cast<DWORD>(m_signals.size()); DWORD result = WaitForMultipleObjects(size, &m_signals[0], bWaitAll, dwMilliseconds); return (result != WAIT_TIMEOUT); } void CThreadUnity::Impl:: run(CThread *pThread, TaskTy task) { TaskTy fReload{ [this, pThread] { reloadTask(pThread); } }; pThread->addTask(task); pThread->addTask(fReload); } CThread* CThreadUnity::Impl:: getSuspendedThread() { CriticalSection cs = m_cs.local(); for (auto &p : m_threads) { if (!p->isBusy()) return p.get(); } return nullptr; } void CThreadUnity::Impl:: reloadTask(CThread *pThread) { CriticalSection cs = m_cs.local(); if (m_tasks.empty()) return; run(pThread, m_tasks.front()); m_tasks.pop_front(); } // CThreadUnity CThreadUnity::CThreadUnity() : impl(new Impl) { impl->setThreadCount(1); } CThreadUnity::~CThreadUnity() { delete impl; } int CThreadUnity:: threadCount() { return static_cast<int>(impl->m_threads.size()); } bool CThreadUnity:: setThreadCount(int nThreads) { if (nThreads > 0 && nThreads <= MAX_THREADS) { impl->setThreadCount(nThreads); return true; } return false; } bool CThreadUnity:: wait(bool b, int time) { DWORD t = (time < 0) ? INFINITE : time; return impl->wait(b, t); } void CThreadUnity:: addTask(CThread::TaskTy task) { impl->addTask(nullptr, std::move(task)); } } // namespace<file_sep>#pragma once #ifndef GUID_F5A18BB6E7C8499CBBD555248819AE8F #define GUID_F5A18BB6E7C8499CBBD555248819AE8F #ifndef STDAFX_H #include <vector> #include <mutex> #include <assert.h> #endif #include "critical_section.h" namespace basis { //! シングルトンオブジェクトのインスタンス参照を取得する /* C++11で、ブロックスコープを持つstatic変数の初期化は、 スレッドセーフであることが規定された。 http://cpprefjp.github.io/lang/cpp11/static_initialization_thread_safely.html *//* //! シングルトンオブジェクトを解放する関数のリスト /*! 
下記を参考にvectorに置き換えたもの http://qiita.com/kikuuuty/items/fcf5f7df2f0493c437dc#%E5%8B%95%E7%9A%84%E5%89%B2%E3%82%8A%E5%BD%93%E3%81%A6%E3%81%AB%E3%82%88%E3%82%8B%E5%AE%9F%E8%A3%85%E3%81%AE%E5%95%8F%E9%A1%8C%E7%82%B9 */ class SingletonFinalizers final { public: using Finalizer = void(*)(); SingletonFinalizers() = delete; //! Push deleter to finalizer list. static void Push(Finalizer func) { maFinalizer.push_back(std::move(func)); } /*! Execute finalizers of singleton objects. Finalizers will be executed in FILO order. Getting refference to instance after finalized is not thread safe. */ static void Finalize(); private: static std::vector<Finalizer> maFinalizer; }; template<typename T> class singleton { public: singleton() { get(); } T* operator->() { return &get(); } static T& get() { std::call_once(flag_init, create); return *m_p; } static void create() { SingletonFinalizers::Push(&destroy); m_p = new T; } static void destroy() { delete m_p; m_p = nullptr; } static std::once_flag flag_init; static T* m_p; }; template<typename T> T* singleton<T>::m_p; template<typename T> std::once_flag singleton<T>::flag_init; } // namespace #endif<file_sep>#pragma once #ifndef GUID_4D7AFF9D398E4DB2B5CF8BB729E7680D #define GUID_4D7AFF9D398E4DB2B5CF8BB729E7680D namespace basis { //! ウィンドウメッセージ定義. 
enum class Message : int { UNDEFINED = 0x00 , CREATE = 0x01 , DESTROY = 0x02 , MOVE = 0x03 , SIZE = 0x05 , ACTIVATE = 0x06 , SETFOCUS = 0x07 , KILLFOCUS = 0x08 , ENABLE = 0x0A , SETREDRAW = 0x0B , SETTEXT = 0x0C , GETTEXT = 0x0D , GETTEXTLENGTH = 0x0E , PAINT = 0x0F , CLOSE = 0x10 , QUERYENDSESSION = 0x11 , QUIT = 0x12 , QUERYOPEN = 0x13 , ERASEBKGND = 0x14 , SYSCOLORCHANGE = 0x15 , ENDSESSION = 0x16 , SYSTEMERROR = 0x17 , SHOWWINDOW = 0x18 , CTLCOLOR = 0x19 , WININICHANGE = 0x1A , SETTINGCHANGE = 0x1A , DEVMODECHANGE = 0x1B , ACTIVATEAPP = 0x1C , FONTCHANGE = 0x1D , TIMECHANGE = 0x1E , CANCELMODE = 0x1F , SETCURSOR = 0x20 , MOUSEACTIVATE = 0x21 , CHILDACTIVATE = 0x22 , QUEUESYNC = 0x23 , GETMINMAXINFO = 0x24 , PAINTICON = 0x26 , ICONERASEBKGND = 0x27 , NEXTDLGCTL = 0x28 , SPOOLERSTATUS = 0x2A , DRAWITEM = 0x2B , MEASUREITEM = 0x2C , DELETEITEM = 0x2D , VKEYTOITEM = 0x2E , CHARTOITEM = 0x2F , SETFONT = 0x30 , GETFONT = 0x31 , SETHOTKEY = 0x32 , GETHOTKEY = 0x33 , QUERYDRAGICON = 0x37 , COMPAREITEM = 0x39 , COMPACTING = 0x41 , WINDOWPOSCHANGING = 0x46 , WINDOWPOSCHANGED = 0x47 , POWER = 0x48 , COPYDATA = 0x4A , CANCELJOURNAL = 0x4B , NOTIFY = 0x4E , INPUTLANGCHANGEREQUEST = 0x50 , INPUTLANGCHANGE = 0x51 , TCARD = 0x52 , HELP = 0x53 , USERCHANGED = 0x54 , NOTIFYFORMAT = 0x55 , CONTEXTMENU = 0x7B , STYLECHANGING = 0x7C , STYLECHANGED = 0x7D , DISPLAYCHANGE = 0x7E , GETICON = 0x7F , SETICON = 0x80 , NCCREATE = 0x81 , NCDESTROY = 0x82 , NCCALCSIZE = 0x83 , NCHITTEST = 0x84 , NCPAINT = 0x85 , NCACTIVATE = 0x86 , GETDLGCODE = 0x87 , NCMOUSEMOVE = 0xA0 , NCLBUTTONDOWN = 0xA1 , NCLBUTTONUP = 0xA2 , NCLBUTTONDBLCLK = 0xA3 , NCRBUTTONDOWN = 0xA4 , NCRBUTTONUP = 0xA5 , NCRBUTTONDBLCLK = 0xA6 , NCMBUTTONDOWN = 0xA7 , NCMBUTTONUP = 0xA8 , NCMBUTTONDBLCLK = 0xA9 , KEYFIRST = 0x100 , KEYDOWN = 0x100 , KEYUP = 0x101 , CHAR = 0x102 , DEADCHAR = 0x103 , SYSKEYDOWN = 0x104 , SYSKEYUP = 0x105 , SYSCHAR = 0x106 , SYSDEADCHAR = 0x107 , KEYLAST = 0x108 , IME_STARTCOMPOSITION = 0x10D , 
IME_ENDCOMPOSITION = 0x10E , IME_COMPOSITION = 0x10F , IME_KEYLAST = 0x10F , INITDIALOG = 0x110 , COMMAND = 0x111 , SYSCOMMAND = 0x112 , TIMER = 0x113 , HSCROLL = 0x114 , VSCROLL = 0x115 , INITMENU = 0x116 , INITMENUPOPUP = 0x117 , MENUSELECT = 0x11F , MENUCHAR = 0x120 , ENTERIDLE = 0x121 , CTLCOLORMSGBOX = 0x132 , CTLCOLOREDIT = 0x133 , CTLCOLORLISTBOX = 0x134 , CTLCOLORBTN = 0x135 , CTLCOLORDLG = 0x136 , CTLCOLORSCROLLBAR = 0x137 , CTLCOLORSTATIC = 0x138 , MOUSEFIRST = 0x200 , MOUSEMOVE = 0x200 , LBUTTONDOWN = 0x201 , LBUTTONUP = 0x202 , LBUTTONDBLCLK = 0x203 , RBUTTONDOWN = 0x204 , RBUTTONUP = 0x205 , RBUTTONDBLCLK = 0x206 , MBUTTONDOWN = 0x207 , MBUTTONUP = 0x208 , MBUTTONDBLCLK = 0x209 , MOUSEWHEEL = 0x20A , MOUSEHWHEEL = 0x20E , PARENTNOTIFY = 0x210 , ENTERMENULOOP = 0x211 , EXITMENULOOP = 0x212 , NEXTMENU = 0x213 , SIZING = 0x214 , CAPTURECHANGED = 0x215 , MOVING = 0x216 , POWERBROADCAST = 0x218 , DEVICECHANGE = 0x219 , MDICREATE = 0x220 , MDIDESTROY = 0x221 , MDIACTIVATE = 0x222 , MDIRESTORE = 0x223 , MDINEXT = 0x224 , MDIMAXIMIZE = 0x225 , MDITILE = 0x226 , MDICASCADE = 0x227 , MDIICONARRANGE = 0x228 , MDIGETACTIVE = 0x229 , MDISETMENU = 0x230 , ENTERSIZEMOVE = 0x231 , EXITSIZEMOVE = 0x232 , DROPFILES = 0x233 , MDIREFRESHMENU = 0x234 , IME_SETCONTEXT = 0x281 , IME_NOTIFY = 0x282 , IME_CONTROL = 0x283 , IME_COMPOSITIONFULL = 0x284 , IME_SELECT = 0x285 , IME_CHAR = 0x286 , IME_KEYDOWN = 0x290 , IME_KEYUP = 0x291 , MOUSEHOVER = 0x2A1 , NCMOUSELEAVE = 0x2A2 , MOUSELEAVE = 0x2A3 , CUT = 0x300 , COPY = 0x301 , PASTE = 0x302 , CLEAR = 0x303 , UNDO = 0x304 , RENDERFORMAT = 0x305 , RENDERALLFORMATS = 0x306 , DESTROYCLIPBOARD = 0x307 , DRAWCLIPBOARD = 0x308 , PAINTCLIPBOARD = 0x309 , VSCROLLCLIPBOARD = 0x30A , SIZECLIPBOARD = 0x30B , ASKCBFORMATNAME = 0x30C , CHANGECBCHAIN = 0x30D , HSCROLLCLIPBOARD = 0x30E , QUERYNEWPALETTE = 0x30F , PALETTEISCHANGING = 0x310 , PALETTECHANGED = 0x311 , HOTKEY = 0x312 , PRINT = 0x317 , PRINTCLIENT = 0x318 , HANDHELDFIRST = 0x358 , 
HANDHELDLAST = 0x35F , PENWINFIRST = 0x380 , PENWINLAST = 0x38F , COALESCE_FIRST = 0x390 , COALESCE_LAST = 0x39F , DDE_FIRST = 0x3E0 , DDE_INITIATE = 0x3E0 , DDE_TERMINATE = 0x3E1 , DDE_ADVISE = 0x3E2 , DDE_UNADVISE = 0x3E3 , DDE_ACK = 0x3E4 , DDE_DATA = 0x3E5 , DDE_REQUEST = 0x3E6 , DDE_POKE = 0x3E7 , DDE_EXECUTE = 0x3E8 , DDE_LAST = 0x3E8 , USER = 0x400 , APP = 0x8000 }; // enum class } // namespace ID #endif<file_sep>#pragma once #ifndef STDAFX_H #define STDAFX_H /* stdafxの名は、MFCで使われた "Application Framework Extensions" から。 フレームワークとして扱う追加機能をまとめたものと理解できる。 個々のプログラムによって扱いが変わるものは入れてはいけない。 */ #pragma warning(push) #pragma warning(disable:4820) #include <windows.h> #include <assert.h> #include <process.h> #include <stdarg.h> #include <dwmapi.h> #include <ctype.h> #include <Psapi.h> #ifdef UNICODE #include <tchar.h> // _stprintf_s, TCHAR #else #include <stdio.h> // sprintf_s #endif #pragma warning(pop) #include <exception> #include <memory> #include <mutex> #include <algorithm> #include <array> #include <vector> #include <list> #include <map> #include <string> // クラスメンバが隠蔽される旨の警告を一時的にdisable #pragma warning(push) #pragma warning(disable:4458) #include <Gdiplus.h> #pragma warning(pop) #pragma comment( lib, "Gdiplus.lib" ) #pragma comment(lib, "shell32.lib") #include <ShlObj.h> #include <ShObjIdl.h> #include <ShlGuid.h> #include <atlbase.h> #if _DEBUG #define _CRTDBG_MAP_ALLOC #include <crtdbg.h> #define new new(_NORMAL_BLOCK, __FILE__, __LINE__) #endif #endif<file_sep>#include "file_path.h" #include "file_item.h" #include "stdfnc.h" #ifndef STDAFX_H // include files for trash() #pragma comment(lib, "shell32.lib") #include <ShlObj.h> #include <atlbase.h> #endif namespace basis { CFilePath:: CFilePath(const TCHAR *path) : m_path(StringBuffer::length(path) + 5) { static const TCHAR * const unc = TEXT("\\\\?\\"); if (!StringBuffer(0, path).compare(unc, 4, 0)) m_path.append(unc); m_path.append(path); } const StringBuffer CFilePath:: getFileName() const noexcept { size_t ix = 
m_path.rfind(TEXT('\\')); if (ix == StringBuffer::npos) return{}; else return StringBuffer(0, m_path.c_str() + ix + 1); } void CFilePath:: setFileName(const TCHAR *p) { size_t ix = m_path.rfind(TEXT('\\')); if (ix == StringBuffer::npos) m_path = p; else m_path.write(ix + 1, p, StringBuffer::length(p)); } bool CFilePath:: exist() const noexcept { return (GetFileAttributes(longPath().c_str()) != -1); } bool CFilePath:: isDirectory() const noexcept { return(GetFileAttributes(longPath().c_str()) & FILE_ATTRIBUTE_DIRECTORY ) != 0; } CFileItem CFilePath:: open(DWORD access, DWORD share) { return CFileItem( ::CreateFile(longPath().c_str(), access, share, 0, OPEN_EXISTING, FILE_FLAG_OVERLAPPED, 0) ); } // 既存でない場合、占有モードでファイルを作成。 // 成功するとopen状態になる。ディレクトリは作成不可。 CFileItem CFilePath:: create(DWORD attr) { return CFileItem( ::CreateFile(longPath().c_str(), GENERIC_READ | GENERIC_WRITE, 0, 0, CREATE_NEW, attr, 0) ); } bool CFilePath:: createAsDirectory() { return 0 != CreateDirectory(longPath().c_str(), 0); } bool CFilePath:: erase() { if (isDirectory()) return 0 != RemoveDirectory(longPath().c_str()); else return 0 != DeleteFile(longPath().c_str()); } bool CFilePath:: copyTo(const CFilePath& dest, bool overwrite) { return 0 != CopyFile(longPath().c_str(), dest.longPath().c_str(), overwrite ? 
FALSE : TRUE); } bool CFilePath:: moveTo(const CFilePath& dest) { if (!MoveFile(longPath().c_str(), dest.longPath().c_str())) return false; *this = dest; return true; } bool CFilePath:: rename(const TCHAR *filename) { return moveTo(getDir() + filename); } bool CFilePath:: trash() const noexcept { CComPtr<IShellItem> pItem; if (S_OK != SHCreateItemFromParsingName(m_path.toUTF16().c_str() + 4, nullptr, IID_PPV_ARGS(&pItem))) return false; CComPtr<IFileOperation> op; if (S_OK != CoCreateInstance( CLSID_FileOperation, nullptr, CLSCTX_ALL, IID_PPV_ARGS(&op))) return false; if (S_OK != op->DeleteItem(pItem, nullptr)) return false; if (S_OK != op->PerformOperations()) return false; BOOL isAborted; if (S_OK != op->GetAnyOperationsAborted(&isAborted)) return false; return !isAborted; } // ------------------ static CFilePath CFilePath:: GetCurrentDirectory() noexcept { CFilePath t; t.m_path.let(::GetCurrentDirectory(0, 0) + sizeof(TCHAR)); ::GetCurrentDirectory(static_cast<DWORD>(t.m_path.capacity()), t.m_path.data()); return t; } CFilePath CFilePath:: GetBootDirectory() noexcept { tstr str = GetCommandLine(0); return str.substr(0, str.rfind(_T('\\'))); } } // namespace<file_sep>#ifndef STDAFX_H #include <algorithm> #include <string> #include <array> #include <tchar.h> #endif #include "string_buffer.h" namespace basis { UNIT_TEST_FUNC(StringBuffer) { // バッファサイズ指定コンストラクタ std::array<DWORD, 4> arSize = { 1, 2, MAX_PATH, 5120 }; for (auto i : arSize) { StringBuffer buf(i); if (buf.getSize() != 0) throw 0; if (buf.capacity() != i) throw 0; if (*buf.c_str() != TEXT('\0')) throw 0; *buf.data() = TEXT('\0'); if (buf.find(TEXT("test")) != StringBuffer::npos) throw 0; if (buf.rfind(TEXT("rfind")) != StringBuffer::npos) throw 0; try { buf.resize(); } catch (...) { throw 0; } if (buf.capacity() != i * 2) throw 0; } // 例外を送出すべきケース try { #ifdef _UNICODE // 非UNICODEだと指定値がオーバーフローする StringBuffer buf(StringBuffer::CharLimit + 1); return false; #endif } catch (...) 
{} // 文字列ラテラルラッパーとするとき { LPCTSTR p = TEXT("このラテラルのポインタを保持する"); StringBuffer buf(0, p); if (buf.c_str() != p) throw 0; if (buf.getSize() != _tcslen(p)) throw 0; // logic_errorを送出する try { buf.data(); throw 0; } catch (std::logic_error &e) { (void)e; } try { buf.realloc(50); throw 0; } catch (std::logic_error &e) { (void)e; } try { buf.resize(); throw 0; } catch (std::logic_error &e) { (void)e; } int n = 2; #ifdef _UNICODE n = 1; #endif if (buf.find(TEXT("ポインタ")) != 7 * n) throw 0; if (buf.rfind(TEXT("ラテラル")) != 2 * n) throw 0; if (buf.find(TEXT("する")) != 14 * n) throw 0; if (buf.find(TEXT("の"), 2 * n) != 6 * n) throw 0; if (buf != p) throw 0; auto sub = buf.substr(2 * n, 4 * n); if (sub != TEXT("ラテラル")) throw 0; } // 文字列の複製、コピーコンストラクタ、コピー代入演算子 std::array<PCTSTR, 4> arStr = { 0, TEXT(""), TEXT("テス"), TEXT("tes") }; for (auto i : arStr) { StringBuffer buf1(i); if (buf1.getSize() != (i ? _tcslen(i) : 0)) throw 0; std::basic_string<TCHAR> str(i ? i : TEXT("")); if (str.compare(buf1.c_str()) != 0) throw 0; StringBuffer buf2(buf1); if (str.compare(buf2.c_str()) != 0) throw 0; StringBuffer buf3; buf3 = buf2; if (str.compare(buf3.c_str()) != 0) throw 0; } StringBuffer from_string{ TEXT("未整理のテスト") }; StringBuffer from_size = from_string; StringBuffer copy_construct{ from_size }; StringBuffer move_construct(std::move(copy_construct)); StringBuffer move_asign; move_asign = std::move(move_construct); using tstr = std::basic_string<TCHAR>; tstr data1 = from_string.c_str(); tstr data2 = move_asign.c_str(); if (data1 != data2) throw 0; StringBuffer buf = move_asign; if (buf.capacity() == 0) throw 0; if (buf.getSize() == 0) throw 0; if (buf.c_str() == 0) throw 0; if (buf.data() == 0) throw 0; if (buf.empty()) throw 0; if (buf.shrinkToFit() == false) throw 0; if (buf.capacity() != buf.getSize() + 1) throw 0; buf.realloc(MAX_PATH); if (buf.capacity() != MAX_PATH) throw 0; data2 = buf.c_str(); if (data1 != data2) throw 0; size_t capa = buf.capacity(); buf.resize(); if (buf.capacity() 
!= capa * 2) throw 0; size_t s = buf.getSize(); buf.append(TEXT("12345")); if (buf.getSize() != s + 5) throw 0; #ifndef _UNICODE buf = "convertテスト"; if (buf.toUTF16() != L"convertテスト") throw 0; buf = "converting"; if (buf.toUTF16() != L"converting") throw 0; buf = "2バイトコードオンリー"; if (buf.toUTF16() != L"2バイトコードオンリー") throw 0; #endif return true; } } // namespace<file_sep>#pragma once #ifndef GUID_AE62FCFCEFE348AD9E93B20B97EECAC2 #define GUID_AE62FCFCEFE348AD9E93B20B97EECAC2 #ifndef STDAFX_H #include <wtypes.h> #endif #include "IEnumlator.h" namespace basis { /*! File Enumlator. CFindFile enumlates files and/or directories. To enumlate all items in a directory, please give constructor the directory name that is followed by "\\*". An astarisk is a file mask, it shows. Calling next() function will raise first file. To get access to enumlated files expressed by WIN32_FIND_DATA structure, call get() function. get() function is mere accessor and no overhead costs. nextFile() and nextDirectory() function is also available instead of next() function. These two functions enumlate only files or directories. Each call to next(), nextFile() or nextDirectory() function enumlates matched file or directory. These functions returns false when no more file left. Next sample shows how to select all files. @code TCHAR buf[MAX_PATH + 2]; GetCurrentDirectory(MAX_PATH, buf); _tcscat_s(buf, MAX_PATH + 2, TEXT("\\*")); CFindFile e(buf); while (e.nextFile()) { cout << e.get().cFileName << "("; cout << e.get().nFileSizeLow << "bytes)" << endl; if (e.get().nFileSizeLow >= 1024) break; } if (e.empty()) cout << "All files are below 1024 bytes." << endl; else cout << "This is a first file overs 1024 bytes." 
<< endl; @endcode */ class CFindFile : public IEnumlator<WIN32_FIND_DATA> { public: CFindFile(const TCHAR *path); ~CFindFile(); CFindFile(const CFindFile& s) = delete; CFindFile& operator=(const CFindFile &s) = delete; CFindFile(CFindFile &&s) noexcept; CFindFile& operator=(CFindFile &&s) noexcept; bool next() noexcept override; bool nextFile() noexcept; bool nextDirectory() noexcept; bool empty() const noexcept { return hf == nullptr; } WIN32_FIND_DATA& get() noexcept override { return *fd; } private: void close() noexcept; // Indicates first file is already set. // Make sure not to compete with other attributes. static DWORD constexpr kFileAttrFirstFile = 0x800000; LPWIN32_FIND_DATA fd; // guaranteed to be a valid pointer. HANDLE hf; // May be nullptr. }; } // namespace #endif<file_sep>#pragma once #ifndef GUID_D634731E0CE3493DAA94DF5577BC181B #define GUID_D634731E0CE3493DAA94DF5577BC181B #ifndef STDAFX_H #include <wtypes.h> #include <functional> #endif namespace basis { /*! Thread. The thread suspends itself after it created. The Memeber function, addTask(), lets the thread work on the task. If the thread already had have another task, addTask function stores a task to inner list. The thread then keep on working until it finished all of tasks. The thread will alive until the memberfunction, join(), would be called. Destructor will call join() in any way. All member functions are thread-safe. */ class CThread { public: CThread(); CThread(CThread&) = delete; CThread& operator=(CThread&) = delete; CThread(CThread&&) = default; CThread& operator=(CThread&&) = default; ~CThread(); // Whether the thread is alive. explicit operator bool() { return impl != nullptr; } //! Original return code for threads of this class. static unsigned constexpr EXITCODE = 1u; using TaskTy = std::function<void(void)>; /*! Adds a task for the thread. If the thread was not busy, it starts to work on the task. @sa CThread */ void addTask(TaskTy f); /*! Tells the thread to end. 
@note Once this function has returned, every member function except the
destructor becomes a no-op; it remains safe to call them.
@sa CThread */
void join();

/*!
Waits until the thread suspends itself.

Returns true immediately if the thread is idle or has already been
joined.  It also returns true if the thread finishes all of its tasks
before the specified time elapses.  If the thread is still working
after the specified time has passed, this function returns false.
The thread suspends itself only once it has finished every queued task.
@param timeToWait Maximum time to wait.  When set to a negative value,
the call blocks until the thread suspends, regardless of elapsed time.
*/
bool wait(int timeToWait) const noexcept;

//! Returns true if the thread is running.
//! Equivalent to !wait(0).
bool isBusy() noexcept { return !wait(0); }

/*!
Returns a handle that can be used to wait for this thread to suspend.

The returned handle can be used as a parameter of
WaitForMultipleObjects().
*/
HANDLE getWaitHandle();

/*!
Returns the handle of this thread.

The handle this function returns can be used to wait until the thread
has terminated.
*/ HANDLE getThreadHandle(); private: class Impl; Impl *impl; }; } // namespace #endif<file_sep>#pragma once #ifndef GUID_70CA2EA915334398A9B81C0554FAE7DA #define GUID_70CA2EA915334398A9B81C0554FAE7DA #ifndef STDAFX_H #include <wtypes.h> #endif namespace basis { class CPosition { public: CPosition() { clear(); } explicit CPosition(POINT p) { clear(); pt = p; } CPosition(LONG x, LONG y) { clear(); set(x,y); } ~CPosition() = default; explicit CPosition(const CPosition &rhs) = default; CPosition& operator= (const CPosition &rhs) = default; operator POINT() { return pt; } LONG x() { return pt.x; } LONG y() { return pt.y; } const POINT& base() { return bs; } const POINT& pos() { return pt; } void setBase(LONG x, LONG y) { pt.x = x, pt.y = y; } void set(POINT p) { pt = p; } void set(LONG cx, LONG cy) { pt.x = cx; pt.y = cy; } void reset() { pt = bs; } void clear() { pt.x = pt.y = bs.x = bs.y = 0; } void move(POINT d) { pt.x += d.x; pt.y += d.y; } void move(LONG dx, LONG dy) { pt.x += dx; pt.y += dy; } private: POINT pt; // 現在位置 POINT bs; // 初期位置 }; } #endif <file_sep>/*! 
@file mouse_drag.h 2015/12/12 マウスドラッグのリスナクラス ウィンドウメッセージからドラッグに関連する操作を抽出し ドラッグの開始・実行・終了のタイミングを通知、 ドラッグ量などの関数を提供する */ #pragma once #ifndef CMouseDrag_h #define CMouseDrag_h #ifndef STDAFX_H #include <wtypes.h> #endif #include "types.h" // マウス関連のWM_でわたされるボタン状態(WPARAM)にALTキーをつけくわえて、 // .m_keysで開始時のボタン押下状態を管理する // 環境によってはMK_ALTが存在するらしいのでこの名前は避ける #ifndef MK_MENU #define MK_MENU 0x80 #endif // 他はWM_LBUTTONDOWNでわたされるWPARAMと同じ namespace basis { enum class Message : int; class CMouseDrag { public: using Point = basis::Point; using Size = basis::Size; enum STATE : int { UNDEFINED = 0, BUTTON_UP = 1, BUTTON_DOWN = 2, MOUSE_MOVE = 4, SLIGHT_MOVE = BUTTON_DOWN + MOUSE_MOVE, MOUSE_DRAG = 8, DRAGGING = MOUSE_MOVE + MOUSE_DRAG, }; CMouseDrag(); // メッセージを解析しドラッグの開始・実行・終了時にtrueをかえす bool proc(HWND hWnd, basis::Message msg, WPARAM wp, LPARAM lp); // ドラッグの開始地点をかえす Point start() { return m_start; } // Point pos() { return m_pos; } // 前回WM_MOUSEMOVEからの横方向移動差分を返す int dx(); // 前回WM_MOUSEMOVEからの縦方向移動差分を返す int dy(); Size getDifference(); // スクリーン座標上のマウス移動差分のみを返す // ウィンドウの移動による相対的移動は無視される int dx_abs(); int dy_abs(); Size getTravel(); // ドラッグ開始時の複合マウスキーステート UINT keys() { return m_keys; } // ドラッグ開始のトリガーとなったボタンのvirtual key UINT vkey() { return m_vkey; } int state() { return m_state; } bool isMoved() { return (static_cast<int>(m_state) & static_cast<int>(STATE::MOUSE_MOVE)) != 0; } bool isDragged() { return (static_cast<int>(m_state) & static_cast<int>(STATE::MOUSE_DRAG)) != 0; } // ドラッグ開始とみなす距離を設定する(初期値1) // クリック地点からこの分だけ離れるまで差分をかえさない // pixelを省略した場合は現在値をかえす Size threshold(int dx = -1, int dy = -1); protected: // スレッショルドを(すでに)越えたかどうか bool isOveringThreshold(Point p); // m_prev, m_posを更新 Point& updatePos(HWND hWnd, const LPARAM lp); private: Point m_pos; // クライアント座標 Point m_scr; // スクリーン座標 Point m_start; Point m_prev; Point m_prev_scr; UINT m_keys; UINT m_vkey; // これがBUTTON_UPのときはドラッグ中ではない int m_state; // この値以上になるまでドラッグとみなさない(procでtrueをかえさない) Size m_threshold; }; } // namespace 
#endif<file_sep>#pragma once #ifndef GUID_A3A269A23F564BB2B49C231E7FD120CA #define GUID_A3A269A23F564BB2B49C231E7FD120CA #ifndef STDAFX_H #include <wtypes.h> #endif #include "types.h" #include "critical_section.h" #include "iunit_test.h" namespace basis { UNIT_TEST(CMovable) //! 移動可能なグラフィックオブジェクトの座標。 /*! 親を持つとき親の動きに連動する。 スクリーン座標はpt, 親に対する相対座標はposで表す。 */ class CMovable { public: struct Test; CMovable(CMovable *parent = nullptr); CMovable(CMovable *parent, Point pos, Size size = {}); CMovable(const Rect& rc); virtual ~CMovable(); CMovable& operator=(const Rect &rc); //! 絶対座標を返す。 Point pt(); //! 相対座標を返す。 Point pos(); void forceSetPos(Point pos); //! 相対座標に移動する。 bool setPos(Point pos); //! 現在位置から相対移動する。 bool move(Size amount); //! 絶対座標に移動する。 /*! @return 位置変更に成功したかどうか。 */ bool moveTo(Point pt); //! 指定座標に移動可能かどうかを返す。 /*! この関数をオーバーライドすることで移動範囲を制限する。 */ virtual bool isMovableTo(Point pos); int width(); void width(int w); int height(); void height(int h); Size size(); void size(Size s); //! 4頂点の絶対座標を返す Rect rect(); //! 絶対座標を相対座標に変換 Point posFromPt(Point pt); //! 相対座標を絶対座標に変換 Point ptFromPos(Point pos); //! オフセットを返すオブジェクト CMovable *parent(); void setParent(CMovable *parent); //! デフォルトの相対位置を指定する void setBase(Point pos); //! 
デフォルトの相対位置に移動する void resetPos(); protected: CMovable *m_parent; //!< 親 Point m_pos; //!< 親に対する相対座標 Point m_base; //!< 相対座標の基本値。resetのパラメータ。 Size m_size; //!< ピクセル単位のサイズ CriticalSection m_cs; }; inline Point CMovable:: pos() { return m_pos; } inline void CMovable:: forceSetPos(Point pos) { m_pos = pos; } inline int CMovable:: width() { return m_size.x; } inline void CMovable:: width(int w) { m_size.x = w; } inline int CMovable:: height() { return m_size.y; } inline void CMovable:: height(int h) { m_size.y = h; } inline Size CMovable:: size() { return m_size; } inline void CMovable:: size(Size s) { m_size = s; } inline CMovable * CMovable:: parent() { return m_parent; } inline void CMovable:: setParent(CMovable * parent) { m_parent = parent; } inline void CMovable:: setBase(Point pos) { m_base = pos; } inline void CMovable:: resetPos() { m_pos = m_base; } } // namespace #endif<file_sep>#include "file_item.h" #include "exception.h" namespace { LPOVERLAPPED inline CreateOverlapped(const LARGE_INTEGER &offset) { auto lp = new OVERLAPPED{}; lp->hEvent = CreateEvent(0, TRUE, 0, 0); lp->Offset = offset.LowPart; lp->OffsetHigh = offset.HighPart; return lp; } } namespace basis { bool CFileItem:: close() try { finish(); if (*this) CloseHandle(m_h); m_h = nullptr; return true; } catch (...) 
{ return false; } CFileItem:: CFileItem(CFileItem && s) : m_h(s.m_h), m_overlapped(s.m_overlapped), m_offset(s.m_offset) { s.m_h = nullptr; s.m_overlapped = nullptr; } CFileItem& CFileItem:: operator=(CFileItem &&s) { m_h = s.m_h; s.m_h = nullptr; m_overlapped = s.m_overlapped; s.m_overlapped = nullptr; return *this; } LARGE_INTEGER CFileItem::getSize() const { LARGE_INTEGER s{}; if (GetFileSizeEx(m_h, &s)) return s; throw std::runtime_error(LOCATION); } bool CFileItem:: read(void * dest, DWORD bytes, DWORD timeWait) { if (!dest || !*this || m_overlapped) return false; if (bytes == 0) { auto s = getSize(); s.QuadPart -= m_offset.QuadPart; if (s.HighPart > 0) bytes = static_cast<DWORD>(-1); else bytes = s.LowPart; } m_overlapped = CreateOverlapped(m_offset); ReadFile(m_h, dest, bytes, 0, m_overlapped); // If an error here, finish() function deal with that. return finish(timeWait); } bool CFileItem:: write(const void *source, DWORD bytes, DWORD timeWait) { if (!source || !*this || m_overlapped) return false; m_overlapped = CreateOverlapped(m_offset); WriteFile(m_h, source, bytes, 0, m_overlapped); // If an error here, finish() function deal with that. return finish(timeWait); } bool CFileItem:: finish(DWORD timeWait) { if (!m_overlapped) return true; DWORD cbForward = 0; BOOL result = GetOverlappedResultEx(m_h, m_overlapped, &cbForward, timeWait, FALSE); m_offset.QuadPart += cbForward; if (result) { // Finished CloseHandle(m_overlapped->hEvent); delete m_overlapped; m_overlapped = nullptr; return true; } // Yet loading. 
if (GetLastError() == ERROR_IO_INCOMPLETE || GetLastError() == WAIT_TIMEOUT) return false; throw api_runtime_error(); } } // namespace<file_sep>#ifndef STDAFX_H #include <tchar.h> #include <algorithm> #endif #include "char_t.h" #include "exception.h" #include "string_.h" namespace basis { String::String(const std::basic_string<TCHAR>& s) : String(s.c_str(), s.size()) {} String::String(const TCHAR *s) : String(s, length(s)) {} String::String(const TCHAR *s, size_t n) { alloc(n); if (!s || !m_p) { m_size = 0; return; } m_size = (std::min)(capacity() - 1, length(s)); _tcsncpy_s(m_p, capacity(), s, m_size); // Terminated with a null. } String::String(const String &s) : String(s.c_str(), s.size()) {} String& String:: operator=(const TCHAR *src) { atLeast(length(src)); if (_tcscpy_s(m_p, capacity(), src)) throw std::runtime_error(LOCATION); return *this; } bool String:: operator==(const TCHAR *rhs) const noexcept { for (const TCHAR *p = m_p; *rhs++ == *p;) { if (*p++ == NULL) return true; } return false; } int String::size() const { if (m_size < 0) m_size = length(m_p); return m_size; } void String::alloc(size_t size) { if (size < 0 || size >= (std::numeric_limits<int>::max)()) throw std::runtime_error(LOCATION); if (size == 0) { m_buf.reset(); m_p = nullptr; m_size = 0; return; } // May throw std::bad_alloc m_p = static_cast<TCHAR*>(m_buf.alloc((size + 1) * sizeof(TCHAR), false)); *m_p = NULL; m_size = 0; } int String::length(const TCHAR *str) { size_t len = _tcslen(str); if (len >= (std::numeric_limits<int>::max)()) throw std::runtime_error(LOCATION); return static_cast<int>(len); } // Treats as ref to static string to provide it some functions. 
const TCHAR * String:: reffer(const TCHAR * s) { m_buf.reset(); m_size = -1; return m_p = const_cast<TCHAR*>(s); } }<file_sep>#pragma once #ifndef GUID_5C163EBB008248C3BEE781FD5A572BC8 #define GUID_5C163EBB008248C3BEE781FD5A572BC8 #ifndef STDAFX_H #include <string> #endif #include "string_buffer.h" #include "iunit_test.h" namespace basis { class CFileItem; UNIT_TEST(CFilePath) /*! File path on Windows systems. Wrapped API list : <br /> GetFileAttributes, CreateFile, CopyFile, MoveFile, DeleteFile, GetCurrentDirectory, CreateDirectory, RemoveDirectory, */ class CFilePath { public: using tstr = std::basic_string<TCHAR>; //! Default constructor CFilePath() {} /*! Conversion constructor. @param path Null tarminated string to express full path name to a directory or file */ CFilePath(const TCHAR *path); /*! Conversion constructor. @param path full path name to a directory or file. */ CFilePath(const tstr &path) : CFilePath(path.c_str()) {} /*! Conversion constructor. @param path full path name to a directory or file. */ CFilePath(const StringBuffer&path) : CFilePath(path.c_str()) {} //! Destructor ~CFilePath() = default; /*! Copy constructor. @exception std::bad_alloc */ CFilePath(const CFilePath& s) = default; //! Move constructor. CFilePath(CFilePath&&) = default; /*! Copy asignment operator. @exception std::bad_alloc */ CFilePath&operator=(const CFilePath&) = default; //! Move asignment operator. CFilePath&operator=(CFilePath&&) = default; //! Adds a filename after a backslash. CFilePath&operator+=(PCTSTR file) { m_path.append(TEXT("\\")); m_path.append(file); return *this; } //! Adds a filename after a backslash. CFilePath&operator+=(const tstr &file) { return operator+=(file.c_str()); } //! Returns a path followed by a backslash and filename. CFilePath operator+(PCTSTR file) const { return CFilePath(*this) += file; } //! Returns a path followed by a backslash and filename. 
CFilePath operator+(const tstr &file) const { return operator+(file.c_str()); } bool operator==(const CFilePath &rhs) { return path() == rhs.path(); } bool operator==(PCTSTR rhs) { return path() == rhs; } bool operator==(const tstr &rhs) { return path() == rhs.c_str(); } bool operator!=(const CFilePath &rhs) { return !(*this == rhs); } bool operator!=(PCTSTR rhs) { return !(*this == rhs); } bool operator!=(const tstr &rhs) { return !(*this == rhs); } //! Returns long path name also known as UNC path. const StringBuffer longPath() const noexcept { return StringBuffer(0, m_path.c_str()); } //! Returns full path name. const StringBuffer path() const noexcept { return StringBuffer(0, m_path.c_str() + 4); } //! Returns path to its parent directory. CFilePath getDir() const noexcept { size_t ix = m_path.rfind(TEXT("\\")); if (ix == StringBuffer::npos) return{}; else return CFilePath(m_path.substr(4, ix - 4)); } //! Returns filename. const StringBuffer getFileName() const noexcept; void setFileName(LPCTSTR fileName); //! Whether path string contains no characters despite null. bool isEmpty() { return m_path.empty(); } //! Whether expressed file exists. bool exist() const noexcept; //! Whether expressed path is existing directory. bool isDirectory() const noexcept; /*! Opens existing file. @param access One or two of GENERIC_READ, GENERIC_WRITE @param share One or two of FILE_SHARE_READ, FILE_SHARE_WRITE @return Opened file item or invalid file item. */ CFileItem open(DWORD access = GENERIC_READ, DWORD share = FILE_SHARE_READ); /*! Creates non-existing file. This function can't create directory. Use createAsDirectory function if necessary. This function creates file with containing name. @param attributes file attributes from CreateFile API's. @return Created and opend file item or invalid file item. */ CFileItem create(DWORD attributes = FILE_ATTRIBUTE_NORMAL); /*! Creates non-existing Directory. @return @c true if succeeded, or @c false. */ bool createAsDirectory(); /*! 
Erases file. @return @c true if secceeded, or @c false. */ bool erase(); /*! Copies file to specified place. @return @c true if succeeded, or @c false. If overwrite was set to be false and destination file existed already then return value is @c false. */ bool copyTo(const CFilePath& dest, bool overwrite = false); /*! Moves file to specified place. A path expressed by this object will be also changed. If destination place is in the same directory, rename function can be used instead. @return @c true if succeeded, or @c false. */ bool moveTo(const CFilePath& dest); /*! Makes a file renamed. A path expressed by this object will be also changed. @return @c true if succeeded, or @c false. */ bool rename(PCTSTR filename); /*! Moves a file to Recycle.bin @return @c true if succeeded, or @c false. */ bool trash() const noexcept; /*! Returns the path to current directory. */ static CFilePath GetCurrentDirectory() noexcept; /*! Returns the path to the directory at where executed file locates. */ static CFilePath GetBootDirectory() noexcept; private: StringBuffer m_path; }; } // namespace #endif<file_sep>[General] Language=Japanese [Japanese] ListEmpty=ファイルまたはフォルダをドロップして、画像を表示します ConfirmDelete=移動してよろしいですか? FileBack=前(&B) FileNext=次(&N) FileFirst=先頭(&F) FileLast=末尾(&L) FileReload=ファイルを再読み込み(&R) FileDelete=ごみ箱へ送る Use Settings in INI=初期化ファイルの作成を許可(&I) ToggleScreen=ウィンドウサイズ切替(&S) View=表示(&V) bHideTitleBar=タイトルバーを非表示(&V) ShowFilename=タイトルにファイル名を表示(&T) ShowList=ファイルリストを表示(&L) Centering=画像をセンタリング(&C) UpscaleImage=小さい画像を拡大(&U) DownscaleImage=大きい画像を縮小(&D) SortBy=並べ替え(&Q) DescendingWriteTime=最終更新日時(降順)(&W) AscendingWriteTime=最終更新日時(昇順)(&E) DescendingCreationTime=作成日時(降順)(&C) AscendingCreationTime=作成日時(昇順)(&V) DescendingAccessTime=アクセス日時(降順)(&A) AscendingAccessTime=アクセス日時(昇順)(&S) ShowProperty=プロパティ(&P) CloseWindow=終了(&W) [English] ListEmpty=The list is current empty. ConfirmDelete=Are you sure to move the file to Recycle.bin ? 
FileBack=Back(&B) FileNext=Next(&N) FileFirst=First(&F) FileLast=Last(&L) FileReload=Reload(&R) FileDelete=Send to Recycle Bin Use Settings in INI=Use Settings in INI(&I) ToggleScreen=Toggle Screen Size(&S) View=View(&V) bHideTitleBar=Hide Title Bar(&V) ShowFilename=Show Filename on Title(&T) ShowList=Show List(&L) Centering=Centering(&C) UpscaleImage = Up Scale if Small(&U) DownscaleImage = Down Scale if Large(&D) SortBy=Sort(&Q) DescendingWriteTime = Descending Write Time(&W) AscendingWriteTime = Ascending Write Time(&E) DescendingCreationTime = Descending Creation Time(&C) AscendingCreationTime = Ascending Creation Time(&V) DescendingAccessTime = Descending Access Time(&A) AscendingAccessTime = Ascending Access Time(&S) ShowProperty=Show Property(&P) CloseWindow=Close(&W)<file_sep>/*! @file Defines a unit test function for CListItem class objects. This function will open and load the file, which is named "img11.jpg". If the file was not found, this function would quit the test immediatry without reporting any error. */ #include "find_file.h" #include "file_path.h" #include "surface.h" #include "stdfnc.h" #include "list_item.h" namespace { bool equal(const FILETIME &l, const FILETIME &r) { return l.dwHighDateTime == r.dwHighDateTime && l.dwLowDateTime == r.dwLowDateTime; } } namespace image_viewer { UNIT_TEST_FUNC(CListItem) { basis::CFilePath dir = dir.GetBootDirectory().path().c_str(); basis::StringBuffer name{ 0, TEXT("img11.jpg") }; if (!(dir + name.c_str()).exist()) return true; // File not found. // CListItem needs for a WIN32_FIND_DATA to be created. basis::CFindFile e((dir + TEXT("*")).path().c_str()); WIN32_FIND_DATA fd; while (e.nextFile()) { if (name == e.get().cFileName) { fd = e.get(); break; } } if (e.empty()) throw 0; // If this rose, CFindFile class should have a bug. 
DWORD size = fd.nFileSizeLow; if (size == 0 || fd.nFileSizeLow == INVALID_FILE_SIZE) { throw 0; } CListItem item(fd); if (name != item.fileName()) throw 0; if (!equal(item.ftAccess(), fd.ftLastAccessTime) || !equal(item.ftCreate(), fd.ftCreationTime) || !equal(item.ftWrite(), fd.ftLastWriteTime)) throw 0; int exception = 0; try { item.size(); } catch (std::runtime_error &) { exception += 1; } try { item.rect(); } catch (std::runtime_error &) { exception += 2; } if (exception != 3) throw 0; if (item.loadImage((dir + name.c_str()).path().c_str()) != CListItem::Status::Loaded) throw 0; if (!item.isLoaded()) throw 0; item.size(); // Should not throw item.rect(); // Should not throw basis::Surface s; s.create(0, item.size()); if (!item.draw(s, item.rect(), item.rect())) throw 0; return true; } } // namespace<file_sep>#include "exception.h" #include "stdfnc.h" #include "ids.h" #include "profile.h" namespace image_viewer { CImageViewer::Profile::Pair CImageViewer::Profile::m_profile_ids[] { { ID::PROFILE_ENABLE, TEXT("bEnableSettings") }, { ID::LOADER_RANGE_MAX, TEXT("nPreloadRangeMax") }, { ID::LOADER_RANGE_MIN, TEXT("nPreloadRangeMin") }, { ID::LOADER_MEMORY_CAP, TEXT("nMemoryCapMegaBytes") }, { ID::WINDOW_REMINDER, TEXT("bEnableWindowReminder") }, { ID::WINDOW_POSITION, TEXT("bRemindWindowPosition") }, { ID::WINDOW_ZOOMING, TEXT("bRemindWindowZooming") }, { ID::WINDOW_STYLE, TEXT("bRemindWindowStyle") }, { ID::WINDOW_LEFT, TEXT("nWindowLeft") }, { ID::WINDOW_TOP, TEXT("nWindowTop") }, { ID::WINDOW_RIGHT, TEXT("nWindowRight") }, { ID::WINDOW_BOTTOM, TEXT("nWindowBottom") }, { ID::WINDOW_MAXIMIZE, TEXT("bWindowMaximize") }, { ID::GRIP_IMAGE, TEXT("GripImage") }, { ID::LIST_REMOVE, TEXT("RemoveFromList") }, { ID::CONFIRM_DELETE, TEXT("ConfirmDelete") }, { ID::LAST_PATH, TEXT("sLastOpenedFile") }, { ID::USE_PROFILE, TEXT("Use Settings in INI") }, { ID::SCREEN_TOGGLE, TEXT("ToggleScreen") }, { ID::LIST_EMPTY, TEXT("ListEmpty") }, { ID::FILE_DELETE, TEXT("FileDelete") }, { 
ID::FILE_QUICK_DELETE, TEXT("QuickFileDelete") }, { ID::FILE_BACK, TEXT("FileBack") }, { ID::FILE_NEXT, TEXT("FileNext") }, { ID::FILE_FIRST, TEXT("FileFirst") }, { ID::FILE_LAST, TEXT("FileLast") }, { ID::FILE_RELOAD, TEXT("FileReload") }, { ID::VIEW_POPUP, TEXT("bHideTitleBar")}, { ID::VIEW_FILENAME, TEXT("ShowFilename") }, { ID::VIEW_FILELIST, TEXT("ShowList") }, { ID::VIEW_CENTER, TEXT("Centering") }, { ID::VIEW_UPSCALE, TEXT("UpscaleImage") }, { ID::VIEW_DOWNSCALE, TEXT("DownscaleImage") }, { ID::VIEW_END, TEXT("View") }, { ID::SORT_GREATER_WRITE, TEXT("DescendingWriteTime") }, { ID::SORT_LESSER_WRITE, TEXT("AscendingWriteTime") }, { ID::SORT_GREATER_CREATION, TEXT("DescendingCreationTime") }, { ID::SORT_LESSER_CREATION, TEXT("AscendingCreationTime") }, { ID::SORT_GREATER_ACCESS, TEXT("DescendingAccessTime") }, { ID::SORT_LESSER_ACCESS, TEXT("AscendingAccessTime") }, { ID::SORT_END, TEXT("SortBy") }, { ID::SHOW_PROPERTY, TEXT("ShowProperty") }, { ID::WINDOW_CLOSE, TEXT("CloseWindow") }, }; CImageViewer::Profile:: Profile() : m_enable(true) { const auto path = FilePath::GetBootDirectory(); m_prof.path((path + kSettingFile).path().c_str()); m_enable = general().loadBoolean(ID::PROFILE_ENABLE, false); m_lang.path((path + kLanguageFile).path().c_str()); m_lang.section(GENERAL); m_lang.section(m_lang.read(LANGUAGE, nullptr)); } bool CImageViewer::Profile:: exist() { return GetFileAttributes(m_prof.path().c_str()) != static_cast<DWORD>(-1); } bool CImageViewer::Profile:: setEnable(bool bEnable) { general(); if (save(ID::PROFILE_ENABLE, ToStr(bEnable)) == false) throw std::runtime_error(LOCATION); m_enable = bEnable; return true; } const TCHAR * CImageViewer::Profile:: ToStr(bool b) { return b ? 
TEXT("true") : TEXT("false"); } void CImageViewer::Profile:: applyToAllItemInTheSection(std::function<void(ID, const TCHAR *)> f) { basis::CPrivateProfile prof = m_prof; prof.getAllKeyNames(); // これを保持する basis::StringBuffer s(0, prof.c_str()); while (!s.empty()) { for (auto &&i : m_profile_ids) { if (s == i.key) f(i.id, m_prof.read(i.key, nullptr)); } s.refer(s.c_str() + s.getSize() + 1); } } const TCHAR * CImageViewer::Profile:: getKeyString(ID id) { for (auto i : m_profile_ids) { if (i.id == id) return i.key; } return TEXT(""); } const TCHAR * CImageViewer::Profile:: getTranslatedString(ID id) { const TCHAR *key = getKeyString(id); return m_lang.read(key, key); } bool CImageViewer::Profile:: loadBoolean(ID id, bool b) { basis::StringBuffer str(0, m_prof.read(getKeyString(id), nullptr)); if (str == TEXT("true")) return true; if (str == TEXT("false")) return false; return 0 != basis::ToInt(m_prof.c_str(), b ? 1 : 0); } bool CImageViewer::Profile:: saveBoolean(ID id, bool b) { return m_prof.write(getKeyString(id), ToStr(b)); } int CImageViewer::Profile:: load(ID id, int nDefault) { return ::basis::ToInt(load(id, nullptr), nDefault); } bool CImageViewer::Profile:: save(ID id, int value) { return m_prof.write(getKeyString(id), ::basis::ToStr(value).c_str()); } const TCHAR *CImageViewer::Profile:: load(ID id, const TCHAR *sDefault) { return m_prof.read(getKeyString(id), sDefault); } bool CImageViewer::Profile:: save(ID id, const TCHAR * value) { return m_prof.write(getKeyString(id), value); } } // namespace <file_sep>#pragma once #ifndef STDAFX_H #include <string> #endif #include "imemory.h" namespace basis { class String { static int constexpr npos = -1; String() : m_p(0), m_size(0) {} String(size_t size) { alloc(size); } String(const TCHAR *s, size_t n); ~String() = default; String(const String &s); String(String &&s); explicit String(const std::basic_string<TCHAR> &s); explicit String(const TCHAR *s); String& operator=(const String &src); String& operator=(String 
&&s); String& operator=(const TCHAR *s); bool operator==(const TCHAR *r) const noexcept; bool operator!=(const TCHAR *r) const noexcept; bool operator==(const String &r) const noexcept; bool operator!=(const String &r) const noexcept; bool operator==(const std::basic_string<TCHAR> &r) const noexcept; bool operator!=(const std::basic_string<TCHAR> &r) const noexcept; // Treats as ref to static string to provide it some functions. const TCHAR *reffer(const TCHAR *s); const TCHAR *c_str() const noexcept; TCHAR *data(); int capacity() { return static_cast<int>(m_buf.capacity()); } int size() const; int countSjis(); void alloc(size_t newSize); void realloc(size_t newSize); void shrinkToFit(); void atLeast(size_t sizeAtLeast); void flush() noexcept; // 何を返す? int compare(const TCHAR *p, size_t n = 0, size_t pos = 0) const noexcept; int find(TCHAR c, size_t pos = 0) const; int rfind(TCHAR c, size_t pos = 0) const; int find(const TCHAR *p, size_t pos = 0) const; int find(const TCHAR *p, size_t pos, size_t n) const; int rfind(const TCHAR *p, size_t pos = 0) const; int rfind(const TCHAR *p, size_t pos, size_t n) const; std::basic_string<TCHAR> substr(size_t pos, size_t n = 0) const; String& append(const TCHAR *str); String& append(const TCHAR *str, size_t n); String& write(const TCHAR *p, size_t pos, size_t n) const; private: static int length(const TCHAR *str); static int length(const TCHAR *str, size_t cap) noexcept; int getSize() const noexcept; int find(TCHAR c, size_t pos, bool first) const; int find(const TCHAR *p, size_t pos, size_t n, bool first) const; TCHAR *m_p; // 0:empty CMemory m_buf; // capacity()==0:static_string mutable int m_size; // -1:uncounted }; }<file_sep>#ifndef STDAFX_H #include <dwmapi.h> #endif #include "monitor.h" #include "string_buffer.h" #include "exception.h" #include "stdfnc.h" #include "window_dwm.h" #include "window_impl.h" namespace { bool inline isValid(MSG *msg) { return msg->message != WM_QUIT; } bool inline getMessageIfExist(MSG * msg, 
UINT filter) { return isValid(msg) && PeekMessage(msg, nullptr, filter, filter, PM_REMOVE) != FALSE && isValid(msg); } bool inline waitForMessagePosted(MSG * msg, UINT filter) { return isValid(msg) && GetMessage(msg, nullptr, filter, filter) != FALSE; } } // namespace namespace basis { Window::Window() : impl(new Impl(this)) {} Window::~Window() { PostMessage(*this, WM_CLOSE, 1, 0); impl->join(); } Window::operator HWND() const { return impl->handle(); } int Window::run() const { MSG msg{}; // WM_TIMERを優先して処理する while (getMessageIfExist(&msg, WM_TIMER) || waitForMessagePosted(&msg, WM_NULL)) { DispatchMessage(&msg); } int const exit_code = static_cast<int>(msg.wParam); return exit_code; } const Window & Window::hook(IEventHandler * p) const { impl->hook(p); return *this; } const Window & Window::hook(Listener f) const { impl->hook(std::move(f)); return *this; } const Window & Window::unhook(IEventHandler * p) const { impl->unhook(p); return *this; } Window& Window::create() { if (!impl->create()) throw std::runtime_error(LOCATION); return *this; } void Window:: destroy() { if (!impl->destroy()) throw api_runtime_error(); } void Window::post(Message msg, WPARAM wp, LPARAM lp) { if (PostMessage(*this, static_cast<UINT>(msg), wp, lp) == FALSE) throw api_runtime_error(); } HWND Window::addChild(const TCHAR *title, Rect pos, DWORD addStyle) { return CreateWindow(TEXT("BUTTON"), title, WS_CHILD | WS_VISIBLE | addStyle, pos.left, pos.top, pos.width(), pos.height(), *this, 0, reinterpret_cast<HINSTANCE>(GetWindowLongPtr(*this, GWLP_HINSTANCE)), nullptr); } HWND Window::addButton(const TCHAR * title, const Rect & pos) { return addChild(title, pos, BS_DEFPUSHBUTTON); } HWND Window::addRadioButton(const TCHAR * title, const Rect & pos) { return addChild(title, pos, BS_AUTORADIOBUTTON); } void Window::waitToEnd() const { impl->waitToEnd(); } int Window::broadcast(Message msg, WPARAM wp, LPARAM lp) { return impl->broadcast(msg, wp, lp); } Size Window:: getWindowSize() const { 
return getWindowRect().size(); }

//! Returns the raw window rectangle in screen coordinates; throws on failure.
Rect inline Window:: getWindowRect() const {
    RECT rc;
    if (!GetWindowRect(*this, &rc))
        throw api_runtime_error();
    return rc;
}

//! Returns the practical rectangle of the window.
//! With DWM composition enabled this is the extended frame rect (which can
//! differ from GetWindowRect's value); otherwise it falls back to the raw rect.
Rect Window:: getRect() const {
    RECT rc;
    if (DWM::IsCompositionEnabled() && DWM::GetExtendedRect(this, &rc))
        return rc;
    return getWindowRect();
}

//! Moves/resizes the window so its practical rect matches dest.
void Window:: setRect(Rect dest) const {
    // Compensate for the difference between the raw window rect and the
    // DWM extended rect so the visible frame ends up exactly at dest.
    if (DWM::IsCompositionEnabled())
        dest = dest + getWindowRect() - getRect();
    if (!setWindowRect(dest, SWP_NOZORDER | SWP_NOACTIVATE | SWP_NOOWNERZORDER))
        throw api_runtime_error();
}

//! Returns the client rectangle (origin at 0,0); throws on failure.
Rect Window:: getClientRect() const {
    RECT rc{};
    if (GetClientRect(*this, &rc) == FALSE)
        throw api_runtime_error();
    return rc;
}

//! Returns the client rectangle translated into screen coordinates.
Rect Window:: getClientRectInScreen() const {
    auto rc = getClientRect();
    LONG right = rc.right, bottom = rc.bottom;
    // Convert (right, bottom) in place as if it were a POINT, then shift
    // (left, top) by the same delta the conversion applied.
    ClientToScreen(*this, reinterpret_cast<POINT*>(&rc.right));
    rc.left += rc.right - right;
    rc.top += rc.bottom - bottom;
    return rc;
}

Size Window:: getClientSize() const { return getClientRect().size(); }

//! Returns the restored (non-minimized/maximized) placement rectangle.
Rect Window:: place() const {
    WINDOWPLACEMENT wp{ sizeof wp };
    if (GetWindowPlacement(*this, &wp) == FALSE)
        throw api_runtime_error();
    return wp.rcNormalPosition;
}

//! Sets the restored placement rectangle; returns false on API failure.
bool Window:: place(const Rect& rc) const {
    WINDOWPLACEMENT wp{ sizeof wp };
    if (!GetWindowPlacement(*this, &wp))
        return false;
    wp.rcNormalPosition = static_cast<RECT>(rc);
    return 0 != SetWindowPlacement(*this, &wp);
}

//! Moves the window by the given offset; the size is left untouched.
bool Window:: move(Size s) const {
    Rect rc = getWindowRect();
    rc.left += s.x;
    rc.top += s.y;
    return setWindowRect(rc, SWP_NOSIZE | SWP_NOSENDCHANGING);
}

//! Moves the window's top-left corner to pt, keeping its current size.
bool Window:: moveTo(Point pt) const {
    auto size = getSize();
    return MoveWindow(*this, pt.x, pt.y, size.x, size.y, TRUE) != FALSE;
}

void Window:: maximize() const { ShowWindow(*this, SW_MAXIMIZE); }
void Window:: minimize() const { ShowWindow(*this, SW_MINIMIZE); }
void Window:: normalize() const { ShowWindow(*this, SW_RESTORE); }
bool Window:: isMaximized() const { return IsZoomed(*this) != FALSE; }
bool Window:: isMinimized()
const { return IsIconic(*this) != FALSE; } void Window:: maximize_multi() const { maximize(); setRect(Monitor::GetVirtualScreen()); return; } LONG Window:: getStyle() const { return GetWindowLong(*this, GWL_STYLE); } LONG Window:: setStyle(LONG newStyle) const { return SetWindowLong(*this, GWL_STYLE, newStyle); } int Window:: getCaptionHeight() const { if ((getStyle() & WS_CAPTION) == 0L) return 0L; return static_cast<int>(GetSystemMetrics(SM_CYCAPTION)); } void Window:: popup(bool bPopup) const { if (isPopup() == bPopup) return; const bool maximized = isMaximized(); Rect rc = (maximized) ? getRect() : getClientRectInScreen(); if (bPopup) { setStyle((getStyle() &~WS_OVERLAPPEDWINDOW) | WS_POPUP); applyFrame(); setRect(rc); return; } setStyle((getStyle() &~WS_POPUP) | WS_OVERLAPPEDWINDOW); applyFrame(); setRect(maximized ? rc : rc + getRect() - getClientRectInScreen()); } bool Window:: isPopup() const { return (getStyle() & WS_POPUP) != 0; } const basis::Window& Window:: show(int nShow) const { ShowWindow(*this, nShow); return *this; } const basis::Window& Window:: hide() const { ShowWindow(*this, SW_HIDE); return *this; } void Window:: invalidate() const { if (InvalidateRect(*this, nullptr, 0) == FALSE) throw api_runtime_error(); } void Window:: invalidate(const Rect &rc) const { RECT rect = rc; if (InvalidateRect(*this, &rect, 0) == FALSE) throw api_runtime_error(); } void Window:: update() const { UpdateWindow(*this); } bool Window:: activate() const { return setWindowRect({}, SWP_NOSIZE | SWP_NOMOVE); } bool Window:: setForeground() const { return SetForegroundWindow(*this) != FALSE; } bool Window:: setTitle(const TCHAR *p) const { return SetWindowText(*this, p) != FALSE; } StringBuffer Window:: getTitle() const { auto size = static_cast<int>(DefWindowProc(*this, WM_GETTEXTLENGTH, 0, 0)); StringBuffer buf(size + 1); DefWindowProc(*this, WM_GETTEXT, static_cast<WPARAM>(buf.capacity()), reinterpret_cast<LPARAM>(buf.data())); return buf; } bool Window:: 
setWindowRect(const Rect& rc, UINT flag) const { return SetWindowPos(*this, 0, rc.left, rc.top, rc.width(), rc.height(), flag) != FALSE; } bool Window:: applyFrame() const { return setWindowRect({}, SWP_NOSIZE | SWP_NOCOPYBITS | SWP_NOMOVE | SWP_FRAMECHANGED | SWP_NOACTIVATE | SWP_NOOWNERZORDER | SWP_NOZORDER); } } // namespace <file_sep>【ソフト名】 Stella Vista 【バージョン】 1.0 【著作権者】 ライ 【制作日】 2016/7/31 【種 別】 フリーソフトウェア 【配布元】   github.com/ryechat/ImageViewer 【転載の可否】 可 【登 録 名】  Stella Vista v1.0.exe 【動作環境】  Windows XP, Vista以降 【開発環境】  Visual Studio 2015, Windows10 ――――――――――――――――――――――――――――――――――――― ≪著作権および免責事項≫  本ソフトはフリーソフトです。個人/団体/社内利用を問わず、ご自由にお使い下さい。  ソフトウェアおよびソースコードの著作権は作者が保有しています。  このソフトウェアを使用したことによって生じたすべての障害・損害・不具 合等に関しては、作者は一切の責任を負いません。各自の責任においてご使用ください。 ●はじめに Stella Vistaは、画像ファイルを閲覧できるWindows専用ソフトウェアです。 exeファイル起動後のウィンドウに、画像ファイルをドロップすると表示します。 マウスやキーボードを使って、同一フォルダにあるすべてのファイルを閲覧することができます。 ●対応画像形式 BMP, ICO, GIF, JPEG, Exif, PNG, TIFF, WMF, EMF. ●主な特長 1)シンプルで分かり易いインターフェース 2)マルチスレッドに対応した高速な画像展開 3)マルチモニタ環境に対応 ●インストール  配布元サイトより圧縮ファイルをダウンロード、  パスの通った任意の位置で展開します。  exeファイルをダブルクリックするとアプリケーションが起動します。  この際、アプリケーションのダウンロード数が少ないため、  危険なファイルとしてWindows SmartScreenが実行をブロックする場合があります。  詳細情報をクリックし、表示された実行ボタンをクリックすることで回避できます。 ●アンインストール 展開したファイルをすべて削除します。 レジストリは使用していません。 ●ソースコード  Stella Vistaはオープンソースソフトウェアです。  以下のサイトからソースコードをダウンロードできます。  https://github.com/ryechat/ImageViewer ●レポジトリ構成  setting.ini (設定ファイル)  language.ini (言語ファイル)  readme.txt   (この説明ファイル)  basis (ライブラリ)  app (ソースコード)  stdafx.h/.cc (プリコンパイル済みヘッダ用ソースファイル) ●プロジェクトのビルド ソースコードは、app および basis フォルダに含まれています。 ビルドするには、これらすべての.ccファイルをコンパイル、リンクします。  その際、basisフォルダを追加のインクルードディレクトリに指定する必要があります。 必ずしもプリコンパイル済みヘッダは必要ではありません。 --以上-- <file_sep>#include "exception.h" #include "char_t.h" #include "string_buffer.h" #ifndef STDAFX_H #include <tchar.h> #include <algorithm> #endif namespace basis { const size_t StringBuffer:: CharLimit = (std::numeric_limits<size_t>::max)() / sizeof(TCHAR); StringBuffer:: StringBuffer(size_t max_buf, const TCHAR 
*src) : m_p(0), m_size(npos) { if (max_buf || !src) { alloc(max_buf ? max_buf : 1); if (!src || copy(m_p, capacity(), src, 0)) { m_size = npos; return; } } else if (src) { m_p = const_cast<TCHAR*>(src); return; } throw std::runtime_error(LOCATION); } StringBuffer& StringBuffer:: operator=(const TCHAR *src) { m_size = length(src); alloc(m_size + 1); if (copy(m_p, capacity(), src, capacity())) return *this; throw std::runtime_error(LOCATION); } bool StringBuffer:: operator==(const TCHAR *rhs) const noexcept { for (size_t i = 0; m_p[i] == rhs[i]; i++) { if (!m_p[i]) return true; } return false; } const TCHAR *StringBuffer:: refer(const TCHAR *s) { m_buf.reset(); m_size = npos; return m_p = const_cast<TCHAR*>(s); } TCHAR *StringBuffer:: data() { m_size = npos; if (capacity()) return m_p; throw std::logic_error(LOCATION); } void StringBuffer:: resize(size_t s) { if (!capacity() && !s) throw std::logic_error(LOCATION); m_size = npos; if (s == 0) // Doubles capacity. s = (capacity() > CharLimit / 2) ? CharLimit : capacity() * 2; if (s != capacity()) alloc(s); } void StringBuffer:: let(size_t s) { if (capacity() < s) realloc(s); } size_t inline StringBuffer:: getSize() const noexcept { if (m_size == npos) m_size = capacity() ? length(m_p, capacity()) : length(m_p); return m_size; } bool StringBuffer:: shrinkToFit() noexcept try { size_t s = getSize() + 1; if (s != capacity()) realloc(s); return true; } catch (...) { return false; } void StringBuffer:: realloc(size_t s) { if (!capacity()) // Contains refference to a static string. 
throw std::logic_error(LOCATION); if (!m_buf.realloc(s * sizeof(TCHAR))) throw std::bad_alloc(); m_size = npos; m_p = static_cast<PTSTR>(m_buf.address()); } bool StringBuffer:: empty() const noexcept { return (!m_p || *m_p == _T('\0')); } void StringBuffer:: flush() noexcept { assert(m_p); m_size = npos; SecureZeroMemory(m_p, capacity()); } bool StringBuffer:: compare(const TCHAR *p, size_t n, size_t pos) const noexcept { if (n == 0) n = length(p); for (size_t i = 0; i < n; i++) { if (m_p[pos + i] != p[i]) return false; } return true; } size_t StringBuffer:: find(const TCHAR *str, size_t pos, size_t n, bool first) const { size_t buf_length{ getSize() }; if (n == 0) n = length(str); if (n == 0 || buf_length < n || pos >= buf_length) return npos; for (size_t i{ first ? pos : buf_length - n };;) { if (compare(str, n, i)) return i; if (i == (first ? buf_length - n : pos)) return npos; i += first ? 1 : -1; } } size_t StringBuffer:: find(TCHAR c, size_t pos, bool first) const { size_t ret = npos; for (size_t i = pos; m_p[i]; i+= Char_T<TCHAR>(m_p[i]).width()) { if (m_p[i] == c) { if (first) return i; ret = i; } } return ret; } StringBuffer StringBuffer:: substr(size_t pos, size_t n) const { if (pos >= getSize()) return{}; StringBuffer t{ n + 1 }; return t.append(m_p + pos, n); } StringBuffer& StringBuffer:: write(size_t pos, const TCHAR *str, size_t n) { let(pos + n + 1); m_size = npos; for (size_t i = 0; (m_p[pos + i] = str[i]) != _T('\0'); i++) { if (i == n - 1) { m_size = pos + n; m_p[m_size] = _T('\0'); break; } } return *this; } size_t StringBuffer:: count(size_t cb) const { size_t cnt = 0; for (size_t i = 0; i < cb && m_p[i]; ++cnt) { i += Char_T<TCHAR>(m_p[i]).width(); } return cnt; } void StringBuffer:: alloc(size_t size) try { m_buf.alloc(size * sizeof(TCHAR), false); m_p = static_cast<TCHAR*>(m_buf.address()); *m_p = _T('\0'); m_size = 0; } catch (...) 
{ throw std::bad_alloc(); } std::wstring StringBuffer:: toUTF16() const { #ifdef _UNICODE return{ c_str() }; #else // Alloc intermediate buffer. int size = 2 * MultiByteToWideChar(CP_THREAD_ACP, 0, c_str(), -1, 0, 0); wchar_t *wide = static_cast<wchar_t*>(malloc(size)); // Translate to Unicode. if (MultiByteToWideChar(CP_THREAD_ACP, 0, c_str(), -1, wide, size) == 0) return{}; std::wstring str(wide); free(wide); return str; #endif } /* Static Member Functions */ size_t StringBuffer:: length(const TCHAR *str, size_t cap) noexcept(true) { if (cap) return str ? _tcsnlen(str, cap) : 0; else return str ? _tcslen(str) : 0; } bool StringBuffer:: copy(TCHAR *buf, size_t cap_buf, const TCHAR *str, size_t cap_str) noexcept { if (!buf) return false; if (str == nullptr) { *buf = _T('\0'); return true; } if (cap_str == 0) cap_str = length(str) + 1; if (cap_buf < cap_str) return false; return (_tcscpy_s(buf, cap_buf, str) == 0); } } // namespace <file_sep>#include "stdfnc.h" #include "file_path.h" #include "private_profile.h" namespace basis { UNIT_TEST_FUNC(CPrivateProfile) { // 終了時にtempファイルは削除する SafeRelease<CFilePath> file { new CFilePath(CFilePath::GetBootDirectory() + TEXT("test_profile.temp")), [](CFilePath* p) { p->erase(); delete p; } }; CPrivateProfile prof(file->path().c_str()); if (prof.path() != file->path().c_str()) throw 0; // エラー:セクション未指定 if (prof.write(0, TEXT_EMPTY) == true) throw 0; if (prof.eraseKey(0) == true) throw 0; if (prof.getAllKeyNames() == true) throw 0; // バッファ有効性の確認 if (*prof.c_str() != _T('\0')) throw 0; // バッファの再確保できるか確認 if (file->path() != prof.read(0, file->path().c_str())) throw 0; // 終端文字が2つついてるか? 
if (prof.getAllSectionNames() == false) throw 0; if (*prof.c_str() || *(prof.c_str() + 1)) throw 0; // キー追加 tstr tx{ TEXT("writestring") }; prof.section(TEXT("s")); if (prof.write(TEXT("k"), tx.c_str()) == false) throw 0; if (tx != prof.read(TEXT("k"), nullptr)) throw 0; if (prof.getAllKeyNames() == false) throw 0; if (prof.c_str()[0] != _T('k')) throw 0; if (*(prof.c_str() + 1) || *(prof.c_str() + 2)) throw 0; if (prof.getAllSectionNames() == false) throw 0; if (prof.c_str()[0] != _T('s')) throw 0; if (*(prof.c_str() + 1) || *(prof.c_str() + 2)) throw 0; if (prof.read(TEXT("not_exist"), tx.c_str()) != tx) throw 0; if (prof.eraseKey(TEXT("k")) == false) throw 0; if (prof.clearSection() == false) throw 0; return true; } } // namespace<file_sep>#pragma once #ifndef GUID_9E8D5A55ABC144D1920C2E03C8E3A187 #define GUID_9E8D5A55ABC144D1920C2E03C8E3A187 #ifndef STDAFX_H #include <wtypes.h> #include <string> #include <map> #endif #include "iunit_test.h" namespace basis { UNIT_TEST(CKey) class CKey { public: static constexpr WORD const SHIFT = 0x30'00; static constexpr WORD const L_SHIFT = 0x20'00; static constexpr WORD const R_SHIFT = 0x10'00; static constexpr WORD const CTRL = 0x0c'00; static constexpr WORD const L_CTRL = 0x08'00; static constexpr WORD const R_CTRL = 0x04'00; static constexpr WORD const ALT = 0x03'00; static constexpr WORD const L_ALT = 0x02'00; static constexpr WORD const R_ALT = 0x01'00; //! コンストラクタ CKey(); /*! 変換コンストラクタ @param key 仮想キーコード。 クラス定数の修飾キーを組み合わせても良い。 下位WORDのみが使われ、上位WORDは無視される。 @par 以下の値はすべて同一のキーに変換される -VK_SHIFT -CKey::Shift -VK_SHIFT | CKey::Shift */ CKey(DWORD key); explicit operator bool() const { return vkey() != 0; } CKey operator&(const CKey& rhs) const; CKey& operator&=(const CKey& rhs) { return *this = *this & rhs; } /*! Combine two keys. As parameters, two different cases are permitted. One is Modifier key and non-Modifier key. Other is Modifier key and Modifier key. 
In latter case, the smallest modifier key (ALT<CTRL<SHIFT) would be treated as vkey. As so, SHIFT | ALT will equal to ALT | SHIFT. @par In the unpermitted case, both of keys were non-modifier key, then rise invalid_argument. */ CKey operator|(const CKey& rhs) const; CKey& operator|= (const CKey& rhs) { return *this = *this | rhs; } bool operator==(const CKey& rhs) const { return m_keys == rhs.m_keys; } bool operator!=(const CKey& rhs) const { return !operator==(rhs); } unsigned char vkey() const { return m_keys & 0xFF; } WORD flags() const { return m_keys & 0xFF00; } bool isModifier() const { return (vkey() == VK_SHIFT || vkey() == VK_CONTROL || vkey() == VK_MENU); } using tstr = std::basic_string<TCHAR>; CKey& read(tstr t) { return read(t.c_str()); } CKey& read(PCTSTR p); tstr toStr(); static std::map<unsigned char, tstr> Text; private: // 修飾キーを、VKEYとしても使う void descendToVKey() { m_keys = flags(); if (m_keys & ALT) m_keys |= VK_MENU; else if (m_keys & CTRL) m_keys |= VK_CONTROL; else if (m_keys & SHIFT) m_keys |= VK_SHIFT; } void Plus(tstr& t, const tstr& keyname); size_t termLength(PCTSTR p, size_t nStart = 0); WORD m_keys; }; } // namespace #endif<file_sep>#include "file_path.h" #include "file_item.h" #include "stdfnc.h" namespace basis { UNIT_TEST_FUNC(CFileItem) { auto path = CFilePath::GetCurrentDirectory(); path += TEXT("TEST_CFileItem.temp"); auto content = "This is test file."; // ---------- write file --------- // { CFileItem file; if (path.exist()) file = path.open(GENERIC_WRITE, FILE_SHARE_READ); else file = path.create(); if (!file) throw 0; if (!file.write(reinterpret_cast<const void*>(content), static_cast<DWORD>(strlen(content)), INFINITE)) throw 0; } // --------- read file --------- // { auto file = path.open(); if (!file) throw 0; auto size = file.getSize(); SafeRelease<char> sz(static_cast<char*> (malloc(size.LowPart + 1)), [](void *p) { free(p); }); if (!file.read(reinterpret_cast<void*>(sz.get()), size.LowPart, INFINITE)) throw 0; 
sz[size.LowPart] = '\0'; if (strcmp(content, sz)) throw 0; } path.erase(); return true; } } // namespace
d12e1bef7b65c8ed49a43e090ff8b67adb156cab
[ "Text", "C++", "INI" ]
97
C++
ryechat/ImageViewer
1e7503dad46b9c5357da00d1b433b6445147cd90
e581e157f76cb499f2463e27fb0ee9ea89bd3fd1
refs/heads/master
<repo_name>Vinayaka-cronj/Projects<file_sep>/Githubfinder/app.js // init github const github=new Github; // init UI const ui=new UI; // debounce function const debounce = (fn, time) => { let timeout; return function() { const functionCall = () => fn.apply(this, arguments); clearTimeout(timeout); timeout = setTimeout(functionCall, time); } } // search input const searchUser=document.getElementById('searchuser'); // search input event listner searchUser.addEventListener('keyup',debounce( e =>{ // get user text const userText=e.target.value; if(userText!== ''){ // make http call github.getUser(userText) .then(data =>{ if(data.profile.message === 'Not Found'){ // show alert ui.showalert('User not found','alert alert-danger'); } else{ // show profile ui.showprofile(data.profile); ui.showrepos(data.repos); console.log(data.profile); } }) } else{ // clear profile ui.clearprofile(); } },1000)); <file_sep>/coronatracker/app.js const PageState = function () { let currentState = new homeState(this); this.init = function () { this.change(new homeState); } this.change = function (state) { currentState = state; } }; // home state const homeState = function (page) { // Get total results fetch('https://corona.lmao.ninja/all') .then(function (res) { return res.json(); }) .then(function (data) { let totalCase = data.cases; let totaldeath = data.deaths; let totalrecover = data.recovered; let dpercent = ((totaldeath * 100) / totalCase).toFixed(2); let rpercent = ((totalrecover * 100) / totalCase).toFixed(2); // append data to cards document.getElementById('tConfirmed').innerHTML = `<h5>${totalCase}</h5>`; document.getElementById('tDeath').innerHTML = `<h5>${totaldeath}</h5>`; document.getElementById('tRecovered').innerHTML = `<h5>${totalrecover}</h5>`; document.getElementById('tDeathPercentage').innerHTML = `<h6>Death Percent: ${dpercent}%</h6>`; document.getElementById('tRecoverPercentage').innerHTML = `<h6>Recover Percent: ${rpercent}%</h6>`; }) .catch(function (err) { 
console.log(err); }); // get country list fetch('https://corona.lmao.ninja/countries') .then(function (res) { return res.json(); }) .then(function (data) { // console.log(data); let output = ''; let countryname = ''; let confirmed = ''; let death = ''; let recovered = ''; data.forEach(function (country) { countryname = country.country; confirmed = country.cases; death = country.deaths; recovered = country.recovered; console.log(countryname, confirmed, death, recovered); output += ` <tr> <td>${countryname}</td> <td>${confirmed}</td> <td>${recovered}</td> <td>${death}</td> </tr> `; }); document.getElementById('listCountry').innerHTML = output; }) .catch(function (err) { console.log(err); }); }; // India Display const indiaDisplay = function (page) { // Get total details fetch('https://corona.lmao.ninja/countries/india') .then(function (res) { return res.json(); }) .then(function (data) { let totalCase = data.cases; let totaldeath = data.deaths; let totalrecover = data.recovered; let dpercent = ((totaldeath * 100) / totalCase).toFixed(2); let rpercent = ((totalrecover * 100) / totalCase).toFixed(2); // append data to cards document.getElementById('tConfirmed').innerHTML = `<h5>${totalCase}</h5>`; document.getElementById('tDeath').innerHTML = `<h5>${totaldeath}</h5>`; document.getElementById('tRecovered').innerHTML = `<h5>${totalrecover}</h5>`; document.getElementById('tDeathPercentage').innerHTML = `<h6>Death Percent: ${dpercent}%</h6>`; document.getElementById('tRecoverPercentage').innerHTML = `<h6>Recover Percent: ${rpercent}%</h6>`; }) .catch(function (err) { console.log(err); }); // get states info fetch('https://api.covid19india.org/data.json') .then(function (res) { return res.json(); }) .then(function (data) { console.log(data.statewise); let loop = data.statewise; let output = ''; let statename = ''; let confirmed = ''; let death = ''; let recovered = ''; loop.forEach(function (state) { statename = state.state; confirmed = state.confirmed; death = 
state.deaths; recovered = state.recovered; console.log(statename, confirmed, death, recovered); output += ` <tr> <td>${statename}</td> <td>${confirmed}</td> <td>${recovered}</td> <td>${death}</td> </tr> `; }); document.getElementById('listCountry').innerHTML = output; }) .catch(function (err) { console.log(err); }); } // display usa const usaDisplay = function (page) { // Get total results fetch('https://corona.lmao.ninja/countries/usa') .then(function (res) { return res.json(); }) .then(function (data) { let totalCase = data.cases; let totaldeath = data.deaths; let totalrecover = data.recovered; let dpercent = ((totaldeath * 100) / totalCase).toFixed(2); let rpercent = ((totalrecover * 100) / totalCase).toFixed(2); // append data to cards document.getElementById('tConfirmed').innerHTML = `<h5>${totalCase}</h5>`; document.getElementById('tDeath').innerHTML = `<h5>${totaldeath}</h5>`; document.getElementById('tRecovered').innerHTML = `<h5>${totalrecover}</h5>`; document.getElementById('tDeathPercentage').innerHTML = `<h6>Death Percent: ${dpercent}%</h6>`; document.getElementById('tRecoverPercentage').innerHTML = `<h6>Recover Percent: ${rpercent}%</h6>`; }) .catch(function (err) { console.log(err); }); // clear table document.getElementById('listCountry').innerHTML = ''; // get states list fetch('https://corona.lmao.ninja/states') .then(function (res) { return res.json(); }) .then(function (data) { // console.log(data); let output = ''; let statename = ''; let confirmed = ''; let death = ''; let recovered = ''; data.forEach(function (state) { statename = state.state; confirmed = state.cases; death = state.deaths; recovered = state.recovered; console.log(statename, confirmed, death, recovered); output += ` <tr> <td>${statename}</td> <td>${confirmed}</td> <td>N/A</td> <td>${death}</td> </tr> `; }); document.getElementById('listCountry').innerHTML = output; }) .catch(function (err) { console.log(err); }); } // Instantiate pageState const page = new PageState(); // Init 
the first state page.init(); // UI Vars const select = document.getElementById('states'); // loading page based on selected values select.addEventListener('change', (e) => { if (select.value === 'all') { page.change(new homeState); } else if (select.value === 'india') { page.change(new indiaDisplay); } else if (select.value === 'usa') { page.change(new usaDisplay); } e.preventDefault(); }); // debounce const debounce = (fn, time) => { let timeout; return function () { const functionCall = () => fn.apply(this, arguments); clearTimeout(timeout); timeout = setTimeout(functionCall, time); } } // event listners document.getElementById('searchcountry').addEventListener('keyup', debounce(filterCountry), 1000); // filter function filterCountry(e) { const input = e.target.value.toLowerCase(); table = document.getElementById("listCountry"); tr = table.getElementsByTagName("tr"); var filter, table, tr, td, i, txtValue; filter = input; for (i = 0; i < tr.length; i++) { td = tr[i].getElementsByTagName("td")[0]; if (td) { txtValue = td.textContent || td.innerText; if (txtValue.toLocaleLowerCase().indexOf(filter) > -1) { tr[i].style.display = ""; } else { tr[i].style.display = "none"; } } } }<file_sep>/Githubfinder/github.js class Github{ constructor(){ this.client_id='8fe88f41812d7712cc50'; this.client_secret='<KEY>'; this.repo_count=5; this.repo_sort='created: asc'; } async getUser(user){ const profileResponse = fetch(`https://api.github.com/users/${user}?client_id=${this.client_id}&client_secret=${this.client_secret}`); const repoResponse = fetch(`https://api.github.com/users/${user}/repos?per_page=${this.repo_count}&sort=${this.repo_sort}&client_id=${this.client_id}&client_secret=${this.client_secret}`); const profile=await (await profileResponse).json(); const repos=await (await repoResponse).json(); return{ profile, repos } } }
b5110fa074edb9e70310fcc3ac75d7b95b5ae223
[ "JavaScript" ]
3
JavaScript
Vinayaka-cronj/Projects
14b5d6173fe19f5c3ac1976bbfb9cdc643debbaf
6b07e8a615416adb8fbb02d3c354214dd0effe42
refs/heads/master
<file_sep>"use strict"; const app = angular.module("UberPro", []); <file_sep>app.controller("TodoCtrl", function($scope, $http) { $scope.title = "Uberproductify!!"; $scope.textIN = ""; $scope.anotherTextIN = ""; $scope.testCheck = false; $scope.newTodo = ""; $scope.taskType = "home"; $http.get("/data/tasks.json") .then((response) => { $scope.tasks = response.data.tasks; }); $scope.addTodo = () => { $scope.tasks.push({name: $scope.newTodo, type: $scope.taskType}); $scope.newTodo = ""; } $scope.removeTodo = (task) => { const taskIndex = $scope.tasks.indexOf(task); if (taskIndex >= 0) { $scope.tasks.splice(taskIndex, 1); } } });
8407a1a0c3144af0a93f01cc3b077d55b8d552b0
[ "JavaScript" ]
2
JavaScript
chase-ramsey/angular-to-do
ea0ceef3d9efd95f530b6f1b9a3e8601e1f5563c
03f803970f0e1a12146905c6e0143dfaff9de50f
refs/heads/master
<repo_name>yanle417467928/spring-boot-dubbo-demo<file_sep>/spring-boot-dubbo-demo-service/src/main/java/com/yanle/springboot/dubbo/demo/service/impl/OrderServiceImpl.java package com.yanle.springboot.dubbo.demo.service.impl; import com.yanle.springboot.dubbo.demo.service.OrderService; import org.apache.dubbo.config.annotation.Service; import org.springframework.stereotype.Component; @Component @Service(version = "${demo.service.version}") public class OrderServiceImpl implements OrderService { public String sayHello(String name) { return null; } } <file_sep>/spring-boot-dubbo-demo-service/src/main/java/com/yanle/springboot/dubbo/demo/service/OrderService.java package com.yanle.springboot.dubbo.demo.service; public interface OrderService { String sayHello(String name); } <file_sep>/spring-boot-dubbo-demo-consumer/src/main/java/com/yanle/springboot/dubbo/demo/consumer/SpringBootDubboDemoConsumerApplication.java package com.yanle.springboot.dubbo.demo.consumer; import org.apache.dubbo.config.spring.context.annotation.EnableDubbo; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; @SpringBootApplication @EnableDubbo public class SpringBootDubboDemoConsumerApplication { public static void main(String[] args) { SpringApplication.run(SpringBootDubboDemoConsumerApplication.class, args); } } <file_sep>/spring-boot-dubbo-demo-consumer/src/main/java/com/yanle/springboot/dubbo/demo/consumer/TestController.java package com.yanle.springboot.dubbo.demo.consumer; import com.yanle.springboot.dubbo.demo.service.OrderService; import com.yanle.springboot.dubbo.demo.service.TestService; import org.apache.dubbo.config.annotation.Reference; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RestController; @RestController public class TestController { @Reference(version = "${demo.service.version}") private 
TestService testService; @Reference(version = "${demo.service.version}") private OrderService orderService; @GetMapping(value = "/hello") public String hello() { return testService.test(); } } <file_sep>/spring-boot-dubbo-demo-service/src/main/java/com/yanle/springboot/dubbo/demo/service/TestService.java package com.yanle.springboot.dubbo.demo.service; public interface TestService { String test(); }
fc9be90100fcdf56d329a1bb3f575424a295789e
[ "Java" ]
5
Java
yanle417467928/spring-boot-dubbo-demo
d5690c9f4766ed2e4743ce79d657862af9fbd11c
f57ee4e185a9122003353898333be4c891dbbbfa
refs/heads/master
<file_sep>#!/bin/sh export NAME=ulikoehler/ubuntu-gcc-make export VERSION=latest docker build -t ${NAME}:${VERSION} . docker build -t ${NAME}:22.04 . docker push ${NAME}:${VERSION} docker push ${NAME}:22.04 <file_sep>FROM ubuntu:22.04 # Install NodeJS & Dummy xorg ENV DEBIAN_FRONTEND=noninteractive RUN apt-get update && apt-get -y install curl && curl -sL https://deb.nodesource.com/setup_18.x | bash - && apt-get install -y nodejs xserver-xorg-video-dummy && rm -rf /var/lib/apt/lists/* <file_sep>#!/bin/sh export NAME=ulikoehler/redmine-with-compiler export VERSION=alpine docker build -t ${NAME}:${VERSION} . docker push ${NAME}:${VERSION} <file_sep>FROM docker:stable RUN apk add curl bash RUN curl -fsSL https://techoverflow.net/install-buildock.sh > /etc/profile.d/buildock.sh && chmod a+x /etc/profile.d/buildock.sh # Login shell to SOURCE /etc/profile.d/buildock.sh instead of running it CMD [ "/bin/bash", "-l" ] <file_sep>#!/bin/sh export NAME=ulikoehler/streamripper export VERSION=latest docker build -t ${NAME}:${VERSION} . docker build -t ${NAME}:20.04 . docker push ${NAME}:${VERSION} docker push ${NAME}:20.04 <file_sep>FROM ubuntu:22.04 RUN apt update && apt -y install build-essential make && rm -rf /var/lib/apt/lists/* WORKDIR /app CMD [ "/usr/bin/make" ] <file_sep>#!/bin/sh export NAME=ulikoehler/node-inkscape-gm export VERSION=14 docker build -t ${NAME}:${VERSION} . docker build -t ${NAME}:lts . 
docker push ${NAME}:${VERSION} docker push ${NAME}:lts <file_sep>FROM ubuntu:20.04 RUN apt update && apt -y install streamripper && rm -rf /var/lib/apt/lists/* WORKDIR /app CMD [ "/usr/bin/streamripper" ] <file_sep>FROM node:16 ENV DEBIAN_FRONTEND=noninteractive RUN apt-get update && apt-get -y install xserver-xorg-video-dummy && rm -rf /var/lib/apt/lists/* <file_sep>FROM ulikoehler/ubuntu-opencascade ENV DEBIAN_FRONTEND=noninteractive RUN apt-get update && apt-get install -y rapidjson-dev && rm -rf /var/lib/apt/lists/* WORKDIR /app CMD [ "/usr/bin/make" ] <file_sep># buildock *Reproducible build environments for local builds using Docker* Do you need to make your builds (e.g. using *make* and *cmake*) or software (e.g. using *node* or *python3*) to reliably work on a wide range of different host systems, but you still want to keep your actual application's files locally on the host system? `buildock` creates a new docker container from an image and mounts the current working directory ($(pwd)) to `/app` in the container. This enables reliable builds (e.g. using `make` and `cmake`) or the use of specific versions of software (e.g. using `node` or `python3`) working on a wide range of host systems, but **keeps your actual files locally on the host system**. ## What does it do? *buildock* provides two sets of tools: - A shell function `buildock` that wraps `docker` to facilitate easy local builds - Several pre-built docker images that have software like `make` and `gcc`. These images are just for convenience! 
## How to install **If you just want to try it out:** ```sh git clone https://github.com/ulikoehler/buildock source buildock/buildock.sh ``` **Permanent installation:** For `bash`: ```sh curl -fsSLhttps://raw.githubusercontent.com/ulikoehler/buildock/master/buildock.sh >> ~/.bashrc ``` For `zsh`: ```sh curl -fsSL https://raw.githubusercontent.com/ulikoehler/buildock/master/buildock.sh >> ~/.zshrc sed -i -e 's/export -f buildock/#export -f buildock/g' ~/.zshrc ``` This will add a [`buildock` function](https://github.com/ulikoehler/buildock/blob/master/buildock.sh) to your `~/.bashrc` or `~/.zshrc`. This function will be automatically loaded once you restart your shell. To load `buildock` in already active shells, run `source ~/.bashrc` or `source ~/.zshrc`, else you'll see `command not found: buildock` One-liner to activate in the current shell (not persistent!) ```sh source /dev/stdin < <(curl -fsSL https://techoverflow.net/install-buildock.sh) ``` ## How to use Usage: ``` buildock [docker run argument(s)] <image name> <command(s)> ``` Example: To compile a C++ application using `make`: ```sh buildock ulikoehler/ubuntu-gcc-make make ``` cmake This command is mostly equivalent to just running `make` locally, however using the *docker*-based approach you don't have to deal with different compiler/make versions on different host systems producing. By default, *buildock* does not enable interactive mode (`docker run --interactive/-i`) or allocate a pseudo-TTY (`docker run --tty/-t`) to facilitate easy automated builds in non-TTY environments like Gitlab runners. In case you need to run in **interactive mode** (e.g. if you need to interact with the program being run), use this syntax: ```sh buildock -it ulikoehler/ubuntu-gcc-make make ``` ## Tips, tricks & limitation: #### npm install fails ``` buildock ulikoehler/ubuntu-opencascade-node:12 npm install ``` This fails with ``` npm ERR! path /.npm npm ERR! code EACCES npm ERR! errno -13 npm ERR! syscall mkdir npm ERR! 
Error: EACCES: permission denied, mkdir '/.npm' npm ERR! [Error: EACCES: permission denied, mkdir '/.npm'] { npm ERR! stack: "Error: EACCES: permission denied, mkdir '/.npm'", npm ERR! errno: -13, npm ERR! code: 'EACCES', npm ERR! syscall: 'mkdir', npm ERR! path: '/.npm' npm ERR! } npm ERR! npm ERR! The operation was rejected by your operating system. npm ERR! It is likely you do not have the permissions to access this file as the current user npm ERR! npm ERR! If you believe this might be a permissions issue, please double-check the npm ERR! permissions of the file and its containing directories, or try running npm ERR! the command again as root/Administrator (though this is not recommended). ``` The reason for this is that the current user's ID does not have a home directory on the container and therefore npm tries to access `/.npm` for its cache, which it can't create. **Workaround:** Use `-e HOME=/tmp` to define a home dir or update buildock since this is the default in more recent versions. ## How does it work` `buildock` creates a new container using the given imamage an mounts the current working directory (`$(pwd)`) to `/app` on said container. It then runs the user-defined command on the container (e.g. `make`). Additionally it ensures that the `docker` container runs under the current user using `--user $(id -u):$(id -g)`. This prevents the output files (if any) to be created as `root` user, instead they will be created with the user and group running `buildock`. ## How to make custom *buildock* images Easy: Just use any docker container with the software you need installed. The only requirement is that `/app` is not used for anything relevant in the image, since `/app` is where `buildock` will mount the current directory to. 
## More reading * The post that started it all: [Towards a docker-based build of C/C++ applications](https://techoverflow.net/2019/06/27/towards-a-docker-based-build-of-c-c-applications/) <file_sep>#!/bin/sh export NAME=ulikoehler/mkpasswd export VERSION=latest docker build -t ${NAME}:${VERSION} . docker build -t ${NAME}:34 . docker push ${NAME}:${VERSION} docker push ${NAME}:34 <file_sep>FROM fedora:34 RUN yum -y install mkpasswd && yum -y clean all && rm -rf /var/cache WORKDIR /app CMD ["/usr/bin/mkpasswd"] <file_sep>#!/bin/sh export NAME=ulikoehler/ubuntu-node-xorg-dummy export VERSION=18 docker build -t ${NAME}:${VERSION} . docker push ${NAME}:${VERSION} <file_sep>FROM redmine:alpine RUN apk add make musl-dev ruby-dev g++ <file_sep>FROM ubuntu:20.04 ENV DEBIAN_FRONTEND=noninteractive RUN apt update && apt -y install texlive-latex-recommended texlive-lang-all texlive-latex-extra && apt -y purge texlive-\*-doc && rm -rf /var/lib/apt/lists/* WORKDIR /app CMD [ "/usr/bin/pdflatex" ] <file_sep>#!/bin/sh export NAME=ulikoehler/node-xorg-dummy export VERSION=12 docker build -t ${NAME}:${VERSION} . docker push ${NAME}:${VERSION} <file_sep>FROM ulikoehler/ubuntu-gcc-make ENV DEBIAN_FRONTEND=noninteractive RUN apt update && apt -y install cmake cmake-extras && rm -rf /var/lib/apt/lists/* WORKDIR /app CMD [ "/usr/bin/make" ] <file_sep>#!/bin/sh export NAME=ulikoehler/docker-buildock export VERSION=stable docker build -t ${NAME}:${VERSION} . docker push ${NAME}:${VERSION} <file_sep>#!/bin/bash rm -rf bup # Remove any old versions that might be present git clone -b 0.33 --depth 1 https://github.com/bup/bup export NAME=ulikoehler/bup export VERSION=0.33-alpine3.17 export SHORTVERSION=0.33 docker build -t ${NAME}:${VERSION} . 
# Create additional tags docker image tag ${NAME}:${VERSION} ${NAME}:${SHORTVERSION} docker image tag ${NAME}:${VERSION} ${NAME}:latest # Push to repo docker push ${NAME}:${VERSION} docker push ${NAME}:${SHORTVERSION} docker push ${NAME}:latest <file_sep> # buildock: https://github.com/ulikoehler/buildock # usage: buildock [docker run argument(s)] <image name> <command(s)> function buildock { # No arguments warning if [[ $# -eq 0 ]] ; then echo "Usage: buildock [docker run argument(s)] <image name> <command(s)>" else # Actual buildock code docker run -e HOME=/tmp --user $(id -u):$(id -g) -v "$(pwd):/app" -w "/app" --rm $@ fi } # Export function to sub-shells export -f buildock <file_sep>FROM ulikoehler/bup:latest # Install dropbear # https://techoverflow.net/2022/12/25/minimal-ssh-server-on-docker-container-using-dropbear/ ENV SSH_PORT=2022 RUN apk --no-cache add dropbear &&\ mkdir -p /home/bup/.ssh &&\ adduser -s /bin/sh -D bup --uid 1111 --home /home/bup &&\ chown -R bup:bup /home/bup CMD ["/bin/sh", "-c", "chown -R bup:bup /bup ; chmod -R u+rw,g+rw,o+r /bup ; chown -R bup:bup /home/bup/.ssh ; chmod u+rwx,g-rwx,o-rwx /home/bup/.ssh ; chmod u+rwx,g-rwx,o-rwx /home/bup/.ssh/authorized_keys ; /usr/sbin/dropbear -RFEwgsjk -G bup -p ${SSH_PORT}"] <file_sep>FROM alpine:3.17 # Add dropbear (includes dropbearkey) & remove irrelevant files RUN apk --no-cache add dropbear dropbear-convert && rm /usr/sbin/dropbear CMD [ "/usr/bin/dropbearkey" ]<file_sep>FROM ulikoehler/ubuntu-opencascade-node:18 ENV DEBIAN_FRONTEND=noninteractive RUN apt-get update && apt-get install -y xserver-xorg-video-dummy && rm -rf /var/lib/apt/lists/* <file_sep>#!/bin/sh export NAME=ulikoehler/node-mongo-tools export VERSION=lts docker build -t ${NAME}:${VERSION} . docker build -t ${NAME}:14 . 
docker push ${NAME}:${VERSION} docker push ${NAME}:14 <file_sep>FROM fedora:32 RUN yum -y install zchunk && yum clean all WORKDIR /app CMD [ "/usr/bin/zchunk" ] <file_sep>#!/bin/sh export NAME=ulikoehler/ubuntu-gcc-cmake-boost export VERSION=latest docker build -t ${NAME}:${VERSION} . docker push ${NAME}:${VERSION} <file_sep>FROM ulikoehler/ubuntu-opencascade-rapidjson ENV DEBIAN_FRONTEND=noninteractive RUN apt-get update && apt-get -y install curl && curl -sL https://deb.nodesource.com/setup_18.x | bash - && apt-get install -y nodejs && rm -rf /var/lib/apt/lists/* <file_sep>FROM node:lts RUN apt-get update && apt-get -y install mongo-tools && rm -rf /var/lib/apt/lists/* WORKDIR /app CMD [ "/usr/bin/node" ] <file_sep>FROM ulikoehler/ubuntu-gcc-cmake-boost ENV DEBIAN_FRONTEND=noninteractive RUN apt update -y && apt install -y xfonts-scalable libocct-data-exchange-dev libocct-draw-dev libocct-foundation-dev libocct-modeling-algorithms-dev libocct-modeling-data-dev libocct-ocaf-dev libocct-visualization-dev && rm -rf /var/lib/apt/lists/* WORKDIR /app CMD [ "/usr/bin/make" ] <file_sep>#!/bin/sh export NAME=ulikoehler/ubuntu-opencascade-node export VERSION=18 docker build -t ${NAME}:${VERSION} -t ${NAME}:latest . docker push ${NAME}:${VERSION} docker push ${NAME}:latest <file_sep>FROM ulikoehler/ubuntu-gcc-cmake RUN apt update && apt -y install libboost-all-dev && rm -rf /var/lib/apt/lists/* WORKDIR /app CMD [ "/usr/bin/make" ] <file_sep>#!/bin/sh export NAME=ulikoehler/zchunk export VERSION=1.1.9-fedora32 docker build -t ${NAME}:${VERSION} -t ${NAME}:latest . 
docker push ${NAME}:${VERSION} docker push ${NAME}:latest <file_sep>FROM node:lts RUN apt-get update && apt-get -y install inkscape graphicsmagick imagemagick && rm -rf /var/lib/apt/lists/* WORKDIR /app CMD [ "/usr/bin/inkscape" ] <file_sep>FROM alpine:3.17 # Install requirements RUN apk add --no-cache bash make g++ python3-dev git automake autoconf par2cmdline py3-pip && pip3 install wheel && pip3 install pyxattr # Copy bup clone (done ahead of time in build.sh) COPY ./bup /app/bup # Install bup RUN cd /app/bup && ./configure && make -j4 install PREFIX=/usr # Cleanup: remove bup clone RUN rm -rf /app/bup WORKDIR /app CMD [ "/usr/bin/bup" ] <file_sep>#!/bin/bash export NAME=ulikoehler/bup-server export SHORTVERSION=0.33 export VERSION=${SHORTVERSION}-alpine3.17 docker build -t ${NAME}:${VERSION} -t ${NAME}:${SHORTVERSION} -t ${NAME}:latest . # Push to repo docker push ${NAME}:${VERSION} docker push ${NAME}:${SHORTVERSION} docker push ${NAME}:latest
85c4c4f70a50b7c74316f1bec02a174011fb0807
[ "Markdown", "Dockerfile", "Shell" ]
36
Shell
ulikoehler/buildock
0af56af85b6e1ba22b7dc5ff4ec494cf7dad6ef0
6a894d7823086d50acc7bc08bdeefe965ab1a164
refs/heads/master
<file_sep>const { Router } = require("express"); const routes = Router(); const migrationsController = require('./app/controllers/vendedores'); routes.post('/migrations', migrationsController.store); routes.get('/migrations', migrationsController.index); routes.get('/migrations/:id', migrationsController.show); routes.put('/migrations/:id', migrationsController.update); routes.delete('/migrations/:id', migrationsController.destroy); module.exports = routes;<file_sep>const index = require('./index'); const port = 8080; index.listen(port, _ => { console.log(`Application started in ${ port }`) });<file_sep>module.exports = (sequelize, DataTypes) => { const Vendedor = sequelize.define("Vendedor", { Nome: DataTypes.STRING, Senha: DataTypes.STRING, }, { timestamps: false, freezeTableName: true, }); return Vendedor; };
655a20776e8123e05ddb82e89919fea3f4770183
[ "JavaScript" ]
3
JavaScript
VictorManduca/Sequelize_Migrations
69a11d7aa9f60dfb73b3d1b06c72acfca706be0c
fabee6f80b3f7fd2464bb8049c2dce00ad9954ab
refs/heads/master
<file_sep>var MapWrapper = function(container, coords, zoom, styles){ this.googleMap = new google.maps.Map(container, {center: coords, zoom: zoom, styles: styles, mapTypeId: 'terrain'}); this.directionsService = new google.maps.DirectionsService; this.directionsDisplay = new google.maps.DirectionsRenderer({ draggable: true, map: this.googleMap}); this.markersArray = []; } MapWrapper.prototype = { clearMarkers: function(){ for (var i = 0; i < this.markersArray.length; i++){ this.markersArray[i].setMap(null) } }, addMarker: function(adventure){ var marker = new google.maps.Marker({ position: adventure.startpoint, map: this.googleMap }) this.markersArray.push(marker); var infoWindow = new google.maps.InfoWindow({ content: adventure.name + "/n" + adventure.mode }); marker.addListener('click', function() { infoWindow.open(this.googleMap, marker); }); }, resetMap: function () { this.directionsDisplay.setMap(null); //this.directionsDisplay = null; }, showRoute1: function (origin, destination,waypoints, service, display, id, url) { this.id = id service.route({ origin: origin, destination: destination, waypoints: waypoints, travelMode: 'WALKING', avoidTolls: true }, function(response, status) { if (status === 'OK') { // this.changeView(1,origin) this.clearMarkers(); //// the route is displayed here this.directionsDisplay.setMap(this.googleMap); this.directionsDisplay.setDirections(response); ///////////response is the same as the line under //console.log(response.routes[0].legs[0]); //console.log(this.directionsDisplay.directions.routes[0].legs[0]); /////////////////////////////////////////////////////// // console.log(routeData1.via_waypoints); this.directionsDisplay.addListener("directions_changed", function(){ console.log("directions changed - xmlhttprequest"); console.log(this.directionsDisplay.directions.routes[0].legs[0]); //// create object from directions var routeData = {}; var routeDirections = this.directionsDisplay.directions.routes[0].legs[0] var routeWaypoints 
= []; routeData.startpoint = {'lat': routeDirections.start_location.lat(), 'lng':routeDirections.start_location.lng()} routeData.endpoint = {'lat': routeDirections.end_location.lat(), 'lng':routeDirections.end_location.lng()} var viawp = routeDirections.via_waypoints for(var i=0;i<viawp.length;i++){ routeWaypoints[i] = [viawp[i].lat(),viawp[i].lng()] } routeData.waypoints = routeWaypoints; var jsonString = JSON.stringify(routeData) console.log(jsonString); /////////////////////////// Update route var request = new XMLHttpRequest(); request.open("PUT", url + this.id); request.setRequestHeader("Content-Type", "application/json"); request.onload = function(){console.log("sending");}; request.send(jsonString); //////update end }.bind(this)) } else { alert('Could not display directions due to: ' + status); } }.bind(this)); } }; module.exports = MapWrapper; <file_sep>var Route = function(url){ this.url = url; } Route.prototype = { makePost: function(callback){ var request = new XMLHttpRequest(); request.open("POST", this.url); request.setRequestHeader("Content-Type", "application/json"); request.onload = callback request.send(payload); } } module.exports = Route; <file_sep>var List = function(url){ this.url = url; this.itemList = []; this.item = ""; } List.prototype = { getData: function(callback){ var request = new XMLHttpRequest(); request.open("GET", this.url); request.onload = function(){ if (request.status === 200){ var jsonString = request.responseText; this.itemList = JSON.parse(jsonString); callback(this.itemList); } }.bind(this); request.send(); }, getDataById: function(id,callback){ var request = new XMLHttpRequest(); request.open("GET", this.url + "/"+id); console.log(this.url) console.log(this.url +"/"+id) request.onload = function(){ if (request.status === 200){ console.log("Here?",request.responseText) var jsonString = request.responseText; console.log(jsonString) this.item = JSON.parse(jsonString); console.log(this.item) callback(this.item); } 
}.bind(this); request.send(); } } module.exports = List;<file_sep>var HeaderView = function(headerElement){ this.headerElement = headerElement this.adventureItem = null this.wishlistItem = null this.render() } HeaderView.prototype = { render: function(){ var headerWrapper = document.createElement('div') headerWrapper.className = "header-wrapper" var headerViewSeparator1 = document.createElement('div') headerViewSeparator1.className = "header-view-item" headerViewSeparator1.innerHTML = " | " this.adventureItem = document.createElement('div') this.adventureItem.className = "header-view-item" this.adventureItem.innerHTML = "all Adventures" this.adventureItem.style = "cursor: pointer" var headerViewSeparator2 = document.createElement('div') headerViewSeparator2.className = "header-view-item" headerViewSeparator2.innerHTML = " | " this.wishlistItem = document.createElement('div') this.wishlistItem.className = "header-view-item" this.wishlistItem.innerHTML = "my Adventures" this.wishlistItem.style = "cursor: pointer" var headerViewSeparator3 = document.createElement('div') headerViewSeparator3.className = "header-view-item" headerViewSeparator3.innerHTML = " | " headerWrapper.appendChild(headerViewSeparator1); headerWrapper.appendChild(this.adventureItem); headerWrapper.appendChild(headerViewSeparator2); headerWrapper.appendChild(this.wishlistItem); headerWrapper.appendChild(headerViewSeparator3); this.headerElement.appendChild(headerWrapper); } } module.exports = HeaderView<file_sep>use adventures db.route_data.drop() db.route_data.insert([ {name: "<NAME>", description: "This is an easy walk", startpoint:{"lat":55.9423957,"lng":-3.20640149999997}, endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "cycling", rating: 3, review: "This is great" }, {name: "<NAME>", description: "This is a tricky walk", startpoint:{"lat":55.942359,"lng":-3.20640149999997}, 
endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "cycling", rating: 3, review: "This is great" }, {name: "West Highland Way", description: "This is an easy walk", startpoint:{"lat":55.9423952,"lng":-3.20640149999997}, endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "walking", rating: 3, review: "This is great" }, {name: "Glentress", description: "This is an easy walk", startpoint:{"lat":55.9423951,"lng":-3.20640149999997}, endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "walking", rating: 3, review: "This is great" }, {name: "<NAME>", description: "This is an easy walk", startpoint:{"lat":55.9423978,"lng":-3.20640149999997}, endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "walking", rating: 3, review: "This is great" }, {name: "Wolftracks", description: "This is an easy walk", startpoint:{"lat":55.9423957,"lng":-3.20640149999997}, endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "walking", rating: 3, review: "This is great" }, {name: "Aberdeen", description: "This is an easy walk", startpoint:{"lat":55.9423957,"lng":-3.20640149999997}, endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "walking", rating: 3, review: "This is great" }, {name: "Lochaber", description: "This is an easy walk", startpoint:{"lat":55.9423957,"lng":-3.20640149999997}, 
endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "walking", rating: 3, review: "This is great" }, {name: "Rock<NAME>", description: "This is an easy walk", startpoint:{"lat":55.9423957,"lng":-3.20640149999997}, endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "walking", rating: 3, review: "This is great" }, {name: "<NAME>", description: "This is an easy walk", startpoint:{"lat":55.9423957,"lng":-3.20640149999997}, endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "walking", rating: 3, review: "This is great" }, {name: "<NAME>", description: "This is an easy walk", startpoint:{"lat":55.9423957,"lng":-3.20640149999997}, endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "walking", rating: 3, review: "This is great" }, {name: "<NAME>", description: "This is an easy walk", startpoint:{"lat":55.9423957,"lng":-3.20640149999997}, endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "walking", rating: 3, review: "This is great" } ]) db.wishlist.drop() db.wishlist.insert([ {name: "Glencoe", description: "This is an easy walk", startpoint:{"lat":55.9423957,"lng":-3.20640149999997}, endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "walking", rating: 3, review: "This is great" }, {name: "Braemar", description: "This is an easy walk", 
startpoint:{"lat":55.9423957,"lng":-3.20640149999997}, endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "walking", rating: 3, review: "This is great" }, {name: "Glenshee", description: "This is an easy walk", startpoint:{"lat":55.9423957,"lng":-3.20640149999997}, endpoint:{"lat":55.9608186,"lng":-3.199936799999932}, waypoints:[[55.9472594,-3.1973686000000043],[55.9539001,-3.185916300000031],[55.9569345,-3.201127700000029]], mode: "walking", rating: 3, review: "This is great" } ]) <file_sep>var FilterView = function(filterElement){ this.filterElement = filterElement; } FilterView.prototype = { render: function(callback){ var title = document.createElement('div') title.innerHTML = "Search "; title.className = "filter-title"; this.filterElement.appendChild(title) //make the form var form = document.createElement('form') form.id = "filterForm" form.method = "post"; form.className = "filter-set"; form.name = "filterForm"; var fieldSet = document.createElement('fieldset') fieldSet.className = "filter-field-set" form.appendChild(fieldSet) //cycle row var cycleRow = document.createElement('div') cycleRow.className = 'filter-form-row' var cycleCheckBox = document.createElement('input') cycleCheckBox.type = "checkbox"; cycleCheckBox.name = "cycle"; cycleCheckBox.className = "filter-input" var cycleLabel = document.createElement('label') cycleLabel.innerHTML = "Cycle" cycleLabel.className = "filter-label" cycleRow.appendChild(cycleLabel) cycleRow.appendChild(cycleCheckBox) fieldSet.appendChild(cycleRow) //walk row var walkRow = document.createElement('div') walkRow.className = 'filter-form-row' var walkCheckBox = document.createElement('input') walkCheckBox.type = "checkbox"; walkCheckBox.name = "walk"; walkCheckBox.className = "filter-input" var walkLabel = document.createElement('label') walkLabel.innerHTML = "Walk" walkLabel.className = "filter-label" 
walkRow.appendChild(walkLabel) walkRow.appendChild(walkCheckBox) fieldSet.appendChild(walkRow) // //avoid roads row // var roadsRow = document.createElement('div') // roadsRow.className = 'filter-form-row' // var roadsCheckBox = document.createElement('input') // roadsCheckBox.type = "checkbox"; // roadsCheckBox.name = "avoidRoads"; // roadsCheckBox.className = "filter-input" // var roadsLabel = document.createElement('label') // roadsLabel.innerHTML = "Avoid roads" // roadsLabel.className = "filter-label" // roadsRow.appendChild(roadsLabel) // roadsRow.appendChild(roadsCheckBox) // fieldSet.appendChild(roadsRow) //name var nameRow = document.createElement('div') nameRow.className = 'filter-form-row-textfield' var nameLabel = document.createElement('label') nameLabel.innerHTML = "Name" nameLabel.className = "filter-label" var nameInput = document.createElement('input') nameInput.type = "text"; nameInput.name = "name"; nameInput.className = "filter-input-textfield" nameRow.appendChild(nameLabel) nameRow.appendChild(nameInput) fieldSet.appendChild(nameRow) // //description var descriptionRow = document.createElement('div') descriptionRow.className = 'filter-form-row-textfield' var descriptionLabel = document.createElement('label') descriptionLabel.innerHTML = "Description" descriptionLabel.className = "filter-label" var descriptionInput = document.createElement('input') descriptionInput.type = "text"; descriptionInput.name = "description"; descriptionInput.className = "filter-input-textfield" descriptionRow.appendChild(descriptionLabel) descriptionRow.appendChild(descriptionInput) fieldSet.appendChild(descriptionRow) //clear search button var submitRow = document.createElement('div') submitRow.className = "filter-form-row-clearbutton" var submitButton = document.createElement('input') submitButton.type = "submit"; submitButton.name = "submit" submitButton.className = "filter-input-clearbutton" form.addEventListener('submit', function(event){ event.preventDefault() 
var formData = new FormData(form) //can do it this way too // console.log(formData.get('cycle')) // if(formData.get('cycle')) console.log("cycle is checked") var query = {}; if(this.cycle.checked && !this.walk.checked) { query.mode = 'cycling' } if(!this.cycle.checked && this.walk.checked) { query.mode = 'walking' } if(this.cycle.checked && this.walk.checked) { query.$or = "[{mode: $all 'walking'},{mode: $all 'cycling'}]" } if(this.name.value) { // query.name = this.name.value // query.name = "{'name' : {$regex : '.*son.*'}}" // query.name = "{name : {$regex : '.*" + this.name.value + ".*'}}" query.name = "'$regex' : '" + this.name.value + "'" // {name: "{$regex : /[eorge]/}"} // {"name" : /.*eorge*/} // //doesnt work // var regex = RegExp('.*' + this.name.value + '*') // query.name = regex } // if(this.description.value) { // query.description = this.description.value // } console.log(query) //send query as the payload of an XMLHTTPRequest post var jsonString = JSON.stringify(query) var request = new XMLHttpRequest(); request.open("POST", "http://localhost:3000/api/adventures/filter"); request.setRequestHeader("Content-Type", "application/json"); request.onload = function(){ var array = JSON.parse(request.responseText) callback(array); // console.log("we're back...", request.responseText ); }; request.send(jsonString); }) // submitRow.appendChild(submitButton) fieldSet.appendChild(submitRow) this.filterElement.appendChild(form) } } module.exports = FilterView<file_sep> var ListScrollerView = function(listElement){ this.listElement = listElement; this.adventures = [] } ListScrollerView.prototype = { renderAdventures: function(adventures, showCallback){ this.adventures = adventures; this.clearNodes() var title = document.createElement('div') title.innerHTML = "all Adventures"; title.className = "adventure-list-title"; this.listElement.appendChild(title) var scrollableContainer = document.createElement('div') scrollableContainer.className = 
"adventure-scrollable-container"; this.listElement.appendChild(scrollableContainer) this.adventures.forEach(function(adventure){ var wrapper = document.createElement('div') wrapper.className = "adventure-wrapper"; var name = document.createElement('div') name.innerHTML = adventure.name; name.className = "adventure-name"; var description = document.createElement('div') description.innerHTML = "Description : " + adventure.description; description.className = "adventure-description"; var routeModeRating = document.createElement('div') routeModeRating.innerHTML = "Route Mode : " + adventure.mode + ". Rating : " + adventure.rating; routeModeRating.className = "adventure-mode-rating"; var review = document.createElement('div') review.innerHTML = adventure.review; review.className = "adventure-review"; var adventureRouteWrapper = document.createElement('div') adventureRouteWrapper.className = "adventure-route-wrapper"; var showRoute = document.createElement('div') showRoute.innerHTML = "show route"; showRoute.className = "adventure-route-action"; showRoute.style = "cursor: pointer" showRoute.addEventListener('click', function(){ showCallback(adventure) }.bind(this)) var saveAdventure = document.createElement('div') saveAdventure.innerHTML = "save route"; saveAdventure.className = "adventure-route-action"; saveAdventure.style = "cursor: pointer" saveAdventure.addEventListener('click', function(){ var jsonString = JSON.stringify(adventure) var request = new XMLHttpRequest(); request.open("POST", "http://localhost:3000/api/wishlist"); request.setRequestHeader("Content-Type", "application/json"); request.onload = function(){ console.log("sending"); }; request.send(jsonString); }.bind(this)) adventureRouteWrapper.appendChild(showRoute) adventureRouteWrapper.appendChild(saveAdventure) scrollableContainer.appendChild(wrapper) wrapper.appendChild(name) wrapper.appendChild(description) wrapper.appendChild(routeModeRating) wrapper.appendChild(review) 
wrapper.appendChild(adventureRouteWrapper) }.bind(this)) }, renderWishlist: function(adventures,deleteCallback ,callback){ this.clearNodes(); var title = document.createElement('div') title.innerHTML = "my Adventures"; title.className = "adventure-list-title"; this.listElement.appendChild(title) var scrollableContainer = document.createElement('div') scrollableContainer.className = "adventure-scrollable-container"; this.listElement.appendChild(scrollableContainer) ///////////////////////////////////////////////////////////////////////// adventures.forEach(function(adventure){ var wrapper = document.createElement('div') wrapper.className = "adventure-wrapper"; var name = document.createElement('div') name.innerHTML = adventure.name; name.className = "adventure-name"; var description = document.createElement('div') description.innerHTML = "Description : " + adventure.description; description.className = "adventure-description"; var routeModeRating = document.createElement('div') routeModeRating.innerHTML = "Route Mode : " + adventure.mode + ". 
Rating : " + adventure.rating; routeModeRating.className = "adventure-mode-rating"; var review = document.createElement('div') review.innerHTML = adventure.review; review.className = "adventure-review"; var adventureRouteWrapper = document.createElement('div') adventureRouteWrapper.className = "adventure-route-wrapper"; var showRoute = document.createElement('div') showRoute.innerHTML = "show route"; showRoute.className = "adventure-route-action"; showRoute.style = "cursor: pointer" showRoute.addEventListener('click', function(){ callback(adventure) }.bind(this)) var editAdventure = document.createElement('div') editAdventure.innerHTML = "edit route"; editAdventure.className = "adventure-route-action"; editAdventure.style = "cursor: pointer" editAdventure.addEventListener('click', function(event){ this.renderEditView(adventure); }.bind(this)) //// var removeAdventure = document.createElement('div') removeAdventure.innerHTML = "remove route"; removeAdventure.className = "adventure-route-action"; removeAdventure.style = "cursor: pointer" removeAdventure.addEventListener('click', function(){ deleteCallback(adventure) // var xmlHttp = new XMLHttpRequest(); // xmlHttp.open( "DELETE", 'http://localhost:3000/api/wishlist/'+ adventure._id, false ); // xmlHttp.send( null ); console.log("remove adventure from wishlist db") }.bind(this)) adventureRouteWrapper.appendChild(editAdventure) adventureRouteWrapper.appendChild(showRoute) adventureRouteWrapper.appendChild(removeAdventure) scrollableContainer.appendChild(wrapper) wrapper.appendChild(name) wrapper.appendChild(description) wrapper.appendChild(routeModeRating) wrapper.appendChild(review) wrapper.appendChild(adventureRouteWrapper) }.bind(this)) /////////////////////////////foreach ends///////////////////////////////////// }, clearNodes: function(){ while (this.listElement.firstChild){ this.listElement.removeChild(this.listElement.firstChild) } }, renderEditView: function(adventure){ this.clearNodes() var title = 
document.createElement('div') title.innerHTML = "Edit adventure"; title.className = "adventure-list-title"; this.listElement.appendChild(title) var scrollableContainer = document.createElement('div') scrollableContainer.className = "adventure-scrollable-container"; this.listElement.appendChild(scrollableContainer) var editWrapper = document.createElement('form') editWrapper.label = "" editWrapper.setAttribute('method','PUT') editWrapper.setAttribute('action','submit') editWrapper.className = "adventure-wrapper"; var editNameWrapper = document.createElement('div') editNameWrapper.className = "edit-item-wrapper" var editNameLabel = document.createElement('label'); editNameLabel.innerText = "Name: " editNameLabel.className = "edit-label" editNameLabel.htmlFor = 'edit-adventure-name' var editName = document.createElement('input') editName.label = "Adventure Name : " editName.value = adventure.name editName.type = "text"; editName.name = 'name' editName.className = "edit-input"; editNameWrapper.appendChild(editNameLabel); editNameWrapper.appendChild(editName); var editDescriptionWrapper = document.createElement('div') editDescriptionWrapper.className = "edit-item-wrapper" var editDescription = document.createElement('input') editDescription.value = adventure.description; editDescription.type = "text" editDescription.name = "description" editDescription.className = "edit-input"; var editDescriptionLabel = document.createElement('label'); editDescriptionLabel.innerText = "Discription: " editDescriptionLabel.className = "edit-label" editDescriptionLabel.htmlFor = "edit-adventure-description" editDescriptionWrapper.appendChild(editDescriptionLabel); editDescriptionWrapper.appendChild(editDescription); var editRouteRatingWrapper = document.createElement('div') editRouteRatingWrapper.className = 'edit-item-wrapper' var editRouteRatingLabel = document.createElement('label') editRouteRatingLabel.innerText = "Rating: " editRouteRatingLabel.className = "edit-label" 
editRouteRatingLabel.htmlFor = "edit-adventure-route-rating" var editRouteRating = document.createElement('input') editRouteRating.type = "number"; editRouteRating.value = adventure.rating editRouteRating.name = "rating" editRouteRating.className = "edit-input"; editRouteRatingWrapper.appendChild(editRouteRatingLabel); editRouteRatingWrapper.appendChild(editRouteRating); var editModeWrapper = document.createElement('div'); editModeWrapper.className = 'edit-item-wrapper' var editModeLabel = document.createElement('label') editModeLabel.className = 'edit-label' editModeLabel.innerText = "Mode: " editModeLabel.htmlFor = "edit-adventure-mode" var editMode = document.createElement('input'); editMode.type = "text"; editMode.value = adventure.mode; editMode.name ="mode" editMode.className = "edit-input"; editModeWrapper.appendChild(editModeLabel); editModeWrapper.appendChild(editMode) var editReviewWrapper = document.createElement('div') editReviewWrapper.className = 'edit-item-wrapper' var editReviewLabel = document.createElement('label') editReviewLabel.className = 'edit-label' editReviewLabel.htmlFor = 'edit-adventure-review' editReviewLabel.innerText = "Review: " var editReview = document.createElement('input') editReview.type = "text"; editReview.value = adventure.review editReview.name = 'review' editReview.className = "edit-input"; editReviewWrapper.appendChild(editReviewLabel); editReviewWrapper.appendChild(editReview); var submitButtonWrapper = document.createElement('div') submitButtonWrapper.className = "edit-item-wrapper" var submit = document.createElement("input"); submit.type = "submit"; submit.value = "Submit"; submit.className ="edit-submit-button" submitButtonWrapper.appendChild(submit) scrollableContainer.appendChild(editWrapper) editWrapper.appendChild(editNameWrapper) editWrapper.appendChild(editDescriptionWrapper) editWrapper.appendChild(editRouteRatingWrapper) editWrapper.appendChild(editModeWrapper) editWrapper.appendChild(editReviewWrapper) 
editWrapper.appendChild(submitButtonWrapper) var editedAdventure = {}; editWrapper.addEventListener("submit", function (event) { event.preventDefault(); var formData = new FormData(editWrapper); editedAdventure.name = formData.get('name') editedAdventure.description =formData.get('description') editedAdventure.rating =formData.get('rating') editedAdventure.mode = formData.get('mode') editedAdventure.review = formData.get('review') console.log(editedAdventure) var jsonString = JSON.stringify(editedAdventure) var request = new XMLHttpRequest(); request.open("PUT", "http://localhost:3000/api/wishlist/" + adventure._id); request.onload = function(){ this.renderWishlist(this.adventures) }.bind(this) request.setRequestHeader("Content-Type", "application/json"); request.send(jsonString); }.bind(this)) } } module.exports = ListScrollerView;
86a1c5d6acbd3c029d47955e6e5b946b5c21de10
[ "JavaScript" ]
7
JavaScript
rob-code/JS_Group_Project
0ee1c1b5ad4f7138939616eca4b436f86f2ebf6a
d5a9bae6c3e456f14e216fb989af19ddb186b66a
refs/heads/master
<file_sep>/* * 初始化上传插件 */ function initUpload(){ $("#uploadify").uploadify({ debug: false, swf: 'js/uploadify.swf', //swf文件路径 method: 'post', // 提交方式 uploader: 'say/send', // 服务器端处理该上传请求的程序(servlet, struts2-Action) preventCaching: true, // 加随机数到URL后,防止缓存 buttonCursor: 'hand', // 上传按钮Hover时的鼠标形状 buttonText: '上传图片', //按钮上显示的文字,默认”SELECTFILES” height: 30, // 30 px width: 120, // 120 px fileObjName: 'filedata', //文件对象名称, 即属性名 fileSizeLimit: 1024*1024*6, // 文件大小限制, 100 KB fileTypeDesc: 'any', //文件类型说明 any(*.*) fileTypeExts: '*.jpg;*.png;*.zip', // 允许的文件类型,分号分隔 formData: { 'id': '1', 'name': 'myFile' }, //指定上传文件附带的其他数据。也动态设置。可通过getParameter()获取 multi: true, // 多文件上传 progressData: 'speed', // 进度显示, speed-上传速度,percentage-百分比 queueID: 'fileQueue', //上传队列的DOM元素的ID号 queueSizeLimit: 99, // 队列长度 removeCompleted: false, // 上传完成后是否删除队列中的对应元素 removeTimeout: 2, //上传完成后多少秒后删除队列中的进度条, requeueErrors: true, // 上传失败后重新加入队列 uploadLimit: 20, // 最多上传文件数量 successTimeout: 30, //表示文件上传完成后等待服务器响应的时间。超过该时间,那么将认为上传成功。 // 打开文件对话框 关闭时触发 onDialogClose: function(queueData) { }, // 选择文件对话框打开时触发 onDialogOpen: function() { /*alert( 'please select files' ) */ }, // 没有兼容的FLASH时触发 onFallback: function() { alert('Flash was not detected!') }, onUploadError: function(file, errorCode, errorMsg, errorString) { alert( file.name + ' upload failed! 
' + 'errorCode: ' + errorCode + 'errorMsg:' + errorMsg + 'errorString:' + errorString ); }, // 在每一个文件上传成功后触发 onUploadSuccess: function(file, data, response) { //上传成功后,把上传队列的第一个删除 // $('#uploadify').uploadify('cancel','*'); } }); } <file_sep>package com.jxufe.entity; import java.io.Serializable; import java.util.Date; public class LogComment implements Serializable { private Integer logId; private String logCommentContent; private Date createTime; private String modifyTime; public Integer getLogId() { return logId; } public void setLogId(Integer logId) { this.logId = logId; } public String getLogCommentContent() { return logCommentContent; } public void setLogCommentContent(String logCommentContent) { this.logCommentContent = logCommentContent; } public Date getCreateTime() { return createTime; } public void setCreateTime(Date createTime) { this.createTime = createTime; } public String getModifyTime() { return modifyTime; } public void setModifyTime(String modifyTime) { this.modifyTime = modifyTime; } }<file_sep>package com.jxufe.entity; import java.io.Serializable; import java.util.Date; public class CommentReply implements Serializable { private Integer replayId; private Integer commentId; private String replyContent; private Date createTime; private Date modifyTime; public Integer getReplayId() { return replayId; } public void setReplayId(Integer replayId) { this.replayId = replayId; } public Integer getCommentId() { return commentId; } public void setCommentId(Integer commentId) { this.commentId = commentId; } public String getReplyContent() { return replyContent; } public void setReplyContent(String replyContent) { this.replyContent = replyContent; } public Date getCreateTime() { return createTime; } public void setCreateTime(Date createTime) { this.createTime = createTime; } public Date getModifyTime() { return modifyTime; } public void setModifyTime(Date modifyTime) { this.modifyTime = modifyTime; } }<file_sep>package com.jxufe.entity; import 
java.io.Serializable; import java.util.Date; public class LeaveMsg implements Serializable { private Integer userId; private Integer leaveId; private String leaveContent; private Date createTime; private Date modifyTime; public Integer getUserId() { return userId; } public void setUserId(Integer userId) { this.userId = userId; } public Integer getLeaveId() { return leaveId; } public void setLeaveId(Integer leaveId) { this.leaveId = leaveId; } public String getLeaveContent() { return leaveContent; } public void setLeaveContent(String leaveContent) { this.leaveContent = leaveContent; } public Date getCreateTime() { return createTime; } public void setCreateTime(Date createTime) { this.createTime = createTime; } public Date getModifyTime() { return modifyTime; } public void setModifyTime(Date modifyTime) { this.modifyTime = modifyTime; } }<file_sep>package com.jxufe.entity; import java.io.Serializable; import java.util.Date; public class Report implements Serializable { private Integer userId; private Integer objectUserId; private Integer objectDetailId; private String reportContent; private Date createTime; private Date modifyTime; private Integer objectUsreId; private Integer objectUserDetail; public Integer getUserId() { return userId; } public void setUserId(Integer userId) { this.userId = userId; } public Integer getObjectUserId() { return objectUserId; } public void setObjectUserId(Integer objectUserId) { this.objectUserId = objectUserId; } public Integer getObjectDetailId() { return objectDetailId; } public void setObjectDetailId(Integer objectDetailId) { this.objectDetailId = objectDetailId; } public String getReportContent() { return reportContent; } public void setReportContent(String reportContent) { this.reportContent = reportContent; } public Date getCreateTime() { return createTime; } public void setCreateTime(Date createTime) { this.createTime = createTime; } public Date getModifyTime() { return modifyTime; } public void setModifyTime(Date modifyTime) { 
this.modifyTime = modifyTime; } public Integer getObjectUsreId() { return objectUsreId; } public void setObjectUsreId(Integer objectUsreId) { this.objectUsreId = objectUsreId; } public Integer getObjectUserDetail() { return objectUserDetail; } public void setObjectUserDetail(Integer objectUserDetail) { this.objectUserDetail = objectUserDetail; } }<file_sep>package com.jxufe.consumer; import com.alibaba.dubbo.rpc.RpcException; import com.alibaba.fastjson.JSON; import com.jxufe.service.bhind.YooAiBhindService; import com.jxufe.service.front.YooAiFrontService; import org.junit.Test; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; /** * Created by liuburu on 2017/4/29. */ public class RunCosumer { @Test public void cosumerTest(){ try { String configLocation= "spring/dubbo-consumer.xml"; ApplicationContext context =new ClassPathXmlApplicationContext(configLocation); YooAiFrontService yooAiFrontService = (YooAiFrontService) context.getBean("yooaiFrontService"); YooAiBhindService yooAiBhindService = (YooAiBhindService) context.getBean("yooaiBhindService"); System.out.println("**************"); System.out.println(JSON.toJSONString(yooAiFrontService.queryUsers())); System.out.println(JSON.toJSONString(yooAiBhindService.queryUser(10086))); System.out.println("**************"); String[] names=context.getBeanDefinitionNames(); System.out.println("容器Beans:"); for(String string : names) { System.out.println(string); System.out.print(","); } } catch (RpcException e) { e.printStackTrace(); } } } <file_sep>/*==============================================================*/ /* DBMS name: MySQL 5.0 */ /* Created on: 2017/5/10 17:41:27 */ /*==============================================================*/ drop table if exists admin; drop table if exists admin_role; drop table if exists advise; drop table if exists agree; drop table if exists collection; drop table if exists comment_reply; drop table if 
exists friend; drop table if exists friend_request; drop table if exists function; drop table if exists leavemsg; drop table if exists log; drop table if exists log_comment; drop table if exists log_type; drop table if exists report; drop table if exists role; drop table if exists role_function; drop table if exists say; drop table if exists say_comment; drop table if exists say_picture; drop table if exists say_picture_type; drop table if exists say_say_type; drop table if exists say_type; drop table if exists sys_info; drop table if exists user; /*==============================================================*/ /* Table: admin */ /*==============================================================*/ create table admin ( admin_id int not null, admin_name varchar(50) not null, admin_password char(50) not null, admin_head_url varchar(200), create_time datetime, modify_time datetime, last_login_time datetime, primary key (admin_id) ); /*==============================================================*/ /* Table: admin_role */ /*==============================================================*/ create table admin_role ( role_id int not null, admin_id int not null, create_time datetime, modify_time datetime, primary key (role_id, admin_id) ); /*==============================================================*/ /* Table: advise */ /*==============================================================*/ create table advise ( user_id int not null, advise_content varchar(1000) not null, create_time datetime, modify_time datetime, primary key (user_id) ); /*==============================================================*/ /* Table: agree */ /*==============================================================*/ create table agree ( user_id int, say_id int, create_time datetime, modify_time datetime ); /*==============================================================*/ /* Table: collection */ /*==============================================================*/ create table collection ( user_id int, 
object_id int not null, create_time datetime, modify_time datetime ); /*==============================================================*/ /* Table: comment_reply */ /*==============================================================*/ create table comment_reply ( replay_id int not null, comment_id int, reply_content varchar(200), create_time datetime, modify_time datetime, primary key (replay_id) ); /*==============================================================*/ /* Table: friend */ /*==============================================================*/ create table friend ( friend_id int not null, user_id int not null, create_time datetime, modify_time datetime, primary key (friend_id) ); /*==============================================================*/ /* Table: friend_request */ /*==============================================================*/ create table friend_request ( user_id int not null, stranger_id int not null, request_status int, create_time datetime, update_time datetime, primary key (user_id, stranger_id) ); /*==============================================================*/ /* Table: function */ /*==============================================================*/ create table function ( function_id int not null, function_name char(50), function_url varchar(200), create_time datetime, modify_time datetime, primary key (function_id) ); /*==============================================================*/ /* Table: leavemsg */ /*==============================================================*/ create table leavemsg ( user_id int, leave_id int not null, leave_content varchar(500), create_time datetime, modify_time datetime ); /*==============================================================*/ /* Table: log */ /*==============================================================*/ create table log ( log_id int not null, user_id int, ltype_id int, log_titile varchar(100), log_conent text, create_time datetime, modify_time datetime, primary key (log_id) ); 
/*==============================================================*/ /* Table: log_comment */ /*==============================================================*/ create table log_comment ( log_id int, log_comment_content varchar(500), create_time datetime, modify_time char(10) ); /*==============================================================*/ /* Table: log_type */ /*==============================================================*/ create table log_type ( ltype_id int not null, ltype_name varchar(50) not null, create_time datetime, modify_time datetime, primary key (ltype_id) ); /*==============================================================*/ /* Table: report */ /*==============================================================*/ create table report ( user_id int not null, object_user_id int, object_detail_id int, report_content varchar(1000), create_time datetime, modify_time datetime, object_usre_id int, object_user_detail int ); /*==============================================================*/ /* Table: role */ /*==============================================================*/ create table role ( role_id int not null, role_name char(50), create_time datetime, modify_time datetime, primary key (role_id) ); /*==============================================================*/ /* Table: role_function */ /*==============================================================*/ create table role_function ( role_id int not null, function_id int not null, create_time datetime, modify_time datetime, primary key (role_id, function_id) ); /*==============================================================*/ /* Table: say */ /*==============================================================*/ create table say ( say_id int not null, user_id int, say_content varchar(1000), say_views int, say_lovers int, say_state int, is_top int, reported_count int, say_video varchar(200), say_music varchar(200), create_time datetime, modify_time datetime, primary key (say_id) ); 
/*==============================================================*/ /* Table: say_comment */ /*==============================================================*/ create table say_comment ( commenter_id int not null, comment_id int not null, say_id int, comment_content varchar(200), create_time datetime, modify_time datetime, primary key (comment_id) ); /*==============================================================*/ /* Table: say_picture */ /*==============================================================*/ create table say_picture ( picture_id int not null, say_id int, ptype_id int, picture_url varchar(100), create_time datetime, modify_time datetime, primary key (picture_id) ); /*==============================================================*/ /* Table: say_picture_type */ /*==============================================================*/ create table say_picture_type ( ptype_id int not null, ptype_name varchar(50), create_time datetime, modify_time datetime, primary key (ptype_id) ); /*==============================================================*/ /* Table: say_say_type */ /*==============================================================*/ create table say_say_type ( stype_id int not null, say_id int not null, create_time datetime, mofify_time datetime, primary key (stype_id, say_id) ); /*==============================================================*/ /* Table: say_type */ /*==============================================================*/ create table say_type ( stype_id int not null, stype_name varchar(50), create_time datetime, modify_time datetime, primary key (stype_id) ); /*==============================================================*/ /* Table: sys_info */ /*==============================================================*/ create table sys_info ( sys_info_id int not null, sys_info_title char(10), sys_info_content varchar(500), create_time datetime, modify_time datetime, primary key (sys_info_id) ); 
/*==============================================================*/ /* Table: user */ /*==============================================================*/ create table user ( user_id int not null, user_name varchar(50), user_password varchar(50), signature varchar(200), email varchar(50), sex int, birthday date, emotion_state int, now_place varchar(50), hobby varchar(50), head_url varchar(100), level int, email_enable int, create_time datetime, modify_time datetime, primary key (user_id) ); alter table admin_role add constraint FK_admin_role foreign key (admin_id) references admin (admin_id) on delete restrict on update restrict; alter table admin_role add constraint FK_admin_role2 foreign key (role_id) references role (role_id) on delete restrict on update restrict; alter table advise add constraint FK_建议 foreign key (user_id) references user (user_id) on delete restrict on update restrict; alter table agree add constraint FK_说说点赞 foreign key (user_id) references user (user_id) on delete restrict on update restrict; alter table collection add constraint FK_收藏 foreign key (user_id) references user (user_id) on delete restrict on update restrict; alter table comment_reply add constraint FK_回复 foreign key (comment_id) references say_comment (comment_id) on delete restrict on update restrict; alter table friend add constraint FK_好友 foreign key (user_id) references user (user_id) on delete restrict on update restrict; alter table friend_request add constraint FK_好友申请 foreign key (user_id) references user (user_id) on delete restrict on update restrict; alter table leavemsg add constraint FK_留言 foreign key (user_id) references user (user_id) on delete restrict on update restrict; alter table log add constraint FK_Relationship_9 foreign key (ltype_id) references log_type (ltype_id) on delete restrict on update restrict; alter table log add constraint FK_发表日志 foreign key (user_id) references user (user_id) on delete restrict on update restrict; alter table log_comment add 
constraint FK_日志评论 foreign key (log_id) references log (log_id) on delete restrict on update restrict; alter table report add constraint FK_举报 foreign key (user_id) references user (user_id) on delete restrict on update restrict; alter table role_function add constraint FK_role_function foreign key (function_id) references function (function_id) on delete restrict on update restrict; alter table role_function add constraint FK_role_function2 foreign key (role_id) references role (role_id) on delete restrict on update restrict; alter table say add constraint FK_发表说说 foreign key (user_id) references user (user_id) on delete restrict on update restrict; alter table say_comment add constraint FK_说说评论 foreign key (say_id) references say (say_id) on delete restrict on update restrict; alter table say_picture add constraint FK_含有 foreign key (say_id) references say (say_id) on delete restrict on update restrict; alter table say_picture add constraint FK_属于 foreign key (ptype_id) references say_picture_type (ptype_id) on delete restrict on update restrict; alter table say_say_type add constraint FK_say_say_type foreign key (say_id) references say (say_id) on delete restrict on update restrict; alter table say_say_type add constraint FK_say_say_type2 foreign key (stype_id) references say_type (stype_id) on delete restrict on update restrict; <file_sep>package com.jxufe.entity; import java.io.Serializable; import java.util.Date; public class Agree implements Serializable { private Integer userId; private Integer sayId; private Date createTime; private Date modifyTime; public Integer getUserId() { return userId; } public void setUserId(Integer userId) { this.userId = userId; } public Integer getSayId() { return sayId; } public void setSayId(Integer sayId) { this.sayId = sayId; } public Date getCreateTime() { return createTime; } public void setCreateTime(Date createTime) { this.createTime = createTime; } public Date getModifyTime() { return modifyTime; } public void 
setModifyTime(Date modifyTime) { this.modifyTime = modifyTime; } }<file_sep><?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> <parent> <artifactId>yooaispace</artifactId> <groupId>com.jxufe.yooai</groupId> <version>1.0-SNAPSHOT</version> </parent> <modelVersion>4.0.0</modelVersion> <packaging>war</packaging> <name>yooai-service-web</name> <artifactId>yooai-service-web</artifactId> <dependencies> <!--依赖服务模块yooai-service-front和yooai-service-bhind和yooai-spring-parent--> <dependency> <groupId>com.jxufe.yooai</groupId> <artifactId>yooai-service-front</artifactId> <version>1.0-SNAPSHOT</version> </dependency> <!--不用去配置依赖后台的服务,前台就只能调用前台提供的服务--> <dependency> <groupId>com.jxufe.yooai</groupId> <artifactId>yooai-service-bhind</artifactId> <version>1.0-SNAPSHOT</version> </dependency> <dependency> <groupId>com.jxufe.yooai</groupId> <artifactId>yooai-spring-parent</artifactId> <version>1.0-SNAPSHOT</version> </dependency> <!--SpringMVC相关依赖--> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-web</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-webmvc</artifactId> <version>${spring.version}</version> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> <version>${jackson.version}</version> </dependency> <dependency> <groupId>com.jxufe.yooai</groupId> <artifactId>yooai-service-bhind</artifactId> <version>1.0-SNAPSHOT</version> </dependency> </dependencies> <build> <plugins> <plugin> <groupId>org.mortbay.jetty</groupId> <artifactId>maven-jetty-plugin</artifactId> <version>6.1.7</version> <configuration> <connectors> <connector implementation="org.mortbay.jetty.nio.SelectChannelConnector"> <port>8888</port> 
<maxIdleTime>30000</maxIdleTime> </connector> </connectors> <webAppSourceDirectory>${project.build.directory}/${pom.artifactId}-${pom.version} </webAppSourceDirectory> <contextPath>/</contextPath> </configuration> </plugin> </plugins> </build> </project> <file_sep>package com.jxufe.dao; import com.jxufe.entity.Reply; public interface ReplyMapper { int deleteByPrimaryKey(Integer replayId); int insert(Reply record); int insertSelective(Reply record); Reply selectByPrimaryKey(Integer replayId); int updateByPrimaryKeySelective(Reply record); int updateByPrimaryKey(Reply record); }<file_sep><?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>com.jxufe.yooai</groupId> <artifactId>yooaispace</artifactId> <packaging>pom</packaging> <version>1.0-SNAPSHOT</version> <properties> <!--Maven插件版本--> <jdk.version>1.8</jdk.version> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <clean.plugin.verson>3.0.0</clean.plugin.verson> <source.plugin.verson>3.0.1</source.plugin.verson> <compiler.plugin.verson>3.5.1</compiler.plugin.verson> <install.plugin.verson>2.5.2</install.plugin.verson> <war.plugin.version>2.6</war.plugin.version> <jetty.plugin.version>9.2.19.v20160908</jetty.plugin.version> <jetty.plugin.port>80</jetty.plugin.port> <jetty.plugin.contextPath>/</jetty.plugin.contextPath> <!--项目公共依赖(日志,测试,转换,工具类)--> <fastjson.version>1.2.12</fastjson.version> <junit.version>4.12</junit.version> <sl4j.version>1.7.12</sl4j.version> <logback.version>1.1.7</logback.version> <!--持久层Mybatis依赖--> <mysql.version>5.1.39</mysql.version> <druid.version>1.0.18</druid.version> <mybatis.version>3.2.8</mybatis.version> <mybatis.spring.version>1.3.0</mybatis.spring.version> <!--Web相关依赖--> <servlet.version>3.1.0</servlet.version> 
<jstl.version>1.2</jstl.version> <tglib.version>1.1.2</tglib.version> <!--spring依赖和springMVC相关依赖--> <spring.version>4.3.3.RELEASE</spring.version> <jackson.version>2.8.1</jackson.version> <!--缓存相关--> <redis.version>2.7.3</redis.version> <protostuff.version>1.0.8</protostuff.version> </properties> <modules> <module>yooai-common</module> <module>yooai-common-config</module> <module>yooai-common-core</module> <module>yooai-common-web</module> <module>yooai-spring-parent</module> <module>yooai-service-front</module> <module>yooai-service-bhind</module> <module>yooai-service-provider</module> <module>yooai-service-web</module> <module>yooai-data-api</module> <module>yooai-nosql-redis</module> <module>yooai-nosql-mongo</module> <module>yooai-spring-socket</module> <module>yooai-service-admin</module> </modules> <dependencies> <!--#########################全部模块公共依赖试相关Jar包################################--> <!--单元测试依赖--> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>${junit.version}</version> </dependency> <!--fastJson依赖--> <dependency> <groupId>com.alibaba</groupId> <artifactId>fastjson</artifactId> <version>${fastjson.version}</version> </dependency> <!--日志相关依赖--> <!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-api --> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> <version>${sl4j.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId> <version>${logback.version}</version> <exclusions> <exclusion> <artifactId>slf4j-api</artifactId> <groupId>org.slf4j</groupId> </exclusion> </exclusions> </dependency> </dependencies> <build> <finalName>${project.artifactId}</finalName> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> <version>${source.plugin.verson}</version> <configuration> <attach>true</attach> </configuration> <executions> <execution> <phase>compile</phase> <goals> 
<goal>jar-no-fork</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-clean-plugin</artifactId> <version>${clean.plugin.verson}</version> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>${compiler.plugin.verson}</version> <configuration> <source>${jdk.version}</source> <target>${jdk.version}</target> <encoding>${project.build.sourceEncoding}</encoding> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-install-plugin</artifactId> <version>${install.plugin.verson}</version> </plugin> </plugins> </build> </project><file_sep>package com.jxufe.dao; import com.jxufe.entity.User; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DuplicateKeyException; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; /** * Created by liuburu on 2017/4/29. 
*/ @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration({"classpath:spring/spring-*.xml"}) public class UserMapperTest { @Autowired private UserMapper userMapper; @Test public void selectByPrimaryKey() throws Exception { User user = userMapper.selectByPrimaryKey(10086); } @Test public void insertSelective() throws Exception { User user = userMapper.selectByPrimaryKey(10086); user.setUserName("刘卜铷"); user.setEmail("<EMAIL>"); int result = 0; try { result = userMapper.insertSelective(user); } catch (DuplicateKeyException e) { System.out.println("DuplicateKeyException!!"); // e.printStackTrace(); } catch (Exception e) { // e.printStackTrace(); } System.out.println(result); } }<file_sep>package com.jxufe.dao; import com.jxufe.entity.Report; public interface ReportMapper { int insert(Report record); int insertSelective(Report record); }<file_sep>package com.jxufe.service.bhind; import com.jxufe.entity.User; /** * Created by liuburu on 2017/4/29. */ public interface YooAiBhindService { User queryUser(int userid); } <file_sep>package com.jxufe.entity; import java.io.Serializable; import java.util.Date; public class SayPicture implements Serializable { private Integer pictureId; private Integer sayId; private Integer ptypeId; private String pictureUrl; private Date createTime; private Date modifyTime; public Integer getPictureId() { return pictureId; } public void setPictureId(Integer pictureId) { this.pictureId = pictureId; } public Integer getSayId() { return sayId; } public void setSayId(Integer sayId) { this.sayId = sayId; } public Integer getPtypeId() { return ptypeId; } public void setPtypeId(Integer ptypeId) { this.ptypeId = ptypeId; } public String getPictureUrl() { return pictureUrl; } public void setPictureUrl(String pictureUrl) { this.pictureUrl = pictureUrl; } public Date getCreateTime() { return createTime; } public void setCreateTime(Date createTime) { this.createTime = createTime; } public Date getModifyTime() { return modifyTime; } public void 
setModifyTime(Date modifyTime) { this.modifyTime = modifyTime; } }<file_sep>/*==============================================================*/ /* DBMS name: MySQL 5.0 */ /* Created on: 2017/4/27 12:48:08 */ /*==============================================================*/ drop table if exists Say; drop table if exists User; drop table if exists advise; drop table if exists agree; drop table if exists collection; drop table if exists comment; drop table if exists friend; drop table if exists friend_request; drop table if exists leavemsg; drop table if exists log; drop table if exists picture; drop table if exists reply; drop table if exists report; drop table if exists say_type; drop table if exists type; /*==============================================================*/ /* Table: Say */ /*==============================================================*/ create table Say ( say_id int not null, user_id int, say_content varchar(1000), say_views int, say_lovers int, say_state int, is_top int, primary key (say_id) ); /*==============================================================*/ /* Table: User */ /*==============================================================*/ create table User ( user_id int not null, user_name varchar(50), user_password varchar(50), signature varchar(200), email varchar(50), sex int, birthday date, emotion_state int, now_place varchar(50), hobby varchar(50), head_url varchar(100), level int, email_enable varchar(50), create_time datetime, modify_time datetime, primary key (user_id) ); /*==============================================================*/ /* Table: advise */ /*==============================================================*/ create table advise ( user_id int not null, advise_content varchar(1000) not null, create_time datetime, modify_time datetime, primary key (user_id) ); /*==============================================================*/ /* Table: agree */ /*==============================================================*/ create 
table agree ( user_id int not null, say_id int not null, create_time datetime, modify_time datetime, primary key (user_id, say_id) ); /*==============================================================*/ /* Table: collection */ /*==============================================================*/ create table collection ( user_id int not null, object_id int, create_time datetime, modify_time datetime, primary key (user_id) ); /*==============================================================*/ /* Table: comment */ /*==============================================================*/ create table comment ( comment_id int not null, say_id int, comment_content varchar(200), create_time datetime, modify_time datetime, primary key (comment_id) ); /*==============================================================*/ /* Table: friend */ /*==============================================================*/ create table friend ( user_id int not null, friend_id int not null, create_time datetime, modify_time datetime, primary key (user_id, friend_id) ); /*==============================================================*/ /* Table: friend_request */ /*==============================================================*/ create table friend_request ( user_id int not null, stranger_id int not null, request_status char(10), create_time datetime, update_time datetime, primary key (user_id, stranger_id) ); /*==============================================================*/ /* Table: leavemsg */ /*==============================================================*/ create table leavemsg ( user_id int not null, leave_id int, leave_content varchar(500), create_time datetime, modify_time datetime, primary key (user_id) ); /*==============================================================*/ /* Table: log */ /*==============================================================*/ create table log ( log_id int not null, user_id int not null, log_titile varchar(100), log_conent text, create_time datetime, modify_time 
datetime, primary key (log_id) ); /*==============================================================*/ /* Table: picture */ /*==============================================================*/ create table picture ( picture_id int not null, say_id int, picture_url varchar(100), create_time datetime, modify_time datetime, primary key (picture_id) ); /*==============================================================*/ /* Table: reply */ /*==============================================================*/ create table reply ( replay_id int not null, comment_id int, reply_content varchar(200), create_time datetime, modify_time datetime, primary key (replay_id) ); /*==============================================================*/ /* Table: report */ /*==============================================================*/ create table report ( user_id int, report_content varchar(1000), create_time datetime, modify_time datetime ); /*==============================================================*/ /* Table: say_type */ /*==============================================================*/ create table say_type ( say_id int not null, type_id int not null, primary key (say_id, type_id) ); /*==============================================================*/ /* Table: type */ /*==============================================================*/ create table type ( type_id int not null, type_name varchar(50), primary key (type_id) ); alter table Say add constraint FK_user_say foreign key (user_id) references User (user_id) on delete restrict on update restrict; alter table advise add constraint FK_user_advise foreign key (user_id) references User (user_id) on delete restrict on update restrict; alter table agree add constraint FK_say_agree foreign key (say_id) references Say (say_id) on delete restrict on update restrict; alter table agree add constraint FK_user_agree foreign key (user_id) references User (user_id) on delete restrict on update restrict; alter table collection add constraint 
FK_user_collection foreign key (user_id) references User (user_id) on delete restrict on update restrict; alter table comment add constraint FK_say_comment foreign key (say_id) references Say (say_id) on delete restrict on update restrict; alter table friend add constraint FK_user_friend foreign key (user_id) references User (user_id) on delete restrict on update restrict; alter table friend_request add constraint FK_user_reqeust foreign key (user_id) references User (user_id) on delete restrict on update restrict; alter table leavemsg add constraint FK_user_leavemsg foreign key (user_id) references User (user_id) on delete restrict on update restrict; alter table log add constraint FK_user_log foreign key (user_id) references User (user_id) on delete restrict on update restrict; alter table picture add constraint FK_say_picture foreign key (say_id) references Say (say_id) on delete restrict on update restrict; alter table reply add constraint FK_comment_reply foreign key (comment_id) references comment (comment_id) on delete restrict on update restrict; alter table report add constraint FK_user_report foreign key (user_id) references User (user_id) on delete restrict on update restrict; alter table say_type add constraint FK_say_saytype foreign key (say_id) references Say (say_id) on delete restrict on update restrict; alter table say_type add constraint FK_type_saytype foreign key (type_id) references type (type_id) on delete restrict on update restrict; <file_sep>/*==============================================================*/ /* DBMS name: MySQL 5.0 */ /* Created on: 2017/5/7 10:58:46 */ /*==============================================================*/ drop table if exists Association_1; drop table if exists Entity_1; drop table if exists Entity_2; /*==============================================================*/ /* Table: Association_1 */ /*==============================================================*/ create table Association_1 ( Attribute_1 char(10) 
not null, Attribute_4 char(10) not null, Attribute_7 char(10), Attribute_8 char(10), primary key (Attribute_1, Attribute_4) ); /*==============================================================*/ /* Table: Entity_1 */ /*==============================================================*/ create table Entity_1 ( Attribute_1 char(10) not null, Attribute_2 char(10), Attribute_3 char(10), primary key (Attribute_1) ); /*==============================================================*/ /* Table: Entity_2 */ /*==============================================================*/ create table Entity_2 ( Attribute_4 char(10) not null, Attribute_5 char(10), Attribute_6 char(10), primary key (Attribute_4) ); alter table Association_1 add constraint FK_Association_1 foreign key (Attribute_4) references Entity_2 (Attribute_4) on delete restrict on update restrict; alter table Association_1 add constraint FK_Association_2 foreign key (Attribute_1) references Entity_1 (Attribute_1) on delete restrict on update restrict; <file_sep>package com.jxufe.dao; import com.jxufe.entity.SayPictureType; public interface SayPictureTypeMapper { int deleteByPrimaryKey(Integer ptypeId); int insert(SayPictureType record); int insertSelective(SayPictureType record); SayPictureType selectByPrimaryKey(Integer ptypeId); int updateByPrimaryKeySelective(SayPictureType record); int updateByPrimaryKey(SayPictureType record); }<file_sep>package com.jxufe.service.front.iml; import com.jxufe.dao.UserMapper; import com.jxufe.entity.User; import com.jxufe.service.front.YooAiFrontService; import org.springframework.beans.factory.annotation.Autowired; import java.util.List; /** * Created by liuburu on 2017/4/29. 
*/ public class YooAiFrontServiceIml implements YooAiFrontService { @Autowired private UserMapper userMapper; @Override public List<User> queryUsers() { return userMapper.selectAll(); } } <file_sep># yooai 友爱空间毕业设计 <file_sep>package com.jxufe.bhind.control; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; /** * Created by liuburu on 2017/5/9. */ @Controller public class AdminPageControl { /** * 测试SpringMVC页面 * @return */ @RequestMapping("success") public String toSuccessPage(){ return "success"; } /** * 管理员首页 * @return */ @RequestMapping("success") public String toAdminIndexPage(){ return "success"; } } <file_sep>package com.jxufe.entity; import java.io.Serializable; import java.util.Date; public class Advise implements Serializable { private Integer userId; private String adviseContent; private Date createTime; private Date modifyTime; public Integer getUserId() { return userId; } public void setUserId(Integer userId) { this.userId = userId; } public String getAdviseContent() { return adviseContent; } public void setAdviseContent(String adviseContent) { this.adviseContent = adviseContent; } public Date getCreateTime() { return createTime; } public void setCreateTime(Date createTime) { this.createTime = createTime; } public Date getModifyTime() { return modifyTime; } public void setModifyTime(Date modifyTime) { this.modifyTime = modifyTime; } }<file_sep>package com.jxufe.entity; import java.io.Serializable; import java.util.Date; public class FriendRequest implements Serializable { private Integer userId; private Integer strangerId; private Integer requestStatus; private Date createTime; private Date updateTime; public Integer getUserId() { return userId; } public void setUserId(Integer userId) { this.userId = userId; } public Integer getStrangerId() { return strangerId; } public void setStrangerId(Integer strangerId) { this.strangerId = strangerId; } public Integer getRequestStatus() { return 
requestStatus; } public void setRequestStatus(Integer requestStatus) { this.requestStatus = requestStatus; } public Date getCreateTime() { return createTime; } public void setCreateTime(Date createTime) { this.createTime = createTime; } public Date getUpdateTime() { return updateTime; } public void setUpdateTime(Date updateTime) { this.updateTime = updateTime; } }<file_sep>package com.jxufe.entity; import java.io.Serializable; import java.util.Date; public class Friend implements Serializable { private Integer friendId; private Integer userId; private Date createTime; private Date modifyTime; public Integer getFriendId() { return friendId; } public void setFriendId(Integer friendId) { this.friendId = friendId; } public Integer getUserId() { return userId; } public void setUserId(Integer userId) { this.userId = userId; } public Date getCreateTime() { return createTime; } public void setCreateTime(Date createTime) { this.createTime = createTime; } public Date getModifyTime() { return modifyTime; } public void setModifyTime(Date modifyTime) { this.modifyTime = modifyTime; } }<file_sep>/*==============================================================*/ /* DBMS name: MySQL 5.0 */ /* Created on: 2017/5/7 11:13:19 */ /*==============================================================*/ drop table if exists course; drop table if exists score; drop table if exists student; /*==============================================================*/ /* Table: course */ /*==============================================================*/ create table course ( courseid int not null, cname varchar(30), credit float(3), primary key (courseid) ); /*==============================================================*/ /* Table: score */ /*==============================================================*/ create table score ( studentid int not null, courseid int not null, score float(4), primary key (studentid, courseid) ); /*==============================================================*/ /* Table: 
student */ /*==============================================================*/ create table student ( studentid int not null, sname varchar(30), age int, primary key (studentid) ); alter table score add constraint FK_学生成绩 foreign key (studentid) references student (studentid) on delete restrict on update restrict; alter table score add constraint FK_课程成绩 foreign key (courseid) references course (courseid) on delete restrict on update restrict; <file_sep>package com.jxufe.entity; import java.io.Serializable; import java.util.Date; public class Say implements Serializable { private Integer sayId; private Integer userId; private String sayContent; private Integer sayViews; private Integer sayLovers; private Integer sayState; private Integer isTop; private Integer reportedCount; private String sayVideo; private String sayMusic; private Date createTime; private Date modifyTime; public Integer getSayId() { return sayId; } public void setSayId(Integer sayId) { this.sayId = sayId; } public Integer getUserId() { return userId; } public void setUserId(Integer userId) { this.userId = userId; } public String getSayContent() { return sayContent; } public void setSayContent(String sayContent) { this.sayContent = sayContent; } public Integer getSayViews() { return sayViews; } public void setSayViews(Integer sayViews) { this.sayViews = sayViews; } public Integer getSayLovers() { return sayLovers; } public void setSayLovers(Integer sayLovers) { this.sayLovers = sayLovers; } public Integer getSayState() { return sayState; } public void setSayState(Integer sayState) { this.sayState = sayState; } public Integer getIsTop() { return isTop; } public void setIsTop(Integer isTop) { this.isTop = isTop; } public Integer getReportedCount() { return reportedCount; } public void setReportedCount(Integer reportedCount) { this.reportedCount = reportedCount; } public String getSayVideo() { return sayVideo; } public void setSayVideo(String sayVideo) { this.sayVideo = sayVideo; } public String 
getSayMusic() { return sayMusic; } public void setSayMusic(String sayMusic) { this.sayMusic = sayMusic; } public Date getCreateTime() { return createTime; } public void setCreateTime(Date createTime) { this.createTime = createTime; } public Date getModifyTime() { return modifyTime; } public void setModifyTime(Date modifyTime) { this.modifyTime = modifyTime; } }<file_sep>package com.jxufe.dao; import com.jxufe.entity.Collection; public interface CollectionMapper { int insert(Collection record); int insertSelective(Collection record); }<file_sep>package com.jxufe.entity; import java.io.Serializable; import java.util.Date; public class Log implements Serializable { private Integer logId; private Integer userId; private Integer ltypeId; private String logTitile; private Date createTime; private Date modifyTime; private String logConent; public Integer getLogId() { return logId; } public void setLogId(Integer logId) { this.logId = logId; } public Integer getUserId() { return userId; } public void setUserId(Integer userId) { this.userId = userId; } public Integer getLtypeId() { return ltypeId; } public void setLtypeId(Integer ltypeId) { this.ltypeId = ltypeId; } public String getLogTitile() { return logTitile; } public void setLogTitile(String logTitile) { this.logTitile = logTitile; } public Date getCreateTime() { return createTime; } public void setCreateTime(Date createTime) { this.createTime = createTime; } public Date getModifyTime() { return modifyTime; } public void setModifyTime(Date modifyTime) { this.modifyTime = modifyTime; } public String getLogConent() { return logConent; } public void setLogConent(String logConent) { this.logConent = logConent; } }<file_sep>package com.jxufe.dao; import com.jxufe.entity.SayType; public interface SayTypeMapper { int deleteByPrimaryKey(Integer stypeId); int insert(SayType record); int insertSelective(SayType record); SayType selectByPrimaryKey(Integer stypeId); int updateByPrimaryKeySelective(SayType record); int 
updateByPrimaryKey(SayType record); }<file_sep>package com.jxufe.entity; import java.io.Serializable; import java.util.Date; public class SayType implements Serializable { private Integer stypeId; private String stypeName; private Date createTime; private Date modifyTime; public Integer getStypeId() { return stypeId; } public void setStypeId(Integer stypeId) { this.stypeId = stypeId; } public String getStypeName() { return stypeName; } public void setStypeName(String stypeName) { this.stypeName = stypeName; } public Date getCreateTime() { return createTime; } public void setCreateTime(Date createTime) { this.createTime = createTime; } public Date getModifyTime() { return modifyTime; } public void setModifyTime(Date modifyTime) { this.modifyTime = modifyTime; } }<file_sep>jdbc.user = root jdbc.password = <PASSWORD> jdbc.driverClass=com.mysql.jdbc.Driver jdbc.url=jdbc:mysql://192.168.56.101:3306/yooaispace?useUnicode=true&characterEncoding=utf8<file_sep>package com.jxufe.dao; import com.jxufe.entity.FriendRequest; import org.apache.ibatis.annotations.Param; public interface FriendRequestMapper { int deleteByPrimaryKey(@Param("userId") Integer userId, @Param("strangerId") Integer strangerId); int insert(FriendRequest record); int insertSelective(FriendRequest record); FriendRequest selectByPrimaryKey(@Param("userId") Integer userId, @Param("strangerId") Integer strangerId); int updateByPrimaryKeySelective(FriendRequest record); int updateByPrimaryKey(FriendRequest record); }
0e0e46b6d112524cc4d9bcc22e0277a2624f9a58
[ "SQL", "JavaScript", "Markdown", "Maven POM", "INI", "Java" ]
32
JavaScript
kakaluote444/yooai
2d5d464397bd011bdf923bb9d47f90d1503f6e46
3dca956efe79f0a2d935c3461d7979bfa18de50d
refs/heads/master
<file_sep>const fetch = require('isomorphic-unfetch') const fetchViaCep = (cep, proxyURL = '') => { const url = `${proxyURL}https://viacep.com.br/ws/${cep}/json/` const options = { method: 'GET', mode: 'cors', headers: { 'content-type': 'application/json;charset=utf-8', }, } return fetch(url, options) .then((res) => res.json()) .then((data) => ({ bairro: data.bairro, cidade: data.localidade, logradouro: data.logradouro, tipo_logradouro: data.logradouro.split(' ')[0], uf: data.uf, servico: 'ViaCep', data })) .catch((e) => e) } module.exports = { fetchViaCep, } <file_sep>const fastify = require('fastify') const { fetchAll } = require('./services/cep/index') const { PROXY } = require('./utils/constants') const app = fastify({ logger: false }) app.post('/', async (request, reply) => { let body = request.body if (typeof request.body === 'string') body = JSON.parse(request.body) const res = await fetchAll(body.cep, body.service, PROXY) console.log(res) return res }) ;(async () => { try { await app.listen(3000) app.log.info(`Server is listening on ${app.server.address().port}`) } catch (e) { app.log.error(e) process.exit(1) } })() <file_sep># Google Cloud functions ## Deploy gcloud functions deploy cep-service --entry-point cep --runtime nodejs10 --trigger-http gcloud functions deploy <other>-service --entry-point <other> --runtime nodejs10 --trigger-http ## Check logs gcloud functions logs read ## Sidenote `server.js` is not used by Google Cloud Functions. This is used for local testing of the service. ## CURL `service` is optional. 
``` curl --location --request POST 'https://<link to cloud>/cep-service' \ --header 'Content-Type: application/json' \ --data-raw '{ "cep": "31210-630", "service": "<service optional>" }' ``` <file_sep>const { Logging } = require('@google-cloud/logging') const { fetchAll } = require('./services/cep/index') const { PROXY } = require('./utils/constants') const logging = new Logging() exports.cep = async (req, res) => { res.set('Access-Control-Allow-Origin', process.env.CORS) res.set('Access-Control-Allow-Headers', '*') let body = req.body if (typeof req.body === 'string') { body = JSON.parse(req.body) } if (req.method !== 'POST') { res.status(500).send({ error: 'Only POST is allowed!' }) return } const data = await fetchAll(body.cep, body.service, PROXY) res.status(200).send(data) } <file_sep>const validateInput = (cep) => { const cepTypeOf = typeof cep if (cepTypeOf === 'number' || cepTypeOf === 'string') { return cep } throw new Error('CEP is an invalid combination.') } const removeSpecialCharacters = (cep) => { return cep.toString().replace(/\D+/g, '') } const validateInputLength = (cep) => { if (cep.length <= 8) { return cep } throw new Error('CEP is an invalid size.') } const validations = (cep) => { return Promise.resolve(cep) .then(validateInput) .then(removeSpecialCharacters) .then(validateInputLength) .catch((e) => e) } module.exports = { validations, } <file_sep>const PROXY = 'https://proxier.now.sh/api?url=' module.exports = { PROXY, } <file_sep>'use strict'; module.exports = { spec: 'src/**/*.spec.js' }; <file_sep>const { fetchViaCep } = require('./viacep') const { fetchWideNet } = require('./widenet') const { fetchPassagensPromo } = require('./passagenspromo') const { validations } = require('../../utils/validations') const fetchAll = async (cep, service = null, proxy) => { try { const validCep = await validations(cep) const services = { passagenspromo: fetchPassagensPromo(cep, proxy), viacep: fetchViaCep(cep, proxy), widenet: fetchWideNet(cep, proxy), 
default: await Promise.race([ fetchViaCep(validCep, proxy), fetchWideNet(cep, proxy), fetchPassagensPromo(cep, proxy), ]), } return await (services[service] || services['default']) } catch (e) { return e } } module.exports = { fetchAll, }
88ba0ba269dcb839dd16a70a7b94ec964ed0719e
[ "JavaScript", "Markdown" ]
8
JavaScript
maarteNNNN/api-services
880f4a0bf359670e8401faa2ffa421d749403a69
8c58ec763cc8dd8b5d97f6b22228f460d0d9e30c
refs/heads/master
<repo_name>dstarner15/go-app<file_sep>/rdpg-app.go package main import ( "database/sql" "fmt" _ "github.com/lib/pq" "log" "net/http" ) const ( DB_USER = "golangrules" DB_PASSWORD = "<PASSWORD>" DB_NAME = "django" DB_HOST = "10.244.2.6" DB_PORT = "7432" DB_TABLE = "names" ) func main() { fmt.Println("Let's try connecting to a database!") // Connect to database dbinfo := fmt.Sprintf("user=%s password=%s dbname=%s host=%s port=%s sslmode=disable", DB_USER, DB_PASSWORD, DB_NAME, DB_HOST, DB_PORT) db, err := sql.Open("postgres", dbinfo) check_error(err) defer db.Close() _, err = db.Exec("CREATE TABLE IF NOT EXISTS " + DB_TABLE + " (ID INT PRIMARY KEY NOT NULL, NAME TEXT NOT NULL)") check_error(err) // Get all rows rows, err := db.Query("SELECT * FROM " + DB_TABLE) if err != nil { fmt.Println(err) } else { var last_id = 0 fmt.Println("id | name") for rows.Next() { var uid int var username string err = rows.Scan(&uid, &username) check_error(err) fmt.Printf("%3v | %8v", uid, username) fmt.Println() last_id = uid // Set the last ID of the database } fmt.Println() var name string fmt.Print("Some input please: ") _, err := fmt.Scanln(&name) check_error(err) fmt.Println() fmt.Println("Adding you to the database!") fmt.Printf("%4v is now at %v", name, last_id+1) _, err = db.Exec("INSERT INTO names VALUES($1, $2)", last_id+1, name) check_error(err) fmt.Println() } } func check_error(err error) { if err != nil { log.Fatal(err) } } <file_sep>/server.go package main import ( "database/sql" "fmt" _ "github.com/lib/pq" "html/template" "log" "net/http" "strings" ) const ( DB_USER = "vcap" DB_PASSWORD = "<PASSWORD>" DB_NAME = "django" DB_HOST = "10.244.2.6" DB_PORT = "7432" DB_TABLE = "names" ) var last_id int func index(w http.ResponseWriter, r *http.Request) { fmt.Println("method:", r.Method) //get request method if r.Method == "GET" { fmt.Println("Let's try connecting to a database!") // Set up template t, _ := template.ParseFiles("index.gtpl") t.Execute(w, nil) } else { 
fmt.Println("Let's try connecting to a database!") // Connect to database dbinfo := fmt.Sprintf("user=%s password=%s dbname=%s host=%s port=%s sslmode=disable", DB_USER, DB_PASSWORD, DB_NAME, DB_HOST, DB_PORT) db, err := sql.Open("postgres", dbinfo) check_error(err) defer db.Close() // Get all rows rows, err := db.Query("SELECT * FROM " + DB_TABLE) // Get IDs for rows.Next() { var uid int var username string err = rows.Scan(&uid, &username) check_error(err) last_id = uid // Set the last ID of the database } _, err = db.Exec("CREATE TABLE IF NOT EXISTS " + DB_TABLE + " (ID INT PRIMARY KEY NOT NULL, NAME TEXT NOT NULL)") check_error(err) // Parse form r.ParseForm() // logic part of log in fmt.Println("username:", r.Form["username"]) fmt.Println("password:", r.Form["password"]) name := strings.Join(r.Form["username"], " ") _, err = db.Exec("INSERT INTO names VALUES($1, $2)", last_id+1, name) check_error(err) http.Redirect(w, r, "/show", 301) } } func showTable(w http.ResponseWriter, r *http.Request) { fmt.Println("Let's try connecting to a database!") // Connect to database dbinfo := fmt.Sprintf("user=%s password=%s dbname=%s host=%s port=%s sslmode=disable", DB_USER, DB_PASSWORD, DB_NAME, DB_HOST, DB_PORT) db, err := sql.Open("postgres", dbinfo) check_error(err) defer db.Close() // Get all rows rows, err := db.Query("SELECT * FROM " + DB_TABLE) // Print out database contents fmt.Fprintf(w, " id | name\n") for rows.Next() { var uid int var username string err = rows.Scan(&uid, &username) check_error(err) writeOut := fmt.Sprintf("%3v | %8v \n", uid, username) fmt.Fprintf(w, writeOut) last_id = uid // Set the last ID of the database } } func check_error(err error) { if err != nil { log.Fatal(err) } } func main() { last_id = 0 http.HandleFunc("/", index) // setting router rule http.HandleFunc("/show", showTable) err := http.ListenAndServe(":9090", nil) // setting listening port if err != nil { log.Fatal("ListenAndServe: ", err) } }
8ed7268bb43ec2940f36efe9065ca34fa25c6445
[ "Go" ]
2
Go
dstarner15/go-app
6e2224f8cae7ca0c53be4dc7190c97087beddd46
3600f9104c7e3f2f557dbc75414141d16bf552c4
refs/heads/master
<repo_name>atiyehmaz/DotNetCoreAngular<file_sep>/DotNetCoreAngular/Controllers/ContactController.cs using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Microsoft.AspNetCore.Mvc; using BusinessLibrary.Repository; using DataAccessLibrary.Models; namespace DotNetCoreAngular.Controllers { [Route("api/[controller]")] public class ContactController : Controller { public IContactRepository ContactRepo; public ContactController(IContactRepository contactRepo) { ContactRepo = contactRepo; } [HttpGet, Produces("application/json")] [Route("GetContacts")] public async Task<IActionResult> GetContacts() { var data = await ContactRepo.GetAllContact(); return Json(new { result = data }); } [HttpPost, Produces("application/json")] [Route("SaveContact")] public async Task<IActionResult> SaveContact([FromBody] Contacts model) { return Json(await ContactRepo.SaveContact(model)); } [HttpPost, Produces("application/x-www-form-urlencoded")] [Route("DeleteContactByID")] public async Task<IActionResult> DeleteContactByID(int id) { return Json(await ContactRepo.DeleteContactByID(id)); } } }<file_sep>/BusinessLibrary/Repository/ContactRepository.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using DataAccessLibrary.Models; using Microsoft.EntityFrameworkCore; namespace BusinessLibrary.Repository { public class ContactRepository: IContactRepository { public async Task<bool> DeleteContactByID(int id) { using (ContactDBContext db = new ContactDBContext()) { Contacts contact = db.Contacts.Where(x => x.ContactId == id).FirstOrDefault(); if (contact != null) { db.Contacts.Remove(contact); } return await db.SaveChangesAsync() >= 1; } } public async Task<List<Contacts>> GetAllContact() { using (ContactDBContext db = new ContactDBContext()) { return await (from a in db.Contacts select new Contacts { ContactId = a.ContactId, FirstName = a.FirstName, LastName = a.LastName, Email = 
a.Email, Phone = a.Phone }).ToListAsync(); } } public async Task<bool> SaveContact(Contacts model) { using (ContactDBContext db = new ContactDBContext()) { Contacts contact = db.Contacts.Where (x => x.ContactId == model.ContactId).FirstOrDefault(); if (contact == null) { contact = new Contacts() { FirstName = model.FirstName, LastName = model.LastName, Email = model.Email, Phone = model.Phone }; db.Contacts.Add(contact); } else { contact.FirstName = model.FirstName; contact.LastName = model.LastName; contact.Email = model.Email; contact.Phone = model.Phone; } return await db.SaveChangesAsync() >= 1; } } } } <file_sep>/BusinessLibrary/Repository/IContactRepository.cs using System; using System.Collections.Generic; using System.Text; using System.Threading.Tasks; using DataAccessLibrary.Models; namespace BusinessLibrary.Repository { public interface IContactRepository { Task<List<Contacts>> GetAllContact(); Task<bool> SaveContact(Contacts model); Task<bool> DeleteContactByID(int id); } }
a8f5b8b3cb4b3e827a5d3ff7b2a1f9d1209e8799
[ "C#" ]
3
C#
atiyehmaz/DotNetCoreAngular
fecc9aca2ec037e7ccb0b67ac439eca51ae1b1e8
1852d5f550ac74403e4bbddb6388c631828b0fee
refs/heads/master
<repo_name>vivekjuneja/covid-contact-tracing<file_sep>/test1.py import datetime import uuid class PersonState: def __init__(self, state, year, month, day): self.status = False self.readAbleState = None self.isStateFrom = datetime.datetime(year, month, day) def __str__(self): return (str(self.readAbleState) + " since " + str(self.isStateFrom)) class PersonHasSymptom(PersonState): def __init__(self, state, year, month, day): print("PersonHasSymptom") super().__init__(state, year, month, day) self.readAbleState="Has Symptoms" class PersonIsAffected(PersonState): def __init__(self, state, year, month, day): print("PersonIsAffected") super().__init__(state, year, month, day) self.readAbleState="Is Affected" class PersonIsNotAffected(PersonState): def __init__(self, state, year, month, day): print("PersonIsNotAffected") super().__init__(state, year, month, day) self.readAbleState="Is Normal" class Person: def __init__(self): self.locationHistory = dict() self.caution = "NORMAL" self.id = uuid.uuid1() status = False todayDate = datetime.date.today() self.state = PersonIsNotAffected(status, todayDate.year, todayDate.month, todayDate.day) def setCarefulCaution(self): self.caution = "CAREFUL" def setIsolateCaution(self): self.caution = "ISOLATE" def hasSymptom(self, year, month, day): status = True self.state = PersonHasSymptom(status, year, month, day) def isAffected(self, year, month, day): status = True self.state = PersonIsAffected(status, year, month, day) def visitsLocation(self, location, year, month, day): #visitEvent = VisitEvent(location, year, month, day) #self.locationHistory.append(visitEvent) date = datetime.date(year, month,day) self.locationHistory[date]=location def getVisits(self): return self.locationHistory def getVisitLocationForDate(self, date): print("getVisitLocationForDate******") print(self.id) print(self.locationHistory) print(date) if date in self.locationHistory: return self.locationHistory[date] else: return None def printVisits(self): for 
visitEvent in self.locationHistory: print("On " + str(visitEvent) + ", visited " + str(self.locationHistory[visitEvent])) def __str__(self): if(self.id!=None): return (str(self.id) + " : " + str(self.state) + ", they should remain " + str(self.caution)) else: return (self.state) class Location: latitudeX = None latitudeY = None longitudeX = None longitudeY = None def __init__(self,latX, latY, longX, longY): self.latitudeX=latX self.longitudeX=longX self.latitudeY=latY self.longitudeY=longY def __str__(self): return str("Lat : " + str(self.latitudeX) + ","+ str(self.latitudeY) + " Long : " + str(self.longitudeX)+ ","+ str(self.longitudeY)) class CrossPath: @staticmethod def checkIfPathCrossed(person0, person1, dateOfEvent): print("checkIfPathCrossed") print(person0.getVisits()) print(person1.getVisits()) #Check if their location histories have a match isPathCrossed = False location0 = person0.getVisitLocationForDate(dateOfEvent) print(str(person0) + " " + str(location0)) location1 = person1.getVisitLocationForDate(dateOfEvent) print(str(person1) + " " + str(location1)) if ((location0==None) and (location1==None)): isPathCrossed = False elif(location0==location1): isPathCrossed = True return isPathCrossed class CoronaTracker: listOfPeople = [] @staticmethod def addPeople(person): CoronaTracker.listOfPeople.append(person) @staticmethod def reportSymptomEvent(person, year, month, day): person.hasSymptom(year, month, day) person.setIsolateCaution() #The Person who reports symptom should isolate print("----------") print(person) print("----------") dateOfSymptom = datetime.date(year, month, day) #Check if that person crossed path with other people. 
If they did, they should be careful for people in CoronaTracker.listOfPeople: if people!=person: #Only check the other people, not the reported person didPathCross = CrossPath.checkIfPathCrossed(person, people, dateOfSymptom) print(didPathCross) if(didPathCross==True): print("This person came into contact: ") print(people) people.setCarefulCaution() #The people who crossed the path with the one who had symptom should be careful didPathCross=False #Setting the flag to False @staticmethod def reportAffectedEvent(person, year, month, day): person.hasSymptom(year, month, day) person.setIsolateCaution() #The Person who reports symptom should isolate print("----------") print(person) print("----------") dateOfSymptom = datetime.date(year, month, day) #Check if that person crossed path with other people. If they did, they should be careful for people in CoronaTracker.listOfPeople: if people!=person: #Only check the other people, not the reported person didPathCross = CrossPath.checkIfPathCrossed(person, people, dateOfSymptom) print(didPathCross) if(didPathCross==True): print("This person came into contact: ") print(people) people.setIsolateCaution() #The people who crossed the path with the one who had symptom should be isolated didPathCross=False #Setting the flag to False if __name__ == "__main__": A = Person() B = Person() C = Person() print(A) print(B) print(C) print("**********") visitDateA = datetime.date.today() location0 = Location(20.0, 21.0, 89.0, 89.1) A.visitsLocation(location0, visitDateA.year, visitDateA.month, visitDateA.day) B.visitsLocation(location0, visitDateA.year, visitDateA.month, visitDateA.day) print("**********") ''' print(CrossPath.checkIfPathCrossed(A, B, visitDateA)) print(CrossPath.checkIfPathCrossed(A, C, visitDateA)) print(CrossPath.checkIfPathCrossed(B, C, visitDateA)) ''' CoronaTracker.addPeople(A) CoronaTracker.addPeople(B) CoronaTracker.addPeople(C) CoronaTracker.reportAffectedEvent(A, visitDateA.year, visitDateA.month, visitDateA.day) 
print("-----------------") print(A) print(B) print(C) ''' symptomDateA = datetime.date.today() A.hasSymptom(symptomDateA.year, symptomDateA.month, symptomDateA.day) print(A) print(B) print(C) symptomDateB = datetime.date.today() A.isAffected(symptomDateB.year, symptomDateB.month, symptomDateB.day) print(A) print(B) print(C) '''<file_sep>/README.md # covid-contact-tracing A simplistic application that handles contact tracing for Covid-19 and other epidemic / pandemic situations 1. Record people and their movements on their permission 2. Whenever some person has symptoms, they report it here. It then flags all possible people who were in touch (crossed path) with the "symptotatic" person and cautions them to remain CAREFUL 3. If a person reports "Affected by the virus", then the systme will flag all possible people who were in touch (crossed path) with the affected person to remain ISOLATED 4. We can always find people who can remain Norma, Careful and Isolated This is a very dumb way of managing this information. Possible to build small working web frontend, and possible App that can allow tracking of locations if people agree to it. One more idea is to remove all centralization - all history is stored on the client's device and is encrypted thereby no chance of having this "monitored" by government.
a0d2a1844effdc5928423f63972e4899e62400c2
[ "Markdown", "Python" ]
2
Python
vivekjuneja/covid-contact-tracing
7a7213a4868c8ce2496bd54b76c20cf8cc4369ff
22874ddc7037ec7e207d67015ee1799c2dc9f6c1
refs/heads/master
<repo_name>Amiro30/PhotoProject<file_sep>/MvcProject/Global.asax.cs using System; using System.Collections.Generic; using System.Linq; using System.Web; using System.Web.Mvc; using System.Web.Optimization; using System.Web.Routing; using Ninject; using Ninject.Modules; using Ninject.Web.Mvc; using AutoMapper; using DAL.Entities; using MvcProject.Models; namespace MvcProject { public class MvcApplication : System.Web.HttpApplication { protected void Application_Start() { AreaRegistration.RegisterAllAreas(); FilterConfig.RegisterGlobalFilters(GlobalFilters.Filters); RouteConfig.RegisterRoutes(RouteTable.Routes); BundleConfig.RegisterBundles(BundleTable.Bundles); InitializeMapper(); var registrations = new Registrations(); var kernel = new StandardKernel(registrations); DependencyResolver.SetResolver(new NinjectDependencyResolver(kernel)); } private void InitializeMapper() { Mapper.Initialize(cfg => cfg.CreateMap<Photo, PhotoModel>()); } } } <file_sep>/MvcProject/Models/UserViewModel.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.Linq; using System.Web; namespace MvcProject.Models { public class UserViewModel { public int Id { get; set; } [Required] [Display (Name = "Login")] public string Login { get; set; } public string FirstName { get; set; } public string LastName { get; set; } public byte[] UserPhoto { get; set; } } }<file_sep>/DAL/Repositories/UnitOfWork.cs using System; using System.Collections.Generic; using System.Data.Entity.Core.Metadata.Edm; using System.Linq; using System.Text; using System.Threading.Tasks; using DAL.Entities; using DAL.Interfaces; namespace DAL.Repositories { public class UnitOfWork { private AlbumContext db = new AlbumContext(); private PhotoRepository photoRepository; private UserRepository userRepository; private RoleRepository roleRepository; public IRepository<Photo> Photos { get { if (photoRepository == null) photoRepository = new PhotoRepository(); return 
photoRepository; } } public UserRepository Users { get { if (userRepository == null) userRepository = new UserRepository(db); return userRepository; } } public void Save() { if (db != null) { db.SaveChanges(); } } public void Dispose() { if (db != null) { db.Dispose(); } } } } <file_sep>/DAL/Interfaces/IPhotoRepo.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using DAL.Entities; namespace DAL.Interfaces { public interface IPhotoRepo: IRepository<Photo> { /* IEnumerable<Photo> GetUserPhotos(int userId); IEnumerable<Photo> GetUserPhotosByName(int userId, string photoName);*/ } } <file_sep>/MvcProject/Util/DependencyModule.cs using System; using System.Collections.Generic; using System.Linq; using System.Web; using BLL.Interfaces; using Ninject.Modules; namespace MvcProject.Util { public class DependencyModule: NinjectModule { public override void Load() { Bind<IUserService>().To<IUserService>(); } } }<file_sep>/DAL/Interfaces/IUnitOfWork.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using DAL.Entities; namespace DAL.Interfaces { public interface IUnitOfWork : IDisposable { IRepository<Photo> Photos { get; } IRepository<User> Users { get; } // IRepository<Role> Roles { get; } void Save(); } } <file_sep>/DAL/Interfaces/IUserRepo.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using DAL.Entities; namespace DAL.Interfaces { public interface IUserRepo: IRepository<User> { User GetByLogin(string login); void ChangeUserPassword(string login, string password); } } <file_sep>/DAL/Repositories/PhotoRepository.cs using System; using System.Collections.Generic; using System.Data.Entity; using System.Linq; using System.Text; using System.Threading.Tasks; using DAL.Entities; using DAL.Interfaces; namespace DAL.Repositories { public class PhotoRepository : IPhotoRepo { 
private AlbumContext db; public PhotoRepository() { db = new AlbumContext(); } public IEnumerable<Photo> GetAll() { return db.Photos; } public Photo GetById(int id) { return db.Photos.Find(id); } public void Create(Photo photo) { db.Photos.Add(photo); db.SaveChanges(); } public void Update(Photo photo) { db.Entry(photo).State = EntityState.Modified; } public void Delete(Photo photo) { Photo photo_toDelete = db.Photos.First(p => p.Id == photo.Id); if (photo_toDelete != null) { db.Photos.Remove(photo_toDelete); } } } } <file_sep>/MvcProject/Controllers/AccountController.cs  using System.Web.Mvc; using System.Web.Security; using MvcProject.Models; using DAL.Repositories; namespace MvcProject.Controllers { [Authorize] public class AccountController : Controller { private UnitOfWork unintOfWork; public AccountController() { unintOfWork = new UnitOfWork(); } [HttpGet] [AllowAnonymous] public ActionResult Register() { return View(); } [HttpPost] [AllowAnonymous] [ValidateAntiForgeryToken] public ActionResult Register(RegisterModel model) { if (ModelState.IsValid) { var membershipUser = Membership.CreateUser(model.UserName, model.Password); if (membershipUser != null) { var userEnitity = unintOfWork.Users.GetByLogin(model.UserName); unintOfWork.Users.Update(userEnitity); FormsAuthentication.SetAuthCookie(model.UserName, false); return RedirectToAction("Index", "Home"); } else { ModelState.AddModelError("", "Error registration."); } } return View(model); } } } <file_sep>/MvcProject/Controllers/HomeController.cs using System; using System.Collections.Generic; using System.Linq; using System.Web; using System.Web.Mvc; using DAL.Interfaces; using AutoMapper; using DAL.Entities; using MvcProject.Models; namespace MvcProject.Controllers { public class HomeController : Controller { private readonly IPhotoRepo _photos; public HomeController(IPhotoRepo photos) { _photos = photos; } public ActionResult Index() { var model = Mapper.Map<IEnumerable<Photo>, 
IEnumerable<PhotoModel>>(_photos.GetAll()); return View(model); } public ActionResult About() { ViewBag.Message = "Your application description page."; return View(); } public ActionResult Contact() { ViewBag.Message = "Your contact page."; return View(); } } }<file_sep>/MvcProject/Controllers/PhotoController.cs using System; using System.Collections.Generic; using System.Linq; using System.Web; using System.Web.Mvc; using DAL.Interfaces; using DAL.Entities; using MvcProject.Models; using AutoMapper; namespace MvcProject.Controllers { public class PhotoController : Controller { private readonly IPhotoRepo _photos; public PhotoController(IPhotoRepo photos) { _photos = photos; } public ActionResult Index() { return View("Index", _photos.GetAll()); } // GET: /Photo/Create public ActionResult Create() { Photo newPhoto = new Photo(); var model = Mapper.Map<Photo, PhotoModel>(newPhoto); model.CreatedDate = DateTime.Today; return View(model); } // POST: /Photo/Create [HttpPost] public ActionResult Create(Photo photo, HttpPostedFileBase image) { var model = Mapper.Map<Photo, PhotoModel>(photo); model.CreatedDate = DateTime.Today; if (ModelState.IsValid) { //Is there a photo? 
If so save it if (image != null) { model.ImageMimeType = image.ContentType; model.PhotoFile = new byte[image.ContentLength]; image.InputStream.Read(model.PhotoFile, 0, image.ContentLength); } //Add the photo to the database and save it _photos.Create(photo); return RedirectToAction("Index"); } return View(photo); } } }<file_sep>/MvcProject/Registrations.cs using System; using System.Collections.Generic; using System.Linq; using System.Runtime.InteropServices; using System.Web; using DAL.Interfaces; using DAL.Repositories; using Ninject.Modules; namespace MvcProject { public class Registrations: NinjectModule { public override void Load() { Bind<IPhotoRepo>().To<PhotoRepository>(); } } }<file_sep>/DAL/Repositories/UserRepository.cs using System; using System.Collections.Generic; using System.Data.Entity; using System.Linq; using System.Text; using System.Threading.Tasks; using DAL.Entities; using DAL.Interfaces; namespace DAL.Repositories { public class UserRepository : IUserRepo { private AlbumContext db; public UserRepository(AlbumContext context) { this.db = context; } public IEnumerable<User> GetAll() { return db.Set<User>().AsEnumerable().Select(user => new User()); } public User GetById(int id) { return db.Users.Find(id); } public void Create(User user) { db.Users.Add(user); } public void Update(User user) { db.Entry(user).State = EntityState.Modified; } public void Delete(User user) { User user_toDelete = db.Users.First(p => p.Id == user.Id); db.Users.Remove(user_toDelete); } public User GetByLogin(string login) { return db.Users.Find(login); } public void ChangeUserPassword(string login, string password) { User user = db.Set<User>().FirstOrDefault(u => u.Login == login); if (user != null) { user.Password = <PASSWORD>; } } } } <file_sep>/DAL/Repositories/RatingRepository.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace DAL.Repositories { class RatingRepository { } } 
<file_sep>/DAL/Repositories/RoleRepository.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using DAL.Entities; using DAL.Interfaces; namespace DAL.Repositories { class RoleRepository: IRoleRepo { private AlbumContext db; public RoleRepository(AlbumContext context) { this.db = context; } public IEnumerable<Role> GetUserRoles(int userId) { return db.Set<User>().FirstOrDefault(user => user.Id == userId)?.Roles.Select(role => new Role()); } public void AddUserToRole(int userId, string roleName) { User user = db.Set<User>().FirstOrDefault(u => u.Id == userId); if (user == null) return; Role role = db.Set<Role>().FirstOrDefault(r => r.RoleName == roleName); if (role == null) return; role.Users.Add(user); } } } <file_sep>/DAL/Entities/Photo.cs using System; using System.Collections.Generic; using System.ComponentModel; using System.ComponentModel.DataAnnotations; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Web.Mvc; namespace DAL.Entities { public class Photo { public int Id { get; set; } [Required] public string Title { get; set; } //PhotoFile. 
This is a picture file [DisplayName("Picture")] [MaxLength] public byte[] PhotoFile { get; set; } public int TotalRate { get; set; } //ImageMimeType, stores the MIME type for the PhotoFile [HiddenInput(DisplayValue = false)] public string ImageMimeType { get; set; } [DataType(DataType.MultilineText)] public string Description { get; set; } //CreatedDate [DataType(DataType.DateTime)] [DisplayName("Created Date")] [DisplayFormat(DataFormatString = "{0:dd/MM/yy}", ApplyFormatInEditMode = true)] public DateTime CreatedDate { get; set; } public int UserId { get; set; } } }<file_sep>/DAL/EF/AlbumContext.cs using System; using System.Data.Entity; using DAL.Entities; namespace DAL { public class AlbumContext : DbContext { public AlbumContext() : base("AlbumContext") { } public DbSet<User> Users { get; set; } public DbSet<Role> Role { get; set; } public DbSet<Photo> Photos { get; set; } public DbSet<Rating> Ratings { get; set; } } } <file_sep>/DAL/Interfaces/IRoleRepo.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using DAL.Entities; namespace DAL.Interfaces { public interface IRoleRepo { IEnumerable<Role> GetUserRoles(int userId); void AddUserToRole(int userId, string roleName); } } <file_sep>/DAL/Entities/Rating.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace DAL.Entities { public class Rating { public int RatingId { get; set; } public int UserRate { get; set; } public int? UserId { get; set; } public virtual User User { get; set; } public int PhotoId { get; set; } public virtual Photo Photo { get; set; } } }
57eb4cafdfebfd0ac93fb1e2a93417d2f3439291
[ "C#" ]
19
C#
Amiro30/PhotoProject
611240eb29931e56416a072facf4c3cd267653df
dd8fc2ed76872b6702c4eeda9478fefb2d7288bc
refs/heads/master
<file_sep>import React from 'react'; const Song = (props) => { return ( <> <div> <li> {props.title} by {props.artist} <img src={props.image}/> </li> </div> </> ) } export default Song;
96a0418f19e5e90eb7418ba44405759aa069f563
[ "JavaScript" ]
1
JavaScript
lmeromy/wk14-d2-React-Top20
5f57586c9431009473172165b5d5a145997cc15b
79116fcf7d65ff4d00b353a5c4ab70b0a857981f
refs/heads/master
<file_sep>#!/bin/bash # En este script vamos a realizar operaciones aritméticas usando los comandos let y expr. varX=2 varY=3 varZ=0 # Operaciones con el comando let: echo "Operaciones con el comando let:$varX $varY $varZ" echo "suma $varX $varY" let suma=$varX+$varY echo $suma echo "resta $varX $varY" let resta=$varX-$varY echo $resta echo "multiplcacion $varX $varY" let multiplicacion=$varX*$varY echo $multiplicacion echo "division $varY $varY" let division=$varY/$varY # Si la division es algo dividido por 0 dará error y lo mostrará por STDERR. echo $division echo "resto $varY $varX" let resto=$varY%$varY echo $resto if [ 2 -gt 1 ]; then #> echo "2 es mayor que 1" fi if [ 2 -ge 2 ]; then #>= echo "2 es mayor o igual que 2" fi if [ 2 -eq 2 ]; then #== echo "2 es igual que 2" fi if [ 2 -lt 4 ]; then #< echo "2 es menor que 4" fi if [ 4 -le 4 ]; then #=< echo "4 es menor o igual que 4" fi NUM=0 while [ $NUM -le 10 ]; do echo "\$NUM: $NUM" let NUM=$NUM+1 done
03df5ace489ab2dfe6b088b46cd6ba88ce64b868
[ "Shell" ]
1
Shell
hydra60/pruebaApi
1209f4576fb60857df45456c947dfb88034a9e84
34873acd7fa030608fbbf80bc6b7295456381731
refs/heads/master
<repo_name>alilja/subcurrent<file_sep>/requirements.txt Jinja2==2.7.3 Markdown==2.5.2 MarkupSafe==0.23 PyYAML==3.11 Pygments==2.0.2 Unidecode==0.04.17 python-slugify==0.1.0 wsgiref==0.1.2 <file_sep>/posts/test2.md ### Another Test Yay<file_sep>/subcurrent.py from os import listdir from yaml import load from jinja2 import Environment, PackageLoader from markdown import markdown from slugify import slugify jinja_env = Environment(loader=PackageLoader('subcurrent', 'templates')) with open("config.yaml") as f: config = load(f) for item in listdir("posts"): dot_location = item.find(".") if dot_location == -1: continue if item[dot_location:] != ".md": continue # if it's a special file, get the right template special_files = {"index", "404"} if item[:dot_location] in special_files: template = jinja_env.get_template('%s.html' % item[:dot_location]) else: template = jinja_env.get_template('post.html') with open("posts/%s" % item) as f: lines = f.readlines() title = lines[0].decode("utf8", "ignore").strip('#\n\r ') slug = slugify(title) body = markdown(unicode(''.join(lines[1:]), "UTF-8")) with open("../{0}.html".format(slug), "wb") as output_file: output_file.write(template.render(site_name=config["site_name"], title=title, body_text=body)) <file_sep>/posts/test.md #### This is a Subcurrent Test Title This is some body text. I'm sick of looking at lorem ipsum, so I'm writing it myself by hand. I just prefer to do it that way. I need some longer sentences to make sure that things like linebreaks work, and I'm going to add some other elements as well. 
>[11:02pm] schiaparelli: 3 years ago one of their new designers told me >[11:02pm] alilja: okay i could definitely do that >[11:02pm] schiaparelli: he didnt have a portfolio so just coded up a blog and made some small examples of his design sensibilities by typesetting his posts and whatnot >[11:03pm] alilja: ahh i can DEFINITELY do that >[11:03pm] schiaparelli: so i think there are companies that have eng-centric design roles >[11:03pm] alilja: i have a bunch of stuff in the pipeline >[11:03pm] alilja: and a bunch of code i can do that with >[11:03pm] schiaparelli: and you can really emphasize your skills re: you can design and implement >[11:03pm] alilja: thats a million dollar idea celine >[11:03pm] schiaparelli: and also just wow them w/ your deep thoughtfulness and process So that was reassuring. It made me feel like I could do what I needed to. 1. This is a list. 2. There are many lists like it. 3. But this one is mine.
c15f6d57ab25972001af096b2b24af7c35797f10
[ "Markdown", "Python", "Text" ]
4
Text
alilja/subcurrent
1e13220314300bc0bd51c2acca38a66b24b4dd65
220ec265e36680abace2d888fdc4b587c28c4cda
refs/heads/master
<file_sep># OpenGL-4.6-Hello-Triangle The famous OpenGL "Hello triangle" using shaders. It uses the OpenGL 4.5 functionality called [Direct State Access](https://www.khronos.org/opengl/wiki/Direct_State_Access). Be aware, not-that-old hardware may not be compatible with this functionality. As of November 2020, it is the most up-to-date "hello triangle" example I could piece together. ## Build and run I work on GNU/Linux Ubuntu 20. Obviously you need to install a compiler (g++), make, opengl and glfw3. `sudo apt install g++ build-essential libopengl0 libopengl-dev libglfw3 libglfw3-dev` Run `make` in the directory of the project. ## Glad This code uses Glad as its openGL extensions loader. Glew would work fine too. The directories include/glad, include/KHR, and the files glad.h, glad.c and khrplatform.h were generated from [Glad website](https://glad.dav1d.de/) ## Greetings I copied/modified code from [<NAME>](https://antongerdelan.net/opengl/index.html) and [<NAME> / Learnopengl](https://learnopengl.com/Getting-started/Shaders). See Licence file for more info. Also check [Fendevel repos about modern OpenGL](https://github.com/fendevel). 
<file_sep># Hellor Colorful Triangle Makefile # ludo456 @github # # -g adds debugging information to the executable file # -Wall turns on most, but not all, compiler warnings # CC = g++ CFLAGS = -g -Wall LIBS = -lGL -lglfw -ldl INCL = ./include DEPS = gl_utils.cpp glad.c EXE = hello_colorful_triangle .PHONY: run # typing 'make' will invoke the first target entry in the file # (in this case the default target entry) # you can name this target entry anything, but "default" or "all" # are the most commonly used names by convention # all: $(EXE) run # To create the executable file count we need the object files # countwords.o, counter.o, and scanner.o: # $(EXE): $(EXE).cpp $(DEPS) $(CC) $(CFLAGS) $^ $(LIBS) -I$(INCL) -o bin/$(EXE) run: ./bin/$(EXE) <file_sep>/** * Hello colorful triangle * * An OpenGL "Hello Triangle" using * - glad instead of glew (because reason), and glfw3, - pieces from learnopengl.com * - the gl_utils lib of <NAME> tutos * - DSA, looking at fendevel/Guide-to-Modern-OpenGL-Functions @github * - heavy comments! This is the way. 
* * @author ludo456 / the opensourcedev @github */ #include "include/gl_utils.h" //includes glad.h & glfw3.h #include <iostream> // Forward declarations void framebuffer_size_callback(GLFWwindow* window, int width, int height); void processInput(GLFWwindow *window); int main() { // glfw: initialize and configure // ------------------------------ glfwInit(); glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4); glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 6); glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE); // glfw window creation // -------------------- GLFWwindow* window = glfwCreateWindow(800, 600, "Hello Colorful Triangle", NULL, NULL); if (window == NULL) { std::cout << "Failed to create GLFW window" << std::endl; glfwTerminate(); return -1; } glfwMakeContextCurrent(window); glfwSetFramebufferSizeCallback(window, framebuffer_size_callback); // glad: load all OpenGL function pointers // --------------------------------------- if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress)) { std::cout << "Failed to initialize GLAD" << std::endl; return -1; } if ( !GLAD_GL_ARB_direct_state_access ) { /* see * https://www.khronos.org/registry/OpenGL/extensions/ARB/ARB_direct_state_access.txt * https://www.khronos.org/opengl/wiki/Direct_State_Access. This is the way. */ std::cout << "DSA not supported!" << std::endl; return -1; } // Setting up the triangle data // ---------------------------- float vertices[] = { -1.0f, -1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, -1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f }; GLuint attribPos = 0; GLuint attribCol = 1; /**************** VBO ***************/ unsigned int hctVBO; // hello colorful triangle vbo //glGenBuffers(1, &VBO); // Way to go before openGl 4.5 //glBindBuffer(GL_ARRAY_BUFFER, VBO); // Binding to openGl context was necessary // replaced with: glCreateBuffers(1, &hctVBO);//uses DSA. This is the way. 
//glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW); // replaced with: glNamedBufferStorage(hctVBO, sizeof(vertices), vertices, GL_DYNAMIC_STORAGE_BIT); // ^^^ needed, since there is no context binding. /**************** VAO ***************/ unsigned int hctVAO; //glGenVertexArrays(1, &VAO); //glBindVertexArray(VAO); // replaced with: glCreateVertexArrays(1, &hctVAO);// This is the way. // As there is, by definition, no context binding in DSA, then we need to //"bind" vao with vbo explicitely, like linking 2 indexes in a database. GLuint vaoBindingPoint = 0;//A binding point in VAO. See GL_MAX_VERTEX_ATTRIB_BINDINGS glVertexArrayVertexBuffer( hctVAO, // vao to bind vaoBindingPoint, // Could be 1, 2... if there were several vbo to source. hctVBO, // VBO to bound at "vaoBindingPoint". 0, // offset of the first element in the buffer hctVBO. 6*sizeof(float)); // stride == 3 position floats + 3 color floats. //glEnableVertexAttribArray(attribPos); //glEnableVertexAttribArray(attribCol); // replaced with: glEnableVertexArrayAttrib(hctVAO, attribPos);// Need to precise vao, as there is no context binding in DSA style glEnableVertexArrayAttrib(hctVAO, attribCol);// Meaning no current vao is bound to the opengl context. 
//glVertexAttribPointer(attribPos, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(float), (void*)0); //glVertexAttribPointer(attribCol, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(float), (void*)( 3*sizeof(float) )); // replaced with: glVertexArrayAttribFormat(hctVAO, attribPos, 3, GL_FLOAT, false, 0);// Need to precise vao, as there is no context binding in DSA glVertexArrayAttribFormat(hctVAO, attribCol, 3, GL_FLOAT, false, 3*sizeof(float));//https://www.khronos.org/registry/OpenGL-Refpages/gl4/html/glVertexAttribFormat.xhtml //Explicit binding of an attribute to a vao binding point glVertexArrayAttribBinding(hctVAO, attribPos, vaoBindingPoint); glVertexArrayAttribBinding(hctVAO, attribCol, vaoBindingPoint); //Create shader using gl_utils // --------------------------- GLuint shader_prog = create_programme_from_files( "shader_triangle.vert", "shader_triangle.frag" ); glUseProgram(shader_prog); // Could be copy-pasted in render loop too... //glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);//Wireframe rendering // render loop // ----------- while (!glfwWindowShouldClose(window)) { // input // ----- processInput(window); // render // ------ glClearColor(1.0f, 0.3f, 0.3f, 1.0f); glClear(GL_COLOR_BUFFER_BIT); glBindVertexArray(hctVAO); glDrawArrays(GL_TRIANGLES, 0, 3); // glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.) // ------------------------------------------------------------------------------- glfwSwapBuffers(window); glfwPollEvents(); } // glfw: terminate, clearing all previously allocated GLFW resources. 
// ------------------------------------------------------------------ glfwTerminate(); return 0; } // process all input: query GLFW whether relevant keys are pressed/released this frame and react accordingly // --------------------------------------------------------------------------------------------------------- void processInput(GLFWwindow *window) { if(glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS) glfwSetWindowShouldClose(window, true); } // glfw: whenever the window size changed (by OS or user resize) this callback function executes // --------------------------------------------------------------------------------------------- void framebuffer_size_callback(GLFWwindow* window, int width, int height) { // make sure the viewport matches the new window dimensions; note that width and // height will be significantly larger than specified on retina displays. glViewport(0, 0, width, height); }
208e4f26f96f9d966967265cfaea1530693e4311
[ "Markdown", "Makefile", "C++" ]
3
Markdown
Fr3nchK1ss/OpenGL-4.6-Hello-Triangle
9d8397218fcfe3e137bbd88e7ce04eb1c8835292
73e2581841567e34765780debbb21a926ee28d30
refs/heads/master
<file_sep>#from env_utils.vistarget_nav_task import CustomVisTargetSensor #from env_utils.vistarget_nav_dataset import VisTargetNavDatasetV1 #from env_utils.vistarget_nav_env import VisTargetNavEnv from env_utils.habitat_env import RLEnv from env_utils.habitat_env import MIN_DIST, MAX_DIST #from env_utils.object_nav_task import CustomObjectGoalSensor #from env_utils.object_nav_dataset import CustomObjectNavDatasetV1 #from env_utils.object_nav_env import CustomObjectNavEnv <file_sep>import torch.utils.data as data import numpy as np import joblib import torch import time import cv2 import time class HabitatDemoSingleGoalDataset(data.Dataset): def __init__(self, cfg, data_list, include_stop = False): self.data_list = data_list self.img_size = (64, 256) self.action_dim = 4 if include_stop else 3 self.max_demo_length = 100#cfg.dataset.max_demo_length def __getitem__(self, index): return self.pull_image(index) def __len__(self): return len(self.data_list) def get_dist(self, demo_position): return np.linalg.norm(demo_position[-1] - demo_position[0], ord=2) def pull_image(self, index): s = time.time() demo_data = joblib.load(self.data_list[index]) #print('file loading time:', time.time() - s) scene = self.data_list[index].split('/')[-1].split('_')[0] start_pose = [demo_data['position'][0], demo_data['rotation'][0]] target_rgb, target_depth = demo_data['target_rgb'], demo_data['target_depth'] target_goal = np.concatenate([target_rgb/255.,np.expand_dims(target_depth,2)],2) demo_rgb = np.array(demo_data['rgb'], dtype=np.float32) demo_dep = np.array(demo_data['depth'], dtype=np.float32) demo_length = len(demo_rgb) - 1 #if demo_length > self.max_demo_length: # print('longggg', self.data_list[index]) demo_rgb_out = np.zeros([self.max_demo_length, demo_rgb.shape[1], demo_rgb.shape[2], 3]) demo_rgb_out[:demo_length] = demo_rgb[:demo_length] demo_dep_out = np.zeros([self.max_demo_length, demo_rgb.shape[1], demo_rgb.shape[2], 1]) demo_dep_out[:demo_length] = demo_dep[:demo_length] 
demo_act = np.array(demo_data['action'], dtype=np.int8) if self.action_dim > 3: demo_act[-1] = 0 demo_act_out = np.ones([self.max_demo_length]) * (-100) # print(demo_act.shape, demo_length, 'rgbd', len(demo_data['rgb']), len(demo_data['depth']), len(demo_data['action'])) demo_act_out[:demo_length] = demo_act targets = np.zeros([self.max_demo_length]) targets[:demo_length] = 0 target_img = np.zeros([1, demo_rgb.shape[1], demo_rgb.shape[2] , 4]) target_num = 1 target_img[:target_num] = np.array(target_goal)#[start_idx:start_idx+demo_length]) positions = np.zeros([self.max_demo_length,3]) positions[:demo_length] = demo_data['position'][:demo_length] return_tensor = [torch.from_numpy(demo_rgb_out).float(), torch.from_numpy(demo_dep_out).float(), torch.from_numpy(demo_act_out).float(), torch.from_numpy(positions), targets, torch.from_numpy(target_img).float(), scene, start_pose] return return_tensor class HabitatDemoMultiGoalDataset(data.Dataset): def __init__(self, cfg, data_list, include_stop = False): self.data_list = data_list self.img_size = (64, 256) self.action_dim = 4 if include_stop else 3 self.max_demo_length = 100#cfg.dataset.max_demo_length self.single_goal = False def __getitem__(self, index): return self.pull_image(index) def __len__(self): return len(self.data_list) def get_dist(self, demo_position): return np.linalg.norm(demo_position[-1] - demo_position[0], ord=2) def pull_image(self, index): s = time.time() demo_data = joblib.load(self.data_list[index]) #print('file loading time:', time.time() - s) scene = self.data_list[index].split('/')[-1].split('_')[0] start_pose = [demo_data['position'][0], demo_data['rotation'][0]] target_indices = np.array(demo_data['target_idx']) # There are two random indices to sample # 1. when to start making graph # 2. 
when to start predict action # goals = np.unique(target_indices) #starts = [np.where(target_indices == g)[0].min() for g in goals] orig_data_len = len(demo_data['position']) if self.single_goal: try_num = 0 while True: start_idx = np.random.randint(orig_data_len - 10) if orig_data_len > 10 else orig_data_len start_target_idx = target_indices[start_idx] end_idx = np.where(target_indices == start_target_idx)[0][-1] if end_idx - start_idx >= 10 : break try_num += 1 if try_num > 1000: end_idx = -1 break else: start_idx = np.random.randint(orig_data_len - 10) if orig_data_len > 10 else orig_data_len end_idx = - 1 demo_rgb = np.array(demo_data['rgb'][start_idx:end_idx], dtype=np.float32) demo_length = np.minimum(len(demo_rgb), self.max_demo_length) demo_dep = np.array(demo_data['depth'][start_idx:end_idx], dtype=np.float32) demo_rgb_out = np.zeros([self.max_demo_length, demo_rgb.shape[1], demo_rgb.shape[2], 3]) demo_rgb_out[:demo_length] = demo_rgb[:demo_length] demo_dep_out = np.zeros([self.max_demo_length, demo_rgb.shape[1], demo_rgb.shape[2], 1]) demo_dep_out[:demo_length] = demo_dep[:demo_length] demo_act = np.array(demo_data['action'][start_idx:start_idx+demo_length], dtype=np.int8) if self.action_dim > 3: demo_act[-1] = 0 demo_act_out = np.ones([self.max_demo_length]) * (-100) # print(demo_act.shape, demo_length, 'rgbd', len(demo_data['rgb']), len(demo_data['depth']), len(demo_data['action'])) demo_act_out[:demo_length] = demo_act -1 if self.action_dim == 3 else demo_act targets = np.zeros([self.max_demo_length]) targets[:demo_length] = demo_data['target_idx'][start_idx:start_idx+demo_length] target_img = np.zeros([5, demo_rgb.shape[1], demo_rgb.shape[2] , 4]) target_num = len(demo_data['target_img']) target_img[:target_num] = np.array(demo_data['target_img'])#[start_idx:start_idx+demo_length]) positions = np.zeros([self.max_demo_length,3]) positions[:demo_length] = demo_data['position'][start_idx:start_idx+demo_length] return_tensor = 
[torch.from_numpy(demo_rgb_out).float(), torch.from_numpy(demo_dep_out).float(), torch.from_numpy(demo_act_out).float(), torch.from_numpy(positions), targets, torch.from_numpy(target_img).float(), scene, start_pose] return return_tensor if __name__ == '__main__': import sys from IL_configs.default import get_config from dataset.demo_dataset import HabitatDemoMultiGoalDataset import os from tqdm import tqdm cfg = get_config('IL_configs/gmt.yaml') data_list = [os.path.join('/disk4/obin/vistarget_demo_gibson/train/random',x) for x in os.listdir('/disk4/obin/vistarget_demo_gibson/train/random')] data_list += [os.path.join('/disk4/obin/vistarget_demo_gibson/val/random',x) for x in os.listdir('/disk4/obin/vistarget_demo_gibson/val/random')] dataset = HabitatDemoMultiGoalDataset(cfg, data_list, True) print(len(dataset)) for idx in tqdm(range(len(dataset))): if 'Angiola_019_env0.dat.gz' in dataset.data_list[idx]: dataset.pull_image(idx) <file_sep>#!/usr/bin/env python3 # Copyright (without_goal+curr_emb) Facebook, Inc. and its affiliates. # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
import sys if '/opt/ros/kinetic/lib/python2.7/dist-packages' in sys.path: sys.path.remove('/opt/ros/kinetic/lib/python2.7/dist-packages') import argparse import random import numpy as np from habitat_baselines.common.baseline_registry import baseline_registry from rl_configs.default import get_config from trainer.algo import ppo, ddppo import env_utils import os os.environ['GLOG_minloglevel'] = "2" os.environ['MAGNUM_LOG'] = "quiet" parser = argparse.ArgumentParser() parser.add_argument( "--run-type", choices=["train", "eval", 'benchmark'], required=True, help="run type of the experiment (train or eval)", ) parser.add_argument( "--exp-config", type=str, required=True, help="path to config yaml containing info about experiment", ) parser.add_argument( "opts", default=None, nargs=argparse.REMAINDER, help="Modify config options from command line", ) parser.add_argument( "--gpu", type=str, default="0", help="gpus", ) parser.add_argument( "--stop", action='store_true', default=False, help="include stop action or not", ) parser.add_argument( "--diff", choices=['easy', 'medium', 'hard'], help="episode difficulty", ) parser.add_argument( "--seed", type=str, default="none" ) arguments = parser.parse_args() os.environ["CUDA_VISIBLE_DEVICES"] = arguments.gpu # print(args.gpu) def main(): run_exp(**vars(arguments)) def run_exp(exp_config: str, run_type: str, opts=None, *args, **kwargs) -> None: r"""Runs experiment given mode and config Args: exp_config: path to config file. run_type: "train" or "eval. opts: list of strings of additional config options. Returns: None. 
""" config = get_config(exp_config, opts) config.defrost() config.DIFFICULTY = arguments.diff if arguments.stop: config.TASK_CONFIG.TASK.POSSIBLE_ACTIONS = ["STOP", "MOVE_FORWARD", "TURN_LEFT", "TURN_RIGHT"] else: config.TASK_CONFIG.TASK.POSSIBLE_ACTIONS = ["MOVE_FORWARD", "TURN_LEFT", "TURN_RIGHT"] if arguments.seed != 'none': config.TASK_CONFIG.SEED = int(arguments.seed) config.freeze() random.seed(config.TASK_CONFIG.SEED) np.random.seed(config.TASK_CONFIG.SEED) trainer_init = baseline_registry.get_trainer(config.TRAINER_NAME) assert trainer_init is not None, f"{config.TRAINER_NAME} is not supported" trainer = trainer_init(config) if run_type == "train": trainer.train() elif run_type == "eval": trainer.eval() elif run_type == 'benchmark': trainer.benchmark() if __name__ == "__main__": main() <file_sep># habitat_rl habitat rl code without PointNavdataset ### Look into - trainer/algo/ppo/ppo_trainer_memory.py line94 - env_utils/habitat_env.py - rl_configs/example.yaml - habitat_homing_rl/env_utils/habitat_env.py ### train ``` python train_rl.py --run-type train --exp-config rl_configs/example.yaml --gpu 0 ``` ### refer - https://github.com/obin-hero/Vistarget <file_sep>#!/bin/bash SBATCH --job-name=ddppo SBATCH --output=logs.ddppo.out SBATCH --error=logs.ddppo.err SBATCH --gres gpu:1 SBATCH --nodes 1 SBATCH --cpus-per-task 10 SBATCH --ntasks-per-node 1 SBATCH --mem=60GB SBATCH --time=12:00 SBATCH --signal=USR1@600 SBATCH --partition=dev export GLOG_minloglevel=2 export MAGNUM_LOG=quiet export MASTER_ADDR=$(srun --ntasks=1 hostname 2>&1 | tail -n1) set -x srun python -u -m train_baseline \ --exp-config configs/dppo_objectnav.yaml \ --run-type train <file_sep>import os import numpy as np import torch import functools from . import MeterLogger from .. import meter as Meter IS_IMPORTED_TENSORBOARDX = False try: import tensorboardX IS_IMPORTED_TENSORBOARDX = True except: pass class TensorboardMeterLogger(MeterLogger): ''' A class to package and visualize meters. 
Args: log_dir: Directory to write events to (log_dir/env) env: Tensorboard environment to log to. plotstylecombined: Whether to plot curves in the same window. loggers: All modes: defaults to ['train', 'val']. If plotstylecombined, these will be superimposed in one plot. ''' def __init__(self, env, log_dir=None, plotstylecombined=True, loggers=('train', 'val')): super().__init__(modes=loggers) self.env = env self.log_dir = os.path.join(log_dir, env) self.logger = {} self.writer = {} for logger in loggers: self.logger[logger] = {} self.writer[logger] = tensorboardX.SummaryWriter(logdir=self.log_dir + "-{}".format(logger)) self.metername_to_ptype = {} self.plotstylecombined = plotstylecombined def __addlogger(self, meter, ptype, kwargs={}): for key in self.writer.keys(): self.metername_to_ptype[meter] = ptype if ptype == 'stacked_line': raise NotImplementedError("stacked_line not yet implemented for TensorboardX meter") elif ptype == 'line': if self.plotstylecombined: for key in self.writer.keys(): self.logger[key][meter] = functools.partial(self.writer[key].add_scalar, tag=meter) else: for key in self.writer.keys(): self.logger[key][meter] = functools.partial(self.writer[key].add_scalar, tag=meter) elif ptype == 'image': if self.plotstylecombined: for key in self.writer.keys(): self.logger[key][meter] = functools.partial(self.writer[key].add_image, tag=meter) else: for key in self.writer.keys(): self.logger[key][meter] = functools.partial(self.writer[key].add_image, tag=meter) elif ptype == 'histogram': if self.plotstylecombined: for key in self.writer.keys(): self.logger[key][meter] = functools.partial(self.writer[key].add_histogram, tag=meter) else: for key in self.writer.keys(): self.logger[key][meter] = functools.partial(self.writer[key].add_histogram, tag=meter) elif ptype == 'heatmap': raise NotImplementedError("heatmap not yet implemented for TensorboardX meter") elif ptype == 'text': for key in self.writer.keys(): self.logger[key][meter] = 
functools.partial(self.writer[key].add_text, tag=meter) elif ptype == 'video': for key in self.writer.keys(): self.logger[key][meter] = functools.partial(self.writer[key].add_video, tag=meter, **kwargs) def add_meter(self, meter_name, meter, ptype=None, kwargs={}): super().add_meter(meter_name, meter) if ptype: # Use `ptype` for manually selecting the plot type self.__addlogger(meter_name, ptype, kwargs) elif isinstance(meter, Meter.ClassErrorMeter): self.__addlogger(meter_name, 'line') elif isinstance(meter, Meter.mAPMeter): self.__addlogger(meter_name, 'line') elif isinstance(meter, Meter.AUCMeter): self.__addlogger(meter_name, 'line') elif isinstance(meter, Meter.ConfusionMeter): self.__addlogger(meter_name, 'heatmap') elif isinstance(meter, Meter.MSEMeter): self.__addlogger(meter_name, 'line') elif type(meter) == Meter.ValueSummaryMeter: self.__addlogger(meter_name, 'line') elif isinstance(meter, Meter.MultiValueSummaryMeter): self.__addlogger(meter_name, 'stacked_line') else: raise NotImplementedError("Unknown meter type (and pytpe): {} ({})".format(type(meter), ptype)) def reset_meter(self, iepoch, mode='train', meterlist=None): self.timer.reset() for meter_name, meter in self.meter[mode].items(): if meterlist is not None and meter_name not in meterlist: continue val = self.meter[mode][meter_name].value() val = val[0] if isinstance(val, (list, tuple)) else val should_reset_and_continue = False if isinstance(val, str) or val is None: should_reset_and_continue = (val is None) elif isinstance(val, np.ndarray): should_reset_and_continue = np.isnan(val).any() elif isinstance(val, torch.Tensor): should_reset_and_continue = torch.isnan(val).any() else: should_reset_and_continue = np.isnan(val) if should_reset_and_continue: self.meter[mode][meter_name].reset() continue if isinstance(meter, Meter.ConfusionMeter): self.logger[mode][meter_name].log(val, global_step=iepoch) elif 'image' == self.metername_to_ptype[meter_name]: try: 
self.logger[mode][meter_name](img_tensor=val, global_step=iepoch) except ValueError as e: print(f'trouble logging {meter_name} {e}') print('probably due to fake 0 data the data is all at 0') elif 'histogram' == self.metername_to_ptype[meter_name]: try: self.logger[mode][meter_name](values=val, global_step=iepoch) except ValueError as e: print(f'trouble logging {meter_name} {e}') print('probably due to fake 0 data the data is all at 0') elif 'text' == self.metername_to_ptype[meter_name]: if val is not None: self.logger[mode][meter_name](text_string=val, global_step=iepoch) elif 'video' == self.metername_to_ptype[meter_name]: if val is not None: self.logger[mode][meter_name](vid_tensor=val, global_step=iepoch) elif isinstance(self.meter[mode][meter_name], Meter.MultiValueSummaryMeter): self.logger[mode][meter_name](scalar_val=np.array(np.cumsum(val), global_step=iepoch)) # keep mean else: self.logger[mode][meter_name](scalar_value=val, global_step=iepoch) self.meter[mode][meter_name].reset() <file_sep>#!/usr/bin/env python3 # Copyright (without_goal+curr_emb) Facebook, Inc. and its affiliates. # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F

from habitat_baselines.common.utils import CategoricalNet, Flatten
from habitat_baselines.rl.ddppo.policy import resnet
from habitat_baselines.rl.ddppo.policy.running_mean_and_var import (
    RunningMeanAndVar,
)
from habitat_baselines.rl.models.rnn_state_encoder import RNNStateEncoder
from habitat_baselines.rl.ppo import Net, Policy
from model.resnet.resnet import ResNetEncoder


class ExploreResNetPolicy(Policy):
    # Actor-critic policy over ExploreResNetNet (exploration: current view only,
    # no goal image). Interface mirrors habitat-baselines' ddppo policies.
    def __init__(
        self,
        observation_space,
        action_space,
        goal_sensor_uuid="pointgoal_with_gps_compass",
        hidden_size=512,
        num_recurrent_layers=2,
        rnn_type="LSTM",
        resnet_baseplanes=32,
        backbone="resnet18",
        normalize_visual_inputs=True,
        cfg=None  # accepted for signature compatibility; unused here
    ):
        super().__init__(
            ExploreResNetNet(
                observation_space=observation_space,
                action_space=action_space,
                goal_sensor_uuid=goal_sensor_uuid,
                hidden_size=hidden_size,
                num_recurrent_layers=num_recurrent_layers,
                rnn_type=rnn_type,
                backbone=backbone,
                resnet_baseplanes=resnet_baseplanes,
                normalize_visual_inputs=normalize_visual_inputs,
            ),
            action_space.n,
        )


class PointNavResNetPolicy(Policy):
    # Actor-critic policy over PointNavResNetNet (image-goal navigation: encodes
    # current observation and a target goal image with a shared encoder).
    def __init__(
        self,
        observation_space,
        action_space,
        goal_sensor_uuid="pointgoal_with_gps_compass",
        hidden_size=512,
        num_recurrent_layers=2,
        rnn_type="LSTM",
        resnet_baseplanes=32,
        backbone="resnet18",
        normalize_visual_inputs=True,
        cfg=None  # accepted for signature compatibility; unused here
    ):
        super().__init__(
            PointNavResNetNet(
                observation_space=observation_space,
                action_space=action_space,
                goal_sensor_uuid=goal_sensor_uuid,
                hidden_size=hidden_size,
                num_recurrent_layers=num_recurrent_layers,
                rnn_type=rnn_type,
                backbone=backbone,
                resnet_baseplanes=resnet_baseplanes,
                normalize_visual_inputs=normalize_visual_inputs,
            ),
            action_space.n,
        )


import time

TIME_DEBUG = True

def log_time(prev_time, log):
    # Prints elapsed seconds since `prev_time` with a label and returns a fresh
    # timestamp, so calls can be chained: t = log_time(t, "stage").
    print("[TIME] ", log, time.time() - prev_time)
    return time.time()


class PointNavResNetNet(Net):
    """Network which passes the input image through CNN and concatenates
    goal vector with CNN's output and passes that through RNN.
    """

    def __init__(
        self,
        observation_space,
        action_space,
        goal_sensor_uuid,
        hidden_size,
        num_recurrent_layers,
        rnn_type,
        backbone,
        resnet_baseplanes,
        normalize_visual_inputs,
    ):
        super().__init__()
        self.goal_sensor_uuid = goal_sensor_uuid
        # +1 slot so "no previous action" (index 0) can be embedded; see forward().
        self.prev_action_embedding = nn.Embedding(action_space.n+1, 32)
        self._n_prev_action = 32
        #self._n_input_goal = self.num_category = 50
        #self.tgt_embeding = nn.Linear(self.num_category, 32)
        self._n_input_goal = 0
        self._hidden_size = hidden_size
        rnn_input_size = self._n_input_goal + self._n_prev_action
        self.visual_encoder = ResNetEncoder(
            observation_space,
            baseplanes=resnet_baseplanes,
            ngroups=resnet_baseplanes // 2,
            make_backbone=getattr(resnet, backbone),
            normalize_visual_inputs=normalize_visual_inputs,
        )
        if not self.visual_encoder.is_blind:
            # *2: forward() concatenates current-view and goal-view features.
            self.visual_fc = nn.Sequential(
                nn.Linear(
                    np.prod(self.visual_encoder.output_shape)*2, hidden_size
                ),
                nn.ReLU(True),
            )
        self.state_encoder = RNNStateEncoder(
            (0 if self.is_blind else self._hidden_size) + rnn_input_size,
            self._hidden_size,
            rnn_type=rnn_type,
            num_layers=num_recurrent_layers,
        )
        self.train()

    @property
    def output_size(self):
        return self._hidden_size

    @property
    def is_blind(self):
        return self.visual_encoder.is_blind

    @property
    def num_recurrent_layers(self):
        return self.state_encoder.num_recurrent_layers

    def get_tgt_encoding(self, goal_observations):
        # NOTE(review): dead/broken — self.num_category and self.tgt_embeding are
        # commented out in __init__, so calling this raises AttributeError. The
        # only call site in forward() is also commented out.
        goal_onehot = torch.eye(self.num_category)[goal_observations[:,0,0].long()].to(goal_observations.device)
        return self.tgt_embeding(goal_onehot)

    def forward(self, observations, rnn_hidden_states, prev_actions, masks):
        B = observations['panoramic_rgb'].shape[0]
        # NHWC -> NCHW; rgb scaled to [0,1]. Depth range depends on sensor config
        # — presumably already normalized upstream; TODO confirm.
        input_list = [observations['panoramic_rgb'].permute(0,3,1,2)/255.0,
                      observations['panoramic_depth'].permute(0,3,1,2)]
        curr_obs = torch.cat(input_list,1)
        #goal_obs = observations['objectgoal'].permute(0,3,1,2)
        goal_obs = observations['target_goal'].permute(0,3,1,2)
        # Batch current and goal views through one encoder pass; only the first 4
        # goal channels (rgb+depth) are used.
        batched_obs = torch.cat([curr_obs, goal_obs[:,:4]],0)# * 2 - 1
        feats = self.visual_encoder(batched_obs)
        curr_feats, target_feats = feats.split(B)
        #tgt_encoding = self.get_tgt_encoding(goal_obs[:,-1])
        # Shift actions by +1 so a masked (episode-start) step embeds index 0.
        prev_actions = self.prev_action_embedding(
            ((prev_actions.float()+1) * masks).long().squeeze(-1)
        )
        feats = self.visual_fc(torch.cat((curr_feats.view(B,-1),target_feats.view(B,-1)),1))
        x = [feats, prev_actions]
        x = torch.cat(x, dim=1)
        x, rnn_hidden_states = self.state_encoder(x, rnn_hidden_states, masks)
        return x, rnn_hidden_states


class ExploreResNetNet(Net):
    """Network which passes the input image through CNN and concatenates
    goal vector with CNN's output and passes that through RNN.
    """
    # Same as PointNavResNetNet but without the goal image branch (visual_fc input
    # is single-view; forward() consumes only the current panoramic observation).

    def __init__(
        self,
        observation_space,
        action_space,
        goal_sensor_uuid,
        hidden_size,
        num_recurrent_layers,
        rnn_type,
        backbone,
        resnet_baseplanes,
        normalize_visual_inputs,
    ):
        super().__init__()
        self.goal_sensor_uuid = goal_sensor_uuid
        self.prev_action_embedding = nn.Embedding(action_space.n+1, 32)
        self._n_prev_action = 32
        #self._n_input_goal = self.num_category = 50
        #self.tgt_embeding = nn.Linear(self.num_category, 32)
        self._n_input_goal = 0
        self._hidden_size = hidden_size
        rnn_input_size = self._n_input_goal + self._n_prev_action
        self.visual_encoder = ResNetEncoder(
            observation_space,
            baseplanes=resnet_baseplanes,
            ngroups=resnet_baseplanes // 2,
            make_backbone=getattr(resnet, backbone),
            normalize_visual_inputs=normalize_visual_inputs,
        )
        if not self.visual_encoder.is_blind:
            self.visual_fc = nn.Sequential(
                nn.Linear(
                    np.prod(self.visual_encoder.output_shape), hidden_size
                ),
                nn.ReLU(True),
            )
        self.state_encoder = RNNStateEncoder(
            (0 if self.is_blind else self._hidden_size) + rnn_input_size,
            self._hidden_size,
            rnn_type=rnn_type,
            num_layers=num_recurrent_layers,
        )
        self.train()

    @property
    def output_size(self):
        return self._hidden_size

    @property
    def is_blind(self):
        return self.visual_encoder.is_blind

    @property
    def num_recurrent_layers(self):
        return self.state_encoder.num_recurrent_layers

    def forward(self, observations, rnn_hidden_states, prev_actions, masks):
        B = observations['panoramic_rgb'].shape[0]
        input_list = [observations['panoramic_rgb'].permute(0,3,1,2)/255.0,
                      observations['panoramic_depth'].permute(0,3,1,2)]
        curr_obs = torch.cat(input_list,1)
        # Rescale inputs from [0,1] to [-1,1] before encoding (unlike the
        # pointnav variant, where the equivalent rescale is commented out).
        curr_feats = self.visual_encoder(curr_obs * 2 - 1)
        prev_actions = self.prev_action_embedding(
            ((prev_actions.float()+1) * masks).long().squeeze(-1)
        )
        feats = self.visual_fc(curr_feats.view(B,-1))
        x = [feats, prev_actions]
        x = torch.cat(x, dim=1)
        x, rnn_hidden_states = self.state_encoder(x, rnn_hidden_states, masks)
        return x, rnn_hidden_states
<file_sep>#!/usr/bin/env python3

# Copyright (without_goal+curr_emb) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from typing import List, Optional, Union
import numpy as np
from habitat import get_config as get_task_config
from habitat.config import Config as CN
import os

DEFAULT_CONFIG_DIR = "IL_configs/"
CONFIG_FILE_SEPARATOR = ","
# -----------------------------------------------------------------------------
# EXPERIMENT CONFIG
# -----------------------------------------------------------------------------
_C = CN()
_C.VERSION = 'base'
_C.AGENT_TASK = 'search'
_C.BASE_TASK_CONFIG_PATH = "IL_configs/tasks/pointnav.yaml"
_C.TASK_CONFIG = CN()  # task_config will be stored as a config node
_C.CMD_TRAILING_OPTS = []  # store command line options as list of strings
_C.TRAINER_NAME = "ppo"
_C.ENV_NAME = "NavRLEnv"
_C.SIMULATOR_GPU_ID = 0
_C.TORCH_GPU_ID = 0
_C.VIDEO_OPTION = ["disk", "tensorboard"]
_C.TENSORBOARD_DIR = "logs/"
_C.VIDEO_DIR = "data/video_dir"
_C.TEST_EPISODE_COUNT = 2
_C.EVAL_CKPT_PATH_DIR = "data/eval_checkpoints"  # path to ckpt or path to ckpts dir
_C.NUM_PROCESSES = 16
_C.NUM_VAL_PROCESSES = 0
_C.SENSORS = ["RGB_SENSOR", "DEPTH_SENSOR"]
_C.CHECKPOINT_FOLDER = "data/new_checkpoints"
_C.NUM_UPDATES = 10000
_C.LOG_INTERVAL = 10
_C.LOG_FILE = "train.log"
_C.CHECKPOINT_INTERVAL = 50
_C.VIS_INTERVAL = 200
_C.DIFFICULTY = 'easy'
_C.NUM_GOALS = 1
_C.REWARD_METHOD = 'progress'
_C.POLICY = 'PointNavResNetPolicy'
_C.OBS_TO_SAVE = ['panoramic_rgb', 'panoramic_depth', 'target_goal']
# -----------------------------------------------------------------------------
# EVAL CONFIG
# -----------------------------------------------------------------------------
_C.EVAL = CN()
# The split to evaluate on
_C.EVAL.SPLIT = "test"
_C.EVAL.USE_CKPT_CONFIG = True
# -----------------------------------------------------------------------------
# REINFORCEMENT LEARNING (RL) ENVIRONMENT CONFIG
# -----------------------------------------------------------------------------
_C.RL = CN()
_C.RL.REWARD_MEASURE = "distance_to_goal"
_C.RL.SUCCESS_MEASURE = "spl"
_C.RL.SUCCESS_REWARD = 10.0
_C.RL.SLACK_REWARD = -0.01
_C.RL.SUCCESS_DISTANCE = 1.0
# -----------------------------------------------------------------------------
# PROXIMAL POLICY OPTIMIZATION (PPO)
# -----------------------------------------------------------------------------
_C.RL.PPO = CN()
_C.RL.PPO.clip_param = 0.2
_C.RL.PPO.ppo_epoch = 4
_C.RL.PPO.num_mini_batch = 16
_C.RL.PPO.value_loss_coef = 0.5
_C.RL.PPO.entropy_coef = 0.01
_C.RL.PPO.lr = 7e-4
_C.RL.PPO.eps = 1e-5
_C.RL.PPO.max_grad_norm = 0.5
_C.RL.PPO.num_steps = 5
_C.RL.PPO.use_gae = True
_C.RL.PPO.use_linear_lr_decay = False
_C.RL.PPO.use_linear_clip_decay = False
_C.RL.PPO.gamma = 0.99
_C.RL.PPO.tau = 0.95
_C.RL.PPO.reward_window_size = 50
_C.RL.PPO.use_normalized_advantage = True
_C.RL.PPO.hidden_size = 512
_C.RL.PPO.rnn_type = "LSTM"
_C.RL.PPO.num_recurrent_layers = 2
_C.RL.PPO.backbone = "resnet50"
_C.RL.PPO.pretrained_weights = "data/ddppo-models/gibson-2plus-resnet50.pth"
_C.RL.PPO.pretrained = False
_C.RL.PPO.il_pretrained = False
_C.RL.PPO.pretrained_encoder = False
_C.RL.PPO.train_encoder = True
_C.RL.PPO.reset_critic = True
#----------------------------------------------------------------------------
# Base architecture config
_C.features = CN()
_C.features.visual_feature_dim = 512
_C.features.action_feature_dim = 32
_C.features.time_dim = 8
#----------------------------------------------------------------------------
# Transformer
#----------------------------------------------------------------------------
_C.attention = CN()
_C.attention.n_head = 4
# d_model/d_k/d_v = visual feature (512) + action embedding (32)
_C.attention.d_model = 512 + 32
_C.attention.d_k = 512 + 32
_C.attention.d_v = 512 + 32
_C.attention.dropout = 0.1
# for memory module
_C.memory = CN()
_C.memory.embedding_size = 512
_C.memory.memory_size = 100
_C.memory.pose_dim = 5
_C.memory.need_local_memory = False
_C.training = CN()
_C.training.batch_size = 20
_C.training.lr = 1e-4
_C.training.max_epoch = 100
_C.training.lr_decay = 0.5
_C.training.num_workers = 7
_C.saving = CN()
_C.saving.name = '0715_lgmt'
_C.saving.log_interval = 100
_C.saving.save_interval = 500
_C.saving.eval_interval = 500
_C.dataset = CN()
_C.dataset.max_demo_length = 50
# -----------------------------------------------------------------------------
# ORBSLAM2 BASELINE
# -----------------------------------------------------------------------------
_C.ORBSLAM2 = CN()
_C.ORBSLAM2.SLAM_VOCAB_PATH = "habitat_baselines/slambased/data/ORBvoc.txt"
_C.ORBSLAM2.SLAM_SETTINGS_PATH = (
    "habitat_baselines/slambased/data/mp3d3_small1k.yaml"
)
_C.ORBSLAM2.MAP_CELL_SIZE = 0.1
_C.ORBSLAM2.MAP_SIZE = 40
# Derived from the default task config's depth sensor at import time.
_C.ORBSLAM2.CAMERA_HEIGHT = get_task_config().SIMULATOR.DEPTH_SENSOR.POSITION[
    1
]
_C.ORBSLAM2.BETA = 100
_C.ORBSLAM2.H_OBSTACLE_MIN = 0.3 * _C.ORBSLAM2.CAMERA_HEIGHT
_C.ORBSLAM2.H_OBSTACLE_MAX = 1.0 * _C.ORBSLAM2.CAMERA_HEIGHT
_C.ORBSLAM2.D_OBSTACLE_MIN = 0.1
_C.ORBSLAM2.D_OBSTACLE_MAX = 4.0
_C.ORBSLAM2.PREPROCESS_MAP = True
_C.ORBSLAM2.MIN_PTS_IN_OBSTACLE = (
    get_task_config().SIMULATOR.DEPTH_SENSOR.WIDTH / 2.0
)
_C.ORBSLAM2.ANGLE_TH = float(np.deg2rad(15))
_C.ORBSLAM2.DIST_REACHED_TH = 0.15
_C.ORBSLAM2.NEXT_WAYPOINT_TH = 0.5
_C.ORBSLAM2.NUM_ACTIONS = 3
_C.ORBSLAM2.DIST_TO_STOP = 0.05
_C.ORBSLAM2.PLANNER_MAX_STEPS = 500
_C.ORBSLAM2.DEPTH_DENORM = get_task_config().SIMULATOR.DEPTH_SENSOR.MAX_DEPTH


def get_config(
    config_paths: Optional[Union[List[str], str]] = None,
    opts: Optional[list] = None,
) -> CN:
    r"""Create a unified config with default values overwritten by values from
    `config_paths` and overwritten by options from `opts`.

    Args:
        config_paths: List of config paths or string that contains comma
        separated list of config paths.
        opts: Config options (keys, values) in a list (e.g., passed from
        command line into the config. For example, `opts = ['FOO.BAR',
        0.5]`. Argument can be used for parameter sweeping or quick tests.
    """
    config = _C.clone()
    if config_paths:
        if isinstance(config_paths, str):
            if CONFIG_FILE_SEPARATOR in config_paths:
                config_paths = config_paths.split(CONFIG_FILE_SEPARATOR)
            else:
                config_paths = [config_paths]
        for config_path in config_paths:
            config.merge_from_file(config_path)
    # The nested task config is loaded from BASE_TASK_CONFIG_PATH (which the
    # merged experiment configs may have overridden).
    config.TASK_CONFIG = get_task_config(config.BASE_TASK_CONFIG_PATH)
    if opts:
        config.CMD_TRAILING_OPTS = opts
        config.merge_from_list(opts)
    # Namespace all output directories by experiment VERSION.
    config.TENSORBOARD_DIR = os.path.join(config.TENSORBOARD_DIR, config.VERSION)
    config.VIDEO_DIR = os.path.join(config.VIDEO_DIR, config.VERSION)
    config.EVAL_CKPT_PATH_DIR = os.path.join(config.EVAL_CKPT_PATH_DIR, config.VERSION)
    config.CHECKPOINT_FOLDER = os.path.join(config.CHECKPOINT_FOLDER, config.VERSION)
    config.freeze()
    return config
<file_sep>#!/usr/bin/env python3

# Copyright (without_goal+curr_emb) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import random
from typing import Type, Union
import habitat
from habitat import Config, Env, RLEnv, VectorEnv, make_dataset
from utils.visdommonitor import VisdomMonitor
from gym.wrappers.monitor import Wrapper
import os
import cv2


class EvalEnvWrapper(Wrapper):
    # Gym wrapper that records every rendered frame of an episode and dumps an
    # mp4 (via habitat_baselines' generate_video) when the episode finishes.
    def __init__(self,env,directory='.', uid='0'):
        from habitat_baselines.common.utils import generate_video
        super().__init__(env)
        self.generate_video = generate_video
        self.video_dir = directory
        self.uid = uid
        self.number_of_episodes = 1000

    def setup_embedding_network(self, visual_encoder, prev_action_embedding):
        self.env.setup_embedding_network(visual_encoder, prev_action_embedding)

    @property
    def current_episode(self):
        return self.env.current_episode

    @property
    def episode_over(self):
        return self.env.episode_over

    def reset(self):
        obs = super().reset()
        # Start the frame buffer for the new episode with the initial frame.
        self.img_frames = [self.render(mode='rgb_array')]
        return obs

    # NOTE(review): duplicate definition — identical to the one above; the later
    # def silently wins. One of the two should be removed.
    def setup_embedding_network(self, visual_encoder, prev_action_embedding):
        self.env.setup_embedding_network(visual_encoder, prev_action_embedding)

    def update_graph(self, node_list, affinity, changed_info, curr_info):
        self.env.update_graph(node_list, affinity, changed_info, curr_info)

    def draw_activated_nodes(self, activated_node_list):
        self.env.draw_activated_nodes(activated_node_list)

    def build_path_follower(self):
        self.env.build_path_follower()

    def get_best_action(self,goal=None):
        return self.env.get_best_action(goal)

    def step(self, action):
        obs, reward, done, info = super().step(action)
        self.img_frames.append(self.render(mode='rgb_array'))
        '''
        video_option: string list of "tensorboard" or "disk" or both.
        video_dir: path to target video directory.
        images: list of images to be converted to video.
        episode_id: episode id for video naming.
        checkpoint_idx: checkpoint index for video naming.
        metric_name: name of the performance metric, e.g. "spl".
        metric_value: value of metric.
        tb_writer: tensorboard writer object for uploading video.
        fps: fps for generated video.'''
        if done:
            # Build the metrics dict for the video filename depending on task type.
            # NOTE(review): if neither 'success' nor 'coverage' is in info,
            # `ep_info` is unbound below — presumably one of the two is always
            # present; TODO confirm.
            if 'success' in info.keys():
                ep_info = {'episode': info['episode'],
                           'success': info['success'],
                           'spl': info['spl'],
                           'distance_to_goal': info['distance_to_goal'],
                           'length': info['length'],
                           'collisions': info['collisions']['count'],}
            if 'coverage' in info.keys():
                ep_info = {'episode': info['episode'],
                           'total_reward': info['total_reward'],
                           'coverage': info['coverage'],
                           'length': info['length'],
                           'collisions': info['collisions']['count'],}
            # Video encoders need dimensions divisible by 16; resize if not.
            img_shape = (self.img_frames[0].shape[0],self.img_frames[0].shape[1])
            if img_shape[0]%16 != 0 or img_shape[1]%16 != 0 :
                required_img_shape = (16 * (img_shape[1]//16), 16 * (img_shape[0]//16))
            else:
                required_img_shape = (img_shape[1], img_shape[0])
            resized_img_frames = [cv2.resize(img, required_img_shape) for img in self.img_frames]
            self.img_frames = resized_img_frames
            self.generate_video(video_option=['disk'],
                                video_dir=self.video_dir,
                                images=self.img_frames,
                                episode_id=self.env.current_episode.episode_id,
                                checkpoint_idx='',
                                metrics= ep_info,
                                fps=30,
                                )
        return obs, reward, done, info


import numpy as np

def add_panoramic_camera(task_config, remain_front_rgbd=False, normalize_depth=True):
    # Rewrites a habitat task config so the agent carries a ring of cameras
    # covering 360 degrees (one camera per HFOV slice), plus synthetic
    # "panoramic" task sensors that stitch the parts together.
    num_of_camera = 360//task_config.SIMULATOR.RGB_SENSOR.HFOV
    assert isinstance(num_of_camera, int)
    # Camera yaw angles, rotated so index 0 faces forward.
    angles = [2 * np.pi * idx/ num_of_camera for idx in range(num_of_camera-1,-1,-1)]
    half = num_of_camera//2
    angles = angles[half:] + angles[:half]
    sensors = []
    use_semantic = 'PANORAMIC_SEMANTIC_SENSOR' in task_config.TASK.SENSORS
    use_depth = 'PANORAMIC_DEPTH_SENSOR' in task_config.TASK.SENSORS
    for camera_idx in range(num_of_camera):
        curr_angle = angles[camera_idx]
        # Wrap to (-pi, pi]; 3.14 used as an approximate pi threshold.
        if curr_angle > 3.14:
            curr_angle -= 2 * np.pi
        new_camera_config = task_config.SIMULATOR.RGB_SENSOR.clone()
        new_camera_config.TYPE = "PanoramicPartRGBSensor"
        new_camera_config.ORIENTATION = [0, curr_angle, 0]
        new_camera_config.ANGLE = "{}".format(camera_idx)
        task_config.SIMULATOR.update({'RGB_SENSOR_{}'.format(camera_idx): new_camera_config})
        sensors.append('RGB_SENSOR_{}'.format(camera_idx))
        if use_depth:
            new_depth_camera_config = task_config.SIMULATOR.DEPTH_SENSOR.clone()
            new_depth_camera_config.TYPE = "PanoramicPartDepthSensor"
            new_depth_camera_config.ORIENTATION = [0, curr_angle, 0]
            new_depth_camera_config.ANGLE = "{}".format(camera_idx)
            new_depth_camera_config.NORMALIZE_DEPTH = normalize_depth
            task_config.SIMULATOR.update({'DEPTH_SENSOR_{}'.format(camera_idx): new_depth_camera_config})
            sensors.append('DEPTH_SENSOR_{}'.format(camera_idx))
        if use_semantic:
            new_semantic_camera_config = task_config.SIMULATOR.SEMANTIC_SENSOR.clone()
            new_semantic_camera_config.TYPE = "PanoramicPartSemanticSensor"
            new_semantic_camera_config.ORIENTATION = [0, curr_angle, 0]
            new_semantic_camera_config.ANGLE = "{}".format(camera_idx)
            task_config.SIMULATOR.update({'SEMANTIC_SENSOR_{}'.format(camera_idx): new_semantic_camera_config})
            sensors.append('SEMANTIC_SENSOR_{}'.format(camera_idx))
    if remain_front_rgbd:
        # Optionally keep a conventional forward-facing 256x256 RGB-D pair.
        task_config.SIMULATOR.RGB_SENSOR.WIDTH = 256
        task_config.SIMULATOR.RGB_SENSOR.HEIGHT = 256
        task_config.SIMULATOR.RGB_SENSOR.HFOV = 90
        task_config.SIMULATOR.RGB_SENSOR.POSITION = [0,1.25,0]
        task_config.SIMULATOR.DEPTH_SENSOR.WIDTH = 256
        task_config.SIMULATOR.DEPTH_SENSOR.HEIGHT = 256
        task_config.SIMULATOR.DEPTH_SENSOR.HFOV = 90
        task_config.SIMULATOR.DEPTH_SENSOR.POSITION = [0, 1.25, 0]
        task_config.SIMULATOR.DEPTH_SENSOR.MIN_DEPTH = 0.0
        task_config.SIMULATOR.DEPTH_SENSOR.MAX_DEPTH = 10.0
        task_config.SIMULATOR.DEPTH_SENSOR.NORMALIZE_DEPTH = True
        sensors.extend(['RGB_SENSOR', 'DEPTH_SENSOR'])
    task_config.SIMULATOR.AGENT_0.SENSORS = sensors
    # Task-level stitched sensors (width = 4 x per-camera height by convention).
    task_config.TASK.PANORAMIC_SENSOR = habitat.Config()
    task_config.TASK.PANORAMIC_SENSOR.TYPE = 'PanoramicRGBSensor'
    task_config.TASK.PANORAMIC_SENSOR.WIDTH = task_config.SIMULATOR.RGB_SENSOR.HEIGHT * 4
    task_config.TASK.PANORAMIC_SENSOR.HEIGHT = task_config.SIMULATOR.RGB_SENSOR.HEIGHT
    task_config.TASK.PANORAMIC_SENSOR.NUM_CAMERA = num_of_camera
    if use_depth:
        task_config.TASK.PANORAMIC_DEPTH_SENSOR = task_config.SIMULATOR.DEPTH_SENSOR.clone()
        task_config.TASK.PANORAMIC_DEPTH_SENSOR.TYPE = 'PanoramicDepthSensor'
        task_config.TASK.PANORAMIC_DEPTH_SENSOR.WIDTH = task_config.SIMULATOR.DEPTH_SENSOR.HEIGHT * 4
        task_config.TASK.PANORAMIC_DEPTH_SENSOR.HEIGHT = task_config.SIMULATOR.DEPTH_SENSOR.HEIGHT
        task_config.TASK.PANORAMIC_DEPTH_SENSOR.NUM_CAMERA = num_of_camera
    if use_semantic:
        task_config.TASK.PANORAMIC_SEMANTIC_SENSOR = habitat.Config()
        task_config.TASK.PANORAMIC_SEMANTIC_SENSOR.TYPE = 'PanoramicSemanticSensor'
        task_config.TASK.PANORAMIC_SEMANTIC_SENSOR.WIDTH = task_config.SIMULATOR.SEMANTIC_SENSOR.HEIGHT * 4
        task_config.TASK.PANORAMIC_SEMANTIC_SENSOR.HEIGHT = task_config.SIMULATOR.SEMANTIC_SENSOR.HEIGHT
        task_config.TASK.PANORAMIC_SEMANTIC_SENSOR.NUM_CAMERA = num_of_camera
    task_config.TASK.CUSTOM_VISTARGET_SENSOR = habitat.Config()
    task_config.TASK.CUSTOM_VISTARGET_SENSOR.TYPE = 'CustomVisTargetSensor'
    task_config.TASK.CUSTOM_VISTARGET_SENSOR.NUM_CAMERA = num_of_camera
    task_config.TASK.CUSTOM_VISTARGET_SENSOR.WIDTH = task_config.SIMULATOR.RGB_SENSOR.HEIGHT * 4
    task_config.TASK.CUSTOM_VISTARGET_SENSOR.HEIGHT = task_config.SIMULATOR.RGB_SENSOR.HEIGHT
    # Without an explicit STOP action, success is measured by proximity alone.
    if "STOP" not in task_config.TASK.POSSIBLE_ACTIONS:
        task_config.TASK.SUCCESS.TYPE = "Success_woSTOP"
        task_config.TASK.SUCCESS.SUCCESS_DISTANCE = task_config.TASK.SUCCESS_DISTANCE
    return task_config


def make_env_fn(
    config: Config, env_class: Type[Union[Env, RLEnv]], rank: int, kwargs
) -> Union[Env, RLEnv]:
    # Factory passed to habitat.VectorEnv: builds one env, seeds it by rank, and
    # wraps it in a video monitor (Visdom for training, disk mp4s for eval).
    print('make-env')
    env = env_class(config=config)
    env.seed(rank)
    if kwargs['run_type'] == 'train':
        env = VisdomMonitor(env,
                            directory = config.VIDEO_DIR,
                            video_callable = lambda x : x % config.VIS_INTERVAL == 0,
                            uid = str(rank)
                            )
    else:
        env = EvalEnvWrapper(env,
                             directory=config.VIDEO_DIR,
                             uid=str(rank)
                             )
    return env


def construct_envs(config, env_class, mode='vectorenv', make_env_fn=make_env_fn, run_type='train', no_val=False):
    # Builds NUM_PROCESSES training envs plus NUM_VAL_PROCESSES validation envs,
    # splitting the available scenes round-robin across processes.
    num_processes, num_val_processes = config.NUM_PROCESSES, config.NUM_VAL_PROCESSES
    total_num_processes = num_processes + num_val_processes
    if no_val:
        num_val_processes = 0
    configs = []
    env_classes = [env_class for _ in range(total_num_processes)]
    # for debug!
    # config.defrost()
    # print('***!!!!!!!!!!!!!!!!**************debug code not deleted')
    # config.TASK_CONFIG.DATASET.CONTENT_SCENES = ['S9hNv5qa7GM','B6ByNegPMKs']
    # config.freeze()
    # Dataset paths are resolved relative to the installed habitat package.
    habitat_api_path = os.path.join(os.path.dirname(habitat.__file__), '../')
    config.defrost()
    config.TASK_CONFIG.DATASET.SCENES_DIR = os.path.join(habitat_api_path, config.TASK_CONFIG.DATASET.SCENES_DIR)
    config.TASK_CONFIG.DATASET.DATA_PATH = os.path.join(habitat_api_path, config.TASK_CONFIG.DATASET.DATA_PATH)
    config.freeze()
    eval_config = config.clone()
    eval_config.defrost()
    eval_config.TASK_CONFIG.DATASET.SPLIT = 'val'
    eval_config.freeze()
    dataset = make_dataset(config.TASK_CONFIG.DATASET.TYPE)
    training_scenes = config.TASK_CONFIG.DATASET.CONTENT_SCENES
    if "*" in config.TASK_CONFIG.DATASET.CONTENT_SCENES:
        training_scenes = dataset.get_scenes_to_load(config.TASK_CONFIG.DATASET)
        eval_scenes = dataset.get_scenes_to_load(eval_config.TASK_CONFIG.DATASET)
    else:
        # Hard-coded fallback eval scene when scenes are listed explicitly.
        eval_scenes = ['EU6Fwq7SyZv']
    if num_processes > 1:
        if len(training_scenes) == 0:
            raise RuntimeError(
                "No scenes to load, multiple process logic relies on being able to split scenes uniquely between processes"
            )
        if len(training_scenes) < num_processes:
            raise RuntimeError(
                "reduce the number of processes as there "
                "aren't enough number of scenes"
            )
        random.shuffle(training_scenes)
    # Round-robin scene assignment: process i gets scenes i, i+P, i+2P, ...
    scene_splits = [[] for _ in range(num_processes)]
    for idx, scene in enumerate(training_scenes):
        scene_splits[idx % len(scene_splits)].append(scene)
    eval_scene_splits = [[] for _ in range(num_val_processes)]
    if num_val_processes > 0 :
        for idx, scene in enumerate(eval_scenes):
            eval_scene_splits[idx % len(eval_scene_splits)].append(scene)
    else:
        eval_scenes = []
    scene_splits += eval_scene_splits
    print('Total Process %d = train %d + eval %d '%(total_num_processes, num_processes, num_val_processes))
    for i, s in enumerate(scene_splits):
        if i < num_processes:
            print('train_proc %d :'%i, s)
        else:
            print('eval_proc %d :' % i, s)
    assert sum(map(len, scene_splits)) == len(training_scenes+eval_scenes)
    for i in range(total_num_processes):
        proc_config = config.clone()
        proc_config.defrost()
        task_config = proc_config.TASK_CONFIG
        task_config.DATASET.SPLIT = 'train' if i < num_processes else 'val'
        if len(training_scenes) > 0:
            task_config.DATASET.CONTENT_SCENES = scene_splits[i]
        #task_config = add_panoramic_camera(task_config)
        task_config.SIMULATOR.HABITAT_SIM_V0.GPU_DEVICE_ID = (
            config.SIMULATOR_GPU_ID
        )
        proc_config.freeze()
        configs.append(proc_config)
    if mode == 'vectorenv':
        envs = habitat.VectorEnv(
            make_env_fn=make_env_fn,
            env_fn_args=tuple(
                tuple(zip(configs, env_classes, range(total_num_processes),
                          [{'run_type':run_type}]*total_num_processes))
            ),
        )
    else:
        # Single (non-vectorized) environment, e.g. for debugging.
        envs = make_env_fn(configs[0] ,env_class, 0, { 'run_type': run_type})
    return envs
<file_sep>import os
import cv2
import joblib
import matplotlib.pyplot as plt

# Script: re-renders saved demonstration frames against the simulator and
# rewrites any stale rgb/depth arrays in place.
DATA_DIR = '/media/obin/5d368da0-d601-490b-b5d8-6122946470b8/DATA/vistarget_demo2/'
train_data_list = [os.path.join(DATA_DIR+'train/medium',x) for x in os.listdir(DATA_DIR+'train/medium')]
val_data_list = [os.path.join(DATA_DIR+'val/medium',x) for x in os.listdir(DATA_DIR+'val/medium')]
import time
from typing import Any, Dict, Iterator, List, Optional, Tuple, Type, Union
import gym
import numpy as np
from gym.spaces.dict_space import Dict as SpaceDict
from habitat.config import Config
from habitat.datasets import make_dataset
from habitat.sims import make_sim
from habitat.tasks import make_task
import os
# os.environ['CUDA_VISIBLE_DEVICES'] = "9"
from habitat_sim.utils.common import quat_to_coeffs
import quaternion as q
import habitat_sim
import habitat
from gym.spaces.dict_space import Dict as SpaceDict
from gym.spaces.box import Box
from tqdm import tqdm
import pickle

SPLIT = 'train'
if not os.path.exists(DATA_DIR):
    os.mkdir(DATA_DIR)
import joblib

# Episode distance thresholds (meters) used elsewhere in the pipeline.
MAX_DIST = np.Inf
MIN_DIST = 1.5
NEAR_DIST_TH = 1.5
MIDDLE_DIST_TH = 3.0
MEDIUM_MAX_FAR_DIST = 5.0
import numpy as np

def add_panoramic_camera(task_config):
    # Data-collection variant: three fixed cameras (front, +-120 degrees) whose
    # views are concatenated into a pseudo-panorama by DataCollectEnv.process_obs.
    task_config.SIMULATOR.RGB_SENSOR_LEFT = task_config.SIMULATOR.RGB_SENSOR.clone()
    task_config.SIMULATOR.RGB_SENSOR_LEFT.TYPE = "PanoramicPartRGBSensor"
    task_config.SIMULATOR.RGB_SENSOR_LEFT.ORIENTATION = [0, 2 / 3 * np.pi, 0]
    task_config.SIMULATOR.RGB_SENSOR_LEFT.ANGLE = "left"
    task_config.SIMULATOR.RGB_SENSOR_RIGHT = task_config.SIMULATOR.RGB_SENSOR.clone()
    task_config.SIMULATOR.RGB_SENSOR_RIGHT.TYPE = "PanoramicPartRGBSensor"
    task_config.SIMULATOR.RGB_SENSOR_RIGHT.ORIENTATION = [0, -2 / 3 * np.pi, 0]
    task_config.SIMULATOR.RGB_SENSOR_RIGHT.ANGLE = "right"
    task_config.SIMULATOR.AGENT_0.SENSORS = ['RGB_SENSOR', 'RGB_SENSOR_LEFT', 'RGB_SENSOR_RIGHT']
    task_config.SIMULATOR.DEPTH_SENSOR_LEFT = task_config.SIMULATOR.DEPTH_SENSOR.clone()
    task_config.SIMULATOR.DEPTH_SENSOR_LEFT.TYPE = "PanoramicPartDepthSensor"
    task_config.SIMULATOR.DEPTH_SENSOR_LEFT.ORIENTATION = [0, 2 / 3 * np.pi, 0]
    task_config.SIMULATOR.DEPTH_SENSOR_LEFT.ANGLE = "left"
    task_config.SIMULATOR.DEPTH_SENSOR_RIGHT = task_config.SIMULATOR.DEPTH_SENSOR.clone()
    task_config.SIMULATOR.DEPTH_SENSOR_RIGHT.TYPE = "PanoramicPartDepthSensor"
    task_config.SIMULATOR.DEPTH_SENSOR_RIGHT.ORIENTATION = [0, -2 / 3 * np.pi, 0]
    task_config.SIMULATOR.DEPTH_SENSOR_RIGHT.ANGLE = "right"
    task_config.SIMULATOR.AGENT_0.SENSORS += ['DEPTH_SENSOR', 'DEPTH_SENSOR_LEFT', 'DEPTH_SENSOR_RIGHT']
    task_config.TASK.CUSTOM_VISTARGET_SENSOR = habitat.Config()
    task_config.TASK.CUSTOM_VISTARGET_SENSOR.TYPE = 'CustomVisTargetSensor'
    task_config.TASK.PANORAMIC_SENSOR = habitat.Config()
    task_config.TASK.PANORAMIC_SENSOR.TYPE = 'PanoramicRGBSensor'
    task_config.TASK.PANORAMIC_SENSOR.WIDTH = task_config.SIMULATOR.RGB_SENSOR.WIDTH
    task_config.TASK.PANORAMIC_SENSOR.HEIGHT = task_config.SIMULATOR.RGB_SENSOR.HEIGHT
    task_config.TASK.PANORAMIC_DEPTH_SENSOR = task_config.SIMULATOR.DEPTH_SENSOR.clone()
    task_config.TASK.PANORAMIC_DEPTH_SENSOR.TYPE = 'PanoramicDepthSensor'
    task_config.TASK.PANORAMIC_DEPTH_SENSOR.WIDTH = task_config.SIMULATOR.DEPTH_SENSOR.WIDTH
    task_config.TASK.PANORAMIC_DEPTH_SENSOR.HEIGHT = task_config.SIMULATOR.DEPTH_SENSOR.HEIGHT
    if "STOP" not in task_config.TASK.POSSIBLE_ACTIONS:
        task_config.TASK.SUCCESS.TYPE = "Success_woSTOP"
        task_config.TASK.SUCCESS.SUCCESS_DISTANCE = task_config.TASK.SUCCESS_DISTANCE
    return task_config


class DataCollectEnv:
    # Minimal simulator wrapper (no task/episode machinery) used only to
    # re-render observations at recorded poses and patch stale demo data.
    def __init__(
        self, config: Config
    ) -> None:
        """Constructor

        :param config: config for the environment. Should contain id for
            simulator and ``task_name`` which are passed into ``make_sim`` and
            ``make_task``.
        :param dataset: reference to dataset for task instance level
            information. Can be defined as :py:`None` in which case
            ``_episodes`` should be populated from outside.
        """
        assert config.is_frozen(), (
            "Freeze the config before creating the "
            "environment, use config.freeze()."
        )
        self._config = config
        self._current_episode_index = None
        self._current_episode = None
        # NOTE(review): CONTENT_SCENES here is a single scene name string (set
        # by the driver loop below), not a list — the glb path format relies on that.
        self._scenes = config.DATASET.CONTENT_SCENES
        self._swap_building_every = config.ENVIRONMENT.ITERATOR_OPTIONS.MAX_SCENE_REPEAT_EPISODES
        self._current_scene_episode_idx = 0
        self._current_scene_idx = 0
        self._config.defrost()
        self._config.SIMULATOR.SCENE = os.path.join(config.DATASET.SCENES_DIR,
                                                    'mp3d/{}/{}.glb'.format(self._scenes, self._scenes))
        self._config.freeze()
        self._sim = make_sim(
            id_sim=self._config.SIMULATOR.TYPE, config=self._config.SIMULATOR
        )

    def validate_data(self, data):
        # Re-renders each recorded pose; if the stored rgb differs from the
        # fresh render, overwrite rgb and depth. Returns (changed, data).
        data_len = len(data['rgb'])
        changed = False
        for i in range(data_len):
            position = data['position'][i]
            rotation = q.from_float_array(data['rotation'][i])
            obs = self._sim.get_observations_at(position, rotation)
            new_rgb, new_depth = self.process_obs(obs)
            old_rgb, old_depth = data['rgb'][i], data['depth'][i]
            # Stored frames were saved with one pixel column cropped per side.
            if not (new_rgb[:,1:-1] == old_rgb).all():
                #past_view, future_view = data['rgb'][max(0,i-1)], data['rgb'][min(data_len-1, i+1)]
                #compare = np.concatenate([past_view, future_view],0)[:,:,[2,1,0]]
                #curr_view = np.concatenate([old_rgb, new_rgb[:,1:-1]], 0)[:,:,[2,1,0]]
                #cv2.imshow('hi', np.concatenate([curr_view, compare],1))
                #cv2.waitKey(0)
                data['rgb'][i] = new_rgb[:,1:-1]
                data['depth'][i] = new_depth[:,1:-1]
                changed = True
        return changed, data

    def process_obs(self, obs):
        # Stitch left/front/right views side by side into one wide image.
        rgb = np.concatenate([obs['rgb_left'], obs['rgb'], obs['rgb_right']], 1)
        depth = np.concatenate([obs['depth_left'], obs['depth'], obs['depth_right']], 1)
        return rgb, depth


def collect_data(config):
    # Worker entry point (run via multiprocessing.Pool): validates and rewrites
    # all demo files belonging to this config's single scene.
    # 1 env per 1 config
    # np.random.seed(config.SEED)
    scene_name = config.DATASET.CONTENT_SCENES
    env = DataCollectEnv(config)
    split = config.DATASET.SPLIT
    # NOTE(review): substring matching — a scene name contained in another
    # scene's name would match both; TODO confirm scene names are prefix-free.
    if split == 'train':
        scene_data_list = [x for x in train_data_list if scene_name in x]
    elif split == 'val':
        scene_data_list = [x for x in val_data_list if scene_name in x]
    for data_file in tqdm(scene_data_list):
        data = joblib.load(data_file)
        changed, new_data = env.validate_data(data)
        if changed:
            joblib.dump(new_data,data_file)
    env._sim.close()
    return


splits = ['val']
from IL_configs.default import get_config
import numpy as np
from multiprocessing import Pool
import cv2
from env_utils.vistarget_nav_task import CustomVisTargetSensor

# Driver: one task config per scene, farmed out to a worker pool.
for split in splits:
    config = get_config('IL_configs/base.yaml')
    configs = []
    habitat_api_path = os.path.join(os.path.dirname(habitat.__file__), '../')
    config.defrost()
    config.TASK_CONFIG.DATASET.SCENES_DIR = os.path.join(habitat_api_path, config.TASK_CONFIG.DATASET.SCENES_DIR)
    config.TASK_CONFIG.DATASET.DATA_PATH = os.path.join(habitat_api_path, config.TASK_CONFIG.DATASET.DATA_PATH)
    config.TASK_CONFIG.DATASET.SPLIT = split
    config.freeze()
    dataset = make_dataset('PointNav-v1')
    scenes = config.TASK_CONFIG.DATASET.CONTENT_SCENES
    if "*" in config.TASK_CONFIG.DATASET.CONTENT_SCENES:
        scenes = dataset.get_scenes_to_load(config.TASK_CONFIG.DATASET)
    # 17DRP5sb8fy 1LXtFkjw3qL 1pXnuDYAj8r 29hnd4uzFmX 2n8kARJN3HM 5LpN3gDmAk7 5q7pvUzZiYa 759xd9YjKW5 7y3sRwLe3Va 82sE5b5pLXE
    # 8WUmhLawc2A B6ByNegPMKs D7G3Y4RVNrH D7N2EKCX4Sj E9uDoFAP3SH EDJbREhghzL GdvgFV5R1Z5 HxpKQynjfin JF19kD82Mey JeFG25nYj2p
    # JmbYfDe2QKZ
    # Skip scenes whose demo files were modified within the last 24h (already
    # processed recently).
    valid_scenes = []
    for scene_name in scenes:
        if split == 'train':
            scene_data_list = [x for x in train_data_list if scene_name in x]
        elif split == 'val':
            scene_data_list = [x for x in val_data_list if scene_name in x]
        changed = False
        for each_data in scene_data_list:
            valid = ((time.time() - os.stat(each_data).st_mtime) / 3600) > 24
            print(scene_name, (time.time() - os.stat(each_data).st_mtime) / 3600)
            if not valid:
                changed = True
                break
        if not changed:
            valid_scenes.append(scene_name)
    scenes = valid_scenes
    for i in range(len(scenes)):
        proc_config = config.clone()
        proc_config.defrost()
        task_config = proc_config.TASK_CONFIG
        task_config.DATASET.CONTENT_SCENES = scenes[i]
        task_config = add_panoramic_camera(task_config)
        task_config.SIMULATOR.HABITAT_SIM_V0.GPU_DEVICE_ID = (
            config.SIMULATOR_GPU_ID
        )
        proc_config.freeze()
        configs.append(proc_config.TASK_CONFIG)
    # process map IL_configs
    num_thread = 7
    start = time.time()
    with Pool(num_thread) as p:
        p.map(collect_data, configs, int(len(configs) / num_thread))
    end = time.time() - start
<file_sep>
from .policy import Net, PointNavBaselinePolicy, Policy
from .ppo import PPO
from .ppo_trainer import PPOTrainer
from .ppo_trainer_memory import PPOTrainer_Memory

# NOTE(review): "RolloutStorage" is exported in __all__ but not imported here.
__all__ = ["PPO", "Policy", "RolloutStorage", "Net", "PointNavBaselinePolicy"]
<file_sep>from . import meter
import numpy as np


class ValueSummaryMeter(meter.Meter):
    # Running summary of a stream of values: mean, std (Welford-style), min, max.
    def __init__(self):
        super(ValueSummaryMeter, self).__init__()
        self.reset()
        self.val = 0

    def add(self, value, n=1):
        # NOTE(review): when n > 1, `value` is still added once to sum/var while
        # the count advances by n — presumably callers pass the batch sum or
        # always use n=1; TODO confirm intended semantics.
        self.val = value
        self.sum += value
        self.var += value * value
        self.n += n
        if self.n == 0:
            self.mean, self.std = np.nan, np.nan
        elif self.n == 1:
            self.mean = self.sum + 0.0  # This is to force a copy in torch/numpy
            self.min = self.mean + 0.0
            self.max = self.mean + 0.0
            self.std = np.inf
            self.mean_old = self.mean
            self.m_s = 0.0
        else:
            self.mean = self.mean_old + (value - n * self.mean_old) / float(self.n)
            self.m_s += (value - self.mean_old) * (value - self.mean)
            self.mean_old = self.mean
            self.std = np.sqrt(self.m_s / (self.n - 1.0))
            self.min = np.minimum(self.min, value)
            self.max = np.maximum(self.max, value)

    def value(self):
        # Returns (mean, std); both are NaN before any add().
        return self.mean, self.std

    def reset(self):
        self.n = 0
        self.sum = 0.0
        self.var = 0.0
        self.val = 0.0
        self.mean = np.nan
        self.mean_old = 0.0
        self.m_s = 0.0
        self.std = np.nan
        self.min = np.nan
        self.max = np.nan

    def __str__(self):
        # Temporarily narrows numpy print precision for a compact summary line.
        old_po = np.get_printoptions()
        np.set_printoptions(precision=3)
        res = "mean(std) {} ({}) \tmin/max {}/{}\t".format(
            *[np.array(v) for v in [self.mean, self.std, self.min, self.max]])
        np.set_printoptions(**old_po)
        return res
<file_sep>#!/usr/bin/env python3

# Copyright (without_goal+curr_emb) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
from collections import defaultdict, deque
from typing import Any, Dict, List, Optional

import numpy as np
import torch
import torch.nn as nn
import tqdm
from torch.optim.lr_scheduler import LambdaLR

from habitat import Config, logger
#from habitat.utils.visualizations.utils import observations_to_image
from utils.vis_utils import observations_to_image
from habitat_baselines.common.base_trainer import BaseRLTrainer
from habitat_baselines.common.baseline_registry import baseline_registry
#from habitat_baselines.common.env_utils import construct_envs
from env_utils.make_env_utils import construct_envs
from env_utils import *
#GraphMemoryEnv = VisTargetGraphMemEnv
from habitat_baselines.common.environments import get_env_class
from habitat_baselines.common.rollout_storage import RolloutStorage
from habitat_baselines.common.tensorboard_utils import TensorboardWriter
from habitat_baselines.common.utils import (
    batch_obs,
    generate_video,
    linear_decay,
)
from trainer.algo.ppo import PPO
from model.resnet.resnet_policy import PointNavResNetPolicy, ExploreResNetPolicy
from model.policy import *
import time

# Module-level debug switches: TIME_DEBUG enables timing printouts,
# ADD_IL toggles the imitation-learning branch (both off by default).
TIME_DEBUG = False
ADD_IL = False


def log_time(prev_time, log):
    """Print the elapsed time since ``prev_time`` with label ``log``.

    Returns the current time so callers can chain measurements:
    ``t = log_time(t, "phase name")``.
    """
    print("[TIME] ", log, time.time() - prev_time)
    return time.time()


from trainer.algo.ppo.ppo_trainer_memory import PPOTrainer_Memory
import torch.nn.functional as F


@baseline_registry.register_trainer(name="custom_ppo_memory_aux")
class PPOTrainer_Memory_aux(PPOTrainer_Memory):
    # Trainer variant that additionally logs the policy's auxiliary-head
    # predictions each rollout step (see _collect_rollout_step below).
    r"""Trainer class for PPO algorithm
    Paper: https://arxiv.org/abs/1707.06347.
""" supported_tasks = ["Nav-v0"] def _collect_rollout_step( self, rollouts, current_episode_reward, running_episode_stats ): pth_time = 0.0 env_time = 0.0 t_sample_action = time.time() # sample actions with torch.no_grad(): ( values, actions, actions_log_probs, recurrent_hidden_states, _, preds, _ ) = self.actor_critic.act( self.last_observations, self.last_recurrent_hidden_states, self.last_prev_actions, self.last_masks, ) actions = actions.unsqueeze(1) pth_time += time.time() - t_sample_action t_step_env = time.time() pred1, pred2 = preds if pred1 is not None: have_been = F.sigmoid(pred1[:,0]).detach().cpu().numpy().tolist() else: have_been = None if pred2 is not None: pred_target_distance = F.sigmoid(pred2[:,0]).detach().cpu().numpy().tolist() else: pred_target_distance = None log_strs = [] for i in range(len(actions)): hb = have_been[i] if have_been is not None else -1 ptd = pred_target_distance[i] if pred_target_distance is not None else -1 log_str = 'have_been: %.3f pred_dist: %.3f'%(hb, ptd) log_strs.append(log_str) self.envs.call(['log_info']*len(have_been),[{'log_type':'str', 'info':log_strs[i]} for i in range(len(have_been))]) #scenes = [curr_ep.scene_id.split('/')[-2] for curr_ep in self.envs.current_episodes()]xdd/d if self.collect_mode == 'RL': k = [a[0] for a in actions.cpu().numpy()] batch, rewards, dones, infos = self.envs.step(k) #self.envs.render('human') else: k = self.last_observations['gt_action'].cpu().long().numpy().tolist() batch, rewards, dones, infos = self.il_envs.step(k) #self.il_envs.render('human') env_time += time.time() - t_step_env t_update_stats = time.time() #batch = batch_obs(observations, device=self.device) rewards = torch.tensor( rewards, dtype=torch.float, device=current_episode_reward.device ) rewards = rewards.unsqueeze(1) masks = torch.tensor( [[0.0] if done else [1.0] for done in dones], dtype=torch.float, device=current_episode_reward.device, ) if self.collect_mode == 'RL': current_episode_reward += rewards 
running_episode_stats["reward"] += (1 - masks) * current_episode_reward running_episode_stats["count"] += 1 - masks for k, v in self._extract_scalars_from_infos(infos).items(): try: v = torch.tensor( v, dtype=torch.float, device=current_episode_reward.device ).unsqueeze(1) if k not in running_episode_stats: running_episode_stats[k] = torch.zeros_like( running_episode_stats["count"] ) running_episode_stats[k] += (1 - masks) * v except: print('EEEEERRROR!!!!', masks.shape, v.shape) print('key:', k) current_episode_reward *= masks if self._static_encoder: with torch.no_grad(): pass #batch["visual_features"] = self._encoder(batch) rollouts.insert( {k: v[:self.num_processes] for k,v in batch.items()}, recurrent_hidden_states[:,:self.num_processes], actions[:self.num_processes], actions_log_probs[:self.num_processes], values[:self.num_processes], rewards[:self.num_processes], masks[:self.num_processes], ) self.last_observations = batch self.last_recurrent_hidden_states = recurrent_hidden_states#.to(self.device) self.last_prev_actions = actions self.last_masks = masks.to(self.device) pth_time += time.time() - t_update_stats return pth_time, env_time, self.num_processes <file_sep>#!/usr/bin/env python3 # Copyright (without_goal+curr_emb) Facebook, Inc. and its affiliates. # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from collections import defaultdict import torch import numpy as np import cv2 class RolloutStorage: r"""Class for storing rollout information for RL trainers. 
""" def __init__( self, num_steps, num_envs, observation_space, action_space, recurrent_hidden_state_size, num_recurrent_layers=1, OBS_LIST = [] ): self.observations = {} self.OBS_LIST = OBS_LIST for sensor in observation_space.spaces: if sensor in OBS_LIST: self.observations[sensor] = torch.zeros( num_steps + 1, num_envs, *observation_space.spaces[sensor].shape ) self.recurrent_hidden_states = torch.zeros( num_steps + 1, num_recurrent_layers, num_envs, recurrent_hidden_state_size, ) self.rewards = torch.zeros(num_steps, num_envs, 1) self.value_preds = torch.zeros(num_steps + 1, num_envs, 1) self.returns = torch.zeros(num_steps + 1, num_envs, 1) self.action_log_probs = torch.zeros(num_steps, num_envs, 1) if action_space.__class__.__name__ == "ActionSpace": action_shape = 1 else: action_shape = action_space.shape[0] self.actions = torch.zeros(num_steps, num_envs, action_shape) self.prev_actions = torch.zeros(num_steps + 1, num_envs, action_shape) if action_space.__class__.__name__ == "ActionSpace": self.actions = self.actions.long() self.prev_actions = self.prev_actions.long() self.masks = torch.zeros(num_steps + 1, num_envs, 1) self.num_steps = num_steps self.step = 0 def to(self, device): for sensor in self.observations: self.observations[sensor] = self.observations[sensor].to(device) self.recurrent_hidden_states = self.recurrent_hidden_states.to(device) self.rewards = self.rewards.to(device) self.value_preds = self.value_preds.to(device) self.returns = self.returns.to(device) self.action_log_probs = self.action_log_probs.to(device) self.actions = self.actions.to(device) self.prev_actions = self.prev_actions.to(device) self.masks = self.masks.to(device) def insert( self, observations, recurrent_hidden_states, actions, action_log_probs, value_preds, rewards, masks, ): for sensor in observations: if sensor in self.OBS_LIST: self.observations[sensor][self.step + 1].copy_( observations[sensor] ) self.recurrent_hidden_states[self.step + 1].copy_( 
recurrent_hidden_states ) self.actions[self.step].copy_(actions) self.prev_actions[self.step + 1].copy_(actions) self.action_log_probs[self.step].copy_(action_log_probs) self.value_preds[self.step].copy_(value_preds) self.rewards[self.step].copy_(rewards) self.masks[self.step + 1].copy_(masks) self.step = self.step + 1 def after_update(self): for sensor in self.observations: self.observations[sensor][0].copy_( self.observations[sensor][self.step] ) self.recurrent_hidden_states[0].copy_( self.recurrent_hidden_states[self.step] ) self.masks[0].copy_(self.masks[self.step]) self.prev_actions[0].copy_(self.prev_actions[self.step]) self.step = 0 def compute_returns(self, next_value, use_gae, gamma, tau): if use_gae: self.value_preds[self.step] = next_value gae = 0 for step in reversed(range(self.step)): delta = ( self.rewards[step] + gamma * self.value_preds[step + 1] * self.masks[step + 1] - self.value_preds[step] ) gae = delta + gamma * tau * self.masks[step + 1] * gae self.returns[step] = gae + self.value_preds[step] else: self.returns[self.step] = next_value for step in reversed(range(self.step)): self.returns[step] = ( self.returns[step + 1] * gamma * self.masks[step + 1] + self.rewards[step] ) def recurrent_generator(self, advantages, num_mini_batch): num_processes = self.rewards.size(1) assert num_processes >= num_mini_batch, ( "Trainer requires the number of processes ({}) " "to be greater than or equal to the number of " "trainer mini batches ({}).".format(num_processes, num_mini_batch) ) num_envs_per_batch = num_processes // num_mini_batch perm = torch.randperm(num_processes) for start_ind in range(0, num_processes, num_envs_per_batch): observations_batch = defaultdict(list) recurrent_hidden_states_batch = [] actions_batch = [] prev_actions_batch = [] value_preds_batch = [] return_batch = [] masks_batch = [] old_action_log_probs_batch = [] adv_targ = [] for offset in range(num_envs_per_batch): ind = perm[start_ind + offset] for sensor in self.observations: 
observations_batch[sensor].append( self.observations[sensor][: self.step, ind] ) recurrent_hidden_states_batch.append( self.recurrent_hidden_states[0, :, ind] ) actions_batch.append(self.actions[: self.step, ind]) prev_actions_batch.append(self.prev_actions[: self.step, ind]) value_preds_batch.append(self.value_preds[: self.step, ind]) return_batch.append(self.returns[: self.step, ind]) masks_batch.append(self.masks[: self.step, ind]) old_action_log_probs_batch.append( self.action_log_probs[: self.step, ind] ) if advantages is not None: adv_targ.append(advantages[: self.step, ind]) T, N = self.step, num_envs_per_batch # These are all tensors of size (T, N, -1) for sensor in observations_batch: observations_batch[sensor] = torch.stack( observations_batch[sensor], 1 ) actions_batch = torch.stack(actions_batch, 1) prev_actions_batch = torch.stack(prev_actions_batch, 1) value_preds_batch = torch.stack(value_preds_batch, 1) return_batch = torch.stack(return_batch, 1) masks_batch = torch.stack(masks_batch, 1) old_action_log_probs_batch = torch.stack( old_action_log_probs_batch, 1 ) if advantages is not None: adv_targ = torch.stack(adv_targ, 1) # States is just a (num_recurrent_layers, N, -1) tensor recurrent_hidden_states_batch = torch.stack( recurrent_hidden_states_batch, 1 ) #recurrent_hidden_states_batch = self._flatten_helper(T,N,recurrent_hidden_states_batch) # Flatten the (T, N, ...) tensors to (T * N, ...) 
for sensor in observations_batch: observations_batch[sensor] = self._flatten_helper( T, N, observations_batch[sensor] ) actions_batch = self._flatten_helper(T, N, actions_batch) prev_actions_batch = self._flatten_helper(T, N, prev_actions_batch) value_preds_batch = self._flatten_helper(T, N, value_preds_batch) return_batch = self._flatten_helper(T, N, return_batch) masks_batch = self._flatten_helper(T, N, masks_batch) old_action_log_probs_batch = self._flatten_helper( T, N, old_action_log_probs_batch ) if advantages is not None : adv_targ = self._flatten_helper(T, N, adv_targ) else: adv_targ = None yield ( observations_batch, recurrent_hidden_states_batch, actions_batch, prev_actions_batch, value_preds_batch, return_batch, masks_batch, old_action_log_probs_batch, adv_targ, ) @staticmethod def _flatten_helper(t: int, n: int, tensor: torch.Tensor) -> torch.Tensor: r"""Given a tensor of size (t, n, ..), flatten it to size (t*n, ...). Args: t: first dimension of tensor. n: second dimension of tensor. tensor: target tensor to be flattened. Returns: flattened tensor of size (t*n, ...) """ return tensor.view(t * n, *tensor.size()[2:]) class RolloutStorage_HER: r"""Class for storing rollout information for RL trainers. 
""" def __init__( self, num_steps, num_envs, observation_space, action_space, recurrent_hidden_state_size, num_recurrent_layers=1, OBS_LIST = [] ): self.observations = {} self.OBS_LIST = OBS_LIST self.num_envs = num_envs for sensor in observation_space.spaces: if sensor in OBS_LIST: self.observations[sensor] = torch.zeros( num_steps + 1, num_envs, *observation_space.spaces[sensor].shape ) self.recurrent_hidden_states = torch.zeros( num_steps + 1, num_recurrent_layers, num_envs, recurrent_hidden_state_size, ) self.rewards = torch.zeros(num_steps, num_envs, 1) self.value_preds = torch.zeros(num_steps + 1, num_envs, 1) self.returns = torch.zeros(num_steps + 1, num_envs, 1) self.action_log_probs = torch.zeros(num_steps, num_envs, 1) if action_space.__class__.__name__ == "ActionSpace": action_shape = 1 else: action_shape = action_space.shape[0] self.actions = torch.zeros(num_steps, num_envs, action_shape) self.prev_actions = torch.zeros(num_steps + 1, num_envs, action_shape) if action_space.__class__.__name__ == "ActionSpace": self.actions = self.actions.long() self.prev_actions = self.prev_actions.long() self.masks = torch.zeros(num_steps + 1, num_envs, 1) self.re_observations = {} for sensor in observation_space.spaces: if sensor in OBS_LIST: self.re_observations[sensor] = torch.zeros( (num_steps + 1) * num_envs, *observation_space.spaces[sensor].shape ) self.re_rewards = torch.zeros(num_steps * num_envs, 1) self.re_value_preds = torch.zeros((num_steps + 1)*num_envs, 1) self.re_returns = torch.zeros((num_steps + 1)*num_envs, 1) self.re_recurrent_hidden_states = torch.zeros( (num_steps + 1)*num_envs, num_recurrent_layers, recurrent_hidden_state_size, ) self.re_action_log_probs = torch.zeros((num_steps) * num_envs, 1) self.re_actions = torch.zeros(num_steps * num_envs, action_shape) self.re_prev_actions = torch.zeros((num_steps + 1)*num_envs, action_shape) self.re_masks = torch.zeros((num_steps + 1)*num_envs, action_shape) self.num_steps = num_steps self.step = 0 def 
to(self, device): for sensor in self.observations: self.observations[sensor] = self.observations[sensor].to(device) self.recurrent_hidden_states = self.recurrent_hidden_states.to(device) self.rewards = self.rewards.to(device) self.value_preds = self.value_preds.to(device) self.returns = self.returns.to(device) self.action_log_probs = self.action_log_probs.to(device) self.actions = self.actions.to(device) self.prev_actions = self.prev_actions.to(device) self.masks = self.masks.to(device) for sensor in self.re_observations: self.re_observations[sensor] = self.re_observations[sensor].to(device) self.re_rewards = self.re_rewards.to(device) self.re_value_preds = self.re_value_preds.to(device) self.re_returns = self.re_returns.to(device) self.re_action_log_probs = self.re_action_log_probs.to(device) self.re_actions = self.re_actions.to(device) self.re_prev_actions = self.re_prev_actions.to(device) self.re_masks = self.re_masks.to(device) self.re_recurrent_hidden_states = self.re_recurrent_hidden_states.to(device) def insert( self, observations, recurrent_hidden_states, actions, action_log_probs, value_preds, rewards, masks, ): for sensor in observations: if sensor in self.OBS_LIST: self.observations[sensor][self.step + 1].copy_( observations[sensor] ) self.recurrent_hidden_states[self.step + 1].copy_( recurrent_hidden_states ) self.actions[self.step].copy_(actions) self.prev_actions[self.step + 1].copy_(actions) self.action_log_probs[self.step].copy_(action_log_probs) self.value_preds[self.step].copy_(value_preds) self.rewards[self.step].copy_(rewards) self.masks[self.step + 1].copy_(masks) self.step = self.step + 1 def after_update(self): for sensor in self.observations: self.observations[sensor][0].copy_( self.observations[sensor][self.step] ) self.recurrent_hidden_states[0].copy_( self.recurrent_hidden_states[self.step] ) self.masks[0].copy_(self.masks[self.step]) self.prev_actions[0].copy_(self.prev_actions[self.step]) self.step = 0 def rearrange_rollout(self): # B, 
step # for sensor in observations: # if sensor in self.OBS_LIST: # self.observations[sensor][self.step + 1].copy_( # observations[sensor] # ) # self.recurrent_hidden_states[self.step + 1].copy_( # recurrent_hidden_states # ) # self.actions[self.step].copy_(actions) # self.prev_actions[self.step + 1].copy_(actions) # self.action_log_probs[self.step].copy_(action_log_probs) # self.value_preds[self.step].copy_(value_preds) # self.rewards[self.step].copy_(rewards) # self.masks[self.step + 1].copy_(masks) # # self.step = self.step + 1 # collect episode indices done_step, env_idx = torch.where(self.masks.squeeze(-1) == 0) episode_in_envs = [[] for _ in range(self.num_envs)] for step, b in zip(done_step, env_idx): episode_in_envs[b].append([b, step]) episodes = [] for b, eps in enumerate(episode_in_envs): num_of_episodes = len(eps) new_episodes = [] for episode_idx in range(num_of_episodes): episode_start = int(eps[episode_idx][1]) episode_end = int(eps[episode_idx + 1][1]) if episode_idx != num_of_episodes - 1 else self.step if episode_start > episode_end: print('h') new_episodes.append([b, episode_start, episode_end-1]) if num_of_episodes > 0 : episodes.extend(new_episodes) # check each episodes for ep_id, ep in enumerate(episodes): b, start_idx, end_idx = ep start_x, _, start_y = self.observations['position'][start_idx, b] end_x, _, end_y = self.observations['position'][end_idx, b] dist = torch.sqrt((abs(end_x - start_x))**2 + (abs(end_y - start_y))**2) if dist < 2.0: dists = torch.norm(self.observations['position'][start_idx:end_idx, b] - self.observations['position'][start_idx, b],dim=1) if (dists > 2.0).any() and int(dists.argmax()) >= episodes[ep_id][1]: episodes[ep_id][2] = int(dists.argmax()) else: episodes[ep_id] = None fake_step = 0 for ep_id, ep in enumerate(episodes): if ep is not None: b, start_idx, end_idx = ep if start_idx > end_idx : continue target_rgb = self.observations['panoramic_rgb'][end_idx,b] target_depth = 
self.observations['panoramic_depth'][end_idx,b] target_goal = torch.cat((target_rgb/255., target_depth),2) target_pose = self.observations['position'][end_idx, b] dists = torch.norm(self.observations['position'][start_idx:end_idx+1, b] - target_pose,dim=1) if not (dists>1.0).any(): continue try: last_idx = int(torch.where(dists > 1.0)[0].max()) + start_idx except: print('ssssss') length = last_idx - start_idx + 1 # for t in range(length): # rgb = self.observations['panoramic_rgb'][start_idx+t,b].cpu().numpy().astype(np.uint8) # end = (target_goal[:,:,:3]*255).cpu().numpy().astype(np.uint8)#self.observations['panoramic_rgb'][end_idx,b].cpu().numpy().astype(np.uint8) # cv2.imshow('a', np.concatenate([rgb,end],0)) # cv2.waitKey(0) for sensor in self.observations: if 'target_goal' in sensor: self.re_observations[sensor][fake_step:fake_step+length] = target_goal else: self.re_observations[sensor][fake_step:fake_step+length] = self.observations[sensor][start_idx:last_idx+1, b] for t in range(length): reward_t = max(dists[t] - dists[t+1], 0.0) * 0.2 - 0.01 self.re_rewards[fake_step+t] = reward_t self.re_actions[fake_step+t] = self.actions[start_idx+t,b] self.re_prev_actions[fake_step+t] = self.prev_actions[start_idx+t,b] self.re_recurrent_hidden_states[fake_step+t] = self.recurrent_hidden_states[start_idx+t,:,b] self.re_masks[fake_step+t] = 1.0 self.re_rewards[fake_step+t] = 10.0 self.re_masks[fake_step+t] = 0.0 fake_step += length self.fake_step = fake_step # from habitat.utils.visualizations.utils import append_text_to_image # for t in range(fake_step): # rgb = self.re_observations['panoramic_rgb'][t].cpu().int().numpy().astype(np.uint8) # target_goal = (self.re_observations['target_goal'][t][:, :, :3] * 255).cpu().int().numpy().astype(np.uint8) # view_img = np.concatenate([rgb, target_goal], 1) # text = 't %d: act: %d reward %.3f mask: %d'%(t, int(self.re_actions[t]), self.re_rewards[t], self.re_masks[t]) # view_img = append_text_to_image(view_img, text) # 
cv2.imshow('hi', view_img) # cv2.waitKey(0) #TODO 1 Value Prediction #TODO 2 action log probs def compute_rearranged_returns(self, agent, gamma, tau): with torch.no_grad(): hidden_states = self.re_recurrent_hidden_states[0].unsqueeze(1) for step in range(self.fake_step): last_observation = { k: v[step].unsqueeze(0) for k, v in self.re_observations.items() } value, action_log_probs, _, hidden_states, *_= agent.evaluate_actions( last_observation, hidden_states, self.re_prev_actions[step].unsqueeze(0), self.re_masks[step].unsqueeze(0), self.re_actions[step].unsqueeze(0) ) self.re_value_preds[step] = value[0].detach() self.re_action_log_probs[step] = action_log_probs[0].detach() gae = 0 for step in reversed(range(self.fake_step)): delta = ( self.re_rewards[step] + gamma * self.re_value_preds[step + 1] * self.re_masks[step + 1] - self.re_value_preds[step] ) gae = delta + gamma * tau * self.re_masks[step + 1] * gae self.re_returns[step] = gae + self.re_value_preds[step] def compute_returns(self, next_value, use_gae, gamma, tau): if use_gae: self.value_preds[self.step] = next_value gae = 0 for step in reversed(range(self.step)): delta = ( self.rewards[step] + gamma * self.value_preds[step + 1] * self.masks[step + 1] - self.value_preds[step] ) gae = delta + gamma * tau * self.masks[step + 1] * gae self.returns[step] = gae + self.value_preds[step] else: self.returns[self.step] = next_value for step in reversed(range(self.step)): self.returns[step] = ( self.returns[step + 1] * gamma * self.masks[step + 1] + self.rewards[step] ) def recurrent_generator(self, advantages, num_mini_batch): num_processes = self.rewards.size(1) assert num_processes >= num_mini_batch, ( "Trainer requires the number of processes ({}) " "to be greater than or equal to the number of " "trainer mini batches ({}).".format(num_processes, num_mini_batch) ) num_envs_per_batch = num_processes // num_mini_batch perm = torch.randperm(num_processes) for start_ind in range(0, num_processes, num_envs_per_batch): 
observations_batch = defaultdict(list) recurrent_hidden_states_batch = [] actions_batch = [] prev_actions_batch = [] value_preds_batch = [] return_batch = [] masks_batch = [] old_action_log_probs_batch = [] adv_targ = [] for offset in range(num_envs_per_batch): ind = perm[start_ind + offset] for sensor in self.observations: observations_batch[sensor].append( self.observations[sensor][: self.step, ind] ) recurrent_hidden_states_batch.append( self.recurrent_hidden_states[0, :, ind] ) actions_batch.append(self.actions[: self.step, ind]) prev_actions_batch.append(self.prev_actions[: self.step, ind]) value_preds_batch.append(self.value_preds[: self.step, ind]) return_batch.append(self.returns[: self.step, ind]) masks_batch.append(self.masks[: self.step, ind]) old_action_log_probs_batch.append( self.action_log_probs[: self.step, ind] ) if advantages is not None: adv_targ.append(advantages[: self.step, ind]) T, N = self.step, num_envs_per_batch # These are all tensors of size (T, N, -1) for sensor in observations_batch: observations_batch[sensor] = torch.stack( observations_batch[sensor], 1 ) actions_batch = torch.stack(actions_batch, 1) prev_actions_batch = torch.stack(prev_actions_batch, 1) value_preds_batch = torch.stack(value_preds_batch, 1) return_batch = torch.stack(return_batch, 1) masks_batch = torch.stack(masks_batch, 1) old_action_log_probs_batch = torch.stack( old_action_log_probs_batch, 1 ) if advantages is not None: adv_targ = torch.stack(adv_targ, 1) # States is just a (num_recurrent_layers, N, -1) tensor recurrent_hidden_states_batch = torch.stack( recurrent_hidden_states_batch, 1 ) recurrent_hidden_states_batch = self._flatten_helper(T,N,recurrent_hidden_states_batch) # Flatten the (T, N, ...) tensors to (T * N, ...) 
for sensor in observations_batch: observations_batch[sensor] = self._flatten_helper( T, N, observations_batch[sensor] ) actions_batch = self._flatten_helper(T, N, actions_batch) prev_actions_batch = self._flatten_helper(T, N, prev_actions_batch) value_preds_batch = self._flatten_helper(T, N, value_preds_batch) return_batch = self._flatten_helper(T, N, return_batch) masks_batch = self._flatten_helper(T, N, masks_batch) old_action_log_probs_batch = self._flatten_helper( T, N, old_action_log_probs_batch ) if advantages is not None : adv_targ = self._flatten_helper(T, N, adv_targ) else: adv_targ = None yield ( observations_batch, recurrent_hidden_states_batch, actions_batch, prev_actions_batch, value_preds_batch, return_batch, masks_batch, old_action_log_probs_batch, adv_targ, ) @staticmethod def _flatten_helper(t: int, n: int, tensor: torch.Tensor) -> torch.Tensor: r"""Given a tensor of size (t, n, ..), flatten it to size (t*n, ...). Args: t: first dimension of tensor. n: second dimension of tensor. tensor: target tensor to be flattened. Returns: flattened tensor of size (t*n, ...) 
""" return tensor.view(t * n, *tensor.size()[2:]) def recurrent_generator_her(self, advantages, num_mini_batch, advantages_her=None): num_processes = self.rewards.size(1) assert num_processes >= num_mini_batch, ( "Trainer requires the number of processes ({}) " "to be greater than or equal to the number of " "trainer mini batches ({}).".format(num_processes, num_mini_batch) ) num_envs_per_batch = num_processes // num_mini_batch perm = torch.randperm(num_processes) stst = [[0,start_ind] for start_ind in range(0, num_processes, num_envs_per_batch)] batch_size = self.num_steps * num_envs_per_batch for i in range(int(np.ceil(self.fake_step/batch_size))): stst.append([1,i]) for k in stst: mode, start_ind = k if mode == 0: observations_batch = defaultdict(list) recurrent_hidden_states_batch = [] actions_batch = [] prev_actions_batch = [] value_preds_batch = [] return_batch = [] masks_batch = [] old_action_log_probs_batch = [] adv_targ = [] for offset in range(num_envs_per_batch): ind = perm[start_ind + offset] for sensor in self.observations: observations_batch[sensor].append( self.observations[sensor][: self.step, ind] ) recurrent_hidden_states_batch.append( self.recurrent_hidden_states[0, :, ind] ) actions_batch.append(self.actions[: self.step, ind]) prev_actions_batch.append(self.prev_actions[: self.step, ind]) value_preds_batch.append(self.value_preds[: self.step, ind]) return_batch.append(self.returns[: self.step, ind]) masks_batch.append(self.masks[: self.step, ind]) old_action_log_probs_batch.append( self.action_log_probs[: self.step, ind] ) if advantages is not None: adv_targ.append(advantages[: self.step, ind]) T, N = self.step, num_envs_per_batch # These are all tensors of size (T, N, -1) for sensor in observations_batch: observations_batch[sensor] = torch.stack( observations_batch[sensor], 1 ) actions_batch = torch.stack(actions_batch, 1) prev_actions_batch = torch.stack(prev_actions_batch, 1) value_preds_batch = torch.stack(value_preds_batch, 1) return_batch 
= torch.stack(return_batch, 1) masks_batch = torch.stack(masks_batch, 1) old_action_log_probs_batch = torch.stack( old_action_log_probs_batch, 1 ) if advantages is not None: adv_targ = torch.stack(adv_targ, 1) # States is just a (num_recurrent_layers, N, -1) tensor recurrent_hidden_states_batch = torch.stack( recurrent_hidden_states_batch, 1 ) #recurrent_hidden_states_batch = self._flatten_helper(T,N,recurrent_hidden_states_batch) # Flatten the (T, N, ...) tensors to (T * N, ...) for sensor in observations_batch: observations_batch[sensor] = self._flatten_helper( T, N, observations_batch[sensor] ) actions_batch = self._flatten_helper(T, N, actions_batch) prev_actions_batch = self._flatten_helper(T, N, prev_actions_batch) value_preds_batch = self._flatten_helper(T, N, value_preds_batch) return_batch = self._flatten_helper(T, N, return_batch) masks_batch = self._flatten_helper(T, N, masks_batch) old_action_log_probs_batch = self._flatten_helper( T, N, old_action_log_probs_batch ) if advantages is not None : adv_targ = self._flatten_helper(T, N, adv_targ) else: adv_targ = None else: observations_batch = defaultdict(list) end = min((start_ind+1)*batch_size,self.fake_step) for sensor in self.re_observations: observations_batch[sensor] = self.re_observations[sensor][:end] recurrent_hidden_states_batch = self.re_recurrent_hidden_states[start_ind*batch_size].unsqueeze(1) actions_batch = self.re_actions[:end] prev_actions_batch = self.re_prev_actions[:end] value_preds_batch = self.re_value_preds[:end] return_batch = self.re_returns[:end] masks_batch = self.re_masks[:end] old_action_log_probs_batch = self.re_action_log_probs[:end] if advantages is not None: adv_targ = advantages_her[:end] # T, N = self.step, num_envs_per_batch # Flatten the (T, N, ...) tensors to (T * N, ...) 
# for sensor in observations_batch: # observations_batch[sensor] = self._flatten_helper( # T, N, observations_batch[sensor] # ) # # actions_batch = self._flatten_helper(T, N, actions_batch) # prev_actions_batch = self._flatten_helper(T, N, prev_actions_batch) # value_preds_batch = self._flatten_helper(T, N, value_preds_batch) # return_batch = self._flatten_helper(T, N, return_batch) # masks_batch = self._flatten_helper(T, N, masks_batch) # old_action_log_probs_batch = self._flatten_helper( # T, N, old_action_log_probs_batch # ) # if advantages is not None: # adv_targ = self._flatten_helper(T, N, adv_targ) # else: # adv_targ = None yield ( observations_batch, recurrent_hidden_states_batch, actions_batch, prev_actions_batch, value_preds_batch, return_batch, masks_batch, old_action_log_probs_batch, adv_targ, )<file_sep>from typing import Optional, Type from habitat import Config, Dataset from utils.vis_utils import observations_to_image, append_text_to_image import cv2 from gym.spaces.dict_space import Dict as SpaceDict from gym.spaces.box import Box from gym.spaces.discrete import Discrete from habitat.core.spaces import ActionSpace, EmptySpace import numpy as np from env_utils.habitat_env import RLEnv, MIN_DIST, MAX_DIST import habitat from habitat.utils.visualizations.utils import images_to_video from habitat.tasks.nav.shortest_path_follower import ShortestPathFollower import env_utils.noisy_actions from env_utils.noisy_actions import CustomActionSpaceConfiguration from habitat.sims.habitat_simulator.actions import HabitatSimActions from habitat_sim.utils.common import quat_to_coeffs RENDER = True NOISY = True class VisTargetNavEnv(RLEnv): metadata = {'render.modes': ['rgb_array']} def __init__(self, config: Config, dataset: Optional[Dataset] = None): self.noise = NOISY print('[VisTargetNavEnv] NOISY ACTUATION : ', self.noise) if hasattr(config,'AGENT_TASK'): self.agent_task = config.AGENT_TASK else: self.agent_task = 'search' if self.agent_task == 'homing': 
self.num_goals = 2 else: self.num_goals = config.NUM_GOALS task_config = config.TASK_CONFIG task_config.defrost() #task_config.TASK.TOP_DOWN_MAP.MAP_RESOLUTION = 1250 task_config.TASK.TOP_DOWN_MAP.DRAW_SOURCE = True task_config.TASK.TOP_DOWN_MAP.DRAW_SHORTEST_PATH = True task_config.TASK.TOP_DOWN_MAP.FOG_OF_WAR.VISIBILITY_DIST = 2.0 task_config.TASK.TOP_DOWN_MAP.FOG_OF_WAR.FOV = 360 task_config.TASK.TOP_DOWN_MAP.FOG_OF_WAR.DRAW = True task_config.TASK.TOP_DOWN_MAP.DRAW_VIEW_POINTS = False task_config.TASK.TOP_DOWN_MAP.DRAW_GOAL_POSITIONS = True task_config.TASK.TOP_DOWN_MAP.DRAW_GOAL_AABBS = False if ('GMT' in config.POLICY or 'NTS' in config.POLICY) and RENDER: task_config.TASK.TOP_DOWN_GRAPH_MAP = config.TASK_CONFIG.TASK.TOP_DOWN_MAP.clone() if 'GMT' in config.POLICY: task_config.TASK.TOP_DOWN_GRAPH_MAP.TYPE = "TopDownGraphMap" elif 'NTS' in config.POLICY: task_config.TASK.TOP_DOWN_GRAPH_MAP.TYPE = 'NTSGraphMap' task_config.TASK.TOP_DOWN_GRAPH_MAP.MAP_RESOLUTION = 4000 task_config.TASK.TOP_DOWN_GRAPH_MAP.NUM_TOPDOWN_MAP_SAMPLE_POINTS = 20000 task_config.TASK.MEASUREMENTS += ['TOP_DOWN_GRAPH_MAP'] if 'TOP_DOWN_MAP' in config.TASK_CONFIG.TASK.MEASUREMENTS: task_config.TASK.MEASUREMENTS = [k for k in task_config.TASK.MEASUREMENTS if 'TOP_DOWN_MAP' != k] task_config.SIMULATOR.ACTION_SPACE_CONFIG = "CustomActionSpaceConfiguration" task_config.TASK.POSSIBLE_ACTIONS = task_config.TASK.POSSIBLE_ACTIONS + ['NOISY_FORWARD', 'NOISY_RIGHT', 'NOISY_LEFT'] task_config.TASK.ACTIONS.NOISY_FORWARD = habitat.config.Config() task_config.TASK.ACTIONS.NOISY_FORWARD.TYPE = "NOISYFORWARD" task_config.TASK.ACTIONS.NOISY_RIGHT = habitat.config.Config() task_config.TASK.ACTIONS.NOISY_RIGHT.TYPE = "NOISYRIGHT" task_config.TASK.ACTIONS.NOISY_LEFT = habitat.config.Config() task_config.TASK.ACTIONS.NOISY_LEFT.TYPE = "NOISYLEFT" task_config.TASK.MEASUREMENTS = ['GOAL_INDEX'] + task_config.TASK.MEASUREMENTS + ['SOFT_SPL'] task_config.TASK.DISTANCE_TO_GOAL.TYPE = 'Custom_DistanceToGoal' if 
self.agent_task != 'search': task_config.TASK.SPL.TYPE = 'Custom_SPL' task_config.TASK.SOFT_SPL.TYPE = 'Custom_SoftSPL' task_config.TASK.GOAL_INDEX = task_config.TASK.SPL.clone() task_config.TASK.GOAL_INDEX.TYPE = 'GoalIndex' task_config.freeze() self.config = config self._core_env_config = config.TASK_CONFIG self._reward_measure_name = config.REWARD_METHOD self._success_measure_name = config.SUCCESS_MEASURE self.success_distance = config.SUCCESS_DISTANCE self._previous_measure = None self._previous_action = -1 self.time_t = 0 self.stuck = 0 self.follower = None if 'NOISY_FORWARD' not in HabitatSimActions: HabitatSimActions.extend_action_space("NOISY_FORWARD") HabitatSimActions.extend_action_space("NOISY_RIGHT") HabitatSimActions.extend_action_space("NOISY_LEFT") if 'STOP' in task_config.TASK.POSSIBLE_ACTIONS: self.action_dict = {0: HabitatSimActions.STOP, 1: "NOISY_FORWARD", 2: "NOISY_LEFT", 3: "NOISY_RIGHT"} else: self.action_dict = {0: "NOISY_FORWARD", 1: "NOISY_LEFT", 2: "NOISY_RIGHT"} super().__init__(self._core_env_config, dataset) act_dict = {"MOVE_FORWARD": EmptySpace(), 'TURN_LEFT': EmptySpace(), 'TURN_RIGHT': EmptySpace() } if 'STOP' in task_config.TASK.POSSIBLE_ACTIONS: act_dict.update({'STOP': EmptySpace()}) self.action_space = ActionSpace(act_dict) obs_dict = { 'panoramic_rgb': self.habitat_env._task.sensor_suite.observation_spaces.spaces['panoramic_rgb'], 'panoramic_depth': self.habitat_env._task.sensor_suite.observation_spaces.spaces['panoramic_depth'], 'target_goal': self.habitat_env._task.sensor_suite.observation_spaces.spaces['target_goal'], 'step': Box(low=np.array(0),high=np.array(500), dtype=np.float32), 'prev_act': Box(low=np.array(-1), high=np.array(self.action_space.n), dtype=np.int32), 'gt_action': Box(low=np.array(-1), high=np.array(self.action_space.n), dtype=np.int32), 'position': Box(low=-np.Inf, high=np.Inf, shape=(3,), dtype=np.float32), 'target_pose': Box(low=-np.Inf, high=np.Inf, shape=(3,), dtype=np.float32), 'distance': 
Box(low=-np.Inf, high=np.Inf, shape=(1,), dtype=np.float32), } if 'GMT' in config.POLICY and RENDER: self.mapper = self.habitat_env.task.measurements.measures['top_down_map'] #obs_dict.update({'unexplored':Box(low=0, high=1, shape=(self.mapper.delta,), dtype=np.int32), # 'neighbors': Box(low=0, high=1, shape=(self.mapper.delta,), dtype=np.int32),}) else: self.mapper = None if 'aux' in self.config.POLICY: self.return_have_been = True self.return_target_dist_score = True obs_dict.update({'have_been': Box(low=0, high=1, shape=(1,), dtype=np.int32), 'target_dist_score': Box(low=0, high=1, shape=(1,), dtype=np.float32), }) else: self.return_have_been = False self.return_target_dist_score = False self.observation_space = SpaceDict(obs_dict) if config.DIFFICULTY == 'easy': self.habitat_env.difficulty = 'easy' self.habitat_env.MIN_DIST, self.habitat_env.MAX_DIST = 1.5, 3.0 elif config.DIFFICULTY == 'medium': self.habitat_env.difficulty = 'medium' self.habitat_env.MIN_DIST, self.habitat_env.MAX_DIST = 3.0, 5.0 elif config.DIFFICULTY == 'hard': self.habitat_env.difficulty = 'hard' self.habitat_env.MIN_DIST, self.habitat_env.MAX_DIST = 5.0, 10.0 elif config.DIFFICULTY == 'random': self.habitat_env.difficulty = 'random' self.habitat_env.MIN_DIST, self.habitat_env.MAX_DIST = 3.0, 10.0 else: raise NotImplementedError self.habitat_env._num_goals = self.num_goals self.habitat_env._agent_task = self.agent_task print('current task : %s'%(self.agent_task)) print('current difficulty %s, MIN_DIST %f, MAX_DIST %f - # goals %d'%(config.DIFFICULTY, self.habitat_env.MIN_DIST, self.habitat_env.MAX_DIST, self.habitat_env._num_goals)) self.min_measure = self.habitat_env.MAX_DIST self.reward_method = config.REWARD_METHOD if self.reward_method == 'progress': self.get_reward = self.get_progress_reward elif self.reward_method == 'milestone': self.get_reward = self.get_milestone_reward elif self.reward_method == 'coverage': self.get_reward = self.get_coverage_reward self.run_mode = 'RL' 
self.number_of_episodes = 1000 self.need_gt_action = False self.has_log_info = None def swith_run_mode(self, mode): self.run_mode = mode self.captured_episode = self.current_episode def update_graph(self, node_list, affinity, changed_info, curr_info): if self.mapper is not None: self.mapper.update_graph(node_list, affinity, changed_info, curr_info) def draw_activated_nodes(self, activated_node_list): if self.mapper is not None: self.mapper.highlight_activated_nodes(activated_node_list) def build_path_follower(self): self.follower = ShortestPathFollower(self._env.sim, 0.8, False) self.curr_goal = self.current_episode.goals[self.curr_goal_idx] def get_best_action(self, goal=None): curr_goal = goal if goal is not None else self.curr_goal.position act = self.follower.get_next_action(curr_goal) return act def get_dist(self, goal_position): dist = self.habitat_env._sim.geodesic_distance(self.current_position, goal_position) return dist @property def curr_goal_idx(self): return self.habitat_env.get_metrics()['goal_index']['curr_goal_index'] @property def curr_goal(self): return self.current_episode.goals[self.curr_goal_idx] def reset(self): #tic = time.time() self._previous_action = -1 self.time_t = 0 observations = super().reset() #tt = time.time() #self.curr_goal = self.current_episode.goals[self.curr_goal_idx] self.num_goals = len(self.current_episode.goals) self._previous_measure = self.get_dist(self.curr_goal.position) #print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%get metric', time.time() - tt) self.info = None#self.get_info(observations) self.total_reward = 0 self.progress = 0 self.stuck = 0 self.min_measure = self.habitat_env.MAX_DIST self.prev_coverage = 0 if self.need_gt_action: if hasattr(self.habitat_env._sim,'habitat_config'): sim_scene = self.habitat_env._sim.habitat_config.SCENE else: sim_scene = self.habitat_env._sim.config.SCENE if self.follower is None or sim_scene != self.follower._current_scene: 
self.build_path_follower()#print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%reset time', time.time()-tic) observations.update({'gt_action': self.get_best_action(self.current_episode.goals[0].position) - 1}) self.positions = [self.current_position] self.obs = self.process_obs(observations) self.has_log_info = None if self.agent_task != 'search': self.log_success = [0.0 for _ in range(self.num_goals)] self.log_spl = [0.0 for _ in range(self.num_goals)] self.log_softspl = [0.0 for _ in range(self.num_goals)] self.curr_stage = 'search' return self.obs @property def scene_name(self): if hasattr(self.habitat_env._sim, 'habitat_config'): sim_scene = self.habitat_env._sim.habitat_config.SCENE else: sim_scene = self.habitat_env._sim.config.SCENE return sim_scene def process_obs(self, obs): copy_from_obs = ['target_goal', 'panoramic_rgb', 'panoramic_depth', 'rgb', 'depth'] obs_dict = { 'step': self.time_t, 'position': self.current_position, 'target_pose': self.curr_goal.position, 'distance': self.get_dist(self.curr_goal.position)} for key in copy_from_obs: if key in obs.keys(): if key == 'target_goal': obs_dict.update({key: obs[key][self.curr_goal_idx]}) else: obs_dict.update({key: obs[key]}) if self.need_gt_action: obs_dict.update(obs['gt_action']) if hasattr(self,'unexp'): obs_dict.update({'unexplored': self.unexp.astype(np.float32), 'neighbors': self.neighbor.astype(np.float32),}) if self.return_have_been: if len(self.positions) < 10: have_been = 0 else: dists = np.linalg.norm(np.array(self.positions) - self.current_position, axis=1) far = np.where(dists > 1.0)[0] near = np.where(dists[:-10] < 1.0)[0] if len(far) > 0 and len(near) > 0 and (near < far.max()).any(): have_been = 1 else: have_been = 0 obs_dict.update({'have_been': np.array([have_been])}) if self.return_target_dist_score: target_dist_score = np.maximum(1-obs_dict['distance']/2.,0.0) obs_dict.update({'target_dist_score': np.array([target_dist_score])}) return obs_dict def step(self, action): if 
isinstance(action, int): action = {'action': action} self._previous_action = action if NOISY: action = {'action':self.action_dict[action['action']]} if self.agent_task != 'search' and 'STOP' in self.action_space.spaces and action['action'] == 0: dist = self.get_dist(self.curr_goal.position) print(dist, self.success_distance) if dist <= self.success_distance: self.log_success[self.curr_goal_idx] = 1.0 self.log_spl[self.curr_goal_idx] = self.habitat_env.task.measurements.get_metrics()['spl'] self.log_softspl[self.curr_goal_idx] = self.habitat_env.task.measurements.get_metrics()['softspl'] all_done = self.habitat_env.task.measurements.measures['goal_index'].increase_goal_index() state = self.habitat_env.sim.get_agent_state() obs = self.habitat_env._sim.get_observations_at(state.position, state.rotation) obs.update(self.habitat_env.task.sensor_suite.get_observations( observations=obs, episode=self.habitat_env.current_episode, action=action, task=self.habitat_env.task, )) if all_done: done = True reward = self.config.SUCCESS_REWARD else: done = False reward = 0 else: obs, reward, done, self.info = super().step(action) else: obs, reward, done, self.info = super().step(action) self.time_t += 1 self.info['length'] = self.time_t * done self.info['episode'] = int(self.current_episode.episode_id) self.info['distance_to_goal'] = self._previous_measure self.info['step'] = self.time_t if self.need_gt_action: best_action = self.get_best_action(self.curr_goal.position) gt_action = best_action - 1 if best_action is not None else 0 obs.update({'gt_action': gt_action}) self.positions.append(self.current_position) self.obs = self.process_obs(obs) self.total_reward += reward if self._episode_success(): done = True if self.agent_task != 'search': self.info.update({'success':self.log_success, 'spl': self.log_spl, 'softspl': self.log_softspl}) return self.obs, reward, done, self.info def get_reward_range(self): return ( self.config.SLACK_REWARD - 1.0, self.config.SUCCESS_REWARD + 1.0, ) 
def get_progress_reward(self, observations): reward = self.config.SLACK_REWARD current_measure = self.get_dist(self.curr_goal.position) # absolute decrease on measure self.move = self._previous_measure - current_measure #print(self.move) if abs(self.move) < 0.01: self.stuck += 1 else: self.stuck = 0 self.progress = max(self.move,0.0) * 0.2 reward += self.progress self._previous_measure = current_measure if self._episode_success(): reward += self.config.SUCCESS_REWARD * self._env.get_metrics()['spl'] #if self._part_success(): # reward += self.config.SUCCESS_REWARD return reward def get_milestone_reward(self, observations): reward = self.config.SLACK_REWARD current_measure = self.get_dist(self.curr_goal.position) # absolute decrease on measure self.move = self.min_measure - current_measure if abs(self.move) < 0.01: self.stuck += 1 else: self.stuck = 0 self.progress = max(self.move,0.0) reward += self.progress self.min_measure = min(self.min_measure, current_measure) if self._episode_success(): reward += self.config.SUCCESS_REWARD * self._env.get_metrics()['spl'] return reward def _episode_success(self): if self.num_goals == 1: return self._env.get_metrics()['success'] else: return self.log_success[-1] #all_done = self.curr_goal_idx == self.num_goals - 1 #close = self.get_dist(self.curr_goal.position) < self.success_distance #return all_done and close def _part_success(self): close = self.get_dist(self.curr_goal.position) < self.success_distance return close def get_success(self): return self._episode_success() def get_done(self, observations): done = False if self._env.episode_over or self._episode_success(): done = True if self.stuck > 100 : done = True return done def get_info(self, observations): info = self.habitat_env.get_metrics() return info @property def current_position(self): return self.habitat_env.sim.get_agent_state().position def get_episode_over(self): return self._env.episode_over def get_agent_state(self): return 
self.habitat_env.sim.get_agent_state() def get_curr_goal_index(self): return self.curr_goal_idx def log_info(self, log_type='str', info=None): self.has_log_info = {'type': log_type, 'info': info} def render(self, mode='rgb'): info = self.get_info(None) if self.info is None else self.info img = observations_to_image(self.obs, info, mode='panoramic') str_action = 'NN' if 'STOP' not in self.habitat_env.task.actions: action_list = ["MF", 'TL', 'TR'] else: action_list = ["STOP", "MF", 'TL', 'TR'] if self._previous_action != -1: str_action = action_list[self._previous_action['action']] dist = self.get_dist(self.curr_goal.position) txt = 't: %03d, r: %.2f ,dist: %.2f, stuck: %d a: %s '%(self.time_t,self.total_reward, dist, self.stuck, str_action) if self.has_log_info is not None: if self.has_log_info['type'] == 'str': txt += ' ' + self.has_log_info['info'] elif self.return_have_been: txt += ' ' if hasattr(self.mapper, 'node_list'): if self.mapper.node_list is None: txt += ' node : NNNN' txt += ' curr : NNNN' else: num_node = len(self.mapper.node_list) txt += ' node : %03d' % (num_node) curr_info = self.mapper.curr_info if 'curr_node' in curr_info.keys(): txt += ' curr: %02d,'%(curr_info['curr_node']) if 'goal_prob' in curr_info.keys(): txt += ' goal %.3f'%(curr_info['goal_prob']) img = append_text_to_image(img, txt) if mode == 'rgb' or mode == 'rgb_array': return img elif mode == 'human': cv2.imshow('render', img[:,:,::-1]) cv2.waitKey(1) return img return super().render(mode) def get_coverage_reward(self, observations): top_down_map = self.habitat_env.get_metrics()['top_down_map'] fow = top_down_map["fog_of_war_mask"] self.map_size = (top_down_map['map'] != 0).sum() self.curr_coverage = np.sum(fow) new_pixel = self.curr_coverage - self.prev_coverage reward = np.clip(new_pixel, 0, 50) / 1000 # 0 ~ 0.1 self.prev_coverage = self.curr_coverage reward += self.config.SLACK_REWARD current_measure = self.get_dist(self.curr_goal.position) # absolute decrease on measure self.move 
= self._previous_measure - current_measure #print(self.move) if abs(self.move) < 0.01: self.stuck += 1 else: self.stuck = 0 self._previous_measure = current_measure if self._episode_success(): reward += self.config.SUCCESS_REWARD# * self._env.get_metrics()['spl'] return reward if __name__ == '__main__': from env_utils.make_env_utils import construct_envs from IL_configs.default import get_config import numpy as np import os import time os.environ['CUDA_VISIBLE_DEVICES'] = "0" config = get_config('IL_configs/lgmt.yaml') config.defrost() config.DIFFICULTY = 'hard' config.TASK_CONFIG.ENVIRONMENT.ITERATOR_OPTIONS.MAX_SCENE_REPEAT_EPISODES = 10 config.NUM_PROCESSES = 1 config.NUM_VAL_PROCESSES = 0 config.freeze() action_list = config.TASK_CONFIG.TASK.POSSIBLE_ACTIONS env = construct_envs(config, eval(config.ENV_NAME), mode='single') obs = env.reset() img = env.render('rgb') done = False fps = {} reset_time = {} scene = env.env.current_episode.scene_id.split('/')[-2] fps[scene] = [] reset_time[scene] = [] imgs = [img] while True: action = env.env.get_best_action() #action = env.action_space.sample() #action = action_list.index(action['action']) img = env.render('rgb') #imgs.append(img) cv2.imshow('render', img[:, :, [2, 1, 0]]) key = cv2.waitKey(0) # # if key == ord('s'): action = 1 # elif key == ord('w'): action = 0 # elif key == ord('a'): action = 1 # elif key == ord('d'): action = 2 # elif key == ord('r'): # done = True # print(done) # elif key == ord('q'): # break # else: # action = env.action_space.sample() if done: tic = time.time() obs = env.reset() toc = time.time() scene = env.env.current_episode.scene_id.split('/')[-2] fps[scene] = [] reset_time[scene] = [] reset_time[scene].append(toc-tic) done = False #shapes = [img.shape for img in imgs] #for i,im in enumerate(imgs): # if im.shape != imgs[0].shape: # imgs[i] = cv2.resize(im,dsize=(imgs[0].shape[1],imgs[0].shape[0])) #images_to_video(imgs, output_dir='.', video_name='%s_%s_no_topmap'% (scene, 
env.current_episode.episode_id), fps=60) imgs = [] #print(env.current_episode) else: tic = time.time() obs, reward, done, info = env.step({'action':action}) toc = time.time() fps[scene].append(toc-tic) #break if len(fps) > 20: break print('===============================') print('FPS : ', [(key, np.array(fps_list).mean()) for key, fps_list in fps.items()]) print('Reset : ', [(key, np.array(reset_list).mean()) for key, reset_list in reset_time.items()]) <file_sep>import torch from tnt.torchnet.logger import VisdomPlotLogger, VisdomLogger, VisdomTextLogger from . import MeterLogger from .. import meter as Meter import numpy as np class VisdomMeterLogger(MeterLogger): ''' A class to package and visualize meters. Args: server: The uri of the Visdom server env: Visdom environment to log to. port: Port of the visdom server. title: The title of the MeterLogger. This will be used as a prefix for all plots. plotstylecombined: Whether to plot train/test curves in the same window. ''' def __init__(self, server="localhost", env='main', port=8097, title="DNN", nclass=21, plotstylecombined=True, log_to_filename=None, loggers=('train', 'val')): super(VisdomMeterLogger, self).__init__() self.server = server self.env = env self.port = port self.title = title self.logger = {} for logger in loggers: self.logger[logger] = {} self.plotstylecombined = plotstylecombined self.log_to_filename = log_to_filename self.metername_to_ptype = {} def __addlogger(self, meter, ptype): first_logger = None for logger_name, logger in self.logger.items(): if ptype == 'stacked_line': opts = {'title': '{} {} ({})'.format(self.title, meter, logger_name), 'fillarea': True, 'legend': self.meter[logger_name][meter].keys} logger[meter] = VisdomPlotLogger(ptype, env=self.env, server=self.server, port=self.port, log_to_filename=self.log_to_filename, opts=opts) elif ptype == 'line': if self.plotstylecombined: if first_logger is None: opts = {'title': self.title + ' ' + meter} logger[meter] = 
VisdomPlotLogger(ptype, env=self.env, server=self.server, port=self.port, log_to_filename=self.log_to_filename, opts=opts) else: logger[meter] = self.logger[first_logger][meter] else: opts = {'title': self.title + '{} '.format(logger_name) + meter} logger[meter] = VisdomPlotLogger(ptype, env=self.env, server=self.server, port=self.port, log_to_filename=self.log_to_filename, opts=opts) elif ptype == 'heatmap': names = list(range(self.nclass)) opts = {'title': '{} {} {}'.format(self.title, logger_name, meter) + meter, 'columnnames': names, 'rownames': names} logger[meter] = VisdomLogger('heatmap', env=self.env, server=self.server, port=self.port, log_to_filename=self.log_to_filename, opts=opts) # >>> # Image example # >>> img_to_use = skimage.data.coffee().swapaxes(0,2).swapaxes(1,2) # >>> image_logger = VisdomLogger('image') # >>> image_logger.log(img_to_use) elif ptype == 'image': opts = {'title': '{} {} {}'.format(self.title, logger_name, meter) + meter} logger[meter] = VisdomLogger(ptype, env=self.env, server=self.server, port=self.port, log_to_filename=self.log_to_filename, opts=opts) # >>> # Histogram example # >>> hist_data = np.random.rand(10000) # >>> hist_logger = VisdomLogger('histogram', , opts=dict(title='Random!', numbins=20)) # >>> hist_logger.log(hist_data) elif ptype == 'histogram': opts = {'title': '{} {} {}'.format(self.title, logger_name, meter) + meter, 'numbins': 20} logger[meter] = VisdomLogger(ptype, env=self.env, server=self.server, port=self.port, log_to_filename=self.log_to_filename, opts=opts) elif ptype == 'text': opts = {'title': '{} {} {}'.format(self.title, logger_name, meter) + meter} logger[meter] = VisdomTextLogger(env=self.env, server=self.server, port=self.port, log_to_filename=self.log_to_filename, update_type='APPEND', opts=opts) elif ptype =='video': opts = {'title': '{} {} {}'.format(self.title, logger_name, meter) + meter} logger[meter] = VisdomLogger(ptype, env=self.env, server=self.server, port=self.port, 
log_to_filename=self.log_to_filename, opts=opts) def add_meter(self, meter_name, meter, ptype=None): super(VisdomMeterLogger, self).add_meter(meter_name, meter) # for key in self.writer.keys(): # self.metername_to_ptype[meter] = ptype self.metername_to_ptype[meter_name] = ptype if ptype: # Use `ptype` for manually selecting the plot type self.__addlogger(meter_name, ptype) elif isinstance(meter, meter.ClassErrorMeter): self.__addlogger(meter_name, 'line') elif isinstance(meter, meter.mAPMeter): self.__addlogger(meter_name, 'line') elif isinstance(meter, meter.AUCMeter): self.__addlogger(meter_name, 'line') elif isinstance(meter, meter.ConfusionMeter): self.__addlogger(meter_name, 'heatmap') elif isinstance(meter, meter.MSEMeter): self.__addlogger(meter_name, 'line') elif type(meter) == meter.ValueSummaryMeter: self.__addlogger(meter_name, 'line') elif isinstance(meter, meter.MultiValueSummaryMeter): self.__addlogger(meter_name, 'stacked_line') else: raise NotImplementedError("Unknown meter type (and pytpe): {} ({})".format(type(meter), ptype)) def reset_meter(self, iepoch, mode='train', meterlist=None): self.timer.reset() for meter_name, meter in self.meter[mode].items(): if meterlist is not None and meter_name not in meterlist: continue val = self.meter[mode][meter_name].value() val = val[0] if isinstance(val, (list, tuple)) else val should_reset_and_continue = False if isinstance(val, str) or val is None: should_reset_and_continue = (val is None) elif isinstance(val, np.ndarray): should_reset_and_continue = np.isnan(val).any() elif isinstance(val, torch.Tensor): should_reset_and_continue = torch.isnan(val).any() else: should_reset_and_continue = np.isnan(val) if should_reset_and_continue: self.meter[mode][meter_name].reset() continue if isinstance(meter, meter.ConfusionMeter) or self.metername_to_ptype[meter_name] in ['histogram', 'image', 'text']: self.logger[mode][meter_name].log(val) elif isinstance(self.meter[mode][meter_name], meter.MultiValueSummaryMeter): 
self.logger[mode][meter_name].log( np.array([iepoch]*len(val)), np.array(np.cumsum(val)), name=mode) # keep mean elif meter_name in self.metername_to_ptype and self.metername_to_ptype[meter_name] == 'video': self.logger[mode][meter_name].log(videofile=val) # video takes in a string else: self.logger[mode][meter_name].log(iepoch, val, name=mode) self.meter[mode][meter_name].reset() <file_sep> import os import textwrap from typing import Dict, List, Optional, Tuple import imageio import numpy as np import tqdm from habitat.core.logging import logger from habitat.core.utils import try_cv2_import from habitat.utils.visualizations import maps import cv2 ''' top_down_map = maps.draw_agent( image=top_down_map, agent_center_coord=map_agent_pos, agent_rotation=info["top_down_map"]["agent_angle"], agent_radius_px=5, ) ''' def clip_map_birdseye_view(image, clip_size, pixel_pose): half_clip_size = clip_size//2 delta_x = pixel_pose[0] - half_clip_size delta_y = pixel_pose[1] - half_clip_size min_x = max(delta_x, 0) max_x = min(pixel_pose[0] + half_clip_size, image.shape[0]) min_y = max(delta_y, 0) max_y = min(pixel_pose[1] + half_clip_size, image.shape[1]) return_image = np.zeros([clip_size, clip_size, 3],dtype=np.uint8) cliped_image = image[min_x:max_x, min_y:max_y] start_x = max(-delta_x,0) start_y = max(-delta_y,0) try: return_image[start_x:start_x+cliped_image.shape[0],start_y:start_y+cliped_image.shape[1]] = cliped_image except: print('image shape ', image.shape, 'min_x', min_x,'max_x', max_x,'min_y',min_y,'max_y',max_y, 'return_image.shape',return_image.shape, 'cliped', cliped_image.shape, 'start_x,y', start_x, start_y) return return_image #python train_bc.py --run-type eval --gpu 7 --exp-config IL_configs/gmt.yaml --resume data/eval/GMT_fix_90.pt --version-name GMT_FIX # python train_bc.py --run-type eval --exp-config IL_configs/gmt.yaml --gpu 8 --data-dir /disk4/obin/vistarget_demo_gibson/train/ --stop --version-name 0725_gmt_goalemb --diff hard --resume 
data/eval/GMT_goalemb_85.pt def append_text_to_image(image: np.ndarray, text: str, font_size=0.5, font_line=cv2.LINE_AA): r""" Appends text underneath an image of size (height, width, channels). The returned image has white text on a black background. Uses textwrap to split long text into multiple lines. Args: image: the image to put text underneath text: a string to display Returns: A new image with text inserted underneath the input image """ h, w, c = image.shape font_thickness = 1 font = cv2.FONT_HERSHEY_SIMPLEX blank_image = np.zeros(image.shape, dtype=np.uint8) linetype = font_line if font_line is not None else cv2.LINE_8 char_size = cv2.getTextSize(" ", font, font_size, font_thickness)[0] wrapped_text = textwrap.wrap(text, width=int(w / char_size[0])) y = 0 for line in wrapped_text: textsize = cv2.getTextSize(line, font, font_size, font_thickness)[0] y += textsize[1] + 10 if y % 2 == 1 : y += 1 x = 10 cv2.putText( blank_image, line, (x, y), font, font_size, (255, 255, 255), font_thickness, lineType=linetype, ) text_image = blank_image[0 : y + 10, 0:w] final = np.concatenate((image, text_image), axis=0) return final def draw_collision(view: np.ndarray, alpha: float = 0.4) -> np.ndarray: r"""Draw translucent red strips on the border of input view to indicate a collision has taken place. Args: view: input view of size HxWx3 in RGB order. alpha: Opacity of red collision strip. 1 is completely non-transparent. Returns: A view with collision effect drawn. """ strip_width = view.shape[0] // 20 mask = np.ones(view.shape) mask[strip_width:-strip_width, strip_width:-strip_width] = 0 mask = mask == 1 view[mask] = (alpha * np.array([255, 0, 0]) + (1.0 - alpha) * view)[mask] return view def observations_to_image(observation: Dict, info: Dict, mode='panoramic', local_imgs=None, clip=None, center_agent = True) -> np.ndarray: r"""Generate image of single frame from observation and info returned from a single environment step(). 
Args: observation: observation returned from an environment step(). info: info returned from an environment step(). Returns: generated image of a single frame. """ egocentric_view = [] if "rgb" in observation and mode != 'panoramic': observation_size = observation["rgb"].shape[0] rgb = observation["rgb"] if not isinstance(rgb, np.ndarray): rgb = rgb.cpu().numpy() egocentric_view.append(rgb) elif "panoramic_rgb" in observation and mode == 'panoramic': observation_size = observation["panoramic_rgb"].shape[0] rgb = observation["panoramic_rgb"] if not isinstance(rgb, np.ndarray): rgb = rgb.cpu().numpy() egocentric_view.append(rgb) #egocentric_view.append((observation['panoramic_depth'].repeat(3,2)*255).astype(np.uint8)) if "objectgoal" in observation: goal_rgb = (observation['objectgoal'][:,:,:3]*255) if not isinstance(goal_rgb, np.ndarray): goal_rgb = goal_rgb.cpu().numpy() egocentric_view.append(goal_rgb.astype(np.uint8)) if "target_goal" in observation: goal_rgb = (observation['target_goal'][:,:,:3]*255) if not isinstance(goal_rgb, np.ndarray): goal_rgb = goal_rgb.cpu().numpy() egocentric_view.append(goal_rgb.astype(np.uint8)) if local_imgs is not None: blank_img = np.zeros_like(rgb) small_imgs = [] for i in range(4): if i >= len(local_imgs): small_shape = [int(blank_img.shape[0]/2), int(blank_img.shape[1]/2)] small_img = np.zeros([small_shape[0],small_shape[1],3]) else: small_img = cv2.resize(local_imgs[i], dsize=None, fx=0.5, fy=0.5) small_imgs.append(small_img) small_img = np.concatenate([np.concatenate([small_imgs[0],small_imgs[1]],0),np.concatenate([small_imgs[2],small_imgs[3]],0)],1) egocentric_view.append(small_img.astype(np.uint8)) if mode == 'panoramic': egocentric_view = np.concatenate(egocentric_view, axis=0) else: egocentric_view = np.concatenate(egocentric_view, axis=1) if "collisions" in info and info['collisions'] is not None: if info["collisions"]["is_collision"]: egocentric_view = draw_collision(egocentric_view) #if 'direction_img' in 
observation.keys(): # int_img = (observation['direction_img'] * 255).astype(np.uint8) # egocentric_view = np.concatenate([int_img, egocentric_view],0) frame = egocentric_view top_down_height = frame.shape[0] if info is not None and "top_down_map" in info: if info['top_down_map'] is not None: #trg = observation['neighbors'] #targetable = np.zeros([21, frame.shape[1], 3], dtype=np.uint8) #d = 21 #for i in range(12): # targetable[:, d * i:d * (i + 1)] = [0, 255 * trg[i], 0] #frame = np.concatenate([targetable, frame], 0) ''' exp = info["top_down_map"]['unexplored'] trg = observation['neighbors'] delta = len(exp) kk = np.arange(delta).tolist() ss = kk[int(delta/2):] + kk[:int(delta/2)] targetable = np.zeros([22, frame.shape[1], 3],dtype=np.uint8) explorable = np.zeros([22, frame.shape[1], 3],dtype=np.uint8) exp,trg = exp[ss], trg[ss] d = int(frame.shape[1]/delta) for i in range(delta): explorable[:,d*i:d*(i+1)] = 255 * exp[i] targetable[:, d * i:d * (i + 1)] = [0, 255 * trg[i], 0] frame = np.concatenate([targetable, explorable,frame],0) ''' top_down_height = frame.shape[0] top_down_map = info["top_down_map"]["map"] top_down_map = maps.colorize_topdown_map( top_down_map, info["top_down_map"]["fog_of_war_mask"] ) map_agent_pos = info["top_down_map"]["agent_map_coord"] top_down_map = maps.draw_agent( image=top_down_map, agent_center_coord=map_agent_pos, agent_rotation=info["top_down_map"]["agent_angle"], agent_radius_px=5, ) clip_size = 80#200 if local_imgs is None else 80 if clip is not None: clip_size = clip if center_agent: top_down_map = clip_map_birdseye_view(top_down_map, clip_size, map_agent_pos) if top_down_map.shape[0] > top_down_map.shape[1]: top_down_map = np.rot90(top_down_map, 1) # scale top down map to align with rgb view old_h, old_w, _ = top_down_map.shape top_down_width = int(float(top_down_height) / old_h * old_w) # cv2 resize (dsize is width first) top_down_map = cv2.resize( top_down_map, (top_down_height, top_down_height), 
interpolation=cv2.INTER_CUBIC, ) else: top_down_map = np.zeros([top_down_height, top_down_height, 3],dtype=np.uint8) frame = np.concatenate((frame, top_down_map), axis=1) return frame <file_sep>import time def log_time(prev_time=None, log='', return_time=False): if prev_time is not None : delta = time.time() - prev_time print("[TIME] ", log, delta) if return_time: return time.time(), delta else: return time.time()<file_sep>import copy import numpy as np import torch import warnings from .. import meter as Meter class MeterLogger(object): ''' A class to package and print meters. ''' def __init__(self, modes=('train', 'val')): self.modes = list(modes) self.meter = {} self.logger = {} for mode in modes: self.meter[mode] = {} self.logger[mode] = {} self.timer = Meter.TimeMeter(None) self.metername_to_ptype = {} def _ver2tensor(self, target): target_mat = torch.zeros(target.shape[0], self.nclass) for i, j in enumerate(target): target_mat[i][j] = 1 return target_mat def _to_tensor(self, var): if isinstance(var, torch.autograd.Variable): var = var.data if not torch.is_tensor(var): if isinstance(var, np.ndarray): var = torch.from_numpy(var) else: var = torch.Tensor([var]) return var def add_meter(self, meter_name, meter): for mode in self.modes: self.meter[mode][meter_name] = copy.deepcopy(meter) def update_meter(self, output, target=None, meters={'METER_NAME_HERE'}, phase='train'): for meter_name in meters: assert meter_name in self.meter[phase].keys(), "Unrecognized meter name {}".format(meter_name) meter = self.meter[phase][meter_name] if not isinstance(meter, Meter.SingletonMeter): # Singleton stores as-is output = self._to_tensor(output) if target is not None: target = self._to_tensor(target) if isinstance(meter, Meter.APMeter) or \ isinstance(meter, Meter.mAPMeter) or \ isinstance(meter, Meter.ConfusionMeter): assert target is not None, "Meter '{}' of type {} requires 'target' is not None".format(meter_name, type(meter)) target_th = self._ver2tensor(target) 
meter.add(output, target_th) elif target is not None: meter.add(output, target) else: meter.add(output) def peek_meter(self, phase='train'): '''Returns a dict of all meters and their values.''' result = {} for key in self.meter[phase].keys(): val = self.meter[phase][key].value() val = val[0] if isinstance(val, (list, tuple)) else val result[key] = val return result def reset_meter(self, meterlist=None, phase='train'): self.timer.reset() if meterlist is None: meterlist = self.meter[phase].keys() for meter_name in meterlist: assert meter_name in self.meter[phase].keys(), "Unrecognized meter name {}".format(meter_name) self.meter[phase][meter_name].reset() def print_meter(self, mode, iepoch, ibatch=1, totalbatch=1, meterlist=None): assert mode in self.modes, f'{mode} is not any phase' pstr = "%s:\t[%d][%d/%d] \t" tval = [] tval.extend([mode, iepoch, ibatch, totalbatch]) if meterlist is None: meterlist = self.meter[mode].keys() for meter_name in meterlist: assert meter_name in self.meter[mode].keys(), "Unrecognized meter name {}".format(meter_name) meter = self.meter[mode][meter_name] if isinstance(meter, Meter.ConfusionMeter): continue if isinstance(meter, Meter.ClassErrorMeter): # Printing for this could be significantly improved pstr += "Acc@1 %.2f%% \t Acc@" + str(self.topk) + " %.2f%% \t" tval.extend([self.meter[mode][meter_name].value()[0], self.meter[mode][meter_name].value()[1]]) elif isinstance(meter, Meter.mAPMeter): pstr += "mAP %.3f \t" tval.extend([self.meter[mode][meter_name].value()]) elif isinstance(meter, Meter.AUCMeter): pstr += "AUC %.3f \t" tval.extend([self.meter[mode][meter_name].value()]) elif isinstance(meter, Meter.ValueSummaryMeter) or isinstance(meter, meter.MSEMeter): pstr += "{}: {}".format(meter_name, self.meter[mode][meter_name]) elif isinstance(meter, Meter.MultiValueSummaryMeter): pstr += "{}: {}".format(meter_name, self.meter[mode][meter_name]) else: warnings.warn("Can't print meter '{}' of type {}".format(meter_name, type(meter)), 
RuntimeWarning) pstr += " %.2fs/its\t" tval.extend([self.timer.value()]) print(pstr % tuple(tval))<file_sep>import torch.utils.data as data import numpy as np import joblib import torch import time import cv2 class HabitatDemoDataset(data.Dataset): def __init__(self, cfg, data_list, transform=None): self.data_list = data_list self.img_size = (64,252) self.action_dim = 3 self.max_demo_length = cfg.dataset.max_demo_length def __getitem__(self, index): return self.pull_image(index) def __len__(self): return len(self.data_list) def get_dist(self, demo_position): return np.linalg.norm(demo_position[-1] - demo_position[0], ord=2) def pull_image(self, index): demo_data = joblib.load(self.data_list[index]) scene = self.data_list[index].split('/')[-1].split('_')[0] start_pose = [demo_data['position'][0], demo_data['rotation'][0]] start_idx = np.random.randint(0, 10)#len(demo_data['position']) if len(demo_data['position']) <= start_idx: start_idx = 0 dists = np.linalg.norm(np.array(demo_data['position']) - demo_data['position'][start_idx], axis=1) dists[:start_idx] = 1000 close = np.max(np.where(dists < 2.0)[0]) aux_info = {'have_been': None, 'distance': None} try: demo_rgb = np.array(demo_data['rgb'][start_idx:close], dtype=np.float32) demo_length = np.minimum(len(demo_rgb), self.max_demo_length) demo_dep = np.array(demo_data['depth'], dtype=np.float32)[start_idx:close] demo_rgb_out = np.zeros([self.max_demo_length, demo_rgb.shape[1], demo_rgb.shape[2],3]) demo_rgb_out[:demo_length] = demo_rgb[:demo_length] demo_dep_out = np.zeros([self.max_demo_length, demo_rgb.shape[1], demo_rgb.shape[2],1]) demo_dep_out[:demo_length] = demo_dep[:demo_length] demo_data['action'] = np.array(demo_data['action'], dtype=np.int8) demo_act = demo_data['action'] demo_act_out = np.ones([self.max_demo_length]) * (-100) demo_act_out[:demo_length] = demo_act[start_idx:start_idx + demo_length] return_tensor = [torch.from_numpy(demo_rgb_out).float(), torch.from_numpy(demo_dep_out).float(), 
torch.from_numpy(demo_act_out).float(), scene, start_pose] except: start_idx = 0 close = len(demo_data['rgb']) demo_rgb = np.array(demo_data['rgb'][start_idx:close], dtype=np.float32) demo_length = np.minimum(len(demo_rgb), self.max_demo_length) demo_dep = np.array(demo_data['depth'], dtype=np.float32)[start_idx:close] demo_rgb_out = np.zeros([self.max_demo_length, demo_rgb.shape[1], demo_rgb.shape[2],3]) demo_rgb_out[:demo_length] = demo_rgb[:demo_length] demo_dep_out = np.zeros([self.max_demo_length, demo_rgb.shape[1], demo_rgb.shape[2],1]) demo_dep_out[:demo_length] = demo_dep[:demo_length] demo_data['action'] = np.array(demo_data['action'], dtype=np.int8) demo_act = demo_data['action'] demo_act_out = np.ones([self.max_demo_length]) * (-100) demo_act_out[:demo_length] = demo_act[start_idx:start_idx + demo_length] return_tensor = [torch.from_numpy(demo_rgb_out).float(), torch.from_numpy(demo_dep_out).float(), torch.from_numpy(demo_act_out).float(), scene, start_pose, aux_info] return return_tensor import time class HabitatDemoMultiGoalDataset(data.Dataset): def __init__(self, cfg, data_list, include_stop = False): self.data_list = data_list self.img_size = (64, 256) self.action_dim = 4 if include_stop else 3 self.max_demo_length = 100#cfg.dataset.max_demo_length self.single_goal = False def __getitem__(self, index): return self.pull_image(index) def __len__(self): return len(self.data_list) def get_dist(self, demo_position): return np.linalg.norm(demo_position[-1] - demo_position[0], ord=2) def pull_image(self, index): s = time.time() demo_data = joblib.load(self.data_list[index]) #print('file loading time:', time.time() - s) scene = self.data_list[index].split('/')[-1].split('_')[0] start_pose = [demo_data['position'][0], demo_data['rotation'][0]] target_indices = np.array(demo_data['target_idx']) aux_info = {'have_been': None, 'distance': None} # There are two random indices to sample # 1. when to start making graph # 2. 
when to start predict action # goals = np.unique(target_indices) #starts = [np.where(target_indices == g)[0].min() for g in goals] orig_data_len = len(demo_data['position']) if self.single_goal: try_num = 0 while True: start_idx = np.random.randint(orig_data_len - 10) if orig_data_len > 10 else orig_data_len start_target_idx = target_indices[start_idx] end_idx = np.where(target_indices == start_target_idx)[0][-1] if end_idx - start_idx >= 10 : break try_num += 1 if try_num > 1000: end_idx = -1 break else: start_idx = np.random.randint(orig_data_len - 10) if orig_data_len > 10 else orig_data_len end_idx = - 1 demo_rgb = np.array(demo_data['rgb'][start_idx:end_idx], dtype=np.float32) demo_length = np.minimum(len(demo_rgb), self.max_demo_length) demo_dep = np.array(demo_data['depth'][start_idx:end_idx], dtype=np.float32) demo_rgb_out = np.zeros([self.max_demo_length, demo_rgb.shape[1], demo_rgb.shape[2], 3]) demo_rgb_out[:demo_length] = demo_rgb[:demo_length] demo_dep_out = np.zeros([self.max_demo_length, demo_rgb.shape[1], demo_rgb.shape[2], 1]) demo_dep_out[:demo_length] = demo_dep[:demo_length] demo_act = np.array(demo_data['action'][start_idx:start_idx+demo_length], dtype=np.int8) demo_act_out = np.ones([self.max_demo_length]) * (-100) # print(demo_act.shape, demo_length, 'rgbd', len(demo_data['rgb']), len(demo_data['depth']), len(demo_data['action'])) demo_act_out[:demo_length] = demo_act -1 if self.action_dim == 3 else demo_act targets = np.zeros([self.max_demo_length]) targets[:demo_length] = demo_data['target_idx'][start_idx:start_idx+demo_length] target_img = np.zeros([5, demo_rgb.shape[1], demo_rgb.shape[2] , 4]) target_num = len(demo_data['target_img']) target_img[:target_num] = np.array(demo_data['target_img'])#[start_idx:start_idx+demo_length]) positions = np.zeros([self.max_demo_length,3]) positions[:demo_length] = demo_data['position'][start_idx:start_idx+demo_length] have_been = np.zeros([self.max_demo_length]) for idx, pos_t in 
enumerate(positions[:demo_length]): if idx == 0: have_been[idx] = 0 else: dists = np.linalg.norm(positions[:demo_length][:idx-1] - pos_t, axis=1) if len(dists) > 10: far = np.where(dists > 1.0)[0] near = np.where(dists[:-10] < 1.0)[0] if len(far) > 0 and len(near) > 0 and (near < far.max()).any(): have_been[idx] = 1 else: have_been[idx] = 0 else: have_been[idx] = 0 aux_info['distance'] = np.zeros([self.max_demo_length]) try: distances = np.maximum(1-np.array(demo_data['distance_to_goal'][start_idx:start_idx+demo_length])/2.,0.0) except: print(self.data_list[index]) aux_info['distance'][:demo_length] = torch.from_numpy(distances).float() aux_info['have_been'] = torch.from_numpy(have_been).float() return_tensor = [torch.from_numpy(demo_rgb_out).float(), torch.from_numpy(demo_dep_out).float(), torch.from_numpy(demo_act_out).float(), torch.from_numpy(positions), targets, torch.from_numpy(target_img).float(), scene, start_pose, aux_info] return return_tensor if __name__ == '__main__': import sys from IL_configs.default import get_config from dataset.demo_dataset import HabitatDemoMultiGoalDataset import os from tqdm import tqdm cfg = get_config('IL_configs/gmt.yaml') data_list = [os.path.join('/disk4/obin/vistarget_demo_gibson/train/random',x) for x in os.listdir('/disk4/obin/vistarget_demo_gibson/train/random')] data_list += [os.path.join('/disk4/obin/vistarget_demo_gibson/val/random',x) for x in os.listdir('/disk4/obin/vistarget_demo_gibson/val/random')] dataset = HabitatDemoMultiGoalDataset(cfg, data_list, True) print(len(dataset)) for idx in tqdm(range(len(dataset))): if 'Angiola_019_env0.dat.gz' in dataset.data_list[idx]: dataset.pull_image(idx)
cca2f5b645e4e395fc2486496bf93d31d88c6b12
[ "Markdown", "Python", "Shell" ]
20
Python
obin-hero/habitat_rl
9b46f8b77b1eb73f11dc7ea8fadf0bc383960b5f
a1d455cab027f0ebc0a69a200b431cee5406dcb5
refs/heads/master
<repo_name>dealer2/SpringHibernate<file_sep>/pom.xml <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>ua.ucra.example.SpringHibernate</groupId> <artifactId>SpringHibernate</artifactId> <version>0.0.1-SNAPSHOT</version> <packaging>war</packaging> <properties> <org.springframework.version>4.0.3.RELEASE</org.springframework.version> <org.slf4j.version>1.7.5</org.slf4j.version> <org.hibernate.version>4.3.4.Final</org.hibernate.version> <mysql.version>5.1.28</mysql.version> <jstl.version>1.2</jstl.version> </properties> <dependencies> <!-- JSF --> <dependency> <groupId>com.sun.faces</groupId> <artifactId>jsf-impl</artifactId> <version>2.2.4</version> </dependency> <dependency> <groupId>com.sun.faces</groupId> <artifactId>jsf-api</artifactId> <version>2.2.4</version> </dependency> <dependency> <groupId>com.sun.facelets</groupId> <artifactId>jsf-facelets</artifactId> <version>1.1.14</version> </dependency> <!-- PrimeFaces --> <dependency> <groupId>org.primefaces</groupId> <artifactId>primefaces</artifactId> <version>4.0</version> </dependency> <!-- Spring --> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-core</artifactId> <version>${org.springframework.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-context</artifactId> <version>${org.springframework.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-tx</artifactId> <version>${org.springframework.version}</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-web</artifactId> <version>${org.springframework.version}</version> </dependency> <dependency> <groupId>org.springframework.webflow</groupId> <artifactId>spring-webflow</artifactId> 
<version>2.3.3.RELEASE</version> </dependency> <dependency> <groupId>org.springframework.webflow</groupId> <artifactId>spring-faces</artifactId> <version>2.3.3.RELEASE</version> </dependency> <dependency> <groupId>org.springframework.data</groupId> <artifactId>spring-data-jpa</artifactId> <version>1.5.2.RELEASE</version> </dependency> <dependency> <groupId>org.springframework</groupId> <artifactId>spring-expression</artifactId> <version>${org.springframework.version}</version> </dependency> <!-- Spring Security--> <dependency> <groupId>org.springframework.security</groupId> <artifactId>spring-security-web</artifactId> <version>3.2.4.RELEASE</version> </dependency> <dependency> <groupId>org.springframework.security</groupId> <artifactId>spring-security-config</artifactId> <version>3.2.4.RELEASE</version> </dependency> <!-- Hibernate --> <dependency> <groupId>org.hibernate</groupId> <artifactId>hibernate-core</artifactId> <version>${org.hibernate.version}</version> </dependency> <dependency> <groupId>org.hibernate</groupId> <artifactId>hibernate-entitymanager</artifactId> <version>${org.hibernate.version}</version> </dependency> <dependency> <groupId>org.hibernate</groupId> <artifactId>hibernate-validator</artifactId> <version>5.1.0.Final</version> </dependency> <!-- MySQL CONNECTOR --> <dependency> <groupId>mysql</groupId> <artifactId>mysql-connector-java</artifactId> <version>${mysql.version}</version> </dependency> <dependency> <groupId>commons-dbcp</groupId> <artifactId>commons-dbcp</artifactId> <version>1.4</version> </dependency> <!-- Logging --> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> <version>1.7.7</version> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> <version>1.7.7</version> </dependency> <dependency> <groupId>log4j</groupId> <artifactId>log4j</artifactId> <version>1.2.16</version> </dependency> <!-- JSTL (JavaServer Pages Standard Tag Library) --> <dependency> 
<groupId>javax.servlet</groupId> <artifactId>jstl</artifactId> <version>${jstl.version}</version> </dependency> <dependency> <groupId>javax.servlet</groupId> <artifactId>javax.servlet-api</artifactId> <version>3.0.1</version> <scope>provided</scope> <!-- provided mean no copy to target folder (server have realization) --> </dependency> <dependency> <groupId>javax.el</groupId> <artifactId>el-api</artifactId> <version>2.2</version> </dependency> <dependency> <groupId>xml-apis</groupId> <artifactId>xml-apis</artifactId> <version>2.0.2</version> </dependency> <dependency> <groupId>cglib</groupId> <artifactId>cglib</artifactId> <version>3.1</version> <scope>runtime</scope> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-remote-shell</artifactId> <version>1.0.0.RELEASE</version> </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-remote-shell</artifactId> <version>1.0.0.RELEASE</version> </dependency> <dependency> <groupId>net.wimpi</groupId> <artifactId>telnetd-x</artifactId> <version>2.1.1</version> </dependency> <dependency> <groupId>org.eclipse.jetty</groupId> <artifactId>jetty-jndi</artifactId> <version>8.1.14.v20131031</version> </dependency> </dependencies> <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>3.1</version> <configuration> <source>1.7</source> <target>1.7</target> </configuration> </plugin> <!-- Tell for Maven where is file web.xml --> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-war-plugin</artifactId> <version>2.4</version> <configuration> <webXml>src\main\webapp\WEB-INF\web.xml</webXml> </configuration> </plugin> </plugins> </build> </project><file_sep>/src/create table appuser.sql create table appuser ( id integer, firstName varchar(30), lastName varchar(30), userName varchar(30), password varchar(40), constraint appuser_pk primary key(id), 
constraint app_username_uk unique(userName) );<file_sep>/src/main/java/ua/com/crm/commons/dao/GenericDao.java package ua.com.crm.commons.dao; import java.io.Serializable; import java.util.List; /** * Generic interface for Data Access Object. To be extended or implemented * Contains common persist methods. * * @author dealer * */ public interface GenericDao <T, ID extends Serializable> { T save (T entity); T update (T entity); void delete (T entity); T findById (ID id); List <T> findAll (); void flush(); } <file_sep>/src/main/java/ua/com/crm/services/UserService.java package ua.com.crm.services; import ua.com.crm.entity.UserEntity; /** * Service providing service methods to work with user data and entity. * * @author dealer2 * */ public interface UserService { /** * Create User - persist to database * * @param userEntity * @return true if success */ public boolean createUser(UserEntity userEntity); }
364c9257606cbed7b3bf1d8f3681731fba9cd7d2
[ "Java", "SQL", "Maven POM" ]
4
Maven POM
dealer2/SpringHibernate
62c23d7457b633354a9f06915808d601570c7838
2adcc2d6332083dfa4e14a8793163871b77b9529
refs/heads/master
<file_sep>import { USER_LOGIN } from '~redux/actions/authActions/types'; import { USER_LOGOUT, USER_REGISTER, FORM_SEND_TRUE, FORM_SEND_FALSE, } from '../actions/authActions/types'; import { Storage } from '~/storage'; const initialState = { isFormSending: false, user: { uid: localStorage.getItem(Storage.STORAGE_UID), }, }; const authReducer = (state = initialState, { type, payload: user }) => { switch (type) { case USER_LOGIN: case USER_REGISTER: Storage.populateStorage(user.uid); return { ...state, user: { ...state.user, ...user } }; case USER_LOGOUT: Storage.clearStorage(); return { ...state, user: { uid: null } }; case FORM_SEND_TRUE: return { ...state, isFormSending: true }; case FORM_SEND_FALSE: return { ...state, isFormSending: false }; default: return state; } }; export { authReducer }; <file_sep>import React from 'react'; import './style.css'; const Center = ({ children }) => { return <main className='layout layout-center'>{children}</main>; }; export default Center; <file_sep>import React, { useState } from 'react'; import { useSelector } from 'react-redux'; const ProfileForm = ({ onSubmitHandler }) => { const displayName = useSelector( (state) => state.profile.currentProfile.displayName, ); const [form, setForm] = useState({ displayName, photoUrl: '', }); const onChangeHandler = (e) => { setForm({ ...form, [e.target.name]: e.target.value }); }; return ( <form onSubmit={onSubmitHandler.bind(this, form)}> <div className='row'> <div className='input-field col s12 m6'> <input name='displayName' value={form.displayName} onChange={onChangeHandler} id='name' type='text' className='validate' /> <label className='active' htmlFor='name'> Name </label> </div> <div className='input-field col s12 m6'> <input name='photoUrl' value={form.photoUrl} onChange={onChangeHandler} id='photoUrl' type='url' pattern='https://.*' placeholder='https://example.com' className='validate' /> <label className='active' htmlFor='photoUrl'> Photo Url </label> </div> </div> <div 
className='row'> <div className='col s12 m6'> <button className='btn' type='url'> Submit </button> </div> </div> </form> ); }; export default ProfileForm; <file_sep>import React, { useState } from 'react'; import { useSelector } from 'react-redux'; import PropTypes from 'prop-types'; import './style.css'; const Form = ({ formTitle = 'Form', onSubmitHandler, Link }) => { const isFormSending = useSelector((state) => state.auth.isFormSending); const [form, setForm] = useState({ email: '', password: '', }); const onChangeHandler = (e) => { setForm({ ...form, [e.target.name]: e.target.value, }); }; return ( <form className='auth-form z-depth-1' onSubmit={onSubmitHandler.bind(this, form)} > <h1>{formTitle}</h1> <div className='row'> <div className='input-field col s12'> <input placeholder='<EMAIL>' id='email' type='email' className='validate' name='email' value={form.email} onChange={onChangeHandler} disabled={isFormSending} /> <label htmlFor='email' className='active'> Email </label> </div> </div> <div className='row'> <div className='input-field col s12'> <input placeholder='Placeholder' id='password' type='password' className='validate' name='password' value={form.password} onChange={onChangeHandler} disabled={isFormSending} /> <label htmlFor='password' className='active'> Password </label> </div> </div> <div className='row'> <div className='inpuut-field col s12'> <button type='submit' className='btn' disabled={isFormSending}> Submit </button> </div> </div> <div className='row'> <div className='inpuut-field col s12'>{Link}</div> </div> </form> ); }; Form.propTypes = { formTitle: PropTypes.string.isRequired, onSubmitHandler: PropTypes.func.isRequired, Link: PropTypes.object, }; export default Form; <file_sep>import { combineReducers } from 'redux'; import { authReducer } from './authReducer'; import { storyReducer } from './storyReducer'; import { profileReducer } from './profileReducer'; const initialState = {}; const rootReducer = combineReducers({ init: () => 
initialState, auth: authReducer, story: storyReducer, profile: profileReducer, }); export { rootReducer }; <file_sep>import React from 'react'; import { Link } from 'react-router-dom'; import { Default, Section } from '~/layouts'; import { Card, GridList, GridItem } from '~components'; const Favorites = () => { return ( <Default> <Section> <h1>Favorites</h1> </Section> <Section> <p>Not ready yet</p> </Section> </Default> ); }; export default Favorites; <file_sep>import React from 'react'; import './style.css'; const index = ({ children }) => { return <section className='app-section'>{children}</section>; }; export default index; <file_sep>import { STORY_CREATE } from '~redux/actions/storyActions/types'; import { STORY_FETCH, STORY_LOADING_TRUE, STORY_LOADING_FALSE, STORY_SET, } from '../actions/storyActions/types'; const initialState = { isLoading: true, stories: [], story: {}, }; const storyReducer = (state = initialState, { type, payload }) => { switch (type) { case STORY_CREATE: return { ...state, stories: [...state.stories, payload] }; case STORY_FETCH: return { ...state, stories: payload }; case STORY_SET: return { ...state, story: payload, }; case STORY_LOADING_TRUE: return { ...state, isLoading: true }; case STORY_LOADING_FALSE: return { ...state, isLoading: false }; default: return state; } }; export { storyReducer }; <file_sep>export { default as Header } from './Header'; export { default as Form } from './Form'; export { default as Masonry } from './Masonry'; export { default as MasonryItem } from './Masonry/MasonryItem'; export { default as Card } from './Card'; export { default as GridList } from './GridList'; export { default as GridItem } from './GridList/GridItem'; export { default as Hero } from './Hero'; export { default as Spinner } from './Spinner'; export { default as Modal } from './Modal'; export { default as ProfileForm } from './ProfileForm'; <file_sep>export const STORY_CREATE = 'STORY/STORY_CREATE'; export const STORY_FETCH = 
'STORY/STORY_FETCH'; export const STORY_GET = 'STORY/STORY_GET'; export const STORY_SET = 'STORY/STORY_SET'; export const STORY_LOADING_TRUE = 'STORY/STORY_LOADING_TRUE'; export const STORY_LOADING_FALSE = 'STORY/STORY_LOADING_FALSE'; <file_sep>export const PROFILE_SET_NAME = 'PROFILE/PROFILE_SET_NAME'; export const PROFILE_LOAGING_TRUE = 'PROFILE/PROFILE_LOAGING_TRUE'; export const PROFILE_LOAGING_FALSE = 'PROFILE/PROFILE_LOAGING_FALSE'; export const PROFILE_GET = 'PROFILE/PROFILE_GET'; <file_sep>import React from 'react'; import { Header } from '~components'; const Default = ({ children }) => { return ( <> <Header /> <main className='layout'> <div className='row'> <div className='col s12'>{children}</div> </div> </main> </> ); }; export default Default; <file_sep>export { default as Home } from './Home'; export { default as Register } from './Auth/Register'; export { default as Login } from './Auth/Login'; export { default as Favorites } from './Favorites'; export { default as Project } from './Project'; export { default as Profile } from './Profile'; export { default as Create } from './Create'; <file_sep>import { PROFILE_SET_NAME, PROFILE_LOAGING_TRUE, PROFILE_LOAGING_FALSE, PROFILE_GET, } from '~redux/actions/profileActions/types'; import { Storage } from '~/storage'; const initialState = { isLoading: true, currentProfile: { displayName: localStorage.getItem(Storage.STORAGE_DISPLAY_NAME), }, profile: { displayName: '', }, }; const profileReducer = (state = initialState, { type, payload }) => { switch (type) { case PROFILE_SET_NAME: return { ...state, currentProfile: { ...state.currentProfile, displayName: payload }, }; case PROFILE_GET: return { ...state, profile: { ...payload } }; case PROFILE_LOAGING_TRUE: return { ...state, isLoading: true }; case PROFILE_LOAGING_FALSE: return { ...state, isLoading: false }; default: return state; } }; export { profileReducer }; <file_sep>import { db, storage } from '~firebase'; import { STORY_CREATE, STORY_FETCH, 
STORY_LOADING_TRUE, STORY_LOADING_FALSE, STORY_SET, } from './types'; export const storyCreate = (story) => async (dispatch, getState) => { let _story = {}; const user = { uid: getState().auth.user.uid, displayName: getState().profile.currentProfile.displayName, }; if (!story.image?.src || !story.image?.name) { _story.image = { name: null, src: null }; } else { const storageRef = storage.ref(`images/${story.image.name}`); await storageRef.putString(story.image.src, 'data_url'); const imageUrl = await storageRef.getDownloadURL(); _story.image = { name: story.image.name, src: imageUrl, }; } _story.user = user; _story.title = story.title; _story.body = story.body; const storyRef = await db.collection('stories').add(_story); dispatch({ type: STORY_CREATE, payload: { ..._story, id: storyRef.id } }); }; export const storyFetch = () => async (dispatch) => { try { const { docs } = await db.collection('stories').get(); const stories = docs.map((doc) => ({ ...doc.data(), id: doc.id })); dispatch({ type: STORY_FETCH, payload: stories }); dispatch(storyLoadingFalse()); } catch (error) { console.log(error); } }; export const storySet = (story) => async (dispatch) => { dispatch({ type: STORY_SET, payload: story }); }; export const storyGet = (id) => async (dispatch, getState) => { const stories = getState().story.stories; if (!stories.length) { dispatch(storyLoadingTrue()); const docRef = await db.collection('stories').doc(id); const doc = await docRef.get(); dispatch(storySet({ ...doc.data(), id: doc.id })); return dispatch(storyLoadingFalse()); } const story = stories.find((story) => story.id === id); dispatch(storySet(story)); dispatch(storyLoadingFalse()); }; export const storyLoadingTrue = () => async (dispatch) => dispatch({ type: STORY_LOADING_TRUE }); export const storyLoadingFalse = () => async (dispatch) => dispatch({ type: STORY_LOADING_FALSE }); <file_sep>import React, { useEffect } from 'react'; import M from 'materialize-css/dist/js/materialize.min.js'; const Modal 
= ({ children }) => { const classname = 'modal'; useEffect(() => { const elem = document.querySelector(`.${classname}`); M.Modal.init(elem, {}); return () => { const instanse = M.Modal.getInstance(elem); instanse.destroy(); }; }, []); return ( <div id='modal1' className={classname}> <div className='modal-content'>{children}</div> <div className='modal-footer'> <a href='#!' className='modal-close waves-effect waves-green btn-flat'> Close </a> </div> </div> ); }; export default Modal; <file_sep>const storage = () => { const STORAGE_UID = 'STORAGE_UID'; const STORAGE_DISPLAY_NAME = 'STORAGE_DISPLAY_NAME'; const populateName = (name) => localStorage.setItem(STORAGE_DISPLAY_NAME, name); const populateUID = (uid) => localStorage.setItem(STORAGE_UID, uid); const populateStorage = (uid, displayName) => { populateUID(uid); populateName(displayName); }; const clearStorage = () => { localStorage.removeItem(STORAGE_UID); localStorage.removeItem(STORAGE_DISPLAY_NAME); }; return { STORAGE_DISPLAY_NAME, STORAGE_UID, populateStorage, clearStorage, populateName, populateUID, }; }; const Storage = storage(); export { Storage }; <file_sep>import M from 'materialize-css/dist/js/materialize.min.js'; const Alert = () => { const createToast = (message) => { M.toast({ html: message }); }; return { createToast, }; }; const alert = Alert(); export { alert }; <file_sep>export const config = { apiKey: '<KEY>', authDomain: 'firestore-blog-6750f.firebaseapp.com', databaseURL: 'https://firestore-blog-6750f.firebaseio.com', projectId: 'firestore-blog-6750f', storageBucket: 'firestore-blog-6750f.appspot.com', messagingSenderId: '624114414274', appId: '1:624114414274:web:9c4ac6ce386cf0d6a7415b', }; <file_sep>import React from 'react'; import './style.css'; const Masonry = ({ children }) => { return <div className='masonry'>{children}</div>; }; export default Masonry; <file_sep>import { auth, db } from '~firebase'; import { USER_LOGIN, USER_LOGOUT, USER_REGISTER, FORM_SEND_TRUE, FORM_SEND_FALSE, } 
from './types'; import { profileSetName } from '~redux/actions/profileActions'; import { alert } from '~/alert'; export const userLogin = (email, password) => async (dispatch) => { dispatch(formSendTrue()); try { let { user } = await auth.signInWithEmailAndPassword(email, password); const displayName = user.displayName ? user.displayName : 'Friend'; user = { uid: user.uid, }; dispatch({ type: USER_LOGIN, payload: { uid: user.uid }, }); dispatch(profileSetName(displayName)); dispatch(formSendFalse()); } catch (error) { console.error(error); dispatch(formSendFalse()); alert.createToast(error.message); } }; export const userRegister = (email, password) => async (dispatch) => { dispatch(formSendTrue()); try { let { user } = await auth.createUserWithEmailAndPassword(email, password); const displayName = user.displayName ? user.displayName : 'Friend'; user = { uid: user.uid, displayName, }; const userRef = await db.doc(`users/${user.uid}`); const snapshot = await userRef.get(); if (!snapshot.exists) { await userRef.set(user); } dispatch({ type: USER_REGISTER, payload: { uid: user.uid }, }); dispatch(profileSetName(displayName)); dispatch(formSendFalse()); } catch (error) { console.error(error); dispatch(formSendFalse()); alert.createToast(error.message); } }; export const userLogout = () => async (dispatch) => { try { await auth.signOut(); dispatch({ type: USER_LOGOUT, }); dispatch(profileSetName(null)); } catch (error) { console.error(error); } }; export const formSendTrue = () => (dispatch) => dispatch({ type: FORM_SEND_TRUE }); export const formSendFalse = () => (dispatch) => dispatch({ type: FORM_SEND_FALSE }); <file_sep>import React, { useEffect } from 'react'; import { useSelector } from 'react-redux'; import { useDispatch } from 'react-redux'; import { userLogout } from '~redux/actions/authActions'; import { Link } from 'react-router-dom'; import M from 'materialize-css/dist/js/materialize.min.js'; import './style.css'; const Header = () => { const uid = 
useSelector((state) => state.auth.user.uid); const dispatch = useDispatch(); useEffect(() => { const elem = document.querySelector('#mobile'); M.Sidenav.init(elem, {}); return () => { const instance = M.Sidenav.getInstance(elem); instance.destroy(); }; }, []); return ( <header> <nav className='amber'> <div className='row'> <div className='col s12'> <div className='nav-wrapper'> <Link to='/' className='brand-logo'> Firestore React </Link> <button data-target='mobile' className='btn btn-flat sidenav-trigger' > <i className='material-icons'>menu</i> </button> <ul id='nav-mobile' className='right hide-on-med-and-down'> <li> <Link to='/create' title='Create new Story'> <i className='material-icons left'>add</i> <span>Create story</span> </Link> </li> <li> <Link to='/favorites' title='Favorites'> <i className='material-icons'>favorite_border</i> </Link> </li> <li> <Link to={`/profile/${uid}`} title='Profile'> <i className='material-icons'>person_outline</i> </Link> </li> <li> <button className='btn-flat logout' onClick={() => dispatch(userLogout())} title='Log out' > <i className='material-icons'>exit_to_app</i> </button> </li> </ul> </div> </div> </div> </nav> <ul className='sidenav' id='mobile'> <li> <Link to='/create' title='Create new Story'> <i className='material-icons left'>add</i> <span>Create story</span> </Link> </li> <li> <Link to='/favorites' title='Favorites'> <i className='material-icons left'>favorite_border</i> <span>Favorites</span> </Link> </li> <li> <Link to={`/profile/${uid}`} title='Profile'> <i className='material-icons'>person_outline</i> <span>Profile</span> </Link> </li> <li> <button className='btn-flat logout logout-mobile' onClick={() => dispatch(userLogout())} title='Log out' > <i className='material-icons'>exit_to_app</i> <span>Logout</span> </button> </li> </ul> </header> ); }; export default Header; <file_sep>export const colors = [ '#fff59d', '#ffe082', '#ffcc80', '#ffab91', '#a5d6a7', '#0277bd', ]; <file_sep>import React, { useEffect } 
from 'react'; import { useDispatch, useSelector } from 'react-redux'; import { Default, Section } from '~/layouts'; import { profileSetName, profileGet } from '~redux/actions/profileActions'; import { Modal, ProfileForm, Spinner } from '~components'; const Profile = ({ match }) => { const { profile, isLoading } = useSelector((state) => state.profile); const urlUid = match.params.uid; const uid = useSelector((state) => state.auth.user.uid); const isCurrentProfile = urlUid === uid ? true : false; const dispatch = useDispatch(); useEffect(() => { dispatch(profileGet(urlUid)); }, [dispatch, urlUid]); const formSubmitHandler = (form, e) => { e.preventDefault(); dispatch(profileSetName(form.displayName)); }; return ( <Default> {isLoading ? ( <Spinner /> ) : ( <Section> <h1>{profile.displayName}</h1> {isCurrentProfile ? ( <> <button data-target='modal1' className='btn modal-trigger'> Change Name </button> <Modal> <h2>Hello</h2> <ProfileForm onSubmitHandler={formSubmitHandler} /> </Modal> </> ) : null} </Section> )} </Default> ); }; export default Profile; <file_sep>import React from 'react'; import { useSelector } from 'react-redux'; import { Link } from 'react-router-dom'; import { Default } from '~/layouts'; import { Masonry, MasonryItem, Card, Spinner } from '~components'; const Home = () => { let stories = useSelector((state) => state.story.stories); const isLoading = useSelector((state) => state.story.isLoading); stories = stories.map((story) => ( <MasonryItem key={story.id}> <Link to={`/projects/${story.id}`}> <Card story={story} /> </Link> </MasonryItem> )); return ( <Default> {isLoading ? ( <Spinner /> ) : ( <Masonry> {stories?.length > 0 ? 
stories : 'There is no stories yet'} </Masonry> )} </Default> ); }; export default Home; <file_sep>Посмотреть можно [тут](https://wonderful-kilby-27e0a4.netlify.app/) # Что это и зачем Простое приложение для добавление, редактирования(в будущем) и просмотра постов, работы с профилями, с регистрацией и аутентификацией, с использованием Firebase. Целью было разработать приложение с уделенным вниманием к архитектуре, проектировке "бекенда" и работе с данными, общему флоу. ## Что тут есть? 1. **Redux** - потому что хранить данные и логику для этих данных в одном месте хорошо 1. **Redux thunk** - потому что ассинхронные запросы к firebase это круто 1. **React router dom** - чтобы менялись страницы 1. **React/Redux hooks** - это удобно 1. **Firebase Cloud Firestore/Authentication/Storage** - чтобы регистрировать, добавлять записи и хранить картинки. Почти как свой бекенд 1. **Javascript ES6/ES7/ES8** - стрелочные функции, async/await, деструктуризация - все самое модное 1. **Расширенные настройки webpack** - чтобы делать аллиасы и не прыгать по фолдерам при импортах 1. **Materialize.css** - чтобы было красиво(терпимо) ## Как пощупать Клонируем ветку, после в папке проекта из терминала: ### `npm install` Скачает все зависимости ### `npm start` Заведет проект на [http://localhost:3000](http://localhost:3000) ## Это не production ready проект Иначе мне бы пришлось потратить/тратить на него столько времени, сколько у меня нет. Проект в разработке This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app). 
<file_sep>import React from 'react'; const MasonryItem = ({ children }) => { return <div className='masonry-item'>{children}</div>; }; export default MasonryItem; <file_sep>import React from 'react'; import PropTypes from 'prop-types'; import './style.css'; const GridItem = ({ children }) => { return <div className='grid-item'>{children}</div>; }; GridItem.propTypes = { children: PropTypes.object.isRequired, }; export default GridItem; <file_sep>import React from 'react'; import { Switch, Route, Redirect } from 'react-router-dom'; import { Home, Register, Login, Favorites, Project, Profile, Create, } from '~pages'; const Routes = (isAuthenticated) => { if (!isAuthenticated) { return ( <Switch> <Route path='/login' exact component={Login} /> <Route path='/register' exact component={Register} /> <Redirect to='/login' /> </Switch> ); } return ( <Switch> <Route path='/' exact component={Home} /> <Route path='/favorites' exact component={Favorites} /> <Route path='/projects/:id' exact component={Project} /> <Route path='/profile/:uid' exact component={Profile} /> <Route path='/create' exact component={Create} /> <Redirect to='/' /> </Switch> ); }; export { Routes };
bedd9a873fcbfb2ac4c86a48f555649cb2f7b7c3
[ "JavaScript", "Markdown" ]
29
JavaScript
jukeShy/firestore-blog
4d2d36b848103559f56274f044565c06ffe37f41
9b275702c0e5c9a74e667482d3df64fe061ea8cb
refs/heads/master
<repo_name>wmaciel/cmpt_741_project<file_sep>/src/frequent_itemsets.py import sys from pprint import pprint from pymining import itemmining, assocrules def find_frequent_itemsets(transactions, support): relim_input = itemmining.get_relim_input(transactions) item_sets = itemmining.relim(relim_input, min_support=support) return item_sets def generate_association_rules(item_sets, support, confidence): rules = assocrules.mine_assoc_rules(item_sets, min_support=support, min_confidence=confidence) return rules def extract_transactions(file_pointer): list_of_transactions = [] list_of_ids = [] for line in file_pointer: tokens = line.strip().split(',') words = tokens[2:] transaction = tuple(words) list_of_transactions.append(transaction) list_of_ids.append(tokens[0]) return tuple(list_of_ids), tuple(list_of_transactions) def match_rules(transaction, rules): recommendations = [] st = frozenset(transaction) for r in rules: if st.issuperset(r[0]) and st.isdisjoint(r[1]): recommendations.append(r) return recommendations def sort_recommendations(recommendations): # Sort recommendations by confidence, which is in the 3rd position of the tuple for tid in recommendations: recommendations[tid].sort(key=lambda x: x[3], reverse=True) def generate_recommendation_list(recommendation): pick = [] for r in recommendation: if_items, then_items, support, confidence = r for item in then_items: pick.append((item, confidence)) return pick def pick_top_5(r_list): picked_set = set() top5 = [] for r in r_list: item = r[0] if item not in picked_set: picked_set.add(item) top5.append(item) if len(top5) < 5: return top5 else: return top5[0:5] def compute_frequent_itemsets(input, output, min_sup, assoc_min_sup, assoc_min_conf): print 'opening file...' 
fp_in = open(input, 'r') print 'extracting transactions...', ids, transactions = extract_transactions(fp_in) fp_in.close() print len(transactions) print 'finding frequent itemsets...', # 30 item_sets = find_frequent_itemsets(transactions, int(min_sup)) print len(item_sets) print 'generating association rules...', # 20, 0.5 rules = generate_association_rules(item_sets, int(assoc_min_sup), float(assoc_min_conf)) print len(rules) print 'matching transactions and rules...' recommendations = {} for i, t in enumerate(transactions): tid = ids[i] recommends = match_rules(t, rules) recommendations[tid] = recommends print 'sorting recommendations...' sort_recommendations(recommendations) print 'organizing in dict form...' rec_per_tid = {} for tid in recommendations: r = recommendations[tid] rec_per_tid[tid] = generate_recommendation_list(r) print 'picking top5...' top5_per_tid = {} for tid in rec_per_tid: r_list = rec_per_tid[tid] top5_per_tid[tid] = pick_top_5(r_list) #pprint.pprint(top5_per_tid) print 'saving file:', output, '...' 
fp_out = open(output, 'w') for doc_id, words in top5_per_tid.items(): fp_out.write(str(doc_id)) for w in words: fp_out.write(',' + str(w)) fp_out.write('\n') if __name__ == "__main__": compute_frequent_itemsets(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5]) <file_sep>/src/colab_filtering.py __author__ = 'walthermaciel' from pyspark import SparkConf, SparkContext, SQLContext import sys import pprint from pyspark.mllib.recommendation import * def split_votes(row): tokens = row.strip().split(',') doc_id = int(tokens[0]) words = tokens[2:] map(lambda x: int(x), words) out = [] for w in words: out.append((doc_id, w, 1)) return tuple(out) def generate_rankings(sc, input_directory): # read input text_rdd = sc.textFile(input_directory) rankings_rdd = text_rdd.flatMap(split_votes) return rankings_rdd def main(input, output): # spark specific setup conf = SparkConf().setAppName('Word Recommender') sc = SparkContext(conf=conf) rankings_rdd = generate_rankings(sc, input).cache() # trains the model model = ALS.trainImplicit(rankings_rdd, 10) doc_id_rdd = rankings_rdd.map(lambda (doc_id, w, one): doc_id) doc_ids = doc_id_rdd.collect() doc_ids = set(doc_ids) recommendations = [] for i in doc_ids: products = model.recommendProducts(i, 5) rec_row = [i] + products recommendations.append(rec_row) answer = [] for r in recommendations: a = [r[0]] for i in r[1:]: a.append(i.product) answer.append(a) fp_out = open(output, 'w') for a in answer: fp_out.write(str(a[0])) for i in a[1:]: fp_out.write(',' + str(i)) fp_out.write('\n') fp_out.close() if __name__ == "__main__": main(sys.argv[1], sys.argv[2]) <file_sep>/validation_grad/readme.txt How to run the program: java -jar Validation.jar "fp1" "fp2" fp1: path of the file "validation_data.txt" fp2: path of your answer<file_sep>/src/generate_document_words_clustered.py __author__ = 'walthermaciel' import sys import arff from pprint import pprint def main(clustering_file, document_words_file, output): print 'Splitting 
DocumentWords into separate folders for it cluster...' print 'Loading', clustering_file fp_in = open(clustering_file, 'r') arff_dict = arff.load(fp_in) fp_in.close() print 'Extracting clusters...' # {doc_id: cluster_number} id_cluster_dict = {} for doc_list in arff_dict[u'data']: id_cluster_dict[int(doc_list[1])] = int(doc_list[-1][-1]) print 'Saving clustering for the undergrad answer...' fp_undergrad = open(output + '/undergrad_clustering.txt', 'w') for k, v in id_cluster_dict.items(): fp_undergrad.write(str(k) + ',' + str(v) + '\n') fp_undergrad.close() print 'Preparing output files...' # [fp to 'output/c1..4'] fp_out = [] for i in xrange(4): fp_out.append(open(output + '/c' + str(i) + '.txt', 'w')) print 'Distributing docs among cluster files...' # fp to DocumentWords.txt fp_words = open(document_words_file, 'r') for line in fp_words: # isolate the document id and the list of words tokens = line.strip().split(',') doc_id = int(tokens[0]) words = tokens[2:] # find out which cluster this document belongs to cluster = id_cluster_dict[doc_id] fpc = fp_out[cluster] # write document information fpc.write(str(doc_id) + ',' + str(len(words))) for w in words: fpc.write(',' + str(w)) fpc.write('\n') print 'Closing file pointers...' # close file pointers fp_words.close() for fp in fp_out: fp.close() print 'SUCCESS!' 
if __name__ == "__main__": if len(sys.argv) != 4: print 'parameters: clustering arff file, document words file, output folder' else: main(sys.argv[1], sys.argv[2], sys.argv[3]) <file_sep>/src/split_cluster_files.py __author__ = 'walthermaciel' import sys def load_cluster_file(file_path): fp_in = open(file_path, 'r') # {doc_id: cluster_number} id_cluster_dict = {} for line in fp_in: str_doc_id, str_cluster_id = line.strip().split(',') doc_id = int(str_doc_id) cluster_id = int(str_cluster_id) - 1 id_cluster_dict[doc_id] = cluster_id fp_in.close() return id_cluster_dict def main(clustering_file, document_words_file, output): print 'Loading cluster file', clustering_file id_cluster_dict = load_cluster_file(clustering_file) print 'Preparing output files...' # [fp to 'output/c1..4'] fp_out = [] for i in xrange(4): fp_out.append(open(output + '/c' + str(i) + '.txt', 'w')) print 'Distributing docs among cluster files...' # fp to DocumentWords.txt fp_words = open(document_words_file, 'r') for line in fp_words: # isolate the document id and the list of words tokens = line.strip().split(',') doc_id = int(tokens[0]) words = tokens[2:] print 'doc_id', doc_id, # find out which cluster this document belongs to cluster = id_cluster_dict[doc_id] print 'belongs to cluster', cluster fpc = fp_out[cluster] # write document information fpc.write(str(doc_id) + ',' + str(len(words))) for w in words: fpc.write(',' + str(w)) fpc.write('\n') print 'Closing file pointers...' # close file pointers fp_words.close() for fp in fp_out: fp.close() print 'SUCCESS!' if __name__ == "__main__": if len(sys.argv) != 4: print 'parameters: clustering file, document words file, output folder' else: main(sys.argv[1], sys.argv[2], sys.argv[3]) <file_sep>/src/frequent_itemset_clusters.py __author__ = 'walthermaciel' import sys from frequent_itemsets import compute_frequent_itemsets def main(cluster_folder, output_file, min_sup, assoc_min_sup, assoc_min_conf ): print 'Opening output file:', output_file, '...' 
fp_out = open(output_file, 'w') for i in xrange(4): c_str = cluster_folder + '/c' + str(i) + '.txt' tmp_str = cluster_folder + '/out_c' + str(i) + '.txt' print 'Computing frequent itemsets on cluster', i, '...' compute_frequent_itemsets(c_str, tmp_str, int(min_sup), int(assoc_min_sup), float(assoc_min_conf)) print 'Opening temp file...', fp_tmp = open(tmp_str, 'r') print 'Apending to output file...', fp_out.write(fp_tmp.read()) print 'Done!' fp_tmp.close() print 'Closing output file...' fp_out.close() if __name__ == "__main__": if len(sys.argv) != 6: print 'parameters: cluster_folder , output file, min_sup, assoc_min_sup, assoc_min_conf' else: main(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5]) <file_sep>/src/clustering.r clusterable_data = read.csv('../data/DocumentWords_onehot_filter_1.csv') doc_ids = clusterable_data$doc_id clusterable_data$doc_id = NULL kc <- kmeans(clusterable_data, 4, nstart=100) print(kc$size) clusters_ids = kc$cluster doc_plus_cluster = c(doc_ids, clusters_ids) doc_cluster = matrix(doc_plus_cluster, ncol=2) write.table(doc_cluster, file = "doc_cluster.csv", row.names=FALSE, col.names=FALSE, sep=',')
b9ec964d783af8b7556426c7cf3e8ce685865a7c
[ "Python", "Text", "R" ]
7
Python
wmaciel/cmpt_741_project
b4d1cf0f2f7071ec315ef5f86be4ff4f4772b948
c2a26e16130c61ac2485473ac57f069a74e7fdbf
refs/heads/master
<file_sep><?php use Illuminate\Support\Facades\Route; use App\Http\Controllers\PostController; /* |-------------------------------------------------------------------------- | Web Routes |-------------------------------------------------------------------------- | | Here is where you can register web routes for your application. These | routes are loaded by the RouteServiceProvider within a group which | contains the "web" middleware group. Now create something great! | */ Route::get('/', function () { return view('welcome'); }); Route::middleware(['auth:sanctum', 'verified'])->get('/dashboard', function () { return view('dashboard'); })->name('dashboard'); //Session用 Route::get('/sessions','SessionTestController@create'); Route::post('/sessions','SessionTestController@store'); Route::get('/sessions/detail','SessionTestController@detail'); Route::get('/sessions/logout','SessionTestController@logout'); //Route::get('/sessions', [PostController::class, 'index']); <file_sep><?php namespace App\Http\Controllers; use Illuminate\Http\Request; class SessionTestController extends Controller { public function create(Request $request){ return view('session.create'); } public function store(Request $request){ //セッション登録 if ($request->session()->has('name')==false) { $request->session()->put('name', $request->name); } if ($request->session()->has('tel')==false) { $request->session()->put('tel', $request->tel); } if ($request->session()->has('email')==false) { $request->session()->put('email', $request->email); } $username = $request->name; return view('session.result',compact('username')); } public function detail(Request $request){ //セッション登録 $username = $request->session()->get('name'); $tel = $request->session()->get('tel'); $email = $request->session()->get('email'); return view('session.detail',compact('username','tel','email')); } public function logout(Request $request){ //確認用 //$username = $request->session()->get('name'); //dump($username); //セッション削除 
$request->session()->flush(); //確認用 //$username = $request->session()->get('name'); //dd($username); return redirect('/sessions'); } }
f57de59ab2c10f212e1ca748ecdcf2e631f79ee7
[ "PHP" ]
2
PHP
yuuki4019/session
ab2ffe607579c24005de8ec61c9b1361f4e77aa5
0484310b06b3ea87af290bb997e2aa8f16c1030a
refs/heads/master
<repo_name>gorillaking/CoffeeRun<file_sep>/Assets/soundtrigger.cs using UnityEngine; using System.Collections; public class soundtrigger : MonoBehaviour { public AudioClip clip; public float range; public float playChance; public float distance; Transform target; bool inRange; // Use this for initialization void Awake() { target = GameObject.FindGameObjectWithTag ("Player").GetComponent<Transform> (); } void Start () { } // Update is called once per frame void Update () { RangeCheck (); } void RangeCheck() { distance = Vector3.Distance (transform.position, target.transform.position); if (distance < range && !inRange) { playSound (); inRange = true; } else if (distance > range && inRange) { inRange = false; } } void playSound() { if (Random.value < playChance) { AudioSource.PlayClipAtPoint (clip, transform.position); } } } <file_sep>/Assets/Scripts/PlayerController.cs using UnityEngine; using System.Collections; public class PlayerController : MonoBehaviour { public float coffeeTemperature; public float temperatureDecay; public float acceleration; public float maxSpeed; bool jumping = false; public Vector3 velocity; Rigidbody body; float decayCooldown = 5f;//every 5 seconds, drop temperature by temperatureDecay float decayTimer; void Start(){ body = GetComponent<Rigidbody> (); body.freezeRotation = true; decayTimer = decayCooldown; } void Update () { if (decayTimer > 0) { decayTimer -= Time.deltaTime; } else { decayTimer = decayCooldown; coffeeTemperature -= temperatureDecay; } float horizontalInput = Input.GetAxisRaw ("Horizontal"); if(horizontalInput < 0) { // Rigidbody body = GetComponent<Rigidbody>(); float velX = body.velocity.x - (acceleration * Time.deltaTime); velX = Mathf.Clamp(velX, -maxSpeed, maxSpeed); body.velocity = new Vector3(velX, body.velocity.y, body.velocity.z); velocity.x = velX; } if (horizontalInput > 0) { // Rigidbody body = GetComponent<Rigidbody>(); float velX = body.velocity.x + (acceleration * Time.deltaTime); velX = Mathf.Clamp(velX, 
-maxSpeed, maxSpeed); body.velocity = new Vector3(velX, body.velocity.y, body.velocity.z); velocity.x = velX; } if(Input.GetKeyDown(KeyCode.Space) && !jumping) { jumping = true; //Rigidbody body = GetComponent<Rigidbody>(); float velY = body.velocity.y + 5f; body.velocity = new Vector3(body.velocity.x, velY, body.velocity.z); velocity.x = body.velocity.x; // save x velocity when you jump so you will keep it upon landing } for (int i = 0; i < Input.touchCount; i++) { //touch controls if (Input.GetTouch (i).position.x < Screen.width / 2) {//touch the left half of the screen float velX = body.velocity.x - (acceleration * Time.deltaTime); velX = Mathf.Clamp (velX, -maxSpeed, maxSpeed); body.velocity = new Vector3 (velX, body.velocity.y, body.velocity.z); velocity.x = velX; } else if (Input.GetTouch (i).position.x > Screen.width / 2) { float velX = body.velocity.x + (acceleration * Time.deltaTime); velX = Mathf.Clamp(velX, -maxSpeed, maxSpeed); body.velocity = new Vector3(velX, body.velocity.y, body.velocity.z); velocity.x = velX; } } if (jumping) { body.velocity = new Vector3(velocity.x, body.velocity.y, body.velocity.z); } } void fixedUpdate() { if (jumping) { body.velocity = new Vector3(velocity.x, body.velocity.y, body.velocity.z); } } void OnTriggerEnter(Collider collider) { jumping = false; body.velocity = new Vector3(velocity.x, body.velocity.y, body.velocity.z); // when you land, keep x velocity; this mostly fixes the momentum loss (still a slight visual bug) } }
d6810b9150d0d78b3648b11294475c19439739d3
[ "C#" ]
2
C#
gorillaking/CoffeeRun
c7a0a86e7251b926a1f516fd075a54c132054525
1faec1e87f2aa5afd335f35d6a7d790e9e3213f2
refs/heads/main
<file_sep>package io.memoria.recipes.app; import com.fasterxml.jackson.databind.JsonNode; import org.openapi4j.core.exception.DecodeException; import org.openapi4j.core.exception.EncodeException; import org.openapi4j.core.model.v3.OAI3; import org.openapi4j.core.util.TreeUtil; import org.openapi4j.core.validation.ValidationException; import org.openapi4j.parser.model.v3.OpenApi3; import org.openapi4j.schema.validator.ValidationContext; import org.openapi4j.schema.validator.v3.SchemaValidator; public class OpenAPITestUtils { public static void validateSchema(OpenApi3 api, String filePath, String schemaName) throws EncodeException, DecodeException, ValidationException { JsonNode schemaNode = api.getComponents().getSchema(schemaName).toNode(); var url = ClassLoader.getSystemResource(filePath); JsonNode contentNode = TreeUtil.load(url); var context = new ValidationContext<OAI3>(api.getContext()); SchemaValidator schemaValidator = new SchemaValidator(context, null, schemaNode); schemaValidator.validate(contentNode); } private OpenAPITestUtils() {} } <file_sep>package io.memoria.recipes.core.service; import io.memoria.jutils.jcore.id.Id; import io.memoria.recipes.core.recipe.Recipe; import io.memoria.recipes.core.repo.RecipeRepo; import io.memoria.recipes.core.repo.RecipeSearchRepo; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; @SuppressWarnings("ClassCanBeRecord") public class RecipeService { private final RecipeRepo recipeRepo; private final RecipeSearchRepo recipeSearchRepo; public RecipeService(RecipeRepo recipeRepo, RecipeSearchRepo recipeSearchRepo) { this.recipeRepo = recipeRepo; this.recipeSearchRepo = recipeSearchRepo; } public Mono<Id> create(Recipe recipe) { return this.recipeRepo.create(recipe); } public Flux<Recipe> recipes() { return this.recipeRepo.recipes(); } public Flux<Recipe> recipes(String filterByCategory) { return this.recipeRepo.recipes(filterByCategory); } public Flux<Recipe> search(String text) { return 
this.recipeSearchRepo.search(text); } } <file_sep>package io.memoria.recipes.app.controller; import io.memoria.jutils.jweb.netty.NettyClientUtils; import io.memoria.recipes.app.AppConfig; import io.memoria.recipes.app.AppDependencies; import io.memoria.recipes.app.TestResource; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import reactor.netty.DisposableServer; import reactor.test.StepVerifier; import java.time.Duration; import static io.memoria.recipes.app.Defaults.file; import static io.memoria.recipes.app.Defaults.yaml; import static io.memoria.recipes.app.TestResource.allRecipes; import static io.memoria.recipes.app.TestResource.vegetableRecipes; import static io.netty.handler.codec.http.HttpResponseStatus.OK; class RecipesControllerTest { private static final String omeletteRecipe = TestResource.recipeOmeletteV6; private static String serverUrl; private static String recipesPath; private static String recipesSearchPath; private static DisposableServer disposableServer; @AfterAll static void afterAll() { disposableServer.disposeNow(); } @BeforeAll static void beforeAll() { var conf = file.read("test.yaml").block(); var appConfig = yaml.deserialize(conf, AppConfig.class).get(); var dependencies = new AppDependencies(appConfig); serverUrl = appConfig.server().url(); recipesPath = appConfig.apiV6().recipes(); recipesSearchPath = appConfig.apiV6().recipesSearch(); disposableServer = dependencies.httpServer.bindNow(Duration.ofSeconds(10)); } @Test void createRecipe() { // when var result = NettyClientUtils.post(omeletteRecipe, serverUrl, recipesPath); // Then StepVerifier.create(result) .expectNextMatches(t -> t._1.equals(OK) && t._2.equals("omelette")) .expectComplete() .verify(); } @Test void recipes() { // when var all = NettyClientUtils.get(serverUrl, recipesPath); var veg = NettyClientUtils.get(serverUrl, recipesPath + "?category=Vegetables"); // Then StepVerifier.create(all) 
.expectNextMatches(t -> t._1.equals(OK) && t._2.equals(allRecipes)) .expectComplete() .verify(); StepVerifier.create(veg) .expectNextMatches(t -> t._1.equals(OK) && t._2.equals(vegetableRecipes)) .expectComplete() .verify(); } @Test void search() { // when var all = NettyClientUtils.get(serverUrl, recipesSearchPath); var veg = NettyClientUtils.get(serverUrl, recipesSearchPath + "?text=Zucchini"); // Then StepVerifier.create(all) .expectNextMatches(t -> t._1.equals(OK) && t._2.equals(allRecipes)) .expectComplete() .verify(); StepVerifier.create(veg) .expectNextMatches(t -> t._1.equals(OK) && t._2.equals(vegetableRecipes)) .expectComplete() .verify(); } } <file_sep>package io.memoria.recipes.app.controller; import io.memoria.jutils.jweb.netty.NettyClientUtils; import io.memoria.recipes.app.AppConfig; import io.memoria.recipes.app.AppDependencies; import io.memoria.recipes.app.TestResource; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import reactor.netty.DisposableServer; import reactor.test.StepVerifier; import java.time.Duration; import static io.memoria.recipes.app.Defaults.file; import static io.memoria.recipes.app.Defaults.yaml; import static io.netty.handler.codec.http.HttpResponseStatus.OK; class CategoriesControllerTest { private static final String categories = TestResource.categories; private static String serverUrl; private static String categoriesPath; private static DisposableServer disposableServer; @AfterAll static void afterAll() { disposableServer.disposeNow(); } @BeforeAll static void beforeAll() { var conf = file.read("test.yaml").block(); var appConfig = yaml.deserialize(conf, AppConfig.class).get(); var dependencies = new AppDependencies(appConfig); serverUrl = appConfig.server().url(); categoriesPath = appConfig.apiV6().categories(); disposableServer = dependencies.httpServer.bindNow(Duration.ofSeconds(10)); } @Test void getCategories() { // when var result = 
NettyClientUtils.get(serverUrl, categoriesPath); // Then StepVerifier.create(result) .expectNextMatches(t -> t._1.equals(OK) && t._2.equals(categories)) .expectComplete() .verify(); } } <file_sep>package io.memoria.recipes.app; import io.memoria.jutils.jcore.config.ConfigUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.time.Duration; import static io.memoria.jutils.jcore.vavr.ReactorVavrUtils.toMono; public class App { private static final Logger LOG = LoggerFactory.getLogger(App.class.getName()); private static final String DEFAULT_CONFIG_PATH = "app.yaml"; public static void main(String[] args) { LOG.debug("DEBUG logging is on"); LOG.info("INFO logging is on"); LOG.warn("WARNING logging is on"); LOG.error("ERROR logging is on"); var configPath = ConfigUtils.readMainArgs(args).get("--config").getOrElse(DEFAULT_CONFIG_PATH); var appConfig = Defaults.file.read(configPath) .flatMap(f -> toMono(Defaults.yaml.deserialize(f, AppConfig.class))) .block(); assert appConfig != null; new AppDependencies(appConfig).httpServer.bindNow(Duration.ofSeconds(10)).onDispose().block(); LOG.info("Server Shutdown!"); } } <file_sep>package io.memoria.recipes.core.service; import io.memoria.jutils.jcore.id.Id; import io.memoria.recipes.core.recipe.Recipe; import io.memoria.recipes.core.repo.mem.RecipeMemRepo; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import reactor.test.StepVerifier; import java.util.concurrent.ConcurrentHashMap; import static io.memoria.recipes.core.TestResource.amarettoCake; import static io.memoria.recipes.core.TestResource.omeletteRecipe; class RecipeServiceTest { private final ConcurrentHashMap<Id, Recipe> db = new ConcurrentHashMap<>(); private final RecipeMemRepo memRepo = new RecipeMemRepo(db); private final RecipeService service = new RecipeService(memRepo, memRepo); @BeforeEach void beforeEach() { db.clear(); } @Test 
@DisplayName("Mem repo should create recipe successfully") void create() { // Given var id = Id.of(omeletteRecipe.head().title()); StepVerifier.create(service.create(omeletteRecipe)).expectNext(id).verifyComplete(); Assertions.assertEquals(omeletteRecipe, db.get(id)); } @Test @DisplayName("Mem repo should find all recipes with specific category") void filterByCategory() { // Given db.put(Id.of(omeletteRecipe.head().title()), omeletteRecipe); db.put(Id.of(amarettoCake.head().title()), amarettoCake); // When StepVerifier.create(service.recipes("breakfast")).expectNext(omeletteRecipe).verifyComplete(); StepVerifier.create(service.recipes("eggs")).expectNext(amarettoCake, omeletteRecipe).verifyComplete(); StepVerifier.create(service.recipes("none")).verifyComplete(); } @Test @DisplayName("Mem repo should return all recipes") void recipes() { // Given db.put(Id.of(omeletteRecipe.head().title()), omeletteRecipe); db.put(Id.of(amarettoCake.head().title()), amarettoCake); // When StepVerifier.create(service.recipes()).expectNext(omeletteRecipe, amarettoCake).verifyComplete(); } @Test @DisplayName("Mem repo should find all recipes which contains a text") void search() { // Given db.put(Id.of(omeletteRecipe.head().title()), omeletteRecipe); db.put(Id.of(amarettoCake.head().title()), amarettoCake); // When StepVerifier.create(service.search("amaretto")).expectNext(amarettoCake).verifyComplete(); StepVerifier.create(service.search("eggs")).expectNext(amarettoCake, omeletteRecipe).verifyComplete(); StepVerifier.create(service.search("none")).verifyComplete(); } } <file_sep><project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> <modelVersion>4.0.0</modelVersion> <parent> <groupId>io.memoria</groupId> <artifactId>recipes</artifactId> <version>0.0.1</version> </parent> <artifactId>app</artifactId> <version>0.0.1</version> 
<packaging>jar</packaging> <name>${project.artifactId}</name> <description>Application Module</description> <build> <plugins> <plugin> <artifactId>maven-assembly-plugin</artifactId> <configuration> <archive> <manifest> <mainClass>io.memoria.recipes.app.App</mainClass> </manifest> <manifestEntries> <Multi-Release>true</Multi-Release> </manifestEntries> </archive> <descriptorRefs> <descriptorRef>jar-with-dependencies</descriptorRef> </descriptorRefs> <finalName> ${project.groupId}-${project.parent.artifactId}-${project.artifactId}-${project.version} </finalName> <appendAssemblyId>false</appendAssemblyId> </configuration> <executions> <execution> <id>make-assembly</id> <phase>install</phase> <goals> <goal>single</goal> </goals> </execution> </executions> </plugin> </plugins> </build> <dependencies> <dependency> <groupId>io.memoria.recipes</groupId> <artifactId>core</artifactId> <version>${project.parent.version}</version> </dependency> <!-- =============================================================== --> <!-- Jutils Dependencies --> <!-- =============================================================== --> <dependency> <groupId>io.memoria.jutils</groupId> <artifactId>jtext</artifactId> </dependency> <dependency> <groupId>io.memoria.jutils</groupId> <artifactId>jweb</artifactId> </dependency> <!-- =============================================================== --> <!-- Openapi Dependencies --> <!-- =============================================================== --> <dependency> <groupId>org.openapi4j</groupId> <artifactId>openapi-parser</artifactId> </dependency> <dependency> <groupId>org.openapi4j</groupId> <artifactId>openapi-schema-validator</artifactId> </dependency> </dependencies> </project> <file_sep>package io.memoria.recipes.app.dto; import io.memoria.recipes.core.recipe.Recipe; public sealed interface RecipeDto permits RecipeDtoV5, RecipeDtoV6 { Recipe toRecipe(); } <file_sep>FROM memoriaio/java-docker:latest ENV jarfile=io.memoria-recipes-app-0.0.1.jar ADD 
app/target/${jarfile} /sources/ EXPOSE 8090 CMD java --enable-preview -jar ${jarfile} <file_sep># Recipes: * [Api v5](app/src/main/resources/API_V5.yaml) * [Api v6](app/src/main/resources/API_V6.yaml) ## 1.0 Build and Run the application #### 1.1 Build 1. Make sure you have openjdk 16 2. Maven 3.6.3 3. Make sure your `~/.m2/settings.xml` has access to GitHub packages: 1. Rename [.github/my_setting.xml](.github/my_settings.xml) to `settings.xml` 2. Go to your GitHub account https://github.com/settings/tokens/new 3. Create a token which has permission of `read:packages` 4. Copy that token to the [.github/setting.xml](.github/settings.xml) file in the password 5. Don't forget to put your GitHub username instead of username #### 1.2 Run through docker 1. Make sure docker is installed 2. Inside the project main directory do `docker build . -t recipes:latest` 3. then `docker run -it recipes:latest` 4. Your application would be running on port 8090 #### 1.3 Run on local 1. Run `java --enable-preview -jar app/target/io.memoria-recipes-app-0.0.1.jar` ## 2.0 Decisions > As per email, we're using Json instead of XML for the initial recipes and since we're flexible with decisions; > the following decisions were made #### 2.1 API decisions * Converting all XMLs to JSON * The JSON format would contain `{ "$type":"Recipe05", ....}` as type reference * Using the `Amaretto_cake.json` schema as version v0.6.0 * Using `30_Minute_Chili.json` and `Another_Zucchini_Dish.json` schema as version v0.5.0 * v0.6.0 Changes * Because `ingredients` is a group of ingredients it will be a key:value map, this enforces group naming uniqueness and simplifies payload * key is the group name e.g. 
Glaze * value will be an array of strings instead of complex `ingredient(item, amount (unit/qty)`: * Such complex object would make chefs struggle to write their recipes and be creative * The object is written already in string, because the original author knew such problem * This complexity introduced as a middle solution is unnecessary. * Cooking ingredient measurement is very complex to be listed, and it'd be better to use NLP or AI if certain analysis was needed in the future * For the `directions` property we'll make it an array of steps to preserve order, and for better displaying the steps instead of a bulk paragraph it can be a checkboxes of steps * v0.5.0 Changes * ... * v0.4.0 Changes * ... ## 3.0 TODOs * Tests for v5 api * Query result pagination <file_sep>## Search Recipes > As a web designer I would like to retrieve recipes from the back-end system, so I can display them in my app **Requirements:** - Without any additional query parameters, should return all recipes known to the back-end service - Support filtering based on recipe category - Support search strings, with the service then trying to match these in relevant fields (for example name and category) <file_sep>package io.memoria.recipes.app; import io.memoria.recipes.app.dto.RecipeDto; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.openapi4j.core.exception.ResolutionException; import org.openapi4j.core.validation.ValidationException; import org.openapi4j.core.validation.ValidationResults; import org.openapi4j.parser.OpenApi3Parser; import org.openapi4j.parser.model.v3.OpenApi3; import org.openapi4j.parser.validation.v3.OpenApi3Validator; class ApiV5Test { private final OpenApi3 api; public ApiV5Test() throws ResolutionException, ValidationException { var openApiFile = ClassLoader.getSystemResource("API_V5.yaml"); api = new OpenApi3Parser().parse(openApiFile, false); } @Test @DisplayName("OpenApi syntax should be 
valid") void openApiSyntax() throws ValidationException { ValidationResults results = OpenApi3Validator.instance().validate(api); Assertions.assertTrue(results.isValid()); } @Test @DisplayName("Objects Schema should be valid") void schema() throws Exception { OpenAPITestUtils.validateSchema(api, "model/recipe/RecipeDtoV5.json", RecipeDto.class.getSimpleName()); } } <file_sep><?xml version="1.0" encoding="UTF-8"?> <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> <modelVersion>4.0.0</modelVersion> <parent> <groupId>io.memoria</groupId> <artifactId>jbom</artifactId> <version>0.38.0</version> </parent> <artifactId>recipes</artifactId> <version>0.0.1</version> <packaging>pom</packaging> <name>${project.artifactId}</name> <description>recipes Parent Pom</description> <url>https://memoria.io/recipes</url> <organization> <name>memoria</name> <url>https://memoria.io</url> </organization> <modules> <module>core</module> <module>app</module> </modules> <properties> <jutils.version>0.242.0</jutils.version> <openapi4j.version>1.0.4</openapi4j.version> <reactor.version>2020.0.5</reactor.version> </properties> <repositories> <repository> <id>github</id> <url>https://maven.pkg.github.com/memoria-io/*</url> <releases> <enabled>true</enabled> </releases> <snapshots> <enabled>true</enabled> </snapshots> </repository> </repositories> <dependencyManagement> <dependencies> <!-- =============================================================== --> <!-- Jutils Dependencies --> <!-- =============================================================== --> <dependency> <groupId>io.memoria.jutils</groupId> <artifactId>jcore</artifactId> <version>${jutils.version}</version> </dependency> <dependency> <groupId>io.memoria.jutils</groupId> <artifactId>jtext</artifactId> <version>${jutils.version}</version> </dependency> <dependency> 
<groupId>io.memoria.jutils</groupId> <artifactId>jweb</artifactId> <version>${jutils.version}</version> </dependency> <!-- =============================================================== --> <!-- Reactive Streams Dependencies --> <!-- =============================================================== --> <dependency> <groupId>io.projectreactor</groupId> <artifactId>reactor-bom</artifactId> <version>${reactor.version}</version> <type>pom</type> <scope>import</scope> </dependency> <!-- =============================================================== --> <!-- OpenAPI Dependencies --> <!-- =============================================================== --> <dependency> <groupId>org.openapi4j</groupId> <artifactId>openapi-parser</artifactId> <version>${openapi4j.version}</version> <scope>test</scope> </dependency> <dependency> <groupId>org.openapi4j</groupId> <artifactId>openapi-schema-validator</artifactId> <version>${openapi4j.version}</version> <scope>test</scope> </dependency> </dependencies> </dependencyManagement> <dependencies> <!--======================================================================================== --> <!-- Reactive Dependencies --> <!--======================================================================================== --> <dependency> <groupId>io.projectreactor</groupId> <artifactId>reactor-core</artifactId> </dependency> <dependency> <groupId>io.projectreactor</groupId> <artifactId>reactor-test</artifactId> <scope>test</scope> </dependency> <!-- =============================================================== --> <!-- Jutils Dependencies --> <!-- =============================================================== --> <dependency> <groupId>io.memoria.jutils</groupId> <artifactId>jcore</artifactId> </dependency> </dependencies> </project> <file_sep>## Create Recipe > As a web designer I want to be able to add new recipes, so I can expand the recipe database with new, > tasty and inspiring recipes **Requirements:** - When given valid input, 
creates a new recipe in the backend which can then be retrieved by the service's clients - Make sure the provided input is valid - Do not allow multiple recipes with the same name (so people don't get confused) <file_sep>## List categories > As a web designer I would like to retrieve the available recipe categories, so I can do more focused requests for specific recipe types **Requirements:** - Operation returns all recipe categories<file_sep>package io.memoria.recipes.app; import io.memoria.recipes.app.dto.RecipeDto; import io.memoria.recipes.app.dto.RecipeDtoV5; import io.memoria.recipes.app.dto.RecipeDtoV6; import io.memoria.recipes.core.recipe.Head; import io.memoria.recipes.core.recipe.QuickRecipe; import io.memoria.recipes.core.recipe.Recipe; import io.vavr.collection.HashMap; import io.vavr.collection.HashSet; import io.vavr.collection.List; import static io.memoria.recipes.app.Defaults.file; public class TestResource { // Recipes json public static final String recipe30MinChili; public static final String recipeAmarettoCake; public static final String recipeZucchiniDish; public static final String recipeOmeletteV6; public static final String recipeOmeletteV5; // Controllers Json public static final String allRecipes; public static final String categories; public static final String vegetableRecipes; // Recipes objects public static final RecipeDto omeletteV5 = new RecipeDtoV5(omeletteRecipe()); public static final RecipeDto omeletteV6 = new RecipeDtoV6(omeletteRecipe()); static { // Recipes json recipe30MinChili = file.read("recipes/30_Minute_Chili.json").block(); recipeAmarettoCake = file.read("recipes/Amaretto_Cake.json").block(); recipeZucchiniDish = file.read("recipes/Another_Zucchini_Dish.json").block(); recipeOmeletteV5 = file.read("recipes/omeletteV5.json").block(); recipeOmeletteV6 = file.read("recipes/omeletteV6.json").block(); // Controllers Json allRecipes = file.read("controller/all.json").block(); categories = 
file.read("controller/categories.json").block(); vegetableRecipes = file.read("controller/vegetableRecipes.json").block(); } private TestResource() {} private static Recipe omeletteRecipe() { // Omelette var head = new Head("omelette", HashSet.of("eggs", "breakfast"), 2); var ingredients = HashSet.of("2 eggs", "sprinkle of salt", "2 table spoon of oil"); var directions = List.of("break the eggs", "whisk the eggs", "put the salt on eggs", "heat the pan with oil", "pour the whisked eggs"); return new QuickRecipe(head, HashMap.of("base", ingredients), directions); } } <file_sep>package io.memoria.recipes.app.controller; import io.memoria.recipes.app.AppConfig; import io.memoria.recipes.app.AppDependencies; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import reactor.netty.DisposableServer; import reactor.test.StepVerifier; import java.time.Duration; import static io.memoria.jutils.jweb.netty.NettyClientUtils.get; import static io.memoria.recipes.app.Defaults.file; import static io.memoria.recipes.app.Defaults.yaml; import static io.netty.handler.codec.http.HttpResponseStatus.OK; class VarzControllerTest { private static String serverUrl; private static String varzPath; private static AppDependencies deps; private static DisposableServer disposableServer; @AfterAll static void afterAll() { disposableServer.disposeNow(); } @BeforeAll static void beforeAll() { var conf = file.read("test.yaml").block(); var appConfig = yaml.deserialize(conf, AppConfig.class).get(); deps = new AppDependencies(appConfig); serverUrl = appConfig.server().url(); varzPath = appConfig.apiV6().varz(); disposableServer = deps.httpServer.bindNow(Duration.ofSeconds(10)); } @Test void varz() { // when var result = get(serverUrl, varzPath); // Then StepVerifier.create(result).expectNextMatches(t -> t._1.equals(OK)).expectComplete().verify(); } } <file_sep>package io.memoria.recipes.core.repo.mem; import io.memoria.jutils.jcore.id.Id; 
import io.memoria.recipes.core.common.RecipeException.RecipeAlreadyExists; import io.memoria.recipes.core.recipe.Head; import io.memoria.recipes.core.recipe.Recipe; import io.memoria.recipes.core.repo.CategoryRepo; import io.memoria.recipes.core.repo.RecipeRepo; import io.memoria.recipes.core.repo.RecipeSearchRepo; import io.vavr.collection.HashSet; import io.vavr.collection.Set; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import java.util.concurrent.ConcurrentHashMap; import static io.memoria.jutils.jcore.vavr.ReactorVavrUtils.toMono; public record RecipeMemRepo(ConcurrentHashMap<Id, Recipe> db) implements RecipeRepo, CategoryRepo, RecipeSearchRepo { @Override public Flux<String> categories() { return Mono.fromCallable(this::categoriesSet).flatMapMany(Flux::fromIterable); } @Override public Mono<Id> create(Recipe recipe) { var id = Id.of(recipe.head().title()); return Mono.fromCallable(() -> !db.containsKey(id)) .flatMap(toMono(() -> db.put(id, recipe), new RecipeAlreadyExists())) .thenReturn(id); } @Override public Flux<Recipe> recipes(String category) { return Mono.fromCallable(() -> filterByCategory(category)).flatMapMany(Flux::fromIterable); } @Override public Flux<Recipe> recipes() { return Mono.fromCallable(db::values).flatMapMany(Flux::fromIterable); } @Override public Flux<Recipe> search(String text) { return Mono.fromCallable(() -> HashSet.ofAll(this.db.values()).filter(r -> contains(r, text))) .flatMapMany(Flux::fromIterable); } private Set<String> categoriesSet() { return HashSet.ofAll(this.db.values()).map(Recipe::head).flatMap(Head::categories); } private boolean contains(Recipe r, String text) { text = text.toLowerCase(); var titleContains = r.head().title().toLowerCase().contains(text); var directionsContains = r.directions().mkString(" ").toLowerCase().contains(text); return titleContains || directionsContains; } private Set<Recipe> filterByCategory(String category) { return HashSet.ofAll(this.db.values()).filter(r -> 
r.head().categories().contains(category)); } }
998c74a599ce5952738b972c28bdc077654c8d02
[ "Markdown", "Java", "Maven POM", "Dockerfile" ]
18
Java
IsmailMarmoush/recipes
491799eb8f6c2d878a3f9a4fbb11d3800d85f615
0db4002c02b51659631a96a20844016908ee5ca7
refs/heads/master
<file_sep>package model; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; public class StudentTest { Student student1; @BeforeEach public void createObject() { student1 = new Student("testStudent1","<EMAIL>","46447389","student1"); } @Test public void checkCredentials() { assertEquals("student1",student1.getPass()); assertEquals("<EMAIL>",student1.getEmail()); assertEquals("46447389",student1.getId()); assertEquals("testStudent1",student1.getName()); } } <file_sep>package ui.exceptions; public class EndOfExamException extends Exception{ } <file_sep>package persistence; import model.*; import org.junit.jupiter.api.Test; import java.io.IOException; import static org.junit.jupiter.api.Assertions.*; class JsonWriterExamCollectionTest { @Test public void testWriterInvalidFile() { try { WriterExamCollection writerExamCollection = new WriterExamCollection("./data/my\0illegal:fileName.json"); writerExamCollection.open(); fail("IOException was expected"); } catch (IOException e) { // expected } } @Test public void testWriterEmptyExamCollection() { try { ExamCollection examCollection = new ExamCollection(); WriterExamCollection writerExamCollection = new WriterExamCollection("./data" + "/testWriterEmptyExamCollection" + ".json"); writerExamCollection.open(); writerExamCollection.write(examCollection); writerExamCollection.close(); ReaderExamCollection readerExamCollection = new ReaderExamCollection("./data" + "/testWriterEmptyExamCollection" + ".json"); ExamCollection examCollection1; examCollection1 = (ExamCollection) readerExamCollection.read(); assertEquals(0, examCollection1.examList.size()); } catch (IOException e) { fail("Exception should not have been thrown"); } } @Test void testWriterExamCollection() { try { ExamCollection examCollection = new ExamCollection(); Exam exam1 = new Exam(); Exam exam2 = new Exam(); exam1.addQuestion("testQues1","testAns1"); 
exam1.addQuestion("testQues2","testAns2"); exam1.addQuestion("testQues3","testAns3"); exam1.addQuestion("testQues4","testAns4"); exam1.addTitleAndName("TestTitle1","TestName1"); exam2.addQuestion("testQues1","testAns1"); exam2.addQuestion("testQues2","testAns2"); exam2.addQuestion("testQues3","testAns3"); exam2.addQuestion("testQues4","testAns4"); exam2.addQuestion("testQues5","testAns5"); exam2.addQuestion("testQues6","testAns6"); exam2.addTitleAndName("TestTitle2","TestName2"); assertEquals(0, examCollection.examList.size()); examCollection.addExam(exam1); examCollection.addExam(exam2); assertEquals(2,examCollection.examList.size()); WriterExamCollection writerExamCollection = new WriterExamCollection("./data" + "/testWriterExamCollection" + ".json"); writerExamCollection.open(); writerExamCollection.write(examCollection); writerExamCollection.close(); ReaderExamCollection readerExamCollection = new ReaderExamCollection("./data" + "/testWriterExamCollection" + ".json"); ExamCollection examCollection1 = (ExamCollection) readerExamCollection.read(); assertEquals(2,examCollection1.examList.size()); } catch (IOException e) { fail("Exception should not have been thrown"); } } }<file_sep>package ui.exceptions; public class BadStudentEmailException extends Exception{ } <file_sep>package ui; import model.ExamCollection; import java.util.ArrayList; import java.util.List; //Account Class is the backend helper class for both the Student and Instructor Accounts public class Account extends ExamCollection { //Default Constructor public Account() { } //EFFECTS: Returns a string containing the exams created by various Instructors public List<String> viewExams() { List<String> list = new ArrayList<>(); if (examList.size() == 0) { list.add("No Exams have been created yet!"); } else { for (int i = 0; i < examList.size(); i++) { String entry = (i + 1) + "." 
+ examList.get(i).toString(); list.add(entry); } } return list; } }<file_sep>package ui; import model.Student; import ui.exceptions.EndOfExamException; import javax.swing.*; import javax.swing.event.ListSelectionListener; import java.awt.*; import java.awt.event.ActionListener; import java.util.List; import static javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED; import static javax.swing.ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED; //GiveExamGUI class is the page used by student to attempt exams created by instructors public class GiveExamGUI { JFrame mainframe; JLabel heading; Account account = new Account(); JButton goBack; JButton submit; JLabel questionLabel; JLabel answerLabel; JTextField answer; JList<String> examList; DefaultListModel<String> examListModel; List<String> list; int counter = 0; GiveExam giveExam; int index; //Parameterized Constructor public GiveExamGUI(Student student, JFrame mainframe) { this.mainframe = mainframe; mainframe.setTitle("Give Exam"); mainframe.getContentPane().removeAll(); mainframe.repaint(); giveExam = new GiveExam(student); mainframe.setSize(800, 500); mainframe.setBackground(Color.BLACK); Image icon = Toolkit.getDefaultToolkit().getImage(".data/Program_Icon.png"); mainframe.setIconImage(icon); initializeViewExam(); mainframe.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); mainframe.setVisible(true); } //EFFECTS: Constructs the GUI Elements for the View Exam Scroll Panel Page private void initializeViewExam() { JPanel viewExamPanel = new JPanel(new GridBagLayout()); viewExamPanel.setBackground(SystemColor.WHITE); GridBagConstraints viewExamPanelConstraints = new GridBagConstraints(); viewExamPanelConstraints.insets = new Insets(10, 10, 10, 10); heading = new JLabel("Select your Exam choice:"); goBack = new JButton("Go Back to Account"); list = account.viewExams(); examListModel = new DefaultListModel<>(); for (String s : list) { examListModel.addElement(s); } examList = new JList<>(); 
examList.setModel(examListModel); examList.setSize(new Dimension(800, 300)); examList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); examList.setVisibleRowCount(4); JScrollPane viewExamScrollPane; viewExamScrollPane = new JScrollPane(examList, VERTICAL_SCROLLBAR_AS_NEEDED, HORIZONTAL_SCROLLBAR_AS_NEEDED); viewExamScrollPane.setSize(new Dimension(800, 300)); viewExamElements(viewExamPanel, viewExamPanelConstraints, viewExamScrollPane); mainframe.add(viewExamPanel); } //EFFECTS: Extension of the method above, it constructs the GUI Elements for the View Exam Scroll Panel Page private void viewExamElements(JPanel viewExamPanel, GridBagConstraints constraints, JScrollPane scrollPane) { constraints.fill = GridBagConstraints.HORIZONTAL; constraints.gridx = 0; constraints.gridy = 0; viewExamPanel.add(heading, constraints); constraints.gridx = 0; constraints.gridy = 1; viewExamPanel.add(scrollPane, constraints); ListSelectionListener examSelectionFromList = examSelectionFromList(viewExamPanel); examList.addListSelectionListener(examSelectionFromList); constraints.gridx = 0; constraints.gridy = 2; viewExamPanel.add(goBack, constraints); ActionListener goBackCall = goBackCall(); goBack.addActionListener(goBackCall); } //EFFECTS: Constructs the GUI Elements for the Give Exam Page private void initializeGiveExam() { giveExam.selectExam(index); JPanel giveExamPanel = new JPanel(new GridBagLayout()); giveExamPanel.setBackground(SystemColor.WHITE); GridBagConstraints giveExamPanelConstraints = new GridBagConstraints(); giveExamPanelConstraints.insets = new Insets(10, 10, 10, 10); questionLabel = new JLabel(); answerLabel = new JLabel(); answer = new JTextField(10); submit = new JButton("Submit Answer"); try { questionLabel.setText("Question " + (counter + 1) + ": " + giveExam.getQuestion(counter)); } catch (EndOfExamException e) { JOptionPane.showMessageDialog(mainframe, "End of Exam. 
Your final grade is " + giveExam.getGrade()); new StudentAccountGUI(mainframe); } answerLabel.setText("Enter Answer " + (counter + 1) + ":"); giveExamElements(giveExamPanel, giveExamPanelConstraints); mainframe.add(giveExamPanel); } //EFFECTS: Extension of the method above, it constructs the GUI Elements for the Give Exam Page private void giveExamElements(JPanel giveExamPanel, GridBagConstraints giveExamPanelConstraints) { giveExamPanelConstraints.anchor = GridBagConstraints.CENTER; giveExamPanelConstraints.gridx = 0; giveExamPanelConstraints.gridy = 0; giveExamPanel.add(questionLabel, giveExamPanelConstraints); giveExamPanelConstraints.gridx = 0; giveExamPanelConstraints.gridy = 1; giveExamPanel.add(answerLabel, giveExamPanelConstraints); giveExamPanelConstraints.gridx = 1; giveExamPanelConstraints.gridy = 1; giveExamPanel.add(answer, giveExamPanelConstraints); giveExamPanelConstraints.gridx = 0; giveExamPanelConstraints.gridy = 2; giveExamPanel.add(submit, giveExamPanelConstraints); ActionListener submitAnswer = submitAnswer(giveExamPanel); submit.addActionListener(submitAnswer); } //EFFECTS: Adds a ListSelectionListener for selecting the exam from a list private ListSelectionListener examSelectionFromList(JPanel viewExamPanel) { return e -> { index = examList.getSelectedIndex(); int input = JOptionPane.showConfirmDialog(null, "Click Yes to start Exam number: " + (index + 1), "Exam Confirmation", JOptionPane.YES_NO_CANCEL_OPTION); viewExamPanel.setVisible(false); if (input == 0) { initializeGiveExam(); } else { initializeViewExam(); } }; } //EFFECTS: Adds an Action Listener for the go back Button which takes the control to the Student Account Homepage private ActionListener goBackCall() { return e -> { new StudentAccountGUI(mainframe); }; } //EFFECTS: Adds an Action Listener for the Submit Answer Button which submits the entered answer to be checked private ActionListener submitAnswer(JPanel panel) { return e -> { panel.setVisible(false); String answerText = 
answer.getText(); giveExam.attempt(counter, answerText); counter++; initializeGiveExam(); }; } } <file_sep>package model; import org.json.JSONObject; import persistence.Writable; //Instructor Class extends the person class and stores details about a particular Instructor public class Student extends Person implements Writable { //Parameterized Constructor public Student(String name, String email, String id,String pass) { this.name = name; this.email = email; this.id = id; this.pass = pass; } public String getName() { return name; } public String getEmail() { return email; } public String getId() { return id; } public String getPass() { return pass; } //EFFECTS: Converts this class into a JSON Object @Override public JSONObject toJson() { JSONObject json = new JSONObject(); json.put("name",name); json.put("email",email); json.put("id",id); json.put("pass",pass); return json; } } <file_sep>#Pariksha Exam Portal ## Your go to portal to write and create Exams! In the current times of a Global Pandemic, learning and student assessment have been heavily hampered. This creates a need for better online learning and assessment systems. This application aims to combat this problem that has been hindering the quality of education for students and ease of teaching for Instructors for the better part of this year. I would like introduce **Pariksha Exam Portal** *(Pariksha means Assessment in Hindi)*. This is an Exam Portal created to ease the process of taking tests for Students and the Instructors. The functionality of this application includes: - Sign Up for an account as a Student or Instructor. - Login to an account as a Student or Instructor. - As an Instructor, he/she can create new exams for students to attempt, as well as he/she can view exams created by other instructors. These exams will also be saved in a file. - As a Student, he/she can view exams created by all instructors and then attempt these exams with the grade shown at the end. 
- As a Student/Instructor the login credentials can be saved into a file. This application can be used by all those students who are studying remotely and are trying to keep up with their coursework. As well as, Instructors who are working tirelessly from home to provide the best education experience possible at this moment in time. ### User Stories As a user, I want to be able to create an Account. As a user, I want to be able to login to my corresponding account. As a user, I want to create exams for my students. As a user, I want to attempt the exams created by my instructors. As a user, I want my login credentials to be stored and retrieved by the program. As a user, I want my exams created to be stored and retrieved by the program. # This project was created for Computer Science Software Construction Course<file_sep>package ui.exceptions; public class BadIdException extends Exception { } <file_sep>package persistence; import model.Instructor; import model.Student; import org.json.JSONArray; import org.json.JSONObject; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.stream.Stream; public class ReaderCatalogue { private String source; // EFFECTS: constructs reader to read from source file public ReaderCatalogue(String source) { this.source = source; } // EFFECTS: reads source file as string and returns it private String readFile(String source) throws IOException { StringBuilder contentBuilder = new StringBuilder(); try (Stream<String> stream = Files.lines(Paths.get(source), StandardCharsets.UTF_8)) { stream.forEach(contentBuilder::append); } return contentBuilder.toString(); } // EFFECTS: Parses a studentList from Catalogue JSONObject and returns a list of the containing teams public ArrayList<Student> readStudents() throws IOException { String jsonData = readFile(source); JSONObject jsonObject = new JSONObject(jsonData); JSONArray 
studentArray = jsonObject.getJSONArray("studentList"); ArrayList<Student> students = new ArrayList<>(); for (Object s: studentArray) { JSONObject studentObject = (JSONObject) s; String name = studentObject.getString("name"); String email = studentObject.getString("email"); String id = studentObject.getString("id"); String pass = studentObject.getString("pass"); students.add(new Student(name,email,id,pass)); } return students; } // EFFECTS: Parses a instructorList from Catalogue JSONObject and returns a list of the containing teams public ArrayList<Instructor> readInstructors() throws IOException { String jsonData = readFile(source); JSONObject jsonObject = new JSONObject(jsonData); JSONArray instructorArray = jsonObject.getJSONArray("instructorList"); ArrayList<Instructor> instructors = new ArrayList<>(); for (Object i: instructorArray) { JSONObject instructorObject = (JSONObject) i; String name = instructorObject.getString("name"); String email = instructorObject.getString("email"); String id = instructorObject.getString("id"); String pass = instructorObject.getString("pass"); instructors.add(new Instructor(name,email,id,pass)); } return instructors; } } <file_sep>package model; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.*; public class QuestionTest { Question question1; Question question2; @BeforeEach public void createObject() { question1 = new Question("testQues1","testAns1"); question2 = new Question("testQues2","testAns2"); } @Test public void getQuesTest() { assertEquals("testQues1",question1.getQues()); assertEquals("testQues2",question2.getQues()); } @Test public void checkAnswerTest() { assertFalse(question1.checkAnswer("wrong")); assertFalse(question2.checkAnswer("wrong")); assertTrue(question1.checkAnswer("testAns1")); assertTrue(question2.checkAnswer("testAns2")); } } <file_sep>package model; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import 
static org.junit.jupiter.api.Assertions.*; public class InstructorTest { Instructor instructor1; @BeforeEach public void createObject() { instructor1 = new Instructor("testInstructor1","<EMAIL>","86520489","instructor1"); } @Test public void checkCredentials() { assertEquals("instructor1",instructor1.getPass()); assertEquals("<EMAIL>",instructor1.getEmail()); assertEquals("86520489",instructor1.getId()); assertEquals("testInstructor1",instructor1.getName()); } } <file_sep>package ui; import model.ExamCollection; import model.Student; import ui.exceptions.EndOfExamException; //GiveExam Class is the class used by a logged in Student to attempt an exam created by any Instructor public class GiveExam extends ExamCollection { public int grade; Student student; //Parameterized Constructor public GiveExam(Student student) { this.student = student; grade = 0; } //REQUIRES: Choice entered by user //EFFECTS: Selection of exam by the student public void selectExam(int choice) { exam = examList.get(choice); } //REQUIRES: Counter of question //EFFECTS: Gets the question corresponding to the counter public String getQuestion(int counter) throws EndOfExamException { if (counter >= exam.questionList.size()) { throw new EndOfExamException(); } else { return exam.questionList.get(counter).getQues(); } } //MODIFIES: this public int getGrade() { return grade; } //REQUIRES: Answers to be entered by the student //EFFECTS: Displays questions and receives answers for the corresponding question. 
Displays grade at the end public void attempt(int counter, String answer) { if (exam.questionList.get(counter).checkAnswer(answer)) { grade++; } } }<file_sep>package model; import model.exceptions.EmptyListException; import org.json.JSONArray; import org.json.JSONObject; import persistence.Writable; import java.util.ArrayList; import java.util.List; //Catalogue Class holds all the login details for Instructor and Student public class Catalogue implements Writable { //Static Object of Instructor to be used by subclasses protected static Instructor instructor = null; //Static Object of Student to be used by subclasses protected static Student student = null; //Static ArrayList stores login details for students private static List<Student> studentList = new ArrayList<>(); //Static ArrayList stores login details for instructors private static List<Instructor> instructorList = new ArrayList<>(); //MODIFIES: this //EFFECTS: Logs a new Signup entry into the Student List public void addStudent(Student student) { studentList.add(student); } //MODIFIES: this //EFFECTS: Logs a new Signup entry into the Instructor List public void addInstructor(Instructor instructor) { instructorList.add(instructor); } //MODIFIES: this //EFFECTS: Checks if the login credentials match in the list and copies the corresponding object for the // Instructor Class protected boolean checkInstructor(String id, String pass) { for (Instructor instructor : instructorList) { if (instructor.getId().equals(id) && instructor.getPass().equals(pass)) { Catalogue.instructor = instructor; return true; } } return false; } //MODIFIES: this //EFFECTS: Checks if the login credentials match in the list and copies the corresponding object for the Student // Class protected boolean checkStudent(String id, String pass) { for (Student student : studentList) { if (student.getId().equals(id) && student.getPass().equals(pass)) { Catalogue.student = student; return true; } } return false; } //EFFECTS: Checks if the 
Instructor Signup list is empty public void isEmptyInstructor() throws EmptyListException { if (instructorList.size() == 0) { throw new EmptyListException(); } } //EFFECTS: Checks if the Student Signup list is empty public void isEmptyStudent() throws EmptyListException { if (studentList.size() == 0) { throw new EmptyListException(); } } //MODIFIES: this //EFFECTS: Copies the student list locally public void copyStudentList(List<Student> s) { studentList = s; } //MODIFIES: this //EFFECTS: Copies the instructor list locally public void copyInstructorList(List<Instructor> i) { instructorList = i; } public String getInstructorID() { return instructor.getId(); } public String getStudentID() { return student.getId(); } public String getInstructorName() { return instructor.getName(); } public String getStudentName() { return student.getName(); } public Instructor getInstructor() { return instructor; } public Student getStudent() { return student; } //EFFECTS: Converts this class into a JSON Object @Override public JSONObject toJson() { JSONObject json = new JSONObject(); JSONArray jsonArray1 = new JSONArray(); JSONArray jsonArray2 = new JSONArray(); for (Student student : studentList) { jsonArray1.put(student.toJson()); } json.put("studentList", jsonArray1); for (Instructor instructor : instructorList) { jsonArray2.put(instructor.toJson()); } json.put("instructorList", jsonArray2); return json; } } <file_sep>package model; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.*; public class ExamCollectionTest { ExamCollection examCollection; Exam exam1; Exam exam2; @BeforeEach public void createObject() { examCollection = new ExamCollection(); exam1 = new Exam(); exam2 = new Exam(); exam1.addQuestion("testQues1","testAns1"); exam1.addQuestion("testQues2","testAns2"); exam1.addQuestion("testQues3","testAns3"); exam1.addQuestion("testQues4","testAns4"); exam2.addQuestion("testQues1","testAns1"); 
exam2.addQuestion("testQues2","testAns2"); exam2.addQuestion("testQues3","testAns3"); exam2.addQuestion("testQues4","testAns4"); exam2.addQuestion("testQues5","testAns5"); exam2.addQuestion("testQues6","testAns6"); } @Test public void addExamTest() { assertEquals(0, ExamCollection.examList.size()); examCollection.addExam(exam1); examCollection.addExam(exam2); assertEquals(2,ExamCollection.examList.size()); } } <file_sep>package persistence; import model.*; import model.exceptions.EmptyListException; import org.junit.jupiter.api.Test; import java.io.IOException; import static org.junit.jupiter.api.Assertions.*; class JsonWriterCatalogueTest { @Test public void testWriterInvalidFile() { try { WriterCatalogue writerCatalogue = new WriterCatalogue("./data/my\0illegal:fileName.json"); writerCatalogue.open(); fail("IOException was expected"); } catch (IOException e) { // expected } } @Test public void testWriterEmptyCatalogue() { try { Catalogue catalogue = new Catalogue(); WriterCatalogue writerCatalogue = new WriterCatalogue("./data/testWriterEmptyCatalogue.json"); writerCatalogue.open(); writerCatalogue.write(); writerCatalogue.close(); ReaderCatalogue readerCatalogue = new ReaderCatalogue("./data/testWriterEmptyCatalogue.json"); Catalogue catalogue1 = new Catalogue(); catalogue1.copyInstructorList(readerCatalogue.readInstructors()); catalogue1.copyStudentList(readerCatalogue.readStudents()); try { catalogue.isEmptyStudent(); fail("Exception should be thrown!"); } catch (EmptyListException e) { //expected } try { catalogue.isEmptyInstructor(); fail("Exception should be thrown!"); } catch (EmptyListException e) { //expected } } catch (IOException e) { fail("Exception should not have been thrown"); } } @Test void testWriterCatalogue() { try { Catalogue catalogue = new Catalogue(); try { catalogue.isEmptyStudent(); fail("Exception should be thrown!"); } catch (EmptyListException e) { //expected } try { catalogue.isEmptyInstructor(); fail("Exception should be thrown!"); } 
catch (EmptyListException e) { //expected } Student student1; student1 = new Student("testStudent1","<EMAIL>","46447389","student1"); Student student2; student2 = new Student("testStudent2","<EMAIL>.ca","59452992","student2"); Instructor instructor1; instructor1 = new Instructor("testInstructor1","<EMAIL>","86520489", "instructor1"); Instructor instructor2; instructor2 = new Instructor("testInstructor2","<EMAIL>","23524628", "instructor2"); catalogue.addStudent(student1); catalogue.addStudent(student2); catalogue.addInstructor(instructor1); catalogue.addInstructor(instructor2); WriterCatalogue writerCatalogue = new WriterCatalogue("./data/testWriterCatalogue.json"); writerCatalogue.open(); writerCatalogue.write(); writerCatalogue.close(); ReaderCatalogue readerCatalogue = new ReaderCatalogue("./data/testWriterCatalogue.json"); Catalogue catalogue1 = new Catalogue(); catalogue1.copyInstructorList(readerCatalogue.readInstructors()); catalogue1.copyStudentList(readerCatalogue.readStudents()); try { catalogue.isEmptyStudent(); } catch (EmptyListException e) { fail("Exception should not be thrown!"); } try { catalogue.isEmptyInstructor(); } catch (EmptyListException e) { fail("Exception should not be thrown!"); } } catch (IOException e) { fail("Exception should not have been thrown"); } } }<file_sep>package ui; import javax.swing.*; import java.awt.*; import java.awt.event.ActionListener; import java.util.List; import static javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED; import static javax.swing.ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED; //StudentAccountGUI class is the Student account page of the program public class StudentAccountGUI { JFrame mainframe; Account account = new Account(); JLabel heading; JLabel name; JLabel instructorId; JButton giveExam; JButton viewExams; JButton goBack; JButton logOut; JList<String> examList; DefaultListModel<String> examListModel; List<String> list; //Parameterized Constructor public StudentAccountGUI(JFrame 
mainframe) { this.mainframe = mainframe; mainframe.setTitle("Student Account"); mainframe.getContentPane().removeAll(); mainframe.repaint(); mainframe.setSize(800, 500); mainframe.setBackground(Color.BLACK); Image icon = Toolkit.getDefaultToolkit().getImage(".data/Program_Icon.png"); mainframe.setIconImage(icon); initializeAccount(); mainframe.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); mainframe.setVisible(true); } //EFFECTS: Constructs the GUI Elements for the Student Account Page private void initializeAccount() { JPanel studentHomeScreen = new JPanel(new GridBagLayout()); studentHomeScreen.setBackground(SystemColor.WHITE); GridBagConstraints studentHomeScreenConstraints = new GridBagConstraints(); studentHomeScreenConstraints.insets = new Insets(10, 10, 10, 10); String id = account.getStudentID(); String nameText = account.getStudentName(); heading = new JLabel("Student Account Details:"); name = new JLabel("Name: " + nameText); instructorId = new JLabel("Student ID: " + id); giveExam = new JButton("Give Exam"); viewExams = new JButton("View Exams"); logOut = new JButton("Log Out"); accountElements(studentHomeScreen, studentHomeScreenConstraints); ActionListener logOutCall = logOutCall(); logOut.addActionListener(logOutCall); mainframe.add(studentHomeScreen); } //EFFECTS: Extension of the methode above, it constructs the GUI Elements for the Student Account Page private void accountElements(JPanel studentHomeScreen, GridBagConstraints studentHomeScreenConstraints) { studentHomeScreenConstraints.fill = GridBagConstraints.HORIZONTAL; studentHomeScreenConstraints.gridx = 0; studentHomeScreenConstraints.gridy = 0; studentHomeScreen.add(heading, studentHomeScreenConstraints); studentHomeScreenConstraints.gridx = 0; studentHomeScreenConstraints.gridy = 1; studentHomeScreen.add(name, studentHomeScreenConstraints); studentHomeScreenConstraints.gridx = 1; studentHomeScreenConstraints.gridy = 1; studentHomeScreen.add(instructorId, studentHomeScreenConstraints); 
studentHomeScreenConstraints.gridx = 0; studentHomeScreenConstraints.gridy = 2; studentHomeScreen.add(giveExam, studentHomeScreenConstraints); ActionListener giveExamCall = giveExamCall(); giveExam.addActionListener(giveExamCall); studentHomeScreenConstraints.gridx = 0; studentHomeScreenConstraints.gridy = 3; studentHomeScreen.add(viewExams, studentHomeScreenConstraints); ActionListener viewExamCall = viewExamCall(studentHomeScreen); viewExams.addActionListener(viewExamCall); studentHomeScreenConstraints.gridx = 0; studentHomeScreenConstraints.gridy = 4; studentHomeScreen.add(logOut, studentHomeScreenConstraints); } //EFFECTS: Constructs the GUI Elements for the View Exam Scroll Panel Page private void initializeViewExams() { JPanel viewExam = new JPanel(new GridBagLayout()); viewExam.setBackground(SystemColor.WHITE); GridBagConstraints viewExamConstraints = new GridBagConstraints(); viewExamConstraints.insets = new Insets(10, 10, 10, 10); heading = new JLabel("Exams Created So Far:"); goBack = new JButton("Go Back to Account"); list = account.viewExams(); examListModel = new DefaultListModel<>(); for (String s : list) { examListModel.addElement(s); } examList = new JList<>(); examList.setModel(examListModel); examList.setSize(new Dimension(800, 300)); examList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); examList.setVisibleRowCount(4); viewExamsElements(viewExam, viewExamConstraints); mainframe.add(viewExam); } //EFFECTS: Extension of the method above, it constructs the GUI Elements for the View Exam Scroll Panel Page private void viewExamsElements(JPanel viewExam, GridBagConstraints viewExamConstraints) { JScrollPane viewExamScrollPane; viewExamScrollPane = new JScrollPane(examList, VERTICAL_SCROLLBAR_AS_NEEDED, HORIZONTAL_SCROLLBAR_AS_NEEDED); viewExamScrollPane.setSize(new Dimension(800, 300)); viewExamConstraints.fill = GridBagConstraints.HORIZONTAL; viewExamConstraints.gridx = 0; viewExamConstraints.gridy = 0; viewExam.add(heading, 
viewExamConstraints); viewExamConstraints.gridx = 0; viewExamConstraints.gridy = 1; viewExam.add(viewExamScrollPane, viewExamConstraints); viewExamConstraints.gridx = 0; viewExamConstraints.gridy = 2; viewExam.add(goBack, viewExamConstraints); ActionListener goBackCall = goBackCall(viewExam); goBack.addActionListener(goBackCall); } //EFFECTS: Adds an Action Listener for the logOut Button which takes the control to the HomeScreen private ActionListener logOutCall() { return e -> new HomeScreenGUI(mainframe); } //EFFECTS: Adds an Action Listener for the Give Exam Button which takes the control to the Give Exam frame private ActionListener giveExamCall() { return e -> new GiveExamGUI(account.getStudent(),mainframe); } //EFFECTS: Adds an Action Listener for the go back Button which takes the control to the Instructor Account Homepage private ActionListener goBackCall(JPanel panel) { return e -> { panel.setVisible(false); initializeAccount(); }; } //EFFECTS: Adds an Action Listener for the View Exam Button which takes the control to the View Exam Panel private ActionListener viewExamCall(JPanel panel) { return e -> { panel.setVisible(false); initializeViewExams(); }; } }
6091b784472ed5c3220fc9ca070e76304d14b879
[ "Markdown", "Java" ]
17
Java
lavyarocket/Pariksha-Exam-Portal
bab3900758d37cc5c6ad88212d2ef3287b94cf07
292716ab7611252b78aa3746f02edb71c6a3861b
refs/heads/main
<file_sep>import { Routes, RouterModule } from '@angular/router'; import { NgModule } from '@angular/core'; import { MainDashboardComponent } from './main-dashboard.component'; import { DashboardComponent } from './dashboard/dashboard.component'; import { ProgressComponent } from './progress/progress.component'; import { Graphic1Component } from './graphic1/graphic1.component'; import { AccountSettingsComponent } from './account-settings/account-settings.component'; import { PromisesComponent } from './promises/promises.component'; import { AuthGuard } from '../guards/auth.guard'; import { ProfileComponent } from './profile/profile.component'; const routes: Routes = [ { path: 'dashboard', component: MainDashboardComponent, canActivate:[AuthGuard], children: [ { path: '', component: DashboardComponent, data:{title:'Dashboard'} }, { path: 'progress', component: ProgressComponent, data:{title:'Progress'}}, { path: 'graphic1', component: Graphic1Component, data:{title:'Graphic1'} }, { path: 'promises', component: PromisesComponent, data:{title:'Promises'} }, { path: 'account-settings', component: AccountSettingsComponent, data:{title:'Account settings'} }, { path: 'profile', component: ProfileComponent, data:{title:'My profile'} }, ], }, ]; @NgModule({ imports: [RouterModule.forChild(routes)], exports: [RouterModule] }) export class PagesRoutingModule {} <file_sep>import { Injectable, NgZone } from '@angular/core'; import {HttpClient} from '@angular/common/http' import { RegisterForm } from '../interfaces/registerForm.interface'; import { environment } from 'src/environments/environment'; import { LoginForm } from '../interfaces/loginForm.interface'; import { catchError, map, tap } from 'rxjs/operators'; import { Observable, of } from 'rxjs'; import { Router } from '@angular/router'; import { Usuario } from '../models/usuario.model'; const {base_url}= environment; declare const gapi:any; @Injectable({ providedIn: 'root' }) export class UsuarioService { auth2:any; 
usuario:Usuario; constructor(private http: HttpClient,private router:Router, private ngZone:NgZone) { this.googleInit(); } get token(): string{ return localStorage.getItem('token') || ''; } get uid(): string{ return this.usuario.uid || ''; } googleInit(){ return new Promise (resolve => { gapi.load('auth2',() => { // Retrieve the singleton for the GoogleAuth library and set up the client. this.auth2 = gapi.auth2.init({ client_id: '37233226989-ds0qv4d2snem0a7r994jl5np61fidam5.apps.googleusercontent.com', cookiepolicy: 'single_host_origin', // Request scopes in addition to 'profile' and 'email' //scope: 'additional_scope' }); resolve(); }); }) } logout(){ localStorage.removeItem('token'); this.auth2.signOut().then(() => { this.ngZone.run(() => { this.router.navigateByUrl('/login'); }); }); } validarToken(): Observable<boolean>{ return this.http.get(`${base_url}/login/renew`,{ headers: { 'x-token':this.token } }).pipe( map((resp:any) => { const {email,google,nombre,role,uid,img=''}= resp.usuario; // Ojo esto seria una instancia de la clase por lo que usuario tendria los metodos de la clase Usuario. Sin embargo si lo hubieramos hecho 'this.usuario= resp.usuario' usuario NO tendria los metodos de la clase usuario. 
this.usuario= new Usuario (nombre,email,'',role,google,img,uid); localStorage.setItem('token',resp.token) return true }), catchError(error => { console.log(error); return of(false) }) ) } crearUsuario(formData: RegisterForm){ return this.http.post(`${base_url}/usuarios`, formData); } // TODO: capitulo 15.8 en el backend del profesor era obligatorio enviar el rol del usuario que iba a actualizar, en mi back yo no lo puse obligatorio actualizarUsuario(data:{email:string,nombre:string}){ return this.http.put(`${base_url}/usuarios/${this.uid}`, data, { headers: { 'x-token':this.token } }); } loginUsuario(formData: LoginForm){ if (formData.rememberMe) { localStorage.setItem('email', formData.email); }else{ localStorage.removeItem('email'); } return this.http.post(`${base_url}/login`, formData) .pipe( map((resp:any) => { localStorage.setItem('id',resp.id); localStorage.setItem('token',resp.token); localStorage.setItem('usuario',JSON.stringify(resp.usuario)); return true; }) ) } loginGoogle(token){ return this.http.post(`${base_url}/login/google`, {token}) .pipe( tap( (resp:any) => { localStorage.setItem('token', resp.token) }) ) } } <file_sep>import { Component, OnInit } from '@angular/core'; import { FormBuilder, FormGroup, Validators } from '@angular/forms'; import { UsuarioService } from '../../providers/usuario.service'; import { Usuario } from '../../models/usuario.model'; import { FileUploadService } from '../../providers/file-upload.service'; import Swal from 'sweetalert2'; @Component({ selector: 'app-profile', templateUrl: './profile.component.html', styleUrls: ['./profile.component.css'] }) export class ProfileComponent implements OnInit { public profileForm: FormGroup; public usuario:Usuario; public imagenUpload:File; public imagenTemp:any = null; constructor( private fb:FormBuilder, private usuarioService:UsuarioService, private fileUploadService:FileUploadService) { this.usuario = usuarioService.usuario; } ngOnInit(): void { this.profileForm = this.fb.group({ 
nombre: [this.usuario.nombre,Validators.required], email: [this.usuario.email,[Validators.required, Validators.email]], }); } actualizarDatos(){ this.usuarioService.actualizarUsuario(this.profileForm.value) .subscribe( (resp:any) => { // De este modo actualizo los nombres en todos los lugares de la app ya que como estoy usando el usuario por referencia este se modifica en todos lo lugares donde lo uso. const {nombre, email}= resp.usuario; this.usuario.nombre=nombre; this.usuario.email=email; Swal.fire('Guardado', 'Cambios realizados con exito', 'success'); }, (err) => { console.log(err); Swal.fire('Error', err.error.msg , 'error'); }) } cambiarImagen(file:File){ this.imagenUpload=file; if (!file) { return this.imagenTemp= null; } const reader = new FileReader(); reader.readAsDataURL(file); reader.onloadend = () => { this.imagenTemp= reader.result; } } subirImagen() { this.fileUploadService.actualizarFoto(this.imagenUpload,'usuarios',this.usuario.uid).then( img => { this.usuario.img=img; Swal.fire('Guardado', 'Cambios realizados con exito', 'success'); }).catch(err => { console.log(err); Swal.fire('Error', 'No se pudo subir la imagen', 'error'); }); } } <file_sep>import { Component, EventEmitter, Input, OnInit, Output } from '@angular/core'; @Component({ selector: 'app-incrementador', templateUrl: './incrementador.component.html', styleUrls: ['./incrementador.component.css'] }) export class IncrementadorComponent implements OnInit { // @Input('alias de la vble') progreso:number=10; @Input() progreso:number=10; @Input() btnClass:string='btn btn-primary'; @Output() valorSalida:EventEmitter<number> = new EventEmitter(); constructor() { } ngOnInit(): void { } changePercent(valor:number){ if (this.progreso >=100 && valor>=0) { this.valorSalida.emit(100); return this.progreso=100; } if (this.progreso<=0 && valor < 0){ this.valorSalida.emit(0); return this.progreso=0; } this.progreso= this.progreso+valor; this.valorSalida.emit(this.progreso); } onChange(evento:number){ 
console.log(evento); if (evento >= 100) { this.progreso=100; } else if (evento<= 0){ this.progreso=0; } else { this.progreso=evento; } this.valorSalida.emit(this.progreso); } } <file_sep>import { Component, OnDestroy, OnInit } from '@angular/core'; import { ActivationEnd, Router } from '@angular/router'; import { Subscription } from 'rxjs'; import {filter, map} from 'rxjs/operators' @Component({ selector: 'app-breadcrumbs', templateUrl: './breadcrumbs.component.html', styleUrls: ['./breadcrumbs.component.css'] }) export class BreadcrumbsComponent implements OnInit, OnDestroy { title:string =''; // El simbolo $ se lo ponemos para especificar que es un observable. tituloSubs$: Subscription; constructor(private router:Router) { this.tituloSubs$=this.getArgumentosRuta() .subscribe(({title}) => { this.title=title; document.title=`AdminPro-${title}`; }) } ngOnInit(): void { } getArgumentosRuta(){ return this.router.events.pipe( filter( event => event instanceof ActivationEnd), filter( (event:ActivationEnd) => event.snapshot.firstChild===null), map( (event:ActivationEnd) => event.snapshot.data)) // } ngOnDestroy(): void { // Esto lo hicimios porque al momento de darle logout, ya no es necesario saber la informacion que contiene la ruta. 
Y si volvemos a entrar sin refrescar el navegador, se va a disparar otra subscripcion, por lo que cada vez que ingreseramos acumulariamos otro subscribe this.tituloSubs$.unsubscribe(); } } <file_sep>import { Component, OnInit, NgZone } from '@angular/core'; import { FormBuilder, Validators } from '@angular/forms'; import { Router } from '@angular/router'; import { UsuarioService } from '../../providers/usuario.service'; import Swal from 'sweetalert2'; declare const gapi:any; @Component({ selector: 'app-login', templateUrl: './login.component.html', styleUrls: ['./login.component.css'] }) export class LoginComponent implements OnInit { email:string; auth2:any; loginForm = this.fb.group({ email:['',Validators.required,], password:['',Validators.required,], rememberMe:[false], }) constructor(private router:Router, private fb: FormBuilder, private usuarioService:UsuarioService, private ngZone:NgZone) { } ngOnInit(): void { // FORMA ELEGANTE DE RECORDAR EL CORREO DE UN LOGIN this.email=localStorage.getItem('email') || ''; console.log(this.email); this.loginForm.get('email').setValue(this.email); if (this.email.length>1) { this.loginForm.get('rememberMe').setValue(true); }; // Renderizamos el boton de google this.renderButton(); } login(){ console.log(this.loginForm.value); if (this.loginForm.invalid) { Swal.fire({ title: 'Error!', text: 'Complete los campos', icon: 'error', }) return; } this.usuarioService.loginUsuario(this.loginForm.value) .subscribe((resp) => { console.log(resp); this.router.navigateByUrl('/') },(err) => { console.log(err); Swal.fire({ title: 'Error!', text: err.error.msg || err.error.errors.email.msg, icon: 'error', }) }) } renderButton() { gapi.signin2.render('my-signin2', { 'scope': 'profile email', 'width': 240, 'height': 50, 'longtitle': true, 'theme': 'dark', }); this.startApp(); } async startApp() { await this.usuarioService.googleInit(); this.auth2 = this.usuarioService.auth2; this.attachSignin(document.getElementById('my-signin2')); }; 
attachSignin(element) { console.log(element.id); this.auth2.attachClickHandler(element, {}, (googleUser) => { var id_token = googleUser.getAuthResponse().id_token; this.usuarioService.loginGoogle(id_token) .subscribe((resp) => { // Despues de identificarnos con exito redireccionamos al dashboard // El ngzone se usa cuando librerias ajenas a angular se encargan de realizar una redireccion. O sea el metodo de redireccionamiento es de angular, pero quien lo dispara es un metodo de google. this.ngZone.run(() => { this.router.navigateByUrl('/'); }); }); }, (error) => { alert(JSON.stringify(error, undefined, 2)); }); } } <file_sep>import { Component, OnInit } from '@angular/core'; import { FormBuilder, FormGroup, Validators } from '@angular/forms'; import Swal from 'sweetalert2'; import { UsuarioService } from '../../providers/usuario.service'; import { Router } from '@angular/router'; @Component({ selector: 'app-register', templateUrl: './register.component.html', styleUrls: ['./register.component.css'] }) export class RegisterComponent implements OnInit { formSubmitted = false; registerForm= this.fb.group({ nombre: ['Fernando', [Validators.required, Validators.minLength(3)],], email: ['<EMAIL>', [Validators.required,Validators.email]], password: ['<PASSWORD>', [Validators.required, Validators.minLength(3)],], password2: ['<PASSWORD>', [Validators.required, Validators.minLength(3)],], terminos: [false, [Validators.requiredTrue],], }, {validators: this.passwordsIguales('password','<PASSWORD>') }) constructor(private fb: FormBuilder, private usuarioService:UsuarioService, private router:Router) { } ngOnInit(): void { } crearUsuario(){ this.formSubmitted=true; console.log(this.registerForm.value); if (this.registerForm.invalid) { return ; } // Realizar el posteo this.usuarioService.crearUsuario(this.registerForm.value) .subscribe((resp) => { console.log('Usuario creado'); console.log(resp); this.router.navigateByUrl('/') }, (err)=> { Swal.fire({ title: 'Error!', text: 
err.error.msg, icon: 'error', confirmButtonText: 'Ok' }) }); } campoNoValido(campo:string):boolean{ if (this.registerForm.get(campo).invalid && this.formSubmitted) { return true; }else { return false; } } aceptaTerminos(){ return !this.registerForm.get('terminos').value && this.formSubmitted; } contrasenasNoValidas(){ const pass1 = this.registerForm.get('password').value; const pass2 = this.registerForm.get('password2').value; if ((pass1!==pass2) && this.formSubmitted) { return true; }else { return false; } } // Validador personalizado, debe retornar un objeto o retornar null // Debo retornar una funcion. passwordsIguales(pass1Name:string, pass2Name:string){ return (formGroup:FormGroup) => { const pass1Control = formGroup.get(pass1Name); const pass2Control = formGroup.get(pass2Name); if (pass1Control.value === pass2Control.value) { pass2Control.setErrors(null); } else { pass2Control.setErrors({noIgual:true}); } } } } <file_sep>import { Component, OnInit } from '@angular/core'; import { SettingsService } from '../providers/settings.service'; // Esta declaracion es para que typescript no tire error por una funcion que el no alcanza a supervisar declare function customInitFunctions(); @Component({ selector: 'app-main-dashboard', templateUrl: './main-dashboard.component.html', }) export class MainDashboardComponent implements OnInit { constructor(private settingsService:SettingsService) { } ngOnInit(): void { // Esta funcion es para inicializar los scripts del index.html // Esta funcion viene de /assets/js/custom.js customInitFunctions(); } } <file_sep>import { Component, OnDestroy } from '@angular/core'; import { Observable, interval, Subscription } from 'rxjs'; import { take, map, filter } from 'rxjs/operators'; @Component({ selector: 'app-promises', templateUrl: './promises.component.html', styleUrls: ['./promises.component.css'] }) export class PromisesComponent implements OnDestroy { intervalSubs: Subscription; constructor() { 
this.intervalSubs=this.retornarIntervalo().subscribe(console.log); } ngOnDestroy(): void { this.intervalSubs.unsubscribe(); } retornarIntervalo(): Observable<number> { return interval(100).pipe( take(10), map(valor => valor +1), filter(valor => (valor % 2 === 0) ? true : false), ); } retornaObservable():Observable<number>{ let i =-1; return new Observable<number>(observer => { const intervalo = setInterval( () => { i++; observer.next(i); if (i === 4) { clearInterval(intervalo); observer.complete(); } if (i === 2) { observer.error(`i llego al valor de 2`) } }, 1000) }) } } <file_sep>import { Component, OnInit } from '@angular/core'; @Component({ selector: 'app-graphic1', templateUrl: './graphic1.component.html', styleUrls: ['./graphic1.component.css'] }) export class Graphic1Component implements OnInit { labels1:string[] = ['TITULO 1', 'In-Store Sales', 'Mail-Order Sales']; data1 = [ [10, 20, 30], ]; constructor() { } ngOnInit(): void { } }
66185d5e67fc4ed0ff8ae9e72121297456372f15
[ "TypeScript" ]
10
TypeScript
A-TapiaZ/AngularJS-AdminPro
25e1b1860092f5c050b9955887bfe3dbe97f7d78
36dd57322a96099d27d33cfdde1753e8ff0f0cfd
refs/heads/master
<file_sep>f=open("add.txt","r+") f.write("hhhhhh") for x in f: print(x) <file_sep>package bigfoot.wordsearch; import java.util.Objects; /** * Describes the start and end locations of a word in the grid */ public class WordVector { public String word; public int startX; public int startY; public int endX; public int endY; public WordVector(String word, int startX, int startY, int endX, int endY) { this.word = word; this.startX = startX; this.startY = startY; this.endX = endX; this.endY = endY; } @Override public String toString() { return "word='" + word + '\'' + ", start=[" + startX + ", " + startY + "], end=[" + endX + ", " + endY + "]"; } }
73dd2ccb37999d3de6b69b227cc050bba3de9992
[ "Java", "Python" ]
2
Python
kunr2014/git-github
38e501088e31522df8eee8e743243d451e9705f7
a6ede47e023b310f4b04f5d67404c092d4d4af35
refs/heads/master
<repo_name>redrohX/md-xmas<file_sep>/xmas2017/mail.php <?php function wrapMailMessage($string, $length=980, $splitchar="\n ") { if (strlen($string) <= $length) { $output = $string; //do nothing } else { $output = wordwrap($string, $length, $splitchar); } return $output; } $protocol = empty($_SERVER['HTTPS']) ? 'http' : 'https'; $domain = $_SERVER['SERVER_NAME']; $domain_url = "${protocol}://${domain}"; $message_header = ''; $message_contact_details = ''; $message_footer = ''; $message_sender = ''; $message = ''; $title = htmlspecialchars(strip_tags($_POST['title'])); $first_name = htmlspecialchars(strip_tags($_POST['first_name'])); $surname = htmlspecialchars(strip_tags($_POST['surname'])); $email = htmlspecialchars(strip_tags($_POST['email'])); $dietary_wishes = htmlspecialchars(strip_tags($_POST['dietary_wishes'])); $food_allergies = htmlspecialchars(strip_tags($_POST['food_allergies'])); // RSVP MAIL DATA $mail_sender = 'Minddistrict Santa'; $mail_sender_email = '<EMAIL>'; $subject = 'RSVPed to the Minddistrict Bowtie Bash 2017'; // GENERAL MAIL DATA // Header $message_header .= '<!DOCTYPE html>'; $message_header .= '<html>'; $message_header .= '<body bgcolor="#ffffff" style="background-color:#ffffff; margin:0; padding:0;">'; $message_header .= '<center>'; $message_header .= '<table width="600" bgcolor="#F4F3F0" align="center" border="0" cellpadding="15" cellspacing="0" '; $message_header .= 'style="background-color:#F4F3F0; border:10px dashed #F43B3B; border-radius:15px; border-collapse:collapse;">'; $message_header .= '<tbody>'; $message_header .= '<tr>'; $message_header .= '<td>'; $message_header .= '<table width="550" border="0" cellpadding="0" cellspacing="0">'; $message_header .= '<thead>'; $message_header .= '<tr>'; $message_header .= '<th valign="top" align="center">'; $message_header .= '<p style="color:#333333; font-family:Tahoma, sans-serif; font-size:13px; text-align:center; text-transform:uppercase;">'; $message_header .= 'Thank you for joining 
the'; $message_header .= '</p>'; $message_header .= '</th>'; $message_header .= '</tr>'; $message_header .= '<tr>'; $message_header .= '<th valign="top" align="center">'; $message_header .= '<h1 style="color:#F43B3B; font-family: Georgia, Times New Roman, Times, cursive; font-size:31px; margin:0; text-align:center;">'; $message_header .= '<img src="'.$domain_url.'/xmas/img/bow-tie-bash-title.png" alt="Minddistrict Bowtie Bash 2017" style="width:350px;"/>'; $message_header .= '</h1>'; $message_header .= '</th>'; $message_header .= '</tr>'; $message_header .= '</thead>'; $message_header .= '</table>'; $message_header .= '</td>'; $message_header .= '</tr>'; // Contact details $message_contact_details .= '<tr>'; $message_contact_details .= '<td style="color:#333333; font-family:Tahoma, sans-serif; font-size:17px; margin-bottom:30px;">'; $message_contact_details .= '<table width="550" border="0" cellpadding="0" cellspacing="0">'; $message_contact_details .= '<tbody>'; $message_contact_details .= '<tr>'; $message_contact_details .= '<th align="left" style="width:40%" valign="top"><strong>Title:</strong></th>'; $message_contact_details .= '<td align="left" valign="top">'.$title.'</td>'; $message_contact_details .= '</tr>'; $message_contact_details .= '<tr>'; $message_contact_details .= '<th align="left" style="width:40%" valign="top"><strong>First name:</strong></th>'; $message_contact_details .= '<td align="left" valign="top">'.$first_name.'</td>'; $message_contact_details .= '</tr>'; $message_contact_details .= '<tr>'; $message_contact_details .= '<th align="left" style="width:40%" valign="top"><strong>Surname:</strong></th>'; $message_contact_details .= '<td align="left" valign="top">'.$surname.'</td>'; $message_contact_details .= '</tr>'; $message_contact_details .= '<tr>'; $message_contact_details .= '<th align="left" style="width:40%" valign="top"><strong>Email:</strong></th>'; $message_contact_details .= '<td align="left" valign="top">'.$email.'</td>'; 
$message_contact_details .= '</tr>'; $message_contact_details .= '<tr>'; $message_contact_details .= '<th align="left" style="width:40%" valign="top"><strong>Dietary wishes:</strong></th>'; $message_contact_details .= '<td align="left" valign="top">'.$dietary_wishes.'</td>'; $message_contact_details .= '</tr>'; $message_contact_details .= '<tr>'; $message_contact_details .= '<th align="left" style="width:40%" valign="top"><strong>Food allergies:</strong></th>'; $message_contact_details .= '<td align="left" valign="top">'.$food_allergies.'</td>'; $message_contact_details .= '</tr>'; $message_contact_details .= '</tbody>'; $message_contact_details .= '</table>'; $message_contact_details .= '</td>'; $message_contact_details .= '</tr>'; // Footer $message_footer .= '<tr>'; $message_footer .= '<td style="color:#333333; font-family:Tahoma, sans-serif; font-size:13px; padding-bottom:15px; padding-top:15px;">'; $message_footer .= '<table width="550" border="0" cellpadding="0" cellspacing="0">'; $message_footer .= '<tfoot>'; $message_footer .= '<tr>'; $message_footer .= '<td align="center" valign="top" style="color:#3B8686; text-align:center;">'; $message_footer .= '&copy; 2017 Minddistrict North Pole'; $message_footer .= '</td>'; $message_footer .= '</tr>'; $message_footer .= '</tfoot>'; $message_footer .= '</table>'; $message_footer .= '</td>'; $message_footer .= '</tr>'; $message_footer .= '</td>'; $message_footer .= '</tr>'; $message_footer .= '</tbody>'; $message_footer .= '</table>'; $message_footer .= '</center>'; $message_footer .= '</body>'; $message_footer .= '</html>'; // SENDER MAIL DATA $to_sender = $mail_sender_email; $headers_sender = 'MIME-Version: 1.0' . "\r\n"; $headers_sender .= 'Content-type: text/html; charset=utf-8' . "\r\n"; $headers_sender .= 'From: '.$mail_sender.' <'.$mail_sender_email.'>' . "\r\n"; $headers_sender .= 'Cc: <EMAIL>' . 
"\r\n"; $message_sender .= $message_header; $message_sender .= '<tr>'; $message_sender .= '<td>'; $message_sender .= '<table width="550" border="0" cellpadding="0" cellspacing="0">'; $message_sender .= '<tbody>'; $message_sender .= '<tr>'; $message_sender .= '<td>'; $message_sender .= '<p style="color:#333333; font-family:Tahoma, sans-serif; font-size:17px;">'; $message_sender .= 'The following person '; $message_sender .= $title.' '.$first_name.' '.$surname; $message_sender .= ' has RSVPed to the Minddistrict Bowtie Bash 2017:'; $message_sender .= '</p>'; $message_sender .= '</td>'; $message_sender .= '</tr>'; $message_sender .= '</tbody>'; $message_sender .= '</table>'; $message_sender .= '</td>'; $message_sender .= '</tr>'; $message_sender .= $message_contact_details; $message_sender .= $message_footer; // RECEIVER MAIL DATA $to = $email; $headers = 'MIME-Version: 1.0' . "\r\n"; $headers .= 'Content-type: text/html; charset=utf-8' . "\r\n"; $headers .= 'From: '.$mail_sender.' <'.$mail_sender_email.'>' . "\r\n"; $headers .= 'Bcc: <EMAIL>, <EMAIL>' . "\r\n"; $message .= $message_header; $message .= '<tr>'; $message .= '<td>'; $message .= '<table width="550" border="0" cellpadding="0" cellspacing="0">'; $message .= '<tbody>'; $message .= '<tr>'; $message .= '<td>'; $message .= '<p style="color:#333333; font-family:Tahoma, sans-serif; font-size:17px; margin:0;">'; $message .= 'Dear '.$title.' '.$first_name.' 
'.$surname.',<br/><br/>'; $message .= 'Thank you for joining the Minddistrict Bowtie Bash!<br/>'; $message .= 'You have sent us the following information:'; $message .= '</p>'; $message .= '</td>'; $message .= '</tr>'; $message .= '</tbody>'; $message .= '</table>'; $message .= '</td>'; $message .= '</tr>'; $message .= $message_contact_details; $message .= '<tr>'; $message .= '<td style="color:#333333; font-family:Tahoma, sans-serif; font-size:17px; text-align:center;">'; $message .= '<table width="550" border="0" cellpadding="0" cellspacing="0">'; $message .= '<tbody>'; $message .= '<tr>'; $message .= '<td align="center" valign="top">'; $message .= '<br/>'; $message .= '<img src="'.$domain_url.'/xmas/img/gingerbread_bow-tie_transparent.gif" style="width:80px;"/>'; $message .= '<br/><br/>'; $message .= '<h2 style="color:#F43B3B; font-family:Tahoma, sans-serif; font-size:29px; font-weight:bold; margin:0 0 10px 0; text-align:center;">'; $message .= 'Friday, December 15th'; $message .= '</h2>'; $message .= '<h3 style="font-family:Tahoma, sans-serif; font-size:20px; font-weight:bold; margin:0; text-align:center;">'; $message .= 'The Bash will start at 7:00 pm and end at 1:00 am'; $message .= '</h3>'; $message .= '<br/><br/>'; $message .= '<span style="color:#F43B3B; font-size:15px;">'; $message .= '<strong>'; $message .= 'What’s the dress code again?'; $message .= '</strong>'; $message .= '</span>'; $message .= '<br/>'; $message .= 'We already told you this. '; $message .= '<strong>It’s bow ties. BOW TIES.</strong>'; $message .= '<br/><br/>'; $message .= '<span style="color:#F43B3B; font-size:15px;">'; $message .= '<strong>'; $message .= 'Where are we going again? 
Well, here:'; $message .= '</strong>'; $message .= '</span>'; $message .= '<br/><br/>'; $message .= '<a href="https://www.nelisoost.nl" title="<NAME>" target="_blank">'; $message .= '<img src="'.$domain_url.'/xmas/img/logo-nelis.svg" width="130"/>'; $message .= '</a>'; $message .= '<br/><br/>'; $message .= '<strong>'; $message .= '<a style="color:#F43B3B;" href="https://www.nelisoost.nl" title="NELIS Oost" target="_blank">'; $message .= 'NELIS Oost'; $message .= '</a>'; $message .= '</strong>'; $message .= '<br/>'; $message .= 'Sumatrastraat 28H,<br/>'; $message .= '1094 ND Amsterdam'; $message .= '</td>'; $message .= '</tr>'; $message .= '</tbody>'; $message .= '</table>'; $message .= '</td>'; $message .= '</tr>'; $message .= $message_footer; $message_sender = wrapMailMessage($message_sender); $message = wrapMailMessage($message); $result_sender = mail($to_sender, $subject, $message_sender, $headers_sender); $result = mail($to, $subject, $message, $headers); if(!$result_sender) { echo '<span style="color: #F43B3B">'; echo "Dear " . $title . " " . $first_name . " " . $surname . ", <br/><br/> Oops, something went wrong! <br/>Please refresh your page and fill out Santa’s RSVP form again. <br/><br/> Ho ho ho, Santa!"; echo '</span>'; } else { if(!$result) { echo '<span style="color: #F43B3B">'; echo "Dear " . $title . " " . $first_name . " " . $surname . ", <br/><br/> Oops, something went wrong! <br/>Please refresh your page and fill out Santa’s RSVP form again. <br/><br/> Ho ho ho, Santa!"; echo '</span>'; } else { echo '<span style="color: #3B8686">'; echo "Dear " . $title . " " . $first_name . " " . $surname . ", <br/><br/> Thank you voor RSVPing. <br/>We’re looking forward to seeing you… and remember: bow ties all around please! 
<br/><br/> Ho ho ho, Santa!"; echo '</span>'; } } ?> <file_sep>/xmas2017/js/form-validation.js /* * form-validation.js * Copyright (C) 2017 * Author: <NAME> <<EMAIL>> * Created: 2017-11-8 */ function isEmpty(item) { if (item.length > 0) { return false; } return true; } function isEmail(item) { var pattern = /^([a-z\d!#$%&'*+\-\/=?^_`{|}~\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+(\.[a-z\d!#$%&'*+\-\/=?^_`{|}~\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+)*|"((([ \t]*\r\n)?[ \t]+)?([\x01-\x08\x0b\x0c\x0e-\x1f\x7f\x21\x23-\x5b\x5d-\x7e\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]|\\[\x01-\x09\x0b\x0c\x0d-\x7f\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]))*(([ \t]*\r\n)?[ \t]+)?")@(([a-z\d\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]|[a-z\d\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF][a-z\d\-._~\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]*[a-z\d\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])\.)+([a-z\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]|[a-z\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF][a-z\d\-._~\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]*[a-z\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])\.?$/i; return pattern.test(item); } $(document).ready(function () { // Trimming the spaces from the input fields. 
$('input, textarea').each(function(){ $(this).val(jQuery.trim($(this).val())); }); $("#rsvpForm").on("submit", function(e) { var postData = $(this).serializeArray(); var formURL = $(this).attr("action"); var validated = true; var validationFeedback = 'Error'; //Checks if someone checked the title radio button if( $('input[name="title"]').is(':checked') ) { $('#error-title').hide(); $("input[name='title']").parent().parent().parent( ".form-group" ).removeClass("has-feedback has-error"); $("input[name='title']").parent().parent().parent( ".form-group" ).addClass("has-feedback has-success"); } else { $('#error-title').show(); $("input[name='title']").parent().parent().parent( ".form-group" ).removeClass("has-feedback has-success"); $("input[name='title']").parent().parent().parent( ".form-group" ).addClass("has-feedback has-error"); validated = false; } for (var i = 0, len = postData.length; i < len; i++) { switch(postData[i].name) { case 'first_name': if (isEmpty(postData[i].value)){ $('#error-firstname').show(); $("input[name='surname']").parent().parent( ".form-group" ).removeClass("has-feedback has-success"); $("input[name='first_name']").parent().parent( ".form-group" ).addClass("has-feedback has-error"); validated = false; } else { $('#error-firstname').hide(); $("input[name='first_name']").parent().parent( ".form-group" ).removeClass("has-feedback has-error"); $("input[name='first_name']").parent().parent( ".form-group" ).addClass("has-feedback has-success"); } break; case 'surname': if (isEmpty(postData[i].value)){ $('#error-surname').show(); $("input[name='surname']").parent().parent( ".form-group" ).removeClass("has-feedback has-success"); $("input[name='surname']").parent().parent( ".form-group" ).addClass("has-feedback has-error"); validated = false; } else { $('#error-surname').hide(); $("input[name='surname']").parent().parent( ".form-group" ).removeClass("has-feedback has-error"); $("input[name='surname']").parent().parent( ".form-group" 
).addClass("has-feedback has-success"); } break; case 'email': if (isEmpty(postData[i].value)){ $('#error-email').show(); $('#error-email-value').hide(); $("input[name='email']").parent().parent( ".form-group" ).removeClass("has-feedback has-success"); $("input[name='email']").parent().parent( ".form-group" ).addClass("has-feedback has-error"); validated = false; } else if (!isEmail(postData[i].value)){ $('#error-email').hide(); $('#error-email-value').show(); $("input[name='email']").parent().parent( ".form-group" ).removeClass("has-feedback has-success"); $("input[name='email']").parent().parent( ".form-group" ).addClass("has-feedback has-error"); validated = false; } else { $('#error-email').hide(); $('#error-email-value').hide(); $("input[name='email']").parent().parent( ".form-group" ).removeClass("has-feedback has-error"); $("input[name='email']").parent().parent( ".form-group" ).addClass("has-feedback has-success"); } break; } } if (validated) { console.log('Doing AJAX!'); $.ajax({ url: formURL, type: "POST", data: postData, success: function(data, textStatus, jqXHR) { $('#rsvpModal .modal-header .modal-title').html("Thank you for RSVPing"); $('#rsvpModal .modal-body').html(data); $("#btnSubmitRSVP").remove(); $("#rsvp-action-container").remove(); }, error: function(jqXHR, status, error) { } }); } e.preventDefault(); return false; }); });<file_sep>/xmas2017/index.php <!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1"> <meta name="robots" content="noindex, follow"> <title>Minddistrict Bowtie Bash 2017</title> <link rel="shortcut icon" href="favicon.ico" type="image/x-icon" /> <!-- Fonts --> <link href="https://fonts.googleapis.com/css?family=Galada|Montserrat" rel="stylesheet"> <!-- Bootstrap --> <link href="css/bootstrap.min.css" rel="stylesheet"> <link href="css/style.css" rel="stylesheet"> <!-- HTML5 shim and Respond.js for 
IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.3/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <?php $protocol = empty($_SERVER['HTTPS']) ? 'http' : 'https'; $domain = $_SERVER['SERVER_NAME']; $domain_url = "${protocol}://${domain}"; ?> <div class="snow-container"></div> <div class="container"> <div class="col-md-8 col-md-offset-2"> <div class="content"> <p class="text-invitation"> You are cordially invited to the </p> <h1 class="theme"> <a href="#" title="Minddistrict Bowtie Bash 2017" data-toggle="modal" data-target="#rsvpModal"> <img src="<?php echo $domain_url;?>/xmas/img/bow-tie-bash-title.png" alt="Minddistrict Bowtie Bash 2017" style="width:75%;"/> </a> </h1> <img src="<?php echo $domain_url;?>/xmas/img/gingerbread_bow-tie_transparent.gif" style="width:100px;"/> <h2 class="date">Friday, December 15th</h2> <h3 class="time">The Bash will start at 7:00 pm and end at 1:00 am</h3> <br/> <p class="dresscode"> <b class="text-danger">Dress code:</b><br/> we want to see something jolly with <b>bow ties</b><br/> (yes ladies, you too!)… </p> <br/> <div id="rsvp-action-container"> <button type="button" class="btn btn-lg btn-primary" title="Click here to RSVP" data-toggle="modal" data-target="#rsvpModal"> Click here to RSVP </button><br/><br/> <p class="text-small"> Unfortunately you are <b>NOT</b> allowed to bring a +1</br></br> <b>Please RSVP before Thursday, November 30th!</b> </p><br/> </div> <h2 class="text-title">Please show up with your brightest smile and best <b>bow tie</b> outfit at:</h2> <a href="https://www.nelisoost.nl/" title="NELIS Oost" target="_blank"> <img src="<?php echo $domain_url;?>/xmas/img/logo-nelis.svg" alt="NELIS Oost" style="width:130px;"/> </a> <br/><br/> <address class="location"> <b><a 
href="https://www.nelisoost.nl/" title="NELIS Oost" target="_blank">NELIS Oost</a></b><br/> Sumatrastraat 28H,<br/> 1094 ND Amsterdam<br/> </address> <br/> <h2 class="text-title">Let’s eat, drink and bounce around in our <b>bow ties</b>!</h2> <p><b><NAME> will be dropping some festive tunes for us.</b></p> <br/><br/> <span class="bow-tie-top-left"> <img src="img/bow-tie_yellow.png"/> </span> <span class="bow-tie-top-right"> <img src="img/bow-tie_green.png"/> </span> <span class="bow-tie-bottom-left"> <img src="img/bow-tie_magenta.png"/> </span> <span class="bow-tie-bottom-right"> <img src="img/bow-tie_blue.png"/> </span> </div> </div> </div> </div> <!-- Modal --> <div class="modal fade" id="rsvpModal" tabindex="-1" role="dialog" aria-labelledby="rsvpModalLabel"> <div class="modal-dialog" role="document"> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button> <h4 class="modal-title" id="rsvpModalLabel">RSVP to the Minddistrict Bowtie Bash 2017</h4> </div> <div class="modal-body"> <div id="form-messages"></div> <form id="rsvpForm" class="form-horizontal" action="<?php echo $domain_url;?>/xmas/mail.php" method="post"> <!-- TITLE --> <div class="form-group"> <label for="inputTitle" class="col-sm-4 control-label">Title</label> <div class="col-sm-8"> <label class="radio-inline"> <input type="radio" name="title" id="optionTitleMr" value="Mr."> Mr. </label> <label class="radio-inline"> <input type="radio" name="title" id="optionTitleMrs" value="Ms."> Ms. 
</label><br/> <span id="error-title" class="text-danger form-error-text">Please select one of the options.</span> </div> </div> <!-- FIRST NAME --> <div class="form-group"> <label for="inputFirstName" class="col-sm-4 control-label"> First name </label> <div class="col-sm-8"> <input type="text" class="form-control" id="inputFirstName" name="first_name"> <span id="error-firstname" class="text-danger form-error-text">Please fill this field.</span> </div> </div> <!-- SURNAME --> <div class="form-group"> <label for="inputSurname" class="col-sm-4 control-label"> Surname </label> <div class="col-sm-8"> <input type="text" class="form-control" id="inputSurname" name="surname"> <span id="error-surname" class="text-danger form-error-text">Please fill this field.</span> </div> </div> <!-- EMAIL --> <div class="form-group"> <label for="inputEmail" class="col-sm-4 control-label"> Email </label> <div class="col-sm-8"> <input type="email" class="form-control" id="inputEmail" name="email"> <span id="error-email" class="text-danger form-error-text">Please fill this field.</span> <span id="error-email-value" class="text-danger form-error-text">Please fill in the right emailaddress.</span> </div> </div> <!-- DIETARY WISHES --> <div class="form-group"> <label for="inputDietaryWishes" class="col-sm-4 control-label"> Dietary wishes </label> <div class="col-sm-8"> <select class="form-control" id="inputDietaryWishes" name="dietary_wishes"> <option value="No">No dietary wishes</option> <option value="Vegan">Vegan</option> <option value="Vegetarian">Vegetarian</option> </select> </div> </div> <!-- FOOD ALLERGIES --> <div class="form-group"> <label for="inputFoodAllergies" class="col-sm-4 control-label"> Food allergies<br/> / Dislikes </label> <div class="col-sm-8"> <textarea class="form-control" id="inputFoodAllergies" name="food_allergies"></textarea> </div> </div> <div class="form-group"> <div class="col-sm-offset-4 col-sm-8"> <input type="submit" id="btnSubmitRSVP" class="btn btn-lg 
btn-primary" value="Join the party" /> </div> </div> </form> </div> </div> </div> <!-- jQuery (necessary for Bootstrap's JavaScript plugins) --> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.12.4/jquery.min.js"></script> <script type="text/javascript" src="js/bootstrap.min.js"></script> <script type="text/javascript" src="js/cursor-animation.js"></script> <script type="text/javascript" src="js/form-validation.js"></script> </body> </html>
dfb906c6b19ce38fb21681e07bfbe8ad63214c6c
[ "JavaScript", "PHP" ]
3
PHP
redrohX/md-xmas
0a967a3ec3d1a3665bc7c7a7fd36b5bd7803fbe3
2d6e7651de4fa67be6fb058a4c1189a2b37a2435
refs/heads/master
<repo_name>Kal009/AutoFramework<file_sep>/src/test/java/com/Bframework/Newspage/NewspageSteps.java package com.Bframework.Newspage; /** * Created by Trupesh on 14/06/2017. */ public class NewspageSteps { } <file_sep>/src/test/java/com/Bframework/CommonSteps.java package com.Bframework; import com.Bframework.Homepage.Home_page; import cucumber.api.java.en.Given; import java.io.IOException; /** * Created by Trupesh on 14/06/2017. */ public class CommonSteps extends BaseSteps{ Home_page hp; Commonpage cp; public CommonSteps() throws IOException { } } <file_sep>/src/test/java/com/Bframework/Loginpage/LoginpageSteps.java package com.Bframework.Loginpage; import com.Bframework.BaseSteps; import com.Bframework.Commonpage; import com.Bframework.Homepage.Home_page; import cucumber.api.PendingException; import cucumber.api.java.After; import cucumber.api.java.en.And; import cucumber.api.java.en.Given; import cucumber.api.java.en.Then; import cucumber.api.java.en.When; import org.openqa.selenium.By; import org.testng.Assert; import java.io.IOException; import static alpha.BrowserFactory.driver; /** * Created by Trupesh on 14/06/2017. 
*/ public class LoginpageSteps extends BaseSteps { Commonpage cp; Home_page hp; Login_page lp = new Login_page(driver); public LoginpageSteps() throws IOException { } @After("@Krishna") public void teardown(){ driver.quit(); } @Given("^user is in home page$") public void userIsInHomePage() throws Throwable { hp =new Home_page(driver); hp.homepage(); System.out.println("start"); } @When("^user click on login page$") public void userClickOnLoginPage() throws Throwable { Home_page hp = new Home_page(driver); hp.loginpagelink(); } @Then("^user can see the sign in page$") public void userCanSeeTheSignInPage() throws Throwable { Login_page lp = new Login_page(driver); lp.resterlink(); } // @When("^user enter username \"([^\"]*)\" and password \"([^\"]*)\" and receive error \"([^\"]*)\"$") public void userEnterUsernameAndPasswordAndReceiveError(String arg0, String arg1, String arg2) throws Throwable { lp.loginpage(arg0, arg1); } @When("^user enter username \"([^\"]*)\" and password \"([^\"]*)\"$") public void userEnterUsernameAndPassword(String arg0, String arg1) throws Throwable { lp.loginpage(arg0, arg1); } @And("^user click on singin botton$") public void userClickOnSinginBotton() throws Throwable { Login_page lp = new Login_page(driver); lp.siginbotton(); } // @Then("^user should be in logged in page$") public void userShouldBeInLoggedInPage() throws Throwable { lp.errormassage(); } @Then("^user should can see error message \"([^\"]*)\"$") public void userShouldCanSeeErrorMessage(String arg0) throws Throwable { Assert.assertTrue(driver.findElement(By.xpath("//div[@id='password']/div[2]")).getText().matches(arg0)); } @And("^tear down browser$") public void tearDownBrowser() throws Throwable { cp = new Commonpage(driver); cp.teardown(); } }
ca2ca2980449b554f2064a407979466e4e3aed8b
[ "Java" ]
3
Java
Kal009/AutoFramework
d931e8808e9707128ec2e6a781d33a744db933c0
49f59c5732b9817c8458799af38ca19110a4aacc
refs/heads/master
<repo_name>aykaypkt/rithm_bcrypt_exercises<file_sep>/User_message_blueprint/project/users/models.py from project import db, bcrypt from project.messages.models import Message from flask_login import UserMixin class User(db.Model,UserMixin): __tablename__ = 'users' id = db.Column(db.Integer, primary_key=True) username = db.Column(db.Text, unique=True) password = db.Column(db.Text) email = db.Column(db.Text) first_name = db.Column(db.Text) last_name = db.Column(db.Text) messages = db.relationship('Message', backref='user',lazy='dynamic') def __init__(self,username, password, email, first_name, last_name): self.username = username self.password = <PASSWORD>_<PASSWORD>(<PASSWORD>).decode('UTF-8') self.email = email self.first_name = first_name self.last_name = last_name<file_sep>/User_message_blueprint/project/users/views.py from flask import redirect, render_template, request, url_for, Blueprint, flash from project.users.models import User from project.users.forms import UserForm, LoginForm, EditForm from project import db, bcrypt from flask_login import login_user, logout_user, current_user, login_required users_blueprint = Blueprint( 'users', __name__, template_folder='templates' ) @users_blueprint.route('/', methods =["GET", "POST"]) def index(): if request.method == "POST": form = UserForm(request.form) if form.validate(): new_user = User(request.form['username'],request.form['password'], request.form['email'], request.form['first_name'], request.form['last_name']) db.session.add(new_user) db.session.commit() return redirect(url_for('users.index')) return render_template('users/new.html', form=form) return render_template('users/index.html', users=User.query.all()) @users_blueprint.route('/login', methods=['GET','POST']) def login(): form = LoginForm(request.form) if form.validate_on_submit(): user = User.query.filter_by(username=form.data['username']).first() if user and bcrypt.check_password_hash(user.password, form.data['password']): login_user(user) flash("You 
have successfully logged in as {}".format(user.username)) return redirect(url_for('messages.index', id=user.id)) flash("Invalid credentials.") return render_template('users/login.html', form=form) @users_blueprint.route('/logout') def logout(): logout_user() flash('Sucessfully logged out!') return redirect(url_for('users.login')) @users_blueprint.route('/new') def new(): form = UserForm() return render_template('users/new.html', form=form) @users_blueprint.route('/<int:id>/edit') @login_required def edit(id): users=User.query.get(id) form = EditForm(obj=users) return render_template('users/edit.html', form=form, user=users) @users_blueprint.route('/<int:id>', methods =["GET", "PATCH", "DELETE"]) @login_required def show(id): found_user = User.query.get(id) if request.method == b"PATCH": form = EditForm(request.form) if form.validate(): found_user.first_name = request.form['first_name'] found_user.last_name = request.form['last_name'] found_user.email = request.form['email'] found_user.username = request.form['username'] db.session.add(found_user) db.session.commit() return redirect(url_for('users.show', id=found_user.id)) return render_template('users/edit.html', form=form, user=found_user) if request.method == b"DELETE": db.session.delete(found_user) db.session.commit() return redirect(url_for('users.index')) return render_template('users/show.html', user=found_user) <file_sep>/User_message_blueprint/project/__init__.py from flask import Flask from flask_sqlalchemy import SQLAlchemy from flask_modus import Modus from flask_bcrypt import Bcrypt import os from flask_login import LoginManager app = Flask(__name__) login_manager = LoginManager() modus = Modus(app) bcrypt = Bcrypt(app) login_manager.init_app(app) login_manager.login_view = "users.login" app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY') or 'Implement me later please' app.config['SQLALCHEMY_DATABASE_URI'] = 'postgres://localhost/users-blueprints' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False 
db = SQLAlchemy(app) # import a blueprint that we will create from project.users.views import users_blueprint from project.messages.views import messages_blueprint from project.users.models import User # register our blueprints with the application app.register_blueprint(users_blueprint, url_prefix='/users') app.register_blueprint(messages_blueprint, url_prefix='/users/<int:id>/messages') @login_manager.user_loader def load_user(user_id): return User.query.get(user_id) @app.route('/') def root(): return "HELLO BLUEPRINTS!"<file_sep>/simple_bcrypt/project/users/views.py from flask import redirect, render_template, request, url_for, Blueprint, flash, session from project.users.forms import UserForm, EditForm from project.users.models import User from project import db,bcrypt from flask_login import login_user, logout_user, current_user, login_required from functools import wraps from sqlalchemy.exc import IntegrityError users_blueprint = Blueprint( 'users', __name__, template_folder='templates' ) def ensure_correct_user(fn): @wraps(fn) def wrapper(*args, **kwargs): if kwargs.get('id') != current_user.id: flash("Not Authorized") return redirect(url_for('users.welcome')) return fn(*args, **kwargs) return wrapper @users_blueprint.route('/signup', methods=['GET','POST']) def signup(): form = UserForm(request.form) if form.validate_on_submit(): try: user = User(form.data['username'], form.data['password']) db.session.add(user) db.session.commit() except IntegrityError as e: flash("Username already taken.") return render_template('signup.html', form=form) flash("Sign up successful!") login_user(user) return redirect(url_for('users.welcome')) return render_template('signup.html', form=form) @users_blueprint.route('/login', methods=['GET', 'POST']) def login(): form = UserForm(request.form) if form.validate_on_submit(): user = User.query.filter_by(username=form.data['username']).first() if user and bcrypt.check_password_hash(user.password, form.data['password']): 
login_user(user) flash("You have successfully logged in as {}".format(user.username)) return redirect(url_for('users.welcome')) flash("Invalid credentials.") return render_template('login.html', form=form) @users_blueprint.route('/welcome') @login_required def welcome(): user = User.query.get(current_user.id) return render_template('welcome.html', user=user) @users_blueprint.route('/logout') def logout(): flash('logged out!') logout_user() return redirect(url_for('users.login')) @users_blueprint.route('/<int:id>', methods=["GET", "PATCH", "DELETE"]) @login_required @ensure_correct_user def show(id): user = User.query.get_or_404(id) form = EditForm(request.form) if request.method == b'PATCH': if form.validate(): user.username = form.data['username'] db.session.add(user) db.session.commit() return redirect(url_for('users.show', id=user.id)) return render_template('edit.html', user=user, form=form) if request.method == b'DELETE': db.session.delete(user) db.session.commit() flash('Deleted user') logout_user() return redirect(url_for('users.login')) return render_template('show.html', user=user) @users_blueprint.route('/<int:id>/edit') @login_required @ensure_correct_user def edit(id): user = User.query.get_or_404(id) form = UserForm(obj=user) return render_template('edit.html', user=user, form=form) <file_sep>/User_message_blueprint/project/users/templates/users/show.html {% extends 'base.html' %} {% block content %} <h1>{{user.username}}'s details:</h1> <li>First name: {{user.first_name}}</li> <li>Last name: {{user.last_name}}</li> <li>E-mail: {{user.email}}</li> <form action="{{url_for('users.edit', id=user.id)}}"> <input type="submit" value="Edit user"> </form> <form method="POST" action="{{url_for('users.show', id=user.id)}}?_method=DELETE"> <input type="submit" value="Delete user"> </form> {% endblock %}<file_sep>/User_message_blueprint/project/messages/views.py from flask import redirect, render_template, request, url_for, Blueprint from project.messages.models 
import Message from project.users.models import User from project.messages.forms import MessageForm from project import db messages_blueprint = Blueprint( 'messages', __name__, template_folder='templates' ) @messages_blueprint.route('/', methods =["GET", "POST"]) def index(id): if request.method == "POST": form = MessageForm() if form.validate(): new_message = Message(request.form['message'], id) db.session.add(new_message) db.session.commit() return redirect(url_for('messages.index',id=id)) return render_template('messages/new.html', form=form) return render_template('messages/index.html', user=User.query.get(id), messages=User.query.get(id).messages) @messages_blueprint.route('/new') def new(id): user = User.query.get(id) form = MessageForm() return render_template('messages/new.html', form=form, user=user) @messages_blueprint.route('/<int:mid>/edit') def edit(id,mid): user = User.query.get(id) message=Message.query.get(mid) form = MessageForm(obj=message) return render_template('messages/edit.html', form=form, message=message, user=user) @messages_blueprint.route('/<int:mid>', methods =["GET", "PATCH", "DELETE"]) def show(id,mid): user = User.query.get(id) found_message = Message.query.get(mid) if request.method == b"PATCH": form = MessageForm(request.form) if form.validate(): found_message.message = request.form['message'] db.session.add(found_message) db.session.commit() return redirect(url_for('messages.index', id=id)) return render_template('messages/edit.html', form=form, message=found_message) if request.method == b"DELETE": db.session.delete(found_message) db.session.commit() return redirect(url_for('messages.index', id=id)) return render_template('messages/show.html', message=found_message, user=user) <file_sep>/simple_bcrypt/project/__init__.py from flask import Flask, redirect, url_for from flask_sqlalchemy import SQLAlchemy from flask_bcrypt import Bcrypt from flask_modus import Modus import os from flask_login import LoginManager app = 
Flask(__name__) bcrypt = Bcrypt(app) modus = Modus(app) login_manager = LoginManager() login_manager.init_app(app) app.config['SQLALCHEMY_DATABASE_URI'] = 'postgres://localhost/learn-auth' app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY') or 'seriously, set a super secret key' # bad practice in general, but we'll live with it for now db = SQLAlchemy(app) from project.users.views import users_blueprint app.register_blueprint(users_blueprint, url_prefix='/users') login_manager.login_view = "users.login" from project.users.models import User @login_manager.user_loader def load_user(user_id): return User.query.get(user_id) @app.route('/') def index(): return redirect(url_for('users.login'))<file_sep>/User_message_blueprint/project/messages/forms.py from flask_wtf import FlaskForm from wtforms import StringField from wtforms.validators import DataRequired class MessageForm(FlaskForm): message = StringField('message', validators=[DataRequired()])
83f4d604cd97f34a9b942da527a738595928023d
[ "Python", "HTML" ]
8
Python
aykaypkt/rithm_bcrypt_exercises
545d60765b7bdba5911cc185970b9b206caf56cf
59aa18118cf9ca4787b5bc1ab5cc8b48b4c22fd7
refs/heads/master
<file_sep>// // TestViewController.swift // IssueTableViewDismiss0110 // // Created by <NAME> on 2020/01/10. // Copyright © 2020 ZhouyuanWork, Inc. All rights reserved. // import UIKit protocol TestViewControllerDelegate: class { func dismissController(_ controller: TestViewController) } class TestViewController: UIViewController { @IBOutlet weak var tableView: UITableView! weak var delegate: TestViewControllerDelegate? deinit { NSLog("deinit") print("-") } override func viewDidLoad() { super.viewDidLoad() tableView.register(UITableViewCell.self, forCellReuseIdentifier: "cell") } override func viewWillAppear(_ animated: Bool) { super.viewWillAppear(animated) view.backgroundColor = UIColor.black.withAlphaComponent(0.25) } } extension TestViewController: UITableViewDataSource { func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int { return 10 } func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell { let cell = tableView.dequeueReusableCell(withIdentifier: "cell", for: indexPath) cell.textLabel?.text = "test" return cell } } extension TestViewController: UITableViewDelegate { func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) { self.modalTransitionStyle = .crossDissolve // dismiss from presenting view-controller self.delegate?.dismissController(self) /* // dismiss from(within) self NSLog("start to dismiss") dismiss(animated: false) { NSLog("dismissed") } */ } } <file_sep>// // ViewController.swift // IssueTableViewDismiss0110 // // Created by <NAME> on 2020/01/10. // Copyright © 2020 ZhouyuanWork, Inc. All rights reserved. // import UIKit class ViewController: UIViewController { override func viewDidLoad() { super.viewDidLoad() // Do any additional setup after loading the view. } @IBAction func buttonTapped(_ sender: Any) { let sb = UIStoryboard.init(name: "Main", bundle: nil) let vc = sb.instantiateViewController(withIdentifier: "TestViewController") as! 
TestViewController vc.modalPresentationStyle = .overFullScreen vc.modalPresentationCapturesStatusBarAppearance = true vc.delegate = self present(vc, animated: false, completion: nil) } } extension ViewController: TestViewControllerDelegate { func dismissController(_ controller: TestViewController) { NSLog("begin to dismiss") controller.dismiss(animated: false) { NSLog("dismissed") } } }
ce5f5ff9fbc185cbc8a45fc18cfeb44b8f12787c
[ "Swift" ]
2
Swift
inexcii/issue-dismiss-presented-tableview
c8bb70e90479e80ef49c7fbc797ef7354e8835b9
36def939cdbc092c6fb4c1de86262af895480712
refs/heads/master
<repo_name>rigweasle/Weight-Loss-App<file_sep>/src/tracker/Main.java package tracker; import java.io.FileNotFoundException; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.time.LocalDate; import java.time.format.DateTimeFormatter; import java.time.temporal.ChronoUnit; import java.util.Scanner; public class Main { public static void main(String[] args) { // Creating date variable LocalDate today = LocalDate.now(); DateTimeFormatter formatter = DateTimeFormatter.ofPattern("MMMM dd, yyyy"); String cd = today.format(formatter); Scanner in1 = new Scanner(System.in); // First question input System.out.println("Lets get started!"); System.out.println("What is your name?"); String username; username = in1.nextLine(); // Second question input System.out.println("What is your age?"); while (!in1.hasNextInt()) { System.out.println("Please use numbers"); in1.next(); } int currentAge = in1.nextInt(); // Third question input System.out.println("What is your current weight?"); int currentWeight = in1.nextInt(); // Determining if you're losing or gaining weight. If within and If testing if first, the goal isn't equal and next, if it's positive or negative System.out.println("What is your goal weight?"); int targetWeight = in1.nextInt(); int goalDirection = (currentWeight - targetWeight); int wktgmod = 0; char goal = 'x'; if (goalDirection == 0) { System.out.println("Looks like you've either met your goal or have not set an appropriate one."); } else if (goalDirection > 1) { System.out.println("Looks like you're trying to lose weight."); goal = 'p'; wktgmod = -1; //Modifies what direction this goal will take. 
} else { System.out.println("Looks like you're trying to gain weight.\n"); goal = 'n'; wktgmod = 1; } // Determining the weekly target double weeklyTarget; weeklyTarget = 1.5; System.out.println("The default weekly goal is a " + weeklyTarget + "lbs change per week.\nWould you like to adjust it?\n(type y or n)"); String q2 = in1.next(); char a1 = q2.charAt(0); if (a1 == 'n') { System.out.println("Good, let's proceed.\n"); } else { System.out.println("What would you like to change it to?"); Scanner q3 = new Scanner (System.in); double q4 = q3.nextDouble(); weeklyTarget = q4; q3.close(); } System.out.println("The weekly target has been set to:\n" + weeklyTarget + "lbs per week.\n"); double finalMod; finalMod = weeklyTarget * wktgmod; String fnmd = String.valueOf(finalMod); //Calculating how many weeks will be necessary to achieve the goal double wkcalc = Math.abs((currentWeight - targetWeight) / finalMod); int wknd = (int) Math.ceil(wkcalc); String wkndst = String.valueOf(wknd); //Determining what date you should expect to reach your goal LocalDate gd = today.plus(wknd, ChronoUnit.WEEKS); String goalDate = gd.format(formatter); System.out.println("To reach your goal of " + targetWeight + "lbs, then you will need " + wknd + " weeks to achive this goal.\n"); System.out.println("The current date is:\n" + cd + "\n"); System.out.println("If all goes well, then you should expect to meet your goal by:\n" + goalDate); String fileoutln1 = (username + ',' + currentAge + ',' + currentWeight + ',' + targetWeight); String fileoutln3 = (fnmd + ',' + wkndst + ',' + goal + "," + goalDate); int dtcount = 0; //Writing all the collected values for the previous questions to a file try { PrintWriter writer; try { writer = new PrintWriter("goal.txt", "UTF-8"); writer.println(fileoutln1); writer.println(""); writer.println(fileoutln3); writer.println("date setup"); writer.println(cd); writer.println(""); while (dtcount <= wknd) { LocalDate gd1 = today.plus(dtcount, ChronoUnit.WEEKS); String 
gdate = gd1.format(formatter); double rcdt = (currentWeight + (finalMod * dtcount)); dtcount++; writer.println(gdate + ',' + " " + rcdt + "lbs"); } writer.close(); } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } } catch (UnsupportedEncodingException e) { // TODO Auto-generated catch block e.printStackTrace(); } in1.close(); } }
b7526005754ff757b4045a43b9f09f235b160fa7
[ "Java" ]
1
Java
rigweasle/Weight-Loss-App
3d1221653170de14a47e059ad3e6b87c1500f7da
14644edf38035a0fe9e2496c46a754dcb455a84d
refs/heads/master
<repo_name>amitshob/blog_ember<file_sep>/app/components/add-comment.js import Ember from 'ember'; export default Ember.Component.extend({ actions:{ submit() { var params = { username: this.get('username'), date: this.get('date'), comment: this.get('comment'), post: this.get('post') }; this.sendAction('submit',params); } } }); <file_sep>/app/models/comment.js import DS from 'ember-data'; export default DS.Model.extend({ username: DS.attr(), date: DS.attr(), comment:DS.attr(), post:DS.belongsTo('post') });
71dec46b4e20ca8dd0815ba43e709bc6edbd5f52
[ "JavaScript" ]
2
JavaScript
amitshob/blog_ember
a45d86166b0273c1c0bd15084cdbd6df44d2b97e
0f3d077722a97e8ea0c7642c6ba5ae809d47a797
refs/heads/main
<file_sep>pythan code is a hai type
588f45675307ed409e7b81d29f2f3b89f5d34471
[ "Shell" ]
1
Shell
Namdev12/project4
8ae2ddf8096f3634754b3f6a8434ce20938a7785
1d6d1bd1055d7264461b2fa5abfe945a5376b8a4
refs/heads/main
<repo_name>talha876/test-repo<file_sep>/config/routes.rb Rails.application.routes.draw do resources :urls # For details on the DSL available within this file, see https://guides.rubyonrails.org/routing.html root to: "urls#new" get '/shortened_url' => 'urls#shortened_url' get '/:id', to: 'urls#decode_url' end
8267fd96dfd1aac53ae1b1d9e49a404a3656a68f
[ "Ruby" ]
1
Ruby
talha876/test-repo
2155805a0ccff46dd16ac2afdb1f399f8b68a573
add74adedcc8841cae692988ea4834a097fc2f2c
refs/heads/master
<repo_name>JQ-Networks/UMRExtensions<file_sep>/umr_extensions_demo/QQ_recall.py from typing import List, Dict import asyncio from unified_message_relay.Core import UMRLogging from unified_message_relay.Core.UMRCommand import register_command, quick_reply from unified_message_relay.Core.UMRType import ChatAttribute, UnifiedMessage, MessageEntity, GroupID, DestinationMessageID, SendAction from unified_message_relay.Core.UMRMessageRelation import get_relation_dict from umr_coolq_driver import driver as QQ from unified_message_relay.Core.UMRDriver import driver_lookup_table logger = UMRLogging.get_logger('Plugin.QQ-recall') @register_command(cmd=['del', 'recall'], description='recall all related qq message sent by forward bot') async def command(chat_attrs: ChatAttribute, args: List): if chat_attrs.reply_to: message_relation = get_relation_dict(src_platform=chat_attrs.platform, src_chat_id=chat_attrs.chat_id, src_chat_type=chat_attrs.chat_type, message_id=chat_attrs.reply_to.message_id) dst_drivers = {k: v for k, v in driver_lookup_table.items() if isinstance(v, QQ.QQDriver)} if message_relation: filtered_message_ids: Dict[GroupID, DestinationMessageID] = {k: w for k, w in message_relation.items() if k.platform in dst_drivers} if filtered_message_ids: for key, value in filtered_message_ids.items(): asyncio.run_coroutine_threadsafe(dst_drivers[value.platform].bot.delete_msg(message_id=value.message_id), dst_drivers[value.platform].loop) reply_text = 'Message recalled' else: reply_text = 'No related QQ message found' else: reply_text = 'Message not recallable' else: reply_text = 'No message specified, please reply to a message' await quick_reply(chat_attrs, reply_text) <file_sep>/umr_extensions_demo/QQ_group_invite.py from typing import List, Dict import asyncio from unified_message_relay.Core import UMRLogging from unified_message_relay.Core import UMRConfig from unified_message_relay.Core.UMRCommand import register_command, quick_reply from 
unified_message_relay.Core.UMRType import ChatAttribute, UnifiedMessage, MessageEntity, GroupID, DestinationMessageID, SendAction from unified_message_relay.Core.UMRMessageRelation import get_relation_dict from umr_coolq_driver import driver as QQ from unified_message_relay.Core import UMRDriver from aiogram import Bot, Dispatcher, executor, types from aiogram.utils.callback_data import CallbackData import threading from time import sleep logger = UMRLogging.get_logger('Plugin.QQ-group-invite') # @register_command(cmd=['del', 'recall'], description='recall all related qq message sent by forward bot') # async def command(chat_attrs: ChatAttribute, args: List): # if chat_attrs.reply_to: # message_relation = get_relation_dict(src_platform=chat_attrs.platform, # src_chat_id=chat_attrs.chat_id, # src_chat_type=chat_attrs.chat_type, # message_id=chat_attrs.reply_to.message_id) # # dst_drivers = {k: v for k, v in driver_lookup_table.items() if isinstance(v, QQ.QQDriver)} # # if message_relation: # filtered_message_ids: Dict[GroupID, DestinationMessageID] = {k: w for k, w in message_relation.items() if # k.platform in dst_drivers} # if filtered_message_ids: # for key, value in filtered_message_ids.items(): # asyncio.run_coroutine_threadsafe(dst_drivers[value.platform].delete_msg(message_id=value.message_id), dst_drivers[value.platform].loop) # reply_text = 'Message recalled' # else: # reply_text = 'No related QQ message found' # else: # reply_text = 'Message not recallable' # else: # reply_text = 'No message specified, please reply to a message' # # await quick_reply(chat_attrs, reply_text) bot_token = UMRConfig.config.get('TelegramConsole') admin_list = UMRConfig.config.get('BotAdmin', dict()) if admin_list: admin_list = admin_list.get('Telegram') accept_cb = CallbackData('request', 'result', 'driver', 'request_type', 'handle') # todo post init trigger sleep(5) dst_drivers = {k: v for k, v in UMRDriver.driver_lookup_table.items() if isinstance(v, QQ.QQDriver)} def 
get_keyboard(driver: str, request_type: str, handle: str): return types.InlineKeyboardMarkup().row( types.InlineKeyboardButton('Accept', callback_data=accept_cb.new(type='accept', driver=driver, request_type=request_type, handle=handle)), types.InlineKeyboardButton('Decline', callback_data=accept_cb.new(type='decline', driver=driver, request_type=request_type, handle=handle)) ) def start(): def run(): def handle_exception(loop, context): # context["message"] will always be there; but context["exception"] may not msg = context.get("exception", context["message"]) logger.exception('Unhandled exception: ', exc_info=msg) logger.debug('Running qq-group-invite start') loop = asyncio.new_event_loop() loop.set_exception_handler(handle_exception) asyncio.set_event_loop(loop) bot = Bot(token=bot_token) dp = Dispatcher(bot) for driver_name, driver in dst_drivers.items(): @driver.bot.on_request() async def handle_event(context): user_id = context.get('user_id') stranger_name = driver.bot.get_stranger_info(user_id=user_id).get('nickname', str(user_id)) if context['request_type'] == 'group': group_name = driver.bot.get_group_info(group_id=context["group_id"]) \ .get('group_name', str(context["group_id"])) if context['sub_type'] == 'add': action = 'group_add' message = f'"{stranger_name}" wants to join group "{group_name}".' else: action = 'group_invite' message = f'"{stranger_name}" wants to add you to group "{group_name}".' elif context['request_type'] == 'friend': action = 'friend' message = f'"{stranger_name}" wants to add you as friend.' 
else: logger.info('unhandled event: ' + str(context)) return for chat_id in admin_list: asyncio.run_coroutine_threadsafe( bot.send_message(chat_id, message, reply_markup=get_keyboard(driver_name, action, context['flag'])), loop) @dp.callback_query_handler(accept_cb.filter(result=['accept', 'decline'])) async def callback_vote_action(query: types.CallbackQuery, callback_data: dict): logger.info('Got this callback data: %r', callback_data) await query.answer() # don't forget to answer callback query as soon as possible callback_data_action = callback_data['result'] callback_driver = dst_drivers[callback_data['driver']] callback_request_type = callback_data['request_type'] callback_handle = callback_data['handle'] if callback_data_action == 'accept': if callback_request_type == 'group_add': callback_driver.bot.set_group_add_request(flag=callback_handle, sub_type='add', approve=True) elif callback_request_type == 'group_invite': callback_driver.bot.set_group_add_request(flag=callback_handle, sub_type='invite', approve=True) else: callback_driver.bot.set_friend_add_request(flag=callback_handle, approve=True) else: if callback_request_type == 'group_add': callback_driver.bot.set_group_add_request(flag=callback_handle, sub_type='add', approve=False) elif callback_request_type == 'group_invite': callback_driver.bot.set_group_add_request(flag=callback_handle, sub_type='invite', approve=False) else: callback_driver.bot.set_friend_add_request(flag=callback_handle, approve=False) await bot.edit_message_text( query.message.text + '\nAccepted' if callback_data_action == 'accept' else '\nDeclined', query.from_user.id, query.message.message_id ) executor.start_polling(dp, skip_updates=True, loop=loop) t = threading.Thread(target=run) t.daemon = True UMRDriver.threads.append(t) t.start() logger.debug(f'Finished qq-group-invite initialization') if bot_token and admin_list: start() <file_sep>/umr_extensions_demo/cmd_echo.py from typing import List from 
unified_message_relay.Core.UMRType import ChatAttribute from unified_message_relay.Core.UMRCommand import register_command, quick_reply @register_command(cmd='echo', description='reply every word you sent') async def command(chat_attrs: ChatAttribute, args: List): """ Prototype of command :param chat_attrs: :param args: :return: """ if not args: # args should not be empty return await quick_reply(chat_attrs, ' '.join(args)) <file_sep>/umr_extensions_demo/cmd_privilege.py from typing import List from unified_message_relay.Core.UMRType import ChatAttribute, Privilege from unified_message_relay.Core.UMRCommand import register_command, quick_reply @register_command(cmd='owner', description='only owner can use this command', privilege=Privilege.GROUP_OWNER) async def command(chat_attrs: ChatAttribute, args: List): """ Prototype of command :param chat_attrs: :param args: :return: """ if args: # args should be empty return await quick_reply(chat_attrs, 'You are owner') @register_command(cmd='admin', description='only admin can use this command', privilege=Privilege.GROUP_ADMIN) async def command(chat_attrs: ChatAttribute, args: List): """ Prototype of command :param chat_attrs: :param args: :return: """ if args: # args should be empty return await quick_reply(chat_attrs, 'You are admin')<file_sep>/umr_extensions_demo/QQ-face-test.py from typing import List from unified_message_relay.Core import UMRLogging from unified_message_relay.Core.UMRType import ChatAttribute, ChatType from aiocqhttp import MessageSegment from unified_message_relay.Core.UMRDriver import driver_lookup_table from umr_coolq_driver import driver as QQ logger = UMRLogging.get_logger('Plugin.QQ-recall') # @register_command(cmd=['face'], description='test QQ face') async def command(chat_attrs: ChatAttribute, args: List): if not args: return False if len(args) != 2: return False dst_driver_name = args[0] dst_chat_id = int(args[1]) dst_driver = driver_lookup_table.get(dst_driver_name) if not dst_driver: 
return assert isinstance(dst_driver, QQ.QQDriver) context = dict() if chat_attrs.chat_type == ChatType.UNSPECIFIED: return context['message_type'] = f"{chat_attrs.chat_type}" context['message'] = list() if chat_attrs.chat_type == ChatType.PRIVATE: context['user_id'] = dst_chat_id else: context[f'{chat_attrs.chat_type}_id'] = abs(dst_chat_id) for i in range(256): context['message'].append(MessageSegment.text(f'Emoji {i}: ')) context['message'].append(MessageSegment.face(i)) context['message'].append(MessageSegment.text('\n')) await dst_driver.bot.send(context, context['message']) <file_sep>/umr_extensions_demo/cmd_id.py from typing import List from unified_message_relay.Core.UMRType import ChatAttribute from unified_message_relay.Core.UMRCommand import register_command, quick_reply from unified_message_relay.Core.UMRMessageRelation import get_message_id @register_command(cmd='id', description='get group id') async def command(chat_attrs: ChatAttribute, args: List): """ Prototype of command :param chat_attrs: :param args: :return: """ if args: # args should be empty return if chat_attrs.reply_to: source_message = get_message_id(src_platform=chat_attrs.platform, src_chat_id=chat_attrs.chat_id, src_chat_type=chat_attrs.chat_type, src_message_id=chat_attrs.reply_to.message_id, dst_platform=chat_attrs.platform, dst_chat_id=chat_attrs.chat_id, dst_chat_type=chat_attrs.chat_type) if source_message and source_message.source: await quick_reply(chat_attrs, 'src_chat_type:' + str(source_message.source.chat_type) + '\nsrc_chat_id: ' + str(source_message.source.chat_id)) else: await quick_reply(chat_attrs, 'chat_id: ' + str(chat_attrs.chat_id)) else: await quick_reply(chat_attrs, 'chat_id: ' + str(chat_attrs.chat_id)) <file_sep>/umr_extensions_demo/comment_filter.py from unified_message_relay.Core import UMRLogging from unified_message_relay.Core.UMRMessageHook import register_hook from unified_message_relay.Core.UMRCommand import quick_reply from 
unified_message_relay.Core.UMRType import UnifiedMessage logger = UMRLogging.get_logger('Plugin.Comment') # Filter messages that start with // @register_hook() async def message_hook_func(message: UnifiedMessage) -> bool: # filter keyword raw_text = message.text if raw_text.startswith('//'): # await quick_reply(message.chat_attrs, f'Message filtered') return True return False <file_sep>/umr_extensions_demo/Telegram_watermeter.py import yaml import pathlib import os from typing import Dict, List, Optional from typing_extensions import Literal from unified_message_relay.Core import UMRLogging from unified_message_relay.Core.UMRMessageHook import register_hook from unified_message_relay.Core.UMRCommand import register_command, quick_reply from unified_message_relay.Core.UMRType import UnifiedMessage, ChatAttribute, Privilege from unified_message_relay.Core.UMRExtension import BaseExtension, register_extension from unified_message_relay.Core import UMRConfig logger = UMRLogging.get_logger('Plugin.WaterMeter') # Telegram water meter filter # supports keyword filter, forward source filter(chat id based) class TelegramWaterMeterConfig(UMRConfig.BaseExtensionConfig): Extension: Literal['TelegramWaterMeter'] Keyword: Optional[List[str]] ChatID: Optional[List[int]] UMRConfig.register_extension_config(TelegramWaterMeterConfig) class TelegramWaterMeter(BaseExtension): def __init__(self): super().__init__() async def post_init(self): await super().post_init() self.config: TelegramWaterMeterConfig = \ UMRConfig.config.ExtensionConfig.setdefault(__name__, TelegramWaterMeterConfig( Extension='TelegramWaterMeter', Keyword=[], ChatID=[])) @register_hook(src_driver='Telegram') async def message_hook_func(message: UnifiedMessage) -> bool: # filter source if message.chat_attrs.forward_from and message.chat_attrs.forward_from.chat_id in self.config.ChatID: await quick_reply(message.chat_attrs, 'Message blocked by rule (channel)') return True # filter keyword raw_text = message.text for 
keyword in self.config.Keyword: if keyword in raw_text: await quick_reply(message.chat_attrs, f'Message blocked by rule (keyword: {keyword})') return True return False @register_command(cmd=['block_channel', 'bc'], platform='Telegram', description='register block channel', privilege=Privilege.BOT_ADMIN) async def command(chat_attrs: ChatAttribute, args: List): if not chat_attrs.reply_to: await quick_reply(chat_attrs, 'Message not specified, please reply to a message') return False reply_chat_attrs = chat_attrs.reply_to if not reply_chat_attrs.forward_from: # definitely not a channel await quick_reply(chat_attrs, 'Message is not a forward') return False if reply_chat_attrs.forward_from.chat_id >= 0: await quick_reply(chat_attrs, 'Message is not from channel') return False channel_id = reply_chat_attrs.forward_from.chat_id if channel_id in self.config.ChatID: await quick_reply(chat_attrs, 'Channel already exists') else: self.config.ChatID.append(reply_chat_attrs.forward_from.chat_id) UMRConfig.save_config() await quick_reply(chat_attrs, f'Success, added channel {reply_chat_attrs.forward_from.name}') @register_command(cmd=['block_keyword', 'bk'], platform='Telegram', description='register block keyword', privilege=Privilege.BOT_ADMIN) async def command(chat_attrs: ChatAttribute, args: List): if not args: await quick_reply(chat_attrs, 'Empty keyword list') return False old_keywords = set(self.config.Keyword) new_keywords = set(args) exists_keywords = old_keywords & new_keywords added_keywords = new_keywords - exists_keywords if added_keywords: self.config.Keyword = list(old_keywords | new_keywords) UMRConfig.save_config() if exists_keywords: await quick_reply(chat_attrs, f'Success, added keywords: {", ".join(added_keywords)}\n' f'exists keywords: {", ".join(exists_keywords)}') await quick_reply(chat_attrs, f'Success, added keywords: {", ".join(added_keywords)}') else: await quick_reply(chat_attrs, f'All keyword exists: {", ".join(exists_keywords)}') 
register_extension(TelegramWaterMeter()) <file_sep>/umr_extensions_demo/__init__.py from . import cmd_echo, cmd_id, cmd_privilege from . import comment_filter from . import QQ_recall from . import Telegram_watermeter __VERSION__ = '0.2'
14b734e5c8c52ba75eeacb1d7c52d6fcbb93cb90
[ "Python" ]
9
Python
JQ-Networks/UMRExtensions
da92dea3680b27478e8ed52d175e438711e57596
2e3b22b71c4da12c7b1dac9b500ba04162ca1bb4
refs/heads/master
<file_sep># AtlacommArchiver C# class library and archiver for handling Atlacomm Archive Format (.aaf) files <file_sep>using System; using System.Collections.Generic; namespace Atlacomm.ArchiveFormat { public partial class Archive { public byte[] BuildArchive() { List<byte> data = new List<byte>(); // Write header foreach (byte b in HEADER) { data.Add((byte)(b ^ XOR)); } // Write index foreach (string file in Files.Keys) { data.Add((byte)file.Length); foreach (char c in file) { data.Add((byte)c); } byte[] sizeBytes = BitConverter.GetBytes((long)Files[file].Length); foreach (byte b in sizeBytes) { data.Add(b); } } // Write index terminator data.Add(0); foreach (string file in Files.Keys) { foreach (byte b in Files[file]) { data.Add(b); } } return data.ToArray(); } } } <file_sep>using System; using System.Collections.Generic; namespace Atlacomm.ArchiveFormat { public partial class Archive { public ArchiveFolder GetFileHierarchy() { // Root Folder ArchiveFolder rootFolder = new ArchiveFolder("ROOT"); // Go through all files in archive foreach (string file in Files.Keys) { // Count the subdirectories the file is in int folderCount = file.Length - file.Replace("/", "").Length; // Split file path on slash string[] split = file.Split('/'); // Keep track of the current folder ArchiveFolder currentFolder = rootFolder; // Go through all subdirectories for (int i = 0; i < folderCount; i++) { // Get current directory string folder = split[i]; // Check if the folder already exists in the file structure bool folderExists = false; foreach (ArchiveFolder archiveFolder in currentFolder.Folders) { if (archiveFolder.Name == folder) { // If the folder exists set it as the current folder folderExists = true; currentFolder = archiveFolder; break; } } // If the folder doesn't exist create it and set it as the current folder if (!folderExists) { ArchiveFolder newFolder = new ArchiveFolder(folder); currentFolder.Folders.Add(newFolder); currentFolder = newFolder; } } // Add the file to the 
folder currentFolder.Files.Add(new ArchiveFile(split[folderCount], Files[file].Length)); } // Return the root folder return rootFolder; } } } <file_sep>using System.Collections.Generic; namespace Atlacomm.ArchiveFormat { public partial class Archive { public byte[] GetFile(string filepath) { // If the archive does contain the specifed file return null if (!Contains(filepath)) return null; // Return the specified file return Files[filepath]; } } } <file_sep>using System.IO; namespace Atlacomm.ArchiveFormat { public partial class Archive { public void Extract(string folderpath) { // Go through all files in the archive foreach (string file in Files.Keys) { // Create destination file path string filepath = Path.Combine(folderpath, file); // Get directory from destination file path string dir = Path.GetDirectoryName(filepath); // Create directory if it doesn't exist if (!Directory.Exists(dir)) Directory.CreateDirectory(dir); // Write file to destination file path File.WriteAllBytes(filepath, GetFile(file)); } } } } <file_sep>using System; namespace Atlacomm.ArchiveFormat { public class ArchiveFile { // File name public string Name; // File size public long Size; // Get string formatted public string SizeFormatted { get { double size = Size; string unit = "bytes"; if (size >= 1024) { size /= 1024; unit = "KB"; } if (size >= 1024) { size /= 1024; unit = "MB"; } if (size >= 1024) { size /= 1024; unit = "GB"; } if (size >= 1024) { size /= 1024; unit = "TB"; } return Math.Round(size, 1).ToString() + " " + unit; } } public ArchiveFile(string name, long size) { Name = name; Size = size; } public override string ToString() { return Name; } } } <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace Atlacomm.ArchiveFormat { public partial class Archive { public bool Contains(string filepath) { // Return whether or not the Files Dictionary contaisn a file with the specified key return 
Files.Keys.Contains(filepath); } } } <file_sep>using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Text; using System.Threading.Tasks; namespace Atlacomm.ArchiveFormat { public partial class Archive { public static Archive Load(string filepath) { Archive archive = new Archive(); // Whether or not the archive need to be decrypted bool decrypt = true; // Load archive file byte[] data = File.ReadAllBytes(filepath); // Go through the data for (int i = 0; i < data.Length; i++) { // Check whether the current byte is part of the header if (i < HEADER.Length) { // Check whether the current header byte is valid if (data[i] != HEADER[i]) { // If the header is not valid check if the file is decrypted if (data[i] != (byte)(HEADER[i] ^ XOR)) { // If the file is neither a valid decrypted nor an encrypted archive throw a FileFormatException throw new FileFormatException("Invalid file header"); } else { // If the file has a valid decrypted file header make sure that the decrypted file will not be encrypted again decrypt = false; } } } // Decrypt the current byte of the file if neccessary if (decrypt) data[i] = (byte)(data[i] ^ XOR); } // Index Dictionary<string, long> index = new Dictionary<string, long>(); // Offset at which file content starts int contentOffset = 0; // Populate the index for (int i = HEADER.Length; i < data.Length;) { // First byte is the length of the path int pathLength = data[i]; i++; // path's length also used as the index terminator if zero if (pathLength == 0) { contentOffset = i; break; } // Read the path string path = ""; for (int j = 0; j < pathLength; j++, i++) { path += (char)data[i]; } // Read 8 bytes and convert them to a 64 bit integer to get the file's size byte[] sizeBytes = new byte[8]; for (int j = 0; j < 8; j++, i++) { sizeBytes[j] = data[i]; } long size = BitConverter.ToInt64(sizeBytes, 0); // Add the current file to the index index.Add(path, size); } // Use the index to load all files foreach 
(string file in index.Keys) { byte[] fileData = new byte[index[file]]; for (int i = 0; i < fileData.Length; i++, contentOffset++) { fileData[i] = data[contentOffset]; } archive.Files.Add(file, fileData); } return archive; } } } <file_sep>using System; using System.Collections.Generic; namespace Atlacomm.ArchiveFormat { public class ArchiveFolder { // Folder name public string Name; // Files public List<ArchiveFile> Files; // Subdirectories public List<ArchiveFolder> Folders; // Get size formatted public string SizeFormatted { get { double size = Size; string unit = "bytes"; if (size >= 1024) { size /= 1024; unit = "KB"; } if (size >= 1024) { size /= 1024; unit = "MB"; } if (size >= 1024) { size /= 1024; unit = "GB"; } if (size >= 1024) { size /= 1024; unit = "TB"; } return Math.Round(size, 1).ToString() + " " + unit; } } // Calculate and return folder size public long Size { get { long ret = 0; foreach (ArchiveFile file in Files) ret += file.Size; foreach (ArchiveFolder folder in Folders) ret += folder.Size; return ret; } } public ArchiveFolder(string name, IList<ArchiveFile> files = null, IList<ArchiveFolder> folders = null) { Name = name; Files = new List<ArchiveFile>(files ?? Array.Empty<ArchiveFile>()); Folders = new List<ArchiveFolder>(folders ?? 
Array.Empty<ArchiveFolder>()); } // Return folder name public override string ToString() { return Name; } } } <file_sep>using System; using System.Collections.Generic; using System.IO; using System.Windows.Forms; using Atlacomm.ArchiveFormat; namespace Atlacomm.Archiver { public partial class MainWindow : Form { Archive archive = null; ArchiveFolder rootFolder = null; ArchiveFolder currentFolder = null; List<ArchiveFolder> parentFolders = new List<ArchiveFolder>(); public MainWindow() { InitializeComponent(); } public void LoadArchive(string path) { archive = Archive.Load(path); rootFolder = archive.GetFileHierarchy(); currentFolder = rootFolder; } public void UpdateFileView() { if (archive == null) return; if (rootFolder == null) return; if (currentFolder == null) return; fileView.Items.Clear(); foreach (ArchiveFolder folder in currentFolder.Folders) { ListViewItem item = new ListViewItem(folder.Name); // Name item.SubItems.Add("File folder"); // Type item.SubItems.Add(folder.SizeFormatted); // Size fileView.Items.Add(item); } foreach (ArchiveFile file in currentFolder.Files) { ListViewItem item = new ListViewItem(file.Name); // Name string[] extension = file.Name.Split('.'); if (extension.Length == 1) item.SubItems.Add("File"); // Type else item.SubItems.Add(extension[extension.Length - 1].ToUpper() + " file"); item.SubItems.Add(file.SizeFormatted); // Size fileView.Items.Add(item); } string path = "/"; for (int i = 1; i < parentFolders.Count; i++) { path += parentFolders[i].Name; path += "/"; } if (currentFolder.Name != rootFolder.Name) path += currentFolder.Name; pathTextBox.Text = path; } private void fileView_MouseClick(object sender, MouseEventArgs e) { if (e.Button == MouseButtons.Right) { string path = ""; for (int i = 1; i < parentFolders.Count; i++) { path += parentFolders[i].Name; path += "/"; } if (currentFolder.Name != rootFolder.Name) path += currentFolder.Name + "/"; path += fileView.SelectedItems[0].Text; if (archive.Contains(path)) 
fileContextMenu.Show(fileView, e.Location); } } private void fileView_MouseDoubleClick(object sender, MouseEventArgs e) { if (e.Button == MouseButtons.Left) { string selectedItem = fileView.SelectedItems[0].Text; foreach (ArchiveFolder folder in currentFolder.Folders) { if (folder.Name == selectedItem) { parentFolders.Add(currentFolder); currentFolder = folder; UpdateFileView(); break; } } } } private void buttonPrevFolder_Click(object sender, EventArgs e) { if (parentFolders.Count == 0) return; currentFolder = parentFolders[parentFolders.Count - 1]; parentFolders.RemoveAt(parentFolders.Count - 1); UpdateFileView(); } private void mainMenu_File_Exit_Click(object sender, EventArgs e) { Application.Exit(); } private void mainMenu_File_Open_Click(object sender, EventArgs e) { DialogResult result = openFileDialog.ShowDialog(); if (result == DialogResult.OK) { LoadArchive(openFileDialog.FileName); UpdateFileView(); } } private void mainMenu_Help_About_Click(object sender, EventArgs e) { new AboutWindow().ShowDialog(); } private void fileContextMenu_Extract_Click(object sender, EventArgs e) { string selectedItem = fileView.SelectedItems[0].Text; string path = ""; for (int i = 1; i < parentFolders.Count; i++) { path += parentFolders[i].Name; path += "/"; } if (currentFolder.Name != rootFolder.Name) path += currentFolder.Name + "/"; path += selectedItem; byte[] file = archive.GetFile(path); string cleanFilter = openFileDialog.Filter; saveFileDialog.Filter = selectedItem + "|" + selectedItem + "|" + openFileDialog.Filter; saveFileDialog.FileName = selectedItem; DialogResult result = saveFileDialog.ShowDialog(); if (result == DialogResult.OK) { File.WriteAllBytes(saveFileDialog.FileName, file); } saveFileDialog.Filter = cleanFilter; saveFileDialog.FileName = ""; } private void mainMenu_File_Save_Click(object sender, EventArgs e) { if (archive == null) return; string cleanFilter = openFileDialog.Filter; saveFileDialog.Filter = "Atlacomm Archive Files|*.aaf|" + 
saveFileDialog.Filter; saveFileDialog.FileName = ""; DialogResult result = saveFileDialog.ShowDialog(); if (result == DialogResult.OK) { File.WriteAllBytes(saveFileDialog.FileName, archive.BuildArchiveEnrypted()); } saveFileDialog.Filter = cleanFilter; saveFileDialog.FileName = ""; } private void mainMenu_File_SaveDecrypted_Click(object sender, EventArgs e) { if (archive == null) return; string cleanFilter = openFileDialog.Filter; saveFileDialog.Filter = "Atlacomm Archive Files|*.aaf|" + saveFileDialog.Filter; saveFileDialog.FileName = ""; DialogResult result = saveFileDialog.ShowDialog(); if (result == DialogResult.OK) { File.WriteAllBytes(saveFileDialog.FileName, archive.BuildArchive()); } saveFileDialog.Filter = cleanFilter; saveFileDialog.FileName = ""; } } } <file_sep>namespace Atlacomm.ArchiveFormat { public partial class Archive { public byte[] BuildArchiveEnrypted() { byte[] data = BuildArchive(); for (int i = 0; i < data.Length; i++) { data[i] = (byte)(data[i] ^ XOR); } return data; } } } <file_sep>using System.Collections.Generic; namespace Atlacomm.ArchiveFormat { public partial class Archive { // byte used XOR encryption const byte XOR = 0x89; // Headers for file validation and detection (is the file encrypted, decrypted or invalid) static readonly byte[] HEADER = { (byte)'A', (byte)'M', (byte)'M' }; // Contains all files and their paths readonly Dictionary<string, byte[]> Files = new Dictionary<string, byte[]>(); } }
0f54b7c0f42b7ec35e81101bbbf6b611609a9d97
[ "Markdown", "C#" ]
12
Markdown
Atlacomm/AtlacommArchiver
7ee0492cc2bc363ab760bb844da9e3f07c39a88e
07ce26a7c441b5e75545426c8c4603e5ba7bcb9d
refs/heads/master
<repo_name>sinouw/Todo-List-Angular-Asp.Net-Core<file_sep>/Angular8-Todo/src/app/shared/todoService.service.ts import { Injectable } from '@angular/core'; import { Todo } from './todo.model'; import { HttpClient} from "@angular/common/http"; import { ToastrService } from 'ngx-toastr'; @Injectable({ providedIn: 'root' }) export class TodoServiceService { formData : Todo; list : Todo[]; filtre :string = "All" Disable : boolean = true; readonly rootURL = '/api'; constructor(private http : HttpClient, private toastr: ToastrService) { } //Get All TODOS refreshList(f){ return this.http.get(this.rootURL + '/todo') .toPromise().then(res=>{ this.list=res as Todo[] this.filterTodos(this.filtre) }); } //Get A TODO filterTodos(f){ if (f=='Active'){ this.list=this.list.filter(x=>x.completed==false) }if (f=='Completed'){ this.list=this.list.filter(x=>x.completed) } } getTodo(id){ return this.http.get(this.rootURL + `/todo/${id}`).toPromise() .then( res=>{ this.formData = res as Todo }, err=>{ console.log(err) } ) } //Put A Todo putTodo(){ return this.http.put(this.rootURL + '/todo/'+this.formData.todoId,this.formData); } //Post A TODO postTodo(){ const body = { title : this.formData.title, completed: this.formData.completed, optimalLine: this.formData.optimalLine } this.http.post(this.rootURL+'/todo', body) .subscribe( result => { this.refreshList(this.filtre) this.ResetForm() this.toastr.success('Todo Was Added Successfully', 'Todo Adding'); }, () => { } ) } //Delete All TODOS deleteAllTodos() { this.http.delete(this.rootURL+'/todo').subscribe( res=>{ this.refreshList(this.filtre) }, err=>{ console.log(err) } ) } //Delete A Todo deleteTodo(id){ this.http.delete(this.rootURL+`/todo/${id}`).subscribe( res=>{ this.refreshList(this.filtre) }, err=>{ console.log(err) } ) } filterlist(fil : string){ this.filtre=fil; this.refreshList(this.filtre) } ResetForm(form? 
: any){ if(form != null) form.form.reset(); this.formData={ todoId : 0, title:'', optimalLine:null, completed:false, } } } <file_sep>/WebApi/Models/TodoDetailContext.cs using JetBrains.Annotations; using Microsoft.EntityFrameworkCore; using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; namespace WebApi.Models { public class TodoDetailContext : DbContext { public TodoDetailContext(DbContextOptions options) : base (options) { } public DbSet<TodoDetail> Todos { get; set; } } } <file_sep>/Angular8-Todo/src/app/app.module.ts import { BrowserModule } from '@angular/platform-browser'; import { NgModule } from '@angular/core'; import { AppRoutingModule } from './app-routing.module'; import { AppComponent } from './app.component'; import { TodoDetailsComponent } from './todo-details/todo-details.component'; import { TodoDetailComponent } from './todo-details/todo-detail/todo-detail.component'; import { TodoDetailListComponent } from './todo-details/todo-detail-list/todo-detail-list.component'; import { TodoServiceService } from './shared/todoService.service'; import { FormsModule } from '@angular/forms'; import { HttpClientModule } from '@angular/common/http'; import { BrowserAnimationsModule } from '@angular/platform-browser/animations'; import { ToastrModule } from 'ngx-toastr'; import { ServiceProvider } from 'src/providers/service/service'; @NgModule({ declarations: [ AppComponent, TodoDetailsComponent, TodoDetailComponent, TodoDetailListComponent ], imports: [ BrowserModule, AppRoutingModule, FormsModule, HttpClientModule, BrowserAnimationsModule, ToastrModule.forRoot(), ], providers: [TodoServiceService, ServiceProvider ], bootstrap: [AppComponent] }) export class AppModule { } <file_sep>/Angular8-Todo/src/app/todo-details/todo-detail-list/todo-detail-list.component.ts import { Component, OnInit } from '@angular/core'; import { TodoServiceService } from 'src/app/shared/todoService.service'; import { Todo } from 
'src/app/shared/todo.model'; import { ToastrService } from 'ngx-toastr'; import { ServiceProvider } from 'src/providers/service/service'; @Component({ selector: 'app-todo-detail-list', templateUrl: './todo-detail-list.component.html', styles: [] }) export class TodoDetailListComponent implements OnInit { list: any; constructor(private api : ServiceProvider, public service: TodoServiceService, private toastr: ToastrService) {} ngOnInit() { this.service.refreshList(this.service.filtre) } // getTodos(s){ // if(s=='All'){ // this.api.get("/todo").subscribe( // res => { // this.list = res; // } // ) // } //} DeleteALLTodo(){ this.service.deleteAllTodos() this.toastr.success('Todos Was Deleted successfully', 'Todo Deleting'); } getInfos(item : Todo){ this.service.formData = item; this.service.Disable=false; } DeleteTodo(item : Todo){ this.service.deleteTodo(item.todoId); this.toastr.success('Todo Was Deleted successfully', 'Todo Deleting'); } } <file_sep>/WebApi/Controllers/TodoDetailsController.cs using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Mvc; using Microsoft.EntityFrameworkCore; using WebApi.Models; namespace WebApi.Controllers { [Route("api/todo")] [ApiController] public class TodoDetailsController : ControllerBase { private readonly TodoDetailContext _context; public TodoDetailsController(TodoDetailContext context) { _context = context; } // GET: api/todo [HttpGet] public async Task<ActionResult<IEnumerable<TodoDetail>>> GetTodos() { var tods = _context.Todos.OrderBy(x => x.Completed); return await tods.ToListAsync(); } // GET: api/todo/5 [HttpGet("{id}")] public async Task<ActionResult<TodoDetail>> GetTodoDetail(int id) { var todoDetail = await _context.Todos.FindAsync(id); if (todoDetail == null) { return NotFound(); } return todoDetail; } // PUT: api/todo/5 [HttpPut("{id}")] public async Task<IActionResult> PutTodoDetail(int id, TodoDetail todoDetail) 
{ if (id != todoDetail.TodoId) { return BadRequest(); } _context.Entry(todoDetail).State = EntityState.Modified; try { await _context.SaveChangesAsync(); } catch (DbUpdateConcurrencyException) { if (!TodoDetailExists(id)) { return NotFound(); } else { throw; } } return NoContent(); } // POST: api/todo [HttpPost] public async Task<ActionResult<TodoDetail>> PostTodoDetail(TodoDetail todoDetail) { _context.Todos.Add(todoDetail); await _context.SaveChangesAsync(); return CreatedAtAction("GetTodoDetail", new { id = todoDetail.TodoId }, todoDetail); } // DELETE: api/todo/5 [HttpDelete("{id}")] public async Task<ActionResult<TodoDetail>> DeleteTodoDetail(int id) { var todoDetail = await _context.Todos.FindAsync(id); if (todoDetail == null) { return NotFound(); } _context.Todos.Remove(todoDetail); await _context.SaveChangesAsync(); return todoDetail; } //DELETE: api/todo [HttpDelete] public async Task DeleteAllTodos() { var todos = await _context.Todos.ToListAsync(); _context.Todos.RemoveRange(todos); await _context.SaveChangesAsync(); } private bool TodoDetailExists(int id) { return _context.Todos.Any(e => e.TodoId == id); } } } <file_sep>/WebApi/Models/TodoDetail.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.ComponentModel.DataAnnotations.Schema; using System.Linq; using System.Threading.Tasks; namespace WebApi.Models { public class TodoDetail { [Key] public int TodoId { get; set; } [Column(TypeName = "nvarchar(100)")] [Required] public string Title { get; set; } [Required] [Column(TypeName = "bit")] public bool Completed { get; set; } = false; [Required] [Column(TypeName = "nvarchar(10)")] public string OptimalLine { get; set; } } } <file_sep>/Angular8-Todo/src/app/todo-details/todo-detail/todo-detail.component.ts import { Component, OnInit } from '@angular/core'; import { Title } from '@angular/platform-browser'; import { TodoServiceService } from 'src/app/shared/todoService.service'; import { format } from 
'url'; import { NgForm } from '@angular/forms'; import { ToastrService } from 'ngx-toastr'; @Component({ selector: 'app-todo-detail', templateUrl: './todo-detail.component.html', styles: [] }) export class TodoDetailComponent implements OnInit { constructor( private service : TodoServiceService, private toastr: ToastrService) { } checkIt : boolean = false; formdisabled : boolean ngOnInit() { this.service.ResetForm(); } Undo(){ this.service.formData={ todoId : 0, title:'', optimalLine:null, completed:false, } this.service.Disable=true; } onSubmit(form : NgForm) { if(this.service.formData.title!='' && this.service.formData.optimalLine!=null){ if(this.service.formData.todoId==0) { this.addTodo(form); this.service.Disable=true; } else this.putTodo(form) } if(this.service.formData.title=='' || this.service.formData.optimalLine==null){ this.toastr.error('Form Error', 'Verify Your Form'); } } addTodo(form : NgForm){ this.service.postTodo() //.subscribe( // res=>{ // this.ResetForm(form); // this.toastr.success('Todo Was Added Successfully', 'Todo Adding'); // }, // err=>{ // console.log(err) // }); // this.service.refreshList(this.service.filtre); } putTodo(form :NgForm){ this.service.putTodo().subscribe( res=>{ this.service.ResetForm(form) this.toastr.info('Todo Was Updated successfully', 'Todo Updating'); this.service.refreshList(this.service.filtre); this.service.Disable=true; }, err=>{ console.log(err) } ) } checkedd(){ this.service.formData.completed = ! 
this.service.formData.completed } } <file_sep>/Angular8-Todo/src/app/shared/todo.model.ts export class Todo { todoId : number ; title : string ; completed : boolean =false; optimalLine : string ; } <file_sep>/Angular8-Todo/src/providers/service/service.ts import { Injectable } from '@angular/core'; import {Http, Response, Headers, RequestOptions} from "@angular/http"; import {Observable} from "rxjs"; import "rxjs/add/operator/map"; import "rxjs/add/operator/catch"; import 'rxjs/add/observable/throw'; import 'rxjs/add/observable/of'; import 'rxjs/add/operator/delay'; import {DUMP_DATA} from "./dump.data"; import { HttpClient, HttpHeaders } from '@angular/common/http'; @Injectable() export class ServiceProvider { development= false; OriginPath = '/api/'; constructor(private http:HttpClient) { } getURI(data:any[]){ let dataString = ""; for(let v in data){ dataString="?"; dataString += v+"="+data[v]+'&'; } return dataString; } getHeaders():Headers{ let headers = new Headers(); headers.append('Access-Control-Allow-Origin','*'); return headers; } get(url:string,data?:any[]):Observable<any> | any{ let dataString = this.getURI(data); if(this.development){ console.log("DEV GET : "+url,dataString,data); return Observable.of(DUMP_DATA).delay(2500); }else{ console.log("GET : "+url,dataString,data); return this.http.get(this.OriginPath+url+dataString) .catch(this.handleError); } } post(url:string,body:any={},data:any[]=[]):Observable<any> | any{ let dataString = this.getURI(data); if(this.development){ console.log("DEV POST : "+url,dataString,body,data); return Observable.of(DUMP_DATA).delay(1000); } let headers = new HttpHeaders().set("Content-Type", "application/json"); console.log("POST : "+url,dataString,body,data); return this.http.post(this.OriginPath+url,body,{headers}) .map(this.extractData) .catch(this.handleError); } put(url:string,body:any={},data:any[]=[]):Observable<any> | any{ let dataString = this.getURI(data); let headers = new HttpHeaders().set("Content-Type", 
"application/json"); console.log("PUT : "+url,dataString,body,data); return this.http.put(this.OriginPath+url,body, {headers}) .map(this.extractData) .catch(this.handleError); } delete(url:string,body:any={},data:any[]=[]):Observable<any> | any{ let dataString = this.getURI(data); if(this.development){ console.log("DEV DELETE : "+url,dataString,body,data); return Observable.of(DUMP_DATA).delay(1000); } console.log("DELETE : "+url,dataString,body,data); return this.http.delete(this.OriginPath+url,body); } extractData(data:Response){ return data.text(); } handleError(){ console.log("Error handleError"); return Observable.throw("Error API Service Todo."); } }
6f491d5f6f5282906fb53171376efc1020a74c85
[ "C#", "TypeScript" ]
9
TypeScript
sinouw/Todo-List-Angular-Asp.Net-Core
be83e242dee83eee3b249f5450606dbef4e778fe
d0adf179fef63bbb0907d5d02240f1f7201d5297
refs/heads/main
<file_sep>import React, { useEffect, useState } from 'react'; import axios from 'axios'; import Card from '../../Common/Card/Card'; import DashboardSection from './DashboardSection/DashboardSection'; import './Dashboard.scss'; const Dashboard = () => { const [meals, setMeals] = useState([ { meal: null, loading: true, mealType: 'Main Meal' }, { meal: null, loading: true, mealType: 'Dessert' }, { meal: null, loading: true, mealType: 'Snack' }, { meal: null, loading: true, mealType: 'Breakfast' } ]); useEffect(() => { axios.get('https://recipe-app-341cf.firebaseio.com/recipes.json?orderBy="mealType"&equalTo="Main Meal"&limitToLast=4') .then(response => { let recipes = response.data; let newArray = [...meals]; newArray[0].meal = recipes; newArray[0].loading = false; setMeals(newArray); }) .catch(err => console.log(err)); axios.get('https://recipe-app-341cf.firebaseio.com/recipes.json?orderBy="mealType"&equalTo="Dessert"&limitToLast=4') .then(response => { let recipes = response.data; let newArray = [...meals]; newArray[1].meal = recipes; newArray[1].loading = false; setMeals(newArray); }) .catch(err => console.log(err)); axios.get('https://recipe-app-341cf.firebaseio.com/recipes.json?orderBy="mealType"&equalTo="Snack"&limitToLast=4') .then(response => { let recipes = response.data; let newArray = [...meals]; newArray[2].meal = recipes; newArray[2].loading = false; setMeals(newArray); }) .catch(err => console.log(err)); axios.get('https://recipe-app-341cf.firebaseio.com/recipes.json?orderBy="mealType"&equalTo="Breakfast"&limitToLast=4') .then(response => { let recipes = response.data; let newArray = [...meals]; newArray[3].meal = recipes; newArray[3].loading = false; setMeals(newArray); }) .catch(err => console.log(err)); },[]); return ( <div> <h1>Dashboard</h1> {meals.map(meal => { return !meal.loading ? 
( <DashboardSection key={meal.mealType} mealType={meal.mealType}> {Object.keys(meal.meal).map(key => { return <Card key={key} id={key} imageUrl={meal.meal[key].imageUrl} recipeName={meal.meal[key].recipeName} /> }) } </DashboardSection> ) : null; }) } </div> ); }; export default Dashboard;<file_sep>import axios from 'axios'; import React, { useEffect, useState } from 'react'; import { useHistory } from 'react-router'; import { useDispatch, useSelector } from 'react-redux'; import { setCurrentUser } from '../../Store/Actions'; import Button from '../../Common/Button/Button'; import Input from '../../Common/Input/Input'; import './Auth.scss'; const UserInfo = () => { const [formData, setFormData] = useState({ displayName: { value: null, label: 'Name', required: true, valid: false, dirty: false, touched: false, errorMessage: null }, photoUrl: { value: null, label: 'Profile Photo', required: true, valid: false, dirty: false, touched: false, errorMessage: null } }); const [formValid, setFormValid] = useState(false); const [errorMessage, setErrorMessage] = useState(null); const user = useSelector(state => state.currentUser); const dispatch = useDispatch(); const history = useHistory(); useEffect(() => { formValidation(); },[formData]); const onChangeHandler = (event, id) => { let isValid = validation(event.target.value, id); setFormData({ ...formData, [id]: { ...formData[id], value: event.target.value, valid: isValid.valid, touched: true, errorMessage: isValid.errorMessage } }); }; const onBlurHandler = (id) => { setFormData({ ...formData, [id]: { ...formData[id], dirty: true } }); }; const validation = (value, id) => { let validate = { valid: true, errorMessage: null }; if(formData[id].required) { validate = { ...validate, valid: value !== '' && validate.valid, errorMessage: `${formData[id].label} is required` }; }; return validate; }; const formValidation = () => { let formIsValid = true; for(let input in formData) { formIsValid = formData[input].valid && formIsValid; 
} setFormValid(formIsValid); }; const onSubmit = (event) => { event.preventDefault(); if (user !== null) { const userInfo = { idToken: user.token, displayName: formData.displayName.value, photoUrl: formData.photoUrl.value, returnSecureToken: false } // update the user info -> store displayName and photoUrl axios.post(`https://identitytoolkit.googleapis.com/v1/accounts:update?key=${process.env.REACT_APP_API_KEY}`, userInfo) .then(res => { const updatedUser = { ...user, displayName: res.data.displayName, photoUrl: res.data.photoUrl }; dispatch(setCurrentUser(updatedUser)); history.push('/'); }) .catch((err) => { handleError(err.response); }); } else { setErrorMessage("Please create an account or sign in"); } }; const handleError = (errorResponse) => { let errorMessage; if(!errorResponse.data.error || !errorResponse.data.error.error) { setErrorMessage(errorMessage); } switch(errorResponse.data.error.message) { case "INVALID_ID_TOKEN": errorMessage = "Please sign in or create an account"; break; default: errorMessage = "Error not defined, please try again."; } setErrorMessage(errorMessage); }; return ( <div> <h1>Flavor<span class="black">ous</span></h1> <h2>Profile Information</h2> <form> <Input elementType="INPUT_TEXT" label="Name" id="displayName" placeholder="Enter your name" dirty={formData.displayName.dirty} valid={formData.displayName.valid} touched={formData.displayName.touched} change={(event) => onChangeHandler(event, 'displayName')} blur={() => onBlurHandler('displayName')} /> {!formData.displayName.valid && formData.displayName.dirty && formData.displayName.touched? 
<p className="error">{formData.displayName.errorMessage}</p> : null} <Input elementType="INPUT_TEXT" label="Profile Photo URL" id="photoUrl" placeholder="Enter photo URL" dirty={formData.photoUrl.dirty} valid={formData.photoUrl.valid} touched={formData.photoUrl.touched} change={(event) => onChangeHandler(event, 'photoUrl')} blur={() => onBlurHandler('photoUrl')} /> {!formData.photoUrl.valid && formData.photoUrl.dirty && formData.photoUrl.touched? <p className="error">{formData.photoUrl.errorMessage}</p> : null} <Button disabledBtn={!formValid} click={(event) => onSubmit(event)}> Submit </Button> {errorMessage ? <p className="error">{errorMessage}</p> : null} </form> </div> ); }; export default UserInfo;<file_sep># react-recipe-app Web application for finding and storing recipes developed using React.js, Redux and Firebase <file_sep>import React from 'react'; import { NavLink } from 'react-router-dom'; import './Nav.scss'; const Nav = () => { return ( <nav> <ul> <li> <NavLink exact to="/"> <svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="th-large" className="svg-nav-icon svg-inline--fa fa-th-large fa-w-16" role="img" viewBox="0 0 512 512"><path fill="currentColor" d="M296 32h192c13.255 0 24 10.745 24 24v160c0 13.255-10.745 24-24 24H296c-13.255 0-24-10.745-24-24V56c0-13.255 10.745-24 24-24zm-80 0H24C10.745 32 0 42.745 0 56v160c0 13.255 10.745 24 24 24h192c13.255 0 24-10.745 24-24V56c0-13.255-10.745-24-24-24zM0 296v160c0 13.255 10.745 24 24 24h192c13.255 0 24-10.745 24-24V296c0-13.255-10.745-24-24-24H24c-13.255 0-24 10.745-24 24zm296 184h192c13.255 0 24-10.745 24-24V296c0-13.255-10.745-24-24-24H296c-13.255 0-24 10.745-24 24v160c0 13.255 10.745 24 24 24z"/></svg> Dashboard </NavLink> </li> <li> <NavLink to="/create-recipe"> <svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="plus-circle" className="svg-nav-icon svg-inline--fa fa-plus-circle fa-w-16" role="img" viewBox="0 0 512 512"><path fill="currentColor" d="M256 8C119 8 8 119 
8 256s111 248 248 248 248-111 248-248S393 8 256 8zm144 276c0 6.6-5.4 12-12 12h-92v92c0 6.6-5.4 12-12 12h-56c-6.6 0-12-5.4-12-12v-92h-92c-6.6 0-12-5.4-12-12v-56c0-6.6 5.4-12 12-12h92v-92c0-6.6 5.4-12 12-12h56c6.6 0 12 5.4 12 12v92h92c6.6 0 12 5.4 12 12v56z"/></svg> Recipe </NavLink> </li> <li> <NavLink to="/profile"> <svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="user" className="svg-nav-icon svg-inline--fa fa-user fa-w-14" role="img" viewBox="0 0 448 512"><path fill="currentColor" d="M224 256c70.7 0 128-57.3 128-128S294.7 0 224 0 96 57.3 96 128s57.3 128 128 128zm89.6 32h-16.7c-22.2 10.2-46.9 16-72.9 16s-50.6-5.8-72.9-16h-16.7C60.2 288 0 348.2 0 422.4V464c0 26.5 21.5 48 48 48h352c26.5 0 48-21.5 48-48v-41.6c0-74.2-60.2-134.4-134.4-134.4z"/></svg> Profile </NavLink> </li> </ul> </nav> ) } export default Nav;<file_sep>import axios from 'axios'; import React, { useEffect, useState } from 'react'; import { useSelector, useDispatch } from 'react-redux'; import { useHistory } from 'react-router'; import { setCurrentUser } from '../../Store/Actions'; import Button from '../../Common/Button/Button'; import Input from '../../Common/Input/Input'; import './Auth.scss'; const Auth = () => { const [formData, setFormData] = useState({ email: { value: null, label: 'Email', required: true, valid: false, dirty: false, touched: false, errorMessage: null }, password: { value: null, label: 'Password', required: true, minLength: 6, valid: false, dirty: false, touched: false, errorMessage: null } }); const [formValid, setFormValid] = useState(false); const [errorMessage, setErrorMessage] = useState(null); const [isLoginPage, setIsLoginPage] = useState(true); const history = useHistory(); const dispatch = useDispatch(); useEffect(() => { formValidation(); },[formData]); const switchPage = (event) => { event.preventDefault(); setIsLoginPage(!isLoginPage); }; const onChangeHandler = (event, id) => { let isValid = validation(event.target.value, id); setFormData({ 
...formData, [id]: { ...formData[id], value: event.target.value, valid: isValid.valid, touched: true, errorMessage: isValid.errorMessage } }); }; const onBlurHandler = (id) => { setFormData({ ...formData, [id]: { ...formData[id], dirty: true } }); }; const validation = (value, id) => { let validate = { valid: true, errorMessage: null }; if(formData[id].required) { validate = { ...validate, valid: value !== '' && validate.valid, errorMessage: `${formData[id].label} is required` }; }; if(formData[id].minLength) { validate = { ...validate, valid: value.length >= formData[id].minLength && validate.valid, errorMessage: `${formData[id].label} must be at least ${formData[id].minLength} characters` }; }; return validate; }; const formValidation = () => { let formIsValid = true; for(let input in formData) { formIsValid = formData[input].valid && formIsValid; } setFormValid(formIsValid); }; const handleError = (errorResponse) => { let errorMessage = "Error not defined, please try again."; if(!errorResponse.data.error || !errorResponse.data.error.error) { setErrorMessage(errorMessage); } switch(errorResponse.data.error.message) { case "EMAIL_EXISTS": errorMessage = "The email address is already in use by another account."; break; case "OPERATION_NOT_ALLOWED": errorMessage = "Password sign-in is disabled for this project."; break; case "TOO_MANY_ATTEMPTS_TRY_LATER": errorMessage = "We have blocked all requests from this device due to unusual activity. 
Try again later."; break; case "EMAIL_NOT_FOUND": errorMessage = "Invalid email address or password"; break; case "INVALID_PASSWORD": errorMessage = "Invalid email address or password"; break; case "USER_DISABLED": errorMessage = "The user account has been disabled by an administrator."; break; case "INVALID_ID_TOKEN": errorMessage = "Please sign in."; break; default: errorMessage = errorMessage; } setErrorMessage(errorMessage); }; const onSubmit = (event) => { event.preventDefault(); const newUser = { email: formData.email.value, password: <PASSWORD>, returnSecureToken: true } if (isLoginPage) { axios.post(`https://identitytoolkit.googleapis.com/v1/accounts:signInWithPassword?key=${process.env.REACT_APP_API_KEY}`, newUser) .then((response) => { handleAuthentication(response.data.email, response.data.localId, response.data.idToken, response.data.expiresIn); }) .catch((error) => { handleError(error.response); }); } else { axios.post(`https://identitytoolkit.googleapis.com/v1/accounts:signUp?key=${process.env.REACT_APP_API_KEY}`, newUser) .then((response) => { handleAuthentication(response.data.email, response.data.localId, response.data.idToken, response.data.expiresIn); }) .catch((error) => { handleError(error.response); }); } }; const handleAuthentication = (email, id, token, expiresIn) => { // get the expiation date // 1. get the current date in milliseconds + milliseconds until token expires // 2. 
convert it back to a date via new Date() const expirationDate = new Date(new Date().getTime() + Number(expiresIn) * 1000); if (isLoginPage) { const userInfo = { "idToken": token, "localId": [id], "email": [email] } // get account information from user (displayName + photoUrl) axios.post(`https://www.googleapis.com/identitytoolkit/v3/relyingparty/getAccountInfo?key=${process.env.REACT_APP_API_KEY}`, userInfo) .then(res => { const user = { email: email, id: id, token: token, expirationDate: expirationDate, displayName: res.data.users[0].displayName, photoUrl: res.data.users[0].photoUrl }; // dispatch the logged in user as the current user dispatch(setCurrentUser(user)); history.push('/'); }) .catch(err => { handleError(err.response); }); } else { // new user const user = { email: email, id: id, token: token, expirationDate: expirationDate }; // dispatch the new user as the current user dispatch(setCurrentUser(user)); history.push('/profile-information'); } } return ( <div> <h1>Flavor<span class="black">ous</span></h1> {isLoginPage ? <h2>Log in to your account</h2> : <h2>Create an account</h2>} <form> <Input elementType="INPUT_TEXT" label="Email" id="email" placeholder="Enter email address" dirty={formData.email.dirty} valid={formData.email.valid} touched={formData.email.touched} change={(event) => onChangeHandler(event, 'email')} blur={() => onBlurHandler('email')} /> {!formData.email.valid && formData.email.dirty && formData.email.touched? <p className="error">{formData.email.errorMessage}</p> : null} <Input elementType="INPUT_PASSWORD" label="Password" id="password" placeholder="Enter password" dirty={formData.password.dirty} valid={formData.password.valid} touched={formData.password.touched} change={(event) => onChangeHandler(event, 'password')} blur={() => onBlurHandler('password')} /> {!formData.password.valid && formData.password.dirty && formData.password.touched? <p className="error">{formData.password.errorMessage}</p> : null} {errorMessage ? 
<p className="error">{errorMessage}</p> : null} <Button disabledBtn={!formValid} click={(event) => onSubmit(event)}> {isLoginPage ? 'Log in' : 'Sign Up'} </Button> <Button click={(event) => switchPage(event)}> {isLoginPage ? 'Create an account' : 'I already have an account' } </Button> </form> </div> ); }; export default Auth;<file_sep>import React from 'react'; import { Link } from 'react-router-dom'; import './Card.scss'; const Card = (props) => { return ( <div className="card-container"> <Link to={`recipe/${props.id}`}> <img src={props.imageUrl}/> <h3>{props.recipeName}</h3> </Link> </div> ); }; export default Card;<file_sep>import React from 'react'; import { BrowserRouter as Router, Switch, Route } from "react-router-dom"; import Nav from './Common/Nav/Nav'; import './App.scss'; import CreateRecipe from './Pages/CreateRecipe/CreateRecipe'; import Dashboard from './Pages/Dashboard/Dashboard'; import Recipe from './Pages/Recipe/Recipe'; import Auth from './Pages/Auth/Auth'; import UserInfo from './Pages/Auth/UserInfo'; function App() { return ( <div className="App"> <Router> <Route path="/auth" exact component={Auth}></Route> <Route path="/profile-information" exact component={UserInfo}></Route> <Switch> <Route path="/" exact component={Dashboard}></Route> <Route path="/recipe/:id" exact component={Recipe}></Route> <Route path="/create-recipe" component={CreateRecipe}></Route> <Route path="/profile"></Route> </Switch> <Nav/> </Router> </div> ); } export default App; <file_sep>import axios from 'axios'; import React, { useEffect, useState } from 'react'; import './Recipe.scss'; const Recipe = () => { const [recipe, setRecipe] = useState({ recipeData: null, loading: true }); useEffect(() => { const urlPath = window.location.pathname.split('/'); const recipeId = urlPath[2]; axios.get(`https://recipe-app-341cf.firebaseio.com/recipes/${recipeId}.json`) .then(response => { const recipeCopy = {...recipe}; recipeCopy.recipeData = response.data; recipeCopy.loading = 
false; setRecipe(recipeCopy); }) .catch(err => console.log(err)); },[]); return ( <div> {!recipe.loading ? ( <React.Fragment> <div className="recipe-image-container"> <img className="recipe-image" src={recipe.recipeData.imageUrl} /> </div> <div> <h1 className="recipe-title">{recipe.recipeData.recipeName}</h1> <p className="recipe-description">{recipe.recipeData.description}</p> <div className="recipe-label-container"> <div className="recipe-label"> <svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="utensils" class="svg-inline--fa fa-utensils fa-w-13" role="img" viewBox="0 0 416 512"><path fill="currentColor" d="M207.9 15.2c.8 4.7 16.1 94.5 16.1 128.8 0 52.3-27.8 89.6-68.9 104.6L168 486.7c.7 13.7-10.2 25.3-24 25.3H80c-13.7 0-24.7-11.5-24-25.3l12.9-238.1C27.7 233.6 0 196.2 0 144 0 109.6 15.3 19.9 16.1 15.2 19.3-5.1 61.4-5.4 64 16.3v141.2c1.3 3.4 15.1 3.2 16 0 1.4-25.3 7.9-139.2 8-141.8 3.3-20.8 44.7-20.8 47.9 0 .2 2.7 6.6 116.5 8 141.8.9 3.2 14.8 3.4 16 0V16.3c2.6-21.6 44.8-21.4 48-1.1zm119.2 285.7l-15 185.1c-1.2 14 9.9 26 23.9 26h56c13.3 0 24-10.7 24-24V24c0-13.2-10.7-24-24-24-82.5 0-221.4 178.5-64.9 300.9z"/></svg> <p>serves {recipe.recipeData.servings}</p> </div> <div className="recipe-label"> <svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="clock" class="svg-inline--fa fa-clock fa-w-16" role="img" viewBox="0 0 512 512"><path fill="currentColor" d="M256,8C119,8,8,119,8,256S119,504,256,504,504,393,504,256,393,8,256,8Zm92.49,313h0l-20,25a16,16,0,0,1-22.49,2.5h0l-67-49.72a40,40,0,0,1-15-31.23V112a16,16,0,0,1,16-16h32a16,16,0,0,1,16,16V256l58,42.5A16,16,0,0,1,348.49,321Z"/></svg> <p>{recipe.recipeData.preparation}</p> </div> <div className="recipe-label"> <svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="chart-area" className="svg-inline--fa fa-chart-area fa-w-16" role="img" viewBox="0 0 512 512"><path fill="currentColor" d="M500 384c6.6 0 12 5.4 12 12v40c0 6.6-5.4 12-12 12H12c-6.6 0-12-5.4-12-12V76c0-6.6 
5.4-12 12-12h40c6.6 0 12 5.4 12 12v308h436zM372.7 159.5L288 216l-85.3-113.7c-5.1-6.8-15.5-6.3-19.9 1L96 248v104h384l-89.9-187.8c-3.2-6.5-11.4-8.7-17.4-4.7z"/></svg> <p>{recipe.recipeData.difficulty}</p> </div> </div> <h3>Ingredients</h3> <ul className="recipe-ul"> {recipe.recipeData.ingredients.map((ingredient) => { return <li>{ingredient.amount} {ingredient.ingredient}</li> })} </ul> <h3>Instructions</h3> <ol className="recipe-ol"> {recipe.recipeData.instructions.map((instruction) => { return <li>{instruction.step}</li> })} </ol> </div> </React.Fragment> ) : null } </div> ); }; export default Recipe;
e1ea21f93dea34e3a28443d7f81d14d775046f71
[ "JavaScript", "Markdown" ]
8
JavaScript
mariaaldis/react-recipe-app
cea5375a8d339716afd4b4a26d7f3c4064069a84
0994ff4b303c68d37d672ce646eab4720a5af51c
refs/heads/master
<file_sep>"""TodoProject URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/3.0/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.contrib import admin import os from django.urls import path, include # import importlib.util # views = importlib.util.spec_from_file_location("list_todo_item", "D:/Yokesh/python/todo/project/TodoProject/views.py") from todos import views from todos.views import ( ListTodoItem, InsertTodoItem ) urlpatterns = [ path('registration/',views.registration_site,name='registration'), path('login/',views.login_site,name='login'), path('list/', ListTodoItem.as_view()), # path('list/', views.list_todo_item), path('insert_todo/',InsertTodoItem.as_view(),name='insert_item'), path('delete_todo/<int:todo_id>/',views.delete_todo_item,name='delete_item'), ] <file_sep>from django.shortcuts import render,redirect from django.http import HttpResponse, HttpRequest from .models import Todo from django.contrib.auth.forms import UserCreationForm from todos.forms import CreateUserForm ,AddTodos from django.contrib.auth.mixins import LoginRequiredMixin from django.contrib import messages from django.contrib.auth import authenticate, login, logout from django.contrib.auth.decorators import login_required from django.views.generic import TemplateView, ListView, DetailView, CreateView, UpdateView # Create your views here. 
def registration_site(request): if request.user.is_authenticated: return redirect('/todos/list/') else: form=UserCreationForm() if request.method == 'POST': form=CreateUserForm(request.POST) if form.is_valid(): form.save() user=form.cleaned_data.get('username') messages.success(request,"Account was created for "+user) return redirect('login') context={'form':form} return render(request, 'todos/registration.html',context) def login_site(request): if request.user.is_authenticated: return redirect('/todos/list/') else: if request.method == 'POST': username=request.POST.get('username') password=request.POST.get('password') user=authenticate(request,username=username,password=<PASSWORD>) if user is not None: login(request,user) return redirect('/todos/list/') else: messages.info(request, 'Username or password is incorrect') context={} return render(request, 'todos/login.html',context) context={} return render(request, 'todos/login.html',context) def logoutUser(request): logout(request) return redirect('login') # @login_required(login_url='login') # def list_todo_item(request): # context={'todo_list':Todo.objects.all()} # return render(request,"todos/todo_list.html", context) class ListTodoItem(LoginRequiredMixin,ListView): template_name="todos/todo_list.html" login_url="/login/" def get_queryset(self): return Todo.objects.filter(user=self.request.user) # @login_required(login_url='login') # def insert_todo_item(request:HttpRequest): # todo=Todo(content=request.POST['content']) # todo.save() # return redirect('/todos/list/') class InsertTodoItem(LoginRequiredMixin,CreateView): # form_class=RLCreateForm # template_name="form.html" success_url="/todos/list/" login_url="/login/" form_class=AddTodos def form_valid(self,form): instance=form.save(commit=False) instance.user=self.request.user return super(InsertTodoItem, self).form_valid(form) def get_queryset(self): return Todo.objects.filter(user=self.request.user) # def get_context_data(self,*args,**kwargs): # 
context=super(InsertTodoItem,self).get_context_data(*args,**kwargs) # # context["title"]='Add Restaurant' # return context @login_required(login_url='login') def delete_todo_item(request,todo_id): todo_to_delete=Todo.objects.get(id=todo_id) todo_to_delete.delete() return redirect('/todos/list/') <file_sep>from django.db import models from django.conf import settings # Create your models here. #from django.contrib.auth.models import User User=settings.AUTH_USER_MODEL # class ToDoList(models.Model): # user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="todolist", null=True) # <--- added # name = models.CharField(max_length=200) # def __str__(self): # return self.name class Todo(models.Model): #user = models.ForeignKey(User,on_delete=models.DO_NOTHING) # todolist = models.ForeignKey(ToDoList, on_delete=models.CASCADE) user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="todolist") content=models.TextField() def __str__(self): return self.content <file_sep>from django.forms import ModelForm from django.contrib.auth.forms import UserCreationForm from django import forms from django.contrib.auth.models import User # from django import forms from .models import Todo class CreateUserForm(UserCreationForm): class Meta: model=User fields=['username','email','<PASSWORD>','<PASSWORD>'] class AddTodos(ModelForm): class Meta: model=Todo fields=['content']
06982a5cdb69b2b688382211c7e435487ea25e11
[ "Python" ]
4
Python
Yokeshwaran95/Django_ToDo_App
77a9adbb2602ef227aeb7124bd95192258b126b4
2da2c865d5acde4802e73a2d4afc4b725c289a18
refs/heads/master
<repo_name>MTalhaAlkan/Prayer-Time<file_sep>/build/Prayer Time/Classes/Managers/NetworkManager.swift // // NetworkManager.swift // Prayer Time // // Created by <NAME> on 10/06/2017. // Copyright © 2017 <NAME>. All rights reserved. // import UIKit import Alamofire import SwiftyJSON private enum Path: String { case countries = "/ulkeler" case cities = "/sehirler" case counties = "/ilceler" case countyDetail = "/ilce" case prayertimes = "/vakitler" } private let baseUrl = "http://ezanvakti.herokuapp.com" class NetworkManager { //TODO: It will based on release mode //To show debug messages. private let isDebugMode = true static let shared: NetworkManager = NetworkManager() fileprivate func performRequest(forPath path: Path, method: HTTPMethod, withParameters parameters: [String: Any]?, completion: @escaping (_ jsonResult: JSON?, _ error: Error?) -> Void) { Alamofire.request(baseUrl + path.rawValue, method: method, parameters: parameters).responseJSON { responseData in if self.isDebugMode { if let request = responseData.request { print("Sending Request:\n\(String(describing: request))\n") } if let response = responseData.response { print("Getting Response:\n\(String(describing: response))\n") } } if responseData.result.value != nil { let jsonData = JSON(responseData.result.value!) completion(jsonData, nil) } else { completion(nil, responseData.result.error) } } } }
5a84e5d2a2738be7995ae1b2beca8d58ac4ba7b1
[ "Swift" ]
1
Swift
MTalhaAlkan/Prayer-Time
712d2845ec3127a7e2b05db845aebf97a06e1d81
54d8099559c555e40d5baf21016df777683a59a9
refs/heads/master
<file_sep>package main import ( "flag" "io" "log" "math/rand" "net/http" "time" ) var generatorStrings = [][]string{ { "солнечный", "траурный", "плюшевый", "бешеный", "памятный", "трепетный", "базовый", "скошенный", "преданный", "ласковый", "пойманный", "радужный", "огненный", "радостный", "тензорный", "шёлковый", "пепельный", "ламповый", "жареный", "загнанный", }, { "зайчик", "Верник", "глобус", "ветер", "щавель", "пёсик", "копчик", "ландыш", "стольник", "мальчик", "дольшик", "Игорь", "невод", "егерь", "пончик", "лобстер", "жемчуг", "кольщик", "йогурт", "овод", }, { "стеклянного", "ванильного", "резонного", "широкого", "дешёвого", "горбатого", "собачьего", "исконного", "волшебного", "картонного", "лохматого", "арбузного", "огромного", "запойного", "великого", "бараньего", "вандального", "едрёного", "парадного", "укромного", }, { "глаза", "плова", "Пельша", "мира", "деда", "жира", "мема", "ада", "бура", "жала", "нёба", "гунна", "хлама", "шума", "воза", "сала", "фена", "зала", "рака", "Глеба", }, } var listen = flag.String("l", "localhost:5000", "listen address") func main() { flag.Parse() rand.Seed(time.Now().UnixNano()) http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { io.WriteString(w, GenOborona(generatorStrings)) }) log.Printf("Listen on %s", *listen) log.Fatal(http.ListenAndServe(*listen, nil)) } <file_sep>package main import ( "math/rand" "strings" ) func GenOborona(s [][]string) string { var res []string for _, lst := range s { res = append(res, lst[rand.Intn(len(lst))]) } return strings.Join(res, " ") }
e93ce896f4b171e201795d222607bb4f19e2e181
[ "Go" ]
2
Go
LK4D4/oborona
bce0e761dc17e6d394456fa228792ed8bbf65f22
9349d4be5c6a650938d9df8c0a1850f30ead7553
refs/heads/main
<file_sep>#!/usr/bin/env python3 import dateutil.parser as dp import json import os import sys import click import requests github_org = "chanzuckerberg" github_repo = "napari-hub" github_graphql_endpoint = "https://api.github.com/graphql" github_deployment_endpoint = "https://api.github.com/repos/chanzuckerberg/napari-hub/deployments" def get_latest_successful_deployment(github_api_token, stage): """get the latest successful/active deployment github sha""" # Assumption: One of the most recent 50 deployment attempts was successful query = """ query($repo_owner:String!, $repo_name:String!, $deployment_env:String!) { repository(owner: $repo_owner, name: $repo_name) { deployments(environments: [$deployment_env], last: 50) { nodes { commitOid statuses(first: 100) { nodes { state updatedAt } } } } } } """ variables = {"repo_owner": github_org, "repo_name": github_repo, "deployment_env": stage} headers = {"Authorization": "token %s" % github_api_token} query = {"query": query, "variables": variables} try: resp = requests.post(url=github_graphql_endpoint, json=query, headers=headers) if resp.status_code != 200: print("Error: Unexpected response {}".format(resp)) print(resp.text) return None except requests.exceptions.RequestException as e: print("Error: {}".format(e)) return None resp_json = json.loads(resp.text) deployments = resp_json["data"]["repository"]["deployments"] sha_tuple = (None, None) for node in deployments["nodes"]: gh_sha = node["commitOid"] for status in node["statuses"]["nodes"]: if status["state"] == "SUCCESS": parsed_t = dp.parse(status["updatedAt"]) if sha_tuple[0] == None: sha_tuple = (gh_sha, parsed_t) else: if sha_tuple[1] < parsed_t: sha_tuple = (gh_sha, parsed_t) break return sha_tuple def trigger_deploy(github_api_token, deployment_stage, github_sha, dry_run): """Start deployment to the given environment based on the github sha""" headers = {"Authorization": "token %s" % github_api_token, "Accept": "application/vnd.github.v3.text-match+json"} 
tag = f"sha-{github_sha[0:8]}" params = { "ref": github_sha, "auto_merge": False, "environment": deployment_stage, "required_contexts": [], "payload": {"tag": tag}, } if dry_run: print(f"Dry run requested. Would deploy {tag} to environment {deployment_stage}") return print(f"Deploying {tag} to environment {deployment_stage}") try: resp = requests.post(github_deployment_endpoint, headers=headers, json=params) if resp.status_code != 201: print("Error: Unexpected response {}".format(resp)) print(resp.text) return except requests.exceptions.RequestException as e: print("Error: {}".format(e)) return print("Deployment successful") def validate_sha(ctx, param, value): if len(value) < 8: raise click.BadParameter("Github SHA must be at least 8 characters!") return value @click.command() @click.argument("deployment_stage") @click.option("--github-sha", callback=validate_sha, help="github sha to be deployed", default=None) @click.option("--dry-run", help="do not perform actual deployment", default=False, is_flag=True) def happy_deploy(deployment_stage, github_sha, dry_run): api_token = os.getenv("GITHUB_TOKEN") if api_token is None: print("Error: Please set GITHUB_TOKEN environment variable") return read_deployment_stage = "stage" # If github sha is not provided, get the latest succesful deployment # github sha of staging environment if github_sha is None: github_sha, parsed_t = get_latest_successful_deployment(api_token, read_deployment_stage) print(f"Latest succesful '{read_deployment_stage}' deployment on {parsed_t}: commit {github_sha}") if github_sha is None: print( f"Error: Could not find a successful deployment for deployment stage {read_deployment_stage}, and no --github_sha was given" ) sys.exit(1) # Trigger deployment on the given stage. This will trigger github actions # and start/update the deployment. 
if github_sha is not None: trigger_deploy(api_token, deployment_stage, github_sha, dry_run) if __name__ == "__main__": happy_deploy() <file_sep>import { LINKS } from '@/constants'; import { LinkInfo } from '@/types'; export const APP_LINKS: LinkInfo[] = [LINKS.ABOUT, LINKS.FAQ]; <file_sep>/* eslint-disable import/no-default-export, @typescript-eslint/no-explicit-any, */ declare module 'remark-remove-comments' { import { Pluggable, Settings } from 'unified'; const plugin: Pluggable<any[], Settings>; export default plugin; } declare module '@renovate/pep440' { /** * Determines if a version string satisfies the version specifier. The * comparison is based on PEP440: https://www.python.org/dev/peps/pep-0440 * * @param version The version string. * @param specifier The version specifier. */ export function satisfies(version: string, specifier: string): boolean; } <file_sep>export * from './TableOfContents'; export * from './TableOfContents.constants'; export * from './TableOfContents.types'; <file_sep>/** * Route to search page. */ export const SEARCH_PAGE = '/'; /** * Sorting methods for search results. */ export enum SearchSortType { Relevance = 'relevance', ReleaseDate = 'recentlyUpdated', FirstReleased = 'newest', PluginName = 'pluginName', } export const DEFAULT_SORT_TYPE = SearchSortType.ReleaseDate; /** * Query parameters used for storing search form data. 
*/ export enum SearchQueryParams { Filter = 'filter', Search = 'search', Sort = 'sort', } <file_sep>declare namespace NodeJS { interface ProcessEnv { readonly API_URL: string; readonly API_URL_HOST: string; readonly ENV: 'local' | 'dev' | 'staging' | 'prod'; readonly PLAUSIBLE: 'true' | 'false'; readonly GITHUB_CLIENT_ID: string; readonly GITHUB_CLIENT_SECRET: string; } } <file_sep>import slug from 'rehype-slug'; import html from 'rehype-stringify'; import markdownParser from 'remark-parse'; import remark2rehype from 'remark-rehype'; import unified from 'unified'; import { TOC_HEADER_TAG, TOCHeader } from '@/components/common/TableOfContents'; import { HeadingNode, MarkdownNode } from './Markdown.types'; /** * Plugins for transforming markdown to HTML. This also adds slug IDs to each * heading for linking in the TOC. */ const UNIFIED_PLUGINS = [ // Parse markdown markdownParser, // Convert markdown to HTML for rehype parsing remark2rehype, // Add slug IDs to every heading slug, // Compile to HTML html, ]; /** * Function for extracting TOC headers from a markdown string. This uses * unified, remark, and rehype to parse the markdown string. The string is * parsed synchronously so that rendering the headers work in SSR. This is * also the same approach react-markdown uses: * * https://git.io/JObhE * * @param markdown Markdown string. * @returns Array of TOCHeader objects. */ export function getHeadersFromMarkdown(markdown: string): TOCHeader[] { if (!markdown) { return []; } // Create markdown processor with remark / rehype plugins. const processor = UNIFIED_PLUGINS.reduce( (currentProcessor, plugin) => currentProcessor.use(plugin), unified(), ); // Create AST from markdown + plugins. const { children } = processor.runSync( processor.parse(markdown), ) as MarkdownNode; // Convert all H2 headings into TOCHeader objects. 
return children .filter((node): node is HeadingNode => node.tagName === TOC_HEADER_TAG) .map<TOCHeader>((node) => ({ id: node.properties.id, text: node.children[0].value, })); } <file_sep>/** * Form state for filtering on plugin development status. */ export interface DevelopmentStatusFormState { onlyStablePlugins: boolean; } /** * Form state for filtering on plugin license. */ export interface LicenseFormState { onlyOpenSourcePlugins: boolean; } /** * Form state for filtering on operating systems. */ export interface OperatingSystemFormState { linux: boolean; mac: boolean; windows: boolean; } /** * Root state object for the filter form. Each state object is a * string-to-boolean map that corresponds to the form checkbox state on the * search page, but this may change in the future as we add more filters. */ export interface FilterFormState { developmentStatus: DevelopmentStatusFormState; license: LicenseFormState; operatingSystems: OperatingSystemFormState; pythonVersions: Record<string, boolean>; } /** * Form state for rendering filters in a chip / pill above the plugin search results. */ export interface FilterChipFormState { id: string; key: keyof FilterFormState; subKey: string; value: boolean; } <file_sep>// Increase max time to prevent test from browser shutting down too soon. jest.setTimeout(2 * 60 * 1000); <file_sep>/** * Axios global configuration. Shared values for every network request made with * axios. */ import axios from 'axios'; /** * URL to hub API to make requests to. */ const API_URL = process.env.API_URL || 'http://localhost:8081'; /** * Host to use for Host header when making requests. Setting this is required * during local development because AWS API Gateway performs a host check for * API requests. 
*/ const API_URL_HOST = process.env.API_URL_HOST || new URL(API_URL).host; export const hubAPI = axios.create({ baseURL: API_URL, headers: { Host: API_URL_HOST, }, }); export const spdxLicenseDataAPI = axios.create({ baseURL: 'https://raw.githubusercontent.com/spdx/license-list-data/master/json/licenses.json', }); <file_sep>import { useRouter } from 'next/router'; /** * Hook that gets the active URL parameter for a particular key. First it tries * getting the query parameter from the Next.js router. This will be populated * on initial server side rendering. * * If the query object is empty, check the URL for the query parameter. The * query object will only be empty for client side navigation: * https://github.com/vercel/next.js/issues/9473 * * @param name The name of the query parameter * @returns Query parameter or empty string if undefined */ export function useActiveURLParameter<R extends string = string>( name: string, ): R | undefined { const router = useRouter(); let query = router.query[name] as string | undefined; if (!query && process.browser) { const url = new URL(window.location.href); query = url.searchParams.get(name) ?? 
undefined; } return query as R; } <file_sep>from apig_wsgi import make_lambda_handler from napari import app # Configure this as your entry point in AWS Lambda handler = make_lambda_handler(app.wsgi_app) <file_sep>#!/usr/bin/env python3 import random from base64 import b64decode from contextlib import contextmanager from datetime import datetime import io import json import os import re from string import Template import subprocess import tarfile import tempfile import time from urllib.parse import urlparse import boto3 from botocore.exceptions import WaiterError from botocore.config import Config import click from terrasnek.api import TFC from terrasnek import exceptions import yaml class CliError(Exception): # Don't print core dumps for some kinds of exceptions pass class HappyConfig: def __init__(self, config_file=".happy/config.json", env=None, substitutions=None, ctx=None): self.ctx = ctx with open(config_file) as f: self._data = json.load(f) self.env = env self.set_env() try: if self.config_version != "v1": raise CliError(f'Config file {config_file} has invalid version number. Only version "v1" supported.') except KeyError: raise CliError(f"Config file {config_file} missing config_version field.") def set_env(self): env_override = self.env env = self._data["default_env"] if env_override: env = env_override elif os.getenv("HAPPY_ENV"): env = os.getenv("HAPPY_ENV") try: self.env = env # HACK HACK HACK self._data["env"] = env self._data.update(self._data["environments"][env]) except KeyError: error = f'Invalid environment: "{env}". 
Check .happy/config for valid environments' raise CliError(error) def __getattr__(self, field): if field == "ecrs": return self.ctx.obj["secret_mgr"].secrets["ecrs"] if field in self._data: return self._data[field] return self.__dict__[field] class TablePrinter: def __init__(self, headers): self.rows = [] self.widths = [] self.bump_widths(headers) self.headers = headers def bump_widths(self, data): for i in range(len(data)): try: self.widths[i] = max(len(data[i]), self.widths[i]) except IndexError: self.widths.append(len(data[i])) def add_row(self, row): self.bump_widths(row) self.rows.append(row) def print(self): fmt_string = " ".join(["{: <%s}" % width for width in self.widths]) print(fmt_string.format(*self.headers)) separators = ["-----" for i in range(len(self.headers))] print(fmt_string.format(*separators)) for row in self.rows: print(fmt_string.format(*row)) class StackMeta: tag_map = { "app": "happy/app", "env": "happy/env", "instance": "happy/instance", "owner": "happy/meta/owner", "priority": "happy/meta/priority", "imagetag": "happy/meta/imagetag", "configsecret": "happy/meta/configsecret", "created": "happy/meta/created-at", "updated": "happy/meta/updated-at", } parameter_map = { "instance": "stack_name", "priority": "priority", "imagetag": "image_tag", "configsecret": "happy_config_secret", } def __init__(self, ctx, stack_name): self.stack_name = stack_name self.ctx = ctx config = ctx.obj["config"] self.meta = { "app": config.app, "env": config.env, "instance": self.stack_name, } def load(self, existing_tags): for short_tag, tag_name in self.tag_map.items(): if tag_name in existing_tags: self.meta[short_tag] = existing_tags[tag_name] elif short_tag not in self.meta: self.meta[short_tag] = "" def __getattr__(self, tag): if tag in self.tag_map: return self.meta[tag] return self.__dict__[tag] def __setattr__(self, tag, value): if tag in self.tag_map: self.meta[tag] = value else: self.__dict__[tag] = value @property def tags(self): return {v: self.meta[k] 
for k, v in self.tag_map.items()} @property def parameters(self): return {v: self.meta[k] for k, v in self.parameter_map.items()} def update(self, tag, stack_mgr): stacks = stack_mgr.stacks # Track timestamps for this stack now = int(time.time()) if not self.created: self.created = now self.imagetag = tag self.updated = now if not self.owner: self.owner = resolve_owner(self.ctx) if not self.priority: # Find the first available priority id and use it. existing_priorities = set() for stack in stacks.values(): try: stack_priority = int(stack.meta.priority) existing_priorities.add(stack_priority) except ValueError: # meta.priority was unparsable, might be empty. Either way, no existing value to avoid. pass while True: # pick a random number between 1000 and 5000 that's not in use right now. random.seed() priority = random.randint(1000, 5000) if priority not in existing_priorities: break self.priority = priority class Stack: """Represents a Happy Stack""" def __init__(self, stack_mgr, stack_name): self.stack_mgr = stack_mgr self.stack_name = stack_name self._workspace = None self._meta = None @property def workspace(self): # If the corresponding workspace is missing from TFE, we will intentionally return None if not self._workspace: try: self._workspace = self.stack_mgr.get_stack_workspace(self.stack_name) except exceptions.TFCHTTPNotFound: pass return self._workspace @property def outputs(self): if self.workspace: return self.workspace.outputs return {} @property def status(self): if self.workspace and self.workspace.latest_run: status = self.workspace.latest_run["data"]["attributes"]["status"] if self.workspace.latest_run["data"]["attributes"]["is-destroy"]: status += " destroy" return status return "UNKNOWN" @property def meta(self): if not self._meta: self._meta = StackMeta(self.stack_mgr.ctx, self.stack_name) # Default to unknown if missing data tags = {"happy/meta/owner": "UNKNOWN", "happy/meta/imagetag": "UNKNOWN"} if self.workspace: # Non existent workspace has 
no meta data try: meta_var = self.workspace.vars.get("terraform", {}).get("happymeta_") except exceptions.TFCHTTPNotFound: meta_var = None if meta_var: if meta_var["attributes"]["sensitive"]: raise Exception(f"Invalid meta var for stack {self.stack_name}, must not be sensitive") tags = json.loads(meta_var["attributes"]["value"]) else: print(f"No happymeta_ variable for stack {self.stack_name}") # Any valid environment will have a tags variable; if missing # don't add to list self._meta.load(tags) return self._meta def _ensure_workspace(self): if not self.workspace: raise Exception(f"Could not find TFE workspace for stack {self.stack_name}") def apply(self, wait): """Saves the variables and applies the workspace""" self._ensure_workspace() self.workspace.set_var( "happymeta_", json.dumps(self.meta.tags), "Happy Path metadata", sensitive=False, ) for k, v in self.meta.parameters.items(): self.workspace.set_var(k, str(v), "", sensitive=False) self.workspace.reset_cache() # Resets known vars with config_tarball(self.stack_mgr.config.terraform_directory) as targz_file: config_version_id = self.workspace.upload_version(targz_file.name) self.workspace.run_config_version(config_version_id) if wait: self.workspace.wait() return True def destroy(self): self._ensure_workspace() if not self.workspace.latest_config_version_id: print("WARNING: No latest version of workspace to destroy. 
Assuming already empty and continuing.") return True self.workspace.run(is_destroy=True) return self.workspace.wait() def watch(self): self._ensure_workspace() return self.workspace.wait() def cancelupdate(self, wait): self._ensure_workspace() # TODO(mbarrien): Check run status to see if it's in a cancellable state if self.workspace.latest_run_id: self.workspace.cancel_run() if wait: return self.workspace.wait() return True def print_outputs(self): print() print("Module Outputs --") for k, v in self.outputs.items(): print(f"{k}: {v}") class StackMgr: def __init__(self, ctx): self.ctx = ctx self.config = ctx.obj["config"] self.write_path = f"/happy/{self.config.env}/stacklist" # self.read_prefix = f"/happy/{self.config.env}/stacks" self._stacks = {} self.creator_workspace_name = f"env-{self.config.env}" self.tfe_api = None def get_tfe_api(self): if not self.tfe_api: secrets = self.ctx.obj["secret_mgr"].secrets self.tfe_api = TfeApi(secrets["tfe"]["url"], secrets["tfe"]["org"]) return self.tfe_api def remove(self, stack_name): self._stacks = {} # Force a refresh of stacks. stack_names = set(self.stacks.keys()) stack_names.remove(stack_name) param_client = AwsSession.get_client(self.ctx, "ssm") param_client.put_parameter(Name=self.write_path, Value=json.dumps(sorted(stack_names)), Overwrite=True) self._resync(wait=False) del self._stacks[stack_name] def add(self, stack_name): self._stacks = {} # Force a refresh of stacks. 
stack_names = set(self.stacks.keys()) stack_names.add(stack_name) param_client = AwsSession.get_client(self.ctx, "ssm") param_client.put_parameter(Name=self.write_path, Value=json.dumps(sorted(stack_names)), Overwrite=True) success = self._resync() if not success: raise Exception("Error invoking Terraform to create stack") if not self.get_stack_workspace(stack_name): raise Exception("Could not find newly created workspace for our stack") stack = Stack(self, stack_name) self._stacks[stack_name] = stack return stack def _resync(self, wait=True): """Invoke a specific TFE workspace that creates/deletes TFE workspaces, with prepopulated variables for identifier tokens.""" print("Resyncing workspaces") tfe_api = self.get_tfe_api() workspace = tfe_api.get_workspace(self.creator_workspace_name) workspace.run() if wait: return workspace.wait() return True @property def stacks(self): if self._stacks: return self._stacks param_client = AwsSession.get_client(self.ctx, "ssm") param = param_client.get_parameter(Name=self.write_path) stacklist = json.loads(param["Parameter"]["Value"]) for stack_name in stacklist: self._stacks[stack_name] = Stack(self, stack_name) return self._stacks def get_stack_workspace(self, stack_name): workspace_name = f"{self.config.env}-{stack_name}" tfe_api = self.get_tfe_api() return tfe_api.get_workspace(workspace_name) # Singleton for handling aws sessions since this is oddly slow. 
class AwsSession:
    """Process-wide cache for the boto3 session, botocore Config, and
    per-service clients (client construction is slow, so build each once)."""

    session = None
    config = None
    clients = {}

    @classmethod
    def get_session(cls, ctx):
        """Return the cached boto3 session, creating it lazily from the
        profile stored on the click context."""
        if cls.session is None:
            profile = ctx.obj["aws_profile"]
            cls.session = boto3.session.Session(profile_name=profile)
        return cls.session

    @classmethod
    def get_config(cls, ctx):
        """Return the cached botocore Config (fixed us-west-2 region,
        standard retry mode with 2 attempts)."""
        if cls.config is None:
            cls.config = Config(
                region_name="us-west-2",
                retries={"max_attempts": 2, "mode": "standard"},
            )
        return cls.config

    @classmethod
    def get_client(cls, ctx, client_type):
        """Return a cached boto3 client for client_type, building it on
        first request from the shared session and config."""
        client = cls.clients.get(client_type)
        if client is None:
            client = cls.get_session(ctx).client(client_type, config=cls.get_config(ctx))
            cls.clients[client_type] = client
        return client
"configuration-versions", "id": config_version_id, } }, }, } } ) run_id = run["data"]["id"] self._latest_run_id = run_id # The run we just created is now the latest. self._latest_run = None # Reset the cache self._outputs = None return True def wait(self): RUN_DONE_STATUSES = {"applied", "discarded", "errored", "canceled", "force_canceled", "policy_soft_failed"} last_status = "" while last_status not in RUN_DONE_STATUSES: if last_status: # Skip sleep on first time time.sleep(5) run = self.tfc.runs.show(self.latest_run_id) status = run["data"]["attributes"]["status"] if status != last_status: print(f"{datetime.now().strftime('%H:%M:%S')} - {status}") last_status = status if last_status != "applied": print(f"Error applying, ended in status {last_status}") return False return True @property def vars(self): """Get a nested dict of all the variables of the given workspace. Returns a 2-deep nested dict. Top-level dict has 2 possible entries for the 2 kinds of variables a workspace may have, "terraform" and "env". Value of that top level entry is itself a dict of key->value. The inner value is a dict object as returned by Terraform Enterprise API. 
""" if not self._vars: workspace_vars = self.tfc.workspace_vars.list(self.workspace_id) self._vars = {} for workspace_var in workspace_vars["data"]: attributes = workspace_var["attributes"] self._vars.setdefault(attributes["category"], {})[attributes["key"]] = workspace_var return self._vars def set_var(self, key, value, description, sensitive=True): category = "terraform" # Hard-coded, not allowing setting environment vars directly var_data = { "data": { "type": "vars", "attributes": { "key": key, "value": value, "description": description, "category": category, "sensitive": sensitive, }, }, } if category in self.vars and key in self.vars[category]: self.tfc.workspace_vars.update(self.workspace_id, self.vars[category][key]["id"], var_data) else: self.tfc.workspace_vars.create(self.workspace_id, var_data) def reset_cache(self): self._vars = None self._outputs = None self._latest_run_id = None self._latest_run = None @property def outputs(self): if self._outputs: return self._outputs try: state_version = self.tfc.state_versions.get_current(self.workspace_id) except exceptions.TFCHTTPNotFound: return {} # terrasnek api lacks a way to append ?include=outputs to state_version requests, # so we have to iterate through all outputs and get them individually # TODO(mbarrien): Add code to Terrasnek outputs = state_version["data"]["relationships"]["outputs"]["data"] state_version_output_ids = (output["id"] for output in outputs) self._outputs = {} for state_version_output_id in state_version_output_ids: state_version_output = self.tfc.state_version_outputs.show(state_version_output_id)["data"]["attributes"] if not state_version_output["sensitive"]: key = state_version_output["name"] value = state_version_output["value"] self._outputs[key] = value return self._outputs def upload_version(self, filename): # Not using auto-queue-runs, will explicitly create later config_version = self.tfc.config_versions.create( self.workspace_id, {"data": {"type": "configuration-versions", 
"attributes": {"auto-queue-runs": False}}} ) config_version_id = config_version["data"]["id"] upload_url = config_version["data"]["attributes"]["upload-url"] self.tfc.config_versions.upload(filename, upload_url) return config_version_id def cancel_run(self): self.tfc.runs.force_cancel(self.latest_run_id, {"comment": "Force cancelled by happy cli"}) class TfeApi: def __init__(self, url, org): self.url = url self.org = org self._tfc = None def get_token(self, hostname): env_token = os.getenv("TFE_TOKEN") if env_token: return env_token error = False try: with open(os.path.expanduser("~/.terraform.d/credentials.tfrc.json")) as f: credentials = json.load(f)["credentials"] except FileNotFoundError: error = True if error or hostname not in credentials: raise CliError( f"Terraform credentials for {hostname} not found. Run 'terraform login {hostname}' and follow the instructions" ) return credentials[hostname]["token"] @property def tfc(self): if self._tfc: return self._tfc hostname = urlparse(self.url).hostname tfc = TFC(self.get_token(hostname), url=self.url) tfc.set_org(self.org) self._tfc = tfc return tfc def get_workspace(self, workspace_name): workspace = self.tfc.workspaces.show(workspace_name) return TFEWorkspace(self.tfc, workspace["data"]) @click.group() @click.option( "--profile", default=None, help="AWS profile to use. Explicitly passing empty string uses Boto default credentials resolver.", ) @click.option("--env", default=None, help="Switch happy envs") @click.pass_context def cli(ctx, profile, env): ctx.ensure_object(dict) config = HappyConfig(env=env, ctx=ctx) if profile is None: profile = config.aws_profile elif profile == "": profile = None ctx.obj["secret_mgr"] = SecretMgr(ctx) ctx.obj["config"] = config ctx.obj["aws_profile"] = profile # These aren't lazy-instantiating! 
ctx.obj["stack_mgr"] = StackMgr(ctx) ctx.obj["orchestrator"] = Orchestrator(ctx) class SecretMgr: def __init__(self, ctx): self.ctx = ctx self._secrets = None @property def secrets(self): if self._secrets: return self._secrets config = self.ctx.obj["config"] secrets_client = AwsSession.get_client(self.ctx, "secretsmanager") secrets = secrets_client.get_secret_value(SecretId=config.secret_arn)["SecretString"] self._secrets = json.loads(secrets) return self._secrets def run_aws_cmd(ctx, cmd, return_output=True, json_output=True): command = [ "aws", "--profile", AwsSession.get_session(ctx).profile_name, "--region", AwsSession.get_config(ctx).region_name, ] command.extend(cmd) if return_output: output = subprocess.check_output(command) else: subprocess.check_call(command) return if not json_output: return output return json.loads(output) def resolve_owner(ctx): # Figure out what our current identity is sts_client = AwsSession.get_client(ctx, "sts") identity = sts_client.get_caller_identity()["Arn"] return identity.split("/")[-1].split("@")[0] def generate_tag(ctx): now = datetime.now().strftime("%m%d-%H%M%S") owner = resolve_owner(ctx) return f"{owner}-{now}" class Orchestrator: def __init__(self, ctx): self.ctx = ctx self._secrets = None @property def secrets(self): if not self._secrets: self._secrets = self.ctx.obj["secret_mgr"].secrets return self._secrets def shell(self, stack_name, service): cluster_arn = self.secrets["cluster_arn"] service_name = f"{stack_name}-{service}" ecs_client = AwsSession.get_client(self.ctx, "ecs") tasks = ecs_client.list_tasks(cluster=cluster_arn, serviceName=service_name)["taskArns"] print("Found tasks: ") taskinfo = ecs_client.describe_tasks(cluster=cluster_arn, tasks=tasks)["tasks"] tp = TablePrinter(["Task ID", "Started", "Status"]) containers = [] for task in taskinfo: task_id = task["taskArn"].split("/")[-1] tp.add_row([task_id, task["startedAt"].strftime("%m/%d %H:%M"), task["lastStatus"]]) containers.append( { "host": 
task["containerInstanceArn"], "container": task["containers"][0]["runtimeId"], "arn": task["taskArn"], } ) tp.print() print() for container in containers: instance_info = ecs_client.describe_container_instances( cluster=cluster_arn, containerInstances=[container["host"]] ) ec2_instance = instance_info["containerInstances"][0]["ec2InstanceId"] ec2_client = AwsSession.get_client(self.ctx, "ec2") hostinfo = ec2_client.describe_instances(InstanceIds=[ec2_instance]) ip = hostinfo["Reservations"][0]["Instances"][0]["PrivateIpAddress"] print(f"Connecting to {container['arn']}") os.execvp("ssh", ["ssh", "-t", ip, "sudo", "docker", "exec", "-ti", container["container"], "/bin/bash"]) def logs(self, stack_name, service, since): config = self.ctx.obj["config"] # TODO, we should get the logs path from ECS instead of generating it. log_prefix = config.log_group_prefix run_aws_cmd( self.ctx, ["logs", "tail", "--since", since, "--follow", f"{log_prefix}/{stack_name}/{service}"], return_output=False, json_output=False, ) def run_task(self, taskdef_arn, wait=False, show_logs=True): """Run a one-off ECS task and optionally wait""" secrets = self.secrets cluster_arn = secrets["cluster_arn"] subnets = secrets["private_subnets"] security_groups = secrets["security_groups"] print(f"Using task definition {taskdef_arn}") ecs_client = AwsSession.get_client(self.ctx, "ecs") output = ecs_client.run_task( cluster=cluster_arn, taskDefinition=taskdef_arn, networkConfiguration={ "awsvpcConfiguration": { "subnets": subnets, "securityGroups": security_groups, "assignPublicIp": "DISABLED", } }, ) task_info = output["tasks"][0] print(f"Task {task_info['taskArn']} started") if not wait: return # Wait for the task to start. 
def subprocess_output_with_default(cmd, default="unknown"):
    """Run `cmd` and return its stripped stdout, or `default` on failure.

    Used for best-effort metadata such as `git describe`. Besides a
    non-zero exit status, this now also tolerates the executable being
    absent: subprocess raises OSError/FileNotFoundError (not
    CalledProcessError) when the binary is not on PATH, which previously
    crashed this "best effort" helper.
    """
    try:
        return subprocess.check_output(cmd).decode().strip()
    except (subprocess.CalledProcessError, OSError):
        return default
For now, this is hard coded to use docker-compose as the backend.""" DEFAULT_CONFIG = {"compose_file": "docker-compose.yml", "env": None} def __init__(self, config={}): self.config = {**self.DEFAULT_CONFIG, **config} # Replace defaults with the input def get_env(self): env = dict(os.environ) # Make a copy of the current environment env["DOCKER_BUILDKIT"] = "1" env["BUILDKIT_INLINE_CACHE"] = "1" env["COMPOSE_DOCKER_CLI_BUILD"] = "1" env["DOCKER_REPO"] = f"{self.config['ecr_repo']}/" env["HAPPY_COMMIT"] = subprocess_output_with_default(["git", "rev-parse", "--verify", "HEAD"]) env["HAPPY_BRANCH"] = subprocess_output_with_default(["git", "branch", "--show-current"]) env["HAPPY_TAG"] = subprocess_output_with_default(["git", "describe"]) return env def build(self, artifacts=None): env = self.get_env() compose_args = ["--file", self.config["compose_file"]] if self.config["env"]: compose_args += ["--env", self.config["env"]] cmd = ["docker-compose"] + compose_args + ["build"] if artifacts: cmd += artifacts subprocess.check_call(cmd, env=env) cmd = ["docker-compose"] + compose_args + ["config"] config_file = subprocess.check_output(cmd, env=env) result = yaml.load(config_file, Loader=yaml.Loader) images = {} for service_name, service in result["services"].items(): if "build" in service: if artifacts and service_name not in artifacts: continue images[service_name] = service["image"] return images def push(self, ecrs, images, tags): print("Tagging images...") env = self.get_env() for artifact_id, registry in ecrs.items(): if images and artifact_id not in images: continue image = images[artifact_id] for current_tag in tags: subprocess.check_call(["docker", "tag", f"{image}:latest", f"{registry['url']}:{current_tag}"], env=env) print(f"Pushing images...{images}") for artifact_id, registry in ecrs.items(): if images and artifact_id not in images: continue for current_tag in tags: subprocess.check_call(["docker", "push", f"{registry['url']}:{current_tag}"], env=env) def 
run_tasks(ctx, stack, task_type, wait=False, show_logs=True): print(f"Running tasks for {task_type}") config = ctx.obj["config"] orchestrator = ctx.obj["orchestrator"] task_outputs = config.tasks.get(task_type, []) if not task_outputs: print(f"Found no tasks for {task_type}") try: tasks = [stack.outputs[task_output] for task_output in task_outputs] except KeyError as exc: raise CliError(f"Stack {stack.stack_name} is missing output field '{exc.args[0]}' for task {task_type}") for task in tasks: orchestrator.run_task(task, wait=wait, show_logs=show_logs) @cli.command() @click.argument("stack_name") @click.option("--reset", is_flag=True, default=False, help="Drop and recreate the dev db from the latest snapshot") @click.pass_context def migrate(ctx, stack_name, reset): """Run DB migration task for given stack""" stack_mgr = ctx.obj["stack_mgr"] stack = stack_mgr.stacks[stack_name] if reset: run_tasks(ctx, stack, "delete", wait=True, show_logs=True) run_tasks(ctx, stack, "migrate", wait=True, show_logs=True) @cli.command() @click.argument("stack_name") @click.argument("service") @click.option("--instanceid", help="Choose a specific instance") @click.pass_context def shell(ctx, stack_name, service, instanceid): orchestrator = ctx.obj["orchestrator"] orchestrator.shell(stack_name, service) @cli.command() @click.argument("stack_name") @click.argument("service") @click.option("--since", default="10m", help="Output logs since <number>s|m|h|d") @click.pass_context def logs(ctx, stack_name, service, since): """Tail the logs of a service (frontend, backend, upload, migrations)""" orchestrator = ctx.obj["orchestrator"] orchestrator.logs(stack_name, service, since) @cli.command() @click.argument("stack_name") @click.option("--tag", help="Tag name for docker image. 
Leave empty to generate one automatically.", default=None) @click.option("--wait/--no-wait", is_flag=True, default=True, help="wait for this to complete") @click.option("--force", is_flag=True, default=False, help="Ignore already-exists errors") @click.pass_context def create(ctx, stack_name, tag, wait, force): """Create a new stack with a given tag""" stackmgr = ctx.obj["stack_mgr"] if stack_name in stackmgr.stacks: if force: print(f"Stack {stack_name} already exists") else: raise CliError(f"Stack {stack_name} already exists") stack_meta = StackMeta(ctx, stack_name) stack_meta.load({"happy/meta/configsecret": ctx.obj["config"].secret_arn}) if not tag: tag = generate_tag(ctx) ctx.invoke(push, tag=tag) stack_meta.update(tag, stackmgr) print(f"creating {stack_name}") stack = stackmgr.add(stack_name) stack._meta = stack_meta # TODO(mbarrien): Hack! success = stack.apply(wait) if not success: raise CliError("Apply failed, skipping migrations") if should_auto_migrate(ctx): ctx.invoke(migrate, stack_name=stack_name) stack.print_outputs() def should_auto_migrate(ctx): # Make sure this environment allows automatically running migrations on update try: migrate_ok = ctx.obj["config"].auto_run_migrations return migrate_ok except KeyError: # The default behavior is to auto-migrate on update. return True @contextmanager def config_tarball(source_dir): """Create a config tarball from given source_dir, then automatically delete it once we're done.""" targz_file = tempfile.NamedTemporaryFile(delete=False) try: with tarfile.open(fileobj=targz_file, mode="w:gz", dereference=True) as tar: tar.add(source_dir, arcname="") with targz_file: yield targz_file finally: os.remove(targz_file.name) @cli.command() @click.argument("stack_name") @click.option("--tag", help="Tag name for docker image. 
Leave empty to generate one automatically.", default=None) @click.option("--wait/--no-wait", is_flag=True, default=True, help="wait for this to complete") @click.option("--skip-migrations/--do-migrations", is_flag=True, default=False, help="Skip running migrations") @click.pass_context def update(ctx, stack_name, tag, wait, skip_migrations): """Update a dev stack tag version""" stackmgr = ctx.obj["stack_mgr"] try: stack = stackmgr.stacks[stack_name] except KeyError: raise CliError(f"Stack {stack_name} does not exist") print(f"updating {stack_name}") if not tag: tag = generate_tag(ctx) ctx.invoke(push, tag=tag) stack_meta = stack.meta # Reset the configsecret if it has changed stack_meta.load({"happy/meta/configsecret": ctx.obj["config"].secret_arn}) stack_meta.update(tag, stackmgr) success = stack.apply(wait or not skip_migrations) if not success: raise CliError("Apply failed, skipping migrations") if not skip_migrations and should_auto_migrate(ctx): ctx.invoke(migrate, stack_name=stack_name) stack.print_outputs() @cli.command() @click.argument("stack_name") @click.option("--wait/--no-wait", is_flag=True, default=True, help="wait for this to complete") @click.pass_context def cancelupdate(ctx, stack_name, wait): """Cancel a dev stack update""" print(f"Canceling update of {stack_name}") stack_mgr = ctx.obj["stack_mgr"] stack = stack_mgr.stacks[stack_name] stack.cancelupdate(wait) stack.print_outputs() @cli.command() @click.argument("stack_name") @click.pass_context def delete(ctx, stack_name): """Delete a dev stack""" stackmgr = ctx.obj["stack_mgr"] try: stack = stackmgr.stacks[stack_name] except Exception: raise CliError(f"Stack {stack_name} doesn't exist in our list") # Make sure this environment allows stack deletion. 
try: delete_protected = ctx.obj["config"].delete_protected if delete_protected: raise CliError("This stack cannot be deleted") except KeyError: pass print(f"deleting {stack_name}") try: run_tasks(ctx, stack, "delete", wait=False, show_logs=False) print(f"Database dropped") except CliError: print(f"Database task missing, skipping delete") success = stack.destroy() do_remove_workspace = False if not success: do_remove_workspace = input( f"Error while destroying {stack_name}; resources might remain. Continue to remove workspace (y/n)? " ) in ["Y", "y", "yes", "YES"] if success or do_remove_workspace: stackmgr.remove(stack_name) print(f"Delete done") else: print(f"Delete NOT done") @cli.command(name="list") # don't redefine list() @click.pass_context def list_command(ctx): """List dev stacks""" env = ctx.obj["config"].env stackmgr = ctx.obj["stack_mgr"] print(f"Listing stacks in environment '{env}'") headings = ["Name", "Owner", "Tag", "Status", "URLs"] tp = TablePrinter(headings) for name, info in stackmgr.stacks.items(): url = info.outputs.get("frontend_url", "") status = info.status tp.add_row([name, info.meta.owner, info.meta.imagetag, status, url]) tp.print() @cli.command() @click.argument("images", nargs=-1) @click.option( "--source-tag", help="Tag name for existing docker image. 
Leave empty to generate one automatically.", default=None, required=True, ) @click.option("--dest-tag", help="Extra tags to apply and push to the docker repo.", multiple=True, required=True) @click.pass_context def addtags(ctx, images, source_tag, dest_tag): """Add additional tags to already-pushed images in the ECR repo""" config = ctx.obj["config"] ecrs = config.ecrs ecr_client = AwsSession.get_client(ctx, "ecr") for image_name, repository in ecrs.items(): if images and image_name not in images: continue print(f"retagging {image_name} from {source_tag} to {', '.join(dest_tag)}") registry_id = repository["url"].split(".")[0] repo_name = "/".join(repository["url"].split("/")[1:]) manifest = ecr_client.batch_get_image( registryId=registry_id, repositoryName=repo_name, imageIds=[{"imageTag": source_tag}] ) manifest = manifest["images"][0]["imageManifest"] for tag in dest_tag: try: ecr_client.put_image( registryId=registry_id, repositoryName=repo_name, imageManifest=manifest, imageTag=tag ) except ecr_client.exceptions.ImageAlreadyExistsException: print(f"tag {tag} already exists, skipping.") def get_ecr_repo(config): # Assumption: All the ECR registries are within the same AWS account as the one configured # for the profile, and in the same region as the default one. # TODO(mbarrien): Ensure this is true, print an error if not. # TODO(mbarrien): If correct account and wrong region, create an ecr_client with config # for the correct region, and login to that. ecrs = config.ecrs first_repo = next(iter(ecrs.values())) first_registry = first_repo["url"].split("/")[0] return first_registry def login_ecrs(images, ecrs, ecr_client): """Login to all registries associated with images. An item in images must be a key in ecrs, skipping any that is not, with a warning msg. If images list is empty, assume that we are building and pushing everything, so login everywhere. 
@cli.command()
@click.argument("images", nargs=-1, default=None)
@click.option(
    "--tag", help="Tag name for existing docker image. Leave empty to generate one automatically.", default=None
)
@click.option("--extra-tag", help="Extra tags to apply and push to the docker repo.", multiple=True)
@click.option("--compose-env", help="Environment file to pass to docker compose", default=None)
@click.pass_context
def push(ctx, images, tag, extra_tag, compose_env):
    """Build and push docker images to ECR. Optionally filter by service name"""
    config = ctx.obj["config"]
    ecrs = config.ecrs
    ecr_client = AwsSession.get_client(ctx, "ecr")
    # Log in before building so buildkit's remote build cache is reachable.
    login_ecrs(images, ecrs, ecr_client)
    if not tag:
        tag = generate_tag(ctx)
    print("Building images...")
    builder_config = {}
    if compose_env:
        builder_config["env"] = compose_env
    elif os.path.isfile(config.default_compose_env):
        builder_config["env"] = config.default_compose_env
    # NOTE: It's OK to set an arbitrary repo here because it's only
    # used for buildkit caching during image building.
    # Image pushing does not care about this setting, and will push
    # images to the right places with some re-tagging.
    first_registry = get_ecr_repo(config)
    builder_config["ecr_repo"] = first_registry  # TODO this is a leak in our abstraction, need to fix.
    builder = ArtifactBuilder(builder_config)
    artifacts = builder.build(images)
    print("Build complete")
    # extra_tag arrives as a tuple from click; compute the full tag list once.
    # (This assignment was previously duplicated, and `list(...) or []` was
    # redundant since list() already yields a list.)
    all_tags = [tag] + list(extra_tag)
    print("Pushing images...")
    builder.push(ecrs, artifacts, all_tags)
    print(f"Built and pushed docker images with tags: {all_tags}")
open(self.filename, "w") as hostfile: for line in config: hostfile.write(line) for line in newconfig: hostfile.write(line) def generate_config(self, borders, containers): lines = [] lines.append(borders[0]) for container in containers: lines.append(f"127.0.0.1\t{container}\n") lines.append(borders[1]) return lines @hosts.command() @click.option( "--hostsfile", default="/etc/hosts", help="Path to system hosts file" ) def install(hostsfile): "Install compose DNS entries" hostmgr = HostnameManager(hostsfile) config = hostmgr.get_hostsfile() containers = hostmgr.get_compose_containers() borders = hostmgr.get_file_borders() config = hostmgr.cleanup_config(borders, config) hostmgr.update_config(config, hostmgr.generate_config(borders, containers)) @hosts.command() @click.option( "--hostsfile", default="/etc/hosts", help="Path to system hosts file" ) def uninstall(hostsfile): "Remove compose DNS entries" hostmgr = HostnameManager(hostsfile) config = hostmgr.get_hostsfile() borders = hostmgr.get_file_borders() config = hostmgr.cleanup_config(borders, config) hostmgr.update_config(config, []) if __name__ == "__main__": obj = {} try: cli.main(obj=obj) except exceptions.TFCHTTPUnauthorized as err: print( f"Not authorized to access TFE. Try going to {obj['secret_mgr'].secrets['tfe']['url']} in your browser then rerunning your command." ) except CliError as err: print(f"ERROR: {err}") exit(1) <file_sep># Build image FROM node:16-alpine AS builder WORKDIR /app # Install dependencies COPY package.json yarn.lock postinstall.js /app/ RUN yarn install # Build for production COPY . 
/app ARG ENV RUN yarn build # Application image FROM node:16-alpine WORKDIR /app # Install dependencies for production COPY package.json yarn.lock postinstall.js /app/ ENV NODE_ENV=production RUN yarn install # Copy application build to image COPY --from=builder /app/.next /app/.next COPY public /app/public EXPOSE 8080 ENTRYPOINT [ "yarn", "start" ] <file_sep>export interface IconProps { className?: string; alt?: string; } <file_sep>import { LayoutMDX } from '@/components'; import { Accordion } from '@/components/common'; import { LINKS } from '@/constants'; <LayoutMDX toc title="FAQ"> # FAQ We hope these questions will help you in your napari plugin journey! We endeavour to update this regularly. If you need additional information or assistance, check out the [Contact]({CONTACT}) page or you can reach a human at <mailto:<EMAIL>>. ## Using napari <Accordion title="How do I get started using napari?" variant="faq"> If you’re new to napari, welcome and don’t fret! www.napari.org will help you install and get viewing in no time. </Accordion> <Accordion title="How do I get help with napari?" variant="faq"> napari is a community partner on the [image.sc forum]({IMAGESC}) and all help and support requests should be posted on the forum with the tag napari. We look forward to interacting with you there. Bug reports should be made on napari’s [github issues]({NAPARI_REPO}/issues/new?template=bug_report.md) using the bug report template. If you think something isn’t working, don’t hesitate to reach out - it is probably us and not you! </Accordion> <Accordion title="How do I get help with a napari plugin?" variant="faq"> Plugin authors are encouraged to add links to their listing, showing users of their plugin where to find help. If the plugin author has not indicated where to get help, the [image.sc](https://forum.image.sc/tag/napari) community is a great place to ask questions about napari and napari plugins. 
</Accordion> ## Using the napari hub <Accordion title="How do I perform more advanced search queries?" variant="faq" > We use [fuse.js](https://fusejs.io/) for the plugin search engine. You should be fine with most queries, but if you want to perform more advanced search queries, you can use [extended search operators](https://fusejs.io/examples.html#extended-search) to fine tune your search results. #### Examples By default, whitespace will be considered as an AND operator. To combine results using OR, use the pipe operator: ``` console | animation ``` To find results that include a word (without fuzzy matching), use the apostrophe operator: ``` 'video ``` To find results that start with a particular string, use the prefix operator. ``` ^napari ``` Similar to find results that end with a particular string, use the suffix operator: ``` napari$ ``` </Accordion> <Accordion title="Who is building this?" variant="faq" > The napari hub is a service of the Chan Zuckerberg Initiative in collaboration with napari, built by the CZI imaging team. You can learn more about who is steering this ship by visiting the [about page]({ABOUT}). </Accordion> <Accordion title="I have a neat idea or question not answered here, where do I go?" variant="faq" > Oh no, we didn’t answer your questions! Our apologies. We recommend you ask any hub community questions in [github discussions]({HUB_REPO}/discussions)- we value open source engagement and you never know who might have the right answer! For a full list of everyone involved, check out the [contacts page]({CONTACT}). </Accordion> ## Building and sharing plugins <Accordion title="How do I build a napari plugin?" variant="faq"> You can learn more about building a plugin for napari by reading [this tutorial](https://napari.org/plugins/stable/for_plugin_developers.html#plugins-for-plugin-developers), which will guide you through hook specifications, implementation, and how to share your plugin with the world. 
</Accordion> <Accordion title="How do I add my plugin to the napari hub?" variant="faq" > To add your plugin to the napari hub, simply push it to PyPI. If you used the napari plugin cookiecutter as a template for your plugin, it comes already [configured to deploy to PyPI when you tag a versioned release](https://github.com/napari/cookiecutter-napari-plugin#set-up-automatic-deployments). </Accordion> <Accordion title="How often do you check for new or updated plugins on PyPI?" variant="faq" > We poll PyPI for updates every 5 minutes. </Accordion> <Accordion title="How do I customize my plugin's listing?" variant="faq" > We source metadata for your plugin's listing from both PyPI and Github. In addition to a bunch of standard PyPI stuff, we support additional custom fields, like a user-friendly description to complement your Github README. Check out [Customizing your plugin's listing](https://github.com/chanzuckerberg/napari-hub/blob/main/docs/customizing-plugin-listing.md) to see what metadata we will present and where we source it from. </Accordion> <Accordion title="Can I preview my listing before I upload my plugin?" variant="faq" > Yes, you can! The [napari-hub-cli](https://github.com/chanzuckerberg/napari-hub-cli) is a command-line tool that lets you preview your metadata and make sure that you’ve included everything the hub is looking for. Simply install it on your local development machine and point it at your local repository. </Accordion> <Accordion title="How do I remove my plugin from the napari hub?" variant="faq" > If you remove your plugin from PyPI or yank a release, it will be removed from the napari hub within approximately 5 minutes. If you want your plugin to be available on PyPI, but not be visible on the napari hub, let us know by [submitting an issue](https://github.com/chanzuckerberg/napari-hub/issues/new) or sending an email to <<EMAIL>> and we’ll manually remove it from our listings. 
</Accordion> <Accordion title={`"My plugin is on PyPI, but why isn’t it showing up in the hub?"`} variant="faq" > We check PyPI every 5 minutes, so if you just pushed it to PyPI, you might just need to go and get a cup of coffee. Search PyPI directly for your plugin among those tagged with [“Framework :: napari”](https://pypi.org/search/?c=Framework+%3A%3A+napari) If your plugin is on PyPI, but not showing up in that search, then you don’t have the “Framework :: napari” trove classifier properly defined. If your plugin does show up under [“Framework :: napari”](https://pypi.org/search/?c=Framework+%3A%3A+napari) on PyPI, but not on the hub, go and grab a cup of coffee. If it still doesn’t show up after 5 minutes, [open an issue](https://github.com/chanzuckerberg/napari-hub/issues/new?assignees=&labels=bug&template=bug_report.md&title=) and our team will look into it as soon as we can. </Accordion> <Accordion title={`I updated my ".napari" directory in Github. Why aren’t my changes showing up?`} variant="faq" > We only pull plugin info from Github when we detect a new plugin or new release on PyPI. However, if you’d like to update your plugin listing with the latest metadata from your Github repo, just let us know by [_submitting an issue_](https://github.com/chanzuckerberg/napari-hub/issues/new) and we can manually trigger a refresh of your plugin metadata from Github. </Accordion> <Accordion title={`Why isn't my plugin "open source"?`} variant="faq" > To be considered an “open source” plugin, the hub must be able to detect that your plugin is distributed with an [OSI-approved open source license](https://opensource.org/licenses). That happens in one of two ways: - If you’ve linked to a Github repository, we’ll source [the license from Github’s API](https://docs.github.com/en/rest/reference/licenses#get-the-license-for-a-repository), which inspects your LICENSE file and infers the correct license. 
- If Github fails to identify your license, we’ll fallback on whatever you’ve specified in your Python setup. _Note: this MUST be a valid [SPDX license identifier](https://spdx.org/licenses/) in order to be detected as an “open source” license, such as “BSD-3-Clause”... “BSD-3” will not be recognized as “open source”._ For more information on how we source this metadata, see [the "License" section of "Customizing your plugin's listing"](https://github.com/chanzuckerberg/napari-hub/blob/main/docs/customizing-plugin-listing.md#license) </Accordion> </LayoutMDX> <file_sep>export * from './AppBar'; export * from './Footer'; export * from './Layout'; export * from './LayoutMDX'; export * from './MenuDrawer'; export * from './PluginDetails'; export * from './PluginSearch'; export * from './SearchBar'; export * from './SignupForm'; <file_sep>import dayjs from 'dayjs'; /** * Utility to transform a date into a more readable format. Useful for ISO and * UTC date strings that need to be more readable. * * @param dateString A date string parseable by Date * @returns The formatted date string */ export function formatDate(dateString: string): string { return dayjs(dateString).format('DD MMMM YYYY'); } /** * Utility for formatting a pypi operating systems string. This removes the * nested classifiers so that only the OS name is rendered. * * @param operatingSystem List of operating systems classifiers. * @returns The operating system formatted as a comma list. */ export function formatOperatingSystem(operatingSystem: string): string { // Return last part of OS trove classifier. The nesting on pypi is // arbitrary, so you can have a long string like "Operating Systems :: // Microsoft :: Windows :: Windows 10", or a short string like "Operating // Systems :: OS Independent". 
const parts = operatingSystem.split(' :: ');
  const name = parts[parts.length - 1];
  return name.replace('OS Independent', 'All');
}
<file_sep>import { forEach, isEmpty, pickBy, reduce, set } from 'lodash';
import { DeepPartial } from 'utility-types';

import { FilterChipFormState, FilterFormState } from './filter.types';

// Python versions the filter UI exposes as checkboxes.
const SUPPORTED_PYTHON_VERSIONS = ['3.7', '3.8', '3.9'];

/**
 * Returns the default filter form state with every filter disabled.
 *
 * @returns The default state
 */
export function getDefaultState(): FilterFormState {
  return {
    developmentStatus: {
      onlyStablePlugins: false,
    },
    license: {
      onlyOpenSourcePlugins: false,
    },
    operatingSystems: {
      linux: false,
      mac: false,
      windows: false,
    },
    pythonVersions: SUPPORTED_PYTHON_VERSIONS.reduce(
      // Build an object mapping each supported version to `false`.
      (state, version) => set(state, [version], false),
      {} as FilterFormState['pythonVersions'],
    ),
  };
}

/**
 * Builds a unique chip ID from a filter category key and its sub-key,
 * e.g. `operatingSystems-linux`.
 *
 * @param key Filter category key
 * @param subKey Key of the individual filter within the category
 * @returns The combined chip ID
 */
export function getChipID(key: string, subKey: string): string {
  return `${key}-${subKey}`;
}

/**
 * Flattens the nested filter form state into a list of chip descriptors,
 * one per *enabled* filter. Disabled (falsy) filters produce no chip.
 *
 * @param state The filter form state
 * @returns Chip state objects for every enabled filter
 */
export function getChipState(state: FilterFormState): FilterChipFormState[] {
  const chips: FilterChipFormState[] = [];

  forEach(state, (subState, key) => {
    forEach(subState, (value: boolean, subKey) => {
      // Only enabled filters are rendered as chips.
      if (value) {
        chips.push({
          subKey,
          value,
          key: key as keyof FilterFormState,
          id: getChipID(key, subKey),
        });
      }
    });
  });

  return chips;
}

/**
 * Returns a copy of the form state object with only truthy values present. If
 * there are no truthy values present, then return undefined. This is used for
 * setting the filter query parameter only when at least one filter is enabled.
* * @param formState The form state * @returns A partial form state with only truthy values, otherwise undefined */ export function filterFalsyValues( formState: FilterFormState, ): DeepPartial<FilterFormState> | undefined { const enabledState = pickBy( reduce( formState, (result, state, key) => set(result, key, pickBy(state, Boolean)), {} as DeepPartial<FilterFormState>, ), (state) => !isEmpty(state), ); if (isEmpty(enabledState)) { return undefined; } return enabledState; } <file_sep>apig-wsgi boto3 flask google-cloud-bigquery gunicorn[gevent] pyyaml requests <file_sep>const { resolve } = require('path'); const pkg = require('../package.json'); module.exports = { parserOptions: { project: resolve(__dirname, '../tsconfig.jest.json'), }, extends: ['plugin:jest/recommended', 'plugin:jest/style'], settings: { /* Jest version has to be passed explicitly because ESlint throws an error about not being able to find the Jest version. This is likely due to the frontend being stored in `frontend/`. https://git.io/JYhAJ */ jest: { version: pkg.devDependencies.jest, }, }, }; <file_sep>SHELL := /bin/bash ### DOCKER ENVIRONMENTAL VARS ################################################# export DOCKER_BUILDKIT:=1 export COMPOSE_DOCKER_CLI_BUILD:=1 export COMPOSE_OPTS:=--env .env.ecr export AWS_DEV_PROFILE=sci-imaging export BACKEND_APP_ROOT=/var/task ### HELPFUL ################################################# help: ## display help for this makefile @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' .PHONY: help .PHONY: rm-pycache rm-pycache: ## remove all __pycache__ files (run if encountering issues with pycharm debugger (containers exiting prematurely)) find . 
-name '__pycache__' | xargs rm -rf ### DOCKER LOCAL DEV ######################################### .env.ecr: export AWS_ACCOUNT_ID=$$(aws sts get-caller-identity --profile $(AWS_DEV_PROFILE) | jq -r .Account); \ if [ -n "$${AWS_ACCOUNT_ID}" ]; then \ echo DOCKER_REPO=$${AWS_ACCOUNT_ID}.dkr.ecr.us-west-2.amazonaws.com/ > .env.ecr; \ else \ false; \ fi .PHONY: local-ecr-login local-ecr-login: if PROFILE=$$(aws configure list-profiles | grep $(AWS_DEV_PROFILE)); then \ aws ecr get-login-password --region us-west-2 --profile $(AWS_DEV_PROFILE) | docker login --username AWS --password-stdin $$(aws sts get-caller-identity --profile $(AWS_DEV_PROFILE) | jq -r .Account).dkr.ecr.us-west-2.amazonaws.com; \ fi .PHONY: local-init local-init: .env.ecr local-ecr-login ## Launch a new local dev env and populate it with test data. docker-compose $(COMPOSE_OPTS) up -d docker-compose $(COMPOSE_OPTS) run --rm --entrypoint /bin/bash backend -c "pip3 install awscli && $(BACKEND_APP_ROOT)/setup_dev_data.sh" .PHONY: backend-debugger backend-debugger: ## Attach to the backend service (useful for pdb) docker attach $$(docker-compose ps | grep backend | cut -d ' ' -f 1 | head -n 1) .PHONY: local-status local-status: ## Show the status of the containers in the dev environment. docker ps -a | grep --color=no -e 'CONTAINER\|napari-hub' .PHONY: local-rebuild local-rebuild: .env.ecr local-ecr-login ## Rebuild local dev without re-importing data docker-compose $(COMPOSE_OPTS) build frontend backend docker-compose $(COMPOSE_OPTS) up -d .PHONY: local-sync local-sync: local-rebuild local-init ## Re-sync the local-environment state after modifying library deps or docker configs .PHONY: local-start local-start: .env.ecr ## Start a local dev environment that's been stopped. docker-compose $(COMPOSE_OPTS) up -d .PHONY: local-stop local-stop: ## Stop the local dev environment. 
docker-compose stop .PHONY: local-clean local-clean: ## Remove everything related to the local dev environment (including db data!) docker-compose rm -sf -docker volume rm napari-hub_localstack .PHONY: local-logs local-logs: ## Tail the logs of the dev env containers. ex: make local-logs CONTAINER=backend docker-compose logs -f $(CONTAINER) .PHONY: local-shell local-shell: ## Open a command shell in one of the dev containers. ex: make local-shell CONTAINER=frontend docker-compose exec $(CONTAINER) bash <file_sep>export type Styles = { heading: string; list: string; logo: string; }; export type ClassNames = keyof Styles; declare const styles: Styles; export default styles; <file_sep># Happy Remote Development Environment Remote development environments are currently available for employees at CZI. For local development on your own machine, see instructions for setup at [./DEV_ENV.md]. Remote environments are also available through branches that named after prefix `dev-` ## Remote Dev Pre-requisites 1. Ensure your `awscli` is configured for `sci-imaging` profile. 1. Make sure you have the *latest version* of the AWS CLI installed. `brew upgrade awscli` if you're not sure: ``` % aws --version aws-cli/2.1.8 Python/3.9.0 Darwin/19.6.0 source/x86_64 prompt/off ``` 1. Run the following command to install prerequisites: ``` pip install -r .happy/requirements.txt ``` ### Overview Each developer can run as many remote development *stacks* as they like. Each stack can represent a feature branch, experiment, or whatever's useful to you. Stacks are managed using the remote dev cli utility called `happy`. The general remote dev workflow is: 1. Make some code changes 1. Run `./scripts/happy create <your-stack-name>` to create a new stack 1. Visit the URL printed by the create step, share it with the team, etc. 1. Run `./scripts/happy logs <your-stack-name> backend` to tail the logs of the napari hub api. 1. Make some more code changes 1. 
Run `./scripts/happy update <your-stack-name>` to update the remote stack with your latest changes.
1. When you don't need your stack anymore, run `./scripts/happy delete <your-stack-name>` to free up remote dev resources.

If you forget which stacks you've created, just run `./scripts/happy list` at any time to list the current remote dev stacks.

### General CLI Usage

The CLI utility is evolving rapidly, so the best reference for which commands are available and how to use them is the CLI itself. All commands support a `--help` flag to print usage docs. For example:

```
% ./scripts/happy create --help
Usage: happy create [OPTIONS] STACK_NAME

  Create a dev stack with a given tag

Options:
  --tag TEXT          Tag name for docker image. Leave empty to generate one
                      automatically.
  --wait / --no-wait  wait for this to complete
  --help              Show this message and exit.
```

### GitHub Action Integration

A new stack can also be deployed to the remote development environment through GitHub Action integration. Pushing any branch prefixed with "dev-" will trigger the GH Action workflow to create or update a dev stack, with the stack name equal to the part of the branch name following the prefix, e.g. pushing branch "dev-my-dev-branch" will deploy the stack "my-dev-branch" in the remote dev environment. This is useful in situations where the local connection is slow.

### Authentication

The backend of Happy uses CZI's deployment of Terraform Enterprise (TFE) to deploy and track the resources for the stacks. This requires logging into TFE to get a long-lived token tied to your user. The first time you run the happy application, the prompt will give you a command to run to get a token; follow its prompts.

The long-lived token's access to Terraform Enterprise will periodically expire. The happy CLI will let you know when this happens, and give you instructions to access the TFE website in your browser. Loading any TFE web page will reauthorize your token, and you can then re-run your command.

### Warnings

1.
Stack name needs to be a valid DNS prefix: starts with a letter, only includes letters, numbers, and dashes, less than 64 characters in length. 1. Yes, you have access to manipulate your teammates' remote dev stacks. This is intentional, to enable collaboration on features. Please use responsibly. <file_sep>export * from './ColumnLayout'; <file_sep>const isHeadful = process.env.HEADFUL === 'true' || process.env.HEADLESS === 'false'; const DEFAULT_LAUNCH_CONFIG = { args: ['--ignore-certificate-errors', '--ignore-ssl-errors'], headless: !isHeadful, ignoreHTTPSErrors: true, }; const DEFAULT_CONTEXT_CONFIG = { acceptDownloads: true, }; const BROWSER = process.env.BROWSER || 'chromium'; /** * Mapping of naapri hub breakpoints to devices that fit within the dimensions * of its associated breakpoint. We only need the devices for the viewport * widths so we can test functional and layout changes in the UI. * */ const DEVICES = { // width = 320px > 300px = sm xs: 'iPhone SE', // width = 414px > 375px sm: 'iPhone 11 Pro Max', // width = 568px > 495px = md md: 'iPhone SE landscape', // width = 768px > 600px = lg lg: 'iPad Mini', // width = 104px > 875px = xl xl: 'iPad Mini landscape', // width = 1194px > 1150px = 2xl '2xl': 'iPad Pro 11 landscape', // width = 1920px > 1425px = 3xl '3xl': { name: 'Desktop', viewport: { width: 1920, height: 1080 }, }, }; /** * Device specific environment variable. Use if you want to test using a specific device. */ const { DEVICE } = process.env; module.exports = { rootDir: '..', preset: 'jest-playwright-preset', testMatch: ['<rootDir>/tests/**/*.test.ts'], moduleNameMapper: { '^@/tests/(.*)$': '<rootDir>/tests/$1', '^@/(.*)$': '<rootDir>/src/$1', }, setupFilesAfterEnv: [ 'expect-playwright', '<rootDir>/jest/playwright.setup.ts', ], testEnvironmentOptions: { 'jest-playwright': { browsers: [BROWSER], browserContext: 'incognito', devices: DEVICE ? 
[DEVICES[DEVICE]] : Object.values(DEVICES), contextOptions: DEFAULT_CONTEXT_CONFIG, launchOptions: DEFAULT_LAUNCH_CONFIG, }, }, transform: { '\\.tsx?$': 'babel-jest', }, }; <file_sep># Client napari hub website implemented with Next.js and TypeScript! We use a lot of cool frontend tech for the website: - :zap: [React](https://reactjs.org/) + [Next.js](https://nextjs.org/) - :crossed_swords: [TypeScript](https://www.typescriptlang.org/) - :art: [SCSS modules](https://github.com/css-modules/css-modules) - :nail_care: [Tailwind CSS](https://tailwindcss.com/) for utility styles - :racing_car: [Tailwind JIT](https://tailwindcss.com/docs/just-in-time-mode) for on-demand Tailwind styles - :package: [Yarn](https://classic.yarnpkg.com/en/) for package management - :camera_flash: [Jest](https://jestjs.io/) + [React Testing Library](https://testing-library.com/docs/react-testing-library/intro) for unit and snapshot tests - :performing_arts: [Jest](https://jestjs.io/) + [Playwright](https://github.com/microsoft/playwright) for E2E tests - :mag: [ESlint](https://eslint.org/) + [Stylelint](https://stylelint.io/) for TypeScript and SCSS linting - :gear: [Plop](https://plopjs.com/documentation/) for boilerplate automation ## Setup Dev Environment ### Node.js We use Node.js and various packages on NPM for building napari hub. For package management, we use [yarn](https://classic.yarnpkg.com/en/). 
It's recommended you use NVM so you don't have to manage multiple Node.js versions yourself: - Bash: [nvm](https://github.com/nvm-sh/nvm) - Fish: [nvm.fish](https://github.com/jorgebucaran/nvm.fish) - Zsh: [zsh-nvm](https://github.com/lukechilds/zsh-nvm) When you have NVM setup, run the following commands: ```sh # Installs Node.js version defined in `.nvmrc` nvm install # Uses project defined Node.js version nvm use # Install yarn globally npm -g install yarn # Install project dependencies yarn install ``` ## Development Mode To run the app in development mode, run the following command: ```sh yarn dev ``` This will start the Next.js dev server with [fast refresh](https://nextjs.org/docs/basic-features/fast-refresh). Edit some code and watch it update in the browser without having to refresh :heart_eyes: This will also start a mock server in the background, but it's also possible to [connect to an external API](#backend-api). ## Plop Generators We use [Plop](https://plopjs.com/documentation/) to automate common boilerplate in the codebase. You can run Plop without any arguments and get a list of generators you can use: ```sh yarn plop ``` If you want to use a specific generator, you can pass the name as the first argument: ```sh # Run component generator yarn plop component ``` ## Backend API If you want to connect to an API directly, you'll need to use the `API_URL` environment variable to point to the API, and `MOCK_SERVER=false` to disable the mock API server. If you're locally forwarding the API through SSH and if the API checks the host header (e.g. 
AWS API Gateway), you will also need to set `API_URL_HOST` to pass the header check:

```sh
MOCK_SERVER=false \
API_URL=https://localhost:8081 \
API_HOST=<api-id>.execute-api.us-west-2.amazonaws.com \
yarn dev
```
<file_sep>import { SearchSortType } from './constants';
import { SearchResult } from './search.types';
import { SearchResultTransformFunction } from './types';

/**
 * Compare two dates to sort from newest to oldest.
 *
 * See: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort#description
 *
 * @param dateA The first date
 * @param dateB The second date
 * @returns <0 if dateA is newer than dateB, >0 if dateB is newer than dateA, and 0 if equal.
 */
export function compareDates(dateA: string, dateB: string): number {
  // time in ms makes newer dates have higher values
  return new Date(dateB).getTime() - new Date(dateA).getTime();
}

/**
 * Sorts search results by plugin release date, newest first.
 * Returns a new array; the input is not mutated.
 */
function sortByReleaseDate(results: SearchResult[]) {
  return (
    results
      // Create a copy of the array
      .slice()
      .sort((a, b) =>
        compareDates(a.plugin.release_date, b.plugin.release_date),
      )
  );
}

/**
 * Sorts search results by the date each plugin was first released, newest
 * first. Returns a new array; the input is not mutated.
 */
function sortByFirstReleased(results: SearchResult[]) {
  return (
    results
      // Create a copy of the array
      .slice()
      .sort((a, b) =>
        compareDates(a.plugin.first_released, b.plugin.first_released),
      )
  );
}

/**
 * Sorts search results alphabetically by plugin name using a locale-aware
 * comparison. Returns a new array; the input is not mutated.
 */
function sortByPluginName(results: SearchResult[]) {
  return (
    results
      // Create a copy of the array
      .slice()
      .sort((a, b) => a.plugin.name.localeCompare(b.plugin.name))
  );
}

/**
 * Map of sort types to sort functions. Used for calling a particular sort
 * function given the sort type. Each function should return a new copy of the
 * array to prevent unintended side-effects.
 */
const SORTERS: Record<SearchSortType, SearchResultTransformFunction | null> = {
  // Search engine already returns results in order of relevance.
[SearchSortType.Relevance]: null, [SearchSortType.PluginName]: sortByPluginName, [SearchSortType.ReleaseDate]: sortByReleaseDate, [SearchSortType.FirstReleased]: sortByFirstReleased, }; /** * Sorts the search results given the sort type. * * @param sortType The sort type * @param results The search results * @returns The sorted results */ export function sortResults( sortType: SearchSortType, results: SearchResult[], ): SearchResult[] { const sorter = SORTERS[sortType]; return sorter?.(results) ?? results; } <file_sep>import { usePlausible as usePlausibleNext } from 'next-plausible'; import { Logger } from '@/utils'; const logger = new Logger('usePlausible.ts'); /** * Payloads for each event type. A type alias is used instead of an interface * because type aliases have an implicit index signature: https://git.io/JZt9O */ export type Events = { 'Copy Package': { plugin: string; }; 'Description Nav': { plugin: string; section: string; }; Filter: { field: string; value: string; checked: boolean; }; Install: { plugin: string; }; Links: { plugin: string; link: string; url: string; host: string; }; Search: never; Signup: never; Sort: { by: string; }; }; /** * Hook for sending custom Plausible events with typing enabled. 
*/ export function usePlausible() { const plausible = usePlausibleNext(); function sendEvent<E extends keyof Events>( event: E, ...payload: Events[E][] ) { logger.debug('Plausible event:', { event, payload }); plausible(event, { props: payload[0], }); } return sendEvent; } <file_sep>import { isEqual, shuffle as ldshuffle } from 'lodash'; import { compareDates } from './sorters'; function shuffle<T>(array: Array<T>): Array<T> { let shuffled: Array<T>; // guarantee array is shuffled do { shuffled = ldshuffle(array); } while (isEqual(array, shuffled)); return shuffled; } function sortNewToOld(dates: string[]): string[] { return dates.slice().sort(compareDates); } describe('compareDates()', () => { it('should return 0 when dateA and dateB are the same', () => { expect(compareDates('1995-12-17 03:24:00', '1995-12-17T03:24:00')).toBe(0); }); it('should return a negative number when dateA is newer than dateB', () => { expect( compareDates('2020-01-01 00:00:00', '2019-01-01 00:00:00'), ).toBeLessThan(0); }); it('should return a positive number when dateB is newer than dateA', () => { expect( compareDates('2020-01-01 00:00:00', '2020-01-01 00:00:01'), ).toBeGreaterThan(0); }); it('should sort dates from newest to oldest (trivial)', () => { const datesSorted = [ '2020-01-01 00:00:00', '2019-02-01 00:00:00', '2019-01-02 00:00:00', '2019-01-01 01:00:00', '2019-01-01 00:01:00', '2019-01-01 00:00:01', '2019-01-01 00:00:00', ]; const datesShuffled = shuffle(datesSorted); expect(datesShuffled).not.toEqual(datesSorted); expect(sortNewToOld(datesShuffled)).toEqual(datesSorted); }); it('should sort dates from newest to oldest (complex)', () => { const datesSorted = [ '2028-02-01 15:50:49', '2013-08-20 16:17:33', '2013-07-26 07:04:39', '2012-01-02 17:44:56', '2010-06-28 20:01:09', '2008-08-08 05:29:52', '2004-04-12 20:23:48', '2003-06-10 19:46:13', '2000-09-24 04:16:24', '2000-09-01 08:32:31', ]; const datesShuffled = shuffle(datesSorted); 
expect(datesShuffled).not.toEqual(datesSorted); expect(sortNewToOld(shuffle(datesSorted))).toEqual(datesSorted); }); }); <file_sep>export * from './format'; export * from './logger'; export * from './performance'; export * from './react'; <file_sep>import { useEffect, useRef } from 'react'; import { usePrevious } from 'react-use'; import { createEnumParam, useQueryParam, withDefault } from 'use-query-params'; import { useActiveURLParameter, usePlausible } from '@/hooks'; import { DEFAULT_SORT_TYPE, SearchQueryParams, SearchSortType, } from './constants'; import { SearchResult } from './search.types'; import { sortResults } from './sorters'; const SortTypeValues = Object.values(SearchSortType); /** * Hook that provides the form state for the sort by form. */ function useForm() { const initialSortType = useActiveURLParameter<SearchSortType>( SearchQueryParams.Sort, ); const [sortType, setSortType] = useQueryParam( SearchQueryParams.Sort, withDefault( createEnumParam(SortTypeValues), // Default to release date if sort type is not initial in URL initialSortType ?? DEFAULT_SORT_TYPE, ), ); return { sortType, setSortType, }; } /** * Return type of `useForm()` hook. This includes the form data and data * setters. */ export type SortForm = ReturnType<typeof useForm>; function usePlausibleEvents(sortType: SearchSortType) { const plausible = usePlausible(); const prevSortType = usePrevious(sortType); const initialLoadRef = useRef(false); useEffect(() => { // Don't log sort event on initial load. if (initialLoadRef.current && sortType !== prevSortType) { plausible('Sort', { by: sortType, }); } else if (!initialLoadRef.current) { initialLoadRef.current = true; } }, [plausible, prevSortType, sortType]); } /** * Hook that provides access to the sort form state and handles sorting plugins * based on the selected sort type. 
* * @param results The search results * @returns Sorted results and form data */ export function useSort(results: SearchResult[]) { const sortForm = useForm(); usePlausibleEvents(sortForm.sortType); const sortedResults = sortResults(sortForm.sortType, results); return { sortForm, sortedResults, }; } <file_sep>export * from './TextHighlighter'; <file_sep>import { useRouter } from 'next/router'; import { useMemo } from 'react'; import { PushReplaceHistory } from 'use-query-params'; /** * Hook that returns the location object. On the browser, the default location * is returned. But on the server, we need to pass our own version to be * compatible with the Next.js router. * * @returns The location object */ export function useLocation(): Location { const router = useRouter(); const location = useMemo(() => { if (process.browser) { return window.location; } return { search: router.asPath.replace(/[^?]+/u, ''), } as Location; }, [router]); return location; } /** * Hook that returns the history object for navigation. This returns a version * that calls the Next.js router APIs directly instead of the browser APIs. * Since the Next.js router is universal, history interactions using this hook * should also be universal. * * @returns The history object */ export function useHistory(): PushReplaceHistory { const router = useRouter(); const match = /[^?]+/.exec(router.asPath); const pathname = match ? 
match[0] : router.asPath; const location = useLocation(); const history = useMemo( () => ({ push: ({ search }: Location) => router.push( { pathname: router.pathname, query: router.query }, { search, pathname }, { shallow: true, scroll: false }, ), replace: async ({ search }: Location) => { await router.replace( { pathname: router.pathname, query: router.query }, { search, pathname }, { shallow: true, scroll: false }, ); }, location, }), [location, pathname, router], ); return history; } <file_sep>import { useEffect, useState } from 'react'; import { TOCHeader } from './TableOfContents.types'; /** * The designs requires a 35px margin between the heading and the top of the * viewport when scrolling to a heading, so the next heading should be * highlighted when hitting that offset. */ const TOP_OFFSET = 35; /** * Hook for getting the selected heading anchor from an array of markdown * headers. The headers should be rendered on the DOM via the <Markdown /> * component for this to work. * * This code is inspired by the TOC for Docusaurus: https://git.io/JObbd, with * small improvements. * * @param headers Markdown headers from `getHeadersFromMarkdown()` * @returns Active header ID */ export function useActiveHeader(headers: TOCHeader[]): string { const [active, setActive] = useState(headers[0]?.id ?? ''); useEffect(() => { function findActiveHeader() { // Get headers as DOM nodes. const headerTags = headers.map((header) => document.getElementById(header.id), ); // Find first header that is in viewport. const firstHeaderIndex = headerTags.findIndex((header) => { const top = header?.getBoundingClientRect()?.top ?? 0; return top >= 0; }); const firstHeader = headerTags[firstHeaderIndex]; if (firstHeader) { const { top } = firstHeader.getBoundingClientRect(); // If user reaches the bottom, set the last header as selected. 
if ( window.innerHeight + window.pageYOffset >= document.body.scrollHeight ) { setActive(headers[headers.length - 1].id); } else if (Math.floor(top) <= TOP_OFFSET) { setActive(firstHeader.id); } else { /* If the first header in viewport is greater than the offset, then the user is still in the previous section. */ const previousHeader = headers[firstHeaderIndex - 1] ?? firstHeader; setActive(previousHeader.id); } } } document.addEventListener('scroll', findActiveHeader); document.addEventListener('resize', findActiveHeader); // Find active header on initial render findActiveHeader(); // Remove event listeners on cleanup. return () => { document.removeEventListener('scroll', findActiveHeader); document.removeEventListener('resize', findActiveHeader); }; }, [headers]); return active; } <file_sep>export * from './LayoutMDX'; <file_sep># napari hub This repository contains the source code for the _napari hub_, a place to find, evaluate, and install _napari_ plugins, built by the Imaging Team at the Chan Zuckerberg Initiative. ## What we're building now For our first _alpha/v0_ release in June 2021, the _napari hub_ will focus on making it easy for [Research Biologists, Imaging Scientists, and Bioimage Analysts](https://cziscience.medium.com/user-experience-research-in-the-imaging-field-6bb89e592bb9) to find, evaluate, and install _napari_ plugins distributed through PyPI. After talking with users about the challenges and needs in finding analysis solutions to bioimaging problems, we focused on a handful of critical features for our first release, including... - *global search bar* which returns results from multiple metadata sources (title, author, summary, description, etc.) 
- *napari-specific metadata*, such as author(s), links to external docs, support channels, etc., that plugin developers can easily define in PyPI metadata or Github config files - *napari-specific description* supports both napari-specific description specified in their Github repo or default to PyPI description if plugin developer does not use napari-specific description. - *install “call-to-action”* button for each plugin with plugin-specific instructions on how to install - *filter/sort* available plugins based on limited selection of metadata (2-3 filters, 1-2 sorts) that can be handled client side - *mailing list* sign up for announcements and updates for the _napari hub_ ## How we're building now The imaging team is comprised of product managers, user experience researchers, designers and engineers that learn with and from the community to build out the napari hub. Our process involves weekly syncs to track progress across disiplines, review and share assets. Just as our product is open source, we want our process to be as well. You can learn more by exploring: - [product](https://github.com/chanzuckerberg/napari-hub/wiki#product) strategy documents that inform how we'll make the napari hub a reality - [user experience research](https://github.com/chanzuckerberg/napari-hub/wiki#uxr) findings that help us understand the community's challenges and how the napari hub can help - [design](https://github.com/chanzuckerberg/napari-hub/wiki#design) sketches and prototypes that bring napari hub features to life - [engineering](https://github.com/chanzuckerberg/napari-hub/wiki#engineering) specs and diagrams that help us understand how the napari hub works behind the scenes ## What we're building next After our _alpha_ launch, we're very excited to keep iterating and building new features that are of value to the imaging community, supporting current and future _napari_ users and developers. 
We have a host of ideas for ways that we can help scientists learn if a plugin is right for them, for imaging scientists to share their knowledge with core facility users and collaborators, and ways to help lower barriers for image analysts in creating, maintaining, and supporting their plugins. If you want to share your own ideas on what we should improve or add, we'd love you to submit a [feature request](https://github.com/chanzuckerberg/napari-hub/issues)! This will help us ensure the experience functions as intended for the community. We’ll be using this feedback from the community, along with [User Experience Research](https://cziscience.medium.com/product-design-user-experience-research-to-accelerate-science-f9fbbb0d0a06), and consultation with the napari project to prioritize which features we decide to build. When we have a clearer idea of our roadmap beyond our June release, we'll share it here. ## About The _napari hub_ is built, maintained, and governed by the [Imaging team at the Chan Zuckerberg Initiative](https://chanzuckerberg.com/science/programs-resources/imaging/) as a free service for the napari community. [_napari_](https://napari.org) is a consensus-based community project and an open source tool that enables high performance visualization and exploration of a broad range of imaging data, including microscopy, medical imaging, geospatial data, and more, with clearly defined governance model, mission, and values. At CZI, we support the development of open source and community run tools like _napari_, as part of our mission to accelerate biomedical research and help every scientist make progress faster. The [CZI Imaging Team](https://chanzuckerberg.com/science/programs-resources/imaging/) seeks to remove barriers in the analysis of imaging and microscopy data and make it easier for biologists to access emerging methods for bioimage analysis that leverage machine learning. 
CZI sees promise in _napari_ and seeks to assist its development by providing resources not always available to the open source community, including dedicated user research, design, and engineering support. CZI is proud to collaborate with the science community to accelerate research and enable open science for all. ### Team - <NAME>, Engineering ([@codemonkey800](https://github.com/codemonkey800)) - <NAME>, Engineering ([@kne42](https://github.com/kne42)) - <NAME>, Product ([@neuromusic](https://github.com/neuromusic)) - <NAME>, Engineering ([@justinelarsen](https://github.com/justinelarsen)) - <NAME>, Engineering ([@ziyangczi](https://github.com/ziyangczi)) - <NAME>, User Experience ([@LCObus](https://github.com/LCObus)) - <NAME>, Design ([@liaprins-czi](https://github.com/liaprins-czi)) - <NAME>, Product ([@sofroniewn](https://github.com/sofroniewn)) ## Source Code We're building the napari hub out in the open, so if you want to contribute, please take a look at: - [napari hub frontend](frontend/README.md) ## Code of Conduct This project adheres to the Contributor Covenant [code of conduct](https://github.com/chanzuckerberg/.github/blob/master/CODE_OF_CONDUCT.md). By participating, you are expected to uphold this code. Please report unacceptable behavior to [<EMAIL>](mailto:<EMAIL>). 
<file_sep>export * from './PluginSearch'; <file_sep>click==7.1.2 pyyaml==5.4.1 terrasnek==0.1.2 boto3==1.17.47 <file_sep>export * from './useActiveURLParameter'; export * from './usePlausible'; <file_sep>import { useEffect, useState } from 'react'; import { TOC_HEADER_TAG, TOCHeader } from '@/components/common'; export function useHeaders() { const [headers, setHeaders] = useState<TOCHeader[]>([]); useEffect(() => { // based off of https://www.emgoto.com/react-table-of-contents/ const headerElements: HTMLHeadingElement[] = Array.from( document.querySelectorAll(`.markdown ${TOC_HEADER_TAG}`), ); setHeaders( headerElements.map<TOCHeader>((header) => ({ id: header.id, text: header.innerText, })), ); }, []); return headers; } <file_sep>/* eslint-disable global-require, @typescript-eslint/no-var-requires */ import { renderHook } from '@testing-library/react-hooks'; import { NextRouter, useRouter } from 'next/router'; import React from 'react'; import { act } from 'react-dom/test-utils'; import pluginIndex from '@/fixtures/index.json'; import { DEFAULT_SORT_TYPE, SearchQueryParams, SearchSortType, } from './constants'; import { useSearch, useSearchSetSortType } from './search.hooks'; import { SearchEngine, SearchResult } from './search.types'; import type { SortForm } from './sort.hooks'; jest.mock('next/router', () => ({ useRouter: jest.fn(), })); jest.mock('use-query-params', () => ({ useQueryParam: jest.fn().mockImplementation((_, initial: string) => { // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment const { useState }: { useState: typeof React.useState } = require('react'); return useState(initial); }), withDefault: jest.fn(), })); function mockRouter() { type Replace = NextRouter['replace']; const replace = jest .fn<ReturnType<Replace>, Parameters<Replace>>() .mockReturnValue(Promise.resolve(true)); (useRouter as jest.Mock).mockReturnValue({ replace, query: {}, }); return { replace }; } type Search = SearchEngine['search']; const search = jest 
.fn<ReturnType<Search>, Parameters<Search>>() .mockReturnValue([]); beforeEach(() => { search.mockClear(); }); describe('useSearch()', () => { const mockPlugin: SearchResult = { index: 6, plugin: pluginIndex[6], matches: {}, }; const pluginIndexList = pluginIndex.map((plugin, index) => ({ matches: {}, index, plugin, })); beforeEach(() => { mockRouter(); search.mockReturnValueOnce([mockPlugin]); }); it('should initially return index as search result list', () => { const { result } = renderHook(() => useSearch(pluginIndex).results); expect(result.current).toEqual(pluginIndexList); }); it('should return index list when clearing query', () => { const { result } = renderHook(() => useSearch(pluginIndex)); act(() => result.current.searchForm.setQuery('nothing')); expect(result.current.results).toHaveLength(0); act(() => result.current.searchForm.clearQuery()); expect(result.current.results).toEqual(pluginIndexList); }); it('should return match in search result list', () => { const { result } = renderHook(() => useSearch(pluginIndex)); act(() => result.current.searchForm.setQuery('video')); expect(result.current.results[0]).toMatchObject(mockPlugin); }); }); describe('useSearchSetSortType()', () => { let form: SortForm; const oldLocation = window.location; beforeEach(() => { form = { setSortType: jest.fn(), sortType: SearchSortType.Relevance, }; window.location = oldLocation; }); function mockSortType(sortType: SearchSortType) { const url = new URL('http://localhost'); url.searchParams.set(SearchQueryParams.Sort, sortType); window.location = { href: url.toString() } as Location; form.sortType = sortType; } it('should set sort type to relevance on intial load', () => { renderHook(() => useSearchSetSortType('video', form)); expect(form.setSortType).toHaveBeenCalled(); }); it('should not set sort type to relevance when user has sort type on initial load', () => { mockSortType(SearchSortType.PluginName); renderHook(() => useSearchSetSortType('video', form)); 
expect(form.setSortType).not.toHaveBeenCalled(); }); it('should set sort type to relevance when user enters query', () => { let query = ''; const { rerender } = renderHook(() => useSearchSetSortType(query, form)); expect(form.setSortType).not.toHaveBeenCalled(); query = 'video'; rerender(); expect(form.setSortType).toHaveBeenCalledWith(SearchSortType.Relevance); }); it('should set sort type to default when user clears query and sort type is relevance', () => { let query = 'video'; const { rerender } = renderHook(() => useSearchSetSortType(query, form)); query = ''; rerender(); expect(form.setSortType).toHaveBeenCalledWith(DEFAULT_SORT_TYPE); }); it('should maintain sort type when user clears query and sort type is not relevance', () => { mockSortType(SearchSortType.PluginName); let query = 'video'; const { rerender } = renderHook(() => useSearchSetSortType(query, form)); query = ''; rerender(); expect(form.setSortType).not.toHaveBeenCalled(); }); }); <file_sep>if (process.env.MOCK_SERVER === 'false') { console.log('MOCK_SERVER is false, exiting'); process.exit(); } const express = require('express'); const napariPlugin = require('./src/fixtures/napari.json'); const pluginIndex = require('./src/fixtures/index.json'); const app = express(); app.get('/plugins', async (_, res) => { res.json({ 'napari-compressed-labels-io': '0.0.0' }); }); app.get('/plugins/index', async (_, res) => { res.json(pluginIndex); }); app.get('/plugins/:name', async (_, res) => { res.json(napariPlugin); }); app.listen(8081, () => console.log('Started mock API server')); <file_sep>import { RequestError } from '@octokit/types'; export interface PluginAuthor { name: string; email?: string; } /** * Plugin data used for indexing. This is a subset of the full plugin data. 
*/ export interface PluginIndexData { authors: PluginAuthor[]; description_content_type: string; description: string; development_status: string[]; first_released: string; license: string; name: string; operating_system: string[]; python_version: string; release_date: string; summary: string; version: string; } /** * Interface for plugin data response from backend. */ export interface PluginData extends PluginIndexData { code_repository: string; documentation: string; project_site: string; release_date: string; report_issues: string; requirements: string[]; support: string; twitter: string; } /** * Plugin repo data to render with plugin metadata. */ export interface PluginRepoData { forks: number; issuesAndPRs: number; stars: number; } export type PluginRepoFetchError = Pick<RequestError, 'name' | 'status'>; /** * Data used for rendering links in the app. */ export interface LinkInfo { /** * URL of this link. */ link: string; /** * Title of the link to use. */ title: string; /** * If the link should open in a new tab. 
*/ newTab?: boolean; } <file_sep>import { LinkInfo } from '@/types'; export const LINKS: Record<string, LinkInfo>; <file_sep>from unittest import mock import requests from requests.exceptions import HTTPError from backend.napari import get_plugin from backend.napari import get_plugins from backend.napari import get_download_url from backend.napari import get_license class FakeResponse: def __init__(self, *, data: str): self.text = data self.status_code = requests.codes.ok @property def status_code(self): status_code = self._status_code self.status_code = requests.codes.ok + 100 return status_code @status_code.setter def status_code(self, status_code): self._status_code = status_code def raise_for_status(self): raise HTTPError plugin_list = """ <li> <a class="package-snippet" href="/project/brainreg-segment/"> <h3 class="package-snippet__title"> <span class="package-snippet__name">package1</span> <span class="package-snippet__version">0.2.7</span> <span class="package-snippet__released"><time datetime="2021-04-26T13:17:17+0000" data-controller="localized-time" data-localized-time-relative="true" data-localized-time-show-time="false"> Apr 26, 2021 </time></span> </h3> <p class="package-snippet__description">test package 1</p> </a> </li> <li> <a class="package-snippet" href="/project/napari-mri/"> <h3 class="package-snippet__title"> <span class="package-snippet__name">package2</span> <span class="package-snippet__version">0.1.0</span> <span class="package-snippet__released"><time datetime="2021-03-21T06:12:30+0000" data-controller="localized-time" data-localized-time-relative="true" data-localized-time-show-time="false"> Mar 21, 2021 </time></span> </h3> <p class="package-snippet__description">test package 2</p> </a> </li> """ plugin = """ {"info":{"author":"Test Author","author_email":"<EMAIL>", "bugtrack_url":null,"classifiers":["Development Status :: 4 - Beta", "Intended Audience :: Developers","License :: OSI Approved :: BSD License", "Operating System :: OS 
Independent","Programming Language :: Python", "Programming Language :: Python :: 3","Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7","Programming Language :: Python :: 3.8" ,"Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Software Development :: Testing"],"description":"description", "description_content_type":"","docs_url":null,"download_url":"", "downloads":{"last_day":-1,"last_month":-1,"last_week":-1}, "home_page":"https://github.com/test/test","keywords":"", "license":"BSD-3","maintainer":"Test Author", "maintainer_email":"<EMAIL>","name":"test", "package_url":"https://pypi.org/project/test/","platform":"", "project_url":"https://pypi.org/project/test/","project_urls":{ "Homepage":"https://github.com/test/test"}, "release_url":"https://pypi.org/project/test/0.0.1/", "requires_dist":null,"requires_python":">=3.6", "summary":"A test plugin", "version":"0.0.1","yanked":false,"yanked_reason":null}, "last_serial":10229034,"releases":{"0.0.1":[{"comment_text":"", "downloads":-1,"filename":"test.tar.gz","has_sig":false, "md5_digest":"","packagetype":"sdist", "python_version":"source","requires_python":">=3.6","size":3338, "upload_time":"2020-04-13T03:37:20","upload_time_iso_8601": "2020-04-13T03:37:20.169990Z","url":"","yanked":false,"yanked_reason":null}], "0.0.2":[{"comment_text":"", "downloads":-1,"filename":"","has_sig":false, "packagetype":"sdist", "python_version":"source","requires_python":">=3.6","size":3343, "upload_time":"2020-04-13T14:58:21","upload_time_iso_8601": "2020-04-13T14:58:21.644816Z","yanked":false,"yanked_reason":null}],"0.0.3": [{"comment_text":"", "downloads":-1,"filename":"test","has_sig":false,"packagetype":"sdist", "python_version":"source","requires_python":">=3.6","size":3423, "upload_time":"2020-04-20T15:28:53", "upload_time_iso_8601":"2020-04-20T15:28:53.386281Z", "url":"","yanked":false,"yanked_reason":null}]}}""" 
@mock.patch( 'requests.get', return_value=FakeResponse(data=plugin_list) ) def test_get_plugins(mock_get): result = get_plugins() assert len(result) == 2 assert result['package1'] == "0.2.7" assert result['package2'] == "0.1.0" @mock.patch( 'requests.get', return_value=FakeResponse(data=plugin) ) @mock.patch( 'backend.napari.get_plugins', return_value={'test': '0.0.1'} ) def test_get_plugin(mock_get, mock_plugins): result = get_plugin("test") assert(result["name"] == "test") assert(result["summary"] == "A test plugin") assert(result["description"] == "description") assert(result["description_content_type"] == "") assert(result["authors"] == [{'email': '<EMAIL>', 'name': '<NAME>'}]) assert(result["license"] == "BSD-3") assert(result["python_version"] == ">=3.6") assert(result["operating_system"] == ['Operating System :: OS Independent']) assert(result["release_date"] == '2020-04-13T03:37:20.169990Z') assert(result["version"] == "0.0.1") assert(result["first_released"] == "2020-04-13T03:37:20.169990Z") assert(result["development_status"] == ['Development Status :: 4 - Beta']) assert(result["requirements"] is None) assert(result["project_site"] == "https://github.com/test/test") assert(result["documentation"] == "") assert(result["support"] == "") assert(result["report_issues"] == "") assert(result["twitter"] == "") assert(result["code_repository"] == "https://github.com/test/test") @mock.patch( 'requests.get', return_value=FakeResponse(data=plugin) ) @mock.patch( 'backend.napari.get_plugins', return_value={'not_test': '0.0.1'} ) def test_get_invalid_plugin(mock_get, mock_plugins): assert({} == get_plugin("test")) def test_github_get_url(): plugins = {"info": {"project_urls": {"Source Code": "test1"}}} assert("test1" == get_download_url(plugins)) plugins = {"info": {"project_urls": {"Random": "https://random.com"}}} assert(get_download_url(plugins) is None) plugins = {"info": {"project_urls": {"Random": "https://github.com/org"}}} assert(get_download_url(plugins) is 
None) plugins = {"info": {"project_urls": {"Random": "https://github.com/org/repo/random"}}} assert("https://github.com/org/repo" == get_download_url(plugins)) license_response = """ { "name": "LICENSE", "path": "LICENSE", "license": { "key": "bsd-3-clause", "name": "BSD 3-Clause \\"New\\" or \\"Revised\\" License", "spdx_id": "BSD-3-Clause", "url": "https://api.github.com/licenses/bsd-3-clause" } } """ @mock.patch( 'requests.get', return_value=FakeResponse(data=license_response) ) def test_github_license(mock_get): result = get_license("test_website") assert result == "BSD-3-Clause" no_license_response = """ { "name": "LICENSE", "path": "LICENSE", "license": { "key": "other", "name": "Other", "spdx_id": "NOASSERTION", "url": null } } """ @mock.patch( 'requests.get', return_value=FakeResponse(data=no_license_response) ) def test_github_no_assertion_license(mock_get): result = get_license("test_website") assert result is None <file_sep>export * from './Accordion'; export * from './ColumnLayout'; export * from './Divider'; export * from './ErrorMessage'; export * from './Link'; export * from './Markdown'; export * from './Overlay'; export * from './TableOfContents'; export * from './TextHighlighter'; <file_sep>/** based on https://github.com/trevorblades/remark-typescript/ * and https://github.com/remarkjs/remark-external-links/ */ const visit = require('unist-util-visit'); function linkvars({ vars }) { return function transform(tree) { function visitor(node) { const data = node.data || (node.data = {}); const props = data.hProperties || (data.hProperties = {}); const { url } = node; let newURL = url; Object.entries(vars).forEach(([name, info]) => { const lookFor = `{${name}}`; if (newURL.includes(lookFor)) { newURL = newURL.replaceAll(lookFor, info.link); if (info.newTab) { props.target = '_blank'; props.rel = 'noreferrer'; } } }); node.url = newURL; } visit(tree, 'link', visitor); }; } module.exports = linkvars; <file_sep>module.exports = { rootDir: '..', 
setupFilesAfterEnv: ['<rootDir>/jest/setupTests.ts'], testMatch: ['<rootDir>/src/**/*.test.ts?(x)'], moduleNameMapper: { '^@/(.*)$': '<rootDir>/src/$1', /* `identity-obj-proxy` returns a string for whatever key you use, so we use it for SCSS modules since components use the exports to assign class names. */ '^.+\\.module\\.scss$': 'identity-obj-proxy', }, transform: { '\\.tsx?$': 'babel-jest', }, }; <file_sep>/* eslint-disable max-classes-per-file */ import Fuse from 'fuse.js'; import { maxBy } from 'lodash'; import { compareTwoStrings } from 'string-similarity'; import { PluginIndexData } from '@/types'; import { SearchEngine, SearchResult, SearchResultMatch } from './search.types'; /** * Minimum length a matched word must be to included in the match result. */ const MIN_WORD_LENGTH = 2; /** * Search engine using fuse.js. */ export class FuseSearchEngine implements SearchEngine { private fuse?: Fuse<PluginIndexData>; index(plugins: PluginIndexData[]): void { this.fuse = new Fuse(plugins, { /* Used to filter matches whose score is less than or equal to the threshold value: https://git.io/J3xRx */ threshold: 0.16, /* Finds matches in string regardless of location. This would have the most impact on searching the summary / description because without this, fuse would only be able to match with plugins that have the word at the beginning of the string. */ ignoreLocation: true, keys: [ { name: 'name', weight: 8, }, { name: 'summary', weight: 4, }, { name: 'authors.name', weight: 2, }, { name: 'description', weight: 1, }, ], /* Allow searching with extended search operators: https://fusejs.io/examples.html#extended-search */ useExtendedSearch: true, /* Include search result matches for text highlighting. */ includeMatches: true, }); } search(rawQuery: string): SearchResult[] { const query = rawQuery.trim(); const results = this?.fuse?.search(query) ?? 
[]; return results.map((result) => ({ matches: this.findMatches(query, result.matches), index: result.refIndex, plugin: result.item, })); } /** * Finds the most similar match from the Fuse.js matches. Similarity is * calculated using Dice-coefficient. * * @param query The search query * @param fuseMatches The fuse.js matches * @returns The most similar match */ private findMatches( query: string, fuseMatches?: readonly Fuse.FuseResultMatch[], ) { const matches: Partial<Record<string, SearchResultMatch>> = {}; // Populate matches dictionary with match indices and substrings. fuseMatches?.forEach((match) => { const { indices, value } = match; let { key } = match; if (!key || !value) { return; } const mostSimilarMatch = this.findMostSimilarMatch(query, value, indices); if (mostSimilarMatch) { // There can be multiple authors, so use the author's name as the key // instead. if (key === 'authors.name') { key = value; } const [start, end] = mostSimilarMatch; matches[key] = { start, end, match: value.slice(start, end + 1), }; } }); return matches; } /** * Filter matches that are at least `MIN_WORD_LENGTH` long. Only the most * similar match is used for highlighting in case there are multiple matches. * * Similarity is calculated using the Dice-coefficient from the * `string-similarity` package: * https://www.npmjs.com/package/string-similarity * * @param query The search query string * @param value The match value * @param indices The match indices * @returns The index pair for the most similar match */ private findMostSimilarMatch( query: string, value: string, indices: readonly Fuse.RangeTuple[], ) { return maxBy( indices.filter(([start, end]) => end - start >= MIN_WORD_LENGTH), ([start, end]) => // Return the string that is the most similar to the raw query. // Unfortunately, Fuse.js doesn't provide a similarity score // matches, so we have to compute it here. 
compareTwoStrings(query, value.slice(start, end + 1)), ); } } <file_sep>import '@testing-library/jest-dom'; import timezoneMock from 'timezone-mock'; // Mock window.location for every test case: // https://stackoverflow.com/a/57612279 const originalWindowLocation = window.location; beforeEach(() => { // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore delete window.location; // Reassign to regular object so that properties can be reassign window.location = { ...originalWindowLocation, }; }); // Mock timezone to have consistent test results in CI. beforeAll(() => { timezoneMock.register('US/Pacific'); }); jest.mock('next/router', () => ({ useRouter: jest.fn().mockReturnValue({ query: {}, }), })); jest.mock('@/context/spdx', () => ({ useSpdx: jest.fn().mockReturnValue({ isOSIApproved: jest.fn().mockReturnValue(false), }), })); <file_sep>{ // TODO Remove when overrides are addeded to `tsconfig.json` "extends": "../tsconfig.jest.json" } <file_sep>import { NextApiRequest, NextApiResponse } from 'next'; export default function healthCheck( _: NextApiRequest, res: NextApiResponse, ): void { res.status(200).json({ status: 'ok' }); } <file_sep>export interface SpdxLicenseResponse { licenseListVersion: string; licenses: SpdxLicenseData[]; releaseDate: string; } export interface SpdxLicenseData { reference: string; isDeprecatedLicenseId: boolean; detailsUrl: string; referenceNumber: number; name: string; licenseId: string; seeAlso: string[]; isOsiApproved: boolean; isFsfLibre?: boolean; } <file_sep>import { PluginIndexData } from '@/types'; /** * The interface a browser search engine class needs to implement to index * napari plugins and provide search results. */ export interface SearchEngine { /** * Index plugin data list. * * @param plugins List of plugin index data */ index(plugins: PluginIndexData[]): void; /** * Searches plugin index using query string. 
* * @param query The string to query */ search(query: string): SearchResult[]; } /** * Interface for search result matches. Matches are used for highlighting * matched words in the plugin data. */ export interface SearchResultMatch { /** * The start index of the matched substring. */ start: number; /** * The end index of the matched substring. */ end: number; /** * The substring extracted using the start and end indices. */ match: string; } /** * Generic interface for storing a search result. This is used to standardize * the search result object so that different libraries / APIs could be used if * necessary. */ export interface SearchResult { /** * Array position of plugin in plugin index array data. */ index: number; /** * The plugin data. */ plugin: PluginIndexData; /** * Dictionary storing the first text match for a particular key in the plugin * index data. */ matches: Partial<Record<string, SearchResultMatch>>; } <file_sep>import { LayoutMDX } from '@/components'; <LayoutMDX toc title="Privacy"> # Website Privacy Notice _Last updated: June 30, 2021_ ## We Collect and Use Limited Personal Information About You If you subscribe to our mailing list, we will collect personally identifiable information that you provide by signing up on our Website. We will use this mailing list to send you newsletters and other communications (e.g. announcements about developments). We are based in the US and we use a service provider to help us manage our mailing list. [_Learn more about Mailchimp's privacy practices here_](https://mailchimp.com/legal/privacy/). ### **Personal information that is collected:** - First and last name - Email address ## Our Website Analytics Tool Does Not Collect Personal Information About You We use the privacy-friendly Plausible as our website analytics tool. 
[_Learn more about Plausible’s data and privacy practices here_](https://plausible.io/data-policy) and [_view the public dashboard for napari-hub.org here_](https://plausible.io/napari-hub.org). ## We Do Not Host Any Plugins napari plugins are specialized Python packages. Our website exposes information about these packages that the authors have distributed on [_PyPI_](https://pypi.org/) based on publicly available data from [_PyPI_](https://pypi.org/) and [_Github_](https://github.com/). Learn more about [_PyPI’s data and privacy practices here_](https://www.python.org/privacy/), and view [_Github’s data and privacy practices here_](https://docs.github.com/en/github/site-policy/github-privacy-statement). - You can email us at <<EMAIL>> if you _do not_ want your plugin to appear on our website. When we pull information from these websites, it may also include the information provided in the author field (such as name and email address). We are unable to prevent disclosure of this information if it has been provided by the author to PyPI or Github. - If you _do not_ want the information in the author field to be displayed, please remove that information from the source website. ## Sharing Data - We will never sell your data or share it with anyone that will sell your data. - We may share your data with service providers in order to provide services (see above) and where we believe it is legally required or useful to protect or defend our rights or property or others; and/or to investigate or assist in preventing any violation of the law or harm to a person. ## Your Rights - Applicable laws may provide you rights with request to your personal data, such as the right to access and/or delete your data, object to our use of your data, or revoke your consent to our use of your data. You can email us at <<EMAIL>> to request to exercise your rights. 
</LayoutMDX> <file_sep>export type Styles = { accordion: string; expanded: string; expandIcon: string; }; export type ClassNames = keyof Styles; declare const styles: Styles; export default styles; <file_sep>import { Page } from 'playwright'; import { SEARCH_PAGE, SearchQueryParams } from '@/context/search'; function getFirstSearchResultName() { return page.$('[data-testid=searchResultName]'); } function getSearchURL(query = '') { const url = new URL(SEARCH_PAGE, 'http://localhost:8080'); if (query) { url.searchParams.set(SearchQueryParams.Search, query); } return url.toString(); } function hasSearchParam(page: Page, query: string) { const param = `${SearchQueryParams.Search}=${query}`; return page.url().includes(param); } async function submitQuery(query: string) { await page.fill('[data-testid=searchBarInput]', query); await page.press('[data-testid=searchBarInput]', 'Enter'); } async function getSelectedSortByRadio() { return page.$('[data-testid=sortByRadio][data-selected=true]'); } describe('/ (Home page)', () => { beforeEach(async () => { await jestPlaywright.resetPage(); }); it('should update URL parameter when entering query', async () => { const query = 'video'; await page.goto(getSearchURL()); await submitQuery(query); expect(hasSearchParam(page, query)).toBe(true); }); it('should render search results for query', async () => { await page.goto(getSearchURL()); await submitQuery('video'); await expect(await getFirstSearchResultName()).toHaveText('napari_video'); }); it('should render search results when opening URL with query', async () => { await page.goto(getSearchURL('video')); await expect(await getFirstSearchResultName()).toHaveText('napari_video'); }); it('should render original list when query is cleared', async () => { await page.goto(getSearchURL('video')); await page.click('[data-testid=clearQueryButton]'); await expect(await getFirstSearchResultName()).not.toHaveText( 'napari_video', ); }); it('should clear query when clicking on app bar home 
link', async () => { await page.goto(getSearchURL('video')); await page.click('[data-testid=appBarHome]'); await expect(await getFirstSearchResultName()).not.toHaveText( 'napari_video', ); }); it('should redirect to search page when searching from another page', async () => { await page.goto('http://localhost:8080/about'); await submitQuery('video'); await page.waitForNavigation(); expect(page.url()).toEqual(getSearchURL('video')); await expect(await getFirstSearchResultName()).toHaveText('napari_video'); }); it('should maintain search query when navigating back', async () => { const query = 'video'; await page.goto(getSearchURL()); await submitQuery(query); await page.click('[data-testid=searchResult]'); await page.waitForNavigation(); await page.goBack(); await page.waitForNavigation(); expect(hasSearchParam(page, query)).toBe(true); await expect(await getFirstSearchResultName()).toHaveText('napari_video'); }); it('should switch to relevance sort type when searching', async () => { await page.goto(getSearchURL()); await expect(await getSelectedSortByRadio()).toHaveText('Release date'); await submitQuery('video'); await expect(await getSelectedSortByRadio()).toHaveText('Relevance'); }); }); <file_sep># Technical Specifications The purpose of this document is to provide technical details in order to assist in engineering implementation for napari hub. ## Scope for v0 ### Pages #### Landing page * Has some welcome copy and other info. * User can search and browse as described in the search/list/browse view. * Includes Email list signup. More details for this feature defined under Features: Mailing List #### Search/List/Browse view * Ability for users to search for a plugin by words (i.e. global search bar with plugins as results) * Ability to browse existing plugins with additional filter and sort functionality * Technical requirements: * Page and plugin data should load in a reasonable amount of time. 
* Search/list functionality should be able to support low 100s of plugins for the first version. * Search inputs should have form validation. * Search form fields: * Search [input/textbox] - Fields to search are defined in [this table](https://github.com/chanzuckerberg/napari-hub/wiki/Customizing-your-plugin's-listing#fields). We will use all items from the “Searched” column. More details for this feature defined under Features: Global Search Bar. * Filter [checkboxes] - Fields to filter are defined in [this table](https://github.com/chanzuckerberg/napari-hub/wiki/Customizing-your-plugin's-listing#fields). We will use all items from the “Filterable” column. More details for this feature defined under Features: Filter/Sort * Sort [dropdown/radio buttons] - Fields to sort are defined in [this table](https://github.com/chanzuckerberg/napari-hub/wiki/Customizing-your-plugin's-listing#fields). We will use all items from the “Sortable” column. More details for this feature defined under Features: Filter/Sort * Plugin list data to display * Fields to display are defined in [this table](https://github.com/chanzuckerberg/napari-hub/wiki/Customizing-your-plugin's-listing#fields). We will use all items from the “List view” column * Link to the details page * We will add an “exclude” list for plugins to exclude from the list. The exclusions will be stored in a file and plugin data returned will not include those excluded plugins. * Actions (what happens when a user clicks on each link, button, etc.) * Will be defined in the final Figma design/prototype #### Details view * Ability to learn more about each plugin (i.e. on a page of its own) * Content sourced from pypi & available sources linked from there (such as GitHub) * Includes a call to action to install the plugin. More details of this feature defined in Features: Install “CTA” * Technical requirements: * Page should be built as a reusable template and only data will change dynamically. 
* Page should load in a reasonable amount of time.
* Plugin data to display from Pypi JSON API or GitHub:
  * Fields to display are defined in [this table](https://github.com/chanzuckerberg/napari-hub/wiki/Customizing-your-plugin's-listing#fields). We will use all items from the “Full view” column.
  * Flags for engineering:
    * Description - will need to be parsed from Markdown format or possibly other formats like plain text or rst. For v0, we will support markdown and plain text formats only.
    * “GitHub source” column data will be sourced from files within a .napari folder as defined in this document
* Actions (what happens when a user clicks on each link, button, etc.)
  * “Install” button - Details defined in Features: Install “CTA”

#### Supplemental pages

* About
* Contact
* Privacy Policy
* FAQ

### Overall site

Technical requirements:

* Site traffic: Site should be able to support 200 unique visitors / 1,000 page views per day at the bare minimum
* Error pages: Show a friendly error page if there is a site error. This should not happen very often, but in case it does, the user should see a friendly error page.
* Analytics tracking code: We will use Plausible without the need for cookie banners
* Deployment: work with CZI Infra’s modules for deployment to ecs cluster, we will ship our front end in a docker container
* SEO: For v0, we should give minimal attention to SEO best practices. e.g. the title should include the plugin name. Each plugin should get a unique URL that includes the plugin name. The site should be readable by Google and Bing.

### Features

#### Global Search Bar

##### Technical decision

Filter on the client-side using a JS indexing library like fuse.js, lunr or minisearch. After research, we decided to use fuse.js.

Pros: Quick search for medium-sized data and provides more complex search capabilities like fuzzy search out of the box.

Cons: Requires developer time to get familiar with the libraries and how to implement them.
Libraries are fixed in what they can support, but they should be good enough for our needs. ##### Technical requirements * Search is powered by [Fuse.js](https://fusejs.io/), a client-side fuzzy search engine. * [Options](https://fusejs.io/api/options.html) are configured in [engines.ts](../frontend/src/context/search/engines.ts#L23-L68). Fuzziness is primarily controlled by the `threshold` option. * Whenever the user loads the search page, the plugin list is fetched from the backend and used to index the search engine in the browser. * Results are ordered based on their [Fuse.js score](https://fusejs.io/concepts/scoring-theory.html), which is computed using a modified version of the [Bitap](https://en.wikipedia.org/wiki/Bitap_algorithm) algorithm. * The following plugin [metadata](./customizing-plugin-listing.md) is used for the index. The number represents its [weight](https://fusejs.io/examples.html#weighted-search) used for scoring: * Name (8) * Summary (4) * Author Name (2) * Description (1) * The search/filter/sort query should be encoded in the URL. User should be able to navigate to another page such as the details page and when returning to the page with filters, the page should remember the filters and sort and load that state. * This will enable linking to specific search results from elsewhere on the site (e.g. link to all plugins with a certain tag by clicking the tag name on the plugin detail page) * This will allow users to easily share search results with a copy+paste of the URL #### napari-specific readme template Details on how to customize the plugin listing using this feature can be found on the [wiki](https://github.com/chanzuckerberg/napari-hub/wiki/Customizing-your-plugin's-listing). #### Manual input of plugin contributor name(s) and description Details on how to customize the plugin listing using this feature can be found on the [wiki](https://github.com/chanzuckerberg/napari-hub/wiki/Customizing-your-plugin's-listing). 
#### Install “CTA” We will provide a call to action which will trigger a means to install a plugin. For the first version, installation may be several steps which include copying and pasting an installation command into the napari viewer where future versions may be a “one click” solution which opens up the napari viewer automatically and starts the installation of the plugin. There are further considerations to explore in the workflow of the user journey and engineering options to explore so this will not be the target for the first version. Implementation on the engineering side should be low for this first version. Decision for v0: The "install" CTA on the plugin detail page should pop up a modal that gives template-generated instructions for installation. Modal is subject to the responsive design. #### Filter/Sort We will provide users with the ability to filter and sort plugins on the search/browse/list page. Filter and sorting are two different types of features that will be used in conjunction with one another to provide a better discovery experience. Fields to use are defined in [this table](https://github.com/chanzuckerberg/napari-hub/wiki/Customizing-your-plugin's-listing#fields). We will use all items from the “Filterable” and "Sortable" columns. We should keep in mind that filters may be added or removed based on user feedback, and we should be able to provide a solution that will scale both in design and implementation. ##### Technical decision Use client-side filtering and sorting. Add query string parameters to the URL without reloading the entire page which will allow the user to reload the filtered state when needed. Keep in mind that we may need to provide a more robust filter/sort functionality after v0 as we grow our plugins to use an indexing service. 
##### Technical requirements * The user should have the ability to: * Add filters * Refreshes the list to filter by the selected filter * Remove filters * Refreshes the list to filter without the removed filter * Clear all filters * Refreshes the list with no filters applied * Select a sort option * Refreshes the list sorted by the user’s selection * The search/filter/sort query should be encoded in the URL. User should be able to navigate to another page such as the details page and when returning to the page with filters, the page should remember the filters and sort and load that state. * This will enable linking to specific search results from elsewhere on the site (e.g. link to all plugins with a certain tag by clicking the tag name on the plugin detail page) * This will allow users to easily share search results with a copy+paste of the URL * User should be able to select filters or sort actions without the full page refreshing in a reasonable amount of time. #### Mailing List We will collect user data via a subscription form with the purpose of sending out future updates to subscribers. ##### Technical decision Use Mailchimp forms and collect data within Mailchimp. ##### Technical requirements * Form fields: Email address * Form validation: Required field error, email address validation * Email notification goes to admin when user signs up and subscriber gets added to list in Mailchimp. ### Test Plan #### Frontend Testing * The frontend will be tested using a variety of methods to verify correctness * Unit and E2E test support added in https://github.com/chanzuckerberg/napari-hub/pull/3 * Unit / Integration / Snapshot Tests * PRs ideally should include tests for new code * Components should include snapshot tests to prevent unintentionally updating the UI * Unit tests should focus on specific units: components, hooks, utils, etc. 
* Integration tests should test multiple components, hooks, utils working together * There should be more unit tests / snapshot tests than integration tests * There should be a GitHub workflow that runs tests for PRs to prevent merging failing tests * End-To-End Tests * PRs should ideally include E2E tests for things like common actions, high value features, and features that depend on the backend * Can be used to promote staging to production: * Staging site can’t be promoted to production unless the E2E tests pass * Browser Testing * Developers should ideally test on as many browsers they can (Chrome, Safari, Firefox are Edge if developer has a Windows computer) * BrowserStack is a possible solution for testing multiple browsers * Would be nice to set up E2E tests for multiple browsers * Playwright has experimental support * It’s possible to test Edge since it’s a webkit browser * Lighthouse Tests (Extra) * Need to set up a GitHub workflow to run Lighthouse CI for every PR * This will help reduce regressions in accessibility, performance, and SEO * Highlight improvements in aforementioned areas #### Backend testing * API functional testing * Unit test for api setup for different endpoints * API Load testing * Confirm traffic we can handle in the current setup * Health test * Health check for the http server ### Release Plan First version of the site was released on production on June 30, 2021. Subsequent major and minor versions to follow. #### Domain name Decision: We will use napari-hub.org with naparihub.org redirecting to napari-hub.org #### Hosting strategy * We will use CZI hosted AWS since it will allow us more flexibility, is more future proof than GitHub pages, and we have a budget for services. We can take advantage of AWS services like Lambda and load balancing. * We will work with CZI Infra Team to understand the components they have available for use. Terraform will be used. 
Option to open source the Terraform files after release is still an open question on how best to do this. #### Staging site * We should have a separate staging site for testing * We will likely put up a coming soon page on the production site until release ### Solutions #### API Deploy AWS lambda (to avoid machine level maintenance) to support API where we parse pypi result with a frequency of 5min (configurable, fine tune later between 1min to 1 hour), and cache to aws s3. We will use the public google bigquery dataset as a backup data source when the main query fails (this should also trigger an alert to our team channel) to indicate that the lambdas have switched to the backup approach, eng team is responsible to react to such alerts and make adjustment (be it pypi outage or pypi website structure change to address the failure and bring the parsing back online). The API to be built should return a list of plugin names, in the format of “[“plugin_1”, “plugin_2]”, where each individual plugin name can then be used to query pypi API. ##### endpoints * /plugins * return list of plugin and versions * /plugins/index * return all plugins’ metadata used in main page * /plugins/{plugin_name} * return metadata for a plugin * /plugins/{plugin_name}/version/{version}: * return metadata for a specific version of plugin #### Overall back-end technology: We decided to use AWS which we will use services like Lambda, S3 and API Gateway. A software architecture diagram can be found [here](https://lucid.app/lucidchart/1ad7e89e-7265-4a04-b37b-135e7a57432c/view). #### Overall front-end technology: After building some proof of concepts and testing, we decided to use React + Next.js and TailwindCSS. ## Publishing of plugins * To be transparent and not require an approval process as well as share the responsibility to flagging bad plugins, we will publish activity to the hub-updates stream on the [napari Zulip](https://napari.zulipchat.com). 
Activity will be published for brand-new plugins, new versions of plugins as well as plugin removals. ## Further Considerations * Since the first version will be very minimal, consider future features that may affect the chosen technologies and engineering design. * Even though the first version will support a few plugins, we should have a plan in order to scale and support potentially 1,000s of plugins in future versions. * We will use Plausible for analytics for the first version. In future releases, we may need more detailed analytics and should provide this without the use of a cookie banner. * Impact on other teams/projects * Depending on our solution to getting the plugin list data, this could be something that the napari desktop application could use as well. This may help to keep consistent. * User support For feature requests, please file [a new discussion under the ideas category](https://github.com/chanzuckerberg/napari-hub/discussions/categories/ideas). For bug reports, please file [a new issue](https://github.com/chanzuckerberg/napari-hub/issues). ### Milestones * Milestone April 5th: Plugin Page Design Complete and Ready to Build * Milestone May 6th: Discovery Page Design Complete and Ready to Build * Milestone June 30th: plugin discovery site alpha/v0 release ### Future work * First version will be minimal * We will iteratively add new features and get feedback from users in a continuous cycle of future releases <file_sep>export * from './spdx.context'; export * from './spdx.types'; <file_sep>import { ReactNode } from 'react'; import { ReactElement } from 'react-markdown'; /** * Checks if a React node is an element. An element is an instantiated * component. 
* * @param node The React node * @returns True if node is an element */ export function isReactElement(node: ReactNode): node is ReactElement { const element = node as ReactElement | null | undefined; return !!(element?.type && element?.props); } <file_sep>/** * Shared module for Tailwind and Material UI. This is written in JS so that the * Tailwind config can import it in a Node.js environment. */ const createMuiTheme = require('@material-ui/core/styles/createMuiTheme') .default; const colors = { primary: '#80d1ff', hover: '#98daff', hoverGray: '#f7f7f7', gray: '#808080', light: '#d2efff', error: '#eb1000', }; const breakpoints = { // Special screen size for fresnel when using `lessThan` queries. zero: 0, 'screen-300': 300, // if there's a smaller breakpoint, update global.scss 'screen-375': 375, 'screen-495': 495, 'screen-550': 550, 'screen-560': 560, 'screen-600': 600, 'screen-655': 655, 'screen-725': 725, 'screen-875': 875, 'screen-1150': 1150, 'screen-1425': 1425, // TODO Remove when breakpoint existing usages are renamed. xs: 300, sm: 375, md: 495, lg: 600, xl: 875, '2xl': 1150, '3xl': 1425, }; const fontFamily = ['Barlow', 'sans-serif']; const theme = createMuiTheme({ palette: { primary: { main: colors.primary, light: colors.light, error: colors.error, }, }, shape: { // Disable rounded borders for buttons, dialogs, etc. borderRadius: 0, }, typography: { fontFamily: fontFamily.join(','), button: { // Remove uppercase styling from buttons textTransform: 'none', }, }, }); module.exports = { breakpoints, colors, fontFamily, theme, }; <file_sep>import type { FilterForm } from './filter.hooks'; import type { SearchForm } from './search.hooks'; import { SearchResult } from './search.types'; import type { SortForm } from './sort.hooks'; /** * Function that transforms a list of search results into a different list of * search results after sorting or filtering. 
 */
export type SearchResultTransformFunction = (
  results: SearchResult[],
) => SearchResult[];

/**
 * Root search state.
 */
export interface SearchState {
  filter: FilterForm;
  search: SearchForm;
  results: SearchResult[];
  sort: SortForm;
}
<file_sep>import { renderHook } from '@testing-library/react-hooks';

import { useSpdx } from '@/context/spdx';
import pluginIndex from '@/fixtures/index.json';
import { PluginIndexData } from '@/types';

import { FilterFormState } from './filter.types';
import { getDefaultState } from './filter.utils';
import { useFilterResults } from './filters';
import { SearchResult } from './search.types';

// Wraps raw plugin index entries in minimal SearchResult objects. The `index`
// and `matches` fields are irrelevant to filtering, so they are zeroed out.
function getResults(...plugins: PluginIndexData[]): SearchResult[] {
  return plugins.map((plugin) => ({
    index: 0,
    matches: {},
    plugin,
  }));
}

// Builds results from the first fixture plugin, varying only the
// `python_version` specifier per result.
function getVersionResults(...versions: string[]): SearchResult[] {
  const plugins = versions.map((python_version) => ({
    ...pluginIndex[0],
    python_version,
  }));

  return getResults(...plugins);
}

// Builds results from the first fixture plugin, varying only the
// `operating_system` classifier list per result.
function getOperatingSystemResults(
  ...operatingSystems: string[][]
): SearchResult[] {
  const plugins = operatingSystems.map((operating_system) => ({
    ...pluginIndex[0],
    operating_system,
  }));

  return getResults(...plugins);
}

// Builds results from the first fixture plugin, varying only the
// `development_status` classifier list per result.
function getDevStatusResults(...devStatuses: string[][]): SearchResult[] {
  const plugins = devStatuses.map((development_status) => ({
    ...pluginIndex[0],
    development_status,
  }));

  return getResults(...plugins);
}

// Builds results from the first fixture plugin, varying only the `license`
// identifier per result.
function getLicenseResults(...licenses: string[]): SearchResult[] {
  const plugins = licenses.map((license) => ({
    ...pluginIndex[0],
    license,
  }));

  return getResults(...plugins);
}

describe('filterResults()', () => {
  let state: FilterFormState;

  beforeEach(() => {
    // Reset the form state and the useSpdx mock so cases don't leak into each
    // other.
    state = getDefaultState();
    (useSpdx as jest.Mock).mockClear();
  });

  describe('filter by python versions', () => {
    it('should allow all plugins when no filters are enabled', () => {
      const results = getVersionResults('>=3.10', '>=3.9');
      const { result } = renderHook(() => useFilterResults(results, state));
expect(result.current).toEqual(results); }); it("should filter plugins that don't match an exact version", () => { const testCases = [ { input: getVersionResults('>=3.10', '==3.9'), output: getVersionResults('==3.9'), }, { input: getVersionResults('>=3.10', '>=3.9'), output: getVersionResults('>=3.9'), }, { input: getVersionResults('>=3.10', '>3.8'), output: getVersionResults('>3.8'), }, { input: getVersionResults('>=3.10', '<3.10'), output: getVersionResults('<3.10'), }, { input: getVersionResults('>=3.10', '<3.10,!=3.9'), output: [], }, ]; state.pythonVersions['3.9'] = true; testCases.forEach(({ input, output }) => { const { result } = renderHook(() => useFilterResults(input, state)); expect(result.current).toEqual(output); }); }); }); describe('filter by operating systems', () => { it('should allow all plugins when no filters are enabled', () => { const results = getOperatingSystemResults( ['Operating System :: OS Independent'], ['Operating System :: POSIX :: Linux'], ); const { result } = renderHook(() => useFilterResults(results, state)); expect(result.current).toEqual(results); }); it('should allow OS Independent plugins', () => { const results = getOperatingSystemResults( ['Operating System :: OS Independent'], ['Operating System :: POSIX :: Linux'], ); state.operatingSystems.mac = true; const { result } = renderHook(() => useFilterResults(results, state)); expect(result.current).toEqual( getOperatingSystemResults(['Operating System :: OS Independent']), ); }); it('should filter operating systems', () => { const results = getOperatingSystemResults( ['Operating System :: Microsoft :: Windows :: Windows 10'], ['Environment :: MacOS X'], ['Operating System :: POSIX :: Linux'], ['Environment :: MacOS X', 'Operating System :: POSIX :: Linux'], ); const testCases = [ { input: { mac: true }, output: getOperatingSystemResults( ['Environment :: MacOS X'], ['Environment :: MacOS X', 'Operating System :: POSIX :: Linux'], ), }, { input: { linux: true }, output: 
getOperatingSystemResults( ['Operating System :: POSIX :: Linux'], ['Environment :: MacOS X', 'Operating System :: POSIX :: Linux'], ), }, { input: { windows: true }, output: getOperatingSystemResults([ 'Operating System :: Microsoft :: Windows :: Windows 10', ]), }, ]; testCases.forEach(({ input, output }) => { state = getDefaultState(); Object.assign(state.operatingSystems, input); const { result } = renderHook(() => useFilterResults(results, state)); expect(result.current).toEqual(output); }); }); }); describe('filter by development status', () => { const results = getDevStatusResults( ['Development Status :: 1 - Planning'], ['Development Status :: 2 - Pre-Alpha'], ['Development Status :: 5 - Production/Stable'], ['Development Status :: 6 - Mature'], ['Development Status :: 7 - Inactive'], ); it('should allow all plugins when no filters are enabled', () => { const { result } = renderHook(() => useFilterResults(results, state)); expect(result.current).toEqual(results); }); it('should filter stable plugins', () => { state.developmentStatus.onlyStablePlugins = true; const expected = getDevStatusResults( ['Development Status :: 5 - Production/Stable'], ['Development Status :: 6 - Mature'], ); const { result } = renderHook(() => useFilterResults(results, state)); expect(result.current).toEqual(expected); }); }); describe('filter by license', () => { const results = getLicenseResults('valid', 'invalid'); it('should allow all plugins when no filters are enabled', () => { const { result } = renderHook(() => useFilterResults(results, state)); expect(result.current).toEqual(results); }); it('should filter plugins with open source licenses', () => { type F = typeof useSpdx; type P = Parameters<F>; type R = ReturnType<F>; (useSpdx as jest.Mock<R, P>).mockReturnValueOnce({ isOSIApproved: jest .fn() .mockImplementationOnce((license: string) => license === 'valid'), }); state.license.onlyOpenSourcePlugins = true; const { result } = renderHook(() => useFilterResults(results, 
state)); expect(result.current).toEqual(getLicenseResults('valid')); }); }); }); <file_sep>/* eslint-disable no-console */ const chalk = require('chalk'); const { readFileSync, writeFileSync } = require('fs'); function componentGenerator(plop) { /** * Adds export to the `component/index.ts` file so that pages can import the * component from `@/components`. */ const addComponentExport = (data) => { const file = 'src/components/index.ts'; let content = readFileSync(file, 'utf-8'); // Add the export to the file content += plop.renderString( `export * from './{{ pascalCase name }}';`, data, ); // Sort exports by component name content = content.split('\n').sort().join('\n'); // Add a newline because POSIX :) content += '\n'; writeFileSync(file, content); return `Added component export to ${chalk.cyan(file)} `; }; plop.setGenerator('component', { description: 'Create a new component', prompts: [ { type: 'input', name: 'name', message: 'What is the name of the component?', }, ], actions: [ { type: 'add', path: 'src/components/{{ pascalCase name }}/index.ts', templateFile: 'plop-templates/component/index.ts.hbs', }, { type: 'add', path: 'src/components/{{ pascalCase name }}/{{ pascalCase name }}.tsx', templateFile: 'plop-templates/component/Component.tsx.hbs', }, { type: 'add', path: 'src/components/{{ pascalCase name }}/{{ pascalCase name }}.test.tsx', templateFile: 'plop-templates/component/Component.test.tsx.hbs', }, { type: 'add', path: 'src/components/{{ pascalCase name }}/{{ pascalCase name }}.module.scss', templateFile: 'plop-templates/component/Component.module.scss.hbs', }, { type: 'add', path: 'src/components/{{ pascalCase name }}/{{ pascalCase name }}.module.scss.d.ts', templateFile: 'plop-templates/component/Component.module.scss.d.ts.hbs', }, addComponentExport, ], }); } function pageGenerator(plop) { plop.setGenerator('page', { description: 'Create a new page', prompts: [ { type: 'input', name: 'name', message: 'What is the name of the page?', }, { type: 
'confirm', name: 'isMarkdown', message: 'Is it a Markdown page?', default: false, }, ], actions(data) { const actions = []; if (data.isMarkdown) { actions.push({ type: 'add', path: 'src/pages/{{ dashCase name }}.tsx', templateFile: 'plop-templates/page/page.tsx.hbs', }); } else { actions.push({ type: 'add', path: 'src/pages/{{ dashCase name }}.mdx', templateFile: 'plop-templates/page/page.mdx.hbs', }); } return actions; }, }); } function plopfile(plop) { const generators = [componentGenerator, pageGenerator]; generators.forEach((generator) => generator(plop)); } module.exports = plopfile; <file_sep>/** * Module that contains the filter function implementations used by the filter hooks. */ import { satisfies } from '@renovate/pep440'; import { flow, intersection, isEmpty, some } from 'lodash'; import { useSpdx } from '@/context/spdx'; import { FilterFormState, OperatingSystemFormState } from './filter.types'; import { SearchResult } from './search.types'; import { SearchResultTransformFunction } from './types'; function useFilterByPythonVersion( state: FilterFormState, results: SearchResult[], ): SearchResult[] { // Collect all versions selected on the filter form const selectedVersions = Object.entries(state.pythonVersions) .filter(([, enabled]) => enabled) .map(([version]) => version); if (isEmpty(selectedVersions)) { return results; } // Filter results that satisfy the enabled versions return results.filter(({ plugin }) => selectedVersions.some((version) => // Plugin version can be a specifier, so we need to check if any of the // selected versions match the plugin specifier. 
      satisfies(version, plugin.python_version),
    ),
  );
}

/**
 * Maps each operating system checkbox on the filter form to a regex matched
 * against the plugin's `operating_system` classifier strings.
 */
const FILTER_OS_PATTERN: Record<keyof OperatingSystemFormState, RegExp> = {
  linux: /Linux/,
  mac: /MacOS/,
  windows: /Windows/,
};

/**
 * Filters results to plugins supporting at least one of the operating systems
 * enabled on the filter form. Plugins classified as OS Independent always
 * pass, as do all plugins when no OS checkbox is enabled.
 *
 * @param state The filter form state
 * @param results The search results
 * @returns The filtered search results
 */
function useFilterByOperatingSystem(
  state: FilterFormState,
  results: SearchResult[],
): SearchResult[] {
  return results.filter(({ plugin }) => {
    // Don't filter if plugin supports all operating systems
    if (plugin.operating_system.some((os) => os.includes('OS Independent'))) {
      return true;
    }

    // Don't filter if none of the checkboxes are enabled
    if (!some(state.operatingSystems, (enabled) => enabled)) {
      return true;
    }

    // Keep the plugin if any of its OS classifiers matches the regex for any
    // enabled OS checkbox.
    return plugin.operating_system.some((os) =>
      some(state.operatingSystems, (enabled, osKey) => {
        if (enabled) {
          const pattern =
            FILTER_OS_PATTERN[osKey as keyof OperatingSystemFormState];
          return !!pattern.exec(os);
        }

        return false;
      }),
    );
  });
}

/**
 * Trove classifiers that mark a plugin as stable for the "only stable
 * plugins" filter.
 */
const STABLE_DEV_STATUS = [
  'Development Status :: 5 - Production/Stable',
  'Development Status :: 6 - Mature',
];

/**
 * Filters results to stable plugins when the "only stable plugins" checkbox
 * is enabled; otherwise returns the results unchanged.
 *
 * @param state The filter form state
 * @param results The search results
 * @returns The filtered search results
 */
function useFilterByDevelopmentStatus(
  state: FilterFormState,
  results: SearchResult[],
): SearchResult[] {
  if (!state.developmentStatus.onlyStablePlugins) {
    return results;
  }

  // Filter plugins that include at least one of the stable dev statuses.
  return results.filter(
    ({ plugin }) =>
      !isEmpty(intersection(STABLE_DEV_STATUS, plugin.development_status)),
  );
}

/**
 * Filters results to plugins whose license is OSI approved when the "only
 * open source plugins" checkbox is enabled; otherwise returns the results
 * unchanged.
 *
 * @param state The filter form state
 * @param results The search results
 * @returns The filtered search results
 */
function useFilterByLicense(
  state: FilterFormState,
  results: SearchResult[],
): SearchResult[] {
  // Hook call happens before the early return so it runs unconditionally on
  // every render, as required by the Rules of Hooks.
  const { isOSIApproved } = useSpdx();

  if (!state.license.onlyOpenSourcePlugins) {
    return results;
  }

  return results.filter(({ plugin }) => isOSIApproved(plugin.license));
}

/**
 * List of functions to include for filtering search results.
 */
const FILTERS = [
  useFilterByPythonVersion,
  useFilterByOperatingSystem,
  useFilterByDevelopmentStatus,
  useFilterByLicense,
];

/**
 * Executes all filter functions on the search results with each result passed
 * to the next function.
* * @param results The search results * @param state The filter form state * @returns The filtered search results */ export function useFilterResults( results: SearchResult[], state: FilterFormState, ): SearchResult[] { // `flow()` will execute a list of functions and provide successive results to // each function: // https://lodash.com/docs/4.17.15#flow const useFilter: SearchResultTransformFunction = flow( FILTERS.map((fn) => fn.bind(null, state)), ); return useFilter(results); } <file_sep>export * from './CheckboxIcon'; export * from './Close'; export * from './Copy'; export * from './CZI'; export * from './Expand'; export * from './GitHub'; export * from './Hub'; export * from './icons.type'; export * from './Menu'; export * from './NapariLogo'; export * from './ProjectDocumentation'; export * from './ProjectIssues'; export * from './ProjectSite'; export * from './ProjectSupport'; export * from './Search'; export * from './Twitter'; <file_sep>#!/bin/bash export AWS_REGION=us-west-2 export AWS_DEFAULT_REGION=us-west-2 export AWS_ACCESS_KEY_ID=nonce export AWS_SECRET_ACCESS_KEY=nonce export FRONTEND_URL=http://frontend.naparinet.local:8080 export BACKEND_URL=http://backend.naparinet.local:5000 # NOTE: This script is intended to run INSIDE the dockerized dev environment! # If you need to run it directly on your laptop for some reason, change # localstack below to localhost export LOCALSTACK_URL=http://localstack.naparinet.local:4566 echo -n "waiting for localstack to be ready: " until $(curl --output /dev/null --silent --head ${LOCALSTACK_URL}); do echo -n '.' 
sleep 1 done echo " done" echo "Creating s3 bucket secrets" local_aws="aws --endpoint-url=${LOCALSTACK_URL}" ${local_aws} s3api create-bucket --bucket imaging-test-napari-hub &>/dev/null || true cat << EOF | ${local_aws} s3 cp - s3://imaging-test-napari-hub/excluded_plugins.json &>/dev/null || true { "napari-demo":null, "napari-cellfinder":null, "napari-brainreg":null } EOF echo echo "Dev env is up and running!" echo " Frontend: ${FRONTEND_URL}" echo " Backend: ${BACKEND_URL}" <file_sep>import { ReactNode } from 'react'; /** * Helper type utility for describing a type that is either a singular or * array-like value. */ type ArrayOrSingleValue<T> = T | T[]; /** * Interface for metadata items that are links. */ export interface MetadataItemLink { href: string; icon: ReactNode; missingIcon?: ReactNode; text: string; } /** * Possible metadata values. */ export type MetadataValueTypes = string | MetadataItemLink; /** * Metadata values can be either a single value or an array. */ export type MetadataValue = ArrayOrSingleValue<MetadataValueTypes>; /** * Metadata item to render in the MetadataList component. 
*/ export interface MetadataItem { title: string; value: MetadataValue; } <file_sep>import { useEffect, useMemo, useRef, useState } from 'react'; import { usePrevious } from 'react-use'; import { StringParam, useQueryParam, withDefault } from 'use-query-params'; import { useActiveURLParameter, usePlausible } from '@/hooks'; import { PluginIndexData } from '@/types'; import { Logger } from '@/utils/logger'; import { measureExecution } from '@/utils/performance'; import { DEFAULT_SORT_TYPE, SearchQueryParams, SearchSortType, } from './constants'; import { FuseSearchEngine } from './engines'; import { SearchEngine, SearchResult } from './search.types'; import { SortForm } from './sort.hooks'; const logger = new Logger('search.hooks.ts'); function getDefaultSearchEngine() { return new FuseSearchEngine(); } /** * Hook that creates the search engine instance for a given plugin index. * * @param index The list of plugins for indexing * @param getSearchEngine Function for creating a search engine. * @returns The search engine instance */ function useSearchEngine( index: PluginIndexData[], getSearchEngine: () => SearchEngine = getDefaultSearchEngine, ): SearchEngine | null { const [engine, setEngine] = useState<SearchEngine | null>(null); // Create new search engine whenever the index changes. useEffect(() => { const searchEngine = getSearchEngine(); const { duration } = measureExecution(() => searchEngine.index(index)); logger.debug('search index duration:', duration); setEngine(searchEngine); }, [index, getSearchEngine]); return engine; } function usePlausibleEvents(query?: string) { const plausible = usePlausible(); const prevQuery = usePrevious(query); useEffect(() => { if (query && query !== prevQuery) { plausible('Search'); } }, [plausible, prevQuery, query]); } /** * Hook that returns a new list of plugins when the search query updates. 
 *
 * @param engine The search engine instance
 * @param query The query string
 * @param index The plugin index
 * @returns The filtered plugins
 */
function useSearchResults(
  engine: SearchEngine | null,
  query: string,
  index: PluginIndexData[],
): SearchResult[] {
  // Use `useMemo()` to only compute search results when the query changes.
  // Without it, React will execute the search query multiple times even if the
  // query hasn't changed.
  const plugins = useMemo(() => {
    // If the search engine hasn't been created yet or if the query is empty,
    // then return the full list of plugins.
    if (!engine || !query) {
      return index.map<SearchResult>((plugin, pluginIndex) => ({
        plugin,
        index: pluginIndex,
        matches: {},
      }));
    }

    // Time the search so the duration can be logged for debugging.
    const { duration, result: results } = measureExecution(() =>
      engine.search(query),
    );

    logger.debug('plugin search:', {
      query,
      results,
      duration,
    });

    return results;
  }, [engine, index, query]);

  // Report a Plausible "Search" event when a new non-empty query is entered.
  usePlausibleEvents(query);

  return plugins;
}

/**
 * Hook that manages the search query form state, backed by the search query
 * URL parameter so that the query survives navigation and can be shared by
 * URL.
 *
 * @returns The current query and functions to set or clear it
 */
function useForm() {
  const initialQuery = useActiveURLParameter(SearchQueryParams.Search);
  const [query, setQuery] = useQueryParam(
    SearchQueryParams.Search,
    withDefault(StringParam, undefined),
  );

  // Seed the query state from the URL parameter present on initial load.
  useEffect(() => {
    setQuery(initialQuery);
  }, [initialQuery, setQuery]);

  // Clearing the query removes the search parameter from the URL.
  function clearQuery() {
    setQuery(undefined);
  }

  return {
    clearQuery,
    query,
    setQuery,
  };
}

// Shape of the object returned by `useForm()`, exported for consumers of the
// search state.
export type SearchForm = ReturnType<typeof useForm>;

/**
 * Hook that sets up the browser search engine and searches for results using
 * the query string.
 *
 * @param index The plugin index
 * @returns Search query, results, and query updater
 */
export function useSearch(index: PluginIndexData[]) {
  const searchForm = useForm();

  // Use search engine to find plugins using the query.
  const engine = useSearchEngine(index);
  const results = useSearchResults(engine, searchForm.query ??
'', index); return { results, searchForm }; } function getSortParameter() { const url = new URL(window.location.href); return url.searchParams.get(SearchQueryParams.Sort); } /** * Hook that handles updating the sort type based on the search query. When a * user enters a search query, the sort type is automatically switched to * `Relevance`. Similarly, when the user clears the query, the sort type is * switched to either the default value or the selected sort type if it isn't * `Relevance`. * * @param query The query string * @param form The sort form */ export function useSearchSetSortType( query: string | undefined, form: SortForm, ) { // Ref used to determine if user is searching or not. This ref is `true` when // `query` is a non-empty string, and `false` when `query` is an empty string. // This is used to reduce calls to `form.setSortType()` when the `form` object changes. const isSearchingRef = useRef(false); // Ref used for determining if the sort type should be set to `Relevance` // on initial load. If the URL uses a different sort type, then its value is // used instead. const initialLoadRef = useRef(true); useEffect(() => { if (query && !isSearchingRef.current) { // During initial load, set the sort parameter to `Relevance` if it isn't // already set using some other value. if (!initialLoadRef.current || !getSortParameter()) { form.setSortType(SearchSortType.Relevance); } isSearchingRef.current = true; } else if (!query && isSearchingRef.current) { isSearchingRef.current = false; // Don't set sort type if user already picked a different sort type. if (form.sortType === SearchSortType.Relevance) { form.setSortType(DEFAULT_SORT_TYPE); } } initialLoadRef.current = false; }, [form, query]); } <file_sep>import { TOCHeader } from '@/components/common/TableOfContents'; import { getHeadersFromMarkdown } from './Markdown.utils'; const MARKDOWN_WITHOUT_HEADERS = ` # Hello World! This is markdown ### Level 3 header, but not level 2 markdown is cool. 
`;

// Fixture: contains three level 2 headings that should become TOC entries.
const MARKDOWN_WITH_HEADERS = `
# Hello World!

This is markdown

## Foo

## Bar

## Foo Bar
`;

describe('getHeadersFromMarkdown()', () => {
  it('should return empty array for empty markdown', () => {
    const headers = getHeadersFromMarkdown('');
    expect(headers).toHaveLength(0);
  });

  it('should return empty array when there are no headers', () => {
    const headers = getHeadersFromMarkdown(MARKDOWN_WITHOUT_HEADERS);
    expect(headers).toHaveLength(0);
  });

  it('should return array of headers', () => {
    const headers = getHeadersFromMarkdown(MARKDOWN_WITH_HEADERS);
    const expected: TOCHeader[] = [
      {
        id: 'foo',
        text: 'Foo',
      },
      {
        id: 'bar',
        text: 'Bar',
      },
      {
        id: 'foo-bar',
        text: 'Foo Bar',
      },
    ];

    expect(headers).toEqual(expected);
  });
});
<file_sep>import { ReactHTML } from 'react';
import { Node } from 'unist';

// Generic node in the parsed markdown AST.
export interface MarkdownNode extends Node {
  children: MarkdownNode[];
  tagName: keyof ReactHTML;
}

// Node whose content is raw text.
export interface TextNode extends MarkdownNode {
  value: string;
}

// Heading node; `properties.id` holds the anchor id used for linking.
export interface HeadingNode extends MarkdownNode {
  children: [TextNode];
  properties: {
    id: string;
  };
}
<file_sep># Customizing your plugin's listing

napari plugin developers can customize their plugin's listing on the napari hub by updating the metadata associated with their Python package or adding napari-specific configuration files to their GitHub repository.

## Data sources

We have two sources of plugin information for the napari hub: PyPI and GitHub.

### PyPI

napari and the napari hub support discovery of plugins on PyPI that are tagged with the `"Framework :: Napari"` trove classifier (we do not currently support discovery of plugins on Anaconda cloud).

Most of the information about a napari plugin is specified in the [Python package metadata](https://packaging.python.org/specifications/core-metadata/) & PyPI is our primary source of plugin metadata.

The [PyPI API](https://warehouse.pypa.io/api-reference/json.html) provides information about Python packages through a simple JSON structure.
We use PyPI to source information such as the Python versions that a plugin supports, its dependencies, etc. Plugin developers can modify these fields when they package their plugin by setting values in the [Python package metadata](https://packaging.python.org/specifications/core-metadata/). Fields that can be defined through the Python package configuration include the following: - [Name](#name) - [Summary](#summary) - [Description](#description) - [Authors](#authors) - [License](#license) - [Version](#version) - [Python versions](#python-versions) - [Operating System](#operating-system) - [Requirements](#requirements) - [Development Status](#development-status) - [Project Site](#project-site) - [Documentation](#documentation) - [Support](#support) - [Report issues](#report-issues) - [Twitter](#twitter) - [Code repository](#code-repository) ### GitHub For some fields, we look to the plugin developer's GitHub repository instead of (or in addition to) PyPI. This is only supported, however, if the plugin developer has added a link to their GitHub repository in their PyPI metadata (see [Source Code](#code-repository)). Plugin developers can modify these fields by adding a `.napari` configuration folder to their repository, along with the relevant configuration files for a given field. We currently support two configuration files: - `.napari/DESCRIPTION.md` for a napari-specific description (see [Description](#description)) - `.napari/config.yml` for all other configurable fields Fields that can be defined through the napari config include... - [Summary](#summary) - [Description](#description) - [Authors](#authors) - [Project Site](#project-site) - [Documentation](#documentation) - [Support](#support) - [Report Issues](#report-issues) - [Twitter](#twitter) Fields that come from the GitHub API - [License](#license) ## Fields For each of the fields in a plugin's listing, we outline below how the field is used and where we source the data. 
| Metadata | Full view | List view | Filterable | Sortable | Searched | Source (Backup) | |----------------------|:-----------:|:-----------:|:-----------:|:-------------:|:---------:|:---------:| | Name | ✅ | ✅ | ⛔ | ✅ | ✅ | <img src="https://pypi.org/static/images/logo-small.svg" height="20"> | | Summary | ✅ | ✅ | ⛔ | ⛔ | ✅ | <img src="https://github.com/favicon.ico" height="20"> (<img src="https://pypi.org/static/images/logo-small.svg" height="20">) | | Description | ✅ | ⛔ | ⛔ | ⛔ | ✅ | <img src="https://github.com/favicon.ico" height="20"> (<img src="https://pypi.org/static/images/logo-small.svg" height="20">) | | Authors | ✅ | ✅ | ⛔ | ⛔ | ✅ | <img src="https://github.com/favicon.ico" height="20"> (<img src="https://pypi.org/static/images/logo-small.svg" height="20">) | | License | ✅ | ✅ | ✅ | ⛔ | ⛔ | <img src="https://github.com/favicon.ico" height="20"> (<img src="https://pypi.org/static/images/logo-small.svg" height="20">) | | Version | ✅ | ✅ | ⛔ | ⛔ | ⛔ | <img src="https://pypi.org/static/images/logo-small.svg" height="20"> | | Development Status | ✅ | ⛔ | ✅ | ⛔ | ⛔ | <img src="https://pypi.org/static/images/logo-small.svg" height="20"> | | Python Version | ✅ | ⛔ | ✅ | ⛔ | ⛔ | <img src="https://pypi.org/static/images/logo-small.svg" height="20"> | | Operating System | ✅ | ⛔ | ✅ | ⛔ | ⛔ | <img src="https://pypi.org/static/images/logo-small.svg" height="20"> | | Requirements | ✅ | ⛔ | ⛔ | ⛔ | ⛔ | <img src="https://pypi.org/static/images/logo-small.svg" height="20"> | | Project Site | ✅ | ⛔ | ⛔ | ⛔ | ⛔ | <img src="https://github.com/favicon.ico" height="20"> (<img src="https://pypi.org/static/images/logo-small.svg" height="20">) | | Documentation | ✅ | ⛔ | ⛔ | ⛔ | ⛔ | <img src="https://github.com/favicon.ico" height="20"> (<img src="https://pypi.org/static/images/logo-small.svg" height="20">) | | Support | ✅ | ⛔ | ⛔ | ⛔ | ⛔ | <img src="https://github.com/favicon.ico" height="20"> (<img src="https://pypi.org/static/images/logo-small.svg" height="20">) | | 
Report Issues | ✅ | ⛔ | ⛔ | ⛔ | ⛔ | <img src="https://github.com/favicon.ico" height="20"> (<img src="https://pypi.org/static/images/logo-small.svg" height="20">) | | Twitter | ✅ | ⛔ | ⛔ | ⛔ | ⛔ | <img src="https://github.com/favicon.ico" height="20"> (<img src="https://pypi.org/static/images/logo-small.svg" height="20">) | | Source Code | ✅ | ⛔ | ⛔ | ⛔ | ⛔ | <img src="https://pypi.org/static/images/logo-small.svg" height="20"> | | Release Date | ✅ | ✅ | ⛔ | ✅ | ⛔ | <img src="https://pypi.org/static/images/logo-small.svg" height="20"> | | First Released | ✅ | ⛔ | ⛔ | ✅ | ⛔ | <img src="https://pypi.org/static/images/logo-small.svg" height="20"> | ### Name This is the name of the Python package that implements the plugin. We display this on the detailed plugin page and the plugin listings. We index this field for searching. We source this from the `["info"]["name"]` field of the JSON returned by the PyPI API. You can set this by setting the `name` value in your package metadata. ``` INI # setup.cfg [metadata] # ... name=starfish # ... ``` ### Summary This is a short summary of the plugin. We display this on the detailed plugin page and the plugin listings. We index this field for searching. We source this from the `["info"]["summary"]` field of the JSON returned by the PyPI API. You can set this by setting the `summary` value in your package metadata. ``` INI # setup.cfg [metadata] # ... summary = Pipelines and pipeline components for the analysis of image-based transcriptomics data # ... ``` If you wish to customize this field with a napari-specific summary, you can also set this field by setting a `summary` value in your napari configuration file. ``` YAML # .napari/config.yml # ... summary: Build scalable pipelines that localize and quantify RNA transcripts in image data generated by any FISH method # ... ``` ### Description This is a detailed description of the plugin. We display this on the detailed plugin page only. We index this field for searching. 
We source this from the `["info"]["description"]` field of the JSON returned by the PyPI API.
If the `["info"]["description_content_type"]` field denotes Markdown, then this field will be rendered as HTML.

You can set this by setting the `long_description` value in your package metadata.

``` INI
# setup.cfg
[metadata]
# ...
long_description = file: README.md
long_description_content_type = text/markdown
# ...
```

You can denote sections in your plugin description by adding Level 2 Headings (e.g. `## Summary`).
We will automatically generate sidebar navigation for your plugin from the Level 2 Headings present in your plugin description.
If your `description` begins with a Level 1 Heading, we will assume that this is a title (e.g. for your README) and drop it from the description.

If you wish to customize this field with a napari-specific description which is different from the Python package description shown on PyPI, you can also set this field by adding a Markdown file to your GitHub repository at `.napari/DESCRIPTION.md`.
This file will take precedence over the `description` in your Python package.

### Authors

This is a list of authors of the plugin.

We display this on the detailed plugin page and the plugin listings.
We index this field for searching.

We source this from the `["info"]["author"]` field of the JSON returned by the PyPI API.

You can set this by setting the `author` value in your package metadata.

``` INI
# setup.cfg
[metadata]
# ...
author=<NAME>
# ...
```

If you wish to customize this field with a full list of authors, you can also set this field by adding authors, along with an optional [ORCID ID](https://orcid.org/) for each author, to your napari configuration file.

``` YAML
# .napari/config.yml
# ...
authors: - name: <NAME> - name: <NAME> orcid: 0000-0003-4998-6328 - name: <NAME> orcid: 0000-0002-8457-2836 - name: <NAME> orcid: 0000-0001-7077-7972 - name: <NAME> - name: <NAME> orcid: 0000-0002-4638-7015 - name: <NAME> orcid: 0000-0002-7793-5969 - name: <NAME> - name: <NAME> orcid: 0000-0002-7818-1388 # ... ``` Authors listed in your napari config file will take precedence over the `author` specified in your Python package. ### License This is the [SPDX Identifier](https://spdx.org/licenses/) for the license that the plugin is distributed under. We display this on the detailed plugin page and the plugin listings. We support filtering plugins based on whether the plugin is released under an [OSI-approved](https://opensource.org/licenses) open source license. We source this from the GitHub license API, if we cannot find one, we would source from `["info"]["license"]` field of the JSON returned by the PyPI API. You can set this by setting the `license` value in your package metadata. > **_NOTE:_** You must use either a valid SPDX Identifier or "Other". > If you specify a license here which is not an SPDX Identifier, we will display "Other". > You can find the full list of SPDX Identifiers at https://spdx.org/licenses/ ``` INI # setup.cfg [metadata] # ... license = MIT # ... ``` ### Version This is the version of the latest release of your plugin. We display this on the detailed plugin page and the plugin listings. We source this from the key of the latest release listed under `["releases"]` in the PyPI API. You can set this by setting the `version` of your Python package. See the [Python Packaging User Guide](https://packaging.python.org/guides/distributing-packages-using-setuptools/#version) for more info. > **_NOTE:_** We strongly encourage plugin developers to use Semantic Versioning, along with Python conventions for pre-releases (see [PEP 440](https://www.python.org/dev/peps/pep-0440/)). ### Development Status This is the development status of your plugin. 
We support the 7 levels of ["Development Status"](https://pypi.org/classifiers/) supported by PyPI:

- `1 - Planning`
- `2 - Pre-Alpha`
- `3 - Alpha`
- `4 - Beta`
- `5 - Production/Stable`
- `6 - Mature`
- `7 - Inactive`

We display this on the detailed plugin page and the plugin listings.
We support filtering plugins that are "stable" based on this value.
Plugins that are labeled as "5" (Production/Stable) or "6" (Mature) will be considered "stable".

We source this from the list of classifiers in the `["info"]["classifiers"]` field of the JSON returned by the PyPI API.
If multiple "Development Status" classifiers are listed, we will use the one with the highest value.

You can set this by setting a ["Development Status" classifier](https://pypi.org/classifiers/) for your Python package in your package metadata.

``` INI
# setup.cfg
[metadata]
# ...
classifier =
    Development Status :: 5 - Production/Stable
# ...
```

### Python Versions

These are the Python versions your plugin supports.

We display this on the detailed plugin page and the plugin listings.
We support filtering plugins according to the minor versions of Python they support, based on this field.
For example, if a plugin developer notes that a plugin supports Python ">=3.8", then the plugin will be tagged with Python versions `3.8` and `3.9`.

We source this from the `["info"]["requires_python"]` field of the JSON returned by the PyPI API.

You can set this by [setting the `python_requires` value](https://packaging.python.org/guides/distributing-packages-using-setuptools/#id54) for your Python package in your package metadata.

``` INI
# setup.cfg
[metadata]
# ...
python_requires = '>=3.8'
# ...
```

### Operating System

These are the operating systems your plugin supports.

We display this on the detailed plugin page and the plugin listings.
We support filtering plugins based on this value.

We source this from the list of classifiers in the `["info"]["classifiers"]` field of the JSON returned by the PyPI API.
You can set this by setting the relevant ["Operating System" classifiers](https://pypi.org/classifiers/) for your Python package in your package metadata. ``` INI # setup.cfg [metadata] # ... classifier = Operating System :: MacOS :: MacOS X Operating System :: Microsoft :: Windows Operating System :: POSIX :: Linux # ... ``` ``` INI # setup.cfg [metadata] # ... classifier = Operating System :: OS Independent # ... ``` ### Requirements This is a list of Python packages that are required by your plugin. We display this on the detailed plugin page. We source this from the list of requirements in the `["info"]["requires_dist"]` field of the JSON returned by the PyPI API. We do not display requirements for `napari-plugin-engine` or `napari`. You can set this by setting the `install_requires` value for your Python package in your package metadata. ``` INI # setup.cfg [options] # ... install_requires = dataclasses==0.6 h5py jsonschema matplotlib napari-plugin-engine numpy != 1.13.0 pandas >= 0.23.4 read_roi regional scikit-image >= 0.14.0 scikit-learn scipy sympy ~= 1.5.0 trackpy validators xarray >= 0.14.1 # ... ``` ### Project Site This is a link to the main project site for your plugin. We display this on the detailed plugin page. We source this from `["info"]["home_page"]` field of the JSON returned by the PyPI API. You can set this by setting the `url` value for your Python package in your package metadata. ``` INI # setup.cfg [metadata] # ... url = https://spacetx-starfish.readthedocs.io/en/latest/ project_urls = Bug Tracker = https://github.com/spacetx/starfish/issues Documentation = https://spacetx-starfish.readthedocs.io/en/latest/ Source Code = https://github.com/spacetx/starfish # ... ``` > **_NOTE:_** If we detect that a Github repository is the target of the `url` value, we will assign this URL to the "[Source Code](#source-code)" field instead of the Project Site field. 
Alternatively, you can also set this field by setting a value for `Project Site` in the `project_urls` section of your napari configuration file. ``` YAML # .napari/config.yml # ... project_urls: Project Site: https://spacetx-starfish.readthedocs.io/en/latest/ Report Issues: https://github.com/spacetx/starfish/issues Documentation: https://spacetx-starfish.readthedocs.io/en/latest/ User Support: https://forum.image.sc/tag/starfish Twitter: https://twitter.com/cziscience # ... ``` ### Documentation This is a link to further documentation for your plugin. We display this on the detailed plugin page. We source this from `["info"]["project_urls"]["Documentation"]` field of the JSON returned by the PyPI API. You can set this by adding a `Documentation` link to the `project_urls` value for your Python package in your package metadata. ``` INI # setup.cfg [metadata] # ... url = https://spacetx-starfish.readthedocs.io/en/latest/ project_urls = Bug Tracker = https://github.com/spacetx/starfish/issues Documentation = https://spacetx-starfish.readthedocs.io/en/latest/ Source Code = https://github.com/spacetx/starfish User Support = https://forum.image.sc/tag/starfish # ... ``` Alternatively, you can also set this field by setting a value for `Documentation` in the `project_urls` section of your napari configuration file. ``` YAML # .napari/config.yml # ... project_urls: Project Site: https://spacetx-starfish.readthedocs.io/en/latest/ Report Issues: https://github.com/spacetx/starfish/issues Documentation: https://spacetx-starfish.readthedocs.io/en/latest/ User Support: https://forum.image.sc/tag/starfish Twitter: https://twitter.com/cziscience # ... ``` ### User Support This is a link to user support for your plugin. We display this on the detailed plugin page. We source this from `["info"]["project_urls"]["User Support"]` field of the JSON returned by the PyPI API. 
You can set this by adding a `User Support` link to the `project_urls` value for your Python package in your package metadata. ``` INI # setup.cfg [metadata] # ... url = https://spacetx-starfish.readthedocs.io/en/latest/ project_urls = Bug Tracker = https://github.com/spacetx/starfish/issues Documentation = https://spacetx-starfish.readthedocs.io/en/latest/ Source Code = https://github.com/spacetx/starfish User Support = https://forum.image.sc/tag/starfish # ... ``` Alternatively, you can also set this field by setting a value for `User Support` in the `project_urls` section of your napari configuration file. ``` YAML # .napari/config.yml # ... project_urls: Project Site: https://spacetx-starfish.readthedocs.io/en/latest/ Report Issues: https://github.com/spacetx/starfish/issues Documentation: https://spacetx-starfish.readthedocs.io/en/latest/ User Support: https://forum.image.sc/tag/starfish Twitter: https://twitter.com/cziscience # ... ``` ### Report Issues This is a link to where users can report issues with your plugin. We display this on the detailed plugin page. We source this from `["info"]["project_urls"]["Bug Tracker"]` field of the JSON returned by the PyPI API. You can set this by adding a `Bug Tracker` link to the `project_urls` value for your Python package in your package metadata. ``` INI # setup.cfg [metadata] # ... url = https://spacetx-starfish.readthedocs.io/en/latest/ project_urls = Bug Tracker = https://github.com/spacetx/starfish/issues Documentation = https://spacetx-starfish.readthedocs.io/en/latest/ Source Code = https://github.com/spacetx/starfish User Support = https://forum.image.sc/tag/starfish # ... ``` Alternatively, you can also set this field by setting a value for `Report Issues` in the `project_urls` section of your napari configuration file. ``` YAML # .napari/config.yml # ... 
project_urls: Project Site: https://spacetx-starfish.readthedocs.io/en/latest/ Report Issues: https://github.com/spacetx/starfish/issues Documentation: https://spacetx-starfish.readthedocs.io/en/latest/ User Support: https://forum.image.sc/tag/starfish Twitter: https://twitter.com/cziscience # ... ``` ### Twitter This is a link to the Twitter feed for your plugin. We display this on the detailed plugin page. We source this from `["info"]["project_urls"]["Twitter"]` field of the JSON returned by the PyPI API. You can set this by adding a `Twitter` link to the `project_urls` value for your Python package in your package metadata. ``` INI # setup.cfg [metadata] # ... project_urls = Twitter = https://twitter.com/napari_imaging # ... ``` Alternatively, you can also set this field by setting a value for `Twitter` in the `project_urls` section of your napari configuration file. ``` YAML # .napari/config.yml # ... project_urls: Project Site: https://spacetx-starfish.readthedocs.io/en/latest/ Report Issues: https://github.com/spacetx/starfish/issues Documentation: https://spacetx-starfish.readthedocs.io/en/latest/ User Support: https://forum.image.sc/tag/starfish Twitter: https://twitter.com/cziscience # ... ``` ### Source Code This is a link to the source code repository for your plugin. We display this on the detailed plugin page. We source this from `["info"]["project_urls"]["Source Code"]` field of the JSON returned by the PyPI API. You can set this by adding a `Source Code` link to the `project_urls` value for your Python package in your package metadata. We will also source this from the `url` field if the target is a GitHub repository. ``` INI # setup.cfg [metadata] # ... 
url = https://spacetx-starfish.readthedocs.io/en/latest/
project_urls =
    Bug Tracker = https://github.com/spacetx/starfish/issues
    Documentation = https://spacetx-starfish.readthedocs.io/en/latest/
    Source Code = https://github.com/spacetx/starfish
    User Support = https://forum.image.sc/tag/starfish
# ...
```
<file_sep>// TODO Add more meaningful tests when E2E implementation of plugin page is ready.
describe('/plugin (Plugin Page)', () => {
  beforeAll(async () => {
    await page.goto(
      'http://localhost:8080/plugins/napari-compressed-labels-io',
    );
  });

  it('should render details', async () => {
    await expect(page).toHaveSelector('[data-testid=pluginDetails]');
  });

  it('should render title', async () => {
    await expect(page).toHaveText(
      '[data-testid=pluginDetails] h1',
      'napari-compressed-labels-io',
    );
  });
});
<file_sep>/* eslint-disable
  @typescript-eslint/no-explicit-any,
  no-console,
*/

/**
 * Class for logging messages at different levels. This allows us to format log
 * messages in a standardized way and also opens up the possibility of adding
 * different transports like logging to a file on the server or logging to the
 * cloud.
 */
export class Logger {
  /**
   * Creates a new logger instance. Named loggers can be created using the
   * `name` option and can be used for showing a unique name for all log
   * messages. This is similar to `logging.getLogger(__name__)` in python.
   *
   * @param name The name for the logger
   */
  constructor(private name: string = '') {}

  // `log` and `debug` are suppressed in production builds; the remaining
  // levels always reach the console.
  log(...messages: any[]): void {
    if (process.env.NODE_ENV !== 'production') {
      console.log(...this.formatMessages(messages));
    }
  }

  debug(...messages: any[]): void {
    if (process.env.NODE_ENV !== 'production') {
      console.debug(...this.formatMessages(messages));
    }
  }

  error(...messages: any[]): void {
    console.error(...this.formatMessages(messages));
  }

  info(...messages: any[]): void {
    console.info(...this.formatMessages(messages));
  }

  warn(...messages: any[]): void {
    console.warn(...this.formatMessages(messages));
  }

  trace(...messages: any[]): void {
    console.trace(...this.formatMessages(messages));
  }

  // Prefixes each message with an ISO-8601 timestamp and, when the logger is
  // named, the logger name: `[2021-01-01T00:00:00.000Z] [name] ...`.
  private formatMessages(messages: any[]): any[] {
    const date = new Date();
    return [`[${date.toISOString()}]`, this.name && `[${this.name}]`]
      .filter(Boolean)
      .concat(messages);
  }
}
<file_sep>interface MeasureExecutionResult {
  // Human-readable elapsed time, e.g. "12.34 ms" (a display string, not a number).
  duration: string;
}

/**
 * Utility for measuring the execution duration of a function.
 *
 * @param fn Function to measure.
 * @returns The result of the function, if any.
 */
export function measureExecution<R>(
  fn: () => R,
): MeasureExecutionResult & { result: R } {
  const now = window.performance.now();
  const result = fn();
  const end = window.performance.now();
  // Formatted for logging rather than arithmetic.
  const duration = `${(end - now).toFixed(2)} ms`;

  return {
    duration,
    result,
  };
}
<file_sep>import os
import os.path
import concurrent.futures
import re
from datetime import datetime, timedelta, timezone
import tempfile
from typing import List
import json

import yaml
from flask import Flask, jsonify
import requests
from requests.auth import HTTPBasicAuth
from requests.exceptions import HTTPError
from requests.utils import requote_uri
import boto3
from botocore.exceptions import ClientError
from google.cloud import bigquery

# Environment variable set through lambda terraform infra config
bucket = os.environ.get('BUCKET')
bucket_path = os.environ.get('BUCKET_PATH', '')
slack_url = os.environ.get('SLACK_URL')
zulip_credentials = os.environ.get('ZULIP_CREDENTIALS', "")
github_client_id = os.environ.get('GITHUB_CLIENT_ID', None)
github_client_secret = os.environ.get('GITHUB_CLIENT_SECRET', None)
# NOTE(review): TTL is interpreted as minutes (see the timedelta rebinding
# below), though the default of "6" reads like hours — confirm intent.
cache_ttl = int(os.environ.get('TTL', "6"))
endpoint_url = os.environ.get('BOTO_ENDPOINT_URL', None)

# S3 keys for the cached plugin list and the cached index metadata.
plugins_key = 'cache/plugins.json'
index_key = 'cache/index.json'
exclusion_list = 'excluded_plugins.json'
# Subset of plugin metadata fields exposed through /plugins/index.
index_subset = {'name', 'summary', 'description', 'description_content_type',
                'authors', 'license', 'python_version', 'operating_system',
                'release_date', 'version', 'first_released',
                'development_status'}

s3 = boto3.resource('s3', endpoint_url=endpoint_url)
s3_client = boto3.client("s3", endpoint_url=endpoint_url)
# Rebinds cache_ttl from the raw env int to the timedelta used by cache checks.
cache_ttl = timedelta(minutes=cache_ttl)
# Matches the owner/repo root portion of a GitHub URL.
github_pattern = re.compile("https://github\\.com/([^/]+)/([^/]+)")

app = Flask(__name__)


def get_attribute(obj: dict, path: list):
    """
    Get attribute iteratively from a json object.
:param obj: object to iterate on :param path: list of string to get subpath within json :return: the value if the path is accessible, empty string if not found """ part = obj for token in path: if isinstance(part, dict) and token in part: part = part[token] elif isinstance(part, list) and token < len(part): part = part[token] else: return "" return part def filter_prefix(str_list: List[str], prefix: str) -> list: """ Filter the list for strings with the given prefix. :param str_list: list of strings to filter :param prefix: prefix to filter on :return: list of filtered strings """ return [string for string in str_list if string.startswith(prefix)] def filter_index(plugin: str, version: str) -> dict: """ Filter index based to only include specified entries. :param plugin: name of the plugin :param version: version of the plugin :return: filtered json metadata for the plugin """ plugin_info = get_plugin(plugin, version) return {k: plugin_info[k] for k in index_subset} @app.route('/plugins/index') def get_index() -> dict: """ Get the index page related metadata for all plugins. :return: json for index page metadata """ if cache_available(index_key, cache_ttl): return jsonify(get_cache(index_key)) else: return update_index() @app.route('/plugins/index/update') def update_index() -> dict: """ update the index page related metadata for all plugins. :return: json for index page metadata """ results = [] with concurrent.futures.ThreadPoolExecutor(max_workers=32) as executor: futures = [executor.submit(filter_index, k, v) for k, v in get_plugins().items()] for future in concurrent.futures.as_completed(futures): results.append(future.result()) return jsonify(cache(results, index_key)) def get_file(download_url: str, file: str) -> [dict, None]: """ Get file from github. 
:param download_url: github url to download from :param file: filename to get :return: file context for the file to download """ api_url = download_url.replace("https://github.com/", "https://raw.githubusercontent.com/") try: url = f'{api_url}/HEAD/{file}' print(url) response = requests.get(url) if response.status_code != requests.codes.ok: response.raise_for_status() return response.text except HTTPError: pass return None def get_extra_metadata(download_url: str) -> dict: """ Extract extra metadata from the github download url :param download_url: github url to download from :return: extra metadata dictionary """ extra_metadata = {} github_license = get_license(download_url) if github_license is not None: extra_metadata['license'] = github_license description = get_file(download_url, ".napari/DESCRIPTION.md") if description is not None: extra_metadata['description'] = description yaml_file = get_file(download_url, ".napari/config.yml") if yaml_file: config = yaml.safe_load(yaml_file) extra_metadata.update(config) return extra_metadata def get_license(url: str) -> [str, None]: try: api_url = url.replace("https://github.com/", "https://api.github.com/repos/") auth = None if github_client_id is not None and github_client_secret is not None: auth = HTTPBasicAuth(github_client_id, github_client_secret) response = requests.get(f'{api_url}/license', auth=auth) if response.status_code != requests.codes.ok: response.raise_for_status() spdx_id = get_attribute(json.loads(response.text.strip()), ['license', "spdx_id"]) if spdx_id == "NOASSERTION": return None else: return spdx_id except HTTPError: return None def get_download_url(plugin: dict) -> [str, None]: """ Get download url for github. 
:param plugin: plugin metadata dictionary :return: download url if one is available, else None """ project_urls = get_attribute(plugin, ["info", "project_urls"]) if project_urls: source_code_url = get_attribute(project_urls, ["Source Code"]) if source_code_url: return source_code_url elif isinstance(project_urls, dict): for key, url in project_urls.items(): if url.startswith("https://github.com"): match = github_pattern.match(url) if match: return github_pattern.match(url).group(0) return None def format_plugin(plugin: dict) -> dict: """ Format the plugin dictionary to extra relevant information. :param plugin: plugin dictionary from pypi :return: formatted plugin dictionary """ version = get_attribute(plugin, ["info", "version"]) download_url = get_download_url(plugin) extra_metadata = {} project_urls = {} if download_url: extra_metadata = get_extra_metadata(download_url) project_urls = extra_metadata.get('project_urls', {}) return { "name": get_attribute(plugin, ["info", "name"]), "summary": extra_metadata.get('summary', get_attribute(plugin, ["info", "summary"])), "description": extra_metadata.get('description', f'{get_attribute(plugin, ["info", "description"])}'), "description_content_type": f'{get_attribute(plugin, ["info", "description_content_type"])}', "authors": extra_metadata.get('authors', [{'name': get_attribute(plugin, ["info", "author"]), 'email': get_attribute(plugin, ["info", "author_email"])}]), "license": extra_metadata.get('license', get_attribute(plugin, ["info", "license"])), "python_version": get_attribute(plugin, ["info", "requires_python"]), "operating_system": filter_prefix( get_attribute(plugin, ["info", "classifiers"]), "Operating System"), "release_date": get_attribute(plugin, ["releases", version, 0, "upload_time_iso_8601"]), "version": version, "first_released": min( get_attribute(release, [0, "upload_time_iso_8601"]) for _, release in get_attribute(plugin, ["releases"]).items() if get_attribute(release, [0, "upload_time_iso_8601"])), 
"development_status": filter_prefix( get_attribute(plugin, ["info", "classifiers"]), "Development Status"), # below are plugin details "requirements": get_attribute(plugin, ["info", "requires_dist"]), "project_site": project_urls.get('Project Site', get_attribute( plugin, ["info", "home_page"])), "documentation": project_urls.get('Documentation', get_attribute( plugin, ["info", "project_urls", "Documentation"])), "support": project_urls.get('User Support', get_attribute( plugin, ["info", "project_urls", "User Support"])), "report_issues": project_urls.get('Report Issues', get_attribute( plugin, ["info", "project_urls", "Bug Tracker"])), "twitter": project_urls.get('Twitter', get_attribute( plugin, ["info", "project_urls", "Twitter"])), "code_repository": download_url, } @app.route('/plugins') def get_plugins() -> dict: """ Get all valid plugins list. We would first try to see if there is a freshly cached list, and return that if available, then we try to read from pypi, and fail over to google bigquery analysis dump when pypi reading failed as well. If every attempts failed, we return the cached version regardless of freshness. :param context: context for the run to raise alerts :return: json of valid plugins and their version """ if cache_available(plugins_key, cache_ttl): return get_cache(plugins_key) packages = query_pypi() if packages: packages = filter_excluded_plugin(packages) if zulip_credentials is not None and len(zulip_credentials.split(":")) == 2: notify_new_packages(get_cache(plugins_key), packages) return cache(packages, plugins_key) send_alert(f"({datetime.now()})Actions Required! Failed to query pypi for " f"napari plugin packages, switching to backup analysis dump") packages = query_analysis_dump() if packages: return cache(filter_excluded_plugin(packages), index_key) send_alert(f"({datetime.now()}) Actions Required! Back up method also " f"failed! 
Immediate fix is required to bring the API back!") return get_cache(index_key) @app.route('/plugins/<plugin>', defaults={'version': None}) @app.route('/plugins/<plugin>/versions/<version>') def get_plugin(plugin: str, version: str = None) -> dict: """ Get plugin metadata for a particular plugin, get latest if version is None. :param plugin: name of the plugin to get :param version: version of the plugin :return: plugin metadata dictionary """ plugins = get_plugins() if plugin not in plugins: return {} elif version is None: version = plugins[plugin] if cache_available(f'cache/{plugin}/{version}.json', None): return get_cache(f'cache/{plugin}/{version}.json') url = f"https://pypi.org/pypi/{plugin}/{version}/json" try: response = requests.get(url) if response.status_code != requests.codes.ok: response.raise_for_status() info = format_plugin(json.loads(response.text.strip())) if version is None: version = info['version'] return cache(info, f'cache/{plugin}/{version}.json') except HTTPError: return {} def cache_available(key: str, ttl: [timedelta, None]) -> bool: """ Check if cache is available for the key. 
:param key: key to check in s3 :param ttl: ttl for the cache, if None always consider the cache is valid :return: True iff cache exists and is considered fresh """ if bucket is None: return False try: last_modified = s3.Object(bucket, os.path.join(bucket_path, key)).last_modified if ttl is None: return True if last_modified is None or \ datetime.now(timezone.utc) - last_modified > ttl: print(f"Updated Cache: {key}") return False else: return True except ClientError: print(f"Not cached: {key}") return False def filter_excluded_plugin(packages: dict) -> dict: """ Filter excluded plugins from the plugins list :param packages: all plugins list :return: only plugins not in the filtered list """ filtered = packages.copy() exclusions = get_exclusion_list() for exclusion, versions in exclusions.items(): if exclusion in packages and \ (versions is None or packages[exclusion] in versions): filtered.pop(exclusion) return filtered @app.route('/plugins/excluded') def get_exclusion_list() -> dict: """ Get the exclusion plugin list. :return: excluded plugin list """ if cache_available(exclusion_list, None): return get_cache(exclusion_list) else: return {} def query_pypi() -> dict: """ Query pypi to get all plugins. :return: all plugin names and latest version """ packages = {} page = 1 name_pattern = re.compile('class="package-snippet__name">(.+)</span>') version_pattern = re.compile( 'class="package-snippet__version">(.+)</span>') url = requote_uri(f"https://pypi.org/search/?q=&o=-created&c=Framework :: napari&page=") while True: try: response = requests.get(f'{url}{page}') if response.status_code != requests.codes.ok: response.raise_for_status() html = response.text names = name_pattern.findall(html) versions = version_pattern.findall(html) assert (len(names) == len(versions)) for name, version in zip(names, versions): packages[name] = version page += 1 except HTTPError: break return packages def send_alert(message: str): """ Send alert to slack with a message. 
:param message: message to send alongside the alert """ payload = { "text": message } if slack_url is None: print("Unable to send alert because slack URL is not set") else: try: requests.post(slack_url, json=payload) except HTTPError: print("Unable to send alert") def notify_new_packages(existing_packages: dict, new_packages: dict): """ Notify zulip about new packages. :param existing_packages: existing packages in cache :param new_packages: new packages found """ username = zulip_credentials.split(":")[0] key = zulip_credentials.split(":")[1] for package, version in new_packages.items(): if package not in existing_packages: send_zulip_message(username, key, package, f'A new plugin has been published on the napari hub! Check out [{package}](https://napari-hub.org/plugins/{package})!') elif existing_packages[package] != version: send_zulip_message(username, key, package, f'A new version of [{package}](https://napari-hub.org/plugins/{package}) is available on the napari hub! Check out [{version}](https://napari-hub.org/plugins/{package})!') for package, version in existing_packages.items(): if package not in new_packages: send_zulip_message(username, key, package, f'This plugin is no longer available on the [napari hub](https://napari-hub.org) :(') def send_zulip_message(username: str, key: str, topic: str, message: str): """ Send message to zulip :param username: username for the user to post message :param key: api key for the user :param topic: topic in zulip stream to send :param message: message to send """ try: data = { 'type': 'stream', 'to': 'hub-updates', 'topic': topic, 'content': message } response = requests.post('https://napari.zulipchat.com/api/v1/messages', auth=HTTPBasicAuth(username, key), data=data) if response.status_code != requests.codes.ok: response.raise_for_status() except HTTPError: pass def query_analysis_dump() -> dict: """ Query google bigquery for all plugins. 
:return: list of plugin name and version """ client = bigquery.Client() results = client.query( """ select name, max(version) as version from `bigquery-public-data.pypi.distribution_metadata` where "Framework :: napari" in UNNEST(classifiers) group by name; """ ) return {row.name: row.version for row in results} def get_cache(key: str) -> dict: """ Get cache for a given key. :param key: key to the cache to get :return: file content for the key """ return json.loads(s3.Object(bucket, os.path.join(bucket_path, key)).get()['Body'].read()) def cache(content: [dict, list], key: str) -> dict: """ Cache the given content to the key location. :param content: content to cache :param key: key path in s3 :return: content that is cached """ if bucket is None: send_alert(f"({datetime.now()}) Unable to find bucket for lambda " f"configuration, skipping caching for napari hub." f"Check terraform setup to add environment variable for " f"napari hub lambda") return content with tempfile.NamedTemporaryFile(mode="w") as fp: fp.write(json.dumps(content)) fp.flush() s3_client.upload_file(fp.name, bucket, os.path.join(bucket_path, key)) return content <file_sep>export * from './urlParameters.context'; <file_sep>import { LayoutMDX } from '@/components'; <LayoutMDX toc title="About"> # About **The napari hub is a service of the Chan Zuckerberg Initiative in collaboration with [napari]({NAPARI_WEBSITE}).** The napari hub seeks to solve many of the challenges and needs in finding analysis solutions to bioimaging problems. You can explore how the hub is being built in the open (including research studies, design prototypes and technical specs) by visiting its [GitHub repository]({HUB_REPO}). ## About napari Led by microscopy and Python experts and built by a growing community, [napari]({NAPARI_WEBSITE}) is quickly becoming an essential tool for visualizing and exploring imaging data. 
napari is a consensus-based community project and an open source tool that enables high performance visualization and exploration of a broad range of imaging data, including microscopy, medical imaging, geospatial data, and more, with a clearly defined [governance model, mission, and values]({NAPARI_GOVERNANCE}). CZI [supports development of open source and community run tools like napari]({CZI_EOSS}), as part of its mission to accelerate biomedical research and help every scientist make progress faster. You can learn more about the napari project by visiting <https://napari.org>. ## About CZI imaging The [CZI Imaging Program](https://chanzuckerberg.com/science/programs-resources/imaging/) seeks to remove barriers in the analysis of imaging and microscopy data and make it easier for biologists to access emerging methods for bioimage analysis that leverage machine learning. CZI sees promise in napari and seeks to assist its development by providing resources not always available to the open source community, including dedicated user research, design, and engineering support. CZI is proud to collaborate with the science community to accelerate research and enable open science for all. In addition to napari and the napari hub, CZI imaging supports a number of grants on key areas such as visual proteomics, deep tissue imaging, and expanding global access. You can learn more about the imaging program and grant resources by visiting <https://czi.co/Imaging>. </LayoutMDX> <file_sep>FROM public.ecr.aws/lambda/python:3.8 COPY requirements.txt . RUN ["pip", "install", "-r", "requirements.txt"] COPY . . CMD ["lambda.handler"] <file_sep># Writing the Perfect Description for your Plugin You've done the hard parts. You've built your new plugin. You've packaged it. You've got your unit tests passing. You're ready to share it with the world. It's time to write a description. The description is the main way for you to connect with your users through the napari hub. 
You can provide a description of your plugin by creating a markdown file in your repo at `.napari/DESCRIPTION.md`. If you've used napari's [cookiecutter template](https://github.com/napari/cookiecutter-napari-plugin), you'll find a template description already there for you. This description is the primary way for potential users learn about your plugin, understand what it does, and determine whether it might solve their problem. It's a blank canvas, for you to customize as you see fit. In this guide, we'll discuss a bit about what to include and what not to include to make a top notch description that helps you find your next users. ## What to include What should you include? A good start is the following. ### A summary The best plugin descriptions start with a clear summary of the plugin that lets users know what the plugin does. Things to consider: - Who is this plugin for? Is it for cell biologists or neuroscientists? For Python novices or folks with advanced computational experience? - What kind of data does this plugin work on? 2D or 3D? Time series? Multichannel? If your plugin provides a reader or writer, what file types does it support? - What makes your plugin different from other plugins that do similar things? Is it faster? More robust? For example, see the [affinder summary](https://github.com/jni/affinder/blob/master/.napari/DESCRIPTION.md#description) ### An example or "quick start" A quick start guide can help users get a sense of how your plugin works without even having to install it. Include images, GIFs, or video so they can see exactly what your plugin does. [Include some sample data with your plugin](https://napari.org/plugins/stable/hook_specifications.html#napari.plugins.hook_specifications.napari_provide_sample_data) and they can follow along when they first try your plugin! ### Headings for each section We generate a navigation menu on your napari hub plugin listing from the headings in your description. 
If you include informative headings, it will make it easier for users to jump to the relevant sections of your description. ### Relevant keywords When users search for a plugin, the hub searches plugin descriptions to find relevant plugins. So if a user searches for "segmentation" and your plugin does segmentation but doesn't mention "segmentation" in its description, it will be hard for users to find your plugin. ## What not to include There's a good chance that you've included some of this information as part of your Github README. However, we don't recommend simply duplicating your README. While your Github README focuses on information that is relevant to other Python developers, your description is meant for all users of your plugin, no matter their familiarity with Github or Python. ### We don't need no badges We don't recommend including any shields or badges in your description. These are great for Github, but we've found that hub users tend to find them distracting and confusing. If there's a shield that you're excited about, [reach out and add your idea to our discussion page](https://github.com/chanzuckerberg/napari-hub/discussions/categories/ideas) and we can explore other ways to add the relevant information to your plugin's metadata. ### Don't worry about installation Next to your plugin's description is a big "Install" button that will give users instructions on installing your plugin. Unless your plugin has advanced installation requirements or pre-requisites, there's no need to include an "Installation" section. 
## Examples For examples of strong descriptions, check out the following plugins: - [affinder](https://github.com/jni/affinder/blob/master/.napari/DESCRIPTION.md) - [ome-zarr](https://github.com/ome/napari-ome-zarr/blob/main/.napari/DESCRIPTION.md) <file_sep>import { LayoutMDX } from '@/components'; <LayoutMDX title="Contact"> # Contact For help with or ideas for the napari hub, please [check out the discussion board]({HUB_REPO}/discussions). For bugs or features requests, please [submit an issue on our GitHub repository]({HUB_REPO}/issues). For issues related to napari, please visit its [image.sc forum]({IMAGESC}) or [GitHub]({NAPARI_REPO}/issues) site. If you believe you have found a security issue with the napari hub, please responsibly disclose by contacting the CZI security team at <mailto:<EMAIL>>. For all other inquiries, email the hub team at <mailto:<EMAIL>>. </LayoutMDX> <file_sep>const LINKS = { HOME: { title: 'Home', link: '/', }, ABOUT: { title: 'About', link: '/about', }, FAQ: { title: 'FAQ', link: '/faq', }, PRIVACY: { title: 'Privacy', link: '/privacy', }, CONTACT: { title: 'Contact', link: '/contact', }, HUB_REPO: { title: 'GitHub repo', link: 'https://github.com/chanzuckerberg/napari-hub', newTab: true, }, NAPARI_REPO: { title: 'GitHub repo', link: 'https://github.com/napari/napari', newTab: true, }, NAPARI_WEBSITE: { title: 'napari main website', link: 'https://napari.org', newTab: true, }, NAPARI_GOVERNANCE: { title: 'napari governance model', link: 'https://napari.org/community/governance.html', newTab: true, }, IMAGESC: { title: 'napari image.sc forum', link: 'https://forum.image.sc/tag/napari', newTab: true, }, PLAUSIBLE: { title: 'napari.dev analytics dashboard', link: 'https://plausible.io/napari.dev', newTab: true, }, PLAUSIBLE_PRIVACY: { title: 'Plausible.io Privacy Policy', link: 'https://plausible.io/data-policy', newTab: true, }, MAILCHIMP_PRIVACY: { title: 'Mailchimp Privacy Policy', link: 'https://mailchimp.com/legal/privacy/', 
newTab: true, }, CZI_EOSS: { title: 'Chan Zuckerberg Initiative EOSS Program', link: 'https://chanzuckerberg.com/eoss/', newTab: true, }, }; module.exports = { LINKS }; <file_sep>import { formatDate, formatOperatingSystem } from './format'; describe('formatDate()', () => { it('should format date', () => { const year = 2021; const month = 3; const day = 30; const date = new Date(year, month, day).toISOString(); expect(formatDate(date)).toBe(`${day} April ${year}`); }); it('should add leading zero for days', () => { const year = 2021; const month = 3; const day = 2; const date = new Date(year, month, day).toISOString(); expect(formatDate(date)).toBe(`0${day} April ${year}`); }); }); describe('formatOperatingSystem()', () => { const testCases = [ { name: 'should support one level', input: 'Operating System :: MacOS', output: 'MacOS', }, { name: 'should support deeply nested level', input: 'Operating System :: Microsoft :: Windows :: Windows 10', output: 'Windows 10', }, ]; testCases.forEach((testCase) => // eslint-disable-next-line jest/valid-title it(testCase.name, () => { const result = formatOperatingSystem(testCase.input); expect(result).toEqual(testCase.output); }), ); }); <file_sep>export * from './constants'; export * from './search.context'; export type { SearchResultMatch } from './search.types'; <file_sep>--- name: "\U0001F4D8 User Story" about: User stories are reserved for prioritized work on new features title: 'User Story: ' labels: feature assignees: '' --- ### Primary persona(s) - [ ] Research Biologist - [ ] Imaging Scientist - [ ] Bioimage Analyst ### Job Stories - **When I** _______________________, **I want to** _______________________ **so that** _______________________. ### Acceptance Criteria 1. [If I do A, B should happen.] [ Also, here are a few points that need to be addressed: 1. Constraint 1; 2. Constraint 2; 3. Constraint 3. 
] ### Resources: * Tech Spec: [URL to tech spec] * Sketches: [URL to sketches] * Designs: [URL to Figma] * Anything else relevant ### Context * PRD: [URL to Product Requirements] * UXR insights: [URL to UXR synthesis] ### Notes [Some complementary notes if necessary:] * > Here goes a quote from an email * Here goes whatever useful information can exist… <file_sep>import { defaultsDeep } from 'lodash'; import { useEffect, useMemo, useState } from 'react'; import { JsonParam, useQueryParam, withDefault } from 'use-query-params'; import { DeepPartial } from 'utility-types'; import { useActiveURLParameter, usePlausible } from '@/hooks'; import { SearchQueryParams } from './constants'; import { FilterFormState } from './filter.types'; import { filterFalsyValues, getChipState, getDefaultState, } from './filter.utils'; import { useFilterResults } from './filters'; import { SearchResult } from './search.types'; /** * Hook that gets the initial form state. It first checks if there's state in * the filter query param and parses it. It then deep merges the initial state * with the form default state. * * @param results Search results * @returns The initial form state */ function useInitialFormState() { const initialFilterParam = useActiveURLParameter(SearchQueryParams.Filter) ?? ''; const initialFormState = useMemo(() => { try { return JSON.parse(initialFilterParam) as DeepPartial<FilterFormState>; } catch (_) { return {}; } }, [initialFilterParam]); return defaultsDeep(initialFormState, getDefaultState()) as FilterFormState; } function usePlausibleEvents() { const plausible = usePlausible(); function sendPlausibleEvent(field: string, value: string, checked: boolean) { plausible('Filter', { checked, field, value, }); } return sendPlausibleEvent; } /** * Hook that returns up the filter form state and state setters. 
* * @param results Search results to populate initial state with * @returns The filter form state */ function useForm() { const sendPlausibleEvent = usePlausibleEvents(); const initialState = useInitialFormState(); // We don't need the first parameter because we're storing the form state in a // separate `useState()` below. const [, setFilterParam] = useQueryParam< DeepPartial<FilterFormState> | undefined >(SearchQueryParams.Filter, withDefault(JsonParam, initialState)); const [state, setState] = useState<FilterFormState>(initialState); const chips = getChipState(state); /** * Resets the filter form state to its default state. */ function clearAll() { setState(getDefaultState()); } // Update the filter query parameter with the filtered state useEffect(() => setFilterParam(filterFalsyValues(state)), [ setFilterParam, state, ]); /** * Removes a chip from the chips state. * * @param key The key of root filter state * @param subKey The sub key of the filter state */ function removeChip(key: string, subKey: string) { setState((prevState) => ({ ...prevState, [key]: { ...prevState[key as keyof FilterFormState], [subKey]: false, }, })); sendPlausibleEvent(key, subKey, false); } /** * Higher order function that returns a state setter for checkbox sub-states. 
* * @param key The sub-state to use * @returns A function to merge state into the sub-state */ function getCheckboxSetter< K extends keyof FilterFormState, S extends FilterFormState[K] >(key: K) { return (nextState: Partial<S>): void => { setState((prevState) => ({ ...prevState, [key]: { ...prevState[key], ...nextState, }, })); Object.entries(nextState).forEach(([subKey, checked]) => sendPlausibleEvent(key, subKey, checked as boolean), ); }; } return { // State chips, state, // State update functions clearAll, removeChip, setDevelopmentStatus: getCheckboxSetter('developmentStatus'), setLicense: getCheckboxSetter('license'), setOperatingSystem: getCheckboxSetter('operatingSystems'), setPythonVersion: getCheckboxSetter('pythonVersions'), }; } /** * Return type of `useForm()` hook. This includes the form data and data * setters. */ export type FilterForm = ReturnType<typeof useForm>; /** * Hook that provides access to the filter form state and handles filtering * plugins based on enabled filters. * * @param results The search results * @returns Filtered results and form data */ export function useFilters(results: SearchResult[]) { const filterForm = useForm(); const filteredResults = useFilterResults(results, filterForm.state); return { filteredResults, filterForm, }; } <file_sep>--- name: "\U0001F41B Bug report" about: Create a report to help us improve title: '' labels: bug assignees: '' --- #### Description <!-- Example: Selecting the "napari-svg" plugin gives a 404 page > 0--> #### Steps/Code to Reproduce <!-- What did you do before the error? --> #### Expected Results <!-- Example: "I expected to see the details for the "napari-svg" plugin".--> #### Actual Results <!-- Feel free to paste a screenshot of the error --> <!-- Thanks for contributing! -->
d4dca84d80b374aab7eeb9554971c2c5e3925125
[ "Markdown", "JavaScript", "Makefile", "Python", "Text", "JSON with Comments", "TypeScript", "Dockerfile", "Shell" ]
88
Python
codemonkey800/napari-hub
34a40b68d67002de2514d55b575b71159c7456cb
171ac53b13590f2f18ad88a779c18d04c74a1437
refs/heads/master
<repo_name>Nikadeatul/textutils<file_sep>/sept3/views.py from django.shortcuts import render from django.http.response import HttpResponse def home(request): return render(request,'index.html') def analyze(request): djtext=request.GET.get('text','default') removepunc = request.GET.get('removepunc','off') fullcaps= request.GET.get('fullcaps','off') nlr= request.GET.get('nlr','off') esr= request.GET.get('esr','off') print(removepunc) print(djtext) if removepunc == "on": punctuations = '''!()-[]{};:'"\,<>./?@#$%^&*_~''' analyzed="" for char in djtext: if char not in punctuations: analyzed = analyzed + char params ={'purpose':'Removepunc','analyze_text':analyzed} return render(request,'analyze.html',params) elif(fullcaps=="on"): analyzed= "" for char in djtext: analyzed = analyzed + char.upper() params ={'purpose':'Change To Uppercase','analyze_text':analyzed} return render(request,'analyze.html',params) elif(nlr=="on"): analyzed="" for char in djtext: if char!="\n" and char!="\r": analyzed = analyzed + char params ={'purpose':'New Line Remover','analyze_text':analyzed} return render(request,'analyze.html',params) elif(esr=="on"): analyzed="" for index, char in enumerate(djtext): if djtext[index]==" " and djtext[index +1] ==" ": pass else: analyzed = analyzed + char params ={'purpose':'Extra Space Remover','analyze_text':analyzed} return render(request,'analyze.html',params) else: print("error")
d7d0ecda6573a96e01d24531dfd257924d035d9c
[ "Python" ]
1
Python
Nikadeatul/textutils
b92971632b974284a206fd7d8a3554c0786a68bf
3a7390c3e6776d2ea23bb01ed0043329888aa2ef
refs/heads/master
<repo_name>CaninoDev/deli-counter-nyc-web-042318<file_sep>/deli_counter.rb def line(customers) if customers.length == 0 puts "The line is currently empty." return end # Alternatively, the followiung outputs the same thing but doesn't satisfy the test: # print 'The line is currently: ' # customers.collect.each_with_index do |person, queue| print '#{queue + 1}. #{person} " end # print "\n" linemsg = "The line is currently:" customers.collect.each_with_index do |person, queue| linemsg += " #{queue + 1}. #{person}" end puts linemsg end def take_a_number(queue, name) queue.push(name) puts "Welcome, #{name}. You are number #{queue.length} in line." end def now_serving(queue) if queue.length == 0 puts "There is nobody waiting to be served!" else puts "Currently serving #{queue.shift}." end end
8bd85aa2e4bdf49f50c60ee433380d530e16b002
[ "Ruby" ]
1
Ruby
CaninoDev/deli-counter-nyc-web-042318
56a35870283146bd197b920da6504370a39fee43
45a8fd630599aa080fe66399b3139b8d665febf5