branch_name
stringclasses 149
values | text
stringlengths 23
89.3M
| directory_id
stringlengths 40
40
| languages
listlengths 1
19
| num_files
int64 1
11.8k
| repo_language
stringclasses 38
values | repo_name
stringlengths 6
114
| revision_id
stringlengths 40
40
| snapshot_id
stringlengths 40
40
|
|---|---|---|---|---|---|---|---|---|
refs/heads/master
|
<file_sep># Asymmetric-Symmetric-Key-Encryption
Cyber Security Project:
- Asymmetric Key Encryption & Digital Signature Verification.
- Encrypted Client-Server Communication with Symmetric-Key and Asymmetric-Key.
# Warning
- This implementation specifies the cipher "RSA", which in Java usually resolves to RSA with PKCS#1 v1.5 padding. Unfortunately, this cipher is vulnerable to a Bleichenbacher padding-oracle attack, which allows an active attacker to recover the plaintext of a sent message. To mitigate the issue, please switch to OAEP, which can be selected by changing the cipher string to "RSA/ECB/OAEPWithSHA-256AndMGF1Padding".
## Contributors:
- Jinfeng (<NAME>) <<EMAIL>>
- Zhipeng (<NAME>) <<EMAIL>>
<file_sep>// File Name Server.java
import java.io.*;
import java.net.*;
import java.nio.charset.StandardCharsets;
import java.security.GeneralSecurityException;
import java.util.*;
import com.google.crypto.tink.*;
import com.google.crypto.tink.CleartextKeysetHandle;
import com.google.crypto.tink.JsonKeysetWriter;
import com.google.crypto.tink.aead.AeadFactory;
import com.google.crypto.tink.aead.AeadKeyTemplates;
import com.google.crypto.tink.config.TinkConfig;
/**
 * Demo server: accepts client connections, decrypts a hex-encoded
 * AES-256-GCM message with a Tink AEAD primitive, prints it, then prompts the
 * operator for a reply which it encrypts and sends back hex-encoded.
 */
public class Server extends Thread
{
    private ServerSocket serverSocket;

    /**
     * Binds the listening socket.
     *
     * @param port TCP port to listen on
     * @throws IOException if the port cannot be bound
     */
    public Server(int port) throws IOException
    {
        serverSocket = new ServerSocket(port);
        //serverSocket.setSoTimeout(10000);
    }

    /**
     * Accept loop. Key material is generated once (AES-256 in Galois/Counter
     * Mode), written to my_keyset.json and read back, then each connection
     * exchanges one message pair.
     */
    public void run()
    {
        System.out.println("Waiting for client... ");
        try
        {
            TinkConfig.register();
            // Generating key materials with AES_256 using GF/Counter Mode
            KeysetHandle keysetHandle = KeysetHandle.generateNew(AeadKeyTemplates.AES256_GCM);
            // Persist the keyset to disk — cleartext, acceptable only for this demo.
            String mySecretKeyset = "my_keyset.json";
            CleartextKeysetHandle.write(keysetHandle, JsonKeysetWriter.withFile(new File(mySecretKeyset)));
            // Read the keyset back from the .json file to prove the round trip.
            keysetHandle = CleartextKeysetHandle.read(JsonKeysetReader.withFile(new File(mySecretKeyset)));
            // AEAD primitive used for both decryption and encryption.
            Aead aead = AeadFactory.getPrimitive(keysetHandle);
            // FIX: one Scanner for the whole loop — the old code created a new
            // Scanner on System.in for every connection.
            Scanner myObj = new Scanner(System.in);
            while (true)
            {
                // FIX: try-with-resources so the socket and both streams are
                // closed even when an exception occurs (old code leaked them).
                try (Socket server = serverSocket.accept();
                     DataInputStream in = new DataInputStream(server.getInputStream());
                     DataOutputStream out = new DataOutputStream(server.getOutputStream()))
                {
                    String input = in.readUTF();
                    byte[] decryptedText = aead.decrypt(parseHexStr2Byte(input), null);
                    // FIX: explicit charset — default-charset conversion is platform dependent.
                    String output = new String(decryptedText, StandardCharsets.UTF_8);
                    System.out.println("Cipher text is: " + input);
                    System.out.println("Plain text is: " + output);
                    // Prompt the operator for the reply and send it encrypted.
                    System.out.println("Enter a message");
                    String line = myObj.nextLine();
                    byte[] cipherText = aead.encrypt(line.getBytes(StandardCharsets.UTF_8), null);
                    out.writeUTF(byte2hex(cipherText));
                }
                catch (SocketTimeoutException s)
                {
                    System.out.println("Socket timed out!");
                    System.out.println("Connection ended!");
                }
                catch (IOException e)
                {
                    e.printStackTrace();
                }
                // GeneralSecurityException from decrypt/encrypt propagates to the
                // outer catch below and ends the accept loop, as before.
            }
        }
        catch (IOException e)
        {
            e.printStackTrace();
        }
        catch (GeneralSecurityException e)
        {
            e.printStackTrace();
        }
    }

    /**
     * Encodes a byte array as an upper-case hex string, two characters per byte.
     */
    public static String byte2hex(byte buf[])
    {
        StringBuilder sb = new StringBuilder(buf.length * 2);
        for (byte b : buf)
        {
            sb.append(String.format("%02X", b));
        }
        return sb.toString();
    }

    /**
     * Decodes a hex string back into bytes; inverse of {@link #byte2hex}.
     * Returns null for the empty string (historical contract kept for callers).
     * Trailing odd characters are ignored, as in the original implementation.
     */
    public static byte[] parseHexStr2Byte(String hexStr)
    {
        if (hexStr.length() < 1)
            return null;
        byte[] result = new byte[hexStr.length() / 2];
        for (int i = 0; i < result.length; i++)
        {
            result[i] = (byte) Integer.parseInt(hexStr.substring(i * 2, i * 2 + 2), 16);
        }
        return result;
    }

    /**
     * Entry point: {@code java Server <port>}.
     */
    public static void main(String [] args)
    {
        // FIX: guard against a missing argument instead of throwing
        // ArrayIndexOutOfBoundsException.
        if (args.length < 1)
        {
            System.err.println("Usage: java Server <port>");
            return;
        }
        int port = Integer.parseInt(args[0]);
        try
        {
            Thread t = new Server(port);
            t.start();
        }
        catch (IOException e)
        {
            e.printStackTrace();
        }
    }
}
|
f9e400fa3867aab8001ee21bab86ec06d1a6197b
|
[
"Markdown",
"Java"
] | 2
|
Markdown
|
Jeffery-Liu/Asymmetric-Symmetric-Key-Encryption
|
a14c24367e161d39c68d804196a6a47d3ae2a6b7
|
858bbd29600e2bb1514c52ef83c2d4eff3c5d72f
|
refs/heads/master
|
<file_sep>package com.redemption.hair.lowCNKI.controller;
import com.alibaba.fastjson.JSON;
import com.redemption.hair.lowCNKI.DAO.*;
import com.redemption.hair.lowCNKI.model.*;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@Controller
public class testController {
    // NOTE(review): class name should be TestController per Java conventions;
    // kept as-is so component scanning and existing references stay valid.
    @Autowired
    Co_expertsDAO co_expertsDAO;
    @Autowired
    ExpertsDAO expertsDAO;
    @Autowired
    Paper_masterDAO paper_masterDAO;
    @Autowired
    Paper_journalDAO paper_journalDAO;
    @Autowired
    Paper_meetingDAO paper_meetingDAO;
    @Autowired
    PatentsDAO patentsDAO;
    @Autowired
    Bdxs_paperDAO bdxs_paperDAO;
    @Autowired
    Bdxs_authorDAO bdxs_authorDAO;
    @Autowired
    Patent_CNKIDAO patent_cnkiDAO;
    @Autowired
    UsersDAO usersDAO;

    /** Landing page: renders the search view. */
    @RequestMapping(path = {"/", "/index"}, method = {RequestMethod.GET})
    public String index() {
        return "search";
    }

    /**
     * Ad-hoc DAO smoke test: looks up an expert id by (partial) name and
     * returns the institution of the last master paper found for that id.
     */
    @RequestMapping(path = {"/blabla"}, method = {RequestMethod.GET})
    @ResponseBody
    public String funcForTest(Model model) {
        int rid = expertsDAO.getIdbyName("原");
        List<Paper_master> papers = paper_masterDAO.getPaperByRid(rid);
        System.out.println("-------a------");
        String str = "";
        for (Paper_master t : papers) {
            str = t.getInstitution();
        }
        return str;
    }

    /**
     * Solr connectivity smoke test against the lowCNKI core: runs one
     * title query and prints author/title of each hit.
     */
    @RequestMapping(path = {"/testSolr"}, method = {RequestMethod.GET})
    @ResponseBody
    public String testSolr() {
        final String SOLR_URL = "http://10.135.204.4:8983/solr/lowCNKI";
        HttpSolrClient client = new HttpSolrClient.Builder(SOLR_URL).build();
        SolrQuery query = new SolrQuery();
        query.setRows(10);
        query.setStart(0);
        query.setHighlight(true);
        String keyword = "人工智能";
        query.set("q", "Title_paper:" + keyword);
        QueryResponse response = null;
        try {
            response = client.query(query);
        } catch (SolrServerException | IOException e) {
            e.printStackTrace();
        } finally {
            // FIX: the client was never closed — connection leak on every call.
            try {
                client.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        System.out.println(response);
        // FIX: previously response.getResults() was dereferenced even after a
        // failed query, throwing a NullPointerException.
        if (response == null) {
            return "123";
        }
        SolrDocumentList list = response.getResults();
        for (SolrDocument solrDocument : list) {
            String authorName = (String) solrDocument.getFirstValue("AuthorName_paper");
            String title_paper = (String) solrDocument.getFirstValue("Title_paper");
            System.out.println("\nauthorName: " + authorName + "\ntitle_paper: " + title_paper);
        }
        return "123";
    }

    /** Renders the test view. */
    @RequestMapping(path = {"/test"}, method = {RequestMethod.GET})
    public String test() {
        return "test";
    }

    /** Renders the expert detail view. */
    @RequestMapping(path = {"/expertInfo"}, method = {RequestMethod.GET})
    public String expertInfo() {
        return "expert";
    }

    /** Renders the charge (top-up) view. */
    @RequestMapping(path = {"/charge"}, method = {RequestMethod.GET})
    public String charge() {
        return "charge";
    }

    /** Renders the login view. */
    @RequestMapping(path = {"/login"}, method = {RequestMethod.GET})
    public String testLogin() {
        return "login";
    }

    /** Renders the institution view. */
    @RequestMapping(path = {"/institution"}, method = {RequestMethod.GET})
    public String institution() {
        return "insit";
    }

    /** Renders the search view (FreeMarker template test). */
    @RequestMapping(path = {"/search"}, method = {RequestMethod.GET})
    public String testFreeMarker() {
        return "search";
    }

    /** UsersDAO smoke test: prints a marker when the user does not exist. */
    @RequestMapping(path = {"/testblad"}, method = RequestMethod.GET)
    @ResponseBody
    public String testbla() {
        Users user = usersDAO.selectByName("gbodjf");
        if (user == null) {
            System.out.print("1321");
        }
        return "123";
    }
}
<file_sep>package com.redemption.hair.lowCNKI.model;
/**
 * Row model for the Experts table
 * (columns: id, contact, constitution, name, sex, age, major).
 * Mutable JavaBean — default constructor plus accessor pairs — as required by
 * the MyBatis result mapping in ExpertsDAO.
 */
public class Experts {
    private int id;              // primary key
    private String contact;      // contact details
    private String constitution; // affiliated institution (column name kept as-is)
    private String name;
    private String sex;
    private int age;
    private String major;

    public int getId() { return id; }
    public void setId(int id) { this.id = id; }

    public String getContact() { return contact; }
    public void setContact(String contact) { this.contact = contact; }

    public String getConstitution() { return constitution; }
    public void setConstitution(String constitution) { this.constitution = constitution; }

    public String getName() { return name; }
    public void setName(String name) { this.name = name; }

    public String getSex() { return sex; }
    public void setSex(String sex) { this.sex = sex; }

    public int getAge() { return age; }
    public void setAge(int age) { this.age = age; }

    public String getMajor() { return major; }
    public void setMajor(String major) { this.major = major; }
}
<file_sep>package com.redemption.hair.lowCNKI.DAO;
import org.apache.ibatis.annotations.Mapper;
@Mapper
// MyBatis mapper stub for the Tags table (columns: id, name, state).
// Only shared SQL-fragment constants are declared; no query methods exist yet.
public interface TagsDAO {
    String TABLE_NAME = " Tags ";  // padded with spaces for concatenation inside @Select arrays
    String SELECT_FIELDS = "id, name, state";
}
<file_sep>package com.redemption.hair.lowCNKI.controller;
import com.redemption.hair.lowCNKI.DAO.Bdxs_authorDAO;
import com.redemption.hair.lowCNKI.DAO.Bdxs_paperDAO;
import com.redemption.hair.lowCNKI.model.Bdxs_paper;
import com.redemption.hair.lowCNKI.model.HostHolder;
import com.redemption.hair.lowCNKI.service.SolrService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import java.util.ArrayList;
import java.util.List;
@Controller
public class PaperDetailsController {
    @Autowired
    Bdxs_paperDAO bdxs_paperDAO;
    @Autowired
    HostHolder hostHolder;
    @Autowired
    SolrService solrService;
    @Autowired
    Bdxs_authorDAO bdxs_authorDAO;

    /**
     * Paper detail page: loads the paper, queries Solr once for related papers
     * sharing the title, resolves the author's scholar id (best effort) and
     * fills pagination attributes for the "essay" view.
     *
     * @param paperId id of the paper to display
     * @param page    1-based page index of the related-paper list
     */
    @RequestMapping(path = {"/essay"}, method = RequestMethod.GET)
    public String essay(Model model, @RequestParam("paperId")int paperId, @RequestParam("page")int page) throws Exception {
        Bdxs_paper paper = bdxs_paperDAO.getPaperById(paperId);
        model.addAttribute("essayInfo", paper);
        // FIX: the identical Solr query used to be issued twice (once for the
        // list, once for the count); run it once and reuse the result map.
        java.util.Map<String, ?> search = solrService.searchPaper("title", paper.getTitle(), (page - 1) * 10, 10);
        List<Bdxs_paper> list = (List<Bdxs_paper>) search.get("paperList");
        // Reference count shown on the page, capped at 10.
        int refNumber = Math.min(Integer.parseInt(paper.getCited()) / 10, 10);
        model.addAttribute("refNumber", refNumber);
        model.addAttribute("user", hostHolder.getUser());
        String ScholarID = "CN-B07300AJ";  // fallback scholar id when lookup fails
        try {
            List<String> slist = bdxs_authorDAO.getAuthorIdName(paper.getAuthorName());
            if (slist != null)
                ScholarID = slist.get(0);
        }
        catch (Exception ex) {
            ScholarID = " ";  // best effort — keep rendering the page
        }
        model.addAttribute("refEssayList", list);
        model.addAttribute("scholarId", ScholarID);
        long Num = (long) search.get("num");
        long pageNum = (long) (Math.ceil(Num / 10.0));
        // NOTE(review): the left edge tests page-5 but only moves page-2, which
        // looks inconsistent with pageRight — kept as-is to preserve behavior.
        long pageLeft = (page - 5) >= 1 ? page - 2 : 1;
        long pageRight = (page + 5) <= pageNum ? page + 5 : pageNum;
        model.addAttribute("pageCur", page);
        model.addAttribute("pageLeft", pageLeft);
        model.addAttribute("pageRight", pageRight);
        return "essay";
    }
}
<file_sep>package com.redemption.hair.lowCNKI.DAO;
import com.redemption.hair.lowCNKI.model.Patents;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Select;
import java.util.List;
@Mapper
// MyBatis mapper for the Patents table.
public interface PatentsDAO {
    String TABLE_NAME = " Patents ";
    String SELECT_FIELDS = "pid, title, rid, application_date, summary, class_number, transfer, publication_date";

    /**
     * Fuzzy title search.
     * FIX: locate('title','title') compared two string literals, which is
     * always true, so every row matched regardless of the argument. The
     * parameter is now bound with #{title}, matching the working pattern
     * already used in Patent_CNKIDAO.
     */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where locate(#{title},title)>0"})
    List<Patents> getPatentsByTitle(String title);

    /** Patents of one researcher. FIX: exact match on the integer key instead of the literal-vs-literal locate. */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where rid=#{rid}"})
    List<Patents> getPatentsByRid(int rid);

    /** Keyword search within the summary column (same binding fix). */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where locate(#{summary},summary)>0"})
    List<Patents> getPatentsByKeyword(String summary);
}
<file_sep>package com.redemption.hair.lowCNKI.DAO;
import org.apache.ibatis.annotations.Mapper;
@Mapper
// MyBatis mapper stub for the Messages table; holds shared SQL fragments only.
public interface MessagesDAO {
    String TABLE_NAME = " Messages ";  // padded for concatenation inside @Select arrays
    // NOTE(review): "date" is a reserved word in several SQL dialects — confirm
    // quoting/backticks before using this fragment in a query.
    String SELECT_FIELDS = "message_id, send_id, rec_id, title, state, date, message";
}
<file_sep>package com.redemption.hair.lowCNKI.model;
/**
 * Join-table model linking an expert to a tag
 * (columns: expert_id, tag_id).
 */
public class Expert_tag {
    private int expert_id;
    private int tag_id;

    public Expert_tag() {
    }

    public int getExpert_id() { return expert_id; }
    public void setExpert_id(int expert_id) { this.expert_id = expert_id; }

    public int getTag_id() { return tag_id; }
    public void setTag_id(int tag_id) { this.tag_id = tag_id; }
}
<file_sep>package com.redemption.hair.lowCNKI.DAO;
import org.apache.ibatis.annotations.Mapper;
@Mapper
// MyBatis mapper stub for the Co_institution join table
// (expert -> cooperating institution); constants only, no queries yet.
public interface Co_institutionDAO {
    String TABLE_NAME = " Co_institution ";  // padded for concatenation inside @Select arrays
    String SELECT_FIELDS = "expert_id, co_constitution";
}
<file_sep>package com.redemption.hair.lowCNKI.controller;
import com.alibaba.fastjson.JSON;
import com.redemption.hair.lowCNKI.DAO.Wf_organizationDAO;
import com.redemption.hair.lowCNKI.model.Wf_organization;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
@Controller
public class InstitutionController {
    @Autowired
    Wf_organizationDAO wf_organizationDAO;

    /**
     * Institution detail page. The incoming name is normalized down to
     * "<university>大学" (everything after the first occurrence of "大学" is
     * dropped) before being used as the lookup key.
     *
     * @param institution raw institution string from the request
     */
    @RequestMapping(path = {"/institutionInfo"}, method = {RequestMethod.GET})
    public String institutionInfo(Model model,
                                  @RequestParam("institution")String institution){
        int index = institution.indexOf("大学");
        String name = institution;
        // FIX: indexOf returns -1 when "大学" is absent; the old check
        // (index != 0) then executed substring(1) and mangled the name.
        // Only trim when the marker was actually found.
        if (index != -1) {
            String substr = institution.substring(index + 2);
            // NOTE(review): replace() removes every occurrence of the suffix;
            // fine as long as the suffix is unique within the name.
            name = institution.replace(substr, "");
        }
        Wf_organization wf = wf_organizationDAO.getOrganizationByName(name);
        // FIX: guard against an unknown institution — the old code dereferenced
        // a null DAO result and threw a NullPointerException.
        if (wf != null) {
            wf.setNum_cited(wf_organizationDAO.getNum_citedByName(name));
            wf.setNum_papers(wf_organizationDAO.getNum_papersByName(name));
            wf.setCore_inclusion(wf_organizationDAO.getCore_inclusionByName(name));
            wf.setHighest_cited(wf_organizationDAO.getHighest_citedByName(name));
            wf.setNum_experts(wf_organizationDAO.getNum_expertsByName(name));
            wf.setRepre_author(wf_organizationDAO.getRepre_authorByName(name));
        }
        // NOTE(review): the view may receive null here — confirm the "insit"
        // template tolerates a missing institution.
        model.addAttribute("institution", wf);
        return "insit";
    }
}
<file_sep>package com.redemption.hair.lowCNKI.model;
import java.sql.Timestamp;
public class Patents {
private int pid;
private int rid;
private String title;
private Timestamp application_date;
private String summary;
private String class_number;
private int transfer;
private Timestamp publication_date;
public Patents() {
}
public Timestamp getApplication_date() {
return application_date;
}
public void setApplication_date(Timestamp application_date) {
this.application_date = application_date;
}
public String getClass_number() {
return class_number;
}
public void setClass_number(String class_number) {
this.class_number = class_number;
}
public int getPid() {
return pid;
}
public void setPid(int pid) {
this.pid = pid;
}
public int getRid() {
return rid;
}
public void setRid(int rid) {
this.rid = rid;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getSummary() {
return summary;
}
public void setSummary(String summary) {
this.summary = summary;
}
public int getTransfer() {
return transfer;
}
public void setTransfer(int transfer) {
this.transfer = transfer;
}
public Timestamp getPublication_date() {
return publication_date;
}
public void setPublication_date(Timestamp publication_date) {
this.publication_date = publication_date;
}
}
<file_sep>package com.redemption.hair.lowCNKI.DAO;
import com.redemption.hair.lowCNKI.model.Paper_meeting;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Select;
import java.util.List;
@Mapper
// MyBatis mapper for the Paper_meeting table (conference papers).
public interface Paper_meetingDAO {
    String TABLE_NAME = " Paper_meeting ";
    // NOTE(review): "references" (and possibly "time") are reserved words in
    // MySQL — confirm the actual column names or add backticks.
    String SELECT_FIELDS = "lid, title, rid, summary, keywords, time, page_number, cited_times, class_number, download_times, references, meeting_name, meeting_place, meeting_time";

    /**
     * Fuzzy title search.
     * FIX: locate('title','title') compared two string literals (always true,
     * so every row matched); the argument is now bound with #{title}, matching
     * the working pattern in Patent_CNKIDAO.
     */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where locate(#{title},title)>0"})
    List<Paper_meeting> getMeetingByTitle(String title);

    /** Papers of one researcher. FIX: exact match on the integer key instead of the literal-vs-literal locate. */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where rid=#{rid}"})
    List<Paper_meeting> getMeetingByRid(int rid);

    /** Keyword search within the keywords column (same binding fix). */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where locate(#{keywords},keywords)>0"})
    List<Paper_meeting> getMeetingByKeyword(String keywords);
}
<file_sep>package com.redemption.hair.lowCNKI.model;
/**
 * Join-table model for expert co-authorship
 * (columns: expert_id, co_expert_id).
 */
public class Co_experts {
    private int expert_id;
    private int co_expert_id;

    public Co_experts() {
    }

    public int getExpert_id() { return expert_id; }
    public void setExpert_id(int expert_id) { this.expert_id = expert_id; }

    public int getCo_expert_id() { return co_expert_id; }
    public void setCo_expert_id(int co_expert_id) { this.co_expert_id = co_expert_id; }
}
<file_sep>package com.redemption.hair.lowCNKI.model;
/**
 * Join-table model linking a patent to a tag
 * (columns: patent_id, tag_id).
 */
public class Patent_tag {
    private int patent_id;
    private int tag_id;

    public Patent_tag() {
    }

    public int getPatent_id() { return patent_id; }
    public void setPatent_id(int patent_id) { this.patent_id = patent_id; }

    public int getTag_id() { return tag_id; }
    public void setTag_id(int tag_id) { this.tag_id = tag_id; }
}
<file_sep>package com.redemption.hair.lowCNKI.DAO;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Select;
@Mapper
// MyBatis mapper for the Co_experts join table: who co-authored with whom.
public interface Co_expertsDAO {
    String TEST_FIELD = " co_expert_id ";
    String TABLE_NAME = " Co_experts ";

    /**
     * Returns the co-expert id for the given expert.
     * FIX: locate('expert_id','co_expert_id') compared two string literals and
     * never used the argument; filter on the expert_id column with a bound
     * parameter instead (pattern as in Patent_CNKIDAO).
     * NOTE(review): the result maps to a single int — this fails when an expert
     * has several co-authors; consider returning List&lt;Integer&gt;.
     */
    @Select({"select", TEST_FIELD, "from", TABLE_NAME, "where expert_id=#{expert_id}"})
    int selectCoIdByExpertId(int expert_id);
}<file_sep>package com.redemption.hair.lowCNKI.model;
import java.sql.Timestamp;
import java.util.Date;
public class Paper_master {
private int lid;
private String title;
private int rid;
private String summary;
private String keywords;
private Timestamp time_;
private int page_number;
private String cited_times;
private String class_number;
private String download_times;
private String references_;
private String institution;
private String year;
private String tutor;
private String url;
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public int getLid() {
return lid;
}
public void setLid(int lid) {
this.lid = lid;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public int getRid() {
return rid;
}
public void setRid(int rid) {
this.rid = rid;
}
public String getSummary() {
return summary;
}
public void setSummary(String summary) {
this.summary = summary;
}
public String getKeywords() {
return keywords;
}
public void setKeywords(String keywords) {
this.keywords = keywords;
}
public int getPage_number() {
return page_number;
}
public void setPage_number(int page_number) {
this.page_number = page_number;
}
public String getCited_times() {
return cited_times;
}
public void setCited_times(String cited_times) {
this.cited_times = cited_times;
}
public String getClass_number() {
return class_number;
}
public void setClass_number(String class_number) {
this.class_number = class_number;
}
public String getDownload_times() {
return download_times;
}
public void setDownload_times(String download_times) {
this.download_times = download_times;
}
public Timestamp getTime_() {
return time_;
}
public void setTime_(Timestamp time_) {
this.time_ = time_;
}
public String getReferences_() {
return references_;
}
public void setReferences_(String references_) {
this.references_ = references_;
}
public String getInstitution() {
return institution;
}
public void setInstitution(String institution) {
this.institution = institution;
}
public String getYear() {
return year;
}
public void setYear(String year) {
this.year = year;
}
public String getTutor() {
return tutor;
}
public void setTutor(String tutor) {
this.tutor = tutor;
}
}
<file_sep>package com.redemption.hair.lowCNKI.model;
/**
 * Account record for a site user.
 * Mutable JavaBean used by UsersDAO for result mapping.
 */
public class Users {
    private int id;           // primary key
    private String avator;    // avatar image path (field name kept for DB column mapping)
    private String nickname;
    // NOTE(review): double is a poor type for money — consider BigDecimal or
    // scaled long cents; kept to preserve the existing DB mapping.
    private double balance;
    private String password;  // stored password; salted with `salt`
    private String salt;

    public Users() {
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getAvator() {
        return avator;
    }

    public void setAvator(String avator) {
        this.avator = avator;
    }

    public String getNickname() {
        return nickname;
    }

    public void setNickname(String nickname) {
        this.nickname = nickname;
    }

    public double getBalance() {
        return balance;
    }

    public void setBalance(double balance) {
        this.balance = balance;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        // FIX: the previous body was "this.password = <PASSWORD>;" — a
        // redaction artifact that does not compile; restore the assignment.
        this.password = password;
    }

    public String getSalt() {
        return salt;
    }

    public void setSalt(String salt) {
        this.salt = salt;
    }
}
<file_sep>package com.redemption.hair.lowCNKI.DAO;
import org.apache.ibatis.annotations.Mapper;
@Mapper
// MyBatis mapper stub for the Managers table (admin accounts);
// constants only, no query methods yet.
public interface ManagersDAO {
    String TABLE_NAME = " Managers ";  // padded for concatenation inside @Select arrays
    String SELECT_FIELDS = "id, account, password, salt";
}
<file_sep>package com.redemption.hair.lowCNKI.DAO;
import com.redemption.hair.lowCNKI.model.Experts;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Select;
import java.util.List;
@Mapper
// MyBatis mapper for the Experts table.
public interface ExpertsDAO {
    String TABLE_NAME = " Experts ";
    String SELECT_FIELDS = " id, contact, constitution, name, sex, age, major";

    /**
     * Fuzzy lookup by name.
     * FIX: locate('name','name') compared two string literals (always true, so
     * every row matched); the argument is now bound with #{name}, matching the
     * working pattern in Patent_CNKIDAO.
     * NOTE(review): maps to a single Experts — this fails when several names
     * match; confirm uniqueness or add a LIMIT.
     */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where locate(#{name},name)>0"})
    Experts getExpertsByName(String name);

    /** Fuzzy id lookup by name (same binding fix; same single-row caveat). */
    @Select({"select id from ", TABLE_NAME, " where locate(#{name},name)>0"})
    int getIdbyName(String name);

    /** Primary-key lookup. */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where id=#{id}"})
    Experts getExpertsById(int id);

    /** Exact match on major. */
    @Select({"select", SELECT_FIELDS," from ", TABLE_NAME, " where Major=#{Major}"})
    List<Experts> getExpertsByMajor(String Major);

    /** Fuzzy lookup by institution (column is named "constitution"; same binding fix). */
    @Select({"select", SELECT_FIELDS," from ", TABLE_NAME, " where locate(#{constitution},constitution)>0"})
    List<Experts> getExpertsByInstitution(String constitution);
}
<file_sep>package com.redemption.hair.lowCNKI.model;
/**
 * Row model for a crawled paper (Bdxs_paper table): bibliographic fields,
 * source/download URLs and journal metadata. Mutable JavaBean for MyBatis
 * result mapping and for indexing into Solr.
 */
public class Bdxs_paper {
    private int paperId;            // primary key
    private String title;
    private String sourceName;
    private String sourceUrl;
    private String authorName;
    private String summary;
    private String keywords;
    private String doi;
    private String cited;           // citation count, stored as a string
    private String downLoadUrl;
    private String journalTitle;
    private String journalPicture;
    private String fields;          // subject fields
    private int pub_year;

    public int getPaperId() { return paperId; }
    public void setPaperId(int paperId) { this.paperId = paperId; }

    public String getTitle() { return title; }
    public void setTitle(String title) { this.title = title; }

    public String getSourceName() { return sourceName; }
    public void setSourceName(String sourceName) { this.sourceName = sourceName; }

    public String getSourceUrl() { return sourceUrl; }
    public void setSourceUrl(String sourceUrl) { this.sourceUrl = sourceUrl; }

    public String getAuthorName() { return authorName; }
    public void setAuthorName(String authorName) { this.authorName = authorName; }

    public String getSummary() { return summary; }
    public void setSummary(String summary) { this.summary = summary; }

    public String getKeywords() { return keywords; }
    public void setKeywords(String keywords) { this.keywords = keywords; }

    public String getDoi() { return doi; }
    public void setDoi(String doi) { this.doi = doi; }

    public String getCited() { return cited; }
    public void setCited(String cited) { this.cited = cited; }

    public String getDownLoadUrl() { return downLoadUrl; }
    public void setDownLoadUrl(String downLoadUrl) { this.downLoadUrl = downLoadUrl; }

    public String getJournalTitle() { return journalTitle; }
    public void setJournalTitle(String journalTitle) { this.journalTitle = journalTitle; }

    public String getJournalPicture() { return journalPicture; }
    public void setJournalPicture(String journalPicture) { this.journalPicture = journalPicture; }

    public String getFields() { return fields; }
    public void setFields(String fields) { this.fields = fields; }

    public int getPub_year() { return pub_year; }
    public void setPub_year(int pub_year) { this.pub_year = pub_year; }
}
<file_sep>package com.redemption.hair.lowCNKI.model;
import java.sql.Timestamp;
public class Paper_journal {
private int lid;
private String title;
private int rid;
private String summary;
private String keywords;
private Timestamp time_;
private String cited_times;
private String class_number;
private String references_;
private String journal_title;
private String page_number;
private String ISSN;
public Paper_journal() {
}
public int getLid() {
return lid;
}
public void setLid(int lid) {
this.lid = lid;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public int getRid() {
return rid;
}
public void setRid(int rid) {
this.rid = rid;
}
public String getSummary() {
return summary;
}
public void setSummary(String summary) {
this.summary = summary;
}
public String getKeywords() {
return keywords;
}
public void setKeywords(String keywords) {
this.keywords = keywords;
}
public Timestamp getTime_() {
return time_;
}
public void setTime_(Timestamp time_) {
this.time_ = time_;
}
public String getCited_times() {
return cited_times;
}
public void setCited_times(String cited_times) {
this.cited_times = cited_times;
}
public String getClass_number() {
return class_number;
}
public void setClass_number(String class_number) {
this.class_number = class_number;
}
public String getReferences_() {
return references_;
}
public void setReferences_(String references_) {
this.references_ = references_;
}
public String getJournal_title() {
return journal_title;
}
public void setJournal_title(String journal_title) {
this.journal_title = journal_title;
}
public String getPage_number() {
return page_number;
}
public void setPage_number(String page_number) {
this.page_number = page_number;
}
public String getISSN() {
return ISSN;
}
public void setISSN(String ISSN) {
this.ISSN = ISSN;
}
}
<file_sep>package com.redemption.hair.lowCNKI.model;
import java.util.Date;
import java.sql.Timestamp;
public class Token {
private int id;
private int user_id;
private String token;
private Date token_time;
private int token_valid;
public Token() {
}
public int getId() {return id;}
public void setId(int id) { this.id=id;}
public int getUser_id() {
return user_id;
}
public void setUser_id(int user_id) {
this.user_id = user_id;
}
public String getToken() {
return token;
}
public void setToken(String token) {
this.token = token;
}
public Date getToken_time() {
return token_time;
}
public void setToken_time(Date token_time) {
this.token_time = token_time;
}
public int getToken_valid() {
return token_valid;
}
public void setToken_valid(int token_valid) {
this.token_valid = token_valid;
}
}
<file_sep>package com.redemption.hair.lowCNKI.model;
import org.springframework.stereotype.Component;
@Component
public class HostHolder {
    // Thread-local holder for the user bound to the current request thread.
    // NOTE(review): static means the holder is shared across all HostHolder
    // instances; kept as-is (the bean is a singleton anyway) to preserve behavior.
    private static final ThreadLocal<Users> users = new ThreadLocal<Users>();

    /** @return the user bound to the current thread, or null when none. */
    public Users getUser() {
        return users.get();
    }

    /** Binds the given user to the current thread. */
    public void setUser(Users user) {
        users.set(user);
    }

    /**
     * Clears the binding; must be called at the end of each request so pooled
     * threads do not leak a previous user's identity.
     */
    public void clear() {
        users.remove(); // FIX: removed the stray empty statement (";;")
    }
}<file_sep>package com.redemption.hair.lowCNKI.DAO;
import com.redemption.hair.lowCNKI.model.Experts;
import com.redemption.hair.lowCNKI.model.Paper_journal;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Select;
import java.util.List;
@Mapper
// MyBatis mapper for the Paper_journal table (journal articles).
public interface Paper_journalDAO {
    String TABLE_NAME = " Paper_journal ";
    // NOTE(review): SELECT_FIELDS lists download_times but the Paper_journal
    // model has no such property — confirm the result mapping before relying on it.
    String SELECT_FIELDS = " lid,title,rid,summary,keywords,time_,cited_times,class_number,download_times,references_,journal_title,page_number,ISSN";

    /**
     * Fuzzy title search.
     * FIX: locate('title','title') compared two string literals (always true,
     * so every row matched); the argument is now bound with #{title}, matching
     * the working pattern in Patent_CNKIDAO.
     */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where locate(#{title},title)>0"})
    List<Paper_journal> getJournalByTitle(String title);

    /** Articles of one researcher. FIX: exact match on the integer key instead of the literal-vs-literal locate. */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where rid=#{rid}"})
    List<Paper_journal> getJournalByRid(int rid);

    /** Keyword search within the keywords column (same binding fix). */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where locate(#{keywords},keywords)>0"})
    List<Paper_journal> getJournalByKeyword(String keywords);
}
<file_sep>package com.redemption.hair.lowCNKI.DAO;
import com.redemption.hair.lowCNKI.model.Bdxs_paper;
import com.redemption.hair.lowCNKI.model.Patent_CNKI;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Select;
import java.util.List;
@Mapper
// MyBatis mapper for the Patent_CNKI table (patents crawled from CNKI).
// Unlike the older DAOs, these queries bind their arguments correctly with #{...}.
public interface Patent_CNKIDAO {
    String TABLE_NAME = " Patent_CNKI ";  // padded for concatenation inside @Select arrays
    String SELECT_FIELDS = "name,apply_num,apply_date,pub_number,pub_date,apply_name,address,invent_name,agent_name,agent_man,code,abstract_,pages,typenum";

    // --- Fuzzy (substring) searches via MySQL LOCATE() ---

    // Patents whose name contains the given substring.
    @Select({"select",SELECT_FIELDS ,"from", TABLE_NAME, "where locate(#{name},name)>0"})
    List<Patent_CNKI> getPatentByName(String name);
    // Patents whose inventor name contains the given substring.
    @Select({"select",SELECT_FIELDS ,"from", TABLE_NAME, "where locate(#{invent_name},invent_name)>0"})
    List<Patent_CNKI> getPatentByInventName(String invent_name);
    // Patents whose applicant name contains the given substring.
    @Select({"select",SELECT_FIELDS ,"from", TABLE_NAME, "where locate(#{apply_name},apply_name)>0"})
    List<Patent_CNKI> getPatentByApply_name(String apply_name);
    // Patents whose agent name contains the given substring.
    @Select({"select",SELECT_FIELDS ,"from", TABLE_NAME, "where locate(#{agent_name},agent_name)>0"})
    List<Patent_CNKI> getPatentByAgent_name(String agent_name);
    // Keyword search within the abstract column (named abstract_ to dodge the SQL keyword).
    @Select({"select",SELECT_FIELDS ,"from", TABLE_NAME, "where locate(#{abstract_},abstract_)>0"})
    List<Patent_CNKI> getPatentByKeyword(String abstract_);

    // --- Exact lookups keyed by publication number ---
    // NOTE(review): these assume pub_number is unique; each maps to a single row.

    @Select({"select",SELECT_FIELDS ,"from", TABLE_NAME, "where pub_number = #{pub_number}"})
    Patent_CNKI getPatentByPubNum(String pub_number);
    // Single-column accessors by publication number; consider reusing
    // getPatentByPubNum instead of one query per column.
    @Select({"select apply_num from", TABLE_NAME, "where pub_number = #{pub_number}"})
    String getPatentapply_numByPubNum(String pub_number);
    @Select({"select apply_date from", TABLE_NAME, "where pub_number = #{pub_number}"})
    String getPatentapply_dateByPubNum(String pub_number);
    @Select({"select pub_date from", TABLE_NAME, "where pub_number = #{pub_number}"})
    String getPatentpub_dateByPubNum(String pub_number);
    @Select({"select apply_name from", TABLE_NAME, "where pub_number = #{pub_number}"})
    String getPatentapply_nameByPubNum(String pub_number);
    @Select({"select address from", TABLE_NAME, "where pub_number = #{pub_number}"})
    String getPatentaddressByPubNum(String pub_number);
    @Select({"select invent_name from", TABLE_NAME, "where pub_number = #{pub_number}"})
    String getPatentinvent_nameByPubNum(String pub_number);
    @Select({"select agent_name from", TABLE_NAME, "where pub_number = #{pub_number}"})
    String getPatentagent_nameByPubNum(String pub_number);
    @Select({"select agent_man from", TABLE_NAME, "where pub_number = #{pub_number}"})
    String getPatentagent_manByPubNum(String pub_number);
    @Select({"select abstract_ from", TABLE_NAME, "where pub_number = #{pub_number}"})
    String getPatentabstract_ByPubNum(String pub_number);
    @Select({"select pages from", TABLE_NAME, "where pub_number = #{pub_number}"})
    String getPatentpagesByPubNum(String pub_number);
    // NOTE(review): method name looks like a copy-paste slip ("agent_man" fused
    // into the typenum accessor); kept because callers may reference it.
    @Select({"select typenum from", TABLE_NAME, "where pub_number = #{pub_number}"})
    String getPatenttypenumagent_manByPubNum(String pub_number);
}
<file_sep>package com.redemption.hair.lowCNKI.model;
import java.util.Timer;
import java.sql.Timestamp;
public class Applies {
private int id;
private int expert_id;
private Timestamp created_at;
private String content;
private String user_name;
private String expert_name;
public Applies() {
}
public int getId() {
return id;
}
public int getExpert_id() {
return expert_id;
}
public Timestamp getCreated_at() {
return created_at;
}
public String getContent() {
return content;
}
public String getUser_name() {
return user_name;
}
public String getExpert_name() {
return expert_name;
}
public void setId(int id) {
this.id = id;
}
public void setExpert_id(int expert_id) {
this.expert_id = expert_id;
}
public void setCreated_at(Timestamp created_at) {
this.created_at = created_at;
}
public void setContent(String content) {
this.content = content;
};
public void setUser_name(String user_name) {
this.user_name = user_name;
}
public void setExpert_name(String expert_name) {
this.expert_name = expert_name;
}
}
<file_sep>package com.redemption.hair.lowCNKI.utils;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Service;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import java.util.ArrayList;
import java.util.List;
@Service
public class JedisAdapter implements InitializingBean {
    private JedisPool pool;

    @Override
    public void afterPropertiesSet() throws Exception {
        // Default pool configuration (localhost:6379).
        pool = new JedisPool();
    }

    /**
     * Prepends one search term to the user's search-history list.
     */
    public void addSearchHistory(String userId, String title_paper) {
        // Fixed: the connection was never returned to the pool, leaking one
        // Jedis instance per call. Jedis is Closeable, so try-with-resources
        // hands it back automatically.
        try (Jedis jedis = pool.getResource()) {
            jedis.lpush("SEARCHHISTORY:" + userId, title_paper);
        }
    }

    /**
     * Returns the user's most recent search terms (at most 10), or an empty
     * list when no history exists.
     */
    public List<String> getSearchHistory(String userId) {
        String key = "SEARCHHISTORY:" + userId;
        try (Jedis jedis = pool.getResource()) {
            if (!jedis.exists(key)) {
                return new ArrayList<>();
            }
            // Fixed off-by-one: LRANGE's end index is inclusive, so the
            // original (end = 10) could return 11 entries; end = 9 caps the
            // result at the 10 most recent. LRANGE clamps out-of-range ends,
            // so shorter lists are returned in full. (Presumably a 10-item
            // cap was the intent — confirm with UI.)
            return jedis.lrange(key, 0, 9);
        }
    }
}
<file_sep>package com.redemption.hair.lowCNKI.DAO;
import com.redemption.hair.lowCNKI.model.Paper_master;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Select;
import java.util.List;
@Mapper
public interface Paper_masterDAO {
    String TABLE_NAME = " Paper_master ";
    String SELECT_FIELDS = " lid, title, rid, summary, keywords, time_, page_number, cited_times, class_number, download_times, references_, institution, year, tutor, url ";

    /**
     * Masters' theses belonging to a researcher id.
     *
     * Fixed: every query in this interface used locate('lit','lit')>0 —
     * two identical string literals — which is always true and ignores the
     * method argument, returning the whole table. Arguments are now bound
     * with #{} as in the project's other mappers; the integer rid uses a
     * plain equality test.
     */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where rid = #{rid}"})
    List<Paper_master> getPaperByRid(int rid);

    /** Theses whose title contains the given text. */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where locate(#{title},title)>0"})
    List<Paper_master> getMasterByTitle(String title);

    /** Theses whose keywords contain the given text. */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where locate(#{keywords},keywords)>0"})
    List<Paper_master> getMasterByKeyword(String keywords);

    /** Duplicate of getPaperByRid, kept so existing callers keep working. */
    @Select({"select ", SELECT_FIELDS, " from ", TABLE_NAME, " where rid = #{rid}"})
    List<Paper_master> getMasterByRid(int rid);
}
|
36fa805e7ae0b2731bf384cf44eb47338be843bd
|
[
"Java"
] | 27
|
Java
|
jeager12138/lowCNKI
|
bf9033f47b2faab95aa45de4e0201a33d50c7f90
|
52063d0b54e05c97404012a833c642dec636928c
|
refs/heads/master
|
<repo_name>viktoriatelyatynska/photocloudapp<file_sep>/src/app/components/upload/create-post.controller.js
(function() {
    'use strict';

    // Dialog controller for composing a new post: collects a caption and
    // uploads one or more file attachments before handing the post back to
    // the opener via $mdDialog.
    angular.module('photocloud')
        .controller('CreatePostController', CreatePostController);

    CreatePostController.$inject = ['$scope', '$mdDialog', 'uploadService', 'postsService'];

    function CreatePostController($scope, $mdDialog, uploadService, postsService) {
        var vm = this;

        // Post under construction; attachmentIds mirrors attachments and is
        // what ultimately gets submitted.
        vm.post = {
            caption: "",
            attachments: [],
            attachmentIds: []
        };

        // State of the file currently being browsed/uploaded.
        vm.browsed = {
            isUploading: false,
            file: null
        };

        vm.selectAttachment = function() {
            // NOTE(review): uploadInput is not defined in this file —
            // presumably the browser's named access to an <input id="uploadInput">
            // in the dialog template. Verify against the markup.
            uploadInput.click();
        };

        // Start an upload whenever the browsed file changes.
        $scope.$watch("vm.browsed.file", function(file) {
            if (file) {
                uploadAttachment(file);
            }
        });

        // Close the dialog, resolving with the assembled post.
        vm.createPost = function() {
            $mdDialog.hide(vm.post);
        };

        vm.cancel = function() {
            $mdDialog.cancel();
        };

        // Uploads one file; on success records the attachment and rebuilds
        // the id list, on failure just resets the upload state.
        function uploadAttachment(file) {
            vm.browsed.isUploading = true;
            if (file) {
                uploadService.uploadFile(file)
                    .then(function(response) {
                        vm.browsed.isUploading = false;
                        vm.post.attachments.push(response);
                        vm.post.attachmentIds = vm.post.attachments.map(function(attachment) {
                            return attachment.id;
                        });
                        vm.browsed.file = null;
                    },
                    function(error) {
                        vm.browsed.isUploading = false;
                        vm.browsed.file = null;
                    });
            }
        };
    }
})();
|
141ec1383811fdd9f48e45330567432b0039a9ba
|
[
"JavaScript"
] | 1
|
JavaScript
|
viktoriatelyatynska/photocloudapp
|
87058a63b474451923221e5a8aac088b3cbf698f
|
d7e5554973c88e506433d8d63f01882f7cc0e35c
|
refs/heads/main
|
<repo_name>nordzilla/properties-to-ftl<file_sep>/README.md
# Mozilla Properties-to-Fluent Migration Helper
This is a hacky tool that mostly works.
Effectively, you point it at a JS file in `mozilla-central`,
and it figures out where and how that file uses messages in `.properties` files,
and converts those to Fluent `.ftl` files.
## Setup
```ini
git clone https://github.com/eemeli/properties-to-ftl.git
cd properties-to-ftl
npm install
npm link # for npx
```
## Mapping .properties to .ftl
When migrating legacy messages, multiple things change:
1. The message file extension changes, possibly along with its name.
2. The file's location within `mozilla-central` changes.
3. Message keys change, and often gain an identifying prefix in addition to being kebab-cased.
4. The syntax for referring to variables in messages changes.
5. The JavaScript API for formatting messages changes.
To help with the first three, you need to add some metadata comments to each `.properties` file that you're migrating:
```ini
# FTL path: foo/bar/baz.ftl
# FTL prefix: foobar
```
These comments don't need to be stored in the repo,
but keeping them there might help if a properties file is migrated in multiple commits.
The `FTL path` may use either the repo root or the default locale's directory as its root.
An `FTL prefix` is not required, but if set, may only contain lower-case letters and dashes: `^[a-z-]+$`.
## Command-line arguments
For full usage, run this somewhere in `mozilla-central`:
```ini
npx properties-to-ftl --help
```
When targeting a JS file, it is parsed for `chrome://` references to `.properties` and `.xhtml` files,
which are then parsed in turn.
XHTML may include `<stringbundle>` elements which are detected (and their source `.properties` also parsed),
and properties files may include `FTL path` references, which are also parsed.
All of those files are then modified in-place.
## Your Attention is Required
Because so many things change, it's unlikely that the script will catch everything.
Where possible, a comment `/* L10N-FIXME */` is injected immediately after points in the source that require human attention.
## TODO
- [ ] Migration script generator
- [ ] Better variable renaming
- [ ] Remove `.properties` files when empty & update `jar.mn`
- [ ] Allow targeting `.properties` files directly
- [ ] Tools for mapping `chrome://` references across the repo
- [ ] Some way of FTL path autodiscovery?
<file_sep>/lib/external-refs.js
import { Comment, parse as parseFluent, Resource } from '@fluent/syntax'
import { parseLines } from 'dot-properties'
import { existsSync } from 'fs'
import { readFile } from 'fs/promises'
import { resolve } from 'path'
import { types, visit } from 'recast'
import { resolveChromeUri } from 'resolve-chrome-uri'
import { getFtlMetadata } from './parse-properties.js'
import { parseStringBundleTags } from './parse-xhtml.js'
const n = types.namedTypes
/**
 * Scan a recast-parsed JS AST for chrome:// string literals that point at
 * .properties and .xhtml files, resolve those URIs against the repo root,
 * and load/parse every referenced file. XHTML files are scanned for
 * <stringbundle> tags, whose sources are added to the .properties set.
 */
export async function findExternalRefs(root, ast) {
  /** @type {import('ast-types').NodePath[]} */
  const propertiesUriPaths = []
  /** @type {import('ast-types').NodePath[]} */
  const xhtmlUriPaths = []
  // Collect every chrome:// string literal, bucketed by file extension.
  visit(ast, {
    visitLiteral(path) {
      const { value } = path.node
      if (typeof value === 'string' && value.startsWith('chrome://')) {
        if (value.endsWith('.xhtml')) xhtmlUriPaths.push(path)
        if (value.endsWith('.properties')) propertiesUriPaths.push(path)
      }
      this.traverse(path)
    }
  })

  // De-duplicated .properties URIs; stringbundle tags found below add more.
  const propUris = new Set(propertiesUriPaths.map((path) => path.node.value))

  /**
   * @type {{
   *   uri: string,
   *   path: string,
   *   bundleTags: { id: string, src: string, loc: { start: number, end: number } }[],
   *   src: string
   * }[]}
   */
  const xhtml = []
  for (const uri of new Set(xhtmlUriPaths.map((path) => path.node.value))) {
    const filePaths = await resolveChromeUri(root, uri)
    if (filePaths.size === 0) console.warn(`Unresolved URI: ${uri}`)
    else {
      // One chrome URI may map to several files on disk.
      for (const fp of filePaths) {
        const src = await readFile(fp, 'utf8')
        const bundleTags = parseStringBundleTags(src)
        // Only XHTML files that actually declare string bundles are kept;
        // each bundle's src joins the .properties work list.
        if (bundleTags.length > 0) {
          xhtml.push({ uri, path: fp, bundleTags, src })
          for (const tag of bundleTags) propUris.add(tag.src)
        }
      }
    }
  }

  /**
   * @type {{
   *   uri: string,
   *   path: string,
   *   msgKeys: string[],
   *   migrated: Record<string, string>,
   *   requiresSync: boolean
   *   ftlPath: string | null,
   *   ftlRoot: string | null,
   *   ftlPrefix: string,
   *   ftl: import('@fluent/syntax').Resource,
   *   ast: import('dot-properties').Node[]
   * }[]}
   */
  const properties = []
  for (const uri of propUris) {
    const filePaths = await resolveChromeUri(root, uri)
    if (filePaths.size === 0) console.warn(`Unresolved URI: ${uri}`)
    else {
      for (const fp of filePaths) {
        const src = await readFile(fp, 'utf8')
        // Keep the full token stream (comments, blank lines) so the file
        // can be rewritten in place later.
        const ast = parseLines(src, true)
        const msgKeys = ast
          .filter((node) => node.type === 'PAIR')
          .map((pair) => pair.key)
        // "FTL path"/"FTL prefix" metadata comments in the .properties file
        // decide where migrated messages go.
        const { ftlPath, ftlRoot, ftlPrefix } = getFtlMetadata(fp, ast)
        const ftl = await getFluentResource(ftlRoot, ftlPath)
        properties.push({
          uri,
          path: fp,
          msgKeys,
          migrated: {},
          requiresSync: false,
          ftlPath,
          ftlRoot,
          ftlPrefix,
          ftl,
          ast
        })
      }
    }
  }

  return {
    properties,
    propertiesUriPaths,
    xhtml,
    xhtmlUriPaths
  }
}
// License comment placed at the top of any newly created FTL resource.
const mplLicenseHeader = `
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.`.trim()

/**
 * Load and parse the Fluent resource at root/localPath. When the file does
 * not exist yet, return a fresh resource seeded with the MPL header; when
 * either path component is missing, return null.
 */
async function getFluentResource(root, localPath) {
  if (!root || !localPath) return null
  const path = resolve(root, localPath)
  if (!existsSync(path)) {
    return new Resource([new Comment(mplLicenseHeader)])
  }
  const src = await readFile(path, 'utf8')
  return parseFluent(src, { withSpans: true })
}
<file_sep>/lib/migrate-message.js
import kebabCase from 'lodash.kebabcase'
import { buildFluentMessage } from './build-fluent-message.js'
/**
 * Migrate one legacy .properties key (plus any "key.attr" siblings) into
 * the target FTL resource, removing it from the properties AST and
 * recording the mapping in `migrated`. Returns the new FTL identity.
 *
 * @param {{
 *   uri: string,
 *   migrated: Record<string, { key: string, attr: string | null }>,
 *   ftlPrefix: string | null,
 *   ftl: import('@fluent/syntax').Resource,
 *   ast: import('dot-properties').Node[]
 * }} properties
 * @param {string} propKey
 * @returns {{ key: string, attr: string | null }}
 */
export function migrateMessage({ migrated, ftlPrefix, ftl, ast }, propKey) {
  // Already migrated (e.g. as a sibling of an earlier key) — reuse it.
  const prev = migrated[propKey]
  if (prev) return prev

  // "base.sub" keys become an attribute on the base FTL message.
  let attr = null
  const dot = propKey.indexOf('.')
  if (dot !== -1) {
    attr = kebabCase(propKey.substring(dot + 1))
    propKey = propKey.substring(0, dot)
  }

  let ftlKey = kebabCase(`${ftlPrefix}-${propKey}`)
  const dm = ftlKey.match(/-\d+$/)
  if (dm) {
    // Try to drop numerical suffix
    const bare = ftlKey.substring(0, ftlKey.length - dm[0].length)
    if (!resourceHasKey(ftl, bare)) ftlKey = bare
  }

  // If required, add a numerical suffix
  let n = 1
  while (resourceHasKey(ftl, ftlKey))
    ftlKey = ftlKey.replace(/(-\d+)?$/, `-${++n}`)

  // Extract from properties & add to FTL. Iterate backwards so splicing
  // does not invalidate the loop index.
  const migrate = []
  for (let i = ast.length - 1; i >= 0; --i) {
    const node = ast[i]
    if (
      node.type === 'PAIR' &&
      (node.key === propKey || node.key.startsWith(propKey + '.'))
    ) {
      // Grab preceding comments & empty lines above them, while leaving one
      // after any preceding space-separated comments.
      let prelude = 0
      while (ast[i - prelude - 1]?.type === 'COMMENT') prelude += 1
      while (ast[i - prelude - 1]?.type === 'EMPTY_LINE') prelude += 1
      if (ast[i - prelude - 1]?.type === 'COMMENT') prelude -= 1
      // Move the pair (and its prelude) out of the properties AST.
      migrate.unshift(...ast.splice(i - prelude, prelude + 1))
      i -= prelude
      // Record each sibling's new FTL identity.
      const attr = node.key.substring(propKey.length + 1)
      migrated[node.key] = { key: ftlKey, attr: attr ? kebabCase(attr) : null }
    }
  }
  const ftlMsg = buildFluentMessage(ftlKey, migrate)
  ftl.body.push(ftlMsg)
  return { key: ftlKey, attr }
}
/**
 * True when the Fluent resource already defines a Message with this id.
 */
function resourceHasKey(res, key) {
  return res.body.some(
    (entry) => entry.type === 'Message' && entry.id.name === key
  )
}
<file_sep>/lib/build-fluent-message.js
import {
Attribute,
Comment,
Identifier,
Message,
Pattern,
Placeable,
TextElement,
VariableReference
} from '@fluent/syntax'
import kebabCase from 'lodash.kebabcase'
/**
 * Build a Fluent Message from the properties-file nodes that belong to one
 * logical message: the bare "key" pair becomes the message value, and
 * "key.sub" pairs become kebab-cased attributes. Properties comments are
 * carried over as the message comment.
 *
 * @param {string} msgName
 * @param {import('dot-properties').Node[]} props
 */
export function buildFluentMessage(msgName, props) {
  // Comment lines accumulate here until the next PAIR consumes them.
  let comments = []
  const getComment = () => {
    const content = comments.join('\n').trim()
    comments = []
    return content ? new Comment(content) : null
  }

  const msg = new Message(new Identifier(msgName), null, [])
  for (const node of props) {
    switch (node.type) {
      case 'EMPTY_LINE':
        break
      case 'COMMENT': {
        const line = node.comment.replace(/^[!#]\s*/, '')
        if (!line.startsWith('-*-')) comments.push(line) // ignore mode lines
        break
      }
      case 'PAIR': {
        const comment = getComment()
        const pattern = parseMsgPattern(node.value)
        const dot = node.key.indexOf('.')
        if (dot === -1) {
          // Bare key: this pair is the message's main value.
          msg.value = pattern
          if (comment) msg.comment = comment
        } else {
          // Dotted key: becomes an attribute; its comment is merged into
          // the message comment, prefixed with the attribute name.
          const attrName = kebabCase(node.key.substring(dot + 1))
          msg.attributes.push(new Attribute(new Identifier(attrName), pattern))
          if (comment) {
            comment.content = `.${attrName}: ${comment.content}`
            if (msg.comment) msg.comment.content += `\n${comment.content}`
            else msg.comment = comment
          }
        }
      }
    }
  }
  return msg
}
/**
 * Convert a .properties value into a Fluent Pattern, turning printf-style
 * placeholders (%S or positional %1$S) into numbered variable references
 * (var1, var2, ...). Literal text between placeholders is preserved.
 *
 * @param {string} src
 */
function parseMsgPattern(src) {
  const placeholder = /%(\d\$)?S/g
  const elements = []
  let cursor = 0
  let varNum = 0
  let match
  while ((match = placeholder.exec(src)) !== null) {
    if (match.index > cursor) {
      elements.push(new TextElement(src.slice(cursor, match.index)))
    }
    // Positional form pins the number; bare %S increments it.
    varNum = match[1] ? parseInt(match[1]) : varNum + 1
    const id = new Identifier(`var${varNum}`)
    elements.push(new Placeable(new VariableReference(id)))
    cursor = match.index + match[0].length
  }
  if (cursor < src.length) {
    elements.push(new TextElement(src.slice(cursor)))
  }
  return new Pattern(elements)
}
|
bf64d497fc4693665811831d39cdff4405415d7e
|
[
"Markdown",
"JavaScript"
] | 4
|
Markdown
|
nordzilla/properties-to-ftl
|
7c601e564278aa25cdc434a6d8efb5e07379a301
|
c9feeba1b249b9cb308079d294bd1299866c3f48
|
refs/heads/master
|
<repo_name>PIGNAL/TestRazor<file_sep>/TestRazor/TestRazor/Models/GestorProductos.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using TestRazor.Models.AccesoDatos;
namespace TestRazor.Models
{
/// <summary>
/// Service layer for products; a thin pass-through to the repository so
/// controllers never touch data access directly.
/// </summary>
public class GestorProductos
{
    RepositorioProductos repo = new RepositorioProductos();

    /// <summary>Persists a new product.</summary>
    public void Guardar(Producto producto)
    {
        repo.Guardar(producto);
    }

    /// <summary>Returns every product.</summary>
    public List<Producto> Listar()
    {
        return repo.Listar();
    }

    /// <summary>Finds a product by id (null when absent).</summary>
    public Producto Buscar(int id)
    {
        return repo.Buscar(id);
    }

    /// <summary>Deletes the product with the given id.</summary>
    public void Eliminar(int id)
    {
        repo.Eliminar(id);
    }

    /// <summary>Updates an existing product's editable fields.</summary>
    public void Modificar(Producto producto)
    {
        repo.Modificar(producto);
    }

    /// <summary>Free-text search over name/price.</summary>
    public List<Producto> BuscarProductos(string producto)
    {
        return repo.BuscarProductos(producto);
    }

    /// <summary>Deletes the product with the given code.</summary>
    public void Eliminar(string codigo)
    {
        repo.Eliminar(codigo);
    }
}
}<file_sep>/TestRazor/TestRazor/Controllers/ProductosController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using TestRazor.Common;
using TestRazor.ExtensionClasses;
using TestRazor.Models;
namespace TestRazor.Controllers
{
/// <summary>
/// MVC controller for product CRUD pages; delegates all work to
/// <see cref="GestorProductos"/> and maps entities to view models.
/// </summary>
public class ProductosController : Controller
{
    GestorProductos gestorProductos = new GestorProductos();

    public ActionResult Index()
    {
        return View();
    }

    /// <summary>Renders the empty creation form.</summary>
    public ActionResult Alta()
    {
        return View();
    }

    /// <summary>Validates and stores a posted product, then lists.</summary>
    public ActionResult Guardar(ViewModel.ProductoViewModel productoViewModel)
    {
        if (ModelState.IsValid)
        {
            gestorProductos.Guardar(productoViewModel.ConvertirAModelo());
        }
        else
        {
            // Re-show the form with validation messages.
            return View("Alta");
        }
        return RedirectToAction("Listar");
    }

    public ActionResult Listar()
    {
        var productos = gestorProductos.Listar();
        return View(productos.ConvertirAViewModel());
    }

    public ActionResult Buscar()
    {
        return View();
    }

    [HttpPost]
    public ActionResult Busqueda(string producto)
    {
        Logger.Log("Pasó por el action Busqueda");
        var productos = gestorProductos.BuscarProductos(producto);
        return View(productos.ConvertirAViewModel());
    }

    public ActionResult EliminarPorId(int id)
    {
        gestorProductos.Eliminar(id);
        return RedirectToAction("Listar");
    }

    public ActionResult EliminarPorCodigo(string codigo)
    {
        gestorProductos.Eliminar(codigo);
        return RedirectToAction("Listar");
    }

    /// <summary>Shows one product's detail page.</summary>
    public ActionResult Detalle(int id)
    {
        var producto = gestorProductos.Buscar(id);
        // Fixed: Buscar returns null for unknown ids, which previously
        // crashed with a NullReferenceException; respond 404 instead.
        // Also removed the unused "prueba" local (dead ConvertirAString call).
        if (producto == null)
        {
            return HttpNotFound();
        }
        return View(producto.ConvertirAViewModel());
    }

    public ActionResult Modificar(Producto producto)
    {
        gestorProductos.Modificar(producto);
        return RedirectToAction("Listar");
    }
}
}<file_sep>/TestRazor/TestRazor/Models/AccesoDatos/RepositorioProveedores.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using TestRazor.Common;
namespace TestRazor.Models.AccesoDatos
{
/// <summary>
/// Entity Framework repository for providers (Proveedor).
/// </summary>
public class RepositorioProveedores
{
    TestRazorEntities modelo = new TestRazorEntities();

    public RepositorioProveedores()
    {
        // Route EF's generated SQL to the application log.
        modelo.Database.Log = Logger.Log;
    }

    /// <summary>Inserts a new provider and saves.</summary>
    public void Guardar(Proveedor proveedor)
    {
        modelo.Proveedor.Add(proveedor);
        modelo.SaveChanges();
    }

    /// <summary>Returns every provider.</summary>
    public List<Proveedor> Listar()
    {
        return modelo.Proveedor.ToList();
    }

    /// <summary>Finds a provider by id, or null when it does not exist.</summary>
    public Proveedor Buscar(int id)
    {
        return modelo.Proveedor.Where(x => x.ProveedorId == id).FirstOrDefault();
    }

    /// <summary>Deletes the provider with the given id (throws if absent).</summary>
    public void Eliminar(int id)
    {
        var proveedor = modelo.Proveedor.First(x => x.ProveedorId == id);
        modelo.Proveedor.Remove(proveedor);
        modelo.SaveChanges();
    }

    /// <summary>Copies the editable fields onto the stored provider.</summary>
    public void Modificar(Proveedor proveedor)
    {
        var proveedorParaModificar = modelo.Proveedor.First(x => x.ProveedorId == proveedor.ProveedorId);
        proveedorParaModificar.Nombre = proveedor.Nombre;
        proveedorParaModificar.Telefono = proveedor.Telefono;
        proveedorParaModificar.Direccion = proveedor.Direccion;
        modelo.SaveChanges();
    }

    /// <summary>Providers whose name or phone contains the given text.</summary>
    public List<Proveedor> BuscarProvedores(string proveedor)
    {
        return modelo.Proveedor.Where(x => x.Nombre.Contains(proveedor) || x.Telefono.ToString().Contains(proveedor)).ToList();
    }

    /// <summary>Deletes the provider with the given name, if present.</summary>
    public void Eliminar(string nombre)
    {
        // Fixed: the local was misnamed "producto", and Remove(null) threw
        // ArgumentNullException when no provider matched the name; a missing
        // row is now a no-op.
        var proveedor = modelo.Proveedor.FirstOrDefault(x => x.Nombre == nombre);
        if (proveedor == null)
        {
            return;
        }
        modelo.Proveedor.Remove(proveedor);
        modelo.SaveChanges();
    }
}
}
<file_sep>/Javascript/JonathanJsPrueba.js
// Learning/scratch script exploring constructor functions, object literals,
// higher-order functions, and Array.prototype.filter/reduce.

// Constructor function with a method defined per instance.
var Persona=function(nombre){
    this.Nombre=nombre;
    this.Hablar=function(){
        // NOTE(review): Hablar only logs and returns undefined, so the
        // console.log(p1.Hablar()) below prints "undefined" after the greeting.
        console.log('Hola mi nombre es: '+this.Nombre);
    };
};
var p1=new Persona('Jorge');
console.log(p1.Nombre);
console.log(p1.Hablar());

// Plain object literal, no constructor.
var p2={
    Nombre:'Jonathan'
};
console.log(p2)

// Constructor mixing a private (closure-local) var with public fields.
var Persona2=function(){
    var nombre='Jonathan';
    this.Apellido='Ballatore';
    this.Direccion='Famaillá';
};
var p3=new Persona2();
console.log(p3);

// Sample data used by the filter/reduce experiments below.
var Personas=[
    {Nombre:'Jonathan', Apellido:'Ballatore', Edad:23},
    {Nombre:'Jorge',Apellido:'Ballatore', Edad:51},
    {Nombre:'Joaquin',Apellido:'Martinez', Edad:25}
];
console.log(Personas);

// Callback example: sal computes a sum and hands it to the callback.
var sal = function(comportamiento){
    // NOTE(review): num1/num2/suma are assigned without var/let and become
    // implicit globals — fine for a scratch file, a bug anywhere else.
    num1=1
    num2=14
    suma=num1+num2;
    comportamiento(suma);
}
// fun2 logs the result and returns another function (unused here).
var fun2=function(resultado){
    console.log(resultado);
    var otraFuncion=function(){
        console.log('Se ejecuto otra funcion');
    }
    return otraFuncion;
}
sal(fun2);

// Built-in filter by last name.
var filtrado = Personas.filter(function(persona){
    return (persona.Apellido=='Ballatore');
});
console.log(filtrado);

// Hand-rolled re-implementation of filter.
var funParametro=function(persona){
    if((persona.Nombre=='Jorge')&&(persona.Apellido=='Ballatore')){
        return true;
    }
    else
        return false;
}
var NuestraFilter=function(lista,funParametro){
    var resultado=[];
    for(var i=0;i<lista.length;i++){
        var resFunParametro=funParametro(lista[i])
        if(resFunParametro==true){
            resultado.push(lista[i]);
        }
    }
    return resultado;
}
console.log(NuestraFilter(Personas,funParametro))

// Same filter with an arrow function.
var filtrado2=Personas.filter(x => x.Nombre == 'Jorge' && x.Apellido=='Ballatore');
console.log(filtrado2);

// reduce experiment.
// NOTE(review): the parameter names look swapped — reduce passes
// (accumulator, element), so here p is the accumulator and sum is the
// person; `sum+=p.Edad` therefore does not compute the age total (compare
// with funParametro2/resultado3 below, which are correct).
var resultado2=Personas.reduce(function(p,sum){
    console.log(sum);
    console.log(p);
    return sum+=p.Edad;
}, 0);

// Hand-rolled re-implementation of reduce over Edad.
var funParametro2=function(valorInicial,persona){
    return valorInicial + persona.Edad;
}
var nuestraReduce=function(lista,funParametro2){
    var resultado=0;
    for(var i=0;i<lista.length;i++){
        resultado=funParametro2(resultado,lista[i])
    }
    return resultado
}
console.log(nuestraReduce(Personas,funParametro2));

// Correct built-in reduce with an arrow function.
var resultado3=Personas.reduce((sum,p)=>sum +=p.Edad,0);
console.log(resultado3);<file_sep>/TestRazor/TestRazor/Models/GestorProveedores.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using TestRazor.Models.AccesoDatos;
namespace TestRazor.Models
{
/// <summary>
/// Service layer for providers; a thin pass-through to the repository so
/// controllers never touch data access directly.
/// </summary>
public class GestorProveedores
{
    RepositorioProveedores repo = new RepositorioProveedores();

    /// <summary>Persists a new provider.</summary>
    public void Guardar(Proveedor proveedor)
    {
        repo.Guardar(proveedor);
    }

    /// <summary>Returns every provider.</summary>
    public List<Proveedor> Listar()
    {
        return repo.Listar();
    }

    /// <summary>Finds a provider by id (null when absent).</summary>
    public Proveedor Buscar(int id)
    {
        return repo.Buscar(id);
    }

    /// <summary>Deletes the provider with the given id.</summary>
    public void Eliminar(int id)
    {
        repo.Eliminar(id);
    }

    /// <summary>Updates an existing provider's editable fields.</summary>
    public void Modificar(Proveedor proveedor)
    {
        repo.Modificar(proveedor);
    }

    /// <summary>Free-text search over name/phone.</summary>
    public List<Proveedor> BuscarProveedores(string proveedor)
    {
        return repo.BuscarProvedores(proveedor);
    }

    /// <summary>Deletes the provider with the given name.</summary>
    public void Eliminar(string nombre)
    {
        repo.Eliminar(nombre);
    }
}
}<file_sep>/TestRazor/TestRazor/ExtensionClasses/ExtensionProducto.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace TestRazor.ExtensionClasses
{
/// <summary>
/// Mapping helpers between the Producto entity and its view model.
/// </summary>
public static class ExtensionProducto
{
    /// <summary>Builds a new entity from the posted view model (no id).</summary>
    public static Models.Producto ConvertirAModelo(this ViewModel.ProductoViewModel productoViewModel)
    {
        return new Models.Producto
        {
            Nombre = productoViewModel.Nombre,
            Precio = productoViewModel.Precio,
            Codigo = productoViewModel.Codigo,
            Stock = productoViewModel.Stock
        };
    }

    /// <summary>Projects a list of entities to view models.</summary>
    public static List<ViewModel.ProductoViewModel> ConvertirAViewModel(this List<Models.Producto> productos)
    {
        // Delegate per-item mapping to the single-entity overload.
        return productos.Select(p => p.ConvertirAViewModel()).ToList();
    }

    /// <summary>Maps a single entity (including its id) to a view model.</summary>
    public static ViewModel.ProductoViewModel ConvertirAViewModel(this Models.Producto productoModelo)
    {
        return new ViewModel.ProductoViewModel
        {
            Nombre = productoModelo.Nombre,
            Precio = productoModelo.Precio,
            Codigo = productoModelo.Codigo,
            Stock = productoModelo.Stock,
            ProductoId = productoModelo.ProductoId
        };
    }

    /// <summary>Space-separated textual form of a product's fields.</summary>
    public static string ConvertirAString(this Models.Producto productoModelo)
    {
        return string.Join(" ",
            productoModelo.Nombre,
            productoModelo.Precio,
            productoModelo.Codigo,
            productoModelo.Stock);
    }
}
}<file_sep>/TestRazor/TestRazor/ViewModel/ProveedorViewModel.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.ComponentModel.DataAnnotations;
using System.Web.Mvc;
namespace TestRazor.ViewModel
{
/// <summary>
/// Form-binding/validation model for provider create/edit pages.
/// </summary>
public class ProveedorViewModel
{
    // Rendered as a hidden field; null for new providers.
    [HiddenInput(DisplayValue =false)]
    public int? ProveedorId { get; set; }

    [Required][MaxLength(100)]
    public string Nombre { get; set; }

    [Required]
    public string Direccion { get; set; }

    [Required]
    public string Telefono { get; set; }
}
}<file_sep>/TestRazor/TestRazor/ExtensionClasses/ExtencionProvedor.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace TestRazor.ExtensionClasses
{
/// <summary>
/// Mapping helpers between the Proveedor entity and its view model.
/// (NOTE(review): class name "ExtencionProvedor" and method
/// "ConvertirModelAViewModel" spellings differ from the Producto
/// counterpart; renaming would break callers, so only flagged here.)
/// </summary>
public static class ExtencionProvedor
{
    /// <summary>Builds a new entity from the posted view model (no id).</summary>
    public static Models.Proveedor ConvertirAModelo(this ViewModel.ProveedorViewModel proveedorViewModel)
    {
        var proveedor = new Models.Proveedor
        {
            Nombre = proveedorViewModel.Nombre,
            Direccion = proveedorViewModel.Direccion,
            Telefono = proveedorViewModel.Telefono
        };
        return proveedor;
    }

    /// <summary>Projects a list of entities to view models.</summary>
    public static List<ViewModel.ProveedorViewModel> ConvertirAViewModel(this List<Models.Proveedor> proveedores)
    {
        var listaDeViewModel = new List<ViewModel.ProveedorViewModel>();
        foreach (var proveedorDelModelo in proveedores)
        {
            var proveedorViewModel = new ViewModel.ProveedorViewModel
            {
                ProveedorId = proveedorDelModelo.ProveedorId,
                Nombre = proveedorDelModelo.Nombre,
                Telefono = proveedorDelModelo.Telefono,
                Direccion = proveedorDelModelo.Direccion
            };
            listaDeViewModel.Add(proveedorViewModel);
        }
        return listaDeViewModel;
    }

    /// <summary>Maps a single entity (including its id) to a view model.</summary>
    public static ViewModel.ProveedorViewModel ConvertirModelAViewModel(this Models.Proveedor proveedorModelo)
    {
        var proveedorViewModel = new ViewModel.ProveedorViewModel
        {
            ProveedorId = proveedorModelo.ProveedorId,
            Direccion = proveedorModelo.Direccion,
            Nombre = proveedorModelo.Nombre,
            Telefono = proveedorModelo.Telefono
        };
        return proveedorViewModel;
    }
}
}<file_sep>/TestRazor/TestRazor/Controllers/ProveedoresController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using TestRazor.Common;
using TestRazor.ExtensionClasses;
using TestRazor.Models;
namespace TestRazor.Controllers
{
/// <summary>
/// MVC controller for provider CRUD pages; delegates all work to
/// <see cref="GestorProveedores"/> and maps entities to view models.
/// </summary>
public class ProveedoresController : Controller
{
    GestorProveedores gestorProvedor = new GestorProveedores();

    public ActionResult Index()
    {
        return View();
    }

    /// <summary>Renders the empty creation form.</summary>
    public ActionResult Alta()
    {
        return View();
    }

    /// <summary>Validates and stores a posted provider, then lists.</summary>
    public ActionResult Guardar(ViewModel.ProveedorViewModel proveedorViewModel)
    {
        if (ModelState.IsValid)
        {
            gestorProvedor.Guardar(proveedorViewModel.ConvertirAModelo());
        }
        else
        {
            // Re-show the form with validation messages.
            return View("Alta");
        }
        return RedirectToAction("Listar");
    }

    public ActionResult Listar()
    {
        var proveedores = gestorProvedor.Listar();
        return View(proveedores.ConvertirAViewModel());
    }

    public ActionResult Buscar()
    {
        return View();
    }

    [HttpPost]
    public ActionResult Busqueda(string proveedor)
    {
        Logger.Log("Pasó por el action Busqueda");
        var proveedores = gestorProvedor.BuscarProveedores(proveedor);
        return View(proveedores.ConvertirAViewModel());
    }

    public ActionResult EliminarPorId(int id)
    {
        gestorProvedor.Eliminar(id);
        return RedirectToAction("Listar");
    }

    public ActionResult EliminarPorNombre(string proveedor)
    {
        gestorProvedor.Eliminar(proveedor);
        return RedirectToAction("Listar");
    }

    /// <summary>Shows one provider's detail page.</summary>
    public ActionResult Detalle(int id)
    {
        // NOTE(review): Buscar returns null for unknown ids, so
        // ConvertirModelAViewModel would throw — consider a 404 guard.
        var proveedor = gestorProvedor.Buscar(id);
        return View(proveedor.ConvertirModelAViewModel());
    }

    public ActionResult Modificar(Proveedor proveedor)
    {
        gestorProvedor.Modificar(proveedor);
        return RedirectToAction("Listar");
    }
}
}<file_sep>/TestRazor/TestRazor/Models/AccesoDatos/RepositorioProductos.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using TestRazor.Common;
namespace TestRazor.Models.AccesoDatos
{
/// <summary>
/// Entity Framework repository for products (Producto).
/// </summary>
public class RepositorioProductos
{
    TestRazorEntities modelo = new TestRazorEntities();

    public RepositorioProductos()
    {
        // Route EF's generated SQL to the application log.
        modelo.Database.Log = Logger.Log;
    }

    /// <summary>Inserts a new product and saves.</summary>
    public void Guardar(Producto producto)
    {
        modelo.Producto.Add(producto);
        modelo.SaveChanges();
    }

    /// <summary>Returns every product.</summary>
    public List<Producto> Listar()
    {
        return modelo.Producto.ToList();
    }

    /// <summary>Finds a product by id, or null when it does not exist.</summary>
    public Producto Buscar(int id)
    {
        return modelo.Producto.Where(x => x.ProductoId == id).FirstOrDefault();
    }

    /// <summary>Deletes the product with the given id (throws if absent).</summary>
    public void Eliminar(int id)
    {
        var producto = modelo.Producto.First(x => x.ProductoId == id);
        modelo.Producto.Remove(producto);
        modelo.SaveChanges();
    }

    /// <summary>Copies the editable fields onto the stored product.</summary>
    public void Modificar(Producto producto)
    {
        var productoParaModificar = modelo.Producto.First(x => x.ProductoId == producto.ProductoId);
        productoParaModificar.Nombre = producto.Nombre;
        productoParaModificar.Precio = producto.Precio;
        productoParaModificar.Codigo = producto.Codigo;
        productoParaModificar.Stock = producto.Stock;
        modelo.SaveChanges();
    }

    /// <summary>Products whose name or price contains the given text.</summary>
    public List<Producto> BuscarProductos(string producto)
    {
        return modelo.Producto.Where(x => x.Nombre.Contains(producto) || x.Precio.ToString().Contains(producto)).ToList();
    }

    /// <summary>Deletes the product with the given code, if present.</summary>
    public void Eliminar(string codigo)
    {
        // Fixed: Remove(null) threw ArgumentNullException when no product
        // matched the code; a missing row is now a no-op.
        var producto = modelo.Producto.FirstOrDefault(x => x.Codigo == codigo);
        if (producto == null)
        {
            return;
        }
        modelo.Producto.Remove(producto);
        modelo.SaveChanges();
    }
}
}
<file_sep>/TestRazor/TestRazor/ViewModel/ProductoViewModel.cs
using System.ComponentModel.DataAnnotations;
using System.Web.Mvc;
namespace TestRazor.ViewModel
{
/// <summary>
/// Form-binding/validation model for product create/edit pages.
/// </summary>
public class ProductoViewModel
{
    // Rendered as a hidden field; null for new products.
    [HiddenInput(DisplayValue =false)]
    public int? ProductoId { get; set; }

    [Required]
    public string Nombre { get; set; }

    [Required]
    public decimal Precio { get; set; }

    [Required]
    public string Codigo { get; set; }

    [Required]
    public int Stock { get; set; }
}
}
|
77d60ad71d9dcb523a966f23fe65fdd79ef26074
|
[
"JavaScript",
"C#"
] | 11
|
C#
|
PIGNAL/TestRazor
|
e55bcd87069cbb42d04d816199b44d7044aa87a5
|
c8aa2a5887c7cd0e1f14b291f7039c2098b418c9
|
refs/heads/master
|
<file_sep>const express = require('express')
const router = express.Router()
const Record = require('../../models/Record')
const Category = require('../../models/Category')

// GET / — render the expense list, optionally filtered by ?category=...
router.get('/', async (req, res) => {
  try {
    let recordData = await Record.find().lean()
    const categoryData = await Category.find().lean()
    const filter = req.query.category
    if (filter) {
      recordData = recordData.filter(record => record.category === filter)
    }
    // Sum of the (possibly filtered) records' amounts. The original
    // computed this twice, once before and once after filtering.
    const totalAmount = recordData.reduce((acc, record) => acc + record.amount, 0)
    // Build a category-type -> icon lookup, then replace each record's
    // category name with its icon so the template can render it directly.
    const icon = {}
    for (const category of categoryData) {
      icon[category.type] = category.icon
    }
    for (const record of recordData) {
      record.category = icon[record.category]
    }
    res.render('index', { recordData, categoryData, totalAmount, filter })
  } catch (error) {
    // Fixed: the original only logged the error, leaving the HTTP request
    // hanging with no response; now answer with a 500.
    console.error(error)
    res.status(500).send('Internal Server Error')
  }
})
module.exports = router<file_sep>const Category = require('../Category')
const db = require('../../config/mongoose')

// One-shot seeder: once the MongoDB connection opens, insert every category
// from the bundled data.json and close the connection. (`Category` is
// required at the top of this file.)
db.once('open', async () => {
  const categories = require('./data.json').category
  console.log('mongoDB connected')
  // Sequential inserts; each create logs its own success/failure so one bad
  // row does not abort the rest.
  for (const item of categories) {
    await Category.create({
      name: item.name,
      type: item.type,
      icon: item.icon
    }).then(() => console.log('seed build successfully'))
      .catch(err => console.log(`something went wrong${err}`))
  }
  console.log('mongoDB done')
  return db.close()
})
<file_sep>const handlebars = require('handlebars')
// Handlebars block helper "filter": renders the main block when `category`
// equals `type`, otherwise the {{else}} block — used to highlight the
// active category filter in templates.
// NOTE(review): registerHelper's return value (captured in `helper`) is
// likely not the helper function itself, and the `module.export = helper`
// line below appears to be a typo for `module.exports` — the registration
// side effect is what actually matters here; verify.
const helper = handlebars.registerHelper('filter', function (category, type, options) {
  if (category === type) {
    return options.fn(this)
  } else {
    return options.inverse(this)
  }
})
module.export = helper<file_sep># 廣志の私帳
### 還在煩惱爆肝存下來的私房錢如何管控嗎?
那想必你應該會喜歡這個小工具
<a href="https://mighty-bastion-40786.herokuapp.com/">雲端連結</a>


## 本產品可使用的功能有以下(User Story)
<ul>
<li>廣志可以新增花費項目。
<li>廣志可以瀏覽所有支出項目。
<li>廣志可以編輯支出項目。
<li>廣志可以刪除支出項目。
<li>廣志可以依照種類篩選支出。
</ul>
## 環境需求 (Prerequisites)
<ul>
<li>詳細可見package.json中記載
<li>Node.js
<li>Express
<li>Express-handlebars
<li>Mongoose
<li>Method-override
<li>Body-parser
<li>Nodemon
</ul>
## 執行步驟 (Execution step)
clone我的專案吧<br>
`$ git clone https://github.com/tin8420/expense-tracker.git`
安裝npm<br>
`$ npm install`
在本地端執行專案<br>
`$ npm run dev`
請在瀏覽器中輸入url後開啟<br>
`App is running on http://localhost:3000`
|
f8d2d7fbdbc79be79774a6c7ce0628e9ce24cbb4
|
[
"JavaScript",
"Markdown"
] | 4
|
JavaScript
|
tin8420/expense-tracker
|
5c70781665fc0da764726f171c4fe10d466133a5
|
5b8213e11280106abd801d7b6c15eaebf54f106d
|
refs/heads/master
|
<repo_name>tvphuoc/TEAM06_PROJECT_QUIZZTEST<file_sep>/src/main/java/vn/edu/dao/UserDAO.java
package vn.edu.dao;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import vn.edu.models.User;
import java.util.ArrayList;
import java.util.List;
public class UserDAO {

    /**
     * Path to the JSON file holding registered users.
     * NOTE(review): machine-specific absolute path — should come from
     * configuration rather than being hard-coded.
     */
    private static final String USER_FILE =
            "E:\\ISC_FE_BE_Team06\\THITRACNGHIEMTRUCTUYEN\\QizzTest\\src\\main\\java\\vn\\edu\\dao\\Students.json";

    /**
     * Loads every user from the JSON user file.
     *
     * @return list of users parsed from the file
     * @throws IOException    if the file cannot be opened or read
     * @throws ParseException if the file content is not valid JSON
     */
    public static ArrayList<User> listUser() throws IOException, ParseException {
        JSONParser jsonParser = new JSONParser();
        ArrayList<User> userArrayList = new ArrayList<User>();
        // try-with-resources: the original never closed the FileReader.
        try (FileReader reader = new FileReader(USER_FILE)) {
            JSONArray userList = (JSONArray) jsonParser.parse(reader);
            for (int i = 0; i < userList.size(); i++) {
                userArrayList.add(parseUserObject((JSONObject) userList.get(i)));
            }
        }
        return userArrayList;
    }

    /** Maps a single JSON object ("username"/"password" keys) to a {@link User}. */
    private static User parseUserObject(JSONObject user) {
        String username = (String) user.get("username");
        String password = (String) user.get("password");
        return new User(username, password);
    }
}
<file_sep>/src/main/java/vn/edu/team06/WelcomeController.java
package vn.edu.team06;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
public class WelcomeController extends HttpServlet {
    private static final long serialVersionUID = 1L;

    /** Default constructor required by the servlet container. */
    public WelcomeController() {
        super();
    }

    /** Forwards every GET request to the landing page. */
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        request.getRequestDispatcher("index.jsp").forward(request, response);
    }

    /** POST is intentionally a no-op for this controller. */
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
    }
}
<file_sep>/src/main/java/vn/edu/models/User.java
package vn.edu.models;
import vn.edu.dao.UserDAO;
import java.io.IOException;
import java.util.ArrayList;
import org.json.simple.parser.ParseException;
/**
 * Simple credential holder for a registered user.
 *
 * NOTE(review): the password is stored and compared in plain text throughout
 * the app — it should be hashed before persisting.
 */
public class User {

    /** Login name. */
    private String username;
    /** Login password (plain text). */
    private String password;

    /**
     * Creates a user from raw credentials.
     *
     * @param username login name
     * @param password login password
     */
    public User(String username, String password) {
        // Fix: the original assigned the redaction placeholder <PASSWORD>,
        // which does not compile; assign the constructor arguments instead.
        this.username = username;
        this.password = password;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        // Fix: same <PASSWORD> placeholder bug as the constructor.
        this.password = password;
    }
}
<file_sep>/src/main/webapp/xuly/app.module.js
// AngularJS application module for team 06 ("nhom06"); ngRoute supplies the
// client-side routing used by the view table below.
var teamapp = angular.module('nhom06', [
'ngRoute'
]);
// Route table: each path maps a template to its controller; unknown paths
// fall back to the home view.
teamapp.
config(['$routeProvider', '$locationProvider',
function config($routeProvider, $locationProvider) {
// $locationProvider.html5Mode(true);
$routeProvider.
when('/', {
templateUrl: 'home.html',
controller: 'homeController'
}).
when('/home', {
templateUrl: 'home.html',
controller: 'homeController'
}).
when('/login', {
templateUrl: 'login.html',
controller: 'loginController'
}).
when('/register', {
templateUrl: 'registration.html',
controller: 'registerController'
})
.otherwise({
redirectTo: '/'
});
}
]);
// Layout partials: each directive inlines a shared template so header/nav/
// aside/footer markup is defined once.
// NOTE(review): the "ng" prefix is reserved by Angular itself — custom
// directives should use an app-specific prefix; confirm before renaming.
teamapp
.directive("ngHeader", function () {
return {
templateUrl: "UC/header.html"
}
})
.directive("ngNav", function () {
return {
templateUrl: "UC/nav.html"
}
})
.directive("ngAside", function () {
return {
templateUrl: "UC/aside.html"
}
})
.directive("ngFooter", function () {
return {
templateUrl: "UC/footer.html"
}
});
<file_sep>/src/main/webapp/xuly/user-service.js
teamapp
  // Shared state between controllers: the cached student list and the
  // currently-logged-in user.
  .factory('userService', ['$http', function ($http) {
    // Fix: in a .factory() the RETURN VALUE is the service; `this` is not a
    // fresh object here (it may be undefined or the global object), so build
    // an explicit service object instead of writing properties onto `this`.
    var service = {};
    service.allUsers = [];
    service.currentUser = {};

    // Preload the student list once, when the service is first injected.
    if (service.allUsers && service.allUsers.length === 0) {
      $http.get('./db/Students.js')
        .then(function (result) {
          service.allUsers = result.data;
        });
    }
    return service;
  }]);
<file_sep>/src/main/java/vn/edu/team06/RegistrationController.java
package vn.edu.team06;
import java.io.IOException;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
//import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.simple.parser.ParseException;
import vn.edu.models.User;
import vn.edu.service.UserService;
public class RegistrationController extends HttpServlet {
    private static final long serialVersionUID = 1L;

    /** Default constructor required by the servlet container. */
    public RegistrationController() {
        super();
    }

    /** Shows the registration form. */
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        RequestDispatcher rd = request.getRequestDispatcher("registration.jsp");
        rd.forward(request, response);
    }

    /**
     * Creates a new account from the submitted form fields and redirects to
     * the welcome page on success, or back to registration with err=1 on
     * failure (including a corrupt user store).
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        String username = request.getParameter("username");
        // Fix: the original read parameter "<PASSWORD>", a redaction
        // artifact; the form field is named "password" (matching the User
        // model and the login controller).
        String password = request.getParameter("password");
        UserService service = new UserService();
        User user = new User(username, password);
        try {
            if (service.registration(user)) {
                response.sendRedirect("welcome");
            } else {
                response.sendRedirect("registration?err=1");
            }
        } catch (ParseException e) {
            // The user store is a JSON file; if it cannot be parsed, surface
            // the failure to the client instead of sending no response at all.
            e.printStackTrace();
            response.sendRedirect("registration?err=1");
        }
    }
}
<file_sep>/src/main/webapp/xuly/dang-nhap-controller.js
// Login controller: validates submitted credentials against the student list
// served from ./db/Students.js (loaded lazily if userService has not cached
// it yet) and redirects to the app on success.
teamapp.controller('loginController', ['$scope', '$http', '$window', 'userService', function ($scope, $http, $window, userService) {
$scope.login = login;
$scope.loadStudenList = loadStudenList;
// Fetch the raw student records; resolves with the parsed payload.
function loadStudenList() {
return $http.get('./db/Students.js')
.then(function(result){
return result.data;
});
}
// Linear scan: username matches case-insensitively, password exactly.
// On a hit the matched student becomes the service's current user.
// NOTE(review): plain-text password comparison on the client — acceptable
// only for a demo; confirm this is never used in production.
function validateUsernameAndPassword(username, password, studentsList) {
for(var i = 0; i < studentsList.length; i++) {
var student = studentsList[i];
if(username.toLowerCase() === student.username.toLowerCase() &&
password === student.password) {
userService.currentUser = student;
return true;
}
}
return false;
}
// Form entry point: use the cached list when present, otherwise load first.
function login(username, password) {
$scope.errorMessage = '';
if(userService.allUsers && userService.allUsers.length > 0) {
var loginOk = validateUsernameAndPassword(username, password, userService.allUsers);
if(loginOk) {
$window.location.href = 'app.html/';
}else {
$scope.errorMessage = 'Invalid username or password';
}
} else {
loadUsersAndLogin(username, password);
}
}
// Slow path: fetch the student list, cache it on the service, then validate.
function loadUsersAndLogin(username, password) {
$scope.errorMessage = '';
$scope.loadStudenList().then(function(data) {
if(data && data.length > 0) {
userService.allUsers = data;
var loginOk = validateUsernameAndPassword(username, password, data);
if(loginOk) {
$window.location.href = 'app.html/';
}else {
$scope.errorMessage = 'Invalid username or password';
}
}
});
}
}]);
|
db9fcfb9ecc254d7921a7f94d2f6848a161d1b98
|
[
"JavaScript",
"Java"
] | 7
|
Java
|
tvphuoc/TEAM06_PROJECT_QUIZZTEST
|
7e0ba990ee94e49d16c374cc98d948e8a7171965
|
73ad8cd1b38d6333a7c8fbbd5ae0d58416365db0
|
refs/heads/main
|
<repo_name>ayush97goel/easyHTTPLibrary<file_sep>/app.js
/**
* @author <NAME> <<EMAIL>>
* @version v2.0.0
*
* Uncomment the respective line of code to check
*/
// ==========================================================================
// uses easyHTTP library v1.0.0 from here after, please check right
// js file is included in index.html
// ==========================================================================
// const http = new easyHttp()
// GET ALL POST
// http.get('https://jsonplaceholder.typicode.com/posts', function (error, posts) {
// if (error) {
// console.log(error);
// } else {
// console.log(posts);
// }
// });
// GET SINGLE POST
// http.get('https://jsonplaceholder.typicode.com/posts/2', function (error, post) {
// if (error) {
// console.log(error);
// } else {
// console.log(post);
// }
// })
// Data required for put and post
const data = {
title: 'Ayush Goel love life',
body: "haha Not found"
};
// POST NEW POST
// http.post('https://jsonplaceholder.typicode.com/posts', data, function (error, post) {
// if (error) {
// console.log(error);
// } else {
// console.log(post);
// }
// })
// REPLACE EXISTING POST WITH THE DATA CREATED ABOVE
// http.put('https://jsonplaceholder.typicode.com/posts/1', data, function (error, post) {
// if (error) {
// console.log(error);
// } else {
// console.log(post);
// }
// })
// console.log(data);
// DELETE EXISTING POST
// http.delete('https://jsonplaceholder.typicode.com/posts/5', function (error, response) {
// if (error) {
// console.log(error);
// } else {
// console.log(response);
// }
// });
// ==========================================================================
// uses easyHTTP library v2.0.0 from here after, please check right
// js file is included in index.html
// ==========================================================================
const http = new EasyHTTP();
// CALLING GET METHOD
// http.get('https://jsonplaceholder.typicode.com/posts')
// .then(posts => console.log(posts))
// .catch(error => console.log(error));
// CALLING POST METHOD
// http.post('https://jsonplaceholder.typicode.com/posts', data)
// .then(post => console.log(post))
// .catch(error => console.log(error));
// CALLING PUT METHOD
// http.put('https://jsonplaceholder.typicode.com/posts/6', data)
// .then(post => console.log(post))
// .catch(error => console.log(error));
// CALLING DELETE METHOD
http.delete('https://jsonplaceholder.typicode.com/posts/9')
.then(response => console.log(response))
.catch(error => console.log(error));
<file_sep>/README.md
# easyHTTPLibrary
* index.html
Minimal html file to display on browser but all the magic happens inside the console
* app.js:
It uses a Free fake API for testing and prototyping the easyHTTP Library.
More details about the API to which we are sending the request can be found here
https://jsonplaceholder.typicode.com/
* easyHTTP.js v1.0.0
1. Uses ES5 AJAX and xmlHttpRequest.
2. Prototypes provide the interface to this library to perform GET, PUT, POST, DELETE operation
3. Uses callback function to perform operation asynchronously
4. This is to make HTTP requests response using and XHR object much more simpler
* easyHTTP.js v2.0.0
1. Uses ES6 FetchAPI.
2. ES6 classes provide the interface to this library to perform GET, PUT, POST, DELETE operation
3. Uses Promises for asynchronous operations
4. This is to make HTTP requests and responses using the FETCH API even simpler
|
4d997482a66b9b45439edf54cf64823c1be36d5a
|
[
"JavaScript",
"Markdown"
] | 2
|
JavaScript
|
ayush97goel/easyHTTPLibrary
|
e95dce94d8216c21dc1df7df7de1b85e7dc906cd
|
c0d3b690b1a36a71456c7b79707eb03539ed4d62
|
refs/heads/master
|
<file_sep>#!/bin/bash
# Build the OpenWrt master branch for the Raspberry Pi 4 (bcm2711) and copy
# the resulting factory/sysupgrade images into ./releases.
VERSION="master"
TARGET="brcm2708/bcm2711"
ARCH="aarch64"
sh build.sh $VERSION $TARGET $ARCH
echo copy image to releases
SRC="./openwrt/"$VERSION"/bin/targets/"$TARGET"/"
DST="./releases/"$TARGET"/"$VERSION"/"
mkdir -p $DST
cp -r $SRC"openwrt-brcm2708-bcm2711-rpi-4-ext4-factory.img.gz" $DST
cp -r $SRC"openwrt-brcm2708-bcm2711-rpi-4-ext4-sysupgrade.img.gz" $DST<file_sep>#!/bin/bash
# Build the OpenWrt 18.06 branch for the Raspberry Pi 3 (bcm2710) and copy
# the resulting factory/sysupgrade images into ./releases.
VERSION="18.06"
TARGET="brcm2708/bcm2710"
ARCH="aarch64"
sh build.sh $VERSION $TARGET $ARCH
echo copy image to releases
SRC="./openwrt/"$VERSION"/bin/targets/"$TARGET"/"
DST="./releases/"$TARGET"/"$VERSION"/"
mkdir -p $DST
cp -r $SRC"openwrt-brcm2708-bcm2710-rpi-3-ext4-factory.img.gz" $DST
cp -r $SRC"openwrt-brcm2708-bcm2710-rpi-3-ext4-sysupgrade.img.gz" $DST<file_sep>#!/bin/bash
git clone -b master https://github.com/openwrt/openwrt.git "./openwrt/master/"
echo follow OpenWrt README<file_sep>#!/bin/bash
# cleanup.sh <version> — remove the custom-files staging directory for the
# given OpenWrt checkout so copy_files.sh starts from a clean slate.
echo cleanup
VERSION=$1"/"
DST="./openwrt/"$VERSION"files/"
# cleanup dst
rm -r $DST
mkdir -p $DST<file_sep>#!/bin/bash
# build.sh <version> <target> <arch> — orchestrates a full image build:
# clean the staging files, copy the overlays, then run make in the checkout.
VERSION=$1
TARGET=$2
ARCH=$3
#cleanup
sh cleanup.sh $VERSION
#copy files to openwrt
sh copy_files.sh $VERSION $TARGET $ARCH
#build image
sh make.sh $VERSION<file_sep>#!/bin/bash
# copy_files.sh <version> <target> <arch> — stage the rootfs overlay into the
# OpenWrt checkout: common files, then arch-specific, then target-specific,
# and finally the saved .config for this version/target.
echo copy files to openwrt
VERSION=$1"/"
TARGET=$2"/"
ARCH=$3"/"
DST="./openwrt/"$VERSION"files/"
# copy files dir
cp -r "./files/"* $DST
# copy arch specific files
cp -r "./files_arch/"$ARCH* $DST
# copy target specific files
cp -r "./files_target/"$TARGET* $DST
# copy openwrt .config
cp "./openwrt_config/"$VERSION"/targets/"$TARGET".config" "./openwrt/"$VERSION<file_sep># MonMotion
| Target | OpenWRT Version | Script | Image |
|---|---|---|---|
| Raspberry Pi 3B/3B+ | 18.06 | build_18.06_bcm2710.sh | openwrt-brcm2708-bcm2710-rpi-3-ext4-factory.img.gz |
| Raspberry Pi 4B | master | build_master_bcm2711.sh | openwrt-brcm2708-bcm2711-rpi-4-ext4-factory.img.gz |<file_sep>#!/bin/bash
# make.sh <version> — run the OpenWrt build inside the given checkout and
# return to the repository root.
echo make openwrt
VERSION=$1"/"
cd "./openwrt/"$VERSION
make
cd ..
cd ..<file_sep>#!/bin/bash
# Copy the gouniversal web-UI assets from the local Go workspace into the
# rootfs overlay so they end up in the built image under /root/gou/data/ui/.
echo copy gou/ui files to openwrt
VERSION=$1"/"
TARGET=$2"/"
USER=$(id -u -n)
SRC="/home/"$USER"/go/src/github.com/dekoch/gouniversal/data/ui/"
DST="./files/root/gou/data/ui/"
mkdir -p $DST
# copy files from go dir
cp -r $SRC"backup/" $DST"backup/"
cp -r $SRC"console/" $DST"console/"
cp -r $SRC"logviewer/" $DST"logviewer/"
cp -r $SRC"program/" $DST"program/"
cp -r $SRC"static/" $DST"static/"<file_sep>#!/bin/bash
git clone -b openwrt-18.06 https://github.com/openwrt/openwrt.git "./openwrt/18.06/"
echo follow OpenWrt README<file_sep>#! /bin/sh
# Launch the gouniversal binary shipped in the image (aarch64 build).
cd /root/gou/
./aarch64
|
b3167c0a99d1896b3522b9c52e12b0a9d46a78b0
|
[
"Markdown",
"Shell"
] | 11
|
Shell
|
dekoch/monmotion
|
91422082ffa786c90be1f00522ff65a0e0f1af08
|
6a7d37f01daf13d6aef74271c3ce8dc19aca8518
|
refs/heads/master
|
<repo_name>HITliuyu/SPI-Edison-Arduino<file_sep>/spiManualCks.c
#include "mraa.h"
#include <stdio.h>
#include <unistd.h>
#define BUFFSIZE 26
typedef union cvtfloat {
float val;
unsigned char bytes[4];
} MYFLOAT;
typedef union cvtint {
uint16_t val;
unsigned char bytes[2];
} MYINT;
typedef union cvtlong {
int val;
unsigned char bytes[4];
} MYLONG;
uint8_t sendbyte(uint8_t byte, int delay);
mraa_gpio_context CS1;
mraa_gpio_context SCK;
mraa_gpio_context MOSI;
mraa_gpio_context MISO;
mraa_gpio_context reset;
int main() {
const struct sched_param priority = { 1 };
sched_setscheduler(0, SCHED_FIFO, &priority);
reset = mraa_gpio_init(19);
mraa_gpio_dir(reset, MRAA_GPIO_OUT);
mraa_gpio_write(reset, 1);
CS1 = mraa_gpio_init(10);
mraa_gpio_use_mmaped(CS1, 1);
mraa_gpio_dir(CS1, MRAA_GPIO_OUT);
mraa_gpio_write(CS1, 1);
SCK = mraa_gpio_init(13);
mraa_gpio_use_mmaped(SCK, 1);
mraa_gpio_dir(SCK, MRAA_GPIO_OUT);
mraa_gpio_write(SCK, 0);
MOSI = mraa_gpio_init(11);
mraa_gpio_use_mmaped(MOSI, 1);
mraa_gpio_dir(MOSI, MRAA_GPIO_OUT);
mraa_gpio_write(MOSI, 0);
MISO = mraa_gpio_init(12);
mraa_gpio_use_mmaped(MISO, 1);
mraa_gpio_dir(MISO, MRAA_GPIO_IN);
mraa_gpio_read(MISO);
int delay = 1000;
uint8_t read;
int cycle=0;
uint8_t read_data[BUFFSIZE];
MYFLOAT myfloat;
MYINT myint;
MYLONG mylong;
int i = 5;
while(1)
{
while(i --)
{
int i, j;
char c = 'a';
uint8_t checksum = 0;
mraa_gpio_write(CS1, 0);
sendbyte(c, delay);
usleep(100);
uint8_t byte = 0;
sendbyte(byte, delay);
for(i=0; i<BUFFSIZE; i++)
{
usleep(100);
byte = 10;
read_data[i] = sendbyte(byte, delay);
checksum += read_data[i];
}
usleep(100);
printf("======================\n");
if ((checksum & 0xff) == sendbyte(byte, delay))
{
printf("checksum matches!\n");
}
else
{
printf("checksum ERROR!\n");
}
mraa_gpio_write(CS1, 1);
printf("recv %d %d\n", read_data[0], read_data[1]);
for(j=0; j<4; j++)
{
myfloat.bytes[j] = read_data[j+2];
}
printf("ratio 0 is %f \n", myfloat.val);
for(j=0; j<4; j++)
{
myfloat.bytes[j] = read_data[j+6];
}
printf("ratio 1 is %f \n", myfloat.val);
for(j=0; j<4; j++)
{
myfloat.bytes[j] = read_data[j+10];
}
printf("ratio 2 is %f \n", myfloat.val);
for(j=0; j<2; j++)
{
myint.bytes[j] = read_data[j+14];
}
printf("CO2PPM is %d \n", myint.val);
// for(j=0; j<4; j++)
// {
// mylong.bytes[j] = read_data[j+16];
// }
// printf("RangInCM is %d \n", mylong.val);
for(j=0; j<4; j++)
{
myfloat.bytes[j] = read_data[j+16];
}
printf("PM concentration is %f \n", myfloat.val);
for(j=0; j<2; j++)
{
myint.bytes[j] = read_data[j+20];
}
printf("visiable light is %d \n", myint.val);
for(j=0; j<2; j++)
{
myint.bytes[j] = read_data[j+22];
}
printf("IR light is %d \n", myint.val);
for(j=0; j<2; j++)
{
myint.bytes[j] = read_data[j+24];
}
printf("UV light is %f \n", (float)myint.val/100);
printf("cycle %d\n", cycle++);
sleep(5);
}
mraa_gpio_write(reset, 0);
usleep(100);
mraa_gpio_write(reset, 1);
i = 5;
cycle = 0;
}
return MRAA_SUCCESS;
}
/*
 * Shift one byte out on MOSI (MSB first) while sampling MISO, bit-banging
 * an SPI transfer on the GPIO lines initialised in main().
 *
 * byte  - value to transmit
 * delay - busy-wait loop count controlling the clock period
 * returns the byte read back from the slave
 *
 * NOTE(review): timing comes from empty busy-wait loops, so the effective
 * clock rate depends on CPU speed and compiler optimisation — verify on the
 * target before changing build flags.
 */
uint8_t sendbyte(uint8_t byte, int delay) {
int i, j;
int read = 0;
// mraa_gpio_write(CS1, 0);
/* settle time before the first clock edge */
for (j = 1; j < delay / 8 + 100; j++) {
};
for (i = 0; i < 8; i++){
/* present the current MSB on MOSI, then shift for the next bit */
mraa_gpio_write(MOSI, byte & 0x80);
byte = byte << 1;
for (j = 1; j < delay; j++) {
};
/* drive SCK high and sample MISO into the LSB of `read` */
mraa_gpio_write(SCK, 1);
read = read << 1;
read = read | (mraa_gpio_read(MISO));
for (j = 1; j < delay; j++) {
};
mraa_gpio_write(SCK, 0);
}
// mraa_gpio_write(CS1, 1);
/* inter-byte gap */
for (j = 1; j < delay / 8 + 20; j++) { };
return (uint8_t) read;
}
<file_sep>/README.txt
SPI bus for communication between Edison board and Arduino UNO.
SpiManual.c : running on Edison board, including a self-implemented SPI bus.
spiSensor-Arduino: running on Arduino board, fetching sensors' data and sending back to Edison when requested.<file_sep>/spiSensor-Arduino/spiSensorNoPrintWatchDog/readme.txt
This sketch is used for latest work.<file_sep>/spiSensor-Arduino/spiSensorNoPrint/spiSensorNoPrint.ino
// Written by <NAME>
// Sept. 2017
#include <SPI.h>
#include "DHT.h"
#include <Wire.h> //I2C
#include "MutichannelGasSensor.h"
#include <SoftwareSerial.h>
#include "Ultrasonic.h"
#include "Arduino.h"
#include "SI114X.h" //sunlight
#define DHTPIN A0
#define UltrasonicPin A1
#define DHTTYPE DHT22 // DHT 11
#define sensor s_serial
#define SS 10 //slave select pin
#define BUFFSIZE 34
#define PWM_pin_1 3 //pwm output for white led
#define PWM_pin_2 9 // pwm output for yellow led
//union definition
union cvtfloat {
float val;
unsigned char bytes[4];
} myfloat;
union cvtint {
int val;
unsigned char bytes[2];
} myint;
union cvtlong {
long val;
unsigned char bytes[4];
} mylong;
//temperature and humidity
DHT dht(DHTPIN, DHTTYPE);
//CO2 sensor
const unsigned char cmd_get_sensor[] =
{
0xff, 0x01, 0x86, 0x00, 0x00,
0x00, 0x00, 0x00, 0x79
};
unsigned char dataRevice[9];
int CO2PPM;
SoftwareSerial s_serial(5, 6); // RX, TX
////ultrasonic sensor
//Ultrasonic ultrasonic(7);
//long RangeInCM;
//PM sensor
int pin = 8;
unsigned long duration;
unsigned long starttime;
unsigned long sampletime_ms = 15000;//15 seconds
unsigned long lowpulseoccupancy = 0;
float ratio = 0;
float concentration = 0;
//sunlight sensor
SI114X sunlight = SI114X();
//buffer used to store sensor data
//|0123.humidity|4567.temperature|8.9.10.11.ratio0|12.13.14.15.ratio1|16.17.18.19.ratio2|20.21.CO2PPM|22.23.24.25.PM concentration|26.27.light.visible|28.29.light.IR|
//|30.31.light.UV*100|32.33.Ultrasonic sensor|
uint8_t sensorData [BUFFSIZE];
byte command = 0;
int i = 0;
int j = 0;
int k = 0;
byte checksum;
int pwm_duty_cycle[2] = {128, 128};
bool pwm_duty_cycle_update_flag = true;
void setup (void)
{
Serial.begin (9600); // debugging
//tempurature and humidity sensor
dht.begin();
//multichannel gas sensor
gas.begin(0x04);//the default I2C address of the slave is 0x04
gas.powerOn();
// Serial.println(gas.getVersion());
//CO2 sensor
sensor.begin(9600);
//ultrasonic range sensor
//None
//PM sensor
pinMode(pin,INPUT);
starttime = millis();//get the current time;
//sunlight sensor
if(!sunlight.Begin())
{
sunlight.Begin();
}
//PWM output setting
pinMode(PWM_pin_1, OUTPUT);
pinMode(PWM_pin_2, OUTPUT);
//variable initialization
i = 0;
checksum = 0;
// turn on SPI in slave mode
SPCR |= bit (SPE);
// have to send on master in, *slave out*
pinMode(MISO, OUTPUT);
// now turn on interrupts
// SPI.attachInterrupt();
SPCR |= _BV(SPIE);
// interrupt for SS falling edge
// attachInterrupt (0, resetParam, FALLING);
attachInterrupt (0, resetParam, RISING);// we reset parameters when SS is released
} // end of setup
// SPI interrupt routine
/*
 * SPI slave interrupt: fires once per byte exchanged with the Edison master.
 * The first byte of a transaction selects the command; subsequent bytes are
 * handled according to it:
 *   'a' — stream sensorData[] back one byte per transfer, then answer with
 *         the 8-bit additive checksum once the buffer is exhausted;
 *   'b' — receive two PWM duty-cycle bytes (white LED, yellow LED) and flag
 *         loop() to apply them.
 * The counters i/k/checksum are cleared by resetParam() on the SS edge.
 */
ISR (SPI_STC_vect)
{
byte c = SPDR; // grab byte from SPI Data Register
switch (command)
{
// no command? then this is the command
case 0:
command = c;
SPDR = 0;
break;
// transmit buffered data
case 'a':
if (i < BUFFSIZE)
{
SPDR = sensorData[i]; // preload the next byte for the coming transfer
checksum += sensorData[i];
i++;
}
else
{
SPDR = (checksum & 0xff); // buffer exhausted: answer with the checksum
}
break;
//change PWM output
case 'b':
pwm_duty_cycle[k] = SPDR;
if (k >= 1)
{
pwm_duty_cycle_update_flag = true; // both bytes received: let loop() apply them
}
k++;
break;
} // end of switch
} // end of interrupt routine SPI_STC_vect
// main loop - wait for flag set in interrupt routine
void loop (void)
{
//temperature and humidity
float c;//variable to store value
c = dht.readHumidity();
int j; //convert float to char counter
myfloat.val = c;
for (j = 0; j < 4; j++)
{
sensorData[j] = myfloat.bytes[j];
}
c = dht.readTemperature();
myfloat.val = c;
for (j = 0; j < 4; j++)
{
sensorData[j+4] = myfloat.bytes[j];
}
//pwm output setting
if (pwm_duty_cycle_update_flag)
{
analogWrite(PWM_pin_1, pwm_duty_cycle[0]);
analogWrite(PWM_pin_2, pwm_duty_cycle[1]);
pwm_duty_cycle_update_flag = false;
}
//multichannel sensor
/*
c = gas.measure_NH3();
Serial.print("The concentration of NH3 is ");
if(c>=0) Serial.print(c);
else Serial.print("invalid");
Serial.println(" ppm");
c = gas.measure_CO();
Serial.print("The concentration of CO is ");
if(c>=0) Serial.print(c);
else Serial.print("invalid");
Serial.println(" ppm");
c = gas.measure_NO2();
Serial.print("The concentration of NO2 is ");
if(c>=0) Serial.print(c);
else Serial.print("invalid");
Serial.println(" ppm");
c = gas.measure_C3H8();
Serial.print("The concentration of C3H8 is ");
if(c>=0) Serial.print(c);
else Serial.print("invalid");
Serial.println(" ppm");
c = gas.measure_C4H10();
Serial.print("The concentration of C4H10 is ");
if(c>=0) Serial.print(c);
else Serial.print("invalid");
Serial.println(" ppm");
c = gas.measure_CH4();
Serial.print("The concentration of CH4 is ");
if(c>=0) Serial.print(c);
else Serial.print("invalid");
Serial.println(" ppm");
c = gas.measure_H2();
Serial.print("The concentration of H2 is ");
if(c>=0) Serial.print(c);
else Serial.print("invalid");
Serial.println(" ppm");
c = gas.measure_C2H5OH();
Serial.print("The concentration of C2H5OH is ");
if(c>=0) Serial.print(c);
else Serial.print("invalid");
Serial.println(" ppm");
*/
//display ratio values
c = gas.measure_RATIO_0();
// Serial.print("The ratio0 is ");
if(c>=0)
{
// Serial.print(c);
myfloat.val = c;
for(j = 0; j < 4; j++)
{
sensorData[j+8] = myfloat.bytes[j];
}
}
// else Serial.print("invalid");
// Serial.println(" ");
c = gas.measure_RATIO_1();
// Serial.print("The ratio1 is ");
if(c>=0)
{
// Serial.print(c);
myfloat.val = c;
for(j = 0; j < 4; j++)
{
sensorData[j+12] = myfloat.bytes[j];
}
}
// else Serial.print("invalid");
// Serial.println(" ");
c = gas.measure_RATIO_2();
// Serial.print("The ratio2 is ");
if(c>=0)
{
// Serial.print(c);
myfloat.val = c;
for(j = 0; j < 4; j++)
{
sensorData[j+16] = myfloat.bytes[j];
}
}
// else Serial.print("invalid");
// Serial.println(" ");
//CO2 sensor
if(CO2dataRecieve())
{
// Serial.print(" CO2: ");
// Serial.print(CO2PPM);
// Serial.println("");
myint.val = CO2PPM;
sensorData[20] = myint.bytes[0];
sensorData[21] = myint.bytes[1];
}
// //ultrasonic sensor
// RangeInCM = ultrasonic.MeasureInCentimeters();
//// Serial.print("Range is ");
//// Serial.print(RangeInCM);
//// Serial.print(" cm\n");
// mylong.val = RangeInCM;
// for(j = 0; j < 4; j++)
// {
// sensorData[j+16] = mylong.bytes[j];
// }
//PM sensor
do{
duration = pulseIn(pin, LOW);
lowpulseoccupancy = lowpulseoccupancy+duration;
}while((millis()-starttime) <= sampletime_ms);
// if ((millis()-starttime) >= sampletime_ms)//if the sampel time = = sampletime_ms
// {
ratio = lowpulseoccupancy/(sampletime_ms*10.0); // Integer percentage 0=>100
concentration = 1.1*pow(ratio,3)-3.8*pow(ratio,2)+520*ratio+0.62; // using spec sheet curve
if (ratio != 0)
{
myfloat.val = concentration;
for(j = 0; j < 4; j++)
{
sensorData[j+22] = myfloat.bytes[j];
}
}
// Serial.print("ratio = ");
// Serial.print(ratio);
// Serial.print("\t");
// Serial.print("concentration = ");
// Serial.print(concentration);
// Serial.println(" pcs/0.01cf");
// Serial.println("\n");
lowpulseoccupancy = 0;
starttime = millis();
// }
//sunlight sensor
myint.val = sunlight.ReadVisible();
sensorData[26] = myint.bytes[0];
sensorData[27] = myint.bytes[1];
// Serial.print("Vis: ");
myint.val = sunlight.ReadIR();
sensorData[28] = myint.bytes[0];
sensorData[29] = myint.bytes[1];
// Serial.print("IR: "); Serial.println(myint.val);
//the real UV value must be div 100 from the reg value , datasheet for more information.
myint.val = sunlight.ReadUV();
sensorData[30] = myint.bytes[0];
sensorData[31] = myint.bytes[1];
// Serial.print("UV: "); Serial.println((float)myint.val/100);
//read analog signal from ultrasonic sensor
myint.val = analogRead(UltrasonicPin);
sensorData[32] = myint.bytes[0];
sensorData[33] = myint.bytes[1];
// delay(5000);
} // end of loop
// start of transaction, no command yet
void resetParam ()
{
// SS edge: the master ended/started a transaction, so clear the pending
// command and all per-transaction counters for the next exchange.
command = 0;
i = 0;
k = 0;
checksum = 0;
} // end of interrupt service routine (ISR) resetParam
//CO2 sensor data
/*
 * Query the CO2 sensor over software serial and parse its 9-byte reply.
 * On success the reading is stored in the global CO2PPM and true is
 * returned; false means the command was not fully sent or the reply
 * checksum did not match.
 */
bool CO2dataRecieve(void)
{
byte data[9];
int i = 0;
//transmit command data
for(i=0; i<sizeof(cmd_get_sensor); i++)
{
sensor.write(cmd_get_sensor[i]);
}
delay(10);
//begin receiving data
if(sensor.available())
{
while(sensor.available())
{
for(int i=0;i<9; i++) // NOTE: this i shadows the outer counter
{
data[i] = sensor.read();
delay(1);
}
}
}
/*
for(int j=0; j<9; j++)
{
Serial.print(data[j]);
Serial.print(" ");
}
Serial.println("");
*/
// The outer i equals 9 only if the full 9-byte command was written above;
// the second term verifies the sensor's additive checksum byte.
if((i != 9) || (1 + (0xFF ^ (byte)(data[1] + data[2] + data[3] + data[4] + data[5] + data[6] + data[7]))) != data[8])
{
return false;
}
// PPM is the 16-bit big-endian value carried in reply bytes 2..3.
CO2PPM = (int)data[2] * 256 + (int)data[3];
return true;
}
<file_sep>/spiMultithread.c
#include "mraa.h"
#include <stdio.h>
#include <unistd.h>
#include <pthread.h>
#include <semaphore.h>
#define BUFFSIZE 30
typedef union cvtfloat {
float val;
unsigned char bytes[4];
} MYFLOAT;
typedef union cvtint {
uint16_t val;
unsigned char bytes[2];
} MYINT;
typedef union cvtlong {
int val;
unsigned char bytes[4];
} MYLONG;
static mraa_gpio_context CS1;
static mraa_gpio_context SCK;
static mraa_gpio_context MOSI;
static mraa_gpio_context MISO;
static uint8_t read_data[BUFFSIZE];
static sem_t semaphore;
uint8_t sendbyte(uint8_t byte, int delay);
void spiinit();
int spifetch();
void *update(void *arg);
int main() {
const struct sched_param priority = { 1 };
sched_setscheduler(0, SCHED_FIFO, &priority);
spiinit();
uint8_t read;
int cycle=0;
MYFLOAT myfloat;
MYINT myint;
MYLONG mylong;
sem_init(&semaphore, 0, 1);
pthread_t pid;
pthread_create(&pid, NULL, update, NULL);
while(1)
{
int j;
sem_wait(&semaphore);
printf(">>>>>>>>>Main thread print!>>>>>>>>>>\n");
printf("recv %d %d\n", read_data[0], read_data[1]);
for(j=0; j<4; j++)
{
myfloat.bytes[j] = read_data[j+2];
}
printf("ratio 0 is %f \n", myfloat.val);
for(j=0; j<4; j++)
{
myfloat.bytes[j] = read_data[j+6];
}
printf("ratio 1 is %f \n", myfloat.val);
for(j=0; j<4; j++)
{
myfloat.bytes[j] = read_data[j+10];
}
printf("ratio 2 is %f \n", myfloat.val);
for(j=0; j<2; j++)
{
myint.bytes[j] = read_data[j+14];
}
printf("CO2PPM is %d \n", myint.val);
for(j=0; j<4; j++)
{
mylong.bytes[j] = read_data[j+16];
}
printf("RangInCM is %d \n", mylong.val);
for(j=0; j<4; j++)
{
myfloat.bytes[j] = read_data[j+20];
}
printf("PM concentration is %f \n", myfloat.val);
for(j=0; j<2; j++)
{
myint.bytes[j] = read_data[j+24];
}
printf("visiable light is %d \n", myint.val);
for(j=0; j<2; j++)
{
myint.bytes[j] = read_data[j+26];
}
printf("IR light is %d \n", myint.val);
for(j=0; j<2; j++)
{
myint.bytes[j] = read_data[j+28];
}
printf("UV light is %f \n", (float)myint.val/100);
printf(">>>>>>>>>Main thread finish printing!>>>>>>>>>>\n");
sem_post(&semaphore);
printf("cycle %d\n", cycle++);
sleep(5);
}
pthread_join(pid, NULL);
return MRAA_SUCCESS;
}
/*
 * Shift one byte out on MOSI (MSB first) while sampling MISO, bit-banging
 * an SPI transfer on the GPIO lines configured by spiinit().
 *
 * byte  - value to transmit
 * delay - busy-wait loop count controlling the clock period
 * returns the byte read back from the slave
 *
 * NOTE(review): timing comes from empty busy-wait loops, so the effective
 * clock rate depends on CPU speed and compiler optimisation.
 */
uint8_t sendbyte(uint8_t byte, int delay) {
int i, j;
int read = 0;
// mraa_gpio_write(CS1, 0);
/* settle time before the first clock edge */
for (j = 1; j < delay / 8 + 100; j++) {
};
for (i = 0; i < 8; i++){
/* present the current MSB on MOSI, then shift for the next bit */
mraa_gpio_write(MOSI, byte & 0x80);
byte = byte << 1;
for (j = 1; j < delay; j++) {
};
/* drive SCK high and sample MISO into the LSB of `read` */
mraa_gpio_write(SCK, 1);
read = read << 1;
read = read | (mraa_gpio_read(MISO));
for (j = 1; j < delay; j++) {
};
mraa_gpio_write(SCK, 0);
}
// mraa_gpio_write(CS1, 1);
/* inter-byte gap */
for (j = 1; j < delay / 8 + 20; j++) { };
return (uint8_t) read;
}
/*
 * Configure the four GPIO lines used for the bit-banged SPI master:
 *   CS1  (pin 10) output, idle high (slave deselected)
 *   SCK  (pin 13) output, idle low
 *   MOSI (pin 11) output, idle low
 *   MISO (pin 12) input
 * Memory-mapped GPIO access is enabled on every pin for faster toggling.
 */
void spiinit()
{
CS1 = mraa_gpio_init(10);
mraa_gpio_use_mmaped(CS1, 1);
mraa_gpio_dir(CS1, MRAA_GPIO_OUT);
mraa_gpio_write(CS1, 1);
SCK = mraa_gpio_init(13);
mraa_gpio_use_mmaped(SCK, 1);
mraa_gpio_dir(SCK, MRAA_GPIO_OUT);
mraa_gpio_write(SCK, 0);
MOSI = mraa_gpio_init(11);
mraa_gpio_use_mmaped(MOSI, 1);
mraa_gpio_dir(MOSI, MRAA_GPIO_OUT);
mraa_gpio_write(MOSI, 0);
MISO = mraa_gpio_init(12);
mraa_gpio_use_mmaped(MISO, 1);
mraa_gpio_dir(MISO, MRAA_GPIO_IN);
mraa_gpio_read(MISO);
}
/*
 * Perform one full SPI transaction with the sensor hub: select the slave,
 * send the 'a' command plus one dummy byte, clock BUFFSIZE data bytes into
 * the shared read_data[] while summing them, then read the slave's checksum
 * byte and compare against our own sum.
 *
 * Returns 1 when the checksums match, 0 otherwise.
 * NOTE(review): read_data is shared with the main thread — callers appear
 * expected to hold `semaphore` around this call (see update()); confirm.
 */
int spifetch()
{
int delay = 1000;
int i;
char c = 'a';
uint8_t checksum = 0;
int checksumTrue;
mraa_gpio_write(CS1, 0);
sendbyte(c, delay);
usleep(100);
uint8_t byte = 0;
sendbyte(byte, delay);
for(i=0; i<BUFFSIZE; i++)
{
usleep(100);
byte = 10;
read_data[i] = sendbyte(byte, delay);
checksum += read_data[i];
}
usleep(100);
/* final transfer clocks out the slave's checksum byte */
if ((checksum & 0xff) == sendbyte(byte, delay))
{
printf("checksum matches!\n");
checksumTrue = 1;
}
else
{
printf("checksum ERROR!\n");
checksumTrue = 0;
}
mraa_gpio_write(CS1, 1);
return checksumTrue;
}
/*
 * Background refresh thread: forever, take the semaphore, retry spifetch()
 * every 0.5 s until a transfer passes its checksum, release the semaphore,
 * then sleep 3 s before the next refresh. `arg` is unused.
 */
void *update(void *arg)
{
while(1)
{
sem_wait(&semaphore);
printf("=========enter update========\n");
while(!spifetch())
{
usleep(500000);
}
printf("========finish update, release========\n");
sem_post(&semaphore);
sleep(3);
}
}
|
357f213f0289cdfe4ea04fdb3ce9f35e159e3959
|
[
"C",
"Text",
"C++"
] | 5
|
C
|
HITliuyu/SPI-Edison-Arduino
|
6dad47a4e26b26884f6e769028e3bd682798db7d
|
0dda26c3aed61682167645eae219f427d349f5be
|
refs/heads/master
|
<repo_name>alampiler/Agency<file_sep>/README.md
# Agency
Landing page template
<file_sep>/app/js/main.js
// Landing-page behaviour: tab switching for the About and Product sections,
// a one-shot responsive visibility tweak, and smooth-scroll navigation.
$(document).ready(function() {
// About section: clicking a thumbnail activates the matching image panel
// (the target class name is carried in the data-panel attribute).
$('.about_images .about_image_block').click(function () {
let tab_id = $(this).attr('data-panel');
$('.about_container .about_container_image').removeClass('active');
$('.about_content_menu .about_menu').removeClass('active');
$('.'+tab_id).addClass('active');
});
// Product selector: classic tab widget driven by data-tab ids.
$('div.product_selector_panel div.panel').click(function () {
let tab_id = $(this).attr('data-tab');
$('div.product_selector_panel div.panel').removeClass('active');
$('div.product_selector_content div.content').removeClass('active');
$(this).addClass('active');
$('#'+tab_id).addClass('active');
});
// Responsive switch measured once at load time.
// NOTE(review): width 871 satisfies both branch boundaries and the value is
// never re-evaluated on resize — confirm whether a resize handler is wanted.
var width = $(document).width();
if(width <= 871){
$('.ie').css('display', 'none');
$('.getInTouch_conInfo').css('display', 'block');
}
else if(width >= 871){
$('.ie').css('display', 'block');
}
// Smooth-scroll every in-page anchor link over 777 ms.
$("a[href*='#']").on("click", function(e){
let this_elem = $(this);
$('html, body').stop().animate({
scrollTop: $(this_elem.attr('href')).offset().top
}, 777);
e.preventDefault();
return false;
});
// Animate the viewport down to the given element.
function scrollBottom(elem){
$('html, body').stop().animate({
scrollTop: $(elem).offset().top
}, 777);
}
// CTA buttons scroll to their respective sections.
$(".contact_btn").on("click", function(){
let anchor = $(".footer");
scrollBottom(anchor);
});
$(".learn_btn").on("click", function(){
let anchor = $(".portfolio");
scrollBottom(anchor);
});
});
|
5ac92b2735dde0fb8128600df608cb9766f4ff4b
|
[
"Markdown",
"JavaScript"
] | 2
|
Markdown
|
alampiler/Agency
|
b25c2cf10756bf459d23ce1c83270e52500ace40
|
e99be08024d21294ceaa6b0e24386b789554c7e8
|
refs/heads/master
|
<repo_name>Sognus/KIV-ZOS-2018<file_sep>/shell.h
#ifndef KIV_ZOS_SHELL_H
#define KIV_ZOS_SHELL_H
#include "structure.h"
/*
 * Runtime context of the pseudo-NTFS shell: which image file is open,
 * which directory is current, and the in-memory copy of the MFT.
 */
typedef struct shell
{
    boot_record *boot;   // boot record being operated on
    char *filename;      // physical file containing the NTFS structure
    int32_t cwd;         // UID of the current working directory
    mft_item *mft_array; // loaded MFT table
    int mft_array_size;  // number of MFT entries
} shell;
/**
 * Create a new shell rooted in an arbitrary directory (by UID).
 *
 * @param record        boot record / NTFS context
 * @param ntfs_filename physical NTFS image file
 * @param directory     directory uid (1 for root)
 * @return new shell instance
 */
shell *create_shell(boot_record *record, char *ntfs_filename, int32_t directory);
/**
 * Create a new shell in the root of the NTFS image.
 *
 * @param record        boot record / NTFS context
 * @param ntfs_filename physical NTFS image file
 * @return new shell instance
 */
shell *create_root_shell(boot_record *record, char *ntfs_filename);
/**
 * Debug-print the contents of the shell structure.
 *
 * @param shell pointer to the shell
 */
void print_shell(shell *shell);
#endif //KIV_ZOS_SHELL_H
<file_sep>/path_logic.h
#ifndef KIV_ZOS_PATH_LOGIC_H
#define KIV_ZOS_PATH_LOGIC_H
#include <string.h>
#include <stdlib.h>
#include <stdio.h>
/* Fix: the header previously #included itself ("path_logic.h"); harmless
 * thanks to the include guard, but wrong -- removed. */
#include "shell.h"
#include "ntfs_logic.h"
#include "usefull_functions.h"
// Folder separator character
#define PATH_SEPARATOR '/'
// Length of the separator (kept in case a multi-char separator is used)
#define PATH_SEPARATOR_LENGTH 1
// Maximum number of components in a path
#define PATH_PARTS_LIMITATION 256
// Maximum length of a single path component
#define PATH_PART_MAX_LENGTH 255
/**
 * Build the textual absolute path of the current working directory
 * from the given shell context.
 *
 * @param shell current context
 * @return pointer to a newly allocated string (caller frees), NULL on error
 */
char *get_current_path(shell *shell);
/**
 * Check, against the shell context, whether the path given by `path`
 * exists inside the VFS.
 *
 * @param shell context
 * @param path  path to verify
 * @return 1 = exists, 0 = does not exist, negative = error
 */
int path_exist(shell *shell, char *path);
/**
 * Return the UID of the target folder/file the path points to.
 *
 * @param shell context
 * @param path  path to resolve
 * @return target uid, negative on error
 */
int path_target_uid(shell *shell, char *path);
#endif //KIV_ZOS_PATH_LOGIC_H
<file_sep>/path_logic.c
#include "path_logic.h"
/**
 * Build the absolute path string of the shell's current working directory
 * by walking parent links from cwd up to the NTFS root.
 *
 * Algorithm: for each directory starting at cwd, read the first two
 * int32_t of its data cluster ([0] = own uid, [1] = parent uid), record
 * the uid and accumulate the name length, then move to the parent until
 * the root (NTFS_ROOT_UID) is reached.  The uids are stored back-to-front
 * so a forward pass can concatenate the names in root-to-leaf order.
 *
 * Fixes vs. the original:
 *  - item was dereferenced (strlen of its name) before the NULL check;
 *  - the result buffer was malloc'd one byte short (no room for the NUL
 *    terminator that strcat writes);
 *  - the FILE handle leaked on every return path;
 *  - the per-iteration heap buffer leaked when the loop broke at the root
 *    (replaced with a stack array).
 *
 * @param shell current shell context
 * @return newly malloc'd path string (caller frees), or NULL on error
 */
char *get_current_path(shell *shell)
{
    if(shell == NULL)
    {
        return NULL;
    }
    // UIDs of the cwd->root walk, stored inversely (index 0 stays unused);
    // only entries >= 1 are emitted later.
    int path_ids[PATH_PARTS_LIMITATION] = { 0 };
    int path_parts = 0;
    int path_strlen = 0;        // accumulated length of the final string
    // Current operation directory - the folder being inspected right now
    int32_t cod = shell->cwd;
    // Open the image file
    FILE *file = fopen(shell->filename, "rb");
    if(file == NULL)
    {
        // Could not open the file
        return NULL;
    }
    // Walk upward until the termination condition (root) is hit
    while(1)
    {
        // Find the MFT record of the current operation directory
        mft_item *item = find_mft_item_by_uid(shell, cod);
        // Validate the record BEFORE touching it (original dereferenced
        // a possibly-NULL pointer here)
        if(item == NULL || item->uid < 1)
        {
            fclose(file);
            return NULL;
        }
        // Grow the path length by this component's name
        path_strlen = path_strlen + strlen(item->item_name);
        // Store uids inversely so the output loop runs root-to-leaf
        path_ids[(PATH_PARTS_LIMITATION - 1 - path_parts)] = cod;
        path_parts = path_parts + 1;
        // Guard against a path with too many components
        if(path_parts > PATH_PARTS_LIMITATION)
        {
            fclose(file);
            return NULL;
        }
        // First fragment of the current operation directory
        mft_fragment fragment = item->fragments[0];
        // [0] = own uid, [1] = parent uid (stack storage, nothing to leak)
        int32_t folder_meta_uids[2] = { -1, -1 };
        fseek(file, fragment.fragment_start_address, SEEK_SET);
        fread(folder_meta_uids, sizeof(int32_t), 2, file);
        // Verify the read produced plausible uids
        if(folder_meta_uids[0] == -1 || folder_meta_uids[1] == -1)
        {
            fclose(file);
            return NULL;
        }
        // Stop once the root folder has been reached
        if(cod == NTFS_ROOT_UID)
        {
            break;
        }
        // Verify the parent record exists
        mft_item *parrent = find_mft_item_by_uid(shell, folder_meta_uids[1]);
        if(parrent == NULL || parrent->uid < 1)
        {
            fclose(file);
            return NULL;
        }
        // Not at the root yet - continue with the parent folder
        cod = folder_meta_uids[1];
        // Account for the separator that will follow this component
        path_strlen = path_strlen + PATH_SEPARATOR_LENGTH;
    }
    fclose(file);
    // +1 for the NUL terminator (the original under-allocated by one byte)
    size_t path_t = (sizeof(char) * (path_strlen + 1));
    char *path = malloc(path_t);
    if(path == NULL)
    {
        return NULL;
    }
    memset(path, 0, path_t);
    // Emit names root-to-leaf, appending a separator after every
    // non-root component (uid > 1)
    for(int i = 0; i < PATH_PARTS_LIMITATION; i++)
    {
        if(path_ids[i] < 1)
        {
            continue;
        }
        mft_item *contains_name = find_mft_item_by_uid(shell, path_ids[i]);
        strcat(path, contains_name->item_name);
        // Turn the separator char into a NUL-terminated string for strcat
        char separator_string[2] = {PATH_SEPARATOR, '\0'};
        if(contains_name->uid > 1) {
            strcat(path, separator_string);
        }
    }
    return path;
}
/**
 * Check, against the shell context, whether the path given by `path`
 * exists inside the VFS.
 *
 * @param shell context
 * @param path  path to verify (absolute when it starts with '/')
 * @return err (-inf,-1>, {0, 1}, anything else is unexpected
 */
int path_exist(shell *shell, char *path)
{
    // Validate the context
    if(shell == NULL)
    {
        // -1 = context must not be null
        return -1;
    }
    // Validate the path length
    if(path == NULL || strlen(path) < 1)
    {
        // NOTE(review): the original comment promised -2 for a NULL/short
        // path, but the code returns true ("exists") -- confirm intent.
        return true;
    }
    int path_len = strlen(path);
    //printf("PATH: %s \n", path);
    /*
     * Supported path syntax
     * absolute
     *   /dir1/dir2/file
     *   /dir1/dir2/file/
     *   /dir1/dir2/dir3/
     *   /dir1/dir2/dir3
     * relative
     *   dir1/dir2/dir3
     *   .././dir
     *   ../../
     */
    // A path starting with '/' is always absolute
    if(path[0] == '/')
    {
        // Accumulate characters until the next '/' or the end of string
        char buffer[PATH_PART_MAX_LENGTH];
        int buffer_used = 0;
        memset(buffer, 0, PATH_PART_MAX_LENGTH);
        // Current operation directory (1 = ROOT)
        int cod = NTFS_ROOT_UID;
        // Whether a separator character has been placed yet
        int first_separator = 0;
        for(int i = 0 ; i < strlen(path)+1; i++) {
            // Current character (the last char is re-read at i == strlen(path))
            char current_char = i == strlen(path) ? path[i-1] : path[i];
            // Put the leading slash (or a trailing non-slash char) in the buffer
            if(i == 0 || (i == path_len-1 && current_char != '/'))
            {
                buffer[buffer_used] = current_char;
                buffer_used++;
                first_separator = 1;
            }
            // Last character reached, or a component boundary found
            if (i == path_len-1 || current_char == PATH_SEPARATOR) {
                // Resolve the buffered component: list all MFT items in cod.
                // NOTE(review): uids is presumably heap-allocated by
                // get_folder_members() and never freed -- likely leak.
                int *uids = NULL;
                int uid_count = -1;
                get_folder_members(shell, cod, &uids, &uid_count);
                // Could any members be listed?
                if(uids == NULL || uid_count < 1)
                {
                    // No subfolder/file found -> the path does not exist
                    return 0;
                }
                // Search all member uids for a matching name
                int name_found = 0;
                for(int a = 0; a < uid_count; a++)
                {
                    // Fetch the MFT record for the uid
                    mft_item *item = find_mft_item_by_uid(shell, uids[a]);
                    if(item == NULL)
                    {
                        // No MFT record exists for the member
                        return -3;
                    }
                    //printf("ITEM_NAME: %s VS ", item->item_name);
                    //printf("BUFFER: %s ", buffer);
                    //printf("(CMP: %d)\n", strcmp(item->item_name, buffer));
                    // Compare the folder/file name
                    if(strcmp(item->item_name, buffer) == 0)
                    {
                        name_found = 1;
                        cod = item->uid;
                        // Clear the consumed buffer after a match
                        memset(buffer, 0, PATH_PART_MAX_LENGTH);
                        buffer_used = 0;
                        break;
                    }
                }
                // Was the component found?
                if(name_found == 0)
                {
                    // It was not -> the path does not exist
                    return 0;
                }
                // Clear the consumed buffer
                memset(buffer, 0, PATH_PART_MAX_LENGTH);
                buffer_used = 0;
            }
            // Handle the final character
            if(i == path_len - 1)
            {
                break;
            }
            // Append the character to the buffer -- separators excluded
            if(current_char != PATH_SEPARATOR) {
                buffer[buffer_used] = current_char;
                buffer_used++;
            }
        }
        // Every component was resolved -> the path exists
        return 1;
    }
    else // Otherwise the path is relative
    {
        // TODO: full relative-path verification
        /*
         * TODO: count the ".." components
         * TODO: ignore "./"
         * TODO: walk the FS from CWD->COD instead of from the root
         * TODO: ".." switches COD to the parent folder (at the root it
         *       stays there -- the root's parent is the root)
         *
         * ./test -> test -> $cwd$/test
         * ../.. -> parent_folder(cwd) -> parentfolder(cwd
         */
        char buffer[128] = {0};
        int buffer_used = 0;
        int cod = shell->cwd;
        while(true)
        {
            char c = *path;
            // Separator (or end of string) found
            if(c == '/' || c == '\0')
            {
                buffer[buffer_used] = '\0';
                buffer_used++;
                if(strcmp(buffer, "..") == 0)
                {
                    // Move to the parent folder
                    cod = get_parent_uid(shell, cod);
                    // reset
                    buffer_used = 0;
                    memset(buffer, 0, 128);
                    path = path + 1;
                    continue;
                }
                if(strcmp(buffer, ".") == 0)
                {
                    // reset
                    buffer_used = 0;
                    memset(buffer, 0, 128);
                    path = path + 1;
                    continue;
                }
                // List the members of the current folder
                int *uids = NULL;
                int uid_count = -1;
                get_folder_members(shell, cod, &uids, &uid_count);
                int found = 0;
                for(int i = 0; i < uid_count; i++)
                {
                    mft_item *item = find_mft_item_by_uid(shell, uids[i]);
                    if(strcmp(buffer, item->item_name) == 0)
                    {
                        cod = item->uid;
                        found = 1;
                        break;
                    }
                }
                if(cod == 1 && strlen(buffer) < 1) return true;
                if(found == 0) return false;
                // reset
                buffer_used = 0;
                memset(buffer, 0, 128);
            }
            buffer[buffer_used] = *path;
            buffer_used++;
            path = path+1;
            if(c == '\0')
            {
                break;
            }
        }
        return true;
    }
}
/**
 * Resolve a path to the UID of its target folder/file.  Mirrors the
 * traversal logic of path_exist(); a leading '/' means absolute,
 * anything else is resolved relative to shell->cwd.
 *
 * @param shell context
 * @param path  path to resolve
 * @return target uid; -50 when the path does not resolve; other
 *         negative values on error; shell->cwd for a NULL/empty path
 */
int path_target_uid(shell *shell, char *path)
{
    // Bail out early when the path does not exist at all
    if(path_exist(shell, path) != true)
    {
        return -50;
    }
    // Validate the context
    if(shell == NULL)
    {
        // -1 = context must not be null
        return -1;
    }
    // Validate the path length
    if(path == NULL || strlen(path) < 1)
    {
        // Empty path resolves to the current working directory
        return shell->cwd;
    }
    int path_len = strlen(path);
    //printf("PATH: %s \n", path);
    /*
     * Supported path syntax
     * absolute
     *   /dir1/dir2/file
     *   /dir1/dir2/file/
     *   /dir1/dir2/dir3/
     *   /dir1/dir2/dir3
     * relative
     *   dir1/dir2/dir3
     *   .././dir
     *   ../../
     */
    // A path starting with '/' is always absolute
    if(path[0] == '/')
    {
        // Accumulate characters until the next '/' or the end of string
        char buffer[PATH_PART_MAX_LENGTH];
        int buffer_used = 0;
        memset(buffer, 0, PATH_PART_MAX_LENGTH);
        // Current operation directory (1 = ROOT)
        int cod = NTFS_ROOT_UID;
        // Whether a separator character has been placed yet
        int first_separator = 0;
        // Last matched MFT item; its uid is the final answer
        mft_item *item = NULL;
        for(int i = 0 ; i < strlen(path)+1; i++) {
            // Current character (the last char is re-read at i == strlen(path))
            char current_char = i == strlen(path) ? path[i-1] : path[i];
            // Put the leading slash (or a trailing non-slash char) in the buffer
            if(i == 0 || (i == path_len-1 && current_char != '/'))
            {
                buffer[buffer_used] = current_char;
                buffer_used++;
                first_separator = 1;
            }
            // Last character reached, or a component boundary found
            if (i == path_len-1 || current_char == PATH_SEPARATOR) {
                // Resolve the buffered component: list all MFT items in cod.
                // NOTE(review): uids presumably heap-allocated, never freed.
                int *uids = NULL;
                int uid_count = -1;
                get_folder_members(shell, cod, &uids, &uid_count);
                // Could any members be listed?
                if(uids == NULL || uid_count < 1)
                {
                    // No subfolder/file found -> the path does not exist
                    return 0;
                }
                // Search all member uids for a matching name
                int name_found = 0;
                for(int a = 0; a < uid_count; a++)
                {
                    // Fetch the MFT record for the uid
                    item = find_mft_item_by_uid(shell, uids[a]);
                    if(item == NULL)
                    {
                        // No MFT record exists for the member
                        return -3;
                    }
                    //printf("ITEM_NAME: %s VS ", item->item_name);
                    //printf("BUFFER: %s ", buffer);
                    //printf("(CMP: %d)\n", strcmp(item->item_name, buffer));
                    // Compare the folder/file name
                    if(strcmp(item->item_name, buffer) == 0)
                    {
                        name_found = 1;
                        cod = item->uid;
                        // Clear the consumed buffer after a match
                        memset(buffer, 0, PATH_PART_MAX_LENGTH);
                        buffer_used = 0;
                        break;
                    }
                }
                // Was the component found?
                if(name_found == 0)
                {
                    // It was not -> the path does not exist
                    return 0;
                }
                // Clear the consumed buffer
                memset(buffer, 0, PATH_PART_MAX_LENGTH);
                buffer_used = 0;
            }
            // Handle the final character
            if(i == path_len - 1)
            {
                break;
            }
            // Append the character to the buffer -- separators excluded
            if(current_char != PATH_SEPARATOR) {
                buffer[buffer_used] = current_char;
                buffer_used++;
            }
        }
        // Every component resolved -> return the last matched item's uid
        return (item == NULL ? -50 : item->uid);
    }
    else // Otherwise the path is relative
    {
        // TODO: full relative-path verification
        /*
         * TODO: count the ".." components
         * TODO: ignore "./"
         * TODO: walk the FS from CWD->COD instead of from the root
         * TODO: ".." switches COD to the parent folder (at the root it
         *       stays there -- the root's parent is the root)
         *
         * ./test -> test -> $cwd$/test
         * ../.. -> parent_folder(cwd) -> parentfolder(cwd
         */
        char buffer[128] = {0};
        int buffer_used = 0;
        int cod = shell->cwd;
        while(true)
        {
            char c = *path;
            // Separator (or end of string) found
            if(c == '/' || c == '\0')
            {
                buffer[buffer_used] = '\0';
                buffer_used++;
                if(strcmp(buffer, "..") == 0)
                {
                    // Move to the parent folder
                    cod = get_parent_uid(shell, cod);
                    // reset
                    buffer_used = 0;
                    memset(buffer, 0, 128);
                    path = path + 1;
                    continue;
                }
                if(strcmp(buffer, ".") == 0)
                {
                    // reset
                    buffer_used = 0;
                    memset(buffer, 0, 128);
                    path = path + 1;
                    continue;
                }
                // List the members of the current folder
                int *uids = NULL;
                int uid_count = -1;
                get_folder_members(shell, cod, &uids, &uid_count);
                int found = 0;
                for(int i = 0; i < uid_count; i++)
                {
                    mft_item *item = find_mft_item_by_uid(shell, uids[i]);
                    if(strcmp(buffer, item->item_name) == 0)
                    {
                        cod = item->uid;
                        found = 1;
                        break;
                    }
                }
                if(cod == 1 && strlen(buffer) < 1) return cod;
                if(found == 0) return -50;
                // reset
                buffer_used = 0;
                memset(buffer, 0, 128);
            }
            buffer[buffer_used] = *path;
            buffer_used++;
            path = path+1;
            if(c == '\0')
            {
                break;
            }
        }
        return cod;
    }
}
/* TODO(unimplemented):
 * Convert a relative path to an absolute one, i.e.
 *   slozka/slozka        -> $cwd$/path
 *   ./slozka/slozka      -> slozka/slozka -> $cwd$/path
 *   ../../slozka/slozka  -> parent(cwd) -> parent(cwd) -> $cwd$/slozka/slozka
 *
 * Currently a stub that always returns NULL.
 */
char *relative_path_to_absolute(shell *shell, char *path)
{
    return NULL;
}<file_sep>/CMakeLists.txt
# Build configuration for the KIV_ZOS pseudo-NTFS project (pure C).
# 3.1 is the first release that honours CMAKE_C_STANDARD.
cmake_minimum_required(VERSION 3.1)
project(KIV_ZOS C)
# Fix: the original set CMAKE_CXX_STANDARD to 99 -- this is a C project
# and "99" is not a C++ standard; C99 is what the sources actually use.
set(CMAKE_C_STANDARD 99)
include_directories(.)
add_executable(KIV_ZOS
        main.c
        ntfs.c
        ntfs.h
        structure.c
        structure.h ntfs_commands.c ntfs_commands.h shell.c shell.h ntfs_logic.c ntfs_logic.h path_logic.c path_logic.h shell_app.c shell_app.h usefull_functions.c usefull_functions.h)
# Link libm (ceil() in ntfs_logic.c); this replaces the bogus "-lm" that
# was previously stuffed into CMAKE_C_FLAGS (a linker flag, not a compile flag).
target_link_libraries(KIV_ZOS m)
<file_sep>/usefull_functions.c
#include "usefull_functions.h"
/* Report whether str begins with the prefix pre.
 * An empty prefix matches any string, including the empty one. */
bool starts_with(const char *pre, const char *str)
{
    size_t prefix_len = strlen(pre);
    if (strlen(str) < prefix_len)
    {
        return false;   /* str is too short to contain the prefix */
    }
    return strncmp(pre, str, prefix_len) == 0;
}
/* Report whether character c occurs in the NUL-terminated string str.
 * The terminator itself never matches: searching for '\0' yields false,
 * matching the original loop which stopped before the terminator. */
bool str_contains(const char c, const char *str)
{
    if (c == '\0')
    {
        return 0;
    }
    return strchr(str, c) != NULL;
}<file_sep>/ntfs_commands.c
#include "ntfs_commands.h"
<file_sep>/ntfs_logic.h
#include "shell.h"
#ifndef KIV_ZOS_NTFS_LOGIC_H
#define KIV_ZOS_NTFS_LOGIC_H
/**
 * Tell whether the MFT item is a folder.
 *
 * @param shell context
 * @param uid   MFT item identifier
 * @return 0 = no, 1 = yes, -1 = error
 */
int is_folder(shell *shell, int32_t uid);
/**
 * Tell whether the MFT item is a regular file.
 *
 * @param shell context
 * @param uid   MFT item identifier
 * @return 0 = no, 1 = yes, -1 = error
 */
int is_file(shell *shell, int32_t uid);
/**
 * Tell whether the MFT item is a symlink.
 *
 * @param shell context
 * @param uid   MFT item identifier
 * @return 0 = no, 1 = yes, -1 = error
 */
int is_symlink(shell *shell, int32_t uid);
/**
 * Look up an MFT item by its uid.
 *
 * @param shell context
 * @param uid   MFT item identifier
 * @return NULL when it does not exist, pointer to the struct when it does
 */
mft_item *find_mft_item_by_uid(shell *shell, int32_t uid);
/**
 * Return 1 when the current working directory contains a
 * file/folder/symlink with the given name, 0 when it does not,
 * -1 on error.
 *
 * @return see description
 */
int cwd_has_item_name(shell *shell, char item_name[13]);
/**
 * Create a new folder in the FS inside the current working directory.
 *
 * @param shell       context
 * @param folder_name name of the new folder
 * @return 0 on success, non-zero error/notice code otherwise
 */
int create_folder(shell *shell, char folder_name[12]);
/**
 * Count the files/folders/symlinks stored inside the given folder.
 *
 * @param shell context
 * @param uid   uid of the folder
 */
int get_folder_members_count(shell *shell, int32_t uid);
/**
 * Fill the out pointers with an array of all uids inside the given
 * folder; out_count receives the number of entries in that array.
 *
 * @param shell     context
 * @param uid       uid of the folder being listed
 * @param out_array pointer to the pointer receiving the uid array
 * @param out_count pointer receiving the entry count
 */
void get_folder_members(shell *shell, int32_t uid, int **out_array, int *out_count);
/**
 * From the shell context, find the FIRST EMPTY cluster: its address and
 * its position within the bitmap.
 *
 * @param shell         function context
 * @param cluster_addr  out: address of the free cluster
 * @param cluster_order out: index of the free cluster in the bitmap
 * @return status: 0 = OK, <-max, 0) = error, (0, max) = notice
 */
int32_t find_empty_cluster(shell *shell, int32_t *cluster_addr, int *cluster_order);
/**
 * Walk the MFT table and find the next free uid.
 *
 * First increments the highest found uid by 1; if the 32-bit maximum is
 * reached, it instead scans the numbers from 1 to the maximum for a gap.
 *
 * @param shell context
 * @return next free uid, negative on error
 */
int32_t get_next_uid(shell *shell);
/**
 * Compare two mft_item structures by UID, for qsort.
 *
 * @param s1 structure 1
 * @param s2 structure 2
 * @return comparison result
 */
int compare_mft_items(const void *s1, const void *s2);
/**
 * Find the index of a free MFT item; returns -2 when none exists.
 *
 * @param shell context
 * @return index of a free MFT item, or an error code
 */
int get_free_mft_item_index(shell *shell);
/**
 * Physically register a file/folder/symlink uid inside its parent folder.
 *
 * @param shell      context
 * @param parent_uid uid of the parent folder
 * @param add_uid    uid being added
 * @return 0 on success, error code otherwise
 *
 */
int parrent_add_uid(shell *shell, int32_t parent_uid, int32_t add_uid);
/**
 * Return the number of clusters allocated to a single MFT item.
 *
 * @param mft_item item to inspect
 * @return allocated cluster count
 */
int get_allocated_cluster_count(mft_item *mft_item);
/**
 * Return the address of the next free slot a UID can be written to
 * inside the given folder.
 *
 * @param shell context
 * @param uid   folder uid
 */
int get_folder_next_member_adress(shell *shell, int32_t uid);
/**
 * Return the uid of the parent folder.
 * For the root the parent is the root itself; when the cwd argument is
 * negative, shell->cwd is used instead.
 *
 * @param shell context
 * @param cwd   starting folder uid (< 0 means shell->cwd)
 * @return parent uid
 */
int32_t get_parent_uid(shell *shell, int32_t cwd);
#endif //KIV_ZOS_NTFS_LOGIC_H
<file_sep>/ntfs_logic.c
#include "ntfs_logic.h"
#include "ntfs.h"
#include <stdlib.h>
#include <limits.h>
#include <math.h>
#include <structure.h>
/**
 * Tell whether the MFT item with the given uid is a directory.
 *
 * @param shell context
 * @param uid   MFT item identifier
 * @return 0 = no, 1 = yes, -1 = error (bad args or unknown uid)
 */
int is_folder(shell *shell, int32_t uid)
{
    // TODO: also treat a symlink to a directory as a folder?
    // Invalid input
    if(shell == NULL || uid < 0)
    {
        return -1;
    }
    mft_item *item = find_mft_item_by_uid(shell, uid);
    // Fix: the original dereferenced a possibly-NULL lookup result;
    // find_mft_item_by_uid() returns NULL for an unknown uid.
    if(item == NULL)
    {
        return -1;
    }
    // isDirectory == 1 marks a folder
    return item->isDirectory == 1;
}
/**
 * Tell whether the MFT item with the given uid is a regular file.
 *
 * @param shell context
 * @param uid   MFT item identifier
 * @return 0 = no, 1 = yes, -1 = error (bad args or unknown uid)
 */
int is_file(shell *shell, int32_t uid)
{
    // Invalid input
    if(shell == NULL || uid < 0)
    {
        return -1;
    }
    mft_item *item = find_mft_item_by_uid(shell, uid);
    // Fix: the original dereferenced a possibly-NULL lookup result;
    // find_mft_item_by_uid() returns NULL for an unknown uid.
    if(item == NULL)
    {
        return -1;
    }
    // isDirectory == 0 marks a plain file
    return item->isDirectory == 0;
}
/**
 * Tell whether the MFT item with the given uid is a symlink.
 *
 * @param shell context
 * @param uid   MFT item identifier
 * @return 0 = no, 1 = yes, -1 = error (bad args or unknown uid)
 */
int is_symlink(shell *shell, int32_t uid)
{
    // Invalid input
    if(shell == NULL || uid < 0)
    {
        return -1;
    }
    mft_item *item = find_mft_item_by_uid(shell, uid);
    // Fix: the original dereferenced a possibly-NULL lookup result;
    // find_mft_item_by_uid() returns NULL for an unknown uid.
    if(item == NULL)
    {
        return -1;
    }
    // isDirectory == 2 marks a symlink
    return item->isDirectory == 2;
}
/**
 * Look up an MFT item by its uid in the in-memory MFT table.
 *
 * @param shell context
 * @param uid   MFT item identifier
 * @return pointer into shell->mft_array, or NULL when not found / no context
 */
mft_item *find_mft_item_by_uid(shell *shell, int32_t uid)
{
    if(shell == NULL)
    {
        return NULL;
    }
    // Linear scan of the loaded MFT table for a matching uid
    mft_item *table = shell->mft_array;
    for(int idx = 0; idx < shell->mft_array_size; idx++)
    {
        if(table[idx].uid == uid)
        {
            return &table[idx];
        }
    }
    // No record carries this uid
    return NULL;
}
/**
 * Return the uid of the parent folder.
 * For the root the parent is the root itself; when the cwd argument is
 * negative, shell->cwd is used instead.
 *
 * @param shell context
 * @param cwd   starting folder uid (< 0 means use shell->cwd)
 * @return parent uid, or -1 when the folder listing has fewer than 2 entries
 */
int32_t get_parent_uid(shell *shell, int32_t cwd)
{
    int *uids = NULL;
    int uid_count = -1;
    if(cwd < 0) cwd = shell->cwd;
    // Directory data begins with [0] = own uid, [1] = parent uid (this is
    // the layout create_folder() writes), so member index 1 is the parent.
    get_folder_members(shell, cwd, &uids, &uid_count);
    if(uid_count < 2)
    {
        return -1;
    }
    // NOTE(review): uids is presumably heap-allocated by
    // get_folder_members() and is never freed here -- likely leak.
    return uids[1];
}
/**
 * Create a new folder in the FS inside the current working directory.
 * Backend for mkdir.
 *
 * TODO: implement a path-based variant instead of cwd-only.
 *
 * @param shell       context
 * @param folder_name name of the folder to create
 * @return 0 on success, non-zero error/notice code otherwise
 */
int create_folder(shell *shell, char folder_name[12])
{
    // Validate the context
    if(shell == NULL)
    {
        return -1;
    }
    // TODO: fail when a file/folder/symlink with this name already exists
    // Address of a free cluster and its index within the bitmap
    int32_t free_cluster_addr = -1;
    int32_t free_cluster_index = -1;
    // Locate a free cluster
    int operation_result = find_empty_cluster(shell, &free_cluster_addr, &free_cluster_index);
    // < 0 => hard error
    if(operation_result < 0)
    {
        // Find-cluster operation error
        return -3;
    }
    // > 0 => notice, bubble it up to the caller
    if(operation_result > 0)
    {
        // 1 = no operation performed - cannot continue, but not an error
        // 2 = no cluster is free
        return operation_result;
    }
    // Sanity-check the address and the index
    if(free_cluster_addr < 0 || free_cluster_index < 0)
    {
        // -4 = bad values for folder creation
        return -4;
    }
    // Now build the MFT record and write it at the chosen address
    int32_t uid = get_next_uid(shell);
    bool isDirectory = 1;
    int8_t item_order = 1;
    int8_t item_order_total = 1;
    int32_t item_size = 8; // 4 bytes = current folder uid, 4 bytes = parent folder uid
    mft_fragment *frag = create_mft_fragment(free_cluster_addr, 1);
    // Create the MFT item
    mft_item *created_mft_item = create_mft_item(uid, isDirectory, item_order, item_order_total, folder_name, item_size);
    // Install the first fragment
    created_mft_item->fragments[0] = *frag;
    // Mark the remaining fragment slots as unused
    for(int j = 1; j < MFT_FRAGMENTS_COUNT; j++)
    {
        created_mft_item->fragments[j].fragment_start_address = -1;
        created_mft_item->fragments[j].fragment_count = -1;
    }
    // Locate a free MFT item slot
    int index = get_free_mft_item_index(shell);
    // Verify a free MFT item exists
    if(index < 0)
    {
        // -5 = no free MFT item
        return -5;
    }
    // Address the new MFT item will be written to
    int32_t mft_addr = shell->boot->mft_start_address + (sizeof(mft_item) * index);
    // Register the new uid in the parent folder
    // NOTE(review): parent_operation's result is never checked.
    int parent_operation = parrent_add_uid(shell, shell->cwd, uid);
    // Read the bitmap and mark the cluster used - write #1
    // NOTE(review): `index` is the MFT slot index, not free_cluster_index --
    // marking bitmap[index] looks wrong; confirm against read_bitmap layout.
    // The bitmap is also presumably heap-allocated and never freed here.
    int *bitmap = read_bitmap(shell->filename, shell->boot);
    bitmap[index] = 1;
    // Write the folder's MFT record into the image file
    FILE *file = fopen(shell->filename, "r+b");
    fseek(file, mft_addr, SEEK_SET);
    fwrite(created_mft_item, sizeof(mft_item), 1, file);
    // Write the updated bitmap back to the file
    fseek(file, shell->boot->bitmap_start_address, SEEK_SET);
    fwrite(bitmap, sizeof(int32_t), shell->boot->cluster_count, file);
    // Update the in-memory MFT copy
    shell->mft_array[index] = *created_mft_item;
    // WRITE the folder's initial content:
    // CURRENT UID, PARENT UID
    fseek(file, free_cluster_addr, SEEK_SET);
    int32_t odkaz[2];
    // The first two entries are (1) this folder (2) the parent folder
    odkaz[0] = uid;
    odkaz[1] = shell->cwd;
    fwrite(odkaz, sizeof(int32_t), 2, file);
    // TODO: maybe some more bookkeeping, dunno
    // Cleanup
    fclose(file);
    free(frag);
    free(created_mft_item);
    // TODO: implementation
    return 0;
}
/**
 * Physically register a file/folder/symlink uid inside its parent folder.
 *
 * @param shell      context
 * @param parent_uid uid of the parent folder
 * @param add_uid    uid being added
 * @return 0 on success, negative error code otherwise
 *
 */
int parrent_add_uid(shell *shell, int32_t parent_uid, int32_t add_uid)
{
    // Validate the context
    if(shell == NULL)
    {
        // -1 = context not set
        return -1;
    }
    // Verify the parent exists
    mft_item *parent = find_mft_item_by_uid(shell, parent_uid);
    if(parent == NULL){
        // -2 = target folder does not exist
        return -2;
    }
    // How much room the member table needs after adding one more uid
    int32_t cluster_size = MAX_CLUSTER_SIZE;
    int32_t size_needed = sizeof(int32_t) * (get_folder_members_count(shell, parent_uid) + 1) ;
    double clusters_needed_help = (double)size_needed / (double)cluster_size;
    int32_t clusters_needed = (int32_t)(ceil(clusters_needed_help));
    int32_t allocated_clusters = get_allocated_cluster_count(parent);
    int32_t free_space_in_cluster = size_needed % cluster_size; // space that should remain in the last cluster
    // Address computation target
    int32_t uid_write_addr = -1;
    // Enough clusters are already allocated
    if(clusters_needed <= allocated_clusters || free_space_in_cluster < sizeof(int32_t))
    {
        uid_write_addr = get_folder_next_member_adress(shell, parent_uid);
    }
    // Allocating another cluster for a folder? Please no
    if(uid_write_addr < 0)
    {
        // -3 = not implemented and never will be
        return -3;
    }
    FILE *file = fopen(shell->filename, "r+b");
    if(file == NULL)
    {
        // -4 = file is broken or missing
        return -4;
    }
    // Position the write pointer
    fseek(file, uid_write_addr, SEEK_SET);
    // Write the uid at that address
    fwrite((const void*)&add_uid, (size_t)sizeof(int32_t), 1, file);
    // Close the file
    fclose(file);
    return 0;
}
/**
 * Find the index of the first unused MFT item (uid == 0).
 *
 * @param shell context
 * @return index of a free MFT item; -1 on NULL context, -2 when the table is full
 */
int get_free_mft_item_index(shell *shell)
{
    if(shell == NULL)
    {
        return -1;
    }
    // A uid of 0 marks an unoccupied slot in the loaded MFT table
    int idx = 0;
    while(idx < shell->mft_array_size)
    {
        if(shell->mft_array[idx].uid == 0)
        {
            // Free MFT item found at this index
            return idx;
        }
        idx++;
    }
    // No free MFT item found
    return -2;
}
/**
 * Walk the MFT table and find the next free uid.
 *
 * Normally returns (highest existing uid) + 1.  When the highest uid is
 * already INT_MAX, falls back to sorting the table by uid and returning
 * the first gap in the sequence (e.g. 1 2 3 4 6 yields 5).
 *
 * Fix: the original overflow guard tested whether the constant 1
 * overflows (`x > INT_MAX - x` with x == 1 -- always false), so the
 * wrap-around branch was dead code; the guard now tests the actual
 * increment.  The unused variable `a` was removed and the table is
 * freed on the early-error path as well.
 *
 * @param shell context
 * @return next free uid, or a negative error code
 */
int32_t get_next_uid(shell *shell)
{
    // Validate the context
    if(shell == NULL)
    {
        return -1;
    }
    // Re-read the MFT table from the image
    mft_item *mft_array = NULL;
    int mft_array_size = -1;
    read_mft_items(shell->filename, shell->boot, &mft_array, &mft_array_size);
    // Check the MFT read succeeded
    if(mft_array == NULL || mft_array_size < 1)
    {
        // -2 = could not read the MFT table (free(NULL) is a no-op)
        free(mft_array);
        return -2;
    }
    // Linear scan for the highest uid in use - first strategy
    int32_t largest = 0;
    for(int i = 0; i < mft_array_size; i++)
    {
        mft_item item = mft_array[i];
        if(item.uid > largest)
        {
            largest = item.uid;
        }
    }
    // Would largest + 1 overflow?
    if(largest == INT_MAX)
    {
        // Increment would overflow: find a free uid another way.
        // Sort the structs by uid; two neighbours differing by more
        // than 1 bracket a free uid.
        qsort(mft_array, mft_array_size, sizeof(mft_item), compare_mft_items);
        for(int z = 0; z < mft_array_size-1; z++)
        {
            mft_item item1 = mft_array[z];
            mft_item item2 = mft_array[z+1];
            // Gap between consecutive mft items found - stop searching
            if(abs(item1.uid - item2.uid) > 1)
            {
                largest = item1.uid + 1;
                break;
            }
        }
    }
    else
    {
        // No overflow - simply return the increment
        largest = largest + 1;
    }
    // Release the table copy
    free(mft_array);
    // Return the next free uid
    return largest;
}
/**
 * qsort comparator ordering mft_item structures by uid, ascending.
 *
 * Fix: the original returned `e1->uid - e2->uid`, which can overflow
 * a signed int for widely separated uids and yield a wrong sign;
 * explicit comparisons are overflow-safe.
 *
 * @param s1 structure 1
 * @param s2 structure 2
 * @return negative / zero / positive comparison result
 */
int compare_mft_items(const void *s1, const void *s2)
{
    const mft_item *e1 = (const mft_item *)s1;
    const mft_item *e2 = (const mft_item *)s2;
    if(e1->uid < e2->uid) return -1;
    if(e1->uid > e2->uid) return 1;
    return 0;
}
/**
 * From the shell context, find the FIRST EMPTY cluster: its address and
 * its position within the bitmap.
 *
 * @param shell         function context
 * @param cluster_addr  out: address of the free cluster
 * @param cluster_order out: index of the free cluster in the bitmap
 * @return status: 0 = OK, <-max, 0) = error, (0, max) = notice
 */
int32_t find_empty_cluster(shell *shell, int32_t *cluster_addr, int *cluster_order)
{
    // return value
    int rtn = 0;
    // Validate the context
    if(shell == NULL)
    {
        // -1 = the shell does not exist
        return -1;
    }
    // The function may fill one or both out addresses; with both NULL
    // there is nothing to do.
    if(cluster_addr == NULL && cluster_order == NULL)
    {
        // 1 = no operation was performed
        return 1;
    }
    // Read the bitmap.
    // NOTE(review): bitmap is presumably heap-allocated by read_bitmap()
    // and never freed here -- likely leak.
    int *bitmap = read_bitmap(shell->filename, shell->boot);
    int bitmap_size = shell->boot->cluster_count;
    // Scan for the position of the first free cluster
    int iter = 0;
    for(iter = 0; iter < bitmap_size; iter++)
    {
        // First zeroed (free) cluster found
        if(bitmap[iter] == 0)
        {
            break;
        }
    }
    int32_t data_start_addr;
    int32_t first_cluster_empty_addr;
    // NOTE(review): when ALL clusters are used the loop exits with
    // iter == bitmap_size, so this condition misses the "full" case and
    // bitmap[iter] reads one element past the array -- confirm.
    if(iter == bitmap_size - 1 && bitmap[iter] == 1)
    {
        // All clusters are occupied
        data_start_addr = -1;
        first_cluster_empty_addr = -1;
        // Return 2 = no empty cluster exists
        rtn = 2;
    }
    else {
        // Compute the cluster's address
        data_start_addr = shell->boot->data_start_address;
        first_cluster_empty_addr = data_start_addr + iter * shell->boot->cluster_size;
    }
    // Emit the address when requested
    if(cluster_addr != NULL)
    {
        *cluster_addr = first_cluster_empty_addr;
    }
    if(cluster_order != NULL)
    {
        *cluster_order = iter;
    }
    // Return the status
    return rtn;
}
/**
 * Return 1 when the current working directory contains a
 * file/folder/symlink with the given name, 0 when it does not,
 * and -1 on error.
 *
 * Currently an unimplemented stub: always reports -1 (error).
 *
 *
 * @return see description
 */
int cwd_has_item_name(shell *shell, char item_name[13])
{
    //TODO: implementation
    return -1;
}
/**
 * Count the occupied entries (uids > 0) stored in a directory's data
 * clusters: walks each allocated fragment and scans its uid table in
 * 4-byte steps.
 *
 * Fix: the original malloc'd and freed a 4-byte buffer on every single
 * read; a stack variable does the same job with no allocator traffic.
 *
 * @param shell context
 * @param uid   uid of the directory to inspect
 * @return number of members, or a negative error code
 */
int get_folder_members_count(shell *shell, int32_t uid)
{
    // Validate the context
    if(shell == NULL)
    {
        return -1;
    }
    // TODO: also accept a symlink that points to a folder
    if(!is_folder(shell, uid))
    {
        return -2;
    }
    // Fetch the MFT item
    mft_item *current_folder = find_mft_item_by_uid(shell, uid);
    // Verify the item exists
    if(current_folder == NULL)
    {
        return -3;
    }
    // Open the image file for reading
    FILE *file = fopen(shell->filename, "rb");
    // Verify the open succeeded
    if(file == NULL)
    {
        return -4;
    }
    // Count of items
    int estimated_count = 0;
    // Walk the MFT fragments and read their data
    for(int i = 0; i < MFT_FRAGMENTS_COUNT; i++)
    {
        mft_fragment frag = current_folder->fragments[i];
        // Unused fragment slots are marked with -1/-1 - nothing to read
        if(frag.fragment_count == -1 && frag.fragment_start_address == -1)
        {
            continue;
        }
        int32_t start_addr = frag.fragment_start_address;
        int32_t end_addr = frag.fragment_start_address + MAX_CLUSTER_SIZE * frag.fragment_count;
        int32_t current_addr = start_addr;
        // Count the occupied uid slots in this fragment
        while(current_addr < end_addr)
        {
            // One uid per read; stack storage instead of malloc/free
            int32_t read_uid = 0;
            fseek(file, current_addr, SEEK_SET);
            fread(&read_uid, sizeof(int32_t), 1, file);
            // A positive uid means an occupied member slot
            if(read_uid > 0)
            {
                estimated_count = estimated_count + 1;
            }
            // Advance by 4 bytes
            current_addr = current_addr + sizeof(int32_t);
        }
    }
    // Close the file
    fclose(file);
    return estimated_count;
}
/**
 * Finds the file offset of the first free slot (stored UID == 0) in the
 * UID table of the given directory — i.e. where the next member UID can
 * be written.
 *
 * @param shell shell context
 * @param uid   UID of the directory
 * @return offset of the free slot, or a negative error code:
 *         -1 shell is NULL, -2 uid is not a directory, -3 MFT item not
 *         found, -4 FS file could not be opened, -5 no free slot exists
 */
int get_folder_next_member_adress(shell *shell, int32_t uid)
{
    // Validate the shell context
    if(shell == NULL)
    {
        return -1;
    }
    // TODO: or is symlink to folder
    if(!is_folder(shell, uid))
    {
        return -2;
    }
    // Look up the MFT item of the directory
    mft_item *current_folder = find_mft_item_by_uid(shell, uid);
    if(current_folder == NULL)
    {
        return -3;
    }
    // Open the FS file for reading
    FILE *file = fopen(shell->filename, "rb");
    if(file == NULL)
    {
        return -4;
    }
    // Scan every fragment of the directory for a zero UID slot
    for(int i = 0; i < MFT_FRAGMENTS_COUNT; i++)
    {
        mft_fragment frag = current_folder->fragments[i];
        // Skip unused fragment slots (marked with -1/-1)
        if(frag.fragment_count == -1 && frag.fragment_start_address == -1)
        {
            continue;
        }
        int32_t start_addr = frag.fragment_start_address;
        int32_t end_addr = frag.fragment_start_address + MAX_CLUSTER_SIZE * frag.fragment_count;
        // Read one int32 UID per step; the stack variable also fixes the
        // memory leak the old code had on the early-return path (it
        // returned without freeing the heap buffer).
        for(int32_t current_addr = start_addr; current_addr < end_addr; current_addr += (int32_t)sizeof(int32_t))
        {
            int32_t read_uid = 0;
            fseek(file, current_addr, SEEK_SET);
            // Stop scanning this fragment on a short/failed read
            if(fread(&read_uid, sizeof(int32_t), 1, file) != 1)
            {
                break;
            }
            // Zero marks a free slot — this is the address we want
            if(read_uid == 0)
            {
                fclose(file);
                return current_addr;
            }
        }
    }
    fclose(file);
    // -5 = no free slot was found in any fragment
    return -5;
}
/**
 * Returns (through the out parameters) an array of all member UIDs of
 * the directory `uid` and the number of entries in it. On any failure
 * the out parameters are left untouched. The caller owns *out_array and
 * must free() it.
 *
 * @param shell     shell context
 * @param uid       UID of the directory to scan
 * @param out_array receives a pointer to the malloc'd UID array
 * @param out_count receives the number of UIDs stored in the array
 */
void get_folder_members(shell *shell, int32_t uid, int **out_array, int *out_count)
{
    // Validate the shell context
    if(shell == NULL)
    {
        return;
    }
    // TODO: or is symlink to folder
    if(!is_folder(shell, uid))
    {
        return;
    }
    // Number of UIDs stored in the directory
    int array_size = get_folder_members_count(shell, uid);
    // Anything below 1 signals an error (negative) or nonsense (0 — a
    // directory always stores at least its own and its parent's UID)
    if(array_size < 1)
    {
        return;
    }
    // Result array of the detected size
    int *array = malloc(array_size * sizeof(int32_t));
    if(array == NULL)
    {
        return;
    }
    // Look up the MFT item of the directory
    mft_item *current_folder = find_mft_item_by_uid(shell, uid);
    if(current_folder == NULL)
    {
        // BUG FIX: the old code leaked `array` on this path
        free(array);
        return;
    }
    // Open the FS file for reading
    FILE *file = fopen(shell->filename, "rb");
    if(file == NULL)
    {
        // BUG FIX: the old code leaked `array` on this path
        free(array);
        return;
    }
    // Number of member UIDs collected so far
    int estimated_count = 0;
    // Walk every fragment of the directory and collect positive UIDs
    for(int i = 0; i < MFT_FRAGMENTS_COUNT; i++)
    {
        mft_fragment frag = current_folder->fragments[i];
        // Skip unused fragment slots (marked with -1/-1)
        if(frag.fragment_count == -1 && frag.fragment_start_address == -1)
        {
            continue;
        }
        int32_t start_addr = frag.fragment_start_address;
        int32_t end_addr = frag.fragment_start_address + MAX_CLUSTER_SIZE * frag.fragment_count;
        for(int32_t current_addr = start_addr; current_addr < end_addr; current_addr += (int32_t)sizeof(int32_t))
        {
            int32_t read_uid = 0;
            fseek(file, current_addr, SEEK_SET);
            // Stop scanning this fragment on a short/failed read
            if(fread(&read_uid, sizeof(int32_t), 1, file) != 1)
            {
                break;
            }
            // BUG FIX: store only positive UIDs and stay inside the
            // buffer — the old code wrote every value (including zeros)
            // at array[estimated_count] before checking it, which could
            // write one element past the end of the allocation
            if(read_uid > 0 && estimated_count < array_size)
            {
                array[estimated_count] = read_uid;
                estimated_count++;
            }
        }
    }
    // Publish the result through the out parameters
    *out_array = array;
    *out_count = estimated_count;
    fclose(file);
}
/**
 * Returns the number of clusters allocated to a single MFT item by
 * summing the fragment_count of every fragment with a positive count.
 * (The stale "@param shell" of the old comment was wrong — there is no
 * such parameter.)
 *
 * @param mft_item item whose fragments are summed (may be NULL)
 * @return total allocated cluster count, or -1 when mft_item is NULL
 */
int get_allocated_cluster_count(mft_item *mft_item)
{
    if(mft_item == NULL)
    {
        // -1 = the MFT item was not supplied
        return -1;
    }
    int clusters_so_far = 0;
    // Walk all fragments and sum their cluster counts
    for(int i = 0; i < MFT_FRAGMENTS_COUNT; i++)
    {
        mft_fragment frag = mft_item->fragments[i];
        if(frag.fragment_count > 0) {
            clusters_so_far += frag.fragment_count;
        }
    }
    return clusters_so_far;
}
<file_sep>/ntfs.h
#include <stdint.h>
#include <stdbool.h>
#include <stdlib.h>
#include <stdio.h>
#include "structure.h"
#ifndef KIV_ZOS_NTFS_H
#define KIV_ZOS_NTFS_H
/**
* Vytvori fyzicky soubor reprezentujici pseudo-filesystem
*
* @param filename nazev vytvoreneho souboru
* @param cluster_count pocet clusteru
* @param cluster_size velikost jednoho clusteru v B
*/
void create_file(char filename[], int32_t cluster_count, int32_t cluster_size);
/**
* Na zaklade vstupniho parametru overi, zda soubor s danym jmenem existuje
*
* @param file_name nazev/cesta souboru k overeni
* @return existence souboru (0 - neexistuje, 1 existuje)
*/
bool file_exists(const char *file_name);
/**
 * Vrati velikost souboru v bytech, pokud soubor neexistuje, vrati -1
*
* @param filename nazev souboru
* @return velikost souboru v bytech
*/
int file_size(const char *filename);
/**
* Na zaklade jmena souboru precte boot_record
*
* @param filename nazev souboru pro cteni
* @return boot record or null
*/
boot_record *read_boot_record(char filename[]);
/**
* Na zaklade jmena souboru a boot recordu precte bitmapu
*
* @param filename soubor, ze ktereho se bude cist
* @param record zaznam podle ktereho se bude bitmapa načítat
* @return ukazatel na pole (velikost pole je v boot_record)
*/
int *read_bitmap(char filename[], boot_record *record);
/**
* Na zaklade souboru a bootrecordu s nim spjatym precte ze souboru vsechny neprazdne
* struktury mft_item, vysledek navrati skrz ukazatele
*
* @param filename soubor FS
* @param record zaznam FS
* @param mft_array ukazatel, pole struktur mft_item
* @param mft_array_size ukazatel, pocet prvku mft_item
*/
void read_mft_items(char filename[], boot_record *record, mft_item **mft_array, int *mft_array_size);
/**
* Vytvori soubor zadane velikosti
*
* @param filename
* @param bytes
*/
void format_file(char filename[], int bytes);
#endif //KIV_ZOS_NTFS_H
<file_sep>/main.c
#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>
#include <memory.h>
#include <string.h>
#include "structure.h"
#include "ntfs.h"
#include "ntfs_logic.h"
#include "shell.h"
#include "path_logic.h"
#include "shell_app.h"
/*
* TODO: Implementace commandu ve virtual shellu
*
* TODO: Ukladani slozek.
* na miste dat budou UID jednotlivych MFT itemu
* prvni hodnota bude vzdy UID aktualni slozky
* druha hodnota bude UID slozky nadrazene (pro root bude nadrazena slozka root)
*
* TODO: command ls
* Z MFT fragmentu se prectou ciselna data
* na zaklade ciselnych dat (UID) se prectou nazvy jednotlivych souboru a slozek
* nazvy jsou ulozene v MFT_ITEM pod polozkou item_name
* TODO: Path
* vzdy se cesta bude buildit odzadu rekurzivne
* najde se posledni slozka/soubor v ceste (tzn. pro cestu /slozka1/slozka2/file1.txt se bude hledat mft item s nazvem file1.txt)
* pokud je posledni cast cesty soubor odrizne se z cesty a precte se nazev slozky (tzn. z /slozka1/slozka2/file.txt se stane /slozka1/slozka2)
* nasledne se dle uid rodice slozky rekurzivne projde az k rootu / (root ma stejneho rodice jako svoje id)
* v ramci rekurzivniho prochazeni se bude ukladat nazvy slozek (+ nazev souboru) - do linked list (nebo stack) a na zaklade ulozeneho textu se slozi cesta
*
* je nutne overovat zdali cesta existuje
* pri incp neni nutne aby soubor ve FS existoval, musi existovat jen slozky
* TODO: mkdir
* v dane slozce vytvori podslozku
* cely fs jsou podslozky
* jedina slozka je root
*
* overi se cesta (zda existuje rodic)
* u overeni cesty nemusi existovat vytvarena slozka
*
* zapise se mft item, vytvori se mft fragment
* do fragmentu se zapise uid aktualniho souboru a uid rodice (dle cesty
*
* TODO:
* Prevod relativni cesty na absolutni
* tj.
* slozka/slozka -> $cwd$/cesta
* ./slozka/slozka -> slozka/slozka -> $cwd$/cesta
* ../../slozka/slozka -> parrent(cwd) -> parent(cwd) -> slozka/slozka -> $cwd$/slozka/slozka
*
*
*
*/
/**
 * Manual test of directory creation, member listing and path resolution:
 * formats a fresh FS in argv[1], creates nested folders and prints the
 * member UIDs plus path_exist() results for several absolute/relative
 * paths. Output strings are unchanged; the fixes here only release the
 * UID lists and the stale bitmap the old code leaked.
 *
 * @return 0 on success, -1 when no FS file name was supplied
 */
int test_folder(int argc, char *argv[])
{
    // Declarations
    int *uids = NULL;
    int uid_count = -1;
    if(argc < 2)
    {
        printf("ERROR: Program was started without arguments!");
        return -1;
    }
    printf("NTFS START!\n");
    printf("\n");
    // Create the FS
    printf("CREATING FORMATTED FILE!\n");
    format_file(argv[1], 128000);
    // Read the boot record back from the FS
    printf("READING BOOT RECORD!\n");
    boot_record *record = read_boot_record(argv[1]);
    print_boot_record(record);
    printf("\n");
    // Read the bitmap from the FS
    printf("READING BITMAP!\n");
    int *bitmap = read_bitmap(argv[1], record);
    print_bitmap(bitmap, record);
    printf("\n");
    // Create the shell
    printf("CREATING SHELL!\n");
    shell *sh = create_root_shell(record, argv[1]);
    // Member UIDs of the current working directory (root)
    get_folder_members(sh, sh->cwd, &uids, &uid_count);
    printf("VYPIS IDECEK ROOTU:\n");
    for(int i = 0; i < uid_count; i++)
    {
        printf("UID: %d\n", uids[i]);
    }
    // Create multiple folders in root
    create_folder(sh, "folder1");
    create_folder(sh, "folder2");
    create_folder(sh, "folder3");
    // BUG FIX: release the previous UID list before re-reading
    free(uids);
    uids = NULL;
    uid_count = -1;
    get_folder_members(sh, sh->cwd, &uids, &uid_count);
    printf("VYPIS IDECEK ROOTU:\n");
    for(int i = 0; i < uid_count; i++)
    {
        printf("(%d) -> %s\n", uids[i], find_mft_item_by_uid(sh,uids[i])->item_name);
    }
    printf("\n\n");
    // Create folders inside other folders
    // command: cd /folder1
    sh->cwd = 2;
    // BUG FIX: release the previous UID list before re-reading
    free(uids);
    uids = NULL;
    uid_count = -1;
    get_folder_members(sh, sh->cwd, &uids, &uid_count);
    printf("VYPIS IDECEK FOLDER1:\n");
    for(int i = 0; i < uid_count; i++)
    {
        printf("(%d) -> %s\n", uids[i], find_mft_item_by_uid(sh,uids[i])->item_name);
    }
    create_folder(sh,"test1");
    create_folder(sh,"test2");
    create_folder(sh,"test3");
    // BUG FIX: release the previous UID list before re-reading
    free(uids);
    uids = NULL;
    uid_count = -1;
    get_folder_members(sh, sh->cwd, &uids, &uid_count);
    printf("VYPIS IDECEK FOLDER1:\n");
    for(int i = 0; i < uid_count; i++)
    {
        printf("(%d) -> %s\n", uids[i], find_mft_item_by_uid(sh,uids[i])->item_name);
    }
    // BUG FIX: release the stale bitmap before re-reading it
    free(bitmap);
    bitmap = read_bitmap(sh->filename, sh->boot);
    print_bitmap(bitmap, sh->boot);
    printf("\n");
    // Path-existence tests: absolute paths
    printf("/ -> %d\n",path_exist(sh, "/"));
    printf("/folder1 -> %d\n",path_exist(sh, "/folder1"));
    printf("/folder1/ -> %d\n",path_exist(sh, "/folder1/"));
    printf("/folder1/test3 -> %d\n",path_exist(sh, "/folder1/test3"));
    printf("/folder1/test3/ -> %d\n",path_exist(sh, "/folder1/test3/"));
    printf("/folder1/test4 -> %d\n",path_exist(sh, "/folder1/test4"));
    printf("\n");
    // Path-existence tests: relative to /folder1
    sh->cwd = 2;
    printf("test3 -> %d\n",path_exist(sh, "test3"));
    printf("test4 -> %d\n",path_exist(sh, "test4"));
    printf("test5 -> %d\n",path_exist(sh, "test5"));
    printf("test6 -> %d\n",path_exist(sh, "test6"));
    printf("test1 -> %d\n",path_exist(sh, "test1"));
    printf("test2 -> %d\n",path_exist(sh, "test2"));
    printf("\n");
    // Path-existence tests: "." / ".." navigation from UID 7
    sh->cwd = 7;
    printf("../test2 -> %d\n",path_exist(sh, "../test2"));
    printf("./test3 -> %d\n",path_exist(sh, "./test3"));
    printf("../../folder3 -> %d\n",path_exist(sh, "../../folder3"));
    printf("../../ -> %d\n",path_exist(sh, "../../"));
    printf("\n\n");
    // BUG FIX: release the remaining allocations
    free(uids);
    free(bitmap);
    return 0;
}
// General system test — unsorted. Formats a fresh FS in argv[1] and
// exercises boot record / bitmap / MFT reading, shell creation, folder
// creation and path resolution, printing everything to stdout. Output
// strings are unchanged; the fixes only release buffers the old code
// leaked (UID lists and the find_empty_cluster out-parameters).
int test(int argc, char *argv[]) {
    if(argc < 2)
    {
        printf("ERROR: Program was started without arguments!");
        return -1;
    }
    printf("NTFS START!\n");
    printf("\n");
    //create_file(argv[1], MAX_CLUSTER_COUNT, MAX_CLUSTER_SIZE);
    // Create the FS
    printf("CREATING FORMATTED FILE!\n");
    format_file(argv[1], 128000);
    // Read the boot record from the FS
    printf("READING BOOT RECORD!\n");
    boot_record *record = read_boot_record(argv[1]);
    print_boot_record(record);
    printf("\n");
    // Read the bitmap from the FS
    printf("READING BITMAP!\n");
    int *bitmap = read_bitmap(argv[1], record);
    print_bitmap(bitmap, record);
    printf("\n");
    // Read the MFT from the FS
    printf("READING MFT!\n");
    mft_item *mft_array = NULL;
    int mft_array_size = -1;
    read_mft_items(argv[1], record, &mft_array, &mft_array_size);
    print_mft_items(mft_array, mft_array_size);
    printf("\n\n\n\n\n");
    // Shell simulation
    shell *sh = create_root_shell(record, argv[1]);
    print_shell(sh);
    printf("\n");
    mft_item *root = find_mft_item_by_uid(sh, 1);
    print_mft_item(*root);
    printf("IS ROOT FOLDER: %d\n", is_folder(sh,1));
    printf("IS ROOT FILE: %d\n", is_file(sh,1));
    printf("IS ROOT SYMLINK: %d\n", is_symlink(sh,1));
    printf("\n\n");
    // Number of items in the root folder
    printf("ROOT NALEZENA UID: %d\n", get_folder_members_count(sh, 1));
    // Member UIDs of root
    int *uids = NULL;
    int uid_count = -1;
    get_folder_members(sh, 1, &uids, &uid_count);
    printf("VYPIS IDECEK ROOTU:\n");
    for(int i = 0; i < uid_count; i++)
    {
        printf("UID: %d\n", uids[i]);
    }
    // BUG FIX: release the UID list
    free(uids);
    printf("\n\n");
    // First free cluster
    int32_t *first_empty_addr = malloc(sizeof(int32_t));
    int32_t *first_empty_iter = malloc(sizeof(int32_t));
    find_empty_cluster(sh, first_empty_addr, first_empty_iter);
    printf("FIRST EMPTY CLUSTER ADDR: %d\n", *first_empty_addr);
    printf("FIRST EMPTY CLUSTER BITMAP INDEX: %d\n", *first_empty_iter);
    // BUG FIX: release the two out-parameter buffers
    free(first_empty_addr);
    free(first_empty_iter);
    printf("\n\n");
    // First free UID
    int32_t next_uid = get_next_uid(sh);
    printf("NEXT FREE UID: %d\n", next_uid);
    printf("\n\n");
    // Create a folder
    printf("CREATING FOLDER NAMED TEST\n");
    int operation_code = create_folder(sh, "test");
    printf("CREATE OPERATION CODE: %d\n", operation_code);
    printf("\n\n");
    // Re-read the FS state after the folder was created
    shell *shell2 = create_root_shell(record, argv[1]);
    printf("READING BITMAP AFTER FOLDER CREATE: \n");
    int *bitmap2 = read_bitmap(argv[1], record);
    print_bitmap(bitmap2, record);
    printf("\n");
    printf("READING MFT AFTER FOLDER CREATE: \n");
    mft_item *mft_array2 = NULL;
    int mft_array_size2 = -1;
    read_mft_items(argv[1], record, &mft_array2, &mft_array_size2);
    print_mft_items(mft_array2, mft_array_size2);
    printf("\n\n\n\n\n");
    int *uids2 = NULL;
    int uid_count2 = -1;
    get_folder_members(sh, 2, &uids2, &uid_count2);
    printf("VYPIS IDECEK TESTU:\n");
    for(int i = 0; i < uid_count2; i++)
    {
        printf("UID: %d\n", uids2[i]);
    }
    // BUG FIX: release the UID list
    free(uids2);
    printf("\n\n");
    printf("\n\n");
    // Path test
    shell *shell3 = create_root_shell(record, argv[1]);
    printf("VYPISUJI CESTU ROOTU: \n");
    printf("%s\n", get_current_path(shell3));
    printf("VYPISUJI CESTU TEST: \n");
    shell3->cwd = 2;
    printf("%s\n", get_current_path(shell3));
    printf("\n\n");
    printf("\n\n");
    // Create a folder named "ahoj" inside TEST
    create_folder(shell3, "ahoj");
    printf("READING MFT AFTER FOLDER CREATE: \n");
    mft_item *mft_array3 = NULL;
    int mft_array_size3 = -1;
    read_mft_items(argv[1], record, &mft_array3, &mft_array_size3);
    print_mft_items(mft_array3, mft_array_size3);
    printf("VYPISUJI CESTU AHOJ: \n");
    shell3->cwd = 3;
    printf("%s\n", get_current_path(shell3));
    printf("\n\n");
    // Path existence checks
    printf("EXISTUJE CESTA K ROOTU: %d\n", path_exist(shell3, "/"));
    printf("\n");
    printf("EXISTUJE CESTA K TEST: %d\n", path_exist(shell3, "/test/"));
    printf("\n");
    printf("EXISTUJE CESTA K TEST2: %d\n", path_exist(shell3, "/test"));
    printf("\n");
    printf("EXISTUJE CESTA K AHOJ: %d\n", path_exist(shell3, "/test/ahoj"));
    printf("\n");
    printf("EXISTUJE CESTA K SVETE: %d\n", path_exist(shell3, "/svete/"));
    printf("\n");
    printf("EXISTUJE CESTA K CURRENT WORKING DIRECTORY (%s): %d\n", get_current_path(shell3), path_exist(shell3, get_current_path(shell3)));
    printf("\n");
    printf("\n\n");
    printf("NTFS END!\n");
    return 0;
}
/*
 * Entry point: expects the pseudo-NTFS file name as argv[1] and starts
 * the interactive shell on it.
 */
int main(int argc, char *argv[]) {
    if(argc < 2)
    {
        printf("ERROR: Program was started without arguments!");
        return -1;
    }
    //test_folder(argc, argv);
    // Run the interactive virtual-shell loop on the supplied FS file
    return shell_app_main(argv[1]);
}<file_sep>/shell_app.h
#include <string.h>
#include <stdlib.h>
#include <stdio.h>
#include "shell.h"
#include "ntfs.h"
#include "path_logic.h"
#include "usefull_functions.h"
#ifndef KIV_ZOS_SHELL_APP_H
#define KIV_ZOS_SHELL_APP_H
/**
* Hlavni procedura virtualizace shellu
*
* @param filename
* @return
*/
int shell_app_main(char *filename);
// Prikaz: ls
// TODO: symlinky
int command_ls(shell *shell);
// Parser prikazu
int commands(shell *shell, char *command);
// Prikaz: cd <cesta>
// TODO: relativni cesty
int command_cd(shell *shell, char *command);
// Prikaz: pwd
int command_pwd(shell *shell);
// Prikaz: mkdir
int command_mkdir(shell *shell, char *command);
#endif //KIV_ZOS_SHELL_APP_H
<file_sep>/usefull_functions.h
#include <stdbool.h>
#include <string.h>
#ifndef KIV_ZOS_USEFULL_FUNCTIONS_H
#define KIV_ZOS_USEFULL_FUNCTIONS_H
bool starts_with(const char *pre, const char *str);
bool str_contains(const char c, const char *str);
#endif //KIV_ZOS_USEFULL_FUNCTIONS_H
<file_sep>/shell.c
#include "shell.h"
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include "ntfs.h"
/**
 * Creates a new shell whose working directory is the folder with the
 * given UID (1 for root).
 *
 * @param record        boot record of the NTFS context
 * @param ntfs_filename physical NTFS file name
 * @param directory     UID of the working directory (1 for root)
 * @return freshly allocated shell, or NULL on invalid input / OOM
 */
shell *create_shell(boot_record *record, char *ntfs_filename, int32_t directory)
{
    // Guard against invalid input instead of crashing later
    if(record == NULL || ntfs_filename == NULL)
    {
        return NULL;
    }
    shell *sh = malloc(sizeof(shell));
    if(sh == NULL)
    {
        return NULL;
    }
    // TODO: is UID directory
    sh->boot = record;
    sh->cwd = directory;
    // Own copy of the file name (+1 for the terminating NUL)
    sh->filename = malloc(strlen(ntfs_filename)+1);
    if(sh->filename == NULL)
    {
        free(sh);
        return NULL;
    }
    strcpy(sh->filename, ntfs_filename);
    // Cache all non-empty MFT items of the filesystem
    read_mft_items(ntfs_filename, record, &sh->mft_array ,&sh->mft_array_size);
    return sh;
}
/**
 * Dumps a rough overview of a shell structure to stdout.
 *
 * @param sh shell to print (may be NULL)
 */
void print_shell(shell *sh)
{
    // Without a shell there is nothing meaningful to print
    if(sh == NULL)
    {
        printf("Shell je NULL!\n");
        return;
    }
    printf("SHELL BOOT RECORD NULL: %d\n", (sh->boot == NULL));
    printf("SHELL CURRENT WORKING DIR: %d\n", sh->cwd);
    printf("SHELL FILENAME: %s\n", sh->filename);
    printf("SHELL MFT ITEM ARRAY NULL: %d\n", (sh->mft_array == NULL));
    printf("SHELL MFT ITEM COUNT : %d\n", sh->mft_array_size);
}
/**
 * Convenience wrapper: creates a shell located in the filesystem root.
 *
 * @param record        boot record of the NTFS context
 * @param ntfs_filename physical NTFS file name
 * @return newly created shell rooted at "/"
 */
shell *create_root_shell(boot_record *record, char *ntfs_filename)
{
    // UID 1 is reserved for the root directory
    shell *root_shell = create_shell(record, ntfs_filename, 1);
    return root_shell;
}
<file_sep>/structure.c
#include <string.h>
#include <memory.h>
#include <stdbool.h>
#include <stdlib.h>
#include <stdio.h>
#include <math.h>
#include "structure.h"
/**
 * Recomputes all derived sizes and start addresses in a boot record for
 * the given cluster geometry and stores them back into the record.
 *
 * @param record        record to update
 * @param cluster_count new number of clusters
 * @param cluster_size  size of a single cluster in bytes
 */
void boot_record_resize(boot_record *record, int32_t cluster_count, int32_t cluster_size)
{
    /* Sizes of the individual NTFS sections */
    int32_t br_bytes = sizeof(boot_record);
    // Data section: cluster count times cluster size in bytes
    int32_t data_bytes = cluster_count * cluster_size;
    // Bitmap: one int per cluster
    int32_t bitmap_bytes = cluster_count * sizeof(int);
    // MFT takes 10 % of the data section (rounded up)
    int32_t mft_bytes = (int32_t)(ceil(0.1 * data_bytes));
    /* Start addresses — sections follow each other:
     * boot record | MFT | bitmap | data */
    int32_t mft_start = br_bytes;
    int32_t bitmap_start = mft_start + mft_bytes;
    int32_t data_start = bitmap_start + bitmap_bytes;
    /* Write everything back into the record */
    record->cluster_count = cluster_count;
    record->cluster_size = cluster_size;
    record->mft_start_address = mft_start;
    record->bitmap_start_address = bitmap_start;
    record->data_start_address = data_start;
    // NOTE: disk size excludes any alignment bytes; add them separately
    record->disk_size = br_bytes + mft_bytes + bitmap_bytes + data_bytes;
}
/**
 * Builds a boot record for the filesystem from the supplied parameters.
 *
 * @param signature ID of the FS author
 * @param volume_descriptor description of the FS
 * @param disk_size total size of the FS
 * @param cluster_size size of one cluster
 * @param cluster_count number of clusters
 * @param mft_start_adress start address of the MFT
 * @param bitmap_start_adress start address of the bitmap
 * @param data_start_adress start address of the data section
 * @param mft_max_fragment_count max fragments in one MFT record
 * @return pointer to the created boot_record, or NULL on OOM
 */
boot_record *create_boot_record(char signature[9], char volume_descriptor[251], int32_t disk_size,
        int32_t cluster_size, int32_t cluster_count, int32_t mft_start_adress, int32_t bitmap_start_adress,
        int32_t data_start_adress, int32_t mft_max_fragment_count)
{
    // Allocate the record and check for OOM
    boot_record *pointer = malloc(sizeof(boot_record));
    if(pointer == NULL)
    {
        return NULL;
    }
    // Bounded copies: never overflow the fixed-size fields and always
    // NUL-terminate (the old unbounded strcpy could overrun on long input)
    strncpy(pointer->signature, signature, 8);
    pointer->signature[8] = '\0';
    strncpy(pointer->volume_descriptor, volume_descriptor, 250);
    pointer->volume_descriptor[250] = '\0';
    pointer->disk_size = disk_size;
    pointer->cluster_size = cluster_size;
    pointer->cluster_count = cluster_count;
    pointer->mft_start_address = mft_start_adress;
    pointer->bitmap_start_address = bitmap_start_adress;
    pointer->data_start_address = data_start_adress;
    pointer->mft_max_fragment_count = mft_max_fragment_count;
    // BUG FIX: the original fell off the end of this non-void function
    // without returning the record (undefined behavior for callers)
    return pointer;
}
/**
* Vytvori standardni boot_record pro KIV/ZOS
*
* @return pointer na vytvoreny boot_record
*/
boot_record *create_standard_boot_record()
{
/* VYPOCET VELIKOSTI JEDNOTLIVYCH CASTI NTFS */
// Velikost datove casti: pocet clusteru * velikost clusteru v B
int32_t data_size = MAX_CLUSTER_COUNT * MAX_CLUSTER_SIZE;
// Velikost bitmapy: pocet clusteru * velikost bool
int32_t bitmap_size = MAX_CLUSTER_COUNT * sizeof(int);
// MFT = 10% datoveho bloku
int32_t mft_size = (int32_t)(ceil(0.1 * data_size));
// boot_record size
int32_t boot_record_size = sizeof(boot_record);
int32_t ntfs_size = data_size + bitmap_size + mft_size + boot_record_size;
/* ------------------------------------------- */
/* VYPOCET POCATECNICH ADRES JEDNOTLIVYCH CASTI NTFS */
int32_t mft_start = boot_record_size;
int32_t bitmap_start = boot_record_size + mft_size;
int32_t data_start = boot_record_size + mft_size + bitmap_size;
return create_boot_record("viteja", "KIV/ZOS - PSEUDO NTFS - JAKUB VITEK - ZS 2018", ntfs_size,
MAX_CLUSTER_SIZE, MAX_CLUSTER_COUNT, mft_start, bitmap_start, data_start, MFT_FRAGMENTS_COUNT);
}
/**
 * Creates an mft_item structure from the supplied parameters and returns
 * a pointer to it.
 *
 * @param uid unique ID of the record
 * @param isDirectory whether the item is a directory (or symlink)
 * @param item_order order of this record when a file spans several
 * @param item_order_total total number of records for the file
 * @param item_name file name (at most 11 characters are kept)
 * @param item_size file size in bytes
 * @return created mft_item, or NULL on OOM
 */
mft_item *create_mft_item(int32_t uid, bool isDirectory, int8_t item_order, int8_t item_order_total,
        char item_name[12], int32_t item_size)
{
    // Allocate the item and check for OOM
    mft_item *pointer = malloc(sizeof(struct mft_item));
    if(pointer == NULL)
    {
        return NULL;
    }
    // Fill the structure
    pointer->uid = uid;
    pointer->isDirectory = isDirectory;
    pointer->item_order = item_order;
    pointer->item_order_total = item_order_total;
    pointer->item_size = item_size;
    // BUG FIX: item_name is char[12], so the last valid index is 11.
    // The old code wrote item_name[12] = '\0' — one byte past the array —
    // and used an unbounded strcpy that could overflow on long names.
    strncpy(pointer->item_name, item_name, 11);
    pointer->item_name[11] = '\0';
    // Return the pointer
    return pointer;
}
/**
* Vytvori strukturu mft_fragment a vrati na ni ukazatel
*
* @param fragment_start_adress pocatecni adresa fragmentu (casti souboru)
* @param fragment_count souvisly pocet clusteru, ve kterych je fragment
* @return ukazatel na strukturu mft_fragment
*/
mft_fragment *create_mft_fragment(int32_t fragment_start_adress, int32_t fragment_count)
{
// Deklarace ukazatele
mft_fragment *pointer = NULL;
// Alokace pameti
pointer = malloc(sizeof(mft_fragment));
// Vyplneni struktury daty
pointer->fragment_count = fragment_count;
pointer->fragment_start_address = fragment_start_adress;
// Navrat ukazatele
return pointer;
}
/**
 * Prints a formatted dump of a boot_record structure to stdout.
 *
 * @param pointer pointer to the boot_record (may be NULL)
 */
void print_boot_record(boot_record *pointer)
{
    printf("Vypisuji boot record:\n");
    // Nothing more to print without a record
    if(!pointer)
    {
        printf("BOOT RECORD je prazdny\n");
        return;
    }
    printf("Signature: %s\n", pointer->signature);
    printf("Volume descriptor: %s\n", pointer->volume_descriptor);
    printf("Disk size: %dB\n", pointer->disk_size);
    printf("Cluster size: %dB\n", pointer->cluster_size);
    printf("Cluster count: %d\n", pointer->cluster_count);
    printf("MFT start adress: %d\n", pointer->mft_start_address);
    printf("Bitmap start adress: %d\n", pointer->bitmap_start_address);
    printf("Data start adress: %d\n", pointer->data_start_address);
    printf("MFT fragment count: %d\n", pointer->mft_max_fragment_count);
}
/**
 * Prints the cluster bitmap as a space-separated row of 0/1 values,
 * using the cluster count stored in the boot record.
 *
 * @param bitmap pointer to the in-memory bitmap
 * @param record boot record that describes the bitmap
 */
void print_bitmap(int *bitmap, boot_record *record) {
    // Both the bitmap and the record (for the count) are required
    if (bitmap == NULL || record == NULL) {
        printf("ERROR: Nelze vypsat bitmapu!\n");
        return;
    }
    int count = record->cluster_count;
    for (int idx = 0; idx < count; idx++) {
        printf("%d ", bitmap[idx]);
    }
    printf("\n");
}
/**
 * Prints an array of mft_item structures. Empty slots (uid == 0) are not
 * printed individually — they are only tallied and reported as a single
 * summary line at the end.
 *
 * @param array pointer to the item array
 * @param size  number of elements in the array
 */
void print_mft_items(mft_item *array, int size)
{
    int empty_items = 0;
    for(int i = 0; i < size; i++)
    {
        // Count empty slots instead of dumping them
        if(array[i].uid == 0)
        {
            empty_items++;
        }
        else
        {
            print_mft_item(array[i]);
            printf("***\n");
        }
    }
    // Summary of the empty MFT slots
    printf("EMPTY MFT ITEMS COUNT: %d\n", empty_items);
    printf("***\n");
}
/**
 * Prints a single mft_item. Unused fragment slots (-1/-1) are only
 * counted and reported as one summary line to keep the output short.
 *
 * @param item item to print (passed by value)
 */
void print_mft_item(mft_item item)
{
    printf("UID: %d\n", item.uid);
    printf("DIRECTORY: %d\n", item.isDirectory);
    printf("ITEM ORDER: %d\n", item.item_order);
    printf("ITEM ORDER TOTAL: %d\n", item.item_order_total);
    printf("ITEM NAME: %s\n", item.item_name);
    printf("ITEM SIZE %d\n", item.item_size);
    printf("**\n");
    int unused_fragments = 0;
    for(int i = 0; i < MFT_FRAGMENTS_COUNT; i++)
    {
        mft_fragment frag = item.fragments[i];
        // Count unused fragment slots instead of dumping them
        if(frag.fragment_count == -1 && frag.fragment_start_address == -1)
        {
            unused_fragments++;
        }
        else
        {
            print_mft_fragment(frag);
            printf("*\n");
        }
    }
    // Summary of the unused fragment slots
    printf("EMPTY FRAGMENTS COUNT: %d\n", unused_fragments);
    printf("*\n");
}
/**
 * Prints a single MFT fragment (its start address and cluster count).
 *
 * @param fragment fragment to print (passed by value)
 */
void print_mft_fragment(mft_fragment fragment)
{
    printf("FRAGMENT START ADRESS: %d\n", fragment.fragment_start_address);
    printf("FRAGMENT COUNT: %d\n", fragment.fragment_count);
}<file_sep>/shell_app.c
#include "shell_app.h"
/**
 * Main loop of the virtual shell: repeatedly prints the current path as
 * a prompt, reads a command line and dispatches it, until "exit" or EOF.
 *
 * @param filename name of the physical FS file (created when missing)
 * @return 0 on normal exit, -1 on an invalid input file name
 */
int shell_app_main(char *filename)
{
    // Validate the input
    if(filename == NULL || strlen(filename) < 1)
    {
        printf("Invalid input filename! Exiting");
        return -1;
    }
    // Create the FS file when it does not exist yet
    if(!file_exists(filename))
    {
        format_file(filename, 128000);
    }
    boot_record *boot = read_boot_record(filename);
    shell *shell = create_shell(boot, filename, NTFS_ROOT_UID);
    // Main application loop - reading commands
    while(true)
    {
        char *line = NULL; /* forces getline to allocate with malloc */
        size_t len = 0;
        ssize_t read;
        // Print the current directory as the prompt
        printf("%s$ ", get_current_path(shell));
        read = getline (&line, &len, stdin);
        // BUG FIX: stop on EOF/error — the old code indexed line[read-1]
        // with read == -1, an out-of-bounds write
        if(read == -1)
        {
            free(line);
            break;
        }
        // Strip the trailing newline (guard: the last line of input may
        // legitimately end without one)
        if(read > 0 && line[read-1] == '\n')
        {
            line[read-1] = '\0';
        }
        // "exit" terminates the shell
        if(strcmp("exit", line) == 0) {
            printf("Shell exit\n");
            free(line);
            break;
        }
        // Dispatch the command to its handler
        commands(shell, line);
        // BUG FIX: getline allocates a fresh buffer every iteration;
        // the old code never freed it
        free(line);
    }
    return 0;
}
// Command parser: dispatches one raw command line to its handler and
// returns that handler's result, or 1 when the command is unknown.
int commands(shell *shell, char *command)
{
    // Exact-match commands take no argument
    if(strcmp(command, "ls") == 0)
    {
        return command_ls(shell);
    }
    if(strcmp(command, "pwd") == 0)
    {
        return command_pwd(shell);
    }
    // Prefix-match commands carry an argument after the command name
    if(starts_with("cd", command))
    {
        return command_cd(shell, command);
    }
    if(starts_with("mkdir", command))
    {
        return command_mkdir(shell, command);
    }
    // 1 = unknown command
    return 1;
}
// Command: ls — lists the members of the current working directory.
// The first two entries are printed as "." and ".." (self/parent UIDs);
// directories are prefixed with '+', files with '-'.
// TODO: symlinks
int command_ls(shell *shell)
{
    int *uids = NULL;
    int uid_count = -1;
    get_folder_members(shell, shell->cwd, &uids, &uid_count);
    for(int i = 0; i < uid_count; i++)
    {
        // TODO: symlink output
        mft_item *item = find_mft_item_by_uid(shell, uids[i]);
        // Skip dangling UIDs instead of dereferencing NULL
        if(item == NULL)
        {
            continue;
        }
        char *mark = item->isDirectory == 0 ? "-" : "+";
        char *name = i < 2 ? (i == 0 ? "." : "..") : item->item_name;
        printf("%s%s\n", mark, name);
    }
    // BUG FIX: release the UID list allocated by get_folder_members
    free(uids);
    return 0;
}
// Command: cd <path> — changes the current working directory when the
// path resolves to an existing directory.
int command_cd(shell *shell, char *command)
{
    // Move to the second token of the line (the path argument)
    char delim[] = " ";
    char *ptr = strtok(command, delim);
    ptr = strtok(NULL, delim);
    // BUG FIX: "cd" without an argument used to pass NULL further down
    if(ptr == NULL)
    {
        printf("PATH NOT FOUND (neexistující cesta)\n");
        return 0;
    }
    int target_dir = path_target_uid(shell, ptr);
    // A positive UID means the path resolved to an existing directory
    if(target_dir > 0)
    {
        printf("OK\n");
        shell->cwd = target_dir;
    }
    else
    {
        printf("PATH NOT FOUND (neexistující cesta)\n");
    }
    printf("path: %s -> %d\n", ptr, target_dir);
    return 0;
}
// Command: pwd — prints the absolute path of the current directory.
int command_pwd(shell *shell)
{
    char *current_path = get_current_path(shell);
    printf("%s\n", current_path);
    return 0;
}
// Command: mkdir <path> — creates a new directory at the given path.
// The parent path must already exist and the leaf name must not.
int command_mkdir(shell *shell, char *command)
{
    // Move to the second token of the line (the path argument)
    char delim[] = " ";
    char *ptr = strtok(command, delim);
    ptr = strtok(NULL, delim);
    // BUG FIX: "mkdir" without an argument used to crash on strlen(NULL)
    if(ptr == NULL)
    {
        printf("PATH NOT FOUND (neexistuje zadaná cesta)\n");
        return 0;
    }
    // BUG FIX: +1 for the terminating NUL — the old malloc(strlen(ptr))
    // was one byte short and the following strcpy overflowed the buffer
    char *full_path = malloc(strlen(ptr) + 1);
    strcpy(full_path, ptr);
    // Find the last path component (the name of the new directory)
    char * token, * last;
    last = token = strtok(ptr, "/");
    for (;(token = strtok(NULL, "/")) != NULL; last = token);
    // BUG FIX: a path with no component at all (e.g. "/") left last NULL
    if(last == NULL)
    {
        free(full_path);
        printf("PATH NOT FOUND (neexistuje zadaná cesta)\n");
        return 0;
    }
    // Parent path without the trailing component (same +1 fix as above)
    char *path = malloc(strlen(full_path) + 1);
    strcpy(path, full_path);
    path[strlen(full_path) - strlen(last)] = '\0';
    // Remember the current directory so it can be restored afterwards
    int current_uid = shell->cwd;
    if(path_exist(shell,path))
    {
        // Jump to the target parent directory
        shell->cwd = path_target_uid(shell, path);
        // Check whether an item with that name already exists there
        int *uids = NULL;
        int uid_count = -1;
        get_folder_members(shell, shell->cwd, &uids, &uid_count);
        int found = 0;
        for(int i = 0; i < uid_count; i++)
        {
            mft_item *item = find_mft_item_by_uid(shell, uids[i]);
            if(item != NULL && strcmp(item->item_name, last) == 0)
            {
                found = 1;
                break;
            }
        }
        // BUG FIX: release the UID list allocated by get_folder_members
        free(uids);
        // Create the directory only when the name is still free
        if(found == 1)
        {
            printf("EXIST (nelze založit, již existuje)\n");
        }
        else
        {
            printf("OK\n");
            create_folder(shell, last);
        }
    }
    else
    {
        printf("PATH NOT FOUND (neexistuje zadaná cesta)\n");
    }
    // Restore the original working directory
    shell->cwd = current_uid;
    // BUG FIX: release the temporary path buffers
    free(full_path);
    free(path);
    return 0;
}<file_sep>/structure.h
#ifndef KIV_ZOS_STRUCTURE_H
#define KIV_ZOS_STRUCTURE_H
#include <stdbool.h>
#include <stdint.h>
// UID of the filesystem root directory
#define NTFS_ROOT_UID 1
// Character used as filler for empty file space
#define NTFS_NEUTRAL_CHAR '\0'
// UID stored in an MFT_ITEM when the slot is free/unused
#define UID_ITEM_FREE 0
// Ratio of the MFT size to the data-area size of the filesystem
#define MFT_RATIO 0.1
// Maximum number of fragments in one MFT record
#define MFT_FRAGMENTS_COUNT 32
// Total number of data clusters in the filesystem
#define MAX_CLUSTER_COUNT 4096
// Size of one cluster in bytes
#define MAX_CLUSTER_SIZE 4096

// On-disk descriptor stored at the very beginning of the FS container file.
typedef struct boot_record {
    char signature[9];              // login of the FS author
    char volume_descriptor[251];    // description of the generated FS
    int32_t disk_size;              // total size of the VFS
    int32_t cluster_size;           // size of one cluster
    int32_t cluster_count;          // number of clusters
    int32_t mft_start_address;      // address where the MFT begins
    int32_t bitmap_start_address;   // address where the bitmap begins
    int32_t data_start_address;     // address where the data blocks begin
    int32_t mft_max_fragment_count; // max number of fragments in one MFT record (note: per record, not per file)
                                    // same as MFT_FRAGMENTS_COUNT
} boot_record;

// One contiguous run of clusters holding part of a file.
typedef struct mft_fragment {
    int32_t fragment_start_address; // start address
    int32_t fragment_count;         // number of clusters in the fragment
} mft_fragment;

// One MFT record describing a file or a directory.
typedef struct mft_item {
    int32_t uid;                    // item UID; UID_ITEM_FREE means the slot is free
    bool isDirectory;               // file or directory (0 = file, 1 = directory, 2 = symbolic link)
                                    // NOTE(review): a bool cannot represent the value 2 — confirm how slinks are stored
    int8_t item_order;              // record order when an item spans multiple MFT records, otherwise 1
    int8_t item_order_total;        // total number of MFT records for the item
    char item_name[12];             // 8+3 name + '\0' C string terminator
    int32_t item_size;              // file size in bytes
    mft_fragment fragments[MFT_FRAGMENTS_COUNT]; // fragments of the file
} mft_item;

/**
 * Creates a boot_record for the filesystem from the input parameters.
 *
 * @param signature FS author id
 * @param volume_descriptor FS description
 * @param disk_size total FS size
 * @param cluster_size size of one cluster
 * @param cluster_count number of clusters
 * @param mft_start_adress address where the MFT begins
 * @param bitmap_start_adress address where the bitmap begins
 * @param data_start_adress address where the data area begins
 * @param mft_max_fragment_count maximum number of fragments in one MFT record
 * @return pointer to the created boot_record
 */
boot_record *create_boot_record(char signature[9], char volume_descriptor[251], int32_t disk_size,
    int32_t cluster_size, int32_t cluster_count, int32_t mft_start_adress, int32_t bitmap_start_adress,
    int32_t data_start_adress, int32_t mft_max_fragment_count);

/**
 * Creates the standard boot_record for KIV/ZOS.
 *
 * @return pointer to the created boot_record
 */
boot_record *create_standard_boot_record();

/**
 * Prints a formatted dump of a boot_record structure.
 *
 * @param record pointer to the boot_record structure
 */
void print_boot_record(boot_record *pointer);

/**
 * Creates an mft_item structure from the input parameters and returns a pointer to it.
 *
 * @param uid unique record id
 * @param isDirectory whether the item is a directory (or possibly a slink)
 * @param item_order record order when one item spans multiple records
 * @param item_order_total total number of records for the item
 * @param item_name name of the file
 * @param item_size size of the file
 * @return the created mft_item structure
 */
mft_item *create_mft_item(int32_t uid, bool isDirectory, int8_t item_order, int8_t item_order_total,
    char item_name[12], int32_t item_size);

/**
 * Creates an mft_fragment structure and returns a pointer to it.
 *
 * @param fragment_start_adress start address of the fragment (part of a file)
 * @param fragment_count number of contiguous clusters holding the fragment
 * @return pointer to the mft_fragment structure
 */
mft_fragment *create_mft_fragment(int32_t fragment_start_adress, int32_t fragment_count);

/**
 * Recomputes and updates the values in a boot record from the input values.
 *
 * @param record record to modify
 * @param cluster_count new number of clusters
 * @param cluster_size size of one cluster
 */
void boot_record_resize(boot_record *record, int32_t cluster_count, int32_t cluster_size);

/**
 * Prints the bitmap described by the given boot record.
 *
 * @param bitmap pointer to the in-memory bitmap
 * @param record pointer to the boot record in use
 */
void print_bitmap(int *bitmap, boot_record *record);

/**
 * Prints an array of mft_item structures.
 *
 * @param array pointer to the array
 * @param size number of elements
 */
void print_mft_items(mft_item *array, int size);

/**
 * Prints a single mft_item.
 *
 * @param item the mft item
 */
void print_mft_item(mft_item item);

/**
 * Prints a single mft fragment.
 *
 * @param fragment
 */
void print_mft_fragment(mft_fragment fragment);
#endif //KIV_ZOS_STRUCTURE_H
<file_sep>/ntfs.c
#include <math.h>
#include "ntfs.h"
#include "structure.h"
#include "math.h"
// POZN: MFT je 10% data size
/**
 * Creates the FS container file of the requested size, then computes how the
 * space is split between boot record, MFT, bitmap and data clusters and
 * delegates the actual layout to create_file().
 *
 * @param filename name of the container file
 * @param bytes requested total size in bytes
 */
void format_file(char filename[], int bytes)
{
    /* If the file already exists, delete it */
    if(file_exists(filename))
    {
        // Remove file if exist
        remove(filename);
    }
    // shortcut because the exact size math is painful ("cheat")
    FILE *file = fopen(filename, "wb");
    // Paranoia pays off - better to verify the file really opened
    if(file == NULL)
    {
        printf("ERROR: Soubor %s nelze otevrit/vytvorit!\n", filename);
        return;
    }
    // Create a file of the exact size - ALLOCATION - THIS IS A CHEAT
    // (fills the whole file with the neutral filler byte, one char at a time)
    int used = 0;
    while(used < bytes)
    {
        fputc(NTFS_NEUTRAL_CHAR, file);
        used = used + 1;
    }
    fclose(file);
    // Initially the data size equals the requested filesystem size
    int size_left = bytes;
    //printf("DEBUG: Data size (=original size): %d\n", size_left);
    /* We must subtract the sizes of the other FS regions */
    // subtract the size of the boot record
    size_left = size_left - sizeof(boot_record);
    //printf("DEBUG: Data size (-boot record tj. %d): %d\n", sizeof(boot_record),size_left);
    // subtract the size of the MFT (MFT_RATIO of the total size)
    int mft_size = (int)(floor(bytes * MFT_RATIO));
    size_left = size_left - mft_size;
    //printf("DEBUG: Data size (-mft size tj. %d): %d\n", (int)(ceil(size_left * MFT_RATIO)), size_left);
    // Compute the cluster count and subtract the cluster data area
    int cluster_count = (int)floor((size_left / MAX_CLUSTER_SIZE));
    int cluster_data_size = (cluster_count * MAX_CLUSTER_SIZE);
    size_left = size_left - cluster_data_size;
    //printf("DEBUG: Data size (-clusters tj. %d ): %d\n",(cluster_count * MAX_CLUSTER_SIZE), size_left);
    /* Compute the space needed for the bitmap */
    // TODO: maybe change the bitmap to bool (char) instead of int
    // Number of bytes needed to store the bitmap (one int per cluster)
    int bitmap_bytes_needed = cluster_count * sizeof(int);
    // If the bitmap needs more bytes than are left over,
    //printf("DEBUG: Number of clusters = %d\n", cluster_count);
    while(bitmap_bytes_needed > size_left)
    {
        // we have to give one cluster back to free up room
        cluster_count = cluster_count - 1;
        size_left = size_left + MAX_CLUSTER_SIZE;
        //printf("DEBUG: Data size (+ 1 cluster size tj. %d): %d\n", MAX_CLUSTER_SIZE, size_left);
    }
    // Subtract the bytes used for the bitmap
    size_left = size_left - bitmap_bytes_needed;
    //printf("DEBUG: Data size (-bitmap bytes tj. %d): %d\n", bitmap_bytes_needed, size_left);
    // Bytes used purely as padding to reach the exact requested FS size
    int alignment_bytes = size_left;
    // Subtract the alignment bytes - at this point size_left should be 0
    size_left = size_left - alignment_bytes;
    //printf("DEBUG: Data size (-alignment bytes tj. %d): %d\n", alignment_bytes, size_left);
    //printf("DEBUG: FINAL number of clusters = %d\n", cluster_count);
    if(size_left != 0)
    {
        printf("ERROR: Chyba ve vypoctu velikosti souboroveho systemu!\n");
        return;
    }
    // Write the actual filesystem structures into the file
    create_file(filename, cluster_count, MAX_CLUSTER_SIZE);
}
/*
 * Lays out the pseudo-NTFS structures (boot record, bitmap, root MFT item,
 * root directory data) inside an already pre-allocated container file.
 *
 * filename - name of the container file (must already exist and be large enough)
 * cluster_count - maximum number of data clusters
 * cluster_size - maximum size of one cluster in bytes
 */
void create_file(char filename[], int32_t cluster_count, int32_t cluster_size)
{
    // Variable declarations
    FILE *file;
    // TODO: maybe change the bitmap to bool (char) instead of int
    int *bitmap = malloc(cluster_count*sizeof(int));
    file = fopen(filename, "r+b");
    // Paranoia pays off - better to verify the file really opened
    if(file == NULL)
    {
        printf("ERROR: Soubor %s nelze otevrit/vytvorit!\n", filename);
        return;
    }
    // Rewind to the beginning of the file
    fseek(file, 0, SEEK_SET);
    /* CREATE AND WRITE THE BOOT RECORD */
    // Create the standard boot record
    boot_record *record = create_standard_boot_record();
    // Adjust the standard boot record to the requested geometry (resize)
    boot_record_resize(record, cluster_count, cluster_size);
    // Write the boot_record into the file
    fwrite(record, sizeof(boot_record), 1, file);
    /* ----------------------------------------------------------- */
    /* WRITE THE BITMAP */
    // Seek to the beginning of the bitmap region
    fseek(file, record->bitmap_start_address, SEEK_SET);
    // The first cluster is taken by the root directory
    bitmap[0] = true;
    // All remaining clusters start out free
    for(int i = 1; i < cluster_count; i++)
    {
        bitmap[i] = false;
    }
    // Write the bitmap
    fwrite(bitmap, sizeof(int), (size_t)cluster_count, file);
    /* --------------------------------------------------------- */
    /* CREATE THE MFT RECORD FOR ROOT */
    // Seek to the position where the MFT begins
    fseek(file, record->mft_start_address, SEEK_SET);
    // Create the mft item (uid = NTFS_ROOT_UID, marked as a directory)
    mft_item *mfti = create_mft_item(1, 1, 1, 1, "/\0", 1);
    // Initialize the first fragment to point at the first data cluster
    mfti->fragments[0].fragment_start_address = record->data_start_address;
    mfti->fragments[0].fragment_count = 1;
    // Remaining fragment slots are marked unused with -1
    for(int i = 1; i < MFT_FRAGMENTS_COUNT; i++)
    {
        mfti->fragments[i].fragment_start_address = -1;
        mfti->fragments[i].fragment_count = -1;
    }
    // Write the mft_item into the file
    fwrite(mfti, sizeof(mft_item), 1, file);
    /* ---------------------------------------------------------------------------------------------------- */
    /* WRITE THE ROOT DIRECTORY DATA */
    fseek(file, record->data_start_address, SEEK_SET);
    int32_t odkaz[2];
    // The first two entries are (1) the current directory (2) the parent directory
    // (both are the root itself here)
    odkaz[0] = 1;
    odkaz[1] = 1;
    fwrite(odkaz, sizeof(int32_t), 2, file);
    // Flush and release resources
    fflush(file);
    free(record);
    free(mfti);
    fclose(file);
    free(bitmap);
}
/**
 * Reads the boot_record structure stored at the beginning of the FS file.
 *
 * @param filename name of the file to read from
 * @return newly allocated boot record (caller frees), or NULL on failure
 */
boot_record *read_boot_record(char filename[])
{
    // The file must exist before we try to read from it
    if(!file_exists(filename))
    {
        printf("ERROR: Nelze precist boot record ze souboru: Soubor neexistuje!\n");
        return NULL;
    }
    // Open for reading; bail out if the open fails (e.g. permissions)
    FILE *file = fopen(filename, "r+b");
    if(file == NULL)
    {
        printf("ERROR: Soubor %s nelze otevrit!\n", filename);
        return NULL;
    }
    fseek(file, 0, SEEK_SET);
    // Allocate memory for the record
    boot_record *record = malloc(sizeof(boot_record));
    if(record == NULL)
    {
        printf("ERROR: Nedostatek pameti pro boot record!\n");
        fclose(file);
        return NULL;
    }
    // Read the record; on a short read free the memory and report failure
    if(fread(record, sizeof(boot_record), 1, file) != 1)
    {
        printf("ERROR: Nelze precist boot record ze souboru!\n");
        free(record);
        fclose(file);
        return NULL;
    }
    // BUGFIX: the file handle used to leak here (fclose was missing)
    fclose(file);
    return record;
}
/**
 * Reads the free-cluster bitmap of the filesystem.
 *
 * @param filename file to read from
 * @param record boot record describing the bitmap position and size
 * @return newly allocated bitmap of record->cluster_count ints (caller frees),
 *         or NULL on failure
 */
int *read_bitmap(char filename[], boot_record *record)
{
    // The file must exist
    if(!file_exists(filename))
    {
        printf("ERROR: Nelze precist bitmapu ze souboru: Soubor neexistuje!\n");
        return NULL;
    }
    // The boot record must not be NULL
    if(record == NULL)
    {
        printf("ERROR: Boot record nemuze byt NULL!\n");
        return NULL;
    }
    // Open the file
    FILE *file = fopen(filename, "rb");
    // BUGFIX: the original dereferenced the handle without checking fopen()
    if(file == NULL)
    {
        printf("ERROR: Soubor %s nelze otevrit!\n", filename);
        return NULL;
    }
    // Cluster count = number of bitmap entries
    int32_t bitmap_size = record->cluster_count;
    int32_t bitmap_addr = record->bitmap_start_address;
    // Allocate memory for the bitmap
    int *bitmap = malloc(sizeof(int) * bitmap_size);
    if(bitmap == NULL)
    {
        printf("ERROR: Nedostatek pameti pro bitmapu!\n");
        fclose(file);
        return NULL;
    }
    // Seek to the bitmap region
    fseek(file, bitmap_addr, SEEK_SET);
    // Read the whole bitmap in one call; free and fail on a short read
    if(fread(bitmap, sizeof(int), (size_t)bitmap_size, file) != (size_t)bitmap_size)
    {
        printf("ERROR: Nelze precist bitmapu ze souboru!\n");
        free(bitmap);
        fclose(file);
        return NULL;
    }
    // CLEANUP
    fclose(file);
    // Return the pointer to the bitmap
    return bitmap;
}
/**
 * Reads every MFT item slot from the FS file; the results are returned
 * through the output pointers.
 *
 * @param filename FS container file
 * @param record FS boot record
 * @param mft_array out: newly allocated array of mft_item structures (caller frees)
 * @param mft_array_size out: number of elements in the array
 */
void read_mft_items(char filename[], boot_record *record, mft_item **mft_array, int *mft_array_size)
{
    // The file must exist
    if(!file_exists(filename))
    {
        printf("ERROR: Nelze precist bitmapu ze souboru: Soubor neexistuje!\n");
        return;
    }
    // The boot record must not be NULL
    if(record == NULL)
    {
        printf("ERROR: Boot record nemuze byt NULL!\n");
        return;
    }
    // Open the file
    FILE *file = fopen(filename, "rb");
    // BUGFIX: the original used the handle without checking fopen()
    if(file == NULL)
    {
        printf("ERROR: Soubor %s nelze otevrit!\n", filename);
        return;
    }
    // MFT region boundaries taken from the boot record
    int32_t mft_addr_start = record->mft_start_address;
    int32_t mft_addr_end = record->bitmap_start_address - 1;
    // Maximum number of items that fit into the MFT region
    size_t mft_item_size = sizeof(mft_item);
    int32_t mft_item_max_count = (int32_t) floor((mft_addr_end - mft_addr_start) / mft_item_size);
    // Allocate the output array
    int array_size = mft_item_max_count;
    mft_item *array = malloc(mft_item_max_count * mft_item_size);
    if(array == NULL)
    {
        printf("ERROR: Nedostatek pameti pro MFT!\n");
        fclose(file);
        return;
    }
    // Seek to the MFT region
    fseek(file, mft_addr_start, SEEK_SET);
    // Read every possible item slot in one call
    fread(array, mft_item_size, mft_item_max_count, file);
    *mft_array = array;
    *mft_array_size = array_size;
    // CLEANUP
    fclose(file);
}
/**
 * Checks whether a file with the given name exists (i.e. can be opened
 * for reading).
 *
 * @param fname name/path of the file to check
 * @return file existence (0 - does not exist, 1 - exists)
 */
bool file_exists(const char *fname)
{
    FILE *handle = fopen(fname, "r");
    if (handle == NULL)
    {
        return 0;
    }
    fclose(handle);
    return 1;
}
/**
 * Returns the size of a file in bytes; returns -1 if the file does not
 * exist or cannot be opened.
 *
 * @param filename name of the file
 * @return file size in bytes, or -1 on failure
 */
int file_size(const char *filename)
{
    // BUGFIX: open in binary mode so ftell reports the true byte count on
    // every platform (text mode makes the result unreliable on Windows).
    // The fopen NULL check also replaces the racy file_exists() pre-check.
    FILE *fp = fopen(filename, "rb");
    if(fp == NULL)
    {
        return -1;
    }
    fseek(fp, 0L, SEEK_END);
    long size = ftell(fp);
    fclose(fp);
    return (int)size;
}
<file_sep>/ntfs_commands.h
#include "structure.h"
#include "ntfs.h"
#ifndef KIV_ZOS_NTFS_COMMANDS_H
#define KIV_ZOS_NTFS_COMMANDS_H
#endif //KIV_ZOS_NTFS_COMMANDS_H
|
e3adecafe00430850564f7fccd03df2019be50ac
|
[
"C",
"CMake"
] | 18
|
C
|
Sognus/KIV-ZOS-2018
|
60f27ce8b9c7932fa15b11cfd783818e3fa29007
|
015882f66f0d074e10699648efac91d14766edd1
|
refs/heads/master
|
<file_sep>// Initialize Firebase
var config = {
apiKey: "<KEY>",
authDomain: "train-schedule-a6a67.firebaseapp.com",
databaseURL: "https://train-schedule-a6a67.firebaseio.com",
projectId: "train-schedule-a6a67",
storageBucket: "",
messagingSenderId: "932346733823"
};
firebase.initializeApp(config);
var database = firebase.database();
// 2. Button for adding Trains: reads the form, pushes a new train record to
// Firebase, then clears the inputs.
$("#add-train-btn").on("click", function(event) {
    // Prevent the form submit from reloading the page
    event.preventDefault();
    // Grabs user input
    var trainName = $("#train-name-input").val().trim();
    var trainDest = $("#destination-input").val().trim();
    // Parse "HH:mm" and shift the first departure one year into the past,
    // stored as a unix timestamp string ("X") — presumably so the stored
    // start time is always earlier than "now" when arrival math runs
    // (TODO confirm).
    var trainTime = moment($("#time-input").val().trim(), "HH:mm").subtract(1, "years").format("X");
    // Frequency in minutes (kept as a string here; coerced later by %)
    var trainFreq = $("#frequency-input").val().trim();
    // Creates local "temporary" object for holding train data
    var newTrain = {
        name: trainName,
        dest: trainDest,
        time: trainTime,
        freq: trainFreq
    };
    // Uploads train data to the database
    database.ref().push(newTrain);
    // Clears all of the text-boxes
    $("#train-name-input").val("");
    $("#destination-input").val("");
    $("#time-input").val("");
    $("#frequency-input").val("");
});
// 3. Create Firebase event for adding train to the database and a row in the
// html when a user adds an entry (also fires once per existing record on load).
database.ref().on("child_added", function(childSnapshot, prevChildKey) {
    // Store everything into a variable.
    var trainName = childSnapshot.val().name;
    var trainDest = childSnapshot.val().dest;
    var trainTime = childSnapshot.val().time;
    var trainFreq = childSnapshot.val().freq;
    // Train arrival Math
    // freq min - current min = difference
    // difference % freq min = remainder
    // freq min - remainder = minutes away
    // min away + current time = arrival time
    // Minutes elapsed since the stored (year-shifted) first departure
    var diffTime = moment().diff(moment.unix(trainTime), "minutes");
    // Minutes since the most recent departure (trainFreq is a string but
    // is coerced to a number by the % operator)
    var tRemainder = diffTime % trainFreq;
    // Minute Until Train
    var minutesAway = trainFreq - tRemainder;
    // Next Train
    var trainArrival = moment().add(minutesAway, "minutes").format("hh:mm A");
    // Add each train data to the table
    $("#train-data").append("<tr><td>" + trainName + "</td><td>" + trainDest + "</td><td>" + trainFreq +
        "</td><td>" + trainArrival + "</td><td>" + minutesAway + "</td></tr>");});
|
7aaf395208c80d329b6e0ccddc5a64ebd27fd0ff
|
[
"JavaScript"
] | 1
|
JavaScript
|
aricci1024/Train-Activity
|
31ddccf2c36906675e2836f5e8a1933420827a8c
|
0cddf9d206665d3628a4321a15b0afe359394438
|
refs/heads/master
|
<repo_name>gkudelis/spotify-backup<file_sep>/spotify.js
var spotifyEndpoint = 'https://api.spotify.com/v1';

// Thin wrapper around the Spotify Web API using the implicit-grant flow.
// rateLimitLock is a promise used to pause all requests while the API
// reports 429 (rate limited); null means "not currently limited".
function Spotify(clientId, rateLimitLock) {
    this.clientId = clientId;
    // axios instance; created only after finishAuth() supplies a token
    this.ax = null;

    // Redirects the browser to Spotify's authorization page. `options.state`
    // is round-tripped so the callback can tell which account was authorized.
    this.startAuth = function(options) {
        var qs = objectToQueryString({
            client_id: this.clientId,
            scope: options.scope,
            state: options.state,
            show_dialog: 'true',
            response_type: 'token',
            redirect_uri: window.location.href.split('#')[0]
        });
        var newURI = 'https://accounts.spotify.com/authorize/?' + qs;
        window.location.replace(newURI);
    }

    // Completes authentication with the access token extracted from the
    // redirect fragment; all later requests carry it as a Bearer header.
    this.finishAuth = function(token) {
        this.ax = axios.create({
            baseURL: spotifyEndpoint,
            headers: { Authorization: 'Bearer ' + token }
        });
    }

    // Executes requestLambda with rate-limit handling: waits for any active
    // 429 back-off, retries after 'Retry-After' seconds on 429, and retries
    // immediately when no response arrived at all.
    this.makeRequest = function(requestLambda) {
        var spi = this;
        // make sure we wait for the rate limit lock (null if not limited)
        return Promise.resolve(rateLimitLock)
            .then(function() {
                // execute request and wait for response
                return requestLambda();
            }).then(function(response) {
                return response.data;
            }).catch(function(error) {
                if (error.response && error.response.status === 429) {
                    // if we're the first to get a 429 set rateLimitLock
                    if (rateLimitLock === null) {
                        var retryAfter = parseInt(error.response.headers['retry-after']);
                        rateLimitLock = new Promise(function(resolve, reject) {
                            console.log('Spotify waiting for ' + retryAfter + ' s');
                            setTimeout(function() {
                                rateLimitLock = null;
                                resolve();
                            }, retryAfter * 1000);
                        });
                    }
                    // retry after waiting
                    return rateLimitLock.then(spi.makeRequest.bind(spi, requestLambda));
                } else if (error.response && error.response.status === 401) {
                    console.log('auth token has expired');
                } else if (error.response && error.response.status === 403) {
                    console.log('request not authorized (hint: scope)');
                } else if (error.response) {
                    console.log('response received, error unknown');
                    console.log(error);
                } else if (error.request) {
                    // did not get a response from the server - retry now
                    console.log('retrying request');
                    return spi.makeRequest(requestLambda);
                } else {
                    console.log(error);
                }
            });
    }

    // GET /me — profile of the authenticated user.
    this.getMe = function() {
        var sp = this;
        return this.makeRequest(function() {
            return sp.ax.get('/me');
        });
    }

    // GET /me/albums — one page of the user's saved albums.
    this.getAlbums = function(limit, offset) {
        var sp = this;
        return this.makeRequest(function() {
            return sp.ax.get('/me/albums', {
                params: { limit: limit, offset: offset }
            });
        });
    }

    // PUT /me/albums — save the given album ids to the user's library.
    this.saveAlbums = function(albumIds) {
        var sp = this;
        return this.makeRequest(function() {
            return sp.ax.put('/me/albums', albumIds);
        });
    }

    // GET /me/tracks — one page of the user's saved tracks.
    this.getTracks = function(limit, offset) {
        var sp = this;
        return this.makeRequest(function() {
            return sp.ax.get('/me/tracks', {
                params: { limit: limit, offset: offset }
            });
        });
    }

    // PUT /me/tracks — save the given track ids to the user's library.
    this.saveTracks = function(trackIds) {
        var sp = this;
        return this.makeRequest(function() {
            return sp.ax.put('/me/tracks', trackIds);
        });
    }

    // DELETE /me/tracks — remove the given track ids from the user's library.
    this.deleteTracks = function(trackIds) {
        var sp = this;
        return this.makeRequest(function() {
            return sp.ax.delete('/me/tracks', { data: trackIds });
        });
    }
}
function objectToQueryString (o) {
var qs = Object
.keys(o)
.map(function(k) {
return encodeURIComponent(k) + '=' + encodeURIComponent(o[k]);
})
.join('&');
return qs;
}
<file_sep>/Dockerfile
# Minimal image that serves the app's static files with Python's built-in
# HTTP server (default port 8000).
FROM python:3.7-stretch
WORKDIR /app
# Copy all application sources into the image.
COPY . .
# -u: unbuffered stdout so logs appear immediately in `docker logs`.
CMD [ "python", "-u", "-m", "http.server" ]
<file_sep>/promisq.js
module.exports = PromisQ;

// Promise-based bounded FIFO queue: pop() waits when the queue is empty,
// push() makes the pusher wait when it is (nearly) full. At most ONE popper
// and ONE pusher may be waiting at a time; a second concurrent waiter is
// rejected.
function PromisQ (maxLength) {
    this.queue = [];
    this.maxLength = maxLength;
    // used to keep track of resolvers for promises given out when there are
    // too many / too few items in the queue
    this.popResolve = null;
    this.pushResolve = null;
    var pq = this;
    // Removes and returns the next item; when the queue is empty the returned
    // promise stays pending until the next push supplies a value.
    this.pop = function () {
        return new Promise(function (resolve, reject) {
            if (pq.popResolve === null) {
                if (pq.queue.length > 0) {
                    // Popping frees a slot: release a waiting pusher, if any
                    if (pq.pushResolve !== null) {
                        pq.pushResolve();
                        pq.pushResolve = null;
                    }
                    resolve(pq.queue.shift());
                } else {
                    // Queue empty: park this popper until an item arrives
                    pq.popResolve = resolve;
                }
            } else {
                // Another popper is already waiting — unsupported, reject
                reject();
            }
        });
    }
    // Adds an item; when the queue is full the returned promise stays pending
    // until a pop frees a slot (the item itself is still enqueued).
    // NOTE(review): the capacity test is `length < maxLength - 1`, i.e. the
    // pusher starts waiting one item before maxLength is reached — confirm
    // whether this off-by-one is intended.
    this.push = function (item) {
        return new Promise(function (resolve, reject) {
            if (pq.pushResolve === null) {
                if (pq.queue.length < (maxLength - 1)) {
                    if (pq.popResolve !== null) {
                        // Hand the item straight to the waiting popper
                        pq.popResolve(item);
                        pq.popResolve = null;
                    } else {
                        pq.queue.push(item);
                    }
                    resolve();
                } else {
                    // Queue (nearly) full: enqueue but make the pusher wait
                    pq.queue.push(item);
                    pq.pushResolve = resolve;
                }
            } else {
                // Another pusher is already waiting — unsupported, reject
                reject();
            }
        });
    }
}
<file_sep>/app.js
// Public Spotify application client id (implicit-grant flow).
var spotifyClientId = 'f<KEY>';
// NOTE(review): this lock is passed by VALUE (null) into both wrappers, so
// each Spotify instance keeps its own copy — a 429 on one account does not
// actually throttle the other. Confirm whether shared throttling was intended.
var spotifyRateLimitLock = null;
var fromSpotify = new Spotify(spotifyClientId, spotifyRateLimitLock);
var toSpotify = new Spotify(spotifyClientId, spotifyRateLimitLock);
// Bounded queue limiting how many track-page fetches are in flight at once.
var pq = new PromisQ(10);
// Root Vue instance: handles the two OAuth flows ("from" and "to" account)
// and drives the track-copy pipeline with progress reporting.
new Vue({
    el: '#app',
    data: {
        fromAuthComplete: false,
        toAuthComplete: false,
        trackLoadPercentage: 0,
        trackCopyPercentage: 0,
        progressLog: 'Waiting for authentication\n',
        fromAccName: null,
        toAccName: null
    },
    // On startup: pull any access token out of the URL fragment left by the
    // Spotify redirect, persist it in a cookie keyed by the `state` value,
    // then finish authentication for whichever accounts have cookies.
    created: function() {
        var vm = this;
        // get auth tokens from querystring
        var fragment = window.location.hash.substr(1);
        var params = queryStringToObject(fragment);
        if (params.hasOwnProperty('access_token')) {
            // `state` tells us which of the two auth flows this token belongs to
            if (params['state'] === 'authFrom') {
                document.cookie = 'authFrom=' + encodeURIComponent(params['access_token']);
            } else if (params['state'] === 'authTo') {
                document.cookie = 'authTo=' + encodeURIComponent(params['access_token']);
            } else console.log('This is strange...');
            // strip the token from the visible URL
            document.location.replace('#');
        }
        // make an object representing the cookies
        var cookies = document.cookie.split(';').reduce(function(acc, c) {
            var kv = c.split('=');
            acc[kv[0].trim()] = decodeURIComponent(kv[1]);
            return acc;
        }, {});
        // check and complete authentications
        if (cookies.hasOwnProperty('authFrom')) {
            vm.progressLog += 'Authenticated "from" account\n';
            fromSpotify.finishAuth(cookies['authFrom']);
            vm.fromAuthComplete = true;
            fromSpotify.getMe()
                .then(function(data) {
                    vm.fromAccName = data.display_name;
                }).catch(console.log.bind(console));
        }
        if (cookies.hasOwnProperty('authTo')) {
            vm.progressLog += 'Authenticated "to" account\n';
            toSpotify.finishAuth(cookies['authTo']);
            vm.toAuthComplete = true;
            toSpotify.getMe()
                .then(function(data) {
                    vm.toAccName = data.display_name;
                }).catch(console.log.bind(console));
        }
    },
    methods: {
        // Kick off OAuth for the source account (read-only library scope).
        authFrom: function() {
            fromSpotify.startAuth({
                scope: 'user-library-read',
                state: 'authFrom'
            });
        },
        // Kick off OAuth for the destination account (write library scope).
        authTo: function() {
            toSpotify.startAuth({
                scope: 'user-library-modify',
                state: 'authTo'
            });
        },
        // Copies every saved track from the "from" account to the "to"
        // account: fetch all pages (bounded parallelism via pq), sort by
        // original save date, then save in date-ordered batches of <=50 so
        // the destination library keeps the same relative "added" order.
        copyTracks: function() {
            var vm = this;
            vm.progressLog += 'Loading tracks\n';
            var trackCount;
            var trackPageSize = 50;
            return fromSpotify.getTracks(1).then(function(trackData) {
                // first find the number of tracks
                trackCount = trackData.total;
                // create a list of offsets
                var offsets = [];
                for (var i = 0; i < trackCount; i += trackPageSize) {
                    offsets.unshift(i);
                }
                // queue track fetches (these are happening in parallel)
                offsets.reduce(function(p, offset) {
                    // set up a new unresolved promise for track results
                    var trackLoadResolve;
                    var trackLoad = new Promise(function(resolve, reject) {
                        trackLoadResolve = resolve;
                    });
                    // wait for the queue to accept our push
                    return p.then(function() {
                        return pq.push(trackLoad);
                    }).then(function() {
                        // resolve the load promise with another promise
                        trackLoadResolve(fromSpotify.getTracks(trackPageSize, offset));
                    });
                }, Promise.resolve());
                // once all have been queued - collect results from the queue
                var batchesProcessed = 0;
                var allSavedTracks = [];
                return offsets.reduce(function(p) {
                    return p.then(function() {
                        return pq.pop();
                    }).then(function(trackData) {
                        batchesProcessed += 1;
                        vm.trackLoadPercentage = Math.ceil((batchesProcessed / offsets.length) * 100);
                        // add current batch to the full list of tracks
                        trackData.items.reduce(function(savedTrackAcc, item) {
                            savedTrackAcc.push({
                                id: item.track.id,
                                added_at: item.added_at
                            });
                            return savedTrackAcc;
                        }, allSavedTracks);
                        return allSavedTracks;
                    });
                }, Promise.resolve());
            }).then(function(allSavedTracks) {
                vm.progressLog += 'Finished loading tracks\n';
                // sort in ascending order by added date
                var sortedTracks = allSavedTracks.sort(function(a, b) {
                    return new Date(a.added_at) - new Date(b.added_at);
                });
                // group by added date, also split up groups if larger than 50
                var groupedTracks = sortedTracks.reduce(function(acc, track) {
                    if (track.added_at === acc.lastDate &&
                            acc.groups[acc.groups.length - 1].length < 50) {
                        acc.groups[acc.groups.length - 1].push(track.id);
                    } else {
                        acc.groups.push([track.id]);
                    }
                    return {
                        lastDate: track.added_at,
                        groups: acc.groups
                    };
                }, {lastDate: null, groups: []}).groups;
                vm.progressLog += 'Saving tracks\n';
                // save tracks (groups are ordererd by date originally saved)
                var batchesProcessed = 0;
                return groupedTracks.reduce(function(p, trackIds) {
                    return p.then(function() {
                        return toSpotify.saveTracks(trackIds);
                    }).then(function() {
                        batchesProcessed += 1;
                        vm.trackCopyPercentage = Math.ceil((batchesProcessed / groupedTracks.length) * 100);
                    });
                }, Promise.resolve());
            }).then(function() {
                vm.progressLog += 'Finished saving tracks\n';
            });
        },
        //wipeToAccountTracks: function() {
        //    var vm = this;
        //    vm.progressLog += 'Wiping "to" account tracks\n';
        //    var trackCount;
        //    var trackPageSize = 50;
        //    return toSpotify.getTracks(1).then(function(trackData) {
        //        // first find the number of tracks
        //        trackCount = trackData.total;
        //        // create a list of offsets
        //        var offsets = [];
        //        for (var i = 0; i < trackCount; i += trackPageSize) {
        //            offsets.unshift(i);
        //        }
        //        // queue track fetches
        //        offsets.reduce(function(p, offset) {
        //            // set up a new unresolved promise for track results
        //            var trackLoadResolve;
        //            var trackLoad = new Promise(function(resolve, reject) {
        //                trackLoadResolve = resolve;
        //            });
        //            // wait for the queue to accept our push
        //            return p.then(function() {
        //                return pq.push(trackLoad);
        //            }).then(function() {
        //                // resolve the load promise with another promise
        //                trackLoadResolve(toSpotify.getTracks(trackPageSize, offset));
        //            });
        //        }, Promise.resolve());
        //        // once all have been queued - queue track deletes
        //        return offsets.reduce(function(p) {
        //            return p.then(function() {
        //                return pq.pop();
        //            }).then(function(trackData) {
        //                var trackIds = trackData.items.map(function(item) {
        //                    return item.track.id;
        //                });
        //                return toSpotify.deleteTracks(trackIds);
        //            });
        //        }, Promise.resolve());
        //    }).then(function() {
        //        vm.progressLog += 'Finished wiping tracks\n';
        //    });
        //}
    }
});
function queryStringToObject (qs) {
return qs.split('&').reduce(function(acc, val) {
var kv = val.split('=');
acc[decodeURIComponent(kv[0])] = decodeURIComponent(kv[1]);
return acc;
}, {});
}
<file_sep>/spec/promisq-spec.js
const PromisQ = require('../promisq.js');

// Jasmine spec for the bounded promise queue.
describe('PromisQ', function() {
    var q;
    beforeEach(function() {
        // capacity of 5 for every test
        q = new PromisQ(5);
    });
    it('accepts and gives out a single item', function() {
        q.push('item');
        q.pop().then(function(item) {
            expect(item).toBe('item');
        });
    });
    it('accepts and gives out multiple items', function() {
        // Ordering note: push(1) runs its executor synchronously, then the
        // two synchronous pushes at the bottom run (queue = [1,2,3]) BEFORE
        // the first .then fires and pushes 4 — hence pops yield 1,2,3,4.
        q.push(1).then(function() {
            return q.push(4);
        }).then(function() {
            return q.pop();
        }).then(function(v1) {
            expect(v1).toBe(1);
            return q.pop();
        }).then(function(v2) {
            expect(v2).toBe(2);
            return q.pop();
        }).then(function(v3) {
            expect(v3).toBe(3);
            return q.pop();
        }).then(function(v4) {
            expect(v4).toBe(4);
        });
        // will happen before the first .then handler
        q.push(2);
        q.push(3);
    });
    it('correctly deals with more values than it can hold', function() {
        // 10 values through a capacity-5 queue: pushers must block and be
        // released by pops, and FIFO order must be preserved.
        var values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
        // queue pushes
        values.reduce(function(promise, value) {
            return promise.then(function() {
                return q.push(value);
            });
        }, Promise.resolve());
        // queue pops and value checks
        values.reduce(function(promise, value) {
            return promise.then(function(poppedValue) {
                expect(poppedValue).toBe(value);
                return q.pop();
            });
        }, q.pop());
    });
    it('makes poppers wait for values', function() {
        // Both pops are issued before any push; they must resolve in order
        // once the pushes arrive.
        q.pop().then(function(v) {
            expect(v).toBe(1);
            return q.pop();
        }).then(function(v) {
            expect(v).toBe(2);
        });
        q.push(1);
        q.push(2);
    });
});
|
0677dfded667583dfe01bf46956787f1f63b441a
|
[
"JavaScript",
"Dockerfile"
] | 5
|
JavaScript
|
gkudelis/spotify-backup
|
d5c63011800f458e4e7cf03521716ff18e075f4b
|
c833c83d93bb6ba3b4d92384102d5cd4c506b3d4
|
refs/heads/master
|
<file_sep><?php
require_once('plugins/kjAccessor.php');
$dba = new kjPHP\Accessor('localhost','style','phpdude','dude!@#$');
$colors = $dba->_query('SELECT * FROM colors');
?>
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title>StyleBase : Color</title>
<link rel="stylesheet" type="text/css" href="styles/common.css" />
<link rel="stylesheet" type="text/css" href="styles/template.css" />
</head>
<body>
<?
while( $color = $colors->fetch(PDO::FETCH_ASSOC) )
{
?>
<div class="color-ticket">
<div class="color" style="background-color:<?=$color['id']?>;"></div>
<table>
<thead><tr><td colspan="2" style="color:<?=$color['id']?>;"><?=$color['name']?></td></tr></thead>
<tbody>
<tr>
<th>RGB (CSS3)</th>
<td>(<?=$color['red']?>,<?=$color['green']?>,<?=$color['blue']?>)</td>
</tr>
<tr>
<th>HSV</th>
<td>(<?=$color['hue']?>,<?=$color['sv']?>)</td>
</tr>
<tr>
<th>HSL</th>
<td>(<?=$color['hue']?>,<?=$color['sl']?>)</td>
</tr>
<tr>
<th>CMYK</th>
<td>(<?=intval($color['cyan'])/100?>,<?=intval($color['magenta'])/100?>,<?=intval($color['yellow'])/100?>,<?=intval($color['black'])/100?>)</td>
</tr>
</tbody>
</table>
</div>
<?
}
?>
</body>
</html><file_sep># KungFuSchool
kj's KungFuSchool Game webpage.
<file_sep><?php
namespace kjPHP
{
/**
 * Thin PDO wrapper with file-based error logging.
 *
 * Log files are written next to the script, one per day and client IP.
 */
class Accessor
{
    private $pdo = null;
    // When true, the current session id is prefixed to every log line.
    public $RECORD_SESSION = false;

    /**
     * Opens a PDO connection to the given MySQL database (forced to UTF-8).
     */
    function __construct($server,$database,$username,$password)
    {
        $options = array( \PDO::MYSQL_ATTR_INIT_COMMAND => 'SET NAMES utf8' );
        $this->pdo = new \PDO('mysql:host='.$server.';dbname='.$database,$username,$password,$options);
        if( !$this->pdo ) { $this->writeLogFile('Create PDO fail.'); }
    }

    /**
     * Appends a message to the per-day, per-client-IP log file.
     */
    function writeLogFile($message)
    {
        $prefix = '';
        if( $this->RECORD_SESSION )
        {
            $sid = session_id();
            if( $sid == '' )
            {
                session_start();
                $sid = session_id();
            }
            $prefix = 'SESSION(' . $sid . ') ';
        }
        // Dots in the IP are replaced so the value is safe in a filename
        $ipaddr = isset($_SERVER['REMOTE_ADDR']) ? str_replace( '.', '-', $_SERVER['REMOTE_ADDR'] ) : null;
        if( empty($ipaddr) ) { $ipaddr = 'UNKNOWN'; }
        $fp = fopen( date('Y-m-d') . '_' . $ipaddr . '.log', 'a' );
        fwrite($fp,$prefix.$message.PHP_EOL);
        fclose($fp);
    }

    /**
     * Runs a SELECT-style query. With $args the SQL is prepared and executed
     * with bound parameters; without, it is run directly.
     *
     * @return \PDOStatement|false|null statement on success, false/null on failure
     */
    public function _query($sql,$args = null)
    {
        $result = null;
        if( $args != null )
        {
            $stmt = $this->pdo->prepare($sql);
            if( $stmt->execute($args) ) { $result = $stmt; }
        } else { $result = $this->pdo->query($sql); }
        if( $result === null ) { $this->writeLogFile('Invalid Argument: kjPHP\Accessor::_query.'); }
        if( $result === false )
        {
            $errinfo = $this->pdo->errorInfo();
            $this->writeLogFile('PDO Exception: '.$errinfo[2]);
            return false;
        }
        return $result;
    }

    /**
     * Runs an INSERT/UPDATE/DELETE as a prepared statement.
     *
     * @return string|void last insert id on success
     */
    public function _execute($sql,$args = null)
    {
        if( $args == null ) { $args = array(); }
        $stmt = $this->pdo->prepare($sql);
        if( $stmt->execute($args) )
        {
            try
            {
                $lastid = $this->pdo->lastInsertId();
                return $lastid;
            }
            // BUGFIX: inside the kjPHP namespace an unqualified "PDOException"
            // resolves to kjPHP\PDOException, which does not exist, so this
            // catch block could never match; the leading backslash makes it
            // catch the global \PDOException.
            catch( \PDOException $e ) { $this->writeLogFile('Fail to get last ID: '.$e->getMessage()); }
        }
        else
        {
            $errinfo = $stmt->errorInfo();
            $this->writeLogFile('PDO Exception: '.$errinfo[2]);
        }
    }
}
}<file_sep><?php
require_once('../plugins/kjAccessor.php');
/**
 * Converts an UPPERCASE hexadecimal string into its integer value.
 *
 * BUGFIX: the original weighted the digit at position $i with
 * 16*($size-$i-1) instead of 16^($size-$i-1), which is only correct for
 * inputs of one or two digits (the existing call sites). Horner's scheme
 * below is correct for any length and identical for 1-2 digit inputs.
 *
 * @param string   $hex  uppercase hex digits (e.g. "FF"); lowercase is NOT handled
 * @param int|null $size number of leading characters to convert (defaults to full length)
 * @return int decoded value
 */
function fromHEX($hex,$size = null)
{
    $hexpair = array(
        '0' => 0, '1' => 1, '2' => 2, '3' => 3,
        '4' => 4, '5' => 5, '6' => 6, '7' => 7,
        '8' => 8, '9' => 9, 'A' => 10, 'B' => 11,
        'C' => 12, 'D' => 13, 'E' => 14, 'F' => 15
    );
    if( $size == null ) { $size = strlen($hex); }
    $result = 0;
    // Horner's scheme: shift the accumulated value one hex place, add digit
    for($i=0;$i<$size;$i++)
    {
        $chr = substr($hex,$i,1);
        $result = $result * 16 + $hexpair[$chr];
    }
    return $result;
}
// Imports colors from ch_color.csv into the `colors` table. For each row it
// either merges alias/description data into an existing record (matched by
// hex id) or computes RGB/HSL/HSV/CMYK values and inserts a new record.
$dba = new kjPHP\Accessor('localhost','style','phpdude','dude!@#$');
$file = 'ch_color.csv';
$fp = fopen($file,'r');
// First CSV line holds the column names; each data line is keyed by it.
$header = fgetcsv($fp);
while( $line = fgetcsv($fp) )
{
    $colordata = array();
    $color = array_combine($header,$line);
    // Hex ids are stored uppercase (fromHEX also only knows uppercase digits)
    $rgb16 = strtoupper($color['rgb']);
    if( empty($rgb16) ) { continue; }
    // If this color already exists, merge aliases/description and move on.
    $c_row = $dba->_query('SELECT name,more,alias FROM colors WHERE id=:cid',array(':cid'=>$rgb16));
    if( $row = $c_row->fetch(PDO::FETCH_ASSOC) )
    {
        $alias = json_decode($row['alias'],true);
        if( isset($color['name']) )
        {
            // Strip the trailing "色" ("color") suffix from Chinese names
            $name = rtrim($color['name'],'色');
            if( $name != $row['name'] ) { $alias[] = $name; }
        }
        if( isset($color['eng_name']) ) { $alias[] = trim($color['eng_name']); }
        $description = $row['more'];
        if( isset($color['description']) ) { $description .= trim($color['description']); }
        $dba->_execute('UPDATE colors SET more=:more,alias=:alias WHERE id=:cid LIMIT 1',array(
            ':cid' => $rgb16,
            ':more' => $description,
            ':alias' => json_encode($alias)
        ));
        echo $rgb16 . ' has been recorded, update alias & information.' . PHP_EOL;
        continue;
    }
    // RGB components: prefer explicit CSV columns, otherwise decode from the
    // "#RRGGBB" hex id (offsets 1/3/5 skip the leading '#').
    $red = isset($color['red']) ? $color['red'] : fromHEX(substr($rgb16,1,2));
    $colordata[':red'] = $red;
    $green = isset($color['green']) ? $color['green'] : fromHEX(substr($rgb16,3,2));
    $colordata[':green'] = $green;
    $blue = isset($color['blue']) ? $color['blue'] : fromHEX(substr($rgb16,5,2));
    $colordata[':blue'] = $blue;
    // Normalize channels to [0,1] for the colorspace conversions below
    $r_rate = $red / 255;
    $g_rate = $green / 255;
    $b_rate = $blue / 255;
    $rgb_max = max($r_rate,$g_rate,$b_rate);
    $rgb_min = min($r_rate,$g_rate,$b_rate);
    $diff = $rgb_max - $rgb_min;
    // HSL,HSV — hue is shared by both models (0-360, 0 for greys)
    $hue = 0;
    if( $rgb_max == $rgb_min )
    {
        $hue = 0;
    }
    else if ( $rgb_max == $r_rate )
    {
        $hue = 60 * ($g_rate-$b_rate) / $diff;
        if( $g_rate < $b_rate ) { $hue += 360; }
    }
    else if ( $rgb_max == $g_rate )
    {
        $hue = 120 + 60 * ($b_rate-$r_rate) / $diff;
    }
    else
    {
        $hue = 240 + 60 * ($r_rate-$g_rate) / $diff;
    }
    $colordata[':hue'] = round($hue);
    // HSL lightness/saturation
    $hsl_l = ( $rgb_max + $rgb_min ) / 2;
    $hsl_s = 0;
    if( $rgb_max == $rgb_min OR $hsl_l == 0 )
    {
        $hsl_s = 0;
    }
    else if ( $hsl_l > 0 AND $hsl_l <= 0.5 )
    {
        $hsl_s = $diff / ($hsl_l*2);
    }
    else
    {
        $hsl_s = $diff / (2-($hsl_l*2));
    }
    // HSV value/saturation
    $hsv_v = $rgb_max;
    $hsv_s = ( ($rgb_max) == 0 ) ? 0 : 1-$rgb_min/$rgb_max;
    // Stored as "S,L" / "S,V" percentage pairs in a single column
    $colordata[':sl'] = round($hsl_s*100) . ',' . round($hsl_l*100);
    $colordata[':sv'] = round($hsv_s*100) . ',' . round($hsv_v*100);
    // CMYK — derived from the RGB complements with black (K) extracted
    $cyan = 0;
    $magenta = 0;
    $yellow = 0;
    $black = 0;
    $c = 1- $r_rate;
    $m = 1- $g_rate;
    $y = 1- $b_rate;
    $k = min($c,$m,$y);
    if( $k == 1 )
    {
        // Pure black: all chromatic components are zero
        $black = 1;
    }
    else
    {
        $black = $k;
        $key = 1 - $k;
        $cyan = ($c-$k) / $key;
        $magenta = ($m-$k) / $key;
        $yellow = ($y-$k) / $key;
    }
    $colordata[':cyan'] = round($cyan*100);
    $colordata[':magenta'] = round($magenta*100);
    $colordata[':yellow'] = round($yellow*100);
    $colordata[':black'] = round($black*100);
    if( isset($color['name']) ) { $colordata[':name'] = rtrim($color['name'],'色'); }
    if( isset($color['description']) ) { $colordata[':more'] = trim($color['description']); }
    $alias = array();
    if( isset($color['eng_name']) ) { $alias[] = trim($color['eng_name']); }
    $colordata[':alias'] = json_encode($alias,JSON_UNESCAPED_UNICODE);
    // Insert the new color; tags column starts as an empty JSON array
    $dba->_execute(
        'INSERT INTO colors(id,red,green,blue,hue,sv,sl,cyan,magenta,yellow,black,name,more,alias,tags) '
        . "VALUE ('{$rgb16}',:red,:green,:blue,:hue,:sv,:sl,:cyan,:magenta,:yellow,:black,:name,:more,:alias,'[]')",
        $colordata
    );
    echo ( isset($colordata[':name']) ? $colordata[':name'] : $rgb16 ) . ' is recorded.' . PHP_EOL;
}
require_once('plugins/kjAccessor.php');
$dba = new kjPHP\Accessor('localhost','KungFuSchool','phpdude','dude!@#$');
// Paging for the movement list.
// BUG FIX: the old code tested $_GET['items'] but then read $_GET['limit'],
// so the page-size parameter never worked as a pair; both values are also
// cast to int so they cannot inject SQL through the LIMIT/OFFSET clause.
$limit = isset($_GET['limit']) ? (int)$_GET['limit'] : 50;
$offset = ( isset($_GET['page']) ? (int)$_GET['page'] : 0 ) * $limit;
$list = $dba->_query('SELECT handle,name,weapon,yinyang*wuxing AS attr FROM movementlist LIMIT '.$limit.' OFFSET '.$offset);
// Defaults shown in the edit form when no ?q= is given; include every key the
// form below reads so PHP does not raise undefined-index notices.
$movement = array(
    'name' => '',
    'belong' => '',
    'ordinal' => 0,
    'weapon' => '',
    'yinyang' => 0,
    'wuxing' => 0,
);
if( isset($_GET['q']) )
{
    // Load the requested movement by handle (bound parameter — injection safe).
    $query = $dba->_query('SELECT * FROM movementlist WHERE handle=:handle LIMIT 1',array(':handle'=>$_GET['q']));
    if( $m = $query->fetch(PDO::FETCH_ASSOC) )
    {
        $movement = $m;
    }
}
// Weapon handle => display name (zh-TW).
$weapon_name = array(
    'unarmed' => '空手',
    'sword' => '劍',
    'blade' => '刀',
    'rod' => '棍',
    'stick' => '棒',
    'staff' => '杖',
    'hammer' => '鎚',
    'glove' => '手套',
    'lance' => '槍',
    'whip' => '鞭',
    'dagger' => '匕首',
    'claw' => '爪',
    'throw' => '暗器',
    'other' => '奇門'
);
// yinyang (±1) multiplied by wuxing (1..5) => heavenly-stem / element label.
$attribute = array(
    1 => '甲木', -1 => '乙木',
    2 => '丙火', -2 => '丁火',
    3 => '戊土', -3 => '己土',
    4 => '庚金', -4 => '辛金',
    5 => '壬水', -5 => '癸水'
);
?>
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>開山祖師 - 招式</title>
<link rel="stylesheet" type="text/css" href="styles/common.css" />
<link rel="stylesheet" type="text/css" href="styles/builder.css" />
<style>
label { display: block; margin: 2px; }
</style>
</head>
<body>
<div id="builder">
<header>
<h1>招式(Movement)產生器</h1>
</header>
<form class="-border">
<label>
<input type="text" name="title" value="<?=$movement['name']?>" />
</label>
<label>
<input type="text" name="belong" value="<?=$movement['belong']?>" />
<input type="number" name="ordinal" min="0" value="<?=$movement['ordinal']?>" />
</label>
<label>
<select name="weapon">
<?
foreach( $weapon_name as $k => $v )
{
$selected = ( $movement['weapon'] == $k ) ? ' selected' : '';
echo '<option value="' . $k . '"' . $selected . '>' . $v . '</option>';
}
?>
<option value="_new">(新增)</option>
</select>
<input type="text" name="new_weapon" />
</label>
<label>
<select name="yinyang">
<option value="1"<?=(($movement['yinyang']==1)?' selected':'')?>>陽</option>
<option value="-1"<?=(($movement['yinyang']==-1)?' selected':'')?>>陰</option>
</select>
<select name="wuxing">
<option value="1"<?=(($movement['wuxing']==1)?' selected':'')?>>木</option>
<option value="2"<?=(($movement['wuxing']==2)?' selected':'')?>>火</option>
<option value="3"<?=(($movement['wuxing']==3)?' selected':'')?>>土</option>
<option value="4"<?=(($movement['wuxing']==4)?' selected':'')?>>金</option>
<option value="5"<?=(($movement['wuxing']==5)?' selected':'')?>>水</option>
</select>
</label>
<label>
<select name="hitdice">
<option value="4">四面骰</option>
<option value="6">六面骰</option>
<option value="8">八面骰</option>
<option value="12">十二面骰</option>
<option value="20">二十面骰</option>
</select>
</label>
<label>
<select name="timing">
<option value="0">施展招式</option>
<option value="1">招式成功</option>
<option value="2">招式失敗</option>
</select>
後
<input type="number" name="duration" min="0" max="5" />
招內
<select name="target">
<option value="0">所有</option>
<option value="1">同我</option>
<option value="2">生我</option>
<option value="3">我生</option>
<option value="4">相生</option>
<option value="5">剋我</option>
<option value="6">我剋</option>
<option value="7">相剋</option>
</select>
招式
<select name="effect">
<option value="0">無效果</option>
<option value="1">增加命中率</option>
<option value="2">制敵加成</option>
<option value="3">加速制敵提昇</option>
<option value="4">延長持續時間</option>
<option value="-1">降低命中率</option>
<option value="-2">制敵減半</option>
<option value="-3">抑制制敵提昇</option>
<option value="-4">減少持續時間</option>
</select>
</label>
<button type="button" onclick="KungFu.submit(this.form);">完成</button>
<button type="reset">重設</button>
</form>
</div>
<div id="list">
<table>
<thead>
<tr>
<th>招式名稱</th>
<th style="width:100px;">使用兵器</th>
<th style="width:80px;">陰陽五行</th>
</tr>
</thead>
<tbody>
<?
// Render one table row per movement on the current page.
while( $move = $list->fetch(PDO::FETCH_ASSOC) )
{
    echo '<tr>';
    echo '<td><a href="movement.php?q=' . $move['handle'] . '">' . $move['name'] . '</a></td>';
    echo '<td>' . $weapon_name[$move['weapon']] . '</td>';
    echo '<td>' . $attribute[$move['attr']] . '</td>';
    // BUG FIX: every row was left unclosed, producing invalid markup.
    echo '</tr>';
}
?>
</tbody>
</table>
</div>
</body>
</html>
|
548bcb3d4df16d4309e8858393228e9a49b61c97
|
[
"Markdown",
"PHP"
] | 5
|
PHP
|
falsewinds/KungFuSchool
|
f84c0f32459d768ad73b0a2b752254c43b70ec4d
|
86632e47bf9d76ce90b9743150ed34c45ad4113e
|
refs/heads/master
|
<file_sep>"""
Turn characters into words with a set of very strong assumptions:
This will probably not work for documents where the text has been created
from OCR, or where text is not aligned vertically.
"""
import argparse, csv, sys, json
# Because we're outputting to stdout, setting to true will hose the output.
debug = False
# not sure where this should come from, but
RETURN_PRECISION = 3
# We prob don't wanna return all of these, just leaving them in for now.
fieldnames = ['adv', 'fontname', 'linewidth', 'pageid', 'doctop', 'text', 'top', 'object_type', 'height', 'upright', 'width', 'y1', 'y0', 'x0', 'x1', 'size']
# cut n' paste from pdfplumbers cli.py
def parse_page_spec(p_str):
    """Expand a page spec: "N" -> [N], "A-B" -> list(range(A, B)).

    NOTE(review): the range end is exclusive, so "1-3" yields [1, 2];
    confirm that matches the intended --pages semantics.
    """
    if "-" not in p_str:
        return [int(p_str)]
    return list(range(*(int(part) for part in p_str.split("-"))))
def parse_args():
    """Build and evaluate the CLI: positional infile plus --pages,
    --precision and --format options.  Returns the argparse Namespace.
    """
    parser = argparse.ArgumentParser("coalesce_words")
    # BUG FIX: the default used to be sys.stdin.buffer on Python 3, which is
    # a *bytes* stream; csv.DictReader then fails with "iterator should
    # return strings, not bytes".  sys.stdin is text on Python 3 and is also
    # valid on Python 2, so no version switch is needed.
    parser.add_argument("infile", nargs="?",
                        type=argparse.FileType("r"),
                        default=sys.stdin)
    # Each spec is "N" or "N-M"; argparse yields a list of lists (see main()).
    parser.add_argument("--pages", nargs="+",
                        type=parse_page_spec)
    # Decimal places used when rounding y0 to bucket chars onto a baseline.
    parser.add_argument('--precision',
                        default=1,
                        choices=(0,1,2,3,4),
                        type=int)
    parser.add_argument("--format",
                        action="store",
                        dest="format",
                        choices=["csv", "json"],
                        default="csv")
    args = parser.parse_args()
    return args
def get_chars_hashed_by_yoffset(csv_reader, precision, pages=None):
    """Bucket character rows by page id and y0 rounded to `precision` decimals.

    Only rows whose object_type is 'char' are kept; when `pages` is given,
    rows from other pages are dropped.  Returns a dict keyed "pageid@y0"
    whose values are {'count': int, 'chars': [row, ...]} in input order.
    """
    line_hash = {}
    for row in csv_reader:
        if pages and int(row['pageid']) not in pages:
            continue
        if row['object_type'] != 'char':
            continue
        hash_key = "%s@%s" % (row['pageid'], round(float(row['y0']), precision))
        bucket = line_hash.setdefault(hash_key, {'count': 0, 'chars': []})
        bucket['count'] += 1
        bucket['chars'].append(row)
    return line_hash
def coalesce_into_words(char_height_dict):
    """Group baseline-bucketed characters into words.

    `char_height_dict` maps "pageid@y0" keys to {'count': int, 'chars': [row]}
    buckets (see get_chars_hashed_by_yoffset).  Within each bucket the chars
    are sorted left-to-right; a new word starts at an explicit space char or
    when the horizontal gap to the previous char exceeds one third of that
    char's height (a heuristic).  Returns a list of words, each a list of
    char rows; line grouping is not preserved.
    """
    result_word_lines = []
    for bucket_key in char_height_dict.keys():
        sorted_chars = sorted(char_height_dict[bucket_key]['chars'],
                              key=lambda k: float(k['x0']))
        if len(sorted_chars) == 1:
            result_word_lines.append([sorted_chars[0]])
            continue
        this_current_word = []
        # Seed with the first char itself so its own "separation" is <= 0
        # and it can never start on a boundary.
        last_char_x1 = float(sorted_chars[0]['x1'])
        last_char_height = float(sorted_chars[0]['height'])
        for char in sorted_chars:
            separation_width = float(char['x0']) - last_char_x1
            relevant_char_height = last_char_height
            last_char_x1 = float(char['x1'])
            last_char_height = float(char['height'])
            if char['text'] == ' ' or separation_width > relevant_char_height / 3:
                # Word boundary: flush whatever has been collected so far.
                if this_current_word:
                    result_word_lines.append(this_current_word)
                    if debug:
                        prospective_word = "".join(c['text'] for c in this_current_word)
                        print("***Prospective word: '%s'" % prospective_word)
                this_current_word = []
            # Space chars mark boundaries but never join a word themselves.
            if char['text'] != ' ':
                this_current_word.append(char)
        # Flush the trailing word of the bucket.
        if this_current_word:
            result_word_lines.append(this_current_word)
            if debug:
                prospective_word = "".join(c['text'] for c in this_current_word)
                print("***Prospective word: '%s'" % prospective_word)
    return result_word_lines
def merge_word_arrays(words_by_array):
    """Collapse each char-list "word" into a single word dict.

    The bounding box runs from the first char's x0 to the last char's x1 and
    from the min y0 to the max y1 over all chars.  adv, fontname, pageid,
    doctop, top, height and upright are copied from the first char — an
    approximation that may not hold for mixed-font words (kept from the
    original implementation).
    """
    word_array = []
    for line in words_by_array:
        first_char = line[0]
        last_char = line[-1]  # was line[-1:][0] — same element, clearer
        y1 = max(float(c['y1']) for c in line)
        y0 = min(float(c['y0']) for c in line)
        this_word_text = "".join(c['text'] for c in line).strip()
        width = round(float(last_char['x1']) - float(first_char['x0']), RETURN_PRECISION)
        word_array.append({
            'adv': first_char['adv'],
            'fontname': first_char['fontname'],
            'linewidth': None,
            'pageid': first_char['pageid'],
            'doctop': first_char['doctop'],
            'text': this_word_text,
            'top': first_char['top'],
            'object_type': 'word',
            'height': first_char['height'],
            'upright': first_char['upright'],
            'width': width,
            'y1': y1,
            'y0': y0,
            'x0': first_char['x0'],
            'x1': last_char['x1'],
            'size': round(y1 - y0, RETURN_PRECISION),
        })
    return word_array
def to_csv(word_list, output):
    """Emit `word_list` as CSV in the module-level `fieldnames` column order.

    Extra keys are ignored and missing keys become ''.  The header row is
    written manually with a bare LF while DictWriter rows use its default
    line terminator — kept that way for byte-compatibility with the
    original output.
    """
    output.write(",".join(fieldnames) + "\n")
    writer = csv.DictWriter(output, fieldnames=fieldnames,
                            restval='', extrasaction='ignore')
    writer.writerows(word_list)
def to_json(word_list, output):
    """Serialize the word list as JSON onto the given writable stream."""
    json.dump(word_list, output)
def process_file(infile, outfile, precision=1, format='csv', pages=None):
    """Read a pdfplumber per-char CSV and write word-level rows.

    Pipeline: bucket chars by baseline, coalesce buckets into words, merge
    each word into one dict, then emit as csv or json.  Always returns 1.
    """
    reader = csv.DictReader(infile)
    baseline_buckets = get_chars_hashed_by_yoffset(reader, precision, pages=pages)
    word_list = merge_word_arrays(coalesce_into_words(baseline_buckets))
    if format == 'json':
        to_json(word_list, outfile)
    elif format == 'csv':
        to_csv(word_list, outfile)
    return 1
def main():
    """CLI entry point: parse arguments and stream words to stdout."""
    args = parse_args()
    if debug:
        print("args are: ", args)
    # argparse gives pages as [[x]] or [[x, y, z]], so unwrap the outer list.
    page_list = args.pages[0] if args.pages else None
    process_file(args.infile, sys.stdout,
                 precision=args.precision,
                 format=args.format,
                 pages=page_list)
if __name__ == "__main__":
main()
<file_sep>
Some utility scripts I'm noodling around with for helping standardize word-based output into a csv format. In general there are three cases I want to address: 1. text-based pdfs, 2. pdfs that have already been OCR'ed and had text 'attached' to them, and 3. image-based pdfs that need to be scanned. For cases 1. and 2. the first step is to use jsvine's convenient PDFMiner wrapper [pdfplumber](https://github.com/jsvine/pdfplumber), to output the character-bounding boxes, and then coalesce them into words. Coalesce_words.py assumes the output of pdfplumber.
The same approach will work for #2 if we know that the pdfs are not rotated. More complex tooling is needed if the pdfs are rotated.
For case #3 we're going to assume that the pdfs have been tesseracted, and we're dealing with .html files in HOCR format. Tesseract is not necessarily the best choice for high-quality OCR, but it's freely available…
## coalesce_words.py
usage: coalesce_words [-h] [--pages PAGES [PAGES ...]]
[--precision {0,1,2,3,4}] [--format {csv,json}]
[infile]
positional arguments:
infile
optional arguments:
-h, --help show this help message and exit
--pages PAGES [PAGES ...]
--precision {0,1,2,3,4}
--format {csv,json}
Reads a csv of single letters (such as is output by pdfplumber) and outputs a similarly formatted file of words with some fairly strict assumptions: that char y0s are the same when rounded to `--precision` precision, and that words are separated by a space character or are physically separated by a distance greater than a third of the font height. This heuristic is just a guess, and might need to change.
These assumptions *may not* hold for pdfs that have OCR'ed text attached to them, and won't work if the text is slanted.
### example usage:
The file `../examples/1.27.16_Clinton_1362500_INV.csv` is pdfplumber's output of `../examples/1.27.16_Clinton_1362500_INV.pdf`. It consists of a .csv file with one line per character (or a few other detected types) and a bounding box defined by x0, x1, y0 and y1 for each char. To get word-level bounding boxes, run:
$ python coalesce_words.py ../examples/1.27.16_Clinton_1362500_INV.csv > 1.27.16_Clinton_1362500_INV_words.csv
|
79421c8648219e74e08cf23fb041bdabd64d443c
|
[
"Markdown",
"Python"
] | 2
|
Python
|
jsfenfen/pdf_bbox_utils
|
4a91d1f8f7f5dd5a0d1239ad70659e2d8c995ae2
|
818d0de5cf7ac6dea92fe426c985c29bc8d899f3
|
refs/heads/main
|
<file_sep>dddd
jjj
Wed, May 19, 2021 9:03:42 PM
love2dad
<file_sep>cccc
Wed, May 19, 2021 10:26:40 PM
|
1fdca4d3699090b4eb8c86ab7444f89202c175e6
|
[
"Java"
] | 2
|
Java
|
jerry-carney/pluto-app
|
969634336bf0b1dc2b4a124c1c07f00ad6e35cc4
|
4e41409824a2f23539ec831fe73e34972dd95625
|
refs/heads/master
|
<file_sep># Explora.in
A promotional website for the Explora Architecture Academy.
Made with `Materializecss`.
Can be visited [here](http://explora.in/new_site) [Currently Under Development]
<file_sep><?php
// Public-site DB bootstrap: connect, select traveltr_db, then echo column 1
// of the first `explora` row for the including page to consume.
// NOTE(review): credentials are hard-coded in source — move to config.
$connection = mysqli_connect("localhost","traveltr_db","Q%cC.FM.yuVb") or die("Error " . mysqli_error($connection));
$db = mysqli_select_db($connection,"traveltr_db");
$sql = "select * from explora";
$result = mysqli_query($connection, $sql) or die("Error in Selecting " . mysqli_error($connection));
//echo $result["id"] . " && ". $result["url"];
$row=mysqli_fetch_array($result);
// Presumably a URL — TODO confirm against the `explora` table schema.
echo $row[1];
echo $row[1];
?> <file_sep><?php
// Stub download page: explora.php echoes its value first, then this status
// line is printed.  No actual PDF download is implemented yet.
require("explora.php");
echo "downloading pdf...."
?>
<file_sep><!DOCTYPE html>
<html>
<head>
<title>Send mail</title>
</head>
<body>
<?php
// Sends a test mail (with a PDF attachment) through Gmail SMTP via PHPMailer.
require 'master/PHPMailerAutoload.php';
// BUG FIX: the key used to be the literal string '$form' (quoted dollar
// sign), so the parameter could never be read.  Also guard against a missing
// value and escape before echoing to avoid reflected XSS.
$a = isset($_GET['form']) ? $_GET['form'] : '';
echo htmlspecialchars($a);
$mail = new PHPMailer;
//$mail->SMTPDebug = 3;
$mail->isSMTP();                       // Use SMTP transport.
$mail->Host = 'smtp.gmail.com';        // Primary SMTP server.
$mail->SMTPAuth = true;                // Enable SMTP authentication.
$mail->Username = '<EMAIL>';           // SMTP username.
$mail->Password = '<PASSWORD>';        // SMTP password.
$mail->SMTPSecure = 'tls';             // TLS encryption ('ssl' also accepted).
$mail->Port = 587;                     // TCP port to connect to.
// Skip certificate verification — development only; remove for production.
$mail->SMTPOptions = array(
    'ssl' => array(
        'verify_peer' => false,
        'verify_peer_name' => false,
        'allow_self_signed' => true
    )
);
$mail->From = '<EMAIL>';
$mail->FromName = 'D';
$mail->addAddress('<EMAIL>', '<NAME>');  // Recipient.
//$mail->addAddress('<EMAIL>');            // Name is optional.
$mail->addReplyTo('<EMAIL>', 'Information');
//$mail->addCC('<EMAIL>');
//$mail->addBCC('<EMAIL>');
//$mail->addAttachment('/var/tmp/file.tar.gz');       // Add attachments.
//$mail->addAttachment('/tmp/image.jpg', 'new.jpg');  // Optional name.
$mail->isHTML(true);                   // Send as HTML mail.
$mail->Subject = 'Here is the subject';
$mail->Body = 'This is the HTML message body <b>in bold!</b>';
$mail->AltBody = 'This is the body in plain text for non-HTML mail clients';
$mail->AddAttachment('dhaval.pdf','helo.pdf');
if(!$mail->send()) {
    echo 'Message could not be sent.';
    echo 'Mailer Error: ' . $mail->ErrorInfo;
} else {
    echo 'Message has been sent';
}
?>
</body>
</html><file_sep><?php
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1.0, user-scalable=no"/>
<title>Results::Explora Academy of Design</title>
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
<link href="css/materialize.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<link href="css/style.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<link rel="icon" href="media/logo.png">
<style>
body{
background: url(media/cardbg.png);
background-repeat: repeat;
}
</style>
</head>
<body>
<ul id='dropdown1' class='dropdown-content'>
<li><a href="history.html">History</a></li>
<li><a href="key_people.html">Key People</a></li>
<li><a href="#our_course" class="modal-trigger">Course</a></li>
<li><a href="methodology.html">Teaching Methodology</a></li>
</ul>
<ul id='dropdown2' class='dropdown-content'>
<li><a href="history.html">History</a></li>
<li><a href="key_people.html">Key People</a></li>
<li><a href="#our_course" class="modal-trigger">Course</a></li>
<li><a href="methodology.html">Teaching Methodology</a></li>
</ul>
<nav role="navigation">
<div class="nav-wrapper container">
<a id="logo-container" href="index.html" class="brand-logo">
<img src="media/logo.png" class="responsive-img logo1"></img>
<!---->
<span class="headername">Explora Academy of Design</span>
</a>
<ul class="right hide-on-med-and-down">
<li><a class="dropdown-button" href="#!" data-activates="dropdown1">About Us<i class="material-icons suffix" style="float: right;">keyboard_arrow_down</i></a></li>
<li><a href="results.php">Results</a></li>
<li><a href="gallery.html">Gallery</a></li>
<li><a href="#">Contact Us</a></li>
<li><a href="tel:9924376644"><i class="material-icons prefix" style="float: left;">phone</i><span style="float: left">9924376644</span></a></li>
</ul>
<ul id="nav-mobile" class="side-nav">
<li><a class="dropdown-button" href="#!" data-activates="dropdown2">About Us<i class="material-icons suffix" style="float: right;vertical-align:bottom; padding-top: 17px;">keyboard_arrow_right</i></a></li>
<li><a href="results.php">Results</a></li>
<li><a href="#">Gallery</a></li>
<li><a href="#">Contact Us</a></li>
<li><a href="tel:9924376644"><i class="material-icons prefix" style="float: left;margin-top:17px;">phone</i><span style="float: left;vertical-align:top">9924376644</span></a></li>
</ul>
<a href="#" data-activates="nav-mobile" class="button-collapse"><i class="material-icons">menu</i></a>
</div>
</nav>
<div class="slider">
<ul class="slides">
<li>
<img src="media/ResultImages/Banner/1.jpg">
</li>
<li>
<img src="media/ResultImages/Banner/2.jpg">
</li>
<li>
<img src="media/ResultImages/Banner/3.jpg">
</li>
<li>
<img src="media/ResultImages/Banner/4.jpg">
</li>
</ul>
</div>
<div id="our_course" class="modal">
<div class="modal-content modal-course">
<div class="card blue-grey darken-1">
<div class="card-titl"><h4>Our Course</h4></div>
<div class="card-content thiscard white-text">
<h5><strong>NATA </strong></h5>
<p>The National Aptitude Test in Architecture (<strong>NATA</strong>), is an examination conducted for admission to undergraduate Architecture courses at any recognized institute in India. It judges the basic designing aptitude of the applicants. It is mandatory for admission to a recognized undergraduate architecture course anywhere in India.</p>
<br/>
<p><b>How to apply for NATA 2015:</b><br>
Candidates need to apply NATA 2015 online application form through <a href="http://nata.in">www.nata.in</a>. Candidate must fill form in appropriate manner with all mandatory details.</p>
<br/>
<p><b>NATA Exam 2015 Eligibility:</b><br>
Candidates should have qualified 10+2 or equivalent from a recognized Board/University with Mathematics as a subject of examination with at least 50% aggregate marks <b><u>OR</u></b> Candidates should have qualified 10+3 Diploma (any stream) recognized by Central/ State Governments with 50% aggregate marks</p>
<br/>
<p><b>NATA 2015 Question paper consists of two parts:</b><br>
Drawing Test – 100 marks <br>
Duration of test – 2 hours <br><br>
Aesthetic Sensitivity Test – 100 marks<br>
Duration of test – 1 hour</p>
<br/>
<p><b>NATA 2015 Syllabus, Test Structure:</b><br>
The exam will be of 3 hrs. One has to clear the NATA eligibility criteria before applying. The test is divided into 2 sections. As per the Council of Architecture, a candidate applying to any school/ college of Architecture in India should have achieved 40% score in NATA, i.e. at least 80/200, for consideration of admission into first year B.Arch course.</p>
<br/>
<h5>Test 1</h5>
<h6>The drawing aptitude of the candidate shall be judged on the following aspects</h6>
<ol>
<li>Ability to sketch</li>
<li>Visualizing and Drawing</li>
<li>Sense of Perspective Drawing</li>
<li>Creating 2D and 3D Composition</li>
<li>Understanding of scale and sense of proportion</li>
<li>Memory drawing from experience</li>
</ol>
<br/>
<h5>Test 2</h5>
<h6>Aesthetic sensitivity is to evaluate candidates perception, imagination, observation, creativity and communication and architectural awareness.</h6><br>
<h6>The test shall be of objective type and comprise of topics related to:</h6>
<ol>
<li>Visualizing 3D objects from 2D drawings</li>
<li>Visualizing different sides of 3D objects</li>
<li>Analytical Reasoning</li>
<li>Mental Ability</li>
<li>Imaginative Comprehension and Expression</li>
<li>Architectural Awareness</li>
</ol>
</div>
</div>
</div>
<div class="modal-footer">
<button class="btn waves-effect waves-light submeet coursemodal blue-grey " >Okay <i class="material-icons">done_all</i></button>
</div>
</div>
<!-- Results container over here -->
<!-- main results-->
<div class="container">
<div class="row">
<div class="col s12 m12 l12">
<div class="card white">
<div class="card-content">
<div class="row center">
<h2><u>Result</u></h2>
<p>Maintained 100% success in all its academic years since its inception in 2009. All past students from explora successfully secured admissions in the top schools of architecture in the state.</p>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="container">
<div class="row">
<?php
$connection = mysqli_connect("localhost","root","","explora") or die("Error " . mysqli_error($connection));
$rowcounter=0;
$sql = "select * from result order by score desc";
$result = mysqli_query($connection, $sql) or die("Error in Selecting " . mysqli_error($connection));
while($row=mysqli_fetch_array($result))
{?>
<div class="col s6 m3 l2">
<div class="card">
<div class="card-image">
<img src="<?php echo "admin/".$row[4]?>">
<span class="card-title"></span>
</div>
<div class="card-content center">
<p><b><?php echo $row[1]?></b></p>
<p><b>Score:<?php echo $row[2]?></b></p>
<p><b><?php echo $row[3]?></b></p>
</div>
</div>
</div>
<?php
$rowcounter++;
}
?>
</div>
</div>
<!-- national exams over here-->
<!-- main results-->
<div class="container">
<div class="row">
<div class="col s12 m12 l12">
<div class="card white">
<div class="card-content">
<div class="row center">
<h5>Explora’s achievements are not only marked at state level but also at National level (Online Exams)</h5>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- Results container finished here -->
<footer class="page-footer grey">
<div class="container footer">
<div class="row">
<div class="col l6 s12">
<h5 class="white-text">Explora Academy of Design</h5>
<p class="grey-text text-lighten-4">We are a team of college students working on this project like it's our full time job. Any amount would help support and continue development on this project and is greatly appreciated.</p>
</div>
<div class="col l3 s12">
<h5 class="white-text">Quick Links</h5>
<ul>
<li><a class="white-text" href="#!">Link 1</a></li>
<li><a class="white-text" href="#!">Link 2</a></li>
<li><a class="white-text" href="#!">Link 3</a></li>
<li><a class="white-text" href="#!">Link 4</a></li>
</ul>
</div>
<div class="col l3 s12">
<h5 class="white-text">Connect</h5>
<ul>
<li><a class="white-text" href="#!">Link 1</a></li>
<li><a class="white-text" href="#!">Link 2</a></li>
<li><a class="white-text" href="#!">Link 3</a></li>
<li><a class="white-text" href="#!">Link 4</a></li>
</ul>
</div>
</div>
</div>
<div class="footer-copyright">
<div class="container center">
Developed by <a class="white-text text-lighten-3" href="#"><NAME> and <NAME></a>
</div>
</div>
</footer>
<script src="js/jquery.min.js"></script>
<script src="js/materialize.js"></script>
<script src="js/resultinit.js"></script>
</body>
</html><file_sep><?php
$connection = mysqli_connect("localhost","root","","explora") or die("Error " . mysqli_error($connection));
?><file_sep><?php
// Approve a pending registration: record the fee and form number, set flag=1.
$idv=$_POST['idv'];
$fees=$_POST['totalfeesm'];
$form=$_POST['formno'];
include 'connection.php';
// SECURITY FIX: the POST values were interpolated straight into the SQL
// string; bind them through a prepared statement instead.
$stmt = mysqli_prepare($connection, "UPDATE registration SET f_reg=?, flag=1, form_no=? WHERE s_id=?") or die("Error in Selecting " . mysqli_error($connection));
mysqli_stmt_bind_param($stmt, "sss", $fees, $form, $idv);
mysqli_stmt_execute($stmt) or die("Error in Selecting " . mysqli_error($connection));
mysqli_stmt_close($stmt);
echo '<script type="text/javascript"> alert("Approved Successfully")</script>';
echo "<script type='text/javascript'>window.location='pending.php'</script>";
?><file_sep><?php
// Verify a registration OTP: unused OTPs (flag=0) redirect to the form,
// missing or used OTPs bounce back to otp.php with an error message.
session_start();
$otp=$_POST['otp'];
include 'connection.php';
// SECURITY FIX: $_POST['otp'] was concatenated into the query string; bind
// it through a prepared statement instead.
$stmt = mysqli_prepare($connection, "select * from new_reg where otp=?") or die("Error in Selecting " . mysqli_error($connection));
mysqli_stmt_bind_param($stmt, "s", $otp);
$_SESSION['lotp']=$otp;
mysqli_stmt_execute($stmt) or die("Error in Selecting " . mysqli_error($connection));
$result = mysqli_stmt_get_result($stmt) or die("Error in Selecting " . mysqli_error($connection));
if(mysqli_num_rows($result)==0)
{
    $_SESSION['otp']='Already used OTP, please try registering again from www.explora.in';
    echo '<script type="text/javascript"> alert("Invalid or Already used OTP, please try registering again from www.explora.in")</script>';
    echo "<script type='text/javascript'>window.location='otp.php'</script>";
}
while($row=mysqli_fetch_array($result))
{
    if($row['flag']==0)
    {
        // Fresh OTP: mark the session as verified and continue registration.
        $_SESSION['otpl']='1';
        echo "<script type='text/javascript'>window.location='newreg.php'</script>";
        break;
    }
    else
    {
        $_SESSION['otp']='Already used OTP, please try registering again from www.explora.in';
        echo '<script type="text/javascript"> alert("Invalid or Already used OTP, please try registering again from www.explora.in")</script>';
        echo "<script type='text/javascript'>window.location='otp.php'</script>";
    }
}
?> <file_sep><?php
// Admin page listing approved students (flag=1).
session_start();
// BUG FIX: the redirect previously fell through and kept rendering the page,
// so the "protected" content was still served to unauthenticated clients.
if(!isset($_SESSION['unm'])) { header('location:index.php'); exit; }
include 'connection.php';
$rowcounter=0;
// Approved students, ordered by first name (descending).
$sql = "select * from registration where flag=1 order by first_name desc";
$result = mysqli_query($connection, $sql) or die("Error in Selecting " . mysqli_error($connection));
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1.0, user-scalable=no"/>
<title>Explora Academy of Design</title>
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
<link rel="stylesheet" href="css/common.css">
<link href="../css/materialize.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<link href="../css/style.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<link href="css/card.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<style type="text/css">
.thiscard{
padding: 30px;
background-color: inherit;
text-align: center;
}
.thiscard a{
margin: 15px;
}
.card a {
color: black;
}
</style>
<script src="../js/modernizr.js"></script>
<link rel="icon" href="../media/logo.png">
</head>
<body>
<nav role="navigation">
<div class="nav-wrapper container">
<a id="logo-container" href="index.php" class="brand-logo">
<img src="../media/logo.png" class="responsive-img logo1"></img>
<span class="headername">Explora Academy of Design</span>
</a>
<ul class="right hide-on-med-and-down">
<li><a href="welcome.php">Home</a></li>
<li><a href="logout.php">Logout</a></li>
</ul>
<ul id="nav-mobile" class="side-nav">
<li><a href="welcome.php">Home</a></li>
<li><a href="logout.php">Logout</a></li>
</ul>
<a href="#" data-activates="nav-mobile" class="button-collapse"><i class="material-icons">menu</i></a>
</div>
</nav>
<div class="container">
<section>
<h3 class="center">Approved Students</h3>
</section>
</div>
<div class="container">
<div class="row">
<?php
while($row=mysqli_fetch_array($result))
{
?>
<div class="col s6 m3 l2">
<div class="card result-card">
<div class="card-image">
<img src="<?php echo "../".$row['img_path'];?>">
<span class="card-title"><?php echo $row['first_name']." ".$row['middle_name'];?></span>
</div>
<div class="card-content">
<p><b>Fees:</b><?php echo $row['f_reg'];?></p>
<p><b>Mob:</b><?php echo $row['mobile_no'];?></p><br/>
<p><center><a onclick="return confirm('Are you sure , you want to delete?')"
href="delete.php?idv=<?php echo $row['s_id'];?>"><i class="material-icons">delete</i></a>
<a href="sendmail.php?idv=<?php echo $row['s_id'];?>"><i class="material-icons">mail</i></a></center></p>
</div>
</div>
</div>
<?php }?>
</div>
</div>
<footer class="page-footer grey">
<div class="container footer">
<div class="row">
<div class="col l6 s12">
<h5 class="white-text">Explora Academy of Design</h5>
<p class="grey-text text-lighten-4">Studio 2, First Floor, <br>Cross Road Complex, <br>Next to Mr.Puff, <br>Near to Domino's Pizza, <br>Subhanpura, Vadodara - 390023</p>
</div>
<div class="col l3 s12">
<h5 class="white-text">Quick Links</h5>
<ul>
<li><a href="#" class="white-text">1</a></li>
<li><a href="#" class="white-text">2</a></li>
<li><a href="#" class="white-text">3</a></li>
<li><a href="#" class="white-text">4</a></li>
<li><a href="#" class="white-text">5</a></li>
</ul>
</div>
<div class="col l3 s12">
<h5 class="white-text">Connect</h5>
<ul>
<li><a class="white-text" href="#!">Link 1</a></li>
<li><a class="white-text" href="#!">Link 2</a></li>
<li><a class="white-text" href="#!">Link 3</a></li>
<li><a class="white-text" href="#!">Link 4</a></li>
</ul>
</div>
</div>
</div>
<div class="footer-copyright">
<div class="container center">
Developed by <a class="white-text text-lighten-3" href="#"><NAME> and <NAME></a>
</div>
</div>
</footer>
<script src="../js/jquery.min.js"></script>
<script src="../js/materialize.js"></script>
<script src="../js/init.js"></script>
<script type="text/javascript">
var x;
$(document).on("click", ".mod-ajax", function () {
var myBookId = $(this).data('id');
$(".modal-body #idv").val( myBookId);
});
</script>
</body>
</html><file_sep><!DOCTYPE html>
<html>
<head>
<title>Send mail</title>
</head>
<body>
<?php
// Registration mail: emails the OTP link to the applicant and records the OTP
// (characters 7+ of the mobile number) in `new_reg` for later verification.
session_start();  // BUG FIX: $_SESSION was written below without an active session.
require 'master/PHPMailerAutoload.php';
$name=$_GET['name'];
$email=$_GET['email'];
$study=$_GET['study'];
$mob=$_GET['mob'];
$address=$_GET['address'];
$enqq=$_GET['enqq'];
$mail = new PHPMailer;
//$mail->SMTPDebug = 4;
$mail->isSMTP();                       // Use SMTP transport.
$mail->Host = 'sg2plcpnl0054.prod.sin2.secureserver.net';
$mail->SMTPAuth = true;
$mail->Username = '<EMAIL>';
$mail->Password = '<PASSWORD>';
$mail->SMTPSecure = 'ssl';
$mail->Port = 465;
// Skip certificate verification — shared-hosting workaround, not for production.
$mail->SMTPOptions = array(
    'ssl' => array(
        'verify_peer' => false,
        'verify_peer_name' => false,
        'allow_self_signed' => true
    )
);
$newUrl="http://www.explora.in/otp.php";
$mail->From = '<EMAIL>';
$mail->FromName = "Registration @ Explora Academy of Design";
$mail->addAddress($email, $name);
// OTP = everything after the first 6 characters of the mobile number.
$otp1=str_split($mob,6);
$otp=$otp1[1];
$mail->addReplyTo('<EMAIL>', 'Information');
$mail->isHTML(true);
$mail->Subject = 'Explora Academy of Design Registration';
$mail->Body = 'Thank you for showing your interest in our academy.
To confirm your registration, please click link below and fill out the form.
Once you fill up the form, our representative will contact you with further instructions regarding confirmation.<br/>'.$newUrl.'<br/>Your one time password(OTP) for completing the registration process is <b>'.$otp.'</b>';
if(!$mail->send()) {
    echo 'Message could not be sent.';
    echo 'Mailer Error: ' . $mail->ErrorInfo;
} else {
    $connection = mysqli_connect("localhost","explora_new","random1234rewA","explora_new") or die("Error " . mysqli_error($connection));
    // SECURITY FIX: $otp derives from the user-supplied mobile number; bind
    // it instead of interpolating it into the INSERT statement.
    $stmt = mysqli_prepare($connection, "INSERT INTO new_reg (`otp`,`flag`) VALUES (?,0)") or die("Error in Selecting " . mysqli_error($connection));
    mysqli_stmt_bind_param($stmt, "s", $otp);
    $result = mysqli_stmt_execute($stmt) or die("Error in Selecting " . mysqli_error($connection));
    mysqli_stmt_close($stmt);
    if($result>0)
    {
        echo '<script type="text/javascript"> alert("Submitted Successfully")</script>';
        echo "<script type='text/javascript'>window.location='newregtext.php'</script>";
        $_SESSION['result']=1;
    }
    else
    {
        echo "<script>
        window.location = 'welcome.php';
        </script>";
        exit("Error, Try Again after sometime");
    }
}
?>
</body>
</html><file_sep><?php
include 'connection.php';
session_start();
// Authenticate against the login table with a single parameterized query
// instead of iterating over every row and string-comparing credentials in
// PHP (the old loop also redirected on the first NON-matching row).
// NOTE(review): passwords appear to be stored in plain text — they should be
// hashed with password_hash()/password_verify().
$stmt = mysqli_prepare($connection, "SELECT `username` FROM login WHERE `username` = ? AND `password` = ?") or die("query failed1".mysqli_error($connection));
mysqli_stmt_bind_param($stmt, "ss", $_POST['username'], $_POST['password']);
mysqli_stmt_execute($stmt) or die("query failed1".mysqli_error($connection));
$query = mysqli_stmt_get_result($stmt);
if(mysqli_num_rows($query)>0)
{
$row = mysqli_fetch_array($query);
$_SESSION['unm']=$row['username'];
header('location:welcome.php');
exit; // stop execution after the redirect
}
else
{
$msg = "Wrong Username or Password ";
$_SESSION['warning']=$msg;
header("location:index.php");
exit; // stop execution after the redirect
}
?><file_sep><?php
// Accepts a result-sheet upload from the admin panel, stores the file under
// Result/ and records the entry in the result table.
session_start(); // required: $_SESSION['result'] is written below
$fname=$_POST['name'];
$score=$_POST['score'];
$college=$_POST['college'];
$target_dir = "Result/";
// basename() strips any directory components from the client-supplied file
// name, preventing path traversal outside Result/.
$target_file = $target_dir . basename($_FILES["file"]["name"]);
$uploadOk = 1;
$imageFileType = pathinfo($target_file,PATHINFO_EXTENSION);
// Check if image file is a actual image or fake image
if(isset($_POST["action"])) {
$check = getimagesize($_FILES["file"]["tmp_name"]);
if($check !== false) {
echo "File is an image - " . $check["mime"] . ".";
$uploadOk = 1;
} else {
echo "File is not an image.";
$uploadOk = 0;
}
}
// NOTE(review): $imageFileType is computed but never checked — consider a
// whitelist of allowed extensions (jpg/png/pdf) before accepting the upload.
if ($uploadOk == 0) {
echo "Sorry, your file was not uploaded.";
// if everything is ok, try to upload file
} else {
if (move_uploaded_file($_FILES["file"]["tmp_name"], $target_file)) {
echo "The file ". basename( $_FILES["file"]["name"]). " has been uploaded. \n";
$flag1=1;
include 'connection.php';
// Parameterized insert: name/score/college come straight from the form and
// must not be interpolated into the SQL string.
$sql = "INSERT INTO result (`name`, `score`, `admission`, `url`) VALUES (?,?,?,?)";
echo $sql; // debug leftover; kept for output compatibility
$stmt = mysqli_prepare($connection, $sql) or die("Error in Selecting " . mysqli_error($connection));
mysqli_stmt_bind_param($stmt, "ssss", $fname, $score, $college, $target_file);
$result = mysqli_stmt_execute($stmt) or die("Error in Selecting " . mysqli_error($connection));
if($result>0)
{
echo '<script language="javascript">';
echo 'alert("Result added successfully")';
echo '</script>';
echo "<script>
window.location = 'results.php';
</script>";
$_SESSION['result']=1;
}
else
{
echo "<script>
window.location = 'welcome.php';
</script>";
exit("Error While uploading image on the server");
}
} else {
echo "Sorry, there was an error uploading your file.";
}
}
<file_sep><?php
session_start();
// Auth guard: only logged-in admins may view this page. `exit` is required —
// header() alone does not stop PHP from rendering the rest of the page to an
// unauthenticated client.
if(!isset($_SESSION['unm'])) { header('location:index.php'); exit; }
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1.0, user-scalable=no"/>
<title>Explora Academy of Design</title>
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
<link rel="stylesheet" href="css/common.css">
<link href="../css/materialize.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<link href="../css/style.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<style type="text/css">
.thiscard{
padding: 30px;
background-color: inherit;
text-align: center;
}
.thiscard a{
margin: 15px;
}
</style>
<script src="../js/modernizr.js"></script>
<link rel="icon" href="../media/logo.png">
</head>
<body>
<nav role="navigation">
<div class="nav-wrapper container">
<a id="logo-container" href="index.php" class="brand-logo">
<img src="../media/logo.png" class="responsive-img logo1"></img>
<span class="headername">Explora Academy of Design</span>
</a>
<ul class="right hide-on-med-and-down">
<li><a href="welcome.php">Home</a></li>
<li><a href="logout.php">Logout</a></li>
</ul>
<ul id="nav-mobile" class="side-nav">
<li><a href="welcome.php">Home</a></li>
<li><a href="logout.php">Logout</a></li>
</ul>
<a href="#" data-activates="nav-mobile" class="button-collapse"><i class="material-icons">menu</i></a>
</div>
</nav>
<div id="contact" class="modal">
<div class="modal-header"><h5>Contact Us</h5></div>
<div class="modal-content">
<div class="row">
<div class="col s12 m6">
<div class="card card3 grey ">
<div class="card-content white-text">
<span class="card-title">Address</span><br><br>
Studio 2, First Floor <br>Cross Road Complex <br>Next to Mr.Puff, <br>Near to Domino's Pizza, <br>Subhanpura, Vadodara - 390023
</div>
</div>
</div>
<div class="col s12 m6">
<div class="card card3 grey ">
<div class="card-content white-text">
<span class="card-title">Contact</span>
<div><span><NAME>: </span><a href="tel:9924376644"><u class="white-text"><span>+91 9924376644</span></u></a></div>
<div><span><NAME>: </span><a href="tel:9998130018"><u class="white-text"><span>+91 9998130018</span></u></a></div>
</div>
</div>
<div>
<div>
<a href="http://www.facebook.com/exploraacademy"><span class="fbicon large btn-floating btn-large waves-effect waves-light"></span></a>
<a href="mailto:<EMAIL>" class="btn-floating btn-large waves-effect waves-light grey yo"><i class="material-icons large">email</i></a></div>
</div>
</div>
</div>
</div>
<div class="modal-footer">
<button class="btn coursemodal waves-effect waves-light submeet" type="submit" name="submit">Okay <i class="material-icons">done_all</i></button>
</div>
</div>
<div class="container">
<section>
<h3 class="center">ADMIN PANEL</h3>
<p>
<div class="row">
<div class="col s12 m12 l12">
<div class="card blue-grey darken-1">
<div class="card-content white-text">
<span class="card-title">Applications' Approval Status</span>
<p class="thiscard">
<a href="pending.php" class="waves-effect waves-light red lighten-2 btn-large"><i class="material-icons right">announcement</i>Pending</a>
<a href="approved.php" class="waves-effect waves-light green lighten-2 btn-large"><i class="material-icons right">done_all</i>Approved</a>
<a href="logout.php" class="waves-effect waves-light cyan lighten-2 btn-large"><i class="material-icons right">keyboard_tab</i>Logout</a>
</p>
<p class="thiscard">
<a href="results.php" class="waves-effect waves-light brown lighten-2 btn-large"><i class="material-icons right">assessment</i>Add Result</a>
<a href="gallery.php" class="waves-effect waves-light orange lighten-2 btn-large"><i class="material-icons right">image</i>Gallery</a>
</p>
</div>
</div>
</div>
</div>
</p>
</section>
</div>
<div id="our_course" class="modal">
<div class="modal-content modal-course">
<div class="card card3 blue-grey darken-1">
<div class="card-titl"><h4>Our Course</h4></div>
<div class="card-content thiscard white-text">
<h5><strong>NATA </strong></h5>
<p>The National Aptitude Test in Architecture (<strong>NATA</strong>), is an examination conducted for admission to undergraduate Architecture courses at any recognized institute in India. It judges the basic designing aptitude of the applicants. It is mandatory for admission to a recognized undergraduate architecture course anywhere in India.</p>
<br/>
<p><b>How to apply for NATA 2015:</b><br>
Candidates need to apply NATA 2015 online application form through <a href="http://nata.in">www.nata.in</a>. Candidate must fill form in appropriate manner with all mandatory details.</p>
<br/>
<p><b>NATA Exam 2015 Eligibility:</b><br>
Candidates should have qualified 10+2 or equivalent from a recognized Board/University with Mathematics as a subject of examination with at least 50% aggregate marks <b><u>OR</u></b> Candidates should have qualified 10+3 Diploma (any stream) recognized by Central/ State Governments with 50% aggregate marks</p>
<br/>
<p><b>NATA 2015 Question paper consists of two parts:</b><br>
Drawing Test – 100 marks <br>
Duration of test – 2 hours <br><br>
Aesthetic Sensitivity Test – 100 marks<br>
Duration of test – 1 hour</p>
<br/>
<p><b>NATA 2015 Syllabus, Test Structure:</b><br>
The exam will be of 3 hrs. One has to clear the NATA eligibility criteria before applying. The test is divided into 2 sections. As per the Council of Architecture, a candidate applying to any school/ college of Architecture in India should have achieved 40% score in NATA, i.e. at least 80/200, for consideration of admission into first year B.Arch course.</p>
<br/>
<h5>Test 1</h5>
<h6>The drawing aptitude of the candidate shall be judged on the following aspects</h6>
<ol>
<li>Ability to sketch</li>
<li>Visualizing and Drawing</li>
<li>Sense of Perspective Drawing</li>
<li>Creating 2D and 3D Composition</li>
<li>Understanding of scale and sense of proportion</li>
<li>Memory drawing from experience</li>
</ol>
<br/>
<h5>Test 2</h5>
<h6>Aesthetic sensitivity is to evaluate candidates perception, imagination, observation, creativity and communication and architectural awareness.</h6><br>
<h6>The test shall be of objective type and comprise of topics related to:</h6>
<ol>
<li>Visualizing 3D objects from 2D drawings</li>
<li>Visualizing different sides of 3D objects</li>
<li>Analytical Reasoning</li>
<li>Mental Ability</li>
<li>Imaginative Comprehension and Expression</li>
<li>Architectural Awareness</li>
</ol>
</div>
</div>
</div>
<div class="modal-footer">
<button class="btn waves-effect waves-light submeet coursemodal blue-grey " >Okay <i class="material-icons">done_all</i></button>
</div>
</div>
<footer class="page-footer grey">
<div class="container footer">
<div class="row">
<div class="col l6 s12">
<h5 class="white-text">Explora Academy of Design</h5>
<p class="grey-text text-lighten-4">Studio 2, First Floor, <br>Cross Road Complex, <br>Next to Mr.Puff, <br>Near to Domino's Pizza, <br>Subhanpura, Vadodara - 390023</p>
</div>
<div class="col l3 s12">
<h5 class="white-text">Quick Links</h5>
<ul>
<li><a class="white-text" href="#!">Link 1</a></li>
<li><a class="white-text" href="#!">Link 2</a></li>
<li><a class="white-text" href="#!">Link 3</a></li>
<li><a class="white-text" href="#!">Link 4</a></li>
</ul>
</div>
<div class="col l3 s12">
<h5 class="white-text">Connect</h5>
<ul>
<li><a class="white-text" href="#!">Link 1</a></li>
<li><a class="white-text" href="#!">Link 2</a></li>
<li><a class="white-text" href="#!">Link 3</a></li>
<li><a class="white-text" href="#!">Link 4</a></li>
</ul>
</div>
</div>
</div>
<div class="footer-copyright">
<div class="container center">
Developed by <a class="white-text text-lighten-3" href="#"><NAME> and <NAME></a>
</div>
</div>
</footer>
<script src="../js/jquery.min.js"></script>
<script src="../js/materialize.js"></script>
<script src="../js/init.js"></script>
</body>
</html><file_sep><!DOCTYPE html>
<html>
<head>
<title>Send mail</title>
</head>
<body>
<?php
// Forwards a website enquiry to the academy mailbox via SMTP.
require 'master/PHPMailerAutoload.php';
// Strip CR/LF from user-controlled values that end up in mail headers
// (From-name, Reply-To, Subject) to prevent email header injection.
$name=str_replace(array("\r","\n"),'',$_GET['name']);
$email=str_replace(array("\r","\n"),'',$_GET['email']);
$study=$_GET['study'];
$mob=$_GET['mob'];
$address=$_GET['address'];
$enqq=$_GET['enqq'];
$mail = new PHPMailer;
//$mail->SMTPDebug = 4;
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'sg2plcpnl0054.prod.sin2.secureserver.net'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
$mail->SMTPSecure = 'ssl'; // Implicit SSL on port 465
$mail->Port = 465; // TCP port to connect to
// NOTE(review): disabling peer verification weakens TLS; confirm whether the
// server certificate can be validated instead.
$mail->SMTPOptions = array(
'ssl' => array(
'verify_peer' => false,
'verify_peer_name' => false,
'allow_self_signed' => true
)
);
$mail->From = '<EMAIL>';
$mail->FromName = $name;
$mail->addAddress('<EMAIL>', 'Enquiry'); // Add a recipient
$mail->addReplyTo($email, $name);
$mail->Subject = 'Explora enquiry from '.$name;
$mail->Body = $enqq;
if(!$mail->send()) {
//$mail->SMTPDebug=2;
echo 'Message could not be sent.';
// echo 'Mailer Error: ' . $mail->ErrorInfo;
} else {
echo '<script type="text/javascript"> alert("Enquiry Submitted Successfully")</script>';
echo "<script type='text/javascript'>window.location='newenqtext.php'</script>";
}
?>
</body>
</html>
</html><file_sep><?php
// Routes the combined enquiry/registration form to the matching mailer
// script, forwarding all form fields as query-string parameters.
$check=$_POST['group1'];
$name=$_POST['fname'];
$email=$_POST['email'];
$study=$_POST['study'];
$mob=$_POST['mob'];
$address=$_POST['address'];
$enqq=$_POST['enqq'];
// Both branches forward the identical field set; build it once.
$query=[
'email'=>$email,
'name'=>$name,
'address'=>$address,
'study'=>$study,
'mob'=>$mob,
'enqq'=>$enqq];
if(strcmp($check,"enquiry")==0)
{
header('Location: http://www.explora.in/enquiryMailer.php?'.http_build_query($query));
exit; // stop execution after the redirect
}
if(strcmp($check,"newreg")==0)
{
header('Location: http://www.explora.in/newregmail.php?'.http_build_query($query));
//header('Location: http://localhost:8181/Explora.in/newregmail.php?'.http_build_query($query));
exit; // stop execution after the redirect
}
?><file_sep><?php
include 'connection.php';
// Deletes a pending registration by primary key. The id arrives via the
// query string, so bind it as a parameter instead of splicing it into SQL.
$id=$_GET['idv'];
$stmt = mysqli_prepare($connection, "DELETE FROM registration WHERE s_id = ?");
mysqli_stmt_bind_param($stmt, "s", $id);
$result2 = mysqli_stmt_execute($stmt);
if($result2>0)
{
echo "<script type='text/javascript'>alert('Record Deleted Successfully');</script>";
echo "<script type='text/javascript'> window.location='pending.php'</script>";
}
else
{
echo "<script type='text/javascript'>alert('Failed to delete record, try again!');</script>";
echo "<script type='text/javascript'> window.location='pending.php'</script>";
}
?><file_sep><?php
session_start();
$host="localhost";
$username="root";
$password="";
$db_name="demo";
// `new mysqli(...) or die(...)` can never fire: the constructor returns an
// object even when the connection fails, so connect_error must be checked
// explicitly.
$con = new mysqli($host, $username, $password, $db_name);
if ($con->connect_error) {
die("cannot connect");
}
// The database is already selected by the constructor; the explicit USE is
// kept so $sql/$result stay defined for any including script that reads them.
$sql = "USE demo";
$result = mysqli_query($con,$sql);
?><file_sep><?php session_start();?>
<!DOCTYPE html>
<html>
<head>
<title>OTP::Explora Academy of Design</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1.0, user-scalable=no"/>
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
<link href="css/materialize.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<link href="css/style.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<style>
input[type=text]:focus:not([readonly]) + label, input[type=password]:focus:not([readonly]) + label, input[type=email]:focus:not([readonly]) + label, input[type=url]:focus:not([readonly]) + label, input[type=time]:focus:not([readonly]) + label, input[type=date]:focus:not([readonly]) + label, input[type=datetime-local]:focus:not([readonly]) + label, input[type=tel]:focus:not([readonly]) + label, input[type=number]:focus:not([readonly]) + label, input[type=search]:focus:not([readonly]) + label, textarea.materialize-textarea:focus:not([readonly]) + label {
color: #fff;
}
label{
color:#fff !important;
}
.input-field .prefix.active {
color: #fff; }
input[type=text], input[type=password], input[type=email], input[type=url], input[type=time], input[type=date], input[type=datetime-local], input[type=tel], input[type=number], input[type=search], textarea.materialize-textarea {
border-bottom-color:#fff !important }
</style>
<link rel="icon" href="../media/logo.png">
<style type="text/css">
.main-thing{
width: 500px;
height: 420px;
margin: 50px auto 0px auto;
padding: 40px;
color: white;
background-color: teal;
-moz-border-radius: 25px;
-webkit-border-radius: 25px;
-moz-box-shadow: 25px 25px 25px #888888;
-webkit-box-shadow: 25px 25px 25px #888888;
box-shadow: 25px 25px 25px #888888;
border-top-left-radius: 0px;
margin-top: 10%;
}
.image-logo{
width:400px;
height: 80px;
padding-bottom: 20px;
margin-right: auto;
margin-left: auto;
}
.submeet{
vertical-align: top;
width: 170px;
}
.submeet i{
padding-left: 20px;
}
.active{
color: white !important;
}
.wrong{
color: red;
padding:10px;
text-align: center;
}
@media only screen and (max-width : 600px) {
.main-thing{
width: 90%;
height: 78%;
padding: 15px 15px 25px 10px;
}
.image-logo{
width:95%
}
}
</style>
</head>
<body>
<div class="row main-thing">
<h4 class="center">One Time Login Form</h4>
<form name="login_form" id="login_form" method="POST" class="col s12" action="checkotp.php">
<div class="row">
<?php
if(isset($_SESSION['otp']))
{
?>
<div class="col s12 wrong"> <?php echo $_SESSION['otp'];?></div>
<?php
}
unset($_SESSION['otp']);
?>
</div>
<div class="row">
<div class="input-field col s12">
<i class="material-icons prefix">account_box</i>
<input id="icon_prefix" type="number" class="validate" autocomplete="off" name="otp" required>
<label for="icon_prefix">OTP</label>
</div>
</div>
<button class="btn waves-effect waves-light submeet right white teal-text" type="submit" name="action">Submit
<i class="material-icons">send</i>
</button>
</form>
</div>
</body>
<script src="js/jquery.min.js"></script>
<script src="js/materialize.js"></script>
</body>
</html><file_sep><!DOCTYPE html>
<?php
// Emails the generated fee receipt to the student and to the academy's
// receipt mailbox. session_start() must run before ANY output, otherwise the
// session cookie headers cannot be sent.
session_start();
?>
<html>
<head>
<title>Send Email</title>
<link href="css/materialize.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<link rel="shortcut icon" href="../exploralocal/images/favicon.ico">
</head>
<?php
require '../master/PHPMailerAutoload.php';
$a= $_GET ;
// basename() strips directory components from the URL-supplied file name,
// blocking path traversal in the attachment path below.
$receipt = basename($a['name']);
$mail = new PHPMailer;
// SMTPDebug = 4 dumped the full SMTP transcript (including the auth
// exchange) into the response; keep it disabled in production.
//$mail->SMTPDebug = 4;
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'sg2plcpnl0054.prod.sin2.secureserver.net'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
$mail->SMTPSecure = 'ssl'; // Implicit SSL on port 465
$mail->Port = 465; // TCP port to connect to
$mail->SMTPOptions = array(
'ssl' => array(
'verify_peer' => false,
'verify_peer_name' => false,
'allow_self_signed' => true
)
);
$mail->From = '<EMAIL>';
$mail->FromName = "Registration @ Explora Academy of Design";
// NOTE(review): assumes generatepdf stored these in the session — confirm
// this page is only reached from that flow.
$mail->addAddress($_SESSION['email'], $_SESSION['name']); // Add a recipient
$mail->addAddress('<EMAIL>', 'Receipt'); // Add a recipient
$mail->addReplyTo('<EMAIL>', 'Information for receipt');
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Receipt for '.$_SESSION['name'];
$mail->Body = 'Hello <b>'.$_SESSION["name"].'</b>,<br/>
This mail is in reference to your current admission at <b>Explora Academy of Design<b/>.<br/>
Below attached is the receipt of payment made during registration. Please verify it and revert back if any changes or query arises.';
$mail->AddAttachment("Receipts/".$receipt.".pdf",$receipt.".pdf");
if(!$mail->send()) {
echo 'Message could not be sent.';
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
else {
unset($_SESSION['email']);
unset($_SESSION['name']);
echo '<script language="javascript">';
echo 'alert("Mail has been sent.")';
echo '</script>';
echo "<script type='text/javascript'> window.location='approved.php'</script>";
}
?>
</html><file_sep><?php
session_start();
require('view/fpdf.php');
// Create me a new pdf object:
include('connection.php');
// s_id is an integer primary key — cast the query-string value to int so
// that concatenating it into the SQL below cannot inject.
$id=intval($_GET['idv']);
$sql1='SELECT * FROM `registration` WHERE `s_id`='.$id.'';
$result1=$connection->query($sql1);
date_default_timezone_set("Asia/Kolkata");
// Default signatory names; overwritten with signature IMAGE paths in the
// fees section further down.
$signm='<NAME>';
$signa='<NAME>';
$signr='<NAME>';
// Renders the filled-in registration form as a PDF for each matching row
// (in practice one row, since s_id is the primary key) and writes it to
// Form/<form_no>.pdf. All coordinates are absolute mm positions tuned for
// the printed layout — do not reorder the drawing calls.
while($row=mysqli_fetch_array($result1))
{
$form=$row['form_no'];
$course=$row['course'];
$firstname=$row['first_name'];
// NOTE(review): surname/middle_name look swapped relative to the column
// names — confirm against the registration table schema before "fixing".
$surname=$row['middle_name'];
$middlename=$row['surname'];
$address=$row['address'];
$mobile=$row['mobile_no'];
$f_reg=$row['f_reg'];
$f_reg_date=$row['f_reg_date'];
$f_april=$row['f_april'];
$f_april_date=$row['f_april_date'];
$f_may=$row['f_may'];
$f_may_date=$row['f_may_date'];
$dob=$row['dob'];
$dob=date('d-m-Y',strtotime($dob));
$pimg="../".$row['img_path'];
$email=$row['email'];
// Stash recipient details for send_mail.php, which runs in the same session.
$_SESSION['email']=$email;
$_SESSION['name']=$firstname;
$pdf = new FPDF();
$image12="done2.png";
// Add a page to that object
$pdf->AddPage();
$pdf->setleftmargin(10);
$pdf->setX(10);
$pdf->setY(10);
// Add some text
$pdf->SetFont('Arial','B',10);
// width, height, text, no border, next line - below & left margin, alignement
// shree rang in centre
$pdf->Cell(200,10,'|| Shree Rang ||',0,1,"C");
//create rectangle and registration form
$pdf->SetFillColor(0,0,0);
$pdf->Rect(0, 20, 210, 10, 'F');
$pdf->SetTextColor(255,255,255);
$pdf->SetFont('Arial','B',15);
$pdf->Cell(200,10,'REGISTRATION FORM - 2016',0,1,"C");
// space below registration form
$pdf->Cell(210,5,'',0,1,"C");
//set font and height
$pdf->SetFont('Arial','B',12);
$pdf->SetTextColor(0,0,0);
//space before form no.
$pdf->Cell(10,5,'',0,0,"C");
$pdf->Cell(5,5,'FORM NO.',0,0,"C");
//space between rect and form no tag
$pdf->Cell(10,5,'',0,0,"C");
// create box for form
$pdf->Cell(30,5,$form,1,0,"C");
//space after box
$pdf->Cell(100,5,'',0,0,"C");
//display date
$pdf->SetFont('Arial','B',12);
$pdf->Cell(20,5,date("d-m-Y"),0,1,"C");
//add new row
$pdf->Cell(210,5,'',0,1,"C");
//Explora academy title
$pdf->SetFont('Helvetica','',25);
$pdf->Cell(200,15,'Explora Academy of Design',0,1,"C");
//add new row
$pdf->Cell(210,5,'',0,1,"C");
$pdf->SetFont('Arial','',12);
$pdf->SetTextColor(0,0,0);
//nata and all 3 courses
$pdf->Cell(80,5,'',0,0,"C");
// Course tick-box: the Image() call draws a check mark inside the bordered
// cell only when the course matches; otherwise the cell stays empty.
if(strtoupper($course)=="NATA")
{
$pdf->Cell( 10,5, $pdf->Image($image12, $pdf->GetX(), $pdf->GetY(),10,5), 1, 0, 'L', false );
$pdf->Cell(15,5,'NATA',0,1,"C");
}else
{
$pdf->Cell(10,5,'',1,0,"C");
$pdf->Cell(15,5,'NATA',0,1,"C");
}
$pdf->Cell(1,1,'',0,1,"C");
$pdf->Cell(210,4,'',0,1,"C");
$pdf->SetFont('Arial','',12);
$pdf->SetTextColor(0,0,0);
$pdf->Cell(80,0,'',0,0,"C");
if(strtoupper($course)=="INTERIOR DESIGN")
{
$pdf->Cell( 10,5, $pdf->Image($image12, $pdf->GetX(), $pdf->GetY(),10,5), 1, 0, 'L', false );
$pdf->Cell(41,5,'INTERIOR DESIGN',0,1,"C");
}else
{
$pdf->Cell(10,5,'',1,0,"C");
$pdf->Cell(41,5,'INTERIOR DESIGN',0,1,"C");
}
// ---- applicant details: name fields, each underlined with a thin Rect ----
$pdf->setFont('Arial','',12);
$pdf->Cell(20,15,'Surname',0,0,"L");
$pdf->setFont('Arial','',12);
$pdf->Cell(10,0,'',0,0,"C");
$pdf->Cell(40,15,$surname,0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(40,90, 114,0.5, 'F');
$pdf->setFont('Arial','',12);
$pdf->Cell(20,5,'Firstname',0,0,"L");
$pdf->setFont('Arial','',12);
$pdf->Cell(10,0,'',0,0,"C");
$pdf->Cell(40,5,$firstname,0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(40,100, 114,0.5, 'F');
// Explora icon
$pdf->setXY(160,61);
$image1 ="logo.png";
$pdf->Cell( 40, 40, $pdf->Image($image1, $pdf->GetX(), $pdf->GetY(), 33.78), 0, 1, 'L', false );
$pdf->setFont('Arial','',12);
$pdf->Cell(20,12,'Middlename',0,0,"L");
$pdf->setFont('Arial','',12);
$pdf->Cell(10,0,'',0,0,"C");
$pdf->Cell(40,12,$middlename,0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(40,109, 114,0.5, 'F');
//add new row
$pdf->Cell(210,10,'',0,20,"C");
$pdf->setFont('Arial','',12);
$pdf->Cell(20,-13,'Birthdate',0,0,"L");
$pdf->setFont('Arial','',12);
$pdf->Cell(10,0,'',0,0,"C");
$pdf->Cell(40,-13,$dob,0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(40,119, 114,0.5, 'F');
//add new row
$pdf->Cell(210,10,'',0,1,"C");
$pdf->setFont('Arial','',12);
$pdf->Cell(20,12,'E-mail',0,0,"L");
$pdf->setFont('Arial','',12);
$pdf->Cell(10,0,'',0,0,"C");
$pdf->Cell(40,12,$email,0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(40,129, 114,0.5, 'F');
//first address
$pdf->Cell(210,3,'',0,1,"C");
$pdf->setFont('Arial','',12);
$pdf->Cell(200,15,'Postal Address for Communication',0,1,"L");
// Long addresses are wrapped manually into two 87-character lines.
if(strlen($address)>87)
{
$addr1=str_split($address,87);
$addr=$addr1[0];
$addr2=$addr1[1];
$pdf->setFont('Arial','',10);
$pdf->Cell(0.01,0,'',0,0,"L");
$pdf->Cell(190,0,strtoupper($addr),0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(10.8,152, 190,0.5, 'F');
//second address
$pdf->Cell(210,9,'',0,1,"C");
$pdf->setFont('Arial','',10);
$pdf->Cell(0.01,0,'',0,0,"L");
$pdf->Cell(100,0,strtoupper($addr2),0,1,"L");
}
else
{
$pdf->setFont('Arial','',10);
$pdf->Cell(0.01,0,'',0,0,"L");
$pdf->Cell(190,0,strtoupper($address),0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(10.8,152, 190,0.5, 'F');
//second address
$pdf->Cell(210,9,'',0,1,"C");
$pdf->setFont('Arial','',10);
$pdf->Cell(0.01,0,'',0,0,"L");
$pdf->Cell(100,0,"",0,1,"L");
}
$pdf->Cell(115,5,'',0,0,"C");
$pdf->Cell(100,4,'M -',0,1,"L");
//second address underline
$pdf->SetFillColor(150,150,150);
$pdf->Rect(10.8,161, 110,0.5, 'F');
//mobile number underline
$pdf->SetFillColor(150,150,150);
// $pdf->Rect(133,160, 50,5, 'F');
$pdf->Cell(125,0,'',0,0,"C");
// One bordered cell per digit of the mobile number.
$mob0=str_split($mobile,1);
for($i=0;$i<strlen($mobile);$i++)
{
$mob1=$mob0[$i];
$pdf->Cell(5,-5,$mob1,1,0,"C");
}
//sign of the candidate
$pdf->Cell(200,3,'',0,1,"C");
$pdf->Cell(0,6,'Sign of the candidate',0,1,"L");
//use this as a horizontal line
$pdf->SetFillColor(150,150,150);
$pdf->Rect(148,170,25,0.5, 'F');
//box for the signature
$pdf->Cell(1,0,'',0,0,"C");
$pdf->Cell(45,10,'',1,1,"C");
//box for the photos
$pdf->setXY(160,102);
//$pdf->Cell( 30, 35, , 0, 1, 'L', false );
$pdf->Cell(1,0,'',0,0,"C");
$pdf->Cell(30,35,$pdf->Image($pimg, $pdf->GetX(), $pdf->GetY(), 30,35),1,1,"C");
//branch head
$pdf->setXY(145,162);
$pdf->Cell(10,10,' Branch Head (Vadodara)',0,1,"L");
$pdf->Cell(0,5,'<NAME> : 09924376644',0,1,"R");
$pdf->Cell(0,5,'<NAME> : 09998130018',0,1,"R");
//use this as a horizontal line below signature box
$pdf->SetFillColor(150,150,150);
$pdf->Rect(11,188,190,0.5, 'F');
//contact
$pdf->Cell(0,10,'',0,1,"L");
$pdf->Cell(0,5,'Website : www.explora.in',0,1,"L");
$pdf->Cell(0,7,'Contact us: <EMAIL>',0,1,"L");
//address
$pdf->setXY(117,188 );
$pdf->setFont('Arial','B',10);
$pdf->Cell(0,5,'Address : ',0,1,"L");
$pdf->setFont('Arial','',10);
$pdf->setXY(117,193 );
$pdf->Cell(0,5,'Studio-2, First Floor, Cross Road Complex.',0,1,"L");
$pdf->setXY(117,198);
$pdf->Cell(0,5,'Next to Mr. Puff, Near to Domino\'s Pizza',0,1,"L");
$pdf->setXY(117,203);
$pdf->Cell(0,5,'Subhanpura, Vadodara-390023',0,1,"L");
$pdf->Cell(0,5,'',0,1,"L");
//use this as a horizontal line below signature box
$pdf->SetFillColor(10,10,10);
$pdf->Rect(11,208,190,1, 'F');
//for office use only
$pdf->SetFont('Arial','B',12);
$pdf->Cell(0,10,'For Office Use Only',0,1,"C");
$pdf->SetFont('Arial','B',12);
$pdf->Cell(0,8,'Fees Details',0,1,"L");
$pdf->SetFont('Arial','',10);
$pdf->Cell(37,0,'',0,0,"L");
$pdf->Cell(0,5,'Cash / Cheque',0,0,"L");
$pdf->Cell(-85,0,'',0,0,"L");
$pdf->Cell(47,5,'Date',0,0,"L");
$pdf->Cell(0,5,'Authorized Signature',0,1,"L");
$pdf->SetFont('Arial','',12);
$pdf->Cell(0,7,'Registration',0,0,"L");
// From here on $signr/$signa/$signm hold signature IMAGE paths, replacing
// the name strings assigned before the loop.
$signr='Manoj_sign.jpg';
// Blank out the registration-fee row when nothing has been paid.
if($f_reg==0)
{
$f_reg="";
$signr="";
$f_reg_date="";
}
// create box for form
$pdf->Cell(0,0,'',0,1,"L");
$pdf->Cell(30,5,'',0,0,"L");
$pdf->Cell(42,8 ,$f_reg,1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,date("d-m-Y"),1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
//$pdf->Cell(42,8,$signr,1,1,"C");
$pdf->Cell(42,8, $pdf->Image($signr, $pdf->GetX(), $pdf->GetY(),42,8), 1, 1, 'C', false );
$signa="Manoj_sign.jpg";
$pdf->SetFont('Arial','',12);
$pdf->Cell(0,18,'April',0,0,"L");
if($f_april==0)
{
$f_april="";
// NOTE(review): unpaid rows use placeholder 'sign.jpg' here but "" for the
// registration row above — confirm which fallback is intended.
$signa="sign.jpg";
$f_april_date="";
}
$pdf->Cell(0,5,'',0,1,"L");
$pdf->Cell(30,5,'',0,0,"L");
$pdf->Cell(42,8 ,$f_april,1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,$f_april_date,1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8, $pdf->Image($signa, $pdf->GetX(), $pdf->GetY(),42,8), 1, 1, 'C', false );
$signm="Manoj_sign.jpg";
$pdf->SetFont('Arial','',12);
$pdf->Cell(0,18,'May',0,0,"L");
if($f_may==0)
{
$f_may="";
$signm="sign.jpg";
$f_may_date="";
}
$pdf->Cell(0,5,'',0,1,"C");
$pdf->Cell(30,5,'',0,0,"C");
$pdf->Cell(42,8 ,$f_may,1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,$f_may_date,1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8, $pdf->Image($signm, $pdf->GetX(), $pdf->GetY(),42,8), 1, 1, 'C', false );
$pdf->SetFillColor(10,10,10);
$pdf->Rect(11,278,190,1, 'F');
// $pdf->Output("");
// Write the filled registration form to disk; send_mail.php attaches it.
$pdf->Output("Form/".$form.".pdf");
}
//echo '<script> window.location="send_mail.php?name=';
//echo $form;
//echo'"</script>';
?>
<?php
// Second pass over the same registration record: build the fee-receipt PDF.
// Cast the id to int so concatenating it into the query cannot inject SQL.
$id=intval($_GET['idv']);
$sql1='SELECT * FROM `registration` WHERE `s_id`='.$id.'';
$result1=$connection->query($sql1);
date_default_timezone_set("Asia/Kolkata");
// Renders the payment receipt as a square 210x210mm PDF and writes it to
// Receipts/<form_no>.pdf. Coordinates are absolute mm positions tuned for
// the printed layout — do not reorder the drawing calls.
while($row=mysqli_fetch_array($result1))
{
$form=$row['form_no'];
$address=$row['address'];
$mobile=$row['mobile_no'];
$f_reg=$row['f_reg'];
$f_april=$row['f_april'];
$f_may=$row['f_may'];
// Show the most recent instalment that has actually been paid.
$total_fees=$f_reg;
if($f_may==0)
{
if($f_april==0)
{
$total_fees=$f_reg;
}
else
{
$total_fees=$f_april;
}
}
else
{
$total_fees=$f_may;
}
$pdf = new FPDF('P','mm',array(210,210));
// Add a page to that object
$pdf->AddPage();
$pdf->setleftmargin(10);
$pdf->setX(20);
$pdf->setY(10);
$pdf->SetFillColor(0,0,0);
$pdf->Cell(190,140,'',1,0,"C");
$pdf->setX(20);
$pdf->setY(15);
$pdf->SetFillColor(0,0,0);
$image1='logo.png';
// Add some text
$pdf->SetFont('Arial','',20);
// width, height, text, no border, next line - below & left margin, alignement
// shree rang in centre
$pdf->Cell(175,10,' Explora Academy of Design',0,0,"C");
$pdf->Cell( 15, 10, $pdf->Image($image1, $pdf->GetX(), $pdf->GetY(), 10), 0, 1, 'R', false );
$pdf->SetFillColor(0,0,0);
$pdf->Rect(20,26, 175,1,'F');
$pdf->Cell(10,8,'',0,1,"C");
//set font and height
$pdf->SetFont('Arial','',12);
$pdf->SetTextColor(0,0,0);
//space before form no.
$pdf->Cell(20,10,'',0,0,"C");
$pdf->Cell(20,7,'RECEIPT NUMBER:',0,0,"C");
//space between rect and form no tag
$pdf->Cell(15,10,'',0,0,"C");
// create box for form
$pdf->Cell(30,7,$form,1,0,"C");
//space after box
$pdf->Cell(30,10,'',0,0,"C");
$pdf->Cell(15,7,'DATE:',0,0,"C");
//display date
$pdf->SetFont('Arial','',12);
$pdf->Cell(20,7,date("d-m-Y"),0,1,"L");
$pdf->SetFillColor(155,155,155);
$pdf->Rect(140,39, 25,0.5,'F');
$pdf->SetFillColor(0,0,0);
$pdf->Cell(10,4,'',0,1,"C");
$pdf->Cell(10,10,'',0,0,"C");
$pdf->Cell(52,7,'Received with thanks from ',0,0,"L");
//display date
$pdf->SetFont('Arial','',12);
// NOTE(review): $surname/$firstname/$middlename are leftovers from the last
// iteration of the form-generation loop above; this only works because both
// loops query the same single registration row — confirm intentional.
$pdf->Cell(100,7,$surname." ".$firstname." ".$middlename,0,1,"L");
$pdf->SetFillColor(155,155,155);
$pdf->Rect(73,50, 123,0.5,'F');
$pdf->SetFillColor(0,0,0);
$pdf->Cell(10,4,'',0,1,"C");
$pdf->Cell(29,7,'A sum of ',0,0,"R");
// One bordered cell per digit of the amount.
$totalfees0=str_split($total_fees,1);
$length=strlen($total_fees);
for($i=0;$i<$length;$i++)
{
$totalfees1=$totalfees0[$i];
$pdf->Cell(8,6,$totalfees1,1,0,"C");
}
$pdf->Cell(59,7,'rupees by cash/Cheque No.',0,0,"R");
// Eight empty boxes for a hand-written cheque number.
for($i=0;$i<8;$i++)
{
$pdf->Cell(6,6,' ',1,0,"C");
}
$pdf->Cell(10,5,'',0,1,"C");
$pdf->Cell(10,5,'',0,1,"C");
$pdf->Cell(26.5,7,'Address',0,0,"R");
$pdf->SetFont('Arial','',12);
$pdf->Cell(120,7,$address,0,1,"L");
$pdf->SetFillColor(155,155,155);
$pdf->Rect(38,71, 157,0.5,'F');
$pdf->Cell(26.5,7,'',0,0,"R");
$pdf->Cell(120,7,'',0,1,"L");
$pdf->Rect(38,79, 157,0.5,'F');
$pdf->Cell(10,4,'',0,1,"C");
$pdf->SetFillColor(0,0,0);
$pdf->Cell(41,7,'Phone No. (M) :',0,0,"R");
$pdf->SetFont('Arial','',12);
// One bordered cell per digit of the mobile number.
$mob0=str_split($mobile,1);
for($i=0;$i<strlen($mobile);$i++)
{
$mob1=$mob0[$i];
$pdf->Cell(6,6,$mob1,1,0,"C");
}
$pdf->SetFont('Arial','B',12);
$pdf->Cell(10,5,'',0,1,"C");
$pdf->Cell(10,5,'',0,1,"C");
$pdf->Cell(56,7,'Authorized Signature :',0,0,"R");
$pdf->Cell(15,7,'',0,0,"C");
$pdf->SetFont('Arial','',12);
$pdf->Cell(56,7,'Name',0,1,"R");
$image2='Manoj_sign.jpg';
$pdf->Cell(10,2,'',0,1,"C");
$pdf->Cell(55,7,'',0,0,"C");
$pdf->Cell(50,10, $pdf->Image($image2, $pdf->GetX(), $pdf->GetY(), 50,10), 1, 0, 'R', false );
$pdf->Cell(10,7,'',0,0,"C");
$pdf->Cell(50,7,'<NAME>',1,1,"C");
$pdf->SetFont('Arial','',11);
$pdf->Cell(10,4,'',0,1,"C");
$pdf->Cell(8.5,50,'',0,0,"C");
$pdf->Cell(120,5,'Subject to Realisation in case of Cheques.',0,1,"L");
$pdf->SetFont('Arial','',11);
$pdf->Cell(8.5,5,'',0,0,"C");
$pdf->Cell(120,5,'Subject to Vadodara Jurisdiction.',0,1,"L");
$pdf->SetFillColor(155,155,155);
$pdf->Rect(19,124, 175,0.5,'F');
$pdf->SetFillColor(0,0,0);
$pdf->SetFont('Arial','B',12);
$pdf->Cell(10,3,'',0,1,"C");
$pdf->Cell(8.5,5,'',0,0,"C");
$pdf->Cell(40,5,'Address :',0,1,"L");
$pdf->SetFont('Arial','',11);
$pdf->Cell(8.5,5,'',0,0,"C");
$pdf->Cell(40,5,'Studio-2, First Floor, Cross Road Complex.',0,0,"L");
$pdf->Cell(60,5,'',0,0,"C");
$pdf->Cell(40,5,'<NAME> : 09924376644',0,1,"L");
$pdf->Cell(8.5,5,'',0,0,"C");
$pdf->Cell(40,5,'Next to Mr. Puff, Near to Domino'.'s Pizza,',0,0,"L");
$pdf->Cell(60,5,'',0,0,"C");
$pdf->Cell(40,5,'<NAME> : 09998130018',0,1,"L");
$pdf->Cell(8.5,5,'',0,0,"C");
$pdf->Cell(40,5,'Subhanpura, Vadodara-390023.',0,1,"L");
$pdf->SetFont('Arial','',8);
$pdf->Cell(10,4,'',0,1,"C");
$pdf->Cell(8.5,5,'',0,0,"C");
$pdf->Cell(40,5,'NOTE: FEES WILL NOT BE REFUNDABLE AFTER ONE WEEK',0,0,"L");
$pdf->Cell(60,10,'',0,0,"C");
$pdf->Rect(116,151.5, 2,1.5,'F');
$pdf->Cell(40,5,'Registration fees for 2016 NATA test',0,1,"L");
$pdf->Cell(108.5,10,'',0,0,"C");
$pdf->SetFont('Arial','',8);
$pdf->Cell(40,5,'WWW.EXPLORA.IN',0,1,"L");
$pdf->SetFillColor(155,155,155);
$pdf->Rect(5,160, 200,0.5,'F');
//$pdf->Output();
// Write the receipt to disk; send_mail.php attaches it by form number.
$pdf->Output("Receipts/".$form.".pdf");
}
echo '<script> window.location="send_mail.php?name=';
echo $form;
echo'"</script>';
?>
<file_sep><?php
session_start();
?>
<!DOCTYPE html>
<html>
<head>
<title>Send Email</title>
<link href="css/materialize.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<link rel="shortcut icon" href="../exploralocal/images/favicon.ico">
</head>
<?php
require '../master/PHPMailerAutoload.php';
$a= $_GET ;
$mail = new PHPMailer;
//$mail->SMTPDebug = 2;
$email=$_SESSION['email'];
$name=$_SESSION['name'];
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'sg2plcpnl0054.prod.sin2.secureserver.net'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
$mail->SMTPSecure = 'ssl'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 465; // TCP port to connect to
$mail->SMTPOptions = array(
'ssl' => array(
'verify_peer' => false,
'verify_peer_name' => false,
'allow_self_signed' => true
)
);
$mail->From = '<EMAIL>';
$mail->FromName = "Registration @ Explora Academy of Design";
$mail->addAddress($_SESSION['email'], $_SESSION['name']); // Add a recipient
$mail->addAddress('<EMAIL>', 'Receipt'); // Add a recipient
$mail->addReplyTo('<EMAIL>', 'Information for receipt');
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Receipt for '.$_SESSION['name'];
$mail->Body = 'Hello <b>'.$_SESSION["name"].'</b>,<br/>
This mail is in reference to your current admission at <b>Explora Academy of Design<b/>.<br/>
Below attached is the receipt of payment made during registration. Please verify it and revert back if any changes or query arises.';
$mail->AddAttachment("Receipts/".$a['name'].".pdf",$a['name'].".pdf");
$mail->AddAttachment("Form/".$a['name'].".pdf","stu-".$a['name'].".pdf");
if(!$mail->send())
{
$mail->SMTPDebug = 2;
echo '<script language="javascript">';
echo 'alert("Message could not be sent.")';
echo '</script>';
echo "<script type='text/javascript'> window.location='approved.php'</script>";
}
else
{
$form=$a['name'];
echo '<script language="javascript">';
echo 'alert("Mail has been sent.")';
echo '</script>';
echo "<script type='text/javascript'> window.location='approved.php'</script>";
//echo "<script type='text/javascript'> window.location='send_mail_s.php?name=$form'</script>";
}
?>
</html><file_sep><!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1.0, user-scalable=no"/>
<title>Key People::Explora Academy of Design</title>
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
<link href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.97.0/css/materialize.min.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<link href="css/style.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<link href="css/keypeople.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<link rel="icon" href="media/logo.png">
</head>
<body>
<ul id='dropdown1' class='dropdown-content'>
<li><a href="history.html">History</a></li>
<li><a href="#">Key People</a></li>
<li><a href="#our_course" class="modal-trigger">Course</a></li>
<li><a href="methodology.html">Teaching Methodology</a></li>
</ul>
<ul id='dropdown2' class='dropdown-content'>
<li><a href="history.html">History</a></li>
<li><a href="#">Key People</a></li>
<li><a href="#our_course" class="modal-trigger">Course</a></li>
<li><a href="methodology.html">Teaching Methodology</a></li>
</ul>
<nav role="navigation">
<div class="nav-wrapper container">
<a id="logo-container" href="index.html" class="brand-logo">
<img src="media/logo.png" class="responsive-img logo1">
<span class="headername">Explora Academy of Design</span>
</a>
<ul class="right hide-on-med-and-down">
<li><a class="dropdown-button" href="#!" data-activates="dropdown1">About Us<i class="material-icons suffix" style="float: right;">keyboard_arrow_down</i></a></li>
<li><a href="results.php">Result</a></li>
<li><a href="#">Gallery</a></li>
<li><a href="#">Contact Us</a></li>
<li><a href="tel:9924376644"><i class="material-icons prefix" style="float: left;">phone</i><span style="float: left">9924376644</span></a></li>
</ul>
<ul id="nav-mobile" class="side-nav">
<li><a href="index.html">Home</a></li>
<li><a class="dropdown-button" href="#!" data-activates="dropdown2">About Us<i class="material-icons" style="float: right;vertical-align:bottom; padding-top: 17px;">keyboard_arrow_right</i></a></li>
<li><a href="results.php">Result</a></li>
<li><a href="#">Gallery</a></li>
<li><a href="#">Contact Us</a></li>
<li><a href="tel:9924376644"><i class="material-icons prefix" style="float: left;margin-top:17px;">phone</i><span style="float: left;vertical-align:top">9924376644</span></a></li>
</ul>
<a href="#" data-activates="nav-mobile" class="button-collapse"><i class="material-icons">menu</i></a>
</div>
</nav>
<div id="our_course" class="modal">
<div class="modal-content modal-course">
<div class="card blue-grey darken-1">
<div class="card-titl"><h4>Our Course</h4></div>
<div class="card-content notthisone thiscard white-text">
<h5><strong>NATA </strong></h5>
<p>The National Aptitude Test in Architecture (<strong>NATA</strong>), is an examination conducted for admission to undergraduate Architecture courses at any recognized institute in India. It judges the basic designing aptitude of the applicants. It is mandatory for admission to a recognized undergraduate architecture course anywhere in India.</p>
<br/>
<p><b>How to apply for NATA 2015:</b><br>
Candidates need to apply NATA 2015 online application form through <a href="http://nata.in">www.nata.in</a>. Candidate must fill form in appropriate manner with all mandatory details.</p>
<br/>
<p><b>NATA Exam 2015 Eligibility:</b><br>
Candidates should have qualified 10+2 or equivalent from a recognized Board/University with Mathematics as a subject of examination with at least 50% aggregate marks <b><u>OR</u></b> Candidates should have qualified 10+3 Diploma (any stream) recognized by Central/ State Governments with 50% aggregate marks</p>
<br/>
<p><b>NATA 2015 Question paper consists of two parts:</b><br>
Drawing Test – 100 marks <br>
Duration of test – 2 hours <br><br>
Aesthetic Sensitivity Test – 100 marks<br>
Duration of test – 1 hour</p>
<br/>
<p><b>NATA 2015 Syllabus, Test Structure:</b><br>
The exam will be of 3 hrs. One has to clear the NATA eligibility criteria before applying. The test is divided into 2 sections. As per the Council of Architecture, a candidate applying to any school/ college of Architecture in India should have achieved 40% score in NATA, i.e. at least 80/200, for consideration of admission into first year B.Arch course.</p>
<br/>
<h5>Test 1</h5>
<h6>The drawing aptitude of the candidate shall be judged on the following aspects</h6>
<ol>
<li>Ability to sketch</li>
<li>Visualizing and Drawing</li>
<li>Sense of Perspective Drawing</li>
<li>Creating 2D and 3D Composition</li>
<li>Understanding of scale and sense of proportion</li>
<li>Memory drawing from experience</li>
</ol>
<br/>
<h5>Test 2</h5>
<h6>Aesthetic sensitivity is to evaluate candidates perception, imagination, observation, creativity and communication and architectural awareness.</h6><br>
<h6>The test shall be of objective type and comprise of topics related to:</h6>
<ol>
<li>Visualizing 3D objects from 2D drawings</li>
<li>Visualizing different sides of 3D objects</li>
<li>Analytical Reasoning</li>
<li>Mental Ability</li>
<li>Imaginative Comprehension and Expression</li>
<li>Architectural Awareness</li>
</ol>
</div>
</div>
</div>
<div class="modal-footer">
<button class="btn waves-effect waves-light submeet coursemodal blue-grey " >Okay <i class="material-icons">done_all</i></button>
</div>
</div>
<div class="container">
<div class="row">
<h2 class="col s12 grey-text center">Key People</h2>
</div>
</div>
<div class="container">
<div class="row rowcontainer">
<div class="maintwocard">
<div class="col s12 m12 l6 ">
<div class="card-panel grey lighten-5 z-depth-1">
<div class="row valign-wrapper">
<div class="col s4 ">
<img src="media/KeyPeople/1.jpg" alt="" class="circle responsive-img keypeople"
data-caption="<NAME>">
<!-- notice the "circle" class -->
</div>
<div class="col s8">
<span class="black-text">
<span class="founder"><b><NAME> (Founder)</b></span><hr>
B.Arch from APIED<hr>
M.Tech
(climate change and sustainable development),
Cept University, Ahmedabad.<hr>
Currently involved in academic research & professional field.
</span>
</div>
</div>
</div>
</div>
<div class="col s12 m12 l6 ">
<div class="card-panel grey lighten-5 z-depth-1">
<div class="row valign-wrapper">
<div class="col s4">
<img src="media/KeyPeople/2.jpg" alt="" class="circle responsive-img keypeople">
<!-- notice the "circle" class -->
</div>
<div class="col s8">
<span class="black-text">
<span class="founder"><b><NAME> (Founder)</b></span><hr>
B.Arch(GoldMedalist) from SVIT, Vasad<hr>
Studying M.Planning: SP University, APIED, Vidhyanagar<hr>
Currently involved in professional field.<br><br>
</span>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="container">
<div class="row">
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/3.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME>(Admin)</p>
</div>
</div>
</div>
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/4.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div>
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/5.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div>
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/6.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div><div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image circle responsive-img">
<img src="media/KeyPeople/7.jpg" >
<span class="card-title"></span>
</div>
<div class="card-content">
<p>ID. <NAME></p>
</div>
</div>
</div><div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/8.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div>
</div>
<div class="row">
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/9.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div>
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/10.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div>
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/11.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div>
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/12.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div><div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image circle responsive-img">
<img src="media/KeyPeople/13.jpg" >
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div><div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/14.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div>
</div>
<div class="row">
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/15.jpg">
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div>
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/16.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div>
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/17.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p>Zeal Shah</p>
</div>
</div>
</div>
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/18.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div><div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image circle responsive-img">
<img src="media/KeyPeople/19.jpg" >
<span class="card-title"></span>
</div>
<div class="card-content">
<p>Alifiya Lanewala</p>
</div>
</div>
</div><div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/20.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div>
</div>
<div class="row">
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/21.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div>
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/22.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div>
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/23.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div>
<div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/24.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div><div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image circle responsive-img">
<img src="media/KeyPeople/25.jpg" >
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div><div class="col s6 m3 l2 ">
<div class="card">
<div class="card-image">
<img src="media/KeyPeople/26.jpg">
<span class="card-title"></span>
</div>
<div class="card-content">
<p><NAME></p>
</div>
</div>
</div>
</div>
</div>
</div>
<footer class="page-footer grey">
<div class="container footer">
<div class="row">
<div class="col l6 s6">
<h5 class="white-text">Explora Academy of Design</h5>
<p class="grey-text text-lighten-4">We are a team of college students working on this project like it's our full time job. Any amount would help support and continue development on this project and is greatly appreciated.</p>
</div>
<div class="col l3 s6">
<h5 class="white-text">Quick Links</h5>
<ul >
<li><a href="key_people.html" class="white-text">Key People</a></li>
<li><a href="#our_course" class="white-text modal-trigger">Course</a></li>
<li><a href="results.php" class="white-text">Results</a></li>
<li><a href="gallery.php" class="white-text">Gallery</a></li>
<li><a href="#contact" class="white-text modal-trigger">Contact Us</a></li>
</ul>
</div>
<div class="col l3 s6">
<h5 class="white-text">Connect</h5>
<ul>
<li><a class="white-text" href="#!">Link 1</a></li>
<li><a class="white-text" href="#!">Link 2</a></li>
<li><a class="white-text" href="#!">Link 3</a></li>
<li><a class="white-text" href="#!">Link 4</a></li>
</ul>
</div>
</div>
</div>
<div class="footer-copyright">
<div class="container center">
Developed by <a class="white-text text-lighten-3" href="#"><NAME> and <NAME></a>
</div>
</div>
</footer>
<!-- Scripts-->
<script src="https://code.jquery.com/jquery-2.1.4.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.97.0/js/materialize.min.js"></script>
<script src="js/init.js"></script>
</body>
</html>
<file_sep><?php
require('fpdf.php');
// Create me a new pdf object:
include('include_db.php');
if (isset($_GET["idv"]))
{
$idv = $_GET["idv"];
}
$sql1="SELECT * FROM `registration` WHERE s_id=$idv";
$result1=$con->query($sql1);
while($row1=$result1->fetch_assoc())
{
$form=$row1['s_id'];
$lastname=$row1['middle_name'];
$photo=$row1['img_path'];
$date=$row1['f_reg_date'];
$mobile=$row1['mobile_no'];
$cash=20000;
$address=$row1['address'];
$birthdate=$row1['dob'];
$firstname=$row1['first_name'];
$surname=$row1['surname'];
$course=$row1['course'];
$pdf = new FPDF();
$image12="done2.png";
$logo="logo.png";
// Add a page to that object
$pdf->AddPage();
$pdf->setleftmargin(10);
$pdf->setX(10);
$pdf->setY(10);
// Add some text
$pdf->SetFont('Arial','B',10);
$text="hello";
// width, height, text, no border, next line - below & left margin, alignement
// shree rang in centre
$pdf->Cell(200,10,'|| <NAME> ||',0,1,"C");
//create rectangle and registration form
$pdf->SetFillColor(0,0,0);
$pdf->Rect(0, 20, 210, 10, 'F');
$pdf->SetTextColor(255,255,255);
$pdf->SetFont('Arial','B',15);
$pdf->Cell(200,10,'REGISTRATION FORM - 2016',0,1,"C");
// space below registration form
$pdf->Cell(210,5,'',0,1,"C");
//set font and height
$pdf->SetFont('Arial','B',12);
$pdf->SetTextColor(0,0,0);
//space before form no.
$pdf->Cell(10,5,'',0,0,"C");
$pdf->Cell(5,5,'FORM NO.',0,0,"C");
//space between rect and form no tag
$pdf->Cell(10,5,'',0,0,"C");
// create box for form
$pdf->Cell(30,5,$form,1,0,"C");
//space after box
$pdf->Cell(100,5,'',0,0,"C");
//display date
$pdf->SetFont('Arial','B',12);
$pdf->Cell(20,5,$date,0,1,"C");
//add new row
$pdf->Cell(210,5,'',0,1,"C");
//Explora academy title
$pdf->SetFont('Helvetica','',25);
$pdf->Cell(200,15,'Explora Academy of Design',0,1,"C");
//add new row
$pdf->Cell(210,5,'',0,1,"C");
$pdf->SetFont('Arial','',12);
$pdf->SetTextColor(0,0,0);
//nata and all 3 courses
$pdf->Cell(80,5,'',0,0,"C");
if(strtoupper($course)=="NATA")
{
$pdf->Cell( 10,5, $pdf->Image($image12, $pdf->GetX(), $pdf->GetY(),10,5), 1, 0, 'L', false );
$pdf->Cell(15,5,'NATA',0,1,"C");
}else
{
$pdf->Cell(10,5,'',1,0,"C");
$pdf->Cell(15,5,'NATA',0,1,"C");
}
$pdf->Cell(1,1,'',0,1,"C");
$pdf->Cell(210,4,'',0,1,"C");
$pdf->SetFont('Arial','',12);
$pdf->SetTextColor(0,0,0);
$pdf->Cell(80,0,'',0,0,"C");
if(strtoupper($course)=="INTERIOR DESIGN")
{
$pdf->Cell( 10,5, $pdf->Image($image12, $pdf->GetX(), $pdf->GetY(),10,5), 1, 0, 'L', false );
$pdf->Cell(41,5,'INTERIOR DESIGN',0,1,"C");
}else
{
$pdf->Cell(10,5,'',1,0,"C");
$pdf->Cell(41,5,'INTERIOR DESIGN',0,1,"C");
}
$pdf->Cell(210,5,'',0,1,"C");
$pdf->SetFont('Arial','',12);
$pdf->SetTextColor(0,0,0);
$pdf->Cell(80,5,'',0,0,"C");
if(strtoupper($course)=="JEE ( B.ARCH )")
{
$pdf->Cell( 10,5, $pdf->Image($image12, $pdf->GetX(), $pdf->GetY(),10,5), 1, 0, 'L', false );
$pdf->Cell(33,5,'JEE ( B.ARCH )',0,0,"C");
}else
{
$pdf->Cell(10,5,'',1,0,"C");
$pdf->Cell(33,5,'JEE ( B.ARCH )',0,0,"C");
}
// Explora icon
$pdf->setXY(160,61);
$pdf->Cell( 40, 40, $pdf->Image($logo, $pdf->GetX(), $pdf->GetY(), 33.78), 0, 1, 'L', false );
$pdf->setFont('Arial','',12);
$pdf->Cell(20,0,'Surname',0,0,"L");
$pdf->setFont('Arial','',12);
$pdf->Cell(10,0,'',0,0,"C");
$pdf->Cell(40,0,$surname,0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(40,104, 114,0.5, 'F');
//add new row
$pdf->Cell(210,10,'',0,1,"C");
$pdf->setFont('Arial','',12);
$pdf->Cell(20,0,'Firstname',0,0,"L");
$pdf->setFont('Arial','',12);
$pdf->Cell(10,0,'',0,0,"C");
$pdf->Cell(40,0,$firstname,0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(40,114, 114,0.5, 'F');
//add new row
$pdf->Cell(210,10,'',0,1,"C");
$pdf->setFont('Arial','',12);
$pdf->Cell(20,0,'Lastname',0,0,"L");
$pdf->setFont('Arial','',12);
$pdf->Cell(10,0,'',0,0,"C");
$pdf->Cell(40,0,$lastname,0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(40,124, 114,0.5, 'F');
//add new row
$pdf->Cell(210,10,'',0,1,"C");
$pdf->setFont('Arial','',12);
$pdf->Cell(20,0,'Birthdate',0,0,"L");
$pdf->setFont('Arial','',12);
$pdf->Cell(10,0,'',0,0,"C");
$pdf->Cell(40,0,$birthdate,0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(40,134, 114,0.5, 'F');
//first address
$pdf->Cell(210,5,'',0,1,"C");
$pdf->setFont('Arial','',12);
$pdf->Cell(200,12,'Postal Address for Communication',0,1,"L");
if(strlen($address)>90)
{
$addr1=str_split($address,90);
$addr=$addr1[0];
$addr2=$addr1[1];
$pdf->setFont('Arial','',10);
$pdf->Cell(0.01,0,'',0,0,"L");
$pdf->Cell(190,0,strtoupper($addr),0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(10.8,151, 190,0.5, 'F');
//second address
$pdf->Cell(210,9,'',0,1,"C");
$pdf->setFont('Arial','',10);
$pdf->Cell(0.01,0,'',0,0,"L");
$pdf->Cell(100,0,strtoupper($addr2),0,1,"L");
}
else
{
$pdf->setFont('Arial','',10);
$pdf->Cell(0.01,0,'',0,0,"L");
$pdf->Cell(190,0,strtoupper($address),0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(10.8,151, 190,0.5, 'F');
//second address
$pdf->Cell(210,9,'',0,1,"C");
$pdf->setFont('Arial','',10);
$pdf->Cell(0.01,0,'',0,0,"L");
$pdf->Cell(100,0,"",0,1,"L");
}
$pdf->Cell(115,5,'',0,0,"C");
$pdf->Cell(100,4,'M -',0,1,"L");
//second address underline
$pdf->SetFillColor(150,150,150);
$pdf->Rect(10.8,160, 110,0.5, 'F');
//mobile number underline
$pdf->SetFillColor(150,150,150);
// $pdf->Rect(133,160, 50,5, 'F');
$pdf->Cell(125,0,'',0,0,"C");
$mob0=str_split($mobile,1);
for($i=0;$i<strlen($mobile);$i++)
{
$mob1=$mob0[$i];
$pdf->Cell(5,-5,$mob1,1,0,"C");
}
//sign of the candidate
$pdf->Cell(200,5,'',0,1,"C");
$pdf->Cell(0,10,'Sign of the candidate',0,1,"L");
//use this as a horizontal line
$pdf->SetFillColor(150,150,150);
$pdf->Rect(149,170,23,0.5, 'F');
//box for the signature
$pdf->Cell(1,0,'',0,0,"C");
$pdf->Cell(45,10,'',1,1,"C");
//box for the photos
$pdf->setXY(160,102);
$image1 ="a.jpg";
//$pdf->Cell( 30, 35, , 0, 1, 'L', false );
$pdf->Cell(1,0,'',0,0,"C");
$pdf->Cell(30,35,$pdf->Image($image12, $pdf->GetX(), $pdf->GetY(), 30,35),1,1,"C");
//branch head
$pdf->setXY(148,162);
$pdf->Cell(0,10,' Branch Head (Vadodara)',0,1,"L");
$pdf->Cell(0,5,'<NAME> : 09924376644',0,1,"R");
$pdf->Cell(0,5,'<NAME> : 09998130018',0,1,"R");
//use this as a horizontal line below signature box
$pdf->SetFillColor(150,150,150);
$pdf->Rect(11,188,190,0.5, 'F');
//contact
$pdf->Cell(0,10,'',0,1,"L");
$pdf->Cell(0,5,'Website : www.explora.in',0,1,"L");
$pdf->Cell(0,7,'Contact us: <EMAIL>',0,1,"L");
//address
$pdf->setXY(117,188 );
$pdf->setFont('Arial','B',10);
$pdf->Cell(0,5,'Address : ',0,1,"L");
$pdf->setFont('Arial','',10);
$pdf->setXY(117,193 );
$pdf->Cell(0,5,'Studio-2, First Floor, Cross Road Complex.',0,1,"L");
$pdf->setXY(117,198);
$pdf->Cell(0,5,'Next to Mr. Puff, Near to Domino\'s Pizza',0,1,"L");
$pdf->setXY(117,203);
$pdf->Cell(0,5,'Subhanpura, Vadodara-390023',0,1,"L");
$pdf->Cell(0,5,'',0,1,"L");
//use this as a horizontal line below signature box
$pdf->SetFillColor(10,10,10);
$pdf->Rect(11,208,190,1, 'F');
//for office use only
$pdf->SetFont('Arial','B',12);
$pdf->Cell(0,10,'For Office Use Only',0,1,"C");
$pdf->SetFont('Arial','B',12);
$pdf->Cell(0,8,'Fees Details',0,1,"L");
$pdf->SetFont('Arial','',10);
$pdf->Cell(37,0,'',0,0,"L");
$pdf->Cell(0,5,'Cash / Cheque',0,0,"L");
$pdf->Cell(-85,0,'',0,0,"L");
$pdf->Cell(47,5,'Date',0,0,"L");
$pdf->Cell(0,5,'Authorized Signature',0,1,"L");
$pdf->SetFont('Arial','',12);
$pdf->Cell(0,7,'Registration',0,0,"L");
// create box for form
$pdf->Cell(0,0,'',0,1,"L");
$pdf->Cell(30,5,'',0,0,"L");
$pdf->Cell(42,8 ,$cash,1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,$date,1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,"manoj",1,1,"C");
$pdf->SetFont('Arial','',12);
$pdf->Cell(0,18,'April',0,0,"L");
$pdf->Cell(0,5,'',0,1,"L");
$pdf->Cell(30,5,'',0,0,"L");
$pdf->Cell(42,8 ,'20000',1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,'02/05/2015',1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,"manoj",1,1,"C");
$pdf->SetFont('Arial','',12);
$pdf->Cell(0,18,'May',0,0,"L");
$pdf->Cell(0,5,'',0,1,"L");
$pdf->Cell(30,5,'',0,0,"L");
$pdf->Cell(42,8 ,'',1,0,"L");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,'',1,0,"L");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,'',1,1,"L");
$pdf->SetFillColor(10,10,10);
$pdf->Rect(11,278,190,1, 'F');
/*
$pdf->Output($form.".pdf");
header( 'Location: sendmail.php?name='.$form );
*/
$pdf->Output("");
}
?><file_sep> <?php
session_start();
require('view/fpdf.php');
// Create me a new pdf object:
include 'connection.php';
$id=$_GET['idv'];
$sql1='SELECT * FROM `registration` WHERE `s_id`='.$id;
$result1 = mysqli_query($connection, $sql1) or die("Error in Selecting " . mysqli_error($connection));
date_default_timezone_set("Asia/Kolkata");
$signm='<NAME>';
$signa='<NAME>';
$signr='<NAME>';
while($row=mysqli_fetch_array($result1))
{
$form=$row['form_no'];
$course=$row['course'];
$firstname=$row['first_name'];
$lastname=$row['middle_name'];
$surname=$row['surname'];
$address=$row['address'];
$mobile=$row['mobile_no'];
$f_reg=$row['f_reg'];
$f_reg_date=$row['f_reg_date'];
$f_april=$row['f_april'];
$f_april_date=$row['f_april_date'];
$f_may=$row['f_may'];
$f_may_date=$row['f_may_date'];
$dob=$row['dob'];
$pimg="../".$row['img_path'];
$email=$row['email'];
$_SESSION['email']=$email;
$_SESSION['name']=$firstname;
$pdf = new FPDF();
$image12="done2.png";
// Add a page to that object
$pdf->AddPage();
$pdf->setleftmargin(10);
$pdf->setX(10);
$pdf->setY(10);
// Add some text
$pdf->SetFont('Arial','B',10);
$text="hello";
// width, height, text, no border, next line - below & left margin, alignement
// shree rang in centre
$pdf->Cell(200,10,'|| Shree Rang ||',0,1,"C");
//create rectangle and registration form
$pdf->SetFillColor(0,0,0);
$pdf->Rect(0, 20, 210, 10, 'F');
$pdf->SetTextColor(255,255,255);
$pdf->SetFont('Arial','B',15);
$pdf->Cell(200,10,'REGISTRATION FORM - 2016',0,1,"C");
// space below registration form
$pdf->Cell(210,5,'',0,1,"C");
//set font and height
$pdf->SetFont('Arial','B',12);
$pdf->SetTextColor(0,0,0);
//space before form no.
$pdf->Cell(10,5,'',0,0,"C");
$pdf->Cell(5,5,'FORM NO.',0,0,"C");
//space between rect and form no tag
$pdf->Cell(10,5,'',0,0,"C");
// create box for form
$pdf->Cell(30,5,$form,1,0,"C");
//space after box
$pdf->Cell(100,5,'',0,0,"C");
//display date
$pdf->SetFont('Arial','B',12);
$pdf->Cell(20,5,date("Y/m/d"),0,1,"C");
//add new row
$pdf->Cell(210,5,'',0,1,"C");
//Explora academy title
$pdf->SetFont('Helvetica','',25);
$pdf->Cell(200,15,'Explora Academy of Design',0,1,"C");
//add new row
$pdf->Cell(210,5,'',0,1,"C");
$pdf->SetFont('Arial','',12);
$pdf->SetTextColor(0,0,0);
//nata and all 3 courses
$pdf->Cell(80,5,'',0,0,"C");
if(strtoupper($course)=="NATA")
{
$pdf->Cell( 10,5, $pdf->Image($image12, $pdf->GetX(), $pdf->GetY(),10,5), 1, 0, 'L', false );
$pdf->Cell(15,5,'NATA',0,1,"C");
}else
{
$pdf->Cell(10,5,'',1,0,"C");
$pdf->Cell(15,5,'NATA',0,1,"C");
}
$pdf->Cell(1,1,'',0,1,"C");
$pdf->Cell(210,4,'',0,1,"C");
$pdf->SetFont('Arial','',12);
$pdf->SetTextColor(0,0,0);
$pdf->Cell(80,0,'',0,0,"C");
if(strtoupper($course)=="INTERIOR DESIGN")
{
$pdf->Cell( 10,5, $pdf->Image($image12, $pdf->GetX(), $pdf->GetY(),10,5), 1, 0, 'L', false );
$pdf->Cell(41,5,'INTERIOR DESIGN',0,1,"C");
}else
{
$pdf->Cell(10,5,'',1,0,"C");
$pdf->Cell(41,5,'INTERIOR DESIGN',0,1,"C");
}
$pdf->setFont('Arial','',12);
$pdf->Cell(20,15,'Surname',0,0,"L");
$pdf->setFont('Arial','',12);
$pdf->Cell(10,0,'',0,0,"C");
$pdf->Cell(40,15,$surname,0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(40,90, 114,0.5, 'F');
$pdf->setFont('Arial','',12);
$pdf->Cell(20,5,'Firstname',0,0,"L");
$pdf->setFont('Arial','',12);
$pdf->Cell(10,0,'',0,0,"C");
$pdf->Cell(40,5,$firstname,0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(40,100, 114,0.5, 'F');
// Explora icon
$pdf->setXY(160,61);
$image1 ="logo.png";
$pdf->Cell( 40, 40, $pdf->Image($image1, $pdf->GetX(), $pdf->GetY(), 33.78), 0, 1, 'L', false );
$pdf->setFont('Arial','',12);
$pdf->Cell(20,12,'Lastname',0,0,"L");
$pdf->setFont('Arial','',12);
$pdf->Cell(10,0,'',0,0,"C");
$pdf->Cell(40,12,$lastname,0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(40,109, 114,0.5, 'F');
//add new row
$pdf->Cell(210,10,'',0,20,"C");
$pdf->setFont('Arial','',12);
$pdf->Cell(20,-13,'Birthdate',0,0,"L");
$pdf->setFont('Arial','',12);
$pdf->Cell(10,0,'',0,0,"C");
$pdf->Cell(40,-13,$dob,0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(40,119, 114,0.5, 'F');
//add new row
$pdf->Cell(210,10,'',0,1,"C");
$pdf->setFont('Arial','',12);
$pdf->Cell(20,12,'E-mail',0,0,"L");
$pdf->setFont('Arial','',12);
$pdf->Cell(10,0,'',0,0,"C");
$pdf->Cell(40,12,$email,0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(40,129, 114,0.5, 'F');
//first address
$pdf->Cell(210,3,'',0,1,"C");
$pdf->setFont('Arial','',12);
$pdf->Cell(200,15,'Postal Address for Communication',0,1,"L");
if(strlen($address)>87)
{
$addr1=str_split($address,87);
$addr=$addr1[0];
$addr2=$addr1[1];
$pdf->setFont('Arial','',10);
$pdf->Cell(0.01,0,'',0,0,"L");
$pdf->Cell(190,0,strtoupper($addr),0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(10.8,152, 190,0.5, 'F');
//second address
$pdf->Cell(210,9,'',0,1,"C");
$pdf->setFont('Arial','',10);
$pdf->Cell(0.01,0,'',0,0,"L");
$pdf->Cell(100,0,strtoupper($addr2),0,1,"L");
}
else
{
$pdf->setFont('Arial','',10);
$pdf->Cell(0.01,0,'',0,0,"L");
$pdf->Cell(190,0,strtoupper($address),0,1,"L");
$pdf->SetFillColor(150,150,150);
$pdf->Rect(10.8,152, 190,0.5, 'F');
//second address
$pdf->Cell(210,9,'',0,1,"C");
$pdf->setFont('Arial','',10);
$pdf->Cell(0.01,0,'',0,0,"L");
$pdf->Cell(100,0,"",0,1,"L");
}
$pdf->Cell(115,5,'',0,0,"C");
$pdf->Cell(100,4,'M -',0,1,"L");
//second address underline
$pdf->SetFillColor(150,150,150);
$pdf->Rect(10.8,161, 110,0.5, 'F');
//mobile number underline
$pdf->SetFillColor(150,150,150);
// $pdf->Rect(133,160, 50,5, 'F');
$pdf->Cell(125,0,'',0,0,"C");
$mob0=str_split($mobile,1);
for($i=0;$i<strlen($mobile);$i++)
{
$mob1=$mob0[$i];
$pdf->Cell(5,-5,$mob1,1,0,"C");
}
//sign of the candidate
$pdf->Cell(200,3,'',0,1,"C");
$pdf->Cell(0,6,'Sign of the candidate',0,1,"L");
//use this as a horizontal line
$pdf->SetFillColor(150,150,150);
$pdf->Rect(148,170,25,0.5, 'F');
//box for the signature
$pdf->Cell(1,0,'',0,0,"C");
$pdf->Cell(45,10,'',1,1,"C");
//box for the photos
$pdf->setXY(160,102);
$image1 =$pimg;
//$pdf->Cell( 30, 35, , 0, 1, 'L', false );
$pdf->Cell(1,0,'',0,0,"C");
$pdf->Cell(30,35,$pdf->Image($image1, $pdf->GetX(), $pdf->GetY(), 30,35),1,1,"C");
//branch head
$pdf->setXY(145,162);
$pdf->Cell(10,10,' Branch Head (Vadodara)',0,1,"L");
$pdf->Cell(0,5,'<NAME> : 09924376644',0,1,"R");
$pdf->Cell(0,5,'<NAME> : 09998130018',0,1,"R");
//use this as a horizontal line below signature box
$pdf->SetFillColor(150,150,150);
$pdf->Rect(11,188,190,0.5, 'F');
//contact
$pdf->Cell(0,10,'',0,1,"L");
$pdf->Cell(0,5,'Website : www.explora.in',0,1,"L");
$pdf->Cell(0,7,'Contact us: <EMAIL>',0,1,"L");
//address
$pdf->setXY(117,188 );
$pdf->setFont('Arial','B',10);
$pdf->Cell(0,5,'Address : ',0,1,"L");
$pdf->setFont('Arial','',10);
$pdf->setXY(117,193 );
$pdf->Cell(0,5,'Studio-2, First Floor, Cross Road Complex.',0,1,"L");
$pdf->setXY(117,198);
$pdf->Cell(0,5,'Next to Mr. Puff, Near to Domino\'s Pizza',0,1,"L");
$pdf->setXY(117,203);
$pdf->Cell(0,5,'Subhanpura, Vadodara-390023',0,1,"L");
$pdf->Cell(0,5,'',0,1,"L");
//use this as a horizontal line below signature box
$pdf->SetFillColor(10,10,10);
$pdf->Rect(11,208,190,1, 'F');
//for office use only
$pdf->SetFont('Arial','B',12);
$pdf->Cell(0,10,'For Office Use Only',0,1,"C");
$pdf->SetFont('Arial','B',12);
$pdf->Cell(0,8,'Fees Details',0,1,"L");
$pdf->SetFont('Arial','',10);
$pdf->Cell(37,0,'',0,0,"L");
$pdf->Cell(0,5,'Cash / Cheque',0,0,"L");
$pdf->Cell(-85,0,'',0,0,"L");
$pdf->Cell(47,5,'Date',0,0,"L");
$pdf->Cell(0,5,'Authorized Signature',0,1,"L");
$pdf->SetFont('Arial','',12);
$pdf->Cell(0,7,'Registration',0,0,"L");
if($f_reg==0)
{
$f_reg="";
$signr="";
$f_reg_date="";
}
// create box for form
$pdf->Cell(0,0,'',0,1,"L");
$pdf->Cell(30,5,'',0,0,"L");
$pdf->Cell(42,8 ,$f_reg,1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,date("Y/m/d"),1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,$signr,1,1,"C");
$pdf->SetFont('Arial','',12);
$pdf->Cell(0,18,'April',0,0,"L");
if($f_april==0)
{
$f_april="";
$signa="";
$f_april_date="";
}
$pdf->Cell(0,5,'',0,1,"L");
$pdf->Cell(30,5,'',0,0,"L");
$pdf->Cell(42,8 ,$f_april,1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,$f_april_date,1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,$signa,1,1,"C");
$pdf->SetFont('Arial','',12);
$pdf->Cell(0,18,'May',0,0,"L");
if($f_may==0)
{
$f_may="";
$signm="";
$f_may_date="";
}
$pdf->Cell(0,5,'',0,1,"C");
$pdf->Cell(30,5,'',0,0,"C");
$pdf->Cell(42,8 ,$f_may,1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,$f_may_date,1,0,"C");
$pdf->Cell(17,4,'',0,0,"L");
$pdf->Cell(42,8,$signm,1,1,"C");
$pdf->SetFillColor(10,10,10);
$pdf->Rect(11,278,190,1, 'F');
//$pdf->Output();
$pdf->Output("Receipts/".$form.".pdf");
}
echo "<script type='text/javascript'>window.location='send_mail.php?name=$form'</script>";
?>
<file_sep><?php
// register.php — persists a new-student registration posted from newregtext.php:
// validates/stores the uploaded photo, inserts the registration row, and
// consumes the one-time OTP stored in the session.
session_start();

// Form fields.
$group   = $_POST['group2'];    // course group: "nata" or "ID"
$fname   = $_POST['fname'];
$lname   = $_POST['lname'];
$surname = $_POST['surname'];
$email   = $_POST['email'];     // NOTE(review): collected but never stored — confirm intent
$mob     = $_POST['mob'];
$address = $_POST['address'];
$dob     = $_POST['dob'];
$dob     = date('Y-m-d', strtotime($dob));   // normalise to MySQL DATE format
$flag1 = 0;   // NOTE(review): unused
$flag2 = 0;   // NOTE(review): unused

// Destination for the uploaded candidate photo.
$target_dir  = "uploads/";
$target_file = $target_dir . basename($_FILES["file"]["name"]);
$uploadOk = 1;
$imageFileType = pathinfo($target_file, PATHINFO_EXTENSION);   // NOTE(review): computed but never checked

// Check if image file is a actual image or fake image
// ("action" is the name of the submit button, so this runs on form posts).
if (isset($_POST["action"])) {
    $check = getimagesize($_FILES["file"]["tmp_name"]);
    if ($check !== false) {
        echo "File is an image - " . $check["mime"] . ".";
        $uploadOk = 1;
    } else {
        echo "File is not an image.";
        $uploadOk = 0;
    }
}
if ($uploadOk == 0) {
    echo "Sorry, your file was not uploaded.";
// if everything is ok, try to upload file
} else {
    if (move_uploaded_file($_FILES["file"]["tmp_name"], $target_file)) {
        // NOTE(review): credentials are hard-coded; move them to configuration
        // outside the web root.
        $connection = mysqli_connect("localhost","explora_new","random1234rewA","explora_new") or die("Error " . mysqli_error($connection));
        //$connection = mysqli_connect("localhost","root","","explora") or die("Error " . mysqli_error($connection));

        // SECURITY FIX: the original interpolated raw POST data directly into
        // the SQL string (SQL injection). Use a prepared statement instead.
        // Column mapping preserved from the original:
        //   middle_name <- $surname and surname <- $lname
        //   NOTE(review): that mapping looks swapped — confirm against the schema.
        $stmt = mysqli_prepare($connection,
            "INSERT INTO `registration`(`course`, `first_name`, `middle_name`, `surname`, `address`, `mobile_no`, `img_path`, `dob`) VALUES (?, ?, ?, ?, ?, ?, ?, ?)")
            or die("Error in Selecting " . mysqli_error($connection));
        mysqli_stmt_bind_param($stmt, "ssssssss", $group, $fname, $surname, $lname, $address, $mob, $target_file, $dob);
        $result = mysqli_stmt_execute($stmt) or die("Error in Selecting " . mysqli_error($connection));
        if ($result > 0) {
            // Mark the one-time OTP as consumed (parameterised for the same reason).
            $otp  = $_SESSION['lotp'];
            $stmt = mysqli_prepare($connection, "UPDATE new_reg SET flag=1 where otp=?")
                or die("Error in Selecting " . mysqli_error($connection));
            mysqli_stmt_bind_param($stmt, "s", $otp);
            $result = mysqli_stmt_execute($stmt) or die("Error in Selecting " . mysqli_error($connection));
            unset($_SESSION['lotp']);
            echo '<script type="text/javascript"> alert("Registered Successfully")</script>';
            echo "<script type='text/javascript'>window.location='newregtext.php'</script>";
            $_SESSION['result']=1;
        } else {
            echo "<script>
            window.location = 'index.php';
            </script>";
            exit("Error, Try Again after sometime");
        }
    } else {
        echo "Sorry, there was an error uploading your file.";
    }
}
?>
<file_sep><?php
// Access guard for the registration form: only visitors who passed the OTP
// step (which sets $_SESSION['otpl']) may see this page.
// NOTE(review): this key is 'otpl' but register.php reads $_SESSION['lotp'] —
// confirm the two names are both intended.
session_start();
if (!isset($_SESSION['otpl'])) {
    header('location:otp.php');
    // BUG FIX: without exit, the rest of the page was still rendered and
    // sent to the client after the redirect header.
    exit;
}
?>
<!DOCTYPE html>
<html>
<head>
<title>New Registrations::Explora Academy of Design</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1.0, user-scalable=no"/>
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
<link href="css/materialize.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<link href="css/newreg.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<link rel="icon" href="media/logo.png">
</head>
<body>
<?php unset($_SESSION['otpl']);?>
<div class="container">
<div class="row main-thing">
<h2 class="center">New Registrations</h2>
<form name="login_form" id="login_form" method="POST" enctype="multipart/form-data" class="col s12 white-text" action="register.php">
<div class="row">
<p class="center">
<input name="group2" type="radio" id="nata" value="nata" checked/>
<label for="nata">Nata</label>
<input name="group2" type="radio" id="ID" value="ID"/>
<label for="ID">Interior Design</label>
</p>
</div>
<div class="row">
<div class="input-field col s12 l4 m4">
<input id="surname" type="text" class="validate" autocomplete="off" name="surname" required>
<label for="surname">Surname</label>
</div>
<div class="input-field col s12 l4 m4">
<i class="material-icons prefix"></i>
<input id="fname" type="text" class="validate" autocomplete="off" name="fname" required>
<label for="fname">First Name</label>
</div>
<div class="input-field col s12 l4 m4">
<i class="material-icons prefix"></i>
<input id="lname" type="text" class="validate" autocomplete="off" name="lname" required>
<label for="lname">Last Name</label>
</div>
</div>
<div class="row">
<div class="input-field col s12 l12 m12">
<textarea id="address" class="materialize-textarea" length="120" name="address"></textarea>
<label for="address">Address</label>
</div>
</div>
<div class="row">
<div class="input-field col s12 l12 m12">
<input id="dob" type="date" class="validate datepicker" autocomplete="off" name="dob" required>
<label for="dob">Date of Birth</label>
</div>
</div>
<div class="row">
<div class="input-field col s12 l6 m6">
<input id="mob" type="text" class="validate" autocomplete="off" length="13" name="mob" required>
<label for="mob">Contact</label>
</div>
<div class="input-field col s12 l6 m6">
<input id="mail" type="email" class="validate" autocomplete="off" name="email" required>
<label for="mail">Email</label>
</div>
</div>
<div class="row">
<div class="file-field input-field col s12 l12 m12">
<div class="btn">
<span>Upload Photo</span>
<input type="file" name="file" id="photo" accept="image/*" required="">
</div>
<div class="file-path-wrapper">
<input class="file-path validate" type="text">
</div>
</div>
</div>
<button class="btn waves-effect waves-light btn-large right" type="submit" name="action">Submit
<i class="material-icons">save</i>
</button>
</form>
</div>
</div>
<script src="js/jquery.min.js"></script>
<script src="js/materialize.js"></script>
<script src="js/init.js"></script>
</body>
</html>
|
45016709efd41eb0ea72eede41879ac0235fd251
|
[
"Markdown",
"HTML",
"PHP"
] | 26
|
Markdown
|
shailshah9/Explora.in
|
7e0ec4e3435c989b9c133d08c26bfdb37b9e497d
|
6dae49abaf9ca8ce421c341992bcfa5dba864dce
|
refs/heads/master
|
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
namespace aiTestBackend.Controllers
{
/// <summary>
/// REST endpoints for job sheets, served at <c>api/Jobsheet</c> as JSON.
/// </summary>
[Produces("application/json")]
[Route("api/Jobsheet")]
//[ApiController]
public class JobsheetController : ControllerBase
{
    /// <summary>
    /// GET api/Jobsheet — returns two hard-coded sample job sheets.
    /// No persistence is wired up; this is placeholder data.
    /// </summary>
    [HttpGet]
    public ActionResult<IEnumerable<Models.JobSheet>> Get()
    {
        return new Models.JobSheet[] {
            new Models.JobSheet() { Text = "hello" },
            new Models.JobSheet() { Text = "from the backend" },
        };
    }

    /// <summary>
    /// POST api/Jobsheet — accepts a job sheet from the request body.
    /// NOTE(review): the body is currently discarded; storage is not implemented.
    /// </summary>
    [HttpPost]
    public void Post([FromBody] Models.JobSheet jobsheet)
    {
    }
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace aiTestBackend
{
/// <summary>
/// Decides whether an ordered job is approved, referred, or declined by
/// comparing its total cost against the customer's referral price limit.
/// </summary>
public class ApprovalSystem
{
    public decimal CostOfJob { get; set; }
    public decimal ReferencePriceLimit { get; set; }

    public enum ApprovalDecision
    {
        decline = 0,
        approve = 1,
        refer = 2,
    }

    public ApprovalSystem(OrderedJob orderedJob, Customer customer)
    {
        // Snapshot the job cost and the customer's ceiling at construction time.
        CostOfJob = orderedJob.CalcTotalCost();
        ReferencePriceLimit = customer.MaxReferralPrice;
    }

    /// <summary>
    /// Approve up to 10% over the limit, decline above 15% over it,
    /// and refer anything in between.
    /// </summary>
    public ApprovalDecision Decision()
    {
        decimal approveCeiling = (ReferencePriceLimit / 100 * 10) + ReferencePriceLimit;
        decimal referCeiling = (ReferencePriceLimit / 100 * 15) + ReferencePriceLimit;

        if (CostOfJob <= approveCeiling)
        {
            return ApprovalDecision.approve;
        }
        if (CostOfJob > referCeiling)
        {
            return ApprovalDecision.decline;
        }
        return ApprovalDecision.refer;
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Threading.Tasks;
namespace aiTestBackend
{
/// <summary>
/// Base type for a replaceable car part: how long it takes to fit and what
/// one unit costs. Concrete parts set both values in their constructors.
/// </summary>
public abstract class ReplacementParts
{
    // Time a mechanic needs to fit one unit of this part.
    public TimeSpan TimeToFit { get; set; }
    // Price of a single unit (currency unspecified here).
    public decimal UnitPrice { get; set; }
    // Primary key, generated by the database on insert.
    [DatabaseGenerated(DatabaseGeneratedOption.Identity)]
    public Guid ID { get; set; }
}
/// <summary>Tyre replacement: 30 minutes to fit, 200 per unit.</summary>
public class Tyres : ReplacementParts
{
    // FIX: constructors had no access modifier and were therefore private,
    // making these classes impossible to instantiate from outside. The
    // unused local "Guid ID;" is removed — the inherited ID property is
    // database-generated and needs no shadow.
    public Tyres()
    {
        TimeToFit = new TimeSpan(0, 30, 0);
        UnitPrice = 200;
    }
}

/// <summary>Brake disc replacement: 90 minutes to fit, 100 per unit.</summary>
public class BrakeDiscs : ReplacementParts
{
    public BrakeDiscs()
    {
        TimeToFit = new TimeSpan(0, 90, 0); // TimeSpan normalises 90 min to 1h30m
        UnitPrice = 100;
    }
}

/// <summary>Brake pad replacement: 60 minutes to fit, 50 per unit.</summary>
public class BrakePads : ReplacementParts
{
    public BrakePads()
    {
        TimeToFit = new TimeSpan(0, 60, 0);
        UnitPrice = 50;
    }
}

/// <summary>Oil change: 30 minutes, 20 per unit.</summary>
public class Oil : ReplacementParts
{
    public Oil()
    {
        TimeToFit = new TimeSpan(0, 30, 0);
        UnitPrice = 20;
    }
}

/// <summary>Exhaust replacement: 240 minutes (4 h) to fit, 175 per unit.</summary>
public class Exhaust : ReplacementParts
{
    public Exhaust()
    {
        TimeToFit = new TimeSpan(0, 240, 0);
        UnitPrice = 175;
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Threading.Tasks;
namespace aiTestBackend
{
/// <summary>
/// A garage customer: a display name plus the price ceiling used by the
/// approval system to decide when a job must be referred.
/// </summary>
public class Customer
{
    public string CustomerName { get; set; }

    // Highest job price associated with auto-approval for this customer.
    public decimal MaxReferralPrice { get; set; }

    // Primary key, generated by the database on insert.
    [DatabaseGenerated(DatabaseGeneratedOption.Identity)]
    public Guid ID { get; set; }

    /// <summary>Populates this instance with the given name and price ceiling.</summary>
    public void CreateNewCustomer(string name, decimal maxPrice)
    {
        this.CustomerName = name;
        this.MaxReferralPrice = maxPrice;
    }
}
}
<file_sep>import { Injectable } from '@angular/core'
import { HttpClient } from '@angular/common/http'

/** Thin wrapper around the backend REST API. */
@Injectable()
export class ApiService {
    constructor(private http: HttpClient) { }

    /** POST a job sheet to the backend and log the server's reply. */
    postJobSheet(job) {
        const endpoint = 'https://localhost:44351/api/jobsheet'
        this.http.post(endpoint, job).subscribe(response => {
            console.log(response)
        })
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace aiTestBackend
{
/// <summary>
/// A job a customer has ordered: the parts to fit plus derived totals.
/// </summary>
public class OrderedJob
{
    public List<ReplacementParts> ReplacementParts { get; set; }

    // NOTE(review): incremented per instance, so it is always 1 — a static
    // counter was probably intended. Left as-is to keep the interface stable.
    public int JobReference { get; set; }

    public OrderedJob(List<ReplacementParts> injectedListOfParts)
    {
        ReplacementParts = injectedListOfParts;
        JobReference++;
        CalcTimeToFit();
        CalcTotalCost();
    }

    /// <summary>Total fitting time: the sum of every part's TimeToFit.</summary>
    public TimeSpan CalcTimeToFit()
    {
        // BUG FIX: the original did "part.TimeToFit += accumulator", adding
        // the (always-zero) accumulator into each part instead of summing the
        // parts, so the method always returned TimeSpan.Zero.
        TimeSpan currentDurationOfJob = TimeSpan.Zero;
        foreach (var part in ReplacementParts)
        {
            currentDurationOfJob += part.TimeToFit;
        }
        return currentDurationOfJob;
    }

    /// <summary>Total cost: the sum of every part's UnitPrice.</summary>
    public decimal CalcTotalCost()
    {
        // BUG FIX: same accumulation bug as CalcTimeToFit — always returned 0.
        decimal currentTotal = 0;
        foreach (var part in ReplacementParts)
        {
            currentTotal += part.UnitPrice;
        }
        return currentTotal;
    }
}
}
<file_sep>import { Component } from '@angular/core'
import { ApiService } from './api.service'
import { FormControl } from '@angular/forms'

/**
 * Form component for entering a job sheet; the template binds its inputs
 * into `jobsheet` and calls `post()` on submit.
 * NOTE(review): FormControl is imported but unused — confirm before removing.
 */
@Component({
    selector: 'jobSheet',
    templateUrl: './jobSheet.component.html',
})
export class JobSheetComponent {
    // Model object the template's inputs are bound to.
    jobsheet = {}

    constructor(private api: ApiService) { }

    // Submit handler: forwards the sheet to the backend via ApiService.
    post(jobsheet) {
        this.api.postJobSheet(jobsheet)
    }
}
<file_sep>import { Component } from '@angular/core'
import { JobSheetComponent } from './jobSheet.component'

/** Root component: renders the single <jobSheet> form. */
@Component({
    selector: 'app-root',
    template: '<jobSheet></jobSheet>'
})
export class AppComponent {
    title = 'Patrick\'s AutoIntegrate App';
}
|
4b74cc4715dacd76d665d62e62ad248e47376361
|
[
"C#",
"TypeScript"
] | 8
|
C#
|
leastXpctdMost/aiTest
|
aad35eae013b96fe434cb255e8bcd7a70ec0632c
|
7e4580df5d6d8840231e2e92821d582c3152c29a
|
refs/heads/master
|
<repo_name>JTejs/activejs<file_sep>/src/active_view/template.js
/* ***** BEGIN LICENSE BLOCK *****
*
* Copyright (c) 2009 Aptana, Inc.
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* ***** END LICENSE BLOCK ***** */
ActiveView.Template = {
    /**
     * Compiles a template class from an EJS-style source string.
     * @param {String} src template source containing <% %> / <%= %> tags
     * @param {Object} [helpers] functions made available inside the template
     * @return {Function} a class exposing render(data) as a class method
     */
    create: function create(src,helpers)
    {
        var klass = function klass(){};
        klass.helpers = {};
        // User helpers are applied first, then the built-ins.
        // NOTE(review): extend overwrites existing keys, so a built-in helper
        // with the same name as a user helper wins — confirm this is intended.
        ActiveSupport.extend(klass.helpers,helpers || {});
        ActiveSupport.extend(klass.helpers,ActiveView.Template.Helpers);
        ActiveSupport.extend(klass,ActiveView.Template.ClassMethods);
        klass.template = ActiveView.Template.generateTemplate(src);
        return klass;
    }
};
// Compiles EJS-style template source into a function(data) that returns the
// rendered string. The source is rewritten into JavaScript that pushes
// string fragments and expression results onto an array.
ActiveView.Template.generateTemplate = function generateTemplate(source)
{
    try
    {
        // Original Implementation: Simple JavaScript Templating
        // John Resig - http://ejohn.org/ - MIT Licensed
        var processed_source = source
            .replace(/<%([^\=](.+?))\)(\s*)%>/g,'<%$1);$3%>') //fix missing semi-colons
            .replace(/[\r\t\n]/g, " ")          // flatten whitespace so the source fits in one string literal
            .replace(/'(?=[^%]*%>)/g,"\t")      // temporarily protect quotes that appear inside <% %> blocks
            .split("'").join("\\'")             // escape the remaining (literal output) quotes
            .split("\t").join("'")              // restore the protected quotes
            .replace(/<%=(.+?)%>/g, "',$1,'")   // <%= expr %>  ->  pushed expression value
            .split("<%").join("');")            // opening code tag: close the current p.push(...)
            .split("%>").join("p.push('")       // closing code tag: resume pushing literal text
        ;
        // The generated function accumulates fragments in p and evaluates the
        // template body with both the helpers and the data object on the
        // scope chain (via the deprecated "with" statement).
        return new Function("data",[
            "var p = [];",
            "var print = function(){p.push.apply(p,arguments);};",
            "with(this.helpers){with(data){p.push('",
            processed_source,
            "');}}",
            "return p.join('');"
        ].join(''));
    }
    catch(e)
    {
        // NOTE(review): if the .replace chain itself throws, processed_source
        // is still undefined here (var hoisting keeps this from erroring,
        // but the diagnostic output will show "undefined").
        ActiveSupport.throwError(ActiveView.Template.Errors.CompilationFailed,'input:',source,'processed:',processed_source,e);
    }
};
// Error raised when template compilation fails.
ActiveView.Template.Errors = {
    CompilationFailed: ActiveSupport.createError('The template could not be compiled:')
};

// Class-level methods mixed into every compiled template class.
ActiveView.Template.ClassMethods = {
    /**
     * Renders the compiled template.
     * @param {Object} [data] values exposed to the template body
     * @return {String}
     */
    render: function render(data)
    {
        // Bind so "this.helpers" inside the generated function resolves to
        // the template class itself.
        return ActiveSupport.bind(this.template,this)(data || {});
    }
};

// Helpers shared by all templates; empty by default, extended per-template in create().
ActiveView.Template.Helpers = {};
<file_sep>/src/active_view/main.js
/* ***** BEGIN LICENSE BLOCK *****
*
* Copyright (c) 2009 Aptana, Inc.
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* ***** END LICENSE BLOCK ***** */
ActiveView = {};

// When true, lifecycle events are written via ActiveSupport.log.
ActiveView.logging = false;

/**
 * Builds an ActiveView class.
 * @param {Function} [structure] builds and returns the view's DOM container;
 *     defaults to ActiveView.defaultStructure
 * @param {Object} [methods] instance methods mixed into the prototype
 * @return {Function} the view class
 */
ActiveView.create = function create(structure,methods)
{
    // FIX: removed a dead branch that tested typeof(options) === 'function'
    // and assigned to "options" — no such variable exists in this scope
    // (typeof on an undeclared name yields 'undefined'), so the branch could
    // never run and only obscured the real parameters.
    var klass = function klass(){
        this.initialize.apply(this,arguments);
    };
    ActiveSupport.extend(klass,ClassMethods);
    ActiveSupport.extend(klass.prototype,methods || {});
    ActiveSupport.extend(klass.prototype,InstanceMethods);
    klass.prototype.structure = structure || ActiveView.defaultStructure;
    ActiveEvent.extend(klass);
    return klass;
};
// Fallback structure used when ActiveView.create is given none: a single
// element produced by the builder DSL's div() helper.
ActiveView.defaultStructure = function defaultStructure()
{
    return ActiveView.Builder.div();
};
// Mixes the ActiveEvent observable API into an array instance and wraps each
// mutating method so observers fire when the array changes.
ActiveView.makeArrayObservable = function makeArrayObservable(array)
{
    ActiveEvent.extend(array);
    var mutators = ['shift','unshift','pop','push','splice'];
    for(var i = 0; i < mutators.length; ++i)
    {
        array.makeObservable(mutators[i]);
    }
};
/**
 * This method is not usually called directly but is utilized by data
 * bindings and ActiveControllers.
 *
 * This method normalizes or renders a variety of inputs. Strings or
 * Element objects are returned untouched, ActiveView instances will have
 * their DOM container returned, ActiveView classes will be rendered and
 * the DOM container returned. If a function is passed in it will be called
 * with the passed scope. That function should return a string or Element.
 *
 * @alias ActiveView.render
 * @param {mixed} content
 * @param {Object} [scope]
 * @return {mixed}
 */
ActiveView.render = function render(content,scope)
{
    if(!scope)
    {
        scope = {};
    }
    //if content is a function, that function can return nodes or an ActiveView class or instance
    // (view classes are functions too; they are told apart by having
    // structure() on their prototype)
    if(typeof(content) === 'function' && !content.prototype.structure)
    {
        content = content(scope);
    }
    // Plain string or DOM element (nodeType 1): pass through untouched.
    if(content && (typeof(content) == 'string' || content.nodeType == 1))
    {
        return content;
    }
    else if(content && content.container && content.container.nodeType == 1)
    {
        //is ActiveView instance
        return content.container;
    }
    else if(content && content.prototype && content.prototype.structure)
    {
        //is ActiveView class
        return new content(scope).container;
    }
    return ActiveSupport.throwError(Errors.InvalidContent);
};
var InstanceMethods = {
    /**
     * Builds the view: wraps the scope in an ObservableHash, generates the
     * binding API, calls structure() to build the DOM container, then
     * replays every scope key through set() so bindings fire once initially.
     * @param {Object} [scope] plain object or ObservableHash
     * @param {Object} [parent] owning object, stored as-is on this.parent
     */
    initialize: function initialize(scope,parent)
    {
        this.parent = parent;
        this.setupScope(scope);
        if(ActiveView.logging)
        {
            ActiveSupport.log('ActiveView: initialized with scope:',scope);
        }
        this.builder = ActiveView.Builder;
        ActiveView.generateBinding(this);
        this.container = this.structure();
        // structure() must hand back a real DOM element (nodeType 1) to mount.
        if(!this.container || !this.container.nodeType || this.container.nodeType !== 1)
        {
            return ActiveSupport.throwError(Errors.ViewDoesNotReturnContainer,typeof(this.container),this.container);
        }
        // Re-set each initial value so observers and bindings see it.
        for(var key in this.scope._object)
        {
            this.scope.set(key,this.scope._object[key]);
        }
    },
    /**
     * Normalizes the scope into an ObservableHash and makes any array
     * values observable so list bindings react to mutations.
     */
    setupScope: function setupScope(scope)
    {
        this.scope = (scope ? (scope.toObject ? scope : new ActiveEvent.ObservableHash(scope)) : new ActiveEvent.ObservableHash({}));
        for(var key in this.scope._object)
        {
            var item = this.scope._object[key];
            // Duck-type "array-like" check (splice + join) instead of
            // instanceof, which fails across frames.
            if((item !== null && typeof item === "object" && 'splice' in item && 'join' in item) && !item.observe)
            {
                ActiveView.makeArrayObservable(item);
            }
        }
    },
    // Read a value from the view's scope.
    get: function get(key)
    {
        return this.scope.get(key);
    },
    // Write a value into the scope; arrays are made observable on the way in.
    set: function set(key,value)
    {
        if((value !== null && typeof value === "object" && 'splice' in value && 'join' in value) && !value.observe)
        {
            ActiveView.makeArrayObservable(value);
        }
        return this.scope.set(key,value);
    },
    // Record an (element, event name, handler) triple for later teardown.
    // NOTE(review): this.eventHandlers is never initialized anywhere in this
    // file, so calling this before something else assigns the array would
    // throw — confirm where eventHandlers is expected to be created.
    registerEventHandler: function registerEventHandler(element,event_name,observer)
    {
        this.eventHandlers.push([element,event_name,observer]);
    }
};
// Class-level methods mixed into every ActiveView class (currently none).
var ClassMethods = {
};

// Errors thrown by ActiveView.
var Errors = {
    // structure() returned something that is not a DOM element.
    ViewDoesNotReturnContainer: ActiveSupport.createError('The view constructor must return a DOM element. Returned: '),
    // ActiveView.render was handed an unrenderable value.
    InvalidContent: ActiveSupport.createError('The content to render was not a string, DOM element or ActiveView.'),
    MismatchedArguments: ActiveSupport.createError('Incorrect argument type passed: ')
};<file_sep>/src/active_record/adapters/jaxer_sqlite.js
/* ***** BEGIN LICENSE BLOCK *****
*
* Copyright (c) 2009 Aptana, Inc.
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* ***** END LICENSE BLOCK ***** */
(function(){

/**
 * Adapter for Jaxer configured with SQLite
 * @alias ActiveRecord.Adapters.JaxerSQLite
 * @property {ActiveRecord.Adapter}
 */
ActiveRecord.Adapters.JaxerSQLite = function JaxerSQLite(){
    // Layer the generic instance methods, then the shared SQLite SQL
    // generation, then the Jaxer-specific overrides below (later wins).
    ActiveSupport.extend(this,ActiveRecord.Adapters.InstanceMethods);
    ActiveSupport.extend(this,ActiveRecord.Adapters.SQLite);
    ActiveSupport.extend(this,{
        // Prefixed logger; no-op unless ActiveRecord.logging is enabled.
        log: function log()
        {
            if (!ActiveRecord.logging)
            {
                return;
            }
            if (arguments[0])
            {
                arguments[0] = 'ActiveRecord: ' + arguments[0];
            }
            // NOTE(review): "arguments || {}" — arguments is always truthy,
            // so the {} fallback is dead code.
            return ActiveSupport.log.apply(ActiveSupport,arguments || {});
        },
        // Executes sql (first argument) with any bind values (remaining
        // arguments) against the active Jaxer DB connection.
        executeSQL: function executeSQL(sql)
        {
            ActiveRecord.connection.log("Adapters.JaxerSQLite.executeSQL: " + sql + " [" + ActiveSupport.arrayFrom(arguments).slice(1).join(',') + "]");
            var response = Jaxer.DB.execute.apply(Jaxer.DB.connection, arguments);
            return response;
        },
        // Auto-increment id of the most recent INSERT.
        getLastInsertedRowId: function getLastInsertedRowId()
        {
            return Jaxer.DB.lastInsertId;
        },
        // Decorates a Jaxer result set with the iterate() API the rest of
        // ActiveRecord expects: iterate(n) returns a copy of row n or false;
        // iterate(fn) calls fn with a copy of each row.
        iterableFromResultSet: function iterableFromResultSet(result)
        {
            result.iterate = function iterate(iterator)
            {
                if (typeof(iterator) === 'number')
                {
                    if (this.rows[iterator])
                    {
                        return ActiveSupport.clone(this.rows[iterator]);
                    }
                    else
                    {
                        return false;
                    }
                }
                else
                {
                    for (var i = 0; i < this.rows.length; ++i)
                    {
                        var row = ActiveSupport.clone(this.rows[i]);
                        // Strip Jaxer's internal values cache from the copy.
                        delete row['$values'];
                        iterator(row);
                    }
                }
            };
            return result;
        },
        // Wraps proceed() in BEGIN/COMMIT, rolling back on any exception
        // before rethrowing it.
        transaction: function transaction(proceed)
        {
            try
            {
                ActiveRecord.connection.executeSQL('BEGIN');
                proceed();
                ActiveRecord.connection.executeSQL('COMMIT');
            }
            catch(e)
            {
                ActiveRecord.connection.executeSQL('ROLLBACK');
                return ActiveSupport.throwError(e);
            }
        }
    });
};

// Opens (or creates) the SQLite database at path and returns a connected adapter.
// NOTE(review): "new Jaxer.DB.SQLite.createDB(...)" uses "new" on what looks
// like a factory function — confirm against the Jaxer API.
ActiveRecord.Adapters.JaxerSQLite.connect = function connect(path)
{
    Jaxer.DB.connection = new Jaxer.DB.SQLite.createDB({
        PATH: Jaxer.Dir.resolve(path || 'ActiveRecord.sqlite')
    });
    return new ActiveRecord.Adapters.JaxerSQLite();
};
})();<file_sep>/test/test_template.html
<html>
<head>
<!--[if IE ]>
<script type="text/javascript" src="http://getfirebug.com/releases/lite/1.2/firebug-lite-compressed.js"></script>
<![endif]-->
<script src="../latest/active.js" runat="server"></script>
<script runat="server">
// Smoke-test: compile and render a trivial inline template.
var t = ActiveView.Template.create('hi <%= test %>');
console.log(t.render({
    test: 'hi!!!!'
}));

window.onload = function(){
    // The template source lives in a <textarea>; innerHTML returns it with
    // angle brackets escaped as entities, so decode before compiling.
    // BUG FIX: the entity patterns had been mangled to plain ">"/"<"
    // (making both replaces no-ops); restore the intended &gt;/&lt; decode.
    var source = document.getElementById('test').innerHTML.replace(/&gt;/g,'>').replace(/&lt;/g,'<');
    console.log(source);
    var t = ActiveView.Template.create(source);
    /*
    console.log(t.render({
        categories: [],
        learning_paths: []
    }));
    */
};
</script>
</head>
<body>
<h1>ActiveJS Tests</h1>
<p>Check console for test results.</p>
<textarea id="test">
<%= render({ partial: 'home/featured_main.html' }) %>
<div class="separator"> </div>
<div class="secondary left">
<%= render({ partial: 'home/featured_other.html' }) %>
</div>
<div class="secondary middle">
<h2 class="png-fix">Learning Paths</h2>
<h3>New to Aptana? Start Here.</h3>
<table width="100%" cellspacing="0" cellpadding="0" border="0">
<tr>
<td valign="top">
<ul>
<% learning_paths.forEach(function(path) { %>
<li><a href="<%= learnUrl({ slug: path.slug }) %>"><%= path.name %></a></li>
<% }) %>
</ul>
</td>
</tr>
</table>
<div class="view-all">
<a href="<%= site_info('base') %>learn">all learning paths »</a>
</div>
</div>
<div class="secondary right">
<h2 class="png-fix">Video Topics</h2>
<h3>Learn about what interests you</h3>
<table width="100%" cellspacing="0" cellpadding="0" border="0">
<% var count = 0; %>
<tr>
<% categories.forEach(function(category) { %>
<% if (count == 0 || count == 4 ) { %>
<td valign="top" width="50%">
<ul>
<% } %>
<li><a href="<%= topicUrl({ slug: category.slug }) %>"><%= category.name %></a></li>
<% if (count == 3 || count == 7) { %>
</ul>
</td>
<% } %>
<% count++; %>
<% }) %>
<% if (count < 7) { %>
</ul>
</td>
<% } %>
</tr>
</table>
<div class="view-all">
<a href="<%= site_info('base') %>topics">all video topics »</a>
</div>
</div>
<div class="clearfix"></div>
</textarea>
</body>
</html>
|
3e81be5b96dea35cca2f39e728764c1f348eff95
|
[
"JavaScript",
"HTML"
] | 4
|
JavaScript
|
JTejs/activejs
|
e72ad8ffe8e750f57323b45e4420fdc07f3c9949
|
7c1aa551dc0dead06204be9eb8f171dd3960588f
|
refs/heads/master
|
<repo_name>SyraTR/passwordgenerator<file_sep>/passwordgenerator.py
import time,string,random,os
print """
##################################
# PASSWORD GENERATOR #
# Author: <NAME> #
##################################
"""
chars = string.ascii_letters + string.digits + '!@#$%^&*'
random.seed = (os.urandom(512))
while True:
length = input("--> Choose length of your password(8-20 digits): ")
if ((length >= 8) and (length <= 20)):
print ("--> your password is going to be created... ")
time.sleep(3)
password = ''.join(random.choice(chars) for r in range(length))
print ("[+] Password created: %s" %(password))
time.sleep(60)
quit()
break
else:
print ("[-] Length is less than 8 or more than 20 !!")
print ("Shutting down...")
time.sleep(3)
quit()
break
|
633d278b8c43a49215e2719e89800403f2d3f9e4
|
[
"Python"
] | 1
|
Python
|
SyraTR/passwordgenerator
|
c03b25ea8cb4733561e1a3e3b00fcde6618347b6
|
9a0e9387965d731e676cb0f14ad2eeb1875283fa
|
refs/heads/master
|
<file_sep>// AngularJS (1.x) module for the QPL site; ngDialog supplies the modal popups.
var app = angular.module('qpl',['ngDialog']);

// Root controller: opens a team roster in a modal dialog.
app.controller('qplController', ['$scope', 'ngDialog', '$compile', function($scope,ngDialog,$compile) {
    // Open the modal for one team; the template is fetched from the
    // server-side route teams/<teamName>.
    $scope.openTeam = function(teamName){
        ngDialog.open({
            template: 'teams/'+teamName,
            controller: 'firstDialogCtrl',
            closeByDocument: false,
            className: 'ngdialog-theme-default',
            closeByEscape: true,
            showClose: true,
            scope: $scope
        });
    };
    // Compile the whole <body> once the page loads so statically rendered
    // markup picks up the Angular bindings.
    // NOTE(review): re-compiling the live body is unusual — confirm this is
    // still required.
    window.onload = function(){
        $compile(document.getElementsByTagName('body')[0])($scope);
    };
}]);

// Controller for the team dialog: opens a nested player-details dialog.
app.controller('firstDialogCtrl', ['$scope', 'ngDialog', function($scope, ngDialog){
    $scope.openPlayer = function(playerName){
        ngDialog.open({
            template: 'player/'+playerName+'/details',
            closeByDocument: true,
            className: 'ngdialog-theme-default',
            closeByEscape: true,
            showClose: true,
            scope: $scope
        });
    };
}]);
}]);<file_sep>from django.contrib import admin
from models import Designation, Fixture, Goal, Player, Team
admin.site.register(Designation)
admin.site.register(Fixture)
admin.site.register(Goal)
admin.site.register(Player)
admin.site.register(Team)
<file_sep>from django.conf.urls import url

from . import views

# URL routes for the QPL app (Django 1.x regex-style patterns).
urlpatterns = [
    # Full fixture list.
    url(r'^fixtures$', views.fixtures, name='fix'),
    # Player detail popup; <name> is actually the numeric player id.
    url(r'^player/(?P<name>\w+)/details$', views.details, name='player'),
    # Static landing page.
    url(r'^$', views.index, name='index'),
    # Alternate landing page with live stats.
    url(r'^indx$', views.indx, name='indx'),
    # Team roster; <name> is actually the numeric team id.
    url(r'^teams/(?P<name>\w+)$', views.teams, name='teams'),
    # League table.
    url(r'^standings$', views.standings, name='standings'),
]
<file_sep>from __future__ import unicode_literals
from django.db import models
# Create your models here.
class Designation(models.Model):
    """Role label assigned to players (see Player.designation)."""
    desig = models.CharField(max_length=200)

    def __unicode__(self):
        return self.desig
class Team(models.Model):
    """A league team."""
    # Team crest image, stored under MEDIA_ROOT/images.
    logo = models.FileField(upload_to='images')
    name = models.CharField(max_length=200)

    def __unicode__(self):
        return self.name
class Fixture(models.Model):
    """A scheduled match between two teams, with the score."""
    team1 = models.ForeignKey(Team,related_name="first_team")
    team2 = models.ForeignKey(Team, related_name="second_team")
    # Goals scored by team1 / team2 respectively.
    goal1 = models.IntegerField(default=0)
    goal2 = models.IntegerField(default=0)
    # Kick-off date and time.
    date = models.DateTimeField()

    def __unicode__(self):
        return self.team1.name + " Vs " + self.team2.name
class Player(models.Model):
    """A squad member; the goal tally is denormalised onto the player row."""
    name = models.CharField(max_length=200)
    # Running goal count (individual goal events live in Goal).
    goals = models.IntegerField(default=0)
    team = models.ForeignKey(Team)
    # Portrait photo, stored under MEDIA_ROOT/images.
    image = models.FileField(upload_to='images')
    designation = models.ForeignKey(Designation)
    is_captain = models.BooleanField(default=False)

    def __unicode__(self):
        # Prefix captains so they stand out in string representations.
        if self.is_captain:
            return '[Captain] '+ self.name
        else:
            return self.name
class Goal(models.Model):
    """A single goal event: scorer, assist, time, and the fixture it occurred in."""
    player = models.ForeignKey(Player, related_name="goaling_player")
    # Match-clock time of the goal.
    time = models.TimeField()
    assist = models.ForeignKey(Player, related_name="goal_assist_player")
    fixture = models.ForeignKey(Fixture)
    # Team credited with the goal.
    team = models.ForeignKey(Team)

    def __unicode__(self):
        return self.player.name + " @ " + str(self.fixture)
<file_sep>from models import Team
def get_teams(request):
teams = Team.objects.all()
return {'teams_all':teams}
<file_sep>from django.shortcuts import render
from django.http import Http404
from models import Fixture, Player, Team
from datetime import datetime, timedelta
# Create your views here.
def fixtures(request):
    """Render the full fixture list."""
    context = {"fix": Fixture.objects.all()}
    return render(request, 'qpl/fixture.html', context)
def index(request):
    """Render the static landing page with no context.

    The stats lookups below were commented out.
    NOTE(review): presumably because they raise IndexError on an empty
    database (the [0] lookups) — indx() still performs them; confirm.
    """
    # highscorer = Player.objects.filter().order_by('-goals')[0]
    # last_game = Fixture.objects.filter(date__lt = datetime.now())[::-1][0]
    # context = {
    # "top": highscorer,
    # "last_game":last_game,
    # "difference":abs(last_game.goal1 - last_game.goal2)
    # }
    return render(request, 'qpl/index.html', {})
def indx(request):
    """Render the stats landing page: top scorer plus the last played game."""
    # Player with the most goals (raises IndexError if no players exist).
    highscorer = Player.objects.filter().order_by('-goals')[0]
    # Last completed fixture.
    # NOTE(review): [::-1][0] takes the final row in the queryset's default
    # ordering, not necessarily the latest by date — .order_by('-date') is
    # probably what was meant; negative-step slicing also forces the whole
    # queryset to be evaluated. Confirm against Django's slicing rules.
    last_game = Fixture.objects.filter(date__lt = datetime.now())[::-1][0]
    context = {
        "top": highscorer,
        "last_game":last_game,
        "difference":abs(last_game.goal1 - last_game.goal2)
    }
    return render(request, 'qpl/indx.html', context)
def details(request, name):
    """Render a player's detail card; ``name`` is the player id from the URL."""
    try:
        # Broad except intentionally collapses both bad ids (ValueError) and
        # missing rows (DoesNotExist) into a 404, exactly as before.
        player = Player.objects.get(id=name)
        return render(request, 'qpl/details.html', {"player": player})
    except Exception:
        raise Http404("Player Does not exist ")
def teams(request, name):
    """Render a team roster; ``name`` is the team id from the URL."""
    try:
        squad = Team.objects.get(id=name)
        roster = Player.objects.filter(team=squad)
        return render(request, 'qpl/teams.html', {'team': squad, 'players': roster})
    except Exception:
        # Bad ids and missing teams both become a 404, as in the original.
        raise Http404("Team Not Found")
#class Standing:
#
# def calculate_points(self):
# self.points = games_won * 1
#
# def __init__(self,**kwargs):
# for key in kwargs:
# self.
# self.team = team
# self.games_played = GP
# self.games_won = GW
# self.games_defeated = GD
# self.goals_attained = GA
# self.goals_lost = GL
# calculate_points()
#
def standings(request):
    """Build and render the league table.

    Points are 1 per win; draws earn nothing.
    NOTE(review): confirm draws really score 0 in this league — the original
    awarded win-only points too, so that behaviour is preserved here.
    """
    def _apply_result(row, scored, conceded):
        # Fold one fixture into a team's running totals.
        # BUG FIX: the original skipped goals-for/against on drawn games,
        # which corrupted every drawn team's goal difference; they are now
        # counted for every played game.
        row['GP'] += 1
        row['GA'] += scored
        row['GL'] += conceded
        if scored > conceded:
            row['GW'] += 1
        elif scored < conceded:
            row['GD'] += 1

    try:
        stands = []
        all_fixtures = Fixture.objects.all()
        for team in Team.objects.all():
            # GP games played, GW won, GD lost, GA goals for, GL goals against.
            row = {'team': team, 'GP': 0, 'GW': 0, 'GD': 0, 'GA': 0, 'GL': 0}
            for fixture in all_fixtures:
                if team == fixture.team1:
                    _apply_result(row, fixture.goal1, fixture.goal2)
                elif team == fixture.team2:
                    _apply_result(row, fixture.goal2, fixture.goal1)
            row['P'] = row['GW'] * 1          # 1 point per win
            row['diff'] = row['GA'] - row['GL']  # goal difference
            stands.append(row)
        # Rank by points, then goal difference, descending.
        sorted_stands = sorted(stands, key=lambda k: (k['P'], k['diff']), reverse=True)
        context = {
            'standings_list': sorted_stands,
        }
        return render(request, 'qpl/standings.html', context)
    except Exception as e:
        raise Http404("Standings error" + str(e))
|
b1afcd273b1777f4a90fdd07d3fbb6216a83539d
|
[
"JavaScript",
"Python"
] | 6
|
JavaScript
|
thusharprakash/QPL
|
90bad4f9ffdc710623c861451074b2c2f5859b3e
|
0296f2a248daa55a1c1833b7cbcb46d280789f9b
|
refs/heads/master
|
<repo_name>AiryShift/seppuku<file_sep>/SudoSolver.py
import unittest
from copy import deepcopy
class SudokuBoard(object):
    """Simulates a 9x9 sudoku board and solves it by backtracking.

    The board is a list of 9 rows, each a list of 9 ints; a 0 marks an
    undetermined (empty) cell.
    """
    BOARD_SIZE = 9
    BOARD_INDEX = BOARD_SIZE - 1
    POSSIBLE_VALUES = range(1, BOARD_SIZE + 1)
    UNDETERMINED_VALUE = 0

    def __init__(self, board):
        # board: 9x9 grid of ints; UNDETERMINED_VALUE (0) marks blanks.
        self.board = board

    def solve(self):
        """Solve the puzzle and return the solved grid.

        The board is solved in place by recurse(), then self.board is
        restored to the original puzzle via the swap below, so solve()
        has no lasting effect on self.board.
        """
        tempBoard = deepcopy(self.board)
        self.recurse(0, 0)
        # After the swap, self.board holds the original puzzle again and
        # tempBoard holds the solved grid.
        self.board, tempBoard = deepcopy(tempBoard), deepcopy(self.board)
        return tempBoard

    def recurse(self, y, x):
        """Backtracking step for cell (y, x).

        Returns True once the final cell (BOARD_INDEX, BOARD_INDEX) has
        been filled consistently, False if no candidate works.
        """
        if (y == self.BOARD_INDEX and x == self.BOARD_INDEX):
            # Last cell: try every value directly.
            # NOTE(review): a pre-filled last cell is overwritten too; for
            # a well-posed puzzle the only valid value is the given one.
            for candidate in self.POSSIBLE_VALUES:
                self.board[y][x] = candidate
                if self.is_valid(y, x):
                    return True
                self.board[y][x] = self.UNDETERMINED_VALUE
            return False

        nextX = self.next_coord(x)
        nextY = y
        if nextX == 0:  # wrapped past the last column: step down a row
            nextY = self.next_coord(y)

        # Renamed from `exit`, which shadowed the builtin of that name.
        found = False
        if self.board[y][x] == self.UNDETERMINED_VALUE:
            i = 0
            while (i < self.BOARD_SIZE and not found):
                self.board[y][x] = self.POSSIBLE_VALUES[i]
                if self.is_valid(y, x):
                    found = self.recurse(nextY, nextX)
                i += 1
            if not found:
                # Undo this cell so the caller can try its next candidate.
                self.board[y][x] = self.UNDETERMINED_VALUE
        else:
            # Cell was given in the puzzle; skip ahead.
            found = self.recurse(nextY, nextX)
        return found

    def is_valid(self, y, x):
        """Return True if cell (y, x) conflicts with no row/column/box peer."""
        row = (i for i in self.board[y])
        col = (self.board[i][x] for i in range(self.BOARD_SIZE))
        box = (self.board[i][j]
               for i in self.find_box(y) for j in self.find_box(x))
        for generator in (row, col, box):
            if not self.all_unique(generator):
                return False
        return True

    @staticmethod
    def find_box(coord):
        """Return the range of indices of the 3x3 box containing coord."""
        box = coord // 3
        if box == 0:
            return range(0, 3)
        elif box == 1:
            return range(3, 6)
        elif box == 2:
            return range(6, 9)

    @classmethod
    def all_unique(cls, generator):
        """Return True if no non-zero value occurs twice in the generator."""
        occurences = set()
        for value in generator:
            if value != cls.UNDETERMINED_VALUE:
                if value in occurences:
                    return False
                occurences.add(value)
        return True

    @classmethod
    def next_coord(cls, coord):
        """Advance a coordinate by one, wrapping from 8 back to 0."""
        return (coord + 1) % cls.BOARD_SIZE
class TestBoard(unittest.TestCase):
    """Unit tests for SudokuBoard (defined in this file)."""

    def setUp(self):
        self.maxDiff = None
        # An invalid board: several columns repeat a value, used to
        # exercise is_valid() on both failing and passing cells.
        self.board = SudokuBoard([
            [0, 1, 2, 3, 4, 5, 6, 7, 8],
            [0, 0, 0, 0, 0, 0, 0, 0, 9],
            [0, 0, 0, 0, 0, 0, 0, 0, 7],
            [0, 0, 0, 0, 0, 0, 0, 0, 6],
            [0, 0, 0, 0, 0, 0, 0, 0, 5],
            [0, 0, 0, 0, 0, 0, 0, 0, 4],
            [0, 0, 0, 0, 0, 0, 1, 0, 3],
            [0, 0, 0, 0, 0, 0, 0, 0, 2],
            [0, 0, 0, 0, 0, 0, 0, 0, 1]])
        # A well-posed puzzle with a unique solution (asserted in
        # test_solve below).
        self.solveable_1 = SudokuBoard([
            [1, 0, 0, 0, 0, 7, 0, 0, 0],
            [0, 0, 0, 0, 2, 0, 0, 0, 5],
            [0, 6, 9, 0, 0, 0, 0, 0, 2],
            [0, 0, 3, 0, 0, 0, 2, 0, 0],
            [4, 9, 8, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 7, 8, 0, 0, 0],
            [7, 0, 0, 0, 4, 0, 6, 0, 0],
            [0, 0, 6, 3, 0, 0, 9, 0, 0],
            [0, 5, 0, 0, 0, 0, 8, 0, 0]])

    def test_is_valid(self):
        # Cells whose row/column/box contains a duplicate are invalid.
        self.assertFalse(self.board.is_valid(0, 8))
        self.assertFalse(self.board.is_valid(7, 7))
        self.assertFalse(self.board.is_valid(8, 8))
        self.assertTrue(self.board.is_valid(5, 8))

    def test_next_coord(self):
        # Coordinates advance by one and wrap from 8 back to 0.
        self.assertEqual(6, SudokuBoard.next_coord(5))
        self.assertEqual(0, SudokuBoard.next_coord(8))

    def test_all_unique(self):
        # Zeros (undetermined cells) are ignored when checking uniqueness.
        self.assertTrue(SudokuBoard.all_unique(i for i in [
            1, 4, 3, 2, 9, 0, 0]))
        self.assertFalse(SudokuBoard.all_unique(i for i in [
            0, 0, 0, 1, 1]))
        self.assertTrue(SudokuBoard.all_unique(i for i in [
            1, 2, 3, 0, 0, 0]))

    def test_solve(self):
        self.assertEqual(self.solveable_1.solve(), [
            [1, 2, 4, 5, 8, 7, 3, 6, 9],
            [3, 8, 7, 9, 2, 6, 1, 4, 5],
            [5, 6, 9, 1, 3, 4, 7, 8, 2],
            [6, 7, 3, 4, 9, 5, 2, 1, 8],
            [4, 9, 8, 2, 1, 3, 5, 7, 6],
            [2, 1, 5, 6, 7, 8, 4, 9, 3],
            [7, 3, 2, 8, 4, 9, 6, 5, 1],
            [8, 4, 6, 3, 5, 1, 9, 2, 7],
            [9, 5, 1, 7, 6, 2, 8, 3, 4]])

if __name__ == '__main__':
    unittest.main(verbosity=2)
    # NOTE(review): unittest.main() raises SystemExit by default, so this
    # print is normally never reached (pass exit=False to reach it).
    print('All tests passed! You the best!')
<file_sep>/sudoku.py
from SudoSolver import SudokuBoard
from time import time

# Command-line driver: reads an unsolved puzzle from sudoin.txt (one row
# per line, digits only, 0 = blank), solves it, and writes the solution
# to sudoout.txt while echoing both grids to stdout.
now = time()
print('Opening the file...')
print('Found this sudoku inside...\n')
with open('sudoin.txt') as f:
    board = []
    for i in f:
        print(i.strip())
        # Each line becomes a row of ints; assumes exactly 9 digit
        # characters per line — TODO confirm input is validated upstream.
        board.append([int(num) for num in i.strip()])
print('\nSolving the sudoku...\n')
with open('sudoout.txt', 'w') as f:
    for i in SudokuBoard(board).solve():
        # Print each solved row to the console and to the output file.
        print(''.join([str(num) for num in i]))
        print(''.join([str(num) for num in i]), file=f)
print('\nDone!')
print('Took {} seconds long.'.format(time() - now))
<file_sep>/README.md
# seppuku
Solves Seppuku|Commits Sudoku
To run, run `sudoku.py` with a file `sudoin.txt` in the same directory. `sudoin.txt` should contain the layout of an unsolved Sudoku puzzle with `0`s as all the undetermined digits.
|
e036789a402bba3e04117a2e3e87168ebcebdcde
|
[
"Markdown",
"Python"
] | 3
|
Python
|
AiryShift/seppuku
|
b6682e406d68621319434f705000606d595a55d7
|
6c6b0c4bdaef7a4c4f9e0f97c149d6327a5a161e
|
refs/heads/master
|
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using hotkeys;
namespace QuickLaunch {
    /// <summary>
    /// Settings dialog: shows the current toggle hotkey and lets the user
    /// change its second key (a single letter A-Z). The modifier part is
    /// read from the persisted Settings1 values.
    /// </summary>
    public partial class Form2 : Form {
        public Form2() {
            InitializeComponent();
        }

        private void Form2_Load(object sender, EventArgs e) {
            fillData();
        }

        // Populates the textboxes from the persisted hotkey settings.
        private void fillData() {
            string temp;
            switch (Settings1.Default.hotKey1) {
                case 1:
                    temp = "ALT";
                    break;
                case 2:
                    temp = "CTRL";
                    break;
                default:
                    temp = "SHFT";
                    break;
            }
            textBox1.Text = temp + " + " + Settings1.Default.hotKey2;
            //textBox2.Text = temp;
            textBox3.Text = Settings1.Default.hotKey2.ToString();
        }

        // Validates the entered key, persists it, and restarts the app so the
        // new hotkey registration takes effect.
        private void SaveButton_Click(object sender, EventArgs e) {
            textBox3.Text = textBox3.Text.ToUpper();
            // BUG FIX: validate the length BEFORE indexing Text[0]. The
            // original read Text[0] first, which threw
            // IndexOutOfRangeException when the textbox was empty.
            if (textBox3.Text.Length != 1) {
                MessageBox.Show("Please enter one character A-Z for the second hotkey");
                textBox3.Text = "";
                return;
            }
            int key2Ascii = (int)textBox3.Text[0];
            if (key2Ascii < 'A' || key2Ascii > 'Z') {
                MessageBox.Show("Please enter one character A-Z for the second hotkey");
                textBox3.Text = "";
            } else {
                Settings1.Default.hotKey2 = textBox3.Text[0];
                Settings1.Default.Save();
                Application.Restart();
            }
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Collections;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.Diagnostics;
using System.IO;
using System.Xml.Serialization;
using System.Runtime.InteropServices;
using System.Drawing.Imaging;
using hotkeys;
using CSWinFormLayeredWindow;
using System.Drawing.Drawing2D;
using System.Xml;
namespace QuickLaunch
{
    // Main launcher window: a transparency-keyed, hotkey-toggled form that
    // shows one button per saved application shortcut and starts the
    // application on left-click (right-click removes the shortcut).
    public partial class Form1 : Form {
        [DllImport("user32.dll")]
        public static extern bool RegisterHotKey(IntPtr hWnd, int id, int fsModifiers, int vlc);

        [DllImport("user32.dll")]
        public static extern bool UnregisterHotKey(IntPtr hWnd, int id);

        // One launcher button per saved shortcut, in display order.
        List<Button> buttons = new List<Button>();
        //string saveFile = "hey.xml";
        //string saveFile = "C:\\Users\\Scott\\Desktop\\ql.txt";
        string savePath = "C:\\Program Files (x86)\\QuickLaunch\\SaveData"; // path of folder
        string pathway = "C:\\Program Files (x86)\\QuickLaunch\\SaveData\\ql.txt";

        public Form1()
        {
            InitializeComponent();
            //this.SelectBitmap(Properties.Resources.transparentBackground);
            // Register the global show/hide hotkey (id 123) from settings.
            var HotKeyManager = new HotkeyManager(this);
            RegisterHotKey(HotKeyManager.Handle, 123, Settings1.Default.hotKey1 /*Constants.ALT*/ /*+ Constants.SHIFT*/, (int)Settings1.Default.hotKey2 /*(int)Keys.S*/);
            //RegisterHotKey(HotKeyManager.Handle, 234, Constants.ALT + Constants.SHIFT, (int)Keys.O);
        }

        private void Form1_Load(object sender, EventArgs e)
        {
            this.ShowInTaskbar = false;
            // uncomment to have hidden at the start
            //this.WindowState = FormWindowState.Minimized;
            //this.ShowInTaskbar = false;
            //this.TransparencyKey = this.BackColor;
            // Red is used as the transparency key, so the form background
            // becomes click-through/invisible.
            this.BackColor = Color.Red;
            this.TransparencyKey = Color.Red;
            this.CenterToScreen();
            //DirectoryInfo di = Directory.CreateDirectory(savePath);
            handleLoading();
            initializeButtons();
        }

        // Styles the two fixed buttons (browse + settings) with resized,
        // alpha-cleaned icons and a flat transparent look.
        private void initializeButtons() {
            Image testImage = ResizeImage(Properties.Resources.browseIcon, new Size(90, 90));
            UnSemi((Bitmap)testImage);
            testImage = fixAlpha(testImage);
            button1.Image = testImage;
            button1.FlatAppearance.BorderSize = 0;
            button1.BackColor = Color.FromArgb(0, 255, 255, 255);
            button1.FlatStyle = FlatStyle.Flat;
            button1.FlatAppearance.MouseOverBackColor = Color.FromKnownColor(KnownColor.Control);

            testImage = ResizeImage(Properties.Resources.settings, new Size(65, 65));
            UnSemi((Bitmap)testImage);
            testImage = fixAlpha(testImage);
            settingsButton.Image = testImage;
            settingsButton.FlatAppearance.BorderSize = 0;
            settingsButton.BackColor = Color.FromArgb(0, 255, 255, 255);
            settingsButton.FlatStyle = FlatStyle.Flat;
            settingsButton.FlatAppearance.MouseOverBackColor = Color.FromKnownColor(KnownColor.Control);
        }

        // Thresholds the alpha channel of bmp in place: pixels with alpha
        // below 100 become fully transparent, all others fully opaque.
        // Assumes a 32-bit-per-pixel format (4 bytes/pixel) — TODO confirm.
        public static void UnSemi(Bitmap bmp) {
            Size s = bmp.Size;
            PixelFormat fmt = bmp.PixelFormat;
            Rectangle rect = new Rectangle(Point.Empty, s);
            BitmapData bmpData = bmp.LockBits(rect, ImageLockMode.ReadOnly, fmt);
            int size1 = bmpData.Stride * bmpData.Height;
            byte[] data = new byte[size1];
            System.Runtime.InteropServices.Marshal.Copy(bmpData.Scan0, data, 0, size1);
            for (int y = 0; y < s.Height; y++) {
                for (int x = 0; x < s.Width; x++) {
                    int index = y * bmpData.Stride + x * 4;
                    // alpha, threshold = 255
                    data[index + 3] = (data[index + 3] < 100) ? (byte)0 : (byte)255;
                }
            }
            System.Runtime.InteropServices.Marshal.Copy(data, 0, bmpData.Scan0, data.Length);
            bmp.UnlockBits(bmpData);
        }

        // Persists the shortcut list ('?'-separated file paths) into
        // application settings.
        // NOTE(review): the fileName parameter is ignored — data is saved
        // via Settings1, not to the given file (see commented-out code).
        public void Save(string fileName) {
            Settings1.Default.Save_Data = "";
            foreach (Button b in buttons) {
                Settings1.Default.Save_Data += "?" + b.Tag.ToString();
            }
            Settings1.Default.Save();
            /*using (StreamWriter sw = File.CreateText(fileName)) {
                foreach (Button b in buttons) {
                    sw.WriteLine(b.Tag.ToString());
                }
            }*/
        }

        // Reads and returns the full contents of fileName (legacy loader;
        // current loading goes through Settings1 in handleLoading).
        public static string loadFromFile(string fileName) {
            string hold = "";
            StreamReader sr = File.OpenText(fileName);
            hold = sr.ReadToEnd();
            sr.Close();
            return hold;
        }

        // Rebuilds the launcher buttons from the persisted '?'-separated
        // path list.
        public void handleLoading() {
            string tagPaths = Settings1.Default.Save_Data;
            string[] pathLists = tagPaths.Split('?');
            foreach (string path in pathLists) {
                // NOTE(review): strings are immutable — this Replace result
                // is discarded, so the call is a no-op as written.
                path.Replace(" ", "");
                if (path != "") {
                    makeNewButton(path);
                }
            }
        }

        /*private void Button1_Click(object sender, EventArgs e)
        {
            OpenFileDialog of = new OpenFileDialog();
            of.Filter = "Exe Files (.exe)|*.exe|All Files (*.*)|*.*";
            if (of.ShowDialog() == DialogResult.OK)
            {
                makeNewButton(of.FileName);
            }
        }*/

        // Scales image to fit within size, optionally preserving aspect
        // ratio, using high-quality bicubic interpolation.
        public static Image ResizeImage(Image image, Size size, bool preserveAspectRatio = true) {
            int newWidth;
            int newHeight;
            if (preserveAspectRatio) {
                int originalWidth = image.Width;
                int originalHeight = image.Height;
                float percentWidth = (float)size.Width / (float)originalWidth;
                float percentHeight = (float)size.Height / (float)originalHeight;
                float percent = percentHeight < percentWidth ? percentHeight : percentWidth;
                newWidth = (int)(originalWidth * percent);
                newHeight = (int)(originalHeight * percent);
            } else {
                newWidth = size.Width;
                newHeight = size.Height;
            }
            Image newImage = new Bitmap(newWidth, newHeight);
            using (Graphics graphicsHandle = Graphics.FromImage(newImage)) {
                graphicsHandle.InterpolationMode = InterpolationMode.HighQualityBicubic;
                graphicsHandle.DrawImage(image, 0, 0, newWidth, newHeight);
            }
            return newImage;
        }

        // Replaces opaque pixels of the WinForms default control color
        // (240,240,240) with a neighboring pixel's color (or white on the
        // first row/column) to hide button-face artifacts.
        // NOTE(review): GetPixel/SetPixel take (x, y), but i ranges over
        // Height and j over Width — this only works because the icons used
        // here are square; a non-square image would throw out-of-range.
        public Bitmap fixAlpha(Image image) {
            Size s = image.Size;
            Bitmap img = new Bitmap(image);
            // find those dumbass pixels
            /*Color pixel = img.GetPixel(57, 49);
            textBox1.Text = "transparency: " + pixel.A + " A: " + pixel.ToString();
            img.SetPixel(57, 49, Color.Fuchsia);*/
            for(int i = 0; i < s.Height; i++) {
                for (int j = 0; j < s.Width; j++) {
                    Color pixel2 = img.GetPixel(i, j);
                    if (pixel2.A == 255 && pixel2.R == 240 && pixel2.G == 240 && pixel2.B == 240) {
                        if (i > 0) {
                            Color tempPixel = img.GetPixel(i - 1, j);
                            int newR = tempPixel.R;
                            int newG = tempPixel.G;
                            int newB = tempPixel.B;
                            img.SetPixel(i, j, Color.FromArgb(newR, newG, newB));
                        } else {
                            img.SetPixel(i, j, Color.White);
                        }
                    }
                }
            }
            //MessageBox.Show(count + "");
            return img;
        }

        // Flattens a transparent bitmap onto a solid background color,
        // returning a 24bpp copy with no alpha channel.
        public static System.Drawing.Bitmap ReplaceTransparency(System.Drawing.Bitmap bitmap, System.Drawing.Color background) {
            /* Important: you have to set the PixelFormat to remove the alpha channel.
             * Otherwise you'll still have a transparent image - just without transparent areas */
            var result = new System.Drawing.Bitmap(bitmap.Size.Width, bitmap.Size.Height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            var g = System.Drawing.Graphics.FromImage(result);

            g.Clear(background);
            g.CompositingMode = System.Drawing.Drawing2D.CompositingMode.SourceOver;
            g.DrawImage(bitmap, 0, 0);
            return result;
        }

        // Creates a launcher button for filePath (icon extracted from the
        // file), positions it in a 6-per-row grid, wires its events, and
        // persists the updated shortcut list.
        private void makeNewButton(string filePath) {
            Icon fileIcon = Icon.ExtractAssociatedIcon(filePath);
            int buttonsArrayLength = buttons.Count;
            int prevX;
            int prevY;
            int Bwidth = 150;
            int Bheight = 75;
            int offsetX;
            int offsetY = 0;
            int maxPerRow = 6;
            int row = buttonsArrayLength / maxPerRow;
            // old way
            /*
            if (buttonsArrayLength == 0) {
                prevX = 10;
                prevY = 10;
                offsetX = 0;
                offsetY = 0;
            } else {
                prevX = buttons[buttonsArrayLength - 1].Location.X;
                prevY = buttons[buttonsArrayLength - 1].Location.Y;
                offsetX = Bwidth;
            }*/
            if (buttonsArrayLength == 0) { // the first button
                prevX = 10;
                prevY = 10;
                offsetX = 0;
            } else if (buttonsArrayLength % maxPerRow != 0) { // the first 6 buttons in a row
                prevX = buttons[buttonsArrayLength - 1].Location.X;
                prevY = buttons[buttonsArrayLength - 1].Location.Y;
                offsetX = Bwidth;
            } else { // the first button in a new row
                offsetX = 0;
                offsetY += Bheight;
                prevX = buttons[buttonsArrayLength - maxPerRow].Location.X;
                prevY = buttons[buttonsArrayLength - maxPerRow].Location.Y;
            }
            Button newButton = new Button();
            this.Controls.Add(newButton);
            newButton.Visible = true;
            //newButton.BackColor = Color.DodgerBlue;
            newButton.Width = Bwidth;
            newButton.Height = Bheight;
            newButton.Location = new Point(prevX + offsetX, prevY + offsetY);
            newButton.FlatAppearance.BorderSize = 0;
            newButton.BackColor = Color.FromArgb(0, 255, 255, 255);
            newButton.FlatStyle = FlatStyle.Flat;
            newButton.FlatAppearance.MouseOverBackColor = Color.FromKnownColor(KnownColor.Control);
            Image myImage = ResizeImage(fileIcon.ToBitmap(), new Size(75, 75));
            UnSemi((Bitmap)myImage);
            Bitmap b = new Bitmap(myImage);
            //myImage = ReplaceTransparency(b, Color.Fuchsia);
            myImage = fixAlpha(myImage);
            newButton.Image = myImage;
            // The launched file path travels with the button in its Tag.
            newButton.Tag = filePath;
            newButton.MouseDown += new MouseEventHandler(clickedButton);
            newButton.MouseEnter += onMouseEnter;
            newButton.MouseLeave += onMouseExit;
            buttons.Add(newButton);
            Save(pathway);
        }

        // Hover-in effect: enlarge the button and show a highlight border.
        private void onMouseEnter(object sender, EventArgs e) {
            (sender as Button).Size = new Size(170, 85);
            (sender as Button).FlatAppearance.BorderSize = 1;
            (sender as Button).FlatAppearance.BorderColor = Color.FromKnownColor(KnownColor.MenuHighlight);
        }

        // Hover-out effect: restore the normal size and hide the border.
        private void onMouseExit(object sender, EventArgs e) {
            (sender as Button).Size = new Size(150, 75);
            (sender as Button).FlatAppearance.BorderSize = 0;
        }

        // Left-click launches the application stored in the button's Tag;
        // any other mouse button removes the shortcut and re-lays-out the
        // remaining buttons.
        private void clickedButton(object sender, System.Windows.Forms.MouseEventArgs e) {
            if (e.Button == MouseButtons.Left)
            {
                Process.Start((sender as Button).Tag.ToString());
            }
            else {
                (sender as Button).Visible = false;
                buttons.Remove((sender as Button));
                Save(pathway);
                if (buttons.Count != 0) {
                    updateButtons();
                }
            }
        }

        // updates the button positions
        // Re-flows all launcher buttons into the 6-per-row grid after a
        // removal.
        private void updateButtons() {
            int prevX = 10;
            int prevY = 10;
            int Bwidth = 150; // width of button
            int bHeigt = 75;
            int offsetX = 0;
            int offsetY = 0;
            int count = 1;
            int maxPerRow = 6;
            foreach (Button b in buttons) {
                if (count % (maxPerRow + 1) == 0) {
                    // Start a new row below the previous one.
                    offsetY += bHeigt;
                    offsetX = 10;
                    prevX = 0;
                    count = 1;
                }
                count++;
                b.Location = new Point(prevX + offsetX, prevY + offsetY);
                prevX = b.Location.X;
                prevY = b.Location.Y;
                offsetX = Bwidth;
                offsetY = 0;
            }
        }

        // Restores the window when the tray icon is double-clicked.
        private void NotifyIcon1_MouseDoubleClick(object sender, MouseEventArgs e)
        {
            this.WindowState = FormWindowState.Normal;
            this.Visible = true;
        }

        // Browse button: pick an executable and add it as a new shortcut.
        private void Button1_Click(object sender, EventArgs e) {
            OpenFileDialog of = new OpenFileDialog();
            of.Filter = "Exe Files (.exe)|*.exe|All Files (*.*)|*.*";
            if (of.ShowDialog() == DialogResult.OK) {
                makeNewButton(of.FileName);
            }
        }

        private void Button1_MouseEnter(object sender, EventArgs e) {
            button1.FlatAppearance.BorderSize = 1;
            button1.FlatAppearance.BorderColor = Color.FromKnownColor(KnownColor.MenuHighlight);
        }

        private void Button1_MouseLeave(object sender, EventArgs e) {
            button1.FlatAppearance.BorderSize = 0;
        }

        // Opens the settings dialog (Form2).
        private void SettingsButton_Click(object sender, EventArgs e) {
            Form2 settingsForm = new Form2();
            settingsForm.Show();
        }

        private void SettingsButton_MouseEnter(object sender, EventArgs e) {
            settingsButton.FlatAppearance.BorderSize = 1;
            settingsButton.FlatAppearance.BorderColor = Color.FromKnownColor(KnownColor.MenuHighlight);
        }

        private void SettingsButton_MouseLeave(object sender, EventArgs e) {
            settingsButton.FlatAppearance.BorderSize = 0;
        }
    }
}
<file_sep>using System.Windows.Forms;
using System.ComponentModel;
using System;
namespace hotkeys {
    //This class is not required but makes managing the modifiers easier.
    // Win32 hotkey modifier flags and the WM_HOTKEY message id.
    public static class Constants {
        public const int NOMOD = 0x0000;
        public const int ALT = 0x0001;
        public const int CTRL = 0x0002;
        public const int SHIFT = 0x0004;
        public const int WIN = 0x0008;
        public const int WM_HOTKEY_MSG_ID = 0x0312;
    }

    // Message-only window that receives WM_HOTKEY messages and toggles the
    // main form's visibility when hotkey id 123 fires.
    public sealed class HotkeyManager : NativeWindow, IDisposable {
        private Form mainForm;

        public HotkeyManager(Form theMainForm) {
            // Create a handle so RegisterHotKey can target this window.
            CreateHandle(new CreateParams());
            mainForm = theMainForm;
        }

        protected override void WndProc(ref Message m) {
            if (m.Msg == Constants.WM_HOTKEY_MSG_ID) {
                // WParam carries the id passed to RegisterHotKey.
                if (m.WParam.ToInt32() == 123) {
                    //MessageBox.Show("HotKey ID: 123 has been pressed");
                    //mainForm.WindowState = FormWindowState.Normal;
                    // Toggle: hide/minimize when visible, restore otherwise.
                    if (mainForm.Visible) {
                        mainForm.Visible = false;
                        mainForm.WindowState = FormWindowState.Minimized;
                    } else {
                        mainForm.WindowState = FormWindowState.Normal;
                        mainForm.Visible = true;
                    }
                }
                if (m.WParam.ToInt32() == 234) {
                    MessageBox.Show("HotKey ID: 234 has been pressed");
                }
            }
            base.WndProc(ref m);
        }

        public void Dispose() {
            DestroyHandle();
        }
    }
}
|
9f740dc0c535e1a9c1b5fc3b4c8ed29caf4d3f1c
|
[
"C#"
] | 3
|
C#
|
ScottDFerguson/QuickLaunch
|
db0c781cb5794592ebdffbcb9c4ef6041d6fbd2c
|
1ac61e3042ddec034ef804a0e7be13c92f534ec1
|
refs/heads/master
|
<file_sep># Bootstrap OS X
Here follow the random ramblings of a mad man
## Install homebrew
```
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
```
## Setup Git
Setup SSH keys, and then clone this repo
## Install binaries
```
./brew.sh
```
## Install NVM
https://github.com/creationix/nvm
Install the latest Node.js version with nvm, then restart all open bash sessions so the updated environment is picked up.
## Run bootstrap script
This will symlink all the important files to your ~
```
./bootstrap.sh
```
## Install vim plugins
Run `PlugInstall!` in Vim
## Colors and Typography
- Color Scheme: base16-yesterday.dark.256.terminal
- Font: Input Mono Light, 14 pt
<file_sep>#!/usr/bin/env bash
# Install command-line tools using Homebrew.

# Make sure we’re using the latest Homebrew.
brew update

# Upgrade any already-installed formulae.
brew upgrade

# Install GNU core utilities (those that come with macOS are outdated).
# Don’t forget to add `$(brew --prefix coreutils)/libexec/gnubin` to `$PATH`.
brew install coreutils

# Install some other useful utilities like `sponge`.
brew install moreutils
# Install GNU `find`, `locate`, `updatedb`, and `xargs`, `g`-prefixed.
brew install findutils
# Install GNU `sed`, overwriting the built-in `sed`.
brew install gnu-sed
# Install Bash 4.
# Note: don’t forget to add `/usr/local/bin/bash` to `/etc/shells` before
# running `chsh`.
brew install bash
brew install bash-completion2

# Switch to using brew-installed bash as default shell
# (grep -F = fixed-string match, the modern spelling of fgrep).
if ! grep -qF '/usr/local/bin/bash' /etc/shells; then
  echo '/usr/local/bin/bash' | sudo tee -a /etc/shells;
  chsh -s /usr/local/bin/bash;
fi;

# Install more recent versions of some macOS tools.
brew install wget
brew install vim
brew install grep
brew install openssh
brew install screen

# Essentials
brew install fzf
brew install tmux
$(brew --prefix)/opt/fzf/install
brew install the_silver_searcher

# Remove outdated versions from the cellar.
brew cleanup

# for copy/paste integration with terminal/tmux
# https://github.com/tmux/tmux/issues/543
brew install reattach-to-user-namespace

# Enabled preview of JSON files in OSX preview
# FIX: `brew cask install` was removed in Homebrew 2.6; the supported
# syntax is `brew install --cask`.
brew install --cask quicklook-json
<file_sep># Get some colors in here
# Get some colors in here
export CLICOLOR=1
export LSCOLORS=ExFxBxDxCxegedabagacad

# Shortcuts
alias ls='ls -GFh'
alias gs='git status'
alias gp='git push'
alias gb='git branch'
alias gpo='git push -u origin HEAD'
alias vimrc='vim ~/.vimrc'
alias bashrc='vim ~/.bash_profile'
alias tmuxrc='vim ~/.tmux.conf'
alias vim="vi"

# FZF
[ -f ~/.fzf.bash ] && source ~/.fzf.bash

# Git
source ~/.git-completion.bash
source ~/.git-prompt.sh
export GIT_PS1_SHOWUNTRACKEDFILES=true
export GIT_PS1_SHOWCOLORHINTS=true
export GIT_PS1_SHOWDIRTYSTATE=true
# NOTE(review): the second PROMPT_COMMAND assignment overwrites the first,
# so only the `__git_ps1 "\W" " "` prompt is actually in effect.
export PROMPT_COMMAND='\W$(__git_ps1 " (%s)") \$ '
export PROMPT_COMMAND='__git_ps1 "\W" " "'

# NVM
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"  # This loads nvm
[ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion"  # This loads nvm bash_completion

# Set global NPM path
export PATH=~/.npm-global/bin:$PATH
|
19cf617e6a6dfdf504159fbd37035c3d0c846080
|
[
"Markdown",
"Shell"
] | 3
|
Markdown
|
danielstocks/dotfiles
|
9ca4ba0e602d6934718cb5cd875702408578ac16
|
3cda85433c9be828bdbe6632a505fc9887993e22
|
refs/heads/master
|
<file_sep>__author__ = 'alan'
import fileinput
from app.ccacc.account import Account
def main():
    """
    Process all command line arguments. Print out the balances of all accounts after processing.
    """
    # Maps account name -> Account; fileinput reads each line from the
    # files named on the command line (or stdin when none are given).
    accdict = {}
    for line in fileinput.input():
        process_input(line, accdict)
    fileinput.close()
    sort_print_accounts(accdict)
def process_input(input, accdict):
    """
    Process one command line ("Add NAME CCNUM LIMIT", "Charge NAME AMT",
    or "Credit NAME AMT") into the account dictionary.

    Bug fixed vs. the original: a Charge/Credit for an account that was
    never added left ``acc`` as None and then crashed on ``acc.name``
    (AttributeError).  Such commands are now ignored, matching the
    best-effort style of the rest of the function.

    :param input: one whitespace-separated command line:
    :param accdict: a dictionary containing all active accounts with the account name as a key.
    :return an updated accdict:
    """
    accinfo = input.split()
    if accinfo[1] in accdict:
        acc = accdict[accinfo[1]]
    else:
        acc = None
    if not acc and accinfo[0] == 'Add':
        acc = Account(name=accinfo[1], ccnum=accinfo[2], cclimit=accinfo[3])
    if acc and accinfo[0] == 'Charge':
        acc.charge_acc(accinfo[2])
    if acc and accinfo[0] == 'Credit':
        acc.credit_acc(accinfo[2])
    # Guard: acc stays None when the first command for a name is not
    # 'Add'; previously this line raised AttributeError in that case.
    if acc is not None:
        accdict[acc.name] = acc
    return accdict
def sort_print_accounts(accdict):
    # Print "NAME: $BALANCE" for every account, alphabetically by name.
    # Accounts whose card number failed Luhn validation (ccnum is False)
    # print "error" instead of a balance.
    # NOTE(review): Python 2 only — uses iterkeys() and the print statement.
    alphasort = sorted([k for k in accdict.iterkeys()])
    for name in alphasort:
        print accdict[name].name + ':', '$' + str(accdict[name].balance) if accdict[name].ccnum else "error"

if __name__ == '__main__':
    main()
<file_sep>__author__ = 'alan'
import unittest
import runapp
from app.ccacc.account import Account
class TestRunApp(unittest.TestCase):
    """End-to-end tests for runapp.process_input against Account."""

    def test_process_input_create_acc(self):
        # 'Add' creates a new Account keyed by name.
        # NOTE(review): `dict` shadows the builtin name; harmless here.
        dict = {}
        dict = runapp.process_input('Add Tom 4111111111111111 $1000', dict)
        self.assertEqual(dict['Tom'].name, 'Tom')
        self.assertEqual(dict['Tom'].ccnum, 4111111111111111)
        self.assertEqual(dict['Tom'].cclimit, 1000)

    def test_process_input_charge_acc(self):
        # 'Charge' on an existing account raises its balance.
        dict = {}
        acc = Account(name='Tom', ccnum='4111111111111111', cclimit='$1000')
        dict[acc.name] = acc
        dict = runapp.process_input('Charge Tom $500',dict)
        self.assertEqual(dict[acc.name].name, 'Tom')
        self.assertEqual(dict[acc.name].ccnum, 4111111111111111)
        self.assertEqual(dict[acc.name].cclimit, 1000)
        self.assertEqual(dict[acc.name].balance, 500)

    def test_process_input_credit_acc(self):
        # 'Credit' lowers the balance (800 charged - 500 credited = 300).
        # NOTE(review): `accarry` is assigned but unused; the assertions
        # rely on process_input mutating `dict` in place.
        dict = {}
        acc = Account(name='Tom', ccnum='4111111111111111', cclimit='$1000')
        acc.charge_acc('$800')
        dict[acc.name] = acc
        accarry = runapp.process_input('Credit Tom $500',dict)
        self.assertEqual(dict[acc.name].name, 'Tom')
        self.assertEqual(dict[acc.name].ccnum, 4111111111111111)
        self.assertEqual(dict[acc.name].cclimit, 1000)
        self.assertEqual(dict[acc.name].balance, 300)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestRunApp)
unittest.TextTestRunner(verbosity=2).run(suite)<file_sep>__author__ = 'alan'
import unittest
from app.ccacc.account import Account
class TestCCProcModule(unittest.TestCase):
    """Unit tests for the Account class (Luhn validation, charge/credit)."""

    def test_create_acc(self):
        # Construction alone should not raise, even without assertions.
        acc = Account(name='Alan', ccnum='946546', cclimit='$3000')

    def test_cln_amt(self):
        # _cln_amt strips non-digits ('$') and returns an int.
        acc = Account(name='Alan', ccnum='5454545454545454',cclimit='$3000')
        self.assertEqual(acc._cln_amt('$10000'), 10000)

    def test_update_cclimit(self):
        acc = Account(name='Alan', ccnum='5454545454545454',cclimit='$300')
        acc.cclimit = '$850'
        self.assertEqual(850, acc.cclimit)

    def test_verify_acc_cc_num_change_valid(self):
        acc = Account(name='Alan', ccnum='5454545454545454',cclimit='$3000')
        acc.ccnum = '4111111111111111'
        self.assertEqual(4111111111111111, acc.ccnum)

    def test_verify_acc_cc_num_change_invalid(self):
        # A Luhn-invalid number makes the ccnum property False.
        acc = Account(name='Alan', ccnum='5454545454545454',cclimit=3000)
        acc.ccnum = '4111111111111112'
        self.assertEqual(False, acc.ccnum)

    def test_charge_acc_accepted(self):
        # Charging exactly up to the limit is accepted.
        acc = Account(name='Alan', ccnum='4111111111111111', cclimit='$3000')
        self.assertTrue(acc.charge_acc('$3000'))

    def test_charge_acc_declined(self):
        acc = Account(name='Alan', ccnum=4111111111111111,cclimit='$3000')
        self.assertFalse(acc.charge_acc('$3500'))

    def test_charge_acc_success_then_decline(self):
        acc = Account(name='Alan', ccnum='4111111111111111',cclimit='$1000')
        self.assertTrue(acc.charge_acc('$300'))
        self.assertFalse(acc.charge_acc('$2701'))

    def test_credit_acc_above_zero(self):
        chrgamt = '$2589'
        crdamt = '$50'
        acc = Account(name='Alan', ccnum='4111111111111111',cclimit='$3000')
        acc.charge_acc(chrgamt)
        acc.credit_acc(crdamt)
        self.assertEqual(acc.balance, 2539)

    def test_credit_acc_below_zero(self):
        # Credits may push the balance negative by design.
        chrgamt = '$2589'
        crdamt = '$2600'
        acc = Account(name='Alan', ccnum='4111111111111111',cclimit='$3000')
        acc.charge_acc(chrgamt)
        acc.credit_acc(crdamt)
        self.assertEqual(acc.balance, -11)

    def test_verify_acc_cc_num(self):
        acc = Account(name='Alan', ccnum='5454545454545454',cclimit='$3000')
        self.assertEqual(5454545454545454, acc.ccnum)

    def test_verify_acc_cc_num_error(self):
        acc = Account(name='Alan', ccnum='1234567890123456',cclimit='$3000')
        self.assertEqual(False, acc.ccnum)

    def test_charge_acc_decline_invalid_cc_num(self):
        # Operations on an invalid card number are always declined.
        acc = Account(name='Alan', ccnum='1234567890123456',cclimit='$3000')
        self.assertFalse(acc.charge_acc('$59'))

    def test_credit_acc_decline_invalid_cc_num(self):
        acc = Account(name='Alan', ccnum='1234567890123456',cclimit='$3000')
        self.assertFalse(acc.credit_acc('$59'))
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestCCProcModule)
unittest.TextTestRunner(verbosity=2).run(suite)<file_sep>__author__ = 'alan'
import re
class Account(object):
    """A single credit-card account.

    The card number is validated with the Luhn check-digit algorithm on
    assignment; an invalid number stores False, and every subsequent
    charge/credit on the account is declined.  Monetary amounts may be
    given as '$1000'-style strings — all non-digit characters are
    stripped before conversion.
    """

    def __init__(self, name, ccnum, cclimit):
        self.name = name
        self._ccnum = self._ver_cc_num(ccnum)
        self._cclimit = self._cln_amt(cclimit)
        # Running balance; starts at zero and may go negative via credits.
        self.balance = 0

    @property
    def ccnum(self):
        return self._ccnum

    @ccnum.setter
    def ccnum(self, ccnum):
        # Re-validate on every assignment; invalid numbers store False.
        self._ccnum = self._ver_cc_num(ccnum)

    @property
    def cclimit(self):
        return self._cclimit

    @cclimit.setter
    def cclimit(self, cclimit):
        self._cclimit = self._cln_amt(cclimit)

    def charge_acc(self, chrgamt):
        """Charge the account.

        :param chrgamt: amount to charge ('$500'-style string or int):
        :return True when accepted; False when the card number is invalid
                or the charge would push the balance over the limit:
        """
        if not self.ccnum:
            return False
        amount = self._cln_amt(chrgamt)
        if self.balance + amount > self.cclimit:
            return False
        self.balance += amount
        return True

    def credit_acc(self, crdamt):
        """Credit the account (no floor — the balance may go negative).

        :param crdamt: amount to credit ('$500'-style string or int):
        :return True when applied; False when the card number is invalid:
        """
        if not self.ccnum:
            return False
        self.balance -= self._cln_amt(crdamt)
        return True

    def _ver_cc_num(self, ccnum):
        """Validate ccnum with the Luhn 10 check-digit algorithm.

        :param ccnum: digit string or int:
        :return the number as an int when valid, otherwise False:
        """
        total = 0
        for position, digit in enumerate(int(ch) for ch in reversed(str(ccnum))):
            if position % 2:
                # Every second digit from the right is doubled; digits of
                # the product are summed (divmod gives tens + units).
                total += sum(divmod(digit * 2, 10))
            else:
                total += digit
        if total % 10:
            return False
        return int(ccnum)

    def _cln_amt(self, amt):
        """Strip every non-digit character and return the amount as an int.

        :param amt: the raw amount (e.g. '$1,000'):
        :return amt as an integer without any symbols:
        """
        digits = re.sub(r'[^\d]', '', str(amt))
        return int(digits)
|
ffcc33d6262e36538ba6172faf28f999fe6d6164
|
[
"Python"
] | 4
|
Python
|
rad08d/credit_card_process
|
b01791b18d217853b87ab2c996dfb57ee7bb4e1e
|
ac290a73aa51e0c9a9e89d14b837fbe58798aa6d
|
refs/heads/master
|
<file_sep>//
// Created by chenhong on 2017/3/5.
//
|
8863f72bf662aa0ad529be6d8e6cad327ac47c43
|
[
"C++"
] | 1
|
C++
|
chlss/testgithub
|
dc2e9be296f872af10269e650973d37a89276a42
|
3477713f78b6be2e44df022785a1970f8b48d622
|
refs/heads/master
|
<repo_name>devkwon97/Algorithm<file_sep>/src/baekjoon_string/Baekjoon10809.java
package baekjoon_string;
//알파벳 소문자로만 이루어진 단어 S가 주어진다. 각각의 알파벳에 대해서, 단어에 포함되어 있는 경우에는 처음 등장하는 위치를,
// 포함되어 있지 않은 경우에는 -1을 출력하는 프로그램을 작성하시오.
//첫째 줄에 단어 S가 주어진다. 단어의 길이는 100을 넘지 않으며, 알파벳 소문자로만 이루어져 있다.
import java.io.*;
public class Baekjoon10809 {
    // Reads a lowercase word from stdin and, for each letter a-z, prints the
    // index of its first occurrence in the word (or -1 when absent),
    // separated by single spaces.
    public static void main(String[] args)throws IOException {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(System.out));

        String input = br.readLine();
        // ASCII codes: 'a' = 97 .. 'z' = 122, 26 letters in total.
        // String.indexOf(int ch) returns the position of the first match,
        // or -1 when the word does not contain the letter.
        for(int i='a'; i<='z'; i++){
            bw.write(input.indexOf(i) + " ");
        }
        bw.newLine();
        br.close();
        bw.flush();
        bw.close();
    }
}
<file_sep>/src/baekjoon_function/Baekjoon1065.java
package baekjoon_function;
//어떤 양의 정수 X의 각 자리가 등차수열을 이룬다면, 그 수를 한수라고 한다.
// 등차수열은 연속된 두 개의 수의 차이가 일정한 수열을 말한다. N이 주어졌을 때, 1보다 크거나 같고, N보다 작거나 같은 한수의 개수를 출력하는 프로그램을 작성하시오.
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
public class Baekjoon1065 {
    /** Reads N from stdin and prints the count of "han" numbers in [1, N]. */
    public static void main(String[] args)throws IOException {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int num = Integer.parseInt(br.readLine());
        System.out.print(arithmetic_sequence(num));
    }

    /**
     * Counts the "han" numbers in [1, num]: numbers whose decimal digits
     * form an arithmetic sequence. Every 1- and 2-digit number qualifies
     * trivially; three-digit numbers qualify when the digit differences
     * (hundreds-tens and tens-ones) are equal.
     */
    public static int arithmetic_sequence(int num){
        if (num < 100) {
            return num;  // all numbers below 100 are han numbers
        }
        int count = 99;  // the 99 one- and two-digit han numbers
        for (int value = 100; value <= num; value++) {
            int ones = value % 10;
            int tens = (value / 10) % 10;
            int hundreds = value / 100;
            if (hundreds - tens == tens - ones) {
                count++;
            }
        }
        return count;
    }
}
<file_sep>/src/baekjoon_recursion/Beakjoon2447.java
package baekjoon_recursion;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
public class Beakjoon2447 {
    // The N x N canvas of '*' and ' ' built recursively by star().
    static char[][] arr;

    // Reads N (a power of 3) from stdin and prints the Sierpinski-carpet
    // style star pattern of size N x N.
    public static void main(String[] args) throws IOException {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(System.out));
        int input = Integer.parseInt(br.readLine());

        arr = new char[input][input];
        star(0, 0, input, false);

        for(int i=0; i<input; i++){
            bw.write(arr[i]);
            bw.write("\n");
        }

        bw.flush();
        bw.close();
    }

    // Fills the size `input` square whose top-left corner is (x, y).
    // When `blank` is true the whole square is spaces; otherwise it is
    // split into a 3x3 grid of sub-squares with a blank center.
    static void star(int x, int y, int input, boolean blank){
        // Blank region: fill with spaces and stop recursing.
        if(blank){
            for(int i=x; i<x+input; i++){
                for(int j=y; j<y+input; j++){
                    arr[i][j] = ' ';
                }
            }
            return;
        }

        // 1x1 block that cannot be subdivided further: draw a star.
        if(input == 1){
            arr[x][y] = '*';
            return;
        }

        int size = input / 3;
        int count = 0;
        for(int i=x; i<x+input; i+=size){
            for(int j=y; j<y+input; j+=size){
                count++;
                // The 5th of the nine sub-squares is the blank center.
                if(count == 5){
                    star(i, j, size, true);
                }else{
                    star(i, j, size, false);
                }
            }
        }
    }
}
<file_sep>/src/baekjoon_string/Baekjoon11654.java
package baekjoon_string;
//알파벳 소문자, 대문자, 숫자 0-9중 하나가 주어졌을 때, 주어진 글자의 아스키 코드값을 출력하는 프로그램을 작성하시오.
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
public class Baekjoon11654 {
    // Reads a single character (letter or digit) from stdin and prints its
    // ASCII code: charAt(0) yields a char, which widens to int on
    // assignment, so printing `input` shows the numeric code.
    public static void main(String[] args)throws IOException{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int input = br.readLine().charAt(0);
        System.out.print(input);
    }
}
<file_sep>/README.md
## :orange_book: 알고리즘 공부
```
백준 알고리즘 문제풀이 공간~
```<file_sep>/src/baekjoon_for/Baekjoon2439.java
package baekjoon_for;
//첫째 줄에는 별 1개, 둘째 줄에는 별 2개, N번째 줄에는 별 N개를 찍는 문제
//
//하지만, 오른쪽을 기준으로 정렬한 별(예제 참고)을 출력하시오. 오른쪽부터 채우는별
import java.io.*;
public class Baekjoon2439 {
    /**
     * Prints N lines of right-aligned stars: line i (1-based) contains
     * (N - i) leading spaces followed by i stars.
     */
    public static void main(String[] args) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(System.out));

        int n = Integer.parseInt(reader.readLine());
        StringBuilder line = new StringBuilder();
        for (int i = 1; i <= n; i++) {
            line.setLength(0); // reuse one builder per line
            for (int pad = n - i; pad > 0; pad--) {
                line.append(' ');
            }
            for (int star = 0; star < i; star++) {
                line.append('*');
            }
            writer.write(line.toString());
            writer.newLine();
        }
        reader.close();
        writer.flush();
        writer.close();
    }
}
<file_sep>/src/baekjoon_recursion/Baekjoon10870.java
package baekjoon_recursion;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
public class Baekjoon10870 {
    /** Reads n from stdin and prints the n-th Fibonacci number. */
    public static void main(String[] args) throws IOException {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int input = Integer.parseInt(br.readLine());
        System.out.println(fibo(input));
    }

    /**
     * Returns the num-th Fibonacci number (fibo(0) = 0, fibo(1) = 1).
     *
     * Iterative O(num) replacement for the original naive double recursion,
     * which re-evaluated subproblems exponentially many times. Results are
     * identical for all num >= 0.
     */
    public static int fibo(int num){
        if (num == 0) {
            return 0;
        }
        int prev = 0; // fibo(i - 1)
        int curr = 1; // fibo(i), starting at i = 1
        for (int i = 2; i <= num; i++) {
            int next = prev + curr;
            prev = curr;
            curr = next;
        }
        return curr;
    }
}
package baekjoon_function;
//정수 n개가 주어졌을 때, n개의 합을 구하는 함수를 작성하시오.
import java.io.IOException;
public class Baekjoon15596 {
    /**
     * Returns the sum of every value in {@code a}.
     * The result is a long so large inputs cannot overflow an int.
     */
    public static long add(int[] a){
        long total = 0;
        for (int value : a) {
            total += value;
        }
        return total;
    }

    /** Small demo: sums a fixed array and prints the result. */
    public static void main(String[] args) throws IOException {
        int[] num = {50, 20, 30};
        long num1 = add(num);
        System.out.println(num1);
    }
}
<file_sep>/src/baekjoon_binarysearch/Baekjoon1920.java
package baekjoon_binarysearch;
// 백준 1920 수 찾기
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Arrays;
public class Baekjoon1920 {
    /**
     * Baekjoon 1920 ("find numbers"): reads N integers, then M query
     * integers, and prints 1 per line if the query exists among the N
     * values, otherwise 0.
     *
     * Improvements over the original: the hand-rolled binary search is
     * replaced with {@link Arrays#binarySearch(int[], int)} (Arrays was
     * already imported for sort), and per-query println calls are replaced
     * with a single buffered write.
     */
    public static void main(String[] args) throws IOException {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));

        int n = Integer.parseInt(br.readLine());
        int[] values = parseInts(br.readLine(), n);
        Arrays.sort(values); // binary search requires ascending order

        int m = Integer.parseInt(br.readLine());
        int[] queries = parseInts(br.readLine(), m);

        StringBuilder out = new StringBuilder();
        for (int query : queries) {
            // binarySearch returns a non-negative index iff the key is present.
            out.append(Arrays.binarySearch(values, query) >= 0 ? 1 : 0).append('\n');
        }
        System.out.print(out);
    }

    /** Parses the first {@code count} space-separated integers from {@code line}. */
    private static int[] parseInts(String line, int count) {
        String[] tokens = line.split(" ");
        int[] result = new int[count];
        for (int i = 0; i < count; i++) {
            result[i] = Integer.parseInt(tokens[i]);
        }
        return result;
    }
}
<file_sep>/src/baekjoon_string/Baekjoon1152.java
package baekjoon_string;
//영어 대소문자와 띄어쓰기만으로 이루어진 문자열이 주어진다. 이 문자열에는 몇 개의 단어가 있을까?
// 이를 구하는 프로그램을 작성하시오. 단, 한 단어가 여러 번 등장하면 등장한 횟수만큼 모두 세어야 한다.
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
public class Baekjoon1152 {
    /**
     * Counts the space-separated words in one line of input and prints the
     * count; repeated words are counted every time they appear.
     */
    public static void main(String[] args) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        // trim() first so leading/trailing spaces do not create empty tokens
        // at the ends; interior runs of spaces can still yield "" tokens.
        String[] tokens = reader.readLine().trim().split(" ");
        int words = 0;
        for (String token : tokens) {
            if (!token.isEmpty()) {
                words++;
            }
        }
        System.out.println(words);
    }
}
package baekjoon_for;
//본격적으로 for문 문제를 풀기 전에 주의해야 할 점이 있다. 입출력 방식이 느리면 여러 줄을 입력받거나 출력할 때 시간초과가 날 수 있다는 점이다.
//
//C++을 사용하고 있고 cin/cout을 사용하고자 한다면, cin.tie(NULL)과 sync_with_stdio(false)를 둘 다 적용해 주고, endl 대신 개행문자(\n)를 쓰자. 단, 이렇게 하면 더 이상 scanf/printf/puts/getchar/putchar 등 C의 입출력 방식을 사용하면 안 된다.
//
//Java를 사용하고 있다면, Scanner와 System.out.println 대신 BufferedReader와 BufferedWriter를 사용할 수 있다. BufferedWriter.flush는 맨 마지막에 한 번만 하면 된다.
//
//Python을 사용하고 있다면, input 대신 sys.stdin.readline을 사용할 수 있다. 단, 이때는 맨 끝의 개행문자까지 같이 입력받기 때문에 문자열을 저장하고 싶을 경우 .rstrip()을 추가로 해 주는 것이 좋다.
//
//또한 입력과 출력 스트림은 별개이므로, 테스트케이스를 전부 입력받아서 저장한 뒤 전부 출력할 필요는 없다. 테스트케이스를 하나 받은 뒤 하나 출력해도 된다.
import java.io.*;
import java.util.StringTokenizer;
public class Baekjoon15552 {
    /**
     * Fast-I/O A+B: reads T test cases, each a line holding two integers,
     * and prints their sums one per line. Uses BufferedReader/BufferedWriter
     * with a single final flush, as the problem statement recommends.
     */
    public static void main(String[] args) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(System.out));

        int cases = Integer.parseInt(reader.readLine());
        for (int t = 0; t < cases; t++) {
            StringTokenizer tokens = new StringTokenizer(reader.readLine(), " ");
            int sum = Integer.parseInt(tokens.nextToken()) + Integer.parseInt(tokens.nextToken());
            writer.write(sum + "\n");
        }
        reader.close();
        writer.flush(); // one flush at the end keeps output fast
        writer.close();
    }
}
<file_sep>/src/baekjoon_array/Baekjoon1546.java
package baekjoon_array;
//세준이는 기말고사를 망쳤다. 세준이는 점수를 조작해서 집에 가져가기로 했다. 일단 세준이는 자기 점수 중에 최댓값을 골랐다. 이 값을 M이라고 한다. 그리고 나서 모든 점수를 점수/M*100으로 고쳤다.
//
//예를 들어, 세준이의 최고점이 70이고, 수학점수가 50이었으면 수학점수는 50/70*100이 되어 71.43점이 된다.
//
//세준이의 성적을 위의 방법대로 새로 계산했을 때, 새로운 평균을 구하는 프로그램을 작성하시오.
//첫째 줄에 시험 본 과목의 개수 N이 주어진다. 이 값은 1000보다 작거나 같다. 둘째 줄에 세준이의 현재 성적이 주어진다.
// 이 값은 100보다 작거나 같은 음이 아닌 정수이고, 적어도 하나의 값은 0보다 크다.
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
public class Baekjoon1546 {
    /**
     * Baekjoon 1546 (grade manipulation): rescales every score to
     * score / max * 100 and prints the average of the rescaled scores.
     */
    public static void main(String[] args) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));

        int count = Integer.parseInt(reader.readLine());
        double[] scores = new double[count];
        StringTokenizer tokens = new StringTokenizer(reader.readLine(), " ");

        double max = 0;
        for (int i = 0; i < count; i++) {
            scores[i] = Double.parseDouble(tokens.nextToken());
            max = Math.max(max, scores[i]);
        }

        double total = 0;
        for (double score : scores) {
            total += (score / max) * 100;
        }
        System.out.print(total / count);
    }
}
|
5e192fcd457470b081dc98b82751709a1d5b34a3
|
[
"Markdown",
"Java"
] | 12
|
Java
|
devkwon97/Algorithm
|
cc72ea2c8d08891c8766f3395053dbf227fe66f3
|
e84744ddaa325d78c77128c96ef1209593686f8c
|
refs/heads/master
|
<file_sep># tool_performance_report
extract and daily update data automatically for tool performance dashboard and relevant report
<file_sep>--Upgraded Tool performance dashboard to CLV2.0
CREATE MULTISET VOLATILE TABLE impression AS
(SELECT
COALESCE(ams_prgrm_id, -999) AS ams_prgrm_id
,IMPRSN_DT
,COALESCE(pblshr_id, -999) AS pblshr_id
,ams_tool_id
,SUM(CASE WHEN trfc_src_cd IN (1, 2, 3) THEN 1 ELSE 0 END) AS impr_mobile
,COUNT(IMPRSN_CNTNR_ID) AS impr_all
FROM prs_ams_v.AMS_IMPRSN_CNTNR
WHERE IMPRSN_DT between '2016-01-01' and current_date - 2
AND ams_trans_rsn_cd = 0
GROUP BY 1, 2, 3, 4
HAVING impr_all > 0
) WITH DATA PRIMARY INDEX (IMPRSN_DT, pblshr_id, ams_prgrm_id)
ON COMMIT PRESERVE ROWS;
--sel min(click_dt) from click
--Click
--- DROP TABLE click;
CREATE MULTISET VOLATILE TABLE click AS
(SELECT
COALESCE(ams_prgrm_id, -999) AS ams_prgrm_id
,click_dt
,COALESCE(pblshr_id, -999) AS pblshr_id
,ams_tool_id
,SUM(CASE WHEN trfc_src_cd IN (1, 2, 3) THEN 1 ELSE 0 END) AS click_mobile
,COUNT(1) AS click_all
FROM prs_ams_v.ams_click
WHERE click_dt between '2016-01-01' and current_date - 2
AND ams_trans_rsn_cd = 0
GROUP BY 1, 2, 3, 4
HAVING click_all > 0
) WITH DATA PRIMARY INDEX (click_dt, pblshr_id, ams_prgrm_id)
ON COMMIT PRESERVE ROWS;
COLLECT STATISTICS COLUMN (AMS_PRGRM_ID ,CLICK_DT ,PBLSHR_ID,AMS_TOOL_ID) ON click;
COLLECT STATISTICS COLUMN (AMS_PRGRM_ID ,IMPRSN_DT ,PBLSHR_ID,AMS_TOOL_ID) ON impression;
--dROP TABLE impr_click;
CREATE MULTISET volatile TABLE impr_click as (
sel COALESCE(b.IMPRSN_DT,a.CLICK_dt) AS cal_dt
,COALESCE(b.ams_prgrm_id,a.ams_prgrm_id) AS ams_prgrm_id
,COALESCE(b.pblshr_id,a.pblshr_id) AS pblshr_id
,COALESCE(b.AMS_TOOL_ID,A.AMS_TOOL_ID) AS AMS_TOOL_ID
,COALESCE(a.click_mobile, 0) AS click_mobile
,COALESCE(a.click_all, 0) AS click_all
,COALESCE(b.impr_mobile, 0) AS impr_mobile
,COALESCE(b.impr_all, 0) AS impr_all
From click a
full join impression b on a.CLICK_dt = b.IMPRSN_DT and a. ams_prgrm_id = b.ams_prgrm_id and a.pblshr_id = b.pblshr_id and COALESCE(a.ams_tool_id, '(no value)') = COALESCE(b.ams_tool_id, '(no value)')
)WITH DATA PRIMARY INDEX(cal_dt
,ams_prgrm_id
,pblshr_id
,ams_tool_id) ON COMMIT PRESERVE ROWS
;
--Transection
CREATE MULTISET VOLATILE TABLE click_table AS
(
SEL
CAST(CLICK_TS AS DATE) AS CLICK_dt,
click_id,
AMS_PRGRM_ID,
ams_tool_id,
pblshr_id
FROM PRS_AMS_V.AMS_CLICK a
WHERE 1=1
---AND AMS_TRANS_RSN_CD=0
AND CLICK_dt between '2015-01-01' and current_date - 2
) WITH DATA PRIMARY INDEX (CLICK_dt, pblshr_id, ams_prgrm_id,ams_tool_id)
ON COMMIT PRESERVE ROWS;
--DROP TABLE TRANS;
CREATE MULTISET VOLATILE TABLE TRANS AS
(SELECT
fam.CK_TRANS_DT AS ck_trans_dt
,fam.ams_prgrm_id
,fam.EPN_PBLSHR_ID
,c.ams_tool_id
-- ,COALESCE(c.ams_tool_id, '(no value)') as ams_tool_id
,SUM(CASE WHEN clv_dup_ind =0 and DEVICE_TYPE_ID IN (1) THEN coalesce(GMB_PLAN_RATE_AMT,0) ELSE 0 END) AS GMB_24HR_desktop -- GMB
,SUM(CASE WHEN clv_dup_ind =0 and DEVICE_TYPE_ID IN (2,3) THEN coalesce(GMB_PLAN_RATE_AMT,0) ELSE 0 END) AS GMB_BBOWAC_mobile -- GMB
,SUM(CASE WHEN clv_dup_ind =0 and DEVICE_TYPE_ID NOT IN (1,2,3) THEN coalesce(GMB_PLAN_RATE_AMT,0) ELSE 0 END) AS GMB_other_device -- new_added
,SUM(CASE WHEN DEVICE_TYPE_ID IN (1) THEN coalesce(fam.IGMB_PLAN_RATE_AMT,0)ELSE 0 END) AS fam2_IGMB_desktop
,SUM(CASE WHEN DEVICE_TYPE_ID IN (2,3) THEN coalesce(fam.IGMB_PLAN_RATE_AMT,0)ELSE 0 END) AS iGMB_BBOWAC_mobile
,SUM(CASE WHEN DEVICE_TYPE_ID NOT IN (1,2,3) THEN coalesce(fam.IGMB_PLAN_RATE_AMT,0)ELSE 0 END) AS iGMB_other_device -- new_added
,count(distinct case when DEVICE_TYPE_ID IN (1) THEN CK_TRANS_ID||ITEM_ID end) AS fam2_trx_desktop
,count(distinct case when DEVICE_TYPE_ID IN (2,3) THEN CK_TRANS_ID||ITEM_ID end) AS fam3_trx_mobile
,count(distinct case when DEVICE_TYPE_ID NOT IN (1,2,3) THEN CK_TRANS_ID||ITEM_ID end) AS fam3_trx_other_device -- new_added
,SUM(CASE WHEN fam.CLV_BUYER_TYPE_CD IN (1,2) and DEVICE_TYPE_ID IN (1) THEN 1 ELSE 0 END) AS fam2_norb_desktop
,SUM(CASE WHEN fam.CLV_BUYER_TYPE_CD IN (1,2) and DEVICE_TYPE_ID IN (2,3) THEN 1 ELSE 0 END) AS fam3_norb_mobile
,SUM(CASE WHEN fam.CLV_BUYER_TYPE_CD IN (1,2) and DEVICE_TYPE_ID NOT IN (1,2,3) THEN 1 ELSE 0 END) AS fam3_norb_other_device --new_added
--,SUM(CASE WHEN fam.CLV_BUYER_TYPE_CD IN (1,2) THEN INCR_FCTR ELSE 0 END ) AS INORB
FROM PRS_RESTRICTED_V.MH_IM_CORE_FAM2_FACT AS fam
LEFT OUTER JOIN click_table AS c
ON fam.RVR_ID = c.click_id
WHERE fam.MPX_CHNL_ID = 6
AND fam.CK_TRANS_DT between '2016-01-01' and current_date - 2
--AND fam.client_id = fam.client_id_global ---- added to sync with FAM3, excluding GBH/Geox from reporting.
AND fam.EPN_PBLSHR_ID <> -999
--and ams_tool_id =11006
GROUP BY 1,2,3,4
) WITH DATA PRIMARY INDEX (ck_trans_dt, ams_prgrm_id,ams_tool_id)
ON COMMIT PRESERVE ROWS;
------ Spend
CREATE MULTISET volatile TABLE MPX_spend_2 as (
select
TRANS_DT,
AMS_PRGRM_ID,
ams_tool_id,
AMS_PBLSHR_ID,
--CASE WHEN a.trfc_src_cd <> 0 THEN 'Mobile' ELSE 'Desktop' END as DEVICE ,
sum( CASE WHEN a.trfc_src_cd <> 0 THEN COALESCE(ERNG_USD,0.00) else 0 end) as Spend_Mobile,
sum( CASE WHEN a.trfc_src_cd = 0 THEN COALESCE(ERNG_USD,0.00) else 0 end) as Spend_Desktop,
sum(COALESCE(ERNG_USD,0.00)) as Spend
FROM prs_ams_v.AMS_PBLSHR_ERNG a
where
TRANS_DT between '2016-01-01' and current_date - 2
group by 1,2,3,4
) WITH DATA PRIMARY INDEX(TRANS_DT
,AMS_PRGRM_ID
,ams_tool_id,AMS_PBLSHR_ID) on commit preserve rows;
COLLECT STATISTICS COLUMN (AMS_PRGRM_ID ,CLICK_DT ,PBLSHR_ID,AMS_TOOL_ID) ON click;
COLLECT STATISTICS COLUMN (AMS_PRGRM_ID ,IMPRSN_DT ,PBLSHR_ID,AMS_TOOL_ID) ON impression;
COLLECT STATISTICS COLUMN (CK_TRANS_DT ,AMS_PRGRM_ID ,EPN_PBLSHR_ID) ON TRANS;
COLLECT STATISTICS COLUMN (AMS_TOOL_ID,AMS_PRGRM_ID,AMS_PBLSHR_ID) ON MPX_spend_2;
CREATE MULTISET volatile TABLE dtl_pb_tool as (
sel
COALESCE(a.cal_dt,fam2.ck_trans_dt,b.TRANS_DT) as cal_dt
,COALESCE(a.ams_prgrm_id, fam2. ams_prgrm_id, b.ams_prgrm_id) as ams_prgrm_id
,COALESCE(a.pblshr_id,fam2.EPN_PBLSHR_ID,b.AMS_PBLSHR_ID) as pblshr_id
,COALESCE(a.AMS_TOOL_ID,fam2.ams_tool_id,b.ams_tool_id) as AMS_TOOL_ID
,COALESCE(a.click_mobile, 0) AS click_mobile
,COALESCE(a.click_all, 0) AS click_all
,COALESCE(a.impr_mobile, 0) AS impr_mobile
,COALESCE(a.impr_all, 0) AS impr_all
,COALESCE(fam2.GMB_24HR_desktop,0.00) AS GMB_24HR_desktop
,0 AS GMB_24HR_all
,COALESCE(fam2.fam2_IGMB_desktop, 0.00) AS fam2_iGMB_desktop
,0 AS fam2_iGMB_all
,COALESCE(fam2.fam2_trx_desktop, 0) AS fam2_trx_desktop
,0 AS fam2_trx_all
,COALESCE(fam2.fam2_norb_desktop, 0) AS fam2_norb_desktop
,0 AS fam2_norb_all
,COALESCE(fam2.GMB_BBOWAC_mobile, 0.00) AS GMB_BBOWAC_mobile
,0 AS GMB_BBOWAC_all
,COALESCE(fam2.iGMB_BBOWAC_mobile, 0.00) AS iGMB_BBOWAC_mobile
,0 AS iGMB_BBOWAC_all
,COALESCE(fam2.fam3_trx_mobile, 0) AS fam3_trx_mobile
,0 AS fam3_trx_all
,COALESCE(fam2.fam3_norb_mobile, 0) AS fam3_norb_mobile
,0 AS fam3_norb_all
,COALESCE(fam2.GMB_other_device, 0.00) AS GMB_other_device -- new_added
,COALESCE(fam2.iGMB_other_device, 0.00) AS iGMB_other_device -- new_added
,COALESCE(fam2.fam3_trx_other_device, 0) AS fam3_trx_other_device -- new_added
,COALESCE(fam2.fam3_norb_other_device, 0) AS fam3_norb_other_device -- new_added
,COALESCE(b.Spend_Mobile, 0) AS Spend_Mobile
,COALESCE(b.Spend_Desktop, 0) AS Spend_Desktop
,COALESCE(b.Spend, 0) AS Spend_All
From impr_click a
full join TRANS fam2 on fam2.ck_trans_dt = a.cal_dt and fam2. ams_prgrm_id = a.ams_prgrm_id and a.pblshr_id = fam2.EPN_PBLSHR_ID and COALESCE(a.ams_tool_id, '(no value)') = COALESCE(fam2.ams_tool_id, '(no value)')
full join MPX_spend_2 b on a.cal_dt = b.TRANS_DT and a. ams_prgrm_id = b.ams_prgrm_id and a.pblshr_id = b.AMS_PBLSHR_ID and COALESCE(a.ams_tool_id, '(no value)') = COALESCE(b.ams_tool_id, '(no value)')
and fam2.ck_trans_dt = b.TRANS_DT and fam2. ams_prgrm_id = b.ams_prgrm_id and b.AMS_PBLSHR_ID = fam2.EPN_PBLSHR_ID and COALESCE(b.ams_tool_id, '(no value)') = COALESCE(fam2.ams_tool_id, '(no value)')
--and cal_dt is not null
)WITH DATA PRIMARY INDEX(cal_dt
,ams_prgrm_id
,pblshr_id
,ams_tool_id) ON COMMIT PRESERVE ROWS;
--sel * from trans where ams_tool_id is null;
--DELETE FROM p_tiansheng_t.tool_performance_clv2
--WHERE cal_dt >= (SEL MIN(cal_dt) FROM dtl_pb_tool);
--show table p_tiansheng_t.tool_performance_clv2
--CREATE multiset TABLE p_cac_epn_t.tool_performance_clv2 AS (*/
--INSERT INTO p_tiansheng_t.tool_performance_clv2
DROP TABLE p_tiansheng_t.tool_performance_clv2;
CREATE MULTISET TABLE p_tiansheng_t.tool_performance_clv2 AS (
SEL
b.cal_dt
,b.ams_prgrm_id
,pg.prgrm_name
,b.pblshr_id
,pb.PBLSHR_CMPNY_NAME
,bm.manual_bm as BM
,bm.manual_sub_bm as Sub_BM
,b.ams_tool_id
,lkp.tool_name
,d.ams_tool_categ_name AS tool_categ_name
,impr_mobile
,impr_all
,click_mobile
,click_all
,GMB_24HR_desktop
,GMB_24HR_all
,fam2_iGMB_desktop
,fam2_iGMB_all
,fam2_trx_desktop
,fam2_trx_all
,fam2_norb_desktop
,fam2_norb_all
,GMB_BBOWAC_mobile
,GMB_BBOWAC_all
,iGMB_BBOWAC_mobile
,iGMB_BBOWAC_all
,fam3_trx_mobile
,fam3_trx_all
,fam3_norb_mobile
,fam3_norb_all
,GMB_other_device --new_added
,iGMB_other_device --new_added
,fam3_trx_other_device --new_added
,fam3_norb_other_device -- new_added
,Spend_Mobile
,Spend_Desktop
,Spend_All
FROM
dtl_pb_tool b
LEFT OUTER JOIN prs_ams_v.AMS_TOOL lkp
ON b.ams_tool_id = lkp.ams_tool_id
LEFT OUTER JOIN prs_ams_v.AMS_TOOL_CATEG d
ON lkp.tool_ctgry_cd = d.ams_tool_categ_cd
LEFT OUTER JOIN prs_ams_v.ams_pblshr pb
ON b.pblshr_id = pb.ams_pblshr_id
LEFT JOIN prs_ams_v.AMS_PRGRM pg
ON b.AMS_PRGRM_ID = pg.AMS_PRGRM_ID
left join App_mrktng_l2_v.new_bm bm
on b.pblshr_id = bm.ams_pblshr_id
--where cal_dt = '2018-03-05'
--) WITH DATA PRIMARY INDEX ( cal_dt , ams_tool_id ) ;
)WITH DATA PRIMARY INDEX ( pblshr_id,cal_dt ,ams_prgrm_id,ams_tool_id ) ;
/*
SELECT
TABLENAME,
SUM(CURRENTPERM) /(1024*1024) AS CURRENTPERM,
(100 - (AVG(CURRENTPERM)/MAX(CURRENTPERM)*100)) AS SKEWFACTOR
FROM
DBC.TABLESIZE
WHERE DATABASENAME= 'p_cac_epn_t'
AND
TABLENAME = 'tool_performance_clv2'
GROUP BY 1;*/
DROP TABLE p_tiansheng_t.tool_performance_summary_clv2;
CREATE MULTISET TABLE p_tiansheng_t.tool_performance_summary_clv2 AS
(SEL
a.retail_wk_end_date
,a.ams_prgrm_id
,a.PRGRM_NAME
,AMS_TOOL_ID
,TOOL_NAME
,tool_categ_name
,a.click_mobile
,a.click_all
,a.impr_mobile
,a.impr_all
,a.GMB_24HR_desktop
,a.fam2_iGMB_desktop
,a.fam2_trx_desktop
,a.fam2_norb_desktop
,a.GMB_BBOWAC_mobile
,a.iGMB_BBOWAC_mobile
,a.fam3_trx_mobile
,a.fam3_norb_mobile
,a.GMB_other_device --new_added
,a.iGMB_other_device --new_added
,a.fam3_trx_other_device --new_added
,a.fam3_norb_other_device --new_added
,a.Spend_Mobile
,a.Spend_Desktop
,b.click_mobile AS tot_click_mobile
,b.click_all AS tot_click_all
,b.impr_mobile AS tot_impr_mobile
,b.impr_all AS tot_impr_all
,b.GMB_24HR_desktop AS tot_GMB_24HR_desktop
,b.fam2_iGMB_desktop AS tot_fam2_iGMB_desktop
,b.fam2_trx_desktop AS tot_fam2_trx_desktop
,b.fam2_norb_desktop AS tot_fam2_norb_desktop
,b.GMB_BBOWAC_mobile AS tot_GMB_BBOWAC_mobile
,b.iGMB_BBOWAC_mobile AS tot_iGMB_BBOWAC_mobile
,b.fam3_trx_mobile AS tot_fam3_trx_mobile
,b.fam3_norb_mobile AS tot_fam3_norb_mobile
,b.GMB_other_device AS tot_GMB_BBOWAC_other_device --new_added
,b.iGMB_other_device AS tot_iGMB_BBOWAC_other_device --new_added
,b.fam3_trx_other_device AS tot_fam3_trx_other_device --new_added
,b.fam3_norb_other_device AS tot_fam3_norb_other_device --new_added
,b.Spend_Mobile AS tot_Spend_Mobile
,b.Spend_Desktop AS tot_Spend_Desktop
FROM
(SEL
retail_wk_end_date
,ams_prgrm_id
,PRGRM_NAME
,AMS_TOOL_ID
,TOOL_NAME
,tool_categ_name
,SUM(impr_mobile) impr_mobile
,SUM(impr_all) impr_all
,SUM(click_mobile) click_mobile
,SUM(click_all) click_all
,SUM(GMB_24HR_desktop) GMB_24HR_desktop
,SUM(fam2_iGMB_desktop) fam2_iGMB_desktop
,SUM(fam2_trx_desktop) fam2_trx_desktop
,SUM(fam2_norb_desktop) fam2_norb_desktop
,SUM(GMB_BBOWAC_mobile) GMB_BBOWAC_mobile
,SUM(iGMB_BBOWAC_mobile) iGMB_BBOWAC_mobile
,SUM(fam3_trx_mobile) fam3_trx_mobile
,SUM(fam3_norb_mobile) fam3_norb_mobile
,SUM(GMB_other_device) GMB_other_device --new_added
,SUM(iGMB_other_device) iGMB_other_device --new_added
,SUM(fam3_trx_other_device) fam3_trx_other_device --new_added
,SUM(fam3_norb_other_device) fam3_norb_other_device --new_added
,SUM(Spend_Mobile) Spend_Mobile
,SUM(Spend_Desktop) Spend_Desktop
FROM p_tiansheng_t.tool_performance_clv2 a
INNER JOIN dw_cal_dt b
ON a.cal_dt = b.cal_dt
GROUP BY 1,2,3,4,5,6
) AS a
INNER JOIN
(SEL
retail_wk_end_date
,ams_prgrm_id
,PRGRM_NAME
,SUM(impr_mobile) impr_mobile
,SUM(impr_all) impr_all
,SUM(click_mobile) click_mobile
,SUM(click_all) click_all
,SUM(GMB_24HR_desktop) GMB_24HR_desktop
,SUM(fam2_iGMB_desktop) fam2_iGMB_desktop
,SUM(fam2_trx_desktop) fam2_trx_desktop
,SUM(fam2_norb_desktop) fam2_norb_desktop
,SUM(GMB_BBOWAC_mobile) GMB_BBOWAC_mobile
,SUM(iGMB_BBOWAC_mobile) iGMB_BBOWAC_mobile
,SUM(fam3_trx_mobile) fam3_trx_mobile
,SUM(fam3_norb_mobile) fam3_norb_mobile
,SUM(GMB_other_device) GMB_other_device --new_added
,SUM(iGMB_other_device) iGMB_other_device --new_added
,SUM(fam3_trx_other_device) fam3_trx_other_device --new_added
,SUM(fam3_norb_other_device) fam3_norb_other_device --new_added
,SUM(Spend_Mobile) Spend_Mobile
,SUM(Spend_Desktop) Spend_Desktop
FROM p_tiansheng_t.tool_performance_clv2 a
INNER JOIN dw_cal_dt b
ON a.cal_dt = b.cal_dt
GROUP BY 1,2,3
) AS b
ON a.retail_wk_end_date = b.retail_wk_end_date
AND a.ams_prgrm_id = b.ams_prgrm_id
) WITH DATA PRIMARY INDEX(retail_wk_end_date, AMS_TOOL_ID)
;
sel * from p_tiansheng_t.tool_performance_clv2 where ams_tool_id =11006;
HELP TABLE p_tiansheng_t.tool_performance_clv2
sel DISTINCT CAL_DT FROM p_tiansheng_t.tool_performance_clv2;
/* ,a.click_mobile*1.0000/NULLIFZERO(b.click_mobile) AS "Click_Mobile %"
,a.click_all*1.0000/NULLIFZERO(b.click_all) AS "Click_All %"
,a.GMB_24HR_desktop*1.0000/NULLIFZERO(b.GMB_24HR_desktop) AS "GMB_Desktop %"
,a.GMB_BBOWAC_mobile*1.0000/NULLIFZERO(b.GMB_BBOWAC_mobile) AS "GMB_Mobile %"
,(COALESCE(a.GMB_24HR_desktop,0)+COALESCE(a.GMB_BBOWAC_mobile,0))
/NULLIFZERO((COALESCE(b.GMB_24HR_desktop,0)+COALESCE(b.GMB_BBOWAC_mobile,0))) AS "GMB_All %"
,a.fam2_iGMB_desktop*1.0000/NULLIFZERO(b.fam2_iGMB_desktop) AS "iGMB_Desktop %"
,a.iGMB_BBOWAC_mobile*1.0000/NULLIFZERO(b.iGMB_BBOWAC_mobile) AS "iGMB_Mobile %"
,(COALESCE(a.fam2_iGMB_desktop,0)+COALESCE(a.iGMB_BBOWAC_mobile,0))
/NULLIFZERO((COALESCE(b.fam2_iGMB_desktop,0)+COALESCE(b.iGMB_BBOWAC_mobile,0))) AS "iGMB_All %"
,a.fam2_trx_desktop*1.0000/NULLIFZERO(b.fam2_trx_desktop) AS "Trans_Desktop %"
,a.fam3_trx_mobile*1.0000/NULLIFZERO(b.fam3_trx_mobile) AS "Trans_Mobile %"
,(COALESCE(a.fam2_trx_desktop,0)+COALESCE(a.fam3_trx_mobile,0))
/NULLIFZERO((COALESCE(b.fam2_trx_desktop,0)+COALESCE(b.fam3_trx_mobile,0))) AS "Trans_All %"
,a.fam2_norb_desktop*1.0000/NULLIFZERO(b.fam2_norb_desktop) AS "NORB_Desktop %"
,a.fam3_norb_mobile*1.0000/NULLIFZERO(b.fam3_norb_mobile) AS "NORB_Mobile %"
,(COALESCE(a.fam2_norb_desktop,0)+COALESCE(a.fam3_norb_mobile,0))
/NULLIFZERO((COALESCE(b.fam2_norb_desktop,0)+COALESCE(b.fam3_norb_mobile,0))) AS "NORB_All %" */
<file_sep>DROP TABLE CLICK_TABLE ;
-- Row-level click detail: one row per click with its date, click_id,
-- program, tool and publisher ids, taken from AMS_CLICK for the fixed
-- window '2017-04-26'..'2018-05-27'. NOTE(review): the AMS_TRANS_RSN_CD=0
-- filter is commented out here, unlike the aggregated click pull earlier
-- in this file — confirm whether reversed/invalid clicks should be included.
CREATE MULTISET VOLATILE TABLE click_table AS
(
SEL
CAST(CLICK_TS AS DATE) AS CLICK_dt,
click_id,
AMS_PRGRM_ID,
ams_tool_id,
pblshr_id
FROM PRS_AMS_V.AMS_CLICK a
WHERE 1=1
---AND AMS_TRANS_RSN_CD=0
AND CLICK_dt between '2017-04-26' and '2018-05-27'
) WITH DATA PRIMARY INDEX (CLICK_dt, pblshr_id, ams_prgrm_id,ams_tool_id)
ON COMMIT PRESERVE ROWS;
--DROP TABLE TRANS;
CREATE MULTISET VOLATILE TABLE TRANS AS
(SELECT
fam.CK_TRANS_DT AS ck_trans_dt
,fam.ams_prgrm_id
--,fam.EPN_PBLSHR_ID
,c.ams_tool_id
-- ,COALESCE(c.ams_tool_id, '(no value)') as ams_tool_id
,SUM(CASE WHEN clv_dup_ind =0 and DEVICE_TYPE_ID IN (1) THEN coalesce(GMB_PLAN_RATE_AMT,0) ELSE 0 END) AS GMB_24HR_desktop -- GMB
,SUM(CASE WHEN clv_dup_ind =0 and DEVICE_TYPE_ID IN (2,3) THEN coalesce(GMB_PLAN_RATE_AMT,0) ELSE 0 END) AS GMB_BBOWAC_mobile -- GMB
,SUM(CASE WHEN clv_dup_ind =0 THEN coalesce(GMB_PLAN_RATE_AMT,0) ELSE 0 END) AS GMB -- GMB
,SUM(CASE WHEN DEVICE_TYPE_ID IN (1) THEN coalesce(fam.IGMB_PLAN_RATE_AMT,0)ELSE 0 END) AS fam2_IGMB_desktop
,SUM(CASE WHEN DEVICE_TYPE_ID IN (2,3) THEN coalesce(fam.IGMB_PLAN_RATE_AMT,0)ELSE 0 END) AS iGMB_BBOWAC_mobile
,SUM(coalesce(fam.IGMB_PLAN_RATE_AMT,0)) AS iGMB
,count(distinct case when DEVICE_TYPE_ID IN (1) THEN CK_TRANS_ID||ITEM_ID end) AS fam2_trx_desktop
,count(distinct case when DEVICE_TYPE_ID IN (2,3) THEN CK_TRANS_ID||ITEM_ID end) AS fam3_trx_mobile
,count(distinct CK_TRANS_ID||ITEM_ID) AS trx
,SUM(CASE WHEN fam.CLV_BUYER_TYPE_CD IN (1,2) and DEVICE_TYPE_ID IN (1) THEN 1 ELSE 0 END) AS fam2_norb_desktop
,SUM(CASE WHEN fam.CLV_BUYER_TYPE_CD IN (1,2) and DEVICE_TYPE_ID IN (2,3) THEN 1 ELSE 0 END) AS fam3_norb_mobile
,SUM(CASE WHEN fam.CLV_BUYER_TYPE_CD IN (1,2) THEN 1 ELSE 0 END) AS norb
--,SUM(CASE WHEN fam.CLV_BUYER_TYPE_CD IN (1,2) THEN INCR_FCTR ELSE 0 END ) AS INORB
FROM PRS_RESTRICTED_V.MH_IM_CORE_FAM2_FACT AS fam
LEFT OUTER JOIN click_table AS c
ON fam.RVR_ID = c.click_id
WHERE fam.MPX_CHNL_ID = 6
AND fam.CK_TRANS_DT between '2018-04-26' and '2018-05-27'
--AND fam.client_id = fam.client_id_global ---- added to sync with FAM3, excluding GBH/Geox from reporting.
AND fam.EPN_PBLSHR_ID <> -999
and ams_tool_id =11006
GROUP BY 1,2,3
) WITH DATA PRIMARY INDEX (ck_trans_dt, ams_prgrm_id,ams_tool_id)
ON COMMIT PRESERVE ROWS;
sel * from trans
sel * from p_tiansheng_t.tool_performance_clv2 where cal_dt between '2018-04-10' and '2018-05-27' and ams_tool_id = 11006
sel * from P_ePNPEM_T.mbai_pub_daily_pfm_0306 where trans_dt> current_date -3
<file_sep>import decimal
import pyodbc
import time
import csv
import datetime
import os
import sys
import subprocess
import numpy
import email
import smtplib
import shutil
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.application import MIMEApplication
from datetime import datetime, timedelta
from datetime import datetime as dt
os.chdir('C:/Users/tianxu/Documents/Tool_Performance')
pyodbc.pooling = False
#def main():
Login_info = open('C:/Work/LogInMozart_ts.txt', 'r')
server_name = Login_info.readline()
server_name = server_name[:server_name.index(';')+1]
UID = Login_info.readline()
UID = UID[:UID.index(';') + 1]
PWD = Login_info.readline()
PWD = PWD[:PWD.index(';') + 1]
Login_info.close()
#today_dt = datetime.date.today()
print 'Connecting Server to determine date info at: ' + time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + '.'
conn = pyodbc.connect('DRIVER={Teradata};DBCNAME='+ server_name +'UID=' + UID + 'PWD=' + PWD)
curs = conn.cursor()
curs.execute('''
CREATE MULTISET VOLATILE TABLE impression AS
(SELECT
COALESCE(ams_prgrm_id, -999) AS ams_prgrm_id
,IMPRSN_DT
,COALESCE(pblshr_id, -999) AS pblshr_id
,ams_tool_id
,SUM(CASE WHEN trfc_src_cd IN (1, 2, 3) THEN 1 ELSE 0 END) AS impr_mobile
,COUNT(IMPRSN_CNTNR_ID) AS impr_all
FROM prs_ams_v.AMS_IMPRSN_CNTNR
WHERE IMPRSN_DT between current_date -10 and current_date - 2
AND ams_trans_rsn_cd = 0
GROUP BY 1, 2, 3, 4
HAVING impr_all > 0
) WITH DATA PRIMARY INDEX (IMPRSN_DT, pblshr_id, ams_prgrm_id)
ON COMMIT PRESERVE ROWS;
''')
conn.commit()
print 'impression table is ok now!'
curs.execute('''
CREATE MULTISET VOLATILE TABLE click AS
(SELECT
COALESCE(ams_prgrm_id, -999) AS ams_prgrm_id
,click_dt
,COALESCE(pblshr_id, -999) AS pblshr_id
,ams_tool_id
,SUM(CASE WHEN trfc_src_cd IN (1, 2, 3) THEN 1 ELSE 0 END) AS click_mobile
,COUNT(1) AS click_all
FROM prs_ams_v.ams_click
WHERE click_dt between current_date-10 and current_date - 2
AND ams_trans_rsn_cd = 0
GROUP BY 1, 2, 3, 4
HAVING click_all > 0
) WITH DATA PRIMARY INDEX (CLICK_DT, pblshr_id, ams_prgrm_id)
ON COMMIT PRESERVE ROWS;
''')
conn.commit()
print 'click table is ok now'
curs.execute('''
COLLECT STATISTICS COLUMN (AMS_PRGRM_ID ,CLICK_DT ,PBLSHR_ID,AMS_TOOL_ID) ON click;
''')
conn.commit()
curs.execute('''
COLLECT STATISTICS COLUMN (AMS_PRGRM_ID ,IMPRSN_DT ,PBLSHR_ID,AMS_TOOL_ID) ON impression;
''')
conn.commit()
curs.execute('''
CREATE MULTISET volatile TABLE impr_click as (
sel COALESCE(b.IMPRSN_DT,a.CLICK_dt) AS cal_dt
,COALESCE(b.ams_prgrm_id,a.ams_prgrm_id) AS ams_prgrm_id
,COALESCE(b.pblshr_id,a.pblshr_id) AS pblshr_id
,COALESCE(b.AMS_TOOL_ID,A.AMS_TOOL_ID) AS AMS_TOOL_ID
,COALESCE(a.click_mobile, 0) AS click_mobile
,COALESCE(a.click_all, 0) AS click_all
,COALESCE(b.impr_mobile, 0) AS impr_mobile
,COALESCE(b.impr_all, 0) AS impr_all
From click a
full join impression b on a.CLICK_dt = b.IMPRSN_DT and a. ams_prgrm_id = b.ams_prgrm_id and a.pblshr_id = b.pblshr_id and COALESCE(a.ams_tool_id, '(no value)') = COALESCE(b.ams_tool_id, '(no value)')
)WITH DATA PRIMARY INDEX(cal_dt
,ams_prgrm_id
,pblshr_id
,ams_tool_id) ON COMMIT PRESERVE ROWS
;
''')
conn.commit()
print "Be patient, you know, i'm working hard now, impr_click is ok"
curs.execute('''
CREATE MULTISET VOLATILE TABLE click_table AS
(
SEL
CAST(CLICK_TS AS DATE) AS CLICK_dt,
click_id,
AMS_PRGRM_ID,
ams_tool_id,
pblshr_id
FROM PRS_AMS_V.AMS_CLICK a
WHERE 1=1
---AND AMS_TRANS_RSN_CD=0
AND CLICK_dt between current_date - 375 and current_date - 2
) WITH DATA PRIMARY INDEX (CLICK_dt, pblshr_id, ams_prgrm_id,ams_tool_id)
ON COMMIT PRESERVE ROWS;
''')
conn.commit()
curs.execute('''
CREATE MULTISET VOLATILE TABLE TRANS AS
(SELECT
fam.CK_TRANS_DT AS ck_trans_dt
,fam.ams_prgrm_id
,fam.EPN_PBLSHR_ID
,c.ams_tool_id
-- ,COALESCE(c.ams_tool_id, '(no value)') as ams_tool_id
,SUM(CASE WHEN clv_dup_ind =0 and DEVICE_TYPE_ID IN (1) THEN coalesce(GMB_PLAN_RATE_AMT,0) ELSE 0 END) AS GMB_24HR_desktop -- GMB
,SUM(CASE WHEN clv_dup_ind =0 and DEVICE_TYPE_ID IN (2,3) THEN coalesce(GMB_PLAN_RATE_AMT,0) ELSE 0 END) AS GMB_BBOWAC_mobile -- GMB
,SUM(CASE WHEN clv_dup_ind =0 and DEVICE_TYPE_ID NOT IN (1,2,3) THEN coalesce(GMB_PLAN_RATE_AMT,0) ELSE 0 END) AS GMB_other_device -- new_added
,SUM(CASE WHEN DEVICE_TYPE_ID IN (1) THEN coalesce(fam.IGMB_PLAN_RATE_AMT,0)ELSE 0 END) AS fam2_IGMB_desktop
,SUM(CASE WHEN DEVICE_TYPE_ID IN (2,3) THEN coalesce(fam.IGMB_PLAN_RATE_AMT,0)ELSE 0 END) AS iGMB_BBOWAC_mobile
,SUM(CASE WHEN DEVICE_TYPE_ID NOT IN (1,2,3) THEN coalesce(fam.IGMB_PLAN_RATE_AMT,0)ELSE 0 END) AS iGMB_other_device -- new_added
,count(distinct case when DEVICE_TYPE_ID IN (1) THEN CK_TRANS_ID||ITEM_ID end) AS fam2_trx_desktop
,count(distinct case when DEVICE_TYPE_ID IN (2,3) THEN CK_TRANS_ID||ITEM_ID end) AS fam3_trx_mobile
,count(distinct case when DEVICE_TYPE_ID NOT IN (1,2,3) THEN CK_TRANS_ID||ITEM_ID end) AS fam3_trx_other_device -- new_added
,SUM(CASE WHEN fam.CLV_BUYER_TYPE_CD IN (1,2) and DEVICE_TYPE_ID IN (1) THEN 1 ELSE 0 END) AS fam2_norb_desktop
,SUM(CASE WHEN fam.CLV_BUYER_TYPE_CD IN (1,2) and DEVICE_TYPE_ID IN (2,3) THEN 1 ELSE 0 END) AS fam3_norb_mobile
,SUM(CASE WHEN fam.CLV_BUYER_TYPE_CD IN (1,2) and DEVICE_TYPE_ID NOT IN (1,2,3) THEN 1 ELSE 0 END) AS fam3_norb_other_device --new_added
--,SUM(CASE WHEN fam.CLV_BUYER_TYPE_CD IN (1,2) THEN INCR_FCTR ELSE 0 END ) AS INORB
FROM PRS_RESTRICTED_V.MH_IM_CORE_FAM2_FACT AS fam
LEFT OUTER JOIN click_table AS c
ON fam.RVR_ID = c.click_id
WHERE fam.MPX_CHNL_ID = 6
AND fam.CK_TRANS_DT between current_date-10 and current_date - 2
--AND fam.client_id = fam.client_id_global ---- added to sync with FAM3, excluding GBH/Geox from reporting.
AND fam.EPN_PBLSHR_ID <> -999
--and ams_tool_id =11006
GROUP BY 1,2,3,4
) WITH DATA PRIMARY INDEX (ck_trans_dt, ams_prgrm_id,ams_tool_id)
ON COMMIT PRESERVE ROWS;
''')
conn.commit()
print "TRANS is ok"
curs.execute('''
CREATE MULTISET volatile TABLE MPX_spend_2 as (
select
TRANS_DT,
AMS_PRGRM_ID,
ams_tool_id,
AMS_PBLSHR_ID,
--CASE WHEN a.trfc_src_cd <> 0 THEN 'Mobile' ELSE 'Desktop' END as DEVICE ,
sum( CASE WHEN a.trfc_src_cd <> 0 THEN COALESCE(ERNG_USD,0.00) else 0 end) as Spend_Mobile,
sum( CASE WHEN a.trfc_src_cd = 0 THEN COALESCE(ERNG_USD,0.00) else 0 end) as Spend_Desktop,
sum(COALESCE(ERNG_USD,0.00)) as Spend
FROM prs_ams_v.AMS_PBLSHR_ERNG a
where
TRANS_DT between current_date -10 and current_date - 2
group by 1,2,3,4
) WITH DATA PRIMARY INDEX(TRANS_DT
,AMS_PRGRM_ID
,ams_tool_id,AMS_PBLSHR_ID) on commit preserve rows;
''')
conn.commit()
print "mpx_spend_2!"
curs.execute('''
COLLECT STATISTICS COLUMN (AMS_PRGRM_ID ,CLICK_DT ,PBLSHR_ID,AMS_TOOL_ID) ON click;
''')
conn.commit()
curs.execute('''
COLLECT STATISTICS COLUMN (AMS_PRGRM_ID ,IMPRSN_DT ,PBLSHR_ID,AMS_TOOL_ID) ON impression;
''')
conn.commit()
curs.execute('''
COLLECT STATISTICS COLUMN (CK_TRANS_DT ,AMS_PRGRM_ID ,EPN_PBLSHR_ID) ON TRANS;
''')
conn.commit()
curs.execute('''
COLLECT STATISTICS COLUMN (AMS_TOOL_ID,AMS_PRGRM_ID,AMS_PBLSHR_ID) ON MPX_spend_2;
''')
conn.commit()
curs.execute('''
CREATE MULTISET volatile TABLE dtl_pb_tool as (
sel
COALESCE(a.cal_dt,fam2.ck_trans_dt,b.TRANS_DT) as cal_dt
,COALESCE(a.ams_prgrm_id, fam2. ams_prgrm_id, b.ams_prgrm_id) as ams_prgrm_id
,COALESCE(a.pblshr_id,fam2.EPN_PBLSHR_ID,b.AMS_PBLSHR_ID) as pblshr_id
,COALESCE(a.AMS_TOOL_ID,fam2.ams_tool_id,b.ams_tool_id) as AMS_TOOL_ID
,COALESCE(a.click_mobile, 0) AS click_mobile
,COALESCE(a.click_all, 0) AS click_all
,COALESCE(a.impr_mobile, 0) AS impr_mobile
,COALESCE(a.impr_all, 0) AS impr_all
,COALESCE(fam2.GMB_24HR_desktop,0.00) AS GMB_24HR_desktop
,0 AS GMB_24HR_all
,COALESCE(fam2.fam2_IGMB_desktop, 0.00) AS fam2_iGMB_desktop
,0 AS fam2_iGMB_all
,COALESCE(fam2.fam2_trx_desktop, 0) AS fam2_trx_desktop
,0 AS fam2_trx_all
,COALESCE(fam2.fam2_norb_desktop, 0) AS fam2_norb_desktop
,0 AS fam2_norb_all
,COALESCE(fam2.GMB_BBOWAC_mobile, 0.00) AS GMB_BBOWAC_mobile
,0 AS GMB_BBOWAC_all
,COALESCE(fam2.iGMB_BBOWAC_mobile, 0.00) AS iGMB_BBOWAC_mobile
,0 AS iGMB_BBOWAC_all
,COALESCE(fam2.fam3_trx_mobile, 0) AS fam3_trx_mobile
,0 AS fam3_trx_all
,COALESCE(fam2.fam3_norb_mobile, 0) AS fam3_norb_mobile
,0 AS fam3_norb_all
,COALESCE(fam2.GMB_other_device, 0.00) AS GMB_other_device -- new_added
,COALESCE(fam2.iGMB_other_device, 0.00) AS iGMB_other_device -- new_added
,COALESCE(fam2.fam3_trx_other_device, 0) AS fam3_trx_other_device -- new_added
,COALESCE(fam2.fam3_norb_other_device, 0) AS fam3_norb_other_device -- new_added
,COALESCE(b.Spend_Mobile, 0) AS Spend_Mobile
,COALESCE(b.Spend_Desktop, 0) AS Spend_Desktop
,COALESCE(b.Spend, 0) AS Spend_All
From impr_click a
full join TRANS fam2 on fam2.ck_trans_dt = a.cal_dt and fam2. ams_prgrm_id = a.ams_prgrm_id and a.pblshr_id = fam2.EPN_PBLSHR_ID and COALESCE(a.ams_tool_id, '(no value)') = COALESCE(fam2.ams_tool_id, '(no value)')
full join MPX_spend_2 b on a.cal_dt = b.TRANS_DT and a. ams_prgrm_id = b.ams_prgrm_id and a.pblshr_id = b.AMS_PBLSHR_ID and COALESCE(a.ams_tool_id, '(no value)') = COALESCE(b.ams_tool_id, '(no value)')
and fam2.ck_trans_dt = b.TRANS_DT and fam2. ams_prgrm_id = b.ams_prgrm_id and b.AMS_PBLSHR_ID = fam2.EPN_PBLSHR_ID and COALESCE(b.ams_tool_id, '(no value)') = COALESCE(fam2.ams_tool_id, '(no value)')
--and cal_dt is not null
)WITH DATA PRIMARY INDEX(cal_dt
,ams_prgrm_id
,pblshr_id
,ams_tool_id) ON COMMIT PRESERVE ROWS;
''')
conn.commit()
print "Start to delete"
curs.execute('''
delete from p_tiansheng_t.tool_performance_clv2
where cal_DT between current_date-10 and current_date-2;
''')
conn.commit()
print "ok,delete! let's start to insert."
curs.execute('''
insert into p_tiansheng_t.tool_performance_clv2
SEL
b.cal_dt
,b.ams_prgrm_id
,pg.prgrm_name
,b.pblshr_id
,pb.PBLSHR_CMPNY_NAME
,bm.manual_bm as BM
,bm.manual_sub_bm as Sub_BM
,b.ams_tool_id
,lkp.tool_name
,d.ams_tool_categ_name AS tool_categ_name
,impr_mobile
,impr_all
,click_mobile
,click_all
,GMB_24HR_desktop
,GMB_24HR_all
,fam2_iGMB_desktop
,fam2_iGMB_all
,fam2_trx_desktop
,fam2_trx_all
,fam2_norb_desktop
,fam2_norb_all
,GMB_BBOWAC_mobile
,GMB_BBOWAC_all
,iGMB_BBOWAC_mobile
,iGMB_BBOWAC_all
,fam3_trx_mobile
,fam3_trx_all
,fam3_norb_mobile
,fam3_norb_all
,GMB_other_device --new_added
,iGMB_other_device --new_added
,fam3_trx_other_device --new_added
,fam3_norb_other_device -- new_added
,Spend_Mobile
,Spend_Desktop
,Spend_All
FROM
dtl_pb_tool b
LEFT OUTER JOIN prs_ams_v.AMS_TOOL lkp
ON b.ams_tool_id = lkp.ams_tool_id
LEFT OUTER JOIN prs_ams_v.AMS_TOOL_CATEG d
ON lkp.tool_ctgry_cd = d.ams_tool_categ_cd
LEFT OUTER JOIN prs_ams_v.ams_pblshr pb
ON b.pblshr_id = pb.ams_pblshr_id
LEFT JOIN prs_ams_v.AMS_PRGRM pg
ON b.AMS_PRGRM_ID = pg.AMS_PRGRM_ID
left join App_mrktng_l2_v.new_bm bm
on b.pblshr_id = bm.ams_pblshr_id;
''')
conn.commit()
print "successfully insert"
execfile('EmailSender_Tool.py')
print 'Send eMail'
conn.close()
|
28de22ded44f56ffa849a82b9f042b10c8b62e29
|
[
"Markdown",
"SQL",
"Python"
] | 4
|
Markdown
|
tianxu23/tool_performance_report
|
692b55b4ec586c054a10999a7b893fd2b47644d5
|
f625b5e74c91a9f1b6088eeb7341fbfe01ed9b0d
|
refs/heads/master
|
<repo_name>AniketARSR/pythonProject<file_sep>/packageInfo.py
# packageInfo.py -- interactive helper: renames the package path, then
# rewrites the Android config file replacing the template package name and
# version with operator-supplied values.
# (Indentation was lost in the archived copy; restored to valid Python here.)
import os

androidPath = input("Android File Path : ")
# iosPath = input("iOS Info File Path : ")
packageNamePath = input("Package Name Path :")
newPackageNamePath = input("New Package Name Path :")
os.rename(packageNamePath, newPackageNamePath)
packageName = input("Package Name To Change : ")
versionName = input("Version Name To Change : ")

# Template values -> replacements. NOTE(review): '1.0.0' is replaced wherever
# it appears in the file, not only in a version field -- confirm the config
# contains no other '1.0.0' occurrences.
texts_to_replace = {'com.natifi.storifi': packageName, '1.0.0': versionName}

# Read once, apply every replacement, write the result back in place.
with open(androidPath, 'r') as f:
    text = f.read()
for replaceText in texts_to_replace:
    text = text.replace(replaceText, texts_to_replace[replaceText])
with open(androidPath, 'w') as f:
    f.write(text)

# iOS handling kept for reference, still disabled.
# with open(iosPath, 'r') as f:
#     text = f.read()
# for replaceText in texts_to_replace:
#     text = text.replace(replaceText, texts_to_replace[replaceText])
# with open(iosPath, 'w') as f:
#     f.write(text)
<file_sep>/apiCall.py
# apiCall.py -- fetches the Android Firebase config for an app user from the
# storifi API and writes the base64-decoded JSON to disk.
import requests
import base64
import os

userID = input("Please Enter UserID of App : ")
file_Path = input("Enter the Location to Save File : ")

# BUG FIX: the original posted a hard-coded userId ("603b906e67e20993f0ad6106")
# instead of the value the operator just typed in.
response = requests.post(
    "https://api.storifi.app/shopify-notifications/fetch-credentials",
    data={"userId": userID})
data = response.json()
print(response.json())

# configAndroid is base64-encoded JSON text; decode it to utf-8.
# print("Encrypted Android File - " + data["data"]["configAndroid"])
androidFileContent = base64.b64decode(data["data"]["configAndroid"]).decode('utf-8')
print("Decrypted Android File - " + androidFileContent)

filepath = os.path.join(file_Path, 'google-serviceTest.json')
if not os.path.exists(file_Path):
    os.makedirs(file_Path)

# BUG FIX: write ('w'), not append ('a') -- appending on a rerun would leave
# two concatenated JSON documents in the file, which is invalid JSON.
with open(filepath, "w") as f:
    f.write(androidFileContent)

# Echo the saved file back for visual verification.
with open(filepath, 'r') as file:
    print(file.read())
print("Google Service File Saved")
|
2aeb05941de9f6e79c05923731adf95f4496bdcf
|
[
"Python"
] | 2
|
Python
|
AniketARSR/pythonProject
|
32634721756329b3306eefd57e918e8d3e694765
|
49f544d50bec65be878d5c1b75bafc328f2497fb
|
refs/heads/master
|
<file_sep>import nltk.classify.util
from nltk.classify import NaiveBayesClassifier
from nltk.corpus import movie_reviews
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
import pickle
from tqdm import tqdm

print("import complete...")


def create_word_features(words):
    """Build the {word: True} feature dict NLTK classifiers expect,
    dropping English stopwords."""
    useful = [word for word in words if word not in stopwords.words('english')]
    use_dict = dict([(word, True) for word in useful])
    return use_dict


negative_reviews = []
for fileid in tqdm(movie_reviews.fileids('neg')):
    words = movie_reviews.words(fileid)
    negative_reviews.append((create_word_features(words), "negative"))
print("compiled negative reviews", len(negative_reviews))

positive_reviews = []
for fileid in tqdm(movie_reviews.fileids('pos')):
    words = movie_reviews.words(fileid)
    # BUG FIX: the original appended positive samples to negative_reviews,
    # leaving positive_reviews empty and mixing "positive" labels into the
    # negative list (which corrupted both the train and test splits below).
    positive_reviews.append((create_word_features(words), "positive"))
print("compiled positive reviews", len(positive_reviews))

# 750 reviews per class for training, the remainder for evaluation.
train_set = negative_reviews[:750] + positive_reviews[:750]
test_set = negative_reviews[750:] + positive_reviews[750:]

classifier = NaiveBayesClassifier.train(train_set)
accuracy = nltk.classify.util.accuracy(classifier, test_set)
print(accuracy)

# Persist the trained model for the companion classify scripts.
with open('classifier_nltk2.pkl', 'wb') as fopen:
    pickle.dump(classifier, fopen)
<file_sep>import pickle
from nltk.tokenize import word_tokenize
from nltk.corpus import stopwords
import pickle  # re-import keeps this section self-contained; harmless no-op

# Load the NaiveBayes classifier trained by the companion training script.
# (Indentation was lost in the archived copy; restored here.)
with open('classifier_nltk2.pkl', 'rb') as fopen:
    clf = pickle.load(fopen)

review1 = ''' its the best one'''


def create_word_features(words):
    """Build the {word: True} feature dict NLTK classifiers expect,
    dropping English stopwords."""
    useful_words = [word for word in words if word not in stopwords.words("english")]
    my_dict = dict([(word, True) for word in useful_words])
    return my_dict


word1 = word_tokenize(review1)
words = create_word_features(word1)
print(words)
print(clf.classify(words))
<file_sep># FB-Sentiment-analyser<file_sep>import pickle
from nltk.tokenize import word_tokenize
from nltk.corpus import stopwords
with open('classifier_nltk2.pkl' , 'rb') as fopen:
clf = pickle.load(fopen)
review1 = ''' its the best one'''
review2 = "its the not best one"
review3 = "it may be good or bad, i dont know"
review4 = "i dont know"
def create_word_features(words):
useful_words = [word for word in words if word not in stopwords.words("english")]
my_dict = dict([(word, True) for word in useful_words])
return my_dict
word1 = word_tokenize(review1)
word2 = word_tokenize(review2)
word3 = word_tokenize(review3)
word4 = word_tokenize(review4)
all_words = [word1, word2, word3, word4]
created_dict = []
for i in range(len(all_words)):
words = create_word_features(all_words[i])
print(clf.classify(words))
print(" " )
words = create_word_features(word1)
print(clf.classify(words))
<file_sep>import urllib
# BUG FIX: `import urllib` alone does not make urllib.request available;
# import the submodule explicitly.
import urllib.request
import json

a = urllib.request.urlopen('http://api.worldbank.org/v2/countries/all/indicators/SP.POP.TOTL?format=json')
# BUG FIX: json.loads() requires str/bytes but `a` is the HTTP response
# object; json.load() parses directly from the file-like response.
b = json.load(a)
print(b)<file_sep>import nltk.classify.util
from nltk.classify import NaiveBayesClassifier
from nltk.corpus import movie_reviews
from nltk.tokenize import word_tokenize
import pickle
def word_feats(words):
return dict([(word, True) for word in words])
negids = movie_reviews.fileids('neg')
posids = movie_reviews.fileids('pos')
#the generators are much faster than for-loop
negfeats = [(word_feats(movie_reviews.words(fileids=[f])), 'neg') for f in negids]
posfeats = [(word_feats(movie_reviews.words(fileids=[f])), 'pos') for f in posids]
negcutoff = (len(negfeats)*3)/4
poscutoff = (len(posfeats)*3)/4
trainfeats = negfeats[:750] + posfeats[:750]
testfeats = negfeats[750:] + posfeats[750:]
classifier = NaiveBayesClassifier.train(trainfeats)
print ('accuracy:', nltk.classify.util.accuracy(classifier, testfeats))
with open('pickled_classifier.pkl', 'wb') as fopen:
pickle.dump(classifier, fopen)
|
bd8be13b89f74ca7a0c13fc5b8c552f911f0cec2
|
[
"Markdown",
"Python"
] | 6
|
Python
|
ASH1998/FB-Sentiment-analyser
|
26952a9651c7759fbcdd0376ff410776a98e9c1b
|
562f1bb70971cc1bea15db7f598a3f25f2ef8b1b
|
refs/heads/master
|
<file_sep>class User < ApplicationRecord
has_many :articles
belongs_to :users
end
<file_sep>class Article < ApplicationRecord
belongs_to :categories
belongs_to :users
has_many :commentaires
end
<file_sep># This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
require 'faker'
#100.times do
# user = user.create!(name: Faker::Company.name, email: Faker::Internet.email)
#end
#10.times do
# user = User.create!(first_name: "#{Faker::Company.name}", email: "#{Faker::Internet.email}")
#end
# NOTE(review): i and j are never changed inside the loop, so all 10 seeded
# articles share categories_id 11 and the same commentaire value -- confirm
# whether they were meant to be incremented per iteration.
i= 1
j=11
# Seed 10 articles with Faker-generated names/content.
10.times do
##article = Article.create!(name: "#{Faker::Company.name}", content: "#{Faker::Movie.name}" , categories_id: Integer , user: Integer )
# NOTE(review): `index_articles_on_commentaire` looks like an *index* name,
# not a column -- verify the actual column name against the schema.
article = Article.create!(name: "#{Faker::Company.name}", content: "#{Faker::Movie.name}" , categories_id: "#{j}" , index_articles_on_commentaire: "#{i}" )
##categorie = Category.create!(name: "#{Faker::Company.name}")
##commentaire = Commentaire.create!(content: "#{Faker::Company.name}")
end
<file_sep>class ChangeUsers2 < ActiveRecord::Migration[5.2]
def change
add_column :users, :firstname, :lastname
end
end
|
8807cfe0f7e865c7337996b8625def550611570f
|
[
"Ruby"
] | 4
|
Ruby
|
Christine79/20181024_blog
|
fc183b0f136a7be4eea5947d54437db2c828624e
|
3275a8e5f1047bd03724cdf51462759cab6d1edb
|
refs/heads/develop
|
<repo_name>arthurjamain/osgjs<file_sep>/sources/osgWrappers/osgWrappers.js
define( [
    'osgWrappers/serializers/osg',
    'osgWrappers/serializers/osgAnimation'
], function ( osg, osgAnimation ) {
    'use strict';

    // Namespace object exposing the serializer wrapper tables.
    return {
        osg: osg,
        osgAnimation: osgAnimation
    };
} );
<file_sep>/sources/osgAnimation/FindNearestParentSkeleton.js
define( [
'osg/Utils',
'osg/NodeVisitor',
'osgAnimation/Skeleton'
], function ( MACROUTILS, NodeVisitor, Skeleton ) {
'use strict';
/**
* FindNearestParentSkeleton
* Visitor walking *up* the parent chain (TRAVERSE_PARENTS); records the first
* Skeleton node encountered in this._root (undefined if none is found).
*/
var FindNearestParentSkeleton = function () {
NodeVisitor.call( this, NodeVisitor.TRAVERSE_PARENTS );
this._root = undefined;
};
FindNearestParentSkeleton.prototype = MACROUTILS.objectInherit( NodeVisitor.prototype, {
apply: function ( node ) {
// Once a skeleton is found, stop examining further ancestors.
if ( this._root ) return;
if ( node.typeID === Skeleton.typeID ) this._root = node;
this.traverse( node );
}
} );
return FindNearestParentSkeleton;
} );
<file_sep>/sources/osgShadow/osgShadow.js
define( [
'osgShadow/ShadowCastAttribute',
'osgShadow/ShadowCastCompiler',
'osgShadow/ShadowCastShaderGenerator',
'osgShadow/ShadowCasterVisitor',
'osgShadow/ShadowFrustumIntersection',
'osgShadow/ShadowMap',
'osgShadow/ShadowReceiveAttribute',
'osgShadow/ShadowSettings',
'osgShadow/ShadowTechnique',
'osgShadow/ShadowTexture',
'osgShadow/ShadowedScene'
], function ( ShadowCastAttribute, ShadowCastCompiler, ShadowCastShaderGenerator, ShadowCasterVisitor, ShadowFrustumIntersection, ShadowMap, ShadowReceiveAttribute, ShadowSettings, ShadowTechnique, ShadowTexture, ShadowedScene ) {
'use strict';
// Registry module: re-exports the shadow-related classes under the
// osgShadow namespace object.
var osgShadow = {};
osgShadow.ShadowCastAttribute = ShadowCastAttribute;
osgShadow.ShadowCastCompiler = ShadowCastCompiler;
osgShadow.ShadowReceiveAttribute = ShadowReceiveAttribute;
osgShadow.ShadowCasterVisitor = ShadowCasterVisitor;
osgShadow.ShadowFrustumIntersection = ShadowFrustumIntersection;
osgShadow.ShadowMap = ShadowMap;
osgShadow.ShadowedScene = ShadowedScene;
osgShadow.ShadowSettings = ShadowSettings;
osgShadow.ShadowCastShaderGenerator = ShadowCastShaderGenerator;
osgShadow.ShadowTechnique = ShadowTechnique;
osgShadow.ShadowTexture = ShadowTexture;
return osgShadow;
} );
<file_sep>/sources/osgShader/node/animation.js
define( [
'osg/Utils',
'osg/Texture',
'osgShader/utils',
'osgShader/node/Node'
], function ( MACROUTILS, Texture, ShaderUtils, Node ) {
'use strict';
// Shader-graph node that emits a call to the GLSL skeletalTransform()
// function (from skeletal.glsl) producing the skinning matrix for a vertex.
var AnimationNode = function () {
Node.apply( this );
};
AnimationNode.prototype = MACROUTILS.objectInherit( Node.prototype, {
type: 'Animation',
validInputs: [ 'weights', 'bonesIndex', 'matrixPalette' ],
validOutputs: [ 'mat4' ],
globalFunctionDeclaration: function () {
return '#pragma include "skeletal.glsl"';
},
computeShader: function () {
// TODO for now matrixPalette is used as a global (uBones) because an array means a dynamic function signature in the glsl...
return ShaderUtils.callFunction( 'skeletalTransform', this._outputs.mat4, [ this._inputs.weights, this._inputs.bonesIndex ] );
}
} );
return {
Animation: AnimationNode
};
} );
<file_sep>/sources/osgText/osgText.js
define( [
    'osgText/Text'
], function ( Text ) {
    // Namespace object for the osgText module.
    return {
        Text: Text
    };
} );
<file_sep>/sources/osgAnimation/osgAnimation.js
define( [
'osg/Utils',
'osgAnimation/Animation',
'osgAnimation/AnimationAttribute',
'osgAnimation/AnimationUpdateCallback',
'osgAnimation/BasicAnimationManager',
'osgAnimation/Bone',
'osgAnimation/Channel',
'osgAnimation/CollectAnimationUpdateCallbackVisitor',
'osgAnimation/Easing',
'osgAnimation/Interpolator',
'osgAnimation/RigGeometry',
'osgAnimation/Skeleton',
'osgAnimation/StackedMatrix',
'osgAnimation/StackedQuaternion',
'osgAnimation/StackedRotateAxis',
'osgAnimation/StackedScale',
'osgAnimation/StackedTranslate',
'osgAnimation/UpdateBone',
'osgAnimation/UpdateMatrixTransform',
'osgAnimation/UpdateSkeleton'
], function ( MACROUTILS, Animation, AnimationAttribute, AnimationUpdateCallback, BasicAnimationManager, Bone, Channel, CollectAnimationUpdateCallbackVisitor, Easing, Interpolator, RigGeometry, Skeleton, StackedMatrix, StackedQuaternion, StackedRotateAxis, StackedScale, StackedTranslate, UpdateBone, UpdateMatrixTransform, UpdateSkeleton ) {
'use strict';
// Registry module: re-exports the animation classes under the osgAnimation
// namespace. Easing and Interpolator export plain function tables, so their
// members are mixed in directly rather than nested.
var osgAnimation = {};
MACROUTILS.objectMix( osgAnimation, Easing );
MACROUTILS.objectMix( osgAnimation, Interpolator );
osgAnimation.Animation = Animation;
osgAnimation.AnimationAttribute = AnimationAttribute;
osgAnimation.AnimationUpdateCallback = AnimationUpdateCallback;
osgAnimation.BasicAnimationManager = BasicAnimationManager;
osgAnimation.Bone = Bone;
osgAnimation.Channel = Channel;
osgAnimation.CollectAnimationUpdateCallbackVisitor = CollectAnimationUpdateCallbackVisitor;
osgAnimation.RigGeometry = RigGeometry;
osgAnimation.Skeleton = Skeleton;
osgAnimation.StackedMatrix = StackedMatrix;
osgAnimation.StackedQuaternion = StackedQuaternion;
osgAnimation.StackedRotateAxis = StackedRotateAxis;
osgAnimation.StackedScale = StackedScale;
osgAnimation.StackedTranslate = StackedTranslate;
osgAnimation.UpdateBone = UpdateBone;
osgAnimation.UpdateMatrixTransform = UpdateMatrixTransform;
osgAnimation.UpdateSkeleton = UpdateSkeleton;
return osgAnimation;
} );
<file_sep>/tests/osgWrappers/osgAnimation.js
define( [
'qunit',
'q',
'osgDB/Input',
'osg/Notify',
'osg/Image'
], function ( QUnit, Q, Input, Notify, Image ) {
'use strict';
// Smoke test: loading a sample .osgjs scene through the osgAnimation
// wrappers must resolve to a defined scene node.
// NOTE(review): the 'q' and 'osg/Image' dependencies are unused here.
return function () {
QUnit.module( 'osgWrapper' );
QUnit.asyncTest( 'osgAnimation', function () {
var input = new Input();
input.readNodeURL( '../examples/media/models/material-test/file.osgjs').then( function( scene ) {
ok( scene !== undefined, true );
start();
}).fail( function( error ) {
// Load failure: report it (the async test will then time out).
Notify.error( error );
});
} );
};
} );
<file_sep>/sources/osgAnimation/Target.js
define( [
    'osg/Matrix',
    'osg/Quat',
    'osg/Vec3',
    'osgAnimation/channelType'
], function ( Matrix, Quat, Vec3, channelType ) {
    'use strict';

    var target = {};

    // Sentinel meaning "no id assigned yet".
    target.InvalidTargetID = -1;

    // A target couples an animated value with the channels that drive it.
    // `defaultValue` is used whenever no channel affects the value.
    var makeTarget = function ( type, value, defaultValue ) {
        return {
            type: type,
            id: target.InvalidTargetID,
            channels: [],
            value: value,
            defaultValue: defaultValue
        };
    };

    // Each factory clones the supplied default so the caller's object is
    // never aliased by the target.
    target.createQuatTarget = function ( defaultValue ) {
        return makeTarget( channelType.Quat,
            Quat.copy( defaultValue, Quat.create() ),
            Quat.copy( defaultValue, Quat.create() ) );
    };

    target.createMatrixTarget = function ( defaultValue ) {
        return makeTarget( channelType.Matrix,
            Matrix.copy( defaultValue, Matrix.create() ),
            Matrix.copy( defaultValue, Matrix.create() ) );
    };

    target.createVec3Target = function ( defaultValue ) {
        return makeTarget( channelType.Vec3,
            Vec3.copy( defaultValue, Vec3.create() ),
            Vec3.copy( defaultValue, Vec3.create() ) );
    };

    target.createFloatTarget = function ( defaultValue ) {
        return makeTarget( channelType.Float, defaultValue, defaultValue );
    };

    return target;
} );
<file_sep>/tests/osgText/osgTextTests.js
define( [
    'tests/osgText/Text'
], function ( Text ) {
    'use strict';

    // Entry point running the osgText test suite.
    return function () {
        Text();
    };
} );
<file_sep>/sources/osgAnimation/StackedMatrix.js
define( [
'osg/Utils',
'osg/Object',
'osg/Matrix',
'osgAnimation/Target',
], function ( MACROUTILS, Object, Matrix, Target ) {
'use strict';
// Stacked transform element holding a full 4x4 matrix; its current value is
// pre-multiplied into the owning transform's matrix during update.
var StackedMatrix = function ( name, matrix ) {
Object.call( this );
this._target = Target.createMatrixTarget( matrix || Matrix.identity );
if ( name ) this.setName( name );
};
StackedMatrix.prototype = MACROUTILS.objectInherit( Object.prototype, {
// Sets both the current value and the default (fallback) value.
init: function ( matrix ) {
this.setMatrix( matrix );
Matrix.copy( matrix, this._target.defaultValue );
},
getTarget: function () {
return this._target;
},
getMatrix: function () {
return this._target.value;
},
setMatrix: function ( m ) {
Matrix.copy( m, this._target.value );
},
resetToDefaultValue: function () {
this.setMatrix( this._target.defaultValue );
},
// m = m * value (pre-multiplication into the accumulated transform).
applyToMatrix: function ( m ) {
Matrix.preMult( m, this._target.value );
}
} );
return StackedMatrix;
} );
<file_sep>/sources/osgShadow/ShadowCastCompiler.js
define( [
'osg/Utils',
'osgShader/Compiler',
], function ( MACROUTILS, Compiler ) {
'use strict';
// Specialized shader compiler for the shadow-cast pass: renders only depth
// (encoded into the color buffer) from the light's point of view.
var CompilerShadowCast = function () {
Compiler.apply( this, arguments );
this._isVertexColored = false;
this._isLighted = false;
};
CompilerShadowCast.prototype = MACROUTILS.objectInherit( Compiler.prototype, {
getCompilerName: function () {
return 'ShadowCast';
},
getFragmentShaderName: function () {
return this.getCompilerName();
},
// Pick out the attributes this pass cares about (cast settings + skinning).
initAttributes: function () {
var attributes = this._attributes;
for ( var i = 0, l = attributes.length; i < l; i++ ) {
var type = attributes[ i ].className();
if ( type === 'ShadowCastAttribute' ) {
this._shadowCastAttribute = attributes[ i ];
} else if ( type === 'AnimationAttribute' ) {
this._animation = attributes[ i ];
}
}
},
// Textures are irrelevant for a depth-only pass: disable their registration.
registerTextureAttributes: function () {},
registerTextureShadow: function () {},
// Fast path: only the depth transform is declared.
declareVertexTransforms: function ( glPosition ) {
this.declareTransformWithEyeSpace( glPosition );
},
// Depth shadow map cast from the light POV; depth encoded in color buffer.
createShadowCastDepth: function () {
var frag = this.createVariable( 'vec4' );
this.getNode( 'ShadowCast' ).setShadowCastAttribute( this._shadowCastAttribute ).inputs( {
exponent0: this.getOrCreateUniform( 'float', 'exponent0' ),
exponent1: this.getOrCreateUniform( 'float', 'exponent1' ),
shadowDepthRange: this.getOrCreateUniform( 'vec4', 'Shadow_DepthRange' ),
fragEye: this.getOrCreateInputPosition()
} ).outputs( {
color: frag
} );
return frag;
},
// Kept separate from createShadowCastDepth so user-defined compilers that
// inherit from this one can override either step independently.
createFragmentShaderGraph: function () {
var depthFrag = this.createShadowCastDepth();
var frag = this.getNode( 'glFragColor' );
this.getNode( 'SetFromNode' ).inputs( depthFrag ).outputs( frag );
return [ frag ];
}
} );
return CompilerShadowCast;
} );
<file_sep>/sources/osgAnimation/StackedQuaternion.js
define( [
'osg/Utils',
'osg/Object',
'osg/Matrix',
'osg/Quat',
'osgAnimation/Target'
], function ( MACROUTILS, Object, Matrix, Quat, Target ) {
'use strict';
// Stacked transform element holding a rotation quaternion; applied by
// converting to a matrix and pre-multiplying.
var StackedQuaternion = function ( name, quat ) {
Object.call( this );
this._target = Target.createQuatTarget( quat || Quat.identity );
if ( name ) this.setName( name );
};
StackedQuaternion.prototype = MACROUTILS.objectInherit( Object.prototype, {
// Sets both the current value and the default (fallback) value.
init: function ( q ) {
this.setQuaternion( q );
Quat.copy( q, this._target.defaultValue );
},
setQuaternion: function ( q ) {
Quat.copy( q, this._target.value );
},
getTarget: function () {
return this._target;
},
resetToDefaultValue: function () {
this.setQuaternion( this._target.defaultValue );
},
// m = m * rotation(value); the scratch matrix is closed over to avoid a
// per-call allocation.
applyToMatrix: ( function () {
var matrixTmp = Matrix.create();
return function applyToMatrix( m ) {
var mtmp = matrixTmp;
Matrix.setRotateFromQuat( mtmp, this._target.value );
Matrix.preMult( m, mtmp );
};
} )()
} );
return StackedQuaternion;
} );
<file_sep>/sources/osgAnimation/StackedRotateAxis.js
define( [
'osg/Utils',
'osg/Object',
'osg/Matrix',
'osg/Vec3',
'osg/Quat',
'osgAnimation/Target',
], function ( MACROUTILS, Object, Matrix, Vec3, Quat, Target ) {
'use strict';
// Stacked transform element: rotation of `angle` radians around a fixed
// axis. Only the angle is animatable (a float target); the axis is static.
var StackedRotateAxis = function ( name, axis, angle ) {
Object.call( this );
this._axis = Vec3.set( 0, 0, 1, Vec3.create() );
if ( axis ) Vec3.copy( axis, this._axis );
this._target = Target.createFloatTarget( typeof angle === 'number' ? angle : 0.0 );
if ( name ) this.setName( name );
};
StackedRotateAxis.prototype = MACROUTILS.objectInherit( Object.prototype, {
// Sets axis, current angle, and the default (fallback) angle.
init: function ( axis, angle ) {
this.setAxis( axis );
this.setAngle( angle );
this._target.defaultValue = angle;
},
setAxis: function ( axis ) {
Vec3.copy( axis, this._axis );
},
setAngle: function ( angle ) {
this._target.value = angle;
},
getTarget: function () {
return this._target;
},
resetToDefaultValue: function () {
this.setAngle( this._target.defaultValue );
},
// m = m * rotate(angle, axis); scratch quat/matrix are closed over to
// avoid per-call allocations.
applyToMatrix: ( function () {
var matrixTmp = Matrix.create();
var quatTmp = Quat.create();
return function ( m ) {
var axis = this._axis;
var qtmp = quatTmp;
var mtmp = matrixTmp;
var angle = this._target.value;
Quat.makeRotate( angle, axis[ 0 ], axis[ 1 ], axis[ 2 ], qtmp );
Matrix.setRotateFromQuat( mtmp, qtmp );
Matrix.preMult( m, mtmp );
};
} )()
} );
return StackedRotateAxis;
} );
<file_sep>/sources/osgAnimation/AnimationAttribute.js
define( [
'osg/Map',
'osg/Utils',
'osg/StateAttribute',
'osg/Uniform'
], function ( Map, MACROUTILS, StateAttribute, Uniform ) {
'use strict';
/**
* AnimationAttribute encapsulate Animation State
* Holds the skinning matrix palette and exposes it to shaders through the
* shared per-class uBones uniform (one uniform set per bone-uniform size).
* @class AnimationAttribute
* @inherits StateAttribute
*/
var AnimationAttribute = function ( disable, boneUniformSize ) {
StateAttribute.call( this );
this._enable = !disable;
// optional, if it's not provided, it will fall back to the maximum bone uniform size
// boneUniformSize represents the number of vec4 (uniform) used in the shader for all the bones
this._boneUniformSize = boneUniformSize;
};
// Class-level uniform cache, keyed by type member (attribute type + size).
AnimationAttribute.uniforms = {};
AnimationAttribute.maxBoneUniformSize = 1;
AnimationAttribute.maxBoneUniformAllowed = Infinity; // can be overriden by application specific limit on startup (typically gl limit)
// NOTE(review): the library name registered below is 'osgShadow', unlike the
// other osgAnimation classes -- confirm whether this should be 'osgAnimation'
// (serializer lookups depend on it).
AnimationAttribute.prototype = MACROUTILS.objectLibraryClass( MACROUTILS.objectInherit( StateAttribute.prototype, {
attributeType: 'AnimationAttribute',
cloneType: function () {
return new AnimationAttribute( true, this._boneUniformSize );
},
setBoneUniformSize: function ( boneUniformSize ) {
this._boneUniformSize = boneUniformSize;
},
getBoneUniformSize: function () {
return this._boneUniformSize !== undefined ? this._boneUniformSize : AnimationAttribute.maxBoneUniformSize;
},
getTypeMember: function () {
return this.attributeType + '_' + this.getBoneUniformSize();
},
getOrCreateUniforms: function () {
// uniform are once per CLASS attribute, not per instance
var obj = AnimationAttribute;
var typeMember = this.getTypeMember();
if ( obj.uniforms[ typeMember ] ) return obj.uniforms[ typeMember ];
var uniforms = {};
// NOTE(review): `new` on a factory named create* is suspicious -- verify
// Uniform.createFloat4Array tolerates being invoked as a constructor.
uniforms[ 'uBones' ] = new Uniform.createFloat4Array( [], 'uBones' );
obj.uniforms[ typeMember ] = new Map( uniforms );
return obj.uniforms[ typeMember ];
},
setMatrixPalette: function ( matrixPalette ) {
this._matrixPalette = matrixPalette;
// update max bone size
if ( this._boneUniformSize === undefined ) {
AnimationAttribute.maxBoneUniformSize = Math.max( AnimationAttribute.maxBoneUniformSize, matrixPalette.length / 4 );
AnimationAttribute.maxBoneUniformSize = Math.min( AnimationAttribute.maxBoneUniformAllowed, AnimationAttribute.maxBoneUniformSize );
}
},
getMatrixPalette: function () {
return this._matrixPalette;
},
// need a isEnabled to let the ShaderGenerator to filter
// StateAttribute from the shader compilation
isEnabled: function () {
return this._enable;
},
getHash: function () {
// bonesize is important, as the shader itself
// has a different code and uniform are not shared
// geoms have each their own bones matrix palette
// it's up to rigGeometry to use same anim Attrib per
// same bone matrix palette
// as uniform array size must be statically declared
// in shader code
return this.getTypeMember() + this.isEnabled();
},
apply: function () {
if ( !this._enable )
return;
var uniformMap = this.getOrCreateUniforms();
uniformMap.uBones.glData = uniformMap.uBones.data = this._matrixPalette; // hack to avoid copy
this.setDirty( false );
}
} ), 'osgShadow', 'AnimationAttribute' );
MACROUTILS.setTypeID( AnimationAttribute );
return AnimationAttribute;
} );
<file_sep>/sources/osgAnimation/CollectAnimationUpdateCallbackVisitor.js
define( [
'osg/Notify',
'osg/Utils',
'osg/NodeVisitor',
'osg/Object',
'osgAnimation/AnimationUpdateCallback'
], function ( Notify, MACROUTILS, NodeVisitor, Object, AnimationUpdateCallback ) {
'use strict';
// Visitor collecting every AnimationUpdateCallback in a subgraph,
// keyed by callback instance id.
var CollectAnimationUpdateCallbackVisitor = function () {
NodeVisitor.call( this );
this._animationUpdateCallback = {};
};
CollectAnimationUpdateCallbackVisitor.prototype = MACROUTILS.objectInherit( NodeVisitor.prototype, {
getAnimationUpdateCallbackMap: function () {
return this._animationUpdateCallback;
},
apply: function ( node ) {
var cbs = node.getUpdateCallbackList();
// collect animation update callbacks (removal is intentionally disabled)
for ( var i = 0, cbsLength = cbs.length; i < cbsLength; i++ ) {
var cb = cbs[ i ];
if ( cb instanceof AnimationUpdateCallback ) {
this._animationUpdateCallback[ cb.getInstanceID() ] = cb;
//node.removeUpdateCallback( cb );
}
}
this.traverse( node );
}
} );
return CollectAnimationUpdateCallbackVisitor;
} );
<file_sep>/sources/osgGA/osgGA.js
define( [
    'hammer',
    'osgGA/FirstPersonManipulator',
    'osgGA/FirstPersonManipulatorDeviceOrientationController',
    'osgGA/FirstPersonManipulatorHammerController',
    'osgGA/FirstPersonManipulatorOculusController',
    'osgGA/FirstPersonManipulatorStandardMouseKeyboardController',
    'osgGA/Manipulator',
    'osgGA/OrbitManipulator',
    'osgGA/OrbitManipulatorDeviceOrientationController',
    'osgGA/OrbitManipulatorGamePadController',
    'osgGA/OrbitManipulatorHammerController',
    'osgGA/OrbitManipulatorLeapMotionController',
    'osgGA/OrbitManipulatorOculusController',
    'osgGA/OrbitManipulatorStandardMouseKeyboardController',
    'osgGA/SwitchManipulator',
    'osgGA/OrbitManipulatorEnums'
], function ( Hammer, FirstPersonManipulator, FirstPersonManipulatorDeviceOrientationController, FirstPersonManipulatorHammerController, FirstPersonManipulatorOculusController, FirstPersonManipulatorStandardMouseKeyboardController, Manipulator, OrbitManipulator, OrbitManipulatorDeviceOrientationController, OrbitManipulatorGamePadController, OrbitManipulatorHammerController, OrbitManipulatorLeapMotionController, OrbitManipulatorOculusController, OrbitManipulatorStandardMouseKeyboardController, SwitchManipulator, OrbitManipulatorEnums ) {
    'use strict';

    // Registry module for camera manipulators and their input controllers.
    //
    // BUG FIX: AMD binds factory arguments to dependencies positionally. The
    // original parameter list put ...StandardMouseKeyboardController before
    // ...OculusController (in both the FirstPerson and Orbit groups) while
    // the dependency array declares them the other way around, silently
    // swapping the two controller classes. Parameters now match the
    // dependency order.
    var osgGA = {};

    Hammer.NO_MOUSEEVENTS = true; // disable hammer js mouse events

    osgGA.FirstPersonManipulator = FirstPersonManipulator;
    osgGA.getFirstPersonDeviceOrientationController = function () {
        return FirstPersonManipulatorDeviceOrientationController;
    };
    osgGA.getFirstPersonManipulatorHammerController = function () {
        return FirstPersonManipulatorHammerController;
    };
    osgGA.getFirstPersonStandardMouseKeyboardControllerClass = function () {
        return FirstPersonManipulatorStandardMouseKeyboardController;
    };
    osgGA.getFirstPersonOculusControllerClass = function () {
        return FirstPersonManipulatorOculusController;
    };
    osgGA.Manipulator = Manipulator;
    osgGA.OrbitManipulator = OrbitManipulator;
    osgGA.getOrbitManipulatorDeviceOrientationController = function () {
        return OrbitManipulatorDeviceOrientationController;
    };
    osgGA.getOrbitManipulatorGamePadController = function () {
        return OrbitManipulatorGamePadController;
    };
    osgGA.getOrbitManipulatorHammerController = function () {
        return OrbitManipulatorHammerController;
    };
    osgGA.getOrbitManipulatorLeapMotionController = function () {
        return OrbitManipulatorLeapMotionController;
    };
    osgGA.getOrbitManipulatorStandardMouseKeyboardController = function () {
        return OrbitManipulatorStandardMouseKeyboardController;
    };
    osgGA.getOrbitManipulatorOculusController = function () {
        return OrbitManipulatorOculusController;
    };
    osgGA.SwitchManipulator = SwitchManipulator;

    // Convenience aliases for the orbit interaction modes.
    osgGA.OrbitManipulator.Rotate = OrbitManipulatorEnums.ROTATE;
    osgGA.OrbitManipulator.Pan = OrbitManipulatorEnums.PAN;
    osgGA.OrbitManipulator.Zoom = OrbitManipulatorEnums.ZOOM;

    return osgGA;
} );
<file_sep>/sources/osgAnimation/StackedTranslate.js
define( [
'osg/Utils',
'osg/Object',
'osg/Matrix',
'osg/Vec3',
'osgAnimation/Target'
], function ( MACROUTILS, Object, Matrix, Vec3, Target ) {
'use strict';
/**
* StackedTranslate
* Stacked transform element holding a translation vector; applied by
* pre-multiplying a translation into the owning transform's matrix.
*/
var StackedTranslate = function ( name, translate ) {
Object.call( this );
this._target = Target.createVec3Target( translate || Vec3.zero );
if ( name ) this.setName( name );
};
StackedTranslate.prototype = MACROUTILS.objectInherit( Object.prototype, {
// Sets both the current value and the default (fallback) value.
init: function ( translate ) {
this.setTranslate( translate );
Vec3.copy( translate, this._target.defaultValue );
},
setTranslate: function ( translate ) {
Vec3.copy( translate, this._target.value );
},
getTarget: function () {
return this._target;
},
resetToDefaultValue: function () {
this.setTranslate( this._target.defaultValue );
},
// m = m * translate(value).
applyToMatrix: function ( m ) {
Matrix.preMultTranslate( m, this._target.value );
}
} );
return StackedTranslate;
} );
<file_sep>/sources/osgAnimation/CollectBoneVisitor.js
define( [
'osg/Utils',
'osg/NodeVisitor',
'osg/Notify',
'osgAnimation/Bone'
], function ( MACROUTILS, NodeVisitor, Notify, Bone ) {
'use strict';
// Visitor building a name -> Bone map for an entire subgraph.
// Unnamed bones are skipped with a warning (they cannot be keyed).
var CollectBoneVisitor = function () {
NodeVisitor.call( this, NodeVisitor.TRAVERSE_ALL_CHILDREN );
this._boneMap = {};
};
CollectBoneVisitor.prototype = MACROUTILS.objectInherit( NodeVisitor.prototype, {
apply: function ( node ) {
if ( node.typeID === Bone.typeID ) {
var name = node.getName();
if ( !name ) {
Notify.warn( 'found Bone without name' );
} else {
this._boneMap[ name ] = node;
}
}
this.traverse( node );
},
getBoneMap: function () {
return this._boneMap;
}
} );
return CollectBoneVisitor;
} );
<file_sep>/sources/osg/BillboardAttribute.js
define( [
'osg/Utils',
'osg/StateAttribute'
], function ( MACROUTILS, StateAttribute ) {
'use strict';
// State attribute acting as an on/off flag for billboard rendering;
// it carries no GL state of its own (apply only clears the dirty flag).
var BillboardAttribute = function () {
StateAttribute.call( this );
this._attributeEnable = false;
};
BillboardAttribute.prototype = MACROUTILS.objectLibraryClass( MACROUTILS.objectInherit( StateAttribute.prototype, {
attributeType: 'Billboard',
cloneType: function () {
return new BillboardAttribute();
},
setEnabled: function ( state ) {
this._attributeEnable = state;
this.dirty();
},
isEnabled: function () {
return this._attributeEnable;
},
apply: function ( /*state*/) {
this.setDirty( false );
}
} ), 'osg', 'Billboard' );
return BillboardAttribute;
} );
|
7ef6d0d1d0d859cd471a7b9a91878e0ad3de22b1
|
[
"JavaScript"
] | 19
|
JavaScript
|
arthurjamain/osgjs
|
9d7a35d5ab494ca13ee9f9381522c3a3213ee18b
|
e01bc4893404c742a579276b5c63047a48b04e34
|
refs/heads/master
|
<file_sep>function Param() {
this.breakpoint = 750;
this.displayType = (window.innerWidth > this.breakpoint) ? "pc" : "sp";
this.webgl = {
ball: {
splashTime: 120, /* スプラッシュ表示時間 1s = 60 */
num: {
splash: 30, /* 初回表示時 */
normal: 16 /* 表示後にループさせているとき */
}
}
}
this.status = {
}
};
// Wraps the page's GSAP (TweenMax/TweenLite) entrance/exit animations.
// Each method takes a jQuery target and a phase: "set" (prepare hidden
// state), "out" (animate away), anything else (animate in).
function EffectManager() {
this.$svgTarget = $(".js__tsc-svg-write,.js__svg-write");
// Scroll/transform tuning values (commented block keeps a previous preset).
this.conf = {
// rotRadius : 20,
// scrollInc : 0.27,
// transY : 0.07,
// transX : 0.35,
// startPosiX : 0,
// transZ : 1,
// transRot:-0.45,
// opacity : 80,
rotRadius: 20,
scrollInc: 0.5,
transY: 1,
transX: (window.innerWidth <= 320) ? 0.4 : 0.5,
startPosiX: 0,
transZ: 1,
transRot: -0.22,
opacity: 80,
}
}
// Slide-in from the left with a Back ease; staggered across targets.
EffectManager.prototype.show = function ($target, type) {
if (type === "set") {
TweenMax.set($target, {
x: -100,
z: -1,
opacity: 0,
// scale:0.8
});
} else if (type === "out") {
TweenMax.staggerTo($target, 0.6, {
x: -100,
z: -1,
opacity: 0,
ease: Back.easeIn.config(2),
}, 0.05);
} else {
TweenMax.staggerTo($target, 0.9, {
x: 0,
z: 0,
opacity: 1,
// scale:1,
ease: Back.easeOut.config(2),
}, 0.1);
}
}
// Zoom (scale 0 <-> 1) with fade; staggered across targets.
EffectManager.prototype.scale = function ($target, type) {
if (type === "set") {
TweenMax.set($target, {
opacity: 0,
scale: 0
});
} else if (type === "out") {
TweenMax.staggerTo($target, 0.6, {
opacity: 0,
scale: 0,
ease: Power2.easeIn,
}, 0.1);
} else {
TweenMax.staggerTo($target, 0.6, {
opacity: 1,
scale: 1,
ease: Power2.easeOut,
}, 0.1);
}
}
// Drift-in from upper-left with scale/fade (no "out" phase for this one).
EffectManager.prototype.float = function ($target, type) {
if (type === "set") {
TweenMax.set($target, {
x: -20,
y: -30,
opacity: 0,
// rotationX:30,
// rotationY:90,
scale: 0
});
} else {
TweenMax.staggerTo($target, 0.8, {
x: 0,
y: 0,
opacity: 1,
// rotationX:0,
// rotationY:0,
scale: 1,
ease: Power2.easeOut,
}, 0.05);
}
}
// Reset SVG stroke drawing to 0% (requires the DrawSVG plugin).
EffectManager.prototype.svgInit = function () {
TweenLite.set(this.$svgTarget.find(".js__write"), { drawSVG: "0%" });
}
// Draw SVG strokes in after an optional delay.
EffectManager.prototype.svgAnimation = function ($target, delayTime) {
TweenMax.staggerTo($target.find(".js__write"), 0.3, {
drawSVG: "100%",
ease: Power2.easeOut,
delay: delayTime
}, .05);
}
// Erase SVG strokes after an optional delay.
EffectManager.prototype.svgAnimationOut = function ($target, delayTime) {
TweenMax.staggerTo($target.find(".js__write"), 0.3, {
drawSVG: "0%",
ease: Power2.easeIn,
delay: delayTime
}, .05);
}
// Per-character text reveal using the SplitText plugin; characters slide in
// from x=150 with a stagger.
EffectManager.prototype.splitText = function ($target, type) {
if ($target.length == 0) {
console.log('element not exist');
return;
}
if (type === "set") {
let text = new SplitText($target, { type: "words,chars" }),
chars = text.chars;
TweenMax.set(chars, { opacity: 0, x: 150 });
} else if (type === "out") {
TweenMax.staggerTo($target, 0.6,
{
opacity: 0, x: 150,
// filter:"blur(0px)",
ease: "Power2.easeIn",
},
0.01
);
} else {
TweenMax.staggerTo($target, 0.8,
{
opacity: 1, x: 0,
// filter:"blur(0px)",
ease: "Power2.easeOut",
},
0.05
);
}
}
// Small numeric helpers shared by the page scripts. Stateless.
function Util() {}

// Linearly remap `num` from the range [fromMin, fromMax] onto
// [toMin, toMax], clamping at both ends.
Util.prototype.map = function (num, toMin, toMax, fromMin, fromMax) {
    if (num <= fromMin) return toMin;
    if (num >= fromMax) return toMax;
    var slope = (toMax - toMin) / (fromMax - fromMin);
    return toMin + (num - fromMin) * slope;
}

// Random integer in [min, max] inclusive.
// NOTE(review): the argument order is (max, min); several call sites in
// this file pass them the other way round — verify intent.
Util.prototype.rand = function (max, min) {
    return min + Math.floor(Math.random() * (max + 1 - min));
}
// Module-level singletons shared by webglManager below.
const param = new Param();
const effect = new EffectManager();
const util = new Util();
export default class webglManager {
    // Cache the viewport size and build the animation configuration.
    // The animation has two phases: "splash" (initial burst, lasting
    // conf.splash.time frames) and "normal" (the looping background state).
    constructor() {
        this.w = window.innerWidth;
        this.h = window.innerHeight;
        this.mouse;      // placeholder declarations; assigned later
        this.stage;
        this.renderer;
        this.container;
        this.ball = [];  // per-ball state objects, filled in init()
        this.conf = {
            splash: {
                time: param.webgl.ball.splashTime,
                flg: false
            },
            ball: {
                NUM: param.webgl.ball.num.splash,
                color: ["0xff9d88", "0x38c0fc", "0xcdd6d5"],
                size: [60, 45, 40, 30, 20, 10],
                scaleSize: 1, /* amplitude of the ball-size pulsing (original comment said "speed" — verify) */
                scaleSpeed: 6, /* speed at which the ball size pulses */
                speed: 1, /* upward scroll speed of the balls */
                float: [80, 60, 40], /* horizontal sway speeds */
                floatSpan: 1, /* sway amplitude */
                mode: "splash",
                mouse: [60, 40, 30, 20, 10],
            }
        }
        this.count = [];
        /* low-fps mitigation state (see updateTimeRatio/render) */
        this.time = 0;
        this.timeRatio = 1;
        this.FPS_60_SEC = 1000 / 60;
        this.timer = 0;
    }
    // Create the PIXI stage/renderer, attach the canvas to #l-ball and
    // build the per-ball state objects with randomised size/colour/speed.
    init() {
        this.stage = new PIXI.Stage(0xffffff);
        this.renderer = new PIXI.autoDetectRenderer(this.w, this.h, {
            transparent: true,
            backgroundColor: 0xffffff,
            antialias: true,
            // resolution : 1, /* resolution */
        });
        this.container = new PIXI.Container();
        // this.renderer.renderer.autoResize = true;
        document.getElementById('l-ball').appendChild(this.renderer.view);
        for (let i = 0; this.conf.ball.NUM > i; i++) {
            this.count[i] = 0;
            // let _d = i/this.conf.ball.NUM;
            // let _y = {
            //     start : this.h*(1.5 + (1 *_d)),
            //     end : this.h*(4 + (2 * _d))
            // }
            // NOTE(review): util.rand is declared as rand(max, min) but is
            // called here as rand(-1, N); the resulting index can equal N-1
            // or N, which is out of range for these arrays (undefined entry)
            // — confirm whether this is intentional.
            this.ball[i] = {
                shape: new PIXI.Graphics(),
                size: this.conf.ball.size[util.rand(-1, 7)],
                mouse: this.conf.ball.mouse[util.rand(-1, 5)],
                color: this.conf.ball.color[util.rand(-1, 3)],
                start: {
                    // x : util.rand(0,this.w),
                    // y : util.rand(_y.start,_y.end),
                    x: 0,
                    y: 0
                },
                speed: util.rand(200, 400),
                float: this.conf.ball.float[util.rand(-1, 3)],
            }
        }
        // this.datGUI();
    }
updateTimeRatio() {
let _lastTime = this.time;
if (_lastTime > 0) {
let _dTime = new Date().getTime() - _lastTime;
this.timeRatio = _dTime / this.FPS_60_SEC;
}
this.time = new Date().getTime();
}
    // Per-frame update: redraw every ball, advance the splash->normal phase
    // transition, and render the stage. `mouse` offsets add a parallax nudge.
    render(mouse) {
        // this.updateTimeRatio();
        this.timer++;
        /* drop antialiasing when the frame rate falls below 60fps */
        // NOTE(review): toggling .antialias after renderer creation may have
        // no effect in PIXI — confirm against the PIXI version in use.
        if (this.timeRatio !== 1) {
            this.renderer.antialias = false;
        } else {
            this.renderer.antialias = true;
        }
        // After the splash duration, fade the stage out once, then switch
        // to "normal" mode with fewer balls and a rebuilt display list.
        if (this.timer == this.conf.splash.time && !this.conf.splash.flg) {
            this.conf.splash.flg = true;
            let _this = this;
            TweenMax.to(this.stage.children[0], 0.6, {
                alpha: 0, ease: Power2.easeIn, onComplete: function () {
                    _this.conf.ball.mode = "normal";
                    _this.conf.ball.NUM = param.webgl.ball.num.normal;
                    _this.container.removeChildren();
                    TweenMax.set(_this.stage.children[0], { alpha: 1 });
                    _this.stage.removeChildren();
                }
            });
        }
        for (let i = 0; this.conf.ball.NUM > i; i++) {
            this.count[i] += 0.01;
            // Splash mode scrolls 10x faster than normal mode.
            let _speed = (this.conf.ball.mode === "splash") ? this.ball[i].speed * 10 * this.conf.ball.speed : this.ball[i].speed * this.conf.ball.speed;
            this.ball[i].shape.clear();
            this.ball[i].shape.lineStyle(0);
            this.ball[i].shape.beginFill(this.ball[i].color, 1.0);
            // x: sinusoidal sway + mouse parallax; y: upward scroll;
            // radius: pulsing size minus a small mouse-dependent shrink.
            this.ball[i].shape.drawCircle(
                this.ball[i].start.x - (Math.sin(this.count[i]) * this.ball[i].float + (mouse.x / (700 / this.ball[i].size))) * this.conf.ball.floatSpan,
                this.ball[i].start.y - this.count[i] * _speed + (mouse.y / (1000 / this.ball[i].size)),
                this.ball[i].size - Math.sin(this.count[i] * this.conf.ball.scaleSpeed) * this.conf.ball.scaleSize - ((mouse.x + mouse.y) / 200)
            );
            // if(i===1) console.log(i, this.ball[i].shape.graphicsData[0].shape.y);
            // this.ball[i].shape.drawCircle(470 - Math.sin(this.count)*40,this.h - this.count*100,60);
            this.ball[i].shape.endFill();
            this.container.addChild(this.ball[i].shape);
            this.stage.addChild(this.container);
            // if(this.conf.ball.NUM-1==i){
            //     console.log(-this.h,this.ball[i].shape.graphicsData[0]);
            // }
            // Recycle the ball once it scrolls above the viewport.
            if (-this.h > this.ball[i].shape.graphicsData[0].shape.y) {
                this.count[i] = 0;
            }
        }
        this.renderer.render(this.stage);
    }
resize() {
this.w = window.innerWidth;
this.h = window.innerHeight;
if (this.ball[0]) {
for (let i = 0; this.conf.ball.NUM > i; i++) {
let _d = i / this.conf.ball.NUM;
let _y = {
start: this.h * (1.01 + (1 * _d)),
end: this.h * (2 + (2 * _d))
}
this.ball[i].start.x = util.rand(0, this.w);
this.ball[i].start.y = util.rand(_y.start, _y.end);
}
}
this.renderer.resize(this.w, this.h);
}
    /* -------------------------------------------
    -- dat.GUI debug panel for live-tweaking the ball parameters
    -- (disabled by default; see the commented call in init()).
    ------------------------------------------- */
    datGUI() {
        let _gui = new dat.gui.GUI();
        _gui.remember(effect);
        // //
        var _f1 = _gui.addFolder('BALL');
        // _f1.add(this.conf.ball, 'NUM', 0.0, 30.0);
        _f1.add(this.conf.ball, 'speed', 1.00, 3.00);
        _f1.add(this.conf.ball, 'scaleSize', 0.0, 10.0);
        _f1.add(this.conf.ball, 'scaleSpeed', 0.0, 20.0);
        _f1.add(this.conf.ball, 'floatSpan', 1.0, 20.0);
        // let _floatSpeed = _f1.add(this.conf.ball, 'float', 0, 1000);
        // // _f1.add(effect.product, 'transZ', 0.00, 2.00);
        // _f1.add(effect.product, 'transRot', -2.00, 2.00);
        //
        //
        //
        // startPosiX.onChange(function(value) {
        //
        //     TweenMax.set($("#p-product-list"),{x:value});
        //
        // });
        // _f1.add(effect.product, 'opacity', 0, 300);
    }
}<file_sep>export default class Util {
    // No state: all Util methods are pure helpers.
    constructor(){
    }
// 値のマッピング
// -----------------------------------
// num : マッピングする値
// toMin : 変換後の最小値
// toMax : 変換後の最大値
// fromMin : 変換前の最小値
// fromMax : 変換前の最大値
// -----------------------------------
map (num, toMin, toMax, fromMin, fromMax){
if(num <= fromMin) {
return toMin;
}
if(num >= fromMax){
return toMax;
}
let p = (toMax - toMin) / (fromMax - fromMin);
return ((num - fromMin) * p) + toMin;
}
// 範囲を指定して乱数取得
// -----------------------------------
// max : 最大値
// min : 最小値
// -----------------------------------
rand (max,min){
return Math.floor( Math.random() * (max + 1 - min) ) + min;
}
}<file_sep>import Param from './param';
import effectManager from './effect';
import Util from './util';
// Module-level singletons shared by the class below.
const param = new Param();
const effect = new effectManager();
const util = new Util();
export default class webglManager{
    // Cache the viewport size and build the animation configuration.
    // Phases: "splash" (initial burst, conf.splash.time frames) then
    // "normal" (the looping background state with fewer balls).
    constructor(){
        this.w = window.innerWidth;
        this.h = window.innerHeight;
        this.mouse;      // placeholder declarations; assigned later
        this.stage;
        this.renderer;
        this.container;
        this.ball = [];  // per-ball state objects, filled in init()
        this.conf = {
            splash:{
                time : param.webgl.ball.splashTime,
                flg : false
            },
            ball : {
                NUM : param.webgl.ball.num.splash,
                color : ["0xff9d88","0x38c0fc","0xcdd6d5"],
                size : [60,45,40,30,20,10],
                scaleSize : 1, /* amplitude of the ball-size pulsing (original comment said "speed" — verify) */
                scaleSpeed : 6, /* speed at which the ball size pulses */
                speed : 1, /* upward scroll speed of the balls */
                float : [80,60,40], /* horizontal sway speeds */
                floatSpan : 1, /* sway amplitude */
                mode : "splash",
                mouse : [60,40,30,20,10],
            }
        }
        this.count = [];
        /* low-fps mitigation state (see updateTimeRatio/render) */
        this.time = 0;
        this.timeRatio = 1;
        this.FPS_60_SEC = 1000 / 60;
        this.timer = 0;
    }
    // Create the PIXI stage/renderer, attach the canvas to #l-ball and
    // build the per-ball state with randomised size/colour/speed.
    init(){
        this.stage = new PIXI.Stage(0xffffff);
        this.renderer = new PIXI.autoDetectRenderer(this.w, this.h, {
            transparent: true,
            backgroundColor : 0xffffff,
            antialias : true,
            // resolution : 1, /* resolution */
        });
        this.container = new PIXI.Container();
        // this.renderer.renderer.autoResize = true;
        document.getElementById('l-ball').appendChild(this.renderer.view);
        for(let i=0; this.conf.ball.NUM>i; i++){
            this.count[i] = 0;
            // let _d = i/this.conf.ball.NUM;
            // let _y = {
            //     start : this.h*(1.5 + (1 *_d)),
            //     end : this.h*(4 + (2 * _d))
            // }
            // NOTE(review): util.rand is rand(max, min) but is called here as
            // rand(-1, N); the index can land one past the end of these
            // arrays (undefined entry) — confirm whether intentional.
            this.ball[i] = {
                shape : new PIXI.Graphics(),
                size : this.conf.ball.size[util.rand(-1,7)],
                mouse : this.conf.ball.mouse[util.rand(-1,5)],
                color : this.conf.ball.color[util.rand(-1,3)],
                start : {
                    // x : util.rand(0,this.w),
                    // y : util.rand(_y.start,_y.end),
                    x : 0,
                    y : 0
                },
                speed : util.rand(200,400),
                float : this.conf.ball.float[util.rand(-1,3)],
            }
        }
        // this.datGUI();
    }
updateTimeRatio() {
let _lastTime = this.time;
if(_lastTime > 0) {
let _dTime = new Date().getTime() - _lastTime;
this.timeRatio = _dTime / this.FPS_60_SEC;
}
this.time = new Date().getTime();
}
    // Per-frame update: redraw every ball, advance the splash->normal phase
    // transition, and render the stage. `mouse` offsets add a parallax nudge.
    render(mouse) {
        // this.updateTimeRatio();
        this.timer++;
        /* drop antialiasing when the frame rate falls below 60fps */
        // NOTE(review): toggling .antialias after renderer creation may have
        // no effect in PIXI — confirm against the PIXI version in use.
        if(this.timeRatio!==1){
            this.renderer.antialias = false;
        }else{
            this.renderer.antialias = true;
        }
        // After the splash duration, fade out once, then switch to "normal"
        // mode with fewer balls and a rebuilt display list.
        if(this.timer==this.conf.splash.time&&!this.conf.splash.flg) {
            this.conf.splash.flg = true;
            let _this = this;
            TweenMax.to(this.stage.children[0],0.6,{alpha:0,ease:Power2.easeIn,onComplete:function () {
                _this.conf.ball.mode = "normal";
                _this.conf.ball.NUM = param.webgl.ball.num.normal;
                _this.container.removeChildren();
                TweenMax.set(_this.stage.children[0],{ alpha:1 });
                _this.stage.removeChildren();
            }});
        }
        for(let i=0; this.conf.ball.NUM>i; i++){
            this.count[i] += 0.01;
            // Splash mode scrolls 10x faster than normal mode.
            let _speed = (this.conf.ball.mode==="splash")?this.ball[i].speed*10*this.conf.ball.speed:this.ball[i].speed*this.conf.ball.speed;
            this.ball[i].shape.clear();
            this.ball[i].shape.lineStyle(0);
            this.ball[i].shape.beginFill(this.ball[i].color, 1.0);
            // x: sinusoidal sway + mouse parallax; y: upward scroll;
            // radius: pulsing size minus a small mouse-dependent shrink.
            this.ball[i].shape.drawCircle(
                this.ball[i].start.x -( Math.sin(this.count[i])*this.ball[i].float + (mouse.x/(700/this.ball[i].size)) )*this.conf.ball.floatSpan,
                this.ball[i].start.y - this.count[i] * _speed + (mouse.y/(1000/this.ball[i].size)),
                this.ball[i].size - Math.sin(this.count[i]*this.conf.ball.scaleSpeed)*this.conf.ball.scaleSize-( (mouse.x+mouse.y)/200 )
            );
            // if(i===1) console.log(i, this.ball[i].shape.graphicsData[0].shape.y);
            // this.ball[i].shape.drawCircle(470 - Math.sin(this.count)*40,this.h - this.count*100,60);
            this.ball[i].shape.endFill();
            this.container.addChild(this.ball[i].shape);
            this.stage.addChild(this.container);
            // if(this.conf.ball.NUM-1==i){
            //     console.log(-this.h,this.ball[i].shape.graphicsData[0]);
            // }
            // Recycle the ball once it scrolls above the viewport.
            if(-this.h > this.ball[i].shape.graphicsData[0].shape.y){
                this.count[i] = 0;
            }
        }
        this.renderer.render(this.stage);
    }
resize(){
this.w = window.innerWidth;
this.h = window.innerHeight;
if(this.ball[0]){
for(let i=0; this.conf.ball.NUM>i; i++){
let _d = i/this.conf.ball.NUM;
let _y = {
start : this.h*(1.01 + (1 *_d)),
end : this.h*(2 + (2 * _d))
}
this.ball[i].start.x = util.rand(0,this.w);
this.ball[i].start.y = util.rand(_y.start,_y.end);
}
}
this.renderer.resize(this.w,this.h);
}
    /* -------------------------------------------
    -- dat.GUI debug panel for live-tweaking the ball parameters
    -- (disabled by default; see the commented call in init()).
    ------------------------------------------- */
    datGUI(){
        let _gui = new dat.gui.GUI();
        _gui.remember(effect);
        // //
        var _f1 = _gui.addFolder('BALL');
        // _f1.add(this.conf.ball, 'NUM', 0.0, 30.0);
        _f1.add(this.conf.ball, 'speed', 1.00, 3.00);
        _f1.add(this.conf.ball, 'scaleSize', 0.0, 10.0);
        _f1.add(this.conf.ball, 'scaleSpeed', 0.0, 20.0);
        _f1.add(this.conf.ball, 'floatSpan', 1.0, 20.0);
        // let _floatSpeed = _f1.add(this.conf.ball, 'float', 0, 1000);
        // // _f1.add(effect.product, 'transZ', 0.00, 2.00);
        // _f1.add(effect.product, 'transRot', -2.00, 2.00);
        //
        //
        //
        // startPosiX.onChange(function(value) {
        //
        //     TweenMax.set($("#p-product-list"),{x:value});
        //
        // });
        // _f1.add(effect.product, 'opacity', 0, 300);
    }
}<file_sep>import Param from '../param';
// Counter-animation tuning constants.
// NOTE(review): cntEasing/cntSpeed/cntDelay and `param` are not referenced
// anywhere in this module's visible code — possibly dead or consumed by an
// external tool (dat.GUI). Confirm before removing.
const cntEasing = "Circ.easeIn";
const cntSpeed = 2.38;
const cntDelay = 0.1;
let param = new Param();
// Centralised TweenMax/TweenLite animation helpers used across the page.
// Each effect method accepts a jQuery collection and a phase string:
//   "set" -> jump targets to their hidden start state
//   "out" -> animate targets out of view
//   other -> animate targets into view
export default class effectManager {
    constructor() {
        // SVG stroke-drawing targets (DrawSVG plugin).
        this.$svgTarget = $(".js__tsc-svg-write,.js__svg-write");
        // Scroll/parallax tuning values (read externally, e.g. via dat.GUI).
        this.conf = {
            rotRadius: 20,
            scrollInc: 0.5,
            transY: 1,
            transX: (window.innerWidth <= 320) ? 0.4 : 0.5,
            startPosiX: 0,
            transZ: 1,
            transRot: -0.22,
            opacity: 80,
        };
    }
    // Horizontal slide effect with a "back" ease.
    show($target, type) {
        if (type === "set") {
            TweenMax.set($target, { x: -100, z: -1, opacity: 0 });
            return;
        }
        if (type === "out") {
            TweenMax.staggerTo($target, 0.6, {
                x: -100,
                z: -1,
                opacity: 0,
                ease: Back.easeIn.config(2),
            }, 0.05);
            return;
        }
        TweenMax.staggerTo($target, 0.9, {
            x: 0,
            z: 0,
            opacity: 1,
            ease: Back.easeOut.config(2),
        }, 0.1);
    }
    // Pop-in / pop-out scaling effect.
    scale($target, type) {
        switch (type) {
            case "set":
                TweenMax.set($target, { opacity: 0, scale: 0 });
                break;
            case "out":
                TweenMax.staggerTo($target, 0.6, {
                    opacity: 0,
                    scale: 0,
                    ease: Power2.easeIn,
                }, 0.1);
                break;
            default:
                TweenMax.staggerTo($target, 0.6, {
                    opacity: 1,
                    scale: 1,
                    ease: Power2.easeOut,
                }, 0.1);
        }
    }
    // Drift-in effect; only "set" and enter phases exist (no "out").
    float($target, type) {
        if (type === "set") {
            TweenMax.set($target, { x: -20, y: -30, opacity: 0, scale: 0 });
            return;
        }
        TweenMax.staggerTo($target, 0.8, {
            x: 0,
            y: 0,
            opacity: 1,
            scale: 1,
            ease: Power2.easeOut,
        }, 0.05);
    }
    // Collapse all SVG strokes so they can be drawn in later.
    svgInit() {
        TweenLite.set(this.$svgTarget.find(".js__write"), { drawSVG: "0%" });
    }
    // Draw the SVG strokes of $target in, after `delayTime` seconds.
    svgAnimation($target, delayTime) {
        const vars = { drawSVG: "100%", ease: Power2.easeOut, delay: delayTime };
        TweenMax.staggerTo($target.find(".js__write"), 0.3, vars, 0.05);
    }
    // Erase the SVG strokes of $target, after `delayTime` seconds.
    svgAnimationOut($target, delayTime) {
        const vars = { drawSVG: "0%", ease: Power2.easeIn, delay: delayTime };
        TweenMax.staggerTo($target.find(".js__write"), 0.3, vars, 0.05);
    }
    // Per-character slide effect. "set" splits the element into chars
    // (SplitText plugin) and hides them; other phases tween $target itself.
    splitText($target, type) {
        if (!$target.length) {
            console.log('element not exist');
            return;
        }
        if (type === "set") {
            const split = new SplitText($target, { type: "words,chars" });
            TweenMax.set(split.chars, { opacity: 0, x: 150 });
        } else if (type === "out") {
            TweenMax.staggerTo($target, 0.6, {
                opacity: 0,
                x: 150,
                ease: "Power2.easeIn",
            }, 0.01);
        } else {
            TweenMax.staggerTo($target, 0.8, {
                opacity: 1,
                x: 0,
                ease: "Power2.easeOut",
            }, 0.05);
        }
    }
}
<file_sep>export default class Param {
constructor(){
this.breakpoint = 750;
this.displayType = ( window.innerWidth>this.breakpoint)?"pc":"sp";
this.webgl = {
ball : {
splashTime : 120, /* スプラッシュ表示時間 1s = 60 */
num : {
splash : 30, /* 初回表示時 */
normal : 16 /* 表示後にループさせているとき */
}
}
}
this.status = {
}
this.instagram = {
src : "https://www.eventstudio.jp/getInstagram.php"
}
}
}
|
3e210360890192351945866b1566944debdb1e42
|
[
"JavaScript"
] | 5
|
JavaScript
|
saviour2008/ScrollMagicDemo
|
6227605074ddb6c35e73231c49626369b970eb67
|
f4ab9c1caa3a909b3f0ec11ee41dcad9a2c2bf34
|
refs/heads/master
|
<repo_name>magictear/MagicVid<file_sep>/routes/index.js
const express = require('express')
const router = express.Router()
const User = require('../models/user')
const Theme = require('../models/theme')
var url = require('url');
const upload = require('../utilis/upload')
var fs = require('fs');
module.exports = (app, passport) => {
var order_home, order_user, order_cate
var keyword, category
var filter_home, filter_user, filter_cate
router.get('/', function(req, res) {
res.render('index', { user: req.user });
})
router.get('/home', function(req, res) {
//Model.find(condition, fields, { sort: [['_id', -1]] }, callback);
//Model.find().sort({ '_id': -1 }).limit(1).exec(function (err, docs) { })
var params = url.parse(req.url, true).query;
keyword = params.s
if (keyword == null) {
filter_home = {}
} else filter_home = {
name: eval("/" + keyword + "/i")
//name: { $regex: title, $Option: "$i" }
}
var x = params.orderby;
switch (x) {
case 'price':
order_home = { 'price': 1 };
break;
case 'price-desc':
order_home = { 'price': -1 };
break;
case '_id':
order_home = { '_id': 1 };
break;
case '_id_desc':
order_home = { '_id': -1 };
break;
case 'name':
order_home = { 'name': 1 };
break;
case 'date':
order_home = { 'date': 1 };
break;
case 'date_desc':
order_home = { 'date': -1 };
break;
default:
order_home = { '_id': 1 };
}
Theme.find(filter_home).sort(order_home).exec(function(err, allThemes) {
if (err) {
console.log(err);
} else {
res.render('home', { user: req.user, videos: allThemes });
}
});
})
router.get('/user/:username', function(req, res) {
var params = url.parse(req.url, true).query;
keyword = params.s;
var x = params.orderby;
if (keyword == null) {
filter_user = { username: req.params.username };
} else filter_user = {
name: eval("/" + keyword + "/i"),
username: req.params.username
//name: { $regex: title, $Option: "$i" }
}
switch (x) {
case 'price':
order_user = { 'price': 1 };
break;
case 'price-desc':
order_user = { 'price': -1 };
break;
case '_id':
order_user = { '_id': 1 };
break;
case '_id_desc':
order_user = { '_id': -1 };
break;
case 'name':
order_user = { 'name': 1 };
break;
case 'date':
order_home = { 'date': 1 };
break;
case 'date_desc':
order_home = { 'date': -1 };
break;
default:
order_user = { '_id': 1 };
}
Theme.find(filter_user).sort(order_user).exec(function(err, classThemes) {
if (err) {
console.log(err);
res.redirect('home')
} else {
res.render('user', { user: req.user, videos: classThemes });
}
})
})
router.get('/signup', function(req, res) {
res.render('signup')
})
router.post('/signup', function(req, res) {
User.register(new User({ username: req.body.username, email: req.body.email }), req.body.password, function(err, user) {
if (err) {
console.log(err);
res.render('signup')
}
passport.authenticate('local')(req, res, function() {
req.flash('注册成功!')
res.redirect('/home')
})
})
})
router.get('/signin', function(req, res) {
res.render('signin')
})
router.post('/signin', passport.authenticate('local', {
successRedirect: '/home',
failureRedirect: '/signin',
}))
router.get('/logout', function(req, res) {
req.logout()
req.flash('成功退出!')
res.redirect('/home')
});
/* GET newtheme page. */
router.get('/newtheme', function(req, res) {
res.render('newtheme', { user: req.user });
});
/* POST newtheme logic. */
router.post('/newtheme', upload.single('file'), function(req, res) {
var newvid = {
name: req.file.filename,
video: req.file.path.replace("magic", "").split("\\").join("/"),
username: req.user.username,
price: req.body.price,
classification: req.body.classification,
date: new Date(),
description: req.body.description
};
Theme.create(newvid, function(err) {
if (err) {
console.log(err);
res.render('newtheme');
} else {
res.redirect('home');
}
});
});
router.get('/official-themes', function(req, res) {
res.render('official-themes', { user: req.user });
});
/* GET product page. */
router.get('/product/:id', function(req, res) {
Theme.findById(req.params.id, function(err, foundTheme) {
if (err) {
console.log(err);
} else {
res.render('product', { user: req.user, video: foundTheme });
}
});
});
router.get('/product/:id/edit', function(req, res) {
Theme.findById(req.params.id, function(err, foundTheme) {
if (err) {
console.log(err);
} else {
res.render('edit', { user: req.user, video: foundTheme });
}
});
});
router.put('/product/:id', function(req, res) {
var newvid = {
name: req.body.name,
video: req.body.video,
username: req.user.username,
price: req.body.price,
classification: req.body.classification,
date: new Date(),
description: req.body.description
};
Theme.findByIdAndUpdate(req.params.id, newvid,
function(err, updated) {
if (err) {
console.log(err);
res.redirect('/home')
} else {
res.redirect('/product/' + req.params.id);
}
});
});
router.delete('/product/:id', function(req, res) {
Theme.findByIdAndDelete(req.params.id, function(err) {
if (err) {
res.redirect('/home')
} else {
res.redirect('/home');
}
});
});
router.get('/sell', function(req, res) {
res.render('sell', { user: req.user });
});
router.get('/category/:category', function(req, res) {
var params = url.parse(req.url, true).query;
keyword = params.s
category = req.params.category
if (keyword == null) {
filter_cate = { classification: category }
} else filter_cate = {
name: eval("/" + keyword + "/i"),
classification: category
//name: { $regex: title, $Option: "$i" }
}
var x = params.orderby;
switch (x) {
case 'price':
order_cate = { 'price': 1 };
break;
case 'price-desc':
order_cate = { 'price': -1 };
break;
case '_id':
order_cate = { '_id': 1 };
break;
case '_id_desc':
order_cate = { '_id': -1 };
break;
case 'name':
order_cate = { 'name': 1 };
break;
case 'date':
order_home = { 'date': 1 };
break;
case 'date_desc':
order_home = { 'date': -1 };
break;
default:
order_cate = { '_id': 1 };
}
Theme.find(filter_cate).sort(order_cate).exec(function(err, classThemes) {
if (err) {
console.log(err);
res.redirect('home')
} else {
res.render('category', { user: req.user, videos: classThemes });
}
});
});
router.post('/category', function(req, res) {
fs.readFile('./public/category.json', function(err, data) {
if (err) {
return console.error(err);
}
var cate = data.toString(); //将二进制的数据转换为字符串
cate = JSON.parse(cate); //将字符串转换为json对象
cate.categories.push(req.body); //将传来的对象push进数组对象中
var str = JSON.stringify(cate); //因为nodejs的写入文件只认识字符串或者二进制数,所以把json对象转换成字符串重新写入json文件中
fs.writeFile('./public/category.json', str, function(err) {
if (err) {
console.error(err);
}
console.log('----------新增成功-------------');
})
})
res.redirect('newtheme');
});
app.use('/', router)
}<file_sep>/README.md
<img src="https://github.com/magictear/magicvid/blob/master/public/images/icon/favicon.png" width="128" height="128" style="margin: 0 auto"/>
# MagicVid 基于express+nodejs的视频分享商业网站。
> 技术栈:express + nodejs + mongoose
## 使用说明
## Environment
`Node >= 6`
`mongoDB`
``` bash
#克隆项目
git clone <EMAIL>:magictear/magicvid.git
# 安装依赖
cd MagicVid
npm install
# 构建
npm start
# 本地开发环境 访问http://localhost:3000
```
## 功能
* 背景滚动首页
* 注册、登录页面
* 主页面
* 视频分类页面
* 用户页面
* 自定义添加分类功能
* 点击预览
* 视频详情页面
* 添加本地视频功能
* 编辑视频功能
* 筛选、搜索功能
* 其他页面
## 部分截图
<img src="https://github.com/magictear/magicvid/blob/master/Screenshots/index-1.png" style="margin: 0 auto"/>
<img src="https://github.com/magictear/magicvid/blob/master/Screenshots/index-2.png" style="margin: 0 auto"/>
###
<img src="https://github.com/magictear/magicvid/blob/master/Screenshots/home-1.png" style="margin: 0 auto"/>
###
<img src="https://github.com/magictear/magicvid/blob/master/Screenshots/category-celebs.png" style="margin: 0 auto"/>
###
<img src="https://github.com/magictear/magicvid/blob/master/Screenshots/new-theme.png" style="margin: 0 auto"/>
###
<img src="https://github.com/magictear/magicvid/blob/master/Screenshots/signin.png" style="margin: 0 auto"/>
<file_sep>/utilis/upload.js
const fs = require('fs');
var multer = require('multer');
// Multer disk-storage configuration: uploaded files are written under
// ./magic/<classification>/ using the client's original file name.
var storage = multer.diskStorage({
    destination: function(req, file, cb) {
        // Create the destination folder if it does not exist yet.
        var createFolder = function(folder) {
            try {
                // fs.accessSync checks the caller's permission on the path;
                // it throws "no such file or directory" when the path is
                // missing, so it doubles as an existence check here.
                fs.accessSync(folder);
            } catch (e) {
                // Folder missing: create it synchronously.
                fs.mkdirSync(folder);
            }
        };
        // NOTE(review): req.body.classification comes straight from the
        // client and is joined into a filesystem path — sanitise it to
        // prevent path traversal.
        var uploadFolder = './magic/' + req.body.classification + "/";
        createFolder(uploadFolder);
        // Save incoming files into this folder (created above if needed).
        cb(null, uploadFolder);
    },
    filename: function(req, file, cb) {
        // Keep the original upload name unchanged. (A previous comment
        // claimed a timestamp prefix; none is actually added, so same-named
        // uploads overwrite each other.)
        cb(null, file.originalname);
    }
});
// Multer middleware instance used by the upload routes.
var upload = multer({ storage: storage });
module.exports = upload;
|
26e32c6b36e60dc0dee9006e823cbfd23d43ef7f
|
[
"JavaScript",
"Markdown"
] | 3
|
JavaScript
|
magictear/MagicVid
|
f779629db17b5dbd7cc0d25bc0c3dc019edf750c
|
523092de0b48d1952b7a94c4490326731125d24f
|
refs/heads/master
|
<repo_name>qqbnureke/cookBookRecipe<file_sep>/App.js
import React from 'react';
import { StyleSheet, Text, View, Button } from 'react-native';
import { ApolloProvider } from "react-apollo";
import ApolloClient from 'apollo-boost';
import { Constants } from 'expo';
import { createStackNavigator } from 'react-navigation';
import CreateRecipeForm from './components/CreateRecipeForm';
import RecipesDetails from './components/RecipesDetails';
import RecipesList from './components/RecipesList';
import LoginPage from './components/LoginPage';
import Register from './components/Register';
// Navigation stack. LoginPage is listed first, so it is the initial screen
// by default.
const AppStackNavigator = createStackNavigator({
  LoginPage: {
    screen: LoginPage,
    navigationOptions: {
      header: null
    }
  },
  RegisterPage: {
    screen: Register,
    navigationOptions: {
      title: 'Register',
    }
  },
  RecipeList: {
    screen: RecipesList,
  },
  NewRecipeForm: {
    screen: CreateRecipeForm,
  },
  Details: {
    screen: RecipesDetails,
  },
});
// Apollo client pointing at the graph.cool "simple" API endpoint.
const client = new ApolloClient({
  //uri: "https://api.graph.cool/file/v1/cjj6obv0i01j10166a63u7okj"
  uri: "https://api.graph.cool/simple/v1/cjj6obv0i01j10166a63u7okj"
});
// Root component: wraps the navigator in the Apollo provider.
// NOTE(review): passing initialRouteName as a JSX prop is likely ignored by
// createStackNavigator (it belongs in the navigator config) — harmless here
// because LoginPage is already the first route, but verify for this
// react-navigation version.
export default class App extends React.Component {
  render() {
    return (
      <ApolloProvider client={client}>
        <AppStackNavigator initialRouteName='LoginPage' />
      </ApolloProvider>
    )
  }
}
// NOTE(review): this style is not referenced in this file's render().
const styles = StyleSheet.create({
  container: {
    marginTop: Constants.statusBarHeight
  },
});
<file_sep>/components/RecipesList.js
import React from 'react';
import {
View,
Text,
StyleSheet,
Button,
Platform,
TouchableOpacity,
ScrollView,
FlatList,
ActivityIndicator,
Image,
} from 'react-native';
import { Icon } from 'react-native-elements';
import { ApolloProvider } from 'react-apollo';
import ApolloClient from 'apollo-boost';
import { Query } from 'react-apollo';
import gql from 'graphql-tag';
import Seperator from './utils/Seperator';
// GraphQL query fetching every recipe with all fields needed by both the
// list rows and the details screen.
const GET_ALL_RECIPES = gql`
  query GetAllRecipes {
    allRecipes {
      id
      title
      imageUri
      ingredients
      description
      instructions
    }
  }
`;
// Main screen: all recipes fetched via Apollo, with header buttons for
// favourites (currently a no-op), creating a recipe, and signing out.
export default class RecipesList extends React.Component {
  // Header configuration. Action callbacks are wired through navigation
  // params from componentDidMount, because this static context has no
  // access to the component instance.
  static navigationOptions = ({ navigation }) => {
    const { params = {} } = navigation.state;
    let headerTitle = 'Main';
    let headerTitleStyle = styles.headerTitleColor;
    let headerStyle = styles.headerStylee;
    let headerLeft = null;
    let headerRight = (
      <View style={styles.headerStyle}>
        {/* Star button: no action attached yet. */}
        <TouchableOpacity onPress={() => {}}>
          <Image
            source={require('./../assets/star.png')}
            style={styles.headerImageStyle}
          />
        </TouchableOpacity>
        {/* "+" button: open the new-recipe form. */}
        <TouchableOpacity
          style={styles.headerButton}
          onPress={() => {
            params.onSave();
          }}>
          <Image
            source={require('./../assets/plus-xxl.png')}
            style={styles.headerImageStylee}
          />
        </TouchableOpacity>
        {/* Logout button: return to the login screen. */}
        <TouchableOpacity
          style={styles.headerButton}
          onPress={() => {
            params.onSignout();
          }}>
          <Image
            source={require('./../assets/logout-xxl.png')}
            style={styles.headerImageStylee}
          />
        </TouchableOpacity>
      </View>
    );
    return {
      headerTitle,
      headerTitleStyle,
      headerStyle,
      headerRight,
      headerLeft,
    };
  };
  // Navigate to the recipe creation form.
  _onSave = () => {
    this.props.navigation.navigate('NewRecipeForm');
  };
  // "Sign out" simply navigates back to the login screen.
  _onSignout = () => {
    this.props.navigation.navigate('LoginPage');
  };
  componentDidMount() {
    // Expose the instance callbacks to the static header config above.
    this.props.navigation.setParams({
      onSave: this._onSave.bind(this),
      onSignout: this._onSignout.bind(this),
    });
  }
  // One list row; tapping opens the Details screen with the full item.
  renderItem = ({ item }) => (
    <View style={styles.container}>
      <TouchableOpacity
        onPress={() => {
          this.props.navigation.navigate('Details', { details: item });
        }}>
        <View style={styles.containerText}>
          <Text style={styles.textConst}>{item.title} </Text>
        </View>
        <Text style={styles.textDescription}>{item.description} </Text>
        <Seperator />
      </TouchableOpacity>
    </View>
  );
  render() {
    // NOTE(review): extraData={this.state} — no state is defined on this
    // component; likely inert. refreshing compares data.networkStatus, but
    // the query does not set notifyOnNetworkStatusChange — verify both.
    return (
      <ScrollView>
        <Query query={GET_ALL_RECIPES}>
          {({ loading, data, error, refetch }) =>
            loading ? (
              <ActivityIndicator />
            ) : (
              <FlatList
                refreshing={data.networkStatus === 4}
                keyExtractor={item => item.id}
                extraData={this.state}
                data={data ? data.allRecipes : []}
                renderItem={this.renderItem}
                onRefresh={() => refetch()}
              />
            )
          }
        </Query>
      </ScrollView>
    );
  }
}
// Styles for the list rows and the navigation header.
const styles = StyleSheet.create({
  // List row card.
  container: {
    marginTop: 1,
    marginBottom: 4,
    marginRight: 5,
    marginLeft: 5,
    borderWidth: 1,
  },
  containerText: {
    alignItems: 'center',
    justifyContent: 'center',
  },
  btnCreateNew: {
    fontSize: 15,
    color: 'white',
  },
  btnContainer: {
    margin: 5,
    padding: 10,
    borderRadius: 10,
    backgroundColor: 'darkviolet',
  },
  textAdd: {
    fontSize: 40,
    color: 'white',
  },
  textConst: {
    fontSize: 20,
    margin: 2,
    fontWeight: 'bold',
  },
  textDescription: {
    fontSize: 18,
    marginLeft: 4,
  },
  // Header button row and icons.
  headerStyle: {
    margin: 5,
    flexDirection: 'row',
  },
  headerImageStyle: {
    width: 30,
    height: 30,
    margin: 15,
  },
  headerImageStylee: {
    width: 25,
    height: 25,
    margin: 20,
  },
  headerButton: {
    backgroundColor: 'rgb(226,81,65)',
  },
  headerStylee: {
    backgroundColor: 'rgb(226,81,65)',
    borderBottomColor: 'white',
  },
  headerTitleColor: {
    color: 'white',
  },
});
<file_sep>/README.md
# cookBookRecipe
Create an account
Log in
See all recipes
Click on a recipe to see its details (title, description, image, ingredients, instructions)
+ Share it
+ Add a new one if you want
Enjoy the app
<file_sep>/components/RecipesDetails.js
import React from 'react';
import {
View,
Text,
StyleSheet,
FlatList,
TouchableOpacity,
Image,
ScrollView,
} from 'react-native';
import { Share } from 'react-native';
// Details screen for one recipe, with a header share button. The recipe is
// passed in via the "details" navigation param from RecipesList.
export default class RecipeDetails extends React.Component {
  // Header configuration; the share callback is wired through navigation
  // params from componentDidMount (static context has no `this`).
  static navigationOptions = ({ navigation }) => {
    const { params = {} } = navigation.state;
    let headerTitle = 'Recipe Detail';
    let headerTitleStyle = styles.headerTitleColor;
    let headerTintColor = 'white';
    let headerStyle = styles.headerStyle;
    let headerRight = (
      <View style={styles.headerStylee}>
        <TouchableOpacity
          style={styles.headerButton}
          onPress={() => {
            params.onShare();
          }}>
          <Image
            source={require('./../assets/sharethis-xxl.png')}
            style={styles.headerImageStylee}
          />
        </TouchableOpacity>
      </View>
    );
    return {
      headerTitle,
      headerTitleStyle,
      headerStyle,
      headerTintColor,
      headerRight,
    };
  };
  componentDidMount() {
    this.props.navigation.setParams({ onShare: this._onShare.bind(this) });
  }
  // Open the native share sheet with a plain-text rendering of the recipe.
  _onShare() {
    const details = this.props.navigation.getParam('details');
    Share.share({
      message:
        details.title +
        '\n' +
        details.description +
        '\nIngridients:\n' +
        details.ingredients +
        '\nInstructions:\n' +
        details.instructions +
        '\n',
    });
  }
  render() {
    const details = this.props.navigation.getParam('details');
    return (
      <ScrollView>
        <View style={styles.container}>
          <View style={styles.textContainer}>
            <Text style={styles.textVar}>
              {details.title} {'\n'}
            </Text>
          </View>
          <Text style={styles.textConst}>
            {details.description} {'\n'}
          </Text>
          <Image source={{ uri: details.imageUri }} style={styles.imageStyle} />
          {/* Bulleted ingredients list. */}
          <Text style={styles.textList}>Ingredients:</Text>
          <FlatList
            keyExtractor={(item, index) => index.toString()}
            data={details.ingredients}
            renderItem={({ item }) => (
              <Text style={styles.textList}>
                {'\u2022'} {item}
              </Text>
            )}
          />
          {/* Numbered instruction steps. */}
          <Text style={styles.textList}>{'\n'}Instructions:</Text>
          <FlatList
            keyExtractor={(item, index) => index.toString()}
            data={details.instructions}
            renderItem={({ item, index }) => (
              <Text style={styles.textList}>
                {index + 1}. {item}
              </Text>
            )}
          />
          <View style={styles.textContainer}>
            <Text style={styles.lastText}>Bon Appetit!</Text>
          </View>
        </View>
      </ScrollView>
    );
  }
}
// Styles for the details layout and the navigation header.
const styles = StyleSheet.create({
  container: {
    marginLeft: 8,
    flexDirection: 'column',
  },
  textConst: {
    fontSize: 15,
    color: '#5F5F3D',
  },
  textVar: {
    fontSize: 22,
    justifyContent: 'center',
    alignItems: 'center',
    fontWeight: 'bold',
  },
  textContainer: {
    alignItems: 'center',
    justifyContent: 'center',
  },
  textList: {
    fontSize: 18,
  },
  lastText: {
    fontSize: 18,
  },
  headerTitleColor: {
    color: 'white',
  },
  headerStyle: {
    backgroundColor: 'rgb(226,81,65)',
    borderBottomColor: 'white',
  },
  headerStylee: {
    marginRight: 5,
    flexDirection: 'row',
  },
  headerImageStylee: {
    width: 25,
    height: 25,
    margin: 20,
  },
  headerButton: {
    backgroundColor: 'rgb(226,81,65)',
  },
  imageStyle: {
    width: 300,
    height: 200,
  },
});
|
2c1d0e62bf1f2f8006c4d53b9ac2d6c732c6de06
|
[
"JavaScript",
"Markdown"
] | 4
|
JavaScript
|
qqbnureke/cookBookRecipe
|
0f3bdbd4bd225add80290f92081ad26b820933f1
|
1a7be2c125de3309127b286cf655bd98bbc849fd
|
refs/heads/master
|
<repo_name>donchi-k/three<file_sep>/index.js
// Set up and run the Three.js demo once the DOM is ready.
window.addEventListener("DOMContentLoaded", init);
function init() {
    const width = 960;
    const height = 540;
    // WebGL renderer drawing into the #myCanvas element.
    const renderer = new THREE.WebGLRenderer({
        canvas: document.querySelector("#myCanvas")
    });
    // Renderer size and device pixel ratio.
    renderer.setSize(width, height);
    renderer.setPixelRatio(window.devicePixelRatio);
    // Scene container.
    const scene = new THREE.Scene();
    // new THREE.PerspectiveCamera(fov, aspect, near, far)
    const camera = new THREE.PerspectiveCamera(
        60,
        width / height,
        2,
        20000
    );
    camera.position.set(0, 0, +1000);
    // Geometry: a cylinder (radiusTop, radiusBottom, height).
    const geometry = new THREE.CylinderGeometry(250, 250, 500);
    // Material: magenta (0xff00ff). (An older comment said blue 0x0000ff,
    // which no longer matches the code.)
    const material = new THREE.MeshStandardMaterial({
        color: 0xff00ff
    });
    // Mesh from geometry + material, added to the scene.
    // new THREE.Mesh(geometry, material)
    const box = new THREE.Mesh(geometry, material);
    scene.add(box);
    // Directional light: sun-like parallel light, angled from above/behind.
    const directionalLight = new THREE.DirectionalLight(
        0xffffff
    );
    directionalLight.position.set(0, 1, 1,);
    scene.add(directionalLight);
    // Mouse orbit controls.
    var controls = new THREE.OrbitControls(camera);
    // Kick off the render loop.
    tick();
    function tick() {
        requestAnimationFrame(tick);
        // Spin the mesh a little each frame.
        box.rotation.x += 0.01;
        box.rotation.y += 0.03;
        box.rotation.z += 0.01;
        controls.update();
        // Render the scene from the camera onto the canvas.
        renderer.render(scene, camera);
    }
};
|
1b5d9bc81c910b50700310f8a98549ffde2ebb40
|
[
"JavaScript"
] | 1
|
JavaScript
|
donchi-k/three
|
3750496508ea8d55098e1024ee97292c4f1c3aee
|
d8224b40b3a7366e1b4e206b62fc9857aed22e80
|
refs/heads/master
|
<repo_name>ndayisengaosca/oscar2<file_sep>/README.md
# Système de détection d'intrusion à l'aide de méthode d'apprentissage
Ce repo contient les trois programmes développés pour notre projet de master.
### PSO_Same_Number_Of_Neurons.py
Ce programme utilise l'algorithme Optimisation par Essaim Particulaire pour retourner un réseau de neurones basé sur KDD ayant le même nombres de neurones pour chaque couches cachées.
### PSO_Different_Number_Of_Neurons.py
Ce programme utilise l'algorithme Optimisation par Essaim Particulaire pour retourner un réseau de neurones basé sur KDD ayant un nombre différent de neurones dans chaque couches cachées.
### L'algorithme génetique
Ce programme utilise l'Algorithme Génétique pour retourner une architecture du réseau de neurones basé sur KDD.
<file_sep>/Intrusion Detection System using genetic algorithm.py
import tensorflow as tf
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import random
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras.utils import to_categorical
from tensorflow.keras import layers
from keras import backend as k
from keras.utils import to_categorical
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.utils.vis_utils import plot_model
from numpy.random import randint
from random import choice
from numpy.random import uniform
import matplotlib.pyplot
#lecture du dataset
def kddnsl(show_examples=False):
dftrain = pd.read_csv('KDDNSLFULL.csv')
dftest=pd.read_csv('KDDTEST.csv')
#print(dftrain.head())
#print(dftest.head())
rows, cols = dftrain.shape
rowst, colst = dftest.shape
label_names = dftrain['attacktype'].unique()
label_namest = dftest['attacktype'].unique()
index_and_label = list(enumerate(label_names))
index_and_labelt = list(enumerate(label_namest))
label_to_index = dict((label, index) for index, label in index_and_label)
label_to_indext = dict((label, index) for index, label in index_and_labelt)
dftrain = dftrain.replace(label_to_index)
dftest = dftest.replace(label_to_indext)
dftrain = dftrain.sample(frac=1.0)
dftest = dftest.sample(frac=1.0)
train_data = dftrain
test_data = dftest
x_train = train_data.iloc[:,:-1]
y_train = train_data.iloc[:, -1:]
#print (len(x_train))
y_train = to_categorical(y_train, 5)[:len(x_train)]
#print(x_train.shape)
#print(y_train.shape)
x_test = test_data.iloc[:,:-1]
y_test = test_data.iloc[:, -1:]
y_test = to_categorical(y_test, 5)[:len(x_train)]
#print(x_test.shape)
#print(y_test.shape)
i_r, i_c, n_c = 28, 28, 5
i_sh = (1, i_r, i_c)
#print(i_sh)
return x_train, x_test, y_train, y_test,i_sh
#classe permettant de créer le réseau de neurones
class Net:
    """One candidate neural-network configuration for the genetic algorithm.

    Stores the evolved hyper-parameters plus bookkeeping fields that are
    filled in after a fitness evaluation.
    """

    def __init__(self, epoch, dropout, hiddenActivationFn, outputActivationFn,
                 lossFunction, optimiser, totalAttributes, totalHiddenLayers,
                 neuronesPerLayer):
        # Evolved hyper-parameters.
        self.epoch = epoch
        self.dropout = dropout
        self.hiddenActivationFn = hiddenActivationFn    # hidden-layer activation
        self.outputActivationFn = outputActivationFn    # output-layer activation
        self.lossFunction = lossFunction
        self.optimiser = optimiser
        self.totalAttributes = totalAttributes          # dataset feature count [n]
        self.totalHiddenLayers = totalHiddenLayers      # hidden layers [pMax]
        self.neuronesPerLayer = neuronesPerLayer        # neurons per layer
        # Filled in after evaluation.
        self.accuracy = 0
        self.parentDropOut = 0.0            # total dropout measured on the parent
        self.parentTotalHiddentLayer = 0    # hidden layers measured on the parent
        self.patentTotalNeurone = 0         # neurons measured on the parent

    def init_params(self):
        """Return all fields as a plain dict keyed by parameter name."""
        return {
            'epochs': self.epoch,
            'dropout': self.dropout,
            'hiddenActivation': self.hiddenActivationFn,
            'outputActivation': self.outputActivationFn,
            'loss': self.lossFunction,
            'optimizer': self.optimiser,
            'accuracy': self.accuracy,
            'totalAttributes': self.totalAttributes,
            'totalHiddenLayers': self.totalHiddenLayers,
            'neuronesPerLayer': self.neuronesPerLayer,
            'parentDropOut': self.parentDropOut,
            'parentTotalHiddentLayer': self.parentTotalHiddentLayer,
            'patentTotalNeurone': self.patentTotalNeurone,
        }
# Build the initial population: `p` independent Net individuals that all
# start from the same hyper-parameters (crossover/mutation diversify them).
def init_net(epoch,dropout,hiddenActivationFn,outputActivationFn,lossFunction,optimiser,totalAttributes,totalHiddenLayers,neuronesPerLayer,p):
    return [Net(epoch,dropout,hiddenActivationFn,outputActivationFn,lossFunction,optimiser,totalAttributes,totalHiddenLayers,neuronesPerLayer) for _ in range(p)]
# Fitness function: train/evaluate one Keras model per individual and store
# its test accuracy back on the individual. Indentation was lost in this
# copy of the file; the nesting below is reconstructed from the semantics.
def fitness(n, n_c, i_shape, x, y, b, x_test, y_test,pMax):
    """Evaluate each Net in `n`, setting Net.accuracy; returns `n`.

    NOTE(review): `n_c`, `i_shape` and the layer statistics computed below
    (patentTotalNeurone etc.) are never written back to the individual --
    confirm whether they were meant to update the Net fields.
    """
    for cnt, i in enumerate(n):
        p = i.init_params()
        epoch = p['epochs'] #epoch value
        dropout = p['dropout'] #drop out value
        hiddenActivationFn = p['hiddenActivation'] #activation for hidden layer
        outputActivationFn = p['outputActivation'] #activation function for the output layer
        lossFunction = p['loss'] #loss function
        optimiser = p['optimizer'] #optimization
        totalAttributes = p['totalAttributes'] #total attributes/features of the dataset [n]
        totalHiddenLayers = p['totalHiddenLayers'] #total hidden layers per neural network [pMax]
        neuronesPerLayer = p['neuronesPerLayer'] #total neurones per layer
        parentDropOut = p['parentDropOut'] #total dropout per parent
        parentTotalHiddentLayer = p['parentTotalHiddentLayer'] #total hiddentlayers per parent
        patentTotalNeurone =p['patentTotalNeurone']
        #total neurone per parent
        try:
            # Parameter name # Suggested value
            m = net_model(epoch = epoch, # epoch number
                hiddenActivationFn = hiddenActivationFn, # hiden layers activation function
                outputActivationFn = outputActivationFn, # output layer activation function
                dropout = dropout, # dropout 2 Not used by algorithm
                optimiser = optimiser, # optimizer 'adadelta'
                lossFunction = lossFunction, # loss function 'categorical crossentropy'
                xtrain = x, # train data
                ytrain = y, # train label
                batchSize = b, # bias value
                xtest = x_test, # test data
                ytest = y_test, # test label
                totalAttributes = totalAttributes,
                totalHiddenLayers = totalHiddenLayers,
                neuronesPerLayer = neuronesPerLayer,
                pMax = pmax, # NOTE(review): reads the module-level 'pmax', not the 'pMax' parameter -- confirm
                parentDropOut = parentDropOut, #total dropout per parent
                parentTotalHiddentLayer = parentTotalHiddentLayer, #total hiddentlayers per parent
                patentTotalNeurone =patentTotalNeurone
                )
            # # Current best: 99.15%
            s = m.evaluate(x=x_test, y=y_test, verbose=0)
            i.accuracy = s[1]
            #print(m.summary())
            modelConfig = m.get_config()
            #print(modelConfig['layers'])
            #print(type(modelConfig['layers'])
            print( modelConfig['config'])
            #print("total layer "+ str(len(modelConfig['layers'])))
            count = 0
            patentTotalNeurone = 0
            parentTotalHiddentLayer = 0
            parentDropOut=0
            print ('-----------------------------------------------FONCTION DE FITNESS-----------------------------------------------')
            totalList = len(modelConfig['layers'])-1
            #print("Total Number of "+ str(totalList))
            for layer in modelConfig['layers']:
                # NOTE(review): this while exhausts `count` on the first
                # iteration of the for-loop, so `count > 2` holds for every
                # subsequent layer -- the per-layer counting looks broken;
                # confirm intended nesting.
                while (count < len(modelConfig['layers'])-1):
                    #print(str(count) + layer['class_name'] )
                    count = count+1
                # Skip activation layers and the input layer.
                if ((layer['class_name'] != 'Activation') and (layer['class_name'] != 'InputLayer')):
                    if (count>2): # excludes the input layer
                        if(layer['class_name']== 'Dense'): # hidden layers
                            patentTotalNeurone = patentTotalNeurone + layer['config']['units']
                            print ("Nombre de neurone du parent: " + str(patentTotalNeurone))
                            print('------------------------------------------------------------------')
                            parentTotalHiddentLayer = parentTotalHiddentLayer + 1
                            print ("Nombre de couche cachée du parent: " + str(parentTotalHiddentLayer))
                            print('------------------------------------------------------------------')
                        if(layer['class_name']== 'Dropout'):
                            parentDropOut = parentDropOut+layer['config']['rate']
                            print ("Drop out du parent:" + str(parentDropOut))
                            print('------------------------------------------------------------------')
            #print(m.get_config())
            print('Accuracy: {}'.format(round((i.accuracy * 100),2)))
            print("test test")
            print(p)
            print ('-----------------------------------------------FONCTION DE FITNESS-----------------------------------------------')
        except Exception as e:
            # Broad catch: a failed build/train only logs and skips the individual.
            print(e)
    return n
# Build, train and return one Keras Sequential network for the given
# hyper-parameters. Layer widths are re-drawn at random in [n/2, n] here,
# independently of `neuronesPerLayer`.
def net_model(epoch, hiddenActivationFn,outputActivationFn, dropout, optimiser, lossFunction, xtrain, ytrain, batchSize, xtest, ytest,totalAttributes,totalHiddenLayers,neuronesPerLayer,pMax, parentDropOut,parentTotalHiddentLayer,patentTotalNeurone):
    # NOTE(review): pMax, dropout, parentDropOut, parentTotalHiddentLayer and
    # patentTotalNeurone are accepted but never used in this function.
    model = Sequential()
    print("Hidden layers will be "+ str(totalHiddenLayers))
    print ('-------------------------------------------GENERATION DU RESEAU DE NEURONE----------------------------------------------')
    inputNeurones = randint(totalAttributes/2,totalAttributes)
    # NOTE(review): this print reports neuronesPerLayer, but the layer is
    # actually built with the freshly drawn inputNeurones above -- confirm.
    print('Nombre de neurones de la couche d''entrée:' + str(neuronesPerLayer))
    model.add(Dense(inputNeurones, input_shape=[totalAttributes,]))
    model.add(Activation(hiddenActivationFn))
    #hidden layers
    for i in range(totalHiddenLayers):
        HiddenNeurones = randint(totalAttributes/2,totalAttributes)
        print('Couche Cachée: ' + str(i))
        print('------------------------------------------------------------------')
        print('------------------------------------------------------------------')
        print('Nombre de neurone de la couche: ' + str(HiddenNeurones))
        # Per-layer dropout is drawn uniformly in [0, 1).
        dropoutVal = random.uniform(0,1)
        print("Drop out de la couche: " + str(dropoutVal))
        print('------------------------------------------------------------------')
        model.add(Dense(HiddenNeurones))
        model.add(Activation(hiddenActivationFn))
        model.add(layer=Dropout(rate=dropoutVal))
        print('Couche Cachée: ' + str(i))
        print('------------------------------------------------------------------')
        print('------------------------------------------------------------------')
    #output layers
    # Output is hard-coded to 5 classes (matches the 5-way one-hot labels).
    model.add(Dense(5))
    model.add(Activation(outputActivationFn))
    model.compile(optimizer=optimiser, loss=lossFunction, metrics=['accuracy'])
    model.fit(x=xtrain, y=ytrain, batch_size=batchSize, epochs=epoch, verbose=0, validation_data=(xtest, ytest))
    # print(model.summary())
    tf.keras.utils.plot_model(model, to_file='model_plot3.png', show_shapes=True, show_layer_names=True,expand_nested=True)
    print ('-------------------------------------------GENERATION DU RESEAU DE NEURONE----------------------------------------------')
    return model
def selection(n):
    """Rank the population by fitness, highest accuracy first.

    NOTE(review): the trailing slice keeps the whole list
    (n[:len(n)] is a no-op), so nothing is actually culled here --
    presumably a survival fraction was intended; confirm.
    """
    ranked = sorted(n, key=lambda individual: individual.accuracy, reverse=True)
    return ranked[:int(len(ranked))]
# Crossover: pick two parents at random, average their hyper-parameters to
# produce two (identical) children, and append them to the population.
def crossover(n,pMax,maxNeurone):
    """Extend `n` with two offspring averaged from two random parents.

    NOTE(review): `round()` on the mean dropout collapses it to the
    integer 0 or 1; the fractional average was probably intended -- confirm.
    """
    offspring = []
    p1 = choice(n)
    p2 = choice(n)
    print ('---------------------------------------------------CROSSOVER----------------------------------------------------')
    #print('Nombre de neurone du parent P1: '+ str(p1.patentTotalNeurone) + ' et P2:' + str(p2.patentTotalNeurone))
    print('Nombre de neurone du parent P1: '+ str(p1.neuronesPerLayer) + ' et P2:' + str(p2.neuronesPerLayer))
    print('------------------------------------------------------------------')
    # print('Drop out du parent P1:'+ str(p1.parentDropOut) + ' et P2:' + str(p2.parentDropOut))
    print('Drop out du parent P1:'+ str(p1.dropout) + ' et P2:' + str(p2.dropout))
    print('------------------------------------------------------------------')
    # Earlier variant based on the measured parent statistics, kept for reference:
    """if (round((p1.patentTotalNeurone + p2.patentTotalNeurone)/2) >maxNeurone):
    neuronesPerLayerChild = round((p1.patentTotalNeurone + p2.patentTotalNeurone)/4)
    else:
    neuronesPerLayerChild = round((p1.patentTotalNeurone + p2.patentTotalNeurone)/2) #moyenne du nombre de neuronnes des 2 parents
    if (round((p1.parentDropOut + p2.parentDropOut)/2) >1):
    dropoutofChild = round((p1.parentDropOut + p2.parentDropOut)/4) #moyenne du nombre de neuronnes des 2 parents
    else:
    dropoutofChild = round((p1.parentDropOut + p2.parentDropOut)/2)
    if(round((p1.parentTotalHiddentLayer + p2.parentTotalHiddentLayer)/2) >pMax):
    numberofHiddenLayersPerChild = round((p1.parentTotalHiddentLayer+p2.parentTotalHiddentLayer)/4) #nombre de couche cachées
    elif (round((p1.parentTotalHiddentLayer + p2.parentTotalHiddentLayer)/2) <pMax):
    numberofHiddenLayersPerChild = round((p1.parentTotalHiddentLayer+p2.parentTotalHiddentLayer)/2)
    else:
    numberofHiddenLayersPerChild = round((p1.parentTotalHiddentLayer+p2.parentTotalHiddentLayer)/2) #nombre de couche cachées
    """
    # Child layer width: mean of the parents, halved if it exceeds maxNeurone.
    if (round((p1.neuronesPerLayer + p2.neuronesPerLayer)/2) >maxNeurone):
        neuronesPerLayerChild = round((p1.neuronesPerLayer + p2.neuronesPerLayer)/4)
    else:
        neuronesPerLayerChild = round((p1.neuronesPerLayer + p2.neuronesPerLayer)/2) # mean of the two parents' neuron counts
    if (round((p1.dropout + p2.dropout)/2) >1):
        dropoutofChild = round((p1.dropout + p2.dropout)/4) # mean of the two parents' dropout
    else:
        dropoutofChild = round((p1.dropout + p2.dropout)/2)
    # Child hidden-layer count: mean of the parents, halved if above pMax.
    if(round((p1.totalHiddenLayers + p2.totalHiddenLayers)/2) >pMax):
        numberofHiddenLayersPerChild = round((p1.totalHiddenLayers+p2.totalHiddenLayers)/4) # hidden-layer count
    elif (round((p1.totalHiddenLayers + p2.totalHiddenLayers)/2) <pMax):
        # Intended: insert random extra layers (dropout 0) -- not implemented.
        numberofHiddenLayersPerChild = round((p1.totalHiddenLayers+p2.totalHiddenLayers)/2)
    else:
        numberofHiddenLayersPerChild = round((p1.totalHiddenLayers+p2.totalHiddenLayers)/2) # hidden-layer count
    #!!!!!!!!! TO DO !!!!!!!!!
    # To loop throught the Neural Network and get the different parameters
    # if numberofHiddenLayersPerChild < Pmax, insert new random layers in P1 and P2 with drop out value = 0 => In Progress
    c1 = Net(p1.epoch, dropoutofChild, p1.hiddenActivationFn, p1.outputActivationFn, p1.lossFunction, p1.optimiser, p1.totalAttributes,numberofHiddenLayersPerChild,neuronesPerLayerChild)
    c2 = Net(p2.epoch, dropoutofChild, p2.hiddenActivationFn, p2.outputActivationFn, p2.lossFunction, p2.optimiser, p2.totalAttributes, numberofHiddenLayersPerChild,neuronesPerLayerChild)
    offspring.append(c1)
    offspring.append(c2)
    n.extend(offspring)
    print("Child1: "+"Epoch: "+ str(c1.epoch))
    print("Child2: "+ str(c2.init_params()))
    print ('---------------------------------------------------CROSSOVER----------------------------------------------------')
    return n
def mutate(n):
    """Mutate the population in place: every individual's dropout is
    flipped to 1 - dropout. A randomly sampled individual is only used
    for the progress printout. Returns the same list `n`.
    """
    sample = choice(n)
    print ('---------------------------------------------------MUTATION----------------------------------------------------')
    print("Drop out avant la mutation:" + str(sample.dropout))
    print('------------------------------------------------------------------')
    print("Drop out après la mutation:" + str(1 - sample.dropout))
    print('------------------------------------------------------------------')
    # Apply the flip to every individual (not just the sampled one).
    for individual in n:
        flipped = 1 - individual.dropout
        print('check' + str(flipped))
        individual.dropout = flipped
    print ('---------------------------------------------------MUTATION----------------------------------------------------')
    return n
if __name__ == "__main__":
    # Load and prepare the dataset.
    xtrain, xtest, ytrain, ytest, I_sh = kddnsl(show_examples=True)
    # GA configuration.
    pmax = 5 # maximum number of hidden layers
    n = 41 # input vector size: number of NSL-KDD attributes
    totalHiddenLayers = randint(1,pmax) # random hidden-layer count
    neuronesPerLayer = randint(n/2,n) # neurons per layer, drawn in [n/2, n]
    print('Total hidden layer: ' + str(totalHiddenLayers))
    print('------------------------------------------------------------------')
    print('Number of Neurones per layer: ' + str(neuronesPerLayer))
    print('------------------------------------------------------------------')
    population = 10 # Population
    generation = 55 # Generation
    batchSize = 128 # Batch size
    classNumber = 5 # Class number
    threshold = 0.90 # Threshold
    accuracy_list = []
    # Build the initial population; dropout is overwritten during evolution.
    N = init_net( epoch = 5,dropout = random.uniform(0,1), hiddenActivationFn = 'relu',outputActivationFn = 'softmax',
    lossFunction='categorical_crossentropy',optimiser='adadelta' ,totalAttributes = n,totalHiddenLayers = totalHiddenLayers,neuronesPerLayer= neuronesPerLayer,p=population)
    for g in range(generation):
        print('Generation {}'.format(g + 1))
        print('------------------------------------------------------------------')
        print('------------------------------------------------------------------')
        N = fitness(n=N,
            n_c=classNumber,
            i_shape=I_sh,
            x=xtrain,
            y=ytrain,
            b=batchSize,
            x_test=xtest,
            y_test=ytest,
            pMax=pmax
            )
        # print(type(N))
        # print(N)
        # NOTE(review): np.empty(shape=(g)) has length g, so accuracies[g]
        # below is always out of bounds (and the array is empty on the first
        # generation); it also stores a Net object into a float array -- this
        # would raise if the threshold branch is reached. Confirm intent.
        accuracies = np.empty(shape=(g))
        N = selection(n=N)
        N = crossover(n=N,pMax = pmax,maxNeurone = n)
        N = mutate(n=N)
        for q in N:
            acc = round((q.accuracy * 100),2)
            accuracy_list.append(acc)
            # print(accuracy_list)
            if q.accuracy >threshold:
                accuracies[g] = N[0]
                print('Threshold satisfied')
                print(q.init_params())
                print('Best accuracy: {}%'.format(acc))
                exit(code=0)
        #print(accuracy_list)
        print("The best accuracy so far {}%".format(max(accuracy_list)))
    # Plot the fitness history. NOTE(review): the new figure is created
    # after the plot calls, so the saved file likely misses the curve.
    matplotlib.pyplot.plot(np.array(accuracy_list), linewidth=5, color="black")
    matplotlib.pyplot.xlabel("Iteration", fontsize=20)
    matplotlib.pyplot.ylabel("Fitness", fontsize=20)
    matplotlib.pyplot.xticks(np.arange(0, g+1,10), fontsize=15)
    matplotlib.pyplot.yticks(np.arange(0, 101, 10), fontsize=15)
    fig = plt.figure(dpi=100, figsize=(14, 7))
    plt.show()
    plt.savefig('test3.png')
<file_sep>/PSO_Same_Number_Of_Neurons.py
# -*- coding: utf-8 -*-
"""PSO_KDD_TS.ipynb
author: <NAME> & <NAME>
"""
import copy
import numpy as np
class Particle(object):
    """Particle class for PSO

    This class encapsulates the behavior of each particle in PSO and provides
    an efficient way to do bookkeeping about the state of the swarm in any given
    iteration. Positions and velocities are integer-valued.

    Args:
        lower_bound (np.array): Vector of lower boundaries for particle dimensions.
        upper_bound (np.array): Vector of upper boundaries for particle dimensions.
        dimensions (int): Number of dimensions of the search space.
        objective function (function): Black-box function to evaluate.
    """
    def __init__(self,
                 lower_bound,
                 upper_bound,
                 dimensions,
                 objective_function):
        self.reset(dimensions, lower_bound, upper_bound, objective_function)

    def reset(self,
              dimensions,
              lower_bound,
              upper_bound,
              objective_function):
        """Particle reset

        Allows for reset of a particle without reallocation. Draws a random
        integer position within the bounds per dimension and evaluates it.

        Args:
            lower_bound (np.array): Vector of lower boundaries for particle dimensions.
            upper_bound (np.array): Vector of upper boundaries for particle dimensions.
            dimensions (int): Number of dimensions of the search space.
        """
        position = []
        for i in range(dimensions):
            if lower_bound[i] < upper_bound[i]:
                position.extend(np.random.randint(lower_bound[i], upper_bound[i] + 1, 1, dtype=int))
            elif lower_bound[i] == upper_bound[i]:
                position.extend(np.array([lower_bound[i]], dtype=int))
            else:
                # Invalid bounds (lower > upper) are a programming error.
                assert False
        # Histories are kept as lists; index -1 is always the current state.
        self.position = [position]
        self.velocity = [np.multiply(np.random.rand(dimensions),
                                     (upper_bound - lower_bound)).astype(int)]
        self.best_position = self.position[:]
        self.function_value = [objective_function(self.best_position[-1])]
        self.best_function_value = self.function_value[:]

    def update_velocity(self, omega, phip, phig, best_swarm_position):
        """Particle velocity update

        Standard PSO velocity rule (inertia + cognitive + social terms),
        truncated back to integers.

        Args:
            omega (float): Velocity equation constant.
            phip (float): Velocity equation constant.
            phig (float): Velocity equation constant.
            best_swarm_position (np.array): Best particle position.
        """
        random_coefficient_p = np.random.uniform(size=np.asarray(self.position[-1]).shape)
        random_coefficient_g = np.random.uniform(size=np.asarray(self.position[-1]).shape)
        self.velocity.append(omega
                             * np.asarray(self.velocity[-1])
                             + phip
                             * random_coefficient_p
                             * (np.asarray(self.best_position[-1])
                                - np.asarray(self.position[-1]))
                             + phig
                             * random_coefficient_g
                             * (np.asarray(best_swarm_position)
                                - np.asarray(self.position[-1])))
        self.velocity[-1] = self.velocity[-1].astype(int)

    def update_position(self, lower_bound, upper_bound, objective_function):
        """Particle position update

        Moves by the current velocity, clamps to the bounds, evaluates, and
        refreshes the personal best if the value improved (smaller is better).

        NOTE(review): when the particle moves, the appended function value is
        evaluated at self.position[-1] BEFORE the new position is appended,
        i.e. at the old position rather than the new one -- confirm whether
        objective_function(new_position) was intended.

        Args:
            lower_bound (np.array): Vector of lower boundaries for particle dimensions.
            upper_bound (np.array): Vector of upper boundaries for particle dimensions.
            objective function (function): Black-box function to evaluate.
        """
        new_position = self.position[-1] + self.velocity[-1]
        if np.array_equal(self.position[-1], new_position):
            # No movement: reuse the previous evaluation.
            self.function_value.append(self.function_value[-1])
        else:
            # Clamp out-of-bounds dimensions back onto the box.
            mark1 = new_position < lower_bound
            mark2 = new_position > upper_bound
            new_position[mark1] = lower_bound[mark1]
            new_position[mark2] = upper_bound[mark2]
            self.function_value.append(objective_function(self.position[-1]))
            self.position.append(new_position.tolist())
        if self.function_value[-1] < self.best_function_value[-1]:
            self.best_position.append(self.position[-1][:])
            self.best_function_value.append(self.function_value[-1])
class Pso(object):
    """PSO wrapper

    This class contains the particles and provides an abstraction to hold all the context
    of the PSO algorithm. The objective is MINIMIZED.

    Args:
        swarmsize (int): Number of particles in the swarm
        maxiter (int): Maximum number of generations the swarm will run
    """
    def __init__(self, swarmsize=100, maxiter=100):
        self.max_generations = maxiter
        self.swarmsize = swarmsize
        # Standard velocity-update coefficients (inertia, cognitive, social).
        self.omega = 0.5
        self.phip = 0.5
        self.phig = 0.5
        # Early-stop tolerances on step size and objective improvement.
        self.minstep = 1e-4
        self.minfunc = 1e-4
        self.best_position = [None]
        # NOTE(review): initial best value of 1 assumes the objective lies
        # in [0, 1] (accuracy-like); larger objectives would never replace it
        # until a particle beats 1 -- confirm.
        self.best_function_value = [1]
        self.particles = []
        self.retired_particles = []

    def run(self, function, lower_bound, upper_bound, kwargs=None):
        """Perform a particle swarm optimization (PSO)

        Args:
            objective_function (function): The function to be minimized.
            lower_bound (np.array): Vector of lower boundaries for particle dimensions.
            upper_bound (np.array): Vector of upper boundaries for particle dimensions.
        Returns:
            best_position (np.array): Best known position
            accuracy (float): Objective value at best_position
            :param kwargs:
        """
        if kwargs is None:
            kwargs = {}
        objective_function = lambda x: function(x, **kwargs)
        assert hasattr(function, '__call__'), 'Invalid function handle'
        assert len(lower_bound) == len(upper_bound), 'Invalid bounds length'
        lower_bound = np.array(lower_bound)
        upper_bound = np.array(upper_bound)
        # Strict inequality: equal bounds are rejected here even though
        # Particle.reset tolerates them.
        assert np.all(upper_bound > lower_bound), 'Invalid boundary values'
        dimensions = len(lower_bound)
        self.particles = self.initialize_particles(lower_bound,
                                                   upper_bound,
                                                   dimensions,
                                                   objective_function)
        # Start evolution
        generation = 1
        while generation <= self.max_generations:
            for particle in self.particles:
                particle.update_velocity(self.omega, self.phip, self.phig, self.best_position[-1])
                particle.update_position(lower_bound, upper_bound, objective_function)
                if particle.best_function_value[-1] == 0:
                    # Perfect score: archive the particle and re-seed it.
                    self.retired_particles.append(copy.deepcopy(particle))
                    particle.reset(dimensions, lower_bound, upper_bound, objective_function)
                elif particle.best_function_value[-1] < self.best_function_value[-1]:
                    stepsize = np.sqrt(np.sum((np.asarray(self.best_position[-1])
                                               - np.asarray(particle.position[-1])) ** 2))
                    # Converged: improvement or movement below tolerance.
                    if np.abs(np.asarray(self.best_function_value[-1])
                              - np.asarray(particle.best_function_value[-1])) \
                            <= self.minfunc:
                        return particle.best_position[-1], particle.best_function_value[-1]
                    elif stepsize <= self.minstep:
                        return particle.best_position[-1], particle.best_function_value[-1]
                    else:
                        self.best_function_value.append(particle.best_function_value[-1])
                        self.best_position.append(particle.best_position[-1][:])
            generation += 1
        return self.best_position[-1], self.best_function_value[-1]

    def initialize_particles(self,
                             lower_bound,
                             upper_bound,
                             dimensions,
                             objective_function):
        """Initializes the particles for the swarm

        Also seeds the swarm-level best from the freshly created particles,
        then collapses the history lists to their final entry.

        Args:
            objective_function (function): The function to be minimized.
            lower_bound (np.array): Vector of lower boundaries for particle dimensions.
            upper_bound (np.array): Vector of upper boundaries for particle dimensions.
            dimensions (int): Number of dimensions of the search space.
        Returns:
            particles (list): Collection or particles in the swarm
        """
        particles = []
        for _ in range(self.swarmsize):
            particles.append(Particle(lower_bound,
                                      upper_bound,
                                      dimensions,
                                      objective_function))
            if particles[-1].best_function_value[-1] < self.best_function_value[-1]:
                self.best_function_value.append(particles[-1].best_function_value[-1])
                self.best_position.append(particles[-1].best_position[-1])
        self.best_position = [self.best_position[-1]]
        self.best_function_value = [self.best_function_value[-1]]
        return particles
import pandas as pd
from tensorflow.keras.utils import get_file
from sklearn.model_selection import train_test_split
# Training configuration for the KDD objective network.
batch_size = 128
num_classes = 10  # NOTE(review): overwritten below from the outcome dummies
epochs = 100
# Download the 10% KDD Cup 99 sample into the Keras cache.
try:
    path = get_file('kddcup.data_10_percent.gz', origin='http://kdd.ics.uci.edu/databases/kddcup99/kddcup.data_10_percent.gz')
except:
    print('Error downloading')
    raise
df = pd.read_csv(path, header=None)
df.dropna(inplace=True,axis=1)
df.columns = [
'duration',
'protocol_type',
'service',
'flag',
'src_bytes',
'dst_bytes',
'land',
'wrong_fragment',
'urgent',
'hot',
'num_failed_logins',
'logged_in',
'num_compromised',
'root_shell',
'su_attempted',
'num_root',
'num_file_creations',
'num_shells',
'num_access_files',
'num_outbound_cmds',
'is_host_login',
'is_guest_login',
'count',
'srv_count',
'serror_rate',
'srv_serror_rate',
'rerror_rate',
'srv_rerror_rate',
'same_srv_rate',
'diff_srv_rate',
'srv_diff_host_rate',
'dst_host_count',
'dst_host_srv_count',
'dst_host_same_srv_rate',
'dst_host_diff_srv_rate',
'dst_host_same_src_port_rate',
'dst_host_srv_diff_host_rate',
'dst_host_serror_rate',
'dst_host_srv_serror_rate',
'dst_host_rerror_rate',
'dst_host_srv_rerror_rate',
'outcome'
]
ENCODING = 'utf-8'
# Encode a numeric column as zscores
def encode_numeric_zscore(df, name, mean=None, sd=None):
    """Replace df[name] in place with its z-score.

    When mean/sd are omitted they are computed from the column itself,
    so callers may pass training-set statistics when encoding test data.
    """
    center = df[name].mean() if mean is None else mean
    spread = df[name].std() if sd is None else sd
    df[name] = (df[name] - center) / spread
# Encode text values to dummy variables(i.e. [1,0,0],[0,1,0],[0,0,1] for red,green,blue)
def encode_text_dummy(df, name):
    """Expand the categorical column `name` into one-hot 'name-value'
    columns and drop the original column (in place)."""
    one_hot = pd.get_dummies(df[name])
    for value in one_hot.columns:
        df[f"{name}-{value}"] = one_hot[value]
    df.drop(name, axis=1, inplace=True)
# Now encode the feature vector
encode_numeric_zscore(df, 'duration')
encode_text_dummy(df, 'protocol_type')
encode_text_dummy(df, 'service')
encode_text_dummy(df, 'flag')
encode_numeric_zscore(df, 'src_bytes')
encode_numeric_zscore(df, 'dst_bytes')
encode_text_dummy(df, 'land')
encode_numeric_zscore(df, 'wrong_fragment')
encode_numeric_zscore(df, 'urgent')
encode_numeric_zscore(df, 'hot')
encode_numeric_zscore(df, 'num_failed_logins')
encode_text_dummy(df, 'logged_in')
encode_numeric_zscore(df, 'num_compromised')
encode_numeric_zscore(df, 'root_shell')
encode_numeric_zscore(df, 'su_attempted')
encode_numeric_zscore(df, 'num_root')
encode_numeric_zscore(df, 'num_file_creations')
encode_numeric_zscore(df, 'num_shells')
encode_numeric_zscore(df, 'num_access_files')
encode_numeric_zscore(df, 'num_outbound_cmds')
encode_text_dummy(df, 'is_host_login')
encode_text_dummy(df, 'is_guest_login')
encode_numeric_zscore(df, 'count')
encode_numeric_zscore(df, 'srv_count')
encode_numeric_zscore(df, 'serror_rate')
encode_numeric_zscore(df, 'srv_serror_rate')
encode_numeric_zscore(df, 'rerror_rate')
encode_numeric_zscore(df, 'srv_rerror_rate')
encode_numeric_zscore(df, 'same_srv_rate')
encode_numeric_zscore(df, 'diff_srv_rate')
encode_numeric_zscore(df, 'srv_diff_host_rate')
encode_numeric_zscore(df, 'dst_host_count')
encode_numeric_zscore(df, 'dst_host_srv_count')
encode_numeric_zscore(df, 'dst_host_same_srv_rate')
encode_numeric_zscore(df, 'dst_host_diff_srv_rate')
encode_numeric_zscore(df, 'dst_host_same_src_port_rate')
encode_numeric_zscore(df, 'dst_host_srv_diff_host_rate')
encode_numeric_zscore(df, 'dst_host_serror_rate')
encode_numeric_zscore(df, 'dst_host_srv_serror_rate')
encode_numeric_zscore(df, 'dst_host_rerror_rate')
encode_numeric_zscore(df, 'dst_host_srv_rerror_rate')
# display 5 rows
df.dropna(inplace=True,axis=1)
df[0:5]
# This is the numeric feature vector, as it goes to the neural net
# Convert to numpy - Classification
x_columns = df.columns.drop('outcome')
x = df[x_columns].values
dummies = pd.get_dummies(df['outcome']) # Classification
outcomes = dummies.columns
num_classes = len(outcomes)
y = dummies.values
x_train, x_test, y_train, y_test = train_test_split(
x, y, test_size=0.25, random_state=42)
from __future__ import print_function
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras import backend as K
import time
"""
# input image dimensions
img_rows, img_cols = 28, 28
# the data, shuffled and split between train and test sets
(x_train, y_train), (x_test, y_test) = mnist.load_data()
if K.image_data_format() == 'channels_first':
x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)
x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)
input_shape = (1, img_rows, img_cols)
else:
x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
input_shape = (img_rows, img_cols, 1)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
"""
def func(param):
    # Fitness function evaluated by the particle-swarm optimiser (see the
    # Pso driver below). param = [n, l, fk, dr]:
    #   n  - units per hidden layer
    #   l  - number of hidden layers added in the loop
    #   fk - activation selector (1 = relu, 2 = sigmoid)
    #   dr - dropout rate in tenths (divided by 10 below)
    # Returns the model's validation accuracy (score[1]).
    # NOTE(review): relies on module-level x_train/x_test/y_train/y_test/y,
    # batch_size and epochs -- none are defined inside this function; confirm
    # they exist before this runs.
    # NOTE(review): indentation was reconstructed during review; confirm the
    # Dropout layer belongs inside the layer loop.
    start_time = time.time()
    n,l,fk,dr = param[0],param[1],param[2],param[3]
    model = Sequential()
    model.add(Dense(10, input_dim=x_train.shape[1], kernel_initializer='normal', activation='relu'))
    for i in range(l):
        if fk == 1:
            # Need input shape for first layer.
            model.add(Dense(n, activation='relu', input_dim=x_train.shape[1]))
        elif fk == 2:
            model.add(Dense(n, activation='sigmoid', input_dim=x_train.shape[1]))
        model.add(Dropout(dr/10)) # hard-coded dropout
    # NOTE(review): a 1-unit Dense layer immediately before the softmax
    # output bottlenecks the network to a single value -- this looks
    # suspicious; confirm it is intentional.
    model.add(Dense(1, kernel_initializer='normal'))
    model.add(Dense(y.shape[1],activation='softmax'))
    model.compile(loss=keras.losses.categorical_crossentropy,
                  optimizer=keras.optimizers.Adam(),
                  metrics=['accuracy'])
    # Stop training once validation loss has not improved for 5 epochs.
    cp = [keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=0, mode='auto')];
    model.fit(x_train, y_train,
              batch_size=batch_size,
              epochs=epochs,
              verbose=0,
              validation_data=(x_test, y_test),callbacks=cp)
    score = model.evaluate(x_test, y_test, verbose=0)
    # loss, val
    time_taken = time.time() - start_time
    print('current config:',param,'val:',score[1],' duration: ', time_taken)
    return score[1]
##################################################################
# Run the particle-swarm search over the hyper-parameter space.
# Lower/upper bounds per dimension: n in [1,16], l in [1,16],
# fk in [1,2], dr in [1,10].
# NOTE(review): Pso is defined elsewhere in this file.
pso = Pso(swarmsize=4,maxiter=14)
bp,value = pso.run(func,[1,1,1,1],[16,16,2,10])
# Re-evaluate the best parameter vector to get a fresh accuracy reading.
v = func(bp);
##################################################################
# NOTE(review): the labels below are misleading -- bp is the best
# parameter vector (not a loss) and value/v are accuracies.
print('Test loss:', bp)
print('Test accuracy:', value,v)
|
d5d0215505453271dc85017c8eeeb2175691b31f
|
[
"Markdown",
"Python"
] | 3
|
Markdown
|
ndayisengaosca/oscar2
|
93b3ac797c0eaa3ab797073c9587aab6e4de5693
|
aacded4858aa9f3553d6fc5f2bdff2d5106866a0
|
refs/heads/main
|
<repo_name>jupyterhub/mybinder.org-deploy<file_sep>/docs/source/components/cloud.md
# Cloud products
mybinder.org runs on [Google Cloud](https://cloud.google.com/) currently.
This document lists the various cloud products we use, and how we use them.
## Philosophy
We use **only** commodity cloud products - things that can be easily
replicated in other clouds _and_ bare-metal hardware. This gives us
several technical and social advantages:
1. We avoid vendor lock-in, and can migrate providers if need be
for any reason easily.
2. It makes our infrastructure easily reproducible by others,
who might have different resources available to them. This is
much harder if we have a hard dependency on any single cloud-provider's
products.
3. We can more easily contribute back to the open-source community.
Most such commodity products are open source, or have binary-
compatible open source implementations available. This allows us
to file and fix bugs in other Open Source Software for the benefit
of everyone, rather than just a particular cloud provider's implementation.
4. Local testing when a core component depends on a cloud provider's
product is usually very difficult. Constraining ourselves to commodity
products only makes this easier.
As an example, using PostgreSQL via [Google Cloud SQL](https://cloud.google.com/sql/docs/)
would be fine since anyone can run PostgreSQL. But using something like
[Google Cloud Spanner](https://cloud.google.com/spanner/) or
[Google Cloud PubSub](https://cloud.google.com/pubsub/docs/) is something to be
avoided, since these can not be run without also being on Google Cloud.
Similarly, using [Google Cloud LoadBalancing](https://cloud.google.com/load-balancing/)
is also perfectly fine, since a lot of open source solutions (HAProxy, Envoy, nginx, etc)
can be used to provide the same service.
## Projects
We have a [project](https://cloud.google.com/storage/docs/projects)
that runs on Google Cloud: `binderhub`. It contains the following two clusters:
1. `prod`, runs production - `mybinder.org` and all resources
needed for it.
2. `staging` runs staging - `staging.mybinder.org` and all resources
needed for it.
We try to make staging and prod be as similar as possible. Staging should
be smaller and use fewer resources. Everything we describe below
is present in staging too.
## Google Kubernetes Engine
The open source [Kubernetes](https://kubernetes.io/) project is used to run
all our code. [Google Kubernetes Engine (GKE)](https://cloud.google.com/kubernetes-engine/)
is the google hosted version of Kubernetes. It is very close to what is shipped
as Open Source, and does not have much in the way of proprietary enhancements.
### Cluster
In production, the cluster is called `prod`. In staging, it is called `staging`.
### Node Pools
GKE has the concept of a [NodePool](https://cloud.google.com/kubernetes-engine/docs/concepts/node-pools)
that specifies the kind of machine (RAM, CPU, Storage) we want to use for our Kubernetes
cluster. If we want to change the kind of machines we use, we can create a new NodePool,
cordon the current one, wait for all pods in current nodes to die, and then delete the
current NodePool.
### Machine sizes
The `prod` cluster currently uses `n1-highmem-32` machines. These have
32 CPU cores and 208 GB of Memory. We use the `highmem` machines (with more Memory per CPU)
as opposed to `standard` machines for the following reasons:
1. Memory is an _incompressible_ resource - once you give a process memory, you can
not take it away without killing the process. CPU is _compressible_ - you can
take away how much CPU a process is using without killing it.
2. Our users generally seem to be running not-very-cpu-intensive code, as can be
witnessed from our generally low CPU usage.
3. Docker layer caching gives us massive performance boosts - less time spent
pulling images leads to faster startup times for users. Using larger nodes
increases the cache hit rate, so we use nodes with more rather than less RAM.
_tl;dr: Using `highmem` machines saves us a lot of money, since we are not paying for CPU
we are not using!_
The `staging` cluster uses much smaller machines than the production one, to keep costs
down.
### Boot disk sizes
In `prod`, we use 1000 GB SSD disks as boot disks. On Google Cloud, the size of
the disk [controls the performance](https://cloud.google.com/compute/docs/disks/performance) - the larger the disk, the faster it is. Our disks need to be fast since we
are doing a lot of I/O operations during docker build / push / pull / run, so we
use SSDs.
Note that SSD boot disks are _not_ a feature available on GKE to all customers -
we have been given [early access](https://github.com/kubernetes/kubernetes/issues/36499)
to this feature, since it makes a dramatic difference to our performance (and
we knew where to ask!).
Staging does not use SSD boot disks.
### Autoscaling
We use the GKE [Cluster Autoscaler](https://cloud.google.com/kubernetes-engine/docs/concepts/cluster-autoscaler)
feature to add more nodes when we run out of resources. When the cluster is 100%
full, the cluster autoscaler adds a new node to handle more pods. However,
there is no way to make the autoscaler kick in at 80% or 90% utilization
([bug](https://github.com/kubernetes/autoscaler/issues/148)), so this leads
to [launch failures](https://github.com/jupyterhub/mybinder.org-deploy/issues/474)
for a short time when a new node comes up.
The autoscaler can be set to have a `minimum` number of nodes and a `maximum` number
of nodes.
## Google Container Registry
A core part of mybinder.org is building, storing and then running docker images
(built by [repo2docker](https://github.com/jupyterhub/repo2docker)). Docker images
are generally stored in a [docker registry](https://github.com/distribution/distribution),
using a well-defined standard API.
We use Google Cloud's hosted docker registry - [Google Container Registry (GCR)](https://cloud.google.com/container-registry/).
This lets us use a standard mechanism for storing and retrieving docker images
without having to run any of that infrastructure ourselves.
### Authentication
GCR is private by default, and can be only used from inside the Google Cloud project
the registry is located in. When using GKE, the authentication for pulling images
to run is already set up for us, so we do not need to do anything special. For pushing
images, we authenticate via a [service account](https://cloud.google.com/container-registry/docs/advanced-authentication#json-key).
You can find this service account credential under `registry` in `secrets/config/prod.yaml`
and `secrets/config/staging.yaml`.
### Access
The images are scoped per-project, the images made by `mybinder.org` are
stored in the `binder-prod` project, and the images made by `staging.mybinder.org`
are stored in the `binder-staging` project.
We do not allow users to pull our images, for a few reasons:
1. We pay network egress costs when images are used outside the project they are in,
and this can become very costly!
2. This can be abused to treat us as a content redistributor - build
an image with content you want, and then just pull the image from elsewhere. This
makes us a convenient possible hop in cybercrime / piracy / other operations,
complicates possible [DMCA](https://en.wikipedia.org/wiki/Digital_Millennium_Copyright_Act)
/ [GDPR](https://en.wikipedia.org/wiki/General_Data_Protection_Regulation) compliance and
probably a bunch of other bad things we do not have the imagination to foresee.
3. We might decide to clean up old images when we no longer need them, and this might
break other users who might depend on this.
For users who want access to a docker image similar to how it is built with Binder,
we recommend using [repo2docker](https://github.com/jupyterhub/repo2docker) to build
your own, and push it to a registry of your choice.
### Naming
Since building an image takes a long time, we would like to re-use images as much
as possible. If we have built an image once for a particular repository at a particular
commit, we want to not rebuild that image - just re-use it as much as possible.
We generate an image name for each image we build that is uniquely derived from
the name of the repository + the commit hash. This lets us check if
the image has already been built, and if so, we can skip the building step.
The code for generating the image name from repository information is
in [binderhub's builder.py](https://github.com/jupyterhub/binderhub/blob/HEAD/binderhub/builder.py),
under `_generate_build_name`.
Sometimes, we _do_ want to invalidate all previously built images - for example,
when we do a major notebook version bump. This will cause all repositories to be
rebuilt the next time they are launched. There is a performance cost to this, so
this invalidation has to be done judiciously. This is done by giving all the images
a `prefix` (`binderhub.registry.prefix` in `config/prod.yaml` and `config/staging.yaml`).
Changing this prefix will invalidate all existing images.
## Logging, Metrics, and Dashboarding
We use [Google Stackdriver](https://cloud.google.com/products/operations) for logging
activity on the Kubernetes deployment. This is useful for listing the raw
logs coming out of BinderHub, though we don't use it for dashboarding (see below).
We use [prometheus](metrics) for collecting more fine-grained metrics about
what's happening on the deployment, and [grafana](dashboards) for generating
dashboards using the data from prometheus.
We use [Google Analytics](https://analytics.google.com/analytics/web) to keep
track of activity on the `mybinder.org` site, though note that we lose track
of users as soon as they are directed to their Binder session.
<file_sep>/scripts/get-prs.py
#!/usr/bin/env python
import os
import re
import uuid
from argparse import ArgumentParser
import github
def extract_gitref(s):
    """
    Pull the bare git ref out of a container-registry tag or Helm chart
    version string. Strings with no embedded ref are returned unchanged.

    Examples:
    - 2022.02.0 -> 2022.02.0
    - 2022.02.0-90.g0345a86 -> 0345a86
    - 0.2.0 -> 0.2.0
    - 0.2.0-n1011.hb49edf6 -> b49edf6
    - 0.2.0-0.dev.git.2752.h3450e52 -> 3450e52
    """
    # A version, a dash, arbitrary build metadata, then a 'g'/'h' prefix
    # followed by the hex SHA we want.
    match = re.match(r"[\d\.]+-[\w\.]+[gh]([0-9a-f]+)", s)
    return match.group(1) if match else s
# --- Script body: summarise the PRs merged between two refs of a repo. ---
# NOTE(review): indentation of the if/else blocks below was reconstructed
# during review; confirm against the original script.
# Token is optional; unauthenticated GitHub API access has lower rate limits.
token = os.getenv("GITHUB_TOKEN")
parser = ArgumentParser(description="Summarise PRs from a repo")
parser.add_argument("repo", help="The repository in format user/repo")
parser.add_argument("start", help="commit or image/chart version from which to start")
parser.add_argument("end", help="commit or image/chart version to which to end")
parser.add_argument(
    "--write-github-actions-output",
    help="Name of a GitHub Action's output variable to write to",
)
parser.add_argument(
    "--max-commits",
    type=int,
    default=100,
    help="Maximum number of commits to check",
)
args = parser.parse_args()
gh = github.Github(token)
r = gh.get_repo(args.repo)
# start/end may be image tags or chart versions; reduce them to git refs.
start = extract_gitref(args.start)
end = extract_gitref(args.end)
prs = set()
git_compare = r.compare(start, end)
commits = list(git_compare.commits)
if len(commits) > args.max_commits:
    # Too many commits: looking up PRs per commit would be slow and
    # rate-limit heavy, so just report the count.
    pr_summaries = [
        f"{len(commits)} commits between {start} and {end}, not searching for PRs"
    ]
else:
    for c in commits:
        if len(c.parents) == 1:
            # Chartpress ignores merge commits when generating the Helm chart SHA
            prs.update(c.get_pulls())
    # One markdown bullet per PR, sorted by PR number, with its labels.
    pr_summaries = [
        f"- [#{pr.number}]({pr.html_url}) {pr.title} ({', '.join(label.name for label in pr.labels)})"
        for pr in sorted(prs, key=lambda pr: pr.number)
    ]
md = ["# PRs"] + pr_summaries + ["", f"{r.html_url}/compare/{start}...{end}"]
md = "\n".join(md)
if args.write_github_actions_output:
    # GitHub Actions docs on setting a output variable with a multiline string:
    # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings
    #
    # A random EOF marker avoids collisions with the markdown content.
    eof_marker = str(uuid.uuid4()).replace("-", "_")
    with open(os.environ["GITHUB_OUTPUT"], "a") as f:
        print(f"{args.write_github_actions_output}<<{eof_marker}", file=f)
        print(md, file=f)
        print(eof_marker, file=f)
else:
    print(md)
<file_sep>/docs/source/components/metrics.md
# Metrics collection with Prometheus
We collect operational metrics about all the components of mybinder.org
and create dashboards from them. This document details the components
involved in collecting, storing and querying the metrics.
This is only for operational metrics - **not** for analytics on repositories
built or traffic.
## Metrics Storage + Querying
We use [Prometheus](https://prometheus.io/) to store and query our metrics.
### What is Prometheus?
Prometheus is a [Time Series Database](https://en.wikipedia.org/wiki/Time_series_database)
optimized for storing operational metrics. It stores all data as
streams of timestamped values belonging to the same **metric** and the
same set of **labels**.
The **metric name** specifies the general feature of a system that is
measured (e.g. `http_requests_total` - the total number of HTTP requests received).
A set of labels for the same metric name identifies a particular
dimensional instantiation of that metric (for example: all HTTP requests
that used the method `POST` to the `/api/tracks` handler would be represented
as the time series `http_requests_total{method="POST", handler="/api/tracks"}`).
The prometheus documentation has more information on its
[data model](https://prometheus.io/docs/concepts/data_model/) and the different
[kinds of time series](https://prometheus.io/docs/concepts/metric_types/) available.
These two pages are fairly short and are highly recommended reading!
### Querying
Prometheus has its own query language called
[PromQL](https://prometheus.io/docs/prometheus/latest/querying/basics/),
optimized for time series queries.
The prometheus documentation has fairly clear and thorough documentation
on PromQL - [basics](https://prometheus.io/docs/prometheus/latest/querying/basics/),
[operators](https://prometheus.io/docs/prometheus/latest/querying/operators/)
and [functions](https://prometheus.io/docs/prometheus/latest/querying/functions/).
You do not need to become an expert, but a basic understanding is useful.
There are also [examples](https://prometheus.io/docs/prometheus/latest/querying/examples/)
to pick up and play with!
[prometheus.mybinder.org](https://prometheus.mybinder.org/graph) is our public
prometheus installation, and you can practice your queries there!
### Metrics Ingestion
Prometheus uses a **pull** model for metrics. It has a list of
targets, and constantly polls them for their current state, and
records what it gets back. The targets are supposed to respond
to these HTTP requests with data in the
[prometheus format](https://prometheus.io/docs/instrumenting/exposition_formats/).
Our data is currently sourced from the following targets.
#### Node information
The [node_exporter](https://github.com/prometheus/node_exporter) exports
information about each node we run - CPU usage, memory left, disk space,
etc. It provides fairly detailed info, usually prefixed with `node_`.
This is not kubernetes specific.
#### Kubernetes information
[kube-state-metrics](https://github.com/kubernetes/kube-state-metrics)
exposes information about the kubernetes cluster - such as number of pods
and the states they are in, number of nodes, etc. These are usually
prefixed with `kube_`.
These only contain information from kubernetes API itself. For example,
'how much RAM are these containers using' is not recorded by `kube-state-metrics`,
since that is not information that is available to the Kubernetes API.
'how much RAM are these pods requesting' is, however, available.
#### Container information
[cadvisor](https://github.com/google/cadvisor) provides detailed runtime
information about all the containers running in the cluster. This is information
mostly not available from `kube-state-metrics` - such as 'how much RAM are
these containers using right now', etc. These are usually prefixed with
`container_`.
#### HTTP request information
We use the [nginx-ingress helm chart](https://github.com/kubernetes/ingress-nginx/tree/main/charts/ingress-nginx)
to let all HTTP traffic into our cluster. This allows us to use
the [nginx VTS exporter](https://hnlq715.github.io/nginx-vts-exporter/)
to collect information in prometheus about requests / responses.
These metrics are prefixed with `nginx_`.
#### BinderHub information
[BinderHub](https://github.com/jupyterhub/binderhub) itself exposes
metrics about its operations in the prometheus format, using
the [python prometheus client library](https://github.com/prometheus/client_python).
These are currently somewhat limited, and prefixed with `binderhub_`
### Configuration
Prometheus is installed using the
[prometheus helm chart](https://github.com/prometheus-community/helm-charts/tree/main/charts/prometheus).
This installs the following components:
1. Prometheus server (storage + querying)
2. `node_exporter` on every node
3. A `kube-state-metrics` instance
`cadvisor` is already present on all nodes (it ships with the `kubelet`
kubernetes component), and the prometheus helm chart has configuration
that adds those as targets.
You can see the available options for configuring the prometheus
helm chart in its [values.yaml](https://github.com/prometheus-community/helm-charts/tree/main/charts/prometheus/values.yaml)
file. You can see the current configuration we have under the `prometheus`
section of `mybinder/values.yaml`, `config/prod.yaml` and `config/staging.yaml`.
<file_sep>/docs/source/incident-reports/2018-03-26-no-space-left.md
# 2018-03-26, "no space left on device"
## Summary
A node became unhealthy, correlated with a flood of "no space left on device"
messages in the logs. Kubernetes noticed the issue and appeared to recover itself in 20 minutes. Binder launches were failing during this time.
## Timeline
All times in CEST
### 2018-03-26 10:34
Launch success metric reaches 0%.
### 2018-03-26 10:55
Cluster self-heals and everything returns to normal, launch success back to 100%.
### 2018-03-26 15:00
Dip in log success metric noticed via grafana, investigation launched.
Found hundreds of "No space left on device" messages on node at the time of the event.
Cordoned node where the error occurred (gke-prod-a-ssd-pool-32-134a959a-wlmp),
despite the fact that it appears to have recovered.
Created log metric for "No space left on device" logs, exported to stackdriver.
Upon testing of stackdriver, observed that another node, gke-prod-a-ssd-pool-32-134a959a-ql6n,
has been reporting the same message hundreds of times.
Cordoned that node as well, for good measure.
Both of the cordoned nodes were aged 5-6 days and were the oldest nodes in the cluster.
I suspect that node age is related to this, and we will see it every week or so as something accumulates on the nodes.
The root cause is still unknown.
## Lessons learned
### What went well
- Cluster noticed the issue and self-healed. Recovery took ~20 minutes.
- Investigation was not under pressure as the cluster was in a functioning state at the time.
- Correlating VM logs in the google cloud console with grafana charts indicating an issue is
very useful but somewhat difficult as there is a very large amount of information.
### What went wrong
- Binder was unavailable, but nobody was notified.
Only proactive checking of Binder status on Grafana revealed the issue.
## Action items
- [x] Add log metric for "no space left on device" messages that seem correlated with problematic nodes
- [x] Add log metric for "Error creating user" message
- [x] Add metrics-based alert for "Error creating user" messages via stackdriver
- [ ] Enable SMTP for alerts from grafana metrics [GitHub Issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/365)
- [ ] Investigate root cause of "no space left on device" messages
- [ ] Systematically cordon and cull nodes older than 3-5 days?
<file_sep>/tests/test_launch.py
import json
import pytest
import requests
@pytest.mark.timeout(497)
def test_launch_binder(binder_url):
    """
    We can launch an image that most likely already has been built.

    Streams the build events for a known-good repo/ref, waits for the
    'ready' phase, then checks the launched notebook server responds to
    its API and shuts it down again.
    """
    # Known good version of this repo
    repo = "binder-examples/requirements"
    ref = "50533eb"
    build_url = binder_url + f"/build/gh/{repo}/{ref}"
    # The build endpoint is an event stream; read it incrementally.
    r = requests.get(build_url, stream=True)
    r.raise_for_status()
    for line in r.iter_lines():
        line = line.decode("utf8")
        if line.startswith("data:"):
            # Event payload is JSON after the "data:" prefix.
            data = json.loads(line.split(":", 1)[1])
            if data.get("phase") == "ready":
                notebook_url = data["url"]
                token = data["token"]
                break
    else:
        # The event stream ended without ever reaching the 'ready' phase;
        # fail with an explicit message rather than a bare assert.
        pytest.fail("Build event stream ended without a 'ready' event")

    # The notebook server should answer an authenticated API request.
    headers = {"Authorization": f"token {token}"}
    r = requests.get(notebook_url + "/api", headers=headers)
    assert r.status_code == 200
    assert "version" in r.json()
    # Clean up: shut the launched server down again.
    r = requests.post(notebook_url + "/api/shutdown", headers=headers)
    assert r.status_code == 200
<file_sep>/docs/source/incident-reports/2018-04-18-cull-flood.md
# 2018-04-18, Culler flood
A deploy of updates to the Hub included an upgraded implementation of the idle-culler script and an update to kubespawner.
The culler script had a bug which resulted in flooding the Hub with requests to stop servers that aren't running.
Additionally, a resource leak was introduced in the update to kubespawner,
which caused JupyterHub to become unresponsive after launching a certain
number of servers.
As a result, Binder service was degraded with periodic outages of 10-30 minutes.
## Timeline
All times in CEST
### 2018-04-18 12:35
Upgrade of jupyterhub, binderhub charts is deployed. Tests pass and builds and launches are working.
### ~13:30-13:45
Launch success drops to zero.
### 14:29
Binder outage is reported on Gitter by @jakamkon, investigation is started.
JupyterHub is determined to be inaccessible. Suspecting network issues, the proxy pod is restarted.
This does not resolve the issue, so the nginx-ingress pods are restarted.
This also does not resolve the issue.
Upon discovering that the proxy-patches endpoint is responsive (by manually visiting https://hub.mybinder.org/user/doesntexist).
This endpoint working means that the proxy and ingress are both working,
and it is only the Hub itself that is not responsive.
The hub pod is restarted, and launches quickly return back to 100% success rate.
### 14:49
After returning the deployment to working order, the logs of the Hub pod around the start of the outage (~13:30) are investigated.
The logs show a large and increasing volume of 400 errors in the culler,
indicating that the recent changes in the culler may be responsible.
This suggests that an outage will recur in a similar amount of time after restarting the Hub.
Coincidence: a [pull request](https://github.com/jupyterhub/jupyterhub/pull/1807) had just been merged, fixing bugs in the culler.
It is suspected that these are exactly the bugs responsible for the outage.
The process to deploy a change to mybinder.org begins:
### 14:43
[apply changes to jupyterhub chart](https://github.com/jupyterhub/zero-to-jupyterhub-k8s/pull/655)
### 14:55
[pull updated jupyterhub into binderhub chart](https://github.com/jupyterhub/binderhub/pull/526)
### 15:36
[deploy to mybinder.org](https://github.com/jupyterhub/mybinder.org-deploy/pull/562)
The final deploy has to be resubmitted multiple times before it succeeds.
This is in part because the culler bug resulted in a failure to delete users,
so users were constantly accumulating.
The large number of users meant the updated Hub took a long time to deploy.
When the new culler arrived it began to cull the old users,
of which there were many.
The sustained deletions also put a lot of load on the Hub,
but a one-time cost because the deletions succeeded this time.
Hub behavior is believed to have returned to normal.
### 17:03
Believing the issue to be resolved, work resumes,
merging new pull requests into binderhub.
### 17:23
Observing culler behavior is fixed, but memory and CPU growth continues.
Reviewing further changes that were part of the revised deploy,
a new feature of kubespawner is suspected as the source of the leak.
The process begins to deploy reverting this change.
Since this is a zero-to-jupyterhub change, it will again take an hour to propagate to mybinder.org
### 19:47
Latest deploy is attempted to mybinder.org.
Deploying to master fails due to a bug in the newly introduced image culler.
Updates are reverted to the last-known-working configuration.
### 19. April, 11:00
Fix for image-culler bug that prevented the previous deploy is [applied](https://github.com/jupyterhub/binderhub/pull/530)
and [deployed](https://github.com/jupyterhub/mybinder.org-deploy/pull/567).
### 11:07
This deploy is successful except on production,
due to another bug in the image cleaner.
This time, the image cleaner is disabled by
[setting the inode threshold to 0](https://github.com/jupyterhub/mybinder.org-deploy/pull/568) without reverting other changes.
### 11:27
Deploy is successful, but took close to half an hour.
Memory leak is not fixed.
### 11:47
To avoid reverting more large deploys,
only the hub image is [reverted to the last known good version](https://github.com/jupyterhub/mybinder.org-deploy/pull/569).
After reverting the hub image update, everything is okay again.
### 11:58
On investigation of logs, it appears that the Hub was not fully upgraded to its target version.
The cause for this is still unknown,
but could be the result of manual interactions with the cluster during the upgrade.
The pinning of the hub image is [reverted](https://github.com/jupyterhub/mybinder.org-deploy/pull/570)
and everything is now up-to-date.
It is observed that the memory leak does not recur,
confirming that the kubespawner update is the root cause.
## Lessons learned
- It takes about an hour to deploy a change to zero-to-jupyterhub all the way to
mybinder.org. This is because we must wait for the tests in all repositories to
run twice: once to verify the pull request, then again after merge before
publishing the chart. Since these tests each take ~10 minutes, that's 40
minutes of waiting, not counting the human time of observing one success and
submitting the next pull request.
- Culler behavior is not covered by tests.
This is difficult, since the culler accumulates tasks over time,
but some basic test coverage should be possible.
- Image cleaner is not covered by tests.
- Deploying many changes at once makes it more challenging to identify the causes of regressions.
- Since so many of these deployment processes take a very long time,
even if a fix is known, reverting a bad version and waiting for the new one
may often be preferable to keeping the degraded state while the fix propagates.
The downside of doing this is that large (many services changed) deploys
can take a long time as rolling updates are performed.
Reverting a large deploy can result in significant downtime
during the revert.
- Three separate bugs were introduced and resolved in this process:
1. flood bug in updated culler
2. memory leak bug in updated kubespawner
3. failure-to-start bug in new image-cleaner
More granular continuous deployments would have allowed us to find and catch
each of these issues one at a time.
On the other hand, deploying at dedicated, less frequent times
would allow the team to be prepared to handle and respond to the update process,
rather than reacting to issues as they are deployed throughout the week.
## Action items
- Revisit automatic pull requests to encourage keeping
mybinder.org up-to-date with smaller, more-frequent changes
([Issue](https://github.com/jupyterhub/binderhub/pull/222))
- Add alarms for sustained high CPU and memory usage from JupyterHub and BinderHub pods. Related to [this issue](https://github.com/jupyterhub/mybinder.org-deploy/pull/527)
- Figure out tests for the culler (this may be part of splitting out the culler into its own package)
([Issue](https://github.com/jupyterhub/jupyterhub/issues/1791))
- Fix bugs in image-culler preventing it from running
([PR](https://github.com/jupyterhub/binderhub/pull/534))
- Allow disabling image culler in helm chart
- Investigate and fix memory leak in kubespawner
([Issue](https://github.com/jupyterhub/kubespawner/issues/165))
- Describe process for deploying jupyterhub image bumps to mybinder.org as short-term fixes during an incident?
<file_sep>/images/analytics-publisher/README.md
# Events Archiver
**Events Archiver** processes events emitted by BinderHub & makes
them publicly available. It reads from a Google Cloud Storage bucket
populated by [StackDriver export](https://cloud.google.com/logging/docs/export/using_exported_logs),
puts them into a more standard format, and publishes it to another
GCS bucket.
## Timestamp resolution reduction
Publicly published events have timestamps with minute resolution -
second & microsecond information is trimmed. This is a precautionary measure
to protect against possibly de-anonymization attacks in the future.
## Running Locally
You can test events archiver locally with:
```bash
GOOGLE_APPLICATION_CREDENTIALS=secrets/analytics-publisher-auth-key-prod.json \
python images/analytics-publisher/archiver.py \
--debug \
--dry-run \
binderhub \
binderhub-events-text \
mybinder-events-raw-export \
mybinder-events-archive
```
The `--debug` and `--dry-run` options tell the script to print output
to stdout, rather than upload to another GCS bucket.
## How to update requirements.txt
Because `pip-compile` resolves `requirements.txt` with the current Python for
the current platform, it should be run on the same Python version and platform
as our Dockerfile.
```shell
# run from images/analytics-publisher
# update requirements.txt based on requirements.in
docker run --rm \
--env=CUSTOM_COMPILE_COMMAND="see README.md" \
--volume=$PWD:/io \
--workdir=/io \
--user=root \
python:3.9-slim-bullseye \
sh -c 'pip install pip-tools==6.* && pip-compile --upgrade'
```
<file_sep>/docs/source/operation_guide/common_problems.md
# Common problems and their solutions
This is a page to list a few of the common problems that we run into during
operation of `mybinder.org`, and some common solutions that solve these
problems. In general, **manual intervention** is something that we should
avoid requiring, but sometimes it is necessary. This page serves as a helpful
guide for people maintaining `mybinder.org`, and an informal list of things
we should design new technology to fix.
## The Docker-in-Docker socket
When using Docker-in-Docker, there is a chance that `dind` hasn't started when a
build is requested. If this happens, the volume mount to load
`/var/run/dind/docker.sock` into the build container may occur before `dind`
has created the socket. If this happens, the volume mount will create a
directory at the mount point (which we don't want to happen). If this happens,
Docker-in-Docker will be inaccessible until `/var/run/dind` is manually deleted
and the `dind` pod is restarted.
### How to spot the problem
Build pods will not be working, and the `dind` pods are stuck in `CrashLoopBackOff`.
### How to resolve the problem
1. Find out which node contains the crashing `dind` pod (aka, the node that has
_folder_ in `/var/run/dind/docker.sock` rather than the socket file).
You can do so by running:
kubectl --namespace=<ns> get pod -o wide
2. Once you find the node of interest, SSH into it with:
gcloud compute ssh <nodename>
3. Manually delete the `docker.sock` folder from the node.
sudo rm -rf /var/run/dind/docker.sock/
4. Delete the `dind` pod (k8s will automatically create a new one)
kubectl --namespace=<ns> delete pod <dind-pod-name>
## Networking Errors
Sometimes there are networking errors between pods, or between one pod and
all other pods. This section covers how to debug and correct for networking
on the Kubernetes deployment.
### Manually confirm network between pods is working
To confirm that binderhub can talk to jupyterhub, to the internet in general, or
you want to confirm for yourself that there is no connectivity problem between
pods follow this recipe.
1. connect to the pod you want to use as "source", for example the jupyterhub
pod: `kubectl --namespace=prod exec -it hub-989cc9bd-bbkbk /bin/bash`
1. start `python3`, `import requests`
1. use `requests.get(host)` to check connectivity. Some interesting hostnames
to try talking to are:
- http://binder/, the binderhub service
- http://hub:8081/hub/api, the jupyterhub API
- http://proxy-public/hub/api, the CHP route that redirects you to the
jupyterhub API (content of the response should be equal)
- http://google.com/, the internet
- the CHP API needs a token so run: `headers={'Authorization': 'token ' + os.environ['CONFIGPROXY_AUTH_TOKEN']}`
and then`requests.get('http://proxy-api:8001/api/routes', headers=headers)`
- Other hostnames within the Kubernetes deployment. To find out hostnames
to try look at the `metadata.name` field of a kubernetes
service in the helm chart. You should be able to connect to each of them using
the name as the hostname. Take care to use the right port, not all of them are
listening on 80.
Here's a code snippet to try all of the above in quick succession:
```python
import requests
import os
urls = ["binder/", "hub:8081/hub/api", "proxy-public/hub/api", "google.com/"]
for url in urls:
resp = requests.get("http://" + url)
print('{}: {}'.format(url, resp))
```
### Spikes in traffic
Spikes in traffic can cause congestion, slowness, or surface bugs in the
deployment. Here are some ways to detect spikes.
#### Spikes to `mybinder.org`
Spikes to `mybinder.org` are most-easily detected by going to the project's
Google Analytics page. Look at the "real-time" page and see if there is a big
shift from typical patterns of behavior.
#### Spikes to the `/build` API
Sometimes there are spikes to the BinderHub build API, which we cannot capture
with Google Analytics. Spikes to the build API usually come from a single
repository, and can be found with the following command.
To list the API requests to `/build`:
```bash
kubectl --namespace=prod logs -l component=controller | grep '/build'
```
and to list the number of API requests to `/build` that contain a particular
word:
```bash
kubectl --namespace=prod logs -l component=controller | grep '/build' | grep <word-name> | wc -l
```
<file_sep>/docs/source/incident-reports/2018-01-18-ssl-outdated.md
# 2018-01-11, Warning from letsencrypt about outdated SSL certificate
## Summary
A team member received a warning from letsencrypt saying that the SSL certificate
for `beta.mybinder.org` was going to expire soon. This was unexpected because
we use `kube-lego` to automatically register new SSL certificates for various
sub-domains of `mybinder.org`. After a few days, we re-checked the SSL
certificate on `beta.mybinder.org` and it seemed to have renewed properly,
so this was a noop from our perspective. However it revealed a few things we
should do differently to make sure this doesn't happen again.
## Timeline
All times in PST
### 2018-01-11
A team member received an email that our SSL for `beta.mybinder.org` was going to expire.
He opened https://github.com/jupyterhub/mybinder.org-deploy/issues/283.
### 2018-01-18
Another team member used the SSL certificate checking website below:
https://www.ssllabs.com/ssltest/analyze.html?d=beta.mybinder.org&latest
to inspect the current SSL certificate of `beta.mybinder.org`. This seemed
to be correctly renewed, and the immediate problem considered resolved.
### 2018-01-19 11:00
We received another email saying that both `beta` and `docs` were out
of date.
Upon looking at the Google Analytics history, we realized that the date
these certificates were scheduled to run out was exactly 3 months from the
day we switched `mybinder.org` to point to the `beta` deployment.
The `letsencrypt` [expiration emails doc](https://letsencrypt.org/docs/expiration-emails/)
says that if the name / details of the certificate you request change at all,
you may receive these emails even though you've successfully renewed your certificate.
We double checked that the cert for `beta`, `docs`, and `*` look correct, which they did.
So, we concluded that we're getting these notices because the SSL details
changed and letsencrypt has (expectedly) failed to link the two.
### 2018-01-19 11:00
A team member noticed that this is because our kubernetes deployment has an
account that's unique to the domain we were using. So when we changed domains
(from `beta` to `*`), we also switched accounts on letsencrypt. Our old account
is what is triggering the emails, but our new account is working fine.
## Action items
### Process
- Do not use a single team member's email address for letsencrypt
- Instead, use a shared google groups email account so we all get pinged
- This has been done: <EMAIL>
- keep an eye on the SSL once the first expiration date comes around and make sure
this is a correct assumption.
## Related Issues / PRs
https://github.com/jupyterhub/mybinder.org-deploy/issues/283
<file_sep>/WISDOM.md
# A place to put the collective wisdom of the Binder project.
- When you are in an outage, focus only on fixing the outage - do not try to do anything else.
- Prefer minor annoyances happening infrequently but at regular intervals, rather than major annoyances happening rarely but at unpredictable intervals.
<file_sep>/docs/source/incident-reports/2017-11-30-oom-proxy.md
# 2017-11-30 4:23PM PST, OOM (Out of Memory) Proxy
## Summary
Tim and Chris noticed at 0:59 CET (need to convert to PST) that mybinder.org showed an error that creating temporary users failed when launching a new server. This was noticed because Tim happened to look at grafana. One node running a proxy had run out of memory which broke the proxy. Probably a build container had used up all the memory on the node as builds had no memory limits enforced.
## Timeline
### 2017-11-30 16:22 PST
Node runs out of memory, service starts degrading.
### 16:59 PST
Investigation starts after noticing grafana looks weird. Error message when launching a new server:
```
Found built image, launching...
Launching server...
Failed to create temporary user for gcr.io/binder-prod/r2d-fd74043wildtreetech-explore-open-data
:6be6d6329de99168d0a0b68f9d52228ecc61ac50
```
Confirmed by Tim and Chris.
### 17:02 PST
Suggestion that it might be related to https://status.cloud.google.com/incident/storage/17006. All nodes show status "Ready" on "Kubernetes clusters" page of cloud console.
### 17:04 PST
Chris reports that proxy pods are in CrashloopBackoff state and had been restarted ~13 times. A kubectl describe on the pod gave:
```
Normal Started 42m kubelet, gke-prod-a-default-pool-6ab3d8ec-jsv7 Started container
Normal Created 42m (x3 over 42m) kubelet, gke-prod-a-default-pool-6ab3d8ec-jsv7 Created container
Normal Started 42m (x3 over 42m) kubelet, gke-prod-a-default-pool-6ab3d8ec-jsv7 Started container
Normal Pulled 42m (x3 over 42m) kubelet, gke-prod-a-default-pool-6ab3d8ec-jsv7 Container image "gcr.io/google_containers/nginx-ingress-controller:0.9.0-beta.15" already present on machine
Warning BackOff 7m (x163 over 42m) kubelet, gke-prod-a-default-pool-6ab3d8ec-jsv7 Back-off restarting failed container
Warning FailedSync 2m (x187 over 42m) kubelet, gke-prod-a-default-pool-6ab3d8ec-jsv7 Error syncing pod
```
and
```
Normal Created 10s (x3 over 41s) kubelet, gke-prod-a-default-pool-6ab3d8ec-0h28 Created container
Normal Started 10s (x3 over 41s) kubelet, gke-prod-a-default-pool-6ab3d8ec-0h28 Started container
Normal Pulled 10s (x3 over 41s) kubelet, gke-prod-a-default-pool-6ab3d8ec-0h28 Container image "gcr.io/google_containers/nginx-ingress-controller:0.9.0-beta.15" already present on machine
Warning Unhealthy 9s kubelet, gke-prod-a-default-pool-6ab3d8ec-0h28 Readiness probe failed: Get http://10.12.5.36:10254/healthz: dial tcp 10.12.5.36:10254: getsockopt: connection refused
Warning BackOff 5s (x5 over 33s) kubelet, gke-prod-a-default-pool-6ab3d8ec-0h28 Back-off restarting failed container
Warning FailedSync 5s (x5 over 33s) kubelet, gke-prod-a-default-pool-6ab3d8ec-0h28 Error syncing pod
```
and logs gave:
`Error from server (BadRequest): a container name must be specified for pod proxy-fd9c4f59b-pjhth, choose one of: [nginx chp]`
### 17:07
Tim asks if "some of them are still running?". Chris says that pods are running but no new ones are being created.
### 17:09
jupyterhub pod shows lots of DELETE requests that are timing out and tracebacks. Tim suggests restarting jupyterhub pod.
### 17:12
Tim asks how to contact Yuvi. Tim restarts jhub pod with `kubectl get pod hub-deployment-944c6474c-22sjh -n prod -o yaml | kubectl replace --force -f -`
This restarts the jupyterhub, removes all user pods, error persists.
### 17:14
Chris runs `kubectl describe` on the node on which the proxy runs which gives:
```
Warning OOMKilling 58m kernel-monitor, gke-prod-a-default-pool-6ab3d8ec-0h28 Memory cgroup out of memory: Kill process 8339 (python) score 1576 or sacrific
```
### 17:18
Chris suggests to try cordoning the OOM node and then deleting the proxy pod.
### 17:20
Chris notices that proxy pod is working again. Service appears to be restored. proxy pod had been running for 10m already by this point (since ~15:10). Chris uncordons node again.
### 17:21 PST
Service is restored, user pods start running again.
## Action items
### Repo2docker
Limit memory available to build pods. (https://github.com/jupyterhub/repo2docker/pull/159)
### Jupyterhub
Fix configuration so that user pods are not cleaned up when jupyterhub pod restarts (https://github.com/jupyterhub/mybinder.org-deploy/issues/198)
### Process
1. Should nominate one person to be the driver, maybe a shared terminal. So that only one person is issuing commands to change the state of the cluster and the other person knows about them.
2. Better alerting for when something is wrong with the cluster. This one could have been alerted by counting 4xx responses on the jupyterhub pod.
## Logs
Trying to collect to gather logs looked at during the incident:
- https://hackmd.io/JwBgjAhgTARgHAZgLRjlMSAsBjCB2JGAEzwiTgFYwKBTNANnoDNwg===?edit (bhub pod log starting from before the problem was noticed)
- https://console.cloud.google.com/logs/viewer?project=binder-prod&minLogLevel=0&expandAll=false&timestamp=2017-11-30T23%3A22%3A18.000000000Z&resource=container%2Fcluster_name%2Fprod-a%2Fnamespace_id%2Fprod&advancedFilter=resource.type%3D%22container%22%0Aresource.labels.pod_id%3D%22prod-nginx-ingress-controller-7b4fdbdcc8-qp6jk%22%0Aresource.labels.zone%3D%22us-central1-a%22%0Aresource.labels.project_id%3D%22binder-prod%22%0Aresource.labels.cluster_name%3D%22prod-a%22%0Aresource.labels.container_name%3D%22nginx-ingress-controller%22%0Aresource.labels.namespace_id%3D%22prod%22%0Aresource.labels.instance_id%3D%224874803091664833146%22%0Atimestamp%3D%222017-11-30T23%3A22%3A18.000000000Z%22%0AinsertId%3D%221xxo64cg1cnzk5b%22 weird things start showing up in the log around here
- gitter chat log https://gitter.im/jupyterhub/binder?at=5a209b75ffa3e37919df1c15
- grafana screenshots https://github.com/jupyterhub/binder/issues/39
<file_sep>/tests/conftest.py
import sys
from pathlib import Path
import pytest
import yaml
# make sure repo root is on path so we can import from `deploy`
here = Path(__file__).parent.resolve()
# repository root is the parent of the tests/ directory containing this file
repo_root = here.parent
sys.path.insert(0, str(repo_root))

from deploy import get_config_files
def pytest_addoption(parser):
    """Register the custom ``--release`` command-line option with pytest.

    The option selects which federation member's configuration files the
    test fixtures load; it defaults to the staging deployment.
    """
    option_kwargs = {
        "default": "staging",
        "help": "Name of the federation member release. For loading configuration",
    }
    parser.addoption("--release", **option_kwargs)
def _helm_merge(a, b):
"""Merge two items, similar to helm
- dicts are merged
- lists and scalars are overridden without merge
- 'a' is modified in place, if a merge occurs
"""
if not (isinstance(b, dict) and isinstance(a, dict)):
# if either one is not a dict,
# there's no merging to do: use 'b'
return b
for key, value in b.items():
if key in a:
a[key] = _helm_merge(a[key], value)
else:
a[key] = value
return a
@pytest.fixture(scope="session")
def release(request):
    """Session-wide name of the federation member under test (``--release``)."""
    chosen = request.config.getoption("--release")
    return chosen
@pytest.fixture(scope="session")
def helm_config(release):
    """Combined helm values for the selected release.

    Merges the chart defaults with the release's config files, in order,
    using helm-style merging. Secret config files are never read.
    """
    merged = {}
    config_files = [repo_root / "mybinder/values.yaml"] + get_config_files(release)
    for path in config_files:
        if "secrets" in str(path):
            # never load secret config into test fixtures
            continue
        with open(path) as f:
            merged = _helm_merge(merged, yaml.safe_load(f))
    return merged
@pytest.fixture
def binder_url(helm_config):
    """https URL of the binderhub ingress; skips if binderhub is disabled."""
    if not helm_config["binderhubEnabled"]:
        pytest.skip("binderhub not enabled")
    host = helm_config["binderhub"]["ingress"]["hosts"][0]
    return "https://" + host
@pytest.fixture
def hub_url(helm_config):
    """JupyterHub URL as configured for BinderHub; skips if binderhub is disabled."""
    if not helm_config["binderhubEnabled"]:
        pytest.skip("binderhub not enabled")
    binderhub_cfg = helm_config["binderhub"]["config"]["BinderHub"]
    return binderhub_cfg["hub_url"]
@pytest.fixture
def federation_url(helm_config):
    """https URL of the federation redirector; skips when it is not enabled."""
    redirect = helm_config["federationRedirect"]
    if not redirect["enabled"]:
        pytest.skip("federationRedirect not enabled")
    return "https://" + redirect["host"]
@pytest.fixture
def static_url(helm_config):
    """https URL of the static-assets ingress."""
    hosts = helm_config["static"]["ingress"]["hosts"]
    return "https://" + hosts[0]
<file_sep>/docs/source/getting_started/index.rst
===============
Getting started
===============
These resources describe how to get started with the mybinder.org operations
team. It contains checklists of steps to take to make sure you have the right
permissions, as well as contextual information about the mybinder.org deployment.
.. toctree::
:maxdepth: 3
getting_started
production_environment
terminology
<file_sep>/terraform/gcp/README.md
# GKE Terraform deployment info
Common configuration for GKE is in terraform/modules/mybinder
most deployed things are in mybinder/resource.tf
variables (mostly things that should differ in staging/prod) in mybinder/variables.tf
per-deployment config in $deployment/main.tf
Variables declared in mybinder/variables.tf can be overridden in $deployment/main.tf
First you must login to `gcloud` to gain application credentials:
```bash
gcloud auth application-default login
```
Then, to deploy e.g. staging:
```bash
cd staging
terraform init
terraform apply
```
which will create a plan and prompt for confirmation.
Review the proposed changes and if they look right, type 'yes' to apply the changes.
## Getting secrets out
Terraform will create the service accounts needed for the deployment.
The private keys for these will need to be exported to `secrets/config/$deployment.yaml`.
**This part is not yet automated**.
To get a service-account key for deployment:
```bash
cd staging
terraform output -json private_keys | jq -r '.deployer' > ../../secrets/gke-auth-key-staging2.json
```
and to get private keys to put in secrets/config/${deployment}.yaml:
```bash
terraform output -json private_keys | jq '.["events-archiver"]' | pbcopy
```
with key names: "events-archiver", "matomo", and "binderhub-builder" and paste them into the appropriate fields in `secrets/config/$deployment.yaml`.
## Notes
- requesting previously-allocated static ip via loadBalancerIP did not work.
Had to manually mark LB IP as static via cloud console.
- sql admin API needed to be manually enabled [here](https://console.developers.google.com/apis/library/sqladmin.googleapis.com)
- matomo sql data was manually imported/exported via sql dashboard and gsutil in cloud console
- events archive history was manually migrated via `gsutil -m rsync` in cloud console
## OVH
The new OVH cluster is also deployed via terraform in the `ovh` directory.
This has a lot less to deploy than flagship GKE,
but deploys a Harbor (container image) registry as well.
### OVH Notes
- credentials are in `terraform/secrets/ovh-creds.py`
- token in credentials is owned by Min because OVH tokens are always owned by real OVH users, not per-project 'service account'.
The token only has permissions on the MyBinder cloud project, however.
- the only manual creation step was the s3 bucket and user for terraform state, the rest is created with terraform
- harbor registry on OVH is old, and this forces us to use an older
harbor _provider_.
Once OVH upgrades harbor to at least 2.2 (2.4 expected in 2022-12), we should be able to upgrade the harbor provider and robot accounts.
<file_sep>/images/tc-init/Dockerfile
# Minimal image whose entrypoint applies traffic-control rules via the
# bundled tc-init script.
FROM alpine:3.13
# iproute2 supplies the `tc` binary the script invokes
RUN apk add --no-cache iproute2
ADD tc-init /usr/local/bin/tc-init
CMD ["/usr/local/bin/tc-init"]
<file_sep>/images/tc-init/tc-init
#!/bin/sh
# Apply htb+sfq traffic shaping to the pod's network interface.
# ref https://wiki.archlinux.org/index.php/advanced_traffic_control
#
# Inputs (environment):
#   IFACE              interface to shape (default: eth0)
#   SRC_IP             pod IP (default: detected from IFACE)
#   EGRESS_BANDWIDTH   tc rate (e.g. "1mbit"); limit uploads from the pod
#   INGRESS_BANDWIDTH  tc rate; limit traffic matched by dst == SRC_IP
#   WHITELIST_CIDR     destination CIDR exempt from the egress limit
set -e

export IFACE=${IFACE:-eth0}
# export DST_IP=${DST_IP:-$KUBERNETES_SERVICE_HOST}
# NOTE(review): parses busybox ifconfig's "inet addr:" output format;
# confirm this still works if the base image changes.
export SRC_IP=${SRC_IP:-$(ifconfig ${IFACE} | grep 'inet addr' | cut -d: -f2 | awk '{print $1}')}

# POSIX [ ] tests: /bin/sh (busybox ash) may not support bash's [[ ]]
if [ -z "${EGRESS_BANDWIDTH}" ] && [ -z "${INGRESS_BANDWIDTH}" ]; then
    # nothing to throttle
    echo "EGRESS_BANDWIDTH and INGRESS_BANDWIDTH not set, skipping tc-init"
    exit 0
fi

# echo commands that we execute
set -x

# set up defaults: class 1:30 (100gbit, effectively unthrottled) is the
# default for traffic not matched by any filter below
tc qdisc add dev ${IFACE} root handle 1: htb default 30
tc class add dev ${IFACE} parent 1: classid 1:30 htb rate 100gbit
tc qdisc add dev ${IFACE} parent 1:30 handle 30: sfq perturb 10

# 10: rate-limit uploads from the container
if [ -n "${EGRESS_BANDWIDTH}" ]; then
    echo "Limiting uploads from ${SRC_IP} at ${EGRESS_BANDWIDTH}"
    tc class add dev ${IFACE} parent 1: classid 1:10 htb rate ${EGRESS_BANDWIDTH}
    tc qdisc add dev ${IFACE} parent 1:10 handle 10: sfq perturb 10
    if [ -n "${WHITELIST_CIDR}" ]; then
        # whitelist egress destinations that should not be throttled,
        # e.g. 10.0.0.0/8 for local connections
        tc filter add dev ${IFACE} parent 1: prio 1 protocol ip u32 match ip dst "${WHITELIST_CIDR}" flowid 1:30
    fi
    tc filter add dev ${IFACE} parent 1: prio 10 protocol ip u32 match ip src ${SRC_IP}/32 flowid 1:10
fi

# 20: rate-limit downloads
# not sure what the download rate does yet, if anything.
# it's definitely not downloads to the container from the outside
if [ -n "${INGRESS_BANDWIDTH}" ]; then
    echo "Limiting downloads from ${SRC_IP} at ${INGRESS_BANDWIDTH}"
    tc class add dev ${IFACE} parent 1: classid 1:20 htb rate ${INGRESS_BANDWIDTH}
    tc qdisc add dev ${IFACE} parent 1:20 handle 20: sfq perturb 10
    tc filter add dev ${IFACE} parent 1: prio 1 protocol ip u32 match ip dst ${SRC_IP}/32 flowid 1:20
fi
<file_sep>/docs/source/deployment/prereqs.md
# Pre-requisite technologies
The following are tools and technologies that mybinder.org uses. You should have
a working familiarity with them in order to make changes to the mybinder.org deployment.
This is a non-exhaustive list. Feel free to ask us questions on the gitter channel or
here if something specific could be clearer!
## Google Cloud Platform
MyBinder.org currently runs on Google Cloud. There are two Google Cloud projects
that we use:
1. `binder-staging` contains all resources for the staging deployment
2. `binder-prod` contains all resources for the production deployment
We'll hand out credentials to anyone who wants to play with the staging deployment,
so please just ask!
While you only need merge access in this repository to deploy changes, ideally
you should also have access to the two Google Cloud Projects so you can debug
things when deployments fail.
## Kubernetes
We heavily use [Kubernetes](https://kubernetes.io/) for the mybinder.org deployment, and it is important you
have a working knowledge of how to use Kubernetes. Detailed explanations are out
of the scope of this repository, but there is a good [list of tutorials](https://kubernetes.io/docs/tutorials/).
Specifically, going through the [interactive tutorial](https://kubernetes.io/docs/tutorials/kubernetes-basics/)
to get comfortable using `kubectl` is required.
## Helm
We use [helm](https://helm.sh) to manage our deployments, and it is important you
have a working knowledge of how to use helm. Detailed explanations are out of the
scope of this repository, but [docs.helm.sh](https://docs.helm.sh) is an excellent
source of information. At a minimum, you must at least understand:
- [What is a chart?](https://helm.sh/docs/chart_template_guide/getting_started/#charts)
- [What are values files?](https://helm.sh/docs/chart_template_guide/values_files/)
- [How do chart dependencies work?](https://helm.sh/docs/chart_template_guide/subcharts_and_globals/)
## Travis
We use [Travis CI](https://travis-ci.org/) for doing all our deployments. Our
`.travis.yml` file contains the entire configuration for our deployment. Travis CI
has documentation on the [various components of the `.travis.yml` file](https://docs.travis-ci.com/user/customizing-the-build/).
[mybinder.org]: https://mybinder.org
[staging.mybinder.org]: https://staging.mybinder.org
<file_sep>/scripts/datacenters.py
#!/usr/bin/env python3
"""
Collect CIDR ip ranges from data centers
Generates inputs for network policies to ban ingress
`cidrs_:name()` returns a list of CIDRs for a datacenter owner
Currently only collecting ipv4 addresses
"""
import ipaddress
import os
from html.parser import HTMLParser
import requests
import yaml
def cidrs_aws():
    """Return the CIDR prefixes published for AWS datacenters."""
    resp = requests.get("https://ip-ranges.amazonaws.com/ip-ranges.json")
    resp.raise_for_status()
    payload = resp.json()
    return [entry["ip_prefix"] for entry in payload["prefixes"]]
def cidrs_gcp():
    """Return the IPv4 CIDR prefixes published for Google Cloud datacenters."""
    resp = requests.get("https://www.gstatic.com/ipranges/cloud.json")
    resp.raise_for_status()
    cidrs = []
    for record in resp.json()["prefixes"]:
        try:
            cidrs.append(record["ipv4Prefix"])
        except KeyError:
            # every entry is either ipv4 or ipv6; ipv6 is skipped
            assert "ipv6Prefix" in record, f"Unexpected gcp record: {record}"
    return cidrs
class MicrosoftDownloadParser(HTMLParser):
    """Collect unique anchor links pointing at download.microsoft.com.

    After ``feed()``-ing HTML to an instance, ``self.links`` holds each
    matching ``href`` in first-seen order, without duplicates.
    """

    def __init__(self):
        super().__init__()
        self.links = []

    def handle_starttag(self, tag, attrs):
        if tag.lower() != "a":
            return
        for name, value in attrs:
            if name.lower() != "href":
                continue
            if value.startswith("https://download.microsoft.com") and value not in self.links:
                self.links.append(value)
def cidrs_azure():
    """Return the set of IPv4 CIDR prefixes published for Azure datacenters."""
    # Microsoft gates the JSON behind a human confirmation page, so first
    # scrape the real download link out of the confirmation HTML.
    confirm_page = requests.get(
        "https://www.microsoft.com/en-us/download/confirmation.aspx?id=56519"
    )
    confirm_page.raise_for_status()
    parser = MicrosoftDownloadParser()
    parser.feed(confirm_page.text)
    links = parser.links
    if len(links) != 1:
        raise ValueError(f"Expected exactly one download link, got {links}")
    # example: https://download.microsoft.com/download/7/1/D/71D86715-5596-4529-9B13-DA13A5DE5B63/ServiceTags_Public_20210208.json
    tags_resp = requests.get(links[0])
    tags_resp.raise_for_status()
    # use a set because azure reports the same CIDRs many times
    cidrs = set()
    for record in tags_resp.json()["values"]:
        for prefix in record["properties"]["addressPrefixes"]:
            # exclude ipv6 cidrs that look like fdf8:f53e:61e4::18/125
            if ":" not in prefix:
                cidrs.add(prefix)
    return cidrs
# Registry of datacenter owners: maps a short name (used in the generated
# filename) to the display message and the CIDR-collector function.
datacenters = {
    "aws": {
        "message": "AWS",
        "get_cidrs": cidrs_aws,
    },
    "gcp": {
        "message": "Google Cloud",
        "get_cidrs": cidrs_gcp,
    },
    "azure": {
        "message": "Azure",
        "get_cidrs": cidrs_azure,
    },
}
def generate_files():
    """Fetch datacenter CIDRs and write them out as helm config files.

    Writes one ``config/common/datacenter-<name>.yaml`` per provider,
    shaped as BinderHub ``ban_networks`` entries for the network policies.
    """
    out_dir = os.path.join("config", "common")
    os.makedirs(out_dir, exist_ok=True)
    for name, cfg in sorted(datacenters.items()):
        if name == "azure":
            # FIXME: skip azure until we work out
            # how to authorize test requests.
            # GitHub Actions run on Azure.
            continue
        message = cfg["message"]
        raw_cidrs = cfg["get_cidrs"]()
        print(f"Collected {len(raw_cidrs)} CIDRs for {message}")
        # Collapsing overlapping CIDRs shrinks the lists dramatically
        # (Azure in particular drops from ~32k to ~1800 entries) and also
        # gives proper numeric ordering rather than lexicographic string
        # sorting (i.e. 3.4.5.6 before 172.16.17.32).
        collapsed = sorted(
            ipaddress.collapse_addresses(
                ipaddress.ip_network(cidr) for cidr in raw_cidrs
            )
        )
        cidrs = [str(net) for net in collapsed]
        dest_file = os.path.join(out_dir, f"datacenter-{name}.yaml")
        print(f"Writing {len(cidrs)} CIDRs to {dest_file}")
        payload = {
            "binderhub": {
                "config": {
                    "BinderHub": {
                        "ban_networks": {cidr: message for cidr in cidrs},
                    },
                }
            }
        }
        with open(dest_file, "w") as f:
            yaml.dump(payload, f)
if __name__ == "__main__":
    # Run as a script: regenerate all datacenter CIDR config files.
    generate_files()
<file_sep>/docs/source/incident-reports/2018-02-20-jupyterlab-announcement.md
# 2018-02-20, JupyterLab Announcement swamps Binder
## Summary
The JupyterLab [announcement post](https://blog.jupyter.org/jupyterlab-is-ready-for-users-5a6f039b8906) drove a great deal of traffic to mybinder.org.
This caused several outages throughout the day from varying causes.
We prepared for this by temporarily increasing the mininum number of nodes.
After a deployment, most users were getting "Failed to create temporary user for gcr.io/binder-prod/" when attempting to launch their image. This was caused by a small bug that manifests only when large numbers of users are using Binder at the same time. The bug was identified and fixed, but due to logistical issues it caused mybinder to be unusable for about 1h50m, and unstable for ~1 day.
## Timeline
All times in CET (GMT+1)
### Feb 20 2018 14:17
JupyterLab announcement blog post goes live via medium, twitter. At this point, the per-repo limit is 300.
### 14:55
Autoscaling increases node count to 4 from 3 as intended. This results in a
slight increase in launch backlog while the new node is prepared (and the
jupyterlab image is pulled)
### 15:05
Autoscaling increases node count to 5. JupyterLab is very popular! Grafana
shows that JupyterLab is heading very quickly for the 300 limit. It is decided
to raise the per-repo limit to 500.
### 15:08
JupyterLab hits the rate limit of 300 and Grafana starts listing failed builds
due to the per-repo limit. The rate limit is behaving as intended.
[Action Item] launches that are rejected due to rate limiting are registered as a
'failed launch' in our launch success metric. This should instead be its own label.
### 15:10
[PR #428](https://github.com/jupyterhub/mybinder.org-deploy/pull/428) increases the per-repo limit to 500.
### 15:15
Deployment of PR #428 to production fails on Travis due to a timeout waiting for `helm upgrade` on prod. The `binder` pod never became available.
The travis deployment is restarted.
### 15:20
Travis deployment fails again, this time hanging during grafana annotation.
It is discovered that the grafana pod is unhealthy:
```
$ kubectl describe pod grafana...
Events:
Type Reason Age From Message
---- ------ ---- ---- -------
Warning Unhealthy 2m kubelet, gke-prod-a-ssd-pool-32-134a959a-vvsw Readiness probe failed: Get http://10.12.8.137:3000/login: dial tcp 10.12.8.137:3000: getsockopt: connection refused
Warning FailedSync 2m kubelet, gke-prod-a-ssd-pool-32-134a959a-vvsw Error syncing pod
Warning Failed 2m kubelet, gke-prod-a-ssd-pool-32-134a959a-vvsw Error: failed to start container "grafana": Error response from daemon: cannot join network of a non running container: 8babe89dbb28ea4c09f5490797b8bb2bd4e6298a8d79a04b8653febed86fec19
Warning Unhealthy 1m kubelet, gke-prod-a-ssd-pool-32-134a959a-vvsw Readiness probe failed: Get http://10.12.8.137:3000/login: net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)
Normal SandboxChanged 1m kubelet, gke-prod-a-ssd-pool-32-134a959a-vvsw Pod sandbox changed, it will be killed and re-created.
Normal Pulling 1m (x4 over 4h) kubelet, gke-prod-a-ssd-pool-32-134a959a-vvsw pulling image "grafana/grafana:4.6.3"
Normal Pulled 1m (x4 over 4h) kubelet, gke-prod-a-ssd-pool-32-134a959a-vvsw Successfully pulled image "grafana/grafana:4.6.3"
Normal Started 1m (x3 over 4h) kubelet, gke-prod-a-ssd-pool-32-134a959a-vvsw Started container
Normal Created 1m (x4 over 4h) kubelet, gke-prod-a-ssd-pool-32-134a959a-vvsw Created container
```
The grafana pod is deleted, as is the `binder` pod that failed to start.
### 15:32
Travis is retried once more, and succeeds this time.
Launch success begins to climb as JupyterLab pods rise from 300 to the new limit of 500.
Grafana pod dies and is restarted multiple times. This time by Kubernetes without intervention.
### 16:00
JupyterLab has hit the 500 user limit and is ?????
After returning to working order, ????
### 16:05
Launch success rate is at 0%. Something is wrong beyond load.
### 16:18
Hub is restarted to attempt to clear out bad state via `kubectl delete pod`
The hub comes back and promptly culls many inactive pods. However, there appears to be a problem in the culler itself. Every cull request fails due to 400 requests asking for an already-stopping server to stop again, resulting in the culler exiting. **The culler shouldn't exit when there is an error**
### 16:29
restart both hub and proxy pods
Everything's still failing. Binder requests to the Hub are failing with a timeout:
```
[E 180220 15:32:35 launcher:93] Error creating user jupyterlab-jupyterlab-demo-gjvedw6o: HTTP 599: Timeout while connecting
[W 180220 15:32:35 web:1588] 500 GET /build/gh/jupyterlab/jupyterlab-demo/18a9793b58ba86660b5ab964e1aeaf7324d667c8 (10.12.8.27): Failed to create temporary user for gcr.io/binder-prod/r2d-fd74043jupyterlab-jupyterlab-demo:<PASSWORD>
```
### 16:33
BinderHub is restarted, in case there is an issue in BinderHub itself.
After this restart, launches begin to succeed again. It appears that BinderHub was unable to talk to JupyterHub. It could be that the tornado connection pool was draining (this has happened before due to [a tornado bug](https://github.com/tornadoweb/tornado/pull/1582)).
It could also have been a kubernetes networking issue where pod-networking is no longer working.
### 16:40
Grafana pod restarted itself again. No indication as to why, but it could just be being reassigned to new nodes as the cluster resizes.
In hindsight, it is most likely because it only requests 100Mi of RAM and nothing more.
### 16:55
Launches begin failing again with the same 599: Timeout errors
### 17:00-18:00
Since BinderHub is reaching a timeout after several requests to the hub have accumulated
### 18:03
- cull jupyterlab pods older than 2 hours (103 pods)
- install pycurl on binderhub, which has been known to fix some timeout issues on jupyterhub underload
- revert per-repo limit back to 300 pods
### 18:10
Travis deployment [fails tests on prod](https://travis-ci.org/jupyterhub/mybinder.org-deploy/builds/343924455):
```
def test_hub_up(hub_url):
"""
JupyterHub url is up and returning sensible result (403)
"""
resp = requests.get(hub_url)
# 403 is expected since we are using nullauthenticator
# FIXME: Have a dedicated health check endpoint for the hub
> assert resp.status_code == 403
E assert 504 == 403
E + where 504 = <Response [504]>.status_code
```
due to networking failure on a node, but we don't know that yet.
(Note: I suspect this is due to JupyterHub being too busy, not because of node failure)
### 18:22
pycurl PR is reverted due to suspicion that it caused Service Unavailable errors.
It turns out this is not the case, the Hub really was unavailable due to bad networking state on at least one node.
Hub logs show:
```
[E 2018-02-20 17:18:21.889 JupyterHub app:1623]
Traceback (most recent call last):
File "/usr/local/lib/python3.6/dist-packages/jupyterhub/app.py", line 1620, in launch_instance_async
yield self.initialize(argv)
File "/usr/lib/python3.6/types.py", line 184, in throw
return self.__wrapped.throw(tp, *rest)
File "/usr/local/lib/python3.6/dist-packages/jupyterhub/app.py", line 1382, in initialize
yield self.init_spawners()
File "/usr/local/lib/python3.6/dist-packages/jupyterhub/app.py", line 1210, in init_spawners
self.users[orm_user.id] = user = User(orm_user, self.tornado_settings)
File "/usr/local/lib/python3.6/dist-packages/jupyterhub/user.py", line 178, in __init__
self.spawners[name] = self._new_spawner(name)
File "/usr/local/lib/python3.6/dist-packages/jupyterhub/user.py", line 208, in _new_spawner
spawner = spawner_class(**spawn_kwargs)
File "/usr/local/lib/python3.6/dist-packages/kubespawner/spawner.py", line 83, in __init__
self.api = client.CoreV1Api()
File "/usr/local/lib/python3.6/dist-packages/kubernetes/client/apis/core_v1_api.py", line 35, in __init__
api_client = ApiClient()
File "/usr/local/lib/python3.6/dist-packages/kubernetes/client/api_client.py", line 67, in __init__
self.pool = ThreadPool()
File "/usr/lib/python3.6/multiprocessing/pool.py", line 789, in __init__
Pool.__init__(self, processes, initializer, initargs)
File "/usr/lib/python3.6/multiprocessing/pool.py", line 174, in __init__
self._repopulate_pool()
File "/usr/lib/python3.6/multiprocessing/pool.py", line 239, in _repopulate_pool
w.start()
File "/usr/lib/python3.6/multiprocessing/dummy/__init__.py", line 48, in start
threading.Thread.start(self)
File "/usr/lib/python3.6/threading.py", line 846, in start
_start_new_thread(self._bootstrap, ())
RuntimeError: can't start new thread
```
Node cordoned, as it is suspected to be the culprit. Hub pod is deleted to be reassigned to a new node
### 18:30
to help recover, user pods older than one hour are deleted
### 18:37
nothing is responding to requests anymore, including prometheus, grafana, hub, binder. ingress controller pods are deleted to try to help. It helps!
Previously cordoned node is drained, as it is suspected of causing widespread outage.
### 18:40
Launch success rate is back to 100% after ~1hr of total downtime and ~4 hours of diminished capacity.
### 19:00
JupyterLab pods once again reach 300 user limit. Things seem to behave as intended at this point.
### 19:33
The repo quota is bumped to 303 in order to see if we can handle deployments under heavy load.
### 19:35
Notice that the launch rate now begins falling strongly
### 19:40
Note that logs show many `Failed to create temporary user for` errors
### 19:42
Note that `binder-` logs also show `Error creating user jupyterlab-jupyterlab-demo-7qptz8ws: HTTP 599: Connection timed out`
Also note that Grafana launch percentile plot has stopped reporting
### 19:45
Delete the Binder pod: `kubectl delete pod --namespace=prod binder-66fcc59fb9-58btx`
### 19:52
Note that Grafana launch percentile plot is back, launch success rate is back up. `binder-` pod is stable.
### 2X:XX (not sure exact time)
Same `connection timed out` errors popping up as before - launch
success rate is down to zero again.
### 21:57
Realization that the `hub-` pod is being overwhelmed by HTTP
requests due to the high traffic, and is locking up. This is causing the behavior.
### 22:12
All pods are deleted
### 22:30
Pods launching again, we are keeping an eye on the `hub-` pod CPU usage, which was _really_ high during the spikes in traffic (~125%).
Doing this with `kubectl --namespace=prod exec -it hub-989cc9bd-5qdcb /bin/bash` and then running `top`
### 22:43
CPU usage on the hub seems to be stabilized.
### 22:44
Another realization: if user pods were deleted while they were
still running their session, then HTTP requests would be sent to
"default", which was the `hub`. This was overwhelming the hub
even more. We should make "default" go to a 404 page rather than
hub. [Response codes for the hub](https://grafana.mybinder.org/render/dashboard-solo/db/main-dashboard?refresh=30s&orgId=1&from=1519107353593&to=1519193753593&panelId=4&width=1000&height=500&tz=UTC%2B01%3A00), note the switch from mostly 2xx to 3xx.
### 22:52 (maybe earlier?)
We have the idea to delete the routing table for the `hub-` pod so that it reduces the HTTP requests. This is done with the following commands.
- First, enter the `hub-` pod and start a python session with:
`kubectl --namespace=prod exec -it hub-989cc9bd-5qdcb /bin/bash`
then
`python`
- delete default HTTP route:
`requests.delete('http://proxy-api:8001/api/routes//', headers={'Authorization': 'token ' + os.environ['CONFIGPROXY_AUTH_TOKEN']})`
- add route for /hub/api:
`requests.post('http://proxy-api:8001/api/routes//hub/api', headers={'Authorization': 'token ' + os.environ['CONFIGPROXY_AUTH_TOKEN']}, json={'hub': True, 'target': 'http://10.15.251.161:8081', 'jupyterhub': True, 'last_activity': '2018-02-20T21:18:29.579Z'})`
We decide to _not_ experiment on live users right now.
Everything has been stable once the 100-users-per-repo
throttling had been re-established.
## Feb 21, 13:00
A more permanent fix for the proxy routes is applied to redirect all requests for stopped pods back to the Binder landing page ([PR #441](https://github.com/jupyterhub/mybinder.org-deploy/pull/441)) by adding a route on `/user/` in the configurable-http-proxy (CHP) routing table that is handled by an nginx instance redirecting all requests to the main deployment URL.
### 15:15
`/user/` route is updated to serve 404 on most URLs instead of redirecting to BinderHub, to avoid shifting request load to BinderHub.
### 16:20
many pods are failing to stop, causing an increase in failures. cordon nodes `-qz3m` and `-vvsm` as likely culprits due to lots of error logs attributed to these instances in VM logs.
note: need to find a better way to identify bad nodes
## Feb 22, 07:13
It is discovered that the hub's high CPU usage is due to a known bug in KubeSpawner with the Kubernetes Python client version 4.0 that was [fixed](https://github.com/jupyterhub/zero-to-jupyterhub-k8s/pull/462) in the jupyterhub helm chart v0.6. With kubernetes-client 4.0, each Spawner allocates four idle threads via a ThreadPool. After some time, the total thread count gets high enough (~7000) that CPU usage get very high, even when the threads are idle.
### 07:18
The jupyterhub chart version used by binderhub [is updated to v0.6](https://github.com/jupyterhub/binderhub/pull/463).
Once deployed, CPU usage of the Hub returns close to zero.
This high CPU usage in the Hub due to a flood of threads created by kubernetes-client is believed to be the root cause of most of our issues during this incident.
## Conclusions
The ultimate cause of this incident was a bug in a specific version of JupyterHub+KubeSpawner+kubernetes-client that causes unreasonably high CPU load under moderate traffic. This bug had been fixed weeks ago in the jupyterhub chart, but the fix was not deployed: BinderHub was using a development version of the jupyterhub chart from _prior_ to the latest stable release.
1. deploying a capacity increase during heavy load may not be a recipe for success, but this is inconclusive.
1. handling of slow shutdown needs work in jupyterhub
1. there is a bug in jupyterhub causing it to attempt to delete routes from the proxy that are not there. The resulting 404 is already fixed in jupyterhub, but the bug causing the incorrect _attempt_ is still undiagnosed.
1. grafana is regularly being restarted, which causes the page to be down. Since deployments now notify grafana of a deploy, this can prevent deploy success. It is a harmless failure in this case because if the grafana annotation fails, no deploy stages are attempted, so a Travis retry is quite safe.
1. culler has an issue where it exits if its request fails with 400
1. culler shouldn't be making requests that fail with 400
1. Deploying a change to `prod` under heavy load causes instability, in this case manifesting in
new users not being created.
1. Unclear if this instability was fixed by deleting `binder-`, or if this was just waiting for a
change to propagate.
1. JupyterHub was basically getting swamped because it was handling more HTTP requests by an order of magnitude or more. This was because of a few factors:
1. The aforementioned big bump in usage
    2. The "default" route points to the hub, so when a user's pod would get deleted and they'd continue doing stuff, all resulting requests went to the hub.
3. We don't have a mechanism for throttling requests on the hub
4. We only have a single hub that's handling all HTTP requests
5. There were cascading effects going on where errors would generate more HTTP requests that would worsen the problem.
1. some issues may have been attributable to unhealthy nodes, but diagnosing unhealthy nodes is difficult.
## Action Items
### JupyterHub
- Release JupyterHub 0.9 (or backport for 0.8.2), which has some known fixes for some of these bugs (https://github.com/jupyterhub/jupyterhub/issues/1676)
- Improve handling of spawners that are slow to stop https://github.com/jupyterhub/jupyterhub/issues/1677
- Investigating allowing deletion of users whose servers are slow to stop or fail to stop altogether https://github.com/jupyterhub/jupyterhub/issues/1677
- implement API-only mode for use cases like Binder (https://github.com/jupyterhub/jupyterhub/issues/1675)
### Zero-to-JupyterHub
cull_idle_servers:
- identify reason why 400 responses cause script to exit (https://github.com/jupyterhub/zero-to-jupyterhub-k8s/issues/522)
- avoid 400 responses by waiting for servers to stop before deleting users (https://github.com/jupyterhub/zero-to-jupyterhub-k8s/issues/522)
### BinderHub
- ensure pycurl is used, which is known to perform better with large numbers of requests than tornado's default SimpleAsyncHTTPClient (https://github.com/jupyterhub/binderhub/pull/460)
- Investigate timeout issue, which may be due to lack of pycurl, too many concurrent requests, or purely the overloaded Hub (https://github.com/jupyterhub/binderhub/issues/464)
- separate rejection code/metadata for launch failures due to repo limit vs. "regular" launch failures. Note: on investigation, we already do this so launch failures should _not_ include rejected launches.
- Figure out if there's a way to reduce the number of HTTP requests that are going to the JupyterHub (this became a problem w/ high load) (https://github.com/jupyterhub/binderhub/pull/461)
- Make it possible for Binder to launch multiple JupyterHubs and direct users through those hubs in a round-robin fashion (https://github.com/jupyterhub/binderhub/issues/465)
### Deployment
- Avoid sending requests for stopped pods to the Hub (which may overwhelm it if there's high load) (https://github.com/jupyterhub/mybinder.org-deploy/pull/444)
- Document ways to suspect and identify unhealthy nodes. At least some of the issues had to do with nodes that had become unhealthy, but diagnosing this was difficult. (https://github.com/jupyterhub/mybinder.org-deploy/issues/468)
- Come up with group guidelines for deploying changes under heavy loads. (https://github.com/jupyterhub/mybinder.org-deploy/issues/466)
- Investigate what are "expected" downtimes for a change to repo user limits, or other changes more broadly (https://github.com/jupyterhub/mybinder.org-deploy/issues/466)
- Find a way to limit HTTP requests to the JupyterHub in cases
of high load.
- "we should also monitor and alert on jupyterhub process > 70% CPU" (monitoring done [in grafana](https://grafana.mybinder.org/dashboard/db/components-resource-metrics))
- Move Grafana and other support services to an external cluster, so they are not affected by load in the main cluster. Our tools for debugging should not be affected by the bugs we are trying to debug (https://github.com/jupyterhub/mybinder.org-deploy/issues/438)
- Document clear processes for requesting limit raises and how they should be granted https://github.com/jupyterhub/mybinder.org-deploy/issues/438
- Fix cadvisor + prometheus setup so we properly get CPU / Memory statistics from cadvisor https://github.com/jupyterhub/mybinder.org-deploy/pull/449
- Make all infrastructure pods be in the `Guaranteed` QoS so they do not get restarted when resources get scarce
<file_sep>/images/analytics-publisher/archiver.py
"""
Read event logs from stackdriver & export them to Google Cloud Storage
"""
import argparse
import json
import tempfile
from datetime import datetime
from dateutil.parser import parse
from google.cloud import storage
def process_event(event):
    """
    Apply post-processing to a single event dict and return it.

    The only transformation currently applied is truncating the
    "timestamp" field (when present) to minute resolution, so that
    published data is harder to use for de-anonymizing users by
    correlating event timing.
    """
    if "timestamp" in event:
        # Drop seconds and microseconds before the event is made public.
        truncated = parse(event["timestamp"]).replace(second=0, microsecond=0)
        event["timestamp"] = truncated.isoformat()
    return event
def archive_events(
    project,
    log_name,
    source_bucket,
    destination_bucket,
    date,
    object_name_template="events-{date}.jsonl",
    debug=False,
    dry_run=False,
):
    """
    Archive one day's worth of exported stackdriver event blobs to GCS.

    Reads every blob under ``<log_name>/YYYY/MM/DD`` in ``source_bucket``,
    parses each line as a JSON log entry, normalizes legacy payload shapes,
    post-processes each event (see ``process_event``), sorts the events by
    timestamp, and uploads them as a single JSONL object to
    ``destination_bucket``.

    NOTE(review): the ``project`` parameter is not used in this body —
    presumably kept for CLI symmetry; TODO confirm before removing.

    Parameters:
        project: GCP project name (currently unused here).
        log_name: stackdriver log name; used as the blob prefix.
        source_bucket: GCS bucket containing stackdriver's raw exports.
        destination_bucket: GCS bucket to write the archived JSONL file to.
        date: datetime whose date component selects which day to archive.
        object_name_template: output object name; ``{date}`` is substituted
            with the date formatted as ``YYYY-MM-DD``.
        debug: if True, print each processed event.
        dry_run: if True, process and count events but upload nothing.
    """
    storage_client = storage.Client()
    src_bucket = storage.Bucket(storage_client, source_bucket)
    dest_bucket = storage.Bucket(storage_client, destination_bucket)
    # Stackdriver exports are laid out as <log_name>/YYYY/MM/DD/<shard>
    prefix = log_name + "/" + date.strftime("%Y/%m/%d")
    print(f"Finding blobs with prefix {prefix}")
    src_blobs = src_bucket.list_blobs(prefix=prefix)
    count = 0
    all_events = []
    for src_blob in src_blobs:
        # Download each shard to a temp file and parse it line by line.
        with tempfile.TemporaryFile(mode="wb+") as temp:
            src_blob.download_to_file(temp)
            temp.seek(0)
            for line in temp:
                event = json.loads(line)["jsonPayload"]
                # Account for time when 'message' was nested
                if "message" in event:
                    event.update(json.loads(event["message"]))
                    del event["message"]
                # Account for time when 'event' was nested
                if "event" in event:
                    event.update(event["event"])
                    del event["event"]
                event = process_event(event)
                if debug:
                    print(event)
                if not dry_run:
                    all_events.append(event)
                # count tracks every processed event, even on dry runs
                count += 1
    if not dry_run:
        # Timestamp is ISO8601 in UTC, so can be sorted lexicographically
        all_events.sort(key=lambda event: event["timestamp"])
        with tempfile.TemporaryFile(mode="w+") as out:
            for event in all_events:
                out.write(json.dumps(event) + "\n")
            out.seek(0)
            blob_name = object_name_template.format(date=date.strftime("%Y-%m-%d"))
            blob = dest_bucket.blob(blob_name)
            # Set metadata on the object so we know when this archive is for & how many events there are
            blob.metadata = {
                "Events-Date": date.strftime("%Y-%m-%d"),
                "Events-Count": len(all_events),
            }
            blob.upload_from_file(out)
            print(f"Uploaded {destination_bucket}/{blob_name} with {count} events")
def _build_argparser():
    """Construct the command-line parser for the archiver."""
    argparser = argparse.ArgumentParser()
    argparser.add_argument("project", help="Name of the GCP project to read logs from")
    argparser.add_argument("log_name", help="Name of log to read from")
    argparser.add_argument(
        "source_bucket", help="GCS bucket to read exported stackdriver events from"
    )
    argparser.add_argument(
        "destination_bucket", help="GCS bucket to write archived events to"
    )
    argparser.add_argument(
        "--date",
        help="Date to archive events for. Defaults to today",
        type=parse,
        default=datetime.utcnow().isoformat(),
    )
    argparser.add_argument(
        "--object-name-template",
        help="Template to use when outputting archived events. {date} is substituted",
        default="events-{date}.jsonl",
    )
    argparser.add_argument(
        "--debug",
        help="Print events when processing",
        action="store_true",
        default=False,
    )
    argparser.add_argument(
        "--dry-run",
        help="Do not upload processed events to GCS",
        action="store_true",
        default=False,
    )
    return argparser


def main():
    """Command-line entry point: parse arguments and archive one day of events."""
    args = _build_argparser().parse_args()
    # Forward options to archive_events in its positional parameter order.
    archive_events(
        args.project,
        args.log_name,
        args.source_bucket,
        args.destination_bucket,
        args.date,
        args.object_name_template,
        args.debug,
        args.dry_run,
    )


if __name__ == "__main__":
    main()
<file_sep>/docs/source/incident-reports/2019-04-03-ingress-cordoned.md
# 2019-04-03, 30min outage during node pool upgrade
## Summary
During a Kubernetes version upgrade all nodes running our ingress-controller
pods were cordoned. This went unnoticed and caused 40min of total outage.
## Timeline
All times in GMT+2
### 2019-04-03 12:50
Start of incident. The final two nodes in the old user node pool are cordoned.
### 13:28
Investigation starts after a user reported that mybinder.org was down.
### 13:29
The ingress controller pods were deleted and rescheduled on uncordoned
nodes. Service resumes. Incident ends.
## Lessons learnt
### What went well
List of things that went well. For example,
1. service was quickly restored once outage was reported
### What went wrong
Things that could have gone better. Ideally these should result in concrete
action items that have GitHub issues created for them and linked to under
Action items.
1. Outage went unnoticed for 40minutes
### Where we got lucky
These are good things that happened to us but not because we had planned for them.
For example,
1. A user reported the outage on gitter and someone was around to see it and
react to it
## Action items
These are only sample subheadings. Every action item should have a GitHub issue
(even a small skeleton of one) attached to it, so these do not get forgotten.
### Technical improvements
1. Update SRE guide to include guidance for moving ingress controller pods
2. Setup our ingress deployment to be robust against nodes being cordoned
<file_sep>/.github/ISSUE_TEMPLATE/repo_check.md
---
name: "\U00002705 Repo Check"
about: "If you're concerned if your repository is acceptable for mybinder.org, we can check your repo"
labels: question
---
<!--
🌟🌟🌟🌟🌟
Use this form to check if a repository may be liable for banning.
👉 Please answer all these questions.
🌟🌟🌟🌟🌟
-->
### A link to the repository
### Why are you concerned this repository may be liable for banning?
For example: it will have lots of users, it makes a lot of API calls, creates a lot of network traffic?
<file_sep>/docs/source/incident-reports/2018-03-13-PVC-hub-locked.md
# 2018-03-13, PVC for hub is locked
## Questions for follow up
-
## Summary
After a few hours we noticed that JupyterHub wasn't
spawning any new users. Upon investigation it seemed that
some nodes had gone bad. The JupyterHub pod was on one such node,
but wasn't being deleted properly. Since it wasn't culled, it couldn't "release" the PVC
that contained the hub db, which meant that new hub pods
could not access the db, resulting in the outage.
[link to Gitter incident start](https://gitter.im/jupyterhub/binder?at=5aa76f7de4ff28713a26bf63)
## Timeline
All times in CET
### 2018-03-13 07:28
Problem is identified
- mybinder launch success rate has been at zero for several hours now [https://grafana.mybinder.org/dashboard/db/main-dashboard?refresh=1m&orgId=1&panelId=17&fullscreen&from=1520900905664&to=1520922505664](https://grafana.mybinder.org/dashboard/db/main-dashboard?refresh=1m&orgId=1&panelId=17&fullscreen&from=1520900905664&to=1520922505664)
- lots of pods are in state "Unknown" and "NodeLost"
- pods in weird states are on at least these nodes `10.128.0.7` `10.128.0.5`
### 07:33
Attempts to cordon bad nodes to see if this helps things.
- `kubectl cordon gke-prod-a-ssd-pool-32-134a959a-n2sk` and `kubectl cordon gke-prod-a-ssd-pool-32-134a959a-6hmq`
- the hub pod (`hub-65d9f46698-dj4jb`) was in state `Unknown` and a second hub pod (`hub-65d9f46698-dmlcv`) in `ContainerCreating` since 5h -> deleted both to see if this reschedules them on a healthy node
### 07:39
- bhub pod can't talk to the jhub pod, but it can connect to google.com and github.com
- starting a new jhub pod is failing because the PVC is still claimed by another old pod, presumably that old pod is dead/lost in action and hence not releasing the claim
### 07:59
- manually deleted all pods on node `-n2sk`, will this help the node to release the PVC?
- (it does not)
- manually reset `gke-prod-a-ssd-pool-32-134a959a-n2sk` in the hope that this will force the release of the PVC. This is done in the GCP web user-interface
### 08:05
- `hub-65d9f46698-454cf` is now pulling its docker image, scheduled on `gke-prod-a-ssd-pool-32-134a959a-1j33`
- pod successfully scheduled and launched. running again, and the `binder-examples/r` repo successfully launches
- persistent plot of launch success rate dropping to zero https://grafana.mybinder.org/render/dashboard-solo/db/main-dashboard?refresh=1m&orgId=1&from=1520881680522&to=1520924880522&panelId=17&width=1000&height=500&tz=UTC%2B01%3A00
### 08:11
General cleanup because many pods are not identified by k8s. Deleting all pods in "Unknown" state.
### 08:17
- deleting pods in Unknown state does not seem to do anything. Pods remain listed.
- we assume the reason the pod is marked as "Unknown" is because k8s can't find out anything about it, which explains why it can't delete it. We will have to investigate what to do about those pods. Restarting the node seems to remove them but that feels pretty heavy handed
### 13:32
We realize that many other services are down, because they're running on pods that were attached to failed nodes.
The pods are in stated `Unknown` or `NodeLost`
### 13:34
Discovered that we can force-delete pods
in this state with:
`kubectl --namespace=prod delete pod <pod-name> --grace-period=0 --force`
Ran this code snippet to do so for all pods:
```python
from subprocess import check_output
out = check_output('kubectl get pod -o wide --namespace=prod'.split())
lines = out.decode().split('\n')
lines = [ii.split() for ii in lines]
df = pd.DataFrame(lines[1:], columns=lines[0])
lost_df = df.query('STATUS in ["Unknown", "NodeLost"]')
for nm in lost_df['NAME'].values:
cmd = 'kubectl --namespace=prod delete pod {} --grace-period=0 --force'.format(nm)
print('Deleting pod {}'.format(nm))
check_output(cmd.split())
```
This deleted all pods that were in `Unknown` or
`NodeLost` state. Services run on these pods then recovered.
### 2018-03-21 06:30
We discover that another node has entered "NodeLost" state. Grafana and Prometheus are both down as they are on this node. Many pods are in state "Unknown" or "NodeLost".
### 2018-03-21 06:31
Run:
- `kubectl cordon gke-prod-a-ssd-pool-32-134a959a-bmsw`
- `deleting all pods on the lost node, w/ state "NodeLost" or "Unknown"`
The grafana/prometheus pods that were _trying_ to start before still didn't (they had been in that state for many hours) so we deleted those pods to see if new ones worked.
This resolved the issue, however it deleted the prometheus data collected up to that point.
## Lessons learnt
### What went well
1. Once the issue was identified, the steps taken to resolve this problem were logged well and quickly escalated in their heavy-handedness as necessary. There was minimal hub downtime once the issue was identified.
### What went wrong
1. It took quite some time before we noticed this error. This is strange because we did not get a stackdriver email about this. The stackdriver emails seem to get marked as spam and the notification about this arrives hours later.
## Action items
### Investigation
Why did this outage start in the first place? Was there a rogue pod? GCE outage? This kind of major outage should not "just happen". Read Stackdriver logs, check Google Cloud incidents, other ideas.
### Process improvements
1. Make sure we have a non-stackdriver alerting service so we can catch these issues earlier (currently blocking on [#365](https://github.com/jupyterhub/mybinder.org-deploy/issues/365))
### Documentation improvements
1. Document how to check if a PVC hasn't been released so we can quickly identify this problem in the future.
2. Document how to manually restart a node if commands in general aren't working.
3. Document how to delete pods that have entered an "Unknown" state so k8s doesn't totally miss them. ([#512](https://github.com/jupyterhub/mybinder.org-deploy/pull/512))
### Technical improvements
1. Store the prometheus data somewhere more stable than its server pod. Otherwise whenever this pod restarts, we lose all the data.
<file_sep>/tests/test_build.py
import json
import os
import subprocess
import sys
import tempfile
import time
from contextlib import contextmanager
import pytest
import requests
@contextmanager
def push_dummy_gh_branch(repo, branch, keyfile):
    """
    Makes a dummy commit on a given github repo as a given branch

    Requires that the branch not exist. keyfile should be an absolute path.

    Should be used as a contextmanager, it will delete the branch & the
    clone directory when done.

    Parameters:
        repo: git clone URL (SSH) of the repository to push to.
        branch: name of the branch to create; must not already exist.
        keyfile: absolute path to the SSH private key used for pushing.
    """
    # Force git to authenticate with the provided deploy key only.
    git_env = {"GIT_SSH_COMMAND": f"ssh -i {keyfile}"}

    with tempfile.TemporaryDirectory() as gitdir:
        subprocess.check_call(["git", "clone", repo, gitdir], env=git_env)
        # A file whose content is the branch name makes every commit unique.
        branchfile = os.path.join(gitdir, "branchname")
        with open(branchfile, "w") as f:
            f.write(branch)
        subprocess.check_call(["git", "add", branchfile], cwd=gitdir)
        subprocess.check_call(
            ["git", "commit", "-m", f"Dummy update for {branch}"], cwd=gitdir
        )
        subprocess.check_call(
            ["git", "push", "origin", f"HEAD:{branch}"],
            env=git_env,
            cwd=gitdir,
        )
        try:
            yield
        finally:
            # Delete the branch so we don't clutter!
            # (pushing an empty ref "HEAD-less" source deletes the remote branch)
            subprocess.check_call(
                ["git", "push", "origin", f":{branch}"],
                env=git_env,
                cwd=gitdir,
            )
@pytest.mark.timeout(498)
def test_build_binder(binder_url):
    """
    We can launch an image that we know hasn't been built

    Pushes a never-before-seen branch to a test repo so BinderHub must do a
    full build (not just a launch), then follows the build's event stream
    until the server is ready and verifies the notebook server responds.
    """
    # A fresh timestamp-named branch guarantees no cached image exists.
    branch = str(time.time())
    repo = "binderhub-ci-repos/cached-minimal-dockerfile"
    # NOTE(review): the clone URL below looks redacted/garbled
    # ("<EMAIL>:/") — presumably it should be the repo's SSH URL; confirm.
    with push_dummy_gh_branch(
        f"<EMAIL>:/{repo}.git",
        branch,
        os.path.abspath("secrets/binderhub-ci-repos-key"),
    ):

        build_url = binder_url + f"/build/gh/{repo}/{branch}"
        print(f"building {build_url}")
        # The build endpoint streams server-sent events ("data:" lines).
        r = requests.get(build_url, stream=True)
        r.raise_for_status()
        for line in r.iter_lines():
            line = line.decode("utf8")
            if line.startswith("data:"):
                data = json.loads(line.split(":", 1)[1])
                # include message output for debugging
                if data.get("message"):
                    sys.stdout.write(data["message"])
                if data.get("phase") == "ready":
                    notebook_url = data["url"]
                    token = data["token"]
                    break
        else:
            # This means we never got a 'Ready'!
            assert False

        # The launched server should answer an authenticated API request.
        headers = {"Authorization": f"token {token}"}
        r = requests.get(notebook_url + "/api", headers=headers)
        assert r.status_code == 200
        assert "version" in r.json()

        # Explicitly shut the server down so we don't leave pods running.
        r = requests.post(notebook_url + "/api/shutdown", headers=headers)
        assert r.status_code == 200
<file_sep>/scripts/README.md
# scripts for managing mybinder.org
## delete-old-images.py
This script talks to the docker registry API and tries to delete old images.
The script has the following conditions to check:
- images that don't match the build prefix (e.g. changing the BinderHub.image_prefix in configuration)
- limiting the number of builds stored for a given repo (`--max-builds`)
- deleting all images older than a certain date (`--delete-before`)
Use:
```bash
python3 scripts/delete-old-images.py prod --dry-run
```
This script is not run automatically.
## prune_harbor.py
Harbor registries have far more powerful garbage collection and image retention policies than the basic docker registry implementation,
or Google Container Registry.
The only missing piece is removing _repositories_ that have no artifacts in them,
which seems to affect the performance of those policies and other aspects of harbor.
This script only deletes repositories with no images in them.
Right now, this script is run for our harbor-using federation members
(currently OVH),
via the [prune-harbor workflow](../.github/workflows/prune-harbor.yaml).
Credentials are found in `secrets/{name}-harbor.yaml`.
Use:
```bash
python3 scripts/prune_harbor.py ovh2
```
<file_sep>/docs/source/deployment/what.md
# What does a MyBinder.org deployment do?
This document tries to explain _what_ is going on when a deployment
to mybinder.org happens. For _how_ to do a deploy, please see [how](how).
The deployment happens in various **stages**, each of which comprise of
a series of **steps**. Each step of the deployment is
controlled by `.travis.yml`, which should be considered the authoritative
source of truth for deployment. _If this document disagrees with it, `.travis.yml` is correct!_
If any of the steps in any stage fails, all following steps
are canceled and the deployment is marked as failed.
## Stage 1: Installing deployment tools
### Step 1: Install all the things!
#### Background
Deployment requires the following tools to be installed. Note:
since deployments are handled with _Travis CI_, you don't
need them on your local computer.
1. [`gcloud`](https://cloud.google.com/sdk/)
mybinder.org currently runs on [Google Cloud](https://cloud.google.com)
in a [Google Kubernetes Engine](https://cloud.google.com/kubernetes-engine/)
cluster. We need `gcloud` to authenticate ourselves to this cluster.
2. [`helm`](https://helm.sh)
`helm` is the package manager for Kubernetes. We use this for actually installing
and upgrading the various components running mybinder.org (BinderHub, JupyterHub,
extra mybinder.org-specific services, etc)
3. [`kubectl`](https://kubernetes.io/docs/reference/kubectl/)
`kubectl` is the canonical command line client for interacting with the Kubernetes
API. We primarily use it to explicitly wait for our deployment to complete
before running our tests.
4. [`pytest`](https://docs.pytest.org)
We use `pytest` to verify that our deployment successfully completed, by running
a series of end-to-end tests (present in the `tests/` directory) against the
new deployment. This makes sure that both builds and launches are working,
and is an important part of giving us confidence to do continuous deployment.
5. [`git-crypt`](https://github.com/AGWA/git-crypt)
We have a bunch of secrets (in `secrets/`) in our deployment - things like
cloud credentials, etc. We use `git-crypt` to keep them in this repository
in an encrypted form. We use the [encrypted travis file](https://docs.travis-ci.com/user/encrypting-files/)
for our repository to store the `git-crypt` decryption key.
#### What happens
- All of the tools above are installed. We use the `before_deploy` section
in `.travis.yml` to install these, mostly so we get nice log folding. The only exception
is the `pytest` installation - that is in the `install` section, so we can leverage
[travis caching](https://docs.travis-ci.com/user/caching/) to speed up our deploys.
#### What could go wrong?
All **Stage 1** failures can be attributed to one of the following causes:
1. Network connections from Travis are being flaky, leading to failed installations
This is the most likely cause of Stage 1 failures. When this happens, we have no choice
but to restart the Travis Build.
If a restart also fails, there are two possible reasons:
1. Travis is having some infrastructure issues. Check the [Travis Status Page](https://www.traviscistatus.com/)
to see if this is the case.
2. The method we are using to install any of these bits of software is
having issues - either it no longer works due to some changes to the software, or
the software installer is depending on things that are having temporary difficulties.
Look at which software installation is failing, and debug that!
2. The commit we are trying to deploy modified `.travis.yml`, and introduced a bug / typo.
The person who wrote the PR modifying `.travis.yml` should debug what
the error is and fix it in a subsequent PR.
## Stage 2: Configuring deployment tools
### Step 1: Decrypting secrets
#### Background
The following secrets are present in encrypted form in the repository:
1. Secret config for the helm charts (under `secrets/config`). These contain various
deployment secrets for staging and prod, such as proxy tokens, registry authentication,
etc.
2. [Google Cloud Service Accounts](https://cloud.google.com/compute/docs/access/service-accounts)
for both the staging and production Google cloud projects
(as `secrets/gke-auth-key-staging.json` and `secrets/gke-auth-key-prod.json`).
These have a [custom Role](https://cloud.google.com/iam/docs/understanding-roles)
called `travis-deployer` that gives them _just_ the permissions needed to do
deployments.
3. A [GitHub deploy key](https://developer.github.com/v3/guides/managing-deploy-keys/)
for the [binderhub-ci-repos/cached-minimal-dockerfile](https://github.com/binderhub-ci-repos/cached-minimal-dockerfile)
repo (as `secrets/binderhub-ci-repos-deploy-key`). This is used in our tests to force the
deployed binderhub to do a build + launch, rather than just a launch (via
`tests/test_build.py`)
The `git-crypt` symmetric key needed to decrypt these secrets is `travis/crypt-key.enc`,
encrypted with Travis's [encrypted file](https://docs.travis-ci.com/user/encrypting-files/)
support. Travis only supports one encrypted file per repo, and these are one-way encrypted
only (you can not get plain text back easily!), forcing us to use `git-crypt`.
#### What happens?
1. Decrypt the `git-crypt` key with the travis-provided `openssl` command
2. Decrypt all other secrets with the `git-crypt` key
At the end of this step, all the secrets required for a successful deployment
are available in unencrypted form.
#### What could go wrong?
1. Someone has used the `travis encrypt-file` command for this repository, overwriting
the current travis encryption key (which is used to decrypt the `git-crypt` encryption
key), and committed this change. This causes issues because `travis encrypt-file`
can only encrypt one file per repo, so if you encrypt another file the first file
becomes undecryptable.
This will manifest as an error from the `openssl` command.
The simplest fix is to revert the PR that encrypted another file. `git-crypt`
should be used instead for encrypting additional files.
### Step 2: Setting up Helm
#### Background
We use _helm charts_
to configure mybinder.org. We use charts both from the
[official kubernetes charts repository](https://github.com/helm/charts),
as well as the [JupyterHub charts repository](https://jupyterhub.github.io/helm-chart).
#### What happens
To set up helm to do the deployment, we do the following:
1. Set up the helm client, allowing it to create the local config files it needs
to function.
2. Set up the JupyterHub charts repository for use with this helm installation,
and fetch the latest chart definitions.
3. Fetch all the dependencies of the `mybinder` deployment chart with the versions
specified in `mybinder/Chart.yaml`, and store them locally to ready them
for deployment.
At the end of this step, `helm` has been fully configured to do deployment of our
charts.
#### What could go wrong?
1. Invalid version for a dependency in `mybinder/Chart.yaml`
This manifests as an error from `helm dep up` that looks like the following:
```
Error: Can't get a valid version for repositories <dependency>. Try changing the version constraint in Chart.yaml
```
`<dependency>` in the above error message should point to the erroring dependency
whose version needs to be fixed.
If this happens for the **binderhub** dependency, the most common reason is that
you have not waited long enough after merging a PR in the binderhub repo before
bumping the version here. Make sure the version of binderhub is visible in
https://jupyterhub.github.io/helm-chart before merging a PR here.
### Step 3: Tell Grafana our deployment is starting
We create an [annotation](https://grafana.com/docs/grafana/latest/dashboards/annotations/)
in Grafana, recording the fact that a deployment is starting.
This is very useful when looking at dashboards, since you can see
the effects of deployments in various metrics.
## Stage 3: Deploy to staging
We have a [staging environment](https://staging.mybinder.org) that is configured
exactly like production, but smaller (to control costs). We use this to test all
deployments before they hit the production mybinder.org website.
### Step 1: Set up and do the helm upgrade
#### What happens
We use the `deploy.py` script to do the helm deployment. This script does the
following:
1. Use the Google Cloud Service Accounts we decrypted in Stage 2, Step 1 to get
a valid [~/.kube/config](https://kubernetes.io/docs/concepts/configuration/organize-cluster-access-kubeconfig/)
file. This file is used by both `helm` and `kubectl` to access the cluster.
2. Use `helm upgrade`
to actually do the deployment. This deploys whatever changes the commit has -
new chart versions, changes to configuration, new repo2docker version, etc.
We have a ten minute timeout here.
3. We use `kubectl rollout` to wait for all [Deployment](https://kubernetes.io/docs/concepts/workloads/controllers/deployment/)
and [DaemonSet](https://kubernetes.io/docs/concepts/workloads/controllers/daemonset/)
objects to be fully ready. Theoretically the `--wait` param to `helm upgrade` does
this - but it is not complete enough for our use case.
Once we have verified that all the `Deployment` and `DaemonSet` objects are ready,
the helm deployment is complete!
#### What could go wrong?
1. YAML formatting issue in one of the config files
YAML syntax can be finicky sometimes, and fail in non-obvious ways. The most common
error is the presence of tab characters in YAML, which will make them always fail.
Learn X in Y Minutes also has a nice [guide on YAML](https://learnxinyminutes.com/docs/yaml/).
You can also use [yamllint](https://github.com/adrienverge/yamllint) locally to validate
your YAML files.
Remember to **not** copy paste any secret files into online YAML Linting applications
for linting! That could possibly compromise mybinder.org.
2. Kubernetes cluster is having difficulties
This is usually manifested by either `helm` or `kubectl` reporting connection errors.
3. Bugs in helm itself
Fairly rare, but bugs in helm itself might cause failure.
4. Severe bugs in the version of binderhub, jupyterhub or any of the dependencies deployed.
This will usually manifest as a `kubectl rollout` command hanging forever. This is
caused by a bug in the component that `kubectl rollout` is waiting for constantly
crashing, unable to stay up.
Looking at what component it is, and perhaps in the logs, would help!
### Step 2: Validate the deployment
#### What happens
We run the tests in `tests/` with `pytest` to validate that the deployment succeeded.
These try to be as thorough as possible, simulating the tests a human would do to
ensure that the site works as required.
Look at the docstrings in the files under [`tests/`](https://github.com/jupyterhub/mybinder.org-deploy/tree/HEAD/tests)
to see what are the tests being run.
If all the tests succeed, we can consider the staging deployment success!
#### What could go wrong?
1. Bugs in the version of binderhub or jupyterhub deployed, causing any of the tests
in `tests/` to fail.
The output should tell you which test fails. You can look at the docstring for the
failing test to understand what it was testing, and debug from there.
## Stage 4: Deploy to production
After deploying to `staging` and validating it with tests, we have a reasonable amount of confidence
that it is safe to deploy to production. Production deploy has the exact same steps as
`staging`, but targets production (branch and namespace `prod`) instead of staging.
<file_sep>/tests/test_federation_redirect.py
import requests
def test_active_hosts(helm_config, federation_url):
    """The redirector's /active_hosts endpoint returns JSON containing an 'active_hosts' key."""
    response = requests.get(federation_url + "/active_hosts")
    response.raise_for_status()
    payload = response.json()
    # 'empty' is a valid state, so we only assert the key exists,
    # not anything about its contents
    assert "active_hosts" in payload
def test_proxy_page(helm_config, federation_url):
    """The redirector's landing page serves the expected 'How it works' content."""
    response = requests.get(federation_url)
    response.raise_for_status()
    assert "How it works" in response.text
<file_sep>/docs/source/deployment/index.rst
========================
Deployment and Operation
========================
.. toctree::
:maxdepth: 2
prereqs
how
what
<file_sep>/mybinder/files/etc/jupyter/jupyter_notebook_config.py
import os
from distutils.version import LooseVersion as V

import notebook

# Serve the Binder-specific page templates alongside the notebook defaults.
c.NotebookApp.extra_template_paths.append("/etc/jupyter/templates")

# Notebook versions before 5.1.0 need the jinja2 i18n extension enabled
# explicitly for translations to work.
if V(notebook.__version__) < V("5.1.0"):
    c.NotebookApp.jinja_environment_options = {"extensions": ["jinja2.ext.i18n"]}

# Binder launch metadata is injected into the pod as environment variables;
# fall back to empty strings when running outside a Binder launch.
launch_host = os.environ.get("BINDER_LAUNCH_HOST", "")
request_path = os.environ.get("BINDER_REQUEST", "")
persistent_request_path = os.environ.get("BINDER_PERSISTENT_REQUEST", "")

# Disable JITSI integration for now
jitsi_url = ""

# Expose the launch metadata to the notebook's jinja templates.
c.NotebookApp.jinja_template_vars.update(
    {
        "binder_url": launch_host + request_path,
        "persistent_binder_url": launch_host + persistent_request_path,
        "repo_url": os.environ.get("BINDER_REPO_URL", ""),
        "ref_url": os.environ.get("BINDER_REF_URL", ""),
        "jitsi_url": jitsi_url,
    }
)
<file_sep>/docs/source/incident-reports/2017-09-29-504.md
# 2017-09-29, 504
## Summary
mybinder.org was failing to launch any images.
Building worked fine, but launching would fail with 504 Timeout on `/run?`, served by nginx.
Further, not visible to users, some idle users were unable to be deleted.
## Timeline
All times in CEST
### Sep 29 2017 08:59
https://github.com/jupyterhub/binderhub/issues/140 is opened, reporting failure to build due to "Stream disconnection"
### 14:00
while investigating the above issue, test builds were performed of the affected repo.
Builds succeeded (failing to reproduce the issue), but launching the image failed with nginx 504 Timeout. No image could be launched.
After retrieving the pods for the Hub log, several errors were discovered:
- The `Spawner.start()` method is not returning, as indicated by the logs:
binder-testing-xyz's server failed to start in 300 seconds, giving up
This is the first stage in launching, and purely under `KubeSpawner`'s control.
JupyterHub does not get a chance to proceed to check if the server is running.
- Inspecting the pod that failed to start reveals that it is indeed running and responsive.
At the same time, other errors are in the logs:
- Some Spawners had entered a permanent `stop_pending` state, as indicated by the cull-idle service failing to delete users because they were 'pending stop'. Logs:
400 DELETE /hub/api/users/binder-testing-chcr2h3j (192.168.3.11): binder-testing-chcr2h3j's server is in the process of stopping, please wait.
- inspecting the logs reveals that the pod has been deleted, but `Spawner.stop` has not returned.
- I believe this has been reported before here: https://github.com/jupyterhub/jupyterhub/issues/1420
I believe all of these are reflecting a bug in the KubeSpawner reflector missing events and never recovering (see https://github.com/jupyterhub/kubespawner/pull/81 for a possible fix).
Additionally, there were several errors related to the reflector connection, which are likely related to the unhandled events:
```
2017-09-29 00:46:54,810 WARNING Retrying (Retry(total=2, connect=None, read=None, redirect=None, status=None)) after connection broken by 'NewConnectionError('<urllib3.connection.VerifiedHTTPSConnection object at 0x7fd66401bd68>: Failed to establish a new connection: [Errno 111] Connection refused',)': /api/v1/namespaces/beta/pods?labelSelector=heritage%3Djupyterhub%2Ccomponent%3Dsingleuser-server
[E 2017-09-29 00:46:54.814 JupyterHub reflector:113] Error when watching pods, retrying in 25.6s
<snip>
urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='x.x.x.x', port=443): Max retries exceeded with url: /api/v1/namespaces/beta/pods?labelSelector=heritage%3Djupyterhub%2Ccomponent%3Dsingleuser-server (Caused by NewConnectionError('<urllib3.connection.VerifiedHTTPSConnection object at 0x7fd666225748>: Failed to establish a new connection: [Errno 111] Connection refused',))
```
### 14:10
hub pod is relaunched to clear invalid state.
Service is back to normal, though no actual fixes have been applied.
The same issue is expected to return.
## Action Items
### BinderHub
1. Removing the redirect page will get rid of the 504 timeout issue
by redirecting directly from the event-streams on the build page
[Pull Request](https://github.com/jupyterhub/binderhub/pull/135).
2. Diagnose the original [issue](https://github.com/jupyterhub/binderhub/issues/140),
which still has not been identified.
### KubeSpawner
1. Fix known issue of PodReflector missing events
[Pull Request](https://github.com/jupyterhub/kubespawner/pull/81).
2. Investigate further possibilities of reflector failure and recovery
[Issue](https://github.com/jupyterhub/kubespawner/issues/85)
3. Unrecoverable PodReflector errors should abort the Hub
[Pull Request](https://github.com/jupyterhub/kubespawner/pull/86).
The primary cause of these issues is the loss of events on the KubeSpawner PodReflector.
https://github.com/jupyterhub/kubespawner/pull/81 fixes at least one known case.
### Deployment
1. Implement health monitoring and alerting to respond more quickly to these problems [Issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/19).
2. Automatic recovery could potentially relaunch the Hub pod when it enters an unhealthy state.
<file_sep>/images/federation-redirect/test_rendezvous.py
from collections import Counter
from app import rendezvous_rank
def test_50_50_split():
    """Two equally weighted buckets should each win roughly half the keys."""
    picks = Counter(
        rendezvous_rank([("a", 1.0), ("b", 1.0)], "key-%i" % i)[0] for i in range(100)
    )
    # rendezvous hashing is deterministic, so the split is always exactly this
    assert picks["a"] == 52
    assert picks["b"] == 48
def test_80_20_split():
    """An 80/20 weighting should produce an approximately 80/20 split."""
    picks = Counter(
        rendezvous_rank([("a", 0.8), ("b", 0.2)], "key-%i" % i)[0] for i in range(100)
    )
    # rendezvous hashing is deterministic, so the split is always exactly this
    assert picks["a"] == 75
    assert picks["b"] == 25
def test_100_0_split():
    """A bucket with zero weight must never be selected, whatever the other weight is."""
    picks = Counter(
        rendezvous_rank([("a", 0.8), ("b", 0.0)], "key-%i" % i)[0] for i in range(100)
    )
    assert picks == Counter({"a": 100})
<file_sep>/tests/test_http.py
"""Basic HTTP tests to make sure things are running"""
import pprint
import pytest
import requests
def test_binder_up(binder_url):
    """The BinderHub landing page is up and serving sensible content."""
    response = requests.get(binder_url)
    assert response.status_code == 200
    # the landing page always mentions GitHub as a provider
    assert "GitHub" in response.text
def test_hub_health(hub_url):
    """The JupyterHub health endpoint reports healthy."""
    response = requests.get(hub_url + "/hub/api/health")
    # log the body so failures are easy to diagnose from CI output
    print(response.text)
    assert response.status_code == 200
def test_binder_health(binder_url):
    """The BinderHub health endpoint reports healthy."""
    response = requests.get(binder_url + "/health")
    # log the JSON body so failures are easy to diagnose from CI output
    pprint.pprint(response.json())
    assert response.status_code == 200
# the proxy-patches pod can take up to 30 seconds
# to register its route after a proxy restart
@pytest.mark.flaky(reruns=3, reruns_delay=10)
def test_hub_user_redirect(hub_url):
    """Hub URLs for non-running users return the Binder 'not found' page."""
    # a user path should *not* redirect for now
    response = requests.get(hub_url + "/user/doesntexist")
    assert response.status_code == 424
    assert "Binder not found" in response.text
    # non-user paths fall through to a plain 404 with the same page
    response = requests.get(hub_url + "/other/doesntexist")
    assert response.status_code == 404
    assert "Binder not found" in response.text
@pytest.mark.parametrize(
    "name",
    [
        "badge.svg",
        "badge_logo.svg",
    ],
)
def test_static(name, static_url):
    """Static assets are served successfully."""
    response = requests.get(f"{static_url}/{name}")
    assert response.status_code == 200
<file_sep>/post-grafana-annotation.py
#!/usr/bin/env python3
"""
Script to post [Grafana Annotations](https://grafana.com/docs/grafana/latest/dashboards/annotations/)
This is primarily used to annotate deployments in Grafana,
which can be very useful when displayed alongside various graphs.
This script requires:
- An environment variable GRAFANA_API_KEY with a grafana
[API Key](https://grafana.com/docs/grafana/latest/http_api/auth/#create-api-token)
with at least Editor permissions
- The requests library
"""
import argparse
import os
import time
import requests
def create_annotation(grafana_url, grafana_api_key, tags, text, timeout=30):
    """
    Create an annotation in a grafana instance.

    Parameters
    ----------
    grafana_url : str
        Base URL of the Grafana instance, without a trailing slash.
    grafana_api_key : str
        Grafana API key with at least Editor permissions.
    tags : list of str
        Tags to attach to the annotation.
    text : str
        Body text of the annotation.
    timeout : float, optional
        Seconds to wait for the Grafana API before giving up, so an
        unresponsive Grafana instance cannot hang a deployment forever.

    Returns
    -------
    str
        Raw response body returned by the Grafana annotations API.
    """
    return requests.post(
        grafana_url + "/api/annotations",
        json={
            "tags": tags,
            "text": text,
            # Grafana expects the timestamp in epoch milliseconds
            "time": int(time.time() * 1000),
            "isRegion": False,
        },
        headers={"Authorization": f"Bearer {grafana_api_key}"},
        timeout=timeout,
    ).text
def main():
    """Parse command-line arguments and post a Grafana annotation, printing the API response."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--grafana-url", help="URL of the grafana instance to use")
    parser.add_argument(
        "--tag",
        help="Tags to add to the annotation",
        default=[],
        action="append",
        dest="tags",
    )
    parser.add_argument("text", help="Text to use for the annotation")
    args = parser.parse_args()

    # GRAFANA_API_KEY must be set in the environment (see module docstring)
    response = create_annotation(
        args.grafana_url, os.environ["GRAFANA_API_KEY"], args.tags, args.text
    )
    print(response)


if __name__ == "__main__":
    main()
<file_sep>/docs/source/getting_started/production_environment.md
# Production environment
This section is an overview of the repositories, projects, and
systems used in a mybinder.org production deployment.
Reference: [Google SRE book section on Production Environment](https://sre.google/sre-book/production-environment/)
## Repository structure
This repository contains a 'meta chart' (`mybinder`) that fully captures the
state of the deployment on mybinder.org. Since it is a full helm chart, you
can read the [official helm chart structure](https://docs.helm.sh/developing_charts/#the-chart-file-structure)
document to know more about its structure.
## Dependent charts
The core of the meta-chart pattern is to install a bunch of [dependent charts](https://docs.helm.sh/developing_charts/#chart-dependencies),
specified in `mybinder/Chart.yaml`. This contains both support
charts like nginx-ingress, grafana, prometheus, but also the core application chart
`binderhub`. Everything is version pinned here.
## Configuration values
The following files fully capture the state of the deployment for staging:
1. `mybinder/values.yaml` - Common configuration values between prod &
staging
2. `secret/config/staging.yaml` - Secret values specific to the staging
deployment
3. `config/staging.yaml` - Non-secret values specific to the staging
deployment
The following files fully capture the state of the production deployment:
1. `mybinder/values.yaml` - Common configuration values between prod &
staging
2. `secret/config/prod.yaml` - Secret values specific to the production
deployment
3. `config/prod.yaml` - Non-secret values specific to the production
deployment
**Important**: For maintainability and consistency, we try to keep the contents
of `staging.yaml` and `prod.yaml` super minimal - they should be as close
to each other as possible. We want all common config in `values.yaml` so testing
on staging gives us confidence it will work on prod. We also never share the same
secrets between staging & prod for security boundary reasons.
## Deployment nodes and pools
The staging cluster has one node pool, which makes things simple.
The production cluster has two, one for "core" pods (the hub, etc.)
and another dedicated to "user" pods (builds and user servers).
This strategy helps protect our key services from potential issues caused by users and helps us drain user nodes when we need to.
Since ~only user pods should be running on the user nodes,
cordoning that node should result in it being drained and reclaimed
after the max-pod-age lifetime limit
which often wouldn't happen without manual intervention.
It is still _not quite true_ that only user pods are running on the user nodes at this point.
There can be some pods such as heapster and kube-dns that may run on user nodes,
and need to be manually removed from the node after cordoning before the autoscaler will allow culling.
In the future, when we implement a pod packing strategy and node taints,
nodes could get reclaimed truly automatically without any intervention,
but we are not there yet.
Users and core pods are assigned to their pools via a `nodeSelector` in `config/prod.yaml`.
We use a custom label `mybinder.org/node-purpose = core | user`
to select which node a pod should run on.
## mybinder.org specific extra software
We sometimes want to run additional software for the mybinder deployment that
does not already have a chart, or would be too cumbersome to use with a chart.
For those cases, we can create kubernetes objects directly from the `mybinder`
meta chart. You can see an example of this under `mybinder/templates/redirector`
that is used to set up a simple nginx based HTTP redirector.
## Related repositories
Related repositories used by the [mybinder.org][] service are:
1. [binderhub][]
This contains the [binderhub][] code (UI & hub management) & helm chart.
To change the UI / UX or hub management aspects of [mybinder.org][],
go to [binderhub][].
2. [repo2docker][]
This is used to do the actual building of git repositories into docker
images, and can be used standalone too. If you want to change how a git
repository is converted into a docker image to be run for the user,
go to [repo2docker][].
## The Deployment Helm Meta Chart
BinderHub is deployed using a Kubernetes Helm Chart, which is a specification
for instructing Kubernetes how to deploy particular applications. Sometimes,
applications depend on others in order to function properly, similar to how
a package might depend on other packages (e.g., Pandas depends on Numpy).
These dependencies are specified with a Helm "Meta Chart".
For example, let's say that you'd like to begin using Prometheus in your
Kubernetes deployment. Since Prometheus has a helm chart for deploying it
on Kubernetes, we can add it as a dependency in a Helm Meta Chart. We'd
create a section called `dependencies` in `mybinder/Chart.yaml` and put the
following in it:
```yaml
dependencies:
- name: prometheus
version: 11.16.9
repository: https://prometheus-community.github.io/helm-charts
```
This also allows us to pin a _version_ of Prometheus, which improves
reliability of the site.
```{note}
It is still possible to deploy each of these applications on their own *without*
a Meta Helm Chart, this is simply a way of clustering dependencies together
and simplifying the deployment structure.
```
Another benefit of Meta Charts is that you can use a single configuration
file (`config.yaml`) with multiple Helm Charts. For example, look at the
BinderHub Helm Chart. Note that there are multiple
top-level sections (e.g., for [jupyterhub](https://github.com/jupyterhub/mybinder.org-deploy/blob/5aa6dde60c9b5f3012686f9ba2b23b176c19b224/mybinder/values.yaml#L53) and for [prometheus](https://github.com/jupyterhub/mybinder.org-deploy/blob/5aa6dde60c9b5f3012686f9ba2b23b176c19b224/mybinder/values.yaml#L204)) and that each section
has a corresponding entry in the Helm Meta Chart. In this way, we can provide
configuration for each dependency of BinderHub without needing a separate
file for each, and we can deploy them all at the same time.
## HTTPS configuration for `mybinder.org`
Using HTTPS requires having a signed certificate. BinderHub uses [kube-lego](https://github.com/jetstack/kube-lego),
a tool that obtains and deploys a free _Let's Encrypt_ certificate automatically.
This section describes how to use `kube-lego` to configure and deploy HTTPS support.
`kube-lego` provides 90 day SSL certificates for `mybinder.org` through
the [letsencrypt](https://letsencrypt.org/) service. As the 90
day cycle nears its end, `kube-lego` will automatically request a new
certificate and configure the kubernetes deployment to use it.
`kube-lego` is a kubernetes application, with its own Helm Chart that is
referenced in the [`mybinder.org` Meta Chart](https://github.com/jupyterhub/mybinder.org-deploy/blob/5aa6dde60c9b5f3012686f9ba2b23b176c19b224/mybinder/values.yaml#L152). This tells kubernetes which
account to use for letsencrypt certification.
Once we have a letsencrypt account set up, we need to attach the SSL
certificate to a particular `ingress` object. This is a Kubernetes object
that controls how traffic is routed _into_ the deployment. This is also
done with the `mybinder.org` Helm Chart ([see here for example](https://github.com/jupyterhub/mybinder.org-deploy/blob/5aa6dde60c9b5f3012686f9ba2b23b176c19b224/mybinder/values.yaml#L13)).
Note that letsencrypt will send you an email if your SSL certificate is
about to expire. If you get such an email, it might mean that the automatic
`kube-lego` renewal process hasn't worked properly. To debug this, we
recommend running the standard Kubernetes debugging commands with the
`kube-lego` object used with your deployment. For example:
```
kubectl --namespace=<my-namespace> logs <kube-lego-object>
```
### Exceptions on the OVH cluster
On the OVH cluster all the binder components use a specific certificate on `*.mybinder.ovh` domain.
Traffic for `ovh.mybinder.org` is redirected with a CNAME on `binder.mybinder.ovh`. That's why the OVH cluster should be able to serve 2 different certificates.
- The `*.mybinder.ovh` certificate is managed by ingresses in the ovh helm configuration.
- The `ovh.mybinder.org` certificate is managed by a specific ingress and `kube-lego` on the launch of `deploy.py` on the ovh stack.
## Secrets
Since we use this repo for deployment, it needs credentials for things like our
google cloud account, and secret tokens for our helm charts. Since this is a
public repo, we don't want these credentials to be readable in public! To solve
this, we use [git-crypt][] to store _encrypted_ versions of files that should
be kept secret. These files are in the `secrets` folder. `git-crypt` uses a
shared secret to encrypt and decrypt files. For automated deployments, Travis
has access to the git-crypt secret in an encrypted environment variable. If you
don't need to edit the secret files, then you don't need the git-crypt secret,
or to see the contents of the secret files. When you clone, you will just have
the opaque, encrypted files. If you need access to view or edit the encrypted
files, you will need the git-crypt secret. See below for a procedure to share
the secret. Once you have unlocked the repo with `git-crypt`, you will be able
to view and edit the encrypted files as if they were any other file. `git-crypt` handles the encryption and decryption transparently via git filters.
### Sharing secrets
Sharing secrets is tricky! There is a handy tool called [ssh-vault][] which
allows you to securely share information via a mechanism we all have available
here: ssh public keys on GitHub!
To securely share the git-crypt key, both parties should have git-crypt and
ssh-vault. On mac, these are both available from homebrew:
brew install git-crypt ssh-vault
To encrypt the key with ssh-vault, pipe the key file through `ssh-vault create`. Assuming you are in a mybinder.org-deploy directory that is already
setup with git-crypt:
```bash
[sender] $ cat .git/git-crypt/keys/default | ssh-vault -u receiver create
```
where `receiver` is the recipient's GitHub username, e.g. `willing` or
`choldgraf`.
The result should look something like this:
```
SSH-VAULT;AES256;30:40:9b:bd:16:26:f6:d2:1d:85:7a:dc:63:c9:e6:ae
<KEY>
EoTGNMwI=
```
The sender can deliver this encrypted copy to the receiver, via less secure
transport mechanism, such as a gitter private message, SMS, email, etc.
The receiver can now decrypt the message with ssh-vault and use it to unlock
the mybinder.org-deploy repo. Assuming the shared message has been saved to a
file `encrypted-key`:
```bash
[receiver] $ cat encrypted-key | ssh-vault view | git-crypt unlock -
# remove the encrypted temporary file
[receiver] $ rm encrypted-key
```
If your ssh key requires a passphrase then the above might not work. Below is a
method that works, but creates an intermediate file containing the human-readable
text. Make sure this file is secure and not discoverable by others!
If you have `ssh-vault` >= v0.12.4 you can run the following:
```
ssh-vault -o clear-git-crypt-key view encrypted-key
git-crypt unlock clear-git-crypt-key
rm clear-git-crypt-key
```
This solves the problem that `ssh-vault` prints the passphrase prompt to
standard out as well as the decrypted key. Make sure to delete `clear-git-crypt-key`,
which contains the clear text git-crypt key.
On a mac, you can use `pbcopy` and `pbpaste` to use the clipboard instead of
creating files:
```bash
[sender] $ cat .git/git-crypt/keys/default | ssh-vault -u receiver create | pbcopy
# the encrypted message is in sender's clipboard
# deliver it to the receiver, and once it is in their clipboard:
[receiver] $ pbpaste | ssh-vault view | git-crypt unlock -
```
### Who has the keys?
People who currently have the git-crypt secret include:
- [@minrk at GitHub](https://github.com/minrk/)
- [@yuvipanda at GitHub](https://github.com/yuvipanda/)
- [@choldgraf at GitHub](https://github.com/choldgraf/)
- [@mael-le-gal at GitHub](https://github.com/mael-le-gal/)
- [@sgibson91 at GitHub](https://github.com/sgibson91/)
- [@bitnik at GitHub](https://github.com/bitnik/)
- [@arnim at GitHub](https://github.com/arnim/)
- [@MridulS at GitHub](https://github.com/MridulS/)
- [@callummole at GitHub](https://github.com/callummole/)
- [@rgaiacs at GitHub](https://github.com/rgaiacs/)
- _add yourself here if you have it_
Contact one of them if you need access to the git-crypt key.
[mybinder.org-deploy]: https://github.com/jupyterhub/mybinder.org-deploy
[prod]: https://mybinder.org
[mybinder.org]: https://mybinder.org
[staging.mybinder.org]: https://staging.mybinder.org
[staging]: https://staging.mybinder.org
[binderhub]: https://github.com/jupyterhub/binderhub
[`jupyterhub/binderhub`]: https://github.com/jupyterhub/binderhub
[binderhub documentation]: https://binderhub.readthedocs.io/en/latest/
[repo2docker]: https://github.com/jupyterhub/repo2docker
[git-crypt]: https://github.com/AGWA/git-crypt
[ssh-vault]: https://github.com/ssh-vault/ssh-vault
<file_sep>/images/tc-init/README.md
# tc-init
Kubernetes init container for throttling bandwidth with tc.
Currently, only egress bandwidth limits work (sending data out of the container).
## First things first: don't use this
...if you don't have to!
Kubernetes has [annotations](https://github.com/kubernetes/kubernetes/blob/v1.8.4/pkg/util/bandwidth/utils.go#L38)
that _should_ enable you to accomplish bandwidth limits without any help:
```yaml
spec:
annotations:
- kubernetes.io/egress-bandwidth: 1M
- kubernetes.io/ingress-bandwidth: 10M
```
Unfortunately, I've found that many network implementations do not respect these annotations (as of kubernetes 1.8, anyway),
so test if these more official annotations work before adopting tc-init.
## Using tc-init
Add tc-init to your containers as an initContainer.
It will run `tc` and limit the egress bandwidth to EGRESS_BANDWIDTH.
The value is passed to [tc](https://lartc.org/manpages/tc.txt),
e.g `10mbit` for ten megabits per second,
or `5mbps` for five mega*bytes* per second.
For example:
```yaml
spec:
initContainers:
- name: tc-init
image: jupyterhub/mybinder.org-tc-init
env:
- name: EGRESS_BANDWIDTH
value: 1mbit
securityContext:
capabilities:
add:
- NET_ADMIN
```
<file_sep>/docs/source/operation_guide/grafana_plots.md
# Some useful Grafana plots
Below are some links to various plots from grafana, an explanation of what they show and why they are useful to check.
> Many of the plots on grafana have a drop-down menu called "cluster" with three options referring to the different clusters mybinder.org operates on:
>
> - "prometheus" refers to the GKE cluster (US)
> - "OVH prometheus" refers to the OVH cluster (EU)
> - "default" is the sum across the clusters
- [Launch/Build Success Rate](https://grafana.mybinder.org/d/3SpLQinmk/1-overview?refresh=1m&orgId=1&var-cluster=default&panelId=16&fullscreen) - This chart is the main indicator that mybinder.org is healthy. If there is a problem, a dropping success rate indicates that it's impacting users. The caveat is that it's an indicator of _current_ status: if builds or launches are failing due to timeouts then it can take a long time to show up, as the failure is only reported once it's completed failing. Including retries, this can take several minutes. The [Launch Success Rate](https://grafana.mybinder.org/d/fZWsQmnmz/pod-activity?refresh=1m&panelId=9&fullscreen&orgId=1&var-cluster=prometheus) graph shows similar statistics but also includes the number of launch attempts. This can tell us if no-one tried to launch a repo, or if a lot of people tried but failed. Occasionally, large spikes in launch attempts happen which can be interesting to investigate.
- [Number of Pods per Node](https://grafana.mybinder.org/d/nDQPwi7mk/node-activity?orgId=1&var-cluster=prometheus&panelId=26&fullscreen&refresh=1m) - This graph shows which nodes are serving the mybinder.org users. This can indicate when auto-scaling may need a helping hand, for example, if one node has had a low load for a long time, it may need to be manually cordoned and drained because a pod is stuck on it.
- [Number of User Pods over time](https://grafana.mybinder.org/d/fZWsQmnmz/pod-activity?refresh=1m&panelId=3&fullscreen&orgId=1&var-cluster=default) - This graph shows how many user pods have been running on mybinder.org. If this seems to be larger than usual, it could be indicative of something interesting, such as posting a blog post or a link on HackerNews that generates a lot of traffic.
- [Popular repositories](https://grafana.mybinder.org/d/fZWsQmnmz/pod-activity?refresh=1m&panelId=1&fullscreen&orgId=1&var-cluster=prometheus) - This graph shows the most popular repos by number of launches in the last hour and, for example, we can see classes that use mybinder.org.
<file_sep>/docs/source/operation_guide/federation.rst
.. _mybinder-federation:
===========================
The mybinder.org Federation
===========================
The current status of the mybinder.org federation can be found `here <https://mybinder.readthedocs.io/en/latest/about/status.html>`__.
Adding or removing a federation member
--------------------------------------
The following files contain references to the federation,
and should be updated when a federation member is added or removed:
#. pages for https://mybinder.readthedocs.io: `status <https://github.com/jupyterhub/mybinder.org-user-guide/blob/HEAD/doc/about/status.rst>`_ and `federation info <https://github.com/jupyterhub/mybinder.org-user-guide/blob/HEAD/doc/_data/support/federation.yml>`_
#. `deployment to the cluster <https://github.com/jupyterhub/mybinder.org-deploy/blob/main/.github/workflows/cd.yml>`_
#. `testing of the cluster configuration <https://github.com/jupyterhub/mybinder.org-deploy/blob/main/.github/workflows/test-helm-template.yaml>`_
#. membership in `federationRedirect.hosts config for prod <https://github.com/jupyterhub/mybinder.org-deploy/blob/7aa58e033efe1ed1cee1b5cb7e789c1296deb36a/config/prod.yaml#L220>`__
#. add/remove data source for the cluster's prometheus at https://grafana.mybinder.org
#. if outside the default Google Cloud project, make sure launches are published to the events archive:
- If not deployed from this repo, publishing events to the archive is configured `here <https://github.com/jupyterhub/mybinder.org-deploy/blob/339ccb1de8107dc7854cac45f0a5b6e99937a91b/mybinder/values.yaml#L200-L219>`__
- GKE clusters don't need further configuration, but outside GKE (or outside our GCP project, maybe?) need a service account.
These accounts are configured `in terraform <https://github.com/jupyterhub/mybinder.org-deploy/blob/339ccb1de8107dc7854cac45f0a5b6e99937a91b/terraform/gcp/prod/main.tf#L17>`__, and can be retrieved via `terraform output events_archiver_keys`.
For OVH, a secret is added to the chart `here <https://github.com/jupyterhub/mybinder.org-deploy/blob/main/mybinder/templates/events-archiver/secret.yaml>`__ and mounted in the binder pod `here <https://github.com/jupyterhub/mybinder.org-deploy/blob/339ccb1de8107dc7854cac45f0a5b6e99937a91b/config/ovh2.yaml#L25-L34>`__ (in our chart, the secret itself is added to `eventsArchiver.serviceAccountKey <https://github.com/jupyterhub/mybinder.org-deploy/blob/339ccb1de8107dc7854cac45f0a5b6e99937a91b/mybinder/values.yaml#L555-L557>`__ helm config, in secrets/config/ovh2.yaml).
Temporarily removing a federation member from rotation
------------------------------------------------------
There are a few reasons why you may wish to remove a Federation member from
rotation. For example, maintenance work, a problem with the deployment, and so
on.
There are 3 main files you may wish to edit in order to remove a cluster from the Federation:
#. *Required.* Set the ``binderhub.config.BinderHub.pod_quota`` key to ``0`` in the
cluster's config file under the `config <https://github.com/jupyterhub/mybinder.org-deploy/tree/HEAD/config>`_
directory
#. *Recommended.* Set the ``weight`` key for the cluster to ``0`` in the
`helm chart values file <https://github.com/jupyterhub/mybinder.org-deploy/blob/7aa58e033efe1ed1cee1b5cb7e789c1296deb36a/config/prod.yaml#L220>`_
in order to remove it from the redirector's pool
#. *Optional.* Comment out the cluster from the
`continuous deployment <https://github.com/jupyterhub/mybinder.org-deploy/blob/4f42d791f92dcb3156e7c4ea92a236246bbf9135/.github/workflows/cd.yml#L168>`_
file
<file_sep>/docs/source/deployment/how.md
# How to deploy a change to mybinder.org?
This document explains **how** to deploy a change to mybinder.org.
For information on what exactly a deployment does, see [what](what).
When a new change has landed in either [BinderHub](https://github.com/jupyterhub/binderhub)
or [repo2docker](https://github.com/jupyterhub/repo2docker), they need to be explicitly
deployed to mybinder.org for users to benefit from them. This is the most common kind of
change deployed to mybinder.org.
Upgrades to BinderHub and repo2docker are automatically managed by the [Watch Dependencies GitHub](https://github.com/jupyterhub/mybinder.org-deploy/blob/main/.github/workflows/watch-dependencies.yaml) workflow.
Follow the instructions below if you need to manually update BinderHub or Repo2docker.
## Deployment policy
Deployments to mybinder.org should be:
1. **Safe**. We will have good, user friendly tooling + lots of safeguards,
so people can deploy without fear of accidentally breaking the site.
2. **Straightforward**. We want a lot of people to be involved in maintaining mybinder.org,
so we must make sure deployments are also easy to do. Most deployments should
not require specific sysadmin knowledge.
3. **Timely**. We deploy changes to repo2docker and BinderHub within a few days of
them being merged into main.
These are all **aspirational** - we strive for hitting the above points,
but recognize that work and life may get in the way of doing this perfectly.
## Updating BinderHub
This section explains how to upgrade the mybinder.org deployment after
merging a PR in the BinderHub repo.
BinderHub, the Helm chart, is a dependency for the local `mybinder` Helm chart.
The version of the BinderHub Helm chart is declared in `mybinder/Chart.yaml`.
Upgrading the version of BinderHub that is used in mybinder.org corresponds to
updating the BinderHub Helm chart version, which we step through below.
1. Merge changes to BinderHub.
2. Wait for the [Publish helm chart and docker images workflow on the main branch](https://github.com/jupyterhub/binderhub/actions/workflows/publish.yml?query=branch%3Amain) to complete successfully.
3. Lookup the latest BinderHub chart dev version on https://hub.jupyter.org/helm-chart/#development-releases-binderhub
4. In your fork of the [mybinder.org-deploy](https://github.com/jupyterhub/mybinder.org-deploy) repository, open `mybinder/Chart.yaml` and change `version` in the `binderhub` section of `dependencies` to the latest BinderHub chart dev version.
5. Open a pull request to merge this change into the main branch of the
mybinder.org-deploy repository, following the steps in [Deploying a change](deploying-a-change).
## Updating repo2docker
This section explains how to upgrade the mybinder.org deployment after
merging a PR in the [repo2docker](https://github.com/jupyterhub/repo2docker) repo.
BinderHub uses a docker image with repo2docker in it. When a new commit is merged in
the repo2docker repository, a new version of this image is pushed. We then configure
BinderHub to use the newly built image (which is identified by a tag) by editing `values.yaml`.
1. Merge changes to repo2docker.
2. Wait for the [Publish helm chart and docker images workflow on the main branch](https://github.com/jupyterhub/repo2docker/actions/workflows/release.yml?query=branch%3Amain) to complete successfully.
3. Lookup the latest Repo2docker tag on https://quay.io/repository/jupyterhub/repo2docker?tab=tags
4. In your fork of the mybinder.org-deploy repository, open `mybinder/values.yaml` and change the tag in `binderhub.config.KubernetesBuildExecutor.build_image` to the latest tag.
5. Open a pull request to merge this change into the main branch of the
mybinder.org-deploy repository, following the steps in [Deploying a change](deploying-a-change).
(deploying-a-change)=
## Deploying a change
### Deploying to _both_ `staging` then `prod`
Deploying a change involves making a PR with your desired change and merging it to
main.
1. Make the changes as described above [on your fork of this repo](https://github.com/jupyterhub/mybinder.org-deploy).
2. Keep track of the **hashes** that were updated. You should have both the _old_ hash that
was replaced, and the _new_ hash that replaced it.
3. If you haven't already, run the `list_new_commits.py` script in the `scripts/`
folder. This will print out a URL that describes the changes made to both
BinderHub and repo2docker.
4. Make a PR to the `main` branch with the changes you want.
- Name the PR like so: `<TOOL-CHANGED>: <OLD-HASH>...<NEW-HASH>`
- In the description of the PR, paste the full URL that you printed out
`list_new_commits.py`. It should have the following form:
https://github.com/jupyterhub/<REPO-NAME>/compare/<OLD-HASH>...<NEW-HASH>
5. Review, accept, and merge this PR. This will make GitHub Actions deploy the changes
to [staging.mybinder.org](https://staging.mybinder.org), and run tests in the `tests/`
directory against it. **In this case, you can merge your own PR**. Note that if the
PR is a large change to the Kubernetes setup, this may take some time, and GitHub Actions may
time-out in the process. If this happens and you _expect_ it to happen, you can restart
the build a few times.
6. If the tests succeed, the change will be deployed to mybinder.org.
7. If the tests fail, the change will _not_ be deployed to mybinder.org.
You must then investigate why it failed. **If you can
not figure out a cause in about 10 minutes, revert the change.**
You can revert the change with [the GitHub UI](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/incorporating-changes-from-a-pull-request/reverting-a-pull-request) and immediately
merge the reversion PR that GitHub creates.
8. Troubleshoot and make changes to your fork. Repeat the process from Step 1.
### Deploying to _only_ `staging`
Sometimes you want to test out a deployment live before you make a deployment to `prod`.
There are a few ways we can achieve this.
#### Testing Pull Requests from **branches** of `mybinder.org-deploy`
This simplest way to deploy a PR to staging only is to apply the `test-staging` label to an open PR. This will trigger GitHub Actions to deploy the changes in the PR to the staging cluster **only**.
```{note}
If you need to re-deploy the changes in a PR to staging only, then the label will need to be removed and then re-added.
```
#### Testing Pull Requests from **forks** of `mybinder.org-deploy`
If the PR has been made from a fork of the repo, the labelling approach discussed in the previous section will fail due to a lack of access to secrets.
In this scenario, a user with `OWNER`, `COLLABORATOR` or `MEMBER` association with the `mybinder.org-deploy` repo can leave a `/test-this-pr` comment on the PR to trigger a deploy to staging.
#### Editing staging config directly
The final option to deploy to staging only is by editing `staging`-only config files. To deploy
to staging only, follow these steps:
1. Make changes to [`config/staging.yaml`](https://github.com/jupyterhub/mybinder.org-deploy/blob/HEAD/config/staging.yaml)
on your fork. This file contains configuration for Helm that will **override**
whatever is in `mybinder/values.yaml`.
2. Make a PR to the `main` branch, and review, accept, and merge this PR.
**In this case, you can merge your own PR**.
This will make GitHub Actions deploy the changes
to [staging.mybinder.org](https://staging.mybinder.org), and run tests in the `tests/`
directory against it. Because we've only edited `staging.yaml`, **it will not
be deployed to `prod`**.
3. If the tests succeed, you can check out the new behavior at `staging.mybinder.org`.
4. If the tests fail, the deployer must investigate why it failed. **If they can
not figure out a cause in about 10 minutes, revert the change.**
The build should not remain broken for more than ten minutes.
5. Troubleshoot and make changes to your fork. Repeat the process from Step 1.
6. If you are satisfied with these changes, **revert** the change to `config/staging.yaml`,
and **apply** those same changes to `mybinder/values.yaml`. Now follow the
steps in the section above to deploy to **both** `staging` and `prod`.
The [what](what) document has more details on common ways deployments can go
wrong, and how to debug them.
## Changing the mybinder.org infrastructure
Sometimes we need to make changes to the mybinder.org core infrastructure.
These are changes to the infrastructure that don't directly touch binderhub or
repo2docker, and often require more expertise. Examples for these include:
1. Upgrading nginx Ingress controller
2. Re-configuring our prometheus servers
3. Upgrading to a new JupyterHub release
4. Re-configuring autoscaling for the cluster
5. Doing a kubernetes master upgrade.
These changes require a different kind of review than deploying code. In this
case, ensure that you have a fellow member of the mybinder.org operations
team to assist in case something goes wrong.
<file_sep>/docs/source/getting_started/terminology.rst
==============================
Terminology for the deployment
==============================
This page contains common words or phrases in the dev-ops community that will
be useful in understanding and maintaining the ``mybinder.org`` deployment.
.. _term-cordoning:
"cordoning" a node
------------------
`Kubernetes page on cordoning <https://kubernetes.io/docs/concepts/architecture/nodes/#manual-node-administration>`_.
Sometimes you want to ensure that **no new pods** will be started on a given
node. Usually this is because you suspect the node has a problem with it, or
you wish to remove the node but need it to be free of pods first.
Cordoning the node tells Kubernetes to make it **unschedulable**, which means new
pods won't start on the node. It is common to wait several hours, then manually
remove any remaining pods on the node before removing it manually.
<file_sep>/docs/source/incident-reports/template-incident-report.md
# Template for reports
# {{ incident date: yyyy-mm-dd }}, {{ incident name }}
## Summary
Quick summary of what user impact was, how long it was, and how we fixed it.
## Timeline
All times in {{ most convenient timezone}}
### {{ yyyy-mm-dd hh:mm }}
Start of incident. First symptoms, possibly how they were identified.
### {{ hh:mm }}
Investigation starts.
### {{ hh:mm }}
More details.
## Lessons learnt
### What went well
List of things that went well. For example,
1. We were alerted to the outage by automated bots before it affected users
2. The staging cluster helped us catch this before it went to prod
### What went wrong
Things that could have gone better. Ideally these should result in concrete
action items that have GitHub issues created for them and linked to under
Action items. For example,
1. We do not record the number of hub spawn errors in a clear and useful way,
and hence took a long time to find out that was happening.
2. Our culler process needs better logging, since it is somewhat opaque now
and we do not know why restarting it fixed it.
### Where we got lucky
These are good things that happened to us but not because we had planned for them.
For example,
1. We noticed the outage was going to happen a few minutes before it did because
we were watching logs for something unrelated.
## Action items
These are only sample subheadings. Every action item should have a GitHub issue
(even a small skeleton of one) attached to it, so these do not get forgotten.
### Process improvements
1. {{ summary }} [link to github issue]
2. {{ summary }} [link to github issue]
### Documentation improvements
1. {{ summary }} [link to github issue]
2. {{ summary }} [link to github issue]
### Technical improvements
1. {{ summary }} [link to github issue]
2. {{ summary }} [link to github issue]
<file_sep>/images/federation-redirect/README.md
# Federation Redirector
The **Federation Redirector** redirects incoming HTTP traffic to one of a
group of BinderHubs. It is used to implement a federation of hubs for
mybinder.org.
It is a Tornado web server that redirects traffic on a small set of predefined
endpoints to a randomly chosen hub, the redirect target.
The list of potential redirect targets is configured at start-up. Each target
consists of a hostname, a weighting factor and a URL to check the hubs health.
For each request a weighted random choice of all healthy targets is made and
the visitor is redirected there.
Periodically the redirector will `GET` the health check URL for a target. If
it returns an error (if `Docker registry` or `JupyterHub API` is unhealthy)
or the target is over its quota (`Pod quota`),
the target is removed from the list of healthy targets.
Unhealthy targets are checked less frequently but once they return to good
health they are automatically added back to the list of viable targets.
The redirect response contains a short lived cookie that is used to remember
the choice of target. This means that subsequent visits from the same user
agent will be directed to the same target.
## Running tests
To run the automated tests for the redirector change to this directory in
your terminal and run `pytest`. This should find the tests in
`test_rendezvous.py`.
## How to update requirements.txt
Because `pip-compile` resolves `requirements.txt` with the current Python for
the current platform, it should be run on the same Python version and platform
as our Dockerfile.
Note that as of 2022-05-30, `pip-compile` has issues with `pycurl`, but we
workaround them by by omitting the `-slim` part from the image in the command
below.
```shell
# run from images/federation-redirect
# update requirements.txt based on requirements.in
docker run --rm \
--env=CUSTOM_COMPILE_COMMAND="see README.md" \
--volume=$PWD:/io \
--workdir=/io \
--user=root \
python:3.9-bullseye \
sh -c 'pip install pip-tools==6.* && pip-compile --upgrade'
```
<file_sep>/docs/source/operation_guide/index.rst
================
Operations Guide
================
Team processes as well as useful information about what you might
run into when maintaining mybinder.org.
.. toctree::
:maxdepth: 2
common_problems
command_snippets
grafana_plots
federation
<file_sep>/docs/source/incident-reports/index.rst
.. _incident-reporting:
==================
Incident reporting
==================
This page contains information and guidelines for how the Binder team handles
incidents and incident reports. Remember, **incidents are opportunities to learn**!
Principles and guidelines for incident reporting
------------------------------------------------
- Inspiration for our guidelines: `Google SRE guide, Managing Incidents <https://sre.google/sre-book/managing-incidents/>`_.
- Team management and takeaways from incidents: `Etsy Debriefing Facilitation Guide <https://extfiles.etsy.com/DebriefingFacilitationGuide.pdf>`_.
Example template for incident report
------------------------------------
- :doc:`Example template for incident report <template-incident-report>`
Incident history
----------------
(in reverse chronological order)
.. toctree::
:maxdepth: 1
:glob:
:reversed:
./2*
template-incident-report
<file_sep>/scripts/grafana-export
#!/usr/bin/env python3
"""
Export grafana data to a folder
Also import data from that folder
"""
import json
import logging
import os
import pathlib
import requests
log = logging.getLogger("grafana-export")
grafana_user = os.environ.get("GRAFANA_USER", "admin")
grafana_password = os.environ.get("GRAFANA_PASSWORD")
session = requests.Session()
def save_dashboard(host, dashboard_result, dashboard_dir):
    """Download a single dashboard by uid and write it to dashboard_dir.

    Args:
        host: base URL of the Grafana server (no trailing slash).
        dashboard_result: one entry from the Grafana ``/api/search`` response;
            must contain at least the "uid" and "title" keys.
        dashboard_dir: pathlib.Path of the directory to write ``<uid>.json``
            into; created if it does not already exist.

    Raises:
        requests.HTTPError: if the dashboard fetch returns an error status.
    """
    dashboard_dir.mkdir(exist_ok=True)
    uid = str(dashboard_result["uid"])
    dest = dashboard_dir.joinpath(f"{uid}.json")
    log.info(f"Downloading {dashboard_result['title']} to {dest}")
    r = session.get(host + f"/api/dashboards/uid/{uid}")
    r.raise_for_status()
    dashboard = r.json()
    # Reuse `dest` (original recomputed the same path a second time).
    # sort_keys + indent keep exports stable and diff-friendly in git.
    with dest.open("w") as f:
        json.dump(dashboard, f, sort_keys=True, indent=1)
def export_grafana(host, data_dir, confirm=True):
    """Export Grafana dashboards and data sources from `host` into `data_dir`.

    Dashboards are written one file per uid under ``data_dir/dashboards``;
    data sources are written to ``data_dir/datasources.json``.
    The `confirm` flag is accepted for interface symmetry with
    `import_grafana` and is not used here.
    """
    out_dir = pathlib.Path(data_dir)
    out_dir.mkdir(parents=True, exist_ok=True)
    dashboards = out_dir.joinpath("dashboards")

    # Fetch the dashboard index, then save each dashboard individually.
    search = session.get(host + "/api/search?limit=1000")
    search.raise_for_status()
    for entry in search.json():
        if entry["type"] == "dash-db":
            save_dashboard(host, entry, dashboards)

    # TODO: save folders (we don't have any)

    # Export the data source definitions as a single JSON file.
    sources_path = out_dir.joinpath("datasources.json")
    log.info(f"Downloading data sources to {sources_path}")
    resp = session.get(host + "/api/datasources")
    resp.raise_for_status()
    sources = resp.json()
    with sources_path.open("w") as f:
        json.dump(sources, f, sort_keys=True, indent=1)
def import_grafana(host, data_dir, confirm=True):
    """Import Grafana data previously written by `export_grafana`.

    Creates any data sources from ``data_dir/datasources.json`` that the
    target host does not already have (matched by name), then uploads every
    dashboard JSON file found in ``data_dir/dashboards``.

    Args:
        host: base URL of the destination Grafana server (no trailing slash).
        data_dir: directory produced by a previous export.
        confirm: when True, prompt interactively before importing.

    Raises:
        FileNotFoundError: if `data_dir` is not an existing directory.
        requests.HTTPError: if any Grafana API call fails.
    """
    data_dir = pathlib.Path(data_dir)
    if not data_dir.is_dir():
        # Fixed: original raised FileExistsError, which means "path already
        # exists"; a missing directory is FileNotFoundError (still OSError).
        raise FileNotFoundError(f"No such directory: {data_dir}")
    if confirm:
        ans = input(f"Import data from {data_dir} to {host}? [y/N]")
        if not ans.lower().startswith("y"):
            print("Exiting")
            return
    log.info(f"Importing data from {data_dir} to {host}")

    # Create missing data sources first, so dashboards can reference them.
    datasources_json = data_dir.joinpath("datasources.json")
    log.info(f"Creating data sources from {datasources_json}")
    with datasources_json.open() as f:
        datasources = json.load(f)
    r = session.get(host + "/api/datasources")
    r.raise_for_status()
    # Index existing sources by name to skip duplicates below.
    existing = {s["name"]: s for s in r.json()}
    log.info(f"Already have data sources: {list(existing)}")
    for source in datasources:
        if source["name"] in existing:
            log.info(f"Already have data source {source['name']}")
            continue
        log.info(f"Adding data source {source['name']}: {source['url']}")
        # Only send the fields Grafana needs to create the source; the
        # exported records contain extra server-assigned metadata.
        request_data = {}
        for key in ("name", "url", "access", "type"):
            request_data[key] = source[key]
        r = session.post(
            host + "/api/datasources",
            data=json.dumps(request_data),
            headers={"content-type": "application/json"},
        )
        r.raise_for_status()

    # Upload every exported dashboard.
    dashboards_dir = data_dir.joinpath("dashboards")
    for dashboard_json in dashboards_dir.glob("*.json"):
        with dashboard_json.open() as f:
            dashboard = json.load(f)
        # can't preserve integer id on upload,
        # stable uid is preserved, though
        dashboard["dashboard"].pop("id")
        # do not upload metadata, but keep the folder placement
        meta = dashboard.pop("meta")
        dashboard["folderId"] = meta["folderId"]
        # overwrite=True replaces any dashboard with the same uid
        dashboard["overwrite"] = True
        log.info(
            f"Uploading dashboard {dashboard['dashboard']['uid']}: {dashboard['dashboard']['title']}"
        )
        r = session.post(
            host + "/api/dashboards/db",
            data=json.dumps(dashboard),
            headers={"content-type": "application/json"},
        )
        r.raise_for_status()
if __name__ == "__main__":
    import argparse

    # CLI wiring: export from grafana.mybinder.org by default; pass
    # --import to reverse direction and load a previous export back in.
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--host",
        default="https://grafana.mybinder.org",
        help="The Grafana host to export from.",
    )
    parser.add_argument(
        "-d",
        "--data-dir",
        default="./grafana-data",
        help="Directory in which to store grafana export data",
    )
    # --import swaps the action callable; the default action is export.
    parser.add_argument(
        "--import",
        dest="action",
        action="store_const",
        const=import_grafana,
        default=export_grafana,
        help="Import grafana data instead of exporting",
    )
    parser.add_argument(
        "-y", "--no-confirm", action="store_true", help="Skip prompts for confirmation"
    )
    logging.basicConfig(level=logging.INFO)
    # grafana_user / grafana_password come from module-level configuration
    # not visible in this section — TODO confirm where they are set.
    # Per the warning below, unauthenticated data source export fails.
    if grafana_password:
        session.auth = (grafana_user, grafana_password)
    else:
        log.warning("Without authentication, data source export will fail")
    opts = parser.parse_args()
    # Dispatch to whichever action (export/import) argparse selected.
    opts.action(opts.host, opts.data_dir, confirm=(not opts.no_confirm))
<file_sep>/docs/source/incident-reports/2018-01-04-failed-staging-deploy.md
# 2018-01-04, Failed deploy to staging
## Summary
Merging a PR into BinderHub and deploying that change to `mybinder.org` staging in a second PR resulted in a Travis error. It also led to user uncertainty about the steps to resolve the error. User misunderstanding of the correct commit SHA to use for the binderhub helm-chart was one part of the problem. Flakiness of Travis deployment due to insufficient waiting for helm-chart deployments contributed to the problem. Though time consuming to determine the cause of the Travis error, there was no loss of production service due to current workflow and the staging-prod workflow served its main purpose well.
## Timeline
All times in PST
### 2018-01-04 07:33
Reviewed and merged BinderHub PR [#395](https://github.com/jupyterhub/binderhub/pull/395). Min's PR commit 441f5ea. Merge commit 4e3bafb.
### 08:03
PR [#254](https://github.com/jupyterhub/mybinder.org-deploy/pull/254) failed to deploy to staging.
[Travis CI](https://travis-ci.org/jupyterhub/mybinder.org-deploy/builds/325095814) errored and displayed that the deploy to staging failed.
PR #254 was generated using the first set of instructions in the `README`, [Deploying a Change](https://github.com/jupyterhub/mybinder.org-deploy#deploying-a-change). _Note: User error was made here by assuming the SHA used in the BinderHub helm-chart (binderhub-0.1.0-441f5ea) was the same as the PR in the BinderHub merge commit (4e3bafb). The assumption was that Travis would deploy the binderhub change to the helm-chart repo using the merge commit hash. The documentation in the `README`'s [BinderHub](https://github.com/jupyterhub/mybinder.org-deploy#binderhub) section has this correct, but the user didn't think to scroll down as they had done the deployment process before but not on a regular basis._
Investigation begins.
### 08:17
Following the process in the `README`, I reverted the original PR since Travis showed an error. PR [#255](https://github.com/jupyterhub/mybinder.org-deploy/pull/255) reverted #254.
### 08:25
Message posted on Gitter to understand why Travis had errored since it seemed to be a very simple change bumping the BinderHub version.
### 08:27
Response posted on Gitter that Travis often fails to deploy. A restart of the Travis job usually resolves the error.
### 08:34
Message posted on Gitter to understand why the merge SHA (4e3bafb) was not found in a binderhub helm-chart.
From the Gitter response, the user learned that there is a delay in the helm-chart showing up. The user realized that they had also made an incorrect assumption about the hash to use in the PR for the binderhub helm-chart. The correct SHA to use would be the last commit where the relevant files changed (441f5ea), which is not the same as the most recent commit (4e3bafb).
### 08:59
Submitted a new PR [#256](https://github.com/jupyterhub/mybinder.org-deploy/pull/256) to staging with the correct hash. Travis deploys successfully to staging. Visual review of `staging.mybinder.org` site looks fine.
### 11:31
Deployed to Prod with PR [#257](https://github.com/jupyterhub/mybinder.org-deploy/pull/257). Travis deploys successfully.
Tested repo, `willingc/ThinkDSP`, using the prod service to see if the notebook would launch. All seemed working on the sample notebook launch. Also checked Grafana dashboard to see if there was any disruption in activity.
Everything is fine and prod is working.
## Action items
### Process
- Minimize tribal knowledge in the deployment process (i.e. all may not know that restarting a failed Travis build may clear the error.).
- Simplify steps into a checklist especially ones that require human lookup of information or copy/paste.
### mybinder.org-deploy
- Minimize copy-paste errors by user
- Add a "deployment to staging" checklist and a "deployment to prod" checklist to the documentation (possibly as files distinct from the `README`.)
- Add a better way to self diagnose whether the Travis failure is legitimate or spurious.
- Look into generating a commit for bumping without human entry of data (review by a human is fine)
### helm-chart
- Document in the helm chart repo as well as mybinder.org-deploy that the deployment of the helm-chart may be delayed. Users should wait approximately 'x' minutes before opening a PR on staging.
<file_sep>/terraform/README.md
# Terraform folder structure
We keep terraform files for repeatable deployment of similar infrastructure
within this folder. While terraform configuration can be _applied_ agnostically
to different cloud providers, each provider has different terraform interfaces
and modules to define the infrastructure; therefore, we cannot _write_
terraform configuration agnostically.
For each cloud provider we provide terraform config for, we have a sub-folder
within this folder:
- `gcp`: This folder contains terraform config that interacts with Google
Cloud Platform
- `aws`: This folder contains terraform config that interacts with Amazon
Web Services
<file_sep>/docs/source/incident-reports/2018-02-22-nginx-down.md
# 2018-02-22 NGINX crash
## Summary
We got stackdriver email alerts saying Binder was down, confirmed all Binder
services were down (incl. mybinder.org), saw that logs were showing very little
output in general and suspected it was NGINX pods. Deleted the pods, then
all went back to normal.
## Timeline
All times in PST
### 2018-02-22 16:30
Stackdriver sends emails about the outage which we notice. Check grafana as
well as `mybinder.org` and both are down.
### 16:32
Check the logs being generated by the hub. Very few logs in general, and most
recent ones show errors with the NGINX ingress pods.
### 16:33
Deleted the ingress controller pods and waited for new ones to come up
### 16:35
Confirm that the problem is resolved, grafana/mybinder.org are now back.
## Lessons learnt
### What went well
1. The problem was very quickly identified and resolved
### What went wrong
1. The alert may have only gone out to a subset of team members, so it could
have been noticed earlier.
### Where we got lucky
1. We happened to be in a position where we could quickly debug and fix.
## Action items
### Process improvements
1. Make sure that everybody gets emailed when the site goes down (DONE)
### Technical improvements
1. Find a path forward to switch away from NGINX to better traffic tech [#528](https://github.com/jupyterhub/zero-to-jupyterhub-k8s/issues/528)
<file_sep>/images/analytics-publisher/Dockerfile
# The build stage
# ---------------
# Builds wheels for all requirements so the final image needs no compilers.
FROM python:3.9-bullseye as build-stage

# Build wheels
#
# We set pip's cache directory and expose it across build stages via an
# ephemeral docker cache (--mount=type=cache,target=${PIP_CACHE_DIR}).
#
COPY requirements.txt requirements.txt
ARG PIP_CACHE_DIR=/tmp/pip-cache
RUN --mount=type=cache,target=${PIP_CACHE_DIR} \
    pip install build \
    && pip wheel \
    --wheel-dir=/tmp/wheels \
    -r requirements.txt

# Download tini (minimal init, used below as the container ENTRYPOINT)
#
# NOTE(review): if `uname -m` is neither x86_64 nor aarch64, ARCH stays
# unset and the download URL is malformed — confirm supported platforms.
RUN if [ "$(uname -m)" = x86_64 ]; then ARCH=amd64; fi; \
    if [ "$(uname -m)" = aarch64 ]; then ARCH=arm64; fi; \
    curl -sSLo /tini "https://github.com/krallin/tini/releases/download/v0.19.0/tini-$ARCH" \
    && chmod +x /tini

# The final stage
# ---------------
# Slim runtime image: installs from the wheels built above, no toolchain.
FROM python:3.9-slim-bullseye
ENV PYTHONUNBUFFERED=1

# fix known vulnerabilities
RUN apt-get -y update \
    && apt-get -y upgrade \
    && rm -rvf /var/lib/apt/lists/*

# install wheels built in the build-stage
COPY requirements.txt /tmp/requirements.txt
ARG PIP_CACHE_DIR=/tmp/pip-cache
RUN --mount=type=cache,target=${PIP_CACHE_DIR} \
    --mount=type=cache,from=build-stage,source=/tmp/wheels,target=/tmp/wheels \
    pip install \
    --find-links=/tmp/wheels \
    -r /tmp/requirements.txt

# tini was downloaded in the build stage; reuse it here as PID 1
COPY --from=build-stage /tini /tini

# copy content to container
WORKDIR /srv
COPY . .

ENTRYPOINT ["/tini", "--"]
CMD ["python", "/srv/run.py"]
<file_sep>/docs/source/incident-reports/2020-07-09-simultaneous-launches-scipy.md
# 2020-07-09, Simultaneous launches (aka, SciPy gives Binder a lot of hugs at the same time)
## Summary
Several SciPy 2020 tutorials are using Binder as a part of their demonstrations. They've
requested quota increases before-hand, which we have granted. However, when two
tutorials both launched at the same time with >100 users each, Binder temporarily lost
service. This disruption self-healed once a new node was up, so major technical
improvements aren't needed. Though we may consider process/documentation improvements to
help avoid this in the future.
## Timeline
All times in US/Pacific
links to grafana during incident window:
- [overview](https://grafana.mybinder.org/d/3SpLQinmk/1-overview?orgId=1&from=1594299600000&to=1594307700000)
- [node status](https://grafana.mybinder.org/d/nDQPwi7mk/node-activity?orgId=1&from=1594299600000&to=1594307700000)
- [pod status](https://grafana.mybinder.org/d/fZWsQmnmz/pod-activity?orgId=1&from=1594299600000&to=1594307700000)
### 07:30am - First report
[We hear reports](https://gitter.im/jupyterhub/mybinder.org-deploy?at=5f072954a61b8675119e65dd)
that the xarray tutorial noted some broken and/or slow launches on Binder.
### 07:35 - Note pending pods
[We note there are many pending pods](https://gitter.im/jupyterhub/mybinder.org-deploy?at=5f072ac186ccb45b599592c9) during that time,
peaking around 07:15.
We also noted that the xarray Binder had undergone many builds before the tutorial,
which may have contributed to the time it took to pull Docker images on the nodes.
We also noted that the placeholder pods and active pods seemed to do
[some weird stuff](https://grafana.mybinder.org/d/nDQPwi7mk/node-activity?orgId=1&from=1594300376037&to=1594305103856&fullscreen&panelId=43):

Same chart with annotations for scale-up events:

> Min notes that: this is placeholder pods doing exactly what they are supposed to
> (ref: [placeholder writeup](https://discourse.jupyter.org/t/planning-placeholders-with-jupyterhub-helm-chart-0-8-tested-on-mybinder-org/213)):
>
> 1. pending placeholders is what causes node scale-up before 'real' users need the new node
> 2. as the new node is ready to accept pods, placeholders start there and go back to running
> 3. if 100% of placeholders are pending for a period of time waiting for the new node
> (as they are here), this indicates _user_ pods are also waiting for the new node,
> which is what placeholder pods are meant to reduce. We should expect a rise in
> launch time when this happens. This indicates we do not have enough placeholder
> pods.
### 07:35 - Node limit increased
We noted that we'd reached our node limit on GKE and
[increased it](https://gitter.im/jupyterhub/mybinder.org-deploy?at=5f072b0a8342f46274047e5b).
### 07:42 - Launches are back to normal
We noted that [launches returned to normal](https://gitter.im/jupyterhub/mybinder.org-deploy?at=5f072ce0c7d15f7d0f801ab0),
even though a new node did not appear to be needed.
## Lessons learnt
### What went well
1. We quickly heard about the problem and responded
2. The deployment was relatively self-healing here, with one exception that we may
have hit a node limit on GKE. Demand dropped below the limit before the limit was increased.
### What went wrong
1. A steeper than usual spike in traffic meant our placeholder pods didn't allocate new
nodes early enough to be ready when users needed them, resulting in queuing of
launches.
2. A very high spike in number of builds (~30) may have contributed to an unusual
allocation of resources (maybe?)
3. As a result of the high load and new nodes spinning up, launches began to timeout
4. GKE prod reached its upper node limit in capacity, requiring a manual increase in
node capacity. During this time, many pods were pending, waiting for a slot.
5. According to our metrics, launch success rate never dropped below 100%, but users
experienced failures. This is perhaps because our timeout is 10 minutes (long!) and a
first-try failure at 10 minutes followed by a success 5 minutes later is likely experienced as a failure by the user. The
launch success rate chart shows 100% success rate _after retry_ but high retry rate,
indicating timeouts due to load:

6. Users reported experiencing connection errors, but these are not reflected in our
metrics as failures.
## Action items
### Process improvements
1. Something about how to notice whether we're hitting GKE node limits? We
   can add a threshold to certain Grafana charts, indicating the current limit. Not sure
   how to keep it up-to-date. [issue here](https://github.com/jupyterhub/mybinder.org-deploy/issues/1533).
2. Consider dedicated nodes or cluster for large events we know are going to use Binder a lot
(e.g. `scipy.mybinder.org`). [see issue here](https://github.com/jupyterhub/mybinder.org-deploy/issues/1526)
3. Include a "tips and recommendations" step to the official "quota increase" issues.
Maybe a short list of 5 items that instructors should consider (like "don't make lots
of changes just before the event") and we get them to verbally say "yes" to before
they are approved. [issue here](https://github.com/jupyterhub/mybinder.org-deploy/issues/1525)
### Documentation improvements
1. Something about recommending to others not to make lots of builds of the repo just
before an event (or to separate out content/env repos)
2. Something about spreading out Binder clicks for events so they don't all happen at once?
Add these to the documentation and then add a link to these docs to the github quota increase [see here for an issue](https://github.com/jupyterhub/binder/issues/200)
issue.
### Code and Configuration changes
1. increase the number of placeholder pods on GKE while we expect larger than usual
spikes from SciPy tutorials:
[PR](https://github.com/jupyterhub/mybinder.org-deploy/pull/1492).
2. decrease launch timeout to 5 minutes. 10 minutes to launch is probably too long to
start retrying. This chart (lots of lines!) shows brief periods of 'retried'
launches, almost all of which are at 10 minutes, indicating that timeout is the
majority cause:

If we limit to only the 99th percentile of successful launches, we see that launches _almost never_ take longer than 5 minutes and then succeed:

If we decrease the launch timeout to 5 minutes, this will likely increase our failure rate, _according to metrics_, but should clear out the queue of what are likely already failed launches _according to users_. The result should be a better experience. [issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/1528).
3. We should investigate resource allocations to builds, and possible limit concurrent
builds per repo to a very small number (killing old builds is probably a better user
experience than blocking new ones), as the failure corresponds to a spike in builds
but _not_ a spike in launches. [issue for information gathering here](https://github.com/jupyterhub/mybinder.org-deploy/issues/1529).
4. Use in-progress metrics (`binderhub_inprogress_launches|builds`) for instant feedback about current launch/build requests.
Our `launch_time_seconds` metrics are only recorded when the launch either succeeds or
fails, resulting in an inaccurately smooth and delayed metrics report. For example:
120 requests in 1 second that complete one at a time over a minute will look like a
smooth 2 requests/second in the current metrics. _done in grafana_.
5. Implement a quota on prod so that pending pods beyond capacity fail informatively,
rather than queuing, continuing to add to the load?
<file_sep>/docs/source/index.rst
Site Reliability Guide for mybinder.org
=======================================
This site is a collection of wisdom, tools, and other helpful information
to assist in the maintenance and team-processes around the BinderHub deployment
at `mybinder.org <https://mybinder.org>`_.
If you are looking for documentation on how to use mybinder.org,
`see the mybinder.org user documentation <https://docs.mybinder.org>`_. If you are looking
for information on deploying your own BinderHub
`see the BinderHub documentation <https://binderhub.readthedocs.io>`_.
What is the mybinder.org operations team?
-----------------------------------------
Behind the mybinder.org deployment is a team of contributors that
donate their time to keeping mybinder.org running smoothly. This
role is often called a
`Site Reliability Engineer <https://en.wikipedia.org/wiki/Site_Reliability_Engineering>`_
(or SRE). We informally call this team the "mybinder.org operators".
This site is a collection of wisdom, tools, and other helpful information that the
mybinder.org operations team uses for maintenance and team-processes
around the BinderHub deployment at `mybinder.org <https://mybinder.org>`_.
**If you are interested in helping the mybinder.org operations team**, first
check out `this post on what an operator does <https://discourse.jupyter.org/t/the-operators-no-binder-isnt-forming-a-rock-band/694>`_.
To show your interest in helping, please reach out to the operations team
via `this Discourse thread <https://discourse.jupyter.org/t/interested-in-joining-the-mybinder-org-operations-team/761>`_.
.. toctree::
:maxdepth: 2
getting_started/index
deployment/index
operation_guide/index
components/index
analytics/index
incident-reports/index
Indices and tables
------------------
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
<file_sep>/images/analytics-publisher/indexer.py
#!/usr/bin/env python3
"""
Generate index.html & index.jsonl from Events GCS bucket
This script reads the bucket containing archived events
generating a human readable index.html & a machine readable
index.jsonl.
"""
import argparse
import json
import mimetypes
import os
import tempfile
from datetime import datetime
from glob import glob
import jinja2
from google.cloud import storage
# Directory containing this script; used to locate the HTML template
# and static assets relative to the source tree.
HERE = os.path.dirname(os.path.abspath(__file__))

# Static files that should be uploaded by indexer
STATIC_FILES = glob(os.path.join(HERE, "static", "bootstrap-4.1.3.min.css"))
def index_events(project, bucket, debug=False, dry_run=False):
    """Generate and upload index.html / index.jsonl for archived events.

    Lists every blob in ``bucket`` whose name starts with ``events-``,
    collects the ``Events-Date`` / ``Events-Count`` metadata stamped on
    each at archive time, renders a human-readable index.html (sorted
    newest first) and a machine-readable index.jsonl, and uploads both —
    along with the static CSS assets — back into the same bucket.

    Parameters:
        project: GCP project the storage client should operate under
        bucket: name of the GCS bucket holding archived event files
        debug: when True, also print the rendered HTML index to stdout
        dry_run: when True, render everything but skip all uploads
    """
    # Fix: `project` was accepted but silently ignored (storage.Client()
    # always fell back to the environment's default project). Pass it on.
    storage_client = storage.Client(project=project)
    bucket = storage.Bucket(storage_client, bucket)
    blobs = bucket.list_blobs(prefix="events-")
    archives = []
    with open(os.path.join(HERE, "index.html")) as f:
        html_template = jinja2.Template(f.read())
    for blob in blobs:
        archives.append(
            {
                "name": blob.name,
                "date": blob.metadata["Events-Date"],
                "count": blob.metadata["Events-Count"],
            }
        )
    with tempfile.TemporaryFile(mode="w+") as htmlfile, tempfile.TemporaryFile(
        mode="w+"
    ) as jsonlfile:
        html_index = html_template.render(
            archives=sorted(
                archives, key=lambda archive: archive["date"], reverse=True
            ),
            generated_time=datetime.utcnow().isoformat() + "Z",
        )
        if debug:
            print(html_index)
        htmlfile.write(html_index)
        for archive in archives:
            jsonlfile.write(json.dumps(archive) + "\n")
        # Rewind so the uploads read each temp file from the beginning.
        htmlfile.seek(0)
        jsonlfile.seek(0)
        if not dry_run:
            html_blob = bucket.blob("index.html")
            html_blob.upload_from_file(htmlfile, content_type="text/html")
            print("Uploaded index.html")
            bucket.blob("index.jsonl").upload_from_file(jsonlfile)
            print("Uploaded index.jsonl")
            # Upload static assets
            for static_file in STATIC_FILES:
                blob_name = os.path.relpath(static_file, HERE)
                static_blob = bucket.blob(blob_name)
                mimetype, _ = mimetypes.guess_type(static_file)
                static_blob.upload_from_filename(static_file, content_type=mimetype)
                print(f"Uploaded static file {blob_name}")
def main():
    """Entry point: parse command-line arguments and run the indexer."""
    parser = argparse.ArgumentParser()
    parser.add_argument("project", help="Name of the GCP project to read logs from")
    parser.add_argument(
        "bucket", help="GCS bucket to read archived event files from"
    )
    parser.add_argument(
        "--debug",
        action="store_true",
        default=False,
        help="Print events when processing",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        default=False,
        help="Do not upload processed events to GCS",
    )
    args = parser.parse_args()
    index_events(args.project, args.bucket, args.debug, args.dry_run)


if __name__ == "__main__":
    main()
<file_sep>/docs/source/incident-reports/2018-01-18-reddit-hug.md
# 2018-01-18, reddit hugs mybinder
## Summary
A repo is posted to [reddit /r/python](https://www.reddit.com/r/Python/comments/7r6e6w/visualize_your_mostplayed_artists_tracks_and/), it starts getting popular. Traffic starts
building from around 6pm. Peaks at about 6am with 600 running pods.
## Timeline
All times in PST
### 2018-01-18 4:19
First noticed something was up after receiving an email about high level of users on Grafana from someone and a question on the Gitter channel.
### 4:27
Found the repository that was being [spawned](https://github.com/N2ITN/GoogleMusicFavs/), checked content (was this a bot/spam repo?). About 300 pods from this repo running (`kubectl get pods --namespace=prod | grep googlemusic | wc -l`)
### 4:33
Find out this repo is part of a r/python post. Decide this is a legit use of mybinder, things seem to be working. Keep monitoring, no action.
### 4:45
Notice we are using 12 of the maximum 15 nodes that the autoscaler is allowed to scale up to. Unclear what metric the autoscaler uses to decide to scale up. Decide to increase limit if we get close to 15.
### 5:59
Now at 14 nodes, decide to increase limit to 20 nodes via web interface. Noticed that grafana reports one less active node than cloud console. Grafana does not count cordoned nodes!
### 6:15
Note that number of running pods according to grafana has started to level off.
### 6:25
Decide there must be a problem as Google analytics traffic suggests people are still trying to start new binders. Arriving at about 10 users per 15minutes ("real time active users" on GA). This doesn't fit with level or falling number of running pods.
### 6:39
User reports unrelated repository does not launch ("waiting for server to launch") as well as start of build not working (can not resolve github.com). Both problems are confirmed.
### 6:52
After investigating and looking for broken/restarting pods, health of nodes that something weird is broken. Remember that last time things looked fine but things were broken we increased the number of ingress replicas. Decided to increase to ten from five. Notice that some replicas are in CrashLoopBackoff. Decide not to further investigate because increasing the number to seven replicas seems good enough. Also does not seem to mitigate the original build/launch problem.
### 7:23
Connect to node -2fhq with `gcloud compute --project "binder-prod" ssh --zone "us-central1-a" "gke-prod-a-ssd-pool-972e4826-2fhq"` to test network connectivity. Fetching `github.com` and `google.com` with `wget` works. Conclude that there is no network problem.
### 7:27
Noticed that deploying to production "failed". This seem to be flaky deploys as the cluster had been changed to have more ingress replicas. Travis log ends/hangs on "deleting outdated charts". Multiple restarts of build do not seem to fix the problem.
### 7:49
Decided to build a never before built repo with a unique name to make debugging easier. See that it gets built, pod is started but then exits 40s after startup because it is not contacted. JupyterHub pod log shows corresponding "pod never showed up" message. Unclear why there seems to be networking issues.
### 8:04
Tim signs off, Carol online.
### 8:20 - 8:24
Yuvi and Carol discuss situation. Suggest scrolling back through gitter history to catch up with state of play.
### 8:24
Using 14 nodes, conclude that "we are full". Check content of the deploys made during the last few hours.
### 8:29
By hand remove all pods spawned by jupyterhub that are 6-9hours old with `kubectl --namespace=prod get pod -l heritage=jupyterhub | grep -P '[6-9]h$' | awk '{ print $1; }' | xargs -L1 kubectl --namespace=prod delete pod`
### 8:33
Note that jupyterhub logs are full of errors. Reading logs leads to conclusion that user pods are being spawned but contacting them within the 30s timeout fails.
### 8:39
Checking network connectivity between jupyterhub pod and a random pod. "ssh" into the hub pod with `kubectl --namespace=prod exec -it hub-7649b9bf8-d769g /bin/bash` and attempt to access a random user pod found with `kubectl --namespace=prod get pod -o wide`. Test network connectivity with:
```python
>>> requests.get('http://10.12.6.136:8888/')
<Response [404]>
```
404 error means we managed to talk to the jupyter notebook server on that IP.
### 8:47
Decide to delete the jupyterhub pod: `kubectl --namespace=prod delete pod hub-7649b9bf8-d769g`
watching logs of new hub pod with `kubectl --namespace=prod logs hub-7649b9bf8-hkvnl -f`
### 8:53
Noted that restarting the hub also woke up the culling process. Culler springs to action cleaning out a lot of stale pods.
### 8:57
Some builds succeed and proceed to notebook interface. Others still get stuck with "waiting for server ...".
### 9:00
Tim back. Catching up some details on cluster size that got missed in the scrollback.
### 9:01
`gke-prod-a-ssd-pool-972e4826-2fhq` is identified as a node that has networking issues. The pod `fluentd-gcp-v2.0.9-68p79` is in `CrashLoopBackOff`. This pod is also on -2fhq. The pod was found by looking at all pods in all namespaces, not just the prod namespace.
### 9:05
Decide to cordon off node: `kubectl cordon gke-prod-a-ssd-pool-972e4826-2fhq`. Noted that the ingress pods that have been failing are also on this node.
### 9:14
After testing several repositories by hand and with `py.test tests/test_launch.py --hub-url=https://hub.mybinder.org --binder-url=https://mybinder.org` feeling is things are back to working order. Brief discussion if incident is over or not.
## Action items
### Process
- Gather commands used in incident response and add to SRE guide.
- Discuss how to know when to call an incident as resolved.
### Training
- Investigate k8 training
- Consider ways to have a system similar to staging (though not critical path) for training and experimentation
### Testing
- Consider creating a script or cheatsheet for running tests to launch binders
- Run the py.tests constantly and actively alert us when they fail (active monitoring)
<file_sep>/docs/source/getting_started/getting_started.md
# Getting started with the `mybinder.org` dev team
This page contains a starting point for people who would like to help
maintain the BinderHub deployment at `mybinder.org`.
## Make sure you have access on the Google Cloud project
Go to `console.cloud.google.com` and see if you have `binderhub` listed
in your projects. If not, message one of the Binder devs on the [Gitter Channel](https://gitter.im/jupyterhub/binder)
to get access.
## Install `kubectl` and the `gcloud` SDK
The most important tool for keeping an eye on the Kubernetes deployment is
`kubectl` and the `gcloud` SDK. These will let you run queries on the
`mybinder.org` deployment from your command line. To set this up, check
out the [Zero to JupyterHub Google SDK section](https://zero-to-jupyterhub.readthedocs.io/en/latest/kubernetes/google/step-zero-gcp.html).
(everything before the part where you create a google cloud cluster).
When you run `gcloud init` for the first time, it'll ask you to authenticate
and to choose a project / default region. You should authenticate with
the email that's been given privileges to work on `mybinder.org`, choose
the project `binderhub`, and use the region `us-central1`.
We recommend enabling [`kubectl` autocompletion](https://kubernetes.io/docs/tasks/tools/#kubectl)
as well.
## Set up `kubectl` to connect to `mybinder.org`
Once you have `kubectl` installed, you can connect it with `mybinder.org`.
To do so, run the following command:
```
gcloud container clusters get-credentials prod --zone us-central1 --project binderhub
```
This will open a log-in page in your browser. If you've got access, you'll
be able to log in and your `kubectl` will now be able to run commands
with `mybinder.org`.
You can test this out by running:
```
kubectl --namespace=prod get pod
```
and a list of all running Binder pods should be printed.
### Connect to the staging deployment
Now that you're connected to prod it's time to connect to staging. To do so,
pull the staging credentials on to your local machine:
```
gcloud container clusters get-credentials staging --zone us-central1-a --project binderhub
```
You can now switch between the `prod` and `staging` deployments by changing your
`kubectl` context.
## Look at the project Grafana
Another useful resource is the [mybinder.org Grafana dashboard](https://grafana.mybinder.org/?orgId=1).
This has information about the current state of the binder deployment. Take a
look at all of these plots and familiarize yourself with them. They're quite
useful in spotting and debugging problems in the future.
## Start helping out!
There are many ways that you can help debug/maintain/improve the `mybinder.org`
deployment. The best way to get started is to keep an eye on the [Gitter Channel](https://gitter.im/jupyterhub/binder)
as well as the Grafana dashboard. If you see something interesting, don't hesitate
to ask questions or make suggestions!
<file_sep>/docs/source/operation_guide/command_snippets.md
# Command snippets used during operations
This is a collection of frequently and infrequently used command-line snippets
that are useful for operating and investigating what is happening on the
cluster. Think of it as a mybinder.org specific extension of the [kubernetes
cheatsheet](https://kubernetes.io/docs/reference/kubectl/cheatsheet/).
## The mybinder-tools Python repository
Note that there is a helper package for working with Kubernetes in Python,
you can find it in the [mybinder-tools repo](https://github.com/jupyterhub/mybinder-tools).
## Cluster management
### Merging kubernetes credentials
Before completing any of the command snippets below, you need to merge the kubernetes credentials of the cluster you'd like to work with into your `~/.kube/config` file.
This is achieved by running:
```bash
gcloud container clusters get-credentials <CLUSTER-NAME> --zone=us-central1-a
```
### Upgrading kubernetes
Upgrading Kubernetes is done in two steps:
1. upgrade the kubernetes master version
2. upgrade the node version
First, we can upgrade the master version.
This is easiest via the Google Cloud Console which gives you a button to pick the latest version.
Upgrading master will result in some brief downtime of Binder during the upgrade.
It should take a couple of minutes.
To upgrade the master version with `gcloud`:
```bash
gcloud --project=binderhub-288415 container clusters upgrade staging --master --zone=us-central1-a
gcloud --project=binderhub-288415 container clusters upgrade prod --master --zone=us-central1
```
Now we can start the process of upgrading node versions, which takes more time.
Upgrading nodes really means replacing each node with a new one with the same name, using the new version.
If we use the above `container clusters upgrade` command to upgrade nodes,
it will take a very long time as Kubernetes drains nodes one by one to replace them.
To minimize downtime at the expense of some extra nodes for a while,
we create a whole new node pool with the new version and then cordon
and eventually delete the existing one.
**Note:** the process for changing node machine-type is the same
as the process for upgrading kubernetes node version,
since it is also creating a new node pool and draining and deleting the old one.
#### Upgrading staging
First, start the upgrade on staging by creating a new node pool.
Check the node type, number of nodes, and disk size.
The new pool should match the old one.
```bash
# old_pool is the name of the pool that we are replacing
old_pool=default-pool
# new_pool is the name our new pool will have. It must be different
new_pool=pool-$(date +"%Y%m%d")
gcloud --project=binderhub-288415 container node-pools create $new_pool \
--cluster=staging \
--disk-size=500 \
--machine-type=n1-standard-4 \
--enable-autorepair \
--num-nodes=2 \
--zone=us-central1-a
```
> Note: To see a list of the node pools, run `gcloud container node-pools list --cluster staging --project=binderhub`.
After the pool is created, cordon the previous nodes:
```bash
# for each node in the old pool:
kubectl cordon $node
```
> Note: You can run `kubectl get nodes` to see a list of the current nodes (nodes are cluster-scoped, so no namespace flag is needed).
Test that launches succeed on the new nodes by visiting
[https://staging.mybinder.org/v2/gh/binderhub-ci-repos/cached-minimal-dockerfile/master](https://staging.mybinder.org/v2/gh/binderhub-ci-repos/cached-minimal-dockerfile/master)
> Note: You might have to restart one of the ingress pods named `staging-nginx-ingress-controller-*` as they will both be on cordoned nodes and hence not receiving traffic. The symptom of this is that https://staging.mybinder.org does not load anymore.
Once this is verified to be successful, the old node pool can be drained:
```bash
kubectl drain --force --delete-local-data --ignore-daemonsets --grace-period=0 $node
```
and then the node pool can be deleted:
```bash
gcloud --project=binderhub-288415 container node-pools delete $old_pool --cluster=staging --zone=us-central1-a
```
#### Upgrading prod
Upgrading production is mostly the same as upgrading staging.
It has a couple small differences in node configuration,
and we don't want to delete the old pool as soon as we have the new one
because there will be active users on it.
Production has two node pools:
1. a "core" pool, which runs the hub, binder, etc.
2. a "user" pool, where user pods run.
The process is mostly the same, but we do it in two steps (for two pools).
As with staging, first we create the new pool,
copying configuration from the old pool.
On production, we use `pd-ssd` disks, enable autoscaling,
and use the larger `n1-highmem-16` nodes for users.
The 'core' pool uses n1-highmem-4 nodes and has a smaller, 250GB SSD.
> Note: `gcloud beta` is currently required for the `--disk-type` argument.
First we'll create variables that point to our old and new node pools to make it clear when we're creating new things vs. deleting old things.
```bash
# old_user_pool is the name of the existing user pool, to be deleted
# we can automatically assign this to a variable like so
old_user_pool=$(gcloud container node-pools list --cluster prod --project=binderhub-288415 --format json | jq -r '.[].name' | grep '^user')
# new_user_pool can be anything, as long as it isn't the same as old_user_pool
# we recommend appending with the date
new_user_pool=user-$(date +"%Y%m%d")
```
> Note: You can see a list of the node pools by running:
```bash
gcloud container node-pools list --cluster prod --project=binderhub-288415 --zone=us-central1
```
Then we can create the new user pool:
```bash
# create the new user pool
gcloud beta --project=binderhub-288415 container node-pools create $new_user_pool \
--cluster=prod \
--zone=us-central1 \
--disk-type=pd-ssd \
--disk-size=1000 \
--machine-type=n1-highmem-8 \
--num-nodes=2 \
--local-ssd-count=1 \
--enable-autoscaling \
--enable-autorepair \
--min-nodes=1 \
--max-nodes=8 \
--node-labels hub.jupyter.org/node-purpose=user,mybinder.org/pool-type=users
```
and/or create the new core pool:
```bash
# the name of the old 'core' pool
old_core_pool=$(gcloud container node-pools list --cluster prod --project=binderhub-288415 --format json | jq -r '.[].name' | grep '^core')
# the name of the new 'core' pool
new_core_pool=core-$(date +"%Y%m%d")
gcloud beta --project=binderhub-288415 container node-pools create $new_core_pool \
--cluster=prod \
--zone=us-central1 \
--disk-type=pd-ssd \
--disk-size=250 \
--machine-type=n1-highmem-4 \
--num-nodes=1 \
--enable-autoscaling \
--enable-autorepair \
--min-nodes=1 \
--max-nodes=4 \
--node-labels hub.jupyter.org/node-purpose=core,mybinder.org/pool-type=core
```
Once the new pool is created, we can start cordoning the old pool.
To avoid new nodes being allocated in the old pool,
set the autoscaling upper limit to 1 on the old pool,
or disable autoscaling on the old pool.
This can only be done via the cloud console at this time.
<html>
<img src="images/node-pool-max-number.gif" alt="Set maximum number of nodes in a node pool" height="442" width="440">
</html>
Since prod has a lot of load which can overwhelm a new node,
we don't want to cordon the whole old pool immediately,
which would drive all of Binder's traffic to the new nodes.
Instead, we cordon the old nodes gradually, starting with ~half of the pool.
After the new nodes have had a chance to warm up
(check cluster utilization and user pods metrics in grafana, around 10 minutes should be fine),
we can cordon the rest of the old pool.
At each point, especially after the old pool is fully cordoned,
verify that launches work on the new nodes by visiting
[https://mybinder.org/v2/gh/binderhub-ci-repos/cached-minimal-dockerfile/master](https://mybinder.org/v2/gh/binderhub-ci-repos/cached-minimal-dockerfile/master)
```bash
# for each node in node pool
kubectl cordon $node
```
The `hub` pod will need to be manually migrated over to the new node pool.
This is achieved by deleting the pod and it should automatically restart on one of the new core nodes.
```bash
kubectl delete pod <HUB-POD-NAME> -n prod
```
> Note: You can find <HUB-POD-NAME> by running `kubectl get pods -n prod`.
Unlike staging, prod has active users, so we don't want to delete the cordoned node pool immediately.
Wait for user pods to drain from the old nodes (6 hours max), then drain them.
After draining the nodes, the old pool can be deleted.
```bash
kubectl drain --force --delete-local-data --ignore-daemonsets --grace-period=0 $node
gcloud --project=binderhub-288415 container node-pools delete $old_user_pool --cluster=prod --zone=us-central1
gcloud --project=binderhub-288415 container node-pools delete $old_core_pool --cluster=prod --zone=us-central1
```
## Pod management
### List all pods that match a given name or age
Sometimes you want to delete all the pods for a given repository. The easiest
way to do this is to name-match the part of the pod name that corresponds to
the repo (since there will be a bunch of random characters as well).
Here's a python script that will match pods with a given name or a given
age. You can use it with the following pattern:
```
python scripts/delete-pods.py --pod-name <your-query> --older-than <your-query>
```
- `--pod-name` is a string and will be matched to any pod that contains this string.
- `--older-than` is a float (in hours) and will match any pod that is older than this amount.
Note, they are both optional, but you need to supply _at least_ one. Running
the above command by itself will list all pods that match the query.
### Delete all pods that match a given name or age
If you wish to **delete** the pods that match the query above, you supply the `--delete`
kwarg like so:
```
python scripts/delete-pods.py --pod-name <your-query> --older-than <your-query> --delete
```
### Forcibly delete a pod
Sometimes pods aren't easily deleted, e.g., if they are in a state `Unknown`
or `NodeLost` kubernetes may not be able to fluidly delete them. This is because
kubernetes waits for pods to gracefully delete, and if a pod cannot do this
(e.g., because it is totally unable to communicate with kubernetes), the
delete process won't happen. In this case, you can delete such pods with:
```
kubectl --namespace=prod delete pod <POD-NAME> --grace-period=0 --force
```
### Effects of deleting production pods
Below is a list of each production pod, and the expected outcome that comes with
deleting each one.
- `hub-` active user sessions will not be affected. New and pending launches will fail until the new Hub comes back.
- `binder-` the `mybinder.org` website will temporarily go down. Active user sessions will not be affected.
- `proxy-` all current users will lose connections (kernel connection lost) until the proxy returns and the Hub restores the routes. Server state is unaffected. Most browser sessions should recover by restoring connections. All pending launches will fail due to lost connections.
- `proxy-patches-` brief, minor degradation of error messages when browsers attempt to connect to a not-running server. This results in increased load on the Hub, handling requests from browsers whose idle servers have been culled.
- `redirector-` redirect sites (beta.mybinder.org) will 404 instead of sending to mybinder.org.
- `jupyter-` deleting a user pod will shut down their session. The user will
encounter errors when they attempt to submit code to the kernel.
## Node management and information
### Manually increase cluster size
Sometimes we know ahead of time that mybinder.org will receive a lot of traffic.
As preparation we might choose to increase the size of the cluster before the
event.
To pre-emptively bump the cluster size beyond current occupancy, follow these steps:
- Increase autoscaler minimum size. (note this will lead to a brief period where
the kubernetes API is not available.)
- Go to https://console.cloud.google.com/
- Click "Kubernetes engine" -> "edit" button
- Under "Node Pools" find the "minimum size" field and update it.
- Use the `gcloud` command line tool to explicitly resize the cluster.
- `gcloud container clusters resize prod --size <NEW-SIZE>`
Manually resizing a cluster with autoscaling on doesn't always work because the autoscaler
can automatically reduce the cluster size after asking for more nodes that
aren't needed. Increasing the minimum size works if you are resizing from
outside the autoscaler's bounds (e.g. 2) to the new minimum cluster size (3), so the
autoscaler doesn't have any idle nodes available for deletion. Similarly if
the new minimum is higher than the current size and there is no need to increase
the size of the cluster the autoscaler will not scale up the cluster even though
it is below the minimum size.
### Removing a node from the cluster
To remove a node from the cluster, we follow a two-step process. We first
**cordon** the node, which prevents new pods from being scheduled on it. We then
**drain** the node, which removes all remaining pods from the node.
- Step 1. Cordon the node
```bash
kubectl cordon <NODE-NAME>
```
"cordoning" explicitly tells kubernetes **not** to start new pods on this node.
For more information on cordoning, see :ref:`term-cordoning`.
- Step 2. Wait a few hours for pods to naturally get deleted from the node.
We'd rather not forcibly delete pods if possible. However if you _need_ to
delete all the pods on the node, you can skip to step 3.
- Step 3. Remove all pods from the node
```bash
kubectl drain --force --delete-local-data --ignore-daemonsets --grace-period=0 <NODE-NAME>
```
After running this, the node should now (forcibly) have 0 pods running on it.
- Step 4. Confirm the node has no pods on it after a few minutes. You can do this
with:
```bash
kubectl get pods --namespace=prod -o wide | grep "<NODE-NAME>$" | grep "^jupyter-"
```
If there are any pods remaining, manually delete them with `kubectl delete pod`.
Once the node has no pods on it, the autoscaler will automatically remove it.
**A note on the need for scaling down with the autoscaler**.
The autoscaler has issues scaling nodes _down_, so scaling down needs to be
manually done. The problems are caused by:
1. The cluster autoscaler will never remove nodes that have user pods running.
2. We can not tell the Kubernetes Scheduler to 'pack' user pods efficiently -
if there are two nodes, one with 60 user pods and another with 2, a new user
pod can end up in either of those. Since all user pods need to be gone from
a node before it can be scaled down, this leads to inefficient
load distribution.
Because the autoscaler will only remove a node when it has no pods, this means
it is unlikely that nodes will be properly removed. Thus the necessity for
manually scaling down now and then.
### List how many user pods are running on all nodes
You can find the number of user pods on various nodes with the following command:
```bash
kubectl --namespace=prod get pod --no-headers -o wide -l component=singleuser-server | awk '{ print $7; }' | sort | uniq -c | sort -n
```
The `-o wide` lists extra information per pod, including the name of the node it is
running on. The `-l component=singleuser-server` makes it only show you user server
pods. The `--no-headers` asks kubectl to not print column titles as a header.
The `awk` command selects the 7th column in the output (which is the node name).
The sort / uniq / sort combination helps print the number of pods per each node in
sorted order.
### Recycling nodes
We have found that nodes older than > 4 days often begin to have problems.
The nature of these problems is often hard to debug, but they tend to be
fixed by "recycling" the nodes (AKA, creating a new node to take the place
of the older node). Here's the process for recycling nodes.
- **List the node ages.** The following command will list the current nodes
and their ages.
`kubectl get node`
- **Check if any nodes are > 4 days old.** These are the nodes that we can
recycle.
- **Cordon the node you'd like to recycle.**
`kubectl cordon <NODE-NAME>`
- **If you need a new node immediately.** E.g., if we think a currently-used
node is causing problems and we need to move production pods to a new node.
In this case, manually resize the cluster up so that a new node is added,
then delete the relevant pods from the (cordoned) old node.
- **Wait a few hours.** This gives the pods time to naturally leave the node.
- **Drain the node.** Run the following command to remove all pods from the node.
`kubectl drain --force --delete-local-data --ignore-daemonsets --grace-period=0 <NODE-NAME>`
- **If it isn't deleted after several hours, delete the node.** with
  `kubectl delete node <NODE-NAME>`
## Networking
### Banning traffic
Sometimes there's bad traffic, either malicious or accidental,
and we want to block traffic, either incoming or outgoing,
between Binder and that source.
We can blacklist traffic in three ways:
1. ingress ip (bans requests to Binder coming from this ip or ip range)
2. egress ip (bans outgoing traffic _from_ Binder to these ip addresses)
3. egress DNS (disables DNS resolution for specified domains)
All of these are _stored_ in the `secrets/ban.py` file.
These are not applied automatically as part of a deploy; they must be updated manually as described below.
To update what should be banned, edit the `secrets/ban.py` file
and find the relevant list. If ip-based banning changed,
run the `scripts/firewall-rules` script to update the firewall:
```bash
./scripts/firewall-rules --project=binder-staging [gke_binder-staging_us-central1-a_staging]
./scripts/firewall-rules --project=binder-prod [gke_binder-prod_us-central1-a_prod-a]
```
If it is an update to the DNS block list, run the `secrets/ban.py` script:
```bash
./secrets/ban.py gke_binder-staging_us-central1-a_staging
./secrets/ban.py gke_binder-prod_us-central1-a_prod-a
```
## Acronyms that Chris likes to use in Gitter
It has been pointed out that Chris often employs the use of unusually
long acronyms. This is a short list of translations so that the world can
understand his unique and special mind.
- TYVM: Thank You Very Much
- SGTM: Sounds Good To Me
- LMKWYT: Let Me Know What You Think
- WDYT: What Do You Think
<file_sep>/scripts/load-test.py
import argparse
import json
import random
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
import requests
def build_binder(repo, ref="HEAD", binder_url="https://mybinder.org"):
    """Request a build of ``repo`` at ``ref`` and yield its server-sent events.

    Streams the BinderHub build endpoint and yields each ``data:`` event as a
    dict decoded from JSON.
    """
    build_url = f"{binder_url}/build/gh/{repo}/{ref}"
    response = requests.get(build_url, stream=True)
    response.raise_for_status()
    for raw_line in response.iter_lines():
        decoded = raw_line.decode("utf8", "replace")
        if decoded.startswith("data:"):
            # Everything after the first ":" is the JSON payload.
            _, _, payload = decoded.partition(":")
            yield json.loads(payload)
def launch_binder(
    n, repo, ref="HEAD", filepath=None, binder_url="https://mybinder.org", delay=10
):
    """Launch a new binder from `repo` at `ref` and exercise the session.

    Waits a random amount of time (up to `delay` seconds) before launching to
    spread out concurrent launches. If `filepath` is set, a notebook with that
    name will be fetched after launch. Returns a dict of timing measurements
    keyed by phase, with "status" set to "success" or "fail".
    """
    # Randomize the start time so concurrent workers don't all hit the
    # build endpoint at the same instant.
    delay = random.random() * delay
    time.sleep(delay)
    launched_at = time.time()
    total_bytes = 0
    for evt in build_binder(repo, ref=ref, binder_url=binder_url):
        if "message" in evt:
            pass  # progress/log events carry no phase info; ignore them
        if evt.get("phase") == "ready":
            # Build finished: the event contains the notebook server `url`
            # and an auth `token`. Exercise the running server.
            ready_at = time.time()
            s = requests.Session()
            if filepath is None:
                url = "{url}?token={token}".format(**evt)
            else:
                url = "{url}notebooks/{filepath}?token={token}".format(
                    filepath=filepath, **evt
                )
            # GET the notebook
            r = s.get(url)
            r.raise_for_status()
            notebook_at = time.time()
            total_bytes += len(r.content)
            # spawn a kernel
            url = "{url}api/sessions?token={token}".format(**evt)
            r = s.post(
                url,
                json={
                    "path": "Foobar.ipynb",
                    "type": "notebook",
                    "name": "",
                    "kernel": {"id": None, "name": "python3"},
                },
            )
            r.raise_for_status()
            kernel_at = time.time()
            total_bytes += len(r.content)
            # biggest file in the session
            url = (
                "{url}nbextensions/jupyter-js-widgets/"
                "extension.js?v=20180705212711&token={token}".format(**evt)
            )
            r = s.get(url)
            r.raise_for_status()
            widgets_at = time.time()
            total_bytes += len(r.content)
            done_at = time.time()
            # Per-phase wall-clock timestamps; the caller computes durations.
            return {
                "idx": n,
                "start": launched_at,
                "ready": ready_at,
                "kernel": kernel_at,
                "notebook": notebook_at,
                "widgets": widgets_at,
                "end": done_at,
                "total_bytes": total_bytes,
                "status": "success",
            }
    else:
        # NOTE: this is a for/else — it runs only when the event stream was
        # exhausted without ever reaching the "ready" phase (the success path
        # returns from inside the loop, so it never gets here).
        failed_at = time.time()
        print(n, "never launched.")
        return {"idx": n, "start": launched_at, "end": failed_at, "status": "fail"}
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("repo", type=str, help="The GitHub repo to build")
    parser.add_argument("--ref", default="HEAD", help="The ref of the repo to build")
    parser.add_argument("--filepath", default=None, help="The notebook to open")
    parser.add_argument(
        "--results", default="results.json", help="File name to store results in"
    )
    parser.add_argument(
        "--n-launches", default=5, help="Number of launches to perform", type=int
    )
    parser.add_argument(
        "--binder",
        default="https://mybinder.org",
        help="""
        The URL of the binder instance to use.
        Use `http://localhost:8585` if you are doing local testing.
        """,
    )
    opts = parser.parse_args()
    gun_time = time.time()
    with ThreadPoolExecutor(max_workers=100) as executor:
        results = {}
        for n in range(opts.n_launches):
            # Fix: `--binder` was parsed but never passed through, so the
            # flag silently had no effect and every run hit mybinder.org.
            job = executor.submit(
                launch_binder,
                n,
                opts.repo,
                opts.ref,
                opts.filepath,
                binder_url=opts.binder,
            )
            results[job] = n
        print("launching...")
        launches = []
        for future in as_completed(results):
            idx = results[future]
            try:
                launch = future.result()
                # save it for later
                launches.append(launch)
            except Exception as exc:
                print(f"{idx!r} generated an exception: {exc}")
            else:
                if launch["status"] == "success":
                    print(
                        "Launch %r succeeded and took %is"
                        % (idx, launch["end"] - launch["start"])
                    )
                else:
                    print(
                        "Launch %r failed and took %is"
                        % (idx, launch["end"] - launch["start"])
                    )
    # Persist raw timing data for later analysis.
    with open(opts.results, "w") as f:
        json.dump({"gun_time": gun_time, "results": launches}, f)
    import pprint

    pprint.pprint(launches)
<file_sep>/docs/source/conf.py
# Configuration file for Sphinx to build our documentation to HTML.
#
# Configuration reference: https://www.sphinx-doc.org/en/master/usage/configuration.html
#
from datetime import datetime
# -- General Sphinx configuration ---------------------------------------------------
# ref: https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
#
# Set the default role so we can use `foo` instead of ``foo``
default_role = "literal"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "myst_parser",
    "sphinx_copybutton",
    "jupyterhub_sphinx_theme",
]
# The suffix(es) of source filenames.
source_suffix = [".rst", ".md"]
# The root toctree document.
# (`master_doc` is kept as an alias for compatibility with older Sphinx.)
root_doc = master_doc = "index"
# General information about the project.
project = "Site Reliability Guide for mybinder.org"
copyright = f"2017 - {datetime.now().year}, Binder Team"
author = "Binder Team"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# -- Options for HTML output ----------------------------------------------
# ref: http://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
#
html_theme = "jupyterhub_sphinx_theme"
html_theme_options = {
    "icon_links": [
        {
            "name": "GitHub",
            "url": "https://github.com/jupyterhub/mybinder.org-deploy",
            "icon": "fa-brands fa-github",
        },
    ],
    "use_edit_page_button": True,
}
# Repository coordinates used by the theme's "edit this page" button.
html_context = {
    "github_user": "jupyterhub",
    "github_repo": "mybinder.org-deploy",
    "github_version": "HEAD",
    "doc_path": "docs/source",
}
html_static_path = ["_static"]
html_logo = "_static/images/logo.png"
html_favicon = "_static/images/favicon.ico"
linkcheck_ignore = [
r"(.*)github\.com(.*)#", # javascript based anchors
r"(.*)/#%21(.*)/(.*)", # /#!forum/jupyter - encoded anchor edge case
"https://grafana.mybinder.org", # likely no longer functional links from incident reports
"https://console.cloud.google.com", # sign-in redirect noise
"https://console.developers.google.com", # sign-in redirect noise
]
linkcheck_anchors_ignore = [
"/#!",
"/#%21",
]
<file_sep>/scripts/delete-pods.py
#!/usr/bin/env python3
"""
Delete all user pods older than a given duration.
We use the Kubernetes cluster autoscaler, which
removes nodes from the kubernetes cluster when they have
been 'empty' for more than 10 minutes. However, we
have issues where some pods get 'stuck' and never actually
die, sometimes forever. This causes nodes to not be
killed automatically.
This script makes it easier to find and delete pods that match a certain
name or age. By default, it only *finds* and lists these pods. If the `--delete` flag
is given, it will also *delete* those pods.
You need the `kubernetes` python library installed for this to work.
"""
import argparse
from datetime import datetime, timedelta, timezone
from kubernetes import client, config
# Setup our parameters
argparser = argparse.ArgumentParser()
argparser.add_argument(
    "--older-than",
    type=float,
    default=0,
    help="Pods older than this many hours will be killed (can be fractions of an hour)",
)
argparser.add_argument(
    "--delete",
    action="store_true",
    help="Confirm deleting the pods rather than just printing info",
)
argparser.add_argument(
    "--namespace", default="prod", help="Namespace to perform actions in"
)
argparser.add_argument(
    "--pod-name", default="", help="Only delete pods with `pod-name` in the pod name."
)
kube_context_help = (
    "Context pointing to the cluster to use. To list the "
    "current activated context, run `kubectl config get-contexts`"
)
argparser.add_argument(
    "--kube-context",
    default="gke_binder-prod_us-central1-a_prod-a",
    help=kube_context_help,
)
args = argparser.parse_args()

# Refuse to run with no filter at all — that would match every user pod.
# (Message fixed: the options are --pod-name / --older-than, not `pod-age`.)
if args.older_than == 0 and args.pod_name == "":
    raise ValueError("Must specify at least one of `--pod-name` or `--older-than`.")

# Load and operate on current kubernetes config
config.load_kube_config(context=args.kube_context)

# Get list of pods with given label selector in the 'prod' namespace
core_api = client.CoreV1Api()
pods = core_api.list_namespaced_pod(
    args.namespace, label_selector="component=singleuser-server"
)

# Decide the summary wording up front so it is always defined — previously it
# was only assigned inside the loop, raising NameError when no pod matched.
summary_text = "Deleted {} pods" if args.delete else "Found {} pods"

total_pods = []
for pod in pods.items:
    # API results always use UTC timezone
    age = datetime.now(timezone.utc) - pod.status.start_time.replace(
        tzinfo=timezone.utc
    )
    if age <= timedelta(hours=args.older_than):
        continue
    # If pod-name isn't specified, "" is a substring of every name, so this
    # check passes for all pods.
    if args.pod_name not in pod.metadata.name:
        continue
    if args.delete:
        core_api.delete_namespaced_pod(
            pod.metadata.name, args.namespace, client.V1DeleteOptions()
        )
        print(
            f"Deleted {age.total_seconds() / 60 / 60:.1f}h old pod {pod.metadata.name}"
        )
    else:
        print(
            f"Found {age.total_seconds() / 60 / 60:.1f}h old pod {pod.metadata.name}"
        )
    total_pods.append(pod.metadata.name)

print("---", "\n", summary_text.format(len(total_pods)))
<file_sep>/docs/source/incident-reports/2018-03-31-server-start-fail.md
# 2018-03-31, Server launch failures
## Summary
After a few days of general sub-optimal stability and some strange networking errors, a node was deleted. This caused a more general outage that was only solved by totally recycling all nodes.
[link to Gitter incident start](https://gitter.im/jupyterhub/binder?at=5ac010032b9dfdbc3a421980)
## Timeline
All times in PST
### 2018-03-31 15:47
Problem is identified
- Launch success rate drops quickly
- Many pods stuck in "Terminating" and "ContainerCreating" state.
- Hub pod is showing many timeout errors.
### 16:11
- Mount errors on build pods:
```
Events:
Type Reason Age From Message
---- ------ ---- ---- -------
Normal Scheduled 5m default-scheduler Successfully assigned build-devvyn-2daafc-2dfield-2ddata-6e8479-c1cecc to gke-prod-a-ssd-pool-32-134a959a-p2kz
Normal SuccessfulMountVolume 5m kubelet, gke-prod-a-ssd-pool-32-134a959a-p2kz MountVolume.SetUp succeeded for volume "docker-socket"
Warning FailedMount 4m (x8 over 5m) kubelet, gke-prod-a-ssd-pool-32-134a959a-p2kz MountVolume.SetUp failed for volume "docker-push-secret" : mkdir /var/lib/kubelet/pods/e8d31d4e-3537-11e8-88bf-42010a800059: read-only file system
Warning FailedMount 4m (x8 over 5m) kubelet, gke-prod-a-ssd-pool-32-134a959a-p2kz MountVolume.SetUp failed for volume "default-token-ftskg" : mkdir /var/lib/kubelet/pods/e8d31d4e-3537-11e8-88bf-42010a800059: read-only file system
Warning FailedSync 55s (x24 over 5m) kubelet, gke-prod-a-ssd-pool-32-134a959a-p2kz Error syncing pod
```
### 17:21
- Decided to increase cluster size to 4, wait for new nodes to come up, then cordon the two older nodes
### 17:30
- New nodes are up, old nodes are drained
- Hub / binder pods show up on new nodes
- Launch success rate begins increasing
- Launch rate goes back to 100%
## Lessons learnt
### What went well
1. The problem was eventually resolved
### What went wrong
1. It was difficult to debug this problem as there was no obvious error message, and the person solving the problem wasn't sure how to debug.
## Action items
### Investigation
The outage seemed to come from the deletion of a node, but it seemed to be related to other pre-existing nodes as well. Perhaps this is a general thing that happens when nodes become too old?
### What went wrong
- There was a major outage that we were unable to debug, there were not clear errors in the logs
- There was only one person available to debug, which made it more difficult to know how to proceed without any feedback
### Process improvements
1. Improve the alerting so that a majority of the team is notified when there's an outage. (currently blocking on [#365](https://github.com/jupyterhub/mybinder.org-deploy/issues/365))
2. Come up with team guidelines for how "stale" a node can become before we intentionally recycle it. ([#528](https://github.com/jupyterhub/mybinder.org-deploy/issues/528))
### Documentation improvements
1. Document how to "recycle" nodes properly ([#528](https://github.com/jupyterhub/mybinder.org-deploy/issues/528))
<file_sep>/docs/requirements.txt
jupyterhub-sphinx-theme
myst-parser
sphinx_copybutton
<file_sep>/scripts/delete-old-images.py
#!/usr/bin/env python3
"""
Cleanup images that don't match the current image prefix.
Currently deletes all images that don't match the current prefix,
as well as old builds of the binderhub-ci-repos.
Requires aiohttp and tqdm:
python3 -m pip install aiohttp aiodns tqdm aiohttp-client-cache aiosqlite
Usage:
./scripts/delete-old-images.py [staging|prod] [--dry-run]
"""
import asyncio
import os
import time
from collections import defaultdict
from datetime import datetime, timedelta
from functools import partial
from pprint import pformat
import aiohttp
import pandas as pd
import tqdm
import tqdm.asyncio
import yaml
from aiohttp_client_cache import CachedSession, SQLiteBackend
from dateutil.parser import parse as parse_date
HERE = os.path.dirname(__file__)  # directory containing this script

# delete builds used for CI, as well
# (the second entry is presumably the escaped "-2d" form of the first —
# TODO confirm against the image naming scheme)
CI_STRINGS = ["binderhub-ci-repos-", "binderhub-2dci-2drepos-"]

# don't delete images that *don't* appear to be r2d builds
# (image repository could have other images in it!)
R2D_STRINGS = ["r2d-"]

# Reference times: FIVE_YEARS_AGO/TOMORROW bound the timestamp sanity check,
# LAST_WEEK guards the "only delete excess builds older than a week" rule.
TODAY = datetime.now()
FIVE_YEARS_AGO = TODAY - timedelta(days=5 * 365)
TOMORROW = TODAY + timedelta(days=1)
LAST_WEEK = TODAY - timedelta(days=7)
class RequestFailed(Exception):
    """Nicely formatted error for failed registry requests.

    Carries the HTTP status code, request method and url, the response
    content, and an optional `action` describing what was being attempted.
    """

    def __init__(self, code, method, url, content, action=""):
        self.action = action
        self.method = method
        self.url = url
        self.code = code
        self.content = content

    def __str__(self):
        return (
            f"{self.action} {self.method} {self.url}"
            f" failed with {self.code}:\n  {self.content}"
        )
async def raise_for_status(r, action="", allowed_errors=None):
    """Raise RequestFailed with useful context if the response is an error.

    Responses with status < 400, or whose status appears in
    `allowed_errors`, are accepted silently.
    """
    status = r.status
    if status < 400:
        return
    if allowed_errors and status in allowed_errors:
        return
    if r.headers.get("Content-Type") == "application/json":
        # try to parse json error messages
        payload = await r.json()
        if isinstance(payload, dict) and "errors" in payload:
            content = "\n".join(
                f"{error.get('code')}: {error.get('message')}"
                for error in payload["errors"]
            )
        else:
            content = pformat(payload)
    else:
        content = await r.text()
    raise RequestFailed(status, r.request_info.method, r.request_info.url, content)
def list_images(session, image_prefix):
    """Return an async iterator of image names for the given prefix.

    A prefix containing a single "/" (e.g. "user/name") is assumed to live
    on Docker Hub, which has no catalog endpoint; a prefix with two "/"
    (e.g. "host/project/name") is assumed to be a registry with a /v2
    catalog.  Any other shape returns None, matching the original behavior.
    """
    slashes = image_prefix.count("/")
    first_component = image_prefix.split("/", 1)[0]
    if slashes == 1:
        # docker hub, can't use catalog endpoint
        return list_images_docker_hub(session, first_component)
    if slashes == 2:
        return list_images_catalog(session, f"https://{first_component}")
async def list_images_docker_hub(session, docker_hub_user):
    """Yield "user/name" image identifiers for a Docker Hub user.

    Follows the paginated repositories API until no "next" page remains.
    """
    url = f"https://hub.docker.com/v2/repositories/{docker_hub_user}/?page_size=100"
    while url:
        async with session.get(url) as r:
            await raise_for_status(r, "listing images")
            page = await r.json()
            # filter-out not our images??
            for image in page["results"]:
                yield f"{image['user']}/{image['name']}"
            url = page.get("next")
async def list_images_catalog(session, registry_host):
    """Yield repository names from a registry's /v2/_catalog endpoint.

    Pagination may be signalled either via a "next" key in the JSON body or
    via an HTTP Link header; both are honored.
    """
    url = f"{registry_host}/v2/_catalog"
    while url:
        async with session.get(url) as r:
            await raise_for_status(r, "listing images")
            body = await r.json()
            # filter-out not our images??
            for repo in body["repositories"]:
                yield repo
            if "next" in body:
                url = body["next"]
            elif "next" in r.links:
                url = r.links["next"]["url"]
            else:
                url = None
async def get_manifest(session, image):
    """Fetch the tag/manifest listing for an image from gcr.io.

    Note: the gcr.io host is hard-coded here, so this only works against
    Google Container Registry.

    Returns the parsed JSON, whose "manifest" entry has the form::

        {
            'sha:digest': {
                'imageSizeBytes': '123',
                'tag': ['tags'],
                ...
            },
        }
    """
    async with session.get(f"https://gcr.io/v2/{image}/tags/list") as r:
        await raise_for_status(r, f"Getting tags for {image}")
        return await r.json()
async def delete_image(session, image, digest, tags, dry_run=False):
    """Delete a single image build from the gcr.io registry.

    Tags must be removed before deleting the actual manifest — the registry
    refuses to delete a manifest that still has tags pointing at it.

    Args:
        session: aiohttp client session authenticated against the registry.
        image: image repository name.
        digest: "sha256:..." digest of the manifest to delete.
        tags: tag names currently pointing at the digest.
        dry_run: if True, do nothing at all (no requests are made).
    """
    if dry_run:
        # Dry runs are a strict no-op: the caller still counts this build in
        # its "to be deleted" totals, but no registry request is performed.
        # (Previously this branch assigned fetch/verb for a GET-based
        # "Checking" pass before returning — unreachable dead code, removed.)
        return

    verb = "Deleting"
    fetch = session.delete
    manifests = f"https://gcr.io/v2/{image}/manifests"
    # delete tags first (required)
    for tag in tags:
        async with fetch(f"{manifests}/{tag}") as r:
            # allow 404 because previous delete may have been cached
            await raise_for_status(r, f"{verb} tag {image}@{tag}", allowed_errors=[404])
    # this is the actual deletion
    async with fetch(f"{manifests}/{digest}") as r:
        # allow 404 because previous delete may have been cached
        await raise_for_status(
            r, f"{verb} image {image}@{digest}", allowed_errors=[404]
        )
async def main(
    release="staging",
    project=None,
    concurrency=20,
    delete_before=None,
    dry_run=True,
    max_builds=None,
):
    """Inventory (and optionally delete) images for the given release.

    Reads the image prefix from ``config/<release>.yaml`` and registry
    credentials from ``secrets/config/<release>.yaml``, lists all image
    repositories, then:

    * deletes entire repositories that do not match the current prefix
      (or that match CI_STRINGS),
    * deletes individual builds according to ``delete_before`` and
      ``max_builds`` (see ``should_delete_tag`` below — its semantics are
      flagged for review there).

    With ``dry_run=True`` (the default) nothing is deleted.  A pickled
    pandas DataFrame of everything seen is written via ``save_stats``.
    """
    if dry_run:
        print("THIS IS A DRY RUN. NO IMAGES WILL BE DELETED.")
        to_be = "to be "
    else:
        to_be = ""
    if delete_before:
        # docker uses millisecond integer timestamps
        delete_before_ms = int(delete_before.timestamp()) * 1e3
    else:
        # no cutoff given: +inf so every build is "older than" the cutoff
        delete_before_ms = float("inf")
    last_week_ms = int(LAST_WEEK.timestamp()) * 1e3
    if not project:
        project = "binderhub-288415"

    # image prefix comes from the public deployment config
    with open(os.path.join(HERE, os.pardir, "config", release + ".yaml")) as f:
        config = yaml.safe_load(f)
    prefix = config["binderhub"]["config"]["BinderHub"]["image_prefix"]
    # registry credentials come from the secret config
    with open(
        os.path.join(HERE, os.pardir, "secrets", "config", release + ".yaml")
    ) as f:
        config = yaml.safe_load(f)
    password = config["binderhub"]["registry"]["password"]
    username = config["binderhub"]["registry"].get("username", "_json_key")

    start = time.perf_counter()
    # one semaphore per wrapped coroutine function (see bounded below)
    semaphores = defaultdict(lambda: asyncio.BoundedSemaphore(concurrency))

    async def bounded(f, *args, **kwargs):
        """make an async call, bounding the concurrent calls with a semaphore

        Limits the number of outstanding calls of any given function to `concurrency`.
        Too many concurrent requests results in timeouts
        since the timeout starts when the Python code is called,
        not when the request actually initiates.

        The concurrency limit is *per function*,
        so with concurrency=20, there can be 20 outstanding calls to get_manifest
        *and* 20 outstanding calls to delete_image.
        This avoids the two separate queues contending with each other for slots.
        """
        async with semaphores[f]:
            return await f(*args, **kwargs)

    # TODO: basic auth is only sufficient for gcr
    # need to request a token for non-gcr endpoints (ovh, turing on docker hub)
    # e.g.
    auth_kwargs = {}
    print(prefix)
    if prefix.startswith("gcr.io"):
        auth_kwargs["auth"] = aiohttp.BasicAuth(username, password)
    else:
        # get bearer token
        if prefix.count("/") == 2:
            # ovh
            registry_host = prefix.split("/", 1)[0]
            token_url = f"https://{registry_host}/service/token?service=harbor-registry&scope=registry:catalog:*"
        else:
            # turing
            raise NotImplementedError("Can't get docker hub creds yet")
        async with aiohttp.ClientSession(
            auth=aiohttp.BasicAuth(username, password)
        ) as session:
            response = await session.get(token_url)
            token_info = await response.json()
        auth_kwargs["headers"] = {"Authorization": f"Bearer {token_info['token']}"}

    # CachedSession persists responses for 72h so repeated (dry) runs are fast
    async with CachedSession(
        connector=aiohttp.TCPConnector(limit=2 * concurrency),
        cache=SQLiteBackend(expire_after=72 * 3600),
        **auth_kwargs,
    ) as session:
        print("Fetching images")
        tag_futures = []
        repos_to_keep = 0
        repos_to_delete = 0

        def should_delete_repository(image):
            """Whether we should delete the whole repository"""
            if f"gcr.io/{image}".startswith(prefix) and not any(
                ci_string in image for ci_string in CI_STRINGS
            ):
                return False
            else:
                return True

        def should_fetch_repository(image):
            # Whether we need to fetch this repo's tag listing at all.
            if not any(substring in image for substring in R2D_STRINGS):
                # ignore non-r2d builds
                return False
            if delete_before or should_delete_repository(image):
                # if delete_before, we are deleting old builds of images we are keeping,
                # otherwise, only delete builds that don't match our image prefix
                return True
            else:
                return False

        async for image in tqdm.asyncio.tqdm(
            list_images(session, prefix),
            unit_scale=True,
            desc="listing images",
        ):
            if should_fetch_repository(image):
                if should_delete_repository(image):
                    repos_to_delete += 1
                else:
                    repos_to_keep += 1
                tag_futures.append(
                    asyncio.ensure_future(bounded(get_manifest, session, image))
                )
            else:
                repos_to_keep += 1

        # Safety valve: if nothing matches the prefix, something is wrong
        # with the config and we would be about to delete everything.
        if not repos_to_keep:
            raise RuntimeError(
                f"No images matching prefix {prefix}. Would delete all images!"
            )
        print(f"Not deleting {repos_to_keep} repos starting with {prefix}")
        if not tag_futures:
            print("Nothing to delete")
            return
        print(f"{len(tag_futures)} repos to consider for deletion (not counting tags)")

        delete_futures = set()
        done = set()

        print("Fetching tags")
        delete_progress = tqdm.tqdm(
            total=repos_to_delete,
            position=2,
            unit_scale=True,
            desc=f"builds {to_be}deleted",
        )
        delete_byte_progress = tqdm.tqdm(
            total=0,
            position=3,
            unit="B",
            unit_scale=True,
            desc=f"bytes {to_be}deleted",
        )

        def should_delete_tag(image, info, tag_index):
            # Decide whether one build (digest) should be deleted.
            # tag_index is the position in all_digests, which is sorted by
            # upload time ascending (index 0 = oldest build).
            if should_delete_repository(image):
                return True
            if not delete_before and not (max_builds and tag_index < max_builds):
                # no date cutoff
                return False
            # NOTE(review): when delete_before is unset, delete_before_ms is
            # +inf, so any build reaching the cutoff check below is deleted;
            # combined with the oldest-first ordering, the guard above means
            # the *oldest* max_builds builds get deleted and the rest kept,
            # which does not match the --max-builds help text ("only the
            # newest MAX_BUILDS are kept"). TODO confirm intended semantics.
            # check cutoff
            image_ms = int(info["timeUploadedMs"])
            image_datetime = datetime.fromtimestamp(image_ms / 1e3)
            # sanity check timestamps
            if image_datetime < FIVE_YEARS_AGO or image_datetime > TOMORROW:
                raise RuntimeError(
                    f"Not deleting image with weird date: {image}, {info}, {image_datetime}"
                )
            if delete_before_ms > image_ms:
                # delete images older than cutoff
                return True
            if max_builds and tag_index >= max_builds and last_week_ms > image_ms:
                # limit to max_builds,
                # but only delete excess builds older than 1 week
                return True
            return False

        def save_stats():
            # Snapshot every (image, digest) row seen so far to a pickled
            # DataFrame, so the inventory survives interruption (also called
            # from the `finally` block below).
            df = pd.DataFrame(
                rows,
                columns=[
                    "image",
                    "digest",
                    "tags",
                    "size",
                    "date",
                ],
            )
            today = datetime.today()
            fname = f"registry-{release}-{today.strftime('%Y-%m-%d')}.pkl"
            df.to_pickle(fname)

        # one tuple per (image, digest); consumed by save_stats above
        rows = []
        try:
            for f in tqdm.tqdm(
                asyncio.as_completed(tag_futures),
                total=len(tag_futures),
                position=1,
                desc="repos retrieved",
            ):
                manifest = await f
                image = manifest["name"]
                delete_whole_repo = should_delete_repository(image)
                if delete_whole_repo and len(manifest["manifest"]) > 1:
                    # repo was counted once; count its extra builds too
                    delete_progress.total += len(manifest["manifest"]) - 1
                all_digests = sorted(
                    manifest["manifest"].items(),
                    key=lambda digest_info: int(digest_info[1]["timeUploadedMs"]),
                )
                for order, (digest, info) in enumerate(all_digests):
                    image_ms = int(info["timeUploadedMs"])
                    image_datetime = datetime.fromtimestamp(image_ms / 1e3)
                    nbytes = int(info["imageSizeBytes"])
                    rows.append(
                        (
                            image,
                            digest,
                            ",".join(info["tag"]),
                            nbytes,
                            image_datetime,
                        )
                    )
                    # checkpoint the stats file every 100 rows
                    if len(rows) % 100 == 0:
                        save_stats()
                    if not should_delete_tag(image, info, order):
                        continue
                    if not delete_whole_repo:
                        # not counted yet
                        delete_progress.total += 1
                    delete_byte_progress.total += nbytes
                    f = asyncio.ensure_future(
                        bounded(
                            delete_image,
                            session,
                            image,
                            digest,
                            info["tag"],
                            dry_run=dry_run,
                        )
                    )
                    delete_futures.add(f)
                    # update progress when done
                    f.add_done_callback(lambda f: delete_progress.update(1))
                    f.add_done_callback(
                        partial(
                            lambda nbytes, f: delete_byte_progress.update(nbytes),
                            nbytes,
                        )
                    )
                # reap finished deletions without blocking (timeout=0)
                if delete_futures:
                    done, delete_futures = await asyncio.wait(delete_futures, timeout=0)
                    if done:
                        # collect possible errors
                        await asyncio.gather(*done)
            # wait for the remaining deletions to finish
            if delete_futures:
                await asyncio.gather(*delete_futures)
        finally:
            save_stats()
            delete_progress.close()
            delete_byte_progress.close()

    print("\n\n\n\n")
    print(f"{to_be}deleted {delete_progress.n} images")
    print(
        f"{to_be}deleted {delete_byte_progress.n} bytes (not counting shared layers)"
    )
    print(f"in {int(time.perf_counter() - start)} seconds")
if __name__ == "__main__":
    import argparse

    # Command-line entry point: parse options and hand off to main().
    cli = argparse.ArgumentParser(description=__doc__)
    cli.add_argument(
        "release",
        type=str,
        nargs="?",
        default="staging",
        help="The release whose images should be cleaned up (staging or prod)",
    )
    cli.add_argument(
        "--project",
        type=str,
        default="",
        help="The gcloud project to use; only needed if not of the form `binder-{release}`.",
    )
    cli.add_argument(
        "--delete-before",
        type=lambda s: s and parse_date(s),
        default="",
        help="Delete any images older than this date. If unspecified, do not use date cutoff.",
    )
    cli.add_argument(
        "-j",
        "--concurrency",
        type=int,
        default=20,
        help="The number of concurrent requests to make. "
        "Too high and there may be timeouts. Default is 20.",
    )
    cli.add_argument(
        "--max-builds",
        type=int,
        default=10,
        help="""
        The maximum number of builds to keep for a given repo.
        If there are more than this many builds, only the newest MAX_BUILDS are kept.
        """,
    )
    cli.add_argument(
        "--dry-run",
        action="store_true",
        help="Do a dry-run (no images will be deleted)",
    )
    args = cli.parse_args()
    asyncio.run(
        main(
            args.release,
            args.project,
            args.concurrency,
            delete_before=args.delete_before,
            dry_run=args.dry_run,
            max_builds=args.max_builds,
        )
    )
<file_sep>/docs/source/incident-reports/2017-09-27-hub-403.md
# 2017-09-27, Hub 403
## Summary
After a deployment, most users were getting a '403 Forbidden' when attempting to
launch their image. This was caused by a small bug that manifests only on
multi-node kubernetes clusters. The bug was identified and fixed, but due to
logistical issues it caused beta to be unusable for about 1h50m.
## Timeline
All times in PST
### Sep 27 2017 13:06
A lot of changes related to
[launching the user servers with hub api](https://github.com/jupyterhub/binderhub/issues/94)
are deployed to staging. Testing caused a '403 Forbidden' on the first try, but
worked on second try. This was attributed to the mysterious 'oh must have been
stale cookies' reason, and not counted as a problem. Deployment proceeds.
### 13:35
Deployment to beta is complete! '403 Forbidden' recurs once, but works the
second time - again attributed to 'stale cookies'. Some cluster maintenance work
is then begun:
1. Make sure cluster nodes have SSDs
2. Make sure cluster nodes have large SSDs (500G, not 100G) for better
performance, since on Google Cloud disk performance scales with disk size.
These take a while. It also looked like two of the three nodes present were
having disk trouble, so it seemed a good time to roll 'em over (without doing
too much investigation).
### 13:44
'403 Forbidden' reported by others, and it does not clear on second try. All
nodes now are new, so this is unrelated to nodes having issues. Within a few
minutes, it is clear that the '403 Forbidden' does not go away after first use,
and we should consider beta as 'broken' now.
### 14:08
Digging into logs and attempts to reproduce this yield the following
conclusions:
1. It works when the server starts up within 10s (thus not triggering the
'server is slow to spawn' message in hub logs)
2. It leads to 403 when the server takes longer than 10s.
A quick look at the code made the problem clear:
1. If the spawn takes more than 10s, the hub returns a `202` status code, and we
then have to poll to see when our server has started.
2. In binderhub, we were not doing this polling. We instead prematurely redirect
to the hub, which freaks out at the non-running server and gives us the 403.
This is because we
use [nullauthenticator](https://github.com/jupyterhub/nullauthenticator) for
the hub, whose sole job is to 403 every user not already logged in somehow!
This diagnosis was a little confused by the following facts:
1. We also upgraded from JupyterHub 0.7 to 0.8 with this deployment, and decided
to just wipe out the user db than upgrade it. This caused a lot of messages
of the following form:
```
[W 2017-09-28 16:20:51.785 JupyterHub base:350] Failed login for unknown user
[W 2017-09-28 16:20:51.787 JupyterHub log:122] 403 GET /hub/login?next=%2Fhub%2Fuser%2F13b14ca5-5c6b-4340-8157-651ad95a2739%2Fapi%2Fcontents%2Fnotebooks%2Fxwidgets.ipynb%3Fcontent%3D0%26_%3D1506507345627 (@10.100.16.12) 2.00ms
```
These were red herrings, just failures for people who were unceremoniously
booted off during the deploy.
2. In cluster, most images had already been locally cached. So some images would
launch under 10s, hence not causing the 403. This made tracking down cause a
little harder than if it failed consistently.
3. This would not be reproducible on minikube without artificial constraints,
since it does not use a registry & the build host is always the same as the
run host.
Once it was clear what the issue was, the fix seemed obvious enough.
### 15:11
A [PR](https://github.com/jupyterhub/binderhub/pull/130) is sent up with the
fix. It isn't as configurable as it should be, but should work. This is delayed
by the fact that the deployer's local minikube had run out of space & needed to
be reset.
### 15:25
The PR has been merged and deployed, bringing the outage to an end.
## Conclusion
This was caused by two factors:
1. Local development environment being slightly different from production
environment (single node vs multi node)
2. Testing in staging not thorough enough
Debugging this was complicated by the fact we changed a lot of things at the
same time (JupyterHub version, binder code, etc). This was a big breaking change
that required us to kick users out. We should try to not do those.
## Action Items
### BinderHub
1. Handle errors on launching servers on the hub more gracefully.
[Issue](https://github.com/jupyterhub/binderhub/issues/131)
### Deployment
1. Write end-to-end tests that help validate that a deployment was successful,
and make sure they are part of marking a deployment green or red.
[Issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/10)
2. Write down the responsibilities of the people doing the deployment.
Specifically include the lesson of 'never ignore any errors in staging, they
will always bite you in production' prominently.
[Issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/11)
3. Do deployments more often so we don't end up doing one big massive deployment
ever. Those will always cause problems. Easier deployments with better
tooling will help this. [Issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/8)
### Local development
1. Consider figuring out an easy way for people to run a development environment
that is closer to production (multi-node, image registry, etc) than minikube.
[Issue](https://github.com/jupyterhub/binderhub/issues/137)
<file_sep>/docs/source/analytics/index.rst
Analytics
---------
A public events archive with data about daily Binder launches.
.. toctree::
:maxdepth: 2
events-archive
cloud-costs
<file_sep>/images/federation-redirect/app.py
import asyncio
import datetime
import json
import math
import os
import random
import sys
from hashlib import blake2b
import prometheus_client
import tornado
import tornado.ioloop
import tornado.options
import tornado.web
from prometheus_client import Gauge
from tornado import options
from tornado.httpclient import AsyncHTTPClient, HTTPError, HTTPRequest
from tornado.httputil import HTTPHeaders
from tornado.ioloop import IOLoop
from tornado.log import app_log
from tornado.web import RequestHandler
# Config for local testing
#
# In production these defaults are overlaid with the JSON file at
# CONFIG_PATH (see get_config); keys missing from that file fall back to
# the values below.
CONFIG = {
    # Health-check tuning knobs. Exact semantics live in the checker code,
    # which is not in this section — presumably period/failed_period are
    # seconds and timeout is a per-request limit; TODO confirm.
    "check": {
        "period": 10,
        "jitter": 0.1,
        "retries": 5,
        "timeout": 2,
        "failed_period": 300,
    },
    "load_balancer": "rendezvous",  # or "random"
    "pod_headroom": 10,  # number of available slots to consider a member 'available'
    # lifetime of the sticky "host" cookie set by RedirectHandler
    "host_cookie_age_hours": 4,
    # member name -> {"url": ..., "weight": ..., "prime": ...}; filled from
    # the deployed config file
    "hosts": {},
}
def get_config(config_path):
    """Load the federation config from ``config_path``, merged over defaults.

    The JSON file is layered on top of the module-level CONFIG; missing
    "check" sub-keys are filled in from the defaults.  Host entries are
    then sanitised:

    * ``null`` entries are dropped (helm templates can't remove keys, only
      set them to null),
    * entries with weight 0 are dropped,
    * trailing slashes are stripped from host urls (they cause 404s after
      redirection).

    Returns the merged, sanitised config dict.
    """
    app_log.info(f"Using config from '{config_path}'.")
    config = CONFIG.copy()
    with open(config_path) as f:
        config.update(json.load(f))
    # merge default config
    for key, value in CONFIG["check"].items():
        config["check"].setdefault(key, value)
    # Turn the keys into a list so we can modify the dict while iterating.
    for h in list(config["hosts"].keys()):
        if config["hosts"][h] is None:
            # Remove empty entries from CONFIG["hosts"]; these can happen
            # because we can't remove keys in our helm templates/config
            # files — all we can do is set them to Null/None.
            config["hosts"].pop(h)
        # remove zero-weight entries
        # (must be `elif`: the previous branch pops the key, so a plain `if`
        # here would re-look-up the popped key and raise KeyError)
        elif config["hosts"][h].get("weight", 0) == 0:
            app_log.warning(f"Removing host {h} with 0 weight")
            config["hosts"].pop(h)
        else:
            # remove trailing slashes in host urls
            # these can cause 404 after redirection (RedirectHandler) and we
            # don't realize it
            config["hosts"][h]["url"] = config["hosts"][h]["url"].rstrip("/")
    return config
# Path where the deployment provides the real federation config (presumably
# mounted from a ConfigMap/secret — TODO confirm). When it is absent, e.g.
# in local development, the in-module defaults above are used unchanged.
CONFIG_PATH = "/etc/federation-redirect/config.json"
if os.path.exists(CONFIG_PATH):
    CONFIG = get_config(CONFIG_PATH)
else:
    app_log.warning("Using default config!")
class FailedCheck(Exception):
    """Raised when a federation member fails a health check.

    A check failing with this exception is treated as a hard failure and
    is not retried until the next check interval.

    Attributes:
        msg: the failure message.
        reason: identifier for the failed check, as supplied by the raiser.
    """

    def __init__(self, msg, reason):
        self.msg, self.reason = msg, reason
def blake2b_hash_as_int(b):
    """Hash the bytes `b` with BLAKE2b and return an unsigned 64-bit integer."""
    digest = blake2b(b, digest_size=8).digest()
    return int.from_bytes(digest, "big")


def rendezvous_rank(buckets, key):
    """Order buckets for `key` using rendezvous (highest-random-weight) hashing.

    `buckets` is an iterable of (bucket, weight) pairs.  Every bucket is
    scored against the key and the full list of buckets is returned,
    highest-scoring first.
    """

    def score(bucket, weight):
        # The particular hash function doesn't matter a lot, as long as it
        # maps the key to a fixed sized value and distributes the keys
        # uniformly across the output space.
        h = blake2b_hash_as_int(b"%s-%s" % (str(key).encode(), str(bucket).encode()))
        return weight * (1.0 / -math.log(h / 0xFFFFFFFFFFFFFFFF))

    scored = [(score(bucket, weight), bucket) for bucket, weight in buckets]
    scored.sort(reverse=True)
    return [bucket for _, bucket in scored]
def cache_key(uri):
    """Return the key used for load-balancing decisions for this request URI."""
    lowered = uri.lower()
    if not lowered.startswith("/build/gh"):
        return lowered
    # Strip the trailing branch/tag/reference so that every build of a given
    # repo maps to the same key, and hence the same target member.
    head, _, _ = lowered.rpartition("/")
    return head
# metrics
#
# Prometheus gauges exposed in text format by MetricsHandler. Label values
# are filled in by the health-check code (HEALTH, HEALTH_CHECK) and by
# RedirectHandler.get (REDIRECTS, where reason is "cookie" or
# "load_balancer").
HEALTH = Gauge(
    "federation_health",
    "Overall health check status for each member: 1 = healthy, 0 = unhealthy."
    " 'member' is the federation member."
    " 'reason' is the check that failed, if unhealthy.",
    ["member", "reason"],
)
HEALTH_CHECK = Gauge(
    "federation_health_check",
    "Individual health check status for each member: 1 = healthy, 0 = unhealthy."
    " 'member' is the federation member."
    " 'check' is the name of the check.",
    ["member", "check"],
)
REDIRECTS = Gauge(
    "federation_redirect_count",
    "Number of requests routed to each member." " 'member' is the federation member.",
    ["member", "reason"],
)
class MetricsHandler(RequestHandler):
    """Serve the current prometheus metrics in text exposition format."""

    async def get(self):
        self.set_header("Content-Type", prometheus_client.CONTENT_TYPE_LATEST)
        self.write(prometheus_client.generate_latest(prometheus_client.REGISTRY))
class ProxyHandler(RequestHandler):
    """Proxy an incoming request to a fixed backend host.

    The backend base URL is supplied via `initialize` (tornado handler
    kwargs); the incoming URI is appended to it and the response is
    streamed back to the client, minus hop-by-hop headers.
    """

    def initialize(self, host):
        # base URL of the backend this handler forwards to
        self.host = host

    async def get(self):
        uri = self.request.uri
        target_url = self.host + uri
        headers = self.request.headers.copy()
        # don't override Host, which kubernetes uses for routing,
        # otherwise this will be an infinite loop proxying to ourself
        # Host will be set to the target Host by the request below.
        current_host = headers.pop("Host", None)
        if current_host:
            # set proxy host header
            headers.setdefault("X-Forwarded-Host", current_host)
            # preserve original Host in Origin so this looks like a cross-origin request
            # even though it's proxied
            headers["Origin"] = f"https://{current_host}"
        headers["X-Binder-Launch-Host"] = "https://mybinder.org/"
        body = self.request.body
        if not body:
            # NOTE(review): this POST special-case looks dead — this is the
            # GET handler, and the outgoing request below is hard-coded to
            # method="GET". TODO confirm whether other verbs were intended.
            if self.request.method == "POST":
                body = b""
            else:
                body = None
        client = AsyncHTTPClient()
        req = HTTPRequest(target_url, method="GET", body=body, headers=headers)
        response = await client.fetch(req, raise_error=False)
        # For all non http errors...
        if response.error and not isinstance(response.error, HTTPError):
            self.set_status(500)
            self.write(str(response.error))
        else:
            self.set_status(response.code, response.reason)
            # clear tornado default header
            self._headers = HTTPHeaders()
            for header, v in response.headers.get_all():
                if header not in (
                    "Content-Length",
                    "Transfer-Encoding",
                    "Content-Encoding",
                    "Connection",
                ):
                    # some headers appear multiple times, eg 'Set-Cookie'
                    self.add_header(header, v)
            if response.body:
                self.write(response.body)
class RedirectHandler(RequestHandler):
    """Pick a healthy federation member for a launch request and redirect to it.

    The chosen member is pinned with a ``host`` cookie so subsequent requests
    from the same browser keep landing on the same cluster.
    """

    def initialize(self, load_balancer):
        # Strategy name: "rendezvous" for rendezvous hashing; anything else
        # falls back to weighted random choice.
        self.load_balancer = load_balancer

    def prepare(self):
        """Snapshot the currently-healthy hosts before handling the request."""
        # copy hosts config in case it changes while we are iterating over it
        hosts = dict(self.settings["hosts"])  # make a copy
        if not hosts:
            # no healthy hosts, allow routing to unhealthy 'prime' host only
            hosts = {
                key: host for key, host in CONFIG["hosts"].items() if host.get("prime")
            }
            app_log.warning(
                f"Using unhealthy prime host(s) {list(hosts)} because zero hosts are healthy"
            )
        self.hosts = hosts
        self.hosts_by_url = {}  # dict of {"https://gke.mybinder.org": "gke"}
        self.host_names = []  # ordered list of ["gke"]
        self.host_weights = []  # ordered list of numerical weights
        for name, host_cfg in hosts.items():
            # hosts with weight 0 are excluded from load balancing entirely
            if host_cfg["weight"] > 0:
                self.host_names.append(name)
                self.host_weights.append(host_cfg["weight"])
                self.hosts_by_url[host_cfg["url"]] = name

        # Combine hostnames and weights into one list
        self.names_and_weights = list(zip(self.host_names, self.host_weights))

    def set_default_headers(self):
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header("Access-control-allow-headers", "cache-control")

    async def get(self):
        path = self.request.path
        uri = self.request.uri

        host_url = self.get_cookie("host")
        # make sure the host is a valid choice and considered healthy
        host_name = self.hosts_by_url.get(host_url)
        if host_name is None:
            if self.load_balancer == "rendezvous":
                host_name = rendezvous_rank(self.names_and_weights, cache_key(path))[0]
            # "random" is our default or fall-back
            else:
                # FIX: this previously read `self.host_keys`, an attribute
                # that is never defined (prepare() sets `self.host_names`),
                # so the random load balancer raised AttributeError.
                host_name = random.choices(self.host_names, self.host_weights)[0]
            host_url = self.hosts[host_name]["url"]
            reason = "load_balancer"
        else:
            reason = "cookie"

        REDIRECTS.labels(member=host_name, reason=reason).inc()

        self.set_cookie(
            "host",
            host_url,
            path=path,
            expires=datetime.datetime.now(datetime.timezone.utc)
            + datetime.timedelta(hours=CONFIG["host_cookie_age_hours"]),
        )
        # do we sometimes want to add this url param? Not for build urls, at least
        # redirect = url_concat(host_url + uri, {'binder_launch_host': 'https://mybinder.org/'})
        redirect = host_url + uri
        app_log.info(f"Redirecting {path} to {host_url}")
        self.redirect(redirect, status=307)
class ActiveHostsHandler(RequestHandler):
    """Serve information about active hosts"""

    def initialize(self, active_hosts):
        # Shared dict of currently-healthy hosts, maintained by health_check.
        self.active_hosts = active_hosts

    async def get(self):
        payload = {"active_hosts": self.active_hosts}
        self.write(payload)
async def health_check(host, active_hosts):
    """Probe one federation member and update its place in the rotation.

    Fetches the member's ``versions`` endpoint (compared against the prime
    cluster's versions) and its ``health`` endpoint, publishes per-check
    prometheus gauges, and adds/removes the host from ``active_hosts``.
    Always reschedules itself on the IOLoop when done.
    """
    check_config = CONFIG["check"]
    all_hosts = CONFIG["hosts"]
    app_log.info(f"Checking health of {host}")

    client = AsyncHTTPClient()
    try:
        # retry loop: transient errors are retried, FailedCheck is not
        for n in range(check_config["retries"]):
            try:
                # TODO we could use `asyncio.gather()` and fetch health and versions in parallel
                # raises an `HTTPError` if the request returned a non-200 response code
                # health url returns 503 if a (hard check) service is unhealthy

                # check versions
                # run this first, because it updates the prime version to check against,
                # and we don't want to skip that if the prime cluster is otherwise unhealthy
                response = await client.fetch(
                    all_hosts[host]["versions"], request_timeout=check_config["timeout"]
                )
                versions = json.loads(response.body)
                # if this is the prime host store the versions so we can compare to them later
                if all_hosts[host].get("prime", False):
                    old_versions = CONFIG.get("versions", None)
                    if old_versions != versions:
                        app_log.info(
                            f"Updating prime versions {old_versions}->{versions}"
                        )
                        CONFIG["versions"] = versions

                # check if this cluster is on the same versions as the prime
                # w/o information about the prime's version we allow each
                # cluster to be on its own versions
                if versions != CONFIG.get("versions", versions):
                    HEALTH_CHECK.labels(member=host, check="versions").set(0)
                    raise FailedCheck(
                        "{} has different versions ({}) than prime ({})".format(
                            host, versions, CONFIG["versions"]
                        ),
                        reason="versions",
                    )
                else:
                    HEALTH_CHECK.labels(member=host, check="versions").set(1)

                # check health
                response = await client.fetch(
                    all_hosts[host]["health"], request_timeout=check_config["timeout"]
                )
                health = json.loads(response.body)

                # first pass: publish every check's gauge before failing on any,
                # so metrics reflect all checks even when one is unhealthy
                for check in health["checks"]:
                    HEALTH_CHECK.labels(member=host, check=check["service"]).set(
                        int(check["ok"])
                    )
                # second pass: raise on the first failing (or near-quota) check
                for check in health["checks"]:
                    if not check["ok"]:
                        raise FailedCheck(
                            f"{host} unhealthy: {check}", reason=check["service"]
                        )
                    if (
                        check["service"] == "Pod quota"
                        and check["quota"] is not None
                        and CONFIG["pod_headroom"]
                    ):
                        # apply headroom so we don't hit the hard pod limit after redirecting
                        if (
                            check["total_pods"] + CONFIG["pod_headroom"]
                            >= check["quota"]
                        ):
                            check["ok"] = False
                            raise FailedCheck(
                                f"{host} is approaching pod quota: {check['total_pods']}/{check['quota']}",
                                reason=check["service"],
                            )
                # success: leave the retry loop
                break
            except FailedCheck:
                # don't retry failures such as quotas/version checks
                # those aren't likely to change in 1s
                raise
            except Exception as e:
                # retry check on unhandled errors (e.g. connection failure)
                app_log.warning(
                    f"{host} health check failed, attempt {n + 1} of {check_config['retries']}: {e}"
                )
                # raise the exception on the last attempt
                if n + 1 == check_config["retries"]:
                    raise
                else:
                    await asyncio.sleep(1)
    # any kind of exception means the host is unhealthy
    except Exception as e:
        app_log.warning(f"{host} is unhealthy: {e}")
        if isinstance(e, FailedCheck):
            reason = e.reason
        else:
            reason = "unknown"
        HEALTH.labels(member=host, reason=reason).set(0)
        if host in active_hosts:
            # remove the host from the rotation for a while
            # prime hosts may still receive traffic when unhealthy
            # _if_ all other hosts are also unhealthy
            active_hosts.pop(host)
            app_log.warning(f"{host} has been removed from the rotation")
        # wait longer than usual to check unhealthy host again
        period = check_config["failed_period"]
    else:
        # healthy: clear the gauge and (re)add the host to the rotation
        HEALTH.labels(member=host, reason="").set(1)
        if host not in active_hosts:
            active_hosts[host] = all_hosts[host]
            app_log.warning(f"{host} has been added to the rotation")
        period = check_config["period"]

    # schedule ourselves to check again later
    # jitter spreads member checks out so they don't all fire at once
    jitter = check_config["jitter"] * (0.5 - random.random())
    IOLoop.current().call_later((1 + jitter) * period, health_check, host, active_hosts)
def make_app():
    """Build the tornado Application and start health checks for every host.

    Exits the process if CONFIG names no prime host, since the catch-all
    proxy route needs a prime host to forward to.
    """
    # we want a copy of the hosts config that we can use to keep state
    hosts = dict(CONFIG["hosts"])

    prime_host = None
    for host_cfg in hosts.values():
        if host_cfg.get("prime", False):
            prime_host = host_cfg["url"]
            break
    if prime_host is None:
        sys.exit("No prime host configured!")

    routes = [
        (r"/build/.*", RedirectHandler, {"load_balancer": CONFIG["load_balancer"]}),
        (
            r"/(badge\_logo\.svg)",
            tornado.web.RedirectHandler,
            {
                "url": "https://static.mybinder.org/badge_logo.svg",
                "permanent": True,
            },
        ),
        (
            r"/(badge\.svg)",
            tornado.web.RedirectHandler,
            {"url": "https://static.mybinder.org/badge.svg", "permanent": True},
        ),
        (
            r"/assets/(images/badge\.svg)",
            tornado.web.RedirectHandler,
            {"url": "https://static.mybinder.org/badge.svg", "permanent": True},
        ),
        (r"/active_hosts", ActiveHostsHandler, {"active_hosts": hosts}),
        (r"/metrics", MetricsHandler),
        # catch-all: proxy anything unmatched straight to the prime host
        (r".*", ProxyHandler, {"host": prime_host}),
    ]
    app = tornado.web.Application(routes, hosts=hosts, debug=False)

    # start monitoring all our potential hosts
    for hostname in hosts:
        IOLoop.current().add_callback(health_check, hostname, hosts)

    return app
def main():
    """Parse command-line options and run the federation proxy server."""
    # use the curl-based HTTP client implementation for outgoing requests
    AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
    options.define("port", default=8080, help="port to listen on")
    options.parse_command_line()
    # xheaders=True: trust X-Forwarded-* headers set by the ingress
    make_app().listen(options.options.port, xheaders=True)
    tornado.ioloop.IOLoop.current().start()


if __name__ == "__main__":
    main()
<file_sep>/docs/source/incident-reports/2017-10-17-cluster-full.md
# 2017-10-17, Cluster Full
## Summary
Users were reporting failed launches after building. This was caused by the cluster being full & hitting our autoscaling 'upper' limit. Due to the launch timeout being too small + a bug in KubeSpawner, pods kept getting started & then orphaned, leading to cluster being full. We increased the timeout and cleaned out the cluster, which has temporarily fixed the problem.
## Timeline
All times in PST
### 2017-10-17 07:42
Users report all launches are failing with a 'took too long to launch' error
### 08:47
Investigation starts, and the cluster is full - there are 94 pending pods and a full ten nodes.
### 09:14
All pending pods are deleted, but the cluster is still full and new launches are still failing. This is attributed to KubeSpawner not really cleaning up after itself - if a spawn fails, KubeSpawner should kill the pod, rather than let it stay in whatever state it was in. This leads to 'orphan' pods that won't be cleaned up, since JupyterHub has lost track of these pods.
This is made worse by the low timeout on launches in binderhub - except when the launch fails, we don't kill the server. This leads to servers that are launched after the timeout, so users never see it. This is still kept track of by JupyterHub, but users never use these pods.
### 09:19
A [PR](https://github.com/jupyterhub/binderhub/pull/188) is made to bump up the timeout, since that is what is making the problem unmanageable right now. This is delayed by a GitHub service degradation (PRs do not update for a while).
### 09:49
GitHub is usable again, and the PR gets merged.
### 10:18
A deployment to staging is attempted, but fails test because Cluster is still full. Deployment to staging is reverted.
### 11:02
Cluster is entirely cleaned out with a `kubectl --namespace=beta delete pod --all`. This is disruptive to current users, but is the easiest way to get cluster capacity again.
### 11:14
Launch timeout bump (to about 10mins) is deployed again [1](https://github.com/jupyterhub/mybinder.org-deploy/pull/89) [2](https://github.com/jupyterhub/mybinder.org-deploy/pull/90)
### 11:29
The [cluster autoscaler](https://cloud.google.com/kubernetes-engine/docs/concepts/cluster-autoscaler) kicks in, resizing the cluster down to about 4 nodes.
Everything is fine and builds / launches are working again.
## Action items
### KubeSpawner
1. If a pod doesn't start in time, kubespawner should kill it. If it enters error state, kubespawner should kill it. In general, it should never 'orphan' pods. [Issue](https://github.com/jupyterhub/kubespawner/issues/95)
### BinderHub
1. Make the launch timeout more configurable, and specified in seconds [Issue](https://github.com/jupyterhub/binderhub/issues/244)
2. If launch fails, then BinderHub should actually call stop on the server & try to stop the server if it is running. [Issue](https://github.com/jupyterhub/binderhub/issues/245)
### Process
1. We need better alerting for when cluster is full, ideally before it is full! [Issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/125)
<file_sep>/docs/source/analytics/events-archive.rst
.. _analytics/events-archive:
============================
The Analytics Events Archive
============================
BinderHub emits an event each time a repository is launched. They
are recorded as JSON, and made available to the public at
`archive.analytics.mybinder.org <https://archive.analytics.mybinder.org>`_.
This page describes what is available in the Events Archive & how to
interpret it.
File format
===========
All data files are in `jsonl <https://jsonlines.org/>`_ format. Each line,
delimited by a ``\n``, is a well-formed JSON object. These files can
be read / written in a streaming fashion, one line at a time, without
having to read the entire file into memory.
Launch data by date
===================
For each day since we started keeping track (2018-11-03), there is a
file named ``events-<YYYY>-<MM>-<DD>.jsonl`` that contains data for
all the launches performed by mybinder.org on that date. All timestamps
and dates are in `UTC <https://en.wikipedia.org/wiki/Coordinated_Universal_Time>`_.
Each line is a JSON object that conforms to `this JSON Schema
<https://github.com/jupyterhub/binderhub/blob/HEAD/binderhub/event-schemas/launch.json>`_.
A description of these fields is provided below.
#. **schema** and **version**
Currently set to ``binderhub.jupyter.org/launch`` and ``1`` respectively. These
identify the kind of event this is (a launch event from BinderHub) and the
current version of the event schema. This lets us evolve the format of the
events emitted without breaking existing analytics code. New versions of
the launch schema may add additional fields, or change meanings of current
ones. We will definitely add other events that are available here too -
for example, successful builds.
Your analytics code **must** make sure the event you are parsing has
the schema and version you are expecting before proceeding. If you
don't do this, your code might fail in unexpected ways in the future.
#. **timestamp**
ISO8601 formatted timestamp when the event was emitted. These are rounded
down to the closest minute. The lines in the file are ordered by timestamp,
starting at the earliest.
#. **provider**
Where the launched repository was hosted. Current options are ``GitHub``,
``GitLab`` and ``Git``.
#. **spec**
Specification identifying the repository / commit immutably & uniquely in
the provider.
For GitHub, it is ``<repo>/<commit-spec>``. Example would be ``yuvipanda/example-requirements/HEAD``.
For GitLab, it is ``<repo>/<commit-spec>``, except ``repo`` is URL escaped.
For raw Git repositories, it is ``<repo-url>/<commit-spec>``. ``repo-url`` is full URL escaped
to the repo and ``commit-spec`` is a full commit hash.
#. **status**
Whether the launch succeeded (``success``) or failed (``failure``). Currently
only successful launches are recorded.
Example code
------------
Some popular ways of reading this event data into a useful data structure are
provided here.
``pandas``
~~~~~~~~~~
.. code-block:: python
import pandas as pd
df = pd.read_json("https://archive.analytics.mybinder.org/events-2018-11-05.jsonl", lines=True)
df
Plain Python
~~~~~~~~~~~~
.. code-block:: python
import requests
import json
response = requests.get("https://archive.analytics.mybinder.org/events-2018-11-05.jsonl")
data = [json.loads(l) for l in response.iter_lines()]
``index.jsonl``
===============
The `index.jsonl <https://archive.analytics.mybinder.org/index.jsonl>`_ file lists
all the dates an event archive is available for. The following fields are present
for each line:
#. **date**
The UTC date the event archive is for
#. **name**
The name of the file containing the events. This is a relative path - since we
got the ``index.jsonl`` file from `https://archive.analytics.mybinder.org`, that
is the base URL used to resolve these. For example when ``name`` is
``events-2018-11-05.jsonl``, the full URL to the file is
``https://archive.analytics.mybinder.org/events-2018-11-05.jsonl``.
#. **count**
Total number of events in the file.
<file_sep>/docs/source/analytics/cloud-costs.rst
.. _analytics/cloud-costs:
================
Cloud Costs Data
================
In an effort to be transparent about how we use our funds,
we publish the amount of money spent each day in cloud
compute costs for running mybinder.org.
Interpreting the data
=====================
You can find the data in the `Analytics Archive
<https://archive.analytics.mybinder.org>`_ at `cloud-costs.jsonl
<https://archive.analytics.mybinder.org/cloud-costs.jsonl>`_. Each line in
the file is a JSON object, with the following keys:
#. **version**
Currently *1*, will be incremented when the structure of this format
changes.
#. **start_time** and **end_time**
The start and end of the billing period this item represents. These
times are inclusive, and in pacific time observing DST (so PDT or PST).
The timezone choice is unfortunate, but our cloud provider
(Google Cloud Platform) only provides detailed billing reports in
this timezone.
#. **cost**
The cost of all cloud compute resources used during this time period. This
is denominated in US Dollars.
The lines are sorted by ``start_time``.
<file_sep>/docs/source/components/ingress.md
# HTTPS ingress with nginx + kube-lego
Kubernetes [Ingress Objects](https://kubernetes.io/docs/concepts/services-networking/ingress/)
are used to manage HTTP(S) access from the internet to inside the Kubernetes cluster.
Among other things, it lets us do the following:
1. Provide a HTTPS end point so users can connect to us securely
2. Direct traffic to various Services based on hostnames or URL paths
3. Allow using one public IP address for multiple domain names
We use the [nginx-ingress](https://github.com/kubernetes/ingress-nginx) provider to handle
our Ingress needs.
## Nginx Ingress
We run on Google Cloud's Kubernetes Engine. Even though GKE comes pre-installed with
the [Google Cloud Load Balancer Ingress provider](https://github.com/kubernetes/ingress-gce),
we decided to use nginx instead for the following reasons:
1. GCLB has a 30s default timeout on all HTTP connections. This is counted
not just when connection is idle, but from connection start time. This
is particularly bad for websockets, since those connections usually last for
a lot longer than 30s! There is no easy way to configure this timeout from
inside Kubernetes.
2. GCLB does not guarantee you can use the same IP for multiple domains. We
want the various subdomains of `mybinder.org` to point to the same IP
so we can easily add / remove new services without waiting for DNS propagation
delay.
### Installation
nginx-ingress is installed using the [nginx-ingress helm chart](https://github.com/kubernetes/ingress-nginx/tree/main/charts/ingress-nginx).
This installs the following components:
1. `nginx-ingress-controller` - keeps the HTTPS rules in sync with `Ingress`
objects and serves the HTTPS requests. This also exports
[metrics](metrics) that are captured in prometheus.
2. `nginx-ingress-default-backend` - simply returns a 404 error & is used
by `nginx-ingress-controller` to serve any requests that don't match
any rules.
The specific ways these have been configured can be seen in the `mybinder/values.yaml`
file in this repo, under `nginx-ingress`.
### Configuration with Ingress objects
`Ingress` objects are used to tell the ingress controllers which requests
should be routed to which `Service` objects. Usually, the rules either
check for a hostname (like `mybinder.org` or `prometheus.mybinder.org`) and/or
a URL prefix (like `/metrics` or `/docs`). You can see all the ingress objects
present with `kubectl --namespace=prod get ingress`.
The following ingress objects currently exist:
- `jupyterhub` - Directs traffic to `hub.mybinder.org`.
The zero-to-jupyterhub guide has more [documentation](https://zero-to-jupyterhub.readthedocs.io/en/latest/administrator/advanced.html#ingress).
- `binderhub` - Directs traffic to `mybinder.org`. You can find more details
about this in the [binderhub helm chart](https://github.com/jupyterhub/binderhub/tree/HEAD/helm-chart).
- `redirector` - Directs traffic to the HTTP redirector we run for `mybinder.org`.
This helps do redirects such as `docs.mybinder.org` or `beta.mybinder.org`.
The list of redirects is configured in `mybinder/values.yaml`. The code
for this is in `mybinder/templates/redirector` in this repo.
- `static` - Directs traffic into `static.mybinder.org`. We serve the `mybinder.org`
badges from a different domain for [privacy reasons](https://github.com/jupyterhub/binderhub/issues/379).
This ingress lets us direct traffic only from `static.mybinder.org/badge.svg` to the
binder pod.
- `prometheus-server` - Directs traffic to `prometheus.mybinder.org`. Configured under
`prometheus` in both `mybinder/values.yaml` and `config/prod.yaml`.
- `grafana` - Directs traffic to `grafana.mybinder.org`. Configured under `grafana` in
both `mybinder/values.yaml` and `config/prod.yaml`.
- `kube-lego-nginx` - Used by kube-lego for doing automatic
HTTPS certificate renewals.
## HTTPS certificates with kube-lego
We use [Let's Encrypt](https://letsencrypt.org/) for all our HTTPS certificates.
[Kube Lego](https://github.com/jetstack/kube-lego) is used to automatically
provision and maintain HTTPS certificates for us.
```{note}
Kube-lego is deprecated, and we should move to
`cert-manager <https://github.com/jetstack/cert-manager/>`_ soon.
```
### Installation
kube-lego is installed using the [kube-lego](https://github.com/helm/charts/tree/HEAD/stable/kube-lego).
### Configuration
`kube-lego` requires Ingress objects to have specific `annotations` and
`tls` values, as [documented here](https://github.com/jetstack/kube-lego#how-kube-lego-works).
We specify this for all our ingress objects, mostly by customizing various helm charts
in `mybinder/values.yaml`.
### Let's Encrypt account
Let's Encrypt uses [accounts](https://community.letsencrypt.org/t/what-are-accounts-do-i-need-to-backup-them/21318)
to keep track of HTTPS certificates & expiry dates.
Currently, the account is registered to `<EMAIL>`, mostly as a historical
accident. Changing it requires some amount of care to make sure we do not suffer
intermittent HTTPS failure, and should be done whenever we switch to cert-manager.
<file_sep>/images/analytics-publisher/run.py
#!/usr/bin/env python3
# Entrypoint for the analytics-publisher image: runs forever, and every 2h
# archives BinderHub launch events into the public analytics bucket,
# optionally publishes daily cloud-cost data, and regenerates the index.
import json
import time
from datetime import datetime, timedelta

from archiver import archive_events
from cloudcosts import publish_daily_cost
from indexer import index_events

# configuration is mounted into the container at this fixed path
with open("/etc/analytics-publisher/analytics-publisher.json") as f:
    config = json.load(f)

project_name = config["project"]

while True:
    now = datetime.utcnow()
    if now.hour < 5:
        # For first 5h of the day, archive yesterday's events too.
        # Stackdriver pushes out logs to GCS once every hour, and we run archiver every 2h.
        # Running last day's for first 5h makes sure we catch last day's events, even if an
        # archiving run is disrupted for any reason
        yesterday = now - timedelta(days=1)
        print("Archiving yesterday's events {}".format(yesterday.strftime("%Y-%m-%d")))
        archive_events(
            project=project_name,
            log_name=config["events"]["logName"],
            source_bucket=config["events"]["sourceBucket"],
            destination_bucket=config["destinationBucket"],
            date=yesterday,
        )

    print("Archiving today's events {}".format(now.strftime("%Y-%m-%d")))
    archive_events(
        project=project_name,
        log_name=config["events"]["logName"],
        source_bucket=config["events"]["sourceBucket"],
        destination_bucket=config["destinationBucket"],
        date=now,
    )

    if config["cloudCosts"]["enabled"]:
        # Only publish cloudCosts if it is enabled.
        # We disable this for binder staging, since all our billing
        # exports are in prod only.
        cloud_costs = publish_daily_cost(
            billing_bucket_name=config["cloudCosts"]["sourceBucket"],
            target_bucket_name=config["destinationBucket"],
            target_object_name=config["cloudCosts"]["fileName"],
            kind=config["cloudCosts"]["kind"],
        )
        print(f"Uploaded cloud costs for {len(cloud_costs)} days")

    # regenerate index.jsonl so consumers can discover the new files
    print("Generating index")
    index_events(project_name, config["destinationBucket"])

    print("Sleeping for 2h")
    time.sleep(2 * 60 * 60)
<file_sep>/scripts/prune_harbor.py
#!/usr/bin/env python3
"""
Prune empty repositories in a Harbor registry
Harbor doesn't prune _repositories_, only _artifacts_.
There appears to be a cost to all the empty repos left over time.
This script prunes any repositories that lack any artifacts.
Artifacts are pruned by retention policy and garbage collection.
Requires:
- requests
- ruamel.yaml
- tqdm
"""
from argparse import ArgumentParser
from pathlib import Path
import requests
import tqdm
from ruamel.yaml import YAML
repo = Path(__file__).parent.parent.resolve()
yaml = YAML(typ="safe")
def prune_repositories(
    harbor_url: str,
    project_name: str,
    username: str,
    password: str,
) -> None:
    """Delete every repository in the project that has no artifacts.

    Repository images themselves are pruned by Harbor garbage collection
    and the image retention policy; this removes the empty repository
    shells those leave behind.

    Args:
        harbor_url: base URL of the Harbor v2 API, e.g. "https://host/api/v2.0"
        project_name: Harbor project whose repositories should be pruned
        username: Harbor user for HTTP basic auth
        password: Harbor password for HTTP basic auth
    """
    auth = (username, password)
    print("Deleting repositories with no images")
    # total repo count drives both progress bars and the page arithmetic
    r = requests.get(
        harbor_url + f"/projects/{project_name}/summary",
        auth=auth,
    )
    r.raise_for_status()
    repo_count = r.json()["repo_count"]
    fetch_progress: tqdm.tqdm = tqdm.tqdm(
        desc="fetching", unit="repo", total=repo_count
    )
    prune_progress: tqdm.tqdm = tqdm.tqdm(desc="pruning", unit="repo", total=repo_count)
    page_size = 100

    def fetch_page(page: int = 1) -> list:
        """Fetch one page of repositories from the Harbor API."""
        r = requests.get(
            harbor_url + f"/projects/{project_name}/repositories",
            # sort by update_time because the oldest ones are the most likely to be empty
            # reversed (-) because we iterate from the back
            params=dict(sort="-update_time", page_size=str(page_size), page=str(page)),
            auth=auth,
        )
        r.raise_for_status()
        repos: list[dict] = r.json()
        fetch_progress.update(len(repos))
        return repos

    # Iterate pages from the back so deletions don't shift entries on pages
    # we have yet to fetch.
    # (Removed a dead `page = repo_count // page_size` assignment that was
    # immediately overwritten by this loop.)
    for page in range(repo_count // page_size + 1, 0, -1):
        for repo_info in fetch_page(page):
            # run deletions sequentially because harbor seems to have trouble under too much load
            # names come back as "<project>/<repo>"; drop the project prefix.
            # (Previously this clobbered the `project_name` parameter and
            # shadowed the module-level `repo` Path.)
            _, repo_name = repo_info["name"].split("/", 1)
            if repo_info["artifact_count"] == 0:
                r = requests.delete(
                    harbor_url + f"/projects/{project_name}/repositories/{repo_name}",
                    auth=auth,
                )
                r.raise_for_status()
                prune_progress.update(1)
    fetch_progress.close()
    prune_progress.close()
    print(f"Pruned {prune_progress.n}/{repo_count} repositories with no artifacts")
def load_config(member: str) -> dict:
    """Assemble connection details for a federation member's Harbor instance.

    The API URL and project name are derived from the BinderHub
    ``image_prefix`` in ``config/{member}.yaml``; credentials come from
    ``secrets/{member}-harbor.yaml``.
    """
    with (repo / "config" / f"{member}.yaml").open() as f:
        member_config = yaml.load(f)
    image_prefix = member_config["binderhub"]["config"]["BinderHub"]["image_prefix"]
    # image_prefix looks like "<host>/<project>/<prefix>"
    host, project_name, _prefix = image_prefix.rsplit("/", 2)

    with (repo / "secrets" / f"{member}-harbor.yaml").open() as f:
        credentials = yaml.load(f)["harbor"]

    return dict(
        harbor_url=f"https://{host}/api/v2.0",
        project_name=project_name,
        username=credentials["username"],
        password=credentials["password"],
    )
def main():
    """CLI entry point: prune empty Harbor repositories for one member."""
    parser = ArgumentParser()
    parser.add_argument(
        "cluster", help="The federation member whose harbor should be pruned"
    )
    cluster = parser.parse_args().cluster
    prune_repositories(**load_config(cluster))


if __name__ == "__main__":
    main()
<file_sep>/docs/source/incident-reports/2022-06-02-pod-limit.md
# 2022-06-02, pod limit reached
## Summary
A bug in the GKE resource quota was preventing the `prod` hub from creating new pods.
It said we had exceeded our pod quota even though we certainly had not.
When we deleted the `gke-resource-quotas` `resourcequota` in k8s, the pod limit error no longer appeared and things went back to normal.
This effect lasted approximately nine hours before normal operation was restored without intervention.
<iframe src="https://grafana.mybinder.org/d-solo/fLoQvRHmk/status?orgId=1&from=1654149587937&to=1654207111545&panelId=2" width="450" height="200" frameborder="0"></iframe>
## Timeline
All times in CET
### 2022-06-02 10:00 - Problem starts
mybinder.org stopped successfully launching any new pods.
### 21:00 - Team alerted
A [user reported a Binder outage in the Matrix channel](https://matrix.to/#/!FUpHWAzqkjcOgkhmHS:petrichor.me/$yvbn4wMMghzF0COGPEVckE_1i7535UL5NLRc6Xlpu34?via=petrichor.me&via=gitter.im&via=matrix.org).
A team member noticed and began investigating; a quick check showed that pods hadn't been launching for several hours.
Team is alerted via the Matrix channel.
### 21:06 - Logger error
We discover the following log error about hitting a pod quota limit:
```
HTTP response body: {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"pods \"jupyter-leosamu-2dpythonmoocproblems-2drkut24bk\" is forbidden: exceeded quota: gke-resource-quotas, requested: pods=1, used: pods=15k, limited: pods=15k","reason":"Forbidden","details":{"name":"jupyter-leosamu-2dpythonmoocproblems-2drkut24bk","kind":"pods"},"code":403}
```
In particular:
```
exceeded quota: gke-resource-quotas, requested: pods=1, used: pods=15k, limited: pods=15k"
```
This is confusing because we definitely are not using 15k pods.
### 21:22 - Found a StackOverflow answer
Some investigating on StackOverflow showed that others have run into similar problems.
These two StackOverflow posts were helpful:
- https://stackoverflow.com/questions/58716138/
- https://stackoverflow.com/a/61656760/1927102
They mentioned it was a bug in the `gke-resource-quotas` Kubernetes object, and that deleting it caused the object to be recreated and work correctly again.
### 21:24 - Delete `gke-resource-quotas` and issue goes away
We deleted the `gke-resource-quotas`, and immediately our deployment was able to create pods again, launch success went back to 100% pretty quickly.
## Lessons learnt
### What went well
1. Deleting the resource quota object caused the system to correct itself very quickly.
### What went wrong
1. It was 11 hours before we realized that there was a major outage on Binder.
### Where we got lucky
1. The actual fix was relatively simple once we knew to delete the right object.
1. A team member with the skills and permissions to make the change happened to be at their computer at 11pm their time.
## Action items
### Process improvements
1. Uptime and alerting issue: https://github.com/jupyterhub/mybinder.org-deploy/issues/611
<file_sep>/docs/source/incident-reports/2019-03-24-r2d-upgrade.md
# 2019-03-24, repo2docker upgrade and docker image cache wipe
## Summary
User's experienced long wait times for the images to build, fixed by banning
the JupyterLab demo repository. The problem was that a single repository that
is very popular and takes a long time to build will end up consuming all
available "build slots". Incident lasted most on Sunday day in Europe.
## Timeline
All times in UTC+1 (Zurich time)
### 2019-03-24 8:54
Start of incident. New version of repo2docker is deployed and the build prefix
is bumped. This effectively clears the docker image cache. Repositories start
building. We quickly accumulate build pods. Things appear to work.

### 18:45
Builds are backlogged. As not a lot of binders are running the auto-scaled
cluster is small which limits the amount of resources available to build images.
Decision taken to add two new nodes to provide additional
resources for building pods

### 18:54
Two new nodes were added as a separate node pool. These nodes had no "Local SSD"
assigned to them. This was a mistake in setting up the node pool.
### 19:14
New builds weren't starting, number of running build pods kept decreasing.
When triggering a new build and watching the logs we never see a log message
that the build pod has been launched. This is consistent with the UI on the web
interface. However, it was unclear why the build pods were not being launched.
Suspect something is "stuck" in the BinderHub
pods, restarted one of them. New builds start happening.
### 19:57
Reports of builds happening but then erroring with "no disk space", images
are not pushed to the registry. Notice that the extra node pool does not have
local SSD drives after ssh'ing to some old and new nodes and comparing output
of `df -h` on each. Recreate node pool with correctly configured nodes.
Restarted the second bhub pod.
### 22:18
Notice that builds are not starting and number of running build pods is low.
Start investigating where the builds are queued. Added a new metric to the
"Running Build Pods" chart on grafana to show number of builds "started" by
BinderHub.

Each "build" is either someone requesting a repository that has not been built
or is in the process of being built. The number of actually running builds keeps
decreasing over time, while the number of "started builds" keeps growing. The
big drop is when we deleted the build pod for the JupyterLab demo repository
### 22:36
Decide to start a build for JupyterLab demo repo and then ban it to allow
other repository builds to make progress. Once the JLab build is done we will
unban it again.
### 2019-03-25 7:01am
JupyterLab demo repo build completed, repository was unbanned again.
Incident resolved.
## Lessons learnt
### What went well
List of things that went well. For example,
1. We managed to update repo2docker and wipe the build cache without causing
a total outage.
### What went wrong
Things that could have gone better. Ideally these should result in concrete
action items that have GitHub issues created for them and linked to under
Action items. For example,
1. We generate a lot of load when wiping the cache and had no ready-to-go
instructions for adding extra nodes to the cluster.
### Where we got lucky
These are good things that happened to us but not because we had planned for them.
For example,
1. Users were super understanding and supportive that things were slow and builds
did not start for several hours.
## Action items
### Process improvements
1. Need to devise a strategy for wiping the build cache without generating a huge
load.
### Documentation improvements
1. Add a command that can be copy&pasted to add a new node pool to avoid
configuration errors.
### Technical improvements
1. Change our build thread pool setup so that a single slow to build and popular
repository does not end up using all available build slots.
<file_sep>/docs/source/incident-reports/2018-01-17-ubuntu-upgrade.md
# 2018-01-17, Emergency Aardvark bump
## Summary
In the midst of a general Travis period of instability, we were slow to
realize that Ubuntu had pulled several of its packages for "Zesty" from the
repository we were using in the base image for repo2docker. This meant that
most Binders would fail to build because those packages were no longer available.
We had to [perform an emergency bump](https://github.com/jupyterhub/repo2docker/pull/197)
to the latest version of Ubuntu (artful).
In the future we should keep bumping when ubuntu makes a new release _until_
it makes a new LTS release, then we should pin repo2docker at that.
## Timeline
All times in PST
### 2018-01-15
General Travis failures happening due to Travis downtime (not related to
Binder deployment)
### 2018-01-16
JupyterHub travis failures
(https://github.com/jupyterhub/repo2docker/pull/197#issuecomment-358515334),
was dismissed as holdover from Travis failing on the 15th (since it was reported
as 'no route to host' rather than 404).
### 2018-01-17 17:14pm
- Other travis failures (berkeley's), points to issue being real
- Realization this will cause repo2docker + mybinder to fail
### 2018-01-17 17:30pm
Binder build logs showed lots of "IGN" errors, due to trying to pull from
zesty repositories. Then would fail.
**Problem**: Ubuntu removed repository packages for zesty, since it was not
a long-term release. The base Binder image was using zesty, so it would look
for packages that weren't there anymore.
### 18:57
Bump to artful, merged + deployed. Checked mybinder.org and problem was resolved.
### 19:00
Tweet + Email listserv announcing bump to Artful.
## Action items
### Process
- Note that Ubuntu non-LTS is on a ~9 month cycle, not the 1 year cycle we assumed.
Keep bumping Ubuntu versions until we hit LTS, then stop [Issue](https://github.com/jupyterhub/repo2docker/issues/198)
- Subscribe to the ubuntu-announce mailing list [Issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/296)
### repo2docker
- Separate python version from distro version [Issue](https://github.com/jupyterhub/repo2docker/issues/185)
- Allow users to pin distros (with an apt.yaml) [PR](https://github.com/jupyterhub/repo2docker/pull/148)
### Misc
- Keep our own apt mirror [Issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/295)
### mybinder.org-deploy
- Continuous tests should be run against binder, to tell us when things fail [Issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/19)
<file_sep>/.github/PULL_REQUEST_TEMPLATE.md
<!-- If this PR is a bump to either BinderHub or repo2docker,
use the template below in your PR description. If it is not,
(e.g., a docs PR) then you can delete the template below. -->
<!-- BinderHub bump -->
This is a BinderHub version bump. See the link below for a diff of new changes:
https://github.com/jupyterhub/binderhub/compare/<OLD-HASH>...<NEW-HASH>
<!-- repo2docker bump -->
This is a repo2docker version bump. See the link below for a diff of new changes:
https://github.com/jupyterhub/repo2docker/compare/<OLD-HASH>...<NEW-HASH>
<file_sep>/docs/source/incident-reports/2018-07-08-podsnips-aplenty.md
# 2018-07-08, too many pods
## Summary
Pod shutdown stopped functioning,
resulting in constant growth of pods,
eventually filling the cluster and new launches failing.
Total service disruption for Binder lasted for approximately five hours on a Saturday evening.
## Timeline
All times in CEST (UTC+2)
### 2018-07-08 ca. 18:30
Launch success drops to zero.
### 22:48
Suspicious behavior reported on gitter.
### 23:25
Investigation launched.
Cluster is full, scaled up to 8 nodes.
Almost 1k user pods are found (737 running, 240 waiting to be scheduled).
Pods older than 4 hours are deleted,
and some of the most-used images (such as ipython-in-depth)
are culled more aggressively.
After culling ~600 pods and restarting the Hub pod, launches are back to normal.
Some of the nodes allocated due to the flood of pods are cordoned to be reclaimed by the autoscaler.
Launches return to stable and European-timezone team retires for the night.
### 2018-07-09 08:00
Manual max-age culling frees up some of the remaining nodes.
Cluster is able to scale back to 3 nodes.
### 10:00
Investigating hub logs shows indications that the pod reflector has stopped receiving events ca 18:30 CEST, such as:
```
2018-07-07 18:32:27.000 CEST
TimeoutError: pod/jupyter-ipython-2dipython-2din-2ddepth-... did not disappear in 300 seconds!
2018-07-07 18:34:04.000 CEST
TimeoutError: pod/jupyter-bokeh-2dbokeh-2dnotebooks-... did not start in 300 seconds!
```
Shortly thereafter, pod deletion is skipped for pods that launch but are not in the pod reflector, presumably for the same reason they are skipped in the above:
```
2018-07-07 18:38:35.000 CEST
[W 2018-07-07 16:38:35.506 JupyterHub spawner:1476] No pod jupyter-ipython-2dipython-2din-2ddepth-... to delete. Assuming already deleted.
```
These pods _have_ started, but aren't registered in the pod reflector,
so in aborting the launch, the pod is not deleted.
This is the direct cause of the runaway pod growth,
though the root cause is that the pod reflector has stopped updating.
## Lessons learned
### What went well
- mybinder.org-deploy/scripts/delete-pods.py script was useful for bulk deleting older pods to get things back under control.
- once noticed, cluster returned to healthy within a few minutes.
List of things that went well. For example,
1. We were alerted to the outage by automated bots before it affected users
2. The staging cluster helped us catch this before it went to prod
### What went wrong
- pod reflector in kubespawner stopped receiving events,
leading to total launch failure until the Hub was restarted.
- A compounding bug caused pods that the reflector failed to notice starting
skipped deletion because they were assumed not to have been created in the first place.
- outage lasted a long time (five hours) before we noticed. We still don't have downtime notifications setup.
- cluster scale-down still involves several manual steps of cordoning and draining nodes.
## Action items
These are only sample subheadings. Every action item should have a GitHub issue
(even a small skeleton of one) attached to it, so these do not get forgotten.
### Process improvements
1. set up notifications of downtime ([issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/611))
2. automate scale-down ([issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/646))
### Documentation improvements
1. make sure downtime recovery steps are documented.
They worked well this time,
but not all team members may know the steps.
([issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/655))
### Technical improvements
1. Fix failure to delete pods when pod reflector fails
([pull request](https://github.com/jupyterhub/kubespawner/pull/208))
2. Make kubespawner more robust to pod-reflector failure
([issue](https://github.com/jupyterhub/kubespawner/issues/209))
3. Refactor event reflector to be a single reflector instead of one-per-pod.
The new event reflectors were one major new feature in KubeSpawner,
and may have been relevant to the failure (not clear yet)
([issue](https://github.com/jupyterhub/kubespawner/issues/210)).
4. try to auto-detect likely unhealthy Hub state and restart the Hub without manual intervention ([issue](https://github.com/jupyterhub/jupyterhub/issues/2028))
<file_sep>/scripts/drain-user-node
#!/usr/bin/env python3
"""
Drain a cordoned node with user pods,
leaving user pods to finish draining on their own.
To be run after cordoning a user node,
to increase the likelihood of that node being reclaimed promptly
by the autoscaler.
Drains replicaset-controlled pods *other than user pods* from a node
so that the autoscaler can reclaim it when it's ready.
This drains pods such as `kube-dns` that can end up blocking
autoscaler reclamation without disrupting user pods.
"""
import argparse
import sys
from kubernetes import client, config
# Setup our parameters
# (module docstring above is reused as the --help description)
argparser = argparse.ArgumentParser(description=__doc__)
argparser.add_argument(
    "--dry-run",
    action="store_true",
    help="Dry run (report what would happen, don't actually do anything)",
)
argparser.add_argument(
    "-y", dest="answer_yes", action="store_true", help="Answer yes (skips confirmation)"
)
argparser.add_argument(
    "nodes",
    nargs="*",
    help="The nodes. If not given, all cordoned nodes will be drained.",
)
kube_context_help = (
    "Context pointing to the cluster to use. To list the "
    "current activated context, run `kubectl config get-contexts`. Default: currently active context"
)
argparser.add_argument("--kube-context", default=None, help=kube_context_help)
args = argparser.parse_args()

# prefix to avoid scaring people during output
if args.dry_run:
    dry_prefix = "(Not actually) "
else:
    dry_prefix = ""

# Connect to kubernetes using local kubeconfig credentials
config.load_kube_config(context=args.kube_context)
kube = client.CoreV1Api()

# determine the nodes to drain
kube_nodes = {node.metadata.name: node for node in kube.list_node().items}
nodes_to_drain = []
if not args.nodes:
    # nodes not specified, drain all cordoned nodes
    # (spec.unschedulable is set by `kubectl cordon`)
    for node_name, node in kube_nodes.items():
        if node.spec.unschedulable:
            nodes_to_drain.append(node_name)
    if not nodes_to_drain:
        sys.exit("No cordoned nodes to drain!")
else:
    # validate node list (verify the nodes exist and are cordoned)
    nodes_to_drain = args.nodes
    for node_name in args.nodes:
        if node_name not in kube_nodes:
            sys.exit(f"No such node: {node_name}")
        node = kube_nodes[node_name]
        if not node.spec.unschedulable:
            # verify that the node is cordoned
            msg = f"Node {node_name} is not cordoned!"
            if args.dry_run:
                print(msg)
            else:
                # abort if it's not a dry run
                sys.exit(msg)
def delete_pod(pod, reason):
    """Report and (unless this is a dry run) delete a single pod.

    Args:
        pod: V1Pod object to delete.
        reason: human-readable explanation, echoed in the log line.
    """
    name = pod.metadata.name
    print(f"{dry_prefix}Deleting {name}: {reason}")
    if not args.dry_run:
        kube.delete_namespaced_pod(name, pod.metadata.namespace, {})
# get all pods (across every namespace), fetched once up front
pods = kube.list_pod_for_all_namespaces().items

for node in nodes_to_drain:
    print(f"{dry_prefix}Draining non-user pods from cordoned node {node}")
    pod_count = 0
    to_delete = []
    # find the pods on the node to be deleted
    for pod in [pod for pod in pods if pod.spec.node_name == node]:
        pod_count += 1
        name = pod.metadata.name
        owner = pod.metadata.owner_references
        if owner:
            owner = owner[0]
        if owner and owner.kind in {"ReplicaSet", "StatefulSet"}:
            # delete pods owned by a ReplicaSet/StatefulSet: the controller
            # will relocate them after deletion
            to_delete.append((pod, f"owned by {owner.kind}"))
            continue
        elif owner:
            # owned by some other controller - leave it alone
            print(f"Skipping {name} owned by {owner.kind}")
        else:
            # allow deleting stopped pods
            if pod.status.phase in {"Failed", "Completed"}:
                to_delete.append((pod, f"Pod {pod.status.phase}"))
                continue
            # show pods we aren't choosing to delete,
            # as long as they aren't user/build pods
            if not (
                # it's a build
                pod.metadata.labels.get("component") == "binderhub-build"
                or (
                    # it's a singleuser server
                    pod.metadata.labels.get("app") == "jupyterhub"
                    and pod.metadata.labels.get("component") == "singleuser-server"
                )
            ):
                print(f"Skipping {name} with no owner")

    # report summary and confirm deletion
    if not to_delete:
        print("Found no pods to delete")
        continue

    delete_count = len(to_delete)
    print(f"{dry_prefix}The following {delete_count} pods will be deleted:")
    print(
        " " + "\n ".join(f"{pod.metadata.name} {reason}" for pod, reason in to_delete)
    )
    if not args.answer_yes and not args.dry_run:
        # prompt fix: previously read "[y/N? " (missing closing bracket)
        ans = input(f"Delete {delete_count} pods [y/N]? ")
        if not ans.lower().startswith("y"):
            print("aborting...")
            sys.exit(0)
    # actually do the deletion
    if not args.dry_run:
        for pod, reason in to_delete:
            delete_pod(pod, reason)
    print(
        f"{dry_prefix}Deleted {delete_count} pods, left {pod_count - delete_count} running."
    )
    print(f"Node {node} should be reclaimed when user pods finish draining")
<file_sep>/docs/source/components/matomo.rst
=================================
Matomo (formerly Piwik) analytics
=================================
`Matomo <https://matomo.org/>`_ is a self-hosted free &
open source alternative to `Google Analytics <https://analytics.google.com>`_.
Why?
====
Matomo gives us better control of what is tracked, how long it is stored
& what we can do with the data. We would like to collect as
little data as possible & share it with the world in safe ways
as much as possible. Matomo is an important step in making this possible.
How it is set up?
=================
Matomo is a PHP+MySQL application. We use the apache based upstream
`docker image <https://hub.docker.com/_/matomo/>`_ to run it. We can
improve performance in the future if we wish by switching to ``nginx+fpm``.
We use `Google CloudSQL for MySQL <https://cloud.google.com/sql/docs/mysql/>`_
to provision a fully managed, standard mysql database. The
`sidecar pattern <https://cloud.google.com/sql/docs/mysql/connect-kubernetes-engine>`_
is used to connect Matomo to this database. A service account with appropriate
credentials to connect to the database has been provisioned & checked-in
to the repo. A MySQL user with name ``matomo`` & a MySQL database with name ``matomo``
should also be created in the Google Cloud Console.
Initial Installation
====================
Matomo is a PHP application, and this has a number of drawbacks. The initial
install **`must <https://github.com/matomo-org/matomo/issues/10257>`_** be completed
with a manual web interface. Matomo will error if it finds a complete ``config.ini.php``
file (which we provide) but no database tables exist.
The first time you install Matomo, you need to do the following:
1. Do a deploy. This sets up Matomo, but not the database tables
2. Use ``kubectl --namespace=<namespace> exec -it <matomo-pod> /bin/bash`` to
get shell on the matomo container.
3. Run ``rm config/config.ini.php``.
4. Visit the web interface & complete installation. The database username & password
are available in the secret encrypted files in this repo. So is the admin username
and password. This creates the database tables.
5. When the setup is complete, delete the pod. This should bring up our ``config.ini.php``
file, and everything should work normally.
This is not ideal.
Admin access
============
The admin username for Matomo is ``admin``. You can find the password in
``secret/staging.yaml`` for staging & ``secret/prod.yaml`` for prod.
Security
========
PHP code is notoriously hard to secure. Matomo has had security audits,
so it's not the worst. However, we should treat it with suspicion &
wall off as much of it away as possible. Arbitrary code execution
vulnerabilities often happen in PHP, so we gotta use that as our
security model.
We currently have:
1. A firewall hole (in Google Cloud) allowing it access to the CloudSQL
instance it needs to store data in. Only port 3307 (which is used by
the OAuth2+ServiceAccount authenticated CloudSQLProxy) is open. This
helps prevent random MySQL password grabbers from inside the cluster.
2. A Kubernetes NetworkPolicy is in place that limits what outbound
connections Matomo can make. This should be further tightened down -
ingress should only be allowed on the nginx port from our ingress
controllers.
3. We do not mount a Kubernetes ServiceAccount in the Matomo pod. This
denies it access to the KubernetesAPI.
<file_sep>/deploy.py
#!/usr/bin/env python3
import argparse
import glob
import json
import os
import re
import subprocess
import sys
# Color codes for colored output!
if os.environ.get("TERM"):
    # query the terminal via tput for bold / green / reset escape sequences
    BOLD = subprocess.check_output(["tput", "bold"]).decode()
    GREEN = subprocess.check_output(["tput", "setaf", "2"]).decode()
    NC = subprocess.check_output(["tput", "sgr0"]).decode()
else:
    # no term, no colors
    BOLD = GREEN = NC = ""

# directory containing this script (as-given and fully-resolved variants)
HERE = os.path.dirname(__file__)
ABSOLUTE_HERE = os.path.dirname(os.path.realpath(__file__))

# GCP project id per release
GCP_PROJECTS = {
    "staging": "binderhub-288415",
    "prod": "binderhub-288415",
}

# GCP compute zone (staging) / region (prod) per release
GCP_ZONES = {
    "staging": "us-central1-a",
    "prod": "us-central1",
}

# Mapping of cluster names (keys) to resource group names (values) for Azure deployments
AZURE_RGs = {}
def check_call(cmd, dry_run):
    """
    Run ``cmd`` with subprocess.check_call, or just print the command
    (prefixed with "dry-run:") when ``dry_run`` is true.
    """
    if not dry_run:
        subprocess.check_call(cmd)
    else:
        print("dry-run:", " ".join(cmd))
def check_output(cmd, dry_run):
    """
    Run ``cmd`` with subprocess.check_output and return its decoded stdout.
    In dry-run mode only print the command and return an empty string.
    """
    if dry_run:
        print("dry-run:", " ".join(cmd))
        return ""
    return subprocess.check_output(cmd).decode("utf-8")
def setup_auth_azure(cluster, dry_run=False):
    """
    Set up authentication with a k8s cluster on Azure.

    Reads service-principal credentials from
    secrets/{CLUSTER_NAME}-auth-key-prod.json, logs in with the az CLI,
    and populates the kubeconfig for the named cluster.
    """
    creds_path = os.path.join(ABSOLUTE_HERE, "secrets", f"{cluster}-auth-key-prod.json")
    with open(creds_path) as f:
        azure = json.load(f)

    # Log in to Azure with the service principal
    check_output(
        [
            "az",
            "login",
            "--service-principal",
            "--username",
            azure["sp-app-id"],
            "--password",
            azure["sp-app-key"],
            "--tenant",
            azure["tenant-id"],
        ],
        dry_run,
    )

    # Fetch cluster credentials into the local kubeconfig and echo the output
    print(
        check_output(
            [
                "az",
                "aks",
                "get-credentials",
                "--name",
                cluster,
                "--resource-group",
                AZURE_RGs[cluster],
            ],
            dry_run,
        )
    )
def setup_auth_ovh(release, cluster, dry_run=False):
    """
    Set up authentication with 'ovh' K8S from the ovh-kubeconfig.yml

    Points KUBECONFIG at the checked-in secret kubeconfig for this release
    and activates the matching kubectl context.
    """
    print(f"Setup the OVH authentication for namespace {release}")
    kubeconfig = os.path.join(ABSOLUTE_HERE, "secrets", f"{release}-kubeconfig.yml")
    os.environ["KUBECONFIG"] = kubeconfig
    print(f"Current KUBECONFIG='{kubeconfig}'")
    print(check_output(["kubectl", "config", "use-context", cluster], dry_run))
def setup_auth_gcloud(release, cluster=None, dry_run=False):
    """
    Set up GCloud + Kubectl authentication for talking to a given cluster
    """
    # Authenticate to GoogleCloud using the release's service account key
    check_output(
        [
            "gcloud",
            "auth",
            "activate-service-account",
            f"--key-file=secrets/gke-auth-key-{release}.json",
        ],
        dry_run,
    )

    # Use gcloud to populate ~/.kube/config, which kubectl / helm can use
    check_call(
        [
            "gcloud",
            "container",
            "clusters",
            "get-credentials",
            cluster,
            f"--zone={GCP_ZONES[release]}",
            f"--project={GCP_PROJECTS[release]}",
        ],
        dry_run,
    )
def update_networkbans(cluster, dry_run=False):
    """
    Run secrets/ban.py to update network bans
    """
    print(BOLD + GREEN + f"Updating network-bans for {cluster}" + NC, flush=True)
    cmd = [sys.executable, "secrets/ban.py"]
    # some members have special logic in ban.py,
    # in which case they must be specified on the command-line
    if cluster in {"ovh", "ovh2"}:
        cmd.append(cluster)
    check_call(cmd, dry_run)
def get_config_files(release, config_dir="config"):
    """Return the list of config files to load, in layering order.

    Order: public common files, secret common files, then the public and
    secret per-release files (later files override earlier ones for helm).
    """
    # shared config: <config_dir>/common/*.yaml plus its encrypted counterpart
    files = sorted(glob.glob(os.path.join(config_dir, "common", "*.yaml")))
    files += sorted(glob.glob(os.path.join("secrets", config_dir, "common", "*.yaml")))
    # per-release config, public then secret
    for base in (config_dir, os.path.join("secrets", config_dir)):
        files.append(os.path.join(base, release + ".yaml"))
    return files
def deploy(release, name=None, dry_run=False):
    """Deploys a federation member to a k8s cluster.

    The deployment is done in the following steps:

    1. Deploy cert-manager
    2. Deploy mybinder helm chart
    3. Await deployed deployment and daemonsets to become Ready
    """
    name = name or release
    setup_certmanager(dry_run)

    print(BOLD + GREEN + f"Starting helm upgrade for {release}" + NC, flush=True)
    helm = [
        "helm",
        "upgrade",
        "--install",
        "--cleanup-on-fail",
        "--create-namespace",
        f"--namespace={name}",
        name,
        "mybinder",
    ]
    # layer each config file onto the release with -f
    for config_file in get_config_files(release):
        helm += ["-f", config_file]

    check_call(helm, dry_run)
    print(
        BOLD + GREEN + f"SUCCESS: Helm upgrade for {release} completed" + NC, flush=True
    )

    wait_for_deployments_daemonsets(name, dry_run)
def wait_for_deployments_daemonsets(name, dry_run=False):
    """
    Wait for all deployments and daemonsets to be fully rolled out.

    Lists every deployment and daemonset in namespace ``name`` and runs
    ``kubectl rollout status`` on each, blocking (up to 10 minutes per
    resource) until it reports ready.

    Args:
        name: namespace (and helm release name) to inspect
        dry_run: if True, print the kubectl commands instead of running them
    """
    print(
        BOLD
        + GREEN
        + f"Waiting for all deployments and daemonsets in {name} to be ready"
        + NC,
        flush=True,
    )
    output = check_output(
        [
            "kubectl",
            "get",
            f"--namespace={name}",
            "--output=name",
            "deployments,daemonsets",
        ],
        dry_run,
    )
    # Fix: filter out blank entries. `"".strip().split("\n")` yields [""] when
    # the output is empty (dry-run mode, or nothing deployed yet), which made
    # us invoke `kubectl rollout status` with an empty resource name.
    deployments_and_daemonsets = [d for d in output.splitlines() if d.strip()]
    for d in deployments_and_daemonsets:
        check_call(
            [
                "kubectl",
                "rollout",
                "status",
                f"--namespace={name}",
                "--timeout=10m",
                "--watch",
                d,
            ],
            dry_run,
        )
def setup_certmanager(dry_run=False):
    """
    Install cert-manager separately into its own namespace and `kubectl apply`
    its CRDs each time as helm won't attempt to handle changes to CRD resources.

    To `kubectl apply` the CRDs manually before `helm upgrade` is the typical
    procedure recommended by cert-manager. Sometimes cert-manager provides
    additional upgrade notes, see https://cert-manager.io/docs/release-notes/
    before you upgrade to a new version.
    """
    version = os.getenv("CERT_MANAGER_VERSION")
    if not version:
        raise RuntimeError("CERT_MANAGER_VERSION not set. Source cert-manager.env")

    crds_url = f"https://github.com/jetstack/cert-manager/releases/download/{version}/cert-manager.crds.yaml"
    print(BOLD + GREEN + f"Installing cert-manager CRDs {version}" + NC, flush=True)
    # Sometimes 'replace' is needed for upgrade (e.g. 1.1->1.2)
    check_call(["kubectl", "apply", "-f", crds_url], dry_run)

    print(BOLD + GREEN + f"Installing cert-manager {version}" + NC, flush=True)
    check_call(
        [
            "helm",
            "upgrade",
            "--install",
            "--create-namespace",
            "--namespace=cert-manager",
            "--repo=https://charts.jetstack.io",
            "cert-manager",
            "cert-manager",
            f"--version={version}",
            "--values=config/cert-manager.yaml",
        ],
        dry_run,
    )
def patch_coredns(dry_run=False):
    """Patch coredns resource allocation

    OVH2 coredns does not have sufficient memory by default after our ban patches
    """
    print(BOLD + GREEN + "Patching coredns resources" + NC, flush=True)
    cmd = [
        "kubectl",
        "set",
        "resources",
        "-n",
        "kube-system",
        "deployments/coredns",
        "--limits",
        "memory=250Mi",
        "--requests",
        "memory=200Mi",
    ]
    check_call(cmd, dry_run)
def main():
    """Entry point: parse CLI args, authenticate to the cluster, deploy."""
    # parse command line args
    argparser = argparse.ArgumentParser()
    argparser.add_argument(
        "release",
        help="Release to deploy",
        choices=[
            "staging",
            "prod",
            "ovh",
            "ovh2",
        ],
    )
    argparser.add_argument(
        "--name",
        help="Override helm release name, if different from RELEASE",
    )
    argparser.add_argument(
        "cluster",
        help="Cluster to do the deployment in",
        nargs="?",
        type=str,
    )
    argparser.add_argument(
        "--local",
        action="store_true",
        help="If the script is running locally, skip auth step",
    )
    argparser.add_argument(
        "--dry-run",
        action="store_true",
        help="Print commands, but don't run them",
    )
    args = argparser.parse_args()

    # if one argument given make cluster == release
    cluster = args.cluster or args.release

    # Check if the local flag is set
    if not args.local:
        # Check if the script is being run on CI
        if not os.environ.get("CI"):
            # Catch the case where the script is running locally but the --local flag
            # has not been set. Check that the user is sure that they want to do this!
            print(
                "You do not seem to be running on CI but have not set the --local flag."
            )

            # Use regex to match user input
            regex_no = re.compile("^[n|N][o|O]$")
            regex_yes = re.compile("^[y|Y][e|E][s|S]$")
            response = input("Are you sure you want to execute this script? [yes/no]: ")

            if regex_no.match(response):
                # User isn't sure - exit script
                print("Exiting script.")
                sys.exit()
            elif regex_yes.match(response):
                # User is sure - proceed
                pass
            else:
                # User wrote something that wasn't "yes" or "no"
                raise ValueError("Unrecognised input. Expecting either yes or no.")

        # script is running on CI, proceed with auth and helm setup
        # Pick the auth mechanism based on which cloud hosts the cluster
        if cluster.startswith("ovh"):
            setup_auth_ovh(args.release, cluster, args.dry_run)
            patch_coredns(args.dry_run)
        elif cluster in AZURE_RGs:
            setup_auth_azure(cluster, args.dry_run)
        elif cluster in GCP_PROJECTS:
            setup_auth_gcloud(args.release, cluster, args.dry_run)
        else:
            raise Exception("Cloud cluster not recognised!")

    update_networkbans(cluster, args.dry_run)
    deploy(args.release, args.name, args.dry_run)
if __name__ == "__main__":
main()
<file_sep>/scripts/list_new_commits.py
import requests
from yaml import safe_load as load
# Compare the versions deployed on mybinder.org against the latest published
# versions of BinderHub, JupyterHub and repo2docker, and print GitHub compare
# URLs showing the commits a version bump would pull in.
print("Fetching the SHA for live BinderHub and repo2docker...")

# Load latest requirements
# The live BinderHub version is the trailing hash of the chart dependency
# pinned in this repo's mybinder/Chart.yaml.
url_requirements = "https://raw.githubusercontent.com/jupyterhub/mybinder.org-deploy/HEAD/mybinder/Chart.yaml"
requirements = load(requests.get(url_requirements).text)
binderhub_dep = [
    ii for ii in requirements["dependencies"] if ii["name"] == "binderhub"
][0]
bhub_live = binderhub_dep["version"].split("-")[-1]

# The live JupyterHub version comes from the requirements of that BinderHub chart.
url_binderhub_requirements = f"https://raw.githubusercontent.com/jupyterhub/binderhub/{bhub_live}/helm-chart/binderhub/requirements.yaml"
requirements = load(requests.get(url_binderhub_requirements).text)
jupyterhub_dep = [
    ii for ii in requirements["dependencies"] if ii["name"] == "jupyterhub"
][0]
jhub_live = jupyterhub_dep["version"].split("-")[-1]

# Load latest repo2docker
# (tag of the build image pinned in mybinder/values.yaml)
url_helm_chart = "https://raw.githubusercontent.com/jupyterhub/mybinder.org-deploy/HEAD/mybinder/values.yaml"
helm_chart = requests.get(url_helm_chart)
helm_chart = load(helm_chart.text)
r2d_live = helm_chart["binderhub"]["config"]["KubernetesBuildExecutor"][
    "build_image"
].split(":")[-1]

print("Fetching latest commit SHA for BinderHub and repo2docker...")

# Load latest r2d commit from dockerhub
url = "https://hub.docker.com/v2/repositories/jupyter/repo2docker/tags/"
resp = requests.get(url)
r2d_head = resp.json()["results"][0]["name"]

# Load latest binderhub and jupyterhub commits
# (most recently created chart entry in the jupyterhub helm-chart index)
url_helm_chart = (
    "https://raw.githubusercontent.com/jupyterhub/helm-chart/gh-pages/index.yaml"
)
helm_chart_yaml = load(requests.get(url_helm_chart).text)

latest_hash = {}
for repo in ["binderhub", "jupyterhub"]:
    updates_sorted = sorted(
        helm_chart_yaml["entries"][repo], key=lambda k: k["created"]
    )
    latest_hash[repo] = updates_sorted[-1]["version"].split("-")[-1]

# Build GitHub compare URLs: live version ... latest version
url_bhub = "https://github.com/jupyterhub/binderhub/compare/{}...{}".format(
    bhub_live, latest_hash["binderhub"]
)
url_r2d = (
    f"https://github.com/jupyterhub/repo2docker/compare/{r2d_live}...{r2d_head[:8]}"
)
url_jhub = (
    "https://github.com/jupyterhub/zero-to-jupyterhub-k8s/compare/{}...{}".format(
        jhub_live, latest_hash["jupyterhub"]
    )
)

print("---------------------\n")
print(f"BinderHub: {url_bhub}")
print(f"repo2docker: {url_r2d}")
print(f"JupyterHub: {url_jhub}")
print("\n---------------------")
<file_sep>/images/federation-redirect/Dockerfile
# The build stage
# ---------------
FROM python:3.9-bullseye as build-stage

# tini: tiny init process, used as PID 1 in the final image
RUN curl -sSLo /tini "https://github.com/krallin/tini/releases/download/v0.19.0/tini-amd64" \
    && chmod +x /tini

# pre-build wheels so the slim final stage needs no compiler toolchain
COPY requirements.txt requirements.txt
RUN pip wheel --wheel-dir /wheelhouse \
        -r requirements.txt

# The final stage
# ---------------
FROM python:3.9-slim-bullseye
ENV PYTHONUNBUFFERED=1

# fix known vulnerabilities
RUN apt-get -y update \
    && apt-get -y upgrade \
    && apt-get -y install --no-install-recommends \
        # libcurl is required by pycurl
        libcurl4 \
    && rm -rvf /var/lib/apt/lists/*

COPY --from=build-stage /tini /tini
COPY --from=build-stage /wheelhouse /wheelhouse
RUN pip install --no-cache-dir /wheelhouse/*.whl

# copy content to container
WORKDIR /srv
COPY . .

ENTRYPOINT ["/tini", "--"]
CMD ["python", "/srv/app.py"]
<file_sep>/docs/source/components/index.rst
==========
Components
==========
These pages describe the different technical pieces that make up the
mybinder.org deployment.
.. toctree::
:maxdepth: 2
metrics.md
dashboards.md
ingress.md
cloud.md
matomo.rst
<file_sep>/images/minesweeper/README.md
# minesweeper docker image
docker image with basic dependencies for admin tasks on a kubernetes cluster
(ps, python, python-psutil, python-kubernetes)
## How to update requirements.txt
Because `pip-compile` resolves `requirements.txt` with the current Python for
the current platform, it should be run on the same Python version and platform
as our Dockerfile.
```shell
# run from images/minesweeper
# update requirements.txt based on requirements.in
docker run --rm \
--env=CUSTOM_COMPILE_COMMAND="see README.md" \
--volume=$PWD:/io \
--workdir=/io \
--user=root \
python:3.9-slim-bullseye \
sh -c 'pip install pip-tools==6.* && pip-compile --upgrade'
```
<file_sep>/README.md
# mybinder.org-deploy
Deployment: [](https://github.com/jupyterhub/mybinder.org-deploy/actions)
Deployment, configuration, and Site Reliability documentation files for the
public [mybinder.org][] service.
---
Please note that this repository is participating in a study into sustainability
of open source projects. Data will be gathered about this repository for
approximately the next 12 months, starting from 2021-06-11.
Data collected will include number of contributors, number of PRs, time taken to
close/merge these PRs, and issues closed.
For more information, please visit
[our informational page](https://sustainable-open-science-and-software.github.io/) or download our [participant information sheet](https://sustainable-open-science-and-software.github.io/assets/PIS_sustainable_software.pdf).
---
#### Deploying a Binder Service other than mybinder.org
These files are specific to [mybinder.org][].
If you wish to deploy your own Binder instance, please **do not use** these files.
Instead, you should review the [BinderHub documentation][] and the
[`jupyterhub/binderhub`][] repo to set up your deployment.
## Site Reliability Guide [](https://mybinder-sre.readthedocs.io/en/latest/?badge=latest)
[Site Reliability Guide](https://mybinder-sre.readthedocs.io/en/latest/index.html), the collected wisdom of operating mybinder.org.
Amongst other things the guide contains:
- [How to do a deployment](https://mybinder-sre.readthedocs.io/en/latest/deployment/how.html)
- [What happens during a deployment?](https://mybinder-sre.readthedocs.io/en/latest/deployment/what.html)
- [Incident reports for past incidents](https://mybinder-sre.readthedocs.io/en/latest/incident-reports/incident_reports_toc.html)
- [Incident report template](https://github.com/jupyterhub/mybinder.org-deploy/blob/HEAD/docs/source/incident-reports/template-incident-report.md)
## Key Links
| | Staging | Production |
| -------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------- |
| Site | [staging.mybinder.org](https://staging.mybinder.org) | [mybinder.org](https://mybinder.org) |
| CI Deployment | [](https://github.com/jupyterhub/mybinder.org-deploy/actions?query=workflow%3A%22Continuous+Deployment%22) (both) |
| Deployment checklist | staging | prod |
| Monitoring | staging | [prod](https://grafana.mybinder.org/dashboard/db/kubernetes-cluster-monitoring-binder-prod?refresh=60s&orgId=1) |

| Helm chart | dev | stable |
| ---------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------ |
| JupyterHub | [dev](https://jupyterhub.github.io/helm-chart/#development-releases-jupyterhub) | [stable](https://jupyterhub.github.io/helm-chart/#stable-releases) |
| BinderHub | [dev](https://jupyterhub.github.io/helm-chart/#development-releases-binderhub) | - |
[mybinder.org]: https://mybinder.org
[staging.mybinder.org]: https://staging.mybinder.org
[`jupyterhub/binderhub`]: https://github.com/jupyterhub/binderhub
[binderhub documentation]: https://binderhub.readthedocs.io/en/latest/
<file_sep>/docs/source/incident-reports/2022-01-27-prime-version.md
# 2022-01-27, stale prime version
## Summary
A bug in the federation-redirect health checks
kept the reference version from being updated,
preventing traffic from being sent to other federation members.
This effect lasted approximately nine hours before normal operation was restored without intervention.

## Timeline
All times in CET
### 2022-01-27 11:00
A routine bump of binderhub and repo2docker versions is deployed.
Traffic to federation members other than GKE begins to drop.
At this point, the prime cluster (GKE) is 'full' according to its pod quota.
Due to a bug in federation-redirect,
as long as a cluster is full,
its version is not checked.
However, the version of the prime cluster is used as a reference
when comparing with other federation members.
As long as the prime cluster stays 'full' (or unhealthy for any reason),
the reference version will be out of date.
Any actually up-to-date federation members will be considered to have a mismatched version,
and will not receive any redirect traffic.
### 14:35
Federation members notice drop in traffic.
### 18:00
GKE load drops below quota,
allowing the prime version to be updated in the federation-redirector.
At this point, traffic to all federation members returns to normal.
### 2022-01-28 09:00
Investigations starts.
federation-redirect logs show that the version mismatch is the cause of the drop in traffic,
and that the 'prime' version is the one that's out-of-date.
Initial suspicion was an out-of-date binder pod was still receiving traffic.
Exploring logs in GCP logs explorer eventually eliminated this possibility.
Useful log query:
```
resource.type="k8s_container" resource.labels.cluster_name="prod"
resource.labels.container_name="binder"
"GET / "
```
It was useful to expand the "Log Fields" to see the counts by pod name within the time frame:

A brief digression reduced trust in GCP logs because it is clear that the "Kubernetes Pod" resource logs are missing many entries:
```
resource.type="k8s_pod" resource.labels.cluster_name="prod"
resource.labels.pod_name=~"^binder.*"
```
### 2022-01-28 10:00
Underlying issue in federation-redirect is identified and [fixed][fix-pr].
## Lessons learnt
### What went well
1. Cluster healed itself when traffic dropped below the prime cluster's pod quota.
2. federation-redirect logs clearly showed it was the version mismatch preventing traffic.
### What went wrong
1. federation-redirect did not have the correct 'prime version' for 9 hours, due to load on the prime cluster ([fixed][fix-pr])
2. Response time was low. The issue went unnoticed for 3 hours,
and without action for at least 9 hours (when it resolved itself).
We do not currently have much in the way of proactive 'alerting' ([issue][alerting-issue]),
so only when grafana or the status page is being watched do we notice issues like this.
3. The health of the cluster is not immediately visible except in logs or long-term traffic trends,
which can take a while to be noticed ([issue][metrics-issue]).
We do have most of these checks reimplemented on [the status page][status],
but not the actual values as considered by the redirector (bugs and all).
Notably, the status page does not include the version check,
which caused this incident.
[status]: https://mybinder.readthedocs.io/en/latest/about/status.html
### Where we got lucky
1. Federation 'as a whole' handled the traffic okay,
because the prime cluster can actually handle more traffic than its pod quota.
That said, if we actually stopped routing traffic to the prime cluster while it is unhealthy,
the issue would have been resolved in minutes
because it would not have stayed full.
## Action items
### Process improvements
1. Implement proactive alerts. This is a longstanding issue we have yet to address ([issue][alerting-issue]).
### Technical improvements
1. Fix bug preventing prime cluster's version from being updated while it is full ([pull request][fix-pr])
2. Add metrics for the federation-redirect service to improve visibility of federation health issues ([issue][metrics-issue])
3. Consider allowing the prime cluster to stop accepting traffic when it is unhealthy ([issue][full-issue])
[alerting-issue]: https://github.com/jupyterhub/mybinder.org-deploy/issues/611
[metrics-issue]: https://github.com/jupyterhub/mybinder.org-deploy/issues/2114
[fix-pr]: https://github.com/jupyterhub/mybinder.org-deploy/pull/2113
[full-issue]: https://github.com/jupyterhub/mybinder.org-deploy/issues/2115
<file_sep>/images/analytics-publisher/cloudcosts.py
"""
Produces daily summaries of GCP spending data.
"""
import argparse
import csv
import io
import json
from google.cloud import storage
def totals_from_csv(file):
    """Sum a GCP billing CSV export into per-time-window cost totals.

    Returns a dict mapping (start time, end time) string pairs to the
    accumulated float cost across all rows in that window.
    """
    totals = {}
    for record in csv.DictReader(file):
        window = (record["Start Time"], record["End Time"])
        totals[window] = totals.get(window, 0) + float(record["Cost"])
    return totals
def totals_from_json(file):
    """Sum a GCP billing JSON export into per-time-window cost totals.

    Returns a dict mapping (start_time, end_time) string pairs to the
    accumulated float cost across all items in that window.
    """
    totals = {}
    for entry in json.load(file):
        window = (entry["start_time"], entry["end_time"])
        totals[window] = totals.get(window, 0) + float(entry["cost"]["amount"])
    return totals
def publish_daily_cost(
    billing_bucket_name,
    target_bucket_name,
    target_object_name,
    kind="json",
    debug=False,
    dry_run=False,
):
    """Aggregate all GCP billing exports in a bucket and publish daily totals.

    Reads every export blob from ``billing_bucket_name`` (CSV reports are
    named "report-*", JSON exports "billing-*"), sums costs per
    (start time, end time) window, and uploads the result as sorted JSONL
    to ``target_object_name`` in ``target_bucket_name``.

    Args:
        billing_bucket_name: bucket GCP billing data is exported to.
        target_bucket_name: bucket the aggregate data is pushed to.
        target_object_name: object name for the JSONL output.
        kind: "csv" or "json" -- format of the billing export blobs.
        debug: if True, also print each daily record to stdout.
        dry_run: if True, skip the upload step.

    Returns:
        List of record dicts (version/start_time/end_time/cost), sorted
        by start_time.
    """
    totals = {}
    client = storage.Client()
    bucket = storage.Bucket(client, billing_bucket_name)
    # GCP names CSV exports "report-*" and JSON exports "billing-*".
    if kind == "csv":
        prefix = "report-"
    else:
        prefix = "billing-"
    blobs = bucket.list_blobs(prefix=prefix)
    for blob in blobs:
        buffer = io.StringIO(blob.download_as_string().decode())
        if kind == "csv":
            current_totals = totals_from_csv(buffer)
        else:
            current_totals = totals_from_json(buffer)
        # Merge this blob's per-window totals into the running totals.
        for time_range, cost in current_totals.items():
            totals[time_range] = totals.get(time_range, 0) + cost
    # We want to push out sorted jsonl
    sorted_items = [
        {"version": 1, "start_time": start_time, "end_time": end_time, "cost": cost}
        for (start_time, end_time), cost in totals.items()
    ]
    sorted_items.sort(key=lambda d: d["start_time"])
    if debug:
        for item in sorted_items:
            print(json.dumps(item))
    if not dry_run:
        target_bucket = storage.Bucket(client, target_bucket_name)
        blob = target_bucket.blob(target_object_name)
        # Build the whole JSONL payload in memory, then upload in one request.
        target_buffer = io.StringIO()
        for item in sorted_items:
            target_buffer.write(json.dumps(item) + "\n")
        target_buffer.seek(0)
        blob.upload_from_file(target_buffer)
    return sorted_items
def main():
    """CLI entrypoint: parse arguments and run publish_daily_cost."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "billing_bucket_name", help="Name of bucket GCP billing data is exported to"
    )
    parser.add_argument(
        "target_bucket_name", help="Name of bucket to push aggregate daily data to"
    )
    parser.add_argument(
        "target_object_name",
        help="Name of object to output containing aggregate daily data",
    )
    parser.add_argument(
        "--kind",
        choices=("csv", "json"),
        default="json",
        help="Content Type of billing data export available in bucket",
    )
    parser.add_argument(
        "--debug",
        action="store_true",
        default=False,
        help="Print daily billing data to stdout",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        default=False,
        help="Do not push output to output GCS bucket",
    )
    args = parser.parse_args()
    publish_daily_cost(
        args.billing_bucket_name,
        args.target_bucket_name,
        args.target_object_name,
        kind=args.kind,
        debug=args.debug,
        dry_run=args.dry_run,
    )


if __name__ == "__main__":
    main()
<file_sep>/docs/source/components/dashboards.md
# Operational Dashboards with Grafana
We use [Grafana](https://grafana.com/) for creating dashboards
from our [operational metrics](metrics). Dashboards are useful for
understanding the current status of the system and all its components
at a glance. They are also very useful to try and debug what
is going wrong during / after an outage.
## What is it?
```{image} ../_static/images/dashboard.png
```
A dashboard is a set of pre-defined graphs in a particular layout that
provide an overview of a system. In our case, they provide an overview
of the [operational metrics](metrics) of the components that make
up mybinder.org.
## Where is it?
Our dashboards are at [grafana.mybinder.org](https://grafana.mybinder.org).
It is public for everyone to view - but to edit, you need an admin password
that is private. Open an issue in [jupyterhub/mybinder.org-deploy](https://github.com/jupyterhub/mybinder.org-deploy)
if you want write access to the dashboards.
You can click the button to the right of the Grafana logo in the top left,
and it will open a drop-down menu of dashboards for the mybinder.org deployment.
## Modifying dashboards
Each dashboard is edited directly from the user interface (if you have
access to edit it). You can click on any graph and select the `Edit` option
to see what queries make up the dashboard, and how you can edit it.
All the dashboard definitions are stored in an `sqlite` database on a
disk attached to the running grafana instance.
The [Grafana documentation](https://grafana.com/docs/grafana/latest/basics/)
has more info on the various concepts in Grafana, and how you can use them.
You can also create a new dashboard and play with it. Be careful before
editing currently used dashboards!
## Installation & Configuration
Grafana is installed with the [Grafana helm chart](https://github.com/helm/charts/tree/master/stable/grafana).
You can see the options for configuring it documented in its
[`values.yaml`](https://github.com/helm/charts/blob/master/stable/grafana/values.yaml)
file. You can also see the specific ways we have configured it
in the `grafana` section of `mybinder/values.yaml`, `config/prod.yaml`
and `config/staging.yaml`.
## Annotations
[Annotations](https://grafana.com/docs/grafana/latest/dashboards/annotations/) are
a cool feature of Grafana that lets us add arbitrary markers tagged to
all graphs marking an event that has happened. For example, you can
create an annotation each time a deployment happens - this puts a
marker with info about the deployment on each graph, so you can easily
tell if a particular deployment has caused changes in any metric.
This is very useful for debugging!
We use the script in `travis/post-grafana-annotation.py` to
create annotations just before each deployment. See the docstring in
the script for more details.
<file_sep>/docs/source/incident-reports/2019-02-20-no-logs.md
# incident date: 2019-02-20, kubectl logs unavailable
## Summary
For new builds users did not see the build logs. Builds did happen normally though. Fixed by removing node that had no IP address in its metadata.
## Timeline
All times in Central European Time (CET)
### 2019-02-20 08:03
`kubectl logs -f <pod>` has stopped working, reporting:
```
Error from server: Get https://10.128.0.6:10250/containerLogs/prod/jupyter-.../notebook?follow=true: No SSH tunnels currently open. Were the targets able to accept an ssh-key for user "gke-df315ed616f010764e03"?
```
This occurs for all pods.
Failure to retrieve pod logs means that build logs are not being forwarded to users.
This occurred once previously on 2019-01-08 and lasted 24 hours.
The first occurrence resolved itself without ever being diagnosed.
### 2019-02-20 10:12
Deploy of a minor bump fails due to the failure to create ssh tunnels.
### 2019-02-20 10:20
Planned upgrade of cluster begins,
hoping that upgrading master and bringing in new nodes will fix the issue.
### 2019-02-20 11:17
New nodes are fully up and running (old nodes are still present and draining),
and `kubectl logs` still fails with:
```
Error from server: Get https://10.128.0.17:10250/...?follow=true: No SSH tunnels currently open. Were the targets able to accept an ssh-key for user "gke-df315ed616f010764e03"?
```
At this point, we know that upgrading master and the nodes will _not_ fix `kubectl logs`,
at least for new nodes.
We do not yet know whether removing old nodes will fix the issue (_spoiler: it will_).
Investigation begins in earnest.
[Troubleshooting documentation](https://cloud.google.com/kubernetes-engine/docs/troubleshooting)
suggests that something could have gone wrong with the master's ssh access to the nodes.
This is verified not to be the case by checking:
1. ssh-key metadata is not too full
2. ssh key for gke-df315ed616f010764e03 is present on all nodes in /home/gke-df315ed616f010764e03/.ssh/authorized_keys
3. firewall rules allow ssh from master, which is doubly verified by attempting to ssh to `gke-df315ed616f010764e03@a.b.c.d` for a node from outside the cluster
4. `journalctl | grep df315` verifies that debugging attempts to ssh were rejected by public key and no such attempt was made from the master
At this point we know that there is nothing on the nodes preventing the master from creating an ssh tunnel.
It must be something wrong on the master,
and worryingly is something wrong on the master that is not fixed by upgrading the master.
### 12:00
After some Googling, we found how to get the API server logs:
```bash
kubectl proxy &
curl -O http://localhost:8001/logs/kube-apiserver.log
```
Searching the API server logs for clues relating to ssh:
```
grep ssh kube-apiserver.log
```
revealed only one hint: a repeated occurrence of
```
E0220 10:59:57.797466 1 ssh.go:200] Failed to getAddresses: no preferred addresses found; known addresses: [{Hostname gke-prod-a-users-b921bb88-rd5d}]
```
along with numerous repeats of the much less informative error:
```
logging error output: "Error: 'No SSH tunnels currently open. Were the targets able to accept an ssh-key for user \"gke-df315ed616f010764e03\"?
```
This gives us hope that the issue may go away when the listed node `gke-prod-a-users-b921bb88-rd5d` is removed.
The node has been cordoned and will be removed from the pool when it is empty.
### 13:30
with rd5d drained and removed from the pool,
`kubectl logs` is restored and everything is working.
## Lessons learnt
### What went well
- Even throughout this process while `kubectl logs` didn't work and upgrading the whole cluster,
launch success rate never dropped below 97.5%,
and only very briefly below 100%.
Great job!
- Once the problematic node was removed, everything recovered.
### What went wrong
Things that could have gone better. Ideally these should result in concrete
action items that have GitHub issues created for them and linked to under
Action items. For example,
- It was very difficult to identify the root cause of the issue,
in part because it appears to be a bug in the kubernetes master and/or the GKE service itself.
- It would have been useful to get a `kubectl describe node gke-prod-a-users-b921bb88-rd5d` once we were suspicious of the node,
to use as reference comparing the unhealthy node to healthy ones,
especially now that we know that this node was indeed the cause
### Where we got lucky
- While the event was occurring, the following command was run:
```bash
kubectl get node -o json | jq '.items[] | [.status.addresses[].address]'
```
in order to retrieve ip addresses of nodes for testing direct ssh access.
The problematic node reported unusually having no known ExternalIP or InternalIP. A normal entry looks like:
```json
["10.128.xxx.yyy", "35.202.xxx.yyy", "gke-prod-a-user-be5bcf07-xhxd"]
```
while the unhealthy node reported
```json
["", "gke-prod-a-users-b921bb88-rd5d"]
```
indicating that kubernetes had lost track of the node's ExternalIP and InternalIP.
This was likely a direct symptom of the problem and could be used as a diagnostic in the future.
## Action items
### Process improvements
1. test and notify when `kubectl logs` doesn't work [issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/900)
### Documentation improvements
1. add command to retrieve apiserver logs to sre guide [issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/901)
1. describe how to identify suspicious networking status in sre guide [issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/900)
<file_sep>/docs/source/incident-reports/2018-02-12-launch-fail.md
# 2018-02-12, Hub Launch Fail
## Summary
Binder was successfully building user pods, but was then failing to direct
users to the built pods. It was fixed by deleting the `binder` and `hub` pods.
## Timeline
All times in PST
### 2018-02-12 14:03
We realized that there's a high usage on the mybinder deployment. Tried
building a repository and it would get to the "launching" step then never
proceed further. Eventually it'd return a "your image took too long to launch" error.
### 2018-02-12 14:06
From the [grafana board](https://grafana.mybinder.org), we realized that in the
"Launch Times Summary" plot we showed _all_ pods as failing to launch.
### 14:08
We delete the `binder` and `hub` pods in the `prod` deployment.
### 14:09
Two people confirm that their pods now build and launch fine, Grafana also
shows successful "Launch Times Summary" data.
## Lessons learnt
### What went well
- Once we noted the problem, it was quickly resolved.
### What went wrong
- The outage was present for nearly an hour before we noticed it. This is partially
because the site itself was returning no errors, only taking forever to launch.
### Where we got lucky
- The solution was just "delete `binder` and `hub`" and the problem resolved
itself.
## Action items
### Process improvements
1. Improve the team operations around debugging the cluster more generally. We
should make sure that on average there are N>1 people around with the skills
and time to debug the deployment.
### Documentation improvements
1. Improve the language around site reliability expectations for mybinder.org,
so that these kinds of outages don't feel like we're letting users down. ([link to issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/359))
### Technical improvements
1. We should set up some kind of monitoring for the mybinder.org deployment.
We have plans to do this long-term ([link](https://github.com/jupyterhub/mybinder.org-deploy/issues/19)),
but we should have something quick-and-dirty that gets us part of the way there
quickly. ([link to issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/358))
<file_sep>/docs/source/incident-reports/2018-07-30-jupyterlab-build-cpu-saturate.md
# 2018-07-30 JupyterLab builds saturate BinderHub CPU
## Summary
Binder wasn't properly building pods and launches weren't working. It was
decided that:
1. The `jupyterlab-demo` repo updated itself, triggering a build
1. The update to `jupyterlab-demo` installed a newer version of JupyterLab
1. `repo2docker` needed a loooong time installing this (perhaps because of webpack size issues)
1. Since the repository gets a lot of traffic, each request to launch while
the build is still happening eats up CPU in the Binder pod
1. The Binder pod was thus getting saturated and behaving strangely, causing
the outage
1. Banning the `jupyterlab-demo` repository resolved the CPU saturation issue.
## Timeline
All times in PST (UTC-7)
### 2018-07-30 ca. 11:20
[Gitter Link](https://gitter.im/jupyterhub/binder?at=5b5f56df12f1be7137683cbc)
We notice that launches are not happening as expected. Cluster utilization is very low,
suggesting that pods aren't being created.
### 11:22
- Notice an SSL protocol error:
```
tornado.curl_httpclient.CurlError: HTTP 599: Unknown SSL protocol error in connection to gcr.io:443
```
- Binder pod is deleted and launches return to normal.
### 12:19
- Launches aren't working again, taking a very long time to start up
- Deleted binder and hub pods
- This resolved the issue a second time.
This is the utilization behavior seen:

### 13:29
Behavior is once again going wrong. Launches taking forever to load. We note
a lot of networky-looking problems in the logs.
### 13:41
Deleted several evicted pods. Pods are often evicted because of low resources
available and `kubelet` evicts in order to free up resources for more important
services.
### 14:01
Confirm that networking seems to be fine between production pods.
[Gitter link](https://gitter.im/jupyterhub/binder?at=5b5f7caecb4d5b036ca97bd9)
### 14:16
Note that the CPU utilization of the BinderHub pod is at 100%. If we restart
Binder pod, the new one gradually increases CPU utilization until it hits
100%, then problems begin.
This explains the short-term fixes of deleting the `binder` pod from before.
### 14:45
We realize that the `jupyterlab-demo` repository has been updated and has
a lot of traffic. This seems to be causing strange behavior because it is
still building.
[Gitter link](https://gitter.im/jupyterhub/binder?at=5b5f86b33e264c713850cb5c)
### 15:11
`jupyterlab-demo` repository is banned, and behavior subsequently returns to
normal.
Post-mortem suggests this is the problem:
1. The `jupyterlab-demo` repo updated itself, triggering a build
1. The update to `jupyterlab-demo` installed a newer version of JupyterLab
1. `repo2docker` needed a loooong time installing this (perhaps because of webpack size issues)
1. Since the repository gets a lot of traffic, each request to launch while
the build is still happening eats up CPU in the Binder pod
1. The Binder pod was thus getting saturated and behaving strangely, causing
the outage
1. Banning the `jupyterlab-demo` repository resolved the CPU saturation issue.
## Lessons learned
### What went well
- the binder team did a great job of distributed debugging and got this fixed
relatively quickly once the error was spotted!
### What went wrong
- It took a while before we realized launch behavior was going wonky. We really
could use a notifier for the team :-/
## Action items
These are only sample subheadings. Every action item should have a GitHub issue
(even a small skeleton of one) attached to it, so these do not get forgotten.
### Process improvements
1. set up notifications of downtime ([issue](https://github.com/jupyterhub/mybinder.org-deploy/issues/611))
### Technical improvements
1. Find a way to gracefully handle repositories that take a long time to build (https://github.com/jupyterhub/binderhub/issues/624)
1. Find a way to avoid overloading the Binder CPU when a repository is building
and also getting a lot of traffic at the same time. (https://github.com/jupyterhub/binderhub/issues/624)
<file_sep>/mybinder/files/minesweeper/minesweeper.py
#!/usr/bin/env python3
"""
minesweeper script
Continuous process, on each node via DaemonSet,
to identify processes that could be considered for termination:
- determine which processes are "suspicious" (see herorat.py)
- produce report on suspicious pods:
- show running processes (`ps aux`)
- tail pod logs
- automatically terminate pods likely to be abuse, etc.
"""
import asyncio
import copy
import glob
import json
import os
import pprint
import re
import signal
import socket
import sys
import threading
from concurrent.futures import ThreadPoolExecutor
from functools import partial
from operator import attrgetter
from textwrap import indent
# herorat located in secrets/minesweeper/
import herorat
import kubernetes.client
import kubernetes.config
import psutil
from herorat import inspect_pod, inspect_process
from kubernetes.stream import stream
# Configure the kubernetes client from in-cluster credentials
# (this script runs inside a pod -- see module docstring).
kubernetes.config.load_incluster_config()
kube = kubernetes.client.CoreV1Api()

# Thread-local storage for per-thread kubernetes clients (see get_kube()).
local = threading.local()

# Effective configuration; populated and refreshed by load_config().
config = {}

# Node this instance inspects; NODE_NAME is presumably injected by the
# DaemonSet spec -- TODO confirm against the deployment manifests.
hostname = os.environ.get("NODE_NAME", socket.gethostname())

default_config = {
    # uid of the user processes being inspected
    "userid": 1000,
    # also inspect processes that cannot be matched to a pod?
    "inspect_procs_without_pod": False,
    # inspect docker-in-docker build-container processes?
    "inspect_dind": True,
    # size of the thread pool used for blocking kube/psutil calls
    "threads": 8,
    # seconds to sleep between node reports
    "interval": 300,
    "namespace": os.environ.get("NAMESPACE", "default"),
    # select only user (singleuser-server) pods scheduled on this node
    "pod_selectors": {
        "label_selector": "component=singleuser-server",
        "field_selector": f"spec.nodeName={hostname}",
    },
    # number of log lines to tail when reporting on a suspicious pod
    "log_tail_lines": 100,
    # process attributes to retrieve
    # see psutil.as_dict docs for available fields:
    # https://psutil.readthedocs.io/en/latest/#psutil.Process.as_dict
    "proc_attrs": [
        "cmdline",
        "cpu_percent",
        "cpu_times",
        "exe",
        "memory_info",
        "name",
        "pid",
        "ppid",
        "status",
        "uids",
    ],
}
# herorat (shipped in secrets/minesweeper/) contributes its own defaults.
default_config.update(herorat.default_config)
def get_kube():
    """Return a kubernetes CoreV1Api client private to the calling thread.

    kubernetes client objects are not thread-safe, so one instance is
    lazily created and cached per thread on thread-local storage.
    """
    client = getattr(local, "kube", None)
    if client is None:
        client = local.kube = kubernetes.client.CoreV1Api()
    return client
class Proc(dict):
    """Dict subclass describing one process, with attribute access to keys.

    ``suspicious`` and ``should_terminate`` are added via inspection.
    They can be booleans or truthy strings explaining why the process
    is suspicious or should be terminated.
    """

    def __init__(self, **kwargs):
        # Inspection flags default to "nothing wrong yet".
        kwargs.setdefault("suspicious", False)
        kwargs.setdefault("should_terminate", False)
        super().__init__(**kwargs)
        # Secondary derived fields:
        # cmd is the command-line joined into a single display string
        self["cmd"] = " ".join(self["cmdline"])
        # cpu_total is the sum of cpu-time components (user, system, children, ...)
        self["cpu_total"] = sum(kwargs.get("cpu_times", []))

    def __repr__(self):
        # Show only the interesting keys that are actually set.
        key_fields = ", ".join(
            f"{key}={self.get(key)}"
            for key in (
                "pid",
                "status",
                "suspicious",
                "should_terminate",
                "cmd",
            )
            if self.get(key) is not None
        )
        return f"{self.__class__.__name__}({key_fields})"

    def __getattr__(self, key):
        # Fix: raise AttributeError (not KeyError) for missing keys so that
        # hasattr(), getattr(..., default), copy and pickle behave correctly;
        # the previous bare `return self[key]` leaked KeyError, which violates
        # the __getattr__ protocol.
        try:
            return self[key]
        except KeyError:
            raise AttributeError(key) from None

    def __setattr__(self, key, value):
        self[key] = value
def get_procs(userid):
    """Return all container processes, sorted by descending cpu_percent.

    Note: filtering by ``userid`` is intentionally disabled so processes
    inside pods are caught even if they ran setuid (see inline TODO in
    the original implementation).
    """
    procs = []
    for p in psutil.process_iter(attrs=config["proc_attrs"]):
        info = p.info
        # Skip entries with an empty command line (e.g. kernel threads).
        if not info["cmdline"]:
            continue
        procs.append(Proc(**info))
    # Busiest processes first.
    procs.sort(key=attrgetter("cpu_percent"), reverse=True)
    return procs
def get_pods():
    """Return the pods in the configured namespace as plain dicts."""
    # _preload_content=False gives the raw HTTP response; we json-parse it
    # ourselves because preloaded list results aren't plain dicts.
    response = get_kube().list_namespaced_pod(
        config["namespace"],
        _preload_content=False,
        **config["pod_selectors"],
    )
    body = response.read().decode("utf8")
    return json.loads(body)["items"]
def pods_by_uid(pods):
    """Index a list of pod dicts by their metadata.uid."""
    indexed = {}
    for pod in pods:
        indexed[pod["metadata"]["uid"]] = pod
    return indexed
def get_all_pod_uids():
    """Map each pid to its pod uid by scanning /proc/*/cgroup."""
    pod_uids = {}
    for cgroup_file in glob.glob("/proc/[0-9]*/cgroup"):
        pid = int(cgroup_file.split("/")[-2])
        try:
            with open(cgroup_file) as f:
                cgroups = f.read()
        except FileNotFoundError:
            # Process exited between glob and open; skip it.
            continue
        match = re.search(r"[/-]pod([^/\.]+)", cgroups)
        if match is None:
            # Not running inside a pod.
            continue
        # cgroup names use '_' where pod uids use '-'.
        pod_uids[pid] = match.group(1).replace("_", "-")
    return pod_uids
def get_dind_procs():
    """Return the processes running in dind-managed build containers.

    Identified by cgroup: the dind-created cgroups for build containers
    are nested an extra /docker/ level below the dind pod's own cgroup:
    - dind pod itself:        /kubepods/burstable/pod{u-u-i-d}/{abc123}
    - container run by dind:  {dind_pod_cgroup}/docker/{def456}
    """
    procs = []
    for cgroup_file in glob.glob("/proc/[0-9]*/cgroup"):
        pid = int(cgroup_file.split("/")[-2])
        try:
            with open(cgroup_file) as f:
                cgroups = f.read()
        except FileNotFoundError:
            # process deleted, ignore
            continue
        m = re.search("/pod[^/]+/[^/]+/docker/(.+)", cgroups)
        if m is None:
            # not a dind proc
            continue
        try:
            proc_dict = psutil.Process(pid).as_dict(config["proc_attrs"])
        except psutil.NoSuchProcess:
            # Fix: was `pass`, which fell through to the append below with a
            # stale proc_dict from a previous iteration (or a NameError on the
            # first one); a vanished process must be skipped entirely.
            continue
        procs.append(Proc(**proc_dict))
    return procs
def associate_pods_procs(pods, procs):
    """Attach each process to the pod that owns it.

    Sets pod["minesweeper"]["procs"] on every pod to the list of its
    processes, and returns (pods, procs_without_pods) where the second
    element collects processes with no matching pod.
    """
    for pod in pods.values():
        pod["minesweeper"] = {"procs": []}

    pid_to_uid = get_all_pod_uids()
    orphans = []
    for proc in procs:
        owner = pods.get(pid_to_uid.get(proc.pid))
        if owner is None:
            orphans.append(proc)
        else:
            owner["minesweeper"]["procs"].append(proc)
    return pods, orphans
def ps_pod(pod, userid=1000):
    """Run ``ps aux`` inside a pod via the kube exec API and return its output.

    On any failure the error is returned as a string instead of raised, so
    one broken pod doesn't abort the whole report.

    Note: ``userid`` is not referenced in the body -- presumably vestigial;
    TODO confirm it can be dropped.
    """
    kube = get_kube()
    try:
        client = stream(
            kube.connect_get_namespaced_pod_exec,
            pod["metadata"]["name"],
            namespace=pod["metadata"]["namespace"],
            command=["ps", "aux"],
            stderr=True,
            stdin=False,
            stdout=True,
            _preload_content=False,
        )
        # Pump the exec websocket until the command completes (60s cap).
        client.run_forever(timeout=60)
        stderr = client.read_stderr()
        if stderr.strip():
            print(f"err! {stderr}", file=sys.stderr)
        stdout = client.read_stdout()
        returncode = client.returncode
        # Non-zero exit from ps: surface both streams in the error.
        if returncode:
            raise RuntimeError(f"stdout={stdout}\nstderr={stderr}")
        return stdout
    except Exception as e:
        return f"Error reporting on ps in {pod['metadata']['name']}: {e}"
def log_pod(pod):
    """Tail the last config["log_tail_lines"] log lines of a suspicious pod.

    On failure, returns the error as a string instead of raising so a
    single broken pod doesn't abort the whole report.
    """
    try:
        return get_kube().read_namespaced_pod_log(
            pod["metadata"]["name"],
            namespace=pod["metadata"]["namespace"],
            tail_lines=config["log_tail_lines"],
        )
    except Exception as e:
        return f"Error collecting logs for {pod['metadata']['name']}: {e}"
async def report_pod(pod):
    """Print a combined ps + logs report for a single pod."""
    pod_name = pod["metadata"]["name"]
    # Run both blocking kube calls concurrently in the thread pool.
    ps, logs = await asyncio.gather(
        in_pool(lambda: ps_pod(pod)),
        in_pool(lambda: log_pod(pod)),
    )
    report_lines = [
        pod_name,
        f"ps {pod_name}:",
        indent(ps, " "),
        f"logs {pod_name}:",
        indent(logs, " "),
    ]
    print("\n".join(report_lines))
def terminate_pod(pod):
    """Delete a pod; intended to be called from a worker thread."""
    meta = pod["metadata"]
    print(f"Deleting pod {meta['name']}")
    get_kube().delete_namespaced_pod(name=meta["name"], namespace=meta["namespace"])
async def node_report(pods=None, userid=1000):
    """Print a report of suspicious processes on a single node.

    Gathers pods and processes, associates them, runs herorat inspection,
    reports on anything suspicious, kills dind processes flagged for
    termination, and finally deletes pods flagged for termination.

    Args:
        pods: optional pre-fetched mapping of pod uid -> pod dict; fetched
            from the API server when None.
        userid: passed through to get_procs (see its note on filtering).
    """
    if pods is None:
        pods = pods_by_uid(await in_pool(get_pods))
    procs = await in_pool(lambda: get_procs(userid))
    print(f"Total processes for {hostname}: {len(procs)}\n", end="")
    pods, procs_without_pod = associate_pods_procs(pods, procs)

    # inspect all procs in our pods
    user_procs = []
    for pod in pods.values():
        user_procs.extend(pod["minesweeper"]["procs"])
        pod["minesweeper"]["procs"] = [
            inspect_process(p) for p in pod["minesweeper"]["procs"]
        ]
    print(f"Total user pods for {hostname}: {len(pods)}\n", end="")
    print(f"Total user processes for {hostname}: {len(user_procs)}\n", end="")

    suspicious_pods = [pod for pod in pods.values() if inspect_pod(pod)["suspicious"]]
    print(f"Pods of interest for {hostname}: {len(suspicious_pods)}")

    # report on all suspicious pods; schedule the reports now and await
    # them later so they overlap with the rest of the inspection work
    report_futures = []
    for pod in suspicious_pods:
        fut = asyncio.ensure_future(report_pod(pod))
        report_futures.append(fut)
        # yield to the event loop so scheduled reports can start
        await asyncio.sleep(0)

    # report on suspicious processes with no matching pod
    suspicious_procs_without_pod = []
    if config["inspect_procs_without_pod"]:
        procs_without_pod = [inspect_process(p) for p in procs_without_pod]
        suspicious_procs_without_pod = [p for p in procs_without_pod if p.suspicious]
        if suspicious_procs_without_pod:
            print(
                f"No pods found for {len(suspicious_procs_without_pod)} suspicious processes on {hostname}:"
            )
            for proc in suspicious_procs_without_pod:
                print(f" {proc.pid}: {proc.cmd}")

    # report on suspicious dind processes; these have no pod to delete,
    # so flagged processes are SIGKILLed directly
    if config["inspect_dind"]:
        dind_procs = [inspect_process(p) for p in get_dind_procs()]
        print(f"Total dind processes for {hostname}: {len(dind_procs)}")
        for proc in dind_procs:
            if proc.should_terminate:
                print(f"dind process should terminate: {proc}")
                try:
                    os.kill(proc.pid, signal.SIGKILL)
                except OSError as e:
                    print(f"Failed to kill {proc}: {e}")
            elif proc.suspicious:
                print(f"dind process is suspicious: {proc}")
        # TODO: find a way to identify the build repo responsible for suspicious processes in dind
        # suspicious_dind_procs_without_pod = [
        #     p for p in procs_without_pod if p.suspicious
        # ]

    if report_futures:
        await asyncio.gather(*report_futures)

    # finally, terminate pods that meet the immediate termination condition
    pods_to_terminate = [
        pod for pod in suspicious_pods if pod["minesweeper"]["should_terminate"]
    ]
    if pods_to_terminate:
        terminate_futures = [
            in_pool(partial(terminate_pod, pod)) for pod in pods_to_terminate
        ]
        await asyncio.gather(*terminate_futures)
def get_pool(n=None):
    """Return the process-wide ThreadPoolExecutor, creating it on first use.

    Pool size comes from config["threads"]; the ``n`` parameter is kept
    for interface compatibility but is not used.
    """
    pool = get_pool._pool
    if pool is None:
        pool = get_pool._pool = ThreadPoolExecutor(config["threads"])
    return pool


# Cache slot for the lazily-created singleton pool.
get_pool._pool = None
def in_pool(func):
    """Submit func to the shared thread pool and return an awaitable future."""
    return asyncio.wrap_future(get_pool().submit(func))
def load_config():
    """Load config from the mounted config map.

    The configmap may change while the daemon runs, so this is re-read
    from the file on every call. Mutates the module-level ``config`` dict
    in place (defaults first, then file overrides) and returns it.
    """
    global config
    # Snapshot the old config so we only log when something changed.
    prior_config = copy.deepcopy(config)
    config.update(default_config)
    config_file = "/etc/minesweeper/minesweeper.json"
    if os.path.isfile(config_file):
        with open(config_file) as f:
            file_config = json.load(f)
        config.update(file_config)
        # sync global config with herorat
        herorat.config = config
    else:
        print(f"No such file: {config_file}")
    if config != prior_config:
        print("Loaded config:")
        pprint.pprint(config)
    return config
async def main():
    """Main entrypoint: run node_report periodically forever."""
    while True:
        # reload each iteration since the configmap can change at runtime
        load_config()
        await node_report(userid=config["userid"])
        # interval between reports, in seconds
        await asyncio.sleep(config["interval"])


if __name__ == "__main__":
    asyncio.run(main())
|
b7b409c839b4110e23219483f667f8d01989221f
|
[
"reStructuredText",
"Markdown",
"Python",
"Text",
"Dockerfile",
"Shell"
] | 89
|
Markdown
|
jupyterhub/mybinder.org-deploy
|
bf460540a5dccb8ec0a445aee0a8496c0c328a4d
|
2450d4631e0c1e9b40e98e16fdcfb68118d26dcf
|
refs/heads/master
|
<file_sep>using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace ConsoleApp1
{
public class FaturaService
{
        /// <summary>
        /// Loads the sample invoices and, if any closed ("FECHADA") invoice is
        /// already paid, marks every invoice with an older closing date than the
        /// most recently paid one as paid too, printing the paid count before
        /// and after. (Ordering by dataFechamento string works because the
        /// dates are ISO "yyyy-MM-dd" -- lexicographic == chronological.)
        /// </summary>
        public static void Run()
        {
            List<Fatura> faturas = FillFaturas();

            // Check whether any invoice is closed and already paid.
            if (faturas.Any(w => w.situacaoProcessamento == "FECHADA" && w.pagamentoEfetuado))
            {
                PrintInvoicesPaid(faturas);

                // Walk newest-to-oldest; once a paid invoice is seen, every
                // older invoice is flagged as paid as well.
                bool ultimaFaturaPaga = false;
                foreach (var fatura in faturas.OrderByDescending(o => o.dataFechamento))
                {
                    if (fatura.pagamentoEfetuado)
                        ultimaFaturaPaga = true;

                    if (ultimaFaturaPaga)
                        fatura.pagamentoEfetuado = true;

                    Console.WriteLine(fatura.dataFechamento + " - paid: " + fatura.pagamentoEfetuado);
                }

                PrintInvoicesPaid(faturas);
                Console.ReadLine();
            }
        }
        /// <summary>
        /// Prints the number of invoices that are both closed ("FECHADA") and paid.
        /// </summary>
        static void PrintInvoicesPaid(List<Fatura> faturas)
        {
            Console.WriteLine("Faturas pagas: " + faturas.Count(w => w.situacaoProcessamento == "FECHADA" && w.pagamentoEfetuado));
        }
private static List<Fatura> FillFaturas()
{
#region JSON faturas
StringBuilder j = new StringBuilder();
j.AppendLine("[{ ");
j.AppendLine("\"idConta\": 468647, ");
j.AppendLine("\"situacaoProcessamento\": \"ABERTA\", ");
j.AppendLine("\"pagamentoEfetuado\": false, ");
j.AppendLine("\"dataVencimentoFatura\": \"2018-12-05\", ");
j.AppendLine("\"dataVencimentoReal\": \"2018-12-05\", ");
j.AppendLine("\"dataFechamento\": \"2018-11-19\", ");
j.AppendLine("\"valorTotal\": 42.34, ");
j.AppendLine("\"valorPagamentoMinimo\": null, ");
j.AppendLine("\"saldoAnterior\": null ");
j.AppendLine("}, ");
j.AppendLine("{ ");
j.AppendLine("\"idConta\": 468647, ");
j.AppendLine("\"situacaoProcessamento\": \"ABERTA\", ");
j.AppendLine("\"pagamentoEfetuado\": false, ");
j.AppendLine("\"dataVencimentoFatura\": \"2018-11-05\", ");
j.AppendLine("\"dataVencimentoReal\": \"2018-11-05\", ");
j.AppendLine("\"dataFechamento\":\"2018-10-18\", ");
j.AppendLine("\"valorTotal\": 1390.14, ");
j.AppendLine("\"valorPagamentoMinimo\": null, ");
j.AppendLine("\"saldoAnterior\": null ");
j.AppendLine("}, ");
j.AppendLine("{ ");
j.AppendLine("\"idConta\": 468647, ");
j.AppendLine("\"situacaoProcessamento\": \"ABERTA\", ");
j.AppendLine("\"pagamentoEfetuado\": false, ");
j.AppendLine("\"dataVencimentoFatura\": \"2018-10-05\", ");
j.AppendLine("\"dataVencimentoReal\": \"2018-10-05\", ");
j.AppendLine("\"dataFechamento\": \"2018-09-17\", ");
j.AppendLine("\"valorTotal\": 1390.14, ");
j.AppendLine("\"valorPagamentoMinimo\": null, ");
j.AppendLine("\"saldoAnterior\": null ");
j.AppendLine("}, ");
j.AppendLine("{ ");
j.AppendLine("\"idConta\": 468647, ");
j.AppendLine("\"situacaoProcessamento\": \"ABERTA\", ");
j.AppendLine("\"pagamentoEfetuado\": false, ");
j.AppendLine("\"dataVencimentoFatura\": \"2018-09-05\", ");
j.AppendLine("\"dataVencimentoReal\": \"2018-09-05\", ");
j.AppendLine("\"dataFechamento\": \"2018-08-20\", ");
j.AppendLine("\"valorTotal\": 3673.67, ");
j.AppendLine("\"valorPagamentoMinimo\": null, ");
j.AppendLine("\"saldoAnterior\": 2063.93 ");
j.AppendLine("}, ");
j.AppendLine("{ ");
j.AppendLine("\"idConta\": 468647, ");
j.AppendLine("\"situacaoProcessamento\": \"FECHADA\", ");
j.AppendLine("\"pagamentoEfetuado\": false, ");
j.AppendLine("\"dataVencimentoFatura\": \"2018-08-05\", ");
j.AppendLine("\"dataVencimentoReal\": \"2018-08-08\", ");
j.AppendLine("\"dataFechamento\": \"2018-07-18\", ");
j.AppendLine("\"valorTotal\": 2063.93, ");
j.AppendLine("\"valorPagamentoMinimo\": 2063.93, ");
j.AppendLine("\"saldoAnterior\": 6485.74 ");
j.AppendLine("}, ");
j.AppendLine("{ ");
j.AppendLine("\"idConta\": 468647, ");
j.AppendLine("\"situacaoProcessamento\": \"FECHADA\", ");
j.AppendLine("\"pagamentoEfetuado\": true, ");
j.AppendLine("\"dataVencimentoFatura\": \"2018-07-05\", ");
j.AppendLine("\"dataVencimentoReal\": \"2018-07-09\", ");
j.AppendLine("\"dataFechamento\": \"2018-06-18\", ");
j.AppendLine("\"valorTotal\": 6485.74, ");
j.AppendLine("\"valorPagamentoMinimo\": 6485.74, ");
j.AppendLine("\"saldoAnterior\": 3230.19 ");
j.AppendLine("}, ");
j.AppendLine("{ ");
j.AppendLine("\"idConta\": 468647, ");
j.AppendLine("\"situacaoProcessamento\": \"FECHADA\", ");
j.AppendLine("\"pagamentoEfetuado\": false, ");
j.AppendLine("\"dataVencimentoFatura\": \"2018-06-05\", ");
j.AppendLine("\"dataVencimentoReal\": \"2018-06-07\", ");
j.AppendLine("\"dataFechamento\": \"2018-05-22\", ");
j.AppendLine("\"valorTotal\": 3230.19, ");
j.AppendLine("\"valorPagamentoMinimo\": 1023.55, ");
j.AppendLine("\"saldoAnterior\": 1913.97 ");
j.AppendLine("}, ");
j.AppendLine("{ ");
j.AppendLine("\"idConta\": 468647, ");
j.AppendLine("\"situacaoProcessamento\": \"FECHADA\", ");
j.AppendLine("\"pagamentoEfetuado\": true, ");
j.AppendLine("\"dataVencimentoFatura\": \"2018-05-05\", ");
j.AppendLine("\"dataVencimentoReal\": \"2018-05-09\", ");
j.AppendLine("\"dataFechamento\": \"2018-04-19\", ");
j.AppendLine("\"valorTotal\": 1913.97, ");
j.AppendLine("\"valorPagamentoMinimo\": 604.1, ");
j.AppendLine("\"saldoAnterior\": 947.82 ");
j.AppendLine("}, ");
j.AppendLine("{ ");
j.AppendLine("\"idConta\": 468647, ");
j.AppendLine("\"situacaoProcessamento\": \"FECHADA\", ");
j.AppendLine("\"pagamentoEfetuado\": true, ");
j.AppendLine("\"dataVencimentoFatura\": \"2018-04-05\", ");
j.AppendLine("\"dataVencimentoReal\": \"2018-04-09\", ");
j.AppendLine("\"dataFechamento\": \"2018-03-22\", ");
j.AppendLine("\"valorTotal\": 947.82, ");
j.AppendLine("\"valorPagamentoMinimo\": 301.11, ");
j.AppendLine("\"saldoAnterior\": 5297.66 ");
j.AppendLine("}, ");
j.AppendLine("{ ");
j.AppendLine("\"idConta\": 468647, ");
j.AppendLine("\"situacaoProcessamento\": \"FECHADA\", ");
j.AppendLine("\"pagamentoEfetuado\": true, ");
j.AppendLine("\"dataVencimentoFatura\": \"2018-03-05\", ");
j.AppendLine("\"dataVencimentoReal\": \"2018-03-07\", ");
j.AppendLine("\"dataFechamento\": \"2018-02-19\", ");
j.AppendLine("\"valorTotal\": 5297.66, ");
j.AppendLine("\"valorPagamentoMinimo\": 5297.66, ");
j.AppendLine("\"saldoAnterior\": 2935.88 ");
j.AppendLine("}, ");
j.AppendLine("{ ");
j.AppendLine("\"idConta\": 468647, ");
j.AppendLine("\"situacaoProcessamento\": \"FECHADA\", ");
j.AppendLine("\"pagamentoEfetuado\": false, ");
j.AppendLine("\"dataVencimentoFatura\": \"2018-02-05\", ");
j.AppendLine("\"dataVencimentoReal\": \"2018-02-07\", ");
j.AppendLine("\"dataFechamento\": \"2018-01-24\", ");
j.AppendLine("\"valorTotal\": 2935.88, ");
j.AppendLine("\"valorPagamentoMinimo\": 901.71, ");
j.AppendLine("\"saldoAnterior\": 149.09 ");
j.AppendLine("}, ");
j.AppendLine("{ ");
j.AppendLine("\"idConta\": 468647, ");
j.AppendLine("\"situacaoProcessamento\": \"FECHADA\", ");
j.AppendLine("\"pagamentoEfetuado\": true, ");
j.AppendLine("\"dataVencimentoFatura\": \"2018-01-05\", ");
j.AppendLine("\"dataVencimentoReal\": \"2018-01-08\", ");
j.AppendLine("\"dataFechamento\": \"2017-12-18\", ");
j.AppendLine("\"valorTotal\": 149.09, ");
j.AppendLine("\"valorPagamentoMinimo\": 60, ");
j.AppendLine("\"saldoAnterior\": 0 ");
j.AppendLine("}, ");
j.AppendLine("{ ");
j.AppendLine("\"idConta\": 468647, ");
j.AppendLine("\"situacaoProcessamento\": \"FECHADA\", ");
j.AppendLine("\"pagamentoEfetuado\": false, ");
j.AppendLine("\"dataVencimentoFatura\": \"2017-12-05\", ");
j.AppendLine("\"dataVencimentoReal\": \"2017-12-07\", ");
j.AppendLine("\"dataFechamento\": \"2017-11-21\", ");
j.AppendLine("\"valorTotal\": 0, ");
j.AppendLine("\"valorPagamentoMinimo\": 0, ");
j.AppendLine("\"saldoAnterior\": 0 ");
j.AppendLine("} ");
j.AppendLine("] ");
#endregion
List<Fatura> o = JsonConvert.DeserializeObject<List<Fatura>>(j.ToString());
return o;
}
static List<Fatura> FillFaturas2()
{
List<Fatura> faturas = new List<Fatura>();
faturas.Add(new Fatura() { dataFechamento = "2018-04-17", pagamentoEfetuado = true });
return faturas;
}
class Fatura
{
public int idConta { get; set; }
public string situacaoProcessamento { get; set; }
public bool pagamentoEfetuado { get; set; }
public string dataVencimentoFatura { get; set; }
public string dataVencimentoReal { get; set; }
public string dataFechamento { get; set; }
public double valorTotal { get; set; }
public double? valorPagamentoMinimo { get; set; }
public double? saldoAnterior { get; set; }
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text;
namespace ConsoleApp1
{
// In-memory representation of one line of the adjustments input file.
class Conta
{
    public int Id { get; set; }           // account id (Id_Conta)
    public decimal Amount { get; set; }   // amount to subtract from the available limit
    public bool Duplicated { get; set; }  // true when this id appears more than once in the file
}
/// <summary>
/// Reads a tab-separated "accountId&lt;TAB&gt;amount" text file and generates a SQL
/// script that subtracts each amount from the account's available limit.
/// </summary>
public class LimitesDisponibilidades
{
    readonly string content, filePath = string.Empty;
    public string Content => content;
    // Full path of the .sql file produced by UpdateLimitesDisponibilidades.
    public string GeneratedFile { get; set; }

    public LimitesDisponibilidades(string content, string filePath)
    {
        this.content = content;
        this.filePath = filePath;
    }

    // Convenience entrypoint: processes a hard-coded input file.
    public static void Run()
    {
        string filePath = @"C:\Temp\ajuste_contas5.txt";
        string content = System.IO.File.ReadAllText(filePath);
        LimitesDisponibilidades limitesDisponibilidades = new LimitesDisponibilidades(content, filePath);
        limitesDisponibilidades.UpdateLimitesDisponibilidades();
    }

    /// <summary>
    /// Parses the input lines, flags duplicated account ids, writes the
    /// generated UPDATE script next to the input file (same name, .sql
    /// extension) and returns the script text.
    /// </summary>
    public string UpdateLimitesDisponibilidades()
    {
        System.IO.FileInfo fi = new System.IO.FileInfo(filePath);
        StringBuilder builder = new StringBuilder();
        List<Conta> contas = new List<Conta>();
        builder.AppendLine("SET NOCOUNT ON");
        builder.AppendLine("BEGIN");
        string[] lines = content.Split(new[] { Environment.NewLine }, StringSplitOptions.None);
        foreach (var line in lines)
        {
            // Stop at the first short line (trailing blank/footer).
            if (line.Length <= 5)
                break;
            int idConta = Int32.Parse(line.Split('\t')[0]);
            string value = line.Split('\t')[1];
            decimal amount = 0;
            // BUG FIX: "R$10.000,00" has "R$" at index 0, so the original
            // `IndexOf("R$") > 0` test never matched and the raw text
            // (currency symbol included) reached Decimal.Parse, which throws.
            // `>= 0` strips the symbol whenever it is present.
            // NOTE(review): Decimal.Parse here is culture-dependent; the
            // "10.000,00" format assumes a pt-BR current culture -- confirm.
            if (value.IndexOf("R$") >= 0)
                amount = Decimal.Parse(value.Replace("R$", ""));
            else
                amount = Decimal.Parse(value);
            contas.Add(new Conta() { Id = idConta, Amount = amount });
        }
        // Find every account id that appears more than once in the file...
        foreach (var item in contas.GroupBy(c => c.Id).Where(grp => grp.Count() > 1).Select(grp => grp.Key))
        {// ...and mark all of its rows as duplicated.
            foreach (var conta in contas.Where(c => c.Id == item))
                conta.Duplicated = true;
        }
        // Warn in the script header when duplicates exist.
        if (contas.Where(w => w.Duplicated).Count() > 0)
            builder.AppendLine("\t--Arquivo com contas duplicadas (ContaDuplicada).");
        foreach (var conta in contas.OrderBy(w => w.Id))
        {
            builder.Append("\tUPDATE LimitesDisponibilidades ");
            builder.AppendFormat("SET DisponibGlobalCredito = DisponibGlobalCredito - {0}\t", conta.Amount.ToString(new CultureInfo("en-US")));
            // Extra tab so the WHERE clause lines up for short amounts.
            if (conta.Amount.ToString().Length <= 4) builder.Append("\t");
            builder.AppendFormat("WHERE Id_Conta = {0}", conta.Id);
            // Inline marker so duplicated accounts are easy to spot.
            if (conta.Duplicated) builder.Append("\t -- ContaDuplicada");
            builder.AppendLine();
        }
        builder.AppendLine("END");
        GeneratedFile = fi.FullName.Replace(fi.Extension, ".sql");
        System.IO.File.WriteAllText(GeneratedFile, builder.ToString(), Encoding.ASCII);
        return builder.ToString();
    }
}
}
<file_sep>using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.IO;
using System.Text;
namespace UnitTestProject1
{
// MSTest suite for ConsoleApp1.LimitesDisponibilidades (SQL script generator).
[TestClass]
public class LimitesDisponibilidades
{
    // Raw input text, fake input path, and the expected generated SQL.
    string content, filePath, validContent = string.Empty;
    ConsoleApp1.LimitesDisponibilidades limitesDisponibilidades;

    // Builds a two-account input fixture, the expected output, and the
    // system under test before each test.
    [TestInitialize]
    public void Setup()
    {
        filePath = @"C:\temp\teste.txt";
        #region Content
        StringBuilder buildContent = new StringBuilder();
        buildContent.AppendLine("64666\tR$10.000,00 ");
        buildContent.AppendLine("17914\tR$10.000,00 ");
        this.content = buildContent.ToString();
        #endregion
        #region ReturnContent
        // NOTE(review): this expectation uses "ID_CONTA" (upper case) and a
        // single space before WHERE, while the generator emits "Id_Conta"
        // preceded by a tab -- the string comparison below likely fails;
        // confirm which side is authoritative.
        StringBuilder buildContentReturned = new StringBuilder();
        buildContentReturned.AppendLine("SET NOCOUNT ON");
        buildContentReturned.AppendLine("BEGIN");
        buildContentReturned.AppendLine("\tUPDATE LimitesDisponibilidades SET DisponibGlobalCredito = DisponibGlobalCredito - 10000.00 WHERE ID_CONTA = 64666");
        buildContentReturned.AppendLine("\tUPDATE LimitesDisponibilidades SET DisponibGlobalCredito = DisponibGlobalCredito - 10000.00 WHERE ID_CONTA = 17914");
        buildContentReturned.AppendLine("END");
        this.validContent = buildContentReturned.ToString();
        #endregion
        limitesDisponibilidades = new ConsoleApp1.LimitesDisponibilidades(this.content, this.filePath);
    }

    // The generated script text must match the expected SQL exactly.
    [TestMethod]
    public void UpdateLimitesDisponibilidades()
    {
        string returnedContent = limitesDisponibilidades.UpdateLimitesDisponibilidades();
        Assert.AreEqual(returnedContent, validContent);
    }

    // The generated file keeps the input file's base name.
    [TestMethod]
    public void FileNameMustBeTheSame()
    {
        limitesDisponibilidades.UpdateLimitesDisponibilidades();
        FileInfo fileInfoTo = new FileInfo(limitesDisponibilidades.GeneratedFile);
        FileInfo fileInfoFrom = new FileInfo(filePath);
        string fileNameTo = fileInfoTo.Name.Replace(fileInfoTo.Extension, string.Empty);
        string fileNameFrom = fileInfoFrom.Name.Replace(fileInfoFrom.Extension, string.Empty);
        Assert.IsTrue(fileNameFrom.Equals(fileNameTo));
    }

    // The generated file always carries a .sql extension.
    [TestMethod]
    public void ExtensionMustBeSql()
    {
        limitesDisponibilidades.UpdateLimitesDisponibilidades();
        FileInfo fi = new FileInfo(limitesDisponibilidades.GeneratedFile);
        Assert.IsTrue(fi.Extension.Equals(".sql"));
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace ConsoleApp1
{
// Console entrypoint: uncomment the routine you want to run.
class Program
{
    static void Main(string[] args)
    {
        //UpdateLimitesDisponibilidades();
        //StringBuilderJava();
        //SmsContas.Run();
        //FaturaService.Run();
        LimitesDisponibilidades.Run();
    }
}
}
<file_sep>using System;
using System.Linq;
using System.Text;
namespace ConsoleApp1
{
/// <summary>
/// Generates batched SQL scripts that enroll accounts in the SMS control
/// service (SmsContas) and queue an activation message for each one.
/// Input: a .rpt file with one account id per line.
/// Output: one .sql file per INSERTS_PER_FILE accounts.
/// </summary>
public class SmsContas
{
    #region Counts without SMS enabled
    //select count(1)
    //from contas c
    //JOIN Produtos p(NOLOCK)
    //ON p.ID_PRODUTO = c.ID_PRODUTO
    // INNER JOIN ParametrosProdutos pp
    //ON pp.ID_PRODUTO = P.ID_PRODUTO
    //AND pp.CODIGO = 'HabilitaSmsControle' AND pp.Logico = 1
    //where 1=1
    //and status not in (
    // 4, -- Cancelado Glosa
    // 8, -- Creliq
    // 9, -- Perda
    // 14, -- Cancelada Falec. Titular
    // 15, -- Acordo Creliq
    // 31, -- Cancelado Desaverbado
    // 34, -- Cancelado Definitivo - Fraude
    // 201, -- Cancelado Definitivo - PLD
    // 202 -- Conta Duplicada
    // )
    //and not exists(
    // select id_conta
    // from SMSContas s
    // where c.id_conta = s.id_conta
    // and s.id_smstiposervico = 1
    //)
    #endregion
    static StringBuilder query = new StringBuilder();
    //static readonly int INSERTS_PER_FILE = 3;
    static readonly int INSERTS_PER_FILE = 50000;  // accounts per generated file
    static readonly int TIPO_SERVICO = 1;          // SMS service type id

    public static void Run()
    {
        int row = 0, fileGenerated = 1;
        //string[] lines = System.IO.File.ReadAllLines(@"C:\Temp\sms_controle_contas.rpt").Take(9).ToArray(); // require using System.Linq;
        string[] lines = System.IO.File.ReadAllLines(@"C:\Temp\sms_controle_contas.rpt");
        while (row < lines.Length)
        {
            // New file, let's clear stringbuilder
            query.Clear();
            query.AppendLine("SET NOCOUNT ON");
            query.AppendLine("DECLARE @MessageComplement VARCHAR(140)");
            query.AppendLine("DECLARE @CadastrarMensagemRetorno INT");
            // first of all lets disable all things
            EnableOrDisableReferences(isReferencesEnabled: false);
            query.AppendLine();
            for (int i = 0; i < INSERTS_PER_FILE; i++)
            {
                if (row == lines.Length) // EOF = End Of File
                    break;
                CreateInsertCommand(lines[row]);
                row++;
            }
            // finally enable all things
            EnableOrDisableReferences(isReferencesEnabled: true);
            // save the result to file
            string fileName = string.Format("INSERT_SmsContas_TipoServico_{0}_File_{1}.sql", TIPO_SERVICO, fileGenerated.ToString().PadLeft(2, '0'));
            string folder = string.Format(@"C:\Temp\sms_contas\tipo_servico_{0}\", TIPO_SERVICO);
            // FIX: ensure the target folder exists -- File.WriteAllText throws
            // DirectoryNotFoundException when it does not. CreateDirectory is
            // a no-op if the folder is already there.
            System.IO.Directory.CreateDirectory(folder);
            System.IO.File.WriteAllText(string.Concat(folder, fileName), query.ToString(), Encoding.ASCII);
            Console.WriteLine("times executed: " + fileGenerated);
            fileGenerated++;
        }
    }

    // Emits the guarded INSERT + activation-message block for one account.
    static void CreateInsertCommand(string idConta)
    {
        // Guard: only act when the account is not yet in SmsContas.
        AccountHasSmsEnabled(idConta);
        // Open the guarded block that performs the registration.
        query.AppendLine("\tBEGIN");
        // Not enrolled yet? Then perform the insert.
        InsertAccountInSmsContas(idConta);
        // Build the message complement (account id + check digit).
        GenerateMessageComplement(idConta);
        // Call the proc that queues the activation message for the account.
        RegisterMessage(idConta);
        // Close the guarded block.
        query.AppendLine("\tEND");
    }

    // Emits the IF NOT EXISTS guard over SMSContas for this account/service.
    static void AccountHasSmsEnabled(string idConta)
    {
        query.Append("IF NOT EXISTS(SELECT Id_Conta FROM SMSContas ");
        query.AppendFormat("WHERE Id_Conta = {0} ", idConta);
        query.AppendFormat("AND ID_SMSTipoServico = {0})", TIPO_SERVICO);
        query.AppendLine();
    }

    // Emits the INSERT that enrolls the account in the SMS service.
    static void InsertAccountInSmsContas(string idConta)
    {
        query.Append("\t\tINSERT INTO SMSContas (Id_Conta, FlagAtivo, DataAtivacao, DataCancelamento, Origem, ID_SMSTipoServico, id_loginAtivacao, ");
        query.Append("id_loginCancelamento, Id_PlataformaAtivacao, Id_PlataformaCancelamento) VALUES(");
        query.AppendFormat("{0},", idConta);
        query.Append("1, "); // FlagAtivo
        query.Append("GETDATE(), "); // DataAtivacao
        query.Append("NULL, "); // DataCancelamento
        query.Append("'SCRIPT', "); // Origem
        query.AppendFormat("{0}, ", TIPO_SERVICO); // ID_SMSTipoServico
        query.Append("1, "); // id_loginAtivacao
        query.Append("NULL, "); // id_loginCancelamento
        query.Append("1, "); // Id_PlataformaAtivacao
        query.Append("NULL)"); // Id_PlataformaCancelamento
        query.AppendLine();
    }

    // Emits the SET of @MessageComplement: account id + mod-10 check digit.
    static void GenerateMessageComplement(string idConta)
    {
        query.AppendFormat("\t\tSET @MessageComplement = '{0}' ", idConta);
        query.AppendFormat("+ CAST(dbo.FC_Calcula_DACModulo10({0}) AS VARCHAR(140))", idConta);
        query.AppendLine();
    }

    // Emits the EXEC of the stored procedure that queues the SMS message.
    static void RegisterMessage(string idConta)
    {
        query.Append("\t\tEXEC @CadastrarMensagemRetorno = SPR_SMSCadastrarMensagem ");
        query.AppendFormat("@IdConta = {0}, ", idConta);
        query.Append("@IdOperacao = -2, ");
        query.Append("@Origem = 'SCRIPT', ");
        query.Append("@IdStatus = -1, ");
        query.Append("@Estabelecimento = NULL, ");
        query.Append("@ValorCompra = NULL, ");
        query.Append("@TipoSMS = 'ATIVACAO', ");
        query.Append("@HoraEnvio = NULL, ");
        query.Append("@ComplementoMensagem = @MessageComplement");
        query.AppendLine();
    }

    // Emits the trigger/constraint toggles that wrap each generated file.
    static void EnableOrDisableReferences(bool isReferencesEnabled)
    {
        query.AppendLine();
        query.AppendLine(string.Format("-- {0} todas as referencias", isReferencesEnabled ? "Habilitando" : "Desabilitando"));
        query.AppendLine(Trigger(isReferencesEnabled));
        query.AppendLine(Constraint(isReferencesEnabled));
    }

    // Recreates (true) or drops (false) the FlagAtivo default constraint.
    static string Constraint(bool create)
    {
        if (create)
            return "ALTER TABLE [dbo].[SMSContas] ADD CONSTRAINT [DF_SMSContas_FlagAtivo] DEFAULT (0) FOR [FlagAtivo]";
        return "ALTER TABLE [dbo].[SMSContas] DROP CONSTRAINT [DF_SMSContas_FlagAtivo]";
    }

    // Enables (true) or disables (false) all triggers on SMSContas.
    static string Trigger(bool enable)
    {
        if (enable)
            return "ALTER TABLE [dbo].[SMSContas] ENABLE TRIGGER ALL";
        return "ALTER TABLE [dbo].[SMSContas] DISABLE TRIGGER ALL";
    }
}
}
<file_sep># cdt-helper
Script generator
|
e639abd18524b5c4a785ef5146e6f0c3ab9d5aac
|
[
"Markdown",
"C#"
] | 6
|
C#
|
correamarques/cdt-helper
|
86d05fb622adeb5363b44857e13f904f8545d00b
|
eb605bb9be5233b096c4075f40593d277c2e5fae
|
refs/heads/master
|
<file_sep>#include "Map.h"
#include <iostream>
using namespace std;
/**
Map destructor: releases the sprite-sheet texture owned by this Map.
*/
Map::~Map(){
    //dtor
    SDL_DestroyTexture(this->spriteSheetTexture);
    spriteSheetTexture=NULL;
}
/**
Initializes the map with the given sprite-sheet image path
(delegates to setTexture).
*/
void Map:: init(const char* spriteMap){
    setTexture(spriteMap);
}
/**
Sets Map image
*/
void Map::setTexture(const char* spriteMap){
SDL_Surface* tempSurface = IMG_Load(spriteMap);
spriteSheetTexture = SDL_CreateTextureFromSurface(WindowProperties::renderer, tempSurface);
SDL_FreeSurface(tempSurface);
}
/**
Draws 2 map images onto the renderer.
Two copies are drawn side by side so the horizontal scroll wraps seamlessly.
*/
void Map::render(){
    render( scrollOffset, 0 );
    render( scrollOffset + WindowProperties::windowValue.width, 0 );
}
/**
Draws a map onto the renderer at a given position, optionally clipped,
rotated about `center`, and/or flipped.
*/
void Map::render( int x, int y, SDL_Rect* clip, double angle, SDL_Point* center, SDL_RendererFlip flip ){
    //Set rendering space and render to screen (defaults to full window size)
    SDL_Rect renderQuad = { x, y, WindowProperties::windowValue.width, WindowProperties::windowValue.height };
    //Set clip rendering dimensions
    if( clip != NULL ){
        renderQuad.w = clip->w;
        renderQuad.h = clip->h;
    }
    //Render to screen
    SDL_RenderCopyEx( WindowProperties::renderer, spriteSheetTexture, clip, &renderQuad, angle, center, flip );
}
/**
Updates the scrolling of the map in the background: shifts the background
left by one pixel per call and wraps once a full window width has passed.
*/
void Map::update(){
    // Move one pixel left per frame.
    scrollOffset -= 1;
    // Wrap around to restart the seamless scroll.
    if( scrollOffset < -WindowProperties::windowValue.width ){
        scrollOffset = 0;
    }
}
<file_sep># Platform Game
A Platform Game made in SDL2
To set up please download CodeBlocks from
http://www.codeblocks.org/downloads
*Note: Download Binary Release*
Then please download the SDL2 dll libraries from here
https://www.libsdl.org/download-2.0.php
*Note: Download Development Libraries*
Then, in the CodeBlocks project settings, point the SDL2 include and library paths to the location where you extracted the development libraries.
More information on how to set up SDL2 in CodeBlocks is here
http://lazyfoo.net/tutorials/SDL/01_hello_SDL/windows/codeblocks/index.php
# Folders
> Assets
Contains the game's image assets
> bin
Debug/Release executables<file_sep>#ifndef WINDOWPROPERTIES_H
#define WINDOWPROPERTIES_H
#include <iostream>
#include <fstream>
#include <string>
#include <windows.h>
#include <SDL.h>
// Plain-data record of window settings, serialized raw to WindowScreen.dat.
class WindowValue{
    public:
        int width;            // window width in pixels
        int height;           // window height in pixels
        int FPS;              // target frames per second
        float Wscale,Hscale;  // width/height scale relative to the 1000x600 base
        bool fullscreen;      // fullscreen flag
};
// Global (static-only) holder for the SDL window, renderer, latest event,
// and the persisted window settings. Never instantiated.
class WindowProperties
{
    public:
        static const int WindowHeight;        // base design height
        static const int WindowWidth;         // base design width
        static const std::string title;       // window caption
        static SDL_Window* window;
        static SDL_Surface* screen_surface;
        static WindowValue windowValue;       // current persisted settings
        static SDL_Renderer* renderer;
        static SDL_Event event;               // last polled SDL event
        // Loads settings from WindowScreen.dat, writing defaults if absent.
        static bool init();
        // Persist settings to WindowScreen.dat and update windowValue.
        static void setWindowProperties(WindowValue);
        static void setWindowProperties(int,int,float,float,int,bool);
        static void setDefaultWindowProperties();
        // React to SDL window-resize events (enforces a 1000x600 minimum).
        static void resizeWindowEvent();
        static float getWidthDisposition();
        static float getHeightDisposition();
};
#endif // WINDOWPROPERTIES_H
<file_sep>#include "Platforms/platformFactory.h"
// NOTE(review): this overload looks like dead/debug code -- the game uses
// the inline PlatformFactory::Create(std::string) defined in
// platformFactory.h. As written it only prints platforms[2] and returns a
// Platform whose init() call is commented out. Confirm before removing.
Platform PlatformFactory::Create(SDL_Renderer* renderer, std::string type){
    PlatformTypes::setPlatformType(type);
    Platform platform;
    std::cout << PlatformTypes::platforms[2] << std::endl;
    //platform->init(renderer, PlatformTypes::platforms[2].c_str(),500,500);
    return platform;
}
<file_sep>#ifndef MAP_H
#define MAP_H
#include "SDL.h"
#include "SDL_image.h"
#include "WindowProperties.h"
// Scrolling background map: owns one sprite-sheet texture and draws two
// copies side by side for an endless horizontal scroll.
class Map
{
    public:
        Map()=default;
        // Loads the background image (delegates to setTexture).
        void init(const char* spriteMap);
        // Loads spriteMap from disk into spriteSheetTexture.
        void setTexture(const char* spriteMap);
        // Draws the two wrapped copies of the background.
        void render();
        // Draws one copy at (x, y), optionally clipped/rotated/flipped.
        void render( int x, int y, SDL_Rect* clip = NULL, double angle = 0.0, SDL_Point* center = NULL, SDL_RendererFlip flip = SDL_FLIP_NONE );
        // Advances the scroll offset by one pixel.
        void update();
        virtual ~Map();
    protected:
    private:
        bool isScroll=false;              // NOTE(review): never read here -- confirm before removing
        int scrollOffset=0;               // current horizontal scroll position (<= 0)
        SDL_Texture* spriteSheetTexture;  // owned; destroyed in ~Map
};
#endif // MAP_H
<file_sep>#include "Platforms/platformTypes.h"
std::vector<std::string> PlatformTypes::platforms;
/**
Gets all platform types from a specific folder.

Scans ./assets/platforms/<type> and appends each entry's path to
PlatformTypes::platforms. NOTE(review): entries accumulate across calls
(the vector is never cleared) -- confirm whether that is intended.
*/
void PlatformTypes::setPlatformType(std::string type){
    std::string path = "./assets/platforms/" + type;
    DIR *dp = opendir( path.c_str());
    // Guard against a missing/unreadable folder: the previous code
    // dereferenced a NULL DIR* and crashed.
    if( dp == NULL ){
        std::cerr << "setPlatformType: cannot open " << path << std::endl;
        return;
    }
    struct dirent *dirp;
    while((dirp = readdir(dp))){
        std::string filepath = path+"/" + dirp->d_name;
        // Skip the "." and ".." pseudo-entries.
        if(strcmp(dirp->d_name,"..")!=0 && strcmp(dirp->d_name,".")!=0 ){
            PlatformTypes::platforms.push_back(filepath);
        }
    }
    // The previous code leaked the directory handle.
    closedir(dp);
}
<file_sep>#ifndef GAMEOBJECT_H
#define GAMEOBJECT_H
#include <iostream>
#include <string>
#include "../components/ComponentManager.h"
#include <typeinfo>
/**
Abstract Game Object class that will hold all components of a game object.
*/
class GameObject
{
    public:
        // Constructor
        // NOTE: `manager` is declared before `entityHandler`, so it is
        // constructed first; entityHandler references an entity created
        // by that manager.
        GameObject():entityHandler(this->manager.addEntity()){};
        virtual ~GameObject(){
            manager.destroyEntities();
        }
        // Add and Get Components (forwarded to the entity's storage).
        template <typename T,typename... Args> void addComponent(Args... args){
            entityHandler.addComponent<T>(args...);
        }
        template <typename T> T& getComponent(){
            return entityHandler.getComponent<T>();
        }
        // Component Manager
        Manager manager;
        Entity& entityHandler;
        // Per-frame update: prune dead entities, then update components.
        virtual void update(){
            manager.refresh();
            manager.update();
        };
        // Draw all drawable components.
        virtual void render(){
            manager.draw();
        };
};
#endif // GAMEOBJECT_H
<file_sep>#ifndef PLATFORM_H
#define PLATFORM_H
#include <iostream>
#include <string>
#include "../components/Components.h"
#include "GameObject.h"
using namespace std;
// A scrolling platform the player can land on. Built from components:
// transform (position/size/velocity), sprite, and a "Platform" collider.
class Platform : public GameObject{
    private:
        // Platform Image
        const char* platformType;  // sprite path used by this platform
        // Platform Starting Location
        int x,y;
    public:
        // Default Constructor
        Platform()=default;
        // Copy Constructor
        // Re-initializes components from the other platform's parameters;
        // the components themselves are not copied.
        Platform(const Platform& other){
            this->init(other.platformType,other.x,other.y);
        };
        Platform& operator=(const Platform& other){
            this->init(other.platformType,other.x,other.y);
            return *this;
        }
        // Default DeConstructor
        ~Platform()=default;
        // Initializes the GameObject: 150x75 transform at (x, y), the given
        // sprite, a "Platform"-tagged collider, and a constant leftward drift.
        void init(const char* platformType, int x, int y){
            this->x=x;
            this->y=y;
            this->platformType=platformType;
            this->addComponent<TransformComponent>(x,y,150,75);
            this->addComponent<SpriteRenderer>(platformType);
            this->addComponent<ColliderComponent>("Platform");
            this->getComponent<TransformComponent>().velocity.x =-1;
        }
        // Renders the Game Object onto the renderer
        void render(){
            GameObject::render();
        }
        // Updates the Game Object
        void update(){
            GameObject::update();
        }
};
#endif //PLATFORM_H
<file_sep>#ifndef SDLWINDOW_H
#define SDLWINDOW_H
#include <iostream>
#include <stdio.h>
#include <string>
#include <SDL.h>
#include <SDL_image.h>
#include "WindowProperties.h"
#include "AssetHandler.h"
// Top-level game window/loop owner: creates the SDL window and renderer and
// drives event handling, rendering, updating, and frame-rate capping.
class SDLWindow
{
    public:
        SDLWindow();
        virtual ~SDLWindow();
        // Polls and dispatches SDL events (quit, resize, key input).
        void handleEvents();
        // Clears the renderer, draws all assets, presents the frame.
        void render();
        void update();
        bool running(){return isRunning;}
        void loadMedia(std:: string);
        // Sleeps the remainder of the frame to honor the configured FPS.
        void capFrameRate(int);
    private:
        AssetHandler assetHandler;  // owns/coordinates all game assets
        bool isRunning;             // false once quit requested or init failed
};
#endif // SDLWINDOW_H
<file_sep>#ifndef PLATFORMFACTORY_H
#define PLATFORMFACTORY_H
#include <string>
#include <iostream>
#include <ctime>
#include "Platforms/platform.h"
#include "Platforms/platformTypes.h"
#include "SDL.h"
#include "WindowProperties.h"
using namespace std;
class PlatformFactory{
    public:
        /**
        Creates a random platform of the given type and returns it as a
        game object, spawned just past the right window edge at one of
        four jump heights.
        @param type asset sub-folder name (e.g. "Grass")
        */
        static Platform Create(std::string type){
            static const int jumpHeights[4] = {150, 250, 350, 450};
            // Seed the RNG once per process. The previous srand(time(NULL))
            // on every call reseeded with the same second-resolution value,
            // producing identical platforms spawned within one second.
            static bool seeded = false;
            if(!seeded){
                srand(time(NULL));
                seeded = true;
            }
            PlatformTypes::setPlatformType(type);
            Platform platform;
            // Index by the actual list size: setPlatformType appends on
            // every call, so the previous hard-coded `rand()%5` went out of
            // sync with the vector's real length.
            int spriteIndex = rand() % static_cast<int>(PlatformTypes::platforms.size());
            platform.init(PlatformTypes::platforms[spriteIndex].c_str(),
                          WindowProperties::WindowWidth,
                          jumpHeights[rand()%4]);
            return platform;
        }
};
#endif //PLATFORMFACTORY
<file_sep>#include "SDLWindow.h"
/**
Program entrypoint: runs the fixed-timestep game loop until the window
requests shutdown.
*/
int main( int argc, char* args[] )
{
    int frameStart;
    SDLWindow game;
    while(game.running()){
        frameStart = SDL_GetTicks();
        game.handleEvents();
        game.render();
        game.update();
        game.capFrameRate(frameStart);
    }
    // FIX: removed the explicit `game.~SDLWindow();` call. The destructor
    // runs automatically when `game` leaves scope; calling it explicitly
    // made it run twice (double destruction, undefined behavior).
    return 0;
}
<file_sep>#include "SDLWindow.h"
/**
SDLWindow constructor: initializes SDL and the persisted window settings,
creates the resizable window (with icon) and renderer, paints the
background white, and initializes all game assets.
Sets isRunning=false if any step fails, which stops the main loop.
*/
SDLWindow::SDLWindow() {
    if( SDL_Init(SDL_INIT_EVERYTHING) == 0 && WindowProperties::init()){
        // Set flag to notify the system is running
        this->isRunning=true;
        // Creates the window
        WindowProperties::window = SDL_CreateWindow( WindowProperties::title.c_str(),
                                                     SDL_WINDOWPOS_UNDEFINED,
                                                     SDL_WINDOWPOS_UNDEFINED,
                                                     WindowProperties::windowValue.width,
                                                     WindowProperties::windowValue.height,
                                                     SDL_WINDOW_SHOWN | SDL_WINDOW_RESIZABLE );
        SDL_Surface *surface; // Declare an SDL_Surface to be filled in with pixel data from an image file
        surface = IMG_Load("assets/logo.png");
        SDL_SetWindowIcon(WindowProperties::window, surface);
        SDL_FreeSurface(surface);
        WindowProperties::renderer = SDL_CreateRenderer(WindowProperties::window, -1 ,0);
        if( WindowProperties::window != NULL && WindowProperties::renderer != NULL){
            // Set background to white
            SDL_SetRenderDrawColor(WindowProperties::renderer,255,255,255,255);
        }
        else{
            // Error has occurred stop systems
            printf( "Something could not be created! SDL_Error: %s\n", SDL_GetError() );
            this->isRunning = false;
        }
        assetHandler.init();
    }
    else{
        printf( "SDL could not initialize! SDL_Error: %s\n", SDL_GetError() );
        this->isRunning = false;
    }
}
// Tears down the renderer and window, then shuts SDL subsystems down.
SDLWindow::~SDLWindow() {
    //Destroy window
    SDL_DestroyRenderer( WindowProperties::renderer );
    SDL_DestroyWindow( WindowProperties::window );
    WindowProperties::window = NULL;
    WindowProperties::renderer = NULL;
    //Quit SDL subsystems
    IMG_Quit();
    SDL_Quit();
}
/**
Handles any input or outputs that occurs in the window.

NOTE(review): only one event is polled per frame; a
`while(SDL_PollEvent(...))` loop would drain the queue -- confirm the
intent before changing.
*/
void SDLWindow:: handleEvents() {
    SDL_PollEvent(&WindowProperties::event);
    switch(WindowProperties::event.type){
        case SDL_QUIT:
            isRunning=false;
            break;
        default:
            // Anything else may be a resize or key event; let the
            // window-properties and asset layers inspect it.
            WindowProperties::resizeWindowEvent();
            assetHandler.keyEventHandler();
            break;
    }
}
/**
Renders game assets to the screen: clear, draw everything, present.
*/
void SDLWindow:: render() {
    SDL_RenderClear(WindowProperties::renderer);
    assetHandler.render();
    SDL_RenderPresent(WindowProperties::renderer);
}
/**
Updates the game assets (delegates to the asset handler).
*/
void SDLWindow:: update() {
    assetHandler.update();
}
// Frame counter used only for the commented-out debug print below.
int cnt=0;
/**
Caps the Game Frame Rate: sleeps whatever remains of this frame's time
budget (1000/FPS ms) after the work already done.
@param frameStart the SDL_GetTicks() value captured at frame start
*/
void SDLWindow:: capFrameRate(int frameStart) {
    int FPS = WindowProperties::windowValue.FPS;
    int frameTime = SDL_GetTicks() - frameStart;
    //printf("Frame Started %d\n", cnt );
    cnt++;
    if((1000/FPS)> frameTime){
        SDL_Delay((1000/FPS) - frameTime);
    }
}
<file_sep>#include "AssetHandler.h"
#include "Platforms/platformTypes.h"
#include "Platforms/platform.h"
#include "Platforms/platformFactory.h"
/**
Initializes all Assets in the Game
TODO:: Refactor to place all characters in a vector
*/
// File-scope list of active platforms, shared by update/render/addPlatform.
std::list<Platform> platforms;
void AssetHandler::init(){
    // Timestamp used by addPlatform to space out platform spawns.
    this->frameStart = SDL_GetTicks();
    music.init("audio/Blessing.mp3");
    background.init("assets/backgrounds/gamebackground.jpg");
    newplayer.init(800,475);
    music.play();
}
/**
Update function to call each asset's update function.
*/
void AssetHandler::update(){
    addPlatform();
    list<Platform>::iterator it;
    for(it= platforms.begin(); it!= platforms.end(); ++it){
        it->update();
    }
    newplayer.update();
    // After everything has moved, test the player against every platform.
    for(it= platforms.begin(); it!= platforms.end(); ++it){
        newplayer.onCollisionDetection(it->getComponent<ColliderComponent>());
    }
}
/**
Render function to draw the game objects to the screen
(background first, then platforms, player on top).
*/
void AssetHandler::render(){
    background.render();
    list<Platform>::iterator it;
    for(it= platforms.begin(); it!= platforms.end(); ++it){
        it->render();
    }
    newplayer.render();
}
/**
Handles Key Events for game objects: scrolls the background and forwards
input to the player.
*/
void AssetHandler::keyEventHandler(){
    background.update();
    newplayer.keyEventListener();
}
/**
Create Platform Assets every 3 seconds.
(The interval below is 3000 ms; the old comment said "every second".)
*/
void AssetHandler::addPlatform(){
    if((SDL_GetTicks() - frameStart)>3000){
        platforms.push_back(PlatformFactory::Create("Grass"));
        frameStart=SDL_GetTicks();
    }
}
<file_sep>#include "WindowProperties.h"
// Static window properties: definitions/initial values for the
// WindowProperties static members declared in WindowProperties.h.
const std::string WindowProperties::title="Platform Game";
const int WindowProperties::WindowHeight = 600;   // base (unscaled) height
const int WindowProperties::WindowWidth = 1000;   // base (unscaled) width
WindowValue WindowProperties::windowValue;        // current persisted settings
SDL_Window* WindowProperties::window = NULL;
SDL_Surface* WindowProperties::screen_surface = NULL;
SDL_Renderer* WindowProperties::renderer = NULL;
SDL_Event WindowProperties::event;
/**
Initialize Window settings from the WindowScreen.dat file.

Reads the saved WindowValue record when the file exists; otherwise writes
the defaults (setDefaultWindowProperties also populates windowValue).
Always returns true, matching the original behavior.
*/
bool WindowProperties::init() {
    WindowValue scale;
    std::ifstream data("data/WindowScreen.dat",std::ios::binary);
    if(data.is_open()) {
        data.read((char *) &scale,sizeof(scale));
        windowValue=scale;
    }
    else {
        // No settings file yet: create one with defaults. That call also
        // assigns windowValue, so no re-read is needed. (The previous code
        // re-opened the file, read into a local that was then discarded,
        // and ended with unreachable `data.close(); return false;` lines.)
        setDefaultWindowProperties();
    }
    return true;
}
/**
Set Window Properties with scale values: persists `scale` to
data/WindowScreen.dat (recreating the file and marking it hidden on
Windows) and makes it the active windowValue.
*/
void WindowProperties::setWindowProperties(WindowValue scale) {
    remove("data/WindowScreen.dat");
    std::ofstream out("data/WindowScreen.dat",std::ios::binary);
    out.write((char*) &scale, sizeof(scale));
    int attributes = GetFileAttributes("data/WindowScreen.dat");
    if((attributes & FILE_ATTRIBUTE_HIDDEN)==0) {
        SetFileAttributes("data/WindowScreen.dat", attributes + FILE_ATTRIBUTE_HIDDEN);
    }
    windowValue=scale;
    out.close();
}
/**
Set Window Properties with scale values: convenience overload that packs
the individual fields into a WindowValue and persists it.
*/
void WindowProperties::setWindowProperties(int w,int h,float Ws,float Hs, int FPS, bool fullscreen) {
    WindowValue scale;
    scale.fullscreen=fullscreen;
    scale.width=w;
    scale.height=h;
    scale.Wscale = Ws;
    scale.Hscale = Hs;
    scale.FPS = FPS;
    setWindowProperties(scale);
}
/**
Reset Window Properties to the defaults: windowed 1000x600, 1:1 scale,
60 FPS. Also persists them via setWindowProperties.
*/
void WindowProperties::setDefaultWindowProperties() {
    WindowValue scale;
    scale.fullscreen=false;
    scale.width=1000;
    scale.height=600;
    scale.Wscale = 1;
    scale.Hscale = 1;
    scale.FPS = 60;
    setWindowProperties(scale);
}
/**
Change Window setting based on window event actions: on a size change,
snap back to 1000x600 when the new size is below that minimum, otherwise
persist the new size with scale factors relative to the 1000x600 base.
*/
void WindowProperties::resizeWindowEvent() {
    if(event.type == SDL_WINDOWEVENT){
        switch(event.window.event){
            case SDL_WINDOWEVENT_SIZE_CHANGED:
                // data1/data2 carry the new width/height for this event.
                if(event.window.data1 < 1000 || event.window.data2 < 600) {
                    WindowProperties::setDefaultWindowProperties();
                    SDL_SetWindowSize(window,1000,600);
                }
                else{
                    float wScale = (float)(event.window.data1) /1000.0f;
                    float hScale = (float)(event.window.data2) /600.0f;
                    WindowProperties::setWindowProperties(event.window.data1,
                                                          event.window.data2,
                                                          wScale,
                                                          hScale,
                                                          WindowProperties::windowValue.FPS,
                                                          WindowProperties::windowValue.fullscreen);
                }
                break;
        }
    }
}
/**
Get the window's width distortion based on the orignal size
*/
/**
    Ratio of the current window width to the 1000px design width.
*/
float WindowProperties::getWidthDisposition() {
    const float designWidth = 1000.0f;
    return static_cast<float>(WindowProperties::windowValue.width) / designWidth;
}
/**
Get the window's height distortion based on the orignal size
*/
/**
    Ratio of the current window height to the 600px design height.
*/
float WindowProperties::getHeightDisposition() {
    const float designHeight = 600.0f;
    return static_cast<float>(WindowProperties::windowValue.height) / designHeight;
}
<file_sep>#ifndef PLATFORMTYPES_H
#define PLATFORMTYPES_H
#include <string>
#include <cstring>
#include <iostream>
#include <vector>
#include <dirent.h>
// Registry of platform sprite assets available to the level loader.
// NOTE(review): implementation of setPlatformType is not visible in this
// file — confirm it appends to `platforms`.
class PlatformTypes{
public:
    // Register a platform type (presumably by adding its image location
    // to `platforms` — verify against the implementation).
    static void setPlatformType(std::string type);
    // Holds all the platform images locations
    static std::vector<std::string> platforms;
};
#endif //PLATFORMTYPES_H
<file_sep>#ifndef ASSETHANDLER_H
#define ASSETHANDLER_H
#include <list>
#include <../components/Components.h>
#include "PlayableCharacter.h"
#include "Map.h"
#include "Media.h"
#include "WindowProperties.h"
// Central owner of the game's runtime assets: the player, map background,
// music, and the game-loop hooks (init/update/render/input).
class AssetHandler
{
public:
    AssetHandler(){};
    void init();            // one-time asset setup
    void update();          // per-frame game-state update
    void render();          // per-frame drawing
    void keyEventHandler(); // dispatch keyboard input
    PlayableCharacter newplayer; // the controllable player character
    GameObject wall;             // static collision object
private:
    void addPlatform(); // spawn a platform into the level
    Map background;     // scrolling map/background
    Media music;        // background audio
    int frameStart;     // frame-timing anchor (ms) for the game loop
};
#endif // ASSETHANDLER_H
|
e0444fa104314d45aa1e078b6df23d428514cfe5
|
[
"Markdown",
"C++"
] | 16
|
C++
|
Lazer7/Platform-Game
|
af58592259f7be511876b45249aa94efec2ea009
|
d026f5f93b3bbb9d86eb326f860ec8d0eafa7f62
|
refs/heads/master
|
<file_sep>
// Get worker ID from the query string (everything after '=') and derive
// the output filename for this participant's RVP session.
var decoded = decodeURIComponent(window.location.search);
var workerID = decoded.substring(decoded.indexOf('=')+1);
var filename = workerID + 'RVP';

// --- Trial timing (all in ms) ---
// For 100 digits/minute, per cantab specs, make these two sum to 36
var nBlanksBeforeFixationCross = 0;
var preFixationMs = 0;    // delay before the fixation bullet appears
var fixationMs = 2000;    // how long the fixation bullet is shown
var postFixationMs = 600; // blank gap between fixation and the first digit
var digitMs = 600;        // presentation time per digit
var postDigitMs = 0;      // blank gap between successive digits

// --- Response / stimulus state ---
var alreadyCorrect; // true once the current target has been answered correctly
var allowPresses;   // gate checked by inputHandler
var stim = [];      // digit stream; gains .digits and .isTarg in start()
var minTargSep = 3; // Minimum separation between the end of one target and the beginning of another
var noRptsWitin = 2; // No repeated digits within this many
// NOTE(review): 'noRptsWitin' above looks like a typo of 'noRptsWithin'
// below and appears never to be read — candidate for removal.
var noRptsWithin = 2;

// --- Block structure: practice vs. real task ---
var practiceTargTypes = [[3,5,7]];
var blockwise_nPracticeDgts = [7,8,7,8];  // digits per practice block
var blockwise_nPracticeTargs = [1,1,1,1]; // targets per practice block
var taskTargTypes = [[3,5,7],[2,4,6],[4,6,8]];
var blockwise_nTaskDgts = [200,200];      // digits per task block
var blockwise_nTaskTargs = [16,16];       // targets per task block
var legalDigits = [2,3,4,5,6,7,8,9];      // digits the stream is drawn from

// Variables governing the type of feedback displayed
var gamify = false;
if (gamify) {
    var score;
    var allowNegativeScores = false;
    var nPointsPerCorrect = 40;
    var nPointsPerIncorrect = 20;
}
var colourHints, underliningHints, textHints; // practice-mode visual hints
var timingFeedback, categoricalFeedback, anyFeedback, feedbackTextStopId;
var responseAllowanceMs = 1800; // These could be named better
var lateResponseAllowanceMs = 2500;
var nTextMs = 1000; // Number of ms for which feedback text is displayed
var nDecPts = 3;    // decimal places used for logged timestamps

// --- DOM handles ---
var ALL = document.getElementsByTagName("html")[0];
if (gamify) {
    var score;
    var scoreArea = document.getElementById('scoreArea');
    scoreArea.style.visibility = 'visible';
}
var digitDisplayArea = document.getElementById('digitDisplayArea');
var digitDisplayP = document.getElementById('digitDisplayP');
var targetDisplayArea = document.getElementById('targetDisplayArea');
var feedbackTextArea = document.getElementById('feedbackTextArea');
var dialogArea = document.getElementById('dialogArea');
var dialogP = document.getElementById('dialogP');

// --- Run state ---
var currDigitCount, nextDigitCount; // index of current / next digit shown
var isPractice = true; // Set to false to eliminate the practice round
var lastTargTime;      // timestamp when the last target sequence completed
var outputText = 'Time,Event\n'; // CSV event log accumulated over the session
// Log every qualifying key/touch press with its timestamp and, when any
// feedback mode is enabled, score it against the most recent target.
function inputHandler(e) {
    if (!allowPresses) {
        return;
    }
    var eventText;
    switch (e.constructor.name) {
        case 'KeyboardEvent':
            eventText = e.code;
            break;
        case 'TouchEvent':
            eventText = 'TouchEvent';
            break;
    }
    outputText += e.timeStamp.toFixed(nDecPts) + ',' + eventText + '\n';
    if (anyFeedback) {
        determineFeedback(e.timeStamp);
    }
}
window.addEventListener('keydown', inputHandler, false);
window.addEventListener('touchstart', inputHandler, false);
// Classify a response at `responseTime` relative to the last completed
// target (correct / late / early / incorrect), update the score when
// gamified, and display the appropriate feedback text.
// Bug fix: 'lateReponse' was a typo for 'lateResponse', so it silently
// created a global and late responses were never flagged.
function determineFeedback(responseTime) {
    var correctResponse = false, lateResponse = false, earlyResponse = false, incorrectResponse = false;
    if (alreadyCorrect) {
        // A second press for the same target counts as an error.
        incorrectResponse = true;
    } else if (responseTime > lastTargTime
               && responseTime < lastTargTime + responseAllowanceMs) {
        correctResponse = true;
        alreadyCorrect = true;
        // Re-arm once this target's late-response window has elapsed.
        setTimeout(function() {
            alreadyCorrect = false;
        }, lastTargTime + lateResponseAllowanceMs - performance.now());
    } else if (responseTime > lastTargTime
               && responseTime < lastTargTime + lateResponseAllowanceMs) {
        lateResponse = true; // fixed typo: was 'lateReponse'
    } else if (stim.isTarg[currDigitCount]) {
        // Pressed while the target sequence was still unfolding.
        earlyResponse = true;
    } else {
        incorrectResponse = true;
    }
    if (gamify) {
        if (correctResponse) {
            score += nPointsPerCorrect;
        } else {
            score -= nPointsPerIncorrect;
            if (score < 0 && !allowNegativeScores) {
                score = 0;
            }
        }
        scoreArea.textContent = 'Score: ' + score;
    }
    if (timingFeedback && (earlyResponse || lateResponse)) {
        if (earlyResponse) {
            displayFeedback('Too soon!');
        } else if (lateResponse) {
            displayFeedback('Too late!');
        }
    } else if (categoricalFeedback) {
        if (correctResponse) {
            displayFeedback('Correct!');
        } else {
            displayFeedback('Wrong!');
        }
    }
}
// Show a transient feedback message, replacing any message still on screen
// and cancelling its pending auto-clear.
function displayFeedback(text) {
    feedbackTextArea.textContent = text;
    if (feedbackTextStopId) {
        clearTimeout(feedbackTextStopId); // cancel the previous auto-clear
    }
    feedbackTextStopId = setTimeout(function () {
        // Only clear if no newer message has replaced ours in the meantime.
        if (feedbackTextArea.textContent == text) {
            feedbackTextArea.textContent = '';
        }
    }, nTextMs);
}
// Begin a block of trials: build the digit stream (practice or task),
// reset the UI, and kick off the fixation cross.
// Bug fixes: 'stim.digits = stim.isTarg = []' aliased both properties to
// the SAME array; 'tempStim' was an implicit global in the practice branch.
function start() {
    nextDigitCount = 0;
    feedbackTextArea.textContent = '';
    ALL.style.cursor = 'none';
    if (gamify) {
        score = 0;
        categoricalFeedback = true;
    }
    // Give each property its own empty array (no aliasing).
    stim.digits = [];
    stim.isTarg = [];
    var i, tempStim;
    if (isPractice) {
        colourHints = underliningHints = textHints = true;
        categoricalFeedback = timingFeedback = true;
        outputText += performance.now().toFixed(nDecPts) + ',' + 'Practice start\n';
        for (i = 0; i < blockwise_nPracticeDgts.length; i++) {
            tempStim = new initializeDigits(blockwise_nPracticeTargs[i], blockwise_nPracticeDgts[i], practiceTargTypes);
            stim.digits = stim.digits.concat(tempStim.digits);
            stim.isTarg = stim.isTarg.concat(tempStim.isTarg);
        }
        // Practice uses a single target sequence; hide the other two.
        targetDisplayArea.children[1].style.visibility = 'hidden';
        targetDisplayArea.children[2].style.visibility = 'hidden';
    } else {
        colourHints = underliningHints = textHints = false;
        categoricalFeedback = timingFeedback = false;
        outputText += performance.now().toFixed(nDecPts) + ',' + 'Task start\n';
        stim = new initializeDigits(blockwise_nTaskTargs[0], blockwise_nTaskDgts[0], taskTargTypes);
        for (i = 1; i < blockwise_nTaskDgts.length; i++) {
            tempStim = new initializeDigits(blockwise_nTaskTargs[i], blockwise_nTaskDgts[i], taskTargTypes);
            stim.digits = stim.digits.concat(tempStim.digits);
            stim.isTarg = stim.isTarg.concat(tempStim.isTarg);
        }
        targetDisplayArea.children[1].style.visibility = 'visible';
        targetDisplayArea.children[2].style.visibility = 'visible';
    }
    // Post-process: no repeated digits within noRptsWithin positions, and
    // no accidental (unmarked) occurrences of the target sequences.
    stim.digits = elimRepeats(stim.digits, stim.isTarg, noRptsWithin);
    stim.digits = elimSpuriousTargs(stim.digits, stim.isTarg, taskTargTypes);
    dialogArea.style.display = 'none';
    digitDisplayArea.style.display = 'block';
    targetDisplayArea.style.display = 'block';
    if (gamify || timingFeedback || categoricalFeedback) {
        anyFeedback = true;
    }
    feedbackTextArea.style.display = 'block';
    feedbackTextArea.textContent = '';
    if (preFixationMs > 0) {
        setTimeout(fixationCross, preFixationMs);
    } else {
        fixationCross();
    }
}
// Show a centred fixation bullet for fixationMs, then (optionally after a
// blank gap of postFixationMs) present the first digit.
function fixationCross() {
    digitDisplayP.style.color = 'black';
    digitDisplayP.style.textDecoration = 'none';
    digitDisplayP.textContent = '\u2022';
    setTimeout(function () {
        if (postFixationMs <= 0) {
            showDigit();
            return;
        }
        digitDisplayP.textContent = '';
        setTimeout(showDigit, postFixationMs);
    }, fixationMs);
}
// Present the next digit, log it, and — if it is the final digit of a
// target sequence — record the time that opens the response window.
// Bug fix: 'presentationTime' was assigned without 'var' (implicit global).
function showDigit() {
    currDigitCount = nextDigitCount++;
    digitDisplayP.textContent = stim.digits[currDigitCount];
    var presentationTime = performance.now();
    outputText += presentationTime.toFixed(nDecPts) + ',' + digitDisplayP.textContent + '\n';
    allowPresses = true;
    // The last digit of a target sequence starts the response window.
    if (stim.isTarg[currDigitCount] && !stim.isTarg[currDigitCount+1]) {
        lastTargTime = presentationTime;
    }
    determineHints();
    setTimeout(interTrialCtrlFunc, digitMs);
}
// Apply (or clear) the practice-mode visual hints for the current digit.
function determineHints() {
    var onTarget = stim.isTarg[currDigitCount];
    if (onTarget) {
        if (colourHints) {
            digitDisplayP.style.color = 'Yellow';
        }
        if (underliningHints) {
            digitDisplayP.style.textDecoration = 'underline';
            digitDisplayP.style.textDecorationColor = 'red';
        }
        // Prompt only on the final digit of the sequence.
        if (textHints && !stim.isTarg[currDigitCount + 1]) {
            feedbackTextArea.textContent = 'Press now!';
        }
    } else {
        digitDisplayP.style.color = 'black';
        digitDisplayP.style.textDecoration = 'none';
        if (feedbackTextArea.textContent == 'Press now!') {
            feedbackTextArea.textContent = '';
        }
    }
}
// After each digit, decide what happens next: show the next digit (with an
// optional blank gap), or handle the end of the practice/task stream.
function interTrialCtrlFunc() {
    var streamFinished = (nextDigitCount == stim.digits.length);
    if (!streamFinished) {
        if (postDigitMs > 0) {
            showBlank();
            setTimeout(showDigit, postDigitMs);
        } else {
            showDigit();
        }
        return;
    }
    if (isPractice) {
        // Wipe the display, then move on to the post-practice instructions.
        feedbackTextArea.textContent = '';
        digitDisplayP.textContent = '';
        digitDisplayP.style.color = 'black';
        digitDisplayP.style.textDecoration = 'none';
        setTimeout(afterPracticeScreen, responseAllowanceMs);
    } else {
        showBlank();
        // Allow time for a final (late) response before saving.
        setTimeout(function () { saveDataAndRedirect(filename, outputText, workerID); }, lateResponseAllowanceMs);
    }
}
// Clear the digit display. Kept as its own function so a richer
// inter-stimulus display can be added later.
function showBlank() {
    digitDisplayP.textContent = '';
}
// Swap the trial UI for the pre-task instruction dialog and wire up the
// "Start game" button.
// Bug fix: 'instructionsArray' and 'currInstructions' were assigned
// without 'var' and leaked into the global scope.
function afterPracticeScreen() {
    isPractice = false;
    if (gamify) {
        score = 0;
        scoreArea.textContent = "Score: " + score;
    }
    allowPresses = false;
    ALL.style.cursor = 'default';
    dialogArea.style.display = 'block';
    digitDisplayArea.style.display = 'none';
    targetDisplayArea.style.display = 'none';
    feedbackTextArea.style.display = 'none';
    // Remove any previous dialog content.
    while (dialogArea.lastChild) {
        dialogArea.removeChild(dialogArea.lastChild);
    }
    var centered = document.createElement('center');
    var instructionsArray =
        [
            "That was the end of the practice round. Now you'll have to look out for 3 sequences (they will be shown off to the side in case you forget them)",
            "3 5 7",
            "2 4 6",
            "4 6 8",
            "Press space as soon as you've seen any of them (i.e. press space as soon as you see the last digit).",
            "React as fast as you can, but avoid making mistakes. This time the game won't tell you when you're seeing a sequence.",
            "Click to start the game for real"
        ];
    var i, currInstructions;
    for (i = 0; i < instructionsArray.length; i++) {
        currInstructions = document.createElement('p');
        currInstructions.className = 'dialog';
        currInstructions.textContent = instructionsArray[i];
        centered.appendChild(currInstructions);
    }
    var startButton = document.createElement('button');
    startButton.textContent = 'Start game';
    startButton.onclick = start;
    centered.appendChild(startButton);
    dialogArea.appendChild(centered);
}
// Constructor-style builder for a digit stream of length nDigits containing
// nTargs target sequences drawn cyclically from targTypes.
// Produces: this.digits (the stream) and this.isTarg (per-digit target flag).
// Relies on the globals minTargSep and legalDigits and on sample().
function initializeDigits(nTargs,nDigits,targTypes) {
    // Longest target sequence; bounds where a target may legally start.
    var maxTargLen = Math.max.apply(null, targTypes.map(function(x) {return x.length}));
    var i, j, candTargStart, localDigits = Array(nDigits), isTarg = Array(nDigits).fill(false), targTypeIdx = 0;
    for (i = 0; i < nTargs; i++) { // Fill out targets
        // Rejection-sample a start position until one keeps at least
        // minTargSep non-target digits on both sides of the new target.
        while (true) {
            candTargStart = Math.floor(minTargSep + (nDigits - maxTargLen+1 - 2*minTargSep)*Math.random());
            if (!isTarg.slice(candTargStart-minTargSep,candTargStart).includes(true) &&
                !isTarg.slice(candTargStart,candTargStart+targTypes[targTypeIdx].length+minTargSep).includes(true)) {
                for (j = 0; j < targTypes[targTypeIdx].length; j++) {
                    isTarg[candTargStart+j] = true;
                    localDigits[candTargStart+j] = targTypes[targTypeIdx][j];
                }
                targTypeIdx = targTypeIdx==targTypes.length-1 ? 0 : targTypeIdx+1; // cycle through target types
                break; // out of while loop
            }
        }
    }
    // Fill the remaining (non-target) positions with random legal digits.
    for (i = 0; i < localDigits.length; i++) {
        if (!isTarg[i]) {
            localDigits[i] = sample(legalDigits,1)[0];
        }
    }
    this.digits = localDigits;
    this.isTarg = isTarg;
}
// Replace any non-target digit that repeats within noRptsWithin positions
// of itself with a randomly chosen legal digit that does not repeat.
// Returns a modified copy; the input array is left untouched.
function elimRepeats(gStimArray, indicatorArray, noRptsWithin) {
    var stimArray = gStimArray.slice(0); // work on a copy of the stream
    var idx, banned, candidates, b;
    for (idx = 0; idx < stimArray.length; idx++) {
        if (indicatorArray[idx]) {
            continue; // never alter digits belonging to a target sequence
        }
        banned = getLocalUniques(stimArray, idx, noRptsWithin);
        if (banned.includes(stimArray[idx])) {
            candidates = legalDigits.slice(0);
            for (b = 0; b < banned.length; b++) {
                candidates.splice(candidates.indexOf(banned[b]), 1);
            }
            stimArray[idx] = sample(candidates, 1)[0];
        }
    }
    return (stimArray);
}
// Return the distinct values of inArray, preserving first-seen order.
function uniqueElements(inArray) {
    var result = [];
    inArray.forEach(function (el) {
        if (!result.includes(el)) {
            result.push(el);
        }
    });
    return result;
}
// Distinct values within n positions before and after inArray[idx]
// (the element at idx itself is excluded from the neighbourhood).
// Bug fix: 'postIdx' was assigned without 'var', leaking a global.
function getLocalUniques(inArray, idx, n) {
    var preIdx = idx - n, localElements = new Array();
    if (preIdx >= 0 && preIdx + n - 1 < idx) {
        localElements = localElements.concat(inArray.slice(0).splice(preIdx, n));
    }
    var postIdx = idx + 1;
    if (postIdx < inArray.length) {
        localElements = localElements.concat(inArray.slice(0).splice(postIdx, n));
    }
    return (uniqueElements(localElements));
}
// Remove accidental (unmarked) occurrences of any target sequence from the
// digit stream by replacing one of the non-target digits inside each
// spurious occurrence. Returns a modified copy of gStimArray.
// Bug fixes:
//  * typos 'unavaialbeIdx' / 'unavaiableReplacements' created implicit
//    globals and left 'unavailableIdx' undefined, so the filtering loop
//    never executed;
//  * 'availableReplacements = legalDigits' aliased the global digit list,
//    which the splice below then mutated — now copied with slice(0);
//  * splice(idx) with no deleteCount truncated the array to the end —
//    now splice(idx, 1) removes exactly one entry;
//  * indices from findIndices are window-relative but were used as
//    absolute stream indices — now offset by stimIdx;
//  * the replacement digit was written to stimIdx (the window start)
//    instead of tbrIdx (the digit being replaced).
function elimSpuriousTargs(gStimArray, gIndicatorArray, targArray) {
    var stimArray = gStimArray.slice(0); // Local copies of the inputs
    var indicatorArray = gIndicatorArray.slice(0);
    var stimIdx, targIdx, currSeq, currTarg, currIndic, availableReplacements;
    var stimIdx2, targIdx2, candIdx, unavailableReplacements, candRep, candStimArray, flag2, startIdx, currTarg2;
    var tbrIdxs, subTbrIdx, tbrIdx, flag1;
    for (targIdx = 0; targIdx < targArray.length; targIdx++) { // Cycle through target sequences to detect spurious occurrences
        currTarg = targArray[targIdx];
        for (stimIdx = 0; stimIdx < stimArray.length; stimIdx++) { // Inspect entire length of stimArray
            currSeq = stimArray.slice(0).splice(stimIdx, currTarg.length);
            currIndic = indicatorArray.slice(0).splice(stimIdx, currTarg.length);
            if (arrayCmp(currSeq, currTarg) && currIndic.includes(false)) { // Spurious sequence detected
                // Candidate positions to rewrite, as absolute stream indices.
                tbrIdxs = findIndices(currIndic, false).map(function (rel) { return stimIdx + rel; });
                flag1 = false;
                for (subTbrIdx = 0; subTbrIdx < tbrIdxs.length; subTbrIdx++) {
                    if (flag1) {
                        break;
                    }
                    tbrIdx = tbrIdxs[subTbrIdx];
                    availableReplacements = legalDigits.slice(0); // copy: never mutate the global
                    unavailableReplacements = getLocalUniques(stimArray, tbrIdx, noRptsWithin);
                    for (candIdx = 0; candIdx < availableReplacements.length; candIdx++) { // Find candidates that would introduce a spurious target sequence
                        candStimArray = stimArray.slice(0);
                        candRep = availableReplacements[candIdx];
                        candStimArray[tbrIdx] = candRep;
                        flag2 = false;
                        for (targIdx2 = 0; targIdx2 < targArray.length; targIdx2++) { // Test candidate against every target sequence
                            if (flag2) {
                                break;
                            }
                            currTarg2 = targArray[targIdx2];
                            startIdx = Math.max(0, tbrIdx - currTarg2.length + 1);
                            for (stimIdx2 = startIdx; stimIdx2 <= tbrIdx; stimIdx2++) {
                                if (arrayCmp(currTarg2, candStimArray.slice(0).splice(stimIdx2, currTarg2.length))) { // A spurious target would be introduced
                                    unavailableReplacements.push(candRep);
                                    flag2 = true; // Go to next candidate replacement digit
                                    break;
                                }
                            }
                        }
                    }
                    // Filter out every digit that would introduce a new
                    // spurious sequence or a local repeat.
                    var unavailableIdx, currUnavail, removeAt;
                    for (unavailableIdx = 0; unavailableIdx < unavailableReplacements.length; unavailableIdx++) {
                        currUnavail = unavailableReplacements[unavailableIdx];
                        removeAt = availableReplacements.indexOf(currUnavail);
                        if (removeAt !== -1) {
                            availableReplacements.splice(removeAt, 1);
                        }
                    }
                    if (availableReplacements.length > 0) {
                        stimArray[tbrIdx] = sample(availableReplacements, 1)[0];
                        flag1 = true; // This occurrence is broken; move on
                    }
                }
            }
        }
    }
    return (stimArray);
}
// Sample k elements from inArray uniformly at random, without replacement.
// Bug fix: 'outArray' and 'currIdx' were assigned without 'var' and
// leaked into the global scope.
function sample(inArray, k) {
    var arrayToSubsample = inArray.slice(0); // Don't alter original array
    var outArray = new Array(k);
    var currIdx;
    var i;
    for (i = 0; i < k; i++) {
        currIdx = Math.floor(Math.random() * arrayToSubsample.length);
        outArray[i] = arrayToSubsample[currIdx];
        arrayToSubsample.splice(currIdx, 1);
    }
    return outArray;
}
// Element-wise equality of two arrays (loose ==, matching the rest of
// this file); false when the lengths differ.
function arrayCmp(array1, array2) {
    if (array1.length !== array2.length) {
        return false;
    }
    for (var i = 0; i < array1.length; i++) {
        if (array1[i] != array2[i]) {
            return false;
        }
    }
    return true;
}
// True when array1 appears as a contiguous subsequence of array2.
function isMember(array1, array2) {
    var start;
    for (start = 0; start < array2.length; start++) {
        if (arrayCmp(array1, array2.slice(0).splice(start, array1.length))) {
            return true;
        }
    }
    return false;
}
// Indices i where array[i] == element (loose equality, as used elsewhere
// in this file).
function findIndices(array, element) {
    var hits = [];
    array.forEach(function (value, i) {
        if (value == element) {
            hits.push(i);
        }
    });
    return hits;
}
// POST the collected CSV data to saveData.php via a hidden,
// self-submitting form (the server handles the redirect).
function saveDataAndRedirect(filename, txt, pID) {
    filename = 'Data/' + filename;
    var form = document.createElement('form');
    document.body.appendChild(form);
    form.method = 'post';
    form.action = 'saveData.php';
    var fields = {
        filename: filename,
        txt: txt,
        pID: pID
    };
    Object.keys(fields).forEach(function (key) {
        var input = document.createElement('input');
        input.type = 'hidden';
        input.name = key;
        input.value = fields[key];
        form.appendChild(input);
    });
    form.submit();
}
|
b093a0f4a67845a21ed62c15afff9e8df38cd0c7
|
[
"JavaScript"
] | 1
|
JavaScript
|
kinleyid/webRVP
|
af7acdbbfee9666a1ce64f868746c6b30179bcad
|
128927cdb7c5d2c2aeabf63d4185e8e5e1e3a8d9
|
refs/heads/master
|
<repo_name>daneography/tipper-ios<file_sep>/tipper/settingsViewController.swift
//
// settingsViewController.swift
// tipper
//
// Created by <NAME> on 7/28/20.
// Copyright © 2020 daneography. All rights reserved.
//
import UIKit
// Settings screen: shows the tip percentages in a table and supports
// swipe-to-delete; adding is not yet implemented.
// (Lowercase class name kept — renaming could break the storyboard link.)
class settingsViewController: UIViewController, UITableViewDataSource {
    @IBOutlet weak var addButton: UIBarButtonItem!
    // NOTE(review): this creates a fresh, detached ViewController rather
    // than referencing the presenting one, so edits made here never reach
    // the main screen — confirm whether a delegate/segue hand-off was intended.
    let vc = ViewController()
    // Local working copy of the percentages shown in the table.
    var tipPercentages = [Int]()
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        vc.tipPercentages.append(70)
        tipPercentages = vc.tipPercentages
        print(tipPercentages.count)
        navigationItem.rightBarButtonItem = editButtonItem
        // Disable adding once the cap is reached.
        // NOTE(review): '> 5' allows a 6th entry before disabling —
        // confirm whether '>= 5' was intended.
        if tipPercentages.count > 5 {
            addButton.isEnabled = false
        }
    }
    // Add a new percentage (not yet implemented).
    @IBAction func addPercentage(_ sender: UIBarButtonItem) {
        if tipPercentages.count < 5 {
        } else {
            // TO-DO
        }
    }
    // Every row can be edited (enables swipe-to-delete).
    func tableView(_ tableView: UITableView, canEditRowAt indexPath: IndexPath) -> Bool {
        return true
    }
    // Swipe-to-delete: remove the percentage and its row.
    func tableView(_ tableView: UITableView, commit editingStyle: UITableViewCell.EditingStyle, forRowAt indexPath: IndexPath) {
        if(editingStyle == .delete){
            tipPercentages.remove(at: indexPath.row)
            tableView.deleteRows(at: [indexPath], with: .fade)
        }
    }
    // One row per stored percentage.
    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return tipPercentages.count
    }
    // Render a plain cell whose label is the percentage value.
    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        let cell = tableView.dequeueReusableCell(withIdentifier: "PlainCell", for: indexPath)
        //Tips Section
        cell.textLabel?.text = String(tipPercentages[indexPath.row])
        return cell
    }
    /*
    // MARK: - Navigation
    // In a storyboard-based application, you will often want to do a little preparation before navigation
    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        // Get the new view controller using segue.destination.
        // Pass the selected object to the new view controller.
    }
    */
}
<file_sep>/tipper/ViewController.swift
//
// ViewController.swift
// tipper
//
// Created by <NAME> on 7/27/20.
// Copyright © 2020 daneography. All rights reserved.
//
import UIKit
// Main tip-calculator screen: bill entry, tip percentage selector, and the
// computed tip/total labels.
class ViewController: UIViewController {
    @IBOutlet weak var billAmount: UITextField!
    @IBOutlet weak var tipValue: UILabel!
    @IBOutlet weak var tipControl: UISegmentedControl!
    @IBOutlet weak var totalAmount: UILabel!
    // Selectable tip percentages; sorted before being shown as segments.
    var tipPercentages = [10,15,20,70,25]
    // NOTE(review): this array is never read in the code visible here —
    // candidate for removal.
    var tip = [0.90, 0.95,0.10]
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        // Seed with a sample bill so the UI is populated on first launch.
        billAmount.text = String(100)
        totalBill(self)
        addTipSegment()
    }
    // Dismiss the keyboard when the background is tapped.
    @IBAction func onTap(_ sender: Any) {
        view.endEditing(true)
    }
    // Recompute tip and total from the bill amount and selected percentage.
    @IBAction func totalBill(_ sender: Any) {
        let bill = Double(billAmount.text!) ?? 0
        let tip = bill * (Double(tipPercentages[tipControl.selectedSegmentIndex])/100.0)
        let total = bill + tip
        tipValue.text = String(format: "$%.2f",tip)
        totalAmount.text = String(format: "$%.2f",total)
    }
    // Navigate to the settings screen.
    @IBAction func settingsButtonTapped(_ sender: UIBarButtonItem) {
        self.performSegue(withIdentifier: "mainToSettings", sender: self)
    }
    // Rebuild the segmented control from the (sorted) percentages and
    // select the first segment.
    func addTipSegment(){
        tipPercentages.sort()
        tipControl.removeAllSegments()
        for (index, tip) in tipPercentages.enumerated(){
            tipControl.insertSegment(withTitle: String(format: "%d%%",tip), at: index, animated: true)
        }
        tipControl.selectedSegmentIndex = 0
    }
    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        // Get the new view controller using segue.destination.
        // Pass the selected object to the new view controller.
    }
}
|
9d0e0c8313df6bcd7d31fbcb00025d6ebd186fbe
|
[
"Swift"
] | 2
|
Swift
|
daneography/tipper-ios
|
95815dea1b64e91d46b7dd380247bdf1535b986b
|
d8ffc111e48db92436861fa320041898b6d51633
|
refs/heads/master
|
<repo_name>holmstrom/Kilmer<file_sep>/Kilmer/StringValidation.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Kilmer
{
/// <summary>
/// Guard-clause helpers for string arguments. Each method throws an
/// <see cref="ArgumentException"/> (built via Utils.CreateMessage) when its
/// condition is violated.
/// </summary>
internal static class StringValidation
{
    /// <summary>Throws when <paramref name="str"/> is null or empty.</summary>
    internal static void NotNullOrEmpty(this string str, string paramName, string message = "")
    {
        if (string.IsNullOrEmpty(str))
            throw new ArgumentException(Utils.CreateMessage(str, message, "String should not be null or empty"), paramName);
    }

    /// <summary>Throws when <paramref name="str"/> is neither null nor empty.</summary>
    internal static void IsNullOrEmpty(this string str, string paramName, string message = "")
    {
        if (!string.IsNullOrEmpty(str))
            throw new ArgumentException(Utils.CreateMessage(str, message, "String should be null or empty, was {0}"), paramName);
    }

    /// <summary>Throws when <paramref name="str"/> is null, empty or whitespace.</summary>
    internal static void NotNullOrWhitespace(this string str, string paramName, string message = "")
    {
        if (string.IsNullOrWhiteSpace(str))
            throw new ArgumentException(Utils.CreateMessage(str, message, "String should not be null, empty or whitespace, was {0}"), paramName);
    }

    /// <summary>Throws when <paramref name="str"/> is not null, empty or whitespace.</summary>
    internal static void IsNullOrWhitespace(this string str, string paramName, string message = "")
    {
        if (!string.IsNullOrWhiteSpace(str))
            throw new ArgumentException(Utils.CreateMessage(str, message, "String should be null, empty or whitespace, was {0}"), paramName);
    }

    /// <summary>Throws when <paramref name="str"/> does not contain <paramref name="target"/>.</summary>
    internal static void DoesContain(this string str, string target, string paramName, string message = "")
    {
        if (!str.Contains(target))
            throw new ArgumentException(Utils.CreateMessage(str, message, "String should contain {0}"), paramName);
    }

    /// <summary>Throws when <paramref name="str"/> contains <paramref name="target"/>.</summary>
    internal static void DoesntContain(this string str, string target, string paramName, string message = "")
    {
        if (str.Contains(target))
            // Bug fix: the failure message previously said "should contain".
            throw new ArgumentException(Utils.CreateMessage(str, message, "String should not contain {0}"), paramName);
    }
}
}
<file_sep>/Kilmer/ObjectValidation.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Text;
using System.Threading.Tasks;
namespace Kilmer
{
/// <summary>Guard-clause helpers for reference-type arguments.</summary>
internal static class ObjectValidation
{
    /// <summary>Throws <see cref="ArgumentException"/> when the value is not null.</summary>
    internal static void IsNull(this object value, string paramName, string message = "")
    {
        if (value != null)
            throw new ArgumentException(Utils.CreateMessage(value, message, "Value should be null, was {0}"), paramName);
    }

    /// <summary>Throws <see cref="ArgumentNullException"/> when the value is null.</summary>
    internal static void NotNull(this object value, string paramName, string message = "")
    {
        if (value == null)
            // Bug fix: ArgumentNullException's two-string constructor is
            // (paramName, message); the original passed them swapped, so
            // ParamName carried the message text and vice versa.
            throw new ArgumentNullException(paramName, Utils.CreateMessage(value, message, "Value should not be null"));
    }
}
}
<file_sep>/Tests.Kilmer/Test_ObjectValidation.cs
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Kilmer;
namespace Tests.Kilmer
{
/// <summary>
/// Tests for <see cref="ObjectValidation"/> via the Val facade: each guard
/// has a passing case and a case expected to throw.
/// </summary>
[TestClass]
public class Test_ObjectValidation
{
    [TestMethod]
    public void IsNull()
    {
        object o = null;
        Val.IsNull(() => o); // null passes the IsNull guard
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void IsNull_Throw()
    {
        object o = new object();
        Val.IsNull(() => o); // non-null must throw
    }
    [TestMethod]
    public void NotNull()
    {
        object o = new object();
        Val.NotNull(() => o); // non-null passes the NotNull guard
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentNullException))]
    public void NotNull_Throw()
    {
        object o = null;
        Val.NotNull(() => o); // null must throw
    }
}
}
<file_sep>/Tests.Kilmer/Test_IEnumerableValidation.cs
using System;
using System.Collections.Generic;
using Kilmer;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace Tests.Kilmer
{
/// <summary>
/// Tests for <see cref="IEnumerableValidation"/> via the Val facade:
/// Empty / NotEmpty / Count each with a passing and a throwing case.
/// </summary>
[TestClass]
public class Test_IEnumerableValidation
{
    [TestMethod]
    public void NotEmpty()
    {
        IEnumerable<int> ints = new List<int> { 1, 2, 3 };
        Val.NotEmpty(() => ints); // non-empty passes
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void NotEmpty_Throw()
    {
        IEnumerable<int> ints = new List<int>();
        Val.NotEmpty(() => ints); // empty must throw
    }
    [TestMethod]
    public void Empty()
    {
        IEnumerable<int> ints = new List<int>();
        Val.Empty(() => ints); // empty passes
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void Empty_Throw()
    {
        IEnumerable<int> ints = new List<int>() { 1, 2, 3 };
        Val.Empty(() => ints); // non-empty must throw
    }
    [TestMethod]
    public void Count()
    {
        IEnumerable<int> ints = new List<int>() { 1, 2, 3 };
        Val.Count(() => ints, 3); // exact count passes
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void Count_Throw()
    {
        IEnumerable<int> ints = new List<int>() { 1, 2, 3 };
        Val.Count(() => ints, 2); // mismatched count must throw
    }
}
}
<file_sep>/Kilmer/IEnumerableValidation.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Kilmer
{
/// <summary>Guard-clause helpers for IEnumerable arguments.</summary>
internal static class IEnumerableValidation
{
    /// <summary>Throws when the collection contains any item.</summary>
    internal static void Empty<T>(this IEnumerable<T> data, string paramName, string message = "")
    {
        bool hasItems = data.Any();
        if (hasItems)
            throw new ArgumentException(Utils.CreateMessage(message, "The collection should be empty"), paramName);
    }

    /// <summary>Throws when the collection is empty.</summary>
    internal static void NotEmpty<T>(this IEnumerable<T> data, string paramName, string message = "")
    {
        bool hasItems = data.Any();
        if (!hasItems)
            throw new ArgumentException(Utils.CreateMessage(message, "The collection should not be empty"), paramName);
    }

    /// <summary>Throws when the item count differs from <paramref name="target"/>.</summary>
    internal static void CountIs<T>(this IEnumerable<T> data, int target, string paramName, string message = "")
    {
        int actual = data.Count();
        if (actual != target)
            throw new ArgumentException(Utils.CreateMessage(target, actual, message, "Expected collection to contain {0} items, but it contained {1}"), paramName);
    }
}
}
<file_sep>/Tests.Kilmer/Test_IComparableValidation_Equal.cs
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Kilmer;
namespace Tests.Kilmer
{
/// <summary>
/// Tests for the EqualTo / NotEqualTo guards across the common IComparable
/// types (int, double, bool, char, string, DateTime). Each check has a
/// passing case and an [ExpectedException] case.
/// </summary>
[TestClass]
public class Test_IComparableValidation_Equal
{
    // --- EqualTo: passing and throwing cases per type ---
    [TestMethod]
    public void EqualTo_Int()
    {
        int i = 2;
        Val.EqualTo(() => i, 2);
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void EqualTo_Int_Throw()
    {
        int i = 1;
        Val.EqualTo(() => i, 2);
    }
    [TestMethod]
    public void EqualTo_Float()
    {
        double i = 2.0;
        Val.EqualTo(() => i, 2.0);
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void EqualTo_Float_Throw()
    {
        double i = 2.0;
        Val.EqualTo(() => i, 2.1);
    }
    [TestMethod]
    public void EqualTo_Bool()
    {
        bool b = true;
        Val.EqualTo(() => b, true);
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void EqualTo_Bool_Throw()
    {
        bool b = true;
        Val.EqualTo(() => b, false);
    }
    [TestMethod]
    public void EqualTo_Char()
    {
        char c = 'A';
        Val.EqualTo(() => c, 'A');
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void EqualTo_Char_Throw()
    {
        char c = 'A';
        Val.EqualTo(() => c, 'B');
    }
    [TestMethod]
    public void EqualTo_String()
    {
        string str = "AAA";
        Val.EqualTo(() => str, "AAA");
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void EqualTo_String_Throw()
    {
        string str = "AAA";
        Val.EqualTo(() => str, "AAB");
    }
    [TestMethod]
    public void EqualTo_DateTime()
    {
        var now = DateTime.Now;
        // Round-trip (-1 day, +1 day) yields a value equal to 'now'.
        Val.EqualTo(() => now, now.AddDays(-1).AddDays(1));
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void EqualTo_DateTime_Throw()
    {
        var now = DateTime.Now;
        Val.EqualTo(() => now, now.AddDays(1));
    }
    // --- NotEqualTo: passing and throwing cases per type ---
    [TestMethod]
    public void NotEqualTo_Int()
    {
        int i = 2;
        Val.NotEqualTo(() => i, 3);
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void NotEqualTo_Int_Throw()
    {
        int i = 2;
        Val.NotEqualTo(() => i, 2);
    }
    [TestMethod]
    public void NotEqualTo_Float()
    {
        double i = 2.0;
        Val.NotEqualTo(() => i, 2.1);
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void NotEqualTo_Float_Throw()
    {
        double i = 2.0;
        Val.NotEqualTo(() => i, 2.0);
    }
    [TestMethod]
    public void NotEqualTo_Bool()
    {
        bool b = true;
        Val.NotEqualTo(() => b, false);
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void NotEqualTo_Bool_Throw()
    {
        bool b = true;
        Val.NotEqualTo(() => b, true);
    }
    [TestMethod]
    public void NotEqualTo_Char()
    {
        char c = 'A';
        Val.NotEqualTo(() => c, 'B');
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void NotEqualTo_Char_Throw()
    {
        char c = 'A';
        Val.NotEqualTo(() => c, 'A');
    }
    [TestMethod]
    public void NotEqualTo_String()
    {
        string str = "AAA";
        Val.NotEqualTo(() => str, "AAB");
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void NotEqualTo_String_Throw()
    {
        string str = "AAA";
        Val.NotEqualTo(() => str, "AAA");
    }
    [TestMethod]
    public void NotEqualTo_DateTime()
    {
        var now = DateTime.Now;
        Val.NotEqualTo(() => now, now.AddDays(1));
    }
    [TestMethod]
    [ExpectedException(typeof(ArgumentException))]
    public void NotEqualTo_DateTime_Throw()
    {
        var now = DateTime.Now;
        // Round-trip (-1 day, +1 day) equals 'now', so NotEqualTo must throw.
        Val.NotEqualTo(() => now, now.AddDays(-1).AddDays(1));
    }
}
}
<file_sep>/Kilmer/Val.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Text;
namespace Kilmer
{
public static class Val
{
public static void IsNull<T>(Expression<Func<T>> param, string message = "")
{
Tuple<string, T> nameValPair = Utils.GetNameAndValue(param);
nameValPair.Item2.IsNull(nameValPair.Item1, message);
}
public static void NotNull<T>(Expression<Func<T>> param, string message = "")
{
Tuple<string, T> nameValPair = Utils.GetNameAndValue(param);
nameValPair.Item2.NotNull(nameValPair.Item1, message);
}
public static void Above<T>(Expression<Func<T>> param, object target, string message = "")
where T : IComparable
{
Tuple<string, T> nameValPair = Utils.GetNameAndValue(param);
nameValPair.Item2.Above(target, nameValPair.Item1, message);
}
public static void AboveOrEqual<T>(Expression<Func<T>> param, object target, string message = "")
where T : IComparable
{
Tuple<string, T> nameValPair = Utils.GetNameAndValue(param);
nameValPair.Item2.AboveOrEqual(target, nameValPair.Item1, message);
}
public static void Below<T>(Expression<Func<T>> param, object target, string message = "")
where T : IComparable
{
Tuple<string, T> nameValPair = Utils.GetNameAndValue(param);
nameValPair.Item2.Below(target, nameValPair.Item1, message);
}
public static void BelowOrEqual<T>(Expression<Func<T>> param, object target, string message = "")
where T : IComparable
{
Tuple<string, T> nameValPair = Utils.GetNameAndValue(param);
nameValPair.Item2.BelowOrEqual(target, nameValPair.Item1, message);
}
public static void EqualTo<T>(Expression<Func<T>> param, object target, string message = "")
where T : IComparable
{
Tuple<string, T> nameValPair = Utils.GetNameAndValue(param);
nameValPair.Item2.EqualTo(target, nameValPair.Item1, message);
}
public static void NotEqualTo<T>(Expression<Func<T>> param, object target, string message = "")
where T : IComparable
{
Tuple<string, T> nameValPair = Utils.GetNameAndValue(param);
nameValPair.Item2.NotEqualTo(target, nameValPair.Item1, message);
}
public static void NotNullOrEmpty(Expression<Func<string>> param, string message = "")
{
Tuple<string, string> nameValPair = Utils.GetNameAndValue(param);
nameValPair.Item2.NotNullOrEmpty(nameValPair.Item1, message);
}
public static void IsNullOrEmpty(Expression<Func<string>> param, string message = "")
{
Tuple<string, string> nameValPair = Utils.GetNameAndValue(param);
nameValPair.Item2.IsNullOrEmpty(nameValPair.Item1, message);
}
public static void NotNullOrWhitespace(Expression<Func<string>> param, string message = "")
{
Tuple<string, string> nameValPair = Utils.GetNameAndValue(param);
nameValPair.Item2.NotNullOrWhitespace(nameValPair.Item1, message);
}
public static void IsNullOrWhitespace(Expression<Func<string>> param, string message = "")
{
    // Resolve the parameter's name and value, then verify it is null/empty/whitespace.
    var pair = Utils.GetNameAndValue(param);
    pair.Item2.IsNullOrWhitespace(pair.Item1, message);
}
public static void Contains(Expression<Func<string>> param, string target, string message = "")
{
    // Resolve the parameter's name and value, then verify it contains target.
    var pair = Utils.GetNameAndValue(param);
    pair.Item2.DoesContain(target, pair.Item1, message);
}
public static void NotContains(Expression<Func<string>> param, string target, string message = "")
{
    // Resolve the parameter's name and value, then verify it does not contain target.
    var pair = Utils.GetNameAndValue(param);
    pair.Item2.DoesntContain(target, pair.Item1, message);
}
public static void NotEmpty<T>(Expression<Func<IEnumerable<T>>> param, string message = "")
{
    // Resolve the parameter's name and value, then verify the sequence has elements.
    var pair = Utils.GetNameAndValue(param);
    pair.Item2.NotEmpty(pair.Item1, message);
}
public static void Empty<T>(Expression<Func<IEnumerable<T>>> param, string message = "")
{
    // Resolve the parameter's name and value, then verify the sequence is empty.
    var pair = Utils.GetNameAndValue(param);
    pair.Item2.Empty(pair.Item1, message);
}
public static void Count<T>(Expression<Func<IEnumerable<T>>> param, int target, string message = "")
{
    // Resolve the parameter's name and value, then verify the element count.
    var pair = Utils.GetNameAndValue(param);
    pair.Item2.CountIs(target, pair.Item1, message);
}
}
}
<file_sep>/Tests.Kilmer/Test_Utils.cs
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Kilmer;
namespace Tests.Kilmer
{
    // Unit tests for Kilmer.Utils: expression-tree name/value extraction
    // and the CreateMessage overloads (custom message wins over default).
    [TestClass]
    public class Test_Utils
    {
        [TestMethod]
        public void ExtractsParamNameCorrectly()
        {
            string myParam = "";
            string paramName = Utils.GetNameAndValue(() => myParam).Item1;
            Assert.AreEqual("myParam", paramName);
        }
        [TestMethod]
        public void ExtractsParamValueCorrectly()
        {
            string myParam = "42";
            string paramValue = Utils.GetNameAndValue(() => myParam).Item2;
            Assert.AreEqual("42", paramValue);
        }
        [TestMethod]
        public void ExtractsParamNameForMemberAccessCorrectly()
        {
            // For a member access (x.Ticks) the extracted name is the member's, not the variable's.
            DateTime dateTime = DateTime.Now;
            string paramName = Utils.GetNameAndValue(() => dateTime.Ticks).Item1;
            Assert.AreEqual("Ticks", paramName);
        }
        [TestMethod]
        public void ExtractsParamValueForMemberAccessCorrectly()
        {
            // dateTime is a fixed snapshot, so reading Ticks twice is deterministic.
            DateTime dateTime = DateTime.Now;
            long ticks = dateTime.Ticks;
            long paramValue = Utils.GetNameAndValue(() => dateTime.Ticks).Item2;
            Assert.AreEqual(ticks, paramValue);
        }
        [TestMethod]
        public void CreateMessageWithOneParam_UsesDefaultMessageIfNoCustomOne()
        {
            var result = Utils.CreateMessage(1, "", "Default Message: {0}");
            Assert.AreEqual("Default Message: 1", result);
        }
        [TestMethod]
        public void CreateMessageWithOneParam_UsesCustomMessageIfPresent()
        {
            var result = Utils.CreateMessage(1, "Custom Message", "Default Message: {0}");
            Assert.AreEqual("Custom Message", result);
        }
        [TestMethod]
        public void CreateMessageWithTwoParams_UsesDefaultMessageIfNoCustomOne()
        {
            var result = Utils.CreateMessage(1, 2, "", "Default Message: {0} - {1}");
            Assert.AreEqual("Default Message: 1 - 2", result);
        }
        [TestMethod]
        public void CreateMessageWithTwoParams_UsesCustomMessageIfPresent()
        {
            var result = Utils.CreateMessage(1, 2, "Custom Message", "Default Message: {0} - {1}");
            Assert.AreEqual("Custom Message", result);
        }
    }
}
<file_sep>/Tests.Kilmer/Test_IComparableValidation_Above.cs
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Kilmer;
namespace Tests.Kilmer
{
    // Tests for Val.Above / Val.AboveOrEqual across the common IComparable
    // types (int, double, bool, char, string, DateTime). The *_Throw cases
    // rely on [ExpectedException] to assert an ArgumentException is raised.
    [TestClass]
    public class Test_IComparableValidation_Above
    {
        [TestMethod]
        public void AboveOrEqual_Equal()
        {
            int i = 2;
            Val.AboveOrEqual(() => i, 2);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void AboveOrEqual_Throw()
        {
            int i = 1;
            Val.AboveOrEqual(() => i, 2);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Above_Equal_Throw()
        {
            // Above is strict: equal values must fail.
            int i = 1;
            Val.Above(() => i, 1);
        }
        [TestMethod]
        public void Above_Int()
        {
            int i = 2;
            Val.Above(() => i, 1);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Above_Int_Throw()
        {
            int i = 1;
            Val.Above(() => i, 2);
        }
        [TestMethod]
        public void Above_Float()
        {
            double i = 2.1;
            Val.Above(() => i, 2.0);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Above_Float_Throw()
        {
            double i = 2.1;
            Val.Above(() => i, 2.2);
        }
        [TestMethod]
        public void Above_Bool()
        {
            // bool comparison: true compares above false.
            bool b = true;
            Val.Above(() => b, false);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Above_Bool_Throw()
        {
            bool b = false;
            Val.Above(() => b, true);
        }
        [TestMethod]
        public void Above_Char()
        {
            char c = 'B';
            Val.Above(() => c, 'A');
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Above_Char_Throw()
        {
            char c = 'A';
            Val.Above(() => c, 'B');
        }
        [TestMethod]
        public void Above_String()
        {
            string s = "AAB";
            Val.Above(() => s, "AAA");
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Above_String_Throw()
        {
            string s = "AAA";
            Val.Above(() => s, "AAB");
        }
        [TestMethod]
        public void Above_DateTime()
        {
            var now = DateTime.Now;
            Val.Above(() => now, now.AddDays(-1));
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Above_DateTime_Throw()
        {
            var now = DateTime.Now;
            Val.Above(() => now, now.AddDays(1));
        }
    }
}
<file_sep>/Tests.Kilmer/Test_StringValidation.cs
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Kilmer;
namespace Tests.Kilmer
{
    // Tests for the string-specific Val checks: null/empty, null/whitespace,
    // and substring containment. *_Throw cases use [ExpectedException].
    [TestClass]
    public class Test_StringValidation
    {
        [TestMethod]
        public void NotNullOrEmpty()
        {
            // A single space is non-empty (whitespace counts as content here).
            string s = " ";
            Val.NotNullOrEmpty(() => s);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void NotNullOrEmpty_Throw_Null()
        {
            string s = null;
            Val.NotNullOrEmpty(() => s);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void NotNullOrEmpty_Throw_Empty()
        {
            string s = "";
            Val.NotNullOrEmpty(() => s);
        }
        [TestMethod]
        public void IsNullOrEmpty()
        {
            string s = null;
            Val.IsNullOrEmpty(() => s);
            string s2 = "";
            Val.IsNullOrEmpty(() => s2);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void IsNullOrEmpty_Throw()
        {
            string s = " ";
            Val.IsNullOrEmpty(() => s);
        }
        [TestMethod]
        public void NotNullOrWhitespace()
        {
            string s = "a a";
            Val.NotNullOrWhitespace(() => s);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void NotNullOrWhitespace_Throw_Null()
        {
            string s = null;
            Val.NotNullOrWhitespace(() => s);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void NotNullOrWhitespace_Throw_Empty()
        {
            string s = "";
            Val.NotNullOrWhitespace(() => s);
        }
        [TestMethod]
        public void IsNullOrWhitespace()
        {
            string s = null;
            Val.IsNullOrWhitespace(() => s);
            string s2 = "";
            Val.IsNullOrWhitespace(() => s2);
            string s3 = " ";
            Val.IsNullOrWhitespace(() => s3);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void IsNullOrWhitespace_Throw()
        {
            string s = "a";
            Val.IsNullOrWhitespace(() => s);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void NotNullOrWhitespace_Throw_Whitespace()
        {
            // NOTE(review): the s2 assertion is unreachable — the first call
            // already throws the expected exception. Consider splitting this
            // into two tests so both inputs are actually exercised.
            string s = " \n \t \r ";
            string s2 = @"
";
            Val.NotNullOrWhitespace(() => s);
            Val.NotNullOrWhitespace(() => s2);
        }
        [TestMethod]
        public void Contains()
        {
            string s = "ABC";
            Val.Contains(() => s, "A");
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Contains_Throw()
        {
            string s = "ABC";
            Val.Contains(() => s, "X");
        }
        [TestMethod]
        public void NotContains()
        {
            string s = "ABC";
            Val.NotContains(() => s, "X");
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void NotContains_Throw()
        {
            string s = "ABC";
            Val.NotContains(() => s, "A");
        }
    }
}
<file_sep>/Kilmer/IComparableValidation.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Kilmer
{
internal static class IComparableValidation
{
    // Comparison checks used by Val; each throws ArgumentException with a
    // formatted message (custom message wins — see Utils.CreateMessage).
    //
    // FIX: IComparable.CompareTo is only contracted to return a negative,
    // zero, or positive value — not exactly -1/0/+1 — so every guard below
    // tests the sign of the result instead of comparing it to ±1.
    internal static void AboveOrEqual(this IComparable value, object target, string paramName, string message = "")
    {
        if (value.CompareTo(target) < 0)
            throw new ArgumentException(Utils.CreateMessage(target, value, message, "Value has to be above or equal to {0}, was {1}"), paramName);
    }
    internal static void BelowOrEqual(this IComparable value, object target, string paramName, string message = "")
    {
        if (value.CompareTo(target) > 0)
            throw new ArgumentException(Utils.CreateMessage(target, value, message, "Value has to be below or equal to {0}, was {1}"), paramName);
    }
    internal static void Above(this IComparable value, object target, string paramName, string message = "")
    {
        // Strictly greater: zero (equal) must also throw.
        if (value.CompareTo(target) <= 0)
            throw new ArgumentException(Utils.CreateMessage(target, value, message, "Value has to be above {0}, was {1}"), paramName);
    }
    internal static void Below(this IComparable value, object target, string paramName, string message = "")
    {
        // Strictly smaller: zero (equal) must also throw.
        if (value.CompareTo(target) >= 0)
            throw new ArgumentException(Utils.CreateMessage(target, value, message, "Value has to be below {0}, was {1}"), paramName);
    }
    internal static void EqualTo(this IComparable value, object target, string paramName, string message = "")
    {
        if (value.CompareTo(target) != 0)
            throw new ArgumentException(Utils.CreateMessage(target, value, message, "Value has to be equal to {0}, was {1}"), paramName);
    }
    internal static void NotEqualTo(this IComparable value, object target, string paramName, string message = "")
    {
        if (value.CompareTo(target) == 0)
            throw new ArgumentException(Utils.CreateMessage(target, value, message, "Value can't be equal to {0}"), paramName);
    }
}
}
<file_sep>/Tests.Kilmer/Test_IComparableValidation_Below.cs
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Kilmer;
namespace Tests.Kilmer
{
    // Tests for Val.Below / Val.BelowOrEqual across the common IComparable
    // types; mirrors Test_IComparableValidation_Above. *_Throw cases rely on
    // [ExpectedException] to assert an ArgumentException is raised.
    [TestClass]
    public class Test_IComparableValidation_Below
    {
        [TestMethod]
        public void BelowOrEqual_Equal()
        {
            int i = 2;
            Val.BelowOrEqual(() => i, 2);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void BelowOrEqual_Throw()
        {
            int i = 3;
            Val.BelowOrEqual(() => i, 2);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Below_Equal_Throw()
        {
            // Below is strict: equal values must fail.
            int i = 2;
            Val.Below(() => i, 2);
        }
        [TestMethod]
        public void Below_Int()
        {
            int i = 2;
            Val.Below(() => i, 3);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Below_Int_Throw()
        {
            int i = 2;
            Val.Below(() => i, 1);
        }
        [TestMethod]
        public void Below_Float()
        {
            double d = 2.0;
            Val.Below(() => d, 2.1);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Below_Float_Throw()
        {
            double d = 2.0;
            Val.Below(() => d, 1.9);
        }
        [TestMethod]
        public void Below_Bool()
        {
            // bool comparison: false compares below true.
            bool b = false;
            Val.Below(() => b, true);
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Below_Bool_Throw()
        {
            bool b = true;
            Val.Below(() => b, false);
        }
        [TestMethod]
        public void Below_Char()
        {
            char c = 'A';
            Val.Below(() => c, 'B');
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Below_Char_Throw()
        {
            char c = 'B';
            Val.Below(() => c, 'A');
        }
        [TestMethod]
        public void Below_String()
        {
            string str = "AAA";
            Val.Below(() => str, "AAB");
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Below_String_Throw()
        {
            string str = "AAB";
            Val.Below(() => str, "AAA");
        }
        [TestMethod]
        public void Below_DateTime()
        {
            var now = DateTime.Now;
            Val.Below(() => now, now.AddDays(1));
        }
        [TestMethod]
        [ExpectedException(typeof(ArgumentException))]
        public void Below_DateTime_Throw()
        {
            var now = DateTime.Now;
            Val.Below(() => now, now.AddDays(-1));
        }
    }
}
<file_sep>/Kilmer/Utils.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Text;
namespace Kilmer
{
internal static class Utils
{
    // Returns customMessage when supplied, otherwise defaultMessage
    // formatted with {0}=target and {1}=value.
    internal static string CreateMessage(object target, object value, string customMessage, string defaultMessage)
    {
        return string.IsNullOrWhiteSpace(customMessage)
            ? string.Format(defaultMessage, target, value)
            : customMessage;
    }
    // Single-placeholder overload: {0}=value.
    internal static string CreateMessage(object value, string customMessage, string defaultMessage)
    {
        return string.IsNullOrWhiteSpace(customMessage)
            ? string.Format(defaultMessage, value)
            : customMessage;
    }
    // No-placeholder overload: the default message is used verbatim.
    internal static string CreateMessage(string customMessage, string defaultMessage)
    {
        return string.IsNullOrWhiteSpace(customMessage) ? defaultMessage : customMessage;
    }
    // Extracts ("paramName", currentValue) from a lambda such as () => myParam.
    // Throws ArgumentException when the body is not a member access.
    internal static Tuple<string, T> GetNameAndValue<T>(Expression<Func<T>> expr)
    {
        var member = expr.Body as MemberExpression;
        if (member == null)
            throw new ArgumentException("Expression has to be of type MemberExpression");
        return Tuple.Create(member.Member.Name, expr.Compile()());
    }
}
}
|
63696d3f9c39ae529fbc6ed6ecd7bf20bdcf72d6
|
[
"C#"
] | 13
|
C#
|
holmstrom/Kilmer
|
691f005400930f0122093f4410bf13ce19b4620b
|
f6f8952979269be53e18d44fbeb8a18d5bf0fb3a
|
refs/heads/main
|
<repo_name>irounik/Basic-IT-Workshop<file_sep>/Lab 3/Q6_Cube_Series.py
n = int(input('Enter number of terms:'))
for i in range(1,n+1): print(i**3, end= ' ')<file_sep>/Lab 1/swap.py
a = int(input('Enter first number(a): '))
b = int(input('Enter second number(b): '))
a = a+b
b = a-b
a = a-b
print('Now a is:',a,'and b is', b)<file_sep>/Lab 1/day_of_week.py
n = int(input('Enter the day number: '))
d = {1:'Monday', 2: 'Tuesday', 3: 'Wednesday', 4: 'Thursday', 5:'Friday', 6:'Saturday', 7:'Sunday'}
print('Day number',n,'is',d.get(n))<file_sep>/Lab 1/is_vowel.py
c = input('Enter a character: ')
if c == 'a' or c == 'e' or c == 'i' or c=='o' or c == 'u': print(c,'is Vowel')
else: print(c,'is not a Vowel') <file_sep>/Lab 3/Q5_Movie_Cost.py
videos = int(input("Enter number of videos: "))
movies = int(input("Enter the number of movies: "))
days = int(input("Enter the number of days: "))
cost = (75*videos + 50*movies)*days
print('Total cost will be:',cost)<file_sep>/Lab 4/Q2_Matrix_Transpose.py
def printMat(M):
for i in range(len(M)):
for j in range(len(M[0])):
print(M[i][j],end = ' ')
print()
M = [[1,2],[4,5],[3,6]]
T = [[0,0,0],[0,0,0]]
for i in range(len(M)):
for j in range(len(M[0])):
T[j][i]=M[i][j]
print("Matrix M:")
printMat(M)
print('M transpose:')
printMat(T)<file_sep>/Lab 1/area_of_circle.py
radius = int(input('Enter the radius:'))
area = 22/7* radius**2
print('Area is:',round(area,2),'sq. untis')<file_sep>/Lab 3/Q2_SumOfOdd.py
sum=0
for i in range(1,101):
if(i%2==1):
sum+=i
print('Sum of odd digits from 1 to 100 is:',sum)<file_sep>/Lab 1/area_of_triangle.py
a = float(input('Enter length of the first side: '))
b = float(input('Enter length of the second side: '))
c = float(input('Enter length of the third side: '))
s = a+b+c/2
area = (s*(s-a)*(s-b)*(s-c))**0.5
print('Area is: {:0.2f}'.format(area))<file_sep>/Lab 4/Q3_Dec_Bin_Hex.py
n = int(input('Enter a number: '))
print('Hex:', hex(n))
print('Oct:', oct(n))
print('Bin:', bin(n))<file_sep>/Lab 3/Q3_Print_20_to_1.py
for i in range(20,0,-1): print(i,end=' ')<file_sep>/Lab 1/anual_tax.py
sal = int(input('Enter the anual salary'))
if sal < 150000: per = 'No Tax'
elif sal >150000 and sal <=300000: per = 10
elif sal >300000 and sal <=500000: per = 20
else: per = 30
if per == 'No Tax': print(per)
else: print('Tax on Rs',sal,'/- will be',per,'%, that is Rs',per*sal/100)<file_sep>/Lab 4/Q4_Sort_Word.py
word = input('Enter the word: ')
print('Sorted word: ', *(sorted(word)), sep='')<file_sep>/Lab 1/money_change.py
amount = int(input('Enter the amount: '))
ten = five = two = one = 0
ten = int(amount/10)
amount = amount%10
five = int(amount/5)
amount = amount%5
two = int(amount/2)
amount = amount%2
one = amount
print(one,'One rupee coins\n', two,'two rupee coins\n', five,'five rupee coins\n', ten,'ten rupee coins')<file_sep>/Lab 3/Q4_Power_of_Two.py
for i in range(1,11):print(2**i,end=' ')<file_sep>/Lab 4/Q5_Prime.py
num = int(input('Enter the number: '))
prime = True
for i in range(2,int(num**0.5)+1):
if(num%i==0):
prime = False
break
print(num,'is Prime') if prime else print(num,'is Not Prime')<file_sep>/Lab 4/Q6_Palindrome.py
def palindrome(s):
i=0
j=len(s)-1
while(i<j) :
if(s[i]!=s[j]): return False
else:
i+=1
j-=1
return True
print('It\'s Parindrome') if palindrome(input('Enter a String: ')) else print('In\'s Not Palindrome')<file_sep>/Lab 3/Q8_SeriesSum.py
x = int(input('Enter the value of x: '))
n = int(input('Enter the number of terms: '))
s = 0
for i in range(n):
term = x**(i+1)
if(i%2==0): s-=term
else: s+=term
print('Sum of series is:',s)<file_sep>/Lab 3/Q1_NoOfDigits.py
num = int(input("Enter the number: "))
temp = num
count = 0
while(temp>0):
temp//=10
count+=1
print('Number of digts in',num,'is',count)<file_sep>/Lab 3/Q7_AP_Series.py
n = int(input('Enter the number of elements: '))
for i in range(n): print(-5+ 3*i, end=' ')<file_sep>/Lab 1/roots_of_quadratic_eqn.py
print('For an equatin: a*x^2 + b*x + c = 0')
a = float(input('Enter a: '))
b = float(input('Enter b: '))
c = float(input('Enter c: '))
d = b*b - 4*a*c
x1 = (-b + (d**0.5))/(2*a)
x2 = (-b - (d**0.5))/(2*a)
print('First Root is:',x1,'\nSecond Root is:',x2,2)<file_sep>/Lab 1/leap_year.py
year = int(input('Enter the year: '))
leap = False
if year%4==0:
if year % 100 == 0:
if year % 400 == 0: leap = True
else: leap = True
if leap : print(year,'is leap year.')
else: print(year,'is not a leap year.')<file_sep>/Lab 1/is_alphabet.py
ip = input("Enter the input character: ")
if (ip>='a' and ip<='z') or (ip>='A' and ip<='Z'): print(ip + ' is an alphabet')
else: print(ip+ ' is not an alphabet')<file_sep>/Lab 1/gratest_of_three.py
a = input('Enter the first number: ')
b = input('Enter the second number: ')
c = input('Enter the third number: ')
if a>b and a>c: m = a
elif b>c: m = b
else: m = c
print(m,'is gratest')<file_sep>/Lab 4/Q1_Create_Array.py
import array
a = array.array('i',10*[0])
for i in a: print(i,end=' ')<file_sep>/Lab 1/hex_and_oct.py
num = int(input('Enter a number: '))
h = o = ''
d = {0:'A',1:'B',2:'C',3:'D',4:'E',5:'F'}
# for hexadecimal
t1=num
while(t1>0):
temp = t1 % 16
if(temp > 9): temp = d.get(temp%10)
h =str(temp) + h
t1 = int(t1/16)
print('hex of',num,'is',h)
# for octal
t2 = num
while(t2>0):
temp = t2 % 8
o = str(temp) + o
t2 = int(t2/10)
print('oct of',num,'is',o)<file_sep>/Lab 4/Q7_Sum_of_Digits.py
print('Sum of it\'s digts is:', sum(map(int,(input('Enter a number: ')))))<file_sep>/Lab 1/case_change.py
c = input('Enter a character: ')
# ASCII upper- and lower-case letters differ by 32 (0x20).
if 'a' <= c <= 'z':
    print('Upper Case will be',chr(ord(c) - 32))
else:
    print('Lower Case will be: ',chr(ord(c) + 32))
|
68acd493596678fdedef1d5d096f1cdede7d5a3c
|
[
"Python"
] | 28
|
Python
|
irounik/Basic-IT-Workshop
|
3bdef1ab300fbcbe2a84abc3e5b20a36a22fbd73
|
28ea26a0746ae662cff33ae23977a7bcfffe2828
|
refs/heads/master
|
<repo_name>CedarGroveStudios/JP_Logo<file_sep>/README.md
# JP_Logo
## Backlighted logo PCB
<file_sep>/ATtiny85_jp_2018-12-13_v00.ino
/* ATtiny85_jp_2018-12-13_v00
For back-lighted PCB
ATtiny85-10 (low voltage)
to conserve power, burn bootloader for 1MHz internal clock
*/
#include <avr/power.h>
// define PWM output pins
// NOTE(review): in the original, the pin comments contradicted the variable
// names (red_led was labelled "green LED" and grn_led_2 "red LED"); the
// labels below follow the names — confirm against the actual board wiring.
int
red_led = 4, // red LED PWM OUT, pin #4 -- TODO confirm colour/wiring
grn_led_1 = 1, // green LED #1 PWM OUT, pin #1
grn_led_2 = 0; // green LED #2 PWM OUT, pin #0 -- TODO confirm colour/wiring
// define constants
int
red_low = 0, // minimum red LED brightness
red_glow = 20, // maximum red LED brightness
red_hot_rate = 80, // step rate to maximum brightness in ms
red_cold_rate = 120, // step rate to minimum brightness in ms
grn_glow = 24, // maximum green LED brightness
grn_on_flick = 2, // number of on flickers
grn_on_time = 10, // flicker time on in ms
grn_off_time = 60; // time between flickers in ms
// define variables
int
count = 0; // general counter
void setup() {
  // Start from a known-dark state: red at its floor, both greens off,
  // then wait one second before the animation loop begins.
  analogWrite(red_led, red_low);
  analogWrite(grn_led_1, 0);
  analogWrite(grn_led_2, 0);
  delay(1000);
}
void loop() {
  // One animation cycle:
  //  1) flicker the green LEDs on/off a random 1..5 times,
  //  2) fade the red LED up to red_glow,
  //  3) hold 3-6 s, single green blink, fade red back down,
  //  4) long 5-10 s pause before the next cycle.
  grn_on_flick = random(1,5);
  for (count=0; count<=grn_on_flick; count++) {
    analogWrite(grn_led_1, 0);
    analogWrite(grn_led_2, 0);
    delay(grn_off_time);
    analogWrite(grn_led_1, grn_glow);
    analogWrite(grn_led_2, grn_glow);
    delay(grn_on_time);
  }
  delay(5);
  // Ramp the red channel up one PWM step every red_hot_rate ms.
  for (count=red_low; count<=red_glow; count++) {
    delay(red_hot_rate);
    analogWrite(red_led, count);
  }
  delay(random(3000,6000));
  // Single green blink while red is at full glow.
  analogWrite(grn_led_1, 0);
  analogWrite(grn_led_2, 0);
  delay(grn_off_time);
  analogWrite(grn_led_1, grn_glow);
  analogWrite(grn_led_2, grn_glow);
  delay(grn_on_time);
  analogWrite(grn_led_1, 0);
  analogWrite(grn_led_2, 0);
  // Ramp the red channel back down, slightly slower than the ramp up.
  for (count=red_glow; count>=red_low; count--) {
    delay (red_cold_rate);
    analogWrite(red_led, count);
  }
  delay(random(5000,10000));
}
|
89062a112fadc7d54c9d07c3cfb20be79d38a28b
|
[
"Markdown",
"C++"
] | 2
|
Markdown
|
CedarGroveStudios/JP_Logo
|
7cfbf35c6ab93e4c93a8aeacb62577d0e433fe6e
|
bac1d7ddb9b547529979fc8856f5ec772b1f82a8
|
refs/heads/master
|
<repo_name>Namaham/test_CProject<file_sep>/README.md
# ■Visual Studio をインストールしよう
既にインストール済みの場合、この手順は飛ばしても良いです。
Unity を学んでた場合、既に入ってる可能性があります。
現在の最新版は Visual Studio 2019 です。
別のバージョンが入ってるなら、それはそのままでかまいませんので、
Visual Studio 2019 もインストールして下さい。
以下の Microsoft のページの手順2と手順3を行って下さい。
https://docs.microsoft.com/ja-jp/visualstudio/install/install-visual-studio?view=vs-2019
インストールするのは、Visual Studio 2019 の 「コミュニティ」 と書かれている無料ダウンロード版です。
ダウンロードボタンを押すとダウンロードが開始されます。
ダウンロードしたインストーラを実行すると Visual Studio Installer のインストールが開始されます。
上記サイトの手順3に書いてある通りなのですが、Visual Studio 2019 がいきなりインストールされるんじゃなくて、
それをインストールするためのインストーラがインストールされます。
ややこしいですが、Visual Studio に関するものをインストールするための管理アプリと思ってください。
Visual Studio Installer がインストールされると、上記サイトの手順4のウィンドウが自動的に開きます。
右下にある [インストール] と言うボタンを押して [続行] を選択してください。
* 補足1:[ダウンロードしながらのインストール] は不測の事態もあるのでやめましょう。
* 補足2:ワークロードは後で自由に追加できるのでとりあえず無視して下さい。
インストールが完了したら Visual Studio Installer と Visual Studio 2019 と言う
2つのアプリがインストールされたことになります。
# ■C++ のワークロードを追加しよう
Visual Studio Installer から **C++ によるデスクトップ開発** と言うワークロードを追加します。
ワークロードって何? って事を一言で言うと、**開発言語** と **プラットフォーム** です。
**C++ によるデスクトップ開発** なら、
・ 開発言語は c++ 言語 (c 言語も含む) で、
・ プラットフォームは Windows 用のデスクトップアプリケーション
と言う事です。
左下のスタートメニュー(Windows マークのアイコン)から、Visual Studio Installer と言うアプリを探して起動して下さい。
Visual Studio Installer が起動しましたら、ワークロードの中から、
**C++ によるデスクトップ開発** と言うワークロードを探し、チェックを付けて、
右下にある [変更] ボタンを押して下さい。
警告の確認ウィンドウが表示された場合は、[続行] を選択してください。
後は自動的に処理が進みますが、結構時間がかかります。
追加処理が完了したら、表示されているウィンドウはすべて閉じて下さい。
# ■プロジェクトを作成しよう
左下のスタートメニュー(Windows マークのアイコン)から、Visual Studio 2019 と言うアプリを探して起動して下さい。
Visual Studio 2019 開始ウィンドウが表示されたら 「新しいプロジェクトの作成」 を選択してください。
新しいプロジェクトの作成と言う画面に切り替わります。
<file_sep>/n000/test/main.cpp
#include <stdio.h>
#include <conio.h>
#include <windows.h>
#define TAMAKAZU 3
int main()
{
    // Console toy (Windows, VT escape codes): each key press stores its
    // character in one of TAMAKAZU slots at row 20; every ~50 ms frame the
    // screen is cleared and each live character is redrawn one row higher
    // until it reaches the top (y == 0 marks the slot unused).
    int f = 0; // next slot to fill (this initialization is required)
    int y[TAMAKAZU]; // current row per slot; 0 = unused
    int c[TAMAKAZU]; // character per slot (only read while y[i] > 0)
    // initialize y (this initialization is required)
    for (int i = 0; i < TAMAKAZU; i++) {
        y[i] = 0;
    }
    while (1) {
        // fetch a character, if one is waiting
        if (_kbhit()) {
            // the slot index depends on f (slots are reused round-robin)
            c[f] = _getch();
            y[f] = 20;
            // advance f to the next slot
            f = (f + 1) % TAMAKAZU;
        }
        // clear the screen (ESC [2J)
        printf("\x1b[2J");
        // draw every live character at column 10, row y[i], then move it up
        for (int i = 0; i < TAMAKAZU; i++) {
            if (y[i] > 0) {
                printf("\x1b[%d;10H%c", y[i], c[i]);
                y[i]--;
            }
        }
        // rest until the next frame
        Sleep(50);
    }
    return 0;
}<file_sep>/n000/test/test.cpp
/*
#include <stdio.h>
#include <conio.h>
#include <windows.h>
//----------------------------------------------------------------------------------------------------------------------
void set_cursor_disp(bool onoff) {
HANDLE hOut;
CONSOLE_CURSOR_INFO cci;
// 出力用ハンドルの取得
hOut = GetStdHandle(STD_OUTPUT_HANDLE);
// CONSOLE_CURSOR_INFO構造体の現在の状態を取得する
GetConsoleCursorInfo(hOut, &cci);
// メンバ変数であるbVisibleがカーソルの表示・非表示を制御する変数なので、これをFALSEにする事でカーソルを非表示にできる
cci.bVisible = onoff;
// 変更した構造体情報をコンソールWindowにセットする
SetConsoleCursorInfo(hOut, &cci);
}
//----------------------------------------------------------------------------------------------------------------------
#define __USE_SLOT 0
#if __USE_SLOT
long z;
char j[] = "000102101112202122001020011121021222001122021120", s[][17] = { "\x1b[>5h","回:+ 止:1~3 終:/"," " };
int g, a, b, e[28][3], o = 9, m[3], n[8], Y[3], c = 0;
int L(int a) {
return a -= a > 27 ? 28 : 0;
}
int R(int a, int b, int c) {
return e[L(j[a + b] - 48 + Y
[j[a + c] - 48] / 3)][j[a + c] - 48];
}
void M(int a) {
printf("\x1b[%dm", a);
}
void S(int a) {
printf("%s", s[a]);
}
void H(int y, int x) {
printf("\x1b[%d;%dH", y, x);
}
int slot() {
S(0);H(6, 33);S(1);for (;;) {
g = _kbhit() ? _getch() : 0;H(20, 37);printf("%5d", o);
if (c > 4)c = 0;for (a = 0;a < 8;a++) {
if (c < 1) {
m[a / 3] = 2;n[a] = 0;Y[a / 3] = 0;for (b = 0;b < 84;b++)
e[b / 3][b % 3] = rand() % 7;
}if (c == 1) { m[a / 3] = 0;if (a > 6)c = 2; }if (c > 1 && a < 3) {
b = Y[a];if (m[a
] < 1 && g == 49 + a)m[a] = 1;if (m[a] == 1 && b % 3 == 0) { m[a] = 2;c++; }if (m[a] < 2)Y[a] = b == 83 ? 0 : b + 1
;
}
}if (c < 1 && g == 43) { c = 1;o--; }if (g == 47 || o < 0)return(0);if (c > 4) {
b = 0;for (a = 0;a < 43;a += 6)
if (R(a, 0, 1) == R(a, 2, 3) && R(a, 0, 1) == R(a, 4, 5)) { b++;n[a / 6] = 1; }o += b * b * b;
}if (c > 1) {
for
(a = 0;a < 81;a++) {
H(17 - a / 9, 32 + a % 9 * 2);for (b = 0;b < 47;b += 2)if (n[b / 6] == 1 && j[b] - 48 == a / 9
/ 3 && j[b + 1] - 48 == a % 9 / 3)M(5);M(e[L((Y[a % 9 / 3] + a / 9) / 3)][a % 9 / 3] + 41);S(2);M(0);
}
}for (
z = 0;z < 2000000;z++);
}
}
#endif
//----------------------------------------------------------------------------------------------------------------------
int main() {
char c;
char* p;
p = &c;
*p = 97;
printf("%c\n", c);
#if 0
char sss[10];
scanf_s("%9s", sss, 10);
printf("%s\n", sss);
scanf_s("%s", sss, 10);
printf("%s\n", sss);
#endif
#if 0 //-------------------------------------------------------
int aaa;
scanf_s("%d", &aaa);
printf("%d\n", aaa);
char sss[10];
scanf_s("%s", sss, 10);
printf("%s\n", sss);
#endif
#if 0 //-------------------------------------------------------
char mojiretu[4];
mojiretu[0] = 'a'; // 97 = a
mojiretu[1] = 'b'; // 97 = b
mojiretu[2] = 'c'; // 97 = c
mojiretu[3] = '\0'; // 0 = null コード。これが無いと実行時エラーになります。
printf("%s\n", mojiretu); // abc と出力されます。
mojiretu[0] = 97; // 97 = a
mojiretu[1] = 98; // 97 = b
mojiretu[2] = 99; // 97 = c
mojiretu[3] = 0; // 0 = null コード。これが無いと実行時エラーになります。
printf("%s\n", mojiretu); // abc と出力されます。
#endif
#if 0 //-------------------------------------------------------
char s[8];
scanf_s("%s", s, 8);
rewind(stdin);
// scanf_s("%s%*[^\n]", s, 8);
printf("%s\n", s);
printf("----------\n");
gets_s(s, 8);
printf("%s\n", s);
printf("----------\n");
char buf[16];
puts("何か入力してください。");
gets_s(buf, sizeof(buf));
puts(buf);
#endif
//-------------------------------------------------------
set_cursor_disp(false);
// printf("\x1b[>5h");
#if 0
int a;
printf("Hit any key...xxx\n");
while (1) {
if (_kbhit()) {
// a = 97;
a = _getch();
if (a == 0x1b) { // ESC
break;
}
printf("\r"); // 行頭へ移動
if (a == 0xe0) {
a = _getch();
switch (a) {
case 0x48: printf("↑"); break;
case 0x50: printf("↓"); break;
case 0x4B: printf("←"); break;
case 0x4D: printf("→"); break;
default: printf("?"); break;
}
}
else {
printf("%2x = %3d = %c", a, a, a);
}
printf("\x1b[K"); // 行末までクリア
}
}
#endif
//-------------------------------------------------------
#if __USE_SLOT
slot();
#endif
//-------------------------------------------------------
set_cursor_disp(true);
// printf("\x1b[>5l");
return (0);
}
#include <stdio.h>
#include <conio.h>
#include <stdlib.h>
int main()
{
while (1) {
if (_kbhit()) {
int c = _getch();
int x = rand() % 10 + 1;
printf("\x1b[3;%dH", x);
printf("%c", c);
}
}
return 0;
}
rewind(stdin);
fflush(stdin);
while (_kbhit()) { _getch(); }
*/
|
0cbdd69cd8e197265d95fa23ca8a6b63736aa46d
|
[
"Markdown",
"C++"
] | 3
|
Markdown
|
Namaham/test_CProject
|
758ee0fefbf3d957d090d6a9faa375c015ec65c2
|
0e048d310388143e3191b3cae4b2d6741d1ffd6e
|
refs/heads/master
|
<file_sep>How to use: run `scrapy runspider kompas_scrapper.py` (install Scrapy first).
<file_sep>import scrapy
from datetime import timedelta, date
# helper
def daterange(start_date, end_date):
    # Yield each date from start_date (inclusive) up to end_date (exclusive).
    total_days = int((end_date - start_date).days)
    for offset in range(total_days):
        yield start_date + timedelta(days=offset)
def url_date_generator():
    # Build one Kompas business-news search URL per day in the crawl window
    # (2010-01-02 inclusive through 2017-02-03 exclusive).
    first = date(2010, 1, 2)
    last = date(2017, 2, 3)
    base = "http://bisniskeuangan.kompas.com/search/bisniskeuangan/"
    return [base + day.strftime("%Y-%m-%d") for day in daterange(first, last)]
class QuotesSpider(scrapy.Spider):
    # Scrapy spider that visits one Kompas search page per day (see
    # url_date_generator) and records each result's title and URL.
    name = "kompas"
    start_urls = url_date_generator()
    # NOTE(review): these handles are opened at class-definition time and
    # are never closed, so buffered output may be lost on abnormal exit.
    f_url = open('url.csv', 'w')
    f_content = open('content.txt','w')
    f_title = open('title.csv','w')
    def parse(self, response):
        # Collect (href, link text) from every entry in the result list.
        content_urls = []
        content_titles = []
        for news in response.css('div.list-latest'):
            content_urls.append( news.xpath('a/@href').extract_first() )
            content_titles.append( news.xpath('a/text()').extract_first() )
        # One line per result; titles and URLs are written to parallel files.
        for url, title in zip(content_urls, content_titles):
            self.f_title.write('%s\n'% title )
            self.f_url.write('%s\n'%url)
        # Article-body crawling is intentionally disabled:
        #for url in content_urls:
        # yield scrapy.Request(url, callback=self.parse_content)
    def parse_content(self, response):
        # Extract all text nodes from the article body container.
        content = response.css('div.kcm-read-text').xpath('node()//text()').extract()
        #join them
        res = ""
        for line in content:
            # NOTE(review): encode(...) returns bytes on Python 3, making
            # `str += bytes` raise TypeError — this looks written for
            # Python 2; confirm the intended interpreter.
            res += line.encode('ascii', 'ignore')
        self.f_content.write('%s\n'% res)
7486f991479b302d34e3c018d3fd9e0a84b8fcb0
|
[
"Markdown",
"Python"
] | 2
|
Markdown
|
afaji/indonesian_news_dataset
|
08554c800cdd34a69235b96a4536f202ce257624
|
ec2ecc2622a3a4007d2110bad2d66a3a7ad361d3
|
refs/heads/master
|
<file_sep>/**
* Created by Dannis on 2014/11/20.
*/
// Module-level selection state: the option values currently toggled on in
// list 1 and list 2 respectively (maintained by selectItem()).
var selectedItems1 = [];
var selectedItems2 = [];
function buildMultiSelect(obj, dataOptions) {
    // Builds a 2- or 3-column multi-select widget inside `obj` (a table):
    // two searchable source lists plus, in 3-column mode, a "selected" list
    // with transfer buttons. Initial data is loaded via dataOptions.param1/2.
    var columnCount = 3;// number of columns, default 3
    var width = 480;// overall width, default 480px
    var fontSize = 14;// font size, default 14px
    var label1 = "选择列一";// label for column 1
    var label2 = "选择列二";// label for column 2
    var label3 = "已选择列";// label for the "selected" column
    if (dataOptions) {
        // NOTE(review): param1/param2 are dereferenced without a null check
        // below — callers appear required to supply both; confirm.
        columnCount = dataOptions.columnCount ? dataOptions.columnCount : columnCount;
        width = dataOptions.width && dataOptions.width >= width ? dataOptions.width : width;
        fontSize = dataOptions.fontSize ? dataOptions.fontSize : fontSize;
        label1 = dataOptions.param1.label ? dataOptions.param1.label : label1;
        label2 = dataOptions.param2.label ? dataOptions.param2.label : label2;
    }
    var selectWidth = Math.round(width * 0.3);// list-box width (3 columns)
    if (columnCount == 2) {
        selectWidth = Math.round(width * 0.45);// list-box width (2 columns)
    }
    // Reset the container and apply the computed sizing.
    $(obj).empty();
    $(obj).css({
        "width": width + "px",
        "font-size": fontSize + "px",
        "text-align": "center"
    });
    // Inline styles shared by the generated controls.
    var selectStyle = "margin:0;height:220px;padding:0;width:" + selectWidth + "px;overflow:auto;";
    var buttonStyle = "width:30px;height:22px;font-size:14px;line-height:12px;display:block";
    var inputStyle = "margin:0;padding:0 3px;width:" + selectWidth + "px;";
    var labelRow;
    var searchRow;
    var selectRow;
    if (columnCount == 2) {
        // 2-column layout: the transfer buttons only appear in linkage mode.
        var buttonHtml = "";
        if (dataOptions.linkage) {
            buttonHtml = "<button id='select-button-2' style='" + buttonStyle + "'>>></button><br>"
                + "<button id='deselect-button' style='" + buttonStyle + "'><<</button>"
        }
        labelRow = "<tr style='text-align:left'>"
            + "<td><label id='label1' for='search-input-1'>" + label1 + "</label></td>"
            + "<td></td>"
            + "<td><label id='label2' for='search-input-2'>" + label2 + "</label></td>"
            + "</tr>";
        searchRow = "<tr style='text-align:left'>"
            + "<td><input id='search-input-1' type='text' class='form-control' style='" + inputStyle + "'></td>"
            + "<td></td>"
            + "<td><input id='search-input-2' type='text' class='form-control' style='" + inputStyle + "'></td>"
            + "</tr>";
        selectRow = "<tr style='vertical-align:top'>"
            + "<td><select id='selectable-items-1' multiple='multiple' class='form-control' style='" + selectStyle + "'></select></td>"
            + "<td style='vertical-align:middle'>"
            + buttonHtml
            + "</td>"
            + "<td><select id='selectable-items-2' multiple='multiple' class='form-control' style='" + selectStyle + "'></select></td>"
            + "</tr>";
    } else if (columnCount == 3) {
        // 3-column layout: list 1 -> list 2 via select-button-1, and
        // list 2 <-> selected-items via select-button-2 / deselect-button.
        labelRow = "<tr style='text-align:left'>"
            + "<td><label id='label1' for='search-input-1'>" + label1 + "</label></td>"
            + "<td></td>"
            + "<td><label id='label2' for='search-input-2'>" + label2 + "</label></td>"
            + "<td></td>"
            + "<td><label id='label3' for='selected-items'>" + label3 + "</label></td>"
            + "</tr>";
        searchRow = "<tr style='text-align:left'>"
            + "<td><input id='search-input-1' type='text' class='form-control' style='" + inputStyle + "'></td>"
            + "<td></td>"
            + "<td><input id='search-input-2' type='text' class='form-control' style='" + inputStyle + "'></td>"
            + "<td></td>"
            + "<td></td>"
            + "</tr>";
        selectRow = "<tr style='vertical-align:top'>"
            + "<td><select id='selectable-items-1' multiple='multiple' class='form-control' style='" + selectStyle + "'></select></td>"
            + "<td style='vertical-align:middle'>"
            + "<button id='select-button-1' style='" + buttonStyle + "'>--</button>"
            + "</td>"
            + "<td><select id='selectable-items-2' multiple='multiple' class='form-control' style='" + selectStyle + "'></select></td>"
            + "<td style='vertical-align:middle'>"
            + "<button id='select-button-2' style='" + buttonStyle + "'>>></button><br>"
            + "<button id='deselect-button' style='" + buttonStyle + "'><<</button>"
            + "</td>"
            + "<td><select id='selected-items' multiple='multiple' class='form-control' style='" + selectStyle + "'></select></td>"
            + "</tr>";
    }
    $(obj).append(labelRow + searchRow + selectRow);
    if (dataOptions) {
        if (dataOptions.singleClickSelect) {
            // NOTE(review): this binds to options present at build time, but
            // the lists are filled by fillData() below — the handler may be
            // attached to nothing; it also matches every <select> on the
            // page, not just this widget. Confirm intended behaviour.
            $("select").find("option").click(function () {
                selectItem(this);
            });
        }
    }
    bindEventHandler(dataOptions);
    // Initial population of both source lists, when URLs were provided.
    if (dataOptions.param1.url) {
        fillData($('select[id="selectable-items-1"]'), dataOptions.param1.url, "", dataOptions.param1.method);
    }
    if (dataOptions.param2.url) {
        fillData($('select[id="selectable-items-2"]'), dataOptions.param2.url, "", dataOptions.param2.method);
    }
}
/**
* 向Select控件填充值
* @param selectObj select对象
* @param url Ajax请求地址
* @param params Ajax请求参数
* @param method Ajax请求方法
*/
function fillData(selectObj, url, params, method) {
    var data = [];
    $.ajax({
        url: url,
        data: params,
        type: method,
        // Synchronous on purpose: callers rely on the select being populated
        // when this function returns.
        async: false,
        success: function (result) {
            // Guard against a null/empty response body so the loop below
            // cannot throw on `data.length`.
            data = result || [];
        },
        error: function () {
            // On request failure leave the select empty instead of failing silently
            // with an uninitialized payload.
            data = [];
        }
    });
    var select = $(selectObj);
    select.empty();
    var options = "";
    for (var i = 0; i < data.length; i++) {
        if (!data[i].id && !data[i].name) {
            // Plain string entries: the value itself is used as text/title, empty value attr.
            options += "<option value='' title='" + data[i] + "'>" + data[i] + "</option>";
        } else {
            // Object entries: prefer `name`, fall back to `model` for the display text.
            var textValue = data[i].name ? data[i].name : data[i].model;
            options += "<option value='" + data[i].id + "' title='" + textValue + "'>" + textValue + "</option>";
        }
    }
    select.append(options);
}
// Toggles single-click selection for an option in column 1 or column 2.
// Tracks chosen values in the module-level arrays selectedItems1/selectedItems2
// and re-applies the `selected` property so multi-select survives re-renders.
// NOTE(review): relies on globals selectedItems1/selectedItems2 declared
// elsewhere in this file — confirm they are initialized before first click.
function selectItem(obj) {
    var select = $(obj).parent();
    if ("selectable-items-1" == select.attr('id')) {
        var existsIndex1 = $.inArray($(obj).val(), selectedItems1);
        if (existsIndex1 == -1) {//-1 means the value is not tracked yet -> add it
            selectedItems1.push($(obj).val());
        } else {
            // Already tracked -> remove it and clear the option's selected state.
            selectedItems1.splice(existsIndex1, 1);
            $(obj).prop("selected", false);
        }
        // Re-select every tracked option (plain clicks would otherwise clear them).
        for (var i = 0; i < selectedItems1.length; i++) {
            select.find("option[value='" + selectedItems1[i] + "']").prop("selected", true);
        }
    } else if ("selectable-items-2" == select.attr('id')) {
        var existsIndex2 = $.inArray($(obj).val(), selectedItems2);
        if (existsIndex2 == -1) {//-1 means the value is not tracked yet -> add it
            selectedItems2.push($(obj).val());
        } else {
            selectedItems2.splice(existsIndex2, 1);
            $(obj).prop("selected", false);
        }
        for (var j = 0; j < selectedItems2.length; j++) {
            select.find("option[value='" + selectedItems2[j] + "']").prop("selected", true);
        }
    }
}
/**
* 绑定事件处理器
*/
/**
 * Binds event handlers for the multi-select widget: move/restore buttons,
 * the two search boxes, and (optionally) the column-1 -> column-2 linkage.
 * @param dataOptions widget options; uses `linkage` and `param2.*`
 */
function bindEventHandler(dataOptions) {
    // "Select" button: move options chosen in column 2 into column 3.
    // The source option is hidden (not removed) so deselect can restore it.
    $("button[id='select-button-2']").click(function () {
        var select = $("select[id='selectable-items-2']");
        var selectedOptions = select.find("option:selected");
        for (var i = 0; i < selectedOptions.length; i++) {
            selectedOptions[i].style.display = 'none';
            $(selectedOptions[i]).prop("selected", false);
            $("select[id='selected-items']").append("<option value='" + $(selectedOptions[i]).val() + "'>" + $(selectedOptions[i]).text() + "</option>");
        }
    });
    // "Deselect" button: remove options from column 3 and un-hide them in column 2.
    $("button[id='deselect-button']").click(function () {
        var select = $("select[id='selected-items']");
        var selectedOptions = select.find("option:selected");
        for (var i = 0; i < selectedOptions.length; i++) {
            $("select[id='selectable-items-2']").find("option[value='" + $(selectedOptions[i]).val() + "']").css("display", "");
            $(selectedOptions[i]).remove();
        }
    });
    // The two search boxes share identical filtering logic; bind via a helper.
    bindSearchFilter('search-input-1', 'selectable-items-1');
    bindSearchFilter('search-input-2', 'selectable-items-2');
    if (dataOptions.linkage) {
        // Linkage mode: reload column 2 whenever the selection in column 1 changes.
        $("select[id='selectable-items-1']").on("change", function () {
            var options = $(this).children("option:selected");
            var params = [];
            for (var i = 0; i < options.length; i++) {
                // Prefer the option's value attribute, fall back to its text.
                var value = $(options[i]).attr("value");
                if (value) {
                    params.push(value);
                } else {
                    params.push($(options[i]).text());
                }
            }
            if (params.length > 0) {
                fillData(
                    $("select[id='selectable-items-2']"),
                    dataOptions.param2.conditionUrl,
                    dataOptions.param2.conditionParamName + "=" + params.join(","),
                    dataOptions.param2.method);
            } else {
                // BUGFIX: the original call omitted the `params` argument here,
                // so the HTTP method string landed in the params slot and
                // `method` was undefined. Pass an explicit empty params value.
                fillData(
                    $("select[id='selectable-items-2']"),
                    dataOptions.param2.url,
                    "",
                    dataOptions.param2.method);
            }
        });
    }
}

/**
 * Binds a case-insensitive substring filter from a text input to a select.
 * An empty input restores visibility of all options.
 * @param inputId  id of the text input that drives the filter
 * @param selectId id of the select whose options are shown/hidden
 */
function bindSearchFilter(inputId, selectId) {
    $("input[id='" + inputId + "']").on("input", function () {
        var inputText = $(this).val().trim();
        var options = $("select[id='" + selectId + "']").find("option");
        for (var i = 0; i < options.length; i++) {
            if (!inputText || options[i].text.toLowerCase().indexOf(inputText.toLowerCase()) != -1) {
                options[i].style.display = "";
            } else {
                options[i].style.display = "none";
            }
        }
    });
}
/**
* 获取select选中值
* @param select selectable-items-1表示第一列,selectable-items-2表示第二列,selected-items表示第三列
*/
/**
 * Returns the chosen values of one widget column as a comma-joined string.
 * @param select 'selectable-items-1' = column 1, 'selectable-items-2' = column 2,
 *               'selected-items' = column 3 (all of its options count as chosen)
 */
function getMultiSelectData(select) {
    var values = [];
    if (select == 'selectable-items-1' || select == 'selectable-items-2') {
        // jQuery .val() on a multi-select yields an array, or null when empty.
        values = $('select[id="' + select + '"]').val();
    } else if (select == 'selected-items') {
        // Column 3 holds only moved-in options, so every option is a value.
        var opts = $('select[id="selected-items"]').find('option');
        for (var idx = 0; idx < opts.length; idx++) {
            values.push(opts[idx].value);
        }
    }
    // Null-safe join: a null .val() result collapses to the empty string.
    return values ? values.join(",") : "";
}
/**
* 获取select选中值
* @param select select对象
*/
// Returns the values of a select element as a comma-joined string.
// For the 'selected-items' column every option counts (they were all moved in);
// for any other select only the currently highlighted options count.
function getSelectData(select) {
    var values = [];
    if($(select).attr("id") == 'selected-items') {
        var selectedOptions = $(select).find('option');
        for (var k = 0; k < selectedOptions.length; k++) {
            values.push(selectedOptions[k].value);
        }
    } else {
        // .val() on a multi-select returns an array, or null when nothing is chosen.
        values = $(select).val();
    }
    // The ternary guards the null case from .val().
    return values ? values.join(",") : "";
}<file_sep>$.extend($.fn.datagrid.defaults, {
fitColumns : true,
nowrap : false,
pagination : true
});
// Overrides the EasyUI datetimebox default formatter to "y-M-d h:m:s".
// NOTE(review): wrapping the assignment in $.extend(...) is a misuse — with a
// single argument $.extend merges that object's properties into jQuery itself;
// the assignment alone is what actually configures the formatter.
// NOTE(review): fields are not zero-padded (e.g. "2019-3-5 9:4:7"); if padded
// output is wanted, confirm the matching parser accepts it first.
$.extend($.fn.datetimebox.defaults.formatter = function(date){
    var y = date.getFullYear();
    var M = date.getMonth()+1;
    var d = date.getDate();
    var h = date.getHours();
    var m = date.getMinutes();
    var s = date.getSeconds();
    return y+'-'+M+'-'+d+' '+h+":"+m+":"+s;
    // return y+'-'+M+'-'+d; (alternative date-only format, kept for reference)
})<file_sep># examtation
# 高校试卷信息管理系统
- 教学秘书给出大纲
- 出卷人在本地出好试卷,然后上传至系统。(word、pdf) if word then在服务端保存两份(word+pdf) 如果只是pdf,只在服务端保存pdf
- ../files/doc(存word) ../files/pdf(存pdf文件)
- 可以通过虚拟路径引用文件,配置tomcat server.xml
- 所有角色预览试卷的时候预览格式都为pdf,通过src在html引用
###user表中可以加入院系的标示符 tinyint类型 表示院系 is_secretary是否为教学秘书 默认不是
##Role.leader = 0, Role.creator = 1, Role.checker = 2, Role.secretary = 3
###`examtation_user:String id, String email,String password(md5), tinyint role(枚举), String detail_id(对应的试卷详情), timestamp create_time,`
###`examtation_detail:String id, String doc_path, String pdf_path, timestamp last_modify, timestamp create_time, String feedback_id, boolean is_hidden(试卷是否可见),date is_visible(当前时间到规定时间,阅卷人可以看到试卷), String info_id `
###`examtation_info:String id, boolean is_open(是否开卷), tinyint type(试卷类型 枚举), String textbook(教材), String object(面向对象), text outline(大纲)`
###`examtation_process: String id, String detail_id, tinyint stage(到了哪个阶段), boolean is_finish(出卷流程是否完成)`
###`examtation_feedback:String id,String user_id, tinyint role, String detail_id, timestamp create_time`
###`feedback_contact:String id,String content,boolean result, String doc_path(修改后的doc文件的地址),timestamp feedback_time(反馈时间)`
###`resource(该用户对于某个卷子的权限):String id, String user_id, tinyint role, String detail_id`资源表 一个user可以对应多个角色
<file_sep>package com.ccnu.bishe.examtation.dto.basic;
public class JResult extends JObject {

    /** Human-readable message describing the outcome. */
    private String msg;

    /** Creates a successful result with no message. */
    public JResult() {
        this(true, null);
    }

    /** Creates a successful result carrying the given message. */
    public JResult(String msg) {
        this(true, msg);
    }

    /**
     * Creates a result with an explicit success flag and message.
     *
     * @param success whether the operation succeeded
     * @param msg     message to expose to the caller (may be null)
     */
    public JResult(boolean success, String msg) {
        this.msg = msg;
        this.setSuccess(success);
    }

    public String getMsg() {
        return msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }
}
<file_sep>package com.ccnu.bishe.examtation.dao;
import com.ccnu.bishe.examtation.dao.impl.BaseDaoImpl;
import com.ccnu.bishe.examtation.db.BaseMapper;
import com.ccnu.bishe.examtation.dto.FeedbackDto;
import com.ccnu.bishe.examtation.mapper.FeedBackMapper;
import com.ccnu.bishe.examtation.model.FeedBack;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
/**
* Created by Joban on 2017/2/23.
*/
@Repository
public class FeedbackDao extends BaseDaoImpl<FeedBack> implements IBaseDao<FeedBack> {
    // MyBatis mapper injected by Spring; all persistence calls delegate to it.
    @Autowired
    private FeedBackMapper feedBackMapper;
    /** Exposes the underlying mapper to the generic base DAO. */
    @Override
    public BaseMapper<FeedBack> getMapper() {
        return feedBackMapper;
    }
    /** Inserts only the non-null fields of the given feedback record. */
    public void insertSelective(FeedBack feedBack) {
        feedBackMapper.insertSelective(feedBack);
    }
    /**
     * Lists feedback rows matching the given query params.
     * NOTE(review): param keys are defined by the mapper XML — presumably
     * include an info id plus paging values; confirm against the mapper.
     */
    public List<FeedbackDto> getFeedbackListByInfoId(Map<String, Object> params) {
        return feedBackMapper.getFeedbackListByInfoId(params);
    }
    /** Counts the rows the query above would return (for paging). */
    public long getFeedBackListCount(Map<String, Object> params) {
        return feedBackMapper.getFeedBackListCount(params);
    }
}
<file_sep>log4j.rootLogger=DEBUG,stdout,Dailylog
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %p [%c] %m%n
log4j.appender.Dailylog=org.apache.log4j.DailyRollingFileAppender
log4j.appender.Dailylog.File=D:/logstash.log
log4j.appender.Dailylog.DatePattern='_'yyyy-MM-dd'.log'
log4j.appender.Dailylog.Append=true
log4j.appender.Dailylog.Encoding=UTF-8
log4j.logger.org.springframework=DEBUG
log4j.logger.net.paoding=DEBUG
log4j.logger.net.paoding.rose.web.controllers.roseInfo=DEBUG
<file_sep>package com.ccnu.bishe.examtation.service;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.ccnu.bishe.examtation.dao.AdminDao;
import com.ccnu.bishe.examtation.model.Admin;
@Service
public class AdminService {
    @Autowired
    private AdminDao adminDao;
    /**
     * Looks up an admin by login credentials; returns null when no match.
     * NOTE(review): per the project README the stored password is an MD5
     * hash — confirm the caller hashes before invoking this.
     */
    public Admin findAdminByEmailAndPassword(String email, String password){
        return adminDao.findAdminByEmailAndPassword(email, password);
    }
}
<file_sep>/**
* Author: deng.zhang
* Time: 2014-10-17 14:31
*/
// Table type taken from the page URL's query string.
var tableType = $.urlParam('tableType');
// Intentional globals — presumably consumed by the shared layout to highlight
// the active menu entries; confirm against the common header scripts.
SelectedTopMenu = "PUSH管理";
SelectedLeftMenu = "消息管理";
// Maps locale codes to their Chinese display names for the translation UI.
var languageDict = {
    'en': '英语',
    'ja': '日语',
    'de': '德语',
    'fr': '法语',
    'ru': '俄语',
    'zh_TW': '中文繁体(台湾)',
    'zh_HK': '中文繁体(香港)'
};
$(function () {
$("#add-message-translation-window").load('module/languageAdd.htm');
$('#app-select-window').load('module/appSelectPanel.htm', function () {
$.parser.parse();
});
$('#country-select-window').load('module/countrySelectPanel.htm', function () {
$.parser.parse();
});
$('#distributor-select-window').load('module/distributorSelectPanel.htm', function () {
$.parser.parse();
});
$('#device-select-window').load('module/deviceSelectPanel.htm', function () {
$.parser.parse();
});
$('#subject-select-window').load('module/subjectSelectPanel.htm', function () {
$.parser.parse();
});
$("#add-message-window").load('module/messageAdd.htm', function () {
$.parser.parse();
});
initSearchCondition();
$('#create-message-btn').click(function () {
clearForm();
$('#message-tip').html('添加消息');
$("#add-message-window").modal("show");
});
$("#delete-message-btn").click(function () {
var rows = $('#message-list').datagrid('getChecked');
if (rows.length == 0) {
bootbox.alert("请先选择需要删除的消息!", function () {
});
return;
}
if (confirm("确认删除已选择的消息吗?")) {
var ids = [];
$.each(rows, function (i, row) {
ids.push(row.id);
});
$.post('../push/deleteMessageByIds', {ids: ids.join(",")}, function (result) {
if (result.success) {
bootbox.alert("删除成功!", function () {
});
$('#message-list').datagrid('reload');
} else {
bootbox.alert("删除失败:" + result.msg, function () {
});
}
});
}
});
//查找应用按钮事件绑定
$('#search-message-btn').click(function () {
var searchForm = $("form[id='search-message-form']");
var params = {
"params['title']": searchForm.find("input[id='title']").val(),
"params['region']": searchForm.find("select[id='region']").combobox("getValue"),
"params['country']": searchForm.find("select[id='country']").combobox("getValue"),
"params['operator']": searchForm.find("select[id='operator']").combobox("getValue"),
"params['type']": searchForm.find("select[id='message-type']").combobox("getValue")
};
$('#message-list').datagrid('load', params);
});
$('#clear-condition-btn').click(function () {
var searchForm = $("form[id='search-message-form']");
searchForm.find("input[id='title']").val('');
initSearchCondition();
});
$('#message-list').datagrid({
url: "../push/getMessageByPage",
queryParams: {
page: 0
},
pagination: true,
loadMsg: '数据加载中...',
columns: [
[
{checkbox: true},
{field: "id", hidden: true},
{field: "title", title: "标题", align: "center", width: 90},
{field: "content", title: "内容", align: "center", width: 90},
{field: "priority", title: "优先级", align: "center", width: 50, editor: 'numberbox'},
{field: "description", title: "描述", align: "center", width: 90},
{field: "createdOn", title: "创建时间", align: "center", width: 90},
{field: "pushTime", title: "发送时间", align: "center", width: 90},
{field: "pushedOn", title: "发送完成时间", align: "center", width: 90},
{field: "status", title: "状态", align: "center", width: 50},
{field: "action", title: "操作", align: "center", width: 130, formatter: function (value, row, index) {
var push = '<a href="#" class="btn btn-xs" style="margin-right:5px;background-color:lightgray;color: #808080;cursor:auto ">推送</a>';
if (row.pushedOn == "") {
push = '<a href="#" onclick="push(' + row.id + ')" class="btn btn-xs btn-warning" style="margin-right:5px;" >推送</a>';
}
var detail = '<a href="#" onclick="edit(' + index + ',\'detail\')" class="btn btn-xs btn-primary" style="margin-right:5px;" >详情</a>';
var edit = '<a href="#" onclick="edit(' + index + ',\'edit\')" class="btn btn-xs btn-info" style="margin-right:5px;" >修改</a>';
if (row.status == "已完成") {
edit = '<a href="#" class="btn btn-xs" style="margin-right:5px;background-color:lightgray;color: #808080;cursor:auto ">修改</a>';
}
var del = '<a href="#" onclick="del(' + row.id + ')" class="btn btn-xs btn-danger" style="margin-right:5px;" >删除</a>';
return push + detail + edit + del;
}}
]
]
});
});
/**
* 初始化查询条件
*/
function initSearchCondition() {
var $searchMessageForm = $("form[id='search-message-form']");
//加载区域
$.ajax({
url:'../region/all',
method:'get',
success: function (data) {
if (!data) {
data = [];
}
data.unshift({id:'all',name:'全部区域',selected:true});
$searchMessageForm.find("select[id='region']").combobox({
valueField: 'id',
textField: 'name',
multiple: false,
panelHeight: '150',
data:data,
onChange: function (n) {
//重新加载所选区域内的国家
$searchMessageForm.find("select[id='country']").combobox('clear');
var url = '';
if ('all' == n) {
url = '../country/getAllCountrys';
} else {
url = '../country/getCountriesByRegionIds?ids=' + n;
}
$.ajax({
url:url,
method:'get',
success: function (data) {
if (!data) {
data = [];
}
data.unshift({id:'all',name:'全部国家',selected:true});
$searchMessageForm.find("select[id='country']").combobox({
valueField: 'id',
textField: 'name',
multiple: false,
panelHeight: '150',
data:data
});
}
});
}
});
}
});
//加载国家
$.ajax({
url: '../country/getAllCountrys',
method: 'get',
success: function (data) {
if (!data) {
data = [];
}
data.unshift({id:'all',name:'全部国家',selected:true});
$searchMessageForm.find("select[id='country']").combobox({
valueField: 'id',
textField: 'name',
multiple: false,
panelHeight: '150',
data:data
});
}
});
//加载运营商
$.ajax({
url: '../distributor/getAllDistributors',
method: 'get',
success: function (data) {
if (!data) {
data = [];
}
data.unshift({id:'all',name:'全部运营商',selected:true});
$searchMessageForm.find("select[id='operator']").combobox({
valueField: 'id',
textField: 'name',
multiple: false,
panelHeight: '150',
data:data
});
}
});
//加载消息类型
$searchMessageForm.find("select[id='message-type']").combobox({
valueField: 'value',
textField: 'label',
multiple: false,
editable: false,
panelHeight: '150',
data:[
{label:'全部类别',value:'all',selected:true},
{label:'单应用推荐',value:'singleAppRecommend'},
{label:'多应用推荐',value:'multiAppsRecommend'},
{label:'单应用升级',value:'singleAppUpdate'},
{label:'多应用升级',value:'multiAppsUpdate'},
{label:'活动推荐',value:'activityRecommend'}
]
});
}
/**
 * Deletes a single message after user confirmation, then reloads the grid.
 * @param messageId id of the message to delete
 */
function del(messageId) {
    // Early-out when the user cancels the confirmation dialog.
    if (!confirm("请确定是否删除该消息?")) {
        return;
    }
    var requestData = {messageId: messageId};
    $.post('../push/deleteMessageById', requestData, function () {
        $('#message-list').datagrid('reload');
    });
}
// Triggers server-side delivery of a message after user confirmation,
// then reloads the grid so the status/pushed-on columns refresh.
function push(messageId) {
    if (confirm("请确认是否发送消息?")) {
        $.post('../push/pushMessage', {messageId: messageId}, function () {
            $('#message-list').datagrid('reload');
        });
    }
}<file_sep>package com.ccnu.bishe.examtation.mapper;
import java.util.Map;
import com.ccnu.bishe.examtation.annotation.Mapper;
import com.ccnu.bishe.examtation.db.BaseMapper;
import com.ccnu.bishe.examtation.model.Admin;
@Mapper
public interface AdminMapper extends BaseMapper<Admin> {
    /**
     * Finds an admin by credentials.
     * NOTE(review): expected map keys are defined by the mapper XML —
     * presumably "email" and "password"; confirm against the XML statement.
     */
    Admin findAdminByEmailAndPassword(Map<String, String> map);
}
<file_sep>$(function(){
// bootbox 区域设置
bootbox.setDefaults({
locale: "zh_CN"
});
//++ 广告&资源
// 全选或取消全选
$.selectAllToggle("ad-res-check", "ad-res-check-item");
// 删除app
$("#sf-del-sw").click(function(){
var result = $.checkboxVal("ad-res-check-item");
if(result.length > 0){
bootbox.confirm("确实要删除所选项到回收站吗?", function(result) {
// ----------
});
}else{
bootbox.alert("请先选择App,然后才能进行删除!", function() {
// ----------
});
}
});
// app投放
$("#sf-throw-sw").click(function(){
// 获取广告&资源checkbox的value
var result = $.checkboxVal("ad-res-check-item");
if(result.length > 0){
$("#app-throw").modal('show');
}else{
bootbox.alert("请先选择App,然后才能进行投放!", function() {
// ----------
});
}
});
// app下架
$("#sf-down-sw").click(function(){
var result = $.checkboxVal("ad-res-check-item");
if(result.length > 0){
bootbox.confirm("选择下架后,你投放的资源将被移除,是否继续?", function(result) {
// ----------
});
}else{
bootbox.alert("请先选择App,然后才能进行下架操作!", function() {
// ----------
});
}
});
//++ 回收站
// 全选或取消全选
$.selectAllToggle("cycle-check", "cycle-check-item");
// 还原
$("#sf-cy-re-sw").click(function(){
var result = $.checkboxVal("cycle-check-item");
if(result.length > 0){
bootbox.confirm("确定还原选中的选项吗?", function(result) {
// ----------
});
}else{
bootbox.alert("请先选择App,然后才能进行还原操作!", function() {
// ----------
});
}
});
// 从回收站删除
$("#sf-cy-del-sw").click(function(){
var result = $.checkboxVal("cycle-check-item");
if(result.length > 0){
bootbox.confirm("删除后将不能再找回,确定要删除吗?", function(result) {
// ----------
});
}else{
bootbox.alert("请先选择App,然后才能进行删除操作!", function() {
// ----------
});
}
});
//+ app上传模块
// icon 选择
$("#btn-icon-select").click(function(){
var $target = $(this).parent().parent().find("input[type='text']");
$("#icon-file-input").click();
$("#icon-file-input").change(function(){
$target.val($(this).val());
});
;
});
// Banner图片
$("#btn-banner-select").click(function(){
var $target = $(this).parent().parent().find("input[type='text']");
$("#banner-file-input").click();
$("#banner-file-input").change(function(){
$target.val($(this).val());
});
});
// 默认截图选择
$("#btn-sfc-select-1").click(function(){
var $target = $(this).parent().parent().find("input[type='file']");
var $targetShow = $(this).parent().parent().find("input[type='text']");
$target.click();
$target.change(function(){
$targetShow.val($(this).val());
});
});
var sfcNum = 2;
//
$("#btn-sfc-add").click(function(){
var id = $.random(1000, 9999);
var $addedEle = $("<tr style='background-color:#ddd'><td>截图 "+sfcNum+"</td>"+
"<td><input type='file' class='hidden' /><input type='text' class='form-control input-sm' placeholder='://' /></td>"+
"<td><input type='button' class='btn btn-default btn-xs btn-sfc-select' value='选择' /> "+
"<input type='button' class='btn btn-warning btn-xs btn-sfc-del' value='删除' /></td>"+
"<td></td></tr>");
$("#insert-before").before($($addedEle));
$addedEle.find(".btn-sfc-select").click(function(){
$addedEle.find("input[type='file']").click();
$addedEle.find("input[type='file']").change(function(){
$addedEle.find("input[type='text']").val($(this).val());
});
});
$addedEle.find(".btn-sfc-del").click(function(){
$addedEle.remove();
sfcNum--;
});
sfcNum++;
});
// 软件包
$("#btn-sfpak-select").click(function(){
var $target = $(this).parent().parent().find("input[type='text']");
$("#sfpak-file-input").click();
$("#sfpak-file-input").change(function(){
$target.val($(this).val());
});
});
// app投放生成树
// 测试数据
var data = [{
text:"Banner",
id:"1",
children:[{
text:"首页",
id:"2"
},{
text:"分类",
id:"3"
},{
text:"专题",
id:"4"
}]
},
{
text:"Subject",
id:"5",
children:[{
text:"悦读越快乐",
id:"6"
}]
},
{
text:"Category",
id:"7",
children:[{
text:"影音",
id:"8"
},{
text:"生活",
id:"9"
},{
text:"社交",
id:"10"
},{
text:"系统",
id:"11"
},{
text:"购物",
id:"12"
},{
text:"阅读",
id:"13"
}
]
}];
$("#throw-tree").checkboxTree(data);
// app修改
$(".app-modify").click(function(){
var id = $(this).parent().parent().find("input[type='checkbox']").val();
// $.post("", {id:id}, function(data){ });
$("#app-modify").modal("show");
});
});<file_sep>package com.ccnu.bishe.examtation.utils;
import com.ccnu.bishe.examtation.api.WebResultData;
import com.ccnu.bishe.examtation.enu.ErrorCode;
/**
 * Static factory methods for building {@link WebResultData} API responses
 * whose status/msg mirror an {@link ErrorCode}.
 */
public class WebResultUtils {

    /** Utility class: not meant to be instantiated. */
    private WebResultUtils() {
    }

    /** @return a result representing success, with no payload. */
    public static WebResultData buildSucResult() {
        return buildResult(ErrorCode.suc);
    }

    /** @return a result representing failure, with no payload. */
    public static WebResultData buildFailureResult() {
        return buildResult(ErrorCode.failure);
    }

    /**
     * @param data payload attached to a successful result
     * @return a success result carrying {@code data}
     */
    public static WebResultData buildSucResult(Object data) {
        WebResultData webResultData = buildResult(ErrorCode.suc);
        webResultData.setData(data);
        return webResultData;
    }

    /**
     * Builds a result whose status and message are copied from the error code.
     *
     * @param errorCode source of the status code and description
     * @return a populated {@link WebResultData} with no payload
     */
    public static WebResultData buildResult(ErrorCode errorCode) {
        WebResultData webResultData = new WebResultData();
        webResultData.setStatus(errorCode.code().toString());
        webResultData.setMsg(errorCode.description());
        return webResultData;
    }
}
<file_sep>package com.ccnu.bishe.examtation.enu;
/**
* Created by Joban on 2017/1/20.
*/
public enum SchoolCode {
    CS(0, "计算机学院"),
    SE(1, "软件学院"),
    EE(2, "信息学院");

    // NOTE(review): mutable fields with public setters are unusual for an enum;
    // the setters are kept only for backward compatibility with existing callers.
    private Integer code;
    private String description;

    SchoolCode(Integer code, String description) {
        this.code = code;
        this.description = description;
    }

    public Integer getCode() {
        return code;
    }

    public void setCode(Integer code) {
        this.code = code;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * Looks up the display name for a numeric school code.
     *
     * @param code numeric school code (0 = CS, 1 = SE, 2 = EE)
     * @return the matching description, or {@code null} when no constant matches
     */
    public static String getDescByCode(int code) {
        for (SchoolCode c : SchoolCode.values()) {
            // getCode() returns Integer; comparing against an int unboxes safely here.
            if (c.getCode() == code) {
                return c.description;
            }
        }
        return null;
    }
    // The original toString() override only called super.toString() and was
    // removed as redundant — the inherited implementation is identical.
}
<file_sep>package com.ccnu.bishe.examtation.mapper;
import com.ccnu.bishe.examtation.annotation.Mapper;
import com.ccnu.bishe.examtation.db.BaseMapper;
import com.ccnu.bishe.examtation.dto.FeedbackDto;
import com.ccnu.bishe.examtation.model.FeedBack;
import java.util.List;
import java.util.Map;
@Mapper
public interface FeedBackMapper extends BaseMapper<FeedBack> {
    /** Loads one feedback row by its primary key, or null when absent. */
    FeedBack selectByPrimaryKey(String id);
    // Query params are defined by the mapper XML — presumably an info id plus
    // paging values; confirm against the XML statements.
    List<FeedbackDto> getFeedbackListByInfoId(Map<String, Object> params);
    /** Row count for the list query above (used for paging). */
    long getFeedBackListCount(Map<String, Object> params);
}<file_sep>
// jQuery 类继承、需要较高jQuery版本支持
// @author jokee.wu
// @date 2013.10~
(function($){
$.ajaxSetup({
cache:false,
complete: function(jqXHR, textStatus){
try{
var errorCode = $.parseJSON(jqXHR.responseText).errorCode;
if(errorCode == -1){
alert($.parseJSON(jqXHR.responseText).errorMsg);
}
}catch(e){
}
}
});
$.extend({
// ------------------------------ 验证模块 ----------------------------------
// 检测是否为数字
isNumber:function(number){
var reg = /^\d+$/;
return reg.test(number);
},
// 检测是否为一个邮箱地址
isEmail:function(email){
var reg = /^\w+([-\.]\w+)*@\w+([\.-]\w+)*\.\w{2,4}$/;
return reg.test(email);
},
// 检测是否为一个有效的电话号码
isTel:function(tel){
var reg = '';
return false;
},
//------------------------------- 基本功能函数 ------------------------------
// 获取一个随机数,可以是一个区间
random:function(startNumber, endNumber){
// 没有传递任何参数,返回[0, 1)之间任何一个数
if(arguments.length == 0){
return Math.random();
}
// 传递了一个参数,返回[0, number]之间任何一个数
else if(arguments.length == 1){
return Math.random()*arguments[0];
}
// 传递了一个参数,返回[startNumber, endNumber]之间任何一个数
else if(arguments.length == 2){
return Math.random()*(endNumber-startNumber)+startNumber;
}
// 参数错误
else{
console.log("function $.random(), params error!");
}
},
// 将一个数转化成整数
parseInt:function(number){
return parseInt(number);
},
// 向上取整
ceil:function(number){
return Math.ceil(number);
},
// 向下取整
floor:function(number){
return Math.floor(number);
},
// 数据补零
fillZero:function(number, long){
var strNum = number.toString();
var strLen = strNum.length;
var times = long - strLen;
for(var i = 0; i < times; i++){
strNum = "0"+strNum;
}
return strNum;
},
// 当前时间
date:function(){
return new Date();
},
// 获取一个月天数
xmonthDays:function(year, month){
var date = new Date();
var days = 0;
// 如果没有传递参数返回当前月的天数
if(arguments.length == 0){
return date.getDate();
}
if(month == 12){
date.setFullYear(year+1);
date.setMonth(0);
date.setDate(0);
return date.getDate();
}else{
date.setFullYear(year);
date.setMonth(month);
date.setDate(0);
return date.getDate();
}
},
// 拆分一个数字
spiltNumber:function(number, long, dir){
var strNum = number.toString();
var strLen = strNum.length;
var result = "";
// 拆分从高位开始
if(dir == 'H'){
result = strNum.substr(0, long);
}
// 如果dir参数没有传递,拆分从低位开始
else{
result = strNum.substr(strLen-long, strLen - 1);
}
return result;
},
//------------------------------------UI模块------------------------------------
// 产生一种随机颜色
randColor:function(){
var colorElement = ['0','1','2','3','4','5','6','7','8','9','A','B','C','D','E','F'];
var color = "#";
for(var i = 0; i < 6; i++){
var r = this.parseInt(this.random(0, 15));
color+=colorElement[r];
}
return color;
},
// 全选以及取消全选
selectAllToggle:function(switchId, selectItemClass){
$("#"+switchId).click(function(){
var checked = $(this).is(":checked");
$("."+selectItemClass).prop("checked", checked);
});
},
// 获取窗口大小
windowSize:function(){
return {
width:$(window).width(),
height:$(window).height()
};
},
// 重定位
reload:function(href){
var cureentHref = location.href;
// 没有参数就重新加载当前页面
if(arguments.length == 0){
location.href = cureentHref;
}else{
location.href = href;
}
},
//获取网页链接锚点
anchor:function(){
var chref = location.hash;
// -------------------------------
var reg = /^[#|\?]\w*/;
// 获取锚点
var anc = chref.match(reg);
// 没有锚点,阻止程序运行
if(anc==null||anc==""){
return "#";
}
return anc[0];
},
// 实现页面刷新返回锚点
setPos:function(tabClass, tabCtClass){
var lhref = location.hash;
var reg1 = /^#[\w|-]*\d/;
// 获取锚点
var anc = reg1.exec(lhref);
// 没有锚点,阻止程序运行
if(anc==null||anc==""){
return;
}
// 取得数字
var reg2 = /[0-9]*$/;
var num = reg2.exec(anc);
var ctId = "#"+tabCtClass+"-"+num;
// 显示内容
$("."+tabCtClass).removeClass("show");
$("."+tabCtClass).addClass("hidden");
$(ctId).removeClass("hidden");
$(ctId).addClass("show");
// 激活tab按钮样式
var tbId = "#"+tabClass+"-"+num;
$("."+tabClass).removeClass("active");
$(tbId).addClass("active");
},
// tab之间切换
tabToggle:function(tabClass, tabCtClass){
$("."+tabClass).each(function(){
$(this).click(function(){
var thisId = $(this).attr("id");
// 取得数字
var reg = /\d*$/;
var num = reg.exec(thisId);
// 对应内容区Id
var ctId = "#"+tabCtClass+"-"+num;
// 显示对应内容
$("."+tabCtClass).removeClass("show")
.addClass("hidden");
$(ctId).addClass("show")
.removeClass("hidden");
// 激活按钮样式
$("."+tabClass).removeClass("active");
$(this).addClass("active");
});
});
},
// 获取一类checkbox的值
checkboxVal:function(chkClass){
var result = [];
$("."+chkClass).each(function(){
var checked = $(this).is(":checked");
if(checked){
result.push($(this).val());
}
});
return result;
},
sendAjax:function(param){
var defaultParams = {
url:"/",
type:"POST",
data:{},
callback:""};
var params = $.extend(defaultParams, param||{});
$.ajax({
url: params.url,
type: params.type,
data: JSON.stringify(params.data),
contentType: "application/json",
async: params.sync,
success: function(data){
//var data = eval("("+data+")");
if (typeof params.callback == "function"){
params.callback(data);
}
else{
if(data.status == "success"){
if($.trim(data.msg) != ""){
bootbox.alert(data.msg,function(){
$.reload();
})
}
else{
$.reload();
}
}
else{
bootbox.alert(data.msg);
}
}
},
error:function(){
bootbox.alert("error!");
}
});
},
// 获取url参数
urlParam:function(paras){
var url = location.href;
var paraString = url.substring(url.indexOf("?")+1,url.length).split("&");
var paraObj = {}
for (i=0; j=paraString[i]; i++){
paraObj[j.substring(0,j.indexOf("=")).toLowerCase()] = j.substring(j.indexOf("=")+1,j.length);
}
var returnValue = paraObj[paras.toLowerCase()];
if(typeof(returnValue)=="undefined"){
return "";
}else{
return returnValue;
}
},
//检查是否是小数
isDecimal: function(str){
var re = /^[\d]+[\.][\d]+$/;
if (! re.test(str)){
return false;
}
return true;
},
//检查是否是整数
isInteger: function(str){
var re = /^[0-9]\d*$/;
if (! re.test(str)){
return false;
}
return true;
},
//检查是否是空或多个空格
isNull: function(str){
var flag = false;
if ("" == str){
flag = true;
}
else{
var re = /^[ ]+$/;
flag = re.test(str);
}
return flag;
},
//检查是否满足字符长度,默认最小2,最大是15
strLength: function(str){
var min = (arguments[1] + 1) ? arguments[1] : 2;
var max = (arguments[2] + 1) ? arguments[2] : 15;
if (str.length < min){
//bootbox.alert("输入长度要大于" + min);
return false;
}
else if (str.length >= max){
bootbox.alert("输入长度要小于" + max);
return false;
}
return true;
},
//检查文件后缀
checkFileSuffix: function(filename, filetype){
var png_type = new Array(".png");
var img_type = new Array(".jpg", ".jpeg", ".png");
var apk_type = new Array(".apk");
filename = filename.toLowerCase();
if ('png' == filetype){
filetype = png_type;
}
else if ('img' == filetype){
filetype = img_type;
}
else if ('apk' == filetype){
filetype = apk_type;
}
var point = filename.lastIndexOf(".");
var type = filename.substr(point);
for (i=0; i<filetype.length; i++){
if(type == filetype[i]){
return true;
}
}
return false;
},
getdate: function(days){
var today = new Date();
var d = today.setDate(today.getDate() - days);
var d = new Date(d);
var year = d.getFullYear();
var month = d.getMonth() + 1;
var day = d.getDate();
return year + "-" + month + '-' + day;
}
}); // end $.extend
})(jQuery);
// jQuery 对象级别开发
(function($){
$.fn.extend({
// 居中显示
displayCenter:function(){
var $tarEle = $(this);
// 父级元素节点
var $tarEleParent = $tarEle.parent();
var tw = $tarEle.width();
var th = $tarEle.height();
var tpw = $tarEleParent.width();
var tph = $tarEleParent.height();
// 计算元素位置
var posLeft = parseInt((tpw - tw)/2);
var posTop = parseInt((tph - th)/2);
$tarEle.css("position", "absolute")
.css("left", posLeft)
.css("top", posTop);
},
// 获取一个元素尺寸大小
elementSize:function(){
var $this = $(this);
var ew = $this.width();
var eh = $this.height();
return {
width:ew,
height:eh
};
},
// 激活元素
activeEle:function(eleId){
$(this).removeClass("active");
$("#"+eleId).addClass("active");
},
// 复选框树
checkboxTree:function(data, checkClass){
var iterator = function(node, $parent){
for(var i = 0; i < node.length; i++){
var $li = $("<li><input type='checkbox' class='" + checkClass + "' value='" + node[i].id + "'/>" + node[i].text + "</li>");
$parent.append($li);
if(typeof node[i].children == "object" && node[i].children.length > 0){
var $cul = $("<ul></ul>");
var $swt = $("<span class='j-switch' style='cursor:pointer'>"+
"<i class='glyphicon glyphicon-hand-up'></i></span>");
$li.prepend($swt);
$li.append($cul);
iterator(node[i].children, $cul);
}else{
$li.prepend(" ");
}
}
}
iterator(data, $(this));
// 实现全选功能
$(this).find("input[type='checkbox']").click(function(){
var checked = $(this).is(":checked");
$(this).parent().find("ul li input[type='checkbox']").prop("checked", checked);
});
$(".j-switch").parent().find("ul").slideUp(0);
$(".j-switch").click(function(){
var $rtarget = $(this).parent().find(">ul");
var display = $rtarget.css("display");//toggle(1000);
if(display == "block"){
$rtarget.slideUp(0);
$(this).html("<i class='glyphicon glyphicon-hand-up'></i>");
}else{
$rtarget.slideDown(0);
$(this).html("<i class='glyphicon glyphicon-hand-down'></i>");
}
});
},
moduleTree:function(data){
var iterator = function(node, $parent){
for(var i = 0; i < node.length; i++){
var $li = $("<li><span>" + node[i].text + "</span></li>");
$parent.append($li);
if(typeof node[i].children == "object" && node[i].children.length > 0){
var $cul = $("<ul></ul>");
var $swt = $("<span class='j-switch' style='cursor:pointer'>"+
"<i class='glyphicon glyphicon-hand-up'></i></span>");
$li.prepend($swt);
$li.append($cul);
iterator(node[i].children, $cul);
}
else{
$li.prepend(" ");
}
}
}
iterator(data, $(this));
$(".j-switch").parent().find("ul").slideUp(0);
$(".j-switch").click(function(){
var $rtarget = $(this).parent().find(">ul");
var display = $rtarget.css("display");//toggle(1000);
if(display == "block"){
$rtarget.slideUp(0);
$(this).html("<i class='glyphicon glyphicon-hand-up'></i>");
}
else{
$rtarget.slideDown(0);
$(this).html("<i class='glyphicon glyphicon-hand-down'></i>");
}
});
},
updatePriority:function(url){
var tempValue;
$(this).focus(function(){
// 获取原始值
tempValue = $.trim($(this).val());
})
.blur(function(){
var id = $(this).parent().parent().find("input[type='checkbox']").val();
var nowValue = $.trim($(this).val());
if(!$.isNumber(nowValue)){
bootbox.alert('排序必须是数字!');
$(this).val(tempValue);
return;
}
else if(Number(nowValue)>9999){
bootbox.alert('排序超过范围(0~9999)');
$(this).val(tempValue);
return;
}
if(nowValue != tempValue){
var param = {
url:url,
data:{
id:id,
priority:nowValue
}
}
$.sendAjax(param);
}
});
},
updateItemsPerPage:function(){
    // Inline editor for the "items per page" box: on blur, validate the
    // number and reload the page with an updated ?perpage= query parameter.
    var tempValue;
    var url = location.href;
    var sltVal = $.urlParam("perpage");            // current perpage value ('' when absent)
    var href = url.substring(0,url.indexOf("?"));  // URL without the query string
    $(this).focus(function(){
        // Remember the original value so invalid input can be rolled back.
        tempValue = $.trim($(this).val());
    })
    .blur(function(){
        var nowValue = $.trim($(this).val());
        if(!$.isNumber(nowValue)){
            bootbox.alert('条目数必须是数字!');
            $(this).val(tempValue);
            return;
        }
        else if(Number(nowValue)>9999){
            bootbox.alert('条目数超过范围(0~9999)');
            $(this).val(tempValue);
            return;
        }
        if(nowValue != tempValue){
            if(-1 == url.indexOf('?')){
                // No query string yet: start one with perpage.
                location.href = href + "?perpage=" + nowValue;
            }
            else{
                if (sltVal == '') {
                    // Query string exists but no perpage param: append it.
                    location.href = url + "&perpage=" + nowValue;
                }
                else
                {
                    var tmpList = url.split('?');
                    var par = tmpList[1];
                    if(par.indexOf("&") == par.lastIndexOf("&"))
                    {
                        // NOTE(review): this condition is also true when the query
                        // holds exactly one '&' (two params), in which case the whole
                        // query string is replaced and the other param is dropped —
                        // looks intended only for the single-param case; confirm.
                        location.href = url.substring(0,url.lastIndexOf("?")) + "?perpage=" + nowValue;
                    }
                    else
                    {
                        // Multiple params: replace the last one (assumed perpage).
                        location.href = url.substring(0,url.lastIndexOf("&")) + "&perpage=" + nowValue;
                    }
                }
            }
        }
    });
},
changeStatus:function(paramName){
    // Filter-dropdown handler: pre-selects the option matching the current
    // URL parameter, then reloads the page with the new value on change.
    var url = location.href;
    var sltVal = $.urlParam(paramName);       // current value of the filter param ('' when absent)
    var sortModel = $.urlParam('sortModel');  // read but only used by the commented-out sort branch below
    var sortName = $.urlParam('sortName');
    var href = url.substring(0,url.indexOf("?"));  // URL without the query string
    // Sync the dropdown with the URL.
    $(this).find("option").attr("selected", false);
    $(this).find("option[value='"+sltVal+"']").attr("selected", true);
    $(this).change(function(){
        var status = $(this).val();
        // -1 is the "no selection" placeholder value.
        if(status==-1){return;}
        /*
        if (-1 !== url.indexOf("sort")){
            location.href = href + "?" + 'sortModel=' + sortModel + '&' + 'sortName=' + sortName + '&' + paramName + "=" + status;
        }
        */
        if(-1 == url.indexOf('?')){
            // No query string yet: start one with this filter.
            location.href = href + "?" + paramName + "=" + status;
        }
        else{
            if (("app_publish_status" === paramName || "verify_status" === paramName) && status.length == 0){
                // Empty value on these two filters means "clear all filters".
                location.href = url.substring(0, url.indexOf("?"));
            }
            else if (sltVal == '') {
                // Param not present in the URL yet: append it.
                location.href = url + "&" + paramName + "=" + status;
            }
            else
            {
                var tmpList = url.split('?');
                var par = tmpList[1];
                if(par.indexOf("&") == par.lastIndexOf("&"))
                {
                    // NOTE(review): also true when the query holds exactly one '&'
                    // (two params); the whole query string is then replaced by this
                    // single filter — confirm that dropping the other param is intended.
                    location.href = url.substring(0,url.lastIndexOf("?")) + "?" + paramName + "=" + status;
                }
                else
                {
                    // Multiple params: replace the last one with the new filter value.
                    location.href = url.substring(0,url.lastIndexOf("&")) + "&" + paramName + "=" + status;
                }
            }
        }
    });
},
imgPreview: function(){
var $img;
$(this).mouseover(function(e){
$img = $("<img src='#' style='z-index:2000;'/>");
var src = $(this).attr("src");
$img.attr("src", src);
$("body").append($img);
if (height < 100){
var left = e.clientX + 5;
var top = e.clientY + 5;
}
else{
var left = e.pageX + 5;
$img.width(300);
var height = $img.height();
var top = ($(window).height() - height) / 2;
}
$img.css({
position:"fixed",
top: top,
left: left
});
});
$(this).mouseout(function(e){
$img.remove();
});
}
});
})(jQuery);
<file_sep>$(function(){
// bootbox设置区域
bootbox.setDefaults({
locale: "zh_CN"
});
//++ app banner位模块
// 实现全选
$.selectAllToggle("banner-check", "banner-check-item");
// 下架
$("#btn-banner-sw").click(function(){
var result = $.checkboxVal("banner-check-item");
if(result.length > 0){
bootbox.confirm("选择下架后,你投放的资源将被移除,是否继续?", function(type) {
if(type){
params = {'url': '/admin/ads/offshelf/',
'data': {'ad_ids': result},
}
$.sendAjax(params)
}
});
}else{
bootbox.alert("请先选择,然后才能进行下架操作!", function() {
// ----------
});
}
});
// 修改广告排序
$(".update_ads_priority").updatePriority('/admin/ads/priority/update/');
//++ 首页管理
// 实现全选
$.selectAllToggle("mp-check", "mp-check-item");
// 下架
$("#btn-mp-sw").click(function(){
var result = $.checkboxVal("mp-check-item");
if(result.length > 0){
bootbox.confirm("选择下架后,你投放的资源将被移除,是否继续?", function(value) {
if(value){
var listbox = document.getElementById("provider");
var provider = listbox.options[listbox.selectedIndex].value;
var params = {"id":result, "provider":provider};
$.sendAjax({"url": "/admin/featured/off_shelf/",
"type": "POST",
"data": params
});
}
});
}else{
bootbox.alert("请先选择,然后才能进行下架操作!", function() {
// ----------
});
}
});
// 保存
$('.featured_priority').updatePriority('/admin/featured/update_priority/');
//修改单页条数
$('.itemperpage').updateItemsPerPage();
//来源过滤
$("#provider").change(function(){
var provider_list = document.getElementById("provider");
var provider = provider_list.options[provider_list.selectedIndex].value;
var url = "/admin/featured/filter/?provider=" + provider;
$.reload(url);
});
//++ 分类管理
//修改排序
$(".category_priority").updatePriority('/admin/category/priority/update/');
// 删除分类
$(".btn-clsf-del").click(function(){
var id = $(this).parent().parent().find(".clsf-check-item").val();
bootbox.confirm("请确定是否删除所选分类?", function(type) {
if(type){
params = {
'url': '/admin/category/delete/',
'data': {'id': id},
}
$.sendAjax(params);
}
});
});
//++ 分类软件列表
//全选
$.selectAllToggle("capl-check", "capl-check-item");
//更改排序
var url = '/admin/category/list_app/update/' + $("#category_id").val() + "/";
$('.category_app_priority').updatePriority(url);
//下架
$("#btn-capl-sw").click(function(){
var result = $.checkboxVal("capl-check-item");
if(result.length > 0){
bootbox.confirm("选择下架后,你投放的资源将被移除,是否继续?", function(value) {
if(value){
var params = {"ids": result};
var url = "/admin/category/list_app/offshelf/" + $("#category_id").val() + "/";
$.sendAjax({"url": url,
"type": "POST",
"data": params
});
}
});
}else{
bootbox.alert("请先选择,然后才能进行下架操作!", function() {
// ----------
});
}
});
// 点击增加按钮
$("#btn-cat-add").click(function(){
reset_add_category_form();
});
// 添加分类,分类名称唯一性检查
$("#add-category-name").blur(function(){
if(! $.isNull($.trim($(this).val()))){
if ($.strLength($.trim($(this).val()), 1)){
$.sendAjax({
"url": "/admin/category/name/check/",
"type": "POST",
"data": {'name': $.trim($(this).val()), 'category_id': $('#input-category-id').val()},
"callback": function(data){
if(data.status == 'success'){
$("#category-name-check").html("<img src='/static/common/images/right.png' />");
}
else{
$("#category-name-check").html("<img src='/static/common/images/error.png' />" +
"<span class='label label-warning'>" + data.msg + "</span>");
}
},
});
}
}
else{
$("#category-name-check").html("");
}
});
// icon 选择
// Builds the option object handed to $.ajaxFileUpload for category icons.
var cat_upload_params = function(input_id, img_id){
    var param = {
        url: '/admin/category/icon/upload/',
        secureuri: false,
        fileElementId: input_id,
        dataType: 'json',
        beforeSend: function(){},
        success: function (data, status){
            if(data.status == 'success'){
                // Show the uploaded image preview and remember its server path.
                $("#" + img_id).parent().find("img").attr("src", '/download/' + data.img_path);
                $("#" + img_id).parent().find("img").removeClass('hidden');
                $('#input-category-icon').val(data.img_path);
            }
            else {
                bootbox.alert(data.msg);
            }
            // Workaround for an ajaxFileUpload bug: the plugin replaces the
            // file input element, dropping previously-bound events, so the
            // change handler must be re-attached after every upload.
            $("#category-icon-file").change(function(){
                var $target = $(this).parent().parent().find("input[type='text']");
                // Only .png/.jpg files are accepted.
                if (! $.checkFileSuffix($("#category-icon-file").val(), 'img')){
                    bootbox.alert('请上传.png或者.jpg文件');
                    return;
                }
                $('#selected-category-icon').val(this.value);
                $.ajaxFileUpload(cat_upload_params('category-icon-file', 'btn-category-icon'));
                $target.val($(this).val());
            });
        }
    };
    return param;
}
// 添加分类图片
$("#btn-category-icon").click(function(){
var $target = $(this).parent().parent().find("input[type='text']");
$("#category-icon-file").click();
});
$("#category-icon-file").change(function(){
var $target = $(this).parent().parent().find("input[type='text']");
if (! $.checkFileSuffix($("#category-icon-file").val(), 'img')){
bootbox.alert('请上传.png或者.jpg文件');
return;
}
$.ajaxFileUpload(cat_upload_params('category-icon-file', 'btn-category-icon'));
$target.val($(this).val());
});
// 提交添加分类
$("#btn-add-category").click(function(){
$("#btn-add-category-real").click();
});
$("#btn-add-category-real").click(function(){
if($.trim($("#add-category-name").val())!='')
{
$.sendAjax({
"url": "/admin/category/name/check/",
"type": "POST",
"data": {'name': $.trim($('#add-category-name').val()), 'category_id': $('#input-category-id').val()},
"sync": false,
"callback": function(data){
if(data.status == 'success'){
$("#category-name-check").html("<img src='/static/common/images/right.png' />");
}
else{
$("#category-name-check").html("<img src='/static/common/images/error.png' />" +
"<span class='label label-warning'>" + data.msg + "</span>");
}
},
});
}
// 检查分类类型
if( $("#category-type").val() == '-1'){
bootbox.alert("请选择分类类型");
return false;
};
// 检查分类名
if( ! ($("#category-name-check").find('img').attr('src') == '/static/common/images/right.png')){
bootbox.alert("无效的分类名");
return false;
};
//检查图片
if( $.isNull($("#input-category-icon").val())){
bootbox.alert("无效的图片");
return false;
};
//检查描述
if ( $.isNull($("#category-description").val())){
bootbox.alert("分类描述不能为空");
return false;
};
});
// 修改分类
$('.btn-modify-category').click(function(){
reset_add_category_form();
$.sendAjax({
'url': '/admin/category/info/',
'type': "POST",
'data': {'category_id': $(this).attr('value')},
'callback': function(data){
if(data.status == 'success')
{
$('#input-category-id').val(data.category_id);
$('#add-category-name').val(data.name);
$("#category-name-check").html("<img src='/static/common/images/right.png' />");
$('#input-category-icon').val(data.icon);
$("#btn-category-icon").parent().find("img").attr("src", '/download/' + data.icon);
$("#btn-category-icon").parent().find("img").removeClass('hidden');
$("#category-type").val(data.category_type);
$("#category-type").attr('disabled', 'disabled');
$("#recommend_app_1").val(data.recommend_app_1);
$("#recommend_app_2").val(data.recommend_app_2);
$("#category-description").val(data.description);
$("#operate-category-title").html('修改分类');
}
else{
bootbox.alert(data.msg);
}
},
});
$('#category-add').modal('show');
});
// 分类图片预览
$('.imgpreviews').imgPreview();
//++ 专题管理
// 专题介绍表格
var options = {
width : '100%',
items:['fontname','fontsize','|'
,'forecolor','bold','italic','underline','|'
,'justifyleft','justifycenter','justifyright','justifyfull','|'
,'insertorderedlist','insertunorderedlist','|'
,'indent','outdent'
],
resizeType:0,
};
var editor;
// 实现全选
$.selectAllToggle("sbj-check", "sbj-check-item");
// 点击增加按钮
$("#btn-sbj-add").click(function(){
reset_add_subject_form();
if (! editor){
editor = KindEditor.create('#subject-description', options);
}
});
// 投放
$("#btn-sbj-sw").click(function(){
var result = $.checkboxVal("sbj-check-item");
if(result.length > 0){
params = {
'url': '/admin/subject/throw/',
'type': 'GET',
'callback': function(data){
// 清空
$("#sj-throw").html("");
$("#sj-throw").checkboxTree(data.data, 'subject-throw-node');
}
}
$.sendAjax(params);
$("#sbj-throw").modal('show');
}else{
bootbox.alert("请先选择专题,然后才能进行投放!", function() {
// ----------
});
}
});
//确定投放
$("#throw-yes-btn").click(function(){
var checkednodes = $.checkboxVal('subject-throw-node');
var checkedsubjects = $.checkboxVal('sbj-check-item');
if(checkednodes.length == 1 && checkednodes[0] == '-1')
{
bootbox.alert("请选择要投放到的模块!", function(){});
return;
}
if(checkednodes.length > 0){
var post_data = {};
post_data['subject_ids'] = checkedsubjects;
post_data['checked_nodes'] = checkednodes;
params = {
'url': '/admin/subject/throw/',
'data': post_data,
}
$.sendAjax(params);
}
else{
bootbox.alert("请选择要投放到的模块!", function() {
});
}
});
// 下架
$(".btn-sbj-offshelf").click(function(){
var subject_id = $(this).attr('value');
bootbox.confirm("请确定是否下架所选专题?", function(result) {
if(result){
params = {
'url': '/admin/subject/offshelf/',
'data': {'subject_id': subject_id},
}
$.sendAjax(params);
}
});
});
// 上架
$(".btn-sbj-onshelf").click(function(){
var subject_id = $(this).attr('value');
bootbox.confirm("请确定是否上架所选专题?", function(result) {
if(result){
params = {
'url': '/admin/subject/onshelf/',
'data': {'subject_id': subject_id},
}
$.sendAjax(params);
}
});
});
// 删除
$("#btn-sbj-del").click(function(){
var values = $.checkboxVal("sbj-check-item");
if(values.length > 0){
bootbox.confirm("请确定是否删除所选专题?", function(type) {
if(type){
params = {
'url': '/admin/subject/delete/',
'data': {'subject_ids' : values},
}
$.sendAjax(params);
}
});
}else{
bootbox.alert("请先选择专题,然后才能进行删除操作!", function() {
});
}
});
// 全选
$.selectAllToggle("slist-check", "slist-check-item");
// 添加专题,专题名称唯一性检查
$("#add-subject-name").blur(function(){
if(! $.isNull($.trim($(this).val()))){
if ($.strLength($.trim($(this).val()), 1)){
$.sendAjax({
"url": "/admin/subject/title/check/",
"type": "POST",
"data": {'title': $.trim($(this).val()), 'subject_id': $('#input-subject-id').val()},
"callback": function(data){
if(data.status == 'success'){
$("#subject-name-check").html("<img src='/static/common/images/right.png' />");
}
else{
$("#subject-name-check").html("<img src='/static/common/images/error.png' />" +
"<span class='label label-warning'>" + data.msg + "</span>");
}
},
});
}
}
else{
$("#subject-name-check").html("");
}
});
// icon 选择
var upload_params = function(input_id, img_id, type, input_subject){
var param = {
url: '/admin/subject/image/upload/' + type + '/',
secureuri: false,
fileElementId: input_id,
dataType: 'json',
beforeSend: function(){},
success: function (data, status){
if(data.status == 'success'){
$("#" + img_id).parent().find("img").attr("src", '/download/' + data.img_path);
$("#" + img_id).parent().find("img").removeClass('hidden');
$('#' + input_subject).val(data.img_path);
}
else {
bootbox.alert(data.msg);
}
if(input_id=='subject-banner-file')
{
$("#subject-banner-file").change(function(){
var $target = $(this).parent().parent().find("input[type='text']");
if (! $.checkFileSuffix($("#subject-banner-file").val(), 'img')){
bootbox.alert('请上传.png或者.jpg文件');
return;
}
$.ajaxFileUpload(upload_params('subject-banner-file', 'btn-subject-banner', 'banner', 'input-subject-banner'));
$target.val($(this).val());
});
}
else
{
$("#subject-icon-file").change(function(){
var $target = $(this).parent().parent().find("input[type='text']");
if (! $.checkFileSuffix($("#subject-icon-file").val(), 'img')){
bootbox.alert('请上传.png或者.jpg文件');
return;
}
$.ajaxFileUpload(upload_params('subject-icon-file', 'btn-subject-icon', 'icon', 'input-subject-icon'));
$target.val($(this).val());
});
}
}
};
return param;
}
// 添加专题banner图片
$("#btn-subject-banner").click(function(){
var $target = $(this).parent().parent().find("input[type='text']");
$("#subject-banner-file").click();
});
$("#subject-banner-file").change(function(){
var $target = $(this).parent().parent().find("input[type='text']");
if (! $.checkFileSuffix($("#subject-banner-file").val(), 'img')){
bootbox.alert('请上传.png或者.jpg文件');
return;
}
$.ajaxFileUpload(upload_params('subject-banner-file', 'btn-subject-banner', 'banner', 'input-subject-banner'));
$target.val($(this).val());
});
// 添加专题icon图片
$("#btn-subject-icon").click(function(){
var $target = $(this).parent().parent().find("input[type='text']");
$("#subject-icon-file").click();
});
$("#subject-icon-file").change(function(){
var $target = $(this).parent().parent().find("input[type='text']");
if (! $.checkFileSuffix($("#subject-icon-file").val(), 'img')){
bootbox.alert('请上传.png或者.jpg文件');
return;
}
$.ajaxFileUpload(upload_params('subject-icon-file', 'btn-subject-icon', 'icon', 'input-subject-icon'));
$target.val($(this).val());
});
// 提交添加专题
$("#btn-add-subject").click(function(){$("#btn-add-subject-real").click();});
$("#btn-add-subject-real").click(function(){
if($.trim($("#add-subject-name").val())!='')
{
$.sendAjax({
"url": "/admin/subject/title/check/",
"type": "POST",
"data": {'title': $.trim($('#add-subject-name').val()), 'subject_id': $('#input-subject-id').val()},
"sync": false,
"callback": function(data){
if(data.status == 'success'){
$("#subject-name-check").html("<img src='/static/common/images/right.png' />");
}
else{
$("#subject-name-check").html("<img src='/static/common/images/error.png' />" +
"<span class='label label-warning'>" + data.msg + "</span>");
}
},
});
}
// 检查专题名
if( ! ($("#subject-name-check").find('img').attr('src') == '/static/common/images/right.png')){
bootbox.alert("无效的专题名");
return false;
};
//检查图片
if( $.isNull($("#input-subject-icon").val())){
bootbox.alert("专题Icon图片不能为空");
return false;
};
//检查描述
if (editor.isEmpty()){
bootbox.alert("专题描述不能为空");
return false;
}
var htmlv = editor.html();
htmlv = htmlv.replace(/(<p>\s*<br \/>\s*<\/p>\s*)*\s*$/g,"");
editor.html(htmlv);
// 同步数据后可以直接取得textarea的value
editor.sync();
});
// 修改专题
$('.btn-modify-subject').click(function(){
reset_add_subject_form();
if (! editor){
editor = KindEditor.create('#subject-description', options);
}
$.sendAjax({
'url': '/admin/subject/info/',
'type': "POST",
'data': {'subject_id': $(this).attr('value')},
'callback': function(data){
if(data.status == 'success')
{
$('#input-subject-id').val(data.subject_id);
$('#add-subject-name').val(data.title);
$("#subject-name-check").html("<img src='/static/common/images/right.png' />");
$('#input-subject-icon').val(data.icon);
$('#input-subject-banner').val(data.banner);
$("#btn-subject-icon").parent().find("img").attr("src", '/download/' + data.icon);
$("#btn-subject-banner").parent().find("img").attr("src", '/download/' + data.banner);
$("#btn-subject-icon").parent().find("img").removeClass('hidden');
$("#btn-subject-banner").parent().find("img").removeClass('hidden');
$("#recommend_app_1").val(data.recommend_app_1);
$("#recommend_app_2").val(data.recommend_app_2);
$("#operate-subject-title").html('修改专题');
editor.html(data.description);
}
else{
bootbox.alert(data.msg);
}
},
});
$('#sbj-add').modal('show');
});
// 专题图片预览
$('.imgpreviews').imgPreview();
//++ 搜索管理
// 实现全选
$.selectAllToggle("search-check", "search-check-item");
// 删除
$("#btn-search-del").click(function(){
var result = $.checkboxVal("search-check-item");
if(result.length > 0){
bootbox.confirm("请确定是否删除所选项?", function(value) {
if(value){
var params = {"ids": result};
$.sendAjax({
"url": "/admin/keyword/delete/",
"type": "POST",
"data": params
});
}
});
}else{
bootbox.alert("请先选择,然后才能进行删除操作!", function() {
// ----------
});
}
});
//搜索管理导入表单验证
//输入时验证
var addKeywordFlag;
var addBatchKeywordFlag;
$('#k_name').blur(function(){
var name = $.trim($('#k_name').val());
var nameBatch = $.trim($('#k_batch').val());
$('#k_name_tip1').addClass('hidden');
$('#k_name_tip2').addClass('hidden');
$('#k_name_tip3').addClass('hidden');
//验证名字是否重复
if(name != ''){
var params = {
'url': '/admin/keyword/get/',
'data': {'name': name},
'type': 'POST',
'callback': function(data){
if(data.status == 'failed'){
$('#k_name_tip2').removeClass('hidden');
}else{
$('#k_name_tip2').addClass('hidden');
}
}
};
$.sendAjax(params);
}
//输入检测
if(name == '' && nameBatch == '')
{
$('#k_name_tip1').removeClass('hidden');
$('#k_name_tip3').addClass('hidden');
}
else if (name.length >4)
{
$('#k_name_tip1').addClass('hidden');
$('#k_name_tip3').removeClass('hidden')
}
else{
$('#k_name_tip1').addClass('hidden');
$('#k_name_tip3').addClass('hidden')}
})
$('#k_batch').blur(function(){
var name = $.trim($('#k_name').val());
var nameBatch = $.trim($('#k_batch').val());
var nameBatchList = nameBatch.split('\n');
var repeatCheck = [];
//验证名字是否重复
if (nameBatch.length > 0){
var keywordList = nameBatch.split('\n');
var i ;
for(i=0; i<keywordList.length; i++){
keyword = keywordList[i];
if(keyword.indexOf(',') == -1){
addBatchKeywordFlag = false;
}
var keyword = keyword.split(',')[0];
var priority = keyword.split(',')[1];
if(keyword != null){
var params = {
'url': '/admin/keyword/get/',
'data': {'name': keyword},
'type': 'POST',
'callback': function(data){
if(data.status == 'failed'){
$('#k_batch_tip1').removeClass('hidden');
}
else
{
$('#k_batch_tip1').addClass('hidden');
}
}
};
$.sendAjax(params);
if(!addBatchKeywordFlag){
break;
}
}
}
if(i == keywordList.length){
addBatchKeywordFlag = true;
$('#k_batch_tip1').addClass('hidden');
}
}
//输入检测
$('#k_batch_tip5').addClass('hidden');
for(var index in nameBatchList)
{
var nameSingle = nameBatchList[index].split(',');
if(nameBatch !="" && nameSingle.length<2)
{
$('#k_batch_tip2').removeClass('hidden');
$('#k_batch_tip1').addClass('hidden');
$('#k_batch_tip3').addClass('hidden');
$('#k_batch_tip4').addClass('hidden');
}
else if(nameSingle[0].length>4)
{
$('#k_batch_tip2').addClass('hidden');
$('#k_batch_tip3').removeClass('hidden');
break;
}
else
{
$('#k_batch_tip2').addClass('hidden');
$('#k_batch_tip3').addClass('hidden');
$('#k_batch_tip4').addClass('hidden');
}
if(nameSingle.length>1 && !$.isNumber(nameSingle[1]) && nameSingle[1].length>0)
{
$('#k_batch_tip4').removeClass('hidden');
break;
}
else
{
$('#k_batch_tip4').addClass('hidden');
}
repeat:
{
if(nameSingle[0] == name && nameBatch!='')
{
$('#k_batch_tip5').removeClass('hidden');
repeatCheck.push(nameSingle[0]);
break repeat;
}
else
{
if(repeatCheck.length==0)
{repeatCheck.push(nameSingle[0]);}
else
{
for(var item in repeatCheck)
{
if(nameSingle[0] == repeatCheck[item])
{
$('#k_batch_tip5').removeClass('hidden');
repeatCheck.push(nameSingle[0]);
break repeat;
}
else
{
repeatCheck.push(nameSingle[0]);
continue;
}
}
}
}
}
}
})
$('#k_priority').blur(function(){
var priority = $.trim($('#k_priority').val());
if(priority != '')
{
if(!$.isNumber(priority))
{
$('#k_priority_tip').removeClass('hidden');
}
else
{
$('#k_priority_tip').addClass('hidden');
}
}
})
//提交时验证
$('#keyword_submit').click(function(){
$('#keyword_submit_real').click();
});
$('#keyword_submit_real').click(function(){
var name = $.trim($('#k_name').val());
var nameBatch = $.trim($('#k_batch').val());
var nameResult, nameBatchResult, priorityResult;
nameResult = nameBatchResult = priorityResult = true;
var nameTip , nameBatchTip ,priorityTip;
nameTip = nameBatchTip = priorityTip = '';
var repeatCheck = [];
var none_repeat = true;
$('#k_batch_tip5').addClass('hidden');
if(name!='')
{
$.sendAjax({
'url': '/admin/keyword/get/',
'data': {'name': name},
'type': 'POST',
'sync': false,
'callback': function(data){
if(data.status == 'failed'){
addKeywordFlag = false;
$('#k_name_tip2').removeClass('hidden');
}else{
addKeywordFlag = true;
$('#k_name_tip2').addClass('hidden');
}
}
});
}
if (nameBatch!=''){
var keywordList = nameBatch.split('\n');
var i ;
for(i=0; i<keywordList.length; i++){
keyword = keywordList[i];
if(keyword.indexOf(',') == -1){
addBatchKeywordFlag = false;
}
var keyword = keyword.split(',')[0];
if(keyword != null){
$.sendAjax({
'url': '/admin/keyword/get/',
'data': {'name': keyword},
'type': 'POST',
'sync': false,
'callback': function(data){
if(data.status == 'failed'){
addBatchKeywordFlag = false;
$('#k_batch_tip1').removeClass('hidden');
}
else
{
addBatchKeywordFlag = true;
$('#k_batch_tip1').addClass('hidden');
}
}
});
if(!addBatchKeywordFlag){
break;
}
}
}
if(i == keywordList.length){
addBatchKeywordFlag = true;
$('#k_batch_tip1').addClass('hidden');
}
}
if(name.length ==0 && nameBatch.length == 0)
{
nameTip = '#k_name_tip1';
nameResult = false;
}
else if(name.length >4)
{
nameTip = '#k_name_tip3';
nameResult = false;
}
var nameBatchList = nameBatch.split('\n');
for(var index in nameBatchList)
{
var nameSingle = nameBatchList[index].split(',');
if(nameSingle[0].length>4)
{
nameBatchResult = nameBatchResult && false;
nameBatchTip = '#k_batch_tip3';
}
if(nameSingle.length>1 && nameSingle[1]!='' && !$.isNumber(nameSingle[1]))
{
nameBatchResult = nameBatchResult && false;
nameBatchTip = '#k_batch_tip4';
}
if( nameSingle.length<2 && nameBatch!='')
{
nameBatchResult = nameBatchResult && false;
nameBatchTip = '#k_batch_tip2';
}
repeatValid:
{
var nameSingle = nameBatchList[index].split(',');
if(nameSingle[0] == name && nameBatch!='')
{
$('#k_batch_tip5').removeClass('hidden');
repeatCheck.push(nameSingle[0]);
none_repeat = false;
break repeatValid;
}
else
{
if(repeatCheck.length==0)
{repeatCheck.push(nameSingle[0]);}
else
{
for(var item in repeatCheck)
{
if(nameSingle[0] == repeatCheck[item])
{
$('#k_batch_tip5').removeClass('hidden');
none_repeat = false;
repeatCheck.push(nameSingle[0]);
break repeatValid;
}
else
{
repeatCheck.push(nameSingle[0]);
continue;
}
}
}
}
}
}
var priority = $.trim($('#k_priority').val());
if(priority.length > 0){
if(!$.isNumber(priority)){
priorityTip = '#k_priority_tip';
priorityResult = false;
}else{
priorityTip = '#k_priority_tip';
}
}
if(addKeywordFlag){
nameResult = nameResult && addKeywordFlag;
if(nameResult)
{nameTip = '#k_name_tip2';}
}
if(addBatchKeywordFlag){
nameBatchResult = nameBatchResult && addBatchKeywordFlag;
if(nameBatchResult)
{nameBatchTip = '#k_batch_tip1';}
}
if(!nameResult)
{$(nameTip).removeClass('hidden');}
if(!nameBatchResult)
{}
if(!priorityResult)
{$(priorityTip).removeClass('hidden');}
if($.trim($('#k_name').val()) != '' && $.trim($('#k_batch').val())=='')
{addBatchKeywordFlag=true}
if($.trim($('#k_batch').val()) != '' && $.trim($('#k_name').val())=='')
{addKeywordFlag=true}
var result = nameResult && nameBatchResult && priorityResult && addBatchKeywordFlag && addKeywordFlag && none_repeat;
return result;
});
$('.keyword_priority').updatePriority('/admin/keyword/update/');
// 修改专题里面app的排序
$(".update-subject-app-priority").updatePriority('/admin/subject/list/priority/update/' + $('#btn-subject-app-list').attr('value') +'/');
// 修改专题排序
$(".update-subject-priority").updatePriority('/admin/subject/priority/update/');
// 专题软件列表全选
$.selectAllToggle("app-check", "app-check-item");
// 专题软件列表下架
$("#btn-subject-app-list").click(function(){
var result = $.checkboxVal("app-check-item");
var subject_id = $(this).attr('value');
if(result.length > 0){
bootbox.confirm("选择下架后,你投放的资源将被移除,是否继续?", function(type) {
if(type){
params = {'url': '/admin/offshelf_app_from_subject/' + subject_id + '/',
'data': {'app_ids': result},
}
$.sendAjax(params)
}
});
}else{
bootbox.alert("请先选择,然后才能进行下架操作!", function() {
// ----------
});
}
});
});
function reset_form(targetForm)
{
    // Restore the form to its initial values and hide all warning labels in it.
    var $form = $(targetForm);
    targetForm.reset();
    $form.find('.label-warning').addClass('hidden');
}
function reset_add_subject_form(){
    // Return the add/modify-subject dialog to a blank "add" state.
    $('#input-subject-id').val('-1');
    $("#subject-name-check").html('<small>专题名称建议不超过14个字</small>');
    // Blank out every text/hidden field used by the dialog.
    var emptyFields = [
        '#add-subject-name', '#selected-subject-icon', '#input-subject-banner',
        '#input-subject-icon', '#recommend_app_1', '#recommend_app_2',
        '#subject-description'
    ];
    $.each(emptyFields, function(i, sel){ $(sel).val(''); });
    // Clear the banner/icon previews.
    $("#btn-subject-banner").parent().find("img").attr("src", '');
    $("#btn-subject-icon").parent().find("img").attr("src", '');
    $("#operate-subject-title").html('增加专题');
}
function reset_add_category_form(){
    // Return the add/modify-category dialog to a blank "add" state.
    $('#input-category-id').val('-1');
    $("#category-type").val('-1');
    $("#category-name-check").html('<small>分类名称建议不超过7个字</small>');
    $('#add-category-name').val('');
    $('#selected-category-icon').val('');
    $('#input-category-icon').val('');
    // Re-enable the type selector (it is disabled while editing an existing
    // category). Fix: the original called removeAttr('disabled') twice.
    $("#category-type").removeAttr("disabled");
    $("#btn-category-icon").parent().find("img").attr("src", '');
    $("#recommend_app_1").val('');
    $("#recommend_app_2").val('');
    $("#category-description").val('');
    $("#operate-category-title").html('增加分类');
}
function is_iE()
{
    // Legacy IE detection: only Internet Explorer exposes window.ActiveXObject.
    return typeof window.ActiveXObject !== "undefined";
}
<file_sep>package com.ccnu.bishe.examtation.dao;
import java.util.List;
/**
 * Generic CRUD DAO contract shared by the entity DAOs of this project.
 *
 * @param <T> the entity type managed by the implementation
 */
public interface IBaseDao<T> {
    /** Persists a new entity. */
    public void save(T obj);
    /** Deletes the entity identified by the given primary key. */
    public void delete(Long id);
    /** Updates an existing entity. */
    public void update(T obj);
    /**
     * Loads a single entity by primary key.
     * Behavior for a missing id is implementation-defined (not visible here).
     */
    public T findById(Long id);
    /** Loads the entities whose primary keys appear in {@code idList}. */
    public List<T> findByIds(List<Long> idList);
}
<file_sep>package com.ccnu.bishe.examtation.model;
/**
 * Exam information entity (JavaBean): publication flag, exam type,
 * textbook, target object, outline text, stored PDF file name and the
 * owning school's id.
 */
public class Info {
    // Primary key; trimmed on set.
    private String id;
    // Whether this exam info is open/published.
    private Boolean isOpen;
    // Exam type code, stored as a single byte.
    private Byte type;
    // Textbook description; trimmed on set.
    private String textbook;
    // Exam target/object description; trimmed on set.
    private String object;
    // Exam outline text; trimmed on set.
    private String outline;
    // File name of the attached PDF as stored on disk.
    private String pdfPath;
    // Owning school identifier.
    private int school;
    public String getId() {
        return id;
    }
    public void setId(String id) {
        // Normalize: keep null as null, otherwise strip surrounding whitespace.
        this.id = id == null ? null : id.trim();
    }
    public Boolean getIsOpen() {
        return isOpen;
    }
    public void setIsOpen(Boolean isOpen) {
        this.isOpen = isOpen;
    }
    public Byte getType() {
        return type;
    }
    public void setType(Byte type) {
        this.type = type;
    }
    public String getTextbook() {
        return textbook;
    }
    public void setTextbook(String textbook) {
        this.textbook = textbook == null ? null : textbook.trim();
    }
    public String getObject() {
        return object;
    }
    public void setObject(String object) {
        this.object = object == null ? null : object.trim();
    }
    public String getOutline() {
        return outline;
    }
    public void setOutline(String outline) {
        this.outline = outline == null ? null : outline.trim();
    }
    // NOTE(review): getOpen/setOpen duplicate getIsOpen/setIsOpen for the
    // same field (presumably for a second bean-naming convention used by a
    // framework) — consider consolidating once callers are confirmed.
    public Boolean getOpen() {
        return isOpen;
    }
    public void setOpen(Boolean open) {
        isOpen = open;
    }
    public String getPdfPath() {
        return pdfPath;
    }
    public void setPdfPath(String pdfPath) {
        this.pdfPath = pdfPath;
    }
    public int getSchool() {
        return school;
    }
    public void setSchool(int school) {
        this.school = school;
    }
    /**
     * Static factory assembling a fully-populated {@code Info}.
     * NOTE(review): {@code type} is narrowed with a plain {@code (byte)} cast,
     * so values outside -128..127 silently overflow — confirm the valid range.
     */
    public static Info build(Boolean isOpen, int type, String textbook, String object,
                             String outline, String fileName, int school) {
        Info info = new Info();
        info.setIsOpen(isOpen);
        info.setType((byte) type);
        info.setTextbook(textbook);
        info.setObject(object);
        info.setOutline(outline);
        info.setPdfPath(fileName);
        info.setSchool(school);
        return info;
    }
}<file_sep>package com.ccnu.bishe.examtation.utils;
import java.io.File;
import java.io.IOException;
import org.springframework.web.multipart.MultipartFile;
/**
* Created by Joban on 2017/1/23.
*/
public class FileUtils {
    /**
     * Stores an uploaded file under {@code basicPath}, prefixing the original
     * file name with a timestamp (from the project's {@code DateUtils}).
     *
     * @param multipartFile the uploaded file; may be {@code null} or empty
     * @param basicPath     target directory, created if it does not exist
     * @return the stored file name ({@code timestamp + originalName}), or an
     *         empty string when nothing was uploaded
     * @throws IOException if the file cannot be written
     */
    public static String uploadFile(MultipartFile multipartFile, String basicPath)
            throws IOException {
        // Bug fix: the original called getOriginalFilename() BEFORE the null
        // check, throwing NullPointerException for a null multipartFile.
        if (multipartFile == null || multipartFile.getSize() <= 0) {
            return "";
        }
        // Security: strip any client-supplied path components. Some browsers
        // (old IE) send the full client path, and a crafted name containing
        // ".." could otherwise escape basicPath.
        String originalFilename = new File(multipartFile.getOriginalFilename()).getName();
        String time = new DateUtils().getTimeString();
        File dir = new File(basicPath);
        if (!dir.exists()) {
            dir.mkdirs();
        }
        String storedName = time + originalFilename;
        multipartFile.transferTo(new File(basicPath + File.separator + storedName));
        return storedName; // file name as persisted on disk
    }
}
<file_sep><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.ccnu.bishe</groupId>
<artifactId>examtation</artifactId>
<packaging>war</packaging>
<version>0.0.1-SNAPSHOT</version>
<name>examtation Maven Webapp</name>
<url>http://maven.apache.org</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<spring.version>4.1.1.RELEASE</spring.version>
<jackson2.version>2.4.3</jackson2.version>
<slf4j.version>1.7.7</slf4j.version>
<logback.version>1.0.7</logback.version>
<activemq.version>5.10.0</activemq.version>
</properties>
<dependencies>
<!-- Compile/runtime dependencies -->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>2.5</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>jstl</groupId>
<artifactId>jstl</artifactId>
<version>1.2</version>
</dependency>
<!-- spring dependencies -->
<dependency>
<groupId>cglib</groupId>
<artifactId>cglib</artifactId>
<version>3.1</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jms</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
<exclusions>
<exclusion>
<artifactId>commons-logging</artifactId>
<groupId>commons-logging</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-aspects</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context-support</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjweaver</artifactId>
<version>1.6.12</version>
</dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>
<version>1.6.12</version>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
<version>2.2.1</version>
</dependency>
<dependency>
<groupId>org.mybatis</groupId>
<artifactId>mybatis-spring</artifactId>
<version>1.2.2</version>
</dependency>
<dependency>
<groupId>org.mybatis</groupId>
<artifactId>mybatis</artifactId>
<version>3.2.8</version>
</dependency>
<!-- jackson -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson2.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson2.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson2.version}</version>
</dependency>
<!-- common -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.2.1</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-io</artifactId>
<version>1.3.2</version>
</dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
<version>1.9</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.9</version>
</dependency>
<dependency>
<groupId>commons-fileupload</groupId>
<artifactId>commons-fileupload</artifactId>
<version>1.3.1</version>
</dependency>
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>1.8.3</version>
</dependency>
<dependency>
<groupId>commons-dbcp</groupId>
<artifactId>commons-dbcp</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>com.maxmind.geoip2</groupId>
<artifactId>geoip2</artifactId>
<version>2.1.0</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.3.5</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpmime</artifactId>
<version>4.3.5</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>4.3.2</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore-nio</artifactId>
<version>4.3.2</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpasyncclient</artifactId>
<version>4.0.2</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient-cache</artifactId>
<version>4.3.5</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpasyncclient-cache</artifactId>
<version>4.0.2</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.4</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>16.0.1</version>
</dependency>
<!-- velocity -->
<dependency>
<groupId>org.apache.velocity</groupId>
<artifactId>velocity</artifactId>
<version>1.7</version>
</dependency>
<dependency>
<groupId>org.apache.velocity</groupId>
<artifactId>velocity-tools</artifactId>
<version>2.0</version>
<exclusions>
<exclusion>
<artifactId>struts-core</artifactId>
<groupId>org.apache.struts</groupId>
</exclusion>
<exclusion>
<artifactId>commons-validator</artifactId>
<groupId>commons-validator</groupId>
</exclusion>
<exclusion>
<artifactId>commons-logging</artifactId>
<groupId>commons-logging</groupId>
</exclusion>
<exclusion>
<artifactId>struts-taglib</artifactId>
<groupId>org.apache.struts</groupId>
</exclusion>
<exclusion>
<artifactId>struts-tiles</artifactId>
<groupId>org.apache.struts</groupId>
</exclusion>
</exclusions>
</dependency>
<!-- database connect -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>1.0.9</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.33</version>
</dependency>
<!-- validator -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator</artifactId>
<version>5.1.2.Final</version>
</dependency>
<!-- activeMQ -->
<dependency>
<groupId>org.apache.activemq</groupId>
<artifactId>activemq-core</artifactId>
<version>5.7.0</version>
</dependency>
<dependency>
<groupId>org.apache.activemq</groupId>
<artifactId>activemq-pool</artifactId>
<version>${activemq.version}</version>
</dependency>
<!-- logging -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
<version>1.7.10</version>
</dependency>
<!-- MongoDB jongo -->
<dependency>
<groupId>org.jongo</groupId>
<artifactId>jongo</artifactId>
<version>1.2</version>
</dependency>
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
<version>2.13.0</version>
</dependency>
<!-- test dependencies -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>opensymphony</groupId>
<artifactId>sitemesh</artifactId>
<version>2.4.2</version>
</dependency>
<!-- 发送邮件 -->
<!--<dependency>
<groupId>javax.mail</groupId>
<artifactId>mailapi</artifactId>
<version>1.4.3</version>
</dependency>-->
<dependency>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>javax.activation</groupId>
<artifactId>activation</artifactId>
<version>1.0.2</version>
</dependency>
</dependencies>
<build>
<finalName>examtation</finalName>
<resources>
<resource>
<directory>src/main/resources</directory>
<filtering>true</filtering>
<includes>
<include>*.properties</include>
<include>logback.xml</include>
</includes>
</resource>
<resource>
<directory>src/main/resources</directory>
<filtering>false</filtering>
<includes>
<include>**</include>
</includes>
<excludes>
<exclude>*.properties</exclude>
<exclude>logback.xml</exclude>
</excludes>
</resource>
</resources>
<filters>
<filter>${resource.filter}</filter>
</filters>
<!--<finalName>mybatis_generator</finalName>
<plugins>
<plugin>
<groupId>org.mybatis.generator</groupId>
<artifactId>mybatis-generator-maven-plugin</artifactId>
<version>1.3.2</version>
</plugin>
</plugins>-->
<plugins>
<plugin>
<groupId>org.apache.tomcat.maven</groupId>
<artifactId>tomcat7-maven-plugin</artifactId>
<configuration>
<charset>UTF-8</charset>
<port>8080</port>
<path>/examtation</path>
<uriEncoding>UTF-8</uriEncoding>
<systemProperties>
<druid.log.stmt.executableSql>true</druid.log.stmt.executableSql>
</systemProperties>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>dev</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<properties>
<resource.filter>filters/dev.properties</resource.filter>
</properties>
</profile>
<profile>
<id>test</id>
<properties>
<resource.filter>filters/test.properties</resource.filter>
</properties>
</profile>
<profile>
<id>production</id>
<properties>
<resource.filter>filters/production.properties</resource.filter>
</properties>
</profile>
</profiles>
</project>
<file_sep>package com.ccnu.bishe.examtation.dao.impl;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.PostConstruct;
import com.ccnu.bishe.examtation.dao.IBaseDao;
import com.ccnu.bishe.examtation.db.BaseMapper;
import com.ccnu.bishe.examtation.utils.ObjectUtils;
/**
 * Generic CRUD base for DAOs: resolves its MyBatis mapper once after bean
 * construction and delegates every standard operation to it.
 *
 * @param <T> entity type handled by the concrete DAO
 */
public abstract class BaseDaoImpl<T> implements IBaseDao<T> {

    /** Mapper doing the actual SQL work; cached from the subclass via init(). */
    private BaseMapper<T> mapper;

    /** Caches the subclass-provided mapper once the bean is constructed. */
    @PostConstruct
    public void init() {
        mapper = getMapper();
    }

    /** Inserts the entity via the mapper's selective insert. */
    public void save(T obj) {
        mapper.insertSelective(obj);
    }

    /** Batch lookup by primary keys; an id list deemed empty yields an empty list. */
    public List<T> findByIds(List<Long> idList) {
        if (ObjectUtils.isEmpty(idList)) {
            return new ArrayList<T>();
        }
        return mapper.selectByPrimaryKeys(idList);
    }

    /** Deletes the row with the given primary key. */
    public void delete(Long id) {
        mapper.deleteByPrimaryKey(id);
    }

    /** Selective update by primary key. */
    public void update(T obj) {
        mapper.updateByPrimaryKeySelective(obj);
    }

    /** Loads one entity by primary key. */
    public T findById(Long id) {
        return mapper.selectByPrimaryKey(id);
    }

    /** Concrete DAOs supply the mapper used by the generic operations above. */
    public abstract BaseMapper<T> getMapper();
}
<file_sep>package com.ccnu.bishe.examtation.mapper;
import com.ccnu.bishe.examtation.annotation.Mapper;
import com.ccnu.bishe.examtation.db.BaseMapper;
import com.ccnu.bishe.examtation.model.Resource;
import java.util.List;
import java.util.Map;
import org.apache.ibatis.annotations.Param;
/**
 * MyBatis mapper for {@code Resource} rows, extending the generic CRUD
 * {@code BaseMapper} with info- and user-scoped queries.
 */
@Mapper
public interface ResourceMapper extends BaseMapper<Resource> {
    /** Loads one resource by its string primary key. */
    Resource selectByPrimaryKey(String id);
    /** Lists the resources attached to the given exam info id. */
    List<Resource> getResourceList(@Param("infoId") String infoId);
    /** Role code of the given user — presumably a RoleCode value; confirm in the XML mapping. */
    int getRoleByUserId(@Param("userId") String userId);
    /** Info ids the given user is associated with. */
    List<String> getInfoIdsByUserId(@Param("userId") String userId);
    /** Pages resources for a user; accepted keys of {@code params} are defined by the XML mapping — TODO document. */
    List<Resource> getResourceByUserId(Map<String, Object> params);
    // NOTE(review): name breaks lowerCamelCase, but it is likely referenced by
    // the MyBatis XML mapping, so renaming is unsafe without checking there.
    /** Total count matching getResourceByUserId's filter. */
    long CountResourceByUserId(Map<String, Object> params);
}<file_sep>package com.ccnu.bishe.examtation.dto;
import com.ccnu.bishe.examtation.model.Detail;
import java.util.Date;
/**
 * Transfer object pairing a {@link Detail} row with a per-stage "finished"
 * flag for the UI.
 *
 * Created by Joban on 2017/2/21.
 */
public class DetailDto {
    private String id;        // detail primary key
    private String docPath;   // stored .doc file name
    private String pdfPath;   // stored .pdf file name
    private Boolean isHidden; // hidden flag
    private Date isVisible;   // visibility date (field name kept for bean compatibility)
    private String infoId;    // owning Info id
    private Boolean isFinish; // whether the current workflow stage is done

    public String getId() { return id; }

    public void setId(String id) { this.id = id; }

    public String getDocPath() { return docPath; }

    public void setDocPath(String docPath) { this.docPath = docPath; }

    public String getPdfPath() { return pdfPath; }

    public void setPdfPath(String pdfPath) { this.pdfPath = pdfPath; }

    public Boolean getHidden() { return isHidden; }

    public void setHidden(Boolean hidden) { this.isHidden = hidden; }

    public Date getIsVisible() { return isVisible; }

    public void setIsVisible(Date isVisible) { this.isVisible = isVisible; }

    public String getInfoId() { return infoId; }

    public void setInfoId(String infoId) { this.infoId = infoId; }

    public Boolean getFinish() { return isFinish; }

    public void setFinish(Boolean finish) { this.isFinish = finish; }

    /**
     * Builds the DTO from a Detail row plus the finished flag. Only id,
     * docPath, pdfPath and infoId are copied from the row; isHidden and
     * isVisible stay unset here.
     */
    public static DetailDto build(Detail detail, Boolean isFinish) {
        DetailDto dto = new DetailDto();
        dto.setFinish(isFinish);
        dto.setId(detail.getId());
        dto.setDocPath(detail.getDocPath());
        dto.setPdfPath(detail.getPdfPath());
        dto.setInfoId(detail.getInfoId());
        return dto;
    }
}
<file_sep>package com.ccnu.bishe.examtation.test;
import org.springframework.stereotype.Controller;
/**
 * Placeholder controller for testing SMTP mail sending; the working code is
 * kept below as a comment because, per the original author's note, it only
 * runs when placed under the controller package.
 *
 * Created by Joban on 2017/1/30.
 */
@Controller
public class EmailTest {
    // Usable under the controller package (translated from the original Chinese comment).
    /*@Autowired
    private JavaMailSenderImpl mailSender;
    @Autowired
    private SimpleMailMessage simpleMailMessage;
    @RequestMapping(value = "/test/mail", method = RequestMethod.GET)
    public void testEmail() {
        MyEmail mail = new MyEmail(mailSender, simpleMailMessage);
        mail.sendMail("Spring SMTP Mail Subject", "Spring SMTP Mail Text", "<EMAIL>");
    }*/
}
<file_sep>package com.ccnu.bishe.examtation.model;
/**
 * Association row linking a user (with a role) to an exam info record and,
 * optionally, a feedback entry.
 */
public class Contact {
    private String id;         // primary key; trimmed on set
    private String userId;     // linked user id; trimmed on set
    private Byte role;         // role code — presumably a RoleCode value; TODO confirm
    private String infoId;     // linked exam info id; trimmed on set
    private String feedbackId; // optional feedback id (not trimmed, unlike the other ids)
    public String getId() {
        return id;
    }
    /** Null-safe setter that trims surrounding whitespace. */
    public void setId(String id) {
        this.id = id == null ? null : id.trim();
    }
    public String getUserId() {
        return userId;
    }
    /** Null-safe setter that trims surrounding whitespace. */
    public void setUserId(String userId) {
        this.userId = userId == null ? null : userId.trim();
    }
    public Byte getRole() {
        return role;
    }
    public void setRole(Byte role) {
        this.role = role;
    }
    // NOTE(review): lowercase "info" in these accessor names breaks the bean
    // convention, but renaming would break existing callers (e.g. build()).
    public String getinfoId() {
        return infoId;
    }
    /** Null-safe setter that trims surrounding whitespace. */
    public void setinfoId(String infoId) {
        this.infoId = infoId == null ? null : infoId.trim();
    }
    public String getFeedbackId() {
        return feedbackId;
    }
    public void setFeedbackId(String feedbackId) {
        this.feedbackId = feedbackId;
    }
    /** Factory populating every field; id/userId/infoId are trimmed by their setters. */
    public static Contact build(String id, String userId, Byte role, String infoId,
            String feedbackId) {
        Contact contact = new Contact();
        contact.setId(id);
        contact.setUserId(userId);
        contact.setRole(role);
        contact.setinfoId(infoId);
        contact.setFeedbackId(feedbackId);
        return contact;
    }
}<file_sep>package com.ccnu.bishe.examtation.model;
import java.util.Date;
/**
 * Exam "detail" row: stored DOC/PDF file names and visibility controls,
 * owned by one Info record.
 */
public class Detail {
    private String id;        // primary key; trimmed on set
    private String docPath;   // stored .doc file name; trimmed on set
    private String pdfPath;   // stored .pdf file name; trimmed on set
    private Boolean isHidden; // hidden flag
    private Date isVisible;   // a Date despite the boolean-style name —
                              // presumably when the detail becomes visible; TODO confirm
    private String infoId;    // owning Info id; trimmed on set
    public Detail() {
    }
    /** Convenience constructor setting only the id (no trimming here, unlike setId). */
    public Detail(String id) {
        this.id = id;
    }
    public String getId() {
        return id;
    }
    /** Null-safe setter that trims surrounding whitespace. */
    public void setId(String id) {
        this.id = id == null ? null : id.trim();
    }
    public String getDocPath() {
        return docPath;
    }
    /** Null-safe setter that trims surrounding whitespace. */
    public void setDocPath(String docPath) {
        this.docPath = docPath == null ? null : docPath.trim();
    }
    public String getPdfPath() {
        return pdfPath;
    }
    /** Null-safe setter that trims surrounding whitespace. */
    public void setPdfPath(String pdfPath) {
        this.pdfPath = pdfPath == null ? null : pdfPath.trim();
    }
    public Boolean getIsHidden() {
        return isHidden;
    }
    public void setIsHidden(Boolean isHidden) {
        this.isHidden = isHidden;
    }
    public Date getIsVisible() {
        return isVisible;
    }
    public void setIsVisible(Date isVisible) {
        this.isVisible = isVisible;
    }
    public String getInfoId() {
        return infoId;
    }
    /** Null-safe setter that trims surrounding whitespace. */
    public void setInfoId(String infoId) {
        this.infoId = infoId == null ? null : infoId.trim();
    }
}<file_sep>package com.ccnu.bishe.examtation.dao;
import com.ccnu.bishe.examtation.dao.impl.BaseDaoImpl;
import com.ccnu.bishe.examtation.db.BaseMapper;
import com.ccnu.bishe.examtation.mapper.InfoMapper;
import com.ccnu.bishe.examtation.model.Info;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
/**
 * DAO for {@link Info} rows: inherits generic CRUD from {@link BaseDaoImpl}
 * and adds paging, counting and id-listing queries via {@link InfoMapper}.
 *
 * Created by Joban on 2017/1/22.
 */
@Repository
public class InfoDao extends BaseDaoImpl<Info> implements IBaseDao<Info> {
    @Autowired
    private InfoMapper infoMapper;
    /** Supplies the mapper that BaseDaoImpl's generic CRUD delegates to. */
    @Override
    public BaseMapper<Info> getMapper() {
        return infoMapper;
    }
    /** Pages info rows; accepted parameter keys are defined by the mapper XML — TODO document. */
    public List<Info> queryByPage(Map<String, Object> params) {
        return infoMapper.queryByPage(params);
    }
    /** Total row count for the same filter parameters as queryByPage. */
    public Long getCount(Map<String, Object> params) {
        return infoMapper.getCount(params);
    }
    /** Plain (non-selective) insert of a new info row. */
    public void insertInfo(Info info) {
        infoMapper.insert(info);
    }
    /** Ids of all info rows belonging to one school. */
    public List<String> getInfoIdsBySchool(int school) {
        return infoMapper.getInfoIdsBySchool(school);
    }
    /** Ids of every info row. */
    public List<String> getAllInfoIds() {
        return infoMapper.getAllInfoIds();
    }
    /** Loads one info row by its String primary key (unlike the Long-keyed generic CRUD). */
    public Info selectByPrimaryKey(String id) {
        return infoMapper.selectByPrimaryKey(id);
    }
}
<file_sep>package com.ccnu.bishe.examtation.enu;
/**
 * Workflow roles of the exam system, each pairing a numeric code with a
 * Chinese display name.
 *
 * Created by Joban on 2017/1/20.
 */
public enum RoleCode {
    Secretary(0, "教学秘书"),
    Leader(1, "院系领导"),
    Owner(2, "出卷人"),
    Tourist(3, "阅卷人");

    private Integer code;       // numeric role code
    private String description; // human-readable (Chinese) role name

    private RoleCode(Integer code, String description) {
        this.code = code;
        this.description = description;
    }

    public Integer getCode() {
        return code;
    }

    // Setters retained for compatibility, although mutating enum constants is discouraged.
    public void setCode(Integer code) {
        this.code = code;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    /** Resolves a role description from its numeric code; null when unknown. */
    public static String getDescByCode(int code) {
        for (RoleCode candidate : values()) {
            if (candidate.code == code) {
                return candidate.description;
            }
        }
        return null;
    }

    @Override
    public String toString() {
        return super.toString();
    }
}
<file_sep>package com.ccnu.bishe.examtation.utils;
import java.util.UUID;
/**
 * UUID helper.
 *
 * Create By Joban on 2017年1月2日上午9:13:41
 */
public class UUIDUtils {
    /**
     * Returns a random (type 4) UUID rendered as a 32-character lowercase
     * hex string with the dashes removed.
     */
    public static String getUUID() {
        // replace() does a literal substitution; the original used
        // replaceAll(), which needlessly compiled "-" as a regular expression.
        return UUID.randomUUID().toString().replace("-", "");
    }
}
<file_sep>// 数据统计
// Statistics page wiring (comments translated from the original Chinese).
$(function(){
    // bootbox configuration: use the Chinese locale for dialog buttons.
    bootbox.setDefaults({
        locale: "zh_CN"
    });
    //++ Shop traffic
    // Select-all toggle for banners (currently disabled).
    // $.selectAllToggle("banner-check", "banner-check-item");
    //++ Search statistics
    // Select-all toggles for search words and search records.
    $.selectAllToggle("search-word-th", "search-word-item");
    $.selectAllToggle("search-record-th", "search-record-item");
    // Tab switching between search words and search records.
    $.tabToggle("search-word-item", "search-wct-item");
    // Keep the current tab position across page refreshes.
    $.setPos("search-word-item", "search-wct-item");
    // Delete selected search words (confirm first; alert when nothing selected).
    $("#btn-sws-del").click(function(){
        var values = $.checkboxVal("search-word-item");
        if(values.length > 0){
            bootbox.confirm("请确定是否删除?", function(result) {
                // $.get();
                //alert(result);
            });
        }else{
            bootbox.alert("请先选择,然后才能进行删除操作!", function() {
                // ----------
            });
        }
    });
});<file_sep>package com.ccnu.bishe.examtation.dto;
import java.util.Date;
/**
 * Read model for displaying one feedback entry: title, body, author name,
 * author role code and the time it was filed.
 *
 * Created by Joban on 2017/2/23.
 */
public class FeedbackDto {
    private String title;      // feedback title
    private String content;    // feedback body
    private String name;       // author display name
    private int role;          // author role code
    private Date feedbackTime; // when the feedback was filed

    public String getTitle() { return title; }

    public void setTitle(String title) { this.title = title; }

    public String getContent() { return content; }

    public void setContent(String content) { this.content = content; }

    public String getName() { return name; }

    public void setName(String name) { this.name = name; }

    public int getRole() { return role; }

    public void setRole(int role) { this.role = role; }

    public Date getFeedbackTime() { return feedbackTime; }

    public void setFeedbackTime(Date feedbackTime) { this.feedbackTime = feedbackTime; }

    /** Factory populating every field; kept private (currently unused by outside code). */
    private static FeedbackDto build(String title, String content, String name, int role,
            Date feedbackTime) {
        FeedbackDto dto = new FeedbackDto();
        dto.setFeedbackTime(feedbackTime);
        dto.setRole(role);
        dto.setName(name);
        dto.setContent(content);
        dto.setTitle(title);
        return dto;
    }
}
<file_sep>package com.ccnu.bishe.examtation.dao;
import com.ccnu.bishe.examtation.mapper.ProcessMapper;
import com.ccnu.bishe.examtation.model.Process;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
/**
 * DAO for per-exam workflow "process" rows (current stage + finished flag),
 * delegating straight to {@link ProcessMapper}.
 *
 * Created by Joban on 2017/2/1.
 */
@Repository
public class ProcessDao {
    @Autowired
    private ProcessMapper processMapper;
    /** Inserts a new process row. */
    public void insert(Process process) {
        processMapper.insert(process);
    }
    /** Loads the process row for the given exam info id. */
    public Process getListByInfoId(String infoId) {
        return processMapper.getListByInfoId(infoId);
    }
    /** Current workflow stage for the exam. */
    public int getStageByInfoId(String infoId) {
        return processMapper.getStageByInfoId(infoId);
    }
    /** Updates the workflow stage — presumably advances it; exact SQL lives in the mapper XML. */
    public void updateStageByInfoId(String infoId) {
        processMapper.updateStageByInfoId(infoId);
    }
    /** Whether the workflow for this exam is marked finished. */
    public Boolean getIsFinishByInfoId(String infoId) {
        return processMapper.getIsFinishByInfoId(infoId);
    }
    /** Updates both stage and finished flag — exact SQL lives in the mapper XML. */
    public void updateStageAndFinish(String infoId) {
        processMapper.updateStageAndFinish(infoId);
    }
}
|
007e6c8cbd31ad56f41c2d609e4f0863b852d1de
|
[
"JavaScript",
"Markdown",
"Maven POM",
"INI",
"Java"
] | 31
|
JavaScript
|
JobanCai/examtation
|
0b6b3aaec798279b8b18237a6379195343509747
|
21be8a19be8f46098576c88d8627a007682c419b
|
refs/heads/main
|
<file_sep>package com.hgn.blog.repositories;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import com.hgn.blog.entities.Post;
/**
 * Spring Data JPA repository for {@link Post} entities; standard CRUD and
 * paging are inherited from {@link JpaRepository}.
 *
 * NOTE(review): "Respository" is a typo for "Repository". Renaming would
 * touch every injection site, so it is flagged here rather than changed.
 */
@Repository
public interface PostRespository extends JpaRepository<Post, Long>{
}
<file_sep>spring.datasource.url=jdbc:h2:mem:testdb
spring.datasource.username=sa
spring.datasource.password=
spring.h2.console.enabled=true
spring.h2.console.path=/h2-console
server.error.include-stacktrace=never
spring.jpa.hibernate.ddl-auto=create<file_sep>package com.hgn.blog.entities;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import java.util.Date;
// Blog post JPA entity (Portuguese names: titulo=title, autor=author,
// data=publication date, conteudo=body, upvotes=vote count).
@Entity
public class Post {
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;         // surrogate primary key, auto-generated
    private String titulo;   // post title
    private String autor;    // author name
    private Date data;       // publication date (legacy java.util.Date API)
    @Column(length = 100000) // oversized column so long post bodies fit
    private String conteudo; // post body
    private Integer upvotes; // upvote counter
    /** No-arg constructor required by JPA. */
    public Post() {
    }
    /** Convenience constructor populating every field. */
    public Post(Long id, String titulo, String autor, Date data, String conteudo, Integer upvotes) {
        super();
        this.id = id;
        this.titulo = titulo;
        this.autor = autor;
        this.data = data;
        this.conteudo = conteudo;
        this.upvotes = upvotes;
    }
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getTitulo() {
        return titulo;
    }
    public void setTitulo(String titulo) {
        this.titulo = titulo;
    }
    public String getAutor() {
        return autor;
    }
    public void setAutor(String autor) {
        this.autor = autor;
    }
    public Date getData() {
        return data;
    }
    public void setData(Date data) {
        this.data = data;
    }
    public String getConteudo() {
        return conteudo;
    }
    public void setConteudo(String conteudo) {
        this.conteudo = conteudo;
    }
    public Integer getUpvotes() {
        return upvotes;
    }
    public void setUpvotes(Integer upvotes) {
        this.upvotes = upvotes;
    }
}
<file_sep>package com.hgn.blog.exceptions;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ResponseStatus;
import java.util.NoSuchElementException;
@ResponseStatus(HttpStatus.NOT_FOUND)
public class PostNaoEncontradoException extends NoSuchElementException {
public PostNaoEncontradoException(String mensagem) {
super(mensagem);
}
}
|
aacd552a1d61bdd208310a26ce1ac337274396d9
|
[
"Java",
"INI"
] | 4
|
Java
|
humbertoguerrer/blog-project
|
e9b93a940548f97794f34ec054abaf47ac442704
|
0f0879105edf2e7f00d7e24bec4864ebc5a7c4a4
|
refs/heads/master
|
<repo_name>rafaagahbiche/grafaa<file_sep>/PersonalSite.WebUI/Controllers/ArticleController.cs
namespace PersonalSite.WebUI.Controllers
{
using PersonalSite.Service.Abstract;
using PersonalSite.Service.ViewModel;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web.Mvc;
using Kaliko;
using System.Net;
using System.IO;
public class ArticleController : Controller
{
private readonly IArticleService articleService;
public ArticleController(IArticleService articleService)
{
this.articleService = articleService;
}
[HttpGet]
public PartialViewResult DeletePage(int pageId, int articleId)
{
if (pageId > 0)
{
articleService.DeleteArticlePage(pageId);
}
var pageCount = this.articleService.PageCount(articleId);
if (pageCount > 0)
{
var firstPageViewModel = this.articleService.GetFirstPage(articleId);
return PartialView("CreateArticlePage", firstPageViewModel);
}
return PartialView("CreateArticlePage", new PageViewModel() { PageId = -1, ParentArticleId = articleId });
}
[HttpGet]
public PartialViewResult ShowPageContent(int pageId, int articleId)
{
var pageViewModel = this.articleService.GetArticlePageById(pageId);
if (pageViewModel != null)
{
return PartialView("CreateArticlePage", pageViewModel);
}
return PartialView("CreateArticlePage", new PageViewModel() { PageId = -1, ParentArticleId = articleId });
}
[HttpGet]
public PartialViewResult AddNewTab(int articleId)
{
return PartialView("PageTab", new PageViewModel() { PageId = -1, ParentArticleId = articleId });
}
[HttpPost]
public PartialViewResult SavePage(PageViewModel pageViewModel)
{
if (pageViewModel == null || pageViewModel.PageId == 0)
{
return PartialView("EditPageInfos", new PageViewModel() { PageId = -1, ParentArticleId = -1 });
}
// Old article + New Page
if (pageViewModel.PageId == -1)
{
pageViewModel.PageId = this.articleService.CreateArticlePage(pageViewModel);
if (pageViewModel.PageId == -1)
{
return PartialView("EditPageInfos", new PageViewModel() { PageId = -1, ParentArticleId = -1 });
}
else
{
return PartialView("EditPageInfos", pageViewModel);
}
}
else
{
var updateSucceeded = this.articleService.UpdatePageContent(pageViewModel);
return PartialView("EditPageInfos", pageViewModel);
}
}
#region Details
public ActionResult Details()
{
return View("Error");
}
[Route("pages/{title}-{id}")]
public ActionResult Details(int id)
{
try
{
var article = this.articleService.Get(id);
if (article != null)
{
return View(article);
}
return View();
}
catch (Exception ex)
{
Kaliko.Logger.Write(ex, Logger.Severity.Critical);
}
return View();
}
#endregion
#region Create
[Authorize]
public ActionResult Create()
{
return View();
}
[HttpPost, ValidateInput(false)]
public ActionResult Create(Service.ViewModel.ArticleViewModel articleViewModel
, ICollection<int> articlePageIds)
{
try
{
if (articleViewModel != null)
{
articleViewModel.Id = this.articleService.Create(articleViewModel);
if (articlePageIds != null)
{
foreach (var pageId in articlePageIds)
{
this.articleService.AddArticleToPage(pageId, articleViewModel.Id);
}
}
return RedirectToAction("Edit", new { id = articleViewModel.Id });
}
return View("Create");
}
catch
{
return View();
}
}
#endregion
#region Edit article
[Authorize]
public ActionResult Edit()
{
return View("Error");
}
[Authorize]
[Route("pages/edit/{id}")]
public ActionResult Edit(int id)
{
// TODO if article not found
if (!id.Equals(0) && !id.Equals(-1))
{
var article = this.articleService.Get(id);
if (article != null)
{
return View(article);
}
else
{
Logger.Write("Article is null");
return View("Error");
}
}
return View();
}
// POST: Article/Edit/5
[HttpPost, ValidateInput(false)]
[Route("pages/edit/{id}")]
public ActionResult Edit(ArticleViewModel article, FormCollection collection)
{
try
{
if (collection != null && collection["articlePageIds"] != null)
{
var pageIds = collection["articlePageIds"].Split(',');
var s = Array.ConvertAll(pageIds, Int32.Parse);
var newPageIds = article.PagesIds.Except(s);
if (newPageIds.Count() > 0)
{
newPageIds.ToList().ForEach(id =>
{
articleService.AddArticleToPage(article.Id, id);
});
}
}
article = this.articleService.Update(article);
return View(article);
}
catch(Exception ex)
{
Logger.Write(ex, Logger.Severity.Major);
return View();
}
}
#endregion
#region Delete article
public ActionResult Delete(int id)
{
return View();
}
[HttpPost]
public ActionResult Delete(int id, FormCollection collection)
{
try
{
// TODO: Add delete logic here
return RedirectToAction("Index");
}
catch
{
return View();
}
}
#endregion
}
}
<file_sep>/PersonalSite.WebUI/Controllers/ArticlePageController.cs
namespace PersonalSite.WebUI.Controllers
{
using PersonalSite.Service.Abstract;
using PersonalSite.Service.ViewModel;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
public class ArticlePageController : Controller
{
private readonly IArticlePageService service;
public ArticlePageController(IArticlePageService service)
{
this.service = service;
}
public ActionResult DisplayEmpty()
{
return View("Create");
}
public ActionResult Delete(FormCollection postedFormData)
{
bool success = false;
var pageId = postedFormData["id"];
if (pageId != null)
{
success = this.service.Delete(Convert.ToInt32(pageId));
}
return Json(new
{
Status = success
}); ;
}
public ActionResult Create(FormCollection postedFormData)
{
if (postedFormData == null)
{
return PartialView("CreateArticlePage", new PageViewModel { PageId = -1 });
}
string content = postedFormData["content"];
string pageId = postedFormData["id"];
if (string.IsNullOrEmpty(pageId))
{
return PartialView("CreateArticlePage", new PageViewModel { PageId = -1 });
}
// Old article + New Page
if (Convert.ToInt32(pageId) == -1)
{
string articleId = postedFormData["articleId"];
if (!string.IsNullOrEmpty(articleId))
{
var articleViewModel = new ArticleViewModel();
// this.service.GetArticleById(Convert.ToInt32(articleId));
if (articleViewModel != null)
{
var articlePageViewModel = new PageViewModel
{
PageContent = content,
Article = articleViewModel,
ParentArticleId = articleViewModel.Id
};
int id = this.service.Create(articlePageViewModel);
string status = id.Equals(-1) ? "succeded" : "failed";
return Json(new
{
Status = status,
Id = id,
Content = HttpUtility.HtmlDecode(content)
});
}
}
}
// Old article + Update Page
var oldArticlePageViewModel = this.service.GetArticlePageById(Convert.ToInt32(pageId));
oldArticlePageViewModel.PageContent = content;
this.service.UpdatePageContent(oldArticlePageViewModel);
return Json(new
{
Status = "Page updated",
Id = pageId,
Content = HttpUtility.HtmlDecode(content)
});
}
}
}<file_sep>/PersonalSite.WebUI/Scripts/menu.js
// Set by the jQuery Mobile "mobileinit" hook below; stays false on desktop.
var isMobile = false;
// jQuery Mobile fires "mobileinit" before it boots; disable its ajax/link
// handling so the site keeps normal navigation.
$(document).on("mobileinit", function () {
    isMobile = true;
    $.mobile.ignoreContentEnabled = true;
    $.mobile.ajaxEnabled = false;
    $.mobile.linkBindingEnabled = false;
});
$(document).ready(function () {
    // Desktop: park the menu list just above the viewport so it can slide down.
    if (!isMobile) {
        var topValue = '-' + $('div.menu-list').outerHeight() + 'px';
        $('div.menu-list').css({ top: topValue });
    }
    var Menu = {
        // Cached hamburger elements: the clickable header and its three bars.
        el: {
            ham: $('.menu-header'),
            menuTop: $('.menu-top'),
            menuMiddle: $('.menu-middle'),
            menuBottom: $('.menu-bottom')
        },
        init: function () {
            Menu.bindUIactions();
        },
        bindUIactions: function () {
            Menu.el.ham
                .on(
                    'click',
                    function (event) {
                        Menu.activateMenu(event);
                        event.preventDefault();
                        if (isMobile) {
                            // Mobile: toggle between the closed/open CSS classes.
                            if ($('div.menu').find('div.menu-list').length > 0) {
                                $('div.menu-list').removeClass('menu-list').addClass('menu-list-open');
                            }
                            else {
                                if ($('div.menu').find('div.menu-list-open').length > 0 ) {
                                    $('div.menu-list-open').removeClass('menu-list-open').addClass('menu-list');
                                }
                            }
                        }
                        else {
                            // Desktop: slide the list in/out by animating its top offset.
                            var topValue = '-' + $('div.menu-list').outerHeight() + "px";
                            if ($('div.menu-list').position().top == 0) {
                                $('div.menu-list').animate({ top: topValue }, 'slow');
                            }
                            else {
                                $('div.menu-list').animate({ top: '0' }, 'slow');
                            }
                        }
                    }
                );
        },
        // Morphs the hamburger bars into the "open" state (and back).
        activateMenu: function () {
            Menu.el.menuTop.toggleClass('menu-top-click');
            Menu.el.menuMiddle.toggleClass('menu-middle-click');
            Menu.el.menuBottom.toggleClass('menu-bottom-click');
        }
    };
    Menu.init();
});
<file_sep>/PersonalSite.WebUI/Controllers/ControllerHelper.cs
namespace PersonalSite.WebUI.Controllers
{
using System.IO;
using System.Web.Mvc;
public static class ControllerHelper
{
/// <summary>
/// Controller helper to write partial view in output
/// </summary>
/// <param name="viewName">Partial view name</param>
/// <param name="model">ViewModel used</param>
/// <returns></returns>
public static string RenderViewToString(this Controller self, string viewName, object model)
{
self.ViewData.Model = model;
using (var sw = new StringWriter())
{
var viewResult = ViewEngines.Engines.FindPartialView(self.ControllerContext, viewName);
var viewContext = new ViewContext(self.ControllerContext, viewResult.View, self.ViewData, self.TempData, sw);
viewResult.View.Render(viewContext, sw);
viewResult.ViewEngine.ReleaseView(self.ControllerContext, viewResult.View);
return sw.ToString();
}
}
}
}<file_sep>/PersonalSite.Infra/App_Start/NinjectWebCommon.cs
[assembly: WebActivatorEx.PreApplicationStartMethod(typeof(PersonalSite.Infra.App_Start.NinjectWebCommon), "Start")]
[assembly: WebActivatorEx.ApplicationShutdownMethodAttribute(typeof(PersonalSite.Infra.App_Start.NinjectWebCommon), "Stop")]
namespace PersonalSite.Infra.App_Start
{
using System;
using System.Web;
using Microsoft.Web.Infrastructure.DynamicModuleHelper;
using Ninject;
using Ninject.Web.Common;
using PersonalSite.Domain.Abstract;
using PersonalSite.Domain.Concrete;
using PersonalSite.Service.Concrete;
using PersonalSite.DataAccess;
using PersonalSite.Service.Abstract;
    /// <summary>
    /// Ninject bootstrap entry points, invoked via WebActivatorEx before
    /// application start and at shutdown (see the assembly attributes above).
    /// </summary>
    public static class NinjectWebCommon
    {
        private static readonly Bootstrapper bootstrapper = new Bootstrapper();
        /// <summary>
        /// Starts the application: registers the Ninject HTTP modules and
        /// initializes the bootstrapper with the kernel factory.
        /// </summary>
        public static void Start()
        {
            DynamicModuleUtility.RegisterModule(typeof(OnePerRequestHttpModule));
            DynamicModuleUtility.RegisterModule(typeof(NinjectHttpModule));
            bootstrapper.Initialize(CreateKernel);
        }
        /// <summary>
        /// Stops the application.
        /// </summary>
        public static void Stop()
        {
            bootstrapper.ShutDown();
        }
        /// <summary>
        /// Creates the kernel that will manage your application.
        /// </summary>
        /// <returns>The created kernel.</returns>
        private static IKernel CreateKernel()
        {
            var kernel = new StandardKernel();
            try
            {
                kernel.Bind<Func<IKernel>>().ToMethod(ctx => () => new Bootstrapper().Kernel);
                kernel.Bind<IHttpModule>().To<HttpApplicationInitializationHttpModule>();
                RegisterServices(kernel);
                return kernel;
            }
            catch
            {
                // Dispose the partially-built kernel, then let the failure surface.
                kernel.Dispose();
                throw;
            }
        }
        /// <summary>
        /// Load your modules or register your services here!
        /// </summary>
        /// <param name="kernel">The kernel.</param>
        private static void RegisterServices(IKernel kernel)
        {
            // One DbContext per HTTP request; the remaining bindings are transient.
            kernel.Bind<IDbContextFactory>().To<DbContextFactory>().InRequestScope();
            kernel.Bind<IArticleService>().To<ArticleService>();
            kernel.Bind<IRepo<Article>>().To<Repo<Article>>();
            kernel.Bind<IRepo<ArticlePage>>().To<Repo<ArticlePage>>();
            kernel.Bind<IAuthProvider>().To<FormsAuthProvider>();
        }
    }
}
<file_sep>/PersonalSite.Test/RepositoryTest.cs
namespace PersonalSite.Test
{
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Moq;
using PersonalSite.DataAccess;
using PersonalSite.Domain.Abstract;
using PersonalSite.Domain.Concrete;
using System.Linq;
using System.Data.Entity;
using System.Collections.Generic;
[TestClass]
public class RepositoryTest
{
private Repo<Article> articleRepository;
private Mock<DbSet<Article>> mockSet;
private Mock<GrafaaEntities> mockEntities;
private Mock<IDbContextFactory> dbContextMock;
[TestInitialize()]
public void MyTestInitialize()
{
mockSet = new Mock<DbSet<Article>>();
mockEntities = new Mock<GrafaaEntities>();
mockEntities.Setup(m => m.Articles).Returns(mockSet.Object);
dbContextMock = new Mock<IDbContextFactory>();
dbContextMock.Setup<GrafaaEntities>(x => x.GetContext()).Returns(mockEntities.Object);
this.articleRepository = new Repo<Article>(dbContextMock.Object);
}
[TestMethod]
public void InsertTest()
{
mockSet.Setup<Article>(m => m.Create()).Returns(new Article());
mockEntities.Setup(m => m.Set<Article>()).Returns(mockSet.Object);
var insertedArticle = this.articleRepository.Insert(new Article() { Id = 1, Title = "a1" });
this.articleRepository.Save();
mockSet.Verify(m => m.Add(It.IsAny<Article>()), Times.Once());
mockEntities.Verify(m => m.SaveChanges(), Times.Once());
}
[TestMethod]
public void QueryTest()
{
var data = new List<Article>
{
new Article { Id = 1, Title = "BBB" },
new Article { Id = 2, Title = "ZZZ" },
new Article { Id = 3, Title = "AAA" },
}.AsQueryable();
mockSet.As<IQueryable<Article>>().Setup(m => m.Provider).Returns(data.Provider);
mockSet.As<IQueryable<Article>>().Setup(m => m.Expression).Returns(data.Expression);
mockSet.As<IQueryable<Article>>().Setup(m => m.ElementType).Returns(data.ElementType);
mockEntities.Setup(m => m.Set<Article>())
.Returns(mockSet.Object);
mockSet.Setup(m => m.Find(It.IsAny<object[]>()))
.Returns<object[]>(i => data.SingleOrDefault(x => x.Id.Equals(i)));
//(int i) => data.SingleOrDefault(x => x.Id.Equals(i))
//{ return data.SingleOrDefault(x => x.Id.Equals(i)); });
var articles = articleRepository.GetAll();
Assert.IsNotNull(articles);
Assert.AreEqual(3, articles.Count());
Assert.AreEqual("AAA", articleRepository.Get(3).Title);
Assert.AreEqual("BBB", articleRepository.Get(1).Title);
Assert.AreEqual("ZZZ", articleRepository.Get(2).Title);
}
[TestMethod]
public void DeleteTest()
{
//var dbContextMock = new Mock<IDbContextFactory>();
//dbContextMock.Setup(x => x.GetContext()).Returns<GrafaaEntities>(t => t);
//var articleRepository = new Repo<Article>(dbContextMock.Object);
var articleToDelete = articleRepository.Where(x => x.Title.Equals("articleTest")).FirstOrDefault();
articleRepository.Delete(articleToDelete);
articleRepository.Save();
var deletedArticle = articleRepository.Where(x => x.Title.Equals("articleTest")).FirstOrDefault();
Assert.IsNull(deletedArticle);
}
}
}
<file_sep>/PersonalSite.Infra/NinjectControllerFactory.cs
//using System;
//using System.Web.Mvc;
//using System.Web.Routing;
//using Ninject.Web.Common;
//using PersonalSite.Service.Abstract;
//using PersonalSite.Service.Concrete;
//using PersonalSite.Domain.Abstract;
//using PersonalSite.DataAccess;
//using PersonalSite.Domain.Concrete;
//using Microsoft.Web.Infrastructure.DynamicModuleHelper;
//using Ninject;
//namespace PersonalSite.Infra
//{
// public class NinjectControllerFactory : DefaultControllerFactory
// {
// private IKernel ninjectKernel;
// public NinjectControllerFactory()
// {
// ninjectKernel = new StandardKernel();
// AddBindings();
// }
// protected override IController GetControllerInstance(RequestContext
// requestContext, Type controllerType)
// {
// return controllerType == null
// ? null
// : (IController)ninjectKernel.Get(controllerType);
// }
// private void AddBindings()
// {
// //ninjectKernel.Bind<IDbContextFactory>().To<DbContextFactory>().InRequestScope();
// //ninjectKernel.Bind<IArticleService>().To<ArticleService>();
// //ninjectKernel.Bind<IRepo<Article>>().To<Repo<Article>>();
// //ninjectKernel.Bind<IRepo<ArticlePage>>().To<Repo<ArticlePage>>();
// //ninjectKernel.Bind<IAuthProvider>().To<FormsAuthProvider>();
// // default binding for everything except unit of work
// //ninjectKernel.Bind(x => x.FromAssembliesMatching("*")
// // .SelectAllClasses().Excluding<DbContextFactory>().BindDefaultInterface());
// }
// }
//}
<file_sep>/PersonalSite.WebUI/Controllers/AccountController.cs
namespace PersonalSite.WebUI.Controllers
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using PersonalSite.Service.Abstract;
using PersonalSite.Service.ViewModel;
public class AccountController : Controller
{
private readonly IAuthProvider authProvider;
public AccountController(IAuthProvider authProvider)
{
this.authProvider = authProvider;
}
public ActionResult Login()
{
return View();
}
[HttpPost]
public ActionResult Login(LoginViewModel loginViewModel, string returnUrl)
{
if (ModelState.IsValid)
{
if (authProvider.Authenticate(loginViewModel.UserName, loginViewModel.Password))
{
return Redirect(returnUrl ?? Url.Action("Index", ""));
}
else
{
ModelState.AddModelError(string.Empty, "Incorrect username or password");
return View();
}
}
else
{
return View();
}
}
}
}<file_sep>/PersonalSite.Domain/Concrete/Repo.cs
namespace PersonalSite.Domain.Concrete
{
using System;
using System.Linq;
using System.Linq.Expressions;
using Omu.ValueInjecter;
using PersonalSite.Domain.Abstract;
using PersonalSite.DataAccess;
public class Repo<T> : IRepo<T> where T : Entity, new()
{
protected readonly GrafaaEntities dbContext;
public Repo(IDbContextFactory dbContextFactory)
{
dbContext = dbContextFactory.GetContext();
}
public void Save()
{
dbContext.SaveChanges();
}
public T Insert(T o)
{
var t = dbContext.Set<T>().Create();
t.InjectFrom(o);
dbContext.Set<T>().Add(t);
return t;
}
public virtual void Delete(T o)
{
dbContext.Set<T>().Remove(o);
}
public T Get(int id)
{
T entity = null;
entity = dbContext.Set<T>().Find(id);
return entity;
}
public virtual IQueryable<T> Where(Expression<Func<T, bool>> predicate)
{
//if (typeof(IDel).IsAssignableFrom(typeof(T)))
// return IoC.Resolve<IDelRepo<T>>().Where(predicate, showDeleted);
return dbContext.Set<T>().Where(predicate);
}
public virtual IQueryable<T> GetAll()
{
//if (typeof(IDel).IsAssignableFrom(typeof(T)))
// return IoC.Resolve<IDelRepo<T>>().GetAll();
return dbContext.Set<T>();
}
}
}
<file_sep>/PersonalSite.Test/ServiceTest.cs
namespace PersonalSite.Test
{
using System;
using System.Text;
using System.Collections.Generic;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Moq;
using PersonalSite.DataAccess;
using PersonalSite.Domain.Abstract;
using PersonalSite.Domain.Concrete;
using System.Linq;
using System.Data.Entity;
using PersonalSite.Service.Concrete;
/// <summary>
/// Summary description for ServiceTest
/// </summary>
[TestClass]
public class ServiceTest
{
#region Additional test attributes
//
// You can use the following additional attributes as you write your tests:
//
// Use ClassInitialize to run code before running the first test in the class
// [ClassInitialize()]
// public static void MyClassInitialize(TestContext testContext) { }
//
// Use ClassCleanup to run code after all tests in a class have run
// [ClassCleanup()]
// public static void MyClassCleanup() { }
//
// Use TestInitialize to run code before running each test
// [TestInitialize()]
// public void MyTestInitialize() { }
//
// Use TestCleanup to run code after each test has run
// [TestCleanup()]
// public void MyTestCleanup() { }
//
#endregion
private IEnumerable<Article> articles;
private Mock<IRepo<Article>> mockArticleRepository;
private Mock<IRepo<ArticlePage>> mockArticlePageRepository;
private ArticleService mockService;
[TestInitialize()]
public void TestInitialize()
{
articles = new List<Article>()
{
new Article(){
Id = 1,
Title = "a1"
},
new Article(){
Id = 2,
Title = "a2"
},
new Article(){
Id = 3,
Title = "a3"
},
};
var dbContextMock = new Mock<IDbContextFactory>();
dbContextMock.Setup<GrafaaEntities>(x => x.GetContext());
mockArticleRepository = new Mock<IRepo<Article>>();
mockArticlePageRepository = new Mock<IRepo<ArticlePage>>();
mockService = new ArticleService(mockArticleRepository.Object, mockArticlePageRepository.Object);
}
[TestMethod]
public void TestGetMethod()
{
mockArticleRepository.Setup(m => m.Get(It.IsAny<int>()))
.Returns((int i) => articles.SingleOrDefault(a => a.Id.Equals(i)));
var article = mockService.Get(1);
Assert.IsNotNull(article);
}
[TestMethod]
public void TestGetAllMethod()
{
mockArticleRepository.Setup(m => m.GetAll())
.Returns(articles.AsQueryable);
var articlesFromRepository = mockService.GetAll();
Assert.IsNotNull(articlesFromRepository);
Assert.IsTrue(articlesFromRepository.Count() == 3);
}
}
}
<file_sep>/PersonalSite.Service/Abstract/IArticleService.cs
using PersonalSite.Service.ViewModel;
using System.Collections.Generic;
namespace PersonalSite.Service.Abstract
{
    /// <summary>
    /// Application service exposing read and write operations for articles
    /// and their pages, working in terms of view models.
    /// </summary>
    public interface IArticleService
    {
        /// <summary>Gets the article with the given id (including pages), or null when lookup fails.</summary>
        ArticleViewModel Get(int id);
        /// <summary>Gets a single article page by its id, or null when lookup fails.</summary>
        PageViewModel GetArticlePageById(int id);
        /// <summary>Enumerates all articles as shallow view models.</summary>
        IEnumerable<ArticleViewModel> GetAll();
        /// <summary>Gets the first page of the article with the given id.</summary>
        PageViewModel GetFirstPage(int id);
        /// <summary>Counts the pages attached to the article with the given id.</summary>
        int PageCount(int id);
        /// <summary>Creates a new article; returns its id, or -1 on failure.</summary>
        int Create(ArticleViewModel articleViewModel);
        /// <summary>Creates a new article page; returns its id, or -1 on failure.</summary>
        int CreateArticlePage(PageViewModel articlePageViewModel);
        /// <summary>Deletes the article page with the given id; true on success.</summary>
        bool DeleteArticlePage(int id);
        /// <summary>Attaches the page identified by articlePageId to the given article.</summary>
        void AddArticleToPage(int articlePageId, int articleId);
        /// <summary>Replaces the content of an existing page; true on success.</summary>
        bool UpdatePageContent(PageViewModel articlePageViewModel);
        /// <summary>Updates title/description/category of an existing article.</summary>
        ArticleViewModel Update(ArticleViewModel articleViewModel);
    }
}
<file_sep>/PersonalSite.WebUI/Scripts/backoffice/jquery.page-article.save.js
(function ($) {
    // Registers a "save" sub-plugin on the page-article plugin namespace.
    // NOTE(review): assumes $.fn.pagearticle has already been defined by the
    // main page-article plugin — confirm script load order.
    $.fn.pagearticle.save = function (options) {
        var defaults = {
            loadingSelector: null
        };
        var base = this;
        this.init = function () {
        };
        return this.each(function () {
            base.init();
        });
    };
// BUG FIX: the IIFE was never invoked (the file ended with "});"), so the
// plugin function was never registered on jQuery.
})(jQuery);
/// <autosync enabled="true" />
/// <reference path="modernizr-2.6.2.js" />
/// <reference path="jquery-1.10.2.js" />
/// <reference path="bootstrap.js" />
/// <reference path="respond.js" />
/// <reference path="jquery.validate.js" />
/// <reference path="jquery.validate.unobtrusive.js" />
/// <reference path="paginate/img.js" />
/// <reference path="paginate/jquery.paginate.js" />
/// <reference path="dragdrop.js" />
/// <reference path="forecast.js" />
/// <reference path="tinymce/plugins/advlist/plugin.min.js" />
/// <reference path="tinymce/plugins/anchor/plugin.min.js" />
/// <reference path="tinymce/plugins/autolink/plugin.min.js" />
/// <reference path="tinymce/plugins/autoresize/plugin.min.js" />
/// <reference path="tinymce/plugins/autosave/plugin.min.js" />
/// <reference path="tinymce/plugins/bbcode/plugin.min.js" />
/// <reference path="tinymce/plugins/charmap/plugin.min.js" />
/// <reference path="tinymce/plugins/code/plugin.min.js" />
/// <reference path="tinymce/plugins/colorpicker/plugin.min.js" />
/// <reference path="tinymce/plugins/contextmenu/plugin.min.js" />
/// <reference path="tinymce/plugins/directionality/plugin.min.js" />
/// <reference path="tinymce/plugins/emoticons/plugin.min.js" />
/// <reference path="tinymce/plugins/example/plugin.min.js" />
/// <reference path="tinymce/plugins/example_dependency/plugin.min.js" />
/// <reference path="tinymce/plugins/fullpage/plugin.min.js" />
/// <reference path="tinymce/plugins/fullscreen/plugin.min.js" />
/// <reference path="tinymce/plugins/hr/plugin.min.js" />
/// <reference path="tinymce/plugins/image/plugin.min.js" />
/// <reference path="tinymce/plugins/importcss/plugin.min.js" />
/// <reference path="tinymce/plugins/insertdatetime/plugin.min.js" />
/// <reference path="tinymce/plugins/layer/plugin.min.js" />
/// <reference path="tinymce/plugins/legacyoutput/plugin.min.js" />
/// <reference path="tinymce/plugins/link/plugin.min.js" />
/// <reference path="tinymce/plugins/lists/plugin.min.js" />
/// <reference path="tinymce/plugins/media/plugin.min.js" />
/// <reference path="tinymce/plugins/nonbreaking/plugin.min.js" />
/// <reference path="tinymce/plugins/noneditable/plugin.min.js" />
/// <reference path="tinymce/plugins/pagebreak/plugin.min.js" />
/// <reference path="tinymce/plugins/paste/plugin.min.js" />
/// <reference path="tinymce/plugins/preview/plugin.min.js" />
/// <reference path="tinymce/plugins/print/plugin.min.js" />
/// <reference path="tinymce/plugins/save/plugin.min.js" />
/// <reference path="tinymce/plugins/searchreplace/plugin.min.js" />
/// <reference path="tinymce/plugins/spellchecker/plugin.min.js" />
/// <reference path="tinymce/plugins/tabfocus/plugin.min.js" />
/// <reference path="tinymce/plugins/table/plugin.min.js" />
/// <reference path="tinymce/plugins/template/plugin.min.js" />
/// <reference path="tinymce/plugins/textcolor/plugin.min.js" />
/// <reference path="tinymce/plugins/textpattern/plugin.min.js" />
/// <reference path="tinymce/plugins/visualblocks/plugin.min.js" />
/// <reference path="tinymce/plugins/visualchars/plugin.min.js" />
/// <reference path="tinymce/plugins/wordcount/plugin.min.js" />
/// <reference path="tinymce/themes/modern/theme.min.js" />
/// <reference path="backoffice/createpage.js" />
/// <reference path="menu.js" />
/// <reference path="backoffice/createpage-min.js" />
/// <reference path="jquery.bxslider.min.js" />
/// <reference path="jquery-ui.js" />
/// <reference path="tracking.js" />
/// <reference path="backoffice/jquery.page-article.delete.js" />
/// <reference path="backoffice/jquery.page-article.save.js" />
/// <reference path="backoffice/jquery.article.paginate.js" />
/// <reference path="jquery.unobtrusive-ajax.min.js" />
/// <reference path="tinymce/utils/validate.js" />
/// <reference path="tinymce/tiny_mce_src.js" />
/// <reference path="tinymce/tiny_mce_popup.js" />
/// <reference path="tinymce/tiny_mce.js" />
/// <reference path="tinymce/themes/simple/langs/en.js" />
/// <reference path="tinymce/themes/simple/editor_template_src.js" />
/// <reference path="tinymce/themes/simple/editor_template.js" />
/// <reference path="tinymce/themes/advanced/langs/en_dlg.js" />
/// <reference path="tinymce/themes/advanced/js/source_editor.js" />
/// <reference path="tinymce/themes/advanced/editor_template_src.js" />
/// <reference path="tinymce/themes/advanced/editor_template.js" />
/// <reference path="tinymce/plugins/xhtmlxtras/langs/en_dlg.js" />
/// <reference path="tinymce/plugins/xhtmlxtras/js/ins.js" />
/// <reference path="tinymce/plugins/xhtmlxtras/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/xhtmlxtras/editor_plugin.js" />
/// <reference path="tinymce/plugins/wordcount/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/wordcount/editor_plugin.js" />
/// <reference path="tinymce/plugins/visualchars/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/visualchars/editor_plugin.js" />
/// <reference path="tinymce/plugins/template/langs/en_dlg.js" />
/// <reference path="tinymce/plugins/template/js/template.js" />
/// <reference path="tinymce/plugins/template/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/template/editor_plugin.js" />
/// <reference path="tinymce/plugins/table/langs/en_dlg.js" />
/// <reference path="tinymce/plugins/table/js/table.js" />
/// <reference path="tinymce/plugins/table/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/table/editor_plugin.js" />
/// <reference path="tinymce/plugins/tabfocus/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/tabfocus/editor_plugin.js" />
/// <reference path="tinymce/plugins/style/langs/en_dlg.js" />
/// <reference path="tinymce/plugins/style/js/props.js" />
/// <reference path="tinymce/plugins/style/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/style/editor_plugin.js" />
/// <reference path="tinymce/plugins/spellchecker/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/spellchecker/editor_plugin.js" />
/// <reference path="tinymce/plugins/searchreplace/langs/en_dlg.js" />
/// <reference path="tinymce/plugins/searchreplace/js/searchreplace.js" />
/// <reference path="tinymce/plugins/searchreplace/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/searchreplace/editor_plugin.js" />
/// <reference path="tinymce/plugins/save/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/save/editor_plugin.js" />
/// <reference path="tinymce/plugins/print/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/print/editor_plugin.js" />
/// <reference path="tinymce/plugins/preview/jscripts/embed.js" />
/// <reference path="tinymce/plugins/preview/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/preview/editor_plugin.js" />
/// <reference path="tinymce/plugins/paste/langs/en_dlg.js" />
/// <reference path="tinymce/plugins/paste/js/pasteword.js" />
/// <reference path="tinymce/plugins/paste/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/paste/editor_plugin.js" />
/// <reference path="tinymce/plugins/pagebreak/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/pagebreak/editor_plugin.js" />
/// <reference path="tinymce/plugins/noneditable/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/noneditable/editor_plugin.js" />
/// <reference path="tinymce/plugins/nonbreaking/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/nonbreaking/editor_plugin.js" />
/// <reference path="tinymce/plugins/media/langs/en_dlg.js" />
/// <reference path="tinymce/plugins/media/js/media.js" />
/// <reference path="tinymce/plugins/media/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/media/editor_plugin.js" />
/// <reference path="tinymce/plugins/lists/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/lists/editor_plugin.js" />
/// <reference path="tinymce/plugins/legacyoutput/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/legacyoutput/editor_plugin.js" />
/// <reference path="tinymce/plugins/layer/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/layer/editor_plugin.js" />
/// <reference path="tinymce/plugins/insertdatetime/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/insertdatetime/editor_plugin.js" />
/// <reference path="tinymce/plugins/inlinepopups/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/inlinepopups/editor_plugin.js" />
/// <reference path="tinymce/plugins/iespell/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/fullscreen/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/fullscreen/editor_plugin.js" />
/// <reference path="tinymce/plugins/fullpage/langs/en_dlg.js" />
/// <reference path="tinymce/plugins/fullpage/js/fullpage.js" />
/// <reference path="tinymce/plugins/fullpage/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/fullpage/editor_plugin.js" />
/// <reference path="tinymce/plugins/example_dependency/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/example_dependency/editor_plugin.js" />
/// <reference path="tinymce/plugins/example/langs/en_dlg.js" />
/// <reference path="tinymce/plugins/example/js/dialog.js" />
/// <reference path="tinymce/plugins/example/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/example/editor_plugin.js" />
/// <reference path="tinymce/plugins/emotions/langs/en_dlg.js" />
/// <reference path="tinymce/plugins/emotions/js/emotions.js" />
/// <reference path="tinymce/plugins/emotions/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/emotions/editor_plugin.js" />
/// <reference path="tinymce/plugins/directionality/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/directionality/editor_plugin.js" />
/// <reference path="tinymce/plugins/contextmenu/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/contextmenu/editor_plugin.js" />
/// <reference path="tinymce/plugins/bbcode/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/bbcode/editor_plugin.js" />
/// <reference path="tinymce/plugins/autosave/langs/en.js" />
/// <reference path="tinymce/plugins/autosave/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/autosave/editor_plugin.js" />
/// <reference path="tinymce/plugins/autoresize/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/autoresize/editor_plugin.js" />
/// <reference path="tinymce/plugins/autolink/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/autolink/editor_plugin.js" />
/// <reference path="tinymce/plugins/advlist/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/advlist/editor_plugin.js" />
/// <reference path="tinymce/plugins/advlink/langs/en_dlg.js" />
/// <reference path="tinymce/plugins/advlink/js/advlink.js" />
/// <reference path="tinymce/plugins/advlink/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/advlink/editor_plugin.js" />
/// <reference path="tinymce/plugins/advimage/langs/en_dlg.js" />
/// <reference path="tinymce/plugins/advimage/js/image.js" />
/// <reference path="tinymce/plugins/advimage/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/advimage/editor_plugin.js" />
/// <reference path="tinymce/plugins/advhr/langs/en_dlg.js" />
/// <reference path="tinymce/plugins/advhr/js/rule.js" />
/// <reference path="tinymce/plugins/advhr/editor_plugin_src.js" />
/// <reference path="tinymce/plugins/advhr/editor_plugin.js" />
/// <reference path="tinymce/langs/en.js" />
/// <reference path="tinymce/jquery.tinymce.js" />
/// <reference path="tinymce/utils/editable_selects.js" />
/// <reference path="tinymce/utils/form_utils.js" />
/// <reference path="tinymce/utils/mctabs.js" />
/// <reference path="tinymce/themes/advanced/langs/en.js" />
/// <reference path="tinymce/themes/advanced/js/link.js" />
/// <reference path="tinymce/themes/advanced/js/image.js" />
/// <reference path="tinymce/themes/advanced/js/color_picker.js" />
/// <reference path="tinymce/themes/advanced/js/charmap.js" />
/// <reference path="tinymce/themes/advanced/js/anchor.js" />
/// <reference path="tinymce/themes/advanced/js/about.js" />
/// <reference path="tinymce/plugins/xhtmlxtras/js/element_common.js" />
/// <reference path="tinymce/plugins/xhtmlxtras/js/del.js" />
/// <reference path="tinymce/plugins/xhtmlxtras/js/cite.js" />
/// <reference path="tinymce/plugins/xhtmlxtras/js/attributes.js" />
/// <reference path="tinymce/plugins/xhtmlxtras/js/acronym.js" />
/// <reference path="tinymce/plugins/xhtmlxtras/js/abbr.js" />
/// <reference path="tinymce/plugins/table/js/row.js" />
/// <reference path="tinymce/plugins/table/js/merge_cells.js" />
/// <reference path="tinymce/plugins/table/js/cell.js" />
/// <reference path="tinymce/plugins/paste/js/pastetext.js" />
/// <reference path="tinymce/plugins/media/js/embed.js" />
/// <reference path="tinymce/plugins/iespell/editor_plugin.js" />
/// <reference path="tinymce/plugins/example/langs/en.js" />
<file_sep>/PersonalSite.Service/Concrete/ArticleService.cs
namespace PersonalSite.Service.Concrete
{
using PersonalSite.DataAccess;
using PersonalSite.Domain.Abstract;
using PersonalSite.Service.Abstract;
using PersonalSite.Service.ViewModel;
using PersonalSite.Service.Extension;
using System;
using System.Linq;
using System.Collections.Generic;
using Kaliko;
public class ArticleService : IArticleService
{
private readonly IRepo<Article> articleRepo;
private readonly IRepo<ArticlePage> articlePageRepo;
public ArticleService(IRepo<Article> articleRepo, IRepo<ArticlePage> articlePageRepo)
{
this.articleRepo = articleRepo;
this.articlePageRepo = articlePageRepo;
}
public ArticleViewModel Get(int id)
{
ArticleViewModel articleViewModel = null;
try
{
var articleObject = this.articleRepo.Get(id);
articleViewModel = articleObject.GetViewModel(true);
}
catch (Exception ex)
{
Kaliko.Logger.Write(ex, Logger.Severity.Critical);
}
return articleViewModel;
}
public IEnumerable<ArticleViewModel> GetAll()
{
var articles = this.articleRepo.GetAll();
foreach (var article in articles)
{
yield return article.GetViewModel(false);
}
}
public PageViewModel GetFirstPage(int id)
{
PageViewModel firstPageViewModel = null;
try
{
var pages = this.articlePageRepo.Where(x => x.Article.Id == id);
firstPageViewModel = pages.FirstOrDefault().GetViewModel();
}
catch (Exception ex)
{
Kaliko.Logger.Write(ex, Logger.Severity.Critical);
}
return firstPageViewModel;
}
public int PageCount(int id)
{
var articleObject = this.articleRepo.Get(id);
return articleObject.ArticlePages.Count;
}
public PageViewModel GetArticlePageById(int id)
{
PageViewModel articlePageViewModel = null;
try
{
var articlePageObject = this.articlePageRepo.Get(id);
if (articlePageObject != null)
{
articlePageViewModel = articlePageObject.GetViewModel();
}
}
catch (Exception ex)
{
Console.Write(ex.Message);
}
return articlePageViewModel;
}
public int Create(ArticleViewModel articleViewModel)
{
int newArticleId = -1;
try
{
var articleObject = new Article()
{
Description = articleViewModel.Description,
Title = articleViewModel.Title,
Category = articleViewModel.Category
};
var newArticleObject = this.articleRepo.Insert(articleObject);
this.articleRepo.Save();
newArticleId = newArticleObject.Id;
}
catch (Exception ex)
{
Console.Write(ex.Message);
}
return newArticleId;
}
public bool UpdatePageContent(PageViewModel articlePageViewModel)
{
var updateSucceeded = false;
try
{
var articlePageObject = this.articlePageRepo.Get(articlePageViewModel.PageId);
if (articlePageObject != null)
{
articlePageObject.PageContent = articlePageViewModel.PageContent;
this.articlePageRepo.Save();
updateSucceeded = true;
}
}
catch (Exception ex)
{
Kaliko.Logger.Write(ex, Logger.Severity.Critical);
}
return updateSucceeded;
}
public ArticleViewModel Update(ArticleViewModel articleViewModel)
{
var articleObject = this.articleRepo.Get(articleViewModel.Id);
if (articleObject != null)
{
try
{
articleObject.Description = articleViewModel.Description;
articleObject.Title = articleViewModel.Title;
articleObject.Category = articleViewModel.Category;
articleRepo.Save();
}
catch (Exception ex)
{
Kaliko.Logger.Write(ex, Logger.Severity.Critical);
}
}
articleViewModel = articleObject.GetViewModel(true);
return articleViewModel;
}
public void AddArticleToPage(int articlePageId, int articleId)
{
var articlePageObject = articlePageRepo.Get(articlePageId);
if (articlePageObject != null)
{
articlePageObject.Article = this.articleRepo.Get(articleId);
articlePageRepo.Save();
}
}
public int CreateArticlePage(PageViewModel articlePageViewModel)
{
int newPageId = -1;
try
{
var articlePageObject = new ArticlePage()
{
PageContent = articlePageViewModel.PageContent
};
var articleObject = articleRepo.Get(articlePageViewModel.ParentArticleId);
if (articleObject != null)
{
articlePageObject.Article = articleObject;
articlePageObject.ParentArticle = articlePageViewModel.ParentArticleId;
}
var newArticlePageObj = this.articlePageRepo.Insert(articlePageObject);
this.articlePageRepo.Save();
newPageId = newArticlePageObj.Id;
}
catch (Exception ex)
{
Kaliko.Logger.Write(ex, Logger.Severity.Critical);
}
return newPageId;
}
public bool DeleteArticlePage(int id)
{
bool success = false;
try
{
var articlePageObject = this.articlePageRepo.Get(id);
this.articlePageRepo.Delete(articlePageObject);
this.articlePageRepo.Save();
success = true;
}
catch (Exception ex)
{
Console.Write(ex.Message);
}
return success;
}
}
}
<file_sep>/PersonalSite.Service/ViewModel/PageViewModel.cs
namespace PersonalSite.Service.ViewModel
{
using System;
using System.ComponentModel.DataAnnotations;
using System.Web;
using System.Web.Mvc;
    /// <summary>
    /// View model for a single article page; the page body is stored
    /// HTML-encoded and exposed decoded for the TinyMCE editor.
    /// </summary>
    public class PageViewModel
    {
        // Database id of the page; -1 denotes a not-yet-persisted page
        // (see the back-office controller's Create action).
        public int PageId { get; set; }
        // HTML-encoded page body as persisted.
        [UIHint("tinymce_jquery_full"), AllowHtml]
        public string PageContent { get; set; }
        // Decoded view over PageContent: reading decodes, writing re-encodes,
        // so the stored value always stays HTML-encoded.
        [UIHint("tinymce_jquery_full"), AllowHtml]
        public string DecodedPageContent
        {
            get { return HttpUtility.HtmlDecode(PageContent); }
            set { PageContent = HttpUtility.HtmlEncode(value); }
        }
        // Parent article this page belongs to.
        public ArticleViewModel Article { get; set; }
        // Id of the parent article (duplicated for form round-trips).
        public int ParentArticleId { get; set; }
    }
}
<file_sep>/PersonalSite.Domain/Concrete/DbContextFactory.cs
namespace PersonalSite.Domain.Concrete
{
    using PersonalSite.DataAccess;
    using PersonalSite.Domain.Abstract;
    using System;
    using System.Data.Entity;

    /// <summary>
    /// Hands out a single shared <see cref="GrafaaEntities"/> context,
    /// created eagerly when the factory is constructed.
    /// </summary>
    public class DbContextFactory : IDbContextFactory
    {
        // One EF context per factory instance (field initializer replaces the
        // original explicit constructor; creation timing is identical).
        private readonly GrafaaEntities dbContext = new GrafaaEntities();

        /// <summary>Returns the shared EF context held by this factory.</summary>
        public GrafaaEntities GetContext()
        {
            return dbContext;
        }
    }
}
<file_sep>/PersonalSite.Test/ControllerTest.cs
namespace PersonalSite.Test
{
    using Microsoft.VisualStudio.TestTools.UnitTesting;
    using PersonalSite.WebUI.Controllers;
    using Moq;
    using PersonalSite.Service.Abstract;
    using PersonalSite.DataAccess;
    using PersonalSite.Domain.Abstract;
    using PersonalSite.Service.Concrete;
    using PersonalSite.Domain.Concrete;

    /// <summary>
    /// Unit tests exercising the service layer with mocked repositories.
    /// </summary>
    [TestClass]
    public class ControllerTest
    {
        // Exercises ArticleService.Get with a mocked article repository.
        [TestMethod]
        public void ArticleServiceTest()
        {
            var articleRepo = new Mock<IRepo<Article>>();
            // NOTE(review): Returns<Article>(x => x) declares the callback's
            // argument type as Article, but IRepo<Article>.Get takes an int.
            // Moq matches the callback to the setup's argument types, so this
            // setup likely fails at invocation time rather than echoing the
            // argument -- confirm and consider Returns((int x) => ...).
            articleRepo.Setup(a => a.Get(It.Is<int>(x => x > 0))).Returns<Article>(x => x);
            var articlePageRepo = new Mock<IRepo<ArticlePage>>();
            var articleService = new ArticleService(articleRepo.Object, articlePageRepo.Object);
            var article = articleService.Get(1);
            // NOTE(review): this only holds if ArticleService.Get propagates a
            // null repository result as null -- verify against the service.
            Assert.AreEqual(article, null);
        }
    }
}
<file_sep>/PersonalSite.Service/Abstract/IArticlePageService.cs
namespace PersonalSite.Service.Abstract
{
    using PersonalSite.Service.ViewModel;

    /// <summary>
    /// Service contract for reading and mutating individual article pages.
    /// </summary>
    public interface IArticlePageService
    {
        // Loads the page with the given id as a view model.
        PageViewModel GetArticlePageById(int id);
        // Creates a page; the concrete service returns the new id, -1 on failure.
        int Create(PageViewModel pageViewModel);
        // Overwrites an existing page's content; true on success.
        bool UpdatePageContent(PageViewModel pageViewModel);
        // Deletes the page with the given id; true on success.
        bool Delete(int id);
    }
}
<file_sep>/PersonalSite.Domain/Abstract/IRepo.cs
using System;
using System.Linq;
using System.Linq.Expressions;

namespace PersonalSite.Domain.Abstract
{
    /// <summary>
    /// Minimal generic repository over an entity type <typeparamref name="T"/>.
    /// </summary>
    public interface IRepo<T>
    {
        // Fetch a single entity by primary key.
        T Get(int id);
        // Deferred query over all entities.
        IQueryable<T> GetAll();
        // Deferred filtered query.
        IQueryable<T> Where(Expression<Func<T, bool>> predicate);
        // Stage a new entity; Save() commits.
        T Insert(T o);
        // Stage a removal; Save() commits.
        void Delete(T o);
        // Commit pending changes to the underlying store.
        void Save();
    }
}
<file_sep>/PersonalSite.WebUI/Scripts/backoffice/jquery.article.paginate.js
(function ($) {
    // jQuery plugin: horizontal pager for a <ul> of article pages. Each <li>
    // is sized to one "page" width and navigation slides the list left/right
    // by whole-page widths via animated margin-left.
    $.fn.paginatearticle = function (options) {
        var defaults = {
            pagercontainerSelector: null,  // wrapper around the pager controls
            nextSelector: null,            // "next page" control
            prevSelector: null,            // "previous page" control
            pagerSelector: null,           // element whose first <span> shows the page number
            listcontainerSelector: null,   // outer container whose height is synced
            itemslistSelector: null,       // element whose width defines one page
            currentpage: 1                 // 1-based page shown initially
        }
        var settings = $.extend({}, defaults, options);
        var base = this;

        // Slide so that `page` (1-based) becomes visible, then update the
        // pager label. No-op movement when page === currentpage.
        function navigateTo(page) {
            var slidepx = $(settings.itemslistSelector).width();
            if (settings.currentpage > page) {
                // slide to right
                base.animate({ marginLeft: "+=" + (settings.currentpage - page) * slidepx }, { duration: 'slow' });
            }
            else {
                if (settings.currentpage < page) {
                    // slide to left
                    base.animate({ marginLeft: "-=" + (page - settings.currentpage) * slidepx }, { duration: 'slow' });
                }
            }
            settings.currentpage = page;
            if (settings.pagerSelector) {
                $(settings.pagerSelector).children('span').first().html(settings.currentpage);
            }
        }

        // Wire next/prev. Out-of-range clicks only set the list colour --
        // presumably visual feedback; TODO confirm intent.
        function setOnClickEvents() {
            if (settings.nextSelector) {
                $(settings.nextSelector).on('click', function () {
                    if (settings.currentpage < base.children('li').length) {
                        navigateTo(settings.currentpage + 1);
                    }
                    else {
                        base.css('color', 'black');
                    }
                });
            }
            if (settings.prevSelector) {
                $(settings.prevSelector).on('click', function () {
                    if (settings.currentpage > 1) {
                        navigateTo(settings.currentpage - 1);
                    }
                    else {
                        base.css('color', 'black');
                    }
                });
            }
        }

        // Size every <li> to one page width, stretch the containers to the
        // tallest item (+10px), and widen the <ul> to hold all pages in a row.
        function setWidthPage() {
            var maxHeight = 0;
            var liWidth = $(settings.itemslistSelector).width();
            base.children('li').each(function () {
                $(this).css('width', liWidth + 'px');
                if ($(this).outerHeight(false) > maxHeight) {
                    maxHeight = $(this).outerHeight(false);
                }
            });
            $(settings.itemslistSelector).css('height', maxHeight + 10 + 'px');
            $(settings.listcontainerSelector).css('height', maxHeight + 10 + 'px');
            $(settings.pagercontainerSelector).css('width', liWidth + 'px');
            var ulWidth = base.children('li').length * liWidth;
            base.css('width', ulWidth + 'px');
        }

        this.init = function () {
            // setWidthPage is deliberately(?) called twice -- presumably so
            // heights settle after the first width pass changes line wrapping.
            // TODO confirm the second call is required.
            setWidthPage();
            setWidthPage();
            setOnClickEvents();
        }

        // Note: init() closes over `base` (the whole matched set), so running
        // it per element repeats the same global initialisation.
        return this.each(function () {
            base.init();
        });
    }
}(jQuery));<file_sep>/PersonalSite.WebUI/App_Start/RouteConfig.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using System.Web.Routing;

namespace PersonalSite.WebUI
{
    /// <summary>
    /// Registers the MVC route table. Routes are matched in registration
    /// order, so the specific routes must precede the generic "Default"
    /// catch-all; in the original ordering the catch-all came first and
    /// swallowed every inbound URL the specific routes were meant to handle.
    /// </summary>
    public class RouteConfig
    {
        public static void RegisterRoutes(RouteCollection routes)
        {
            routes.IgnoreRoute("{resource}.axd/{*pathInfo}");
            routes.MapMvcAttributeRoutes();

            // Specific, literal routes first.
            routes.MapRoute(
                "DefaultHome",
                string.Empty,
                new { controller = "Home", action = "index" }
            );

            routes.MapRoute(
                "Manager",
                "pages/edit/{id}",
                new { controller = "Article", action = "Edit" }
            );

            routes.MapRoute(
                "Pages",
                "pages/{title}-{id}",
                new { controller = "Article", action = "Details" }
            );

            routes.MapRoute(
                "Contact",
                "contact",
                new { controller = "Home", action = "Contact" }
            );

            routes.MapRoute(
                "Account",
                "Login",
                new { controller = "Account", action = "Login" }
            );

            // Generic {controller}/{action}/{id} catch-all goes last.
            routes.MapRoute(
                name: "Default",
                url: "{controller}/{action}/{id}",
                defaults: new { controller = "Home", action = "Index", id = UrlParameter.Optional }
            );
        }
    }
}
<file_sep>/PersonalSite.WebUI/Controllers/PartialController.cs
using PersonalSite.Service.Concrete;
using System.Web.Mvc;

namespace PersonalSite.WebUI.Controllers
{
    /// <summary>
    /// Hosts child-only actions used by shared partial views.
    /// </summary>
    public class PartialController : Controller
    {
        private readonly ArticleService articleService;

        public PartialController(ArticleService articleService)
        {
            this.articleService = articleService;
        }

        /// <summary>
        /// Renders the navigation menu partial from the full article list.
        /// Child-action only: not routable directly from a URL.
        /// </summary>
        [ChildActionOnly]
        public ActionResult Menu()
        {
            var allArticles = articleService.GetAll();
            return PartialView(allArticles);
        }
    }
}<file_sep>/PersonalSite.Service/ViewModel/ArticleViewModel.cs
using System.Collections.Generic;

namespace PersonalSite.Service.ViewModel
{
    /// <summary>
    /// View model for an article together with (optionally) its pages.
    /// Page collections are only populated when the mapper is asked to load
    /// them (see ExtensionHelper.GetViewModel(article, loadPages)).
    /// </summary>
    public class ArticleViewModel
    {
        public int Id { get; set; }
        // Mapped page view models; null unless pages were loaded.
        public List<PageViewModel> ArticlePages { get; set; }
        // Raw page ids, parallel to ArticlePages; null unless pages were loaded.
        public List<int> PagesIds { get; set; }
        public string Title { get; set; }
        public string Description { get; set; }
        public string Category { get; set; }
    }
}
<file_sep>/PersonalSite.Service/Concrete/ArticlePageService.cs
namespace PersonalSite.Service.Concrete
{
    using PersonalSite.DataAccess;
    using PersonalSite.Domain.Abstract;
    using PersonalSite.Service.Abstract;
    using PersonalSite.Service.ViewModel;
    using PersonalSite.Service.Extension;
    using System;
    using Kaliko;

    /// <summary>
    /// Article-page CRUD over <see cref="IRepo{T}"/>. Every operation traps
    /// exceptions, logs them through the Kaliko logger, and signals failure
    /// via its return value rather than throwing.
    /// </summary>
    public class ArticlePageService : IArticlePageService
    {
        private readonly IRepo<ArticlePage> repository;

        public ArticlePageService(IRepo<ArticlePage> articlePageRepo)
        {
            this.repository = articlePageRepo;
        }

        /// <summary>
        /// Fetches the page with the given id and maps it to a view model.
        /// When loading throws, an empty view model is returned instead.
        /// </summary>
        public PageViewModel GetArticlePageById(int id)
        {
            var viewModel = new PageViewModel();
            try
            {
                var page = this.repository.Get(id);
                viewModel = page.GetViewModel();
            }
            catch (Exception ex)
            {
                Kaliko.Logger.Write(ex, Logger.Severity.Critical);
            }
            return viewModel;
        }

        /// <summary>
        /// Persists a new page carrying the given content.
        /// </summary>
        /// <returns>The new page's id, or -1 when persistence fails.</returns>
        public int Create(PageViewModel pageViewModel)
        {
            var createdId = -1;
            try
            {
                var page = new ArticlePage()
                {
                    //Article = pageViewModel.Article != null
                    //	? articleRepo.Get(pageViewModel.Article.Id) : null,
                    PageContent = pageViewModel.PageContent
                };
                var inserted = this.repository.Insert(page);
                this.repository.Save();
                createdId = inserted.Id;
            }
            catch (Exception ex)
            {
                Kaliko.Logger.Write(ex, Logger.Severity.Critical);
            }
            return createdId;
        }

        /// <summary>
        /// Overwrites the content of an existing page.
        /// </summary>
        /// <returns>True when the page exists and the save succeeded.</returns>
        public bool UpdatePageContent(PageViewModel pageViewModel)
        {
            var succeeded = false;
            try
            {
                var page = this.repository.Get(pageViewModel.PageId);
                if (page != null)
                {
                    page.PageContent = pageViewModel.PageContent;
                    this.repository.Save();
                    succeeded = true;
                }
            }
            catch (Exception ex)
            {
                Kaliko.Logger.Write(ex, Logger.Severity.Critical);
            }
            return succeeded;
        }

        /// <summary>
        /// Deletes the page with the given id.
        /// </summary>
        /// <returns>True on success; false when deletion fails (logged).</returns>
        public bool Delete(int id)
        {
            var succeeded = false;
            try
            {
                var page = this.repository.Get(id);
                this.repository.Delete(page);
                this.repository.Save();
                succeeded = true;
            }
            catch (Exception ex)
            {
                Kaliko.Logger.Write(ex, Logger.Severity.Critical);
            }
            return succeeded;
        }
    }
}
<file_sep>/PersonalSite.WebUI/Scripts/backoffice/createpage-min.js
// CSS selectors shared by the handlers in this file.
var loadingSelector = 'div.loading';            // loading overlay
var tabLinksSelector = 'ul.tab-links';          // page-tab link list
var confirmSelector = 'div.del-confirm';        // delete confirmation dialog
var backgroundSelector = 'div.del-background';  // modal backdrop behind the dialog
// NOTE(review): hiddenBlocSelector is never read in this file -- confirm
// before removing.
var hiddenBlocSelector = 'div.hidden-bloc';
// Flip the loading overlay between its show/hide CSS states. The flip only
// happens when the element is fully in the opposite state, i.e. it carries
// exactly the one marker class being replaced.
function switchLoading(activate) {
    var overlay = $(loadingSelector);
    var hidden = overlay.hasClass("hide-loading");
    var shown = overlay.hasClass("show-loading");
    if (activate && hidden && !shown) {
        overlay.removeClass("hide-loading").addClass("show-loading");
    } else if (!activate && !hidden && shown) {
        overlay.addClass("hide-loading").removeClass("show-loading");
    }
}
// Add or remove the "disabled" class on every tab link, depending on the
// `disable` flag.
function disableTabLinks(disable) {
    $(tabLinksSelector).children('li').each(function () {
        var link = $(this).find("a");
        if (disable) {
            link.addClass("disabled");
        } else {
            link.removeClass("disabled");
        }
    });
}
// Show or hide the delete-confirmation dialog together with its modal
// backdrop.
function showDeleteWarning(show) {
    var action = show ? 'show' : 'hide';
    $(confirmSelector)[action]();
    $(backgroundSelector)[action]();
}
// POST the page view model to `url` and refresh the page-info panel with
// the returned markup. When the page was new (PageId === -1), the freshly
// added tab is numbered and re-bound to the server-assigned page id.
function callCreateArticlePageSync(url, pageViewModel) {
    var oldPageId = pageViewModel.PageId;
    $.ajax({
        url: url,
        cache: false,
        type: "POST",
        data: pageViewModel,
        success: function (data) {
            $('div#pageinfos').html(data);
            if (oldPageId == -1) {
                $('li.active').find('a').html($('li.tab').length);
                AssignTabToEditor();
                disableTabLinks(false);
            }
            switchLoading(false);
        },
        error: function (err) {
            // The original handler was a dead placeholder (`var x = 2;`),
            // which left the loading spinner spinning and the tabs disabled
            // forever after a failed save. Restore the UI so the user can
            // retry.
            disableTabLinks(false);
            switchLoading(false);
        }
    });
}
// After a new page is created, rewrite the newest tab's link so its -1
// placeholder page id is replaced by the id the server rendered into the
// hidden input inside div#pageinfos.
var AssignTabToEditor = function () {
    var newPageId = $('div#pageinfos').children('input#editPageId').val();
    var link = $('li.tab').last().children('a')[0].href;
    // The page id sits between the first '=' and the first '&' of the query
    // string -- assumes pageId is the first query parameter; TODO confirm.
    var oldPageId = link.substring(link.indexOf('=') + 1, link.indexOf('&'));
    if (oldPageId == -1) {
        $('li.tab').last().children('a')[0].href = link.replace(oldPageId, newPageId);
    }
}
// Bind the "+" tab: inserts a new tab element before the plus button and
// loads an empty editor panel (sentinel pageId -1) for the current article.
var AddTabOnclick = function () {
    $("a#tabplus").bind('click', function (e) {
        $('ul#tabs').children('li.active').removeClass('active');
        disableTabLinks(true);
        var parentId = $('div#article').children('input#pagefuckingId').val();
        // Ask the server for the new tab's markup.
        $.ajax({
            url: '/Article/AddNewTab',
            type: "GET",
            data: { articleId: parentId },
            success: function (data) {
                $(data).insertBefore('li.plus');
            }
        });
        // Load the editor for a not-yet-persisted page (-1 sentinel id).
        $.ajax({
            url: '/Article/ShowPageContent',
            type: "GET",
            data: { pageId: -1, articleId: parentId },
            success: function (data) {
                $('div#tobeupdated').html(data);
                initEventsForSelectedTab();
            }
        });
        e.preventDefault();
    });
}
// Click handler: make the clicked link's parent <li> the active tab.
// NOTE(review): not referenced elsewhere in this file -- presumably bound
// from server-rendered markup; confirm before removing.
var selectFocusTab = function () {
    $('ul#tabs').children('li.active').removeClass('active');
    $(this).parent().addClass('active');
}
// Re-bind the per-tab handlers after the editor panel has been re-rendered.
var initEventsForSelectedTab = function () {
    initSavePageEvent();
    editDeleteEvent();
}
// Bind the save link: collects the TinyMCE content, HTML-encodes it, and
// posts the resulting view model to the link's own href.
var initSavePageEvent = function () {
    $('a.save-page').bind('click', function (e) {
        $(loadingSelector).find('span').html("Saving page content...");
        switchLoading(true);
        var id = $('div#pageinfos').children('input[name="PageId"]').val();
        var parentId = $('div#article').children('input#pagefuckingId').val();
        var contentToSend = tinymce.activeEditor.getContent();
        // The .text(...).html() round-trip through a detached <div>
        // HTML-encodes the editor markup before it is sent.
        var pageViewModel = { PageId: id, ParentArticleId: parentId, PageContent: $('<div></div>').text(contentToSend).html() };
        callCreateArticlePageSync(this.href, pageViewModel);
        e.preventDefault();
    });
}
// Remove the active tab, renumber the remaining tab labels from 1, and make
// the first remaining tab active.
var deleteCurrentTab = function () {
    $('ul#tabs').find('li.active').remove();
    $('ul#tabs').children('li.tab').each(function (index) {
        $(this).find('a').html(index + 1);
    });
    $('ul#tabs').children('li:first-child').addClass('active');
}
// Leave "delete" mode: re-enable the tabs, hide the confirmation dialog,
// and remove the deleted tab from the strip.
// NOTE(review): not referenced elsewhere in this file -- presumably called
// from server-rendered markup; confirm.
var turnOffDeleteStyle = function () {
    disableTabLinks(false);
    showDeleteWarning(false);
    deleteCurrentTab();
}
// Bind the delete affordances: the delete button opens the confirmation
// dialog (tabs disabled while it is open); cancel closes it again.
var editDeleteEvent = function () {
    $('.delete-page').bind('click', function () {
        disableTabLinks(true);
        showDeleteWarning(true);
    });
    $('.edit-del-cancel').bind('click', function () {
        disableTabLinks(false);
        showDeleteWarning(false);
    });
}
// Page bootstrap: wire the "+" tab plus the save/delete handlers.
// The commented-out calls look like earlier iterations of the init flow --
// TODO confirm they can be removed.
$(function () {
    var init = function () {
        AddTabOnclick();
        //loadTinyMce();
        //initFirstTabFirstText(false);
        //initTabOnClick();
        initSavePageEvent();
        editDeleteEvent();
        //initEditArticlePageEvent();
        //initTabPlusOnclick();
    }
    init();
});
<file_sep>/PersonalSite.WebUI/Scripts/Paginate/img.js
// NOTE(review): readyToUpdate is never read in this file -- presumably a
// leftover in-flight guard; confirm before removing.
var readyToUpdate = 0;
// 1-based index of the gallery page currently shown (used by the swipe
// handlers below).
var currentpage = 1;
/**
 * Layout helpers for the image gallery.
 */
// Absolutely position `box` (a jQuery element) so it sits centered inside
// `parent`, using the difference of their outer sizes.
function centerBoxInsideParent(box, parent) {
    var top = (parent.outerHeight() - box.outerHeight()) / 2;
    var left = (parent.outerWidth() - box.outerWidth()) / 2;
    box.css({
        'top': top + 'px',
        'left': left + 'px'
    });
}
// Center the overlay children (delete dialog, loading box) inside the image
// display, and stretch the delete backdrop over image + details area.
function centerChildren(imgDisplay) {
    var delDiv = imgDisplay.children('.del-background');
    var delConfirm = imgDisplay.children('.del-confirm');
    var loading = imgDisplay.children('.loading');
    var detailsDiv = imgDisplay.children('.img-details');
    // NOTE(review): detailsModifyDiv is only used by the commented-out line
    // below -- confirm whether the edit-mode sizing is still needed.
    var detailsModifyDiv = imgDisplay.children('.img-details-modify');
    delDiv.height(detailsDiv.height() + imgDisplay.height());
    // centerBoxInsideParent(delConfirm, delDiv);
    centerBoxInsideParent(delConfirm, imgDisplay);
    //loading.height(detailsModifyDiv.height() +imgDisplay.height());
    centerBoxInsideParent(loading, imgDisplay);
}
// Align the details / edit panels with the left edge of the image box,
// compensating for the container's left border width.
function positionningImageDisplay(img) {
    var imgContainer = $(img).children('.img-container');
    var imgDetails = $(img).children('.img-details');
    var imgDetailsEdition = $(img).children('.img-details-modify');
    // (The original also parsed imgContainer.css('left') into an unused
    // local `x`; that dead read has been removed.)
    var leftPositionInt = parseInt(imgContainer.position().left, 10) - parseInt(imgContainer.css('border-left-width'), 10);
    imgDetails.css('left', leftPositionInt + 'px');
    imgDetailsEdition.css('left', leftPositionInt + 'px');
}
// Intentionally empty: kept because the (commented-out) ready/load handlers
// below still reference it. TODO confirm it can be removed.
function setHeightWidthSections() {
}
$(document).ready(function () {
    //setHeightWidthSections();
});
$(window).load(function () {
    //setHeightWidthSections();
});
$(function () {
    // Hook the gallery up to the pager plugin (jquery.article.paginate.js).
    var myObj = $('ul#pagination').paginatearticle({
        nextSelector: 'li.nav-next',
        prevSelector: 'li.nav-prev',
        pagerSelector: 'li.nav-numbers',
        itemslistSelector: 'div.img-list',
        listcontainerSelector: 'div.img-list-container',
        pagercontainerSelector: '.pager-container',
        currentpage: 1
    });
    // NOTE(review): isMobile is not defined in this file (assumed global --
    // confirm), and navigateTo is private to the plugin's closure, so these
    // swipe handlers would throw a ReferenceError when they fire.
    if (isMobile) {
        $('ul#pagination').on('swipeleft', function () {
            if (currentpage < $('ul#pagination').children('li').length) {
                navigateTo(currentpage + 1);
            }
        });
        $('ul#pagination').on('swiperight', function () {
            if (currentpage > 1) {
                navigateTo(currentpage - 1);
            }
        });
    }
    //$('.img-display').each(function () {
    //    var img = $(this);
    //    centerChildren(img);
    //    positionningImageDisplay(img);
    //});
    //$("body").keydown(function (e) {
    //    if (e.keyCode == 37) { // left
    //        if (currentpage > 1) {
    //            navigateTo(currentpage - 1);
    //        }
    //    }
    //    else if (e.keyCode == 39) { // right
    //        if (currentpage < $('ul#pagination').children('li').length) {
    //            navigateTo(currentpage + 1);
    //        }
    //    }
    //});
});
<file_sep>/PersonalSite.WebUI/Scripts/dragdrop.js
// NOTE(review): none of these three globals is read in this file -- they are
// presumably shared with other scripts; confirm before removing.
var userFilterType;
var userFilterValue;
var isManager = null;
// Click handler for a category chip: mark it selected, show only the list
// items tagged with that category, and keep the selection list at least as
// tall as the source list (otherwise drop its inline sizing).
function selectCategorie() {
    $('.categories').children('div.selected').removeClass('selected');
    $(this).addClass('selected');
    var chosenCategory = $(this).attr('id');
    $('#sortable1').children('li').each(function () {
        var item = $(this);
        if (item.attr('categorie') === chosenCategory) {
            item.show();
        } else {
            item.hide();
        }
    });
    var sourceList = $('ul#sortable1');
    var targetList = $('ul#sortable2');
    if (targetList.height() < sourceList.height()) {
        targetList.height(sourceList.height());
    }
    else {
        targetList.removeAttr('style');
    }
}
// Rebuild the category chips and the available-application list from an
// array of {Id, Name, Categorie, Description} objects. The first category
// seen becomes the selected one; items from other categories start hidden.
function setAvailableApplications(response) {
    $('#sortable1').delay(700).empty();
    $('.categories').empty();
    var selectedCat;
    if (response.length === 0) {
        $('div.available-apps').children('div').addClass('no-result');
        $('<div><span>Veuillez changer le filtre pour séléctionner des applications.</span></div>').appendTo('#sortable1');
    }
    else {
        if ($('div.available-apps').children('div').hasClass('no-result')) {
            $('div.available-apps').children('div').removeClass('no-result');
        }
        for (var i = 0; i < response.length; i++) {
            var categoryId = response[i].Categorie.replace(/\s/g, '');
            var categoryFound = false;
            $('.categories').children('div').each(function () {
                if ($(this).attr('id') === categoryId) {
                    categoryFound = true;
                }
            });
            if (categoryFound === false) {
                var categorie = $('<div id=' + categoryId + '><span>' + response[i].Categorie + '</span></div>').bind('click', selectCategorie);
                categorie.appendTo('.categories');
                if (i === 0) {
                    selectedCat = categoryId;
                    categorie.addClass('selected');
                }
            }
            var newItem = $('<li id="' + response[i].Id + '" class="grab" categorie="' + categoryId + '"><div>' + response[i].Name + '</div></li>');
            if (response[i].Description !== null && response[i].Description !== "") {
                newItem.attr('description', response[i].Description);
                newItem.bind('mouseover', function () {
                    // BUG FIX: the original interpolated response[i].Name here,
                    // but by the time the event fires the shared loop variable
                    // i === response.length, so response[i] was undefined and
                    // the handler threw. Read the per-element description
                    // attribute (set above) instead.
                    var desc = $(this).attr('description');
                    $(this).append("<div class='info' style='position:absolute;top:0'>" + desc + "</div>");
                });
                newItem.bind('mouseout', function () {
                    $(this).children('div.info').remove();
                });
            }
            if (categoryId !== selectedCat) {
                newItem.appendTo('#sortable1').hide();
            }
            else {
                newItem.hide().appendTo('#sortable1').slideDown("slow");
            }
        }
    }
}
// Populate the available-application list. Currently backed by a hard-coded
// fixture; the original also built an unused request-parameter object
// (dead local, removed).
function getAvailableApplications() {
    var allObjects = [{ Name: "Link 1", Categorie: "category1", Id: "1", Description: "Description of link 1" },
        { Name: "Link 2", Categorie: "category1", Id: "2", Description: "Description of link 2" },
        { Name: "Link 3", Categorie: "category2", Id: "3", Description: "Description of link 3" },
        { Name: "Link 4", Categorie: "category2", Id: "4", Description: "Description of link 4" }];
    setAvailableApplications(allObjects);
}
$(function () {
    // Show the "empty" placeholder of any list that only contains it.
    $('ul').each(function () {
        if ($(this).children('li').length === 1) {
            $(this).children('li.emptyMessage').show();
        }
    });
    // Source list: applications available to add to the selection.
    $("#sortable1").sortable({
        connectWith: ".connectedSortable",
        items: "li:not(.unsortable)",
        placeholder: "ghost-highlight",
        revert: 200,
        tolerance: "intersect",
        over: function (event, ui) {
            // Manager-imposed items may not return here: collapse the drop
            // ghost and show the warning banner instead.
            if ($(ui.item).hasClass('manager-selection')) {
                $('div.applications').children('ul')
                    .children('li.ghost-highlight')
                    .removeClass('ghost-highlight')
                    .attr('style', 'height:0px; padding:0px; border: none;');
                $('div.available-apps').children('div').children('div.applications').addClass('warning-background');
                $('div.warning').show();
            }
            else {
                // A normal item leaving the selection: fade out its ghost
                // position marker.
                if ($('.positions').children('div.ghost-box')) {
                    $('.positions').children('div.ghost-box').fadeOut("normal", function () {
                        $(this).remove();
                    });
                }
            }
        },
        out: function (event, ui) {
            if ($(ui.item).hasClass('manager-selection')) {
                $('div.applications').removeClass('warning-background');
                $('div.warning').hide();
            }
        },
        receive: function (event, ui) {
            $(ui.item).removeAttr('style');
            // Reject manager-imposed items; otherwise drop the trailing
            // position counter since an item left the selection.
            if ($(ui.item).hasClass('manager-selection')) {
                $(ui.sender).sortable('cancel');
            }
            else {
                var nb = $('.positions').children('div').length - 1;
                $('.positions').children('div:last').fadeOut("normal", function () {
                    $(this).remove();
                });
            }
            //show empty message on sender if applicable
            $('li.emptyMessage', this).hide();
            if ($('li:not(.emptyMessage)', ui.sender).length == 0) {
                $('li.emptyMessage', ui.sender).show();
            }
            else {
                $('li.emptyMessage', ui.sender).hide();
            }
        },
        start: function (event, ui) {
            // Size the placeholder like the dragged item; swap cursor classes.
            ui.placeholder.height(ui.item.height());
            $(ui.item).removeClass('grab');
            $(ui.item).addClass('grabbing');
        },
        stop: function (event, ui) {
            $(ui.item).removeClass('grabbing');
            $(ui.item).addClass('grab');
        }
    }).disableSelection();
    // Target list: the user's selection. A numbered column (div.positions)
    // is kept in sync with the item count.
    $("#sortable2").sortable({
        connectWith: [".connectedSortable", ".connectedToTrash"],
        dropOnEmpty: true,
        placeholder: "ghost-highlight",
        items: "li:not(.unsortable)",
        revert: 200,
        tolerance: "intersect",
        receive: function (event, ui) {
            // Newly selected items become part of the personal selection.
            $(ui.item).removeAttr('style').addClass('manager-selection');
            if (!$(ui.item).hasClass('perso')) {
                $(ui.item).addClass('perso')
            }
            // Replace the ghost position marker with a real numbered one.
            $('.positions').children('div:last').remove();
            var nb = $('.positions').children('div').length + 1;
            $('<div><span>' + nb + '</span></div>').appendTo('.positions');
            //show empty message on sender if applicable
            $('li.emptyMessage', this).hide();
            if ($('li:not(.emptyMessage)', ui.sender).length === 0) {
                $('li.emptyMessage', ui.sender).show();
            }
            else {
                $('li.emptyMessage', ui.sender).hide();
            }
        },
        start: function (event, ui) {
            ui.placeholder.height(ui.item.height());
            $(ui.item).removeClass('grab');
            $(ui.item).addClass('grabbing');
        },
        stop: function (event, ui) {
            $(ui.item).removeClass('grabbing');
            $(ui.item).addClass('grab');
        },
        over: function (event, ui) {
            if ($('ul#sortable2').children('li.emptyMessage').is(':visible')) {
                $('ul#sortable2').children('li.emptyMessage').hide();
            }
            // Preview the would-be position for items coming from outside.
            if (!$(ui.item).hasClass('manager-selection')) {
                var nb = $('.positions').children('div').length + 1;
                $('<div class="ghost-box" style="opacity:0.7; border:1px dashed;"><span>' + nb + '</span></div>').appendTo('.positions');
            }
        },
        out: function (event, ui) {
            // Remove the preview and restore the empty message if needed.
            if ($('.positions').children('div:last').hasClass('ghost-box')) {
                $('.positions').children('div:last').remove();
                if ($('.positions').children('div').length === 0) {
                    if (!$('ul#sortable2').children('li.emptyMessage').is(':visible')) {
                        $('ul#sortable2').children('li.emptyMessage').show();
                    }
                }
            }
        }
    }).disableSelection();
    // Trash list: items dropped here leave the selection.
    $("#sortable3").sortable({
        connectWith: ".connectedToTrash",
        dropOnEmpty: true,
        placeholder: "ghost-highlight",
        items: "li:not(.unsortable)",
        revert: 200,
        tolerance: "intersect",
        receive: function (event, ui) {
            // Strip selection markers and drop the trailing position counter.
            $(ui.item).removeAttr('style').removeClass('manager-selection');
            $(ui.item).children('div.oblig').remove();
            var nb = $('.positions').children('div').length - 1;
            $('.positions').children('div:last').fadeOut("normal", function () {
                $(this).remove();
            });
            //hide empty message on receiver
            $('li.emptyMessage', this).hide();
            //show empty message on sender if applicable
            if ($('li:not(.emptyMessage)', ui.sender).length == 0) {
                $('li.emptyMessage', ui.sender).show();
            }
            else {
                $('li.emptyMessage', ui.sender).hide();
            }
        },
        over: function (event, ui) {
            if ($('ul#sortable3').children('li.emptyMessage').is(':visible')) {
                $('ul#sortable3').children('li.emptyMessage').hide();
            }
        },
        start: function (event, ui) {
            ui.placeholder.height(ui.item.height());
            $(ui.item).removeClass('grab');
            $(ui.item).addClass('grabbing');
        },
        stop: function (event, ui) {
            $(ui.item).removeClass('grabbing');
            $(ui.item).addClass('grab');
        }
    }).disableSelection();
    getAvailableApplications();
});<file_sep>/PersonalSite.WebUI/Scripts/forecast.js
// Map an OpenWeatherMap condition code to a weather-icon sprite class and
// apply it to every icon element. Night variants carry the "-N" suffix.
// (The original repeated the identical remove/add loop nine times; the
// duplication is factored into applyIcon -- behavior is unchanged.)
function setIconWeather(code, isMorning) {
    var nightCode = isMorning ? "" : "-N";

    // Replace an icon element's classes with the base + given sprite class.
    function applyIcon(spriteClass) {
        $('.tab_contentWeather').children('.weather-ico').each(function () {
            $(this).removeAttr('class');
            $(this).addClass('weather-ico');
            $(this).addClass(spriteClass + nightCode);
        });
    }

    // Thunderstorm
    if (code >= 200 && code <= 232) {
        applyIcon('sprite-07_thunderstorm');
    }
    // Drizzle / shower rain
    if ((code >= 300 && code <= 321) || (code >= 520 && code <= 531)) {
        applyIcon('sprite-05_showerRain');
    }
    // Rain
    if (code >= 500 && code <= 504) {
        applyIcon('sprite-06_rain');
    }
    // Snow
    if (code >= 600 && code <= 622) {
        applyIcon('sprite-08_snow');
    }
    // Mist / atmosphere
    if (code >= 701 && code <= 781) {
        applyIcon('sprite-09_mist');
    }
    // Clear sky
    if (code == 800) {
        applyIcon('sprite-01_skyIsClear');
    }
    // Few clouds
    if (code == 801) {
        applyIcon('sprite-02_fewClouds');
    }
    // Scattered / broken clouds
    if (code == 802 || code == 803) {
        applyIcon('sprite-03_scatteredClouds');
    }
    // Overcast
    if (code == 804) {
        applyIcon('sprite-04_brokenClouds');
    }
}
// Fill the weather widget from an OpenWeatherMap "current weather" response:
// today's date, temperature, description, city name and icon.
function SetWeatherInfo(data) {
    var dt = new Date();
    // dd/MM/yyyy, month zero-padded.
    var today = dt.getDate() + "/" + ("0" + (dt.getMonth() + 1)).slice(-2) + "/" + dt.getFullYear();
    $('.weatherDate').html(today);
    var temp = data.main.temp;
    // NOTE(review): temp_max/temp_min are read but never displayed -- confirm
    // whether they should be shown or the reads removed.
    var temp_max = data.main.temp_max;
    var temp_min = data.main.temp_min;
    $('.weather-infosRight').html(temp);
    $('.weatherInfo').html(data.weather[0].description);
    $('.weatherCity').html(data.name);
    $('.degrees').html(temp + '°C');
    // An icon id containing 'd' marks a daytime icon in the OWM icon naming.
    setIconWeather(data.weather[0].id, data.weather[0].icon.indexOf('d') > -1);
}
// Query OpenWeatherMap for a city's current weather (metric units) and
// render it; on HTTP failure only the status code is shown.
// NOTE(review): the API key is hard-coded into client-side code and the call
// uses plain http -- consider proxying the request server-side over https.
function GetWeatherInfo(city) {
    var directUrl = "http://api.openweathermap.org/data/2.5/weather";
    var api = '2d3f3c632a918e9e16908ce030dcea6a';
    var param = { q: city, units: "metric", APPID: api };
    // try/catch around $.ajax only covers synchronous setup errors; network
    // failures are handled by the error callback.
    try {
        $.ajax({
            type: "GET",
            dataType: "json",
            url: directUrl,
            data : param,
            cache: false,
            success: function (data) {
                SetWeatherInfo(data);
            },
            error: function (errorData) {
                $('.weather-infosLeft').hide();
                $('.weather-infosGlobal').html(errorData.status);
            }
        });
    }
    catch (err) {
        $('.weather-infosLeft').hide();
        $('.weather-infosGlobal').html("Erreur lors de la recherche m�t�o");
    }
}
// Parse the tab-separated city list (one row per line, first line assumed a
// header; column 1 = name, column 4 = country -- TODO confirm schema) and
// feed it to the jQuery UI autocomplete, limited to 5 suggestions.
function setCitiesTabToAutoComplete(cities) {
    var rows = cities.split('\n');
    var citiesTab = new Array(rows.length - 1);
    // BUG FIX: `i` was an implicit global in the original (missing `var`),
    // leaking into window scope; it is now a proper local.
    for (var i = 0; i < rows.length - 1; i++) {
        var row = rows[i + 1];
        citiesTab[i] = row.split('\t')[1] + ", " + row.split('\t')[4];
    }
    $("#cities").autocomplete({
        source: function (request, response) {
            var results = $.ui.autocomplete.filter(citiesTab, request.term);
            response(results.slice(0, 5));
        },
        focus: function (event, ui) {
            // Preview the weather while cycling suggestions; returning false
            // keeps the input text unchanged.
            GetWeatherInfo(ui.item.label);
            return false;
        },
        messages: {
            noResults: '',
            results: function () { }
        }
    });
}
// Download the bundled city list (plain text) and wire it into the
// autocomplete; errors are only logged to the console.
function getAllCities() {
    var url = "/Scripts/city_list.txt";
    $.ajax({
        type: "GET",
        url: url,
        dataType: "text",
        success: function (data) {
            setCitiesTabToAutoComplete(data);
        },
        error: function (err) {
            console.log(JSON.stringify(err));
        }
    });
}
// Widget bootstrap: show a default city, then load the autocomplete data.
$(document).ready(function () {
    GetWeatherInfo("London,uk");
    getAllCities();
    //.autocomplete("instance")._renderItem = function (ul, item) {
    //return $( "<li>" )
    //	.append( "<a>" + item.label + "</a>" )
    //	.appendTo( ul );
    //}
});<file_sep>/PersonalSite.Domain/Abstract/IDbContextFactory.cs
using PersonalSite.DataAccess;

namespace PersonalSite.Domain.Abstract
{
    /// <summary>
    /// Abstraction over the creation/retrieval of the EF data context.
    /// </summary>
    public interface IDbContextFactory
    {
        // Returns the EF context to use (the concrete factory returns one
        // shared instance per factory).
        GrafaaEntities GetContext();
    }
}
<file_sep>/PersonalSite.Service/Extension/ExtensionHelper.cs
namespace PersonalSite.Service.Extension
{
    using PersonalSite.DataAccess;
    using PersonalSite.Service.ViewModel;
    using System.Collections.Generic;

    /// <summary>
    /// Entity-to-view-model mapping helpers.
    /// </summary>
    public static class ExtensionHelper
    {
        /// <summary>
        /// Maps an <see cref="Article"/> to its view model. A null article
        /// yields an empty (default) view model, not null.
        /// </summary>
        /// <param name="article">Entity to map; may be null.</param>
        /// <param name="loadPages">When true, also maps every page and collects their ids.</param>
        public static ArticleViewModel GetViewModel(this Article article, bool loadPages)
        {
            var articleViewModel = new ArticleViewModel();
            if (article != null)
            {
                articleViewModel.Id = article.Id;
                articleViewModel.Title = article.Title;
                articleViewModel.Description = article.Description;
                articleViewModel.Category = article.Category;
                if (loadPages)
                {
                    articleViewModel.ArticlePages = new List<PageViewModel>();
                    articleViewModel.PagesIds = new List<int>();
                    foreach (var articlePage in article.ArticlePages)
                    {
                        articleViewModel.ArticlePages.Add(articlePage.GetViewModel());
                        articleViewModel.PagesIds.Add(articlePage.Id);
                    }
                }
            }
            return articleViewModel;
        }

        /// <summary>
        /// Maps an <see cref="ArticlePage"/> to its view model; returns null
        /// when <paramref name="articlePage"/> is null.
        /// </summary>
        public static PageViewModel GetViewModel(this ArticlePage articlePage)
        {
            PageViewModel articlePageViewModel = null;
            if (articlePage != null)
            {
                articlePageViewModel = new PageViewModel()
                {
                    PageId = articlePage.Id,
                    PageContent = articlePage.PageContent,
                    // A page may be created without a parent article (see
                    // CreateArticlePage, which only links one when found);
                    // the original dereferenced Article unconditionally and
                    // threw NullReferenceException in that case.
                    ParentArticleId = articlePage.Article != null ? articlePage.Article.Id : 0
                };
            }
            return articlePageViewModel;
        }
    }
}
|
c1aa2862101f110f95c6aab59694018b14e26096
|
[
"JavaScript",
"C#"
] | 30
|
C#
|
rafaagahbiche/grafaa
|
b1cd5615f2a3182f2fe41b42f8506f2e294726bc
|
e4a57d314c9f85cff638878bfb6fd4421967a380
|
refs/heads/master
|
<file_sep><?php
class SecretariaController extends Controller {
// Layout applied to all views rendered by this controller.
public $layout='//layouts/column2';
/**
* @return array action filters
*/
public function filters()
{
    // Standard Yii filter chain: RBAC via accessRules(), and deletes
    // restricted to POST requests.
    return array(
        'accessControl', // perform access control for CRUD operations
        'postOnly + delete', // we only allow deletion via POST request
    );
}
/**
* Specifies the access control rules.
* This method is used by the 'accessControl' filter.
* @return array access control rules
*/
public function accessRules()
{
    // Authenticated users with the Administrador or Secretaria role get full
    // access; everyone else is denied.
    return array(
        array('allow',
            'roles'=>array('Administrador'),
            'users'=>array('@'),
        ),
        array('allow', // allow authenticated user to perform 'create' and 'update' actions
            'roles'=>array('Secretaria'),
            'users'=>array('@'),
        ),
        array('deny', // deny all users
            'users'=>array('*'),
        ),
    );
}
public function actionIndex(){
    // Render the secretary dashboard for the logged-in user.
    // Bind the username as a query parameter instead of concatenating it
    // into the condition string (the original was open to SQL injection).
    $tar = M05Usuario::model()->find('Usuario = :usuario', array(':usuario' => Yii::app()->user->name));
    $this->render('index', array('Usuario' => $tar,));
}
public function actionEvalua(){
    // List the theses whose status is "Aprobada" for evaluation.
    // Values are bound as parameters instead of concatenated into the SQL
    // condition strings (the username lookup was open to SQL injection).
    $tar = M05Usuario::model()->find('Usuario = :usuario', array(':usuario' => Yii::app()->user->name));
    $modelStatus = P03Status::model()->find("Descripcion = 'Aprobada'");
    $criteria = new CDbCriteria;
    $criteria->condition = 'P03_id = :statusId';
    $criteria->params = array(':statusId' => $modelStatus->id);
    $dataProvider = new CActiveDataProvider(M03Tesis::model(), array('criteria'=>$criteria,));
    $this->render('test', array('Usuario'=>$tar, 'dataProvider'=>$dataProvider));
}
public function actionActaeva($id){
    // Builds the evaluation act PDF(s) for the thesis with the given id.
    // The thesis' related users (students, jurors, tutor) are resolved via
    // their relation type; when the thesis has two students a second act is
    // generated for the second one.
    //
    // NOTE(review): no $mPDF1->Output() (nor any response) is ever emitted,
    // so the generated documents are currently discarded -- confirm how the
    // act is meant to be delivered before adding output here.
    $alumno = null;
    $alumno2 = null;
    // Bind $id instead of concatenating it into the condition string (the
    // original was open to SQL injection through the request parameter).
    $dir = T01TesisHasUsuario::model()->findAll('M03_id = :tesisId', array(':tesisId' => $id));
    $tr_j1 = P02TipoRelacion::model()->find("descripcion = 'Jurado 1'");
    $tr_j2 = P02TipoRelacion::model()->find("descripcion = 'Jurado 2'");
    $tr_js = P02TipoRelacion::model()->find("descripcion = 'Jurado Suplente'");
    $tr_ta = P02TipoRelacion::model()->find("descripcion = 'Tutor Academico'");
    $tr_a = P02TipoRelacion::model()->find("descripcion = 'Tesista'");
    $tesis = M03Tesis::model()->find('id = :id', array(':id' => $id));
    foreach ($dir as $value) {
        if ($value->P02_id == $tr_a->id) {
            // First Tesista row is the first student, second row the second.
            if ($alumno === null) {
                $alumno = M05Usuario::model()->find('id = :id', array(':id' => $value->M05_id));
            } else {
                $alumno2 = M05Usuario::model()->find('id = :id', array(':id' => $value->M05_id));
            }
        } elseif ($value->P02_id == $tr_j1->id) {
            $jurado1 = M05Usuario::model()->find('id = :id', array(':id' => $value->M05_id));
        } elseif ($value->P02_id == $tr_j2->id) {
            $jurado2 = M05Usuario::model()->find('id = :id', array(':id' => $value->M05_id));
        } elseif ($value->P02_id == $tr_js->id) {
            $juradoS = M05Usuario::model()->find('id = :id', array(':id' => $value->M05_id));
        } elseif ($value->P02_id == $tr_ta->id) {
            $tutor = M05Usuario::model()->find('id = :id', array(':id' => $value->M05_id));
        }
    }
    // First student's act.
    $mPDF1 = Yii::app()->ePdf->mpdf('utf-8','A4','','',15,15,35,25,9,9,'P');
    $mPDF1->useOnlyCoreFonts = true;
    $mPDF1->SetTitle(" Acta Evaluacion ".$alumno->Nombre." ".$alumno->Apellido);
    $mPDF1->SetAuthor("Departamento Ing. Informatica");
    $mPDF1->showWatermarkText = true;
    $mPDF1->watermark_font = 'DejaVuSansCondensed';
    $mPDF1->watermarkTextAlpha = 0.1;
    $mPDF1->SetDisplayMode('fullpage');
    $mPDF1->WriteHTML($this->renderPartial('actasEva_Tesis',array(
        'Normbre_alumno'=>$alumno->Nombre." ".$alumno->Apellido,
        'Cedula_alumno' =>$alumno->Cedula,
        'Titulo_TAP' => $tesis->Titulo,
        'Fecha_presentacion' => $tesis->Fecha_Defensa,
        'L_academico' => $tesis->Lapso_Academico_defensa,
        'Nombre_tutor'=>$tutor->Nombre." ".$tutor->Apellido,
        'Nombre_Jurado_1'=>$jurado1->Nombre." ".$jurado1->Apellido,
        'Nombre_Jurado_2'=>$jurado2->Nombre." ".$jurado2->Apellido,
        'Nombre_Jurado_S'=>$juradoS->Nombre." ".$juradoS->Apellido,
    ), true));
    // BUG FIX: the original guarded the second act with a flag that was true
    // whenever ANY student existed, so single-student theses crashed on an
    // undefined $alumno2; it also reused the first student's Cedula and
    // title, and passed raw user objects where the first act passed
    // formatted name strings.
    if ($alumno2 !== null) {
        $mPDF1 = Yii::app()->ePdf->mpdf('utf-8','A4','','',15,15,35,25,9,9,'P');
        $mPDF1->useOnlyCoreFonts = true;
        $mPDF1->SetTitle(" Acta Evaluacion ".$alumno2->Nombre." ".$alumno2->Apellido);
        $mPDF1->SetAuthor("Departamento Ing. Informatica");
        $mPDF1->showWatermarkText = true;
        $mPDF1->watermark_font = 'DejaVuSansCondensed';
        $mPDF1->watermarkTextAlpha = 0.1;
        $mPDF1->SetDisplayMode('fullpage');
        $mPDF1->WriteHTML($this->renderPartial('actasEva_Tesis',array(
            'Normbre_alumno'=>$alumno2->Nombre." ".$alumno2->Apellido,
            'Cedula_alumno' =>$alumno2->Cedula,
            'Titulo_TAP' => $tesis->Titulo,
            'Fecha_presentacion' => $tesis->Fecha_Defensa,
            'L_academico' => $tesis->Lapso_Academico_defensa,
            'Nombre_tutor'=>$tutor->Nombre." ".$tutor->Apellido,
            'Nombre_Jurado_1'=>$jurado1->Nombre." ".$jurado1->Apellido,
            'Nombre_Jurado_2'=>$jurado2->Nombre." ".$jurado2->Apellido,
            'Nombre_Jurado_S'=>$juradoS->Nombre." ".$juradoS->Apellido,
        ), true));
    }
}
public function Actionviejas_tap()
{
if(isset($_POST['desi'])){
if($_POST["tip"]==1){
$this->redirect(array('viejas_tap_subirt'));
}
else{
$this->redirect(array('viejas_tap_subirp'));
}
}
$this->render('viejas_tap');
}
	/**
	 * Registers an old ("vieja") thesis along with its student and tutor.
	 *
	 * Flow: look up relation types ('Tesista', 'Tutor') and the 'Aprobada'
	 * status, save the thesis (M03Tesis), create the student user when the
	 * given Cedula is unknown (mailing them generated credentials), then link
	 * student and tutor to the thesis through T01TesisHasUsuario rows.
	 *
	 * NOTE(review): issues to confirm/fix:
	 *  - $est2 is looked up BEFORE the new student user is saved; when the
	 *    student did not exist, $est2 is null yet "$model_2->M05_id=$est2->id"
	 *    below still dereferences it — the thesis/student link looks broken
	 *    for newly created students.
	 *  - The raw INSERT for the tutor concatenates values into SQL
	 *    (injection risk); should use parameter binding.
	 *  - count($record) on a single ActiveRecord/null is a fragile existence
	 *    check; prefer "=== null".
	 */
	public function actionviejas_tap_subirt()
	{
		$tar=M05Usuario::model()->find("Usuario = '".Yii::app ()->user->name."'");
		$tipo1=P02TipoRelacion::model()->find("Descripcion = 'Tesista'");
		$tipo2=P02TipoRelacion::model()->find("Descripcion = 'Tutor'");
		$estado=P03Status::model()->find("Descripcion = 'Aprobada'");
		$model_1=new M03Tesis;
		$model_2=new T01TesisHasUsuario;
		$model_3=new T01TesisHasUsuario;
		$model_4=new M05Usuario;
		if(isset($_POST['M03Tesis']))
		{
			$model_1->attributes=$_POST['M03Tesis'];
			$model_1->P03_id=$estado->id;
			$model_4->attributes=$_POST['M05Usuario'];
			// Existing-student lookup by Cedula (also reused further below).
			$est2=M05Usuario::model()->find("Cedula = '".$model_4->Cedula."'");
			if(count($est2)==0)
			{
				// Student not found: create a user with a random password and
				// e-mail the credentials.
				$model_4->Usuario=$model_4->Nombre.".".$model_4->Apellido;
				$contrasena=$this->RandomString();
				$model_4->Telefono="no especifico";
				$model_4->Direccion="no especifico";
				$contenido="Buen día, has sido registraddo en el sistema TAP del departamento de Ing Informática.  Usuario: ".$model_4->Usuario."  Contraseña: ".$contrasena."";
				$this->correo_e($model_4->Correo_Electronico,$contenido,"nuevo usuario del sistema tap");
				// Note: $session is assigned inline with the salt used to hash.
				$model_4->Clave=$model_4->hashPassword($contrasena,$session=$model_4->generateSalt());
				$model_4->session=$session;
				if($model_4->save())
				{
					// Assign the student role (P01_id = 4).
					$item = new T08UsuarioHasRol;
					$item->M05_id=$model_4->id;
					$item->P01_id="4"; $item->save();
					$modelRol = P01Rol::model()->findByPk($item->P01_id);
					$auth=Yii::app()->authManager;
					$nombre=P01Rol::model()->find("id=".$item->P01_id);
					//$auth->assign($modelRol->nombre,$model_4->Usuario);
				}
			}
			if($model_1->save())
			{
				// Para subir la relacion con el alumno---------
				// NOTE(review): $est2 is null when the student was just
				// created above — this dereference then fails. TODO confirm.
				$model_2->M03_id=$model_1->id;
				$model_2->M05_id=$est2->id; // aqui es donde acomodo el usuario
				$model_2->P02_id=$tipo1->id;
				$model_2->save();
				// Para subir la relacion con el profesor
				$prof=M01Profesor::model()->findByPk($_POST['M03Tesis']['tutor']);
				$docente=M05Usuario::model()->find("Cedula = '".$prof->Cedula."'");
				if(count($docente)==0)
				{ //si el profesor no se encuentra en el sistema se crea un usuario temporal no puede entrar en el sistema hasta que no se le habilite un Usuario y clave
					//Crea usuario temporal
					// NOTE(review): string-built SQL — injection risk; use
					// bound parameters via createCommand()->bindValue().
					$sql="Insert into m05_usuario (id,Cedula,Apellido,Nombre,Correo_Electronico) values (NULL,'".$prof->Cedula."','" .$prof->Nombre."','".$prof->Apellido."','".$prof->Correo_UNET."')";
					$comando = Yii::app() -> db -> createCommand($sql);
					$comando -> execute();
					$docente2=M05Usuario::model()->find("Cedula = '".$prof->Cedula."'");
					// asociar profesor a tesis
					$model_3->M03_id=$model_1->id;
					$model_3->M05_id=$docente2->id;
					$model_3->P02_id=$tipo2->id;
					$model_3->save();
				}
				else{ // si el profesor esta en el sistema
					$model_3->M03_id=$model_1->id;
					$model_3->M05_id=$docente->id;
					$model_3->P02_id=$tipo2->id;
					$model_3->save();
				}
			}
		}
		$this->render('viejas_tap_subirt',array('Usuario'=>$tar,'model_1'=>$model_1,'model_2'=>$model_2,'model_3'=>$model_3,'model_4'=>$model_4));
	}
function RandomString($length=10,$uc=TRUE,$n=TRUE,$sc=FALSE)
{
$source = 'abcdefghijklmnopqrstuvwxyz';
if($uc==1) $source .= 'ABCDEFGHIJKLMNOPQRSTUVWXYZ';
if($n==1) $source .= '1234567890';
if($sc==1) $source .= '|@#~$%()=^*+[]{}-_';
if($length>0){
$rstr = "";
$source = str_split($source,1);
for($i=1; $i<=$length; $i++){
mt_srand((double)microtime() * 1000000);
$num = mt_rand(1,count($source));
$rstr .= $source[$num-1];
}
}
return $rstr;
}
	/**
	 * Ad-hoc "send e-mail" screen.
	 *
	 * Reuses M05Usuario form attributes as the mail envelope:
	 * Correo_Electronico = recipient, Nombre = message body,
	 * Apellido = subject — see the matching 'correo' view.
	 */
	public function actionCorreo(){
		$tar=M05Usuario::model()->find("Usuario = '".Yii::app ()->user->name."'");
		$model=new M05Usuario;
		if(isset($_POST['M05Usuario'])){
			$model->attributes=$_POST['M05Usuario'];
			// correo_e(recipient, content, subject) — Nombre carries the body
			// and Apellido the subject, matching the view's field bindings.
			$this->correo_e($model->Correo_Electronico,$model->Nombre,$model->Apellido);
		}
		$this->render('correo',array('Usuario'=>$tar,'model'=>$model));
	}
}<file_sep><?php
/* View: ad-hoc e-mail form. Field reuse (see actionCorreo):
   Correo_Electronico = recipient, Apellido = subject ("Asunto"),
   Nombre = rich-text message body. */
$this->breadcrumbs=array(
	'Inicio',
);
echo $this->renderPartial('menu', array('usu'=>$Usuario));
?>
<h1>Enviar correo</h1>
<?php
/* @var $this M05UsuarioController */
/* @var $model M05Usuario */
/* @var $form CActiveForm */
?>
<div class="form">
<?php $form=$this->beginWidget('CActiveForm', array(
	'id'=>'m05-usuario-form',
	// Please note: When you enable ajax validation, make sure the corresponding
	// controller action is handling ajax validation correctly.
	// There is a call to performAjaxValidation() commented in generated controller code.
	// See class documentation of CActiveForm for details on this.
	'enableAjaxValidation'=>false,
)); ?>
	<p class="note">Los campos con <span class="required">*</span> son requeridos.</p>
	<div class="row">
		<?php echo $form->labelEx($model,'Correo_Electronico'); ?></br>
		<?php echo $form->textField($model,'Correo_Electronico',array('size'=>60,'maxlength'=>100)); ?>
	</div>
	<div class="row">
		<?php /* NOTE(review): the subject input is bound to the Apellido
		   attribute — intentional field reuse, confirm before renaming. */ ?>
		<?php echo $form->labelEx($model,'Asunto: '); ?></br>
		<?php echo $form->textField($model,'Apellido',array('size'=>45,'maxlength'=>45)); ?>
	</div>
	<div class="row">
		<?php echo $form->labelEx($model,'Contenido'); ?>
		<?php
		// Rich-text editor bound to the Nombre attribute (carries the body).
		Yii::import('ext.krichtexteditor.KRichTextEditor');
		$this->widget('KRichTextEditor', array(
			'model' => $model,
			'value' => $model->isNewRecord ? '' : $model->Descripcion,
			'attribute' => 'Nombre',
			'options' => array(
				'theme_advanced_resizing' => 'true',
				'theme_advanced_statusbar_location' => 'bottom',
			),
		));
		?>
	</div>
	<div class="row buttons">
		<?php echo CHtml::submitButton($model->isNewRecord ? 'Create' : 'Save'); ?>
	</div>
<?php $this->endWidget(); ?>
</div><!-- form -->
<?php
/* View: list of theses. Expects $Usuario (logged-in user for the menu
   partial) and $dataProvider (M03Tesis records rendered via '_test'). */
$this->breadcrumbs=array(
	'Inicio',
);
echo $this->renderPartial('menu', array('usu'=>$Usuario));
?>
<h1> Lista de tesis </h1>
<?php
// Each item is rendered by the '_test' item partial.
$this->widget('zii.widgets.CListView', array(
	'dataProvider'=>$dataProvider,
	'itemView'=>'_test',
));
?>
<?php /* Item partial for the thesis CListView: shows title, proposal date
   and a link to the evaluation-act page ('actaeva') for this record. */ ?>
<b><?php echo CHtml::encode($data->getAttributeLabel('Titulo')); ?>:</b>
<?php echo CHtml::encode($data->Titulo); ?>
<br />
<b><?php echo CHtml::encode('Fecha de la propuesta'); ?>:</b>
<?php echo CHtml::encode($data->Fecha_Inscripcion); ?>
<br />
<br />
<?php echo CHtml::link(CHtml::encode("Ver detalles"), array('actaeva', 'id'=>$data->id)); ?>
<br />
|
868ea6f1a4690d6a8a802468196885cd8c9ac1b7
|
[
"PHP"
] | 4
|
PHP
|
LuisJavier186/CRIBLYRTH
|
b9748f543ccf449d0db939c3038f33489f3381d4
|
9724a4a44165ccc5b16a9e029c106480d4024559
|
refs/heads/master
|
<repo_name>Harri-Renney/XYZ-Web-Service<file_sep>/src/java/com/model/Claim.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.model;
import java.util.Date;
/**
*
* @author Owner
*/
/**
 * A reimbursement claim raised by a member.
 *
 * Plain mutable data holder; no equals/hashCode, so equality is by identity.
 */
public class Claim {

    private int id;           // unique claim identifier
    private String memId;     // id of the member who raised the claim
    private Date date;        // date the claim was submitted
    private String rationale; // free-text justification
    private String status;    // workflow status string
    private float amount;     // amount claimed

    public Claim(int id, String memId, Date date, String rationale, String status, float amount) {
        this.id = id;
        this.memId = memId;
        this.date = date;
        this.rationale = rationale;
        this.status = status;
        this.amount = amount;
    }

    /**
     * Reports whether strictly more than 365 whole days have elapsed since
     * the claim date (raw millisecond arithmetic; calendar rules such as
     * leap years are not considered).
     */
    public boolean yearPassed() {
        long millisPerDay = 1000L * 60 * 60 * 24;
        long elapsedDays = (new Date().getTime() - date.getTime()) / millisPerDay;
        return elapsedDays > 365;
    }

    public int getId() { return id; }

    public void setId(int id) { this.id = id; }

    public String getMemid() { return memId; }

    public void setMemid(String memid) { this.memId = memid; }

    public Date getDate() { return date; }

    public void setDate(Date date) { this.date = date; }

    public String getRationale() { return rationale; }

    public void setRationale(String rationale) { this.rationale = rationale; }

    public String getStatus() { return status; }

    public void setStatus(String status) { this.status = status; }

    public float getAmount() { return amount; }

    public void setAmount(float amount) { this.amount = amount; }
}
<file_sep>/src/java/com/model/XYZWebApplicationDB.java
package com.model;
import java.sql.SQLException;
import java.sql.Time;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Data-access layer for the XYZ web application, built on a thin
 * {@code JDBCWrapper} helper.
 *
 * NOTE(review): every statement below builds SQL by concatenating
 * caller-supplied values, which is vulnerable to SQL injection and breaks on
 * values containing quotes. Migrate to PreparedStatement with bound
 * parameters (requires the wrapper to expose its Connection).
 *
 * NOTE(review): the read methods use do/while over the wrapper's ResultSet,
 * reading the current row BEFORE calling next() — this assumes
 * wrapper.findRecord()/createResultSet() leave the cursor on a valid first
 * row; when no rows match, the resulting SQLException is merely logged and
 * an empty/partial result is returned. Confirm against JDBCWrapper.
 */
public class XYZWebApplicationDB {
    JDBCWrapper wrapper; // shared JDBC helper; assumed to own the connection

    /** @param w wrapper holding the open database connection */
    public XYZWebApplicationDB(JDBCWrapper w)
    {
        wrapper = w;
    }
    /** Inserts one member row. */
    public void insertMember(Member m)
    {
        java.sql.Date sqlDOB = new java.sql.Date(m.getDOB().getTime());
        // NOTE(review): registration date is ALSO taken from getDOB() —
        // looks like a copy/paste bug; presumably the registration-date
        // getter was intended. Confirm against Member before changing.
        java.sql.Date sqlReg = new java.sql.Date(m.getDOB().getTime());
        wrapper.createStatement();
        try {
            wrapper.getStatement().executeUpdate("insert into members values ('" + m.getUsername() + "', '" + m.getFullName() + "', '" + m.getAddress() + "', '" + sqlDOB.toString() + "', '" + sqlReg.toString() + "', '" + m.getStatus() + "', " + m.getBalance() + ")");
        } catch (SQLException ex) {
            Logger.getLogger(JDBCWrapper.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    /** Inserts one user (login credential) row. */
    public void insertUser(User u)
    {
        wrapper.createStatement();
        try {
            wrapper.getStatement().executeUpdate("insert into users values ('" + u.getId() + "', '" + u.getPassword() + "', '" + u.getStatus() + "')");
        } catch (SQLException ex) {
            Logger.getLogger(JDBCWrapper.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    /** Inserts one payment row. */
    public void insertPayment(Payment p)
    {
        wrapper.createStatement();
        try {
            wrapper.getStatement().executeUpdate("insert into payments values ('" + p.getId() + "', '" + p.getMemid() + "', '" + p.getTypeOfPayment() + "', '" + p.getAmount() + "', '" + p.getDate() + "', '" + p.getTime() + "')");
        } catch (SQLException ex) {
            Logger.getLogger(JDBCWrapper.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    /** Inserts one claim row. */
    public void insertClaim(Claim c)
    {
        wrapper.createStatement();
        try {
            wrapper.getStatement().executeUpdate("insert into claims values ('" + c.getId() + "', '" + c.getMemid() + "', '" + c.getDate() + "', '" + c.getRationale() + "', '" + c.getStatus() + "', '" + c.getAmount() + "')");
        } catch (SQLException ex) {
            Logger.getLogger(JDBCWrapper.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    /**
     * Looks up a user by id.
     * NOTE(review): if no row matches, the getString() calls below throw and
     * a partially blank User is returned (exception only logged).
     */
    public User getUser(String id)
    {
        User ret = new User();
        wrapper.createStatement();
        wrapper.findRecord("users", "id", id);
        try {
            ret.setId(wrapper.getResultSet().getString("id"));
            ret.setPassword(wrapper.getResultSet().getString("password"));
            ret.setStatus(wrapper.getResultSet().getString("status"));
        } catch (SQLException ex) {
            Logger.getLogger(XYZWebApplicationDB.class.getName()).log(Level.SEVERE, null, ex);
        }
        return ret;
    }
    /** Returns all payments recorded for the given member id. */
    public ArrayList<Payment> getUserPayments(String id)
    {
        ArrayList ret = new ArrayList<Payment>();
        wrapper.findRecord("payments", "mem_id", id);
        try {
            // Reads the current row first, then advances — assumes findRecord
            // positioned the cursor on the first matching row.
            do
            {
                ret.add(new Payment(wrapper.getResultSet().getInt("id"), wrapper.getResultSet().getString("mem_id"), wrapper.getResultSet().getString("type_of_payment"), wrapper.getResultSet().getFloat("amount"), Time.valueOf(wrapper.getResultSet().getString("time")), makeDate(wrapper.getResultSet().getString("date"))));
            }while(wrapper.getResultSet().next());
        } catch (SQLException ex) {
            Logger.getLogger(XYZWebApplicationDB.class.getName()).log(Level.SEVERE, null, ex);
        }
        return ret;
    }
    /**
     * Returns every member in the database.
     * Address is stored as one comma-separated string; this assumes exactly
     * five components (number, street, city, county, postcode).
     */
    public ArrayList<Member> getAllMembers()
    {
        ArrayList ret = new ArrayList<Member>();
        wrapper.createStatement();
        wrapper.createResultSet("SELECT * FROM members");
        try {
            wrapper.getResultSet().next();
            do
            {
                String[] addressString = wrapper.getResultSet().getString("address").split(",");
                Address a = new Address(Integer.parseInt(addressString[0]), addressString[1], addressString[2], addressString[3], addressString[4]);
                ret.add(new Member(wrapper.getResultSet().getString("id"), wrapper.getResultSet().getString("name"), a, makeDate(wrapper.getResultSet().getString("dob")), makeDate(wrapper.getResultSet().getString("dor")), wrapper.getResultSet().getString("status"), wrapper.getResultSet().getFloat("balance")));
            }while(wrapper.getResultSet().next());
        } catch (SQLException ex) {
            Logger.getLogger(XYZWebApplicationDB.class.getName()).log(Level.SEVERE, null, ex);
        }
        return ret;
    }
    /** Returns every payment in the database. */
    public ArrayList<Payment> getAllPayments()
    {
        ArrayList ret = new ArrayList<Payment>();
        wrapper.createStatement();
        wrapper.createResultSet("SELECT * FROM payments");
        try {
            wrapper.getResultSet().next();
            do
            {
                ret.add(new Payment(wrapper.getResultSet().getInt("id"), wrapper.getResultSet().getString("mem_id"), wrapper.getResultSet().getString("type_of_payment"), wrapper.getResultSet().getFloat("amount"), wrapper.getResultSet().getTime("time"), wrapper.getResultSet().getDate("date")));
            }while(wrapper.getResultSet().next());
        } catch (SQLException ex) {
            Logger.getLogger(XYZWebApplicationDB.class.getName()).log(Level.SEVERE, null, ex);
        }
        return ret;
    }
    /** Returns members whose status is still "APPLIED" (awaiting approval). */
    public ArrayList<Member> getProvisionalMembers() {
        ArrayList<Member> users;
        users = getAllMembers();
        ArrayList ret = new ArrayList<Member>();
        for (Member user : users) {
            if (user.getStatus().equals("APPLIED")) {
                ret.add(user);
            }
        }
        return ret;
    }
    /** Returns all claims raised by the given member id. */
    public ArrayList<Claim> getMemberClaims(String id)
    {
        ArrayList ret = new ArrayList<Claim>();
        wrapper.findRecord("claims", "mem_id", id);
        try {
            do
            {
                ret.add(new Claim(wrapper.getResultSet().getInt("id"), wrapper.getResultSet().getString("mem_id"), wrapper.getResultSet().getDate("date"), wrapper.getResultSet().getString("rationale"),wrapper.getResultSet().getString("status"), wrapper.getResultSet().getFloat("amount")));
            }while(wrapper.getResultSet().next());
        } catch (SQLException ex) {
            Logger.getLogger(XYZWebApplicationDB.class.getName()).log(Level.SEVERE, null, ex);
        }
        return ret;
    }
    /**
     * Returns every claim, gathered member-by-member.
     * NOTE(review): issues one query per member (N+1 pattern); a single
     * "SELECT * FROM claims" would be cheaper.
     */
    public ArrayList<Claim> getAllClaims()
    {
        ArrayList users = getAllMembers();
        ArrayList ret = new ArrayList<Claim>();
        for(int i = 0; i != users.size(); ++i)
        {
            ArrayList claims = getMemberClaims(((Member)users.get(i)).getUsername());
            for(int j = 0; j != claims.size(); ++j)
            {
                ret.add(claims.get(j));
            }
        }
        return ret;
    }
    /**
     * Parses a dd/MM/yy date string.
     * NOTE(review): on parse failure the error is swallowed and the CURRENT
     * date is returned — silent data corruption; consider propagating.
     */
    public Date makeDate(String dateParam) {
        Date dob = new Date();
        DateFormat df = new SimpleDateFormat("dd/MM/yy");
        try {
            dob = df.parse(dateParam);
        } catch (ParseException ex) {
            System.out.println("Parse exception");
        }
        return dob;
    }
    /** Overwrites the stored password for the given user. */
    public void changePassword(User u)
    {
        wrapper.createStatement();
        try {
            wrapper.getStatement().executeUpdate("UPDATE users SET \"password\" = '"+ u.getPassword() +"' WHERE \"id\" = '" + u.getId() + "'");
        } catch (SQLException ex) {
            Logger.getLogger(JDBCWrapper.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    /** Marks both the user and the member rows as APPROVED. */
    public void approveMemberApplication(User u)
    {
        wrapper.createStatement();
        try {
            wrapper.getStatement().executeUpdate("UPDATE users SET \"status\" = 'APPROVED' WHERE \"id\" = '" + u.getId() + "'");
            wrapper.getStatement().executeUpdate("UPDATE members SET \"status\" = 'APPROVED' WHERE \"id\" = '" + u.getId() + "'");
        } catch (SQLException ex) {
            Logger.getLogger(JDBCWrapper.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    /**
     * Marks the user row as SUSPENDED.
     * NOTE(review): unlike approval, the members row is NOT updated here —
     * confirm whether that asymmetry is intended.
     */
    public void suspendMemberApplication(User u)
    {
        wrapper.createStatement();
        try {
            wrapper.getStatement().executeUpdate("UPDATE users SET \"status\" = 'SUSPENDED' WHERE \"id\" = '" + u.getId() + "'");
        } catch (SQLException ex) {
            Logger.getLogger(JDBCWrapper.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
<file_sep>/src/java/com/web/RegServlet.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.web;
import com.model.Address;
import java.io.IOException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.model.JDBCWrapper;
import com.model.Member;
import com.model.User;
import com.model.XYZWebApplicationDB;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*
* @author Fraser
*/
@WebServlet(name = "RegServlet", urlPatterns = {"/Registration"})
public class RegServlet extends HttpServlet {
/**
* Processes requests for both HTTP <code>GET</code> and <code>POST</code>
* methods.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
String button = request.getParameter("button");
switch (button) {
case "Register":
// Get parameters
String firstName = request.getParameter("firstName");
String lastName = request.getParameter("lastName");
String houseNumber = request.getParameter("houseNumber");
houseNumber = houseNumber.trim();
String streetName = request.getParameter("streetName");
String city = request.getParameter("city");
String county = request.getParameter("county");
String postCode = request.getParameter("postCode");
postCode = postCode.toUpperCase();
String userID = makeUserID(firstName,lastName);
try {
Date dob = makeDate(request.getParameter("DOB"));
// Error check
if (isEmpty(firstName, lastName, houseNumber, streetName, city, county, postCode)) {
request.setAttribute("errorMessage", "1 or more field has been left blank");
RequestDispatcher rd = request.getRequestDispatcher("registrationPage.jsp");
rd.forward(request, response);
} else if (!isValidPostcode(postCode)) {
request.setAttribute("errorMessage2", "Invalid PostCode");
RequestDispatcher rd = request.getRequestDispatcher("registrationPage.jsp");
rd.forward(request, response);
} else {
Date dor = new Date();
Member m = new Member(userID, firstName + " " + lastName, new Address(Integer.parseInt(houseNumber), streetName, city, county, postCode), dob, dor, "APPLIED", 0);
User u = new User(userID, User.createPassword(), "APPLIED");
//Inserting members with data provided above^^
JDBCWrapper wrapper = (JDBCWrapper) getServletContext().getAttribute("database");
new XYZWebApplicationDB(wrapper).insertMember(m);
new XYZWebApplicationDB(wrapper).insertUser(u);
request.setAttribute("username", u.getId());
request.setAttribute("password", <PASSWORD>());
RequestDispatcher view = request.getRequestDispatcher("RegistrationSuccess.jsp");
view.forward(request, response);
}
} catch (NumberFormatException ex) {
// Catch if the house number is a String
request.setAttribute("errorMessage3", "House number must be a number");
RequestDispatcher rd = request.getRequestDispatcher("registrationPage.jsp");
rd.forward(request, response);
} catch (ParseException ex) {
// Catch if invalid date
request.setAttribute("errorMessage4", "Invalid Date");
RequestDispatcher rd = request.getRequestDispatcher("registrationPage.jsp");
rd.forward(request, response);
}
break;
case "login":
RequestDispatcher view = request.getRequestDispatcher("login.jsp");
view.forward(request, response);
break;
case "backPage":
RequestDispatcher view2 = request.getRequestDispatcher("login.jsp");
view2.forward(request, response);
break;
default:
break;
}
}
// <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code.">
/**
* Handles the HTTP <code>GET</code> method.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
processRequest(request, response);
}
/**
* Handles the HTTP <code>POST</code> method.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
processRequest(request, response);
}
public String makeUserID(String firstName, String lastName) {
char initial = firstName.toLowerCase().charAt(0);
lastName = lastName.toLowerCase();
JDBCWrapper wrapper = (JDBCWrapper) getServletContext().getAttribute("database");
int count = 0;
wrapper.createStatement();
if (wrapper.findRecord("users", "id", initial + "-" + lastName)) {
count++;
}
if (count == 0) {
return initial + "-" + lastName;
} else {
return initial + "-" + lastName + count;
}
}
public boolean isValidPostcode(String postcode) {
String regex = "^[A-Z]{1,2}[0-9R][0-9A-Z]? [0-9][ABD-HJLNP-UW-Z]{2}$";
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(postcode);
return matcher.matches();
}
public Date makeDate(String dateParam) throws ParseException {
Date dob = new Date();
DateFormat df = new SimpleDateFormat("dd/MM/yy");
dob = df.parse(dateParam);
return dob;
}
public boolean isEmpty(String userID, String fullName, String houseNumber, String streetName, String city, String county, String postCode) {
return userID.trim().isEmpty() || fullName.trim().isEmpty() || houseNumber.trim().isEmpty()
|| streetName.trim().isEmpty() || city.trim().isEmpty() || county.trim().isEmpty() || postCode.trim().isEmpty();
}
/**
* Returns a short description of the servlet.
*
* @return a String containing servlet description
*/
@Override
public String getServletInfo() {
return "Short description";
}// </editor-fold>
}
<file_sep>/README.md
# XYZ-Web-Service
|
f89833cdf505f452ed636e36bcbfbff8ec42efc7
|
[
"Markdown",
"Java"
] | 4
|
Java
|
Harri-Renney/XYZ-Web-Service
|
9ebca6079b9f4a7cfde44352b0cb7f3cf2fc0b0e
|
82a0035ceb3f42edcc1c7571284ca9d0bd20aaf3
|
refs/heads/master
|
<file_sep>boto3==1.4.0
botocore==1.4.56
docutils==0.12
Flask==0.10.1
futures==3.0.5
itsdangerous==0.24
Jinja2==2.8
jmespath==0.9.0
MarkupSafe==0.23
python-dateutil==2.5.3
s3transfer==0.1.4
six==1.10.0
Werkzeug==0.11.11
<file_sep>/******************************************************************************
* HTML5 Multiple File Uploader Demo *
******************************************************************************/
// Constants
var MAX_UPLOAD_FILE_SIZE = 25*1024*1024; // 25 MB (value is 25*1024*1024; old comment incorrectly said 1 MB)
var UPLOAD_URL = "/upload"; // endpoint that receives the multipart form POST
var NEXT_URL = "/files/"; // page listing uploaded files

// List of pending files to handle when the Upload button is finally clicked.
var PENDING_FILES = [];
function stringify(obj)
{
    // Flatten the (possibly nested) object with JSON.flatten and concatenate
    // every leaf value into a single string, in enumeration order.
    var flattened = JSON.flatten(obj);
    var joined = "";
    for (var key in flattened) {
        if (flattened.hasOwnProperty(key)) {
            joined += flattened[key];
        }
    }
    return joined;
}
// Extracts the next space-delimited phrase starting at index n: the phrase
// begins at the first space at/after n and ends before the following space.
// When the accumulated text is shorter than `skip` characters, further
// phrases are appended recursively.
//
// Fix: the original scanned with string[i] == " " and never terminated when
// the text had no trailing delimiter (string[i] is undefined past the end,
// which never equals " "), causing an infinite loop. End-of-string is now
// treated as a delimiter.
function grabnextphrase(string, n, skip)
{
    // Char lookup that treats positions past the end as a space delimiter.
    function charAt(i) {
        return i < string.length ? string[i] : " ";
    }
    var left = -1;
    var right = 0;
    while (n + right <= string.length) {
        if (charAt(n + right) == " " && left == -1) {
            // First delimiter found: phrase starts here.
            left = right;
            right++;
        } else if (charAt(n + right) == " " && left != -1) {
            // Second delimiter: phrase is complete.
            break;
        } else {
            right++;
        }
    }
    var hitEnd = (n + right >= string.length);
    var returnString = string.substring(n + left, n + right);
    // Stop at end-of-input, or once at least `skip` characters were grabbed.
    if (hitEnd || returnString.length >= skip) {
        return returnString;
    }
    return returnString + grabnextphrase(string, n + right, skip);
}
// Floating-label behaviour: toggle 'active'/'highlight' on the <label> that
// precedes each input as the user types, focuses or blurs the field.
$('.form').find('input, textarea').on('keyup blur focus', function (e) {
    var $this = $(this),
        label = $this.prev('label');

    if (e.type === 'keyup') {
        // While typing: label floats whenever the field is non-empty.
        if ($this.val() === '') {
          label.removeClass('active highlight');
        } else {
          label.addClass('active highlight');
        }
    } else if (e.type === 'blur') {
        // On blur: keep the label floated but drop the highlight colour.
        if( $this.val() === '' ) {
          label.removeClass('active highlight');
        } else {
          label.removeClass('highlight');
        }
    } else if (e.type === 'focus') {
        // On focus: highlight only when there is already content.
        if( $this.val() === '' ) {
          label.removeClass('highlight');
        }
        else if( $this.val() !== '' ) {
          label.addClass('highlight');
        }
    }
});
// Pre-fills the application form from parsed resume data. Each section is
// wrapped in try/catch so one missing field does not abort the rest.
//
// Fix: the original used `if (lstring.search("gpa")) { ... }` — but
// String.prototype.search returns -1 when NOT found (truthy!) and 0 when the
// match is at index 0 (falsy), so the checks fired on absence and could skip
// genuine matches. All presence checks now compare against -1 explicitly.
function populate(data) {
    var string;
    // phone
    try{
        if(data.basics.phone){
            document.getElementById('inputPhone').value = data.basics.phone;
            label = $('#inputPhone').prev('label');
            label.addClass('active highlight');
        }
    }
    catch(err){console.log(err);}
    // address
    try{
        if(data.basics.address){
            document.getElementById('inputAddress').value = data.basics.address;
            label = $('#inputAddress').prev('label');
            label.addClass('active highlight');
        }
    }
    catch(err){console.log(err);}
    // website
    try{
        if(data.basics.url){
            document.getElementById('inputUrl').value = stringify(data.basics.url);
            label = $('#inputUrl').prev('label');
            label.addClass('active highlight');
        }
    }
    catch(err){console.log(err);}
    // Education: scan the flattened section text for keywords and pull the
    // phrase that follows each one.
    try{
        if(data.education_and_training){
            string = stringify(data.education_and_training);
            lstring = string.toLowerCase();
            var n;
            if(lstring.search("gpa") !== -1){
                n = lstring.search("gpa");
                document.getElementById('inputGPA').value = grabnextphrase(string,n,-1);
                label = $('#inputGPA').prev('label');
                label.addClass('active highlight');
            }
            if(lstring.search("university") !== -1){
                n = lstring.search("university");
                document.getElementById('inputInstitution').value = "University" + grabnextphrase(string,n,4);
                label = $('#inputInstitution').prev('label');
                label.addClass('active highlight');
            }
            if(lstring.search("expected") !== -1){
                n = lstring.search("expected");
                console.log(n);
                document.getElementById('inputGrad').value = grabnextphrase(string,n,-1);
                label = $('#inputGrad').prev('label');
                label.addClass('active highlight');
            }
            if(lstring.search("major") !== -1){
                n = lstring.search("major");
                console.log(n);
                document.getElementById('inputMajor').value = grabnextphrase(string,n,-1);
                label = $('#inputMajor').prev('label');
                label.addClass('active highlight');
            }
        }
    }
    catch(err){console.log(err);}
    // skills
    try{
        if(data.skills){
            console.log(stringify(data.skills))
            document.getElementById('inputSkills').value = stringify(data.skills);
        }
    }
    catch(err){console.log(err);}
    // Work experience
    try{
        if(data.work_experience){
            // NOTE(review): this logs data.skills (copy/paste) while filling
            // from data.work_experience — harmless debug noise, kept as-is.
            console.log(stringify(data.skills))
            document.getElementById('inputWE1DESCRIPTION').value = stringify(data.work_experience);
        }
    }
    catch(err){console.log(err);}
}
$(document).ready(function() {
    // Initial page state: hide phase 2 and the spinner, then wire up the
    // drag/drop zone.
    $('#phase2').hide();
    $('#loader').hide();
    initDropbox();

    // Set up the handler for the file input box.
    $("#file-picker").on("change", function() {
        handleFiles(this.files);
    });

    // Handle the submit button.
    $("#upload-button").on("click", function(e) {
        // If the user has JS disabled, none of this code is running but the
        // file multi-upload input box should still work. In this case they'll
        // just POST to the upload endpoint directly. However, with JS we'll do
        // the POST using ajax and then redirect them ourself when done.
        e.preventDefault();
        $('#loader').show();
        doUpload();
    })

    // "Done" button leaves the upload flow.
    $("#done-button").on("click", function(e) {
        e.preventDefault();
        window.location="/done";
    })
});
// Submits the form fields plus all PENDING_FILES to UPLOAD_URL via ajax,
// driving the progress bar from XHR upload events. On completion the server
// reply (JSON with {status, msg}) is shown in an alert.
function doUpload() {
    $("#progress").show();
    var $progressBar = $("#progress-bar");

    // Gray out the form.
    // $("#upload-form :input").attr("disabled", "disabled");

    // Initialize the progress bar.
    $progressBar.css({"width": "0%"});

    // Collect the form data.
    fd = collectFormData();

    // Attach the files.
    for (var i = 0, ie = PENDING_FILES.length; i < ie; i++) {
        // Collect the other form data.
        fd.append("file", PENDING_FILES[i]);
    }

    // Inform the back-end that we're doing this over ajax.
    fd.append("__ajax", "true");

    var xhr = $.ajax({
        xhr: function() {
            var xhrobj = $.ajaxSettings.xhr();
            if (xhrobj.upload) {
                // Track native upload progress to animate the bar.
                xhrobj.upload.addEventListener("progress", function(event) {
                    var percent = 0;
                    var position = event.loaded || event.position;
                    var total = event.total;
                    if (event.lengthComputable) {
                        percent = Math.ceil(position / total * 100);
                    }

                    // Set the progress bar.
                    $progressBar.css({"width": percent + "%"});
                    $progressBar.text(percent + "%");
                }, false)
            }
            return xhrobj;
        },
        url: UPLOAD_URL,
        method: "POST",
        contentType: false,  // let the browser set the multipart boundary
        processData: false,  // fd is already a FormData, don't serialize it
        cache: false,
        data: fd,
        success: function(data) {
            $progressBar.css({"width": "100%"});
            data = JSON.parse(data);
            if (data.status === "error") {
                console.log("Hello");
                window.alert(data.msg);
                $("#upload-form :input").removeAttr("disabled");
                $('#loader').hide();
                return;
            }
            else {
                console.log("David");
                window.alert(data.msg);
                $('#loader').hide();
                return
            }
        },
        error: function(e) {
            console.log(e);
        }
    });
}
function collectFormData() {
    // Harvest every named, non-file input on the upload form into a FormData.
    // The file picker is skipped here; the caller appends files itself.
    var formData = new FormData();
    $("#upload-form :input").each(function() {
        var $field = $(this);
        var fieldName = $field.attr("name");
        var fieldType = $field.attr("type") || "";

        // Unnamed fields contribute nothing; file inputs handled elsewhere.
        if (fieldName === undefined || fieldType === "file") {
            return;
        }

        // Checkboxes and radios only count when actually selected.
        if ((fieldType === "checkbox" || fieldType === "radio") && !$field.is(":checked")) {
            return;
        }

        formData.append(fieldName, $field.val());
    });
    return formData;
}
function handleFiles(files) {
    // Queue every chosen file for the eventual upload request.
    var total = files.length;
    for (var idx = 0; idx < total; idx++) {
        PENDING_FILES.push(files[idx]);
    }
}
// Wires up the drag-and-drop zone (#dropbox): highlights on drag-enter,
// queues dropped files via handleFiles, and suppresses the browser's default
// "open dropped file" behaviour elsewhere on the page.
function initDropbox() {
    var $dropbox = $("#dropbox");

    // On drag enter...
    $dropbox.on("dragenter", function(e) {
        e.stopPropagation();
        e.preventDefault();
        $(this).addClass("active");
    });

    // On drag over...
    $dropbox.on("dragover", function(e) {
        e.stopPropagation();
        e.preventDefault();
    });

    // On drop...
    $dropbox.on("drop", function(e) {
        e.preventDefault();
        $(this).removeClass("active");

        // Get the files.
        var files = e.originalEvent.dataTransfer.files;
        handleFiles(files);

        // Update the display to acknowledge the number of pending files.
        $dropbox.text(PENDING_FILES.length + " files ready for upload!");
    });

    // If the files are dropped outside of the drop zone, the browser will
    // redirect to show the files in the window. To avoid that we can prevent
    // the 'drop' event on the document.
    function stopDefault(e) {
        e.stopPropagation();
        e.preventDefault();
    }
    $(document).on("dragenter", stopDefault);
    $(document).on("dragover", stopDefault);
    $(document).on("drop", stopDefault);
}
// $('.tab a').on('click', function (e) {
// e.preventDefault();
// $(this).parent().addClass('active');
// $(this).parent().siblings().removeClass('active');
// target = $(this).attr('href');
// $('.tab-content > div').not(target).hide();
// $(target).fadeIn(600);
// });
// Collapse a nested object/array into a single-level map whose keys are
// dot-separated paths (e.g. {a:{b:1}} -> {"a.b":1}, arrays use numeric
// segments). Empty objects/arrays are preserved under their own path.
JSON.flatten = function(data) {
    var flat = {};
    var walk = function(node, path) {
        if (Object(node) !== node) {
            // Primitive leaf: record it under its path.
            flat[path] = node;
            return;
        }
        if (Array.isArray(node)) {
            for (var idx = 0; idx < node.length; idx++) {
                walk(node[idx], path ? path + "." + idx : "" + idx);
            }
            if (node.length === 0) {
                flat[path] = [];
            }
            return;
        }
        var sawKey = false;
        for (var key in node) {
            sawKey = true;
            walk(node[key], path ? path + "." + key : key);
        }
        if (!sawKey) {
            flat[path] = {};
        }
    };
    walk(data, "");
    return flat;
}
/* When the user clicks on the button,
toggle between hiding and showing the dropdown content */
function myFunction() {
    // The 'show' class controls the dropdown panel's CSS visibility.
    document.getElementById("myDropdown").classList.toggle("show");
}
// Close the dropdown menu if the user clicks outside of it
window.onclick = function(event) {
  // Clicks on the trigger itself are ignored; myFunction() handles toggling.
  if (!event.target.matches('.dropbtn')) {
    var dropdowns = document.getElementsByClassName("dropdown-content");
    var i;
    for (i = 0; i < dropdowns.length; i++) {
      var openDropdown = dropdowns[i];
      if (openDropdown.classList.contains('show')) {
        openDropdown.classList.remove('show');
      }
    }
  }
}
<file_sep>import boto3
import re
from flask import Flask, render_template, request, json, redirect, url_for
from uuid import uuid4

application = Flask(__name__, static_url_path='')

##############DDB#######################################
# DynamoDB tables: one for the full application records, one used for
# name-based duplicate lookups of resumes.
ddbregion = 'us-west-2'
tablename = 'BCNCApplications'
tablenameDB = 'BCNCResumes'
ddb = boto3.resource('dynamodb', region_name=ddbregion)
table = ddb.Table(tablename)
tableDB = ddb.Table(tablenameDB)
########################################################
##############S3########################################
# S3 bucket that stores the uploaded resume files, keyed by applicant name.
s3region = 'us-west-2'
s3bucket = 'bcncresumes'
s3 = boto3.resource('s3', region_name=s3region)
########################################################
@application.route('/')
def index():
    """Serve the static application form page."""
    return render_template('index.html')
@application.route("/upload", methods=['POST','GET'])
def upload():
    """Handle an application submission.

    Validates the form fields, records the application in both DynamoDB
    tables and stores any uploaded resume files in S3.

    Returns:
        A JSON ajax_response payload: status "ok" on success, otherwise
        "error" with a human-readable message for the first failure.
    """
    try:
        # Pull the form fields out individually since each has to be
        # validated anyway.
        _uploadKey = str(uuid4())
        _firstName = str(request.form['inputFName'])
        _lastName = str(request.form['inputLName'])
        _name = _firstName + " " + _lastName
        _email = str(request.form['inputEmail'])
        _primary = str(request.form['inputPMajor'])
        _secondary = str(request.form['inputSMajor'])
        _gpa = str(request.form['inputGPA'])
        _year = str(request.form['inputGradYear'])
        _q1 = str(request.form['fquestion'])
        _q2 = str(request.form['squestion'])
        _position = str(request.form['position'])

        # Secondary major is optional; everything else is required.
        if not (_firstName and _lastName and _email and _primary and _gpa and _year and _q1 and _q2 and _position != 'placeholder'):
            return ajax_response(False, "A required field has been left blank.")

        # Validate once and reuse the result (previously validateInput was
        # invoked a second time for the same check).
        validation = validateInput(_email, _gpa, _year)
        if validation != "OK":
            return ajax_response(False, validation)
        elif len(request.files.getlist("file")) == 0:
            return ajax_response(False, "Please upload a file.")
        elif not (queryDDB(_name).get('Item') is None):
            return ajax_response(False, "An entry with this name: " + _name + " already exists. If this is a mistake shoot an email to <EMAIL>")

        # Record the application in both DynamoDB tables.
        response = table.put_item(Item=fillItem(_uploadKey, _name, _email, _primary, _secondary, _gpa, _year, _q1, _q2, _position))
        responseDB = tableDB.put_item(Item=fillItemDB(_uploadKey, _name, _email, _primary, _secondary, _gpa, _year))

        # Upload each resume file to S3 under the applicant's name.
        # FIX: use the *last* path component as the object name. The previous
        # rsplit("/")[0] picked the first component, which is the directory
        # when a browser submits a path-qualified filename.
        # Loop variable renamed so it no longer shadows this function.
        for uploaded_file in request.files.getlist("file"):
            s3.Bucket(s3bucket).put_object(Key=_name + '/' + uploaded_file.filename.rsplit("/")[-1], Body=uploaded_file)
    except Exception as e:
        return ajax_response(False, str(e))
    return ajax_response(True, "Your information has successfully been recorded.")
def fillItem(uuid, name, email, pmajor, smajor, gpa, year, q1, q2, position):
    """Build the DynamoDB item for the applications table.

    The 'Secondary Major' attribute is only included when a secondary
    major was actually supplied (non-empty).
    """
    item = {
        'UploadKey': uuid,
        'Name': name,
        'Email': email,
        'Primary Major': pmajor,
        'GPA': gpa,
        'Graduation Year': year,
        'Question 1': q1,
        'Question 2': q2,
        'Position Desired': position,
    }
    if smajor:
        item['Secondary Major'] = smajor
    return item
def fillItemDB(uuid, name, email, pmajor, smajor, gpa, year):
    """Build the slimmer DynamoDB item for the resumes lookup table.

    Same shape as fillItem but without the essay questions and position;
    'Secondary Major' is only included when non-empty.
    """
    item = {
        'UploadKey': uuid,
        'Name': name,
        'Email': email,
        'Primary Major': pmajor,
        'GPA': gpa,
        'Graduation Year': year,
    }
    if smajor:
        item['Secondary Major'] = smajor
    return item
def ajax_response(status, msg):
    """Serialize the standard {status, msg} JSON reply for the frontend.

    `status` is truthy for success ("ok") and falsy for failure ("error").
    """
    return json.dumps({
        'status': 'ok' if status else 'error',
        'msg': msg,
    })
def queryDDB(name):
    """Look up an existing resume entry by applicant name.

    Returns the raw DynamoDB get_item response dict; callers check
    response.get('Item') for presence (absent key means no duplicate).
    """
    #try:
    item = tableDB.get_item(Key={'Name': name})
    #except boto.dynamodb.exceptions.DynamoDBKeyNotFoundError:
    #item = None
    return item
# Accepts "local@domain.tld" shapes; re.match anchors the start implicitly,
# the pattern anchors the end.
EMAIL_REGEX = re.compile(r"[^@\s]+@[^@\s]+\.[^@\s.]+$")
# One digit, a dot, then one or more digits (e.g. "3.75").
GPA_REGEX = re.compile("^[0-9]\.[0-9]+$")

def validateInput(email, gpa, year):
    """Sanity-check the email, GPA and graduation-year form fields.

    Returns "OK" when everything passes, otherwise a human-readable
    description of the first failing check.
    """
    try:
        if not EMAIL_REGEX.match(email):
            return "Input email is not valid. Please check for typos."
        if not GPA_REGEX.match(gpa) or float(gpa) > 4.0 or float(gpa) < 0.0:
            return "Input GPA is not valid. Must be in format D.DD, where 'D' is a digit. Must be on 4.0 scale."
        # TODO: currently just makes sure reasonable. Probably narrow by current date.
        if int(year) < 1950 or int(year) > 2050:
            return "Input graduation date must be somewhat reasonable."
        return "OK"
    except ValueError:
        return "Input graduation date must be a number in the 2000's. GPA must be on 4.0 scale."
if __name__ == "__main__":
    # Local development entry point; a WSGI host (e.g. Elastic Beanstalk)
    # imports `application` directly instead.
    application.debug = True
    application.run()
<file_sep># BCNC-Resume-Submission
Basic Flask app that acts as a application portal/drive for UCDavis BCNC club
|
7fb7a713901842cf57eabd42ef9b3e461e7e5e7f
|
[
"JavaScript",
"Python",
"Text",
"Markdown"
] | 4
|
Text
|
BCNC/ApplicationForms
|
ed757ad4b119fb697359fe11c94bbf87b0a1f874
|
71dee3422b5fd9507b7d4a18c1e4a8e1d56ecadf
|
refs/heads/master
|
<repo_name>ChukaEbi/bowling-challenge<file_sep>/spec/FrameSpec.js
// Jasmine specs for Frame: a frame starts with 10 pins, pins can be
// knocked down, and the frame can be reset to a full set.
describe('Frame',function(){
    var frame;

    // Fresh frame before each spec.
    beforeEach(function(){
        frame = new Frame();
    });

    describe('when a new frame is made', function(){
        it('should start with 10 pins', function(){
            expect(frame.pins).toEqual(10);
        });
        it('should remove pins from the current frame', function(){
            frame.removePins(2);
            expect(frame.pins).toEqual(8);
        });
    });

    it('can be reset',function(){
        frame.removePins(5);
        frame.resetPins();
        expect(frame.pins).toEqual(10);
    })
});
<file_sep>/lib/Frame.js
// Frame tracks the pins standing in a single bowling frame.
function Frame(){
    this.MAX_PINS = 10;        // a frame always starts with 10 pins
    this.pins = this.MAX_PINS; // pins currently standing
}

// NOTE(review): the instance property `this.pins` set in the constructor
// shadows this prototype function, so it is unreachable dead code —
// callers read `frame.pins` directly (see FrameSpec).
Frame.prototype.pins = function(){
    return this.pins;
}

// Knock down `pins` pins in this frame (no lower bound is enforced).
Frame.prototype.removePins = function(pins){
    this.pins-=pins;
}

// Restore the frame to a full set of pins.
Frame.prototype.resetPins = function(){ this.pins = this.MAX_PINS; }
<file_sep>/lib/Game.js
// Game drives a simple 10-round bowling game built on Frame, with random
// throws generated by getRandomInt.
// NOTE(review): the prototype functions NO_OF_ROUNDS and round below are
// shadowed by the same-named instance fields assigned in this constructor,
// so those prototype functions are unreachable dead code.
function Game(){
    this.frame = new Frame();   // pin state for the current round
    this.NO_OF_ROUNDS = 10;     // rounds in a full game
    this.MAX_BALLS = 2;         // balls per round
    this.ball = this.MAX_BALLS; // balls remaining in the current round
    this.round = 1;             // current round number (1-based)
    this.score = 0;             // accumulated score
    this.isStrike = false;      // true while a strike bonus applies
}

// Shadowed by the instance field `NO_OF_ROUNDS` — never callable.
Game.prototype.NO_OF_ROUNDS = function(){ return this.NO_OF_ROUNDS;}

// Shadowed by the instance field `round` — never callable.
Game.prototype.round = function(){ return this.round; }

// Advance to the next round with a fresh frame and a full set of balls.
Game.prototype.nextRound = function() {
    this.frame = new Frame();
    this.ball = this.MAX_BALLS;
    this.round+=1;
}

// Consume one ball and score the (randomly generated) throw.
Game.prototype.ballThrown = function(){
    this.ball -= 1;
    this.recordScore();
}

// Play one roll; ends the round when no balls remain, and after round 10
// returns the final-score message instead of starting a new round.
Game.prototype.roll = function(){
    this.ballThrown();
    if(this.ball === 0){
        if(this.round === 10){ return "Your final score is " +this.score+ "."; }
        this.nextRound();
        this.isStrike = false;
    }
}

// Pins still standing in the current frame (internal helper).
Game.prototype._noOfPins = function(){ return this.frame.pins;}

// Generate a random throw against the remaining pins and update the score.
// Knocking down all 10 counts as a strike: the next round starts at once
// and isStrike doubles the points of the following scored throw.
Game.prototype.recordScore = function(){
    var points = this.getRandomInt(0,(this.frame.pins + 1));
    if(points == 10){
        this.nextRound();
        this.isStrike = true;
    }
    else{
        this.frame.removePins(points);
        this.score+= points;
        if(this.isStrike) { this.score+=points; }
    }
}

// Uniform random integer in [min, max] inclusive.
Game.prototype.getRandomInt = function(min, max) {
    return Math.floor(Math.random() * (max - min + 1) + min);
}
|
3e0e856b2236a3ad6bdc1c104b078a2ba141d375
|
[
"JavaScript"
] | 3
|
JavaScript
|
ChukaEbi/bowling-challenge
|
367185e84382773b169757d9d9e59aeda636f90d
|
95eac9c1f4dd6bc5ce556fc4132cf6d3de43243f
|
refs/heads/master
|
<file_sep>enum sandWich {
    // The components of a sandwich, in assembly order.
    case bread
    case meat
    case cheese
    case greens
    case extras
    case bread2
}

// switch bread
// Bread options; the switch below prints a reaction to the current choice.
enum bread {
    case Wheat
    case White
    case Rye
}

var breadChoice = bread.Wheat
breadChoice = .Wheat

switch breadChoice {
case .Wheat:
    print("Love it.")
case .White:
    print("Meh...")
case .Rye:
    print("Why does it even exist?")
}

// Meat options and reactions.
enum meat {
    case Turkey
    case Ham
    case Bologna
}

var meatChoice = meat.Turkey
meatChoice = .Turkey

switch meatChoice {
case .Turkey:
    print("Great choice! My personal favorite")
case .Ham:
    print("Nice!")
case .Bologna:
    print("Not my favorite, but hey.")
}

// Cheese options and reactions.
enum cheese {
    case cheddar
    case americanCheese
    case mozarella
}

var cheeseChoice = cheese.americanCheese
cheeseChoice = .americanCheese

switch cheeseChoice {
case .americanCheese:
    print("Good choice!")
case .cheddar:
    print("Ok.")
case .mozarella:
    print("Of course!")
}

// Greens options and reactions.
enum greens {
    case spinach
    case lettuce
    case cabbage
}

var greenChoice = greens.lettuce
greenChoice = .lettuce

switch greenChoice {
case .lettuce:
    print("Yes.Ma'am.")
case .cabbage:
    print("I prefer mines cooked.")
case .spinach:
    print("Just why?")
}

// Extras (condiments) and reactions.
enum extras {
    case mayo
    case buffaloSauce
}

var extrasChoice = extras.mayo
extrasChoice = .mayo

switch extrasChoice {
case .mayo:
    print("Excellent.")
case .buffaloSauce:
    print("You're weird.")
}

// Top slice of bread.
// NOTE(review): bread2Choice is initialized from `bread`, not `bread2`,
// so enum bread2 is declared but never used — confirm intent.
enum bread2 {
    case Wheat
    case White
    case Rye
}

var bread2Choice = bread.Wheat
bread2Choice = .Wheat

switch bread2Choice {
case .Wheat:
    print("Love it.")
case .White:
    print("Meh...")
case .Rye:
    print("Why does it even exist?")
}
|
37849366b37bac84f085c9c328c8be002137579f
|
[
"Swift"
] | 1
|
Swift
|
dndouglas1/EnumExercise
|
1a59c382d7c4752beeaa28d3b12a9ccb13fd7640
|
7cfb303fa4b5eb3fa8c006af4c9f012513c85288
|
refs/heads/master
|
<file_sep><?php get_header();
$lastWordShortcodeExists = shortcode_exists( 'my-last-word' );
?>
<p class="lead">
    <?php /* Show the last-word shortcode output when the plugin is active,
            otherwise fall back to placeholder copy. */ ?>
    <?php if ($lastWordShortcodeExists) :
        echo do_shortcode('[my-last-word]');
    else :
        esc_html_e( 'This is a simple hero unit, a simple jumbotron-style component for
calling extra attention to featured content or information.', 'my_theme' );
    endif ?>
</p>
<a class="btn btn-primary btn-lg" href="#" role="button">
    <?php esc_html_e( 'Learn more', 'my_theme' ); ?>
</a>
<hr class="my-4"><file_sep><?php
// Run theme setup as soon as the theme is loaded.
add_action( 'after_setup_theme', 'my_theme_setup' );

/**
 * Register translations, core theme supports and the main nav menu.
 */
function my_theme_setup() {
    load_theme_textdomain( 'my_theme', get_template_directory() . '/languages' );
    add_theme_support( 'post-thumbnails' );
    add_theme_support( 'automatic-feed-links' );
    add_theme_support( 'title-tag' );
    register_nav_menus( array( 'main-menu' => esc_html__( 'Main Menu', 'my_theme' ) ) );
}
/**
 * Enqueue styles.
 *
 * Loads the theme stylesheet plus Bootstrap 4.0.0 from the MaxCDN CDN.
 */
function my_theme_load_styles()
{
    wp_enqueue_style('my_theme_style', get_stylesheet_uri());
    wp_enqueue_style( 'bootstrap-css', 'https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0/css/bootstrap.min.css' );
}
add_action( 'wp_enqueue_scripts', 'my_theme_load_styles' );
/**
 *
 * Create welcome(homepage) and about pages upon theme activation.
 *
 * Runs on 'after_switch_theme' (admin context only). Each page is created
 * only if no page with the checked title already exists.
 */
function my_theme_activation () {
    if(is_admin()) {
        $about_page_title = 'About';
        // NOTE(review): get_page_by_title() was deprecated in WP 6.2 —
        // confirm the target WP version or switch to a WP_Query lookup.
        $about_page_check = get_page_by_title($about_page_title);
        if (!$about_page_check) {
            $about_page = create_page(
                'About',
                'about',
                'This is about us page page. Since we don\'t know who we are, there is really not much to say.
                Other than random stuff that is!'
            );
            if($about_page) {
                // // this is already the default so it should already be in place
                // update_option( 'page_for_posts', 2 );
            }
        }
        $home_page_title = 'Homepage';
        // NOTE(review): the existence check uses the title 'Homepage' but
        // the page is created with the title 'Welcome', so this check never
        // matches the page it creates — confirm intent.
        $home_page_check = get_page_by_title($home_page_title);
        if (!$home_page_check) {
            $home_page = create_page(
                'Welcome',
                'welcome',
                'Welcome to the Welcome page!!'
            );
            if($home_page) {
                // Use the freshly created page ID as the static front page.
                update_option( 'page_on_front', $home_page );
                update_option( 'show_on_front', 'page' );
            }
        }
    }
}
add_action('after_switch_theme', 'my_theme_activation');
/**
 * Create a wordpress page.
 *
 * @param $title string   The page title.
 * @param $slug string    The URL slug for the page.
 * @param $content string The page body content.
 *
 * @return int the page ID or 0 if it fails
 */
function create_page($title, $slug, $content)
{
    $page = array(
        'post_type'    => 'page',
        'post_title'   => $title,
        'post_content' => $content,
        'post_status'  => 'publish',
        'post_author'  => 1,
        // FIX: wp_insert_post() reads the slug from 'post_name'; the
        // previous 'post_slug' key is not a recognized field and was
        // silently ignored, leaving WP to auto-generate the slug.
        'post_name'    => $slug
    );

    return wp_insert_post($page);
}<file_sep><?php get_header();?>
<main id="content">
    <?php /* Standard WordPress loop: render the current page inside a
            jumbotron, with two static text columns below the content. */ ?>
    <?php if ( have_posts() ) : while ( have_posts() ) : the_post(); ?>
    <article id="post-<?php the_ID(); ?>" <?php post_class(); ?>>
        <div class="entry-content jumbotron">
            <header class="header">
                <h1 class="entry-title text-center">
                    <?php /* NOTE(review): the_title()/the_content() already
                            echo; the extra `echo` is redundant but harmless. */ ?>
                    <?php echo the_title(); ?>
                </h1>
            </header>
            <div class="container">
                <div class="text-center">
                    <?php echo the_content(); ?>
                    <hr class="my-4">
                </div>
                <div class="row">
                    <div class="col-sm">
                        <?php get_template_part( 'static-column-text' ); ?>
                    </div>
                    <div class="col-sm">
                        <?php get_template_part( 'static-column-text' ); ?>
                    </div>
                </div>
            </div>
        </div>
    </article>
    <?php endwhile; endif; ?>
</main>
<?php get_footer(); ?>
<file_sep><?php
/**
* Coordinates the public-facing functionality of the plugin.
*
* Plugin Convention:
* Methods in underscore naming represent registered wordpress hook callbacks
*
* @link https://e-leven.net/
* @since 1.0.0
*
* @package last-word
* @subpackage last-word/public
* @author <NAME> <<EMAIL>>
*/
namespace LastWord;
use LastWord\MyLastWord\MyLastWordPostContent;
use LastWord\MyLastWord\MyLastWordShortcode;
class PublicController {

    const SHORTCODE_TAG_NAME = 'my-last-word';

    /**
     * Initialize the class and set its properties.
     *
     * @since 1.0.0
     */
    public function __construct() {
        $this->registerMainHooks();
    }

    /**
     * Register the main hooks related to the public-facing functionality.
     *
     * Enqueue scripts and styles
     * Post content hook for last-word post content
     * Register the shortcode for last-word content
     * Remove shortcode content from RSS feeds
     *
     * Conditionally include custom post types on archive pages
     *
     * NOTE(review): 'the_content' is a filter; add_filter() would be the
     * conventional call here (add_action() is an alias and behaves the same).
     *
     * @since 1.0.0
     * @access private
     */
    private function registerMainHooks() {
        add_shortcode( self::SHORTCODE_TAG_NAME, array( $this, 'shortcode_content_view_hook'));
        add_action( 'wp_enqueue_scripts', array($this, 'enqueue_scripts' ));
        add_action( 'the_content', array($this, 'post_content_view_hook' ));
        add_filter( 'the_content_feed', array($this, 'remove_shortcode_from_feed'));
    }

    /**
     * Determine if we are on a post type screen
     *
     * @since 1.0.0
     * @return boolean
     */
    private function isPostContent(){
        $result = false;
        $screen = get_post_type();
        if ($screen == 'post') {
            $result = true;
        }
        return $result;
    }

    /**
     * Determine if we are on a screen where an last-word shortcode is being used
     *
     * @since 1.0.0
     * @return boolean
     */
    private function isLastWordShortcode(){
        $result = false;
        global $post;
        // $post not set on 404 pages, returns Trying to get property of non-object
        if (isset( $post )) {
            $result = has_shortcode( $post->post_content, self::SHORTCODE_TAG_NAME);
        }
        return $result;
    }

    /**
     * Determine if we are on an active last-word screen
     *
     * True when the current screen either uses the shortcode or is a
     * regular post (where content is auto-appended).
     *
     * @since 1.0.0
     * @return boolean
     */
    private function isLastWordScreen(){
        $result = false;
        if($this->isLastWordShortcode()) {
            $result = true;
        }
        elseif($this->isPostContent()) {
            $result = true;
        }
        return $result;
    }

    /**
     * Orchestrate the setup and rendering of the last-word post content view
     *
     * Skipped for feeds; feeds get the unmodified content.
     *
     * @since 1.0.0
     * @param string $content The wp $content
     * @return string The last-word WpPostContentType view output
     */
    public function post_content_view_hook($content) {
        if($this->isPostContent() && !is_feed()) {
            $my_content = new MyLastWordPostContent($content);
            return $my_content->contentOutput();
        }
        return $content;
    }

    /**
     * Shortcode hook callback
     * Render the shortcode view
     *
     * Int the future, we would also handle the shortcode user input here
     *
     * @since 1.0.0
     * @param $atts array | string
     * associative array of attributes, or an empty string if no attributes given
     *
     * @return string The shortcode view
     */
    public function shortcode_content_view_hook($atts) {
        // In the future, we may want to add/ accept shortcode parameters $atts
        // and pass them to the constructor -> new MyLastWordShortcode($atts);
        $my_shorcode = new MyLastWordShortcode();
        return $my_shorcode->contentOutput();
    }

    /**
     * Register the JavaScript and CSS for the public-facing side of the site.
     *
     * Assets are only enqueued on screens where last-word output appears.
     *
     * @since 1.0.0
     */
    public function enqueue_scripts() {
        $pluginPublicFolder = 'public/';
        $cssFilePath = $pluginPublicFolder . 'css/' . LAST_WORD_WP_NAME . '.css';
        $jsFilePath = $pluginPublicFolder . 'js/' . LAST_WORD_WP_NAME . '.js';
        if($this->isLastWordScreen()) {
            wp_enqueue_script(
                LAST_WORD_WP_NAME, LAST_WORD_PLUGIN_URL . $jsFilePath, array('jquery'), 0.1, true);
            wp_enqueue_style(
                LAST_WORD_WP_NAME, LAST_WORD_PLUGIN_URL . $cssFilePath);
        }
    }

    /**
     * Remove the shortcode content from RSS feed hook callback
     *
     * @since 1.0.0
     * @param $content string The current post content.
     * @return mixed
     */
    public function remove_shortcode_from_feed($content){
        remove_shortcode(self::SHORTCODE_TAG_NAME);
        return $content;
    }
}
<file_sep><?php
/**
* This class serves as the entry point for the plugin.
*
* It is used to:
* - load dependencies,
* - define internationalization,
* - instantiate the core plugin controllers for both the public-facing side of the site and the admin area.
*
* @link https://e-leven.net/
* @since 1.0.0
*
* @package last-word
* @author <NAME> <<EMAIL>>
*/
namespace LastWord;
//use LastWord\Admin\AdminController;
class LastWordInit {

    /**
     * The current version of the plugin.
     *
     * @since 1.0.0
     * @access protected
     * @var string $version The current version of the plugin.
     */
    protected $version;

    // NOTE(review): WPLW_ADMIN_DIR is defined but not used within this
    // class — presumably reserved for the commented-out AdminController.
    const WPLW_ADMIN_DIR = LAST_WORD_PLUGIN_DIR . 'admin/';
    const WPLW_MY_LAST_WORD_DIR = LAST_WORD_PLUGIN_DIR . 'myLastWord/';

    /**
     * Define the core functionality of the plugin.
     *
     * Set the plugin identifier, load the dependencies, define the locale, and run the core controllers.
     *
     * @since 1.0.0
     */
    public function __construct() {
        $this->version = "1.0.0";
        $this->load_dependencies();
        $this->setLocale();
        $this->run();
    }

    /**
     * Load the required dependencies for this plugin.
     *
     * @since 1.0.0
     * @access private
     */
    private function load_dependencies() {
        /**
         * The class responsible for orchestrating actions that occur in the public-facing
         * side of the site.
         */
        require_once LAST_WORD_PLUGIN_DIR . 'PublicController.php';
        /**
         * The abstract superclass responsible for my last word slur.
         */
        require_once self::WPLW_MY_LAST_WORD_DIR . '/MyLastWord.php';
        /**
         * The concrete subclasses responsible for my last word post and shortcode views
         */
        require_once self::WPLW_MY_LAST_WORD_DIR . '/MyLastWordPostContent.php';
        require_once self::WPLW_MY_LAST_WORD_DIR . '/MyLastWordShortcode.php';
    }

    /**
     * Load the plugin text domain for translation.
     *
     * Public because it is registered as a 'plugins_loaded' callback.
     *
     * @since 1.0.0
     */
    public function load_plugin_textdomain() {
        load_plugin_textdomain(
            LAST_WORD_WP_NAME,
            false,
            LAST_WORD_PLUGIN_DIR . '/languages/'
        );
    }

    /**
     * Define the locale for this plugin for internationalization.
     *
     * Uses the I18n class in order to set the domain and to register the hook
     * with WordPress.
     *
     * @since 1.0.0
     * @access private
     */
    private function setLocale() {
        add_action( 'plugins_loaded', array( $this, 'load_plugin_textdomain'));
    }

    /**
     * Register the hooks related to the admin area functionality
     * of the plugin.
     *
     * @since 1.0.0
     * @access private
     */
    private function run() {
        new PublicController();
    }
}
/**
* The file that defines the WpContentType abstract class
*
* @link https://e-leven.net/
* @since 1.0.0
*
* @package last-word
* @subpackage last-word/myLastWord
*/
namespace LastWord\MyLastWord;
abstract class MyLastWord {

    const WPLW_LAST_WORDS_TXT_FILE = 'last-words.txt';

    /**
     * Get a random last word from the bundled text file.
     *
     * @since 1.0.0
     * @return string A randomly chosen line, or an empty string when the
     *                word list cannot be read.
     */
    protected function getRandomLastWord() {
        // FILE_IGNORE_NEW_LINES drops the trailing "\n" each line used to
        // carry into the output; FILE_SKIP_EMPTY_LINES avoids ever
        // returning a blank word.
        $lines = file(
            LAST_WORD_PLUGIN_DIR . self::WPLW_LAST_WORDS_TXT_FILE,
            FILE_IGNORE_NEW_LINES | FILE_SKIP_EMPTY_LINES
        );
        if ($lines === false || count($lines) === 0) {
            // Missing or unreadable word list: fail soft instead of letting
            // array_rand() emit warnings on the front end.
            return '';
        }
        return $lines[array_rand($lines)];
    }
}
}<file_sep><?php
/**
* The file that defines the MyLastWordShortCode concrete class
*
* @link https://e-leven.net/
* @since 1.0.0
*
* @package last-word
* @subpackage last-word/myLastWord
*/
namespace LastWord\MyLastWord;
class MyLastWordShortCode extends MyLastWord {

    /** @var string The randomly selected last word. */
    public $my_last_word;

    /**
     * Initialize the class and set its properties.
     *
     * Picks the random last word once at construction time.
     *
     * NOTE(review): callers elsewhere instantiate this as
     * `MyLastWordShortcode` (lower-case "c"); PHP class names are
     * case-insensitive, so this resolves correctly.
     *
     * @since 1.0.0
     */
    public function __construct() {
        $this->my_last_word = $this->getRandomLastWord();
    }

    /**
     * Return the output
     *
     * @since 1.0.0
     * @return string The generated output
     */
    public function contentOutput() {
        return $this->my_last_word;
    }
}
<file_sep><?php
/**
* The file that defines MyLastWordPostContent concrete class
*
* @link https://e-leven.net/
* @since 1.0.0
*
* @package last-word
* @subpackage last-word/myLastWord
*/
namespace LastWord\MyLastWord;
class MyLastWordPostContent extends MyLastWord {

    /** @var string The original WordPress post content. */
    public $content;

    /** @var string The randomly selected last word to attach. */
    public $my_last_word;

    /**
     * Capture the post content and pick the random last word.
     *
     * @since 1.0.0
     * @param string $content The wp $content
     */
    public function __construct($content) {
        $this->content = $content;
        $this->my_last_word = $this->getRandomLastWord();
    }

    /**
     * Read the configured placement of the last word.
     *
     * @since 1.0.0
     * @return string
     */
    private function getPositionSetting() {
        // we can later add an option to place it before content
        return 'after';
    }

    /**
     * Concatenate the last word before or after the wp content,
     * depending on the position setting.
     *
     * @since 1.0.0
     * @return string The complete content
     */
    public function orderTheContent() {
        if ($this->getPositionSetting() == 'after') {
            return $this->content . $this->my_last_word;
        }
        return $this->my_last_word . $this->content;
    }

    /**
     * Return the content output
     *
     * @since 1.0.0
     * @return string The generated content output
     */
    public function contentOutput() {
        return $this->orderTheContent();
    }
}
<file_sep><?php
/**
* @wordpress-plugin
* Plugin Name: Last Word
* Plugin URI: https://wordpress.org/plugins/last-word/
* Description: This plugin always wants to have the last word
* Version: 1.0.0
* Author: <NAME>
* Author URI: https://e-leven.net/
* License: GPL-2.0+
* License URI: http://www.gnu.org/licenses/gpl-2.0.txt
* Text Domain: last_word
* Domain Path: /languages
*/
// If this file is called directly, abort.
if ( ! defined( 'WPINC' ) ) {
    die;
}

// Guard against double-loading the plugin.
if ( defined( 'LAST_WORD_WP_NAME' ) ) {
    die;
}

// Plugin-wide constants: slug, base URL and base filesystem path.
define( 'LAST_WORD_WP_NAME', 'last-word' );
define( 'LAST_WORD_PLUGIN_URL', plugin_dir_url( __FILE__ ) );
define( 'LAST_WORD_PLUGIN_DIR', plugin_dir_path( __FILE__ ) );

/**
 * The core plugin entry class
 */
include_once plugin_dir_path( __FILE__ ) . '/LastWordInit.php';
new LastWord\LastWordInit();<file_sep># Basic WP theme and plugin demo
Just another basic wordpress theme and plugin
### Last Word plugin
- Displays a random line from plan 9's
[fortunes](https://github.com/0intro/plan9/blob/7524062cfa4689019a4ed6fc22500ec209522ef0/sys/games/lib/fortunes)
file after the post content
- Can also be triggered elsewhere by using a shortcode ['my-last-word']
- Is relatively well written
### My-theme theme
- Creates an about and a welcome page
- Sets the welcome page as a static home page
- Should have all basic theme files needed for fulfilling the wordpress template hierarchy
- Is relatively ugly at the moment (needs styling)
## Installation
### The Docker (compose) way
- clone the repo
`git clone https://github.com/djleven/wp_basic.git`
- navigate to directory
`cd wp_basic`
- run docker compose
`docker-compose up -d`
- navigate to localhost on your browser
- perform the GUI WP installation (only done the first time)
Note: For more info on this Docker/WP setup check out this
[repo](https://github.com/nezhar/wordpress-docker-compose)
### Manually
- download the zip
- install the two folders found in `wp-dev` to your `plugins` and `themes`
folders of your wordpress installation respectively
## Activation
- Activate the plugin and theme in your wp admin backend
<file_sep></div>
<footer id="footer" class="text-center">
<div id="copyright">
© <?php echo esc_html( date_i18n( __( 'Y', 'my_theme' ) ) ); ?>
<?php echo esc_html( get_bloginfo( 'name' ) ); ?>
</div>
<div>
<a href="<?php echo esc_url( home_url( '/' ) ); ?>" title="home" rel="home">
<?php esc_html_e( 'home', 'my_theme' ); ?>
</a>
|
<a href="<?php echo esc_url( home_url( '/about' ) ); ?>" title="about" rel="about">
<?php esc_html_e( 'about', 'my_theme' ); ?>
</a>
</div>
</footer>
</div>
<?php wp_footer(); ?>
</body>
</html>
|
7bf3b10b447eea60f6a21ceb07473d7e8bdabe36
|
[
"Markdown",
"PHP"
] | 11
|
PHP
|
djleven/wp_basic
|
87541ddf84b19397e30a24f4dc03b4424ec74149
|
eb1142920d59d7bb008af341328c0e77eb808cf1
|
refs/heads/master
|
<repo_name>J-Wass/LearntBot<file_sep>/LearntBot/learnt_bot.py
import sys
import math
import os
import queue
import threading
import random
import pickle
from rlbot.agents.base_agent import BaseAgent, SimpleControllerState
from rlbot.utils.structures.game_data_struct import GameTickPacket
from rlbot.utils.game_state_util import GameState, BallState, CarState, Physics, Vector3, Rotator, GameInfoState
class LearntBot(BaseAgent):
    """Rocket League bot that trains a NEAT neural network online.

    A NEAT population evolves on a background daemon thread; candidate
    networks and their fitness scores are exchanged with the game-tick
    thread through two queues (net_queue / fitness_queue). Fitness is the
    best fractional reduction in car-to-ball distance achieved during an
    individual's episode.
    """

    # fitness function for genomes
    def eval_genomes(self, genomes, config):
        """NEAT fitness callback: hand each genome's network to the bot
        thread via net_queue and block until a fitness value comes back on
        fitness_queue."""
        self.max_individuals = len(genomes)
        for genome_id, genome in genomes:
            # store this genome's ANN for use by the bot, eventually come up with a fitness
            genome_NN = neat.nn.FeedForwardNetwork.create(genome, config)
            if self.generation % 50 == 0: # every 50 gens, pickle a neural net
                pickle.dump(genome_NN, open( "neural_net", "wb" ))
            self.net_queue.put(genome_NN)
            # wait for best fitness from this genome
            best_fitness = self.fitness_queue.get()
            genome.fitness = best_fitness

    # drives NEAT algorithm
    def run_neat(self):
        """Run the NEAT evolution loop (executes on the background thread).

        neat-python is imported here — and published as a module global —
        so the dependency only loads when training actually starts.
        """
        import neat
        global neat
        # Load configuration.
        local_dir = os.path.dirname(__file__)
        config_file = os.path.join(local_dir, 'neat.config')
        config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction,
                             neat.DefaultSpeciesSet, neat.DefaultStagnation,
                             config_file)
        p = neat.Population(config)
        #p.add_reporter(neat.StdOutReporter(True))
        #stats = neat.StatisticsReporter()
        #p.add_reporter(stats)
        #p.add_reporter(neat.Checkpointer(5))
        best_genome = p.run(self.eval_genomes, 100000)
        best_genome_neural_network = neat.nn.FeedForwardNetwork.create(best_genome, config)
        pickle.dump(best_genome_neural_network, open( "best_neural_net", "wb" ))

    # set up learnt bot
    def initialize_agent(self):
        """Initialize controller state, episode counters and the exchange
        queues, then start the NEAT thread."""
        self.controller_state = SimpleControllerState()
        self.frame = 0              # frames elapsed in the current episode
        self.generation = -1        # current NEAT generation
        self.max_individuals = 0    # genomes per generation (set by eval_genomes)
        self.individual = -1        # index of the genome currently driving
        # Queues for communicating models/data between NEAT and bot input/output
        self.initial_distance = 0   # car-to-ball distance at episode start
        self.percent_progress = 0   # best fractional distance reduction so far
        self.net_queue = queue.Queue()
        self.fitness_queue = queue.Queue()
        # fire off thread for NEAT algo running concurrently with bot input/output
        neat_thread = threading.Thread(target = self.run_neat, args=())
        neat_thread.daemon = True
        neat_thread.start()

    def get_output(self, packet: GameTickPacket) -> SimpleControllerState:
        """Per-tick callback: roll over individuals/generations, build the
        feature vector, run the current network and map its outputs onto
        controller inputs. Also renders training telemetry on screen."""
        # 100 frames per individual, 10 individuals per generation, 100 generations, or reset before a goal happens
        if self.frame == (100 + self.generation*2) or self.frame == 0 or (-893 <= packet.game_ball.physics.location.x <= 893 and packet.game_ball.physics.location.y > 5115):
            if self.frame != 0:
                # report the best progress achieved as this genome's fitness
                self.fitness_queue.put(self.percent_progress)
            self.frame = 0
            self.individual += 1
            self.percent_progress = 0.0
            # set a random game state so the bot doesn't start memorizing saves
            car_state = CarState(jumped=False, double_jumped=False, boost_amount=100,
                                 physics=Physics(velocity=Vector3(x=0,y=0, z = 0), rotation=Rotator(0, math.pi * -1.5 + random.uniform(-0.5,0.5), 0),
                                                 angular_velocity=Vector3(0, 0, 0), location=Vector3(x=random.uniform(-1000,1000), y=random.uniform(2000,4000), z=0)))
            ball_state = BallState(Physics(location=Vector3(x=random.uniform(-800,800), y=random.uniform(-1000,1000), z=0), rotation=Rotator(0, 0, 0),
                                           velocity=Vector3(x=0,y=random.uniform(1000,1500),z=random.uniform(0,500))))
            game_state = GameState(ball=ball_state, cars={self.index: car_state},)
            self.set_game_state(game_state)
            if self.individual == self.max_individuals:
                self.individual = 0
                self.generation += 1
            # wait for NEAT to have the next network ready
            self.network = self.net_queue.get()

        # run all code below on every frame
        self.frame += 1
        ball = packet.game_ball
        bot = packet.game_cars[self.index]

        # if first frame, record the original ball distance, otherwise try to get best distance to ball
        distance = math.sqrt((bot.physics.location.x - ball.physics.location.x)**2 + (bot.physics.location.y - ball.physics.location.y)**2 + (bot.physics.location.z - ball.physics.location.z)**2)
        if self.frame == 10:
            self.initial_distance = distance
        if self.frame >= 10:
            self.percent_progress = max(self.percent_progress, (self.initial_distance - distance)/self.initial_distance)

        # feature input vector that is used to train our neural network
        input = []
        input.append(bot.physics.location.x)
        input.append(bot.physics.location.y)
        input.append(bot.physics.location.z)
        #input.append(bot.physics.rotation.pitch)
        input.append(bot.physics.rotation.yaw)
        #input.append(bot.physics.rotation.roll)
        input.append(bot.physics.velocity.x)
        input.append(bot.physics.velocity.y)
        input.append(bot.physics.velocity.z)
        input.append(bot.physics.angular_velocity.x)
        input.append(bot.physics.angular_velocity.y)
        #input.append(bot.physics.angular_velocity.z)
        input.append(bot.boost)
        input.append(bot.jumped)
        input.append(bot.double_jumped)
        input.append(bot.has_wheel_contact)
        input.append(ball.physics.location.x)
        input.append(ball.physics.location.y)
        input.append(ball.physics.location.z)
        #input.append(ball.physics.rotation.pitch)
        #input.append(ball.physics.rotation.yaw)
        #input.append(ball.physics.rotation.roll)
        input.append(ball.physics.velocity.x)
        input.append(ball.physics.velocity.y)
        input.append(ball.physics.velocity.z)
        #input.append(ball.physics.angular_velocity.x)
        #input.append(ball.physics.angular_velocity.y)
        #input.append(ball.physics.angular_velocity.z)

        # use self.network (from NEAT) with input to derive a usable output
        def sigmoid(x):
            # numerically stable logistic function
            if x >= 0:
                z = math.e ** -x
                return 1. / (1. + z)
            else:
                z = math.e ** x
                return z / (1. + z)
        def activate(x):
            # squash a raw network output into the controller range [-1, 1]
            return sigmoid(x/3000) * 2 - 1
        output = self.network.activate(input)
        self.controller_state.throttle = activate(output[0])
        self.controller_state.steer = activate(output[1])
        self.controller_state.boost = True if activate(output[2]) > 0.75 else False
        #self.controller_state.pitch = activate(output[2])
        #self.controller_state.yaw = activate(output[3])
        #self.controller_state.roll = activate(output[4])
        #self.controller_state.jump = True if activate(output[5]) > 0.75 else False
        #self.controller_state.handbrake = True if activate(output[7]) > 0.75 else False

        # on-screen training telemetry (skipped during the warm-up frames)
        if self.frame >= 10:
            self.renderer.begin_rendering()
            self.renderer.draw_string_2d(20,20, 2, 2, "Generation: " + str(self.generation) + " [Bot: " + str(self.individual) + "/" + str(self.max_individuals-1) + " | " + str(self.frame) + " frames]", self.renderer.white())
            self.renderer.draw_string_2d(20,50, 2, 2, "Best percent progress: " + str(round(self.percent_progress *100)) + "%", self.renderer.white())
            self.renderer.draw_string_2d(20,80, 2, 2, "Current percent progress: " + str(round((self.initial_distance - distance)/self.initial_distance*100)) + "%", self.renderer.white())
            self.renderer.draw_string_2d(20,110 , 2, 2, "Steer: " + str(self.controller_state.steer), self.renderer.white())
            self.renderer.draw_string_2d(20,140, 2, 2, "Throttle: " + str(self.controller_state.throttle), self.renderer.white())
            self.renderer.draw_string_2d(20,170, 2, 2, "Boost: " + str(self.controller_state.boost), self.renderer.white())
            #self.renderer.draw_string_2d(20,170, 2, 2, "Pitch: " + str(self.controller_state.pitch), self.renderer.white())
            #self.renderer.draw_string_2d(20,200, 2, 2, "Yaw: " + str(self.controller_state.yaw), self.renderer.white())
            #self.renderer.draw_string_2d(20,230, 2, 2, "Roll: " + str(self.controller_state.roll), self.renderer.white())
            #self.renderer.draw_string_2d(20,260, 2, 2, "Powerslide: " + str(self.controller_state.handbrake), self.renderer.white())
            #self.renderer.draw_string_2d(20,290, 2, 2, "Jump: " + str(self.controller_state.jump), self.renderer.white())
            self.renderer.end_rendering()
        #print(self.controller_state.__dict__)
        return self.controller_state
|
0dd6101d63dc3063e7866f7f3549c065d468507b
|
[
"Python"
] | 1
|
Python
|
J-Wass/LearntBot
|
92078556b942ba0d82ff2d8e4e0fc9bc2bf22eb4
|
57040533ef86e62581f720480e909456185a401e
|
refs/heads/master
|
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using WinSCP;
namespace testSSH
{
    /// <summary>
    /// Demo console app: uploads C:\temp\*.txt to an SFTP server using the
    /// WinSCP .NET assembly and prints a line per transferred file.
    /// </summary>
    class Program
    {
        static void Main(string[] args)
        {
            //sftp://N0vasSTAR92:<EMAIL>:3130/Prod/EIPPIn/
            //sftp://N0vasSTAR92:Ludi1x0;fingerprint=ssh-<KEY> <EMAIL>-portal.com:3130/Prod/EIPPIn/

            // Remote target directory and local file glob to upload.
            string remoteDirectory = "/Prod/Test/";
            string localDirectory = @"C:\temp\*.txt";

            // Setup session options
            // SECURITY NOTE(review): credentials and the host-key fingerprint
            // are hard-coded in source; move them to configuration or a
            // secret store before any real use.
            SessionOptions sessionOptions = new SessionOptions
            {
                Protocol = Protocol.Sftp,
                HostName = "sftp.eipp-portal.com",
                UserName = "N0vasSTAR92",
                Password = "<PASSWORD>",
                PortNumber = 3130,
                SshHostKeyFingerprint = "<KEY>"
                // Alternative plain-FTP settings kept for reference:
                //Protocol = Protocol.Ftp,
                //HostName = "172.16.17.32",
                //UserName = "<EMAIL>",
                //Password = "<PASSWORD>",
                //PortNumber = 21,
            };

            using (Session session = new Session())
            {
                // Connect
                session.Open(sessionOptions);

                // Upload files
                TransferOptions transferOptions = new TransferOptions();
                transferOptions.TransferMode = TransferMode.Binary;

                TransferOperationResult transferResult;
                transferResult = session.PutFiles(localDirectory, remoteDirectory, false, transferOptions); //method to upload files
                //get files,
                //transferResult = session.GetFiles(remoteDirectory, "C:\\download\\", false, transferOptions);

                // Throw on any error
                transferResult.Check();

                // Print results
                foreach (TransferEventArgs transfer in transferResult.Transfers)
                {
                    Console.WriteLine("Upload of {0} succeeded", transfer.FileName);
                }
            }
            // Keep the console window open until the user presses Enter.
            Console.ReadLine();
        }
    }
}
7f98a5ec9fa4cdae381fbce8cb15e86c649c737b
|
[
"C#"
] | 1
|
C#
|
fg050481/testSSH
|
0d0a4c22207a497b908ec8f3190baa2b10a3de5b
|
660fdb22182779485f2ee835653d30c37b110fc9
|
refs/heads/master
|
<file_sep>package com.apap.tugas1.service;
import java.util.List;
import java.util.Optional;
import com.apap.tugas1.model.InstansiModel;
import com.apap.tugas1.model.PegawaiModel;
import com.apap.tugas1.model.ProvinsiModel;
public interface InstansiService {
//get
Optional<InstansiModel> getInstansiById(Long id);
List<InstansiModel> getAllInstansi();
List<InstansiModel> getInstansiFromProvinsi(ProvinsiModel provinsi);
List<PegawaiModel> getTuaMudaInstansi(InstansiModel instansi);
//add
void addInstansi(InstansiModel instansi);
}
<file_sep>package com.apap.tugas1.service;
import java.util.List;
import java.util.Optional;
import com.apap.tugas1.model.JabatanModel;
public interface JabatanService {
//View
Optional<JabatanModel> getJabatanById(Long id);
List<JabatanModel> getAllJabatan();
//Add
void addJabatan(JabatanModel jabatanBaru);
//Update
void updateJabatan(Long id, JabatanModel jabatanBaru);
//Delete
void deleteById(Long id);
}
|
c6d0d750154b7b605111e1ed6579948b5e35cd68
|
[
"Java"
] | 2
|
Java
|
Apap-2018/tugas1_1606890555
|
8c37308cf50c7765da7bd5184c4c3ff2054fcac9
|
7f98f0002e2f268ac3db45b63c95b3d16ba554dd
|
refs/heads/master
|
<repo_name>andrescorso/UNCode-Digital_V0<file_sep>/verilog/vcd2wd.py
#!/usr/bin/env python
# coding: utf-8
import sys
from Verilog_VCD.Verilog_VCD import parse_vcd
import Verilog_VCD as vvcd
import json
file_name = sys.argv[1]
vcd = parse_vcd(file_name+'.vcd')
ts = vvcd.Verilog_VCD.get_timescale()
ts = int(''.join(filter(str.isdigit, ts)))
et = vvcd.Verilog_VCD.get_endtime()
def readGolden(golden):
Gwaves = {}
Gdata = {}
for i in golden["signal"]:
Gwaves[i["name"]] = i["wave"]
if i.get("data",None) != None:
Gdata[i["name"]] = i["data"]
return Gwaves,Gdata
goldenF = True if sys.argv[2] == "comp" else False
Gwaves = {}
Gdata = {}
if goldenF:
try:
json_file = open('code/golden.wf')
golden = eval(json_file.read())
Gwaves,Gdata = readGolden(golden)
except:
goldenF = False
wavedrom = "{ \"signal\": [\n"
signals = []
bfl = 65
fl = bfl
def gfl():
global fl
fl += 1
return chr(fl-1)
for i in vcd.keys():
names = []
for j in vcd[i]["nets"]:
name_dut = j["hier"].split(".")
if len(name_dut) > 1:
if name_dut[1] == "DUT":
names.append((j["name"],j["size"],j["type"]))
strsignals = ""
for signal in names:
name = signal[0]
allt = vcd[i]["tv"]
wave = ""
utime = 0
bit = False
stop= False
data = []
node = ""
if signal[1] == "1": bit = True
idxG = 0
idxD = 0
difBus = False
for t in range(0,int(et)+2*int(ts),int(ts)):
cW = ""
if not(stop) and t >= allt[utime][0]:
if bit:
wave += allt[utime][1]
cW = allt[utime][1]
else:
cW = hex(int(allt[utime][1], 2))[2:].upper()
data.append(cW)
if goldenF and cW != Gdata[name][idxD]:
wave += "9"
difBus = True
else:
wave += "2"
idxD += 1
utime += 1
if utime == len(allt):
stop = True
else:
wave +="."
cW = "."
if (goldenF and Gwaves[name][idxG] != "2" and cW != Gwaves[name][idxG]):
node += gfl()
else:
node+="."
idxG += 1
strsignals += "{\"name\": '"+name+"', \"wave\": '"+wave+"'"
if len(data) > 0:
strsignals += ", \"data\":"
strsignals += "["
strsignals += ', '.join("'{0}'".format(w) for w in data)
strsignals += "]"
realsignal = ""
if len(node) != node.count("."):
strsignals += ", \"node\":"
strsignals += "'"
strsignals += node
strsignals += "'"
if len(node) != node.count(".") or difBus:
realsignal = "{\"name\": '"+name+"*', \"wave\": '"+Gwaves[name]+"'"
if Gdata.get(name,None) != None:
realsignal += ", \"data\":"
realsignal += "["
realsignal += ', '.join("'{0}'".format(w) for w in Gdata[name])
realsignal += "]"
realsignal +="}"
strsignals += "}"
signals.append(strsignals)
if realsignal != "":
signals.append(realsignal)
wavedrom += ", \n".join(signals)
wavedrom += "]\n"
if bfl != fl:
wavedrom += ",\"edge\":[\n"
edges = []
for cfl in range(bfl,fl,2):
edges.append("'"+chr(cfl)+"|-|"+chr(cfl+1)+" diff'")
wavedrom += ", ".join(edges)
wavedrom += "]"
wavedrom += "}"
print(wavedrom)
if goldenF:
fi = open(f"code/waveform.wf",'w')
fi.write(wavedrom)
fi.close()
else:
fi = open(f"code/golden.wf",'w')
fi.write(wavedrom)
fi.close()
<file_sep>/README.md
# UNCode-Digital Version 0.1
Incluye el código de las páginas creadas para la comparación y visualización de diferencias de diseños digitales escritos en Verilog y VHDL.
Funcionamiento en Verilog:

Funcionamiento en VHDL:

<file_sep>/VHDL/program.php
<html>
<body>
<?php
//Saving design code
$code_s = str_replace("<br>","\n",htmlspecialchars_decode($_POST['c']));
$file = fopen("code/design.vhd", 'w+');
ftruncate($file, 0); //Clear the file to 0bit
$content = $code_s ;
fwrite($file , $content); //Now lets write it in there
fclose($file ); //Finally close our .txt
//Saving golden model code
$code_g = str_replace("<br>","\n",htmlspecialchars_decode($_POST['g']));
$file = fopen("code/golden.vhd", 'w+');
ftruncate($file, 0); //Clear the file to 0bit
$content = $code_g ;
fwrite($file , $content); //Now lets write it in there
fclose($file ); //Finally close our .txt
//Saving testbench code
$code_t = str_replace("<br>","\n",htmlspecialchars_decode($_POST['t']));
$file = fopen("code/testbench.vhd", 'w+');
ftruncate($file, 0); //Clear the file to 0bit
$content = $code_t ;
fwrite($file , $content); //Now lets write it in there
fclose($file ); //Finally close our .txt
$salida = shell_exec('cd code ; ghdl -c golden.vhd testbench.vhd -r testbench --vcd=example_3_4.vcd');
echo "<pre>$salida</pre>";
$salida = shell_exec('python vcd2wd.py code/example_3_4 golden 2>&1 ');
echo "<pre>$salida</pre>";
$salida = shell_exec('cd code ; ghdl -c design.vhd testbench.vhd -r testbench --vcd=example_3_4.vcd');
echo "<pre>$salida</pre>";
$salida = shell_exec('python vcd2wd.py code/example_3_4 comp 2>&1 ');
echo "<pre>$salida</pre>";
?>
</body>
</html>
|
c02ebcf54e315d504160fdd29ffb2203c9b0a316
|
[
"Markdown",
"Python",
"PHP"
] | 3
|
Python
|
andrescorso/UNCode-Digital_V0
|
7d87d640d6bdb37d717e476ee43c4f7f4ee99bcd
|
cd7a18c422c90f4811f5e9cd093094a828551c4a
|
refs/heads/master
|
<file_sep><?php
interface vida{
const tipo = array( 'organica', 'no organica');
public function respirar ($tiempo="5", $cantidad_oxigeno, $estado) ;
public function bombiarfluido();
}
interface muerte extends vida{
const estado = array( 'pulso leve' , 'no tiene pulso' );
}
class Humano implements muerte{
public function saludar(){
return "Hola ".muerte::tipo[1]." como estas";
}
public function respirar($tiempo,$cantidad_oxigeno,$estado){
}
public function bombiarfluido(){
}
public function pulso(){
}
}
$obj = new Humano();
echo $obj->saludar();
|
9a0cb06449cb811778af6c9605eddd37ca004d22
|
[
"PHP"
] | 1
|
PHP
|
Harvey10102020/PHP_INTERFACE_CONST
|
b3a23e6eebe9de3420f3f15ab634bb33e795c518
|
c35ee3543700e8540761da12fff8a7cc1eea72cd
|
refs/heads/main
|
<file_sep>from flask import Flask,render_template,request
from predict import *
app=Flask(__name__)
app.jinja_env.auto_reload = True
app.config['TEMPLATES_AUTO_RELOAD'] = True
@app.route('/' , methods=['GET' , 'POST'])
def index():
if request.method=='POST':
wav_b64=request.get_data(as_text=True)
result=predict(wav_b64[22:])
print(result)
print(MMD_scores)
return render_template('index.html')
@app.route('/evaluate')
def evaluate():
result=final_evaluate()
return render_template('result.html',result=result)
if __name__ == '__main__':
app.run(host='0.0.0.0',port=80)<file_sep># -*- coding:utf-8 -*-
# @Time :2020/9/1 0:30
# @Author :Benjamin
# @File :predict.py
import requests
import json
# 填入下面四个信息
API_KEY = 'HBTnQr8eDx<KEY>'
SECRET_KEY = '<KEY>'
Request_url = 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/sound_cls/mmd_hc'
mmd_threshold = 0.8
MMD_scores = []
host = 'https://aip.baidubce.com/oauth/2.0/token'
data = {
'grant_type': 'client_credentials',
'client_id': API_KEY,
'client_secret': SECRET_KEY
}
response = requests.post(host, data=data)
token = response.json()["access_token"]
url = Request_url + "?access_token=" + token
headers = {'Content-Type': 'application/json'}
def predict(wav_b64):
params = {
"sound": wav_b64,
"top_num": 2
}
response = requests.post(url, data=json.dumps(params), headers=headers).json()
if 'results' in response:
result = response['results'][0]
print(result)
if result['name'] == 'HC':
MMD_scores.append(1 - float(result['score']))
elif result['name'] == 'MMD':
MMD_scores.append(float(result['score']))
msg="预测结果为:%s 概率为:%.3f。" % ('正常' if result['name'] == 'SC' else '抑郁症', result['score'])
elif 'error_code' in response:
msg="数据有误!"
return msg
def final_evaluate():
if len(MMD_scores):
score = sum(MMD_scores) / len(MMD_scores)
result={
"score":score,
"num":len(MMD_scores)
}
return result
else:
return None
<file_sep># Depression-Predicter
Use audio data to predict the probability of having depression.
1. Description of dataset
2. Model
3. Usage
### Description of dataset
We use MODMA dataset which contains the audio data of people who ill with depression and be healthy.
Audio is another non-invasive accessible physiological data, and studies have shown that mental disorders will be causing the patients’ audio data to differ from healthy controls, said in introduction paper.
### Model
Baidu Ai-Studio is a good choice for freshman in deep learning area. we just need prepare dataset and send it to EasyDL platform, than we can use the model by sending HTTP-requests to baidu API.
### Usage
This repository is a flask project, providing user interface. A web is established for collecting audio data, then send to the model we trained earlier. Predict and relative recommendation well be displayed in the web page. User can have a knowledge of self mental-health state.
|
08298818a0562576c635a2fe69a8b0ec681eb5cf
|
[
"Markdown",
"Python"
] | 3
|
Python
|
world-executed/Depression-Predicter
|
d20a384c2fa254b45180ab748cd2f813fc56c438
|
cda5f82b72a013c64e16236204a84f7310a3955a
|
refs/heads/master
|
<repo_name>Kaoutarloup/Project-and-Training-1-<file_sep>/skeleton-master/Makefile
all:
cd 1-hello; make ; cd ..
cd 2-primes; make ; cd ..
cd 3-ls; make ; cd ..
cd 4-sort; make ; cd ..
cd 5-shell; make ; cd ..
submit:
git commit -a -m "auto-submission"
git push
<file_sep>/skeleton-master/1-hello/makefile
all: hello
hello: hello.o
ld -o hello hello.o
hello.o: hello.asm
nasm -f elf64 -g hello.asm
clean:
rm -f hello hello.o
<file_sep>/skeleton-master/3-ls/ls.c
#include <dirent.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
char* nomDeBase(char* path)
{
size_t i = strlen(path) - 1;
while(path[i] != '/') i--;//trouve le /, puis sors de la boucle
return path + i + 1;
}
void renduFichier(char* nomFichier)
{
struct stat file_stat;
char* base = nomDeBase(nomFichier);
if (-1 == lstat(nomFichier, &file_stat)) {//-1 = erreur
printf("%s: non valide\n", base);
return;
}
switch(file_stat.st_mode & S_IFMT) {
case S_IFDIR:
printf("%s/\n", base);//dossier avec un /
break;
case S_IFLNK:
printf("%s@\n", base);//lien avec un @
break;
case S_IFREG:
if (file_stat.st_mode & S_IEXEC) printf("%s*\n", base);//executables avec *, sinon rien
else printf("%s\n", base);
break;
default:
printf("%s: non valide\n", base);//aucun des cas ci dessus
break;
}
}
void renduDossier(char* nomDossier)
{
DIR* dossier = opendir(nomDossier);
if (dossier == NULL) {
renduFichier(nomDossier);//si pas un dossier, passe a l'etape suivant
return;
}
struct dirent* entree;
while((entree = readdir(dossier)) != NULL) {//tans que sa reste un dossier
if (entree->d_name[0] == '.') continue;
char chemin[1024];
strcpy(chemin, nomDossier);
strcat(chemin, "/");
strcat(chemin, entree->d_name);//fais du joli texte
renduFichier(chemin);
}
closedir(dossier);
}
int main(int argc, char** argv)
{
if(argc == 1) {
renduDossier(".");//que 1 argument
return 0;
}
if(argc == 2) {
renduDossier(realpath(argv[1], NULL));//plusieurs arguments
return 0;
}
perror("non valide, voir manuel");
return 1;
}
<file_sep>/skeleton-master/4-sort/sort.c
#include <stdio.h>
#include <string.h>
#define limiteTextes 100
#define limiteTexte 128
#define true 1
#define false 0
#define Boolean int
static Boolean separationTextes(char *source, char *destination)
{
int i,l;
l = strlen(source);//longueur de texte
for (i = 0; i < l; i++)//cherche dans ce texte
{
if (source[i] == '\\')
{
if (source[i+1] == 'n')// \\=\, n=n , \n = nouvelle ligne
{
source[i] = 0; //remplace le \ par 0
strcpy(destination,&source[i+2]);//copies source[i+2] dans destination qui est source+1, donc enleve aussi le \n
return true;
}
}
}
return false;
}
int main (int argc, char **argv)
{
int i,k,nombreTextes;
char listeTextes[limiteTextes][limiteTexte];
int partieTriee[limiteTextes];
int taille;
Boolean scanFini, inclu;
Boolean montant = true;
Boolean separe;
for (i = 1; i < argc; i++)
{
if (strcmp(argv[i], "-r") == 0) //si -r, va reverser l'ordre
{
montant = false;
}
}
nombreTextes = 0;
scanFini = false;
do
{
if (NULL == fgets(listeTextes[nombreTextes], sizeof(listeTextes[nombreTextes]), stdin))//si a fini de lire, ou rencontre une erreur
{
scanFini = true;//met fin au scan
}
else
{
i = strlen(listeTextes[nombreTextes]);
if (i > 0)
{
listeTextes[nombreTextes][i-1] = 0;
}
}
do
{
separe = separationTextes(listeTextes[nombreTextes],listeTextes[nombreTextes+1]);//vrai si nouvelle ligne, et va enlever le /n
if (listeTextes[nombreTextes][0] != 0)
{
inclu = false;
for (i = 0; (i < nombreTextes) && (!inclu); i++)
{
taille = strcoll(listeTextes[nombreTextes], listeTextes[partieTriee[i]]);
if ((montant && (taille < 0)) //compare les deux textes, strcoll donnant un negatif, positif ou 0
|| ((!montant) && (taille > 0))) //prend en compte si r a été utilisé ou non
{
for (k = nombreTextes; k >= i; k--)
{
partieTriee[k] = partieTriee[k-1];//descend dans cette liste
}
partieTriee[i] = nombreTextes; //change le nombre contenu dans partietriee i
nombreTextes++;
inclu = true;
}
}
if (!inclu)
{
partieTriee[nombreTextes] = nombreTextes;
nombreTextes++;
}
}
} while (separe);
} while ((!scanFini) && (nombreTextes < limiteTextes));//scan jusqu'a la fin, ou a la limite
for (i = 0; i < nombreTextes; i++)
{
puts(listeTextes[partieTriee[i]]);
}
return 0;
}
<file_sep>/skeleton-master/5-shell/Makefile
CFLAGS = -Wall -Werror -g -O0
all:
gcc $(CFLAGS) shell.c -o shell
clean:
rm -f shell
<file_sep>/skeleton-master/2-primes/makefile
all: primes
primes: primes.o
ld -o primes primes.o
primes.o: primes.asm
nasm -f elf64 -g primes.asm
clean:
rm -f primes primes.o
<file_sep>/skeleton-master/3-ls/Makefile
CFLAGS = -Wall -Werror -g -O0
all:
gcc $(CFLAGS) ls.c -o ls
clean:
rm -f ls
<file_sep>/skeleton-master/README.md
Des exrcices en C
1-hello Add files via upload last month
2-primes Add files via upload last month
3-ls Add files via upload last month
4-sort Add files via upload last month
5-shell
<file_sep>/skeleton-master/5-shell/shell.c
#define _GNU_SOURCE
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <ctype.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/wait.h>
void* ealloc(void* prev, size_t size)//fais l'allocation
{
void* ptr = realloc(prev, size);
assert(ptr != NULL);
return ptr;
}
char* triage(char* entree)//fais du triage
{
if (NULL == entree) return NULL;
char *p = entree;
int l = strlen(entree);
while(isspace(p[l - 1])) p[--l] = 0;
while(*p && (isspace(*p) || '\0' == *p)) ++p, --l;
memmove(entree, p, l + 1);//copie l+1 characteres de p dans entree
return entree;
}
char* lisLigne()
{
static const char EXIT[] = "exit";
char* entree = NULL;
unsigned long taille = 0;
write(STDOUT_FILENO, "$ ", 2);
getline(&entree, &taille, stdin);
if ('\0' == *entree) return strdup(EXIT);
entree = triage(entree);
if ('\0' == *entree) {//\0 signifie null
free(entree);
return NULL;
}
return entree;
}
char* lisPartie(char* entree)
{
static char first, *precedant;
if (entree == NULL) entree = precedant;
else first = entree[0];
if (!first) return NULL;
char* partie;
entree[0] = first;
if ('$' != first) {//si premier charactere est pas le $
partie = entree;
while(entree[0] && '$' != entree[0]) entree++;//avance dans la liste
} else if('{' != entree[1]) {
partie = entree++;//avance que de 1
while(
entree[0] && (isalnum(entree[0]) || entree[0] == '_' || entree[0] == '?')//continue d'avancer
) entree++;
} else {
partie = ++entree;
while(entree[0] && '}' != entree[0]) entree++;//avance jusqu'au crochet fermant
if('}' == entree[0]) *(entree++) = '\0';//met un null
*partie = '$';
}
precedant = entree;//decalage de 1
first = entree[0];
entree[0] = '\0';
return partie;
}
char* etendre(char* entree)
{
size_t size = 0;
char* buffer = NULL;
while(entree[0] && isspace(entree[0])) entree++;//avance a l'espace
char* partie = lisPartie(entree);
while(partie != NULL) {
if('$' == partie[0]) partie = getenv(partie + 1);//retourne null si la variable existe pas
if(partie != NULL) {
size_t len = strlen(partie);
buffer = ealloc(buffer, (size + len + 1) * sizeof(char));//augmente la memoire reservée
strcpy(buffer + size, partie);
size += len;
}
partie = lisPartie(NULL);
}
return buffer;
}
char* lecture()//lis en appelant les autres fonctions
{
char *entree = NULL, *etendu;
do { entree = lisLigne(); }
while(NULL == entree);
etendu = etendre(entree);
free(entree);
return etendu;
}
int assignementCheck(char* entree)
{
if (!(isalnum(entree[0]) || entree[0] == '_' || entree[0] == '?')) return 0;//retourne faux si commence par chiffre,lettre, _ ou ?
while(entree[0] != '\0' && (isalnum(entree[0]) || entree[0] == '_' || entree[0] == '?')) entree++;
return entree[0] == '=';//retourne vrai si il y a = apres les chiffres,lettre, _ ou ?
}
void assignementProgres(int status)
{
char valeur[4];
sprintf(valeur, "%d", status);
setenv("?", valeur, 1);//ajoute une variable d'environment ou met -1 si erreur
}
void assignement(char* entree)
{
char* place = strsep(&entree, "=");//cherche le = dans entree
assignementProgres(setenv(place, entree, 1));//setenv retourne 0 si assez de place, -1 si erreur
}
char* creePartie(char* entree)//si null, retourne precedant
{
static char *precedant;
if(NULL == entree) entree = precedant;
while(entree && entree[0] && isspace(entree[0])) entree++;//avance jusqu'a l'espace
if(NULL == entree || '\0' == entree[0]) return NULL;
char* partie;
if ('"' == entree[0]) {
partie = ++entree;
while(entree[0] && '"' != entree[0]) entree++;//avance dans l'entree
} else {
partie = entree;
while(entree[0] && !isspace(entree[0])) entree++;
}
if('\0' == entree[0]) precedant = NULL;
else precedant = entree + 1;
entree[0] = '\0';//\0 signifie null
return partie;
}
typedef struct exec {
pid_t id;
struct exec* prev;
int pipe[2];
char* ecrire;
char* lire;
char* effacer;
int argc;
char** argv;
} exect;
exect* exectNouv(exect* precedant)
{
exect* processus = ealloc(NULL, sizeof(exect));//va donner plus de memoire
processus->prev = NULL;//remet les valeurs en place
processus->argc = 0;
processus->argv = ealloc(NULL, sizeof(char*));
processus->ecrire = processus->lire = processus->effacer = NULL;
if (NULL != precedant) {
processus->prev = precedant;
pipe(precedant->pipe);
} else {
processus->pipe[0] = processus->pipe[1] = 0;
}
return processus;
}
void libere(exect* processus)
{
while (NULL != processus) {//va liberer de la memoire
exect* prev = processus->prev;
free(processus->argv);
free(processus);
processus = prev;
}
}
void ajoute(exect* processus, char* argument)//ajoute un argument, de la memoire avec cette nouvelle donee, puis enleve le processus ajouté
{
processus->argv[processus->argc++] = argument;
processus->argv = ealloc(processus->argv, (processus->argc + 1) * sizeof(char*));
processus->argv[processus->argc] = NULL;
}
void paralelle(exect* processus)//pour les pipes et forks
{
while(NULL != processus)
{
processus->id = fork();
if (processus->id < 0) {
perror("fork(): erreur\n");
exit(EXIT_FAILURE);
}
if (0 != processus->id) {
if (0 != processus->pipe[0]) {
close(processus->pipe[0]);
close(processus->pipe[1]);
}
processus = processus->prev;
continue;
}
if (NULL != processus->ecrire) {//contiendra adress fichier
close(STDIN_FILENO);
open(processus->ecrire, O_RDONLY);//va a l'adresse, ouvre en read
}
if (NULL != processus->lire) {//contiendra adresse fichier
close(STDOUT_FILENO);
open(processus->lire, O_WRONLY | O_CREAT | O_TRUNC, S_IRWXU);//va a l'adress, ouvre en write only, si pas de fichier le cree, donne aussi des droits de lecture, ecriture et execution (s_irwxu)
}
if (NULL != processus->effacer) {//idem
close(STDERR_FILENO);
open(processus->effacer, O_WRONLY | O_CREAT | O_TRUNC, S_IRWXU);//idem aussi
}
if (0 != processus->pipe[0]) {
close(STDOUT_FILENO);
close(processus->pipe[STDIN_FILENO]);
dup2(processus->pipe[STDOUT_FILENO], STDOUT_FILENO);
close(processus->pipe[STDOUT_FILENO]);
}
if (NULL != processus->prev) {
close(STDIN_FILENO);
close(processus->prev->pipe[STDOUT_FILENO]);
dup2(processus->prev->pipe[STDIN_FILENO], STDIN_FILENO);
close(processus->prev->pipe[STDIN_FILENO]);
}
execvpe(processus->argv[0], processus->argv, environ);
perror("commande inconnue\n");
exit(EXIT_FAILURE);
}
}
void attendre(exect* processus)//va attendre que des processus soit nulls
{
int status;
while (NULL != processus) {
waitpid(processus->id, &status, 0);
processus = processus->prev;
}
assignementProgres(WEXITSTATUS(status));
}
void execute(char* entree)
{
int enPipe = 0, enFond = 0, enEchec = 0;
exect *processus = exectNouv(NULL);
char* partie = creePartie(entree);//refait un joli string pour travailler
while (NULL != partie) {
if (0 == strcmp(partie, "&")) {//le & pour tache secondaire
enFond = 1;
} else if (0 == strcmp(partie, "|")) {//le | pour pipeline
enPipe = 1;
processus = exectNouv(processus);
} else if (0 == strcmp(partie, "<")) {//le < pour lire
partie = creePartie(NULL);
if (NULL == partie) {
enEchec = 1;
perror("manque nom de fichier\n");
break;
}
processus->ecrire = partie;
} else if (0 == strcmp(partie, ">")) {//le > pour ecrire
partie = creePartie(NULL);
if (NULL == partie) {
enEchec = 1;
perror("manque nom de fichier\n");
break;
}
processus->lire = partie;
} else if (0 == strcmp(partie, "2>")) {//pour effacer
partie = creePartie(NULL);
if (NULL == partie) {
enEchec = 1;
perror("manque nom de fichier\n");
break;
}
processus->effacer = partie;
} else {
ajoute(processus, partie);
}
partie = creePartie(NULL);
}
if (0 == processus->argc) {//pas d'argument suivant
enEchec = 1;
perror("manque commande suivante\n");
}
if (enPipe && enFond) {//peux pas avoir les deux
enEchec = 1;
perror("pas de tache de fond en pipeline\n");
}
if (!enEchec) {//si pas echoué a cause d'autres erreurs
paralelle(processus);
if (!enFond) attendre(processus);
}
libere(processus);
}
int main(int argc, char** argv)
{
char* entree = NULL;
while(1)
{
entree = lecture();
if (0 == strcmp(entree, "exit")) {//si on entre "exit", sort de la boucle, retourne exit success
free(entree);//libere cet espace
break;
}
if (assignementCheck(entree)) {//si assignement
assignement(entree);//fais l'assignement
free(entree);//libere
continue;
}
execute(entree);//pour tous le reste
free(entree);//libère
}
return EXIT_SUCCESS;
}
<file_sep>/README.md
# C Project
fs2019-2020 / skeleton/CsBasics/assembler / C
<file_sep>/skeleton-master/4-sort/Makefile
CFLAGS = -Wall -Werror -g -O0
all:
gcc $(CFLAGS) sort.c -o sort
clean:
rm -f sort
|
bd4040cc3cd0c120accadf2373f0a8271eeaddb9
|
[
"Markdown",
"C",
"Makefile"
] | 11
|
Makefile
|
Kaoutarloup/Project-and-Training-1-
|
c51e75bb3d82933a08aa3c6e2f31b6fa78d5139b
|
8794392c9a299ebb91dd6764786926508d45c946
|
refs/heads/master
|
<file_sep><?php
$s =$_GET['message'];
$fname="text.txt";
$f=fopen($fname,"a");
fputs($f,"$s<br>");
fclose($f);
?>
<file_sep># lab8_php
<br>
<br>
<file_sep><!DOCTYPE html>
<html>
<head>
<title>Чат</title>
</head>
<body>
<?php
$h =$_GET['login'];
$fname="acounts.txt";
$f=fopen($fname,"a");
fputs($f,"$h<br>");
fclose($f);
?>
<h2>Введіть повідомлення</h2>
<form action="2.php" method="GET">
<input type="hidden" name="$h" value="$h">
<input name="message" type="text" >
<input type="submit" name="Надіслати" value="Надіслати">
<br> <br>
<?php
$cnt_file="text.txt"; // Файл
$f=fopen($cnt_file,"r");
while($s=fgets($f)) print "$s<br>";
fclose($f);
?>
</form>
</body>
</html>
|
aab8af896f142149bb5d7b38190524421e267f9e
|
[
"Markdown",
"PHP"
] | 3
|
PHP
|
oksanailchenko/lab8_php
|
d5b573300abef98f22ae9ac7302a9402a51f1e84
|
ee97b3fb1b18d33098c7941b517be133b3ca5a30
|
refs/heads/master
|
<repo_name>hsudavid70/tipcalculator<file_sep>/tipcalculator/SettingsViewController.swift
//
// SettingsViewController.swift
// tipcalculator
//
// Created by davidhsu on 9/21/16.
// Copyright (c) 2016 davidhsu. All rights reserved.
//
import UIKit
class SettingsViewController: UIViewController {
let defaults = NSUserDefaults.standardUserDefaults()
var lastSelected : Int = 0
@IBOutlet weak var label1: UILabel!
@IBOutlet weak var label2: UILabel!
@IBOutlet weak var themeLabel: UILabel!
@IBOutlet weak var themeSwitch: UISwitch!
@IBOutlet weak var defTipSelect: UISegmentedControl!
@IBOutlet weak var lowSlideLabel: UILabel!
@IBOutlet weak var midSlideLabel: UILabel!
@IBOutlet weak var maxSlideLabel: UILabel!
@IBOutlet weak var lowSlider: UISlider!
@IBOutlet weak var midSlider: UISlider!
@IBOutlet weak var maxSlider: UISlider!
override func viewDidLoad() {
println("lifcycle: (Settings) viewDidLoad")
super.viewDidLoad()
lastSelected = getDefTip()
println("lastselected \(lastSelected)")
defTipSelect.selectedSegmentIndex = lastSelected
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
override func viewWillDisappear(animated: Bool) {
super.viewWillDisappear(animated)
println("lifecycle:(Settings) view will disappear")
// if tip rate selection changed, invalidate saved index by setting it to -1
if( defTipSelect.selectedSegmentIndex != lastSelected){
println("reset")
defaults.setInteger(-1, forKey: "savedSegIndex")
defaults.setInteger(defTipSelect.selectedSegmentIndex, forKey: "def_tip_index")
}
println("(Settings) saved \(defTipSelect.selectedSegmentIndex)")
// always save changed preset value
defaults.setObject([
lowSlider.value,
midSlider.value,
maxSlider.value
], forKey: "tip_preset_array")
defaults.synchronize()
}
override func viewWillAppear(animated: Bool){
let savedPresets = getPresets()
self.view.alpha = 0
UIView.animateWithDuration(0.7, animations: {
self.view.alpha = 1
})
var colorTheme = defaults.stringForKey("tipColorTheme") ?? "Light"
if (colorTheme == "Light"){
themeSwitch.on = false
lightColorTheme()
}
else{
themeSwitch.on = true
darkColorTheme()
}
themeLabel.text = "\(colorTheme) theme"
lowSlider.value = savedPresets[0]
midSlider.value = savedPresets[1]
maxSlider.value = savedPresets[2]
lowSlideLabel.text = String(Int(savedPresets[0]*100)) + "%"
midSlideLabel.text = String(Int(savedPresets[1]*100)) + "%"
maxSlideLabel.text = String(Int(savedPresets[2]*100)) + "%"
defTipSelect.setTitle(lowSlideLabel.text, forSegmentAtIndex: 0)
defTipSelect.setTitle(midSlideLabel.text, forSegmentAtIndex: 1)
defTipSelect.setTitle(maxSlideLabel.text, forSegmentAtIndex: 2)
}
// check if preference keys exist, if not , set default value to 10 %
func getDefTip()->Int{
if(checkKey("def_tip_index")){
return defaults.objectForKey("def_tip_index") as! Int
}
else{
defaults.setInteger(0, forKey: "def_tip_index")
defaults.synchronize()
return 0
}
}
func getPresets()->[Float]{
if(checkKey("tip_preset_array")){
return defaults.arrayForKey("tip_preset_array") as! [Float]
}
else{
defaults.setObject([0.10,0.20,0.30], forKey: "tip_preset_array")
defaults.synchronize()
return [0.10,0.20,0.30]
}
}
// check if key exists
func checkKey(userKey:String)->Bool{
return defaults.objectForKey(userKey) != nil
}
func lightColorTheme(){
self.view.backgroundColor = UIColor(red:1.0,green:1.0,blue:1.0,alpha:1.0);
themeLabel.textColor = UIColor.blackColor()
label1.textColor = UIColor.blackColor()
label2.textColor = UIColor.blackColor()
lowSlideLabel.textColor = UIColor.blackColor()
midSlideLabel.textColor = UIColor.blackColor()
maxSlideLabel.textColor = UIColor.blackColor()
}
func darkColorTheme(){
self.view.backgroundColor = UIColor(red:0.1,green:0.1,blue:0.1,alpha:1.0);
themeLabel.textColor = UIColor.whiteColor()
label1.textColor = UIColor.whiteColor()
label2.textColor = UIColor.whiteColor()
lowSlideLabel.textColor = UIColor.whiteColor()
midSlideLabel.textColor = UIColor.whiteColor()
maxSlideLabel.textColor = UIColor.whiteColor()
}
@IBAction func onSwitch(sender: UISwitch) {
print("switched")
if(themeSwitch.on){
defaults.setObject("Dark",forKey: "tipColorTheme")
defaults.synchronize()
themeLabel.text="Dark Theme"
darkColorTheme()
}
else{
defaults.setObject("Light",forKey: "tipColorTheme")
defaults.synchronize()
themeLabel.text="Light Theme"
lightColorTheme() }
}
@IBAction func lowSlideChanged(sender: UISlider) {
let sliderVal = String(Int(lowSlider.value*100))
lowSlideLabel.text = sliderVal + "%"
defTipSelect.setTitle(lowSlideLabel.text,forSegmentAtIndex:0)
if(lowSlider.value >= midSlider.value){
midSlider.value = lowSlider.value
midSlideLabel.text = String(Int(midSlider.value*100)) + "%"
defTipSelect.setTitle(midSlideLabel.text,forSegmentAtIndex:1)
}
if(lowSlider.value >= maxSlider.value){
maxSlider.value = lowSlider.value
maxSlideLabel.text = String(Int(maxSlider.value*100)) + "%"
defTipSelect.setTitle(maxSlideLabel.text,forSegmentAtIndex:2)
}
}
@IBAction func midSlideChanged(sender: UISlider) {
let sliderVal = String(Int(midSlider.value*100))
midSlideLabel.text = sliderVal + "%"
defTipSelect.setTitle(midSlideLabel.text,forSegmentAtIndex:1)
if(midSlider.value >= maxSlider.value){
maxSlider.value = midSlider.value
maxSlideLabel.text = String(Int(maxSlider.value*100)) + "%"
defTipSelect.setTitle(maxSlideLabel.text,forSegmentAtIndex:2)
}
if(midSlider.value <= lowSlider.value){
lowSlider.value = midSlider.value
lowSlideLabel.text = String(Int(lowSlider.value*100)) + "%"
defTipSelect.setTitle(lowSlideLabel.text,forSegmentAtIndex:0)
}
}
@IBAction func maxSlideChanged(sender: UISlider) {
let sliderVal = String(Int(maxSlider.value*100))
maxSlideLabel.text = sliderVal + "%"
defTipSelect.setTitle(maxSlideLabel.text,forSegmentAtIndex:2)
if(maxSlider.value <= midSlider.value){
midSlider.value = maxSlider.value
midSlideLabel.text = String(Int(midSlider.value*100)) + "%"
defTipSelect.setTitle(midSlideLabel.text,forSegmentAtIndex:1)
}
if(maxSlider.value <= lowSlider.value){
lowSlider.value = maxSlider.value
lowSlideLabel.text = String(Int(lowSlider.value*100)) + "%"
defTipSelect.setTitle(lowSlideLabel.text,forSegmentAtIndex:0)
}
}
}<file_sep>/tipcalculator/ViewController.swift
//
// ViewController.swift
// tipcalculator
//
// Created by davidhsu on 9/21/16.
// Copyright (c) 2016 davidhsu. All rights reserved.
//
import UIKit
/// Main tip-calculator screen (Swift 1.x era: println/NSUserDefaults).
/// A hidden key-input field collects raw digits; the value is treated as
/// cents (divided by 100) and formatted as locale-aware currency.
/// Short-lived state (input, steppers, selected segment) is restored if
/// the app relaunches within 10 minutes of termination.
class ViewController: UIViewController {
    let defaults = NSUserDefaults.standardUserDefaults()
    let numFormatter = NSNumberFormatter()
    // Fallback presets; persisted values are loaded in viewWillAppear.
    var tipPresets : [Float] = [0.10,0.20,0.30]
    @IBOutlet weak var tipSelect: UISegmentedControl!
    @IBOutlet weak var tipStepper: UIStepper!
    @IBOutlet weak var peopleStepper: UIStepper!
    @IBOutlet weak var billField: UITextField!
    @IBOutlet weak var tipValLabel: UILabel!
    @IBOutlet weak var totalValLabel: UILabel!
    @IBOutlet weak var divider: UIView!
    @IBOutlet weak var billLabel: UILabel!
    @IBOutlet weak var tipLabel: UILabel!
    @IBOutlet weak var totalLabel: UILabel!
    // Hidden field that actually receives keyboard input (raw digit string).
    @IBOutlet weak var keyInputField: UITextField!
    @IBOutlet weak var tipRateLabel: UILabel!
    @IBOutlet weak var numPeopleLabel: UILabel!
    @IBOutlet weak var tipRateValLabel: UILabel!
    @IBOutlet weak var numPeopleValLabel: UILabel!

    override func viewDidLoad() {
        let timeviewDidLoad = Int(NSDate().timeIntervalSince1970)
        println("lifecycle: (Main) viewDidLoad \(timeviewDidLoad)")
        super.viewDidLoad()
        // Re-apply currency formatting when the device locale changes.
        NSNotificationCenter.defaultCenter().addObserver(
            self,
            selector: "localeChanged:",
            name: NSCurrentLocaleDidChangeNotification,
            object: nil)
        // Persist UI state just before the app is killed (see willTerminate:).
        NSNotificationCenter.defaultCenter().addObserver(
            self,
            selector: "willTerminate:",
            name: UIApplicationWillTerminateNotification,
            object: nil)
        // Do any additional setup after loading the view, typically from a nib.
        numFormatter.numberStyle = NSNumberFormatterStyle.CurrencyStyle
        numFormatter.locale = NSLocale.currentLocale()
        //numFormatter.minimumFractionDigits = 2
        numFormatter.maximumFractionDigits = 2
        // set first responder for bill
        self.keyInputField.becomeFirstResponder()
        let lastStoppedTime = getStoppedTime()
        let savedSegIndex = getSavedTip()
        // check for invalidated segment-select index (-1 means "none saved")
        if(savedSegIndex != -1){
            println("restored saved segment select")
            tipSelect.selectedSegmentIndex = savedSegIndex
        }
        // Restore saved state only within 10 minutes (600 s) of termination.
        if(lastStoppedTime != 0 && (timeviewDidLoad - lastStoppedTime) < 600){
            // load from saved state
            keyInputField.text = defaults.stringForKey("savedInput")
            tipStepper.value = defaults.doubleForKey("tipStepperVal")
            peopleStepper.value = defaults.doubleForKey("peopleStepperVal")
            tipRateValLabel.text = Int(tipStepper.value).description + "%"
            numPeopleValLabel.text = Int(peopleStepper.value).description
        }
        else{
            // init everything to 0 (formatted as currency)
            billField.text = numFormatter.stringFromNumber(0)
            tipValLabel.text = numFormatter.stringFromNumber(0)
            totalValLabel.text = numFormatter.stringFromNumber(0)
        }
    }

    override func viewWillAppear(animated: Bool) {
        println("lifecycle: viewWillAppear")
        super.viewWillAppear(animated)
        // Fade the bill field and tip selector in on every appearance.
        self.billField.alpha = 0
        self.tipSelect.alpha = 0
        UIView.animateWithDuration(0.7, animations: {
            // This causes first view to fade in and second view to fade out
            self.billField.alpha = 1
            self.tipSelect.alpha = 1
        })
        // Apply the persisted color theme (defaults to "Light").
        var colorTheme = defaults.stringForKey("tipColorTheme") ?? "Light"
        if (colorTheme == "Light"){
            lightColorTheme()
        }
        else{
            darkColorTheme()
        }
        // Reload presets and selection (the settings screen may have changed them).
        tipPresets = getPresets()
        let savedIndex = getSavedTip()
        /* index is set to -1 to indicate reset or indeterminate state. The default selection will be loaded */
        tipSelect.selectedSegmentIndex = (savedIndex != -1) ? getSavedTip() : getDefTip()
        tipSelect.setTitle(String(Int(tipPresets[0]*100))+"%", forSegmentAtIndex: 0)
        tipSelect.setTitle(String(Int(tipPresets[1]*100))+"%", forSegmentAtIndex: 1)
        tipSelect.setTitle(String(Int(tipPresets[2]*100))+"%", forSegmentAtIndex: 2)
        tipRateValLabel.text = String(Int(tipPresets[tipSelect.selectedSegmentIndex]*100)) + "%"
        tipStepper.value = Double(Int(tipPresets[tipSelect.selectedSegmentIndex]*100))
        update_bill()
    }

    override func viewWillDisappear(animated: Bool) {
        super.viewWillDisappear(animated)
        println("lifecycle:(Main) view will disappear")
        // Persist the selected segment so it survives navigating away.
        defaults.setInteger(tipSelect.selectedSegmentIndex, forKey: "savedSegIndex")
        defaults.synchronize()
        println("(Main) saved \(tipSelect.selectedSegmentIndex)")
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // check if preference key exists; if not, seed the default index (0)
    func getDefTip()->Int{
        if(checkKey("def_tip_index")){
            return defaults.objectForKey("def_tip_index") as! Int
        }
        else{
            defaults.setInteger(0, forKey: "def_tip_index")
            defaults.synchronize()
            return 0
        }
    }

    // get stopped time with check (0 means "never stored")
    func getStoppedTime()->Int{
        if(checkKey("lastStoppedTime")){
            return defaults.objectForKey("lastStoppedTime") as! Int
        }
        else{
            return 0
        }
    }

    // get saved segment select index.
    // NOTE(review): when the key is missing this seeds it with -1 (the
    // "indeterminate" sentinel) but returns 0 — confirm this asymmetry is
    // intended before changing it.
    func getSavedTip()->Int{
        if(checkKey("savedSegIndex")){
            return defaults.objectForKey("savedSegIndex") as! Int
        }
        else{
            defaults.setInteger(-1, forKey: "savedSegIndex")
            defaults.synchronize()
            return 0
        }
    }

    // check if key exists in user defaults
    func checkKey(userKey:String)->Bool{
        return defaults.objectForKey(userKey) != nil
    }

    // White background, black text/borders.
    func lightColorTheme(){
        self.view.backgroundColor = UIColor(red:1.0,green:1.0,blue:1.0,alpha:1.0)
        billField.backgroundColor = UIColor.whiteColor()
        billField.textColor = UIColor.blackColor()
        billField.tintColor = UIColor.blackColor()
        billField.layer.borderWidth = 1
        billField.layer.borderColor = UIColor.blackColor().CGColor
        divider.backgroundColor = UIColor.blackColor()
        tipValLabel.textColor = UIColor.blackColor()
        totalValLabel.textColor = UIColor.blackColor()
        billLabel.textColor = UIColor.blackColor()
        tipLabel.textColor = UIColor.blackColor()
        totalLabel.textColor = UIColor.blackColor()
        tipRateLabel.textColor = UIColor.blackColor()
        tipRateValLabel.textColor = UIColor.blackColor()
        numPeopleLabel.textColor = UIColor.blackColor()
        numPeopleValLabel.textColor = UIColor.blackColor()
    }

    // Near-black background, white text/borders.
    func darkColorTheme(){
        self.view.backgroundColor = UIColor(red:0.1,green:0.1,blue:0.1,alpha:1.0)
        billField.backgroundColor = UIColor.blackColor()
        billField.textColor = UIColor.whiteColor()
        billField.tintColor = UIColor.whiteColor()
        billField.layer.borderWidth = 1
        billField.layer.borderColor = UIColor.whiteColor().CGColor
        divider.backgroundColor = UIColor.whiteColor()
        tipValLabel.textColor = UIColor.whiteColor()
        totalValLabel.textColor = UIColor.whiteColor()
        billLabel.textColor = UIColor.whiteColor()
        tipLabel.textColor = UIColor.whiteColor()
        totalLabel.textColor = UIColor.whiteColor()
        tipRateLabel.textColor = UIColor.whiteColor()
        tipRateValLabel.textColor = UIColor.whiteColor()
        numPeopleLabel.textColor = UIColor.whiteColor()
        numPeopleValLabel.textColor = UIColor.whiteColor()
    }

    // Recompute bill/tip/total labels from the raw key input.
    // Assumes keyInputField holds raw digits interpreted as cents
    // (hence the /100), and tipStepper.value is a whole percentage.
    func update_bill(){
        // The String->Double conversion for Swift 1.2
        let keyInputVal = (keyInputField.text as NSString).doubleValue
        let tip = keyInputVal/100 * tipStepper.value/100
        let total = keyInputVal/100 + tip;
        billField.text = numFormatter.stringFromNumber(keyInputVal/100)
        // Tip/total are shown per person when splitting.
        tipValLabel.text = numFormatter.stringFromNumber(tip/Double(peopleStepper.value))
        totalValLabel.text = numFormatter.stringFromNumber(total/Double(peopleStepper.value))
        tipLabel.text = Int(peopleStepper.value) > 1 ? "Tip/pers." : "Tip"
        totalLabel.text = Int(peopleStepper.value) > 1 ? "Total/pers." : "Total"
    }

    // Tip presets from defaults; seeds [0.10, 0.20, 0.30] on first run.
    func getPresets()->[Float]{
        if(checkKey("tip_preset_array")){
            return defaults.arrayForKey("tip_preset_array") as! [Float]
        }
        else{
            defaults.setObject([0.10,0.20,0.30], forKey: "tip_preset_array")
            defaults.synchronize()
            return [0.10,0.20,0.30]
        }
    }

    @IBAction func onTap(sender: AnyObject) {
        //view.endEditing(true)
    }

    // Key input changed: cap at 10 digits, then recompute.
    @IBAction func calcTip(sender: AnyObject) {
        if (count(keyInputField.text) > 10) {
            keyInputField.deleteBackward()
        }
        update_bill()
    }

    @IBAction func tipStepperChanged(sender: UIStepper) {
        tipRateValLabel.text = Int(sender.value).description + "%"
        update_bill()
    }

    @IBAction func peopleStepperChanged(sender: UIStepper) {
        numPeopleValLabel.text = Int(sender.value).description
        update_bill()
    }

    // Preset segment tapped: sync the rate label and stepper, recompute.
    @IBAction func segValueChanged(sender: UISegmentedControl) {
        tipRateValLabel.text = String(Int(tipPresets[tipSelect.selectedSegmentIndex]*100)) + "%"
        tipStepper.value = Double(Int(tipPresets[tipSelect.selectedSegmentIndex]*100))
        update_bill()
    }

    // Device locale changed: re-format all currency labels.
    func localeChanged(notification: NSNotification) {
        println("locale changed")
        numFormatter.locale = NSLocale.currentLocale()
        update_bill()
    }

    // App is being terminated: persist state for the 10-minute restore window.
    func willTerminate(notification: NSNotification){
        // get time in unix epoch time to the nearest seconds
        let timeTerminate = Int(NSDate().timeIntervalSince1970)
        println("lifecycle: willTerminate \(timeTerminate)")
        // save the termination time, user input, selected index
        defaults.setInteger(timeTerminate, forKey: "lastStoppedTime")
        defaults.setObject(keyInputField.text, forKey:"savedInput")
        defaults.setInteger(tipSelect.selectedSegmentIndex, forKey: "savedSegIndex")
        defaults.setDouble(tipStepper.value, forKey: "tipStepperVal")
        defaults.setDouble(peopleStepper.value, forKey: "peopleStepperVal")
        defaults.synchronize()
    }
}
|
4a082d90d190fb4f1f3654a3b25cc75311f347b3
|
[
"Swift"
] | 2
|
Swift
|
hsudavid70/tipcalculator
|
11897829a34775e3304cc91d5bf904ffc1c8a10c
|
d9f81c4834234707d4ad7db98fb982129ad7ed02
|
refs/heads/main
|
<repo_name>hafizhnass/sprint1<file_sep>/pr.js
// Prompt for a student's name and score, then report pass/fail.
// Passing threshold: strictly greater than 70.
var nama = prompt("masukkan nama pelajar");
var nilai = prompt("input nilai anda");
var pesan;
if (nilai > 70) {
    pesan = nama + "selamat anda lulus cukup memuaskan dengan nominal nilai" + ' : ' + nilai;
} else {
    pesan = nama + "anda tidak lulus karena anda kurang biaya nyogok dengan nominal nilai" + ' : ' + nilai;
}
document.write(pesan);
|
4632676df6560ad29d5d26a9be12267912850d11
|
[
"JavaScript"
] | 1
|
JavaScript
|
hafizhnass/sprint1
|
44ad2a640b6fe036e3540e66d0453d9f267ce5fa
|
090eada18f286cc88cb81fb8b4ae5dbc7cbb47d0
|
refs/heads/develop
|
<file_sep>var http = require('http');
var parse = require('csv-parse');
var transform = require('stream-transform');
module.exports = {
getZanoxComData: function(url, process, callback) {
var parser = parse({delimiter: ';'});
var transformer = transform(process, {parallel: 10});
transformer.on('finish',function(){
callback();
});
http.get(url, function(response) {
response.pipe(parser).pipe(transformer);
});
}
}
<file_sep>var config = require('config');
var main = require('./app/main.js');
// Entry point: run the feed load with the configured URLs/shop;
// log any error as JSON to stderr.
main.process(config.url, config.watzdprice_url, config.shop, function (err) {
  if (!err) {
    return;
  }
  console.error(JSON.stringify(err));
});
<file_sep># watzdprice_zanoxcom_loader
Load zanox.com feeds
|
5ccaa62ed16b5862bec24e130be6648bb6d0a896
|
[
"JavaScript",
"Markdown"
] | 3
|
JavaScript
|
broersa/watzdprice_zanoxcom_loader
|
1570aaf8103d96f7eaad2c4b5c6c79910201bff7
|
8bf51b3348e64b3ec55662c8bf9ff577d71e85fa
|
refs/heads/master
|
<file_sep><?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>TestArithmeticProject</groupId>
<artifactId>TestArithmeticProject</artifactId>
<packaging>jar</packaging>
<version>1.0-SNAPSHOT</version>
<properties>
<java.version>1.7</java.version>
<github.global.server>github</github.global.server>
<github.maven-plugin>0.9</github.maven-plugin>
<allure.version>1.4.11</allure.version>
<aspectj.version>1.8.5</aspectj.version>
</properties>
    <!-- Library dependencies -->
<dependencies>
<dependency>
            <!-- Coordinates of the required library -->
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
            <!-- This library is only used for compiling and running the tests -->
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjweaver</artifactId>
<version>${aspectj.version}</version>
</dependency>
<dependency>
<groupId>ru.yandex.qatools.allure</groupId>
<artifactId>allure-junit-adaptor</artifactId>
<version>${allure.version}</version>
</dependency>
</dependencies>
<distributionManagement>
<repository>
<id>internal.repo</id>
<name>Temporary Staging Repository</name>
<url>file://${project.build.directory}/mvn-repo</url>
</repository>
</distributionManagement>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.18.1</version>
<configuration>
<testFailureIgnore>true</testFailureIgnore>
<argLine> -javaagent:"${settings.localRepository}/org/aspectj/aspectjweaver/${aspectj.version}/aspectjweaver-${aspectj.version}.jar" </argLine>
<properties>
<property>
<name>listener</name>
<value>ru.yandex.qatools.allure.junit.AllureRunListener</value>
</property>
</properties>
</configuration>
</plugin>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
</configuration>
</plugin>
<plugin>
<artifactId>maven-deploy-plugin</artifactId>
<version>2.8.1</version>
<configuration>
<altDeploymentRepository>internal.repo::default::file://${project.build.directory}/mvn-repo</altDeploymentRepository>
</configuration>
</plugin>
<plugin>
<groupId>com.github.github</groupId>
<artifactId>site-maven-plugin</artifactId>
<version>${github.maven-plugin}</version>
<configuration>
<message>Maven artifacts for ${project.version}</message>
<noJekyll>true</noJekyll>
<outputDirectory>${project.build.directory}/mvn-repo</outputDirectory>
<branch>refs/heads/mvn-repo</branch>
<includes><include>**/*</include></includes>
<repositoryName>NewLibrary</repositoryName>
<repositoryOwner>Belka2017</repositoryOwner>
</configuration>
<executions>
<execution>
<goals>
<goal>site</goal>
</goals>
<phase>deploy</phase>
</execution>
</executions>
</plugin>
</plugins>
</build>
<reporting>
<excludeDefaults>true</excludeDefaults>
<plugins>
<plugin>
<groupId>ru.yandex.qatools.allure</groupId>
<artifactId>allure-maven-plugin</artifactId>
<version>2.0</version>
</plugin>
</plugins>
</reporting>
</project><file_sep>/**
* <p>Description: Класс, выполняющий арифметические действия (сложение, вычитание, умножение, деление). </p>
* @author <NAME>
*/
public class Arithmetic
{
    public Arithmetic()
    {}

    /**
     * Returns the sum of two numbers.
     * @param aOp1 First addend
     * @param aOp2 Second addend
     * @return Sum (wraps on int overflow)
     */
    public int getSum(int aOp1, int aOp2)
    {
        return aOp1 + aOp2;
    }

    /**
     * Returns the result of subtracting one number from another.
     * @param aOp1 Minuend
     * @param aOp2 Subtrahend
     * @return Difference
     */
    public int getSubst(int aOp1, int aOp2)
    {
        return aOp1 - aOp2;
    }

    /**
     * Returns the product of two numbers.
     * @param aOp1 First factor
     * @param aOp2 Second factor
     * @return Product (wraps on int overflow)
     */
    public int getMultiply(int aOp1, int aOp2)
    {
        return aOp1 * aOp2;
    }

    /**
     * Returns the result of dividing one number by another
     * (integer division, truncates toward zero).
     * @param aOp1 Dividend
     * @param aOp2 Divisor; must be non-zero, otherwise ArithmeticException is thrown
     * @return Quotient
     */
    public int getDiv(int aOp1, int aOp2)
    {
        return aOp1 / aOp2;
    }
}
<file_sep>/**
* <p>Description: Тестирование арифметического действия (деление). </p>
* @author <NAME>
*/
import org.junit.*;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import ru.yandex.qatools.allure.annotations.Description;
import ru.yandex.qatools.allure.annotations.Step;
import java.io.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static org.junit.runners.Parameterized.Parameters;
import static org.junit.Assert.assertEquals;
/**
 * Parameterized JUnit 4 test for the division operation of {@link Arithmetic}.
 * Parameter rows are read from "dataFile.txt" (semicolon-separated:
 * op1;op2;operation;expectedResult); rows whose operation is not "/" are
 * skipped via an Assume in {@link #checkOperation()}.
 */
@RunWith(Parameterized.class)
public class TestingOfDivision
{
    private static Arithmetic mArithmetic = null;
    private int mOp1;
    private int mOp2;
    private String mOperation;
    private int mRes;

    public TestingOfDivision(int aOp1, int aOp2, String aOperation, int aRes)
    {
        this.mOp1 = aOp1;
        this.mOp2 = aOp2;
        this.mOperation = aOperation;
        this.mRes = aRes;
    }

    @BeforeClass
    public static void setData()
    {
        mArithmetic = new Arithmetic();
    }

    /** Skips (via Assume) every parameter row that is not a division. */
    @Step("Checking of operation between operands. Division is needed.")
    @Before
    public void checkOperation()
    {
        Assume.assumeTrue(isDiv());
    }

    // Constant-first equals is null-safe should a data row lack an operation.
    private boolean isDiv()
    {
        return "/".equals(mOperation);
    }

    @Description("testing dividing one number by another")
    @Test
    public void checkingResultOfDivision()
    {
        assertEquals("Test failed!", mRes, mArithmetic.getDiv(mOp1, mOp2));
    }

    /**
     * Loads the parameter rows from dataFile.txt.
     * Each line has the form "op1;op2;operation;result".
     * @return collection of Object[4] rows: {int, int, String, int}
     */
    @Parameters
    public static Collection getParameters()
    {
        List<Object[]> collection = new ArrayList<>();
        // try-with-resources guarantees the reader is closed even when a
        // malformed line throws (the original leaked it on any exception).
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(new FileInputStream("dataFile.txt"))))
        {
            String line;
            while ((line = reader.readLine()) != null)
            {
                String[] elements = line.split(";");
                Object[] objects = new Object[4];
                objects[0] = Integer.parseInt(elements[0]);
                objects[1] = Integer.parseInt(elements[1]);
                objects[2] = elements[2];
                objects[3] = Integer.parseInt(elements[3]);
                collection.add(objects);
            }
        }
        catch (FileNotFoundException e)
        {
            // Preserves original behavior: abort the run if the data file is absent.
            System.out.println("File not found!");
            System.exit(0);
        }
        catch (IOException e)
        {
            System.out.println("ERROR");
        }
        return collection;
    }
}
|
30250f3f712577fb6b0d95f7cbe1268488a8302e
|
[
"Java",
"Maven POM"
] | 3
|
Maven POM
|
Belka2017/TestArithmeticProject
|
05c8b9645ed792211f15a5dc9dea5b99f5f07ceb
|
8b501ffab157b6b603498a92cfb0f0a3f7d59760
|
refs/heads/master
|
<file_sep>package com.mohjacksi.snapdemo;
import android.hardware.Camera;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import java.io.IOException;
/**
* A simple {@link Fragment} subclass.
*/
/**
 * Fragment that shows a live camera preview on a SurfaceView using the
 * (deprecated) android.hardware.Camera API.
 * Fix: the camera is now released in {@link #surfaceDestroyed}, so the
 * camera hardware is no longer leaked when the surface goes away.
 */
public class EmptyFragment extends Fragment implements SurfaceHolder.Callback{

    Camera camera;
    SurfaceView mSurfaceView;
    SurfaceHolder mSurfaceHolder;

    public EmptyFragment() {
        // Required empty public constructor
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout and register this fragment for surface callbacks.
        View view = inflater.inflate(R.layout.fragment_empty, container, false);
        mSurfaceView = view.findViewById(R.id.surfaceView);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
        mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        return view;
    }

    @Override
    public void surfaceCreated(SurfaceHolder surfaceHolder) {
        camera = Camera.open();
        if (camera == null) {
            // No back-facing camera, or it is in use by another app.
            return;
        }
        // Portrait preview with continuous autofocus at 30 fps.
        camera.setDisplayOrientation(90);
        Camera.Parameters parameters = camera.getParameters();
        parameters.setPreviewFrameRate(30);
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
        camera.setParameters(parameters);
        try {
            camera.setPreviewDisplay(surfaceHolder);
        } catch (IOException e) {
            e.printStackTrace();
        }
        camera.startPreview();
    }

    @Override
    public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
        // Fixed-orientation preview: nothing to reconfigure on size changes.
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
        // Release the camera so other applications can use it (was missing:
        // the original left the camera open, leaking the hardware).
        if (camera != null) {
            camera.stopPreview();
            camera.release();
            camera = null;
        }
    }
}
|
c41bd50b8f1dd40317071531b996f2f647e3f9dd
|
[
"Java"
] | 1
|
Java
|
mohjacksi/SnapDemo
|
69df5db08e0d78be54ad2e189a2b0cf2190857b2
|
bd4e670f369ff715042f87be2787c3a41dd33abf
|
refs/heads/master
|
<repo_name>DavHer/Particles<file_sep>/app/src/main/java/com/example/david/particles/objects/ParticleSystem.java
package com.example.david.particles.objects;
import android.graphics.Color;
import com.example.david.particles.data.VertexArray;
import com.example.david.particles.programs.ParticleShaderProgram;
import com.example.david.particles.util.Geometry;
import static android.opengl.GLES20.glDrawArrays;
import static android.opengl.GLES20.GL_POINTS;
/**
* Created by david on 21/07/15.
*/
/**
 * Fixed-capacity particle store backed by one interleaved float array
 * (per particle: position xyz, color rgb, direction xyz, start time).
 * New particles are written ring-buffer style: once the array is full,
 * the oldest slot is overwritten.
 */
public class ParticleSystem {
    private static final int POSITION_COMPONENT_COUNT = 3;
    private static final int COLOR_COMPONENT_COUNT = 3;
    private static final int VECTOR_COMPONENT_COUNT = 3;
    private static final int PARTICLE_START_TIME_COMPONENT_COUNT = 1;

    // Floats per particle (3 + 3 + 3 + 1 = 10).
    private static final int TOTAL_COMPONENT_COUNT = POSITION_COMPONENT_COUNT +
            COLOR_COMPONENT_COUNT+
            VECTOR_COMPONENT_COUNT+
            PARTICLE_START_TIME_COMPONENT_COUNT;

    private static final int BYTES_PER_FLOAT = 4;
    // Byte distance between consecutive particles in the interleaved array.
    private static final int STRIDE = TOTAL_COMPONENT_COUNT * BYTES_PER_FLOAT;

    private final float[] particles;       // CPU-side interleaved copy
    private final VertexArray vertexArray; // GPU-side buffer wrapper
    private int maxParticleCount;          // ring capacity
    private int currentParticleCount;      // live slots, grows up to capacity
    private int nextParticle;              // index of the next slot to write

    public ParticleSystem(int maxParticleCount){
        particles = new float[maxParticleCount * TOTAL_COMPONENT_COUNT];
        vertexArray = new VertexArray(particles);
        this.maxParticleCount = maxParticleCount;
    }

    /**
     * Writes one particle into the next ring slot and pushes the updated
     * slot to the GPU buffer.
     */
    public void addParticle(Geometry.Point position, int color, Geometry.Vector direction,
                            float particleStartTime){
        // Offset is computed from nextParticle BEFORE the increment below.
        int particleOffset = nextParticle * TOTAL_COMPONENT_COUNT;
        int currentOffset = particleOffset;
        nextParticle++;
        // NOTE(review): given the wrap-around below, nextParticle never exceeds
        // maxParticleCount after the increment (except when maxParticleCount == 0),
        // so this guard looks unreachable — confirm intent before removing.
        if(nextParticle>maxParticleCount)
            return;
        if(currentParticleCount < maxParticleCount){
            currentParticleCount++;
        }
        if(nextParticle == maxParticleCount){
            //Start over at the beginning, but keep currentParticleCount so
            //that all the other particles still get drawn
            nextParticle = 0;
        }
        particles[currentOffset++] = position.x;
        particles[currentOffset++] = position.y;
        particles[currentOffset++] = position.z;
        // Color channels normalized from 0-255 ints to 0-1 floats.
        particles[currentOffset++] = Color.red(color)/255f;
        particles[currentOffset++] = Color.green(color)/255f;
        particles[currentOffset++] = Color.blue(color)/255f;
        particles[currentOffset++] = direction.x;
        particles[currentOffset++] = direction.y;
        particles[currentOffset++] = direction.z;
        particles[currentOffset++] = particleStartTime;
        // Upload only the slot that changed.
        vertexArray.updateBuffer(particles, particleOffset, TOTAL_COMPONENT_COUNT);
    }

    /** Binds each interleaved attribute to its shader location. */
    public void bindData(ParticleShaderProgram particleProgram){
        int dataOffset = 0;
        vertexArray.setVertexAttribPointer(dataOffset, particleProgram.getPositionAttributeLocation(),
                POSITION_COMPONENT_COUNT, STRIDE);
        dataOffset += POSITION_COMPONENT_COUNT;
        vertexArray.setVertexAttribPointer(dataOffset, particleProgram.getColorLocation(),
                COLOR_COMPONENT_COUNT, STRIDE);
        dataOffset += COLOR_COMPONENT_COUNT;
        vertexArray.setVertexAttribPointer(dataOffset, particleProgram.getDirectionVectorLocation(),
                VECTOR_COMPONENT_COUNT, STRIDE);
        dataOffset += VECTOR_COMPONENT_COUNT;
        vertexArray.setVertexAttribPointer(dataOffset, particleProgram.getParticleStartTimeLocation(),
                PARTICLE_START_TIME_COMPONENT_COUNT, STRIDE);
    }

    /** Draws all live particles as GL points. */
    public void draw(){
        glDrawArrays(GL_POINTS, 0, currentParticleCount);
    }
}
<file_sep>/app/src/main/java/com/example/david/particles/Constants.java
package com.example.david.particles;
/**
* Created by david on 09/04/15.
*/
/**
 * Shared numeric constants for the app.
 * Made final with a private constructor: this is a pure constant holder
 * and must not be instantiated or subclassed.
 */
public final class Constants {
    /** Size of a Java float in bytes (presumably used for buffer sizing — usage not visible here). */
    public static final int BYTES_PER_FLOAT = 4;

    private Constants() {
        // Non-instantiable constants class.
    }
}
<file_sep>/app/src/main/java/com/example/david/particles/util/LoggerConfig.java
package com.example.david.particles.util;
/**
* Created by david on 15/02/15.
*/
/**
 * Compile-time logging switch.
 * Made final with a private constructor: this is a pure configuration
 * holder and must not be instantiated or subclassed.
 */
public final class LoggerConfig {
    /** Global on/off flag — presumably gates debug logging elsewhere; usage not visible here. */
    public static final boolean ON = true;

    private LoggerConfig() {
        // Non-instantiable configuration class.
    }
}
|
6fc8ab6bde8ac33093bf6eb5f64af97e24d0b44a
|
[
"Java"
] | 3
|
Java
|
DavHer/Particles
|
99d2d3c0afae860de6970ab645878945122c5391
|
f50701c631b142fac4c4ba260e2db63dcf390eb2
|
refs/heads/master
|
<repo_name>RomainPct/Sesame<file_sep>/Sésame/Style/Inputs/safetyInput.class.swift
//
// safetyInput.swift
// Safety First
//
// Created by <NAME> on 16/10/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
// Classic input
/// Styled text field with an asymmetric rounded-corner mask and an
/// error state that tints the background red.
class safetyInput: UITextField {

    /// Toggles the error styling.
    /// Fix: `nil` is now treated as "no error" — the previous force-unwrap
    /// (`error!`) crashed whenever the property was reset to nil.
    var error: Bool? {
        didSet {
            backgroundColor = (error ?? false) ? UIColor(red: 1, green: 0, blue: 0, alpha: 0.05) : UIColor(named: "BlueTransparent")
        }
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        self.layer.cornerRadius = 0
        self.font = .systemFont(ofSize: 20)
        self.adjustsFontSizeToFitWidth = false
        // Asymmetric corners: 15pt top-left/bottom-right, 5pt elsewhere.
        let path = UIBezierPath.getRoundedRect(topLeftRadius: 15, topRightRadius: 5, bottomRightRadius: 15, bottomLeftRadius: 5, selfBoundsRect: self.bounds)
        let shapeLayer = CAShapeLayer()
        shapeLayer.path = path.cgPath
        self.layer.mask = shapeLayer
        self.borderStyle = .none
        // First layout: seed the error state so the background color is applied.
        if error == nil {
            error = false
        }
    }

    // Inset text, placeholder and editing rects by 7pt on every side.
    override func textRect(forBounds bounds: CGRect) -> CGRect {
        return UIEdgeInsetsInsetRect(bounds, UIEdgeInsetsMake(7, 7, 7, 7))
    }

    override func placeholderRect(forBounds bounds: CGRect) -> CGRect {
        return UIEdgeInsetsInsetRect(bounds, UIEdgeInsetsMake(7, 7, 7, 7))
    }

    override func editingRect(forBounds bounds: CGRect) -> CGRect {
        return UIEdgeInsetsInsetRect(bounds, UIEdgeInsetsMake(7, 7, 7, 7))
    }
}
<file_sep>/Podfile
# CocoaPods dependency manifest for the Sésame app (iOS 11+).
project 'Sésame.xcodeproj'
# Uncomment the next line to define a global platform for your project
platform :ios, '11.0'

target 'Sésame' do
  # Comment the next line if you're not using Swift and don't want to use dynamic frameworks
  use_frameworks!

  # Pods for Sésame
  pod 'RealmSwift' # Realm Database
  pod 'KeychainAccess' # Use keychain
  pod 'Alamofire' # launch http requests
  pod 'Firebase/Core' # Firebase for analytics

end
<file_sep>/Sésame/Extensions/String.extension.swift
//
// StringExtension.swift
// Sésame
//
// Created by <NAME> on 03/01/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
extension String {

    /// Number of rendered lines this string would occupy in `textView`,
    /// measured against the view's width minus 24pt of horizontal padding.
    /// NOTE(review): force-unwraps `textView.font` — crashes if the text
    /// view has no font set; callers must guarantee one.
    func countLines(inTextView textView:UITextView) -> Int {
        let maxSize = CGSize(width: textView.frame.width - 24, height: 1000)
        let attributesDictionry = [ NSAttributedStringKey.font : textView.font! ]
        let textHeight = NSString(string: self).boundingRect(with: maxSize, options: .usesLineFragmentOrigin, attributes: attributesDictionry, context: nil).height
        return Int(textHeight / textView.font!.lineHeight)
    }

    /// Parses the string as a Date using the given format string.
    /// NOTE(review): force-unwraps the parse result — crashes when the
    /// string does not match `format`; callers must guarantee the format.
    func toDate( dateFormat format : String) -> Date {
        let dateFormatter = DateFormatter()
        dateFormatter.dateFormat = format
        return dateFormatter.date(from: self)!
    }
}
<file_sep>/Sésame/ViewControllers/EditPassword.ViewController.swift
//
// editPasswordViewController.swift
// Sésame
//
// Created by <NAME> on 01/02/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
/// Screen that lets the user change the master (Sésame) password.
/// Validates: old password matches, new password is at least 8 characters,
/// confirmation equals the new password. Error labels are revealed by
/// growing their height constraints from 0 to 12.
class editPasswordViewController: UIViewController, UITextFieldDelegate {

    let manager = VaultManager()
    var scroll:scrollManager?

    @IBOutlet weak var ui_endScreen: UIView!
    @IBOutlet weak var ui_scrollView: UIScrollView!
    @IBOutlet weak var ui_oldPasswordInput: safetyInput!
    @IBOutlet weak var cs_oldPasswordError: NSLayoutConstraint!
    @IBOutlet weak var ui_newPasswordInput: safetyInput!
    @IBOutlet weak var cs_newPasswordError: NSLayoutConstraint!
    @IBOutlet weak var ui_confirmPasswordInput: safetyInput!
    @IBOutlet weak var cs_confirmPasswordError: NSLayoutConstraint!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        ui_oldPasswordInput.delegate = self
        ui_newPasswordInput.delegate = self
        ui_confirmPasswordInput.delegate = self
        // Keyboard-aware scrolling for the three inputs.
        scroll = scrollManager(forInputs: [ui_oldPasswordInput,ui_newPasswordInput,ui_confirmPasswordInput], atDepth: .flat, inScrollView: ui_scrollView)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // "Save" button action: validate, then confirm via action sheet.
    @IBAction func savePasswordUpdate(_ sender: Any) {
        leftAllInputs()
        if verifInputs() {
            // Confirmation action sheet
            let alert = UIAlertController(title: nil, message: nil, preferredStyle: .actionSheet)
            alert.addAction(UIAlertAction(title: NSLocalizedString("editPasswordAction", comment: ""), style: .default, handler: { (_) in
                // Persist the new master password and fade in the success screen.
                self.manager.SesamePassword = self.ui_newPasswordInput.text
                UIView.transition(with: self.ui_endScreen, duration: 0.2, options: [.transitionCrossDissolve,.curveEaseIn], animations: {
                    self.ui_endScreen.isHidden = false
                }, completion: nil)
            }))
            alert.addAction(UIAlertAction(title: NSLocalizedString("cancel", comment: ""), style: .cancel, handler: nil))
            present(alert, animated: true, completion: nil)
        }
    }

    // TextField delegate: Return advances focus; leaving a field re-validates.
    func textFieldShouldReturn(_ textField: UITextField) -> Bool {
        leftInputs(textField)
        return false
    }

    func textFieldShouldEndEditing(_ textField: UITextField) -> Bool {
        _ = verifInputs(inputLeft: textField)
        return true
    }

    // Tap outside the fields dismisses the keyboard.
    @IBAction func leftFirstResponder(_ sender: Any) {
        leftAllInputs()
    }

    // Validate the three inputs; `inputLeft` suppresses the error message
    // of the field the user is about to fill next (no premature errors).
    func verifInputs(inputLeft :UITextField? = nil) -> Bool {
        var answer = true
        // Check old password
        answer = (ui_oldPasswordInput.text == manager.SesamePassword)
        cs_oldPasswordError.constant = (ui_oldPasswordInput.text != manager.SesamePassword) ? 12 : 0
        // Check new password (minimum 8 characters)
        if let newPassword = ui_newPasswordInput.text,
            newPassword.count < 8 {
            answer = false
            if inputLeft != ui_oldPasswordInput {
                cs_newPasswordError.constant = 12
            }
        } else {
            cs_newPasswordError.constant = 0
        }
        // Check confirmation matches the new password
        if ui_confirmPasswordInput.text != ui_newPasswordInput.text {
            answer = false
            if inputLeft != ui_newPasswordInput {
                cs_confirmPasswordError.constant = 12
            }
        } else {
            cs_confirmPasswordError.constant = 0
        }
        return answer
    }

    // Move focus to the next field, or dismiss the keyboard on the last one.
    func leftInputs(_ textField : UITextField){
        switch textField {
        case ui_oldPasswordInput:
            ui_newPasswordInput.becomeFirstResponder()
        case ui_newPasswordInput:
            ui_confirmPasswordInput.becomeFirstResponder()
        default:
            textField.resignFirstResponder()
        }
    }

    // Dismiss the keyboard from every input.
    func leftAllInputs(){
        [ui_oldPasswordInput,ui_newPasswordInput,ui_confirmPasswordInput].forEach({$0?.resignFirstResponder()})
    }
}
<file_sep>/Sésame/Models/MultiDevicesSynchronizationManager.model.swift
//
// MultiDevicesSynchronizationManager.model.swift
// Sésame
//
// Created by <NAME> on 20/08/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
/// Placeholder for upcoming multi-device synchronization support.
/// Intentionally empty — no behavior implemented yet.
class MultiDevicesSynchronizationManager {
}
<file_sep>/Sésame/Style/Buttons/safetyBarButton.class.swift
//
// safetyBarButton.class.swift
// Sésame
//
// Created by <NAME> on 04/04/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
/// Bar-style button: white pill with asymmetric rounded corners that
/// inverts its colors (blue on white -> white on blue) while pressed.
class safetyBarButton: UIButton {
    override func layoutSubviews() {
        super.layoutSubviews()
        // Style: invert background/title colors when highlighted.
        backgroundColor = isHighlighted ? UIColor(named: "BluePrimary") : UIColor.white
        setTitleColor(UIColor(named: "BluePrimary") , for: .normal)
        setTitleColor(UIColor.white, for: .highlighted)
        titleLabel?.font = .systemFont(ofSize: 14)
        // Shape: 5pt top-left/bottom-right corners, 15pt elsewhere
        // (mirror of safetyInput's corner layout).
        let path = UIBezierPath.getRoundedRect(topLeftRadius: 5, topRightRadius: 15, bottomRightRadius: 5, bottomLeftRadius: 15, selfBoundsRect: self.bounds)
        let shapeLayer = CAShapeLayer()
        shapeLayer.path = path.cgPath
        layer.mask = shapeLayer
    }
}
<file_sep>/Sésame/Models/SafeguardManager.model.swift
//
// Synchronization.swift
// Sésame
//
// Created by <NAME> on 16/02/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Alamofire
class SafeguardManager {
let userInfo = UserDefaults.standard
let icloud = iCloudManager()
let timeCorresp:[Int:Double?] = [0 : nil, 1 : 1, 2 : 7, 3 : 30]
static let FREQUENCIES = [NSLocalizedString("frequency0", comment: ""),
NSLocalizedString("frequency1", comment: ""),
NSLocalizedString("frequency2", comment: ""),
NSLocalizedString("frequency3", comment: "")]
private let FREQUENCY_KEY = "safeguardFrequency"
private let USER_EMAIL_KEY = "userEmail"
private let LAST_SAFEGUARD_KEY = "lastSafeguard"
private let SAVE_USER_URL:URLConvertible = "http://romainpenchenat.free.fr/sesame/saveUser.php"
var lastSafeguard: String? {
get {
return userInfo.string(forKey: LAST_SAFEGUARD_KEY)
}
set {
userInfo.set(newValue, forKey: LAST_SAFEGUARD_KEY)
}
}
var safeguardFrequency: Int? {
get {
return (userInfo.object(forKey: FREQUENCY_KEY) != nil) ? userInfo.integer(forKey: FREQUENCY_KEY) : nil
}
set {
userInfo.set(newValue, forKey: FREQUENCY_KEY)
}
}
var userEmail: String? {
get {
return userInfo.string(forKey: USER_EMAIL_KEY)
}
set {
userInfo.set(newValue, forKey: USER_EMAIL_KEY)
}
}
init(newEmail:String? = nil,newFrequency:Int? = nil) {
if newEmail != nil, newFrequency != nil {
if SafeguardManager.verif(newEmail: newEmail), SafeguardManager.verif(newFrequency: newFrequency) {
userEmail = newEmail
safeguardFrequency = newFrequency
}
}
}
func safeguard() -> Bool {
// Vérifier existence adresse mail dans UserDefaults
if userEmail != nil {
// Enregistrer db key dans database
let table:[String : String] = ["email":userEmail!,"key": VaultManager().getKey() ]
Alamofire.request(SAVE_USER_URL, method: .post, parameters: table, encoding: JSONEncoding.default, headers: nil)
// Créer dossier app dans IcloudDrive
icloud.createFolder()
// Enregistrer fichier sur Icloud Drive
icloud.saveRealmFile(realmFile: VaultManager().getRealmFile())
// Enregistrer date derniere sauvegarde
let format = DateFormatter()
format.dateFormat = "dd/MM/yyyy"
lastSafeguard = format.string(from: Date())
}
return true
}
/// Validates an email address against an RFC-5322-style regex.
/// Returns false for nil, the empty string, or anything the pattern rejects.
/// FIX: the previous revision evaluated the predicate twice (once in an
/// `if`, once again in a ternary) — one evaluation is sufficient and the
/// result is identical.
static func verif(newEmail: String?) -> Bool {
    guard let email = newEmail, !email.isEmpty else { return false }
    let EMAIL_REGEX = "(?:[a-z0-9!#$%\\&'*+/=?\\^_`{|}~-]+(?:\\.[a-z0-9!#$%\\&'*+/=?\\^_`{|}"+"~-]+)*|\"(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21\\x23-\\x5b\\x5d-\\"+"x7f]|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])*\")@(?:(?:[a-z0-9](?:[a-"+"z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\\[(?:(?:25[0-5"+"]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-"+"9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21"+"-\\x5a\\x53-\\x7f]|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])+)\\])"
    let emailTest = NSPredicate(format:"SELF MATCHES[c] %@", EMAIL_REGEX)
    return emailTest.evaluate(with: email)
}
/// Validates a backup-frequency index: accepted values are 0 through 3.
static func verif(newFrequency: Int?) -> Bool {
    guard let frequency = newFrequency else { return false }
    return (0..<4).contains(frequency)
}
}
<file_sep>/Sésame/ViewControllers/EditCard.ViewController.swift
//
// editCardViewController.swift
// Sésame
//
// Created by <NAME> on 21/12/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Edit screen for an existing card. Pre-fills the form from the stored
/// card (looked up by `id`), validates the name on save, and lets the user
/// delete the card via a confirmation action sheet.
class editCardViewController: UIViewController, UITextFieldDelegate, UITextViewDelegate {
    @IBOutlet weak var ui_nameInput: safetyInput!
    @IBOutlet weak var cs_nameErrorHeight: NSLayoutConstraint!
    @IBOutlet weak var ui_identifierInput: safetyInput!
    @IBOutlet weak var ui_passwordInput: safetyInput!
    @IBOutlet weak var ui_notesInput: safetyTextarea!
    @IBOutlet weak var ui_scrollView: UIScrollView!
    @IBOutlet weak var ui_notesView: safetyBox!
    lazy var screenHeight = self.ui_scrollView.frame.height
    /// Identifier of the card being edited; set by the presenting controller.
    var id: Int?
    var scroll: scrollManager?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Pre-fill the form with the card's current values.
        if let id = id {
            let card = Singletons.vault?.getCard(withId: id)
            ui_nameInput.text = card?.name
            ui_identifierInput.text = card?.identifier
            ui_passwordInput.text = card?.password
            ui_notesInput.text = card?.notes
        }
        // Wire up delegates and keyboard-aware scrolling.
        ui_nameInput.delegate = self
        ui_identifierInput.delegate = self
        ui_passwordInput.delegate = self
        ui_notesInput.delegate = self
        scroll = scrollManager(forInputs: [ui_nameInput,ui_identifierInput,ui_passwordInput,ui_notesInput], atDepth: .superview, inScrollView: ui_scrollView)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /// Persists the edits and returns to the detail screen.
    /// FIX: `id` is now unwrapped safely (the previous revision
    /// force-unwrapped `id!`, crashing if the presenter never set it) and
    /// the password field's text is passed for the `password` argument
    /// (that reference was corrupted in the previous revision).
    @IBAction func saveCard(_ sender: Any) {
        if let id = id,
            let name = ui_nameInput.text,
            verifName() {
            Singletons.vault?.updateCard(id: id, name: name, identifier: ui_identifierInput.text, password: ui_passwordInput.text, notes: ui_notesInput.text)
            performSegue(withIdentifier: "leftEditCard", sender: nil)
        }
    }

    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        if segue.identifier == "leftEditCard" {
            // The card was modified: let the detail screen know.
            let nextVC:completeCardViewController! = segue.destination as? completeCardViewController
            nextVC.wasEdited = true
        } else if segue.identifier == "unwindToMain" {
            // The card was deleted: the main list must reload.
            if let nextVC = segue.destination as? MainViewController {
                nextVC.needReload = true
            }
        }
    }

    // MARK: UITextFieldDelegate
    /// Strips surrounding whitespace when text is pasted/autofilled into the
    /// identifier field from position 0.
    func textField(_ textField: UITextField, shouldChangeCharactersIn range: NSRange, replacementString string: String) -> Bool {
        if textField == ui_identifierInput,
            range.location == 0,
            string.count > 1 {
            ui_identifierInput.text = string.trimmingCharacters(in: .whitespaces)
            return false
        } else {
            return true
        }
    }

    /// Return key moves focus down the form: name → identifier → password.
    func textFieldShouldReturn(_ textField: UITextField) -> Bool {
        switch textField {
        case ui_nameInput:
            ui_identifierInput.becomeFirstResponder()
        case ui_identifierInput:
            ui_passwordInput.becomeFirstResponder()
        default:
            textField.resignFirstResponder()
        }
        return false
    }

    // MARK: UITextViewDelegate
    /// Caps the notes field at 180 characters and 4 lines. Backspace is
    /// detected via the strcmp(-92) trick so deletions are always allowed.
    func textView(_ textView: UITextView, shouldChangeTextIn range: NSRange, replacementText text: String) -> Bool {
        var answer = true
        let char = text.cString(using: String.Encoding.utf8)!
        let isBackSpace = strcmp(char, "\\b")
        if (isBackSpace != -92) {
            let fullText = textView.text + text
            if fullText.count <= 180 {
                if fullText.countLines(inTextView: textView) > 4 {
                    answer = false
                }
            } else {
                answer = false
            }
        }
        return answer
    }

    /// Tapping outside an input dismisses the keyboard.
    @IBAction func tapDetected(_ sender: Any) {
        leftFirstResponder()
    }

    fileprivate func getFirstResponder() -> UIView? {
        return [ui_notesInput, ui_nameInput, ui_identifierInput, ui_passwordInput].filter { $0.isFirstResponder }.first
    }

    fileprivate func leftFirstResponder() {
        if let firstResponder = getFirstResponder() {
            firstResponder.resignFirstResponder()
        }
    }

    /// Asks for confirmation, then deletes the card and unwinds to the list.
    @IBAction func deleteTheCard(_ sender: Any) {
        let verifSheet = UIAlertController(title: nil, message: nil, preferredStyle: .actionSheet)
        verifSheet.addAction(UIAlertAction(title: NSLocalizedString("wantToDelete", comment: ""), style: .destructive, handler: { (answer) in
            // Delete the card, then navigate back.
            Singletons.vault?.deleteCard(withId: self.id!)
            self.performSegue(withIdentifier: "unwindToMain", sender: nil)
        }))
        verifSheet.addAction(UIAlertAction(title: NSLocalizedString("cancel", comment: ""), style: .cancel, handler: nil))
        present(verifSheet, animated: true, completion: nil)
    }

    /// Validates that the name field is filled, toggling the inline error UI.
    func verifName() -> Bool {
        let answer = ui_nameInput.isFilled
        ui_nameInput.error = !answer
        cs_nameErrorHeight.constant = answer ? 0 : 12
        if !answer {
            leftFirstResponder()
        }
        return answer
    }
}
<file_sep>/Sésame/ViewControllers/CodeRecoverData.ViewController.swift
//
// CodeRecoverDataViewController.swift
// Sésame
//
// Created by <NAME> on 24/02/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
/// Second step of data recovery: the user types the 6-digit code that was
/// emailed to them; the code is exchanged for the vault encryption key.
class CodeRecoverDataViewController: UIViewController {
@IBOutlet weak var ui_descriptionLabel: UILabel!
@IBOutlet weak var ui_codeInput: safetyInput!
@IBOutlet weak var cs_errorHeight: NSLayoutConstraint!
// Set by the previous screen (RecoverDataViewController).
var recovery:recoveryManager?
// Re-entrancy guard: prevents firing a second verification while one is in flight.
var isChecking:Bool = false
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
// Append the target email address to the static description text.
if recovery != nil {
ui_descriptionLabel.text = "\(ui_descriptionLabel.text!) \(recovery!.email) :"
}
}
/// Asks the recovery manager to exchange the typed code for the encryption
/// key; on success segues to the recovery screen, on failure shows the
/// inline error (12pt constraint) and re-enables checking.
fileprivate func checkRecoveryCode() {
if !isChecking {
isChecking = true
recovery?.getEncryptionKey(secretCode: ui_codeInput.text, completionHandler: { (response) in
self.cs_errorHeight.constant = response ? 0 : 12
if response {
// Start the recovery
self.performSegue(withIdentifier: "launchRecovery", sender: nil)
} else {
self.isChecking = false
}
})
}
}
@IBAction func checkCode(_ sender: Any) {
checkRecoveryCode()
}
/// Auto-submits as soon as 6 digits are typed; trims any excess characters.
@IBAction func editingChanged(_ sender: UITextField) {
let count = sender.text?.count ?? 0
if count == 6 {
checkRecoveryCode()
} else if count > 6 {
sender.text?.removeLast(count - 6)
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
// MARK: - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// Get the new view controller using segue.destinationViewController.
// Pass the selected object to the new view controller.
// Hand the recovery manager (now holding the key) to the next screen.
if let nextVC = segue.destination as? RecoveryViewController {
nextVC.recovery = recovery
}
}
}
<file_sep>/Sésame/Models/iCloudManager.model.swift
//
// iCloudManager.model.swift
// Sésame
//
// Created by <NAME> on 16/04/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
/// Handles iCloud Drive storage of the Realm backups: creating the app's
/// Documents folder, writing dated backup files (keeping at most 15),
/// locating and downloading the most recent backup, and cleaning up
/// Realm's auxiliary files.
class iCloudManager {
// URL of a backup file together with its (date-prefixed) file name.
struct URLwDate {
var url:URL?
var fileName:String
}
let fileManager = FileManager.default
// ".../<ubiquity container>/Documents" — nil when iCloud is unavailable.
lazy var URL_appDocument = fileManager.url(forUbiquityContainerIdentifier: nil)?.appendingPathComponent("Documents")
// Most recent backup found so far; fileName "0" sorts below any real date.
var URL_lastSafeguard:URLwDate = URLwDate(url: nil, fileName: "0")
/// Creates the app's Documents folder on iCloud Drive if it is missing.
func createFolder() {
if let URL_appDocument = URL_appDocument,
(!fileManager.fileExists(atPath: URL_appDocument.path, isDirectory: nil)) {
do {
try fileManager.createDirectory(at: URL_appDocument, withIntermediateDirectories: true, attributes: nil)
} catch {
print("Create folder ERROR : \(error)")
}
}
}
/// Copies the given Realm file to iCloud as "yyyy-MM-dd_sesameSafeguard.realm"
/// (overwriting today's backup if one exists), then prunes old backups so
/// only the 15 most recent files remain.
func saveRealmFile(realmFile:URL?){
let today = Date()
let format = DateFormatter()
format.dateFormat = "yyyy-MM-dd"
if let URL_fileOniCloud = URL_appDocument?.appendingPathComponent(format.string(from: today) + "_sesameSafeguard.realm"),
let URL_realmFile = realmFile {
// If a file with the same name already exists, remove it first
if fileManager.fileExists(atPath: URL_fileOniCloud.path) {
do {
try fileManager.removeItem(at: URL_fileOniCloud)
} catch {
print("Remove file ERROR : \(error)")
}
}
do {
try fileManager.copyItem(at: URL_realmFile, to: URL_fileOniCloud)
} catch {
print("Create file ERROR : \(error)")
}
}
// Delete old backups so iCloud storage does not fill up.
// File names start with "yyyy-MM-dd", so a descending lexical sort is a
// descending date sort; the first 15 (newest) are kept, the rest deleted.
if let URL_appDocuments = URL_appDocument,
let content = try? fileManager.contentsOfDirectory(at: URL_appDocuments, includingPropertiesForKeys: nil, options: []) {
if content.count > 15 {
var components = content.map { (url) -> String in
var component = url.lastPathComponent
// Not-yet-downloaded items appear as ".name.icloud"; drop the dot.
if component.contains(".icloud") { component.removeFirst() }
return component
}.filter { (str) -> Bool in return str != ".Trash" }.sorted(by: >)
components.removeFirst(15)
for component in components {
// Re-add the leading dot for placeholder (.icloud) items.
let componentPath = component.contains(".icloud") ? ".\(component)" : component
let URL_file = URL_appDocuments.appendingPathComponent(componentPath)
do {
try fileManager.removeItem(at: URL_file)
} catch {
print("Remove file ERROR : \(error)")
}
}
}
}
}
/// Finds the most recent backup file on iCloud Drive, triggering a download
/// if it is only a placeholder (.icloud), and returns its local URL + name.
/// NOTE(review): the download wait loop below spins (usleep 500µs) with no
/// timeout — if the download never completes this blocks forever; confirm
/// callers run this off the main thread.
func getLastRealmSafeguard() -> URLwDate {
if let URL_appDocuments = URL_appDocument,
let content = try? fileManager.contentsOfDirectory(at: URL_appDocuments, includingPropertiesForKeys: nil, options: []) {
// Pick the lexically greatest (i.e. newest "yyyy-MM-dd"-prefixed) file,
// skipping Realm auxiliary files and the trash.
for file in content {
if let keys = try? file.resourceValues(forKeys: [.nameKey]),
let name = keys.name?.replacingOccurrences(of: ".", with: ""),
!name.contains("management"),
!name.contains("lock"),
!name.contains("note"),
name != "Trash",
name > URL_lastSafeguard.fileName {
URL_lastSafeguard.url = file
URL_lastSafeguard.fileName = name
}
}
}
if var lastPathComponent = URL_lastSafeguard.url?.lastPathComponent,
let filesPath = URL_lastSafeguard.url?.deletingLastPathComponent().path,
lastPathComponent.contains(".icloud") {
lastPathComponent.removeFirst() // drop the leading dot of the placeholder name
let downloadedFilePath = filesPath + "/" + lastPathComponent.replacingOccurrences(of: ".icloud", with: "")
do {
try fileManager.startDownloadingUbiquitousItem(at: URL_lastSafeguard.url!)
} catch {
print("Unexpected error: \(error).")
}
URL_lastSafeguard.url = nil
// Poll until the downloaded file materialises on disk (no timeout).
while URL_lastSafeguard.url == nil {
if fileManager.fileExists(atPath: downloadedFilePath) {
URL_lastSafeguard.url = URL(fileURLWithPath: downloadedFilePath)
}
usleep(500)
}
}
return URL_lastSafeguard
}
/// Removes Realm's auxiliary files (.management, .lock, .note) from the
/// iCloud folder; only the .realm data files are worth keeping.
func cleanRealmManagementFiles(){
if let URL_appDocuments = URL_appDocument,
let content = try? fileManager.contentsOfDirectory(at: URL_appDocuments, includingPropertiesForKeys: nil, options: []) {
let toDelete = content.filter { (url) -> Bool in
if url.path.contains("management") || url.path.contains("lock") || url.path.contains("note") {
return true
} else { return false }
}
for docURL in toDelete {
print("Delete : \(docURL)")
print("--")
do {
try fileManager.removeItem(at: docURL)
} catch { print("Remove file ERROR : \(error)") }
}
}
}
}
<file_sep>/Sésame/ViewControllers/CreateCard.ViewController.swift
//
// createCardViewController.swift
// Safety First
//
// Created by <NAME> on 11/10/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Creation screen for a new card: validates the name, saves the card via
/// the vault singleton and hands the new card's id to the main list so it
/// can scroll to it.
class createCardViewController: UIViewController, UITextFieldDelegate, UITextViewDelegate {
var scroll:scrollManager?
@IBOutlet weak var ui_nameInput: safetyInput!
@IBOutlet weak var cs_nameErrorHeight: NSLayoutConstraint!
@IBOutlet weak var ui_identifierInput: UITextField!
@IBOutlet weak var ui_passwordInput: UITextField!
@IBOutlet weak var ui_notesInput: safetyTextarea!
@IBOutlet weak var ui_scrollView: UIScrollView!
@IBOutlet weak var ui_notesView: safetyBox!
lazy var screenHeight = self.ui_scrollView.frame.height
// Card created in shouldPerformSegue, consumed in prepare(for:).
var newCard:card? = nil
@IBAction func leftThisVC(_ sender: Any) {
dismiss(animated: true, completion: nil)
}
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
// Wire up delegates and keyboard-aware scrolling.
ui_notesInput.delegate = self
ui_nameInput.delegate = self
ui_identifierInput.delegate = self
ui_passwordInput.delegate = self
scroll = scrollManager(forInputs: [ui_nameInput,ui_identifierInput,ui_passwordInput,ui_notesInput], atDepth: .superview, inScrollView: ui_scrollView)
}
override func viewWillAppear(_ animated: Bool) {
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
/// Blocks the "saveData" segue until the name validates; the card itself
/// is created here so prepare(for:) can pass its id along.
override func shouldPerformSegue(withIdentifier identifier: String, sender: Any?) -> Bool {
if identifier == "saveData" {
if let name = ui_nameInput.text,
verifName() {
newCard = Singletons.vault?.newCard(name: name, identifier: ui_identifierInput.text, password: ui_passwordInput.text, notes: ui_notesInput.text)
} else {
return false
}
}
return true
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if segue.identifier == "saveData",
let newCard = newCard,
let nextVC = segue.destination as? MainViewController {
nextVC.cardToScrollToId = newCard.id
}
}
// MARK: UITextFieldDelegate
// Strips surrounding whitespace when text is pasted/autofilled into the
// identifier field from position 0.
func textField(_ textField: UITextField, shouldChangeCharactersIn range: NSRange, replacementString string: String) -> Bool {
if textField == ui_identifierInput,
range.location == 0,
string.count > 1 {
ui_identifierInput.text = string.trimmingCharacters(in: .whitespaces)
return false
} else {
return true
}
}
// Return key moves focus down the form: name → identifier → password.
func textFieldShouldReturn(_ textField: UITextField) -> Bool {
switch textField {
case ui_nameInput:
ui_identifierInput.becomeFirstResponder()
case ui_identifierInput:
ui_passwordInput.becomeFirstResponder()
default:
textField.resignFirstResponder()
}
return false
}
func textFieldDidEndEditing(_ textField: UITextField) {
if textField == ui_nameInput {
_ = verifName()
}
}
// MARK: UITextViewDelegate
// Caps the notes field at 180 characters and 4 lines; backspace is detected
// via the strcmp(-92) trick so deletions are always allowed.
func textView(_ textView: UITextView, shouldChangeTextIn range: NSRange, replacementText text: String) -> Bool {
var answer = true
let char = text.cString(using: String.Encoding.utf8)!
let isBackSpace = strcmp(char, "\\b")
if (isBackSpace != -92) {
let fullText = textView.text + text
if fullText.count <= 180 {
if fullText.countLines(inTextView: textView) > 4 {
answer = false
}
} else {
answer = false
}
}
return answer
}
// Tapping outside an input dismisses the keyboard.
@IBAction func tapDetected(_ sender: Any) {
leftFirstResponder()
}
fileprivate func getFirstResponder() -> UIView? {
return [ui_notesInput, ui_nameInput, ui_identifierInput, ui_passwordInput].filter { $0.isFirstResponder }.first
}
fileprivate func leftFirstResponder() {
if let firstResponder = getFirstResponder() {
firstResponder.resignFirstResponder()
}
}
/// Validates that the name field is filled, toggling the inline error UI.
func verifName() -> Bool {
let answer = ui_nameInput.isFilled
ui_nameInput.error = !answer
cs_nameErrorHeight.constant = answer ? 0 : 12
if !answer {
leftFirstResponder()
}
return answer
}
}
<file_sep>/Sésame/Models/scrollManager.model.swift
//
// scrollManager.model.swift
// Sésame
//
// Created by <NAME> on 18/04/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
import Foundation
/// Where scrollManager measures an input's bottom edge when computing the
/// keyboard-avoidance offset: `.flat` uses the input's own frame (+14pt),
/// `.superview` uses the frame of the input's container.
enum depth {
case flat
case superview
}
/// Scrolls a UIScrollView so the currently focused input stays visible
/// above the keyboard, driven by UIKeyboardWillShow/WillHide notifications.
class scrollManager {
    let _inputs:[UIView]
    let _depth:depth
    let _bonusPos:CGFloat
    let ui_scrollView:UIScrollView

    /// - Parameters:
    ///   - inputs: views whose first-responder status drives the scrolling.
    ///   - depth: whether to measure the input itself or its superview.
    ///   - scrollView: the scroll view whose content offset is adjusted.
    ///   - y: extra vertical offset added to the computed bottom position.
    init(forInputs inputs:[UIView], atDepth depth:depth, inScrollView scrollView:UIScrollView, withAdditionalY y:CGFloat = 0) {
        _inputs = inputs
        _depth = depth
        _bonusPos = y
        ui_scrollView = scrollView
        NotificationCenter.default.addObserver(self, selector: #selector(keyboardWillDisappear),
                                               name: .UIKeyboardWillHide, object: nil)
        NotificationCenter.default.addObserver(self, selector: #selector(keyboardWillAppear(notification:)),
                                               name: .UIKeyboardWillShow, object: nil)
    }

    deinit {
        // FIX: balance the addObserver calls above. Previously the
        // registrations were never removed, leaking them (and, on older
        // iOS versions, leaving a dangling observer reference).
        NotificationCenter.default.removeObserver(self)
    }

    /// Scrolls so the active input's bottom edge clears the keyboard.
    @objc func keyboardWillAppear(notification: NSNotification?) {
        guard let keyboardFrame = notification?.userInfo?[UIKeyboardFrameEndUserInfoKey] as? NSValue else {
            return
        }
        let inputs = _inputs.filter { (input) -> Bool in return input.isFirstResponder }
        if let input = inputs.first,
            let bottomPos = getBottomPos(ofInput: input) {
            let scrollY = getScrollNeeded(inputBottomPos: bottomPos, keyboardHeight: keyboardFrame.cgRectValue.height)
            ui_scrollView.setContentOffset(CGPoint(x: 0, y: scrollY), animated: true)
        }
    }

    /// Resets the scroll position when the keyboard hides.
    @objc func keyboardWillDisappear() {
        ui_scrollView.setContentOffset(CGPoint(x: 0, y: 0), animated: true)
    }

    /// Bottom edge of the input, measured per the configured depth.
    fileprivate func getBottomPos(ofInput input:UIView) -> CGFloat? {
        switch _depth {
        case .flat:
            return input.frame.maxY + 14
        case .superview:
            return input.superview?.frame.maxY
        }
    }

    /// How far to scroll so the input clears the keyboard (28pt margin);
    /// never negative.
    func getScrollNeeded(inputBottomPos:CGFloat,
                         keyboardHeight:CGFloat) -> CGFloat {
        let bonusSpace:CGFloat = 28
        let spaceToScroll = keyboardHeight - (ui_scrollView.frame.height - (inputBottomPos + _bonusPos + bonusSpace))
        return spaceToScroll > 0 ? spaceToScroll : 0
    }
}
<file_sep>/Sésame/ViewControllers/Recovery.ViewController.swift
//
// RecoveryViewController.swift
// Sésame
//
// Created by <NAME> on 24/02/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
/// Final recovery step: animates a progress arrow, runs the actual data
/// recovery and either shows a success view or an error alert that unwinds
/// back to the email screen.
class RecoveryViewController: UIViewController {
@IBOutlet weak var ui_flecheImage: UIImageView!
@IBOutlet weak var ui_successView: UIView!
// Set by CodeRecoverDataViewController; holds the decryption key.
var recovery:recoveryManager?
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
}
override func viewWillAppear(_ animated: Bool) {
// Looping "arrow falls and fades" animation while recovery runs.
UIView.animate(withDuration: 2, delay: 0, options: [.repeat,.curveEaseInOut], animations: {
self.ui_flecheImage.alpha = 0
self.ui_flecheImage.transform = CGAffineTransform(translationX: 0, y: 250)
}, completion: nil)
}
/// Runs recovery synchronously once the view is on screen.
/// recoveryData() result codes: 2 = no backup file found, 1 = no key to
/// decrypt the file, anything else = success.
/// NOTE(review): the sleep() calls below block the main thread; confirm
/// this is an accepted trade-off for pacing the animation.
override func viewDidAppear(_ animated: Bool) {
let recoveryResult = recovery!.recoveryData()
switch recoveryResult {
case 2:
// No backup file could be found
sleep(1)
let alert = UIAlertController(title: NSLocalizedString("recoveryErrorTitle",comment: ""), message: NSLocalizedString("recoveryFileErrorMessage",comment: ""), preferredStyle: .alert)
alert.addAction(UIAlertAction(title: NSLocalizedString("recoveryErrorCancel",comment: ""), style: .cancel, handler: { (_) in
self.performSegue(withIdentifier: "unwindToRecoverData", sender: nil)
}))
present(alert, animated: true, completion: nil)
case 1:
// No key available to read the backup file
sleep(1)
let alert = UIAlertController(title: NSLocalizedString("recoveryErrorTitle",comment: ""), message: NSLocalizedString("recoveryKeyErrorMessage",comment: ""), preferredStyle: .alert)
alert.addAction(UIAlertAction(title: NSLocalizedString("recoveryErrorCancel",comment: ""), style: .cancel, handler: { (_) in
self.performSegue(withIdentifier: "unwindToRecoverData", sender: nil)
}))
present(alert, animated: true, completion: nil)
default:
// Success: reveal the success overlay.
sleep(2)
UIView.animate(withDuration: 0.3) {
self.ui_successView.alpha = 1
}
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
}
<file_sep>/Sésame/ViewControllers/RecoverData.ViewController.swift
//
// RecoverDataViewController.swift
// Sésame
//
// Created by <NAME> on 24/02/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
/// First recovery step: the user enters their backup email; after checking
/// iCloud and network availability the server emails them a 6-digit code.
class RecoverDataViewController: UIViewController, UITextFieldDelegate {
@IBOutlet weak var ui_emailInput: safetyInput!
@IBOutlet weak var cs_errorHeight: NSLayoutConstraint!
@IBOutlet weak var ui_sendButton: safetyButton!
@IBOutlet weak var ui_scrollView: UIScrollView!
lazy var screenHeight = self.ui_scrollView.frame.height
var recovery:recoveryManager?
var scroll:scrollManager?
private var _isSending = false
// Re-entrancy guard for the network round-trip; setting it also swaps the
// send button's title between "sending" and "send" states.
var isSending: Bool {
get {
return _isSending
}
set {
_isSending = newValue
if newValue {
ui_sendButton.setTitle(NSLocalizedString("sendingCode", comment: ""), for: .normal)
} else {
ui_sendButton.setTitle(NSLocalizedString("sendCode", comment: ""), for: .normal)
}
}
}
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
ui_emailInput.delegate = self
scroll = scrollManager(forInputs: [ui_emailInput], atDepth: .flat, inScrollView: ui_scrollView)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
// TextField Delegate
// Strips surrounding whitespace when text is pasted/autofilled from position 0.
func textField(_ textField: UITextField, shouldChangeCharactersIn range: NSRange, replacementString string: String) -> Bool {
if textField == ui_emailInput,
range.location == 0,
string.count > 1 {
ui_emailInput.text = string.trimmingCharacters(in: .whitespaces)
return false
} else {
return true
}
}
func textFieldShouldReturn(_ textField: UITextField) -> Bool {
sendCode()
return false
}
@IBAction func sendMeTheCode(_ sender: Any) {
sendCode()
}
/// Preconditions (iCloud enabled, network reachable) are checked first;
/// then the email is verified server-side and, on success, the code is
/// emailed and the code-entry screen is pushed.
func sendCode() {
ui_emailInput.resignFirstResponder()
let secur = securityManager()
if secur.isIcloudEnable() {
if secur.isInternetAvailable() {
if !isSending {
isSending = true
recovery = recoveryManager(email: ui_emailInput.text)
recovery!.verifEmail { (response) in
self.ui_emailInput.error = !response
self.cs_errorHeight.constant = response ? 0 : 12
self.isSending = response
if response {
// Send the code by email
self.recovery!.sendCode(completionHandler: { (response) in
if response {
// Display next screen
self.performSegue(withIdentifier: "goToCodeRecoverData", sender: nil)
}
self.isSending = false
})
}
}
}
} else {
present(secur.InternetNotAvailableAlert, animated: true, completion: nil)
}
} else {
present(secur.IcloudNotEnableAlert, animated: true, completion: nil)
}
}
@IBAction func tapDetected(_ sender: Any) {
ui_emailInput.resignFirstResponder()
}
// MARK: - Navigation
// Hand the recovery manager to the code-entry screen.
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if let nextVC = segue.destination as? CodeRecoverDataViewController {
nextVC.recovery = recovery
}
}
@IBAction func unwindToRecoverData(segue:UIStoryboardSegue) {}
}
<file_sep>/Sésame/Models/Vault.model.swift
//
// cards.swift
// Safety First
//
// Created by <NAME> on 11/10/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import Foundation
import RealmSwift
/// Wraps the Realm store of `card` objects and maintains two orderings:
/// an alphabetical list grouped into letter sections, and a filtered list
/// for text search.
class Vault {
    private var _realm:Realm
    /// First character of each section, in display order.
    private var _lettersList:[Character] = []
    /// _dataInSection[s] holds the index (into the sorted list) one past the
    /// last card of section s-1; index 0 is a 0 sentinel.
    private var _dataInSection:[Int] = [0]
    private var _cardsList:Results<card> {
        return _realm.objects(card.self).sorted(byKeyPath: "_uppercaseName")
    }
    private var _researchResultsList:[card] = []

    init(withRealm: Realm) {
        _realm = withRealm
        orderCardsList()
    }

    /// Next free primary key: one past the highest existing id.
    func getNewId() -> Int {
        var id = 0
        if let lastID = _realm.objects(card.self).sorted(byKeyPath: "_id").last?.id {
            id = lastID + 1
        }
        return id
    }

    /// Rebuilds the letter list and the per-section index table.
    fileprivate func orderGeneralCardsList() {
        var numberFor = 0
        var numberLetter = 0
        _lettersList = []
        // FIX: the section table must be reset as well. Previously only
        // _lettersList was cleared, so every reorder appended more entries
        // to _dataInSection and left stale tail values that corrupted the
        // section math in getIndexPathOfCard / getCardInSection.
        _dataInSection = [0]
        for card in _cardsList {
            numberFor = numberFor + 1
            if let letter = card.uppercaseName.first,
                _lettersList.last != letter {
                numberLetter = numberLetter + 1
                _dataInSection.append(0)
                _lettersList.append(letter)
            }
            _dataInSection[numberLetter] = numberFor
        }
    }

    /// Builds the search-result list: prefix matches rank first, otherwise
    /// only matches at word boundaries count. A leading "#" bucket marker
    /// in the stored name is skipped before matching.
    fileprivate func orderResearchCardsList(_ researchedText: String) {
        resetResearchResultsList()
        for card in _cardsList {
            let uppercaseName = card.uppercaseName[String.Index(encodedOffset: 0)] == "#" ? card.uppercaseName[String.Index(encodedOffset: 1)...].uppercased() : card.uppercaseName
            if uppercaseName.contains(researchedText),
                let firstCharacterIndex = uppercaseName.range(of: researchedText)?.lowerBound.encodedOffset {
                if firstCharacterIndex == 0 {
                    _researchResultsList.insert(card, at: 0)
                } else if uppercaseName[String.Index(encodedOffset: firstCharacterIndex - 1)] == " " {
                    _researchResultsList.append(card)
                }
            }
        }
    }

    /// Rebuilds the general (sectioned) ordering, or the search-result list
    /// when a search string is supplied.
    func orderCardsList(forResearchedText researchedText: String? = nil) {
        if researchedText == nil {
            orderGeneralCardsList()
        } else {
            orderResearchCardsList(researchedText!.uppercased())
        }
    }

    func resetResearchResultsList() {
        _researchResultsList = []
    }

    /// Creates, persists and returns a new card.
    /// FIX: restores the `password` argument reference that was corrupted
    /// in the previous revision.
    func newCard(name:String,identifier:String?,password:String?,notes:String?) -> card {
        let newCard = card(withId: getNewId())
        newCard.name = name
        newCard.identifier = identifier ?? ""
        newCard.password = password ?? ""
        newCard.notes = notes ?? ""
        try? _realm.write {
            _realm.add(newCard)
        }
        orderCardsList()
        return newCard
    }

    /// Updates an existing card in place.
    /// FIX: mutations now happen inside a Realm write transaction — managed
    /// objects may only be modified during a write, otherwise Realm throws.
    /// Also restores the corrupted `password` argument reference.
    func updateCard(id:Int, name:String, identifier:String?, password:String?, notes:String?) {
        if let theCard = getCard(withId: id) {
            try? _realm.write {
                theCard.name = name
                theCard.identifier = identifier ?? ""
                theCard.password = password ?? ""
                theCard.notes = notes ?? ""
            }
        }
        orderCardsList()
    }

    // MARK: Counting
    func isEmpty(forResearch:Bool = false) -> Bool {
        return countCards(forResearch: forResearch) == 0
    }

    func countSection() -> Int {
        return _lettersList.count
    }

    func countCards(forResearch:Bool = false) -> Int {
        return forResearch ? _researchResultsList.count : _cardsList.count
    }

    func countCardsInSection(inSection: Int) -> Int {
        var number:Int = 0
        for card in _cardsList {
            if card.uppercaseName.first == _lettersList[inSection] {
                number = number + 1
            }
        }
        return number
    }

    /// Section header (its letter) or nil when there are no sections.
    func getSectionName(forSection:Int) -> String? {
        if _lettersList.count != 0 {
            return "\(_lettersList[forSection])"
        } else {
            return nil
        }
    }

    // MARK: Card accessors
    /// FIX: the bounds check now uses the list actually being indexed; the
    /// previous guard always compared against the full card list, which
    /// could let an out-of-range index through for research results.
    func getCard(atIndex index:Int, forResearch:Bool = false) -> card? {
        guard index >= 0 && index < countCards(forResearch: forResearch) else {
            return nil
        }
        if forResearch {
            return _researchResultsList[index]
        } else {
            return _cardsList[index]
        }
    }

    func getCardID(atIndex index:Int, forResearch:Bool = false) -> Int {
        return getCard(atIndex: index, forResearch: forResearch)!.id
    }

    func getCardInSection(atIndex index:Int, inSection section:Int) -> card? {
        guard index >= 0 && index < countCards() && section >= 0 && section < countSection() else {
            return nil
        }
        return _cardsList[index + _dataInSection[section]]
    }

    func getCardIDInSection(atIndex index:Int, inSection section:Int) -> Int {
        return getCardInSection(atIndex: index, inSection: section)!.id
    }

    func getCard(withId id:Int) -> card? {
        return _realm.object(ofType: card.self, forPrimaryKey: id)
    }

    // MARK: IndexPath lookup
    /// Converts a card id into its (section, item) position in the
    /// sectioned list, or nil when the card is not present.
    func getIndexPathOfCard(withId id:Int) -> IndexPath? {
        // Locate the card in the sorted list.
        if let index = _cardsList.index(where: { (card) -> Bool in
            return card.id == id
        }) {
            // Collect the section boundaries at or before the card.
            let sections = _dataInSection.filter { $0 <= index && $0 != 0 }
            // Item number = overall index minus the first index of its section.
            if let last = sections.last {
                let item = index - last
                return IndexPath(item: item, section: sections.count)
            } else {
                return IndexPath(item: 0, section: 0)
            }
        }
        return nil
    }

    // MARK: Deletion
    func deleteCard(atIndex index:Int, inSection section:Int){
        if let cardToDelete = getCardInSection(atIndex: index, inSection: section) {
            try? _realm.write {
                _realm.delete(cardToDelete)
            }
        }
        orderCardsList()
    }

    func deleteCard(withId id:Int){
        if let cardToDelete = getCard(withId: id) {
            try? _realm.write {
                _realm.delete(cardToDelete)
            }
        }
        orderCardsList()
    }
}
<file_sep>/Sésame/Style/Containers/safetyBox.class.swift
//
// roundedView.swift
// Safety First
//
// Created by <NAME> on 29/10/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// A view drawn as an asymmetric rounded rectangle (15/5/15/5 corner radii)
/// filled with the "BluePrimary" color and a soft drop shadow.
class safetyBoxBlue: UIView {
    // FIX: reuse one shape layer across layout passes. The previous
    // revision created and inserted a brand-new CAShapeLayer on every
    // layoutSubviews call, so repeated layouts stacked an unbounded
    // number of sublayers.
    private let shapeLayer = CAShapeLayer()

    override func layoutSubviews() {
        super.layoutSubviews()
        self.layer.cornerRadius = 0
        let path = UIBezierPath.getRoundedRect(topLeftRadius: 15, topRightRadius: 5, bottomRightRadius: 15, bottomLeftRadius: 5, selfBoundsRect: self.bounds)
        shapeLayer.path = path.cgPath
        shapeLayer.fillColor = UIColor(named: "BluePrimary")?.cgColor
        self.layer.backgroundColor = UIColor.clear.cgColor
        if shapeLayer.superlayer == nil {
            self.layer.insertSublayer(shapeLayer, at: 0)
        }
        self.layer.masksToBounds = false
        self.layer.shadowPath = path.cgPath
        self.layer.shadowColor = UIColor.black.cgColor
        self.layer.shadowOpacity = 0.16
        self.layer.shadowRadius = 2
        self.layer.shadowOffset = CGSize(width: 1, height: 1)
    }
}
<file_sep>/Sésame/Style/Cells/SafetyFirstTableViewCell.class.swift
//
// SafetyFirstTableViewCell.swift
// Safety First
//
// Created by <NAME> on 28/10/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Table cell showing a card's name and identifier with a copy button;
/// the copy-button artwork is swapped for the French variant when the
/// device language is fr-FR.
class SafetyFirstTableViewCell: UITableViewCell {
    @IBOutlet weak var _name: UILabel!
    @IBOutlet weak var cs_nameHeight: NSLayoutConstraint!
    @IBOutlet weak var _identifier: UILabel!
    @IBOutlet weak var cs_identifierHeight: NSLayoutConstraint!
    @IBOutlet weak var ui_copyButton: UIButton!
    @IBOutlet weak var cs_copyButtonWidth: NSLayoutConstraint!

    override func layoutSubviews() {
        super.layoutSubviews()
        // Soft drop shadow around the whole cell.
        let cellLayer = self.layer
        cellLayer.shadowColor = UIColor.black.cgColor
        cellLayer.shadowOpacity = 0.16
        cellLayer.shadowRadius = 2
        cellLayer.shadowOffset = CGSize.zero
        // Localised copy-button image (French only).
        guard let lang = NSLocale.preferredLanguages.first, lang == "fr-FR" else { return }
        ui_copyButton.setImage(UIImage(named: "copy-\(lang)"), for: .normal)
    }

    override func setSelected(_ selected: Bool, animated: Bool) {
        super.setSelected(selected, animated: animated)
    }
}
<file_sep>/Sésame/AppDelegate.swift
//
// AppDelegate.swift
// Safety First
//
// Created by <NAME> on 11/10/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import Firebase
// Déplacement des variables
import KeychainAccess
import Foundation
// Fin
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
var window: UIWindow?
var leftApp_date:Date?
/// One-time migration of legacy settings from the old keychain service
/// ("Safety First") and legacy UserDefaults keys into their new homes
/// (VaultManager / SafeguardManager). Each value is moved then removed
/// from its old location, so subsequent launches find nothing to migrate.
fileprivate func rearrangeUserVariables() {
// Variable-relocation handling — DO NOT REMOVE BEFORE SEPTEMBER 2019
let keychain = Keychain(service: "com.romainp.Se-same").synchronizable(false)
let keychainOld = Keychain(service: "com.romainp.Safety-First")
let userInfo = UserDefaults.standard
// Toward userInfo
let manager = VaultManager()
// encryption key: old keychain -> VaultManager
if let key = keychainOld[data: "key"] {
// print("key move")
manager.encryptionKey = key
do {
try keychainOld.remove("key")
} catch { print("Error removing encryption key from keychain : \(error)")}
}
// "configured" flag: new keychain entry and legacy UserDefaults key both
// map to isSesameConfigured.
if keychain["SesameConfigured"] != nil {
// print("SesameConfigured move 2")
manager.isSesameConfigured = true
keychain["SesameConfigured"] = nil
}
if userInfo.bool(forKey: "configured") {
// print("SesameConfigured move 1")
manager.isSesameConfigured = true
userInfo.removeObject(forKey: "configured")
}
let safeguard = SafeguardManager()
// user email: keychain first, then legacy UserDefaults key
if let userEmail = keychain["userEmail"] {
// print("userEmail move 2")
safeguard.userEmail = userEmail
keychain["userEmail"] = nil
}
if let userEmail = userInfo.string(forKey: "user_email") {
// print("userEmail move 1")
safeguard.userEmail = userEmail
userInfo.removeObject(forKey: "user_email")
}
// last safeguard date: keychain first, then legacy UserDefaults key
if let lastSafeguard = keychain["lastSafeguard"] {
// print("last safeguard move 2")
safeguard.lastSafeguard = lastSafeguard
keychain["lastSafeguard"] = nil
}
if let lastSafeguard = userInfo.string(forKey: "last_synchronization") {
// print("last safeguard move 1")
safeguard.lastSafeguard = lastSafeguard
userInfo.removeObject(forKey: "last_synchronization")
}
// safeguard frequency: keychain (stored as string) first, then legacy key
if let safeguardFrequency = keychain["safeguardFrequency"] {
// print("safeguard frequency move 2")
safeguard.safeguardFrequency = Int(safeguardFrequency)
keychain["safeguardFrequency"] = nil
}
if userInfo.object(forKey: "synchronized") != nil {
// print("safeguard frequency move 1")
safeguard.safeguardFrequency = userInfo.integer(forKey: "synchronized")
userInfo.removeObject(forKey: "synchronized")
}
// END
}
/// App launch: boots Firebase, then migrates any legacy keychain /
/// UserDefaults entries to their new locations.
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
    // Configure Firebase APIs before anything else touches them.
    FirebaseApp.configure()
    // Legacy settings migration — do not remove before September 2019.
    rearrangeUserVariables()
    return true
}
/// Records when the user leaves the app so that, on return,
/// applicationWillEnterForeground can decide whether to re-authenticate.
func applicationWillResignActive(_ application: UIApplication) {
    leftApp_date = Date()
}
// Intentionally empty: nothing extra to persist when entering background.
func applicationDidEnterBackground(_ application: UIApplication) {
// Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
// If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}
/// Re-authentication policy when coming back from background:
/// away 30s–5min → re-present the unlock screen; away 5min or more → drop
/// back to the root screen entirely.
func applicationWillEnterForeground(_ application: UIApplication) {
    guard let lastDate = leftApp_date else { return }
    let awayInterval = Date().timeIntervalSince(lastDate)
    if awayInterval > 30 && awayInterval < 300 {
        // Medium absence: ask the user to unlock again on top of the current screen.
        let vc = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "homeVC") as! HomeViewController
        vc.isFirstConnexion = false
        window?.rootViewController?.presentedViewController?.childViewControllers.last?.present(vc, animated: false, completion: nil)
    } else if awayInterval >= 300 {
        // Long absence: tear the presented stack down to the root.
        window?.rootViewController?.dismiss(animated: false, completion: nil)
    }
}
func applicationDidBecomeActive(_ application: UIApplication) {
    // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
    // Periodic backup handling: when the configured frequency has elapsed since
    // the last safeguard, trigger a new one.
    // NOTE(review): .sync on a background-QoS queue still blocks the calling
    // (main) thread until the closure finishes — confirm this is intended.
    DispatchQueue(label: "safeguard", qos: .background).sync {
        let safeguard = SafeguardManager()
        // timeCorresp maps a frequency index to an optional interval (presumably
        // in days — TODO confirm), hence the double optional unwrap.
        if let frequency = safeguard.safeguardFrequency,
            let intervalNotSur = safeguard.timeCorresp[frequency],
            let interval = intervalNotSur,
            let lastSafeguard = safeguard.lastSafeguard?.toDate(dateFormat: "dd/MM/yyyy") {
            let intervalDuration = DateInterval(start: lastSafeguard, end: Date()).duration
            // 86400 seconds per day: compare elapsed days to the configured interval.
            if (intervalDuration / 86400) > interval {
                _ = safeguard.safeguard()
            }
        }
    }
}
func applicationWillTerminate(_ application: UIApplication) {
    // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
    // Intentionally empty.
}
}
<file_sep>/Sésame/Extensions/ViewController.extension.swift
//
// ViewControllerExtension.swift
// Sésame
//
// Created by <NAME> on 14/03/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
extension UIViewController {
    /// Installs the app's signature blue gradient as the bottom-most layer
    /// of the view controller's root view.
    func gradientBackground(){
        let gradient = CAGradientLayer()
        gradient.frame = self.view.bounds
        let topColor = UIColor(red: 0, green: 122/255, blue: 1, alpha: 1).cgColor
        let bottomColor = UIColor(red: 0, green: 166/255, blue: 1, alpha: 1).cgColor
        gradient.colors = [topColor, bottomColor]
        gradient.locations = [0.25, 1.0]
        self.view.layer.insertSublayer(gradient, at: 0)
    }
}
<file_sep>/Sésame/Models/Singletons.model.swift
//
// Singletons.model.swift
// Sésame
//
// Created by <NAME> on 01/08/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Foundation
/// Process-wide shared state holders.
class Singletons {
    // Single shared handle on the decrypted vault; nil when the Realm could not be opened.
    static var vault:Vault? = VaultManager().getVault()
    static func reloadVault(){
        // Deliberately release the old vault before re-opening, so the previous
        // Realm handle is dropped first.
        vault = nil
        vault = VaultManager().getVault()
    }
}
<file_sep>/Sésame/ViewControllers/EditSafeguard.ViewController.swift
//
// EditSynchronizationViewController.swift
// Sésame
//
// Created by <NAME> on 15/02/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
/// Screen for reviewing / adjusting an already-configured backup:
/// shows the frequency, last-backup date and linked e-mail, and can start
/// a manual backup.
class EditSafeguardViewController: UIViewController, UITextFieldDelegate {
    let safeguard = SafeguardManager()
    @IBOutlet weak var ui_frequencyInput: safetySelect!
    @IBOutlet weak var ui_newSynchronizationButton: safetyButton!
    @IBOutlet weak var ui_lastSynchroLabel: UILabel!
    @IBOutlet weak var ui_linkedEmailLabel: UILabel!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Populate the screen from the stored safeguard settings.
        displayFrequency()
        displayLastSafeguard()
        displayNewSynchronizationButton()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    // MARK: - Screen setup

    /// Shows the localized label for the stored backup frequency (index 0 when unset).
    func displayFrequency() {
        let frequencyIndex = safeguard.safeguardFrequency ?? 0
        ui_frequencyInput.text = NSLocalizedString("frequency\(frequencyIndex)", comment: "")
    }

    /// The "new synchronization" button is only offered while frequency is 0.
    func displayNewSynchronizationButton() {
        ui_newSynchronizationButton.isHidden = safeguard.safeguardFrequency != 0
    }

    /// Shows the date of the last backup and the linked e-mail, when known.
    func displayLastSafeguard() {
        if let lastSafeguard = safeguard.lastSafeguard {
            ui_lastSynchroLabel.text = "\(NSLocalizedString("lastSafeguardLabel", comment: ""))\(lastSafeguard)"
        }
        if let email = safeguard.userEmail {
            ui_linkedEmailLabel.text = "\(NSLocalizedString("linkedEmailLabel", comment: ""))\(email)"
        }
    }

    // MARK: - Backup

    /// Starts a backup, provided iCloud is enabled on this device.
    @IBAction func safeguard(_ sender: Any) {
        guard securityManager().isIcloudEnable() else {
            present(securityManager().IcloudNotEnableAlert, animated: true, completion: nil)
            return
        }
        let safeguardVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "safeguard")
        present(safeguardVC, animated: true, completion: nil)
    }

    @IBAction func screenTaped(_ sender: Any) {
        // Close the frequency picker when tapping outside of it.
        ui_frequencyInput.showSelect(really: false)
    }

    // MARK: - Segue

    @IBAction func unwindToEditSynchronization(segue:UIStoryboardSegue) {
        // Refresh the last-backup info after coming back from a backup run.
        displayLastSafeguard()
    }
}
<file_sep>/Sésame/ViewControllers/SetSafeguard.ViewController.swift
//
// SetSynchronizationViewController.swift
// Sésame
//
// Created by <NAME> on 13/02/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
/// First-time backup configuration: collects the e-mail address and backup
/// frequency, validates them, then launches the initial backup.
class SetSafeguardViewController: UIViewController, UITextFieldDelegate {
    @IBOutlet weak var ui_emailInput: safetyInput!
    @IBOutlet weak var ui_frequencyInput: safetySelect!
    @IBOutlet weak var cs_emailErrorLabel: NSLayoutConstraint!
    @IBOutlet weak var ui_scrollView: UIScrollView!
    lazy var screenHeight = self.ui_scrollView.frame.height
    var scroll:scrollManager?

    override func viewDidLoad() {
        super.viewDidLoad()
        ui_emailInput.delegate = self
        // Keeps the e-mail field visible above the keyboard.
        scroll = scrollManager(forInputs: [ui_emailInput], atDepth: .flat, inScrollView: ui_scrollView)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    // MARK: - First-responder handling

    fileprivate func leaveInputs() {
        ui_emailInput.resignFirstResponder()
        ui_frequencyInput.showSelect(really: false)
    }

    @IBAction func screenTaped(_ sender: UITapGestureRecognizer) {
        leaveInputs()
    }

    func textFieldShouldReturn(_ textField: UITextField) -> Bool {
        textField.resignFirstResponder()
        return false
    }

    /// Clears the e-mail error indicator as soon as editing starts again.
    func textFieldDidBeginEditing(_ textField: UITextField) {
        guard textField == ui_emailInput else { return }
        cs_emailErrorLabel.constant = 0
    }

    /// Strips surrounding whitespace when a value is pasted into the e-mail field.
    func textField(_ textField: UITextField, shouldChangeCharactersIn range: NSRange, replacementString string: String) -> Bool {
        let isPasteIntoEmail = textField == ui_emailInput && range.location == 0 && string.count > 1
        guard isPasteIntoEmail else { return true }
        ui_emailInput.text = string.trimmingCharacters(in: .whitespaces)
        return false
    }

    // MARK: - Safeguard

    /// Validates the form, then starts the first backup when both network
    /// and iCloud are available.
    @IBAction func safeguard() {
        leaveInputs()
        guard SafeguardManager.verif(newEmail: ui_emailInput.text),
            let frequencyStr = ui_frequencyInput.text,
            let frequencyInt:Int = SafeguardManager.FREQUENCIES.index(of: frequencyStr) else {
            // Invalid e-mail / frequency: surface the error label.
            cs_emailErrorLabel.constant = 12
            return
        }
        let security = securityManager()
        guard security.isInternetAvailable() else {
            present(security.InternetNotAvailableAlert, animated: true, completion: nil)
            return
        }
        guard security.isIcloudEnable() else {
            present(security.IcloudNotEnableAlert, animated: true, completion: nil)
            return
        }
        // Persist the settings, then run the backup screen.
        _ = SafeguardManager(newEmail: ui_emailInput.text, newFrequency: frequencyInt)
        let safeguardVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "safeguard")
        present(safeguardVC, animated: true, completion: nil)
    }
}
<file_sep>/Sésame/Style/Containers/safetyBoxBlue.class.swift
//
// safetyBox.class.swift
// Sésame
//
// Created by <NAME> on 04/04/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
/// Rounded white container with asymmetric corner radii and a drop shadow,
/// used for the tap-to-copy boxes on the card screens.
class safetyBox: UIView {
    /// Visibility helper driven by the card screens.
    /// NOTE(review): the getter reads `isHidden` while the setter only toggles
    /// `masksToBounds` (clipping). Kept as-is because callers pair it with
    /// zero-height constraints, but the asymmetry looks fragile — confirm.
    var hide: Bool {
        get {
            return self.isHidden
        }
        set {
            self.layer.masksToBounds = newValue
        }
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        self.layer.cornerRadius = 0
        // Fix: remove the previously inserted background layer first.
        // layoutSubviews can run many times, and the original stacked a new
        // "roundedBlock" sublayer on every pass without removing the old one.
        deleteOldShapeLayer()
        let path = UIBezierPath.getRoundedRect(topLeftRadius: 15, topRightRadius: 5, bottomRightRadius: 15, bottomLeftRadius: 5, selfBoundsRect: self.bounds)
        let shapeLayer = CAShapeLayer()
        shapeLayer.path = path.cgPath
        shapeLayer.fillColor = UIColor.white.cgColor
        shapeLayer.name = "roundedBlock"
        self.layer.backgroundColor = UIColor.clear.cgColor
        self.layer.insertSublayer(shapeLayer, at: 0)
        // Shadow follows the same rounded outline.
        self.layer.shadowPath = path.cgPath
        self.layer.shadowColor = UIColor.black.cgColor
        self.layer.shadowOpacity = 0.16
        self.layer.shadowRadius = 2
        self.layer.shadowOffset = CGSize(width: 1, height: 1)
    }

    /// Removes every previously inserted rounded-background layer.
    /// Fix: iterate a nil-safe collection instead of force-unwrapping
    /// `sublayers`, which crashed when the view had no sublayers yet.
    func deleteOldShapeLayer() {
        for layer in self.layer.sublayers ?? [] where layer.name == "roundedBlock" {
            layer.removeFromSuperlayer()
        }
    }

    /// Shadow "press" animation used by the tap-to-copy gesture; calls
    /// `finished` once the press-in animation completes.
    func touchAnim(finished:@escaping ()->Void) {
        UIView.animate(withDuration: 0.2, animations: {
            self.layer.shadowOffset = CGSize(width: 0, height: 0)
            self.layer.shadowOpacity = 0
        }) { (_) in
            finished()
        }
    }

    /// Restores the resting shadow after `touchAnim`.
    func resetTouchAnim() {
        UIView.animate(withDuration: 0.2) {
            self.layer.shadowOpacity = 0.16
            self.layer.shadowOffset = CGSize(width: 1, height: 1)
        }
    }
}
<file_sep>/Sésame/Style/Cells/SectionHeaderCell.class.swift
//
// SectionHeaderCell.swift
// Safety First
//
// Created by <NAME> on 29/10/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Reusable section-header cell: only exposes the title label, configured
/// by the table view's viewForHeaderInSection.
class SectionHeaderCell: UITableViewCell {
    @IBOutlet weak var headerLabel: UILabel!
    override func setSelected(_ selected: Bool, animated: Bool) {
        super.setSelected(selected, animated: animated)
        // Configure the view for the selected state
    }
}
<file_sep>/Sésame/ViewControllers/Home.ViewController.swift
//
// HomeViewController.swift
// Safety First
//
// Created by <NAME> on 16/10/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import MobileCoreServices
import LocalAuthentication
/// Unlock screen: authenticates with biometrics when available, otherwise
/// with the master password, before revealing the vault.
class HomeViewController: UIViewController, UITextFieldDelegate {
    let manager = VaultManager()
    @IBOutlet weak var ui_loginBox: safetyBox!
    @IBOutlet weak var ui_passwordInput: safetyInput!
    @IBOutlet weak var cs_errorHeight: NSLayoutConstraint!
    // false when this screen is re-presented for a re-lock after backgrounding
    // (set by the app delegate); changes how we leave the screen below.
    var isFirstConnexion = true
    // First connexion pushes the main navigation stack; a re-lock overlay
    // simply dismisses itself.
    fileprivate func leftHomeVC(){
        if isFirstConnexion {
            performSegue(withIdentifier: "goToNavigationController", sender: nil)
        } else {
            dismiss(animated: true, completion: nil)
        }
    }
    @IBAction func ui_accessToData(_ sender: Any) {
        accessToData()
    }
    // Password path: on a match, clear the field and fade the login box away;
    // otherwise show the error state on the input.
    func accessToData() {
        if let password = ui_passwordInput.text,
        password == <PASSWORD> {
            leftHomeVC()
            ui_passwordInput.text = nil
            UIView.transition(with: ui_loginBox, duration: 0.1, options: .transitionCrossDissolve, animations: {
                self.ui_loginBox.isHidden = true
            }, completion: nil)
        } else {
            ui_passwordInput.error = true
            cs_errorHeight.constant = 12
        }
    }
    // Biometric path: Touch ID / Face ID when available, falling back to the
    // password box on unavailability, cancellation or error.
    func logInWithBiometrics() {
        let context = LAContext()
        if context.canEvaluatePolicy(LAPolicy.deviceOwnerAuthenticationWithBiometrics, error: nil) {
            context.evaluatePolicy(LAPolicy.deviceOwnerAuthenticationWithBiometrics, localizedReason: NSLocalizedString("BiometricReason", comment: "") , reply: { (isOwner, Error) in
                // The reply closure runs off the main thread; hop back for UI work.
                DispatchQueue.main.async {
                    if let err = Error {
                        print(err.localizedDescription)
                        self.logInWithPassword()
                    } else if isOwner == true {
                        self.leftHomeVC()
                    } else {
                        self.logInWithPassword()
                    }
                }
            })
        } else {
            logInWithPassword()
        }
    }
    // Reveals the password box and focuses the input.
    func logInWithPassword() {
        UIView.transition(with: ui_loginBox, duration: 0.2, options: .transitionCrossDissolve, animations: {
            self.ui_loginBox.isHidden = false
        }) { (_) in
            self.ui_passwordInput.becomeFirstResponder()
        }
    }
    override func viewDidAppear(_ animated: Bool) {
        // First launch: wipe any leftover credentials and run the setup flow.
        if !manager.isSesameConfigured {
            manager.SesamePassword = nil
            manager.encryptionKey = nil
            performSegue(withIdentifier: "toSetPasswordScreen", sender: nil)
        }
    }
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        self.gradientBackground()
        ui_passwordInput.delegate = self
    }
    override func viewWillAppear(_ animated: Bool) {
        // Already configured: go straight to the biometric prompt.
        if manager.isSesameConfigured {
            logInWithBiometrics()
        }
    }
    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
    // Return key submits the password.
    func textFieldShouldReturn(_ textField: UITextField) -> Bool {
        textField.resignFirstResponder()
        accessToData()
        return false
    }
    @IBAction func unwindToHome(segue:UIStoryboardSegue) {}
}
<file_sep>/Sésame/Models/VaultManager.model.swift
//
// VaultManager.swift
// Safety First
//
// Created by <NAME> on 16/10/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import Foundation
import RealmSwift
import KeychainAccess
/// Persists the vault's secrets — master password and Realm encryption key in
/// the keychain, the "configured" flag in UserDefaults — and opens the
/// encrypted Realm database.
class VaultManager {
    private var _vault:Vault?
    // Local keychain only; not synchronized through iCloud Keychain.
    private let _keychain = Keychain(service: "com.romainp.Se-same").synchronizable(false)
    private let userInfo = UserDefaults.standard
    // Storage keys.
    fileprivate var _KEY_configuredApp = "SesameConfigured"
    fileprivate var _KEY_password = "<PASSWORD>"
    fileprivate var _KEY_encryptionKey = "SesameEncryptionKey"
    // true once the user has finished the initial setup.
    var isSesameConfigured: Bool {
        get {
            return userInfo.bool(forKey: _KEY_configuredApp)
        }
        set {
            userInfo.set((newValue ? 1 : 0), forKey: _KEY_configuredApp)
        }
    }
    // Master password, stored in the keychain.
    var SesamePassword:String? {
        get {
            return _keychain[_KEY_password]
        }
        set {
            _keychain[_KEY_password] = newValue
        }
    }
    // Realm encryption key (64 raw bytes), stored in the keychain.
    var encryptionKey:Data? {
        get {
            return _keychain[data: _KEY_encryptionKey]
        }
        set {
            _keychain[data: _KEY_encryptionKey] = newValue
        }
    }
    /// Opens the encrypted Realm and wraps it in a Vault; returns nil when
    /// opening fails (wrong key, corrupted file, ...).
    func getVault() -> Vault? {
        let _config = Realm.Configuration.init(encryptionKey: encryptionKey)
        do {
            let _realm = try Realm(configuration: _config)
            return Vault(withRealm: _realm)
        } catch {
            print("Main Vault init ERROR : \(error)")
        }
        return nil
    }
    /// Generates and stores a fresh 64-byte Realm encryption key.
    func generateKey() {
        // Generate the key (Realm expects exactly 64 random bytes).
        var key = Data(count: 64)
        let status = key.withUnsafeMutableBytes { bytes in
            SecRandomCopyBytes(kSecRandomDefault, 64, bytes)
        }
        // Fix: the original discarded SecRandomCopyBytes' status; on failure
        // the zero-initialized buffer would silently have become the vault's
        // encryption key (a predictable, all-zero key).
        guard status == errSecSuccess else {
            print("Key generation ERROR : SecRandomCopyBytes status \(status)")
            return
        }
        // Store the key in the keychain.
        encryptionKey = key
    }
    /// Tries each candidate key from the recovery backup against the Realm file
    /// and returns the first (still base64-encoded) key that opens it.
    func findTheGoodEncryptionKey(encryptedKeys:[recoveryManager.encryptionKey], forRealmFile realmFileURL: URL) -> String? {
        for key in encryptedKeys {
            let decriptedKey = decriptKeyFromDB(keyString: key.encryptionKey)
            let config = Realm.Configuration(fileURL: realmFileURL, encryptionKey: decriptedKey)
            if let _ = try? Realm(configuration: config) {
                return key.encryptionKey
            }
        }
        return nil
    }
    /// Decodes a recovered key and installs it as the active encryption key.
    func recoveryKey(encrytedKey:String) {
        let decriptedKey = decriptKeyFromDB(keyString: encrytedKey)
        encryptionKey = decriptedKey
    }
    /// Returns the current key as a base64 string (for the backup payload),
    /// or "erreur" when no key exists.
    func getKey() -> String {
        if let keyData = encryptionKey {
            let keyBase64String = keyData.base64EncodedString(options: Data.Base64EncodingOptions(rawValue: 0))
            return keyBase64String
        } else {
            return "erreur"
        }
    }
    // Despite the name, this is plain base64 decoding, not decryption.
    private func decriptKeyFromDB(keyString:String) -> Data? {
        return Data(base64Encoded: keyString, options: .init(rawValue: 0))
    }
    /// Location of the Realm database file on disk.
    func getRealmFile() -> URL? {
        return Realm.Configuration.defaultConfiguration.fileURL
    }
}
<file_sep>/Sésame/Style/Containers/settingsLine.class.swift
//
// settingsLine.class.swift
// Sésame
//
// Created by <NAME> on 16/04/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
/// Thin separator view rendered with the app's standard drop shadow.
class settingsLine : UIView {
    override func layoutSubviews() {
        super.layoutSubviews()
        applyShadow()
    }

    /// Standard app shadow: black at 16% opacity, blur radius 2, offset (1, 1).
    private func applyShadow() {
        layer.shadowColor = UIColor.black.cgColor
        layer.shadowOpacity = 0.16
        layer.shadowRadius = 2
        layer.shadowOffset = CGSize(width: 1, height: 1)
    }
}
<file_sep>/Sésame/Extensions/UIBezierPath.extension.swift
//
// UIBezierPath.extension.swift
// Sésame
//
// Created by <NAME> on 16/04/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
extension UIBezierPath {
    /// Builds a closed rectangular path whose four corners can each carry a
    /// different radius. The path is drawn clockwise, starting just after the
    /// top-left corner.
    static func getRoundedRect(topLeftRadius:CGFloat, topRightRadius:CGFloat, bottomRightRadius:CGFloat, bottomLeftRadius:CGFloat, selfBoundsRect:CGRect) -> UIBezierPath {
        let left = selfBoundsRect.minX
        let top = selfBoundsRect.minY
        let right = selfBoundsRect.maxX
        let bottom = selfBoundsRect.maxY
        let path = UIBezierPath()
        // Top edge, then the top-right arc.
        path.move(to: CGPoint(x: left + topLeftRadius, y: top))
        path.addLine(to: CGPoint(x: right - topRightRadius, y: top))
        path.addArc(withCenter: CGPoint(x: right - topRightRadius, y: top + topRightRadius), radius: topRightRadius, startAngle: CGFloat(3 * Double.pi / 2 ), endAngle: 0, clockwise: true)
        // Right edge, then the bottom-right arc.
        path.addLine(to: CGPoint(x: right, y: bottom - bottomRightRadius ))
        path.addArc(withCenter: CGPoint(x: right - bottomRightRadius, y: bottom - bottomRightRadius), radius: bottomRightRadius, startAngle: 0, endAngle: CGFloat( Double.pi / 2), clockwise: true)
        // Bottom edge, then the bottom-left arc.
        path.addLine(to: CGPoint(x: left + bottomLeftRadius, y: bottom ))
        path.addArc(withCenter: CGPoint(x: left + bottomLeftRadius, y: bottom - bottomLeftRadius), radius: bottomLeftRadius, startAngle: CGFloat( Double.pi / 2), endAngle: CGFloat( Double.pi), clockwise: true)
        // Left edge, then the top-left arc closes the outline.
        path.addLine(to: CGPoint(x: left, y: top + topLeftRadius ))
        path.addArc(withCenter: CGPoint(x: left + topLeftRadius, y: top + topLeftRadius), radius: topLeftRadius, startAngle: CGFloat( Double.pi), endAngle: CGFloat(3 * Double.pi / 2), clockwise: true)
        path.close()
        return path
    }
}
<file_sep>/Sésame/ViewControllers/Settings.ViewController.swift
//
// settingsViewController.swift
// Sésame
//
// Created by <NAME> on 20/12/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import Foundation
/// Settings hub: routes the backup entry either to the first-time setup
/// screen or to the edition screen depending on current configuration.
class settingsViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    /// No frequency configured yet → setup screen; otherwise → edition screen.
    @IBAction func safeguard(_ sender: Any) {
        let identifier = SafeguardManager().safeguardFrequency == nil ? "goSetSafeguard" : "goEditSafeguard"
        performSegue(withIdentifier: identifier, sender: nil)
    }

    /// Coming back from a completed backup run jumps straight to the edition screen.
    @IBAction func unwindToSettings(segue:UIStoryboardSegue) {
        guard segue.source is SafeguardViewController else { return }
        performSegue(withIdentifier: "goEditSafeguard", sender: nil)
    }
}
<file_sep>/Sésame/Style/Buttons/safetyButton.class.swift
//
// safetyButton.swift
// Safety First
//
// Created by <NAME> on 23/10/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Outlined button clipped to the app's asymmetric rounded corners
/// (5/15 alternating), with a tint-colored stroke border.
class safetyButton: UIButton {
    // Last border layer installed, so the next layout pass can swap it in place
    // instead of stacking new sublayers.
    var _borderLayer:CALayer?
    // Write-only in practice: setting installs/replaces the border sublayer.
    // NOTE(review): the getter returns a fresh empty CALayer, never _borderLayer —
    // looks unintentional; confirm no caller relies on the getter.
    var finalBorderLayer: CALayer {
        get {
            return CALayer()
        }
        set {
            if _borderLayer != nil {
                self.layer.replaceSublayer(_borderLayer!, with: newValue)
            } else {
                self.layer.addSublayer(newValue)
            }
            _borderLayer = newValue
        }
    }
    override func awakeFromNib() {
        super.awakeFromNib()
        // layer.borderWidth = 3/UIScreen.main.nativeScale
        // layer.borderColor = self.tintColor.cgColor
        // Padding around the title.
        self.contentEdgeInsets = UIEdgeInsets(top: 5, left: 20, bottom: 5, right: 20)
    }
    override func layoutSubviews() {
        super.layoutSubviews()
        self.layer.cornerRadius = 0
        let path = UIBezierPath.getRoundedRect(topLeftRadius: 5, topRightRadius: 15, bottomRightRadius: 5, bottomLeftRadius: 15, selfBoundsRect: self.bounds)
        let shapeLayer = CAShapeLayer()
        shapeLayer.path = path.cgPath
        // Stroke-only border in the tint color; line width compensates for screen scale.
        let borderLayer = CAShapeLayer()
        borderLayer.path = path.cgPath
        borderLayer.lineWidth = 6/UIScreen.main.nativeScale
        borderLayer.fillColor = UIColor.clear.cgColor
        borderLayer.strokeColor = self.tintColor.cgColor
        finalBorderLayer = borderLayer
        // Clip the button content to the rounded shape.
        self.layer.mask = shapeLayer
    }
}
<file_sep>/Sésame/Style/Inputs/safetyTextarea.class.swift
//
// safetyTextarea.swift
// Sésame
//
// Created by <NAME> on 04/04/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
/// Multiline text view styled with the app's asymmetric rounded corners
/// and a translucent blue background.
class safetyTextarea: UITextView {
    override func layoutSubviews() {
        super.layoutSubviews()
        // Base appearance.
        self.layer.cornerRadius = 0
        self.font = .systemFont(ofSize: 20)
        self.backgroundColor = UIColor(named: "BlueTransparent")
        self.contentInset = UIEdgeInsetsMake(0, 0, 0, 0)
        self.textContainerInset = UIEdgeInsetsMake(7, 7, 7, 7)
        // Clip to the signature rounded shape.
        let mask = CAShapeLayer()
        let outline = UIBezierPath.getRoundedRect(topLeftRadius: 15, topRightRadius: 5, bottomRightRadius: 15, bottomLeftRadius: 5, selfBoundsRect: self.bounds)
        mask.path = outline.cgPath
        self.layer.mask = mask
    }
}
<file_sep>/Sésame/ViewControllers/CompleteCard.ViewController.swift
//
// completeCardViewController.swift
// Safety First
//
// Created by <NAME> on 11/10/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import MobileCoreServices
/// Card detail screen: shows identifier / password / notes with tap-to-copy,
/// a reveal-password toggle, and a password-safety warning.
class completeCardViewController: UIViewController, UINavigationControllerDelegate {
    // MARK: Outlets
    @IBOutlet weak var ui_identifierLabel: UILabel!
    @IBOutlet weak var ui_identifierView: safetyBox!
    @IBOutlet weak var ui_passwordLabel: UILabel!
    @IBOutlet weak var ui_passwordView: safetyBox!
    @IBOutlet weak var ui_notesTextView: UITextView!
    @IBOutlet weak var ui_notesView: safetyBox!
    @IBOutlet weak var ui_eyeButton: UIButton!
    @IBOutlet weak var ui_notSafePasswordButton: UIButton!
    @IBOutlet weak var ui_copyPasswordButton: safetyButton!
    // MARK: Constraints
    @IBOutlet weak var cs_notesTextViewHeight: NSLayoutConstraint!
    @IBOutlet weak var cs_safetyAlertHeight: NSLayoutConstraint!
    // MARK: "Copied" notification banners
    @IBOutlet weak var ui_PasswordNotifView: UIView!
    @IBOutlet weak var ui_PasswordNotifLabel: UILabel!
    @IBOutlet weak var ui_IdentifierNotifView: UIView!
    @IBOutlet weak var ui_IdentifierNotifLabel: UILabel!
    // MARK: State
    // id of the card to display, set by the presenting screen.
    var id:Int?
    var card:card?
    // true while the clear-text password is shown.
    var passwordIsReadable = false
    // Propagated to MainViewController so it reloads after an edit.
    var wasEdited = false
    var safetyAlert:securityManager.securityAnswer?
    // MARK: Lifecycle
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        navigationController?.delegate = self
        ui_notesTextView.textContainerInset = UIEdgeInsetsMake(7, 0, 0, 0)
    }
    override func viewWillAppear(_ animated: Bool) {
        displayData()
        verifPasswordSafety()
    }
    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
    // MARK: Card data
    // Loads the card from the vault and shows/hides each section: an empty
    // field collapses its box with a zero-height constraint; a filled field
    // re-enables the box by deactivating that constraint.
    func displayData() {
        if let id = id {
            card = Singletons.vault?.getCard(withId: id)
            self.title = card?.name
            if card?.identifier != "" {
                ui_identifierLabel.text = card?.identifier
                ui_identifierView.hide = false
                if let cons = ui_identifierView.constraints.filter({ $0.firstAttribute == NSLayoutAttribute.height }).first {
                    cons.isActive = false
                }
            } else {
                ui_identifierView.hide = true
                ui_identifierView.addConstraint(NSLayoutConstraint(item: ui_identifierView, attribute: .height, relatedBy: .equal, toItem: nil, attribute: .notAnAttribute, multiplier: 1, constant: 0))
            }
            if card?.hiddenPassword != "" {
                ui_passwordLabel.text = card?.hiddenPassword
                ui_passwordView.hide = false
                if let cons = ui_passwordView.constraints.filter({ $0.firstAttribute == NSLayoutAttribute.height }).first {
                    cons.isActive = false
                }
                ui_copyPasswordButton.isHidden = false
            } else {
                ui_passwordView.hide = true
                ui_passwordView.addConstraint(NSLayoutConstraint(item: ui_passwordView, attribute: .height, relatedBy: .equal, toItem: nil, attribute: .notAnAttribute, multiplier: 1, constant: 0))
                ui_copyPasswordButton.isHidden = true
            }
            if card?.notes != "" {
                ui_notesTextView.text = """
\(card?.notes ?? "Aucunes")
"""
                ui_notesView.hide = false
                if let cons = ui_notesView.constraints.filter({ $0.firstAttribute == NSLayoutAttribute.height }).first {
                    cons.isActive = false
                }
                // Resize the notes text view to fit its content (capped at 200pt by the measuring rect).
                if let font = ui_notesTextView.font {
                    let insets = ui_notesTextView.textContainerInset
                    let height = NSString(string: card!.notes).boundingRect(with: CGSize(width: ui_notesTextView.frame.width, height: 200), options: .usesLineFragmentOrigin, attributes: [ NSAttributedStringKey.font : font ], context: nil).height + insets.top + insets.bottom
                    if cs_notesTextViewHeight.constant != height {
                        // Height changed: the rounded background must be rebuilt by the next layout pass.
                        ui_notesView.deleteOldShapeLayer()
                        cs_notesTextViewHeight.constant = height
                    }
                }
            } else {
                ui_notesView.hide = true
                ui_notesView.addConstraint(NSLayoutConstraint(item: ui_notesView, attribute: .height, relatedBy: .equal, toItem: nil, attribute: .notAnAttribute, multiplier: 1, constant: 0))
            }
        }
    }
    // MARK: Segues
    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        // Hand the card id over to the edit screen.
        if segue.identifier == "editCard",
        let nextScreen:editCardViewController = segue.destination as? editCardViewController {
            nextScreen.id = id
        }
    }
    // When going back after an edit, tell the main list to reload and to
    // scroll back to this card.
    func navigationController(_ navigationController: UINavigationController, willShow viewController: UIViewController, animated: Bool) {
        if wasEdited,
        let nextVC:MainViewController = viewController as? MainViewController {
            nextVC.needReload = true
            nextVC.cardToScrollToId = id
        }
    }
    @IBAction func unwindToCompleteCardVC(segue:UIStoryboardSegue) { }
    // MARK: Password safety
    // Runs the password through the security checker and shows the warning
    // row when it is flagged (and a password actually exists).
    func verifPasswordSafety() {
        if let password = card?.password {
            safetyAlert = securityManager().testPassword(password: <PASSWORD>)
            if safetyAlert != nil {
                cs_safetyAlertHeight.constant = (safetyAlert!.result && card?.hiddenPassword.count != 0) ? 12 : 0
            }
        }
    }
    // Explains why the password was flagged, using the checker's localized description key.
    @IBAction func whyPasswordIsNotSafe(_ sender: Any) {
        if safetyAlert != nil {
            let alert = UIAlertController(title: NSLocalizedString("safetyAlert_title", comment: ""), message: NSLocalizedString(safetyAlert!.description! + "_msg", comment: ""), preferredStyle: .alert)
            alert.addAction(UIAlertAction(title: "Ok", style: .default, handler: nil))
            present(alert, animated: true, completion: nil)
        }
    }
    // MARK: Password visibility
    @IBAction func changePasswordView(_ sender: Any) {
        if passwordIsReadable {
            hidePassword()
        } else {
            seePassword()
        }
    }
    // Shows the clear-text password and switches the eye icon to its "on" state.
    func seePassword() {
        ui_passwordLabel.text = "\(card?.password ?? "")"
        passwordIsReadable = true
        if let img = UIImage(named: "eye-blue") {
            ui_eyeButton.setImage(img, for: UIControlState.normal)
        }
    }
    // Restores the masked password and the "off" eye icon.
    func hidePassword() {
        ui_passwordLabel.text = "\(card?.hiddenPassword ?? "")"
        passwordIsReadable = false
        if let img = UIImage(named: "eye-black") {
            ui_eyeButton.setImage(img, for: UIControlState.normal)
        }
    }
    @IBAction func copyPassword(_ sender: Any) {
        copyPassword()
    }
    // MARK: Notification banners
    @IBAction func hideNotification(_ sender: Any) {
        ui_passwordView.resetTouchAnim()
        UIView.animate(withDuration: 0.3, animations: {
            self.ui_PasswordNotifView.alpha = 0
        })
    }
    @IBAction func hideIdentifierNotification(_ sender: Any) {
        ui_identifierView.resetTouchAnim()
        UIView.animate(withDuration: 0.3, animations: {
            self.ui_IdentifierNotifView.alpha = 0
        })
    }
    // MARK: Copy actions
    func copyIdentifier() {
        UIPasteboard.general.setItems([[kUTTypeUTF8PlainText as String : card?.identifier ?? ""]], options: [:])
        ui_IdentifierNotifLabel.text = card?.name
        UIView.animate(withDuration: 0.3, animations: {
            self.ui_IdentifierNotifView.alpha = 1
        })
    }
    // The pasteboard entry self-destructs after one minute.
    func copyPassword() {
        UIPasteboard.general.setItems([[kUTTypeUTF8PlainText as String : card?.password ?? ""]], options: [UIPasteboardOption.expirationDate : Date(timeIntervalSinceNow: 60)])
        ui_PasswordNotifLabel.text = card?.name
        UIView.animate(withDuration: 0.3, animations: {
            self.ui_PasswordNotifView.alpha = 1
        })
    }
    // MARK: Touch-to-copy (press animation first, then copy)
    @IBAction func touchToCopyIdentifier(_ sender: Any) {
        ui_identifierView.touchAnim {
            self.copyIdentifier()
        }
    }
    @IBAction func touchToCopyPassword(_ sender: Any) {
        ui_passwordView.touchAnim {
            self.copyPassword()
        }
    }
}
<file_sep>/Sésame/ViewControllers/Main.ViewController.swift
//
// ViewController.swift
// Safety First
//
// Created by <NAME> on 11/10/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import MobileCoreServices
class MainViewController: UIViewController, UITableViewDataSource, UITableViewDelegate, UISearchBarDelegate {
var needReload = false
var tableIsEmpty = true
var cardToScrollToId:Int? = nil
@IBOutlet weak var ui_cardsList: UITableView!
@IBOutlet weak var ui_resultsCardsList: UITableView!
@IBOutlet weak var ui_searchBar: SafetySearchBar!
@IBOutlet weak var ui_notifView: UIView!
@IBOutlet weak var ui_notifAppNameLabel: UILabel!
@IBOutlet weak var constraint_resultListHeight: NSLayoutConstraint!
@IBOutlet weak var constraint_cardsListHeight: NSLayoutConstraint!
// classique
override func viewDidLoad() {
    super.viewDidLoad()
    // Do any additional setup after loading the view, typically from a nib.
    // Main list: self-sizing rows under fixed-height section headers.
    ui_cardsList.dataSource = self
    ui_cardsList.delegate = self
    ui_cardsList.rowHeight = UITableViewAutomaticDimension
    ui_cardsList.estimatedRowHeight = 64
    ui_cardsList.sectionHeaderHeight = 89
    // Search UI: the results list reuses this controller as data source/delegate.
    ui_searchBar.delegate = self
    ui_resultsCardsList.delegate = self
    ui_resultsCardsList.dataSource = self
}
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    // Clear the selection highlight left over when navigating back.
    guard let selected = ui_cardsList.indexPathForSelectedRow else { return }
    ui_cardsList.deselectRow(at: selected, animated: true)
}
/// Scrolls the main list back to the remembered card (if any), then always
/// clears the remembered id.
fileprivate func scrollToGoodPosition() {
    defer { cardToScrollToId = nil }
    guard let id = cardToScrollToId,
        let indexPath = Singletons.vault?.getIndexPathOfCard(withId: id) else { return }
    ui_cardsList.scrollToRow(at: indexPath, at: .middle, animated: false)
}
override func viewDidAppear(_ animated: Bool) {
    // Coming back from a search result: clear the selection and close the
    // search UI, repositioning the main list.
    if let selected = ui_resultsCardsList.indexPathForSelectedRow {
        ui_resultsCardsList.deselectRow(at: selected, animated: true)
        leftResearch()
    }
    // A card was added/edited elsewhere: refresh both lists and restore scroll.
    guard needReload else { return }
    needReload = false
    reloadTable()
    doResearch()
    scrollToGoodPosition()
}
@IBAction func unwindToInitial(segue:UIStoryboardSegue) {
    // "saveData" means a brand-new card was just created.
    guard segue.identifier == "saveData" else { return }
    reloadTable()
    scrollToGoodPosition()
}
// Reloads the main list and forces layout immediately so that a subsequent
// scrollToRow targets up-to-date row positions.
func reloadTable() {
    ui_cardsList.reloadData()
    ui_cardsList.layoutIfNeeded()
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    guard segue.identifier == "segueToCompleteCard",
        let nextScreen:completeCardViewController = segue.destination as? completeCardViewController else { return }
    // The selection can come either from the main list or from the search results.
    if let indexPath = ui_cardsList.indexPathForSelectedRow {
        nextScreen.id = Singletons.vault?.getCardIDInSection(atIndex: indexPath.row, inSection: indexPath.section)
    }
    if let indexPath = ui_resultsCardsList.indexPathForSelectedRow,
        let id = Singletons.vault?.getCardID(atIndex: indexPath.row, forResearch: true) {
        // Remember the card so the main list can scroll back to it when the search closes.
        cardToScrollToId = id
        nextScreen.id = id
    }
}
override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}
// SearchBar
/// Entering search mode: show Cancel and animate the swap between the main
/// list and the results list (driven by the two height constraints).
func searchBarShouldBeginEditing(_ searchBar: UISearchBar) -> Bool {
    ui_searchBar.showsCancelButton = true
    constraint_cardsListHeight.isActive = true
    constraint_resultListHeight?.isActive = false
    let relayout = { self.view.layoutIfNeeded() }
    UIView.animate(withDuration: 0.2, delay: 0, options: .curveEaseIn, animations: relayout, completion: nil)
    return true
}
// Cancel closes the search UI and restores the main list.
func searchBarCancelButtonClicked(_ searchBar: UISearchBar) {
    leftResearch()
}
// Leaves search mode: swaps the height constraints back (animated), restores
// the main list's scroll position once the animation completes, then resets
// the search bar and clears the result set.
private func leftResearch(){
    constraint_resultListHeight?.isActive = true
    constraint_cardsListHeight.isActive = false
    UIView.animate(withDuration: 0.2, delay: 0, options: .curveEaseIn, animations: {
        self.view.layoutIfNeeded()
    }) { (_) in
        self.scrollToGoodPosition()
    }
    ui_searchBar.text = ""
    ui_searchBar.showsCancelButton = false
    ui_searchBar.resignFirstResponder()
    clearSearchSection()
}
// Live search: re-run the query on every keystroke.
func searchBar(_ searchBar: UISearchBar, textDidChange searchText: String) {
    doResearch()
}
/// Runs the current search text against the vault; an empty query clears
/// the result set instead.
func doResearch() {
    guard let searchText = ui_searchBar.text else { return }
    if searchText.isEmpty {
        clearSearchSection()
    } else {
        Singletons.vault?.orderCardsList(forResearchedText: searchText)
        ui_resultsCardsList.reloadData()
    }
}
// The keyboard's Search key just dismisses the keyboard (results are already live).
func searchBarSearchButtonClicked(_ searchBar: UISearchBar) {
    ui_searchBar.resignFirstResponder()
}
// Empties the search result set and refreshes the results list.
func clearSearchSection() {
    Singletons.vault?.resetResearchResultsList()
    ui_resultsCardsList.reloadData()
}
// copyPassword
/// Copy button inside a list cell: resolves which cell (main list or search
/// results) owns the button, copies that card's password with a 60s
/// pasteboard expiration, and shows the "copied" banner.
@IBAction func copyPassword(_ sender: UIButton, forEvent event: UIEvent) {
    // The button sits inside a cellShadowUIView nested two levels below the cell.
    guard let shadowView = sender.superview as? cellShadowUIView,
        let cell = shadowView.superview?.superview as? SafetyFirstTableViewCell else { return }
    var password:String?
    if let indexPath = ui_cardsList.indexPath(for: cell) {
        password = Singletons.vault?.getCardInSection(atIndex: indexPath.row, inSection: indexPath.section)?.password
    }
    if let indexPath = ui_resultsCardsList.indexPath(for: cell) {
        ui_searchBar.resignFirstResponder()
        password = Singletons.vault?.getCard(atIndex: indexPath.row, forResearch: true)?.password
    }
    guard let passwordToCopy = password else { return }
    // The pasteboard entry self-destructs after one minute.
    UIPasteboard.general.setItems([[kUTTypeUTF8PlainText as String : passwordToCopy]], options: [UIPasteboardOption.expirationDate : Date(timeIntervalSinceNow: 60)])
    ui_notifAppNameLabel.text = cell._name.text
    UIView.animate(withDuration: 0.3, animations: {
        self.ui_notifView.alpha = 1
    })
}
// Fades the "copied" banner out.
@IBAction func hideNotification(_ sender: Any) {
    UIView.animate(withDuration: 0.3, animations: {
        self.ui_notifView.alpha = 0
    })
}
// Gestion tableView
/// Row counts for both lists: the main list shows two intro rows while the
/// vault is empty; the search list shows one placeholder row when there is
/// no match.
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
    if tableView == ui_resultsCardsList {
        return (Singletons.vault?.isEmpty(forResearch: true) ?? true) ? 1 : Singletons.vault!.countCards(forResearch: true)
    }
    if tableView == ui_cardsList {
        if Singletons.vault?.isEmpty() ?? true {
            // Empty vault: two welcome/intro rows.
            return 2
        }
        return Singletons.vault?.countCardsInSection(inSection: section) ?? 0
    }
    return 0
}
/// Sections: one per vault section (or a single intro section when the vault
/// is empty); the search-results table always has exactly one section.
func numberOfSections(in tableView: UITableView) -> Int {
    switch tableView {
    case self.ui_cardsList:
        return (Singletons.vault?.isEmpty() ?? true) ? 1 : Singletons.vault?.countSection() ?? 1
    case self.ui_resultsCardsList:
        return 1
    default:
        return 0
    }
}
// Section title from the vault; "Welcome !" fallback when no vault is loaded.
func tableView(_ tableView: UITableView, titleForHeaderInSection section: Int) -> String? {
    return Singletons.vault?.getSectionName(forSection: section) ?? "Welcome !"
}
/// Custom header built from the "sectionHeaderCell" prototype.
/// NOTE(review): the force cast assumes the identifier is registered on
/// ui_cardsList in the storyboard — confirm; it will crash otherwise.
func tableView(_ tableView: UITableView, viewForHeaderInSection section: Int) -> UIView? {
    let headerCell = ui_cardsList.dequeueReusableCell(withIdentifier: "sectionHeaderCell") as! SectionHeaderCell
    headerCell.headerLabel.text = Singletons.vault?.getSectionName(forSection: section) ?? NSLocalizedString("intro0", comment: "")
    return headerCell.contentView
}
/// Builds cells for both tables. Three cases: a real card cell, the two
/// intro/help rows (vault empty), or the "no result" placeholder (search empty).
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
    if ( !(Singletons.vault?.isEmpty() ?? true) && tableView == self.ui_cardsList) || ( !(Singletons.vault?.isEmpty(forResearch: true) ?? true) && tableView == self.ui_resultsCardsList ) {
        // NOTE(review): the card cell is always dequeued from ui_cardsList,
        // even when building rows for ui_resultsCardsList — confirm the
        // "safetyFirst" prototype is intentionally shared this way.
        let cell:SafetyFirstTableViewCell = ui_cardsList.dequeueReusableCell(withIdentifier: "safetyFirst") as! SafetyFirstTableViewCell
        var card:card?
        switch tableView {
        case self.ui_cardsList:
            card = Singletons.vault?.getCardInSection(atIndex: indexPath.row, inSection: indexPath.section)
        case self.ui_resultsCardsList:
            card = Singletons.vault?.getCard(atIndex: indexPath.row, forResearch: true)
        default:
            break
        }
        cell._name.text = card?.name
        cell._identifier.text = card?.identifier
        // Collapse the identifier line (and give the name the full height)
        // when the card has no identifier.
        if let identifierText = card?.identifier,
            identifierText != "" {
            cell.cs_identifierHeight.constant = 22
            cell.cs_nameHeight.constant = 33
        } else {
            cell.cs_identifierHeight.constant = 0
            cell.cs_nameHeight.constant = 55
        }
        // Hide the copy button for cards without a password.
        cell.cs_copyButtonWidth.constant = card?.password == "" ? 0 : 69
        return cell
    } else if tableView == self.ui_cardsList {
        // No saved cards yet: show the two introduction/help rows.
        let cell:helpCell = ui_cardsList.dequeueReusableCell(withIdentifier: "helpCell") as! helpCell
        if indexPath.row == 0 {
            cell.helpTextLabel.text = NSLocalizedString("intro1", comment: "")
        } else {
            cell.helpTextLabel.text = NSLocalizedString("intro2", comment: "")
        }
        return cell
    } else {
        // Empty search: show the "no result" placeholder row.
        let cell:UITableViewCell = ui_resultsCardsList.dequeueReusableCell(withIdentifier: "noResultCell")!
        return cell
    }
}
// Supprimer une carte
/// Swipe-to-delete: removes the card from the vault, then removes the row —
/// or the whole section when it becomes empty, or reloads the full table when
/// the last section disappears (so the intro rows can come back).
func tableView(_ tableView: UITableView, commit editingStyle: UITableViewCellEditingStyle, forRowAt indexPath: IndexPath) {
    if tableView == self.ui_cardsList {
        Singletons.vault?.deleteCard(atIndex: indexPath.row, inSection: indexPath.section)
        if ui_cardsList.numberOfRows(inSection: indexPath.section) <= 1 {
            if ui_cardsList.numberOfSections == 1 {
                reloadTable()
            } else {
                ui_cardsList.deleteSections([indexPath.section], with: UITableViewRowAnimation.right)
            }
        } else {
            ui_cardsList.deleteRows(at: [indexPath], with: .right)
        }
    }
}
// Possibilité de supprimer ou non une carte
/// Rows are editable (deletable) only in the main cards list, and only when
/// the vault actually contains cards (never the intro/help rows).
func tableView(_ tableView: UITableView, canEditRowAt indexPath: IndexPath) -> Bool {
    let vaultHasCards = !(Singletons.vault?.isEmpty() ?? true)
    return tableView == self.ui_cardsList && vaultHasCards
}
}
<file_sep>/Sésame/Style/Inputs/safetySelect.class.swift
//
// safetySelect.class.swift
// Sésame
//
// Created by <NAME> on 04/04/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
// Select
/// A read-only text input that behaves like a <select>: tapping it unfolds a
/// small dropdown table (safetySelectTable) of safeguard frequencies below
/// the field instead of showing the keyboard.
class safetySelect: safetyInput, UITextFieldDelegate, UITableViewDelegate {

    private var ui_selectTable : safetySelectTable!
    private var cs_tableHeight : NSLayoutConstraint!

    // Init
    override init(frame: CGRect) {
        super.init(frame: frame)
        initializer()
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        initializer()
    }

    /// Shared setup: default frequency value + self delegation so we can
    /// intercept editing.
    func initializer(){
        if self.text == "default" {
            self.text = SafeguardManager.FREQUENCIES[2]
        }
        self.delegate = self
    }

    // Delegate textField
    /// Never let the keyboard appear; unfold the options table instead.
    func textFieldShouldBeginEditing(_ textField: UITextField) -> Bool {
        showSelect(really: true)
        return false
    }

    // Delegate tableview
    /// Adopt the tapped frequency, propagate it to the edit screen (when
    /// present), and fold the options table back.
    func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
        let cell = tableView.cellForRow(at: indexPath) as! safetySelectCell
        if let frequency = cell.frequencyLabel.text {
            self.text = frequency
            if let vc = window?.rootViewController?.presentedViewController?.childViewControllers.last as? EditSafeguardViewController {
                vc.safeguard.safeguardFrequency = SafeguardManager.FREQUENCIES.index(of: frequency)
                vc.displayNewSynchronizationButton()
            }
        }
        showSelect(really: false)
    }

    // layout
    override func layoutSubviews() {
        super.layoutSubviews()
        // FIX: layoutSubviews can run many times; previously a fresh
        // safetySelectTable and a fresh set of constraints were created and
        // added on every pass, piling up duplicate subviews and constraints.
        // Build the dropdown exactly once.
        guard ui_selectTable == nil else { return }
        ui_selectTable = safetySelectTable()
        ui_selectTable.delegate = self
        self.superview?.insertSubview(ui_selectTable, belowSubview: self)
        let top = NSLayoutConstraint(item: ui_selectTable, attribute: .top, relatedBy: .equal, toItem: self, attribute: .bottom, multiplier: 1, constant: 0)
        let left = NSLayoutConstraint(item: ui_selectTable, attribute: .leading, relatedBy: .equal, toItem: self, attribute: .leading, multiplier: 1, constant: 0)
        let right = NSLayoutConstraint(item: ui_selectTable, attribute: .trailing, relatedBy: .equal, toItem: self, attribute: .trailing, multiplier: 1, constant: 0)
        cs_tableHeight = NSLayoutConstraint(item: ui_selectTable, attribute: .height, relatedBy: .equal, toItem: nil, attribute: .notAnAttribute, multiplier: 1, constant: 0 )
        self.superview?.addConstraints([top,left,right,cs_tableHeight])
    }

    /// Animates the options table open (160 pt) or closed (0 pt) and tints
    /// the field while it is "focused".
    func showSelect(really : Bool) {
        self.backgroundColor = really ? UIColor(named: "Blue20") : UIColor(named: "BlueTransparent")
        cs_tableHeight.constant = really ? 160 : 0
        UIView.animate(withDuration: 0.3) {
            self.superview?.layoutIfNeeded()
        }
    }
}
/// The dropdown table that safetySelect unfolds; it acts as its own data
/// source and lists SafeguardManager.FREQUENCIES.
class safetySelectTable : UITableView, UITableViewDataSource {

    // Init
    override init(frame: CGRect, style: UITableViewStyle) {
        super.init(frame: frame, style: style)
        self.dataSource = self
        self.rowHeight = 40
        self.translatesAutoresizingMaskIntoConstraints = false
        self.separatorStyle = UITableViewCellSeparatorStyle.none
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    // DataSource
    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return SafeguardManager.FREQUENCIES.count
    }

    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        // Cells are created fresh (nil reuse identifier): the list is tiny.
        let cell = safetySelectCell(style: .default, reuseIdentifier: nil)
        cell.frequencyLabel.text = SafeguardManager.FREQUENCIES[indexPath.row]
        return cell
    }

    // Style
    override func layoutSubviews() {
        super.layoutSubviews()
        self.backgroundColor = UIColor.white
        self.layer.cornerRadius = 5
    }
}
/// One row of the frequency dropdown: a single label, built entirely in code
/// with a tinted selection background.
class safetySelectCell : UITableViewCell {

    var frequencyLabel : UILabel!

    override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
        super.init(style: style, reuseIdentifier: reuseIdentifier)
        frequencyLabel = UILabel()
        // Highlighted rows get the "Blue20" tint.
        self.selectedBackgroundView = {
            let bgview = UIView(frame: CGRect(x: 0, y: 0, width: self.frame.size.width, height: self.frame.size.height))
            bgview.backgroundColor = UIColor(named: "Blue20")
            return bgview
        }()
        self.backgroundColor = UIColor(named: "BlueTransparent")
        frequencyLabel.translatesAutoresizingMaskIntoConstraints = false
        frequencyLabel.font = .systemFont(ofSize: 20)
        frequencyLabel.textColor = UIColor(red: 0, green: 0, blue: 0, alpha: 0.54)
        contentView.addSubview(frequencyLabel)
        // Pin the label: 4 pt top/bottom, 21 pt leading.
        let top = NSLayoutConstraint(item: frequencyLabel, attribute: .top, relatedBy: .equal, toItem: contentView, attribute: .top, multiplier: 1, constant: 4)
        let left = NSLayoutConstraint(item: frequencyLabel, attribute: .leading, relatedBy: .equal, toItem: contentView, attribute: .leading, multiplier: 1, constant: 21)
        let bottom = NSLayoutConstraint(item: frequencyLabel, attribute: .bottom, relatedBy: .equal, toItem: contentView, attribute: .bottom, multiplier: 1, constant: 4)
        contentView.addConstraints([top,left,bottom])
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}
<file_sep>/Sésame/ViewControllers/SetPassword.ViewController.swift
//
// SetPasswordViewController.swift
// Safety First
//
// Created by <NAME> on 16/10/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import Foundation
/// First-run screen where the user chooses the master password.
class SetPasswordViewController: UIViewController, UITextFieldDelegate {

    let manager = VaultManager()

    @IBOutlet weak var ui_passwordInput: safetyInput!
    @IBOutlet weak var cs_passwordErrorHeight: NSLayoutConstraint!
    @IBOutlet weak var ui_confirmPasswordInput: safetyInput!
    @IBOutlet weak var cs_confirmPasswordErrorHeight: NSLayoutConstraint!
    @IBOutlet weak var ui_scrollView: UIScrollView!

    /// Validates both fields and, on success, stores the password in the
    /// keychain, generates the encryption key, flags Sésame as configured and
    /// moves on to the main navigation controller.
    @IBAction func savePassword(_ sender: Any) {
        if let password = ui_passwordInput.text,
            verifData(nil) {
            // Store the password in the keychain.
            // FIX: this assignment previously read "= <PASSWORD>" (an
            // unresolved placeholder); it now stores the value the user typed.
            manager.SesamePassword = password
            // Generate the encryption key.
            VaultManager().generateKey()
            // Mark Sésame as configured.
            manager.isSesameConfigured = true
            // Launch the segue.
            performSegue(withIdentifier: "goToNavigationControllerFromSettings", sender: nil)
        }
    }

    lazy var screenHeight = self.ui_scrollView.frame.height
    var scroll:scrollManager?

    override func viewDidLoad() {
        super.viewDidLoad()
        self.gradientBackground()
        ui_passwordInput.delegate = self
        ui_confirmPasswordInput.delegate = self
        // Keeps the focused input visible above the keyboard.
        scroll = scrollManager(forInputs: [ui_passwordInput,ui_confirmPasswordInput], atDepth: .flat, inScrollView: ui_scrollView, withAdditionalY: ui_passwordInput.superview!.frame.minY )
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // TextField delegate
    /// Return key jumps from the password field to the confirmation field,
    /// then dismisses the keyboard.
    func textFieldShouldReturn(_ textField: UITextField) -> Bool {
        if textField == ui_passwordInput {
            ui_confirmPasswordInput.becomeFirstResponder()
        } else {
            textField.resignFirstResponder()
        }
        return false
    }

    func textFieldShouldEndEditing(_ textField: UITextField) -> Bool {
        _ = verifData(textField)
        return true
    }

    // Data validation
    /// Checks password length (>= 8 once typing has started) and that both
    /// fields match; toggles the inline error labels accordingly.
    /// Returns true when everything is valid.
    func verifData(_ textField : UITextField?) -> Bool {
        var answer = true
        if let password = ui_passwordInput.text,
            password.count < 8,
            password.count > 0 {
            answer = false
            cs_passwordErrorHeight.constant = 12
            ui_passwordInput.error = true
        } else {
            cs_passwordErrorHeight.constant = 0
            ui_passwordInput.error = false
        }
        if ui_passwordInput.text != ui_confirmPasswordInput.text {
            // Only surface the mismatch once the user has left the first field.
            cs_confirmPasswordErrorHeight.constant = textField != ui_passwordInput ? 12 : 0
            ui_confirmPasswordInput.error = textField != ui_passwordInput
            answer = false
        } else {
            cs_confirmPasswordErrorHeight.constant = 0
            ui_confirmPasswordInput.error = false
        }
        return answer
    }
}
<file_sep>/Sésame/Models/RecoveryManager.model.swift
//
// RecoveryManager.swift
// Sésame
//
// Created by <NAME> on 24/02/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import Alamofire
import MessageUI
import Foundation
/// Handles account recovery: verifies the user's e-mail, sends a 6-digit
/// code, fetches the encrypted vault keys from the server and restores the
/// last iCloud safeguard locally.
/// NOTE(review): all three endpoints are plain http:// — e-mail, secret code
/// and encrypted keys travel unencrypted; these should migrate to https.
class recoveryManager {

    struct encryptionKey: Codable {
        let encryptionKey:String
    }

    let fileManager = FileManager.default
    private let _email:String?
    private var _encryptionKeys:[encryptionKey] = []
    private let URL_VERIF_EMAIL:URLConvertible = "http://romainpenchenat.free.fr/sesame/verifEmail.php"
    private let URL_SEND_CODE:URLConvertible = "http://romainpenchenat.free.fr/sesame/sendCode.php"
    private let URL_GET_KEY:URLConvertible = "http://romainpenchenat.free.fr/sesameV2/getEncryptionKey.php"

    /// The e-mail this recovery session is bound to ("" when none was given).
    var email:String {
        get {
            return _email ?? ""
        }
    }

    init(email:String?) {
        _email = email
    }

    /// Asks the server whether the e-mail is known; the server answers "1"
    /// for a known address.
    func verifEmail(completionHandler:@escaping (Bool) -> Void) {
        if let email = _email,
            email != "" {
            // Check whether the e-mail exists in our database.
            Alamofire.request(URL_VERIF_EMAIL, method: .post, parameters: ["email":email], encoding: JSONEncoding.default, headers: nil).responseString(completionHandler: { (response) in
                if let string = response.result.value,
                    string == "1" {
                    completionHandler(true)
                } else {
                    completionHandler(false)
                }
            })
        } else {
            completionHandler(false)
        }
    }

    /// Generates a fresh secret code and asks the server to e-mail it; an
    /// empty response body means success.
    func sendCode(completionHandler:@escaping (Bool) -> Void) {
        let params:[String:String] = ["secretCode":generateSecretCode(),"email":email]
        Alamofire.request(URL_SEND_CODE, method: .post, parameters: params, encoding: JSONEncoding.default, headers: nil).responseString { (response) in
            if let response = response.result.value,
                response == "" {
                completionHandler(true)
            } else {
                completionHandler(false)
            }
        }
    }

    /// Returns a zero-padded 6-digit code.
    /// FIX: arc4random_uniform's bound is exclusive, so the previous value
    /// 999999 could never produce the code "999999"; 1_000_000 covers the
    /// full 000000–999999 range uniformly.
    private func generateSecretCode() -> String {
        var secretCode:String = String(arc4random_uniform(1_000_000))
        while secretCode.count < 6 {
            secretCode = "0\(secretCode)"
        }
        return secretCode
    }

    /// Exchanges (e-mail, 6-digit code) for the list of encrypted vault keys.
    func getEncryptionKey(secretCode:String?, completionHandler:@escaping (Bool) -> Void) {
        if let code = secretCode,
            code.count == 6 {
            let params:[String:String] = ["email":email,"secretCode":code]
            Alamofire.request(URL_GET_KEY, method: .post, parameters: params, encoding: JSONEncoding.default, headers: nil).responseJSON { (response) in
                if let data = response.data,
                    let keys = try? JSONDecoder().decode([encryptionKey].self, from: data) {
                    self._encryptionKeys = keys
                    completionHandler(true)
                } else {
                    completionHandler(false)
                }
            }
        } else {
            completionHandler(false)
        }
    }

    /// Restores the latest iCloud safeguard as the local Realm file.
    /// Returns 0 on success, 1 when no fetched key decrypts the safeguard,
    /// 2 when the safeguard or the local Realm path is unavailable.
    func recoveryData() -> Int {
        // Fetch the backup file from iCloud.
        let icloud = iCloudManager()
        let URLwDate_safeguardOniCloud = icloud.getLastRealmSafeguard()
        // Check the required URLs.
        if let URL_safeguardOniCloud = URLwDate_safeguardOniCloud.url,
            let URL_realmSourceFile = VaultManager().getRealmFile() {
            if let encryptionKey = VaultManager().findTheGoodEncryptionKey(encryptedKeys: _encryptionKeys, forRealmFile: URL_safeguardOniCloud) {
                // Install the backup as Realm's source file.
                do {
                    try fileManager.removeItem(at: URL_realmSourceFile)
                } catch {
                    print("Remove current realm source file ERROR : \(error)")
                }
                do {
                    try fileManager.copyItem(at: URL_safeguardOniCloud, to: URL_realmSourceFile)
                } catch {
                    print("Copy new realm source file ERROR : \(error)")
                }
                // Save the encryption key.
                VaultManager().recoveryKey(encrytedKey: encryptionKey)
                // Save the e-mail + frequency used for synchronization.
                let safeguard = SafeguardManager(newEmail: email, newFrequency: 2)
                // Update the date of the last safeguard.
                let format = DateFormatter()
                format.dateFormat = "dd/MM/yyyy"
                safeguard.lastSafeguard = format.string(from: Date())
                // Reload the vault from the restored file.
                Singletons.reloadVault()
                // Clean up the helper files created on iCloud.
                icloud.cleanRealmManagementFiles()
                return 0
            } else { return 1 }
        } else { return 2 }
    }
}
<file_sep>/Sésame/Style/Cells/helpCell.class.swift
//
// helpCell.swift
// Sésame
//
// Created by <NAME> on 28/12/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Intro/help row shown while the vault has no cards; styled with a soft
/// centered drop shadow.
class helpCell: UITableViewCell {

    @IBOutlet weak var helpTextLabel: UILabel!

    override func layoutSubviews() {
        super.layoutSubviews()
        // Soft shadow around the whole cell (zero offset = even on all sides).
        self.layer.shadowColor = UIColor.black.cgColor
        self.layer.shadowOpacity = 0.16
        self.layer.shadowRadius = 2
        self.layer.shadowOffset = CGSize.zero
    }
}
<file_sep>/Sésame/Style/Cells/cellShadowUIView.class.swift
//
// cellShadowUIView.swift
// Safety First
//
// Created by <NAME> on 30/10/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Container view used inside card cells to draw a subtle drop shadow
/// (slightly offset downward).
class cellShadowUIView: UIView {

    override func layoutSubviews() {
        super.layoutSubviews()
        self.layer.shadowColor = UIColor.black.cgColor
        self.layer.shadowOpacity = 0.16
        self.layer.shadowRadius = 2
        self.layer.shadowOffset = CGSize(width: 0, height: 1)
    }
}
<file_sep>/Sésame/ViewControllers/Safeguard.ViewController.swift
//
// Safeguard.ViewController.swift
// Sésame
//
// Created by <NAME> on 15/02/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
/// Full-screen "backup in progress" view: loops an upload animation while the
/// safeguard runs, then unwinds to whichever screen presented it.
class SafeguardViewController: UIViewController {

    let manager = SafeguardManager()

    @IBOutlet weak var ui_flecheImage: UIImageView!
    @IBOutlet weak var cs_spaceTop: NSLayoutConstraint!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
    }

    /// Starts the repeating upward fade/translate "arrow" animation.
    override func viewWillAppear(_ animated: Bool) {
        UIView.animate(withDuration: 2, delay: 0, options: [.repeat,.curveEaseInOut], animations: {
            self.ui_flecheImage.alpha = 0
            self.ui_flecheImage.transform = CGAffineTransform(translationX: 0, y: -250)
        }, completion: nil)
    }

    /// Runs the safeguard, waits, then unwinds to the presenting screen.
    /// NOTE(review): manager.safeguard() and sleep(2) both run on the main
    /// thread, freezing the UI for the whole backup — consider moving the
    /// work to a background queue with an async completion.
    override func viewDidAppear(_ animated: Bool) {
        if manager.safeguard() {
            sleep(2)
            if self.presentingViewController?.childViewControllers.last as? EditSafeguardViewController != nil {
                performSegue(withIdentifier: "unwindToEditSynchronization", sender: nil)
            } else {
                performSegue(withIdentifier: "unwindToSettings", sender: nil)
            }
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
<file_sep>/Sésame/Style/Bars/SafetySearchBar.class.swift
//
// SafetySearchBar.swift
// Safety First
//
// Created by <NAME> on 03/11/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Search bar with a drop shadow cast upward (it sits at the bottom edge of
/// its container, so the offset is negative).
class SafetySearchBar: UISearchBar {

    override func layoutSubviews() {
        super.layoutSubviews()
        self.layer.shadowColor = UIColor.black.cgColor
        self.layer.shadowOpacity = 0.16
        self.layer.shadowRadius = 2
        self.layer.shadowOffset = CGSize(width: 0, height: -2)
    }
}
<file_sep>/Sésame/Models/SecurityManager.model.swift
//
// securityManager.swift
// Safety First
//
// Created by <NAME> on 25/11/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import SystemConfiguration
/// Misc. security / environment checks (password strength, network
/// reachability, iCloud availability) plus ready-made alert controllers.
class securityManager {

    /// result == true means "there is a problem"; description names it.
    struct securityAnswer {
        var result:Bool
        var description:String?
    }

    lazy var InternetNotAvailableAlert: UIAlertController = {
        let alert = UIAlertController(title: NSLocalizedString("InternetNotAvailableAlertTitle", comment: ""), message: NSLocalizedString("InternetNotAvailableAlertMessage", comment: ""), preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: NSLocalizedString("InternetNotAvailableAlertCancel", comment: ""), style: .cancel, handler: nil))
        return alert
    }()

    lazy var IcloudNotEnableAlert: UIAlertController = {
        let alert = UIAlertController(title: NSLocalizedString("IcloudNotEnableAlertTitle", comment: ""), message: NSLocalizedString("IcloudNotEnableAlertMessage", comment: ""), preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: NSLocalizedString("IcloudNotEnableAlertCancel", comment: ""), style: .cancel, handler: nil))
        return alert
    }()

    /// Flags a password as problematic ("short" for 1–5 characters).
    func testPassword(password:String) -> securityAnswer {
        var answer = securityAnswer(result: false, description: nil)
        if password.count < 6 && password.count > 0 {
            answer.result = true
            answer.description = "short"
        }
        return answer
    }

    /// Reachability check against the zero address (0.0.0.0).
    /// FIX: SCNetworkReachabilityCreateWithAddress can return nil; the old
    /// force-unwrap would crash in that case — now it reports "unreachable".
    func isInternetAvailable() -> Bool {
        var zeroAddress = sockaddr_in()
        zeroAddress.sin_len = UInt8(MemoryLayout.size(ofValue: zeroAddress))
        zeroAddress.sin_family = sa_family_t(AF_INET)
        let defaultRouteReachability = withUnsafePointer(to: &zeroAddress) {
            $0.withMemoryRebound(to: sockaddr.self, capacity: 1) {zeroSockAddress in
                SCNetworkReachabilityCreateWithAddress(nil, zeroSockAddress)
            }
        }
        guard let reachability = defaultRouteReachability else {
            return false
        }
        var flags = SCNetworkReachabilityFlags()
        if !SCNetworkReachabilityGetFlags(reachability, &flags) {
            return false
        }
        let isInternetAvailable = (flags.contains(.reachable) && !flags.contains(.connectionRequired))
        return isInternetAvailable
    }

    /// Only checks iCloud (not iCloud Drive), which is enough for sharing
    /// the Sésame files.
    func isIcloudEnable() -> Bool {
        if FileManager.default.ubiquityIdentityToken != nil {
            return true
        } else {
            return false
        }
    }
}
<file_sep>/Sésame/Style/Bars/safetyNavBar.class.swift
//
// safetyNavBar.swift
// Safety First
//
// Created by <NAME> on 09/12/2017.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// App-wide navigation bar style: opaque primary-blue background with white
/// title and controls.
class safetyNavBar: UINavigationBar {

    override func layoutSubviews() {
        super.layoutSubviews()
        self.barTintColor = UIColor(named: "BluePrimary")
        self.tintColor = UIColor.white
        self.titleTextAttributes = [NSAttributedStringKey.foregroundColor : UIColor.white]
        self.isTranslucent = false
    }
}
<file_sep>/Sésame/Extensions/UITextField.extension.swift
//
// File.swift
// Sésame
//
// Created by <NAME> on 15/02/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
import UIKit
extension UITextField
{
    /// True when the field contains at least one non-whitespace character.
    /// FIX: the previous optional-chained form
    /// `self.text?.trimmingCharacters(in: .whitespaces).count != 0`
    /// evaluated to `nil != 0` (true) when `text` was nil, so an unset field
    /// was reported as filled. A nil text now counts as "not filled".
    var isFilled: Bool {
        guard let content = self.text else { return false }
        return content.trimmingCharacters(in: .whitespaces).count != 0
    }
}
|
6c16a69f58e758469bef8372b28f1544a526f730
|
[
"Swift",
"Ruby"
] | 43
|
Swift
|
RomainPct/Sesame
|
6eec036c83a877d3d7ed470d1e65a29dcece9fac
|
cc68d017c440508218573b89dd50d9238e57ff34
|
refs/heads/master
|
<repo_name>gagandeepkalra/ScalaTestProject<file_sep>/src/main/java/ThoughtWorks.java
import java.util.Arrays;
import java.util.PriorityQueue;
import java.util.Scanner;
public class ThoughtWorks {

    // Q1
    /**
     * Returns the largest power of two that is {@code <= n} (0 for n <= 0).
     *
     * <p>FIX: the previous implementation computed this through
     * {@code Math.pow(2, Math.floor(Math.log(n) / Math.log(2)))}, which can be
     * off by a factor of two for large longs because of double rounding;
     * {@link Long#highestOneBit(long)} is exact for every positive long.
     */
    static long findX(long n) {
        if (n <= 0) {
            return 0L;
        }
        return Long.highestOneBit(n);
    }

    /** Q1 driver: reads t values of n from stdin and prints one answer per line. */
    public static void input1(String[] args) {
        Scanner sc = new Scanner(System.in);
        int t = sc.nextInt();
        for (int i = 0; i < t; i++) {
            long n = sc.nextLong();
            long x = findX(n);
            long count = n - x;
            if (count == 0) {
                System.out.println(n);
            } else {
                System.out.println(2 + (count - 1) * 2);
            }
        }
    }

    // Q2 - event based sorting, find max overlapping events
    /** Clock time; when {@code flag} is set, 5 minutes of padding are added. */
    static class Time {
        private int hours;
        private int mins;

        public Time(int hours, int mins, boolean flag) {
            this.hours = hours;
            this.mins = mins;
            if (flag) add(5);
        }

        private void add(int value) {
            mins += value;
            if (mins >= 60) {
                hours += 1;
                mins = mins - 60;
            }
        }

        /** Encodes the time as hours.minutes (e.g. 11:17 -> 11.17) for ordering. */
        public double getValue() {
            return hours * 1.0 + ((mins * 1.0) / 100.0);
        }
    }

    /** A [start, end] interval in the encoded time representation. */
    static class Pair {
        Double start;
        Double end;

        public Pair(Double start, Double end) {
            this.start = start;
            this.end = end;
        }
    }

    /**
     * Q2 driver: reads t intervals, sorts them by (start, end), then sweeps
     * with a min-heap of end times to report the maximum number of events
     * open at once.
     */
    public static void input2(String[] args) {
        Scanner sc = new Scanner(System.in);
        int t = sc.nextInt();
        Pair[] ranges = new Pair[t];
        for (int i = 0; i < t; i++) {
            ranges[i] = (new Pair(new Time(sc.nextInt(), sc.nextInt(), false).getValue(), new Time(sc.nextInt(), sc.nextInt(), true).getValue()));
        }
        // Double.compare is the contract-safe way to order doubles
        // (replaces the old signum-of-difference casts).
        Arrays.sort(ranges, (Pair o1, Pair o2) -> {
            int result = Double.compare(o1.start, o2.start);
            if (result == 0) {
                return Double.compare(o1.end, o2.end);
            } else return result;
        });
        PriorityQueue<Double> queue = new PriorityQueue<>();
        int count = 0;
        int result = Integer.MIN_VALUE;
        for (int i = 0; i < t; i++) {
            Pair pair = ranges[i];
            // Close every event that ended at or before this one's start.
            while (!queue.isEmpty() && queue.peek() <= pair.start) {
                queue.remove();
                count--;
            }
            queue.add(pair.end);
            count++;
            result = Math.max(result, count);
        }
        System.out.println(result);
    }
}
/*
4
00 00 11 11
00 00 10 11
11 17 23 17
12 12 12 50
*/
<file_sep>/src/main/java/OfFrequenciesAndLife.java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedSet;
import java.util.StringTokenizer;
import java.util.TreeSet;
public class OfFrequenciesAndLife {

    /*
       Answer n queries of type:
       1. 1 elem -> increase frequency of elem by one
       2. 2 elem -> decrease frequency of elem by one
       3. 3 -> from the set of elements having the least frequency, print out the maximum element
       4. 4 -> from the set of elements having the highest frequency, print out the least element
    */
    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(
                new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());
        int n = Integer.parseInt(st.nextToken());
        // element -> its current frequency (absent == frequency 0)
        Map<Integer, Integer> elementToFrequency = new HashMap<>();
        // frequency -> sorted set of elements currently at that frequency
        Map<Integer, TreeSet<Integer>> frequencyToElements = new HashMap<>();
        // sorted set of frequencies that currently hold at least one element;
        // first() / last() answer the min- and max-frequency queries
        SortedSet<Integer> allFrequenciesPresent = new TreeSet<>();
        while (n-- > 0) {
            st = new StringTokenizer(br.readLine());
            int element, currentFrequency, newFrequency;
            switch (Integer.parseInt(st.nextToken())) {
                case 1: {
                    // Move element from bucket currentFrequency to currentFrequency + 1.
                    element = Integer.parseInt(st.nextToken());
                    currentFrequency = elementToFrequency.getOrDefault(element, 0);
                    newFrequency = currentFrequency + 1;
                    frequencyToElements.getOrDefault(currentFrequency, new TreeSet<>()).remove(element);
                    elementToFrequency.put(element, newFrequency);
                    allFrequenciesPresent.add(newFrequency);
                    // Drop the old frequency from the index once its bucket is empty.
                    if (frequencyToElements.getOrDefault(currentFrequency, new TreeSet<>()).size() == 0)
                        allFrequenciesPresent.remove(currentFrequency);
                    TreeSet<Integer> x = frequencyToElements.getOrDefault(newFrequency, new TreeSet<>());
                    x.add(element);
                    frequencyToElements.put(newFrequency, x);
                    break;
                }
                case 2: {
                    // Move element one bucket down; remove it entirely at frequency 0.
                    element = Integer.parseInt(st.nextToken());
                    currentFrequency = elementToFrequency.getOrDefault(element, 0);
                    newFrequency = currentFrequency - 1;
                    frequencyToElements.getOrDefault(currentFrequency, new TreeSet<>()).remove(element);
                    if (frequencyToElements.getOrDefault(currentFrequency, new TreeSet<>()).size() == 0)
                        allFrequenciesPresent.remove(currentFrequency);
                    if (newFrequency <= 0) {
                        elementToFrequency.remove(element);
                    } else {
                        elementToFrequency.put(element, newFrequency);
                        allFrequenciesPresent.add(newFrequency);
                        TreeSet<Integer> x = frequencyToElements.getOrDefault(newFrequency, new TreeSet<>());
                        x.add(element);
                        frequencyToElements.put(newFrequency, x);
                    }
                    break;
                }
                case 3: {
                    // Max element among those with the least frequency; -1 when empty.
                    if (allFrequenciesPresent.size() > 0) {
                        TreeSet<Integer> result = frequencyToElements.getOrDefault(allFrequenciesPresent.first(), new TreeSet<>());
                        if (result.size() > 0) {
                            System.out.println(result.last());
                        } else System.out.println(-1);
                    } else System.out.println(-1);
                }
                break;
                case 4: {
                    // Min element among those with the highest frequency; -1 when empty.
                    if (allFrequenciesPresent.size() > 0) {
                        TreeSet<Integer> result = frequencyToElements.getOrDefault(allFrequenciesPresent.last(), new TreeSet<>());
                        if (result.size() > 0) {
                            System.out.println(result.first());
                        } else System.out.println(-1);
                    } else System.out.println(-1);
                }
            }
        }
    }
}
<file_sep>/src/main/java/codeforces/LittleArrayAndElephants.java
package codeforces;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.TreeSet;
/*
E. Little Elephant and Inversions :=> https://codeforces.com/contest/220/problem/E
Segment Trees, Two Pointers
(iterative segment tree)
*/
public class LittleArrayAndElephants {

    // Iterative (bottom-up) counting segment tree: leaves live at indexes
    // n..2n-1, internal nodes at 1..n-1, index 0 unused.

    /** Adds one occurrence of value {@code idx} and refreshes its ancestors. */
    private static void insert(int idx, int[] segmentArr) {
        int n = segmentArr.length / 2;
        for (segmentArr[idx += n]++; idx > 1; idx >>= 1) segmentArr[idx >> 1] = segmentArr[idx] + segmentArr[idx ^ 1];
    }

    /** Removes one occurrence of value {@code idx} and refreshes its ancestors. */
    private static void remove(int idx, int[] segmentArr) {
        int n = segmentArr.length / 2;
        for (segmentArr[idx += n]--; idx > 1; idx >>= 1) segmentArr[idx >> 1] = segmentArr[idx] + segmentArr[idx ^ 1];
    }

    /** Counts the stored values in the half-open value range [l, r). */
    private static int query(int l, int r, int[] segmentArr) { // even => left child, odd => right child, [l, r)
        int res = 0, n = segmentArr.length / 2;
        for (l += n, r += n; l < r; l >>= 1, r >>= 1) {
            if ((l & 1) == 1) res += segmentArr[l++];
            if ((r & 1) == 1) res += segmentArr[--r];
        }
        return res;
    }

    /**
     * Coordinate compression: maps the n input values onto 0..k-1 preserving
     * relative order (equal values share one compressed id), so they can be
     * used as segment-tree leaf indexes.
     */
    private static int[] compressed(String line, int n) {
        int[] rangedInput = new int[n];
        Map<Integer, Set<Integer>> indexOf = new HashMap<>(n);
        Set<Integer> set = new TreeSet<>();
        StringTokenizer st = new StringTokenizer(line);
        for (int i = 0; i < n; i++) {
            int input = Integer.parseInt(st.nextToken());
            Set<Integer> indexes = indexOf.getOrDefault(input, new TreeSet<>());
            indexes.add(i);
            indexOf.putIfAbsent(input, indexes);
            set.add(input);
        }
        int ctr = 0;
        for (Integer elem : set) { // sorted order
            for (Integer l : indexOf.get(elem)) {
                rangedInput[l] = ctr;
            }
            ctr++;
        }
        return rangedInput;
    }

    /**
     * Two-pointer sweep: for each suffix start r (kept in segmentArrR), shrink
     * the prefix end l (kept in segmentArrL) until the inversion count of
     * prefix+suffix is <= k, accumulating the number of valid (l, r) pairs.
     */
    public static void main(String[] args) throws Exception {
        final BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());
        int n = Integer.parseInt(st.nextToken());
        long k = Long.parseLong(st.nextToken());
        int[] input = compressed(br.readLine(), n);
        long inversions = 0, result = 0;
        int[] segmentArrL = new int[2 * n], segmentArrR = new int[2 * n]; // 0 is not used, 1 to n-1 are internal nodes, n to 2n-1 are leaves
        // Start with the whole array in the "left" tree, counting all inversions.
        for (int i = 0; i < n; i++) {
            inversions += query(input[i] + 1, n, segmentArrL); // add #elements > input[i] in 0..i to inversion count
            insert(input[i], segmentArrL);
        }
        int l = n - 2, r = n - 1;
        // remove input[r] from left and move to right
        remove(input[r], segmentArrL);
        insert(input[r], segmentArrR);
        while (r > 0) {
            while (inversions > k && l >= 0) {
                remove(input[l], segmentArrL);
                inversions -= query(input[l] + 1, n, segmentArrL) + query(0, input[l], segmentArrR); // find #elements > input[l] in left and < input[l] in right
                l--;
            }
            result += l + 1;
            r--;
            insert(input[r], segmentArrR);
            if (l == r) {
                // Pointers met: input[r] must leave the prefix before joining the suffix.
                remove(input[l], segmentArrL);
                l--;
            } else {
                inversions += query(input[r] + 1, n, segmentArrL) + query(0, input[r], segmentArrR); // find #elements > input[l] in left and < input[l] in right
            }
        }
        System.out.println(result);
    }
}
<file_sep>/src/main/java/codeforces/contests/_1399/WeightsDivisionEasyJava.java
package codeforces.contests._1399;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.PriorityQueue;
/*
https://codeforces.com/contest/1399/problem/E1
[Graph Theory]
We calculate contribution of each edge by calculating number of leaves if we go down it. Afterwards, we use a max heap to
reduce the maximals of the weights only that the heap is ordered by diffs = w*c - w/2*c, courtesy integer division
Scala version didn't pass, TLE
*/
public class WeightsDivisionEasyJava {

    /**
     * Greedy: the total path-weight sum equals sum over edges of w * (#leaves
     * below the edge). Repeatedly halve the edge whose halving removes the
     * most from the sum (max-heap ordered by {@link #diff(int[])}) until the
     * sum drops to s; print the number of halvings per test case.
     */
    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int t = Integer.parseInt(br.readLine());
        ArrayList<Integer> printBuffer = new ArrayList<>();
        while (t-- > 0) {
            String[] l1 = br.readLine().split(" ");
            int n = Integer.parseInt(l1[0]);
            long s = Long.parseLong(l1[1]);
            // Adjacency list of {neighbour, weight} pairs, 1-indexed.
            // (Unchecked generic-array creation — standard CP shortcut.)
            ArrayList<int[]>[] graph = new ArrayList[n + 1];
            for (int i = 0; i <= n; i++) graph[i] = new ArrayList<>();
            {
                int nn = n - 1;
                while (nn-- > 0) {
                    String[] l2 = br.readLine().split(" ");
                    int u = Integer.parseInt(l2[0]);
                    int v = Integer.parseInt(l2[1]);
                    int w = Integer.parseInt(l2[2]);
                    graph[u].add(new int[]{v, w});
                    graph[v].add(new int[]{u, w});
                }
            }
            // Max-heap of {weight, leafCount} pairs ordered by halving gain.
            PriorityQueue<int[]> queue = new PriorityQueue<>((o1, o2) -> (diff(o1) > diff(o2)) ? -1 : 1);
            int[] leaves = new int[n + 1];
            long sum = dfs(graph, queue, leaves, 1, 0);
            int steps = 0;
            while (sum > s) {
                int[] pair = queue.poll();
                int w = pair[0];
                int c = pair[1];
                queue.add(new int[]{w / 2, c});
                sum -= diff(pair);
                steps += 1;
            }
            printBuffer.add(steps);
        }
        System.out.println(printBuffer.stream().reduce(new StringBuffer(), (sb, i) -> sb.append(i).append("\n"), StringBuffer::append));
    }

    /**
     * How much the total sum shrinks when edge {w, c} is halved. Uses integer
     * division on w/2 deliberately — it must match the halving applied above.
     */
    private static long diff(int[] pair) {
        int w = pair[0], c = pair[1];
        return (long) w * c - (long) w / 2 * c;
    }

    /**
     * DFS from u (parent p): fills {@code leaves}, pushes each downward edge
     * {w, leafCount} into the heap, and returns the subtree's contribution to
     * the total root-to-leaf weight sum.
     */
    private static long dfs(ArrayList<int[]>[] graph, PriorityQueue<int[]> queue, int[] leaves, int u, int p) {
        if (graph[u].size() == 1 && graph[u].get(0)[0] == p) {
            leaves[u] = 1;
            return 0L;
        } else {
            long total = 0;
            for (int[] pair : graph[u]) {
                int v = pair[0];
                if (v != p) {
                    int w = pair[1];
                    total += dfs(graph, queue, leaves, v, u) + ((long) leaves[v]) * w;
                    leaves[u] += leaves[v];
                    queue.add(new int[]{w, leaves[v]});
                }
            }
            return total;
        }
    }
}
<file_sep>/src/main/java/codeforces/MishaAndPermutationsSummation.java
package codeforces;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/*
https://codeforces.com/contest/501/problem/D
https://en.wikipedia.org/wiki/Factorial_number_system
*/
public class MishaAndPermutationsSummation {
/** Builds the counting segment tree over values l..r: every leaf starts at 1
 *  (all values initially available); internal node i = sum of its children. */
private static void build(int[] seg, int l, int r, int i) {
    if (l == r) seg[i] = 1;
    else {
        int m = (l + r) >>> 1;
        build(seg, l, m, 2 * i + 1);
        build(seg, m + 1, r, 2 * i + 2);
        seg[i] = seg[2 * i + 1] + seg[2 * i + 2];
    }
}
/** Returns how many still-available values in l..r are strictly smaller than
 *  {@code element}, i.e. its 0-based rank among the remaining values. */
private static int findRankOf(int[] seg, int l, int r, int i, int element) {
    if (l == r) return 0;
    else {
        int m = (l + r) >>> 1;
        if (element <= m) return findRankOf(seg, l, m, 2 * i + 1, element);
        else return seg[2 * i + 1] + findRankOf(seg, m + 1, r, 2 * i + 2, element);
    }
}
/** Returns the still-available value with the given 0-based rank by walking
 *  down the tree, skipping left-subtree counts. */
private static int findElementWithRank(int[] seg, int l, int r, int i, int rank) { // 0 based
    if (l == r) return l;
    else {
        int m = (l + r) >>> 1;
        if (rank < seg[2 * i + 1]) return findElementWithRank(seg, l, m, 2 * i + 1, rank);
        else return findElementWithRank(seg, m + 1, r, 2 * i + 2, rank - seg[2 * i + 1]);
    }
}
private static void removeElement(int[] seg, int l, int r, int i, int element) {
if (l == r) seg[i] = 0;
else {
int m = (l + r) >>> 1;
if (element <= m) removeElement(seg, l, m, 2 * i + 1, element);
else removeElement(seg, m + 1, r, 2 * i + 2, element);
seg[i] = seg[2 * i + 1] + seg[2 * i + 2];
}
}
private static int[] toFactorialNumberSystem(int[] in) {
int n = in.length;
int[] factorodic = new int[n], seg = new int[4 * n];
build(seg, 0, n - 1, 0);
for (int i = 0; i < n; i++) {
factorodic[i] = findRankOf(seg, 0, n - 1, 0, in[i]);
removeElement(seg, 0, n - 1, 0, in[i]);
}
return factorodic;
}
private static int[] toPermutationFromFactorialNumberSystem(int[] in) {
int n = in.length;
int[] result = new int[n], seg = new int[4 * n];
build(seg, 0, n - 1, 0);
for (int i = 0; i < n; i++) {
result[i] = findElementWithRank(seg, 0, n - 1, 0, in[i]);
removeElement(seg, 0, n - 1, 0, result[i]);
}
return result;
}
public static void main(String[] args) throws Exception {
final BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
StringTokenizer st = new StringTokenizer(br.readLine());
int n = Integer.parseInt(st.nextToken());
int[] p1 = new int[n], p2 = new int[n];
st = new StringTokenizer(br.readLine());
for (int i = 0; i < n; i++) p1[i] = Integer.parseInt(st.nextToken());
st = new StringTokenizer(br.readLine());
for (int i = 0; i < n; i++) p2[i] = Integer.parseInt(st.nextToken());
int[] factorodic1 = toFactorialNumberSystem(p1), factorodic2 = toFactorialNumberSystem(p2), addition = new int[n];
for (int i = n - 1, j = 0, carry = 0; i >= 0; i--, j++) {
int sum = factorodic1[i] + factorodic2[i] + carry;
addition[i] = sum % (j + 1);
carry = sum / (j + 1);
}
int[] result = toPermutationFromFactorialNumberSystem(addition);
for (int i = 0; i < n; i++) {
System.out.print(result[i] + " ");
}
}
}
<file_sep>/src/main/java/codeforces/CopyingData.java
package codeforces;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.StringTokenizer;
/**
 * Two query kinds over 1-based arrays A and B:
 *   1 a b k : conceptually copy A[a..a+k-1] onto B[b..b+k-1]
 *   2 idx   : print the current value at B[idx]
 * Copies are never materialised: a segment tree with assignment-style lazy
 * tags records, per position of B, the most recent copy command covering it.
 */
public class CopyingData {
    // Node tag = {source start in A, destination start in B}; {-1,-1} = no copy here.
    private static int[][] segmentArr; // 1 based

    /** Writes the copy tag (value0 = A-start, value1 = B-start) onto node i. */
    private static void apply(int i, int value0, int value1) {
        segmentArr[i][0] = value0;
        segmentArr[i][1] = value1;
    }

    /** True when node i carries a pending copy tag. */
    private static boolean updatePresent(int i) {
        return segmentArr[i][0] != -1 || segmentArr[i][1] != -1;
    }

    /**
     * Tags all of B[x..y] with the copy (value0, value1). A newer copy fully
     * replaces an older one, so a pending tag is pushed to the children
     * before recursing (assignment lazy propagation).
     */
    private static void update(int l, int r, int i, int x, int y, int value0, int value1) { // inclusive
        if (r < x || y < l) return;
        if (x <= l && r <= y) {
            apply(i, value0, value1);
            return;
        }
        // push down current update
        if (updatePresent(i)) {
            apply(2 * i, segmentArr[i][0], segmentArr[i][1]);
            apply(2 * i + 1, segmentArr[i][0], segmentArr[i][1]);
            apply(i, -1, -1);
        }
        int m = (l + r) / 2;
        update(l, m, 2 * i, x, y, value0, value1);
        update(m + 1, r, 2 * i + 1, x, y, value0, value1);
    }

    /**
     * Returns the index in A that position idx of B currently mirrors, or -1
     * when no copy covers idx (B's own value should be used). The first tag
     * met on the way down wins because it is the most recent covering copy.
     */
    private static int query(int l, int r, int i, int idx) { // returns index in array A or -1 if result from array B
        if (updatePresent(i)) return segmentArr[i][0] + idx - segmentArr[i][1];
        if (l == r) return -1; // end of line
        int m = (l + r) / 2;
        if (idx <= m) return query(l, m, 2 * i, idx);
        else return query(m + 1, r, 2 * i + 1, idx);
    }

    /** Reads n, q, both arrays, then dispatches the q queries. */
    public static void main(String[] args) throws Exception {
        final BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());
        int n = Integer.parseInt(st.nextToken()), q = Integer.parseInt(st.nextToken());
        int arr[] = new int[n + 1], brr[] = new int[n + 1];
        st = new StringTokenizer(br.readLine());
        for (int i = 1; i <= n; i++) arr[i] = Integer.parseInt(st.nextToken());
        st = new StringTokenizer(br.readLine());
        for (int i = 1; i <= n; i++) brr[i] = Integer.parseInt(st.nextToken());
        segmentArr = new int[4 * n][2];
        for (int[] row : segmentArr) Arrays.fill(row, -1);
        while (q-- > 0) {
            st = new StringTokenizer(br.readLine());
            switch (Integer.parseInt(st.nextToken())) {
                case 1:
                    int a = Integer.parseInt(st.nextToken()), b = Integer.parseInt(st.nextToken()), k = Integer.parseInt(st.nextToken());
                    update(1, n, 1, b, b + k - 1, a, b);
                    break;
                case 2:
                    int idx = Integer.parseInt(st.nextToken());
                    int res = query(1, n, 1, idx);
                    if (res == -1) System.out.println(brr[idx]);
                    else System.out.println(arr[res]);
                    break;
            }
        }
    }
}
<file_sep>/src/main/java/Stats.java
import java.util.Map;
import java.util.Scanner;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
/*
Compute average of stock prices, multi-threaded implementation
*/
public class Stats {
    /**
     * Thread-safe per-symbol aggregator of price ticks.
     *
     * The map is a ConcurrentHashMap; each per-symbol accumulator guards its
     * own state with synchronized methods so readers observe writes made by
     * other threads (Java memory model visibility).
     */
    public static class StatisticsAggregatorImpl implements StatisticsAggregator {
        private final Map<String, Pair> hashMap = new ConcurrentHashMap<>();

        /** Records one price observation for {@code symbol}. */
        @Override
        public void putNewPrice(String symbol, double price) {
            // computeIfAbsent creates the accumulator atomically on first use,
            // replacing the original putIfAbsent + get double lookup.
            hashMap.computeIfAbsent(symbol, s -> new Pair()).update(price);
        }

        /** Average of all recorded prices for {@code symbol}; 0.0 if none recorded. */
        @Override
        public double getAveragePrice(String symbol) {
            Pair pair = hashMap.get(symbol);
            return pair == null ? 0.0 : pair.average();
        }

        /** Number of ticks recorded for {@code symbol}; 0 if none recorded. */
        @Override
        public int getTickCount(String symbol) {
            Pair pair = hashMap.get(symbol);
            return pair == null ? 0 : pair.getCount();
        }

        /** Running (sum, count) accumulator; all access is synchronized. */
        private static class Pair {
            private double total = 0.0;
            private int count = 0;

            // BUG FIX: the readers were unsynchronized while update() was
            // synchronized, so a concurrent reader could observe stale or
            // inconsistent total/count under the Java memory model.
            // All methods now synchronize on the instance.
            synchronized int getCount() {
                return count;
            }

            synchronized void update(double value) {
                total += value;
                count++;
            }

            synchronized double average() {
                return count == 0 ? 0.0 : total / count;
            }
        }
    }

    ////////////////// DO NOT MODIFY BELOW THIS LINE ///////////////////
    public interface StatisticsAggregator {
        // This is an input. Make note of this price.
        public void putNewPrice(String symbol, double price);
        // Get the average price
        public double getAveragePrice(String symbol);
        // Get the total number of prices recorded
        public int getTickCount(String symbol);
    }

    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        while (scanner.hasNext()) {
            final StatisticsAggregator stats = new StatisticsAggregatorImpl();
            final Set<String> symbols = new TreeSet<>();
            String line = scanner.nextLine();
            String[] inputs = line.split(",");
            int threads = Integer.parseInt(inputs[0]);
            ExecutorService pool = Executors.newFixedThreadPool(threads);
            for (int i = 1; i < inputs.length; ++i) {
                String[] tokens = inputs[i].split(" ");
                final String symbol = tokens[0];
                symbols.add(symbol);
                final double price = Double.parseDouble(tokens[1]);
                pool.submit(new Runnable() {
                    @Override
                    public void run() {
                        stats.putNewPrice(symbol, price);
                    }
                });
            }
            pool.shutdown();
            try {
                pool.awaitTermination(5000, TimeUnit.MILLISECONDS);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            for (String symbol : symbols) {
                System.out.println(String.format("%s %.4f %d", symbol,
                        stats.getAveragePrice(symbol),
                        stats.getTickCount(symbol)));
            }
        }
        scanner.close();
    }
}
<file_sep>/src/main/java/codeforces/Optimize.java
package codeforces;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.StringTokenizer;
/*
E. Optimize! :=> https://codeforces.com/contest/338/problem/E
Segment Trees, Lazy, Binary Search
*/
public class Optimize {
    /**
     * Initialises the min-segment tree over b's sorted indices [l, r]
     * (1-based node i): leaf l starts at len - l, the number of b-elements
     * located at positions >= l.
     */
    private static void preCompute(int[] segmentArr, int len, int l, int r, int i) { // 1 based segmentArr
        if (l == r) segmentArr[i] = len - l;
        else {
            int m = (l + r) >>> 1;
            preCompute(segmentArr, len, l, m, i << 1);
            preCompute(segmentArr, len, m + 1, r, (i << 1) | 1);
            segmentArr[i] = Math.min(segmentArr[i << 1], segmentArr[(i << 1) | 1]);
        }
    }

    /**
     * Lazily adds {@code amount} to every leaf in [findl, findr], keeping
     * every internal node the minimum of its children (range-add / range-min).
     */
    private static void updateWithAmount(int[] segmentArr, int[] lazy, int findl, int findr, int amount, int l, int r, int i) {
        if (r < findl || findr < l) {
            // out of range
        } else {
            if (findl <= l && r <= findr) {
                segmentArr[i] += amount;
                lazy[i] += amount;
                return; // don't go any further boy!
            }
            if (lazy[i] != 0) { // lazy set => children needs to be updated
                lazy[i << 1] += lazy[i];
                segmentArr[i << 1] += lazy[i];
                lazy[(i << 1) | 1] += lazy[i];
                segmentArr[(i << 1) | 1] += lazy[i];
                lazy[i] = 0;
            }
            int m = (l + r) >>> 1;
            updateWithAmount(segmentArr, lazy, findl, findr, amount, m + 1, r, (i << 1) | 1);
            updateWithAmount(segmentArr, lazy, findl, findr, amount, l, m, i << 1);
            segmentArr[i] = Math.min(segmentArr[i << 1], segmentArr[(i << 1) | 1]);
        }
    }

    /**
     * Sliding-window feasibility: a window of a (length len) is "good" when
     * every a[i] can be matched to a distinct b[j] with a[i] + b[j] >= h.
     * For each a[i], binary search finds index[i], the smallest position in
     * sorted b that works. By Hall's condition a perfect matching exists iff
     * for every position l the number of window elements demanding a
     * b-position >= l does not exceed len - l; each element decrements the
     * leaves [0..index[i]], so feasibility is "tree minimum >= 0" combined
     * with negativeOnesCount == 0 (no element lacking any usable partner).
     * Prints how many of the n-len+1 windows are good.
     */
    public static void main(String[] args) throws Exception {
        final BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());
        int n = Integer.parseInt(st.nextToken()), len = Integer.parseInt(st.nextToken()), h = Integer.parseInt(st.nextToken());
        int[] a = new int[n], b = new int[len];
        st = new StringTokenizer(br.readLine());
        for (int i = 0; i < len; i++) b[i] = Integer.parseInt(st.nextToken());
        Arrays.sort(b);
        st = new StringTokenizer(br.readLine());
        for (int i = 0; i < n; i++) a[i] = Integer.parseInt(st.nextToken());
        int[] segmentArr = new int[4 * len], lazy = new int[4 * len];
        preCompute(segmentArr, len, 0, len - 1, 1); // len, len-1, len-2 ....
        int result = 0;
        int[] index = new int[n]; // save indexes of elements used in array b for each element fo a
        for (int i = 0, negativeOnesCount = 0; i < n; i++) {
            int key = h - a[i]; // now find index of element >= key in array b
            index[i] = -1;
            if (key <= b[len - 1]) {
                // lower-bound binary search for the first b[r] >= key
                int l = 0, r = len - 1;
                while (l < r) { // 5 7
                    int mid = (l + r) >>> 1;
                    if (key <= b[mid])
                        r = mid;
                    else // key > b[mid]
                        l = mid + 1;
                }
                index[i] = r;
            }
            if (index[i] != -1)
                updateWithAmount(segmentArr, lazy, 0, index[i], -1, 0, len - 1, 1); // increase 0 to r
            else
                negativeOnesCount++;
            // Slide the window: undo the contribution of the element leaving it.
            if (i >= len)
                if (index[i - len] != -1)
                    updateWithAmount(segmentArr, lazy, 0, index[i - len], 1, 0, len - 1, 1); // revert
                else
                    negativeOnesCount--;
            if (i >= len - 1 && negativeOnesCount == 0 && segmentArr[1] >= 0)
                result++;
        }
        System.out.println(result);
    }
}
<file_sep>/src/main/java/SubSetBitwiseAnd.java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
import java.util.stream.IntStream;
/**
*
* AND Sum
* Given an array of N numbers, you have to report the Sum of bitwise AND of all possible subsets of this array.
* As answer can be large, report it after taking mod with 109+7.
* Input:
* First line contains a number T denoting the number of test cases.
*
* First line of each test case contains a number N denoting the number of elements in the array.
* Second line contains the N elements of the array.
*
* Output:
* For each test case output a single number denoting the Sum of bitwise AND of all possible subsets of the given array.
*
* Input Constraints:
*
* 1<=T<=10
* 1<=N<=105
* 1<=a[i]<=109
*
*/
public class SubSetBitwiseAnd {
    final static int MOD = 1000000007;

    /**
     * Modular exponentiation: a^exp mod 1e9+7 via iterative binary powering.
     * Requires exp >= 0.
     */
    static long power(int a, int exp) {
        long base = a;
        long result = 1;
        int e = exp;
        while (e > 0) {
            if ((e & 1) == 1) {
                result = (result * base) % MOD;
            }
            base = (base * base) % MOD;
            e >>= 1;
        }
        return result;
    }

    /**
     * For each test case, prints the sum of bitwise AND over all non-empty
     * subsets, mod 1e9+7. Bit j contributes (2^{c_j} - 1) * 2^j, where c_j
     * is the number of elements with bit j set: a subset's AND has bit j set
     * exactly when it is a non-empty subset of those c_j elements.
     */
    public static void main(String[] srr) throws Exception {
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        int testCases = Integer.parseInt(new StringTokenizer(reader.readLine()).nextToken());
        while (testCases-- > 0) {
            StringTokenizer tokens = new StringTokenizer(reader.readLine());
            int n = Integer.parseInt(tokens.nextToken());
            int[] bitCount = new int[32]; // bitCount[j] = #elements with bit j set
            tokens = new StringTokenizer(reader.readLine());
            for (int i = 0; i < n; i++) {
                int value = Integer.parseInt(tokens.nextToken());
                for (int j = 0; j < 32; j++) {
                    if (((value >> j) & 1) == 1) {
                        bitCount[j]++;
                    }
                }
            }
            long answer = 0;
            for (int j = 0; j < 32; j++) {
                // power(2, c) is never 0 mod the prime MOD, so the -1 stays non-negative.
                answer = (answer + ((power(2, bitCount[j]) - 1) * power(2, j)) % MOD) % MOD;
            }
            System.out.println(answer);
        }
    }
}
<file_sep>/src/main/java/BobAndForest.java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/*
Graph
*/
/**
 * Counts all-'*' squares in an n x m grid, then answers queries:
 * for a query q, how many '*'-squares have side length at most q?
 */
public class BobAndForest {
    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(
                new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());
        int n = Integer.parseInt(st.nextToken()), m = Integer.parseInt(st.nextToken());
        int[][] grid = new int[n][m];
        // count[s] first holds the number of cells whose LARGEST '*'-square
        // with that cell as top-left corner has side exactly s.
        int[] count = new int[n + 1];
        for (int i = 0; i < n; i++) {
            final String row = new StringTokenizer(br.readLine()).nextToken();
            for (int j = 0; j < m; j++) {
                if (row.charAt(j) == '.') grid[i][j] = 0;
                else if (row.charAt(j) == '*') {
                    grid[i][j] = 1;
                } else {
                    System.err.println("Illegal Character");
                    return;
                }
            }
        }
        // Maximal-square DP scanned from the bottom-right: grid[i][j] becomes
        // the side of the largest '*'-square whose top-left corner is (i, j).
        for (int i = n - 2; i >= 0; i--) {
            for (int j = m - 2; j >= 0; j--) {
                if (grid[i][j] != 0) {
                    grid[i][j] += Math.min(grid[i + 1][j], Math.min(grid[i + 1][j + 1], grid[i][j + 1]));
                    count[grid[i][j]]++;
                }
            }
        }
        // Cells in the last row/column were skipped by the DP; their maximal
        // square can only have side 1.
        for (int i = 0; i < n; i++) {
            if (grid[i][m - 1] == 1) count[1]++;
        }
        for (int j = 0; j < m; j++) {
            if (grid[n - 1][j] == 1) count[1]++;
        }
        if (grid[n - 1][m - 1] == 1) count[1]--; // bottom-right corner counted twice above
        // Suffix sums: count[s] becomes the number of corners with maximal
        // side >= s, i.e. the number of '*'-squares of side exactly s.
        // (The != 0 guard is safe: maximal sides along a diagonal drop by at
        // most 1 per step, so occupied sizes have no gaps.)
        for (int i = n, sum = 0; i > 0; i--) {
            if (count[i] != 0) {
                sum += count[i];
                count[i] += sum - count[i];
            }
        }
        // Prefix sums: count[q] = number of '*'-squares with side <= q.
        for (int i = 1; i <= n; i++) {
            count[i] += count[i - 1];
        }
        int k = Integer.parseInt(new StringTokenizer(br.readLine()).nextToken());
        while (k-- > 0) {
            // BUG FIX: the original clamped with Math.max(query, n), which both
            // ignored queries smaller than n and indexed past count[] for
            // queries larger than n. Clamp the query DOWN to n instead.
            int i = Math.min(Integer.parseInt(new StringTokenizer(br.readLine()).nextToken()), n);
            System.out.println(count[i]);
        }
    }
}
<file_sep>/src/main/java/NiYOTest.java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.LinkedList;
import java.util.StringTokenizer;
import java.util.stream.IntStream;
public class NiYOTest {
    /** Tree node: its own character plus letter counts aggregated over its subtree. */
    static class Node {
        public char c;
        // frequency[k] = occurrences of letter ('a' + k) in this node's
        // subtree (only the node's own letter until populate() has run).
        public int[] frequency = new int[26];

        public Node(char c) {
            this.c = c;
            frequency[c - 'a'] = 1;
        }

        /** Adds another node's letter counts into this node's counts. */
        public void add(Node node) {
            for (int i = 0; i < 26; i++) {
                frequency[i] += node.frequency[i];
            }
        }
    }

    /** Undirected tree stored as adjacency lists over vertices 0..V-1. */
    static class Tree {
        public int V;
        public LinkedList<Integer> adjListArray[];

        public Tree(int V) {
            this.V = V;
            adjListArray = new LinkedList[V];
            for (int i = 0; i < V; i++) {
                adjListArray[i] = new LinkedList<>();
            }
        }

        public void addEdge(int src, int dest) {
            adjListArray[src].addFirst(dest);
            adjListArray[dest].addFirst(src);
        }
    }

    /** Post-order DFS: folds every child's subtree letter counts into its parent. */
    static void populate(Tree tree, Node[] nodes, int index, int parent) {
        tree.adjListArray[index].stream().filter(integer -> integer != parent).forEach(integer -> {
            populate(tree, nodes, integer, index);
            nodes[index].add(nodes[integer]);
        });
    }

    /**
     * Reads an N-vertex tree (1-based, rooted at 1) with a letter per vertex,
     * then answers Q queries: given a vertex and a string, print how many of
     * the string's characters exceed what that vertex's subtree can supply.
     *
     * NOTE(review): the entry point is named input(), not main(), so the
     * class is not runnable as-is — confirm whether the rename is intentional.
     */
    public static void input(String args[]) throws Exception{
        BufferedReader br = new BufferedReader(
                new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());
        int N = Integer.parseInt(st.nextToken()), Q = Integer.parseInt(st.nextToken());
        Node[] nodes = new Node[N + 1];
        st = new StringTokenizer(br.readLine());
        for (int i = 1; i <= N; i++) {
            nodes[i] = new Node(st.nextToken().charAt(0));
        }
        int t = N - 1;
        Tree tree = new Tree(N + 1);
        while (t-- > 0) {
            st=new StringTokenizer(br.readLine());
            tree.addEdge(Integer.parseInt(st.nextToken()), Integer.parseInt(st.nextToken()));
        }
        populate(tree, nodes, 1, 0);
        while (Q-- > 0) {
            st = new StringTokenizer(br.readLine());
            Node node = nodes[Integer.parseInt(st.nextToken())];
            int[] frequency = new int[26];
            st.nextToken().chars().forEach(c -> frequency[c - 'a']++);
            // Sum of shortfalls per letter: demanded minus available, floored at 0.
            System.out.println(IntStream.range(0, 26).map(i -> {
                if (frequency[i] > node.frequency[i]) return frequency[i] - node.frequency[i];
                else return 0;
            }).sum());
        }
    }
}
<file_sep>/src/main/java/RecurrenceRelationAndMatrixExponentiation.java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/**
*
* The Travelling Ant
* There is an Ant that lives in Baskerville and loves to travel. As Baskerville is a small place, it consists of only
* 5 cities placed one next to each other.
* There is a train between each successive cities ie between City 1 - City 2, City 2 - City 3, ... City 5 - City 1.
* Note that our Ant loves to travel and gets happy after making exactly N train trips and returning back to home. Ant
* lives in the city 1 from where she begins her journey. She asks you to find the number of ways she can make N train trips and come back to home.
* Since the number of ways can be huge, print that number modulo 10^9 + 7.
* Input
* First line contains T, the number of test cases.
* Then T lines follow.
* Each line contains a single integer n, representing the number of train trips the ant needs to make.
*
* Output
* For each test case, print a single line containing the answer to the problem.
* Constraints
* 1 <= T <= 1000
* 0 <= n <= 10^18
*
*/
public class RecurrenceRelationAndMatrixExponentiation {
    static long MOD = 1000000007; // 10^9 +7

    /** 4x4 matrix product, every entry reduced mod 1e9+7. */
    static long[][] multiply(long[][] left, long[][] right) {
        long[][] product = new long[4][4];
        for (int row = 0; row < 4; row++) {
            for (int col = 0; col < 4; col++) {
                long cell = 0;
                for (int k = 0; k < 4; k++) {
                    cell = (cell + left[row][k] * right[k][col]) % MOD;
                }
                product[row][col] = cell;
            }
        }
        return product;
    }

    // Transition matrix of the walk-counting recurrence on the 5-cycle,
    // with states collapsed by distance from home.
    final static long[][] a = {
            {0, 2, 0, 2},
            {1, 0, 0, 0},
            {0, 1, 1, 1},
            {0, 0, 1, 0}};

    /** Returns a^power by iterative binary exponentiation (power >= 1). */
    static long[][] matrixExponentiation(long power) {
        long[][] result = {
                {1, 0, 0, 0},
                {0, 1, 0, 0},
                {0, 0, 1, 0},
                {0, 0, 0, 1}}; // identity
        long[][] base = a;
        long p = power;
        while (p > 0) {
            if ((p & 1) == 1) {
                result = multiply(result, base);
            }
            base = multiply(base, base);
            p >>= 1;
        }
        return result;
    }

    /**
     * Number of closed walks of length n from the home city on the 5-cycle,
     * mod 1e9+7. Base cases: n=0 -> 1, n=1 -> 0, n=2 -> 2; larger n via the
     * matrix recurrence raised to the (n-2)th power.
     */
    static long calculate(long n) {
        if (n == 0) return 1;
        if (n == 1) return 0;
        if (n == 2) return 2;
        long[][] m = matrixExponentiation(n - 2);
        return (m[0][0] * 2 + m[0][2]) % MOD;
    }

    /** Reads T test cases, one trip count per line, and prints each answer. */
    public static void main(String[] arr) throws Exception {
        BufferedReader reader = new BufferedReader(
                new InputStreamReader(System.in));
        int cases = Integer.parseInt(new StringTokenizer(reader.readLine()).nextToken());
        while (cases-- > 0) {
            long trips = Long.parseLong(new StringTokenizer(reader.readLine()).nextToken());
            System.out.println(calculate(trips));
        }
    }
}
<file_sep>/src/main/java/DialMaximum.java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.stream.IntStream;
/*
Dial Maximum
Consider a dial pad of 1 to 9 digits as shown in the figure. You can not press a single digit 2 times consecutively. If you are pressing any digit, then after pressing that digit, you can only press its adjacent digit. Two digits are adjacent if they share an edge.
Cost of pressing a digit after another adjacent digit is given. Initially, you have X unit(s) of money. You need to maximize the sum of digit(s) you can press in X unit(s) of money. You can start from any digit.
Input :
The first line of input contains an integer X, denoting amount of money you have.
Each of the following 12 lines contains u, v and w, where w is the cost of changing digit from u to v or v to u.
Output :
A single integer representing the maximum sum of numbers you can press in X unit of money.
Sample Input
15
1 2 1
2 3 1
4 5 1
5 6 1
7 8 1
8 9 1
1 4 1
2 5 1
3 6 1
4 7 1
5 8 1
6 9 1
Sample Output
136
Explanation
we can follow given sequence : [9,8,9,8,9,8,9,8,9,8,9,8,9,8,9,8]
*/
public class DialMaximum {
    /** Weighted edge endpoint: neighbouring digit v reachable at the given cost. */
    static class Node {
        int v;
        int cost;

        Node(int v, int w) {
            this.v = v;
            this.cost = w;
        }
    }

    /** Undirected weighted graph over the dial digits 1..9 (vertex 0 unused). */
    static class Graph {
        int V; // no. of vertices
        LinkedList<Node> neighbours[];

        Graph(int V) {
            this.V = V;
            neighbours = new LinkedList[V];
            for (int i = 0; i < V; i++) {
                neighbours[i] = new LinkedList<>();
            }
        }

        void addEdge(int src, int dest, int weight) {
            neighbours[src].addFirst(new Node(dest, weight));
            neighbours[dest].addFirst(new Node(src, weight));
        }

        /** Simple 2-tuple used as the memoisation key (digit, money remaining). */
        public class Pair<L, R> {
            private final L first;
            private final R second;

            public Pair(L first, R second) {
                this.first = first;
                this.second = second;
            }

            public L getFirst() {
                return first;
            }

            public R getSecond() {
                return second;
            }

            @Override
            public int hashCode() {
                return first.hashCode() ^ second.hashCode();
            }

            @Override
            public boolean equals(Object o) {
                if (!(o instanceof Pair)) return false;
                Pair pairo = (Pair) o;
                return this.first.equals(pairo.getFirst()) &&
                        this.second.equals(pairo.getSecond());
            }
        }

        private Map<Pair, Integer> map = new HashMap<>();

        /**
         * Memoised recursion: best digit-sum reachable from {@code start}
         * with {@code money} units left (the start digit itself is counted).
         * NOTE(review): this solver is never invoked by main(), which uses
         * the iterative DP below instead — likely an earlier attempt.
         */
        int getMaximumSum(int start, int money) {
            Pair pair = new Pair<>(start, money);
            if (map.containsKey(pair)) {
                return map.get(pair);
            } else {
                int result = neighbours[start].stream()
                        .filter(node -> money >= node.cost)
                        .map(node -> getMaximumSum(node.v, money - node.cost))
                        .max(Integer::compareTo)
                        .orElse(0) + start;
                map.put(pair, result);
                return result;
            }
        }
    }

    /**
     * Reads the budget X and the 12 keypad edges, then runs a forward DP:
     * dp[d][j] is the best digit-sum of a press sequence currently at digit d
     * having spent j units. Each state first adds its own digit, then relaxes
     * every outgoing edge into dp[neighbour][j + cost]. Prints the best value
     * over all digits at exactly budget X.
     */
    public static void main(String[] args) throws Exception {
        final BufferedReader br = new BufferedReader(
                new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());
        int x = Integer.parseInt(st.nextToken());
        Graph graph = new Graph(10);
        for (int i = 0; i < 12; i++) {
            st = new StringTokenizer(br.readLine());
            graph.addEdge(Integer.parseInt(st.nextToken()), Integer.parseInt(st.nextToken()), Integer.parseInt(st.nextToken()));
        }
        int[][] dp = new int[10][x + 1];
        for (int j = 0; j <= x; j++) {
            for (int i = 1; i <= 9; i++) {
                dp[i][j] += i; // pressing digit i itself contributes i
                for (Node node : graph.neighbours[i]) {
                    if (j + node.cost <= x) {
                        dp[node.v][j + node.cost] = Math.max(dp[node.v][j + node.cost], dp[i][j]);
                    }
                }
            }
        }
        System.out.println(IntStream.range(1, 10).mapToObj(i -> dp[i][x]).max(Integer::compareTo).orElse(0));
    }
}
<file_sep>/src/main/java/codeforces/SerejaAndBrackets.java
package codeforces;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/*
https://codeforces.com/contest/380/problem/C
*/
/**
 * For each query [l, r] over a bracket string, prints the length of the
 * longest subsequence that is a correct bracket sequence. A segment tree
 * stores, per range, the counts of unmatched '(' and ')' after all internal
 * matching; the answer is the range length minus the unmatched counts.
 */
public class SerejaAndBrackets {
    private static Pair[] segmentArr;

    /** Builds the tree bottom-up: a leaf is a single bracket, parents merge children. */
    private static void preCompute(int l, int r, int i, String input) {
        if (l == r) {
            segmentArr[i] = new Pair(input.charAt(l));
        } else {
            int m = (l + r) / 2;
            preCompute(l, m, 2 * i, input);
            preCompute(m + 1, r, 2 * i + 1, input);
            segmentArr[i] = segmentArr[2 * i].merge(segmentArr[2 * i + 1]);
        }
    }

    /** Returns the merged Pair for the intersection of node [l, r] with query [x, y]. */
    private static Pair query(int l, int r, int i, int x, int y) {
        if (y < l || x > r) return empty;
        else if (x <= l && r <= y) return segmentArr[i];
        else {
            int mid = (l + r) >> 1;
            return query(l, mid, 2 * i, x, y).merge(query(mid + 1, r, 2 * i + 1, x, y));
        }
    }

    /** Reads the bracket string, builds the tree, then answers m 1-based range queries. */
    public static void main(String[] args) throws Exception {
        final BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());
        final String input = st.nextToken();
        int n = input.length();
        segmentArr = new Pair[4 * n];
        preCompute(0, n - 1, 1, input);
        st = new StringTokenizer(br.readLine());
        int m = Integer.parseInt(st.nextToken());
        while (m-- > 0) {
            st = new StringTokenizer(br.readLine());
            int l = Integer.parseInt(st.nextToken()), r = Integer.parseInt(st.nextToken()); // 1 based
            Pair res = query(0, n - 1, 1, l - 1, r - 1);
            System.out.println((r - l + 1) - res.o - res.c); // length - unmatched brackets
        }
    }

    // Neutral element for merge(): zero unmatched brackets on either side.
    private static Pair empty = new Pair();

    /** o = unmatched '(' count, c = unmatched ')' count, after internal matching. */
    private static class Pair {
        int o;
        int c;

        Pair() {
        }

        Pair(int o, int c) {
            this.o = o;
            this.c = c;
        }

        Pair(char bracket) {
            switch (bracket) {
                case '(':
                    o = 1;
                    break;
                case ')':
                    c = 1;
                    break;
            }
        }

        /**
         * Concatenation: min(this.o, that.c) of the left side's opens pair up
         * with the right side's closes; the rest stay unmatched.
         */
        Pair merge(Pair that) {
            int t = Math.min(this.o, that.c);
            return new Pair(this.o + that.o - t, this.c + that.c - t);
        }

        @Override
        public String toString() {
            return "Pair{" +
                    "o=" + o +
                    ", c=" + c +
                    '}';
        }
    }
}
<file_sep>/README.md
# ScalaTestProject
Purely Functional Competitive programming in Scala.
Most programs contain a description (or link) of the underlying problem solved and adhere to the same strict run-time limit from Codeforces.
Going purely functional takes a speed hit because immutable data requires frequent allocation of new copies. In most of the programs here I have tried to avoid mutation, but you'll still find me using a mutable map or set somewhere, limited to a narrow scope.
|
f970d09a1246b1129fcca20e12b9e6acc9116a50
|
[
"Markdown",
"Java"
] | 15
|
Java
|
gagandeepkalra/ScalaTestProject
|
3cba4664e5f00456d6431d44ed0da5d994438725
|
be785272d7b21e740b497a1c6cc6ed9bc520816b
|
refs/heads/main
|
<file_sep>// lista cognomi
// Create an array containing last names in random order.
// Ask the user for their last name and insert it into the list.
// Print the list in alphabetical order and tell the user the position of
// their last name (after sorting).

// Array of last names in arbitrary order
var lastNameList = ['Moussa', 'Esposito', 'Russo', 'Abate', 'Torchia', 'Barone', 'Sardo', 'Conte'];

// Array that will hold the sorted list of last names
var sortedLastNameList = [];

// Keep asking for the user's last name until the input is valid
// (it must be a string starting with a letter)
var userLastName;
do {
    // Ask the user for their last name
    userLastName = prompt('Inserisci il tuo cognome');

    // Alert when the input is a number
    // NOTE(review): isNaN('') is false, so an empty input is accepted here.
    if(!isNaN(userLastName)) {
        alert('Input inserito non valido');
    }
} while(!isNaN(userLastName))

// Capitalise the first letter of the last name
userLastName = userLastName.charAt(0).toUpperCase() + userLastName.slice(1);

// Add the user's last name to the array
lastNameList.push(userLastName);

// WITHOUT JS BUILT-IN FUNCTIONS //

// How many times the user's last name appears in the array
// (counted after the push, so it includes the name just added)
var equalLastNames = 0;
for (var i = 0; i < lastNameList.length; i++) {
    if(userLastName == lastNameList[i]) {
        equalLastNames++;
    }
}
console.log('equalLastNames ' + equalLastNames);

var userLastNamePosition;
// Selection sort: on each pass, pick the smallest name not yet placed.
while(lastNameList.length > sortedLastNameList.length) {
    // '{' sorts after every ASCII letter, so it acts as an "infinity" sentinel
    var currentString = '{';

    // Scan the list for the smallest candidate not already in the sorted array
    for (var i = 0; i < lastNameList.length; i++) {
        if (lastNameList[i] == userLastName){
            // Duplicates of the user's name: allow re-insertion while copies remain
            if(lastNameList[i] < currentString && equalLastNames > 0) {
                currentString = lastNameList[i];
            }
        } else {
            // Other names: take it only if smaller and not already placed
            if(lastNameList[i] < currentString && !sortedLastNameList.includes(lastNameList[i])) {
                currentString = lastNameList[i];
            }
        }
    }
    console.log(currentString);
    sortedLastNameList.push(currentString);

    // When the user's name is placed, remember its position
    // (for duplicates this ends up being the LAST occurrence)
    if(currentString == userLastName) {
        userLastNamePosition = sortedLastNameList.length - 1;
        equalLastNames--;
    }
}

// Print the sorted array
console.log(sortedLastNameList);
// Print the position of the user's last name in the sorted array
console.log('Il cognome dell\'utente é nella posizione: ' + userLastNamePosition);
|
ad3a20b00eb5071a4d0ca52978e989f7b7b36ee4
|
[
"JavaScript"
] | 1
|
JavaScript
|
DaoudMoussa/js-lista-cognomi
|
6ca1d5af4b061e433d7f31026b26eea9955807d8
|
e0afac942af0c9eadcaafe7484449f001f631e4e
|
refs/heads/master
|
<file_sep>$(document).ready(function() {
$('.load-animation').fadeIn(1500);
});<file_sep><!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport"
content="width=device-width, initial-scale=1.0" />
<link rel="shortcut icon" href="./assets/img/White-square-logo_american-bar.webp" type="image/x-icon">
<link rel="stylesheet"
href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css"
integrity="<KEY>"
crossorigin="anonymous" />
<link rel="stylesheet"
href="./assets/main.css" />
<title>Union Lodge No1</title>
</head>
<body>
<!-- PAGE WRAPPER -->
<main>
<div class="container">
<div class="parallax-1">
<h1 class="load-animation">Welcome Home</h1>
<!-- <h2>Located in Downtown Austin</h2> -->
</div>
<div class="wrapper">
<nav class="navbar navbar-default">
<div class="container-fluid">
<!-- Brand and toggle get grouped for better mobile display -->
<div class="navbar-header">
<button type="button"
class="navbar-toggle collapsed"
data-toggle="collapse"
data-target="#bs-example-navbar-collapse-1"
aria-expanded="false">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand"
href="index.html"><img src="assets/img/White-square-logo_american-bar.webp"
alt="Company logo"></a>
</div>
<!-- Collect the nav links, forms, and other content for toggling -->
<div class="collapse navbar-collapse"
id="bs-example-navbar-collapse-1">
<ul class="nav navbar-nav navbar-right">
    <li><a href="index.html">Home</a></li>
    <!-- <li><a href="#">Shop</a></li> -->
    <li><a href="#">Menu</a></li>
    <li><a href="#">Contact</a></li>
</ul>
</div><!-- /.navbar-collapse -->
</div><!-- /.container-fluid -->
</nav>
</div> <!-- End content wrapper -->
<div class="parallax-2">
<div class="wrapper">
<h2>Open daily from <br /> 5PM - 2AM</h2>
<img src="./assets/img/decoration.jpg" alt="decoration">
<p>We are a Austin based specialty cocktail bar that stands
as a tribute to the true American bars of the late
19th century. Step through our doors to
enjoy a libation of the highest caliber, and be
transported to a time before Prohibition, where
the creativity of barmen flourished. Our expert
bartenders apply their passion and years of
experience toward creating specialty cocktails,
reminiscent of the masterpieces of the
pre-Prohibition era.</p>
</div>
<footer>
<hr>
<div class="footer-content">
<p>© Union Lodge No1</p>
<p>130 E. 6th St.
Austin TX, 78701
</p>
</div>
</footer>
</div> <!-- End Parallax 2 -->
</div> <!-- End main container -->
</main>
<script src="https://code.jquery.com/jquery-3.4.1.min.js"
integrity="<KEY>
crossorigin="anonymous"></script>
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js"
integrity="<KEY>"
crossorigin="anonymous"></script>
<script src="/assets/scripts.js"></script>
</body>
</html>
|
092bab4efa73fce1e4573270be3d2f9bb0dddaae
|
[
"JavaScript",
"HTML"
] | 2
|
JavaScript
|
codentacos/barStaticSite
|
46166ab958c262e428c08512457727dfe49c38fe
|
86340b58b3efed77f876e69e48600cae6c1f4b35
|
refs/heads/master
|
<file_sep># long25vn.github.io
- [Quốc ca](https://long25vn.github.io)
- [Love Letter](https://long25vn.github.io/loveletterfinal/letter.html)
- [Calculator](https://long25vn.github.io/calculator/calculator.html)
- [Form](https://long25vn.github.io/validateform/form.html)
<file_sep>
mangrong=[0,0,0,0,0];
mangrong2=[0,0,0,0];
var tempso1;
var tempso2="a";
var tempso3="a";
var tempmath;
var tempketqua="";
var ham = "";
function press (str)
{
x=[];
ham = ham + str;
$('#display').text(ham);
}
function pressmath(str)
{
if (str=="*") {$('#display').text("x");}
else {$('#display').text(str);}
if (ham != "") {mangrong.unshift(ham.valueOf());}
mangrong.unshift(str);
ham = [];
tempmath=mangrong[2].toString();
if (tempmath=="+")
{ tempketqua = Number(mangrong[3]) + Number(mangrong[1]);
$('#display').text(tempketqua.toString().slice(0,8));
mangrong.unshift(tempketqua);
tempso1=mangrong[1];
mangrong[1]=mangrong[0];
mangrong[0]=tempso1;
}
else if (tempmath=="-")
{ tempketqua = Number(mangrong[3]) - Number(mangrong[1]);
$('#display').text(tempketqua.toString().slice(0,8));
mangrong.unshift(tempketqua);
tempso1=mangrong[1];
mangrong[1]=mangrong[0];
mangrong[0]=tempso1;
}
else if (tempmath=="*")
{ tempketqua = Number(mangrong[3]) * Number(mangrong[1]);
$('#display').text(tempketqua.toString().slice(0,8));
mangrong.unshift(tempketqua);
tempso1=mangrong[1];
mangrong[1]=mangrong[0];
mangrong[0]=tempso1;
}
else if (tempmath=="÷")
{ tempketqua = Number(mangrong[3]) / Number(mangrong[1]);
$('#display').text(tempketqua.toString().slice(0,8));
mangrong.unshift(tempketqua);
tempso1=mangrong[1];
mangrong[1]=mangrong[0];
mangrong[0]=tempso1;
}
}
// "=" key handler: pushes the pending operand and the key itself onto the
// history stack, evaluates the operator found two slots back, and shows the
// truncated result. The four original operator branches were identical except
// for the arithmetic, so they are collapsed into a lookup table.
function pressketqua(str)
{
    if (ham != "") {mangrong.unshift(ham.valueOf());}
    mangrong.unshift(str.valueOf());
    ham = "";
    tempmath=mangrong[2].toString();
    var ops = {
        "+": function (a, b) { return a + b; },
        "-": function (a, b) { return a - b; },
        "*": function (a, b) { return a * b; },
        "÷": function (a, b) { return a / b; }
    };
    if (ops.hasOwnProperty(tempmath)) {
        tempketqua = ops[tempmath](Number(mangrong[3]), Number(mangrong[1]));
        $('#display').text(tempketqua.toString().slice(0,8));
        mangrong.unshift(tempketqua);
    }
}
// Debug helper: pops up the history stack plus a 3-char prefix of the last result.
function presstest()
{
    var summary = mangrong + " " + tempketqua.toString().slice(0, 3);
    alert(summary);
}
// Clear key: resets the pending entry, the history stack, and the display.
function pressc()
{
    $('#display').text("");
    mangrong = [0, 0, 0, 0, 0];
    ham = "";
}
// Squares the pending entry (or the last stack value when nothing is pending).
// Fix: the sibling unary ops (presscan, pressphantram) reset `ham` after
// consuming it, but this function did not, so the typed digits were processed
// a second time by the next key press. The reset is added for consistency.
function pressbinhphuong()
{
    if (ham=="")
    {
        tempso3 = Math.pow(mangrong[0],2);
        mangrong.unshift(tempso3);
        $('#display').text(tempso3.toString().slice(0,8));
    }
    else
    {
        mangrong.unshift(ham.valueOf());
        tempso3 = Math.pow(mangrong[0],2);
        mangrong.unshift(tempso3);
        $('#display').text(tempso3.toString().slice(0,8));
        ham = "";  // consume the pending entry, as presscan/pressphantram do
    }
}
// Square-root key: if digits are pending, push them first and clear the
// entry, then take the square root of the top of the stack and display it.
function presscan()
{
    if (ham != "") {
        mangrong.unshift(ham.valueOf());
        ham = "";
    }
    tempso3 = Math.sqrt(mangrong[0]);
    mangrong.unshift(tempso3);
    $('#display').text(tempso3.toString().slice(0,8));
}
// Percent key: if digits are pending, push them first and clear the entry,
// then divide the top of the stack by 100 and display the result.
function pressphantram()
{
    if (ham != "") {
        mangrong.unshift(ham.valueOf());
        ham = "";
    }
    tempso3 = (mangrong[0] / 100);
    mangrong.unshift(tempso3);
    $('#display').text(tempso3.toString().slice(0,8));
}
|
461fd89c1dbef3e1a48123bda9b754d19b5db033
|
[
"Markdown",
"JavaScript"
] | 2
|
Markdown
|
long25vn/long25vn.github.io
|
29fe54abf04003548ad5ebfa7cdd5b0bb9277181
|
6bb4a7afe015bc6d5cbe235a68b1a7bda2490b0d
|
refs/heads/main
|
<repo_name>akashdeepconnect/ent-sls-adj-common-util<file_sep>/Tests/test_company_rep/test_lambda_modify_insert.py
from datetime import datetime
from boto3.dynamodb.conditions import Key
import logging
from mock import patch
import mock
import pandas as pd
import os
os.environ['TABLE_NAME']='test'
from ent_sls_adj_common_util.functions.company_rep import checks
from ent_sls_adj_common_util.functions.company_rep import cross_account_session
# Create a logger and set the logging level
logger=logging.getLogger(__name__)
logger.setLevel(logging.INFO)
def ret_d(dynamodb):
    """Pass-through helper: hands the given fixture object back unchanged."""
    result = dynamodb
    return result
def get_emp_history_data(table, agncy_cd, logger):
    """Return all items under partition key ``agcy#<agncy_cd>``.

    Returns None (after logging) when the query raises.
    """
    try:
        result = table.query(
            KeyConditionExpression=Key('pk').eq("agcy#{}".format(agncy_cd))
        )
        return result['Items']
    except Exception as ex:
        logger.error(f"Exception occurred in get_emp_history_data{ex} ")
@patch('cross_account_session.get_cross_account_session',mock.MagicMock(return_value=None))
@patch('checks.execute_query_from_athena',mock.MagicMock(return_value = pd.read_csv('Tests/test_company_rep/athena_data/data.csv', dtype=str)))
@patch('checks.get_today_loc_dt',mock.MagicMock(return_value=datetime(2021, 5, 18)))
@patch('checks.employee_already_for_emptyp_marketid',mock.MagicMock(return_value=False))
def test_lambda_handler_modify(table):
    """MODIFY stream event: handler output for agency 6502 must match the
    expected snapshot. Athena, the session and 'today' are all mocked."""
    from .input_map import input_dict
    from .result_map import test_lambda_handler_modify_result
    from ent_sls_adj_common_util.functions.company_rep.lambda_function import lambda_handler
    # Redirect the module-level table handle to the moto-backed fixture table.
    checks.co_rep_data_table = table
    ret = lambda_handler(input_dict,None)
    items = get_emp_history_data(checks.co_rep_data_table,'6502',logger)
    logger.info(items)
    assert items == test_lambda_handler_modify_result
@patch('cross_account_session.get_cross_account_session',mock.MagicMock(return_value=None))
@patch('checks.execute_query_from_athena',mock.MagicMock(return_value = pd.read_csv('Tests/test_company_rep/athena_data/data.csv', dtype=str)))
@patch('checks.get_today_loc_dt',mock.MagicMock(return_value=datetime(2021, 5, 18)))
@patch('checks.employee_already_for_emptyp_marketid',mock.MagicMock(return_value=False))
def test_lambda_handler_insert(table):
    """INSERT stream event: a new agency must produce the expected rep rows."""
    from .input_map import input_dict_insert
    from ent_sls_adj_common_util.functions.company_rep.lambda_function import lambda_handler
    from .result_map import test_lambda_handler_insert_result
    # Redirect the module-level table handle to the moto-backed fixture table.
    checks.co_rep_data_table = table
    ret = lambda_handler(input_dict_insert,None)
    items = get_emp_history_data(table,'6502',logger)
    logger.info(items)
    assert items == test_lambda_handler_insert_result
@patch('cross_account_session.get_cross_account_session',mock.MagicMock(return_value=None))
@patch('checks.execute_query_from_athena',mock.MagicMock(return_value = pd.read_csv('Tests/test_company_rep/athena_data/data.csv', dtype=str)))
@patch('checks.get_today_loc_dt',mock.MagicMock(return_value=datetime(2021, 5, 18)))
@patch('checks.employee_already_for_emptyp_marketid',mock.MagicMock(return_value=False))
def test_lambda_handler_emp_lastname_less_3_char(table):
    """Rep-code generation edge case: employee last name shorter than 3 chars."""
    from .input_map import input_dict_insert_emp_lastname_less_3_char
    from ent_sls_adj_common_util.functions.company_rep.lambda_function import lambda_handler
    from .result_map import test_lambda_handler_emp_lastname_less_3_char_result
    # Redirect the module-level table handle to the moto-backed fixture table.
    checks.co_rep_data_table = table
    ret = lambda_handler(input_dict_insert_emp_lastname_less_3_char,None)
    items = get_emp_history_data(table,'6502',logger)
    logger.info(items)
    assert items == test_lambda_handler_emp_lastname_less_3_char_result
@patch('cross_account_session.get_cross_account_session',mock.MagicMock(return_value=None))
@patch('checks.execute_query_from_athena',mock.MagicMock(return_value = pd.read_csv('Tests/test_company_rep/athena_data/empty_data.csv', dtype=str)))
@patch('checks.get_today_loc_dt',mock.MagicMock(return_value=datetime(2021, 5, 18)))
@patch('checks.employee_already_for_emptyp_marketid',mock.MagicMock(return_value=False))
def test_lambda_handler_emp_not_present(table):
    """When Athena returns no employee rows, no items must be written."""
    from .input_map import input_dict_insert_emp_lastname_less_3_char
    from ent_sls_adj_common_util.functions.company_rep.lambda_function import lambda_handler
    from .result_map import test_lambda_handler_emp_lastname_less_3_char_result
    # Redirect the module-level table handle to the moto-backed fixture table.
    checks.co_rep_data_table = table
    ret = lambda_handler(input_dict_insert_emp_lastname_less_3_char,None)
    items = get_emp_history_data(table,'6502',logger)
    logger.info(items)
    assert items == []
asn1crypto==1.4.0
astroid==2.5.6
attrs==20.3.0
aws_cdk.aws_lambda_event_sources
aws-cdk.assets==1.101.0
aws-cdk.aws-apigateway==1.101.0
aws-cdk.aws-applicationautoscaling==1.101.0
aws-cdk.aws-autoscaling==1.101.0
aws-cdk.aws-autoscaling-common==1.101.0
aws-cdk.aws-autoscaling-hooktargets==1.101.0
aws-cdk.aws-batch==1.101.0
aws-cdk.aws-certificatemanager==1.101.0
aws-cdk.aws-cloudformation==1.101.0
aws-cdk.aws-cloudfront==1.101.0
aws-cdk.aws-cloudwatch==1.101.0
aws-cdk.aws-codebuild==1.101.0
aws-cdk.aws-codecommit==1.101.0
aws-cdk.aws-codedeploy==1.101.0
aws-cdk.aws-codeguruprofiler==1.101.0
aws-cdk.aws-codepipeline==1.101.0
aws-cdk.aws-codepipeline-actions==1.101.0
aws-cdk.aws-cognito==1.101.0
aws-cdk.aws-dynamodb==1.101.0
aws-cdk.aws-ec2==1.101.0
aws-cdk.aws-ecr==1.101.0
aws-cdk.aws-ecr-assets==1.101.0
aws-cdk.aws-ecs==1.101.0
aws-cdk.aws-efs==1.101.0
aws-cdk.aws-eks==1.101.0
aws-cdk.aws-elasticloadbalancing==1.101.0
aws-cdk.aws-elasticloadbalancingv2==1.101.0
aws-cdk.aws-events==1.101.0
aws-cdk.aws-events-targets==1.101.0
aws-cdk.aws-globalaccelerator==1.101.0
aws-cdk.aws-iam==1.101.0
aws-cdk.aws-kinesis==1.101.0
aws-cdk.aws-kinesisfirehose==1.101.0
aws-cdk.aws-kms==1.101.0
aws-cdk.aws-lambda==1.101.0
aws-cdk.aws-logs==1.101.0
aws-cdk.aws-route53==1.101.0
aws-cdk.aws-route53-targets==1.101.0
aws-cdk.aws-s3==1.101.0
aws-cdk.aws-s3-assets==1.101.0
aws-cdk.aws-sam==1.101.0
aws-cdk.aws-secretsmanager==1.101.0
aws-cdk.aws-servicediscovery==1.101.0
aws-cdk.aws-signer==1.101.0
aws-cdk.aws-sns==1.101.0
aws-cdk.aws-sns-subscriptions==1.101.0
aws-cdk.aws-sqs==1.101.0
aws-cdk.aws-ssm==1.101.0
aws-cdk.aws-stepfunctions==1.101.0
aws-cdk.aws-stepfunctions-tasks==1.101.0
aws-cdk.cloud-assembly-schema==1.101.0
aws-cdk.core==1.101.0
aws-cdk.custom-resources==1.101.0
aws-cdk.cx-api==1.101.0
aws-cdk.lambda-layer-awscli==1.101.0
aws-cdk.lambda-layer-kubectl==1.101.0
aws-cdk.pipelines==1.101.0
aws-cdk.region-info==1.101.0
awswrangler==2.7.0
beautifulsoup4==4.9.3
boto3==1.17.61
botocore==1.20.61
cattrs==1.5.0
certifi==2020.12.5
chardet==4.0.0
constructs==3.3.75
et-xmlfile==1.1.0
idna==2.10
importlib-metadata==4.0.1
iniconfig==1.1.1
isort==5.8.0
jmespath==0.10.0
jsii==1.29.0
lazy-object-proxy==1.6.0
lxml==4.6.3
mccabe==0.6.1
numpy==1.20.2
openpyxl==3.0.7
packaging==20.9
pg8000==1.18.0
pluggy==0.13.1
publication==0.0.3
py==1.10.0
pyarrow==3.0.0
pylint==2.8.2
PyMySQL==1.0.2
pyparsing==2.4.7
pytest==6.2.3
python-dateutil==2.8.1
pytz==2021.1
redshift-connector==2.0.878
requests==2.25.1
s3transfer==0.4.2
scramp==1.2.2
six==1.15.0
soupsieve==2.2.1
toml==0.10.2
typed-ast==1.4.3
typing-extensions==3.7.4.3
urllib3==1.26.4
wrapt==1.12.1
zipp==3.4.1<file_sep>/Tests/test_company_rep/conftest.py
import os
import sys
print(sys.path)
from .input_map import data_init_dict
import pytest
import boto3
from moto import mock_dynamodb2
@pytest.fixture(scope='function')
def aws_credentials():
    """Mocked AWS Credentials for moto (keeps real credentials out of tests)."""
    for var in ('AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY',
                'AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN'):
        os.environ[var] = 'testing'
@pytest.fixture(scope='function')
def dynamodb(aws_credentials):
    # Function-scoped, moto-mocked DynamoDB resource. Depends on aws_credentials
    # so moto never sees real AWS keys.
    with mock_dynamodb2():
        yield boto3.resource('dynamodb', region_name='us-east-1')
@pytest.fixture(scope='function')
def table(dynamodb):
    # Creates the mocked co-rep table with the pk/sk schema used in production
    # and pre-loads it with the seed rows from input_map.data_init_dict.
    dynamodb.create_table(
        TableName="test",
        KeySchema=[
            {
                'AttributeName': 'pk',
                'KeyType': 'HASH'
            },
            {
                'AttributeName': 'sk',
                'KeyType': 'RANGE'
            },
        ],
        AttributeDefinitions=[
            {
                'AttributeName': 'pk',
                'AttributeType': 'S'
            },
            {
                'AttributeName': 'sk',
                'AttributeType': 'S'
            },
        ],
        BillingMode='PAY_PER_REQUEST'
    )
    co_rep_data_table = dynamodb.Table('test')
    # Seed data so each test starts from a known table state.
    for item in data_init_dict:
        co_rep_data_table.put_item(Item=item)
    return co_rep_data_table
<file_sep>/ent_sls_adj_common_util/functions/company_rep/config.py
import os

# Role ARN assumed for cross-account Athena access (TARGET_ROLE_ARN env var).
ARN = os.getenv('TARGET_ROLE_ARN')
# Name of the company-rep DynamoDB table (TABLE_NAME env var); checks.py binds
# a Table handle to this at import time.
TABLE_NAME = os.getenv('TABLE_NAME')
<file_sep>/ent_sls_adj_common_util/infra/pipeline_stack.py
from aws_cdk import core
from aws_cdk import aws_codepipeline as codepipeline
from aws_cdk import aws_codepipeline_actions as cpactions
from aws_cdk import pipelines
from .configuration import RawConfig
from .lambda_service_stage import CorepcodeService
class PipelineStack(core.Stack):
    """CDK pipeline stack: GitHub source -> synth -> L2 (dev) -> manual gate -> L3.

    The production (L4) stage is currently disabled (see the quoted block below).
    """
    def __init__(self, scope: core.Construct, id: str, raw_config: RawConfig, **kwargs):
        super().__init__(scope, id, **kwargs)
        self._raw_config = raw_config
        source_artifact = codepipeline.Artifact()
        cloud_assembly_artifact = codepipeline.Artifact()
        # Source: GitHub via OAuth token in Secrets Manager, polled for changes.
        # Synth: install deps, run the unit tests, then `cdk synth`.
        pipeline = pipelines.CdkPipeline(self, 'Pipeline',
            cloud_assembly_artifact=cloud_assembly_artifact,
            pipeline_name='CorepcodePipeline',
            source_action=cpactions.GitHubSourceAction(
                #connection_arn=self._raw_config.application['connection_arn'],
                action_name='GitHub',
                output=source_artifact,
                oauth_token=core.SecretValue.secrets_manager('github-token'),
                owner=self._raw_config.application['owner'],
                repo=self._raw_config.application['repo_name'],
                branch=self._raw_config.application['branch'],
                trigger=cpactions.GitHubTrigger.POLL ),
            synth_action=pipelines.SimpleSynthAction(
                source_artifact=source_artifact,
                cloud_assembly_artifact=cloud_assembly_artifact,
                install_command='npm install -g aws-cdk && pip install -r requirements.txt',
                build_command='pytest Tests',
                synth_command='cdk synth'))
        dev_env = core.Environment(**self._raw_config.development.env)
        # print(type(self._raw_config.development), len(self._raw_config.development), self._raw_config.development)
        print(type(self._raw_config.development))
        # NOTE(review): L3 reuses dev_env below — confirm whether staging should
        # have its own core.Environment built from raw_config.staging.env.
        l2_app = CorepcodeService(self, 'L2', env=dev_env, raw_config=self._raw_config.development)
        l2_stage = pipeline.add_application_stage(l2_app)
        l2_stage.add_manual_approval_action(
            action_name='Promote_To_L3'
        )
        l3_app = CorepcodeService(self, 'L3', env=dev_env,raw_config=self._raw_config.staging)
        l3_stage = pipeline.add_application_stage(l3_app)
        '''l3_stage.add_manual_approval_action(
            action_name='Promote_To_L4'
        )
        prod_env = core.Environment(**self._raw_config.production.env)
        prod_app = AgupService(self, 'L4', env=prod_env, raw_config=self._raw_config.production)
        l4_stage = pipeline.add_application_stage(prod_app)'''
        # pre_prod_stage.add_actions(pipelines.ShellScriptAction(
        #     action_name='Integ',
        #     run_order=pre_prod_stage.next_sequential_run_order(),
        #     additional_artifacts=[source_artifact],
        #     commands=[
        #         'pip install -r requirements.txt',
        #         'pytest integtests',
        #     ],
        #     use_outputs={
        #         'SERVICE_URL': pipeline.stack_output(pre_prod_app.url_output)
        #     }))
        # pipeline.add_application_stage(WebServiceStage(self, 'Prod', env={
        #     'account': APP_ACCOUNT,
        #     'region': 'eu-central-1',
        # }))
<file_sep>/ent_sls_adj_common_util/functions/company_rep/checks.py
import sys
import os
path = os.path.abspath(__file__)
dir_path = os.path.dirname(path)
sys.path.insert(0,dir_path)
import awswrangler.exceptions as exceptions
from botocore.exceptions import ClientError
import awswrangler as wr
from boto3.dynamodb.conditions import Key, Attr
import boto3
from datetime import datetime
import traceback
from datetime import timedelta
from pytz import timezone
import time
from config import(
TABLE_NAME
)
def get_today_loc_dt():
    """Return the current timezone-aware datetime in US/Eastern."""
    return datetime.now(tz=timezone('US/Eastern'))
# Module-level handles created at import time; the unit tests monkey-patch
# checks.co_rep_data_table, so keep this assignment at module scope.
dynamodb = boto3.resource('dynamodb')
co_rep_data_table = dynamodb.Table(TABLE_NAME)
def execute_query_from_athena(logger, query_str, cross_account_session, database_name):
    """Run an Athena SQL query, retrying up to three times on failure.

    QueryFailed/QueryCancelled trigger a 10s back-off and retry; a ClientError
    (typically an invalid query) aborts immediately. Returns the result
    DataFrame, or None when every attempt failed.
    """
    df = None
    attempt = 0
    while attempt < 3:
        try:
            df = wr.athena.read_sql_query(sql=query_str, database=database_name,
                                          ctas_approach=False, boto3_session=cross_account_session)
            break  # success — stop retrying
        except exceptions.QueryFailed:
            logger.error('Attempt {} of Athena Query execution has failed. '.format(attempt + 1))
            time.sleep(10)
        except exceptions.QueryCancelled:
            logger.error('Attempt {} of Athena Query execution has been Cancelled. '.format(attempt + 1))
            time.sleep(10)
        except ClientError:
            logger.error('Client Side Error. Most common cause: Invalid Query')
            break
        attempt += 1
    return df
def valid_employee(emp_num ,cross_accnt_session,logger):
    """Look up emp_num in the data-lake employee table (latest batch only).

    Returns {'emp_no': ..., 'op_id': ...} when found, False when not found,
    or None if the lookup raised (the exception is logged, not re-raised).
    """
    try:
        dl_data = None
        # emp_num is interpolated into the SQL directly; callers pass
        # str(int(...)) so it is numeric — confirm before widening its source.
        query = ''' select * from uatrun_processed_sabir.employee
        where batch = (select max(batch) from uatrun_processed_sabir.employee) and employeeno={}; '''.format(emp_num)
        dl_data = execute_query_from_athena(logger,query_str = query,
                                                      cross_account_session=
                                                      cross_accnt_session, database_name="default")
        dl_data = dl_data.fillna("")
        if len(dl_data)>0:
            return {'emp_no':dl_data['employeeno'].iloc[0],'op_id':dl_data['operatorid'].iloc[0]}
        else:
            return False
    except Exception as ex:
        log_message = f"Exception occurred in valid_employee {ex} \n {traceback.format_exc()}"
        logger.error(log_message)
def get_rep_cd_list(table, lastname_3_char, logger):
    """Scan for all co_rep_cd values starting with the given prefix.

    Follows DynamoDB scan pagination to completion. Returns a list of
    {'co_rep_cd': ...} dicts, or None when the scan raised.
    """
    try:
        matches = []
        scan_kwargs = {
            'FilterExpression': Attr('co_rep_cd').begins_with(lastname_3_char),
            'ProjectionExpression': "co_rep_cd",
        }
        last_key = None
        while True:
            if last_key:
                scan_kwargs['ExclusiveStartKey'] = last_key
            page = table.scan(**scan_kwargs)
            items = page.get('Items', [])
            if len(items) > 0:
                matches.extend(items)
            last_key = page.get('LastEvaluatedKey', None)
            if last_key is None:
                break
        return matches
    except Exception as ex:
        log_message = f"Exception occurred in get_rep_cd_list {ex} \n {traceback.format_exc()}"
        logger.error(log_message)
def get_com_rep_code(table,emp_type,emp_num,logger):
    """Find existing co_rep_cd rows for this employee number and type.

    Returns a list of {'co_rep_cd', 'pk', 'sk'} dicts, or None on error.
    """
    try:
        rep_data =[]
        scan_kwargs = {
            'FilterExpression': Attr('sk').begins_with('emptyp#{}'.format(emp_type)) & Attr('emp_id').eq(emp_num),
            'ProjectionExpression': "co_rep_cd,pk,sk",
        }
        done = False
        start_key = None
        while not done:
            if start_key:
                scan_kwargs['ExclusiveStartKey'] = start_key
            response = table.scan(**scan_kwargs)
            item = response.get('Items', [])
            if len(item)>0:
                rep_data.extend(item)
                # NOTE(review): pagination stops at the first non-empty page, so
                # later pages are never fetched — confirm this is intentional
                # (callers only use the first match).
                break
            start_key = response.get('LastEvaluatedKey', None)
            done = start_key is None
        return rep_data
    except Exception as ex:
        log_message = f"Exception occurred in get_com_rep_code {ex} \n {traceback.format_exc()}"
        logger.error(log_message)
def get_emp_type_from_table(table, role_cd, logger):
    """Query emp_type_cd rows for partition key ``role#<role_cd>``.

    Returns the item list, or None when the query raised.
    """
    try:
        result = table.query(
            ProjectionExpression="emp_type_cd",
            KeyConditionExpression=Key('pk').eq("role#{}".format(role_cd))
        )
        rows = result['Items']
        logger.info(f"role_code:{role_cd},{rows}")
        return rows
    except Exception as ex:
        log_message = f"Exception occurred in get_emp_type_from_table {ex} \n {traceback.format_exc()}"
        logger.error(log_message)
def get_emp_type(table, role_code, logger):
    """Resolve a role code to its employee-type code (first matching row).

    Raises IndexError/TypeError when the role lookup returns no rows, matching
    the original behaviour.
    """
    rows = get_emp_type_from_table(table, role_code, logger)
    return rows[0]['emp_type_cd']
def is_rows_present(table,agncy_cd,emp_type,market_id,logger):
    """Return True when the (agency, employee-type, market) row already exists.

    Queries pk=``agcy#<agncy_cd>`` / sk=``emptyp#<emp_type>#markt#<market_id>``.
    Returns None (falsy) when the query raises, matching the original
    error-swallowing behaviour.
    """
    try:
        response = table.query(
            KeyConditionExpression=Key('pk').eq("agcy#{}".format(agncy_cd)) & Key('sk').eq('emptyp#{}#markt#{}'.format(emp_type,market_id))
        )
        # A non-empty result means the row exists. (A dead, commented-out
        # scan-based implementation that previously lived here was removed.)
        return len(response['Items']) > 0
    except Exception as ex:
        log_message = f"Exception occurred in is_rows_present {ex} \n {traceback.format_exc()}"
        logger.error(log_message)
def modify_agncy_emp_typ(table,agncy_cd,emp_type,market_id,deleted_flag,co_rep_cd,emp_num,logger):
    """Update an existing (agency, employee-type, market) row in place.

    Sets the rep code, employee id, GSI keys, deleted flag and refreshed
    effective/updated timestamps. Returns the DynamoDB update response, or
    None if the update raised (the exception is logged, not re-raised).
    """
    try:
        # Build the attribute map once. The original duplicated these literals
        # for logging and for the update call, which also called
        # get_today_loc_dt() twice more — logged and stored timestamps could
        # differ. Now the logged values are exactly what is written.
        exp_attr={
            ':co_rep_cd':co_rep_cd,
            ':eff_dt':get_today_loc_dt().strftime('%Y-%m-%d'),
            ':updt_dttm' :get_today_loc_dt().strftime('%Y-%m-%d %H:%M:%S'),
            ':emp_id':emp_num,
            ':gs1pk':'empno#{}'.format(emp_num),
            ':gs2pk':'rep#{}'.format(co_rep_cd),
            ':deleted_flag' : deleted_flag ,
        }
        logger.info(f"expression attributes : {exp_attr}")
        response = table.update_item(
            Key={
                'pk': 'agcy#{}'.format(agncy_cd),
                'sk': 'emptyp#{}#markt#{}'.format(emp_type,market_id)
            },
            UpdateExpression="set co_rep_cd=:co_rep_cd,eff_dt=:eff_dt,updt_dttm=:updt_dttm,emp_id=:emp_id,gs1pk=:gs1pk,gs2pk=:gs2pk,deleted_flag=:deleted_flag",
            ExpressionAttributeValues=exp_attr,
            ReturnValues="UPDATED_NEW"
        )
        return response
    except Exception as ex:
        log_message = f"Exception occurred in modify_agncy_emp_typ {ex} \n {traceback.format_exc()}"
        logger.error(log_message)
def delete_market_0(table,agncy_cd,emp_type,logger):
    """Delete the placeholder row with market id '0' for this agency/emp-type.

    Returns the delete response on success; conditional-check failures are
    logged and swallowed, any other ClientError is re-raised.
    """
    try:
        response = table.delete_item(
            Key={
                'pk': 'agcy#{}'.format(agncy_cd),
                'sk': 'emptyp#{}#markt#{}'.format(emp_type,'0')
            }
        )
        logger.info (f'deleted for market_id = 0 , agncy_cd : {agncy_cd}, emp_type : {emp_type}' )
    except ClientError as e:
        # Only swallow conditional-check failures; everything else propagates.
        if e.response['Error']['Code'] == "ConditionalCheckFailedException":
            logger.error(e.response['Error']['Message'])
        else:
            raise
    else:
        # Only reached when delete_item succeeded.
        return response
def employee_already_for_emptyp_marketid(table,agncy_cd,emp_type,market_id,emp_num,logger):
    """Return True if the employee is already mapped to this type/agency/market.

    Queries the gs1pk-gs1sk-index GSI on ``empno#<emp_num>`` /
    ``emptyp#<emp_type>#agncy#<agncy_cd>#markt#<market_id>``. Returns None
    (falsy) when the query raises.
    """
    try:
        response = table.query(
            IndexName='gs1pk-gs1sk-index',
            KeyConditionExpression=Key('gs1pk').eq("empno#{}".format(emp_num)) & Key('gs1sk').eq('emptyp#{}#agncy#{}#markt#{}'.format(emp_type,agncy_cd,market_id))
        )
        return len(response['Items']) > 0
    except Exception as ex:
        # Fixed: the copy-pasted log text previously named is_rows_present.
        log_message = f"Exception occurred in employee_already_for_emptyp_marketid {ex} \n {traceback.format_exc()}"
        logger.error(log_message)
def add_co_rep_data(rep_detail,new_image,cross_accnt_session,logger):
    """Insert or update one company-rep mapping row from a stream rep entry.

    Flow: validate the employee via Athena; resolve the employee type from the
    role code; reuse an existing rep code if one exists, otherwise generate one
    (operator id for types 44/49/51/30, else last-name prefix + next sequence);
    drop any market-'0' placeholder row; then update the existing row or put a
    new one. Exceptions are logged, not re-raised.
    """
    try:
        logger.info(f"Beigin to add employee in co_rep_dat DDB")
        co_rep_cd = ''
        emp_num = str(int(rep_detail['M']['co_rep_cd']['S']))
        valid_emp = valid_employee(emp_num,cross_accnt_session,logger)
        if valid_emp != False:
            logger.info(f"{emp_num} :Valid employee")
            role_code = rep_detail['M']['org_role_cd']['S']
            market_id = rep_detail['M']['lob_id']['S']
            co_rep_desc = rep_detail['M']['lob_nm']['S']
            emp_type = get_emp_type(co_rep_data_table,role_code,logger)
            agncy_cd = str(int(new_image['agncy_cd']['S']))
            co_rep_code_list = get_com_rep_code(co_rep_data_table,emp_type,emp_num,logger)
            logger.info(f"Rep code list : {co_rep_code_list}")
            # Idempotency guard: mapping already recorded on the GSI.
            if employee_already_for_emptyp_marketid(co_rep_data_table,agncy_cd,emp_type,market_id,emp_num,logger):
                logger.info("Nothing to modify")
                return 0
            if len(co_rep_code_list) == 0:
                # No existing code for this employee/type: generate one.
                if emp_type in ['44','49','51','30']:
                    logger.info(f"emp_type : {emp_type} is 44 , 49 ,51 or 30")
                    co_rep_cd = valid_emp['op_id']
                else:
                    lastname = rep_detail['M']['co_rep_nm']['S'].split(" ")[-1]
                    if len(lastname)>2:
                        # Prefix = first 3 letters; suffix = next free sequence number.
                        lastname_3_char = lastname[0:3].upper()
                        rep_cd_list = get_rep_cd_list(co_rep_data_table,lastname_3_char,logger)
                        if len(rep_cd_list) == 0 :
                            co_rep_cd = lastname_3_char+'0'
                        else:
                            rep_cd_list = [d for d in rep_cd_list if d['co_rep_cd'][3:].isdigit()]
                            if len(rep_cd_list) == 0:
                                max_id = -1
                            else:
                                # NOTE(review): max suffix found via lexicographic
                                # sort of the full code — confirm behaviour once
                                # suffixes exceed one digit (e.g. '9' vs '10').
                                rep_cd_list.sort(key = lambda rep_cd: rep_cd['co_rep_cd'],reverse = True)
                                max_id = int(rep_cd_list[0]['co_rep_cd'][3:])
                            co_rep_cd = lastname_3_char + str(max_id+1)
                        logger.info(f"Rep code is generated with first 3 char of lastname + Next Seq available")
                    else:
                        # Short last names (1-2 chars) use the whole name as prefix.
                        lastname = lastname.upper()
                        rep_cd_list = get_rep_cd_list(co_rep_data_table,lastname,logger)
                        if len(rep_cd_list) == 0 :
                            co_rep_cd = lastname+'0'
                        else:
                            if(len(lastname) == 2):
                                rep_cd_list = [d for d in rep_cd_list if d['co_rep_cd'][2:].isdigit()]
                                if len(rep_cd_list) == 0:
                                    max_id = -1
                                else:
                                    rep_cd_list.sort(key = lambda rep_cd: rep_cd['co_rep_cd'],reverse = True)
                                    max_id = int(rep_cd_list[0]['co_rep_cd'][2:])
                            elif(len(lastname) == 1):
                                rep_cd_list = [d for d in rep_cd_list if d['co_rep_cd'][1:].isdigit()]
                                if len(rep_cd_list) == 0:
                                    max_id = -1
                                else:
                                    rep_cd_list.sort(key = lambda rep_cd: rep_cd['co_rep_cd'],reverse = True)
                                    max_id = int(rep_cd_list[0]['co_rep_cd'][1:])
                            co_rep_cd = lastname + str(max_id+1)
                        logger.info(f"Rep code is generated with first 3 char of lastname + Next Seq available")
            else:
                # Reuse the first existing code for this employee/type.
                logger.info(f"Rep code list is not empty for emp_num: {emp_num} and emp_type:{emp_type} i.e.{co_rep_code_list[0]}")
                co_rep_cd = co_rep_code_list[0]['co_rep_cd']
            logger.info(f"co_rep_cd : {co_rep_cd}")
            # A market '0' placeholder row is superseded by any concrete market row.
            if is_rows_present(co_rep_data_table,agncy_cd,emp_type,'0',logger):
                delete_market_0(co_rep_data_table,agncy_cd,emp_type,logger)
            if is_rows_present(co_rep_data_table,agncy_cd,emp_type,market_id,logger):
                logger.info(f"Row already present.. Modifying for addition")
                modify_agncy_emp_typ(co_rep_data_table,agncy_cd,emp_type,market_id,'N',co_rep_cd,emp_num,logger)
            else:
                co_rep_json = {
                    "agncy_cd": agncy_cd.rjust(7, '0'),
                    "agncy_nbr_cd": agncy_cd,
                    "co_rep_cd": co_rep_cd,
                    "creatd_by_id": "app_admin",
                    "create_dttm": get_today_loc_dt().strftime('%Y-%m-%d %H:%M:%S'),
                    "del_filtr_nbr": "nan",
                    "deleted_flag": "N",
                    "eff_dt": get_today_loc_dt().strftime('%Y-%m-%d'),
                    "emp_id": emp_num,
                    "employee_type_cd": emp_type,
                    "gs1pk": "empno#{}".format(emp_num),
                    "gs1sk": "emptyp#{}#agncy#{}#markt#{}".format(emp_type,agncy_cd,market_id),
                    "gs2pk": "rep#{}".format(co_rep_cd),
                    "gs2sk": "agcy#{}".format(agncy_cd),
                    "markt_cd": market_id,
                    "markt_desc": co_rep_desc,
                    "pk": "agcy#{}".format(agncy_cd),
                    "sk": "emptyp#{}#markt#{}".format(emp_type,market_id),
                    "updt_by_id": "app_admin",
                    "updt_dttm": get_today_loc_dt().strftime('%Y-%m-%d %H:%M:%S')
                }
                response = co_rep_data_table.put_item(
                    Item= co_rep_json
                )
                logger.info(f"updated for agncy_cd : {agncy_cd}, emp_num: {emp_num} and emp_type : {emp_type}")
        else:
            logger.error(f"{emp_num} :Employee doesnot exiists in the employee table")
    except Exception as ex:
        log_message = f"Exception occurred in add_co_rep_data {ex} \n {traceback.format_exc()}"
        logger.error(log_message)
def delete_co_rep_data(rep_detail,old_image,cross_accnt_session,logger):
    """Unassign a company-rep mapping row for a removed stream rep entry.

    Existing rows are updated to deleted_flag='Y' with rep code 'UNAS' and
    emp_id '0'; if no row exists, an 'UNAS' tombstone row is written instead.
    Exceptions are logged, not re-raised.
    """
    try:
        logger.info(f"Beigin to delete employee in co_rep_dat DDB")
        emp_num = str(int(rep_detail['M']['co_rep_cd']['S']))
        valid_emp = valid_employee(emp_num,cross_accnt_session,logger)
        if valid_emp != False:
            logger.info(f"{emp_num} :Valid employee")
            market_id = rep_detail['M']['lob_id']['S']
            co_rep_desc = rep_detail['M']['lob_nm']['S']
            role_code = rep_detail['M']['org_role_cd']['S']
            emp_type = get_emp_type(co_rep_data_table,role_code,logger)
            agncy_cd = old_image['agncy_cd']['S']
            # A market '0' placeholder row is superseded by any concrete market row.
            if is_rows_present(co_rep_data_table,agncy_cd,emp_type,'0',logger):
                delete_market_0(co_rep_data_table,agncy_cd,emp_type,logger)
            if is_rows_present(co_rep_data_table,agncy_cd,emp_type,market_id,logger):
                logger.info(f"Row already present.. Modifying for deletion")
                modify_agncy_emp_typ(co_rep_data_table,agncy_cd,emp_type,market_id,'Y','UNAS','0',logger)
            else:
                # No row to soft-delete: write an explicit 'UNAS' tombstone.
                co_rep_json = {
                    "agncy_cd": agncy_cd.rjust(7, '0'),
                    "agncy_nbr_cd": agncy_cd,
                    "co_rep_cd": "UNAS",
                    "creatd_by_id": "app_admin",
                    "create_dttm": get_today_loc_dt().strftime('%Y-%m-%d %H:%M:%S'),
                    "del_filtr_nbr": "nan",
                    "deleted_flag": "Y",
                    "eff_dt": get_today_loc_dt().strftime('%Y-%m-%d'),
                    "emp_id": "0",
                    "employee_type_cd": emp_type,
                    "gs1pk": "empno#{}".format("0"),
                    "gs1sk": "emptyp#{}#agncy#{}#markt#{}".format(emp_type,agncy_cd,market_id),
                    "gs2pk": "rep#{}".format("UNAS"),
                    "gs2sk": "agcy#{}".format(agncy_cd),
                    "markt_cd": market_id,
                    "markt_desc": co_rep_desc,
                    "pk": "agcy#{}".format(agncy_cd),
                    "sk": "emptyp#{}#markt#{}".format(emp_type,market_id),
                    "updt_by_id": "app_admin",
                    "updt_dttm": get_today_loc_dt().strftime('%Y-%m-%d %H:%M:%S')
                }
                response = co_rep_data_table.put_item(
                    Item= co_rep_json
                )
                logger.info(f"Record has been modified as part of deletion i.e agncy_cd : {agncy_cd}, emp_num: {emp_num} and emp_type : {emp_type}")
        else:
            logger.error(f"{emp_num} :Employee doesnot exiists in the employee table")
    except Exception as ex:
        log_message = f"Exception occurred in delete_co_rep_data {ex} \n {traceback.format_exc()}"
        logger.error(log_message)
from os import path
from aws_cdk import core
import aws_cdk.aws_lambda as lmb
import aws_cdk.aws_codedeploy as codedeploy
import aws_cdk.aws_cloudwatch as cloudwatch
from .configuration import EnvSpecific
import aws_cdk.aws_stepfunctions as stepfunctions
import aws_cdk.aws_stepfunctions_tasks as tasks
from typing import Any, Dict, Type, TypeVar
from aws_cdk import aws_iam
from aws_cdk import aws_events
from .services.lambda_service import LambdaService
import os
class CorepcodeStack(core.Stack):
    """Application stack: the company_rep Lambda wired to a DynamoDB stream."""
    def __init__(self, scope: core.Construct, construct_id: str, raw_config: EnvSpecific, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)
        this_dir = path.dirname(__file__)
        # Lambda source lives in the sibling 'functions' package.
        functions_folder = path.join(os.path.dirname(this_dir), 'functions')
        lambda_service = LambdaService(role_arn=raw_config.lambda_info['lambda_execution_arn'],
                                    layers=None,
                                    env=raw_config.lambda_info['lambda_env_vars']['ENV'], scope=self)
        get_changed_agencies_info_handler = lambda_service.create_lambda_function(
            name='company_rep',
            functions_folder=functions_folder,
            memory=1024,
            timeout_seconds=600,
            environment=raw_config.lambda_info['lambda_env_vars']
        )
        # Invoke the handler from the agency table's DynamoDB stream.
        db_trigger = lambda_service.create_dynamodb_trigger()
        lambda_trigger = get_changed_agencies_info_handler.add_event_source(db_trigger)
<file_sep>/ent_sls_adj_common_util/infra/configuration.py
from dataclasses import dataclass
from typing import Any, Dict, Type, TypeVar
from aws_cdk.core import Environment, RemovalPolicy
import json
class EnvSpecific:
    """Thin view over one environment's config dict (lambda_info + env)."""

    def __init__(self, d):
        self.lambda_info, self.env = d['lambda_info'], d['env']
class RawConfig:
    """
    Raw JSON configuration of the application and of all infrastructure
    resources for each environment (DEV/UAT/PROD) plus application metadata.
    """
    def __init__(self, config_file: str):
        cfg = self._read_config(config_file)
        self._all_config: Any = cfg
        self.development: Any = EnvSpecific(cfg['DEV'])
        self.staging: Any = EnvSpecific(cfg['UAT'])
        self.production: Any = EnvSpecific(cfg['PROD'])
        self.application: Any = cfg['application']
    def _read_config(self, config_file):
        # Load the whole JSON document; 'r' (text) mode is open()'s default.
        with open(config_file) as handle:
            return json.load(handle)
AppConfigClass = TypeVar('AppConfigClass', bound='AppConfig')

@dataclass
class AppConfig:
    """Static configuration of the application (name, repo, branch, build env)."""
    application_name: str
    repository_name: str
    branch: str
    build_environment: Environment
    @classmethod
    def from_raw_config(cls: Type[AppConfigClass], raw_config: Dict[str, Any]) -> AppConfigClass:
        """Build an AppConfig from a raw dict, converting CDK-typed fields first."""
        return cls(**cls.convert_to_cdk_constructs(raw_config))
    @staticmethod
    def convert_to_cdk_constructs(raw_config: Dict[str, Any]) -> Dict[str, Any]:
        """Replace the raw ``build_environment`` dict with a CDK ``Environment``.

        Subclasses overriding ``from_raw_config`` can call this helper to keep
        the parent class's conversions.
        """
        raw_config['build_environment'] = Environment(**raw_config.pop('build_environment'))
        return raw_config
<file_sep>/ent_sls_adj_common_util/functions/company_rep/cross_account_session.py
import boto3
from boto3.session import Session
def get_cross_account_session(ARN):
    """Assume the role at ARN via STS and return a boto3 Session for it.

    Re-raises any failure from the STS call.
    """
    try:
        creds = boto3.client("sts").assume_role(
            RoleArn=ARN,
            RoleSessionName="cross_acct_lambda"
        )["Credentials"]
        return Session(aws_access_key_id=creds["AccessKeyId"],
                       aws_secret_access_key=creds["SecretAccessKey"],
                       aws_session_token=creds["SessionToken"])
    except Exception as ex:
        raise ex
import sys
import os

# Make this module's directory and the CWD importable when it runs standalone
# (e.g. under the Lambda test harness). The original repeated the sys/os
# imports and inserted '.' twice; the redundant copies are removed.
path = os.path.abspath(__file__)
dir_path = os.path.dirname(path)
sys.path.insert(0, dir_path)
sys.path.insert(0, '.')
import traceback
import logging
from modify_record import modify_handler
from insert_record import insert_handler
from cross_account_session import get_cross_account_session
from config import(
ARN
)
# Add logging formatter.
# NOTE(review): this formatter is created but never attached to a handler, so
# the runtime's default log format is used as-is — confirm whether the
# formatter can be removed or should be wired to a StreamHandler.
formatter=logging.Formatter("%(asctime)s %(name)s %(levelname)s %(message)s")
# Create a module-level logger and set the logging level.
logger=logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class NotAgencyProducerRecord(Exception):
    """Raised when a stream record is not an agency-producer item.

    Fix: the original never called ``super().__init__``, so ``str(exc)`` and
    ``exc.args`` were empty; the message is now forwarded to ``Exception``.
    """

    def __init__(self, message):
        super().__init__(message)
        self.message = message
def lambda_handler(event, context):
    """Process DynamoDB stream records for agency-producer items.

    INSERT records trigger company-rep insertion; MODIFY records always run
    insertion and additionally run modification when the rep map or agency
    code changed. All exceptions are logged, not re-raised, so the stream
    batch is never retried.
    """
    try:
        cross_accnt_session = None
        for record in event['Records']:
            # pk is currently unused; kept so malformed keys fail fast inside
            # the try block.
            pk = record['dynamodb']['Keys']['pk']['N']
            sk = record['dynamodb']['Keys']['sk']['S']
            # Only agency-producer rows are relevant to company-rep maintenance.
            if not sk.startswith('agency_producer#producernbr#'):
                continue
            cross_accnt_session = get_cross_account_session(ARN)
            logger.info("cross account session created")
            # Was a bare print(event); route it through the logger so the event
            # appears with the rest of the structured CloudWatch output.
            logger.info(event)
            if record['eventName'] == 'INSERT':
                logger.info("New agency Created .. Begin company rep data insertion")
                insert_handler(record,cross_accnt_session,logger)
            elif record['eventName'] == 'MODIFY':
                co_rep_map_new = record['dynamodb']['NewImage']['co_rep_map']['L']
                agncy_cd_new = record['dynamodb']['NewImage']['agncy_cd']['S']
                co_rep_map_old = record['dynamodb']['OldImage']['co_rep_map']['L']
                agncy_cd_old = record['dynamodb']['OldImage']['agncy_cd']['S']
                insert_handler(record,cross_accnt_session,logger)
                if co_rep_map_new != co_rep_map_old or agncy_cd_new != agncy_cd_old:
                    logger.info("Begin company rep data modification")
                    modify_handler(record,cross_accnt_session,logger)
                else:
                    logger.info("No data to modify")
    except Exception as ex:
        log_message = f"Exception occurred while updating records to dynamodb {ex} \n {traceback.format_exc()}"
        logger.error(log_message)
from aws_cdk import core
from .configuration import RawConfig, EnvSpecific
from .lambda_stack import CorepcodeStack
from typing import Any, Dict, Type, TypeVar
class CorepcodeService(core.Stage):
def __init__(self, scope: core.Construct, id: str, raw_config: EnvSpecific, **kwargs):
super().__init__(scope, id, **kwargs)
service = CorepcodeStack(self, 'corepcodeservice', raw_config)<file_sep>/ent_sls_adj_common_util/infra/__init__.py
# Package init for the infra package: resolves the sibling "functions"
# directory that holds the Lambda sources.
from os import path
import os
this_dir = path.dirname(__file__)
functions_folder = path.join(os.path.dirname(this_dir), 'functions')
# NOTE(review): debug print left in — runs on every import of this package.
print(functions_folder)<file_sep>/ent_sls_adj_common_util/functions/company_rep/insert_record.py
from checks import add_co_rep_data
def insert_handler(record, cross_accnt_session, logger):
    """Write a company-rep row for every rep entry in the record's NewImage."""
    new_image = record['dynamodb']['NewImage']
    # Each element of the co_rep_map list is handed off individually.
    for rep_entry in new_image['co_rep_map']['L']:
        add_co_rep_data(rep_entry, new_image, cross_accnt_session, logger)
<file_sep>/ent_sls_adj_common_util/infra/services/lambda_service.py
from aws_cdk import aws_lambda as lambdas
from aws_cdk import core, aws_iam
import os
from .iam_service import IAMService
import aws_cdk.aws_dynamodb as dynamodb
from aws_cdk import aws_dynamodb as ddb
from aws_cdk.core import Duration
from aws_cdk.aws_lambda_event_sources import DynamoEventSource
class LambdaService:
    """Builds Lambda functions, layers and a DynamoDB stream trigger for the stack."""

    def __init__(self, layers=None, role_arn=None, env=None, scope=None):
        # Instance-level defaults reused by create_lambda_function when the
        # caller does not override them.
        self.layers = None
        self.role = None
        self.scope = scope if scope else None
        self.env = env if env else None
        if layers:
            # Resolve each {'name', 'arn'} dict into an ILayerVersion handle.
            self.layers = list()
            for layer in layers:
                self.layers.append(
                    lambdas.LayerVersion.from_layer_version_arn(
                        scope=scope, id=layer['name'], layer_version_arn=layer['arn']
                    ))
        if role_arn:
            # Shared execution role for all functions created by this service.
            self.role = IAMService().role_from_arn(scope=scope, name = 'Lambda Execution Role', role_arn=role_arn)

    # @staticmethod
    def create_lambda_function(self, name, functions_folder, memory, timeout_seconds, scope=None,
                        layers = None, role = None, function_path=None, environment=None):
        # Fall back to the instance defaults; the env name (when set) is used
        # as a prefix on the function name.
        scope = scope if scope else self.scope
        lambda_name = self.env + '_' + name if self.env else name
        layers = layers if layers else self.layers
        role = role if role else self.role
        function_path = function_path if function_path else os.path.join(functions_folder, name)
        return lambdas.Function(
            scope = scope,
            id=lambda_name,
            function_name=lambda_name,
            role=role,
            layers=layers,
            memory_size=memory,
            timeout=core.Duration.seconds(timeout_seconds),
            runtime=lambdas.Runtime.PYTHON_3_7,
            code=lambdas.Code.from_asset(path=function_path),
            handler="lambda_function.lambda_handler",
            environment=environment
        )

    def ret_dynamodb(self):
        # NOTE(review): table/stream ARNs are hard-coded to one account and
        # region — consider sourcing them from configuration.
        #itable = ddb.Table.from_table_arn(self.scope,"test","arn:aws:dynamodb:us-east-1:315207712355:table/test")
        itable = ddb.Table.from_table_attributes(self.scope,"test",table_arn="arn:aws:dynamodb:us-east-1:315207712355:table/test",table_stream_arn="arn:aws:dynamodb:us-east-1:315207712355:table/test/stream/2021-06-07T14:43:56.312")
        #return ddb.ITable(self,table_arn='arn:aws:dynamodb:us-east-1:315207712355:table/test')
        return itable

    def create_dynamodb_trigger(self):
        # Stream trigger: batches of up to 1000 records or 60s, 4-way
        # parallelization, no retries on failure.
        itable = self.ret_dynamodb()
        db_trigger = DynamoEventSource(itable,enabled=True,starting_position=lambdas.StartingPosition.LATEST, batch_size=1000,max_batching_window=Duration.seconds(60), parallelization_factor=4,retry_attempts=0)
        return db_trigger

    def create_lambda_layer(self, name, path, scope=None, description = None, layer_version_name=None):
        # A scope is mandatory here: fail fast when neither the argument nor
        # the instance default provides one.
        scope = scope if scope else self.scope
        if scope is None:
            raise Exception
        ac = lambdas.AssetCode(path=path)
        return lambdas.LayerVersion(scope=scope, id=name, code=ac,description=description, layer_version_name=layer_version_name)<file_sep>/app.py
#!/usr/bin/env python3
"""CDK entry point: synthesizes the Corepcode pipeline stack."""
from pathlib import Path
from aws_cdk import core
from ent_sls_adj_common_util.infra.lambda_service_stage import CorepcodeService
from ent_sls_adj_common_util.infra.pipeline_stack import PipelineStack
from ent_sls_adj_common_util.infra.configuration import RawConfig
# Environment-specific settings are loaded from a JSON file at synth time.
config_file = Path('./env_based_resources.json')
raw_config = RawConfig(config_file)
# NOTE(review): dev_env is built but never used below — confirm intent.
dev_env = core.Environment(**raw_config.development.env)
app = core.App()
# NOTE(review): account/region are hard-coded here instead of being taken
# from raw_config — verify before reuse in other environments.
PipelineStack(app, 'CorepcodePipelineStack', env={
    'account': '315207712355',
    'region': 'us-east-1'
}, raw_config=raw_config)
app.synth()
<file_sep>/ent_sls_adj_common_util/infra/services/iam_service.py
from aws_cdk import core, aws_iam
class IAMService:
    """Thin helper around IAM role lookups for CDK constructs."""

    def __init__(self, scope=None):
        # Default construct scope used when a call site does not supply one.
        self.scope = scope if scope else None

    def role_from_arn(self, scope, name, role_arn):
        # Prefer the explicit scope; otherwise fall back to the instance default.
        resolved_scope = scope or self.scope
        return aws_iam.Role.from_role_arn(id=name, role_arn=role_arn, scope=resolved_scope)
<file_sep>/ent_sls_adj_common_util/functions/company_rep/modify_record.py
from checks import add_co_rep_data
from checks import delete_co_rep_data
def modify_handler(record, cross_accnt_session, logger):
    """Reconcile company-rep rows after a MODIFY: delete removed entries, add new ones."""
    new_image = record['dynamodb']['NewImage']
    old_image = record['dynamodb']['OldImage']
    new_reps = new_image['co_rep_map']['L']
    old_reps = old_image['co_rep_map']['L']
    # Entries that existed before but are gone now -> delete.
    for rep_entry in old_reps:
        if rep_entry not in new_reps:
            delete_co_rep_data(rep_entry, old_image, cross_accnt_session, logger)
    # Entries present now that were absent before -> insert.
    for rep_entry in new_reps:
        if rep_entry not in old_reps:
            add_co_rep_data(rep_entry, new_image, cross_accnt_session, logger)
|
d1da50e8a2f9c0f71afeac4c16c5b17c0794f5bd
|
[
"Python",
"Text"
] | 18
|
Python
|
akashdeepconnect/ent-sls-adj-common-util
|
a1a34b9974a0e778967e9e129622a2782720a3b6
|
d0370b82e595909be826d6e353d9f42312722278
|
refs/heads/master
|
<repo_name>LucasHiago/ecomn_back<file_sep>/server/middlewares/authentication.js
const jwt = require("jsonwebtoken");
const User = require("../models/User");
const Admin = require("../models/Admin");
const config = require("config");
const JWT_SECRET = config.get("JWT_SECRET");
exports.checkAuth = async (req, res, next) => {
try {
// Get Authorization header
const bearerHeader = req.headers["authorization"];
// Check if Authorization header is undefined
if (typeof bearerHeader == "undefined")
return res.status(400).json({ message: "Authorization header required" });
// Check Authorization header format
if (!bearerHeader.startsWith("Bearer "))
return res
.status(400)
.json({ message: "Invalid Authorization header format" });
// Check if access token is provided
if (bearerHeader.length == 7)
return res.status(400).json({ message: "Access token required" });
const bearer = bearerHeader.split(" ");
const token = bearer[1];
const decoded = jwt.verify(token, JWT_SECRET);
const isAdmin = decoded.admin;
if (isAdmin) {
const admin = await Admin.findById(decoded.sub);
if (!admin)
return res.status(400).json({
message: "Admin not found when trying to check authentication"
});
req.userId = admin._id;
req.isAdmin = true;
next();
} else {
const user = await User.findById(decoded.sub);
if (!user)
return res.status(400).json({
message: "User not found when trying to check authentication"
});
req.userId = user._id;
req.isAdmin = false;
next();
}
} catch (error) {
return res.status(500).json({ message: error.message });
}
};
<file_sep>/server/controllers/products.js
const Product = require("../models/Product");
const path = require("path");
const Resize = require("../utils/Resize");
const fs = require("fs");
/**
* @route GET /products
* @desc return all product
*/
exports.getAll = (req, res) => {
Product.find()
.sort("-created_at")
.then(products => {
if (!products) res.status(200).json({ message: "No products available" });
res.status(200).json({ message: "Success", data: products });
})
.catch(err => res.status(500).json({ message: err.message }));
};
/**
* @route GET /products/:id
* @desc Get product by id
*/
exports.get = (req, res) => {
const id = req.params.id;
Product.findOne({ _id: id })
.then(product => {
if (!product)
res.status(400).json({ message: "No Product with the given id" });
res.status(200).json({ message: "Success", data: product });
})
.catch(err => {
res.status(500).json({ err: err.message });
});
};
/**
* @route POST /products
* @desc Create a new product
*/
exports.create = async (req, res) => {
try {
if (!req.isAdmin)
return res.status(404).json({ message: "Only admin can access " });
let data = ({
code,
name,
price,
material,
width,
description,
image,
stock,
color
} = req.body);
const imagePath = path.join(__dirname, "../../public/images");
const fileUpload = new Resize(imagePath);
if (!req.file) res.status(401).json({ error: "Please provide an image" });
const filename = await fileUpload.save(req.file.buffer);
data.image = filename;
const product = await Product.create(data);
return res.status(201).json({ message: "Success", data: product });
} catch (err) {
if (err.name === "ValidationError")
return res.status(400).json({ err: err.message });
return res.status(500).json({ err: err.message });
}
};
/**
* @route PUT /products
* @desc Update a product
*/
exports.update = async (req, res) => {
try {
if (!req.isAdmin)
return res.status(404).json({ message: "Only admin can access " });
const { id } = req.params;
const data = req.body;
const product = await Product.findOneAndUpdate({ _id: id }, data, {
new: true
});
if (!product) return res.status(404).json({ message: "Product not found" });
res.status(200).json({ message: "Success", data: product });
} catch (error) {
res.status(500).json({ message: error });
}
};
/**
* @route DELETE /products
* @desc Delete a product
*/
exports.deleteProduct = async (req, res) => {
try {
if (!req.isAdmin)
return res.status(404).json({ message: "Only admin can access " });
const { id } = req.params;
const product = await Product.findById(id);
if (!product) throw new Error("Product with the given id not found");
const deleted = await Product.deleteOne({ _id: id });
imagePath = path.join(__dirname, `../../public/images/${product.image}`);
fs.unlink(imagePath, function(err) {
if (err) return false;
});
res
.status(200)
.json({ message: "Success", delete: deleted, data: product });
} catch (error) {
res.status(500).json({ error });
}
};
<file_sep>/tests/carts.test.js
// Integration tests for the cart endpoints (mocha + chai-http).
const chai = require("chai");
const chaiHttp = require("chai-http");
const expect = chai.expect;
const fs = require("fs");
chai.use(chaiHttp);
const server = require("../server/app");
// Fixture ids captured in before() and shared by the test cases below.
let userId = "";
let productId = "";
let accessToken = "";
let transId = "";
let cartId = "";
describe("Carts test endpoint", () => {
  before(async () => {
    // Create user
    const res = await chai
      .request(server)
      .post("/api/users/register")
      .send({
        firstName: "Cart",
        lastName: "Test",
        email: "<EMAIL>",
        address: "Jl Cemara Raya 2 Tanjung Seneng",
        password: "<PASSWORD>",
        password_confirm: "<PASSWORD>"
      });
    userId = res.body.data._id;
    accessToken = res.body.access_token;
    // Create Product
    // NOTE(review): the content-type string below looks garbled
    // ("serverlication/...") and no Authorization header is sent, so this
    // creation likely fails and productId stays undefined — verify.
    const product = await chai
      .request(server)
      .post("/api/products")
      .set("Content-Type", "serverlication/x-www-form-urlencoded")
      .field("code", "Test Cart")
      .field("name", "Product for cart")
      .field("price", 4234234)
      .field("material", "blah blah")
      .field("width", 5)
      .field("stock", 6)
      .field("description", "dfasfasf")
      .attach(
        "image",
        fs.readFileSync(__dirname + "/tiosaputra.jpg"),
        "testCart.jpg"
      );
    // NOTE(review): other test files read product.body.data._id — confirm
    // which response shape the API actually returns here.
    productId = product.body._id;
  });
  it("Should Get Cart By User Id ", done => {
    chai
      .request(server)
      .get(`/api/carts/${userId}`)
      .set("Authorization", "Bearer " + accessToken)
      .end((err, res) => {
        expect(err).to.be.null;
        expect(res).to.have.status(200);
        expect(res).to.be.json;
        expect(res.body).to.have.property("message");
        done();
      });
  });
  it("Should add product to cart", done => {
    chai
      .request(server)
      .put(`/api/carts/${userId}`)
      .set("Authorization", "Bearer " + accessToken)
      .send({
        productId: productId,
        quantity: 5
      })
      .end((err, res) => {
        expect(err).to.be.null;
        expect(res).to.have.status(200);
        expect(res).to.be.json;
        expect(res.body).to.have.property("message");
        done();
      });
  });
  it("Change Product Quantity", done => {
    chai
      .request(server)
      .put(`/api/carts/${userId}/items/${productId}/quantity`)
      .set("Authorization", "Bearer " + accessToken)
      .send({ quantity: 4 })
      .end((err, res) => {
        expect(err).to.be.null;
        expect(res).to.have.status(200);
        expect(res).to.be.json;
        expect(res.body).to.have.property("message");
        done();
      });
  });
  it("Menghapus produk di dalam cart", done => {
    chai
      .request(server)
      .delete(`/api/carts/${userId}/items/${productId}`)
      .set("Authorization", "Bearer " + accessToken)
      .end((err, res) => {
        expect(err).to.be.null;
        expect(res).to.have.status(200);
        expect(res).to.be.json;
        expect(res.body).to.have.property("message");
        done();
      });
  });
});
<file_sep>/server/controllers/userAuth.js
const User = require("../models/User");
const bcrypt = require("bcryptjs");
const jwt = require("jsonwebtoken");
const config = require("config");
const JWT_SECRET = config.get("JWT_SECRET");
/**
 * Build a signed JWT for a regular (non-admin) user, valid for one day.
 *
 * `iat`/`exp` are expressed in seconds since the epoch as RFC 7519
 * NumericDates require; the former millisecond values made tokens
 * effectively never expire. `const` also fixes the former undeclared
 * (implicit-global) function binding.
 */
const signUserToken = user => {
  const nowSeconds = Math.floor(Date.now() / 1000);
  return jwt.sign(
    {
      iss: "Fabrik store E-Commerce",
      sub: user._id,
      iat: nowSeconds, // Current time (seconds)
      exp: nowSeconds + 24 * 60 * 60, // Current time + 1 day
      admin: false
    },
    JWT_SECRET
  );
};
/**
* @route POST /users/login
* @desc User Login
*/
exports.login = async (req, res) => {
try {
const { email, password } = req.body;
const user = await User.findOne({ email: email });
if (!user) return res.status(404).json({ message: "User not found" });
const valid = await bcrypt.compare(password, user.password);
if (!valid)
return res.status(403).json({ message: "Password does not match" });
const token = signUserToken(user);
return res.status(200).json({
userId: user._id,
access_token: token,
token_type: "bearer"
});
} catch (error) {
return res.status(500).json(error);
}
};
/**
* @route POST /users/register
* @desc Register a user
*/
exports.register = async (req, res) => {
try {
let { firstName, lastName, email, address, password } = req.body;
// Check if email is exists
const user = await User.findOne({ email: email });
if (user)
return res.status(409).json({
message: "Email already in use",
data: req.body
});
// All validation pass, Create User
let newUser = new User({
firstName,
lastName,
email,
address,
password
});
const savedUser = await newUser.save();
const token = signUserToken(savedUser);
return res.status(200).json({
message: "Register Success",
data: savedUser,
access_token: token
});
} catch (err) {
// If the process fail the data should be deleted in database
res.json({ err: err.message });
}
};
<file_sep>/server/controllers/transactions.js
const Transaction = require("../models/Transaction");
const Product = require("../models/Product");
const Cart = require("../models/Cart");
exports.getAllTransaction = async (req, res) => {
try {
if (!req.isAdmin)
return res.status(404).json({ message: "Only admin can access " });
let transactions = await Transaction.find()
.populate("user")
.sort("-createdAt");
let products = await Product.find();
if (!transactions)
res.status(200).json({ message: "No transactions available" });
for (let i = 0; i < transactions.length; i++) {
for (let y = 0; y < transactions[i].products.length; y++) {
for (let z = 0; z < products.length; z++) {
let trnsProId = `${transactions[i].products[y]._id}`;
let productId = `${products[z]._id}`;
if (productId == trnsProId) {
let data = {
_id: transactions[i].products[y]._id,
quantity: transactions[i].products[y].quantity,
code: products[z].code,
name: products[z].name,
price: products[z].price,
image: products[z].image,
color: products[z].color,
stock: products[z].stock,
material: products[z].material,
width: products[z].width,
description: products[z].description,
status: products[z].status
};
transactions[i].products[y] = data;
}
}
}
}
res.status(200).json({ message: "Success", data: transactions });
} catch (err) {
res.status(500).json({ message: err.message });
}
};
exports.getUserTransactions = async (req, res) => {
try {
const { userId } = req.params;
if (req.isAdmin)
return res.status(404).json({ message: "Only user can access " });
let transactions = await Transaction.find({ user: userId })
.populate("user")
.sort("-createdAt");
let products = await Product.find();
if (!transactions)
res.status(200).json({ message: "No transactions available" });
for (let i = 0; i < transactions.length; i++) {
for (let y = 0; y < transactions[i].products.length; y++) {
for (let z = 0; z < products.length; z++) {
let trnsProId = `${transactions[i].products[y]._id}`;
let productId = `${products[z]._id}`;
if (productId == trnsProId) {
let data = {
_id: transactions[i].products[y]._id,
quantity: transactions[i].products[y].quantity,
code: products[z].code,
name: products[z].name,
price: products[z].price,
image: products[z].image,
color: products[z].color,
stock: products[z].stock,
material: products[z].material,
width: products[z].width,
description: products[z].description,
status: products[z].status
};
transactions[i].products[y] = data;
}
}
}
}
res.status(200).json({ message: "Success", data: transactions });
} catch (err) {
res.status(500).json({ message: err.message });
}
};
exports.getSingleTransaction = async (req, res) => {
try {
const id = req.params.id;
let transactions = await Transaction.findById(id)
.populate("user")
.populate("product");
let products = await Product.find();
if (!transactions)
res.status(400).json({ message: "No transaction with the given id" });
for (let y = 0; y < transactions.products.length; y++) {
for (let z = 0; z < products.length; z++) {
let trnsProId = `${transactions.products[y]._id}`;
let productId = `${products[z]._id}`;
if (productId == trnsProId) {
let data = {
_id: transactions.products[y]._id,
quantity: transactions.products[y].quantity,
code: products[z].code,
name: products[z].name,
price: products[z].price,
image: products[z].image,
color: products[z].color,
stock: products[z].stock,
material: products[z].material,
width: products[z].width,
description: products[z].description,
status: products[z].status
};
transactions.products[y] = data;
}
}
}
res.status(200).json({ message: "Success", data: transactions });
} catch (err) {
res.status(500).json({ err: err.message });
}
};
exports.createTransaction = async (req, res) => {
try {
if (req.isAdmin)
return res.status(404).json({ message: "Only user can access " });
const userId = req.userId;
const { shippingAddress, paymentMethod, courService } = req.body;
let total = 0;
const userCart = await Cart.findOne({ user: userId });
let products = await Product.find();
if (!userCart.products.length)
return res.status(500).json({ message: "Product in cart is empty" });
for (let i = 0; i < userCart.products.length; i++) {
for (let y = 0; y < products.length; y++) {
let cartId = `${userCart.products[i]._id}`;
let productId = `${products[y]._id}`;
if (productId == cartId) {
let data = {
_id: userCart.products[i]._id,
quantity: userCart.products[i].quantity,
code: products[y].code,
name: products[y].name,
price: products[y].price,
image: products[y].image,
color: products[y].color,
stock: products[y].stock,
material: products[y].material,
width: products[y].width,
description: products[y].description,
status: products[y].status
};
userCart.products[i] = data;
}
}
}
for (let i = 0; i < userCart.products.length; i++) {
total += userCart.products[i].price;
}
total += courService.cost;
let data = {
products: userCart.products,
shippingAddress: shippingAddress,
processStatus: "Menunggu konfirmasi",
payments: {
isPaidOff: false,
method: paymentMethod
},
courService: courService,
user: userId,
total: total
};
const transaction = await Transaction.create(data);
res.status(200).json({ message: "Success", data: transaction });
} catch (err) {
res.status(500).json({ message: "Internal Server Error", err });
}
};
exports.statusTransaction = async (req, res) => {
try {
if (!req.isAdmin)
return res.status(404).json({ message: "Only admin can access " });
const { id } = req.params;
const { status } = req.body;
const transaction = await Transaction.findById(id);
if (!transaction)
return res.status(404).json({ message: "Transaction not found" });
transaction.processStatus = status;
await transaction.save();
res
.status(200)
.json({ message: `Transaksi status ${status}`, status: status });
} catch (err) {
res.json({ err: err.message });
}
};
exports.arrive = async (req, res) => {
try {
if (req.isAdmin)
return res.status(404).json({ message: "Only user can access " });
const { id } = req.params;
const transaction = await Transaction.findById(id);
if (!transaction)
return res.status(404).json({ message: "Transaction not found" });
transaction.processStatus = "Sampai";
const savedTrans = await transaction.save();
res.status(200).json({ message: `Transaksi Diterima`, data: savedTrans });
} catch (err) {
res.json({ err: err.message });
}
};
exports.resiTransaction = async (req, res) => {
try {
if (!req.isAdmin)
return res.status(404).json({ message: "Only admin can access " });
const { id } = req.params;
const { number } = req.body;
const transaction = await Transaction.findById(id);
transaction.processStatus = "Dikirim";
transaction.resiNumber = number;
const newTrans = await transaction.save();
res.status(200).json({ message: `Transaksi Resi Updated`, data: newTrans });
} catch (err) {
res.json({ err: err.message });
}
};
<file_sep>/server/routes/address.js
// Routes for user shipping addresses (mounted under /api/address).
const express = require("express");
const router = express.Router();
const { getUserAddress, updateUserAddress } = require("../controllers/address");
const { checkAuth } = require("../middlewares/authentication");
// Both endpoints require a valid bearer token.
router.get("/:userId", checkAuth, getUserAddress);
router.put("/:userId", checkAuth, updateUserAddress);
module.exports = router;
<file_sep>/tests/admin.test.js
// Integration tests for the admin auth endpoints (mocha + chai-http).
const chai = require("chai");
const chaiHttp = require("chai-http");
const expect = chai.expect;
chai.use(chaiHttp);
const server = require("../server/app");
// Credentials used both for registration (before hook) and for login.
const admin = {
  username: "admin123",
  password: "<PASSWORD>",
  password_confirm: "<PASSWORD>"
};
describe("Admin Endpoint Test", () => {
  before(async () => {
    await chai
      .request(server)
      .post("/api/admin/register")
      .send(admin);
  });
  it("Should login admin", async () => {
    try {
      const res = await chai
        .request(server)
        .put("/api/admin/login")
        .send({
          username: admin.username,
          password: <PASSWORD>
        });
      expect(res).to.have.status(200);
      expect(res).to.be.json;
      expect(res.body).to.have.property("access_token");
    } catch (err) {
      console.log(err.message);
    }
  });
});
<file_sep>/server/routes/admin.js
// Routes for admin authentication (mounted under /api/admin).
const express = require("express");
const router = express.Router();
const validation = require("../middlewares/validation");
const schemas = require("../middlewares/schemas");
const { registerAdmin, adminLogin } = require("../controllers/adminAuth");
// Prefix : admin
// Request bodies are validated against the declared schemas before handlers run.
router.put("/login", validation(schemas.adminLogin), adminLogin);
router.post("/register", validation(schemas.adminRegister), registerAdmin);
module.exports = router;
<file_sep>/tests/transactions.test.js
// Integration tests for the transaction endpoints (mocha + chai-http).
const chai = require("chai");
const chaiHttp = require("chai-http");
const expect = chai.expect;
const fs = require("fs");
chai.use(chaiHttp);
const server = require("../server/app");
// Fixture ids captured in before() and shared by the cases below.
let userId = "";
let productId = "";
let productPrice = 0;
let accessToken = "";
let transId = "";
let adminId = "";
let adminAccessToken = "";
describe("Transactions Endpoint", () => {
  before(async () => {
    // Create user
    const res = await chai
      .request(server)
      .post("/api/users/register")
      .send({
        firstName: "Transaction",
        lastName: "Test",
        email: "<EMAIL>",
        address: "Jl Cemara Raya 2 Tanjung Seneng",
        password: "<PASSWORD>",
        password_confirm: "<PASSWORD>"
      });
    userId = res.body.data._id;
    accessToken = res.body.access_token;
    const admin = await chai
      .request(server)
      .post("/api/admin/register")
      .send({
        username: "admin123",
        password: "<PASSWORD>",
        password_confirm: "<PASSWORD>"
      });
    adminId = admin.body._id;
    adminAccessToken = admin.body.access_token;
    // Create Product
    const product = await chai
      .request(server)
      .post("/api/products")
      .set({
        "Content-Type": "application/x-www-form-urlencoded",
        Authorization: `Bearer ${adminAccessToken}`
      })
      .field("code", "123Test")
      .field("name", "Kain Untuk Test")
      .field("price", 50000)
      .field("material", "blah blah")
      .field("width", 5)
      .field("stock", 6)
      .field("color", "merah")
      .field("description", "1235")
      .attach(
        "image",
        fs.readFileSync(__dirname + "/tiosaputra.jpg"),
        "tiosaputra.jpg"
      );
    productId = product.body.data._id;
    productPrice = product.body.data.price;
    // Add product to cart
    await chai
      .request(server)
      .put(`/api/carts/${userId}`)
      .set("Authorization", "Bearer " + accessToken)
      .send({
        productId: productId,
        quantity: 5
      });
  });
  it("Should create transaction", async () => {
    try {
      const res = await chai
        .request(server)
        .post("/api/transactions")
        .set("Authorization", "Bearer " + accessToken)
        .send({
          shippingAddress: {
            phoneNumber: "8583734894823",
            address: "ABC",
            province: "12",
            city: "123"
          },
          paymentMethod: "Transfer Rekening",
          courService: {
            courier: "jne",
            service: "OK",
            cost: "13000",
            etd: "5",
            note: ""
          }
        });
      expect(res).to.have.status(200);
      expect(res).to.be.json;
      expect(res.body).to.have.property("message");
      transId = res.body.data._id;
    } catch (err) {
      // NOTE(review): swallowing assertion errors here hides failures from
      // mocha — consider rethrowing.
      console.log(err.message);
    }
  });
  it("Mengambil transaksi berdasarkan id", done => {
    chai
      .request(server)
      .get(`/api/transactions/${transId}`)
      .set("Authorization", "Bearer " + accessToken)
      .end((err, res) => {
        expect(err).to.be.null;
        expect(res).to.have.status(200);
        expect(res).to.be.json;
        expect(res.body).to.have.property("message");
        expect(res.body).to.have.property("data");
        done();
      });
  });
  it("Should get all transactions", done => {
    chai
      .request(server)
      .get(`/api/transactions`)
      .set("Authorization", "Bearer " + adminAccessToken)
      .end((err, res) => {
        expect(err).to.be.null;
        expect(res).to.have.status(200);
        expect(res).to.be.json;
        expect(res.body).to.have.property("message");
        done();
      });
  });
  it("Mengubah status transaksi", done => {
    const status = ["Diterima", "Ditolak", "Dikirim"];
    chai
      .request(server)
      .put(`/api/transactions/${transId}/status`)
      .set("Authorization", "Bearer " + adminAccessToken)
      .send({
        status: status[0]
      })
      .end((err, res) => {
        expect(err).to.be.null;
        expect(res).to.have.status(200);
        expect(res).to.be.json;
        expect(res.body).to.have.property("message");
        expect(res.body).to.have.property("status");
        done();
      });
  });
  it("Mengupload bukti transaksi", async () => {
    try {
      const res = await chai
        .request(server)
        .put(`/api/transactions/${transId}/proof`)
        .set({
          "Content-Type": "application/x-www-form-urlencoded",
          Authorization: `Bearer ${accessToken}`
        })
        .attach(
          "image",
          fs.readFileSync(__dirname + "/tiosaputra.jpg"),
          "tiosaputra.jpg"
        );
      expect(res).to.have.status(200);
      expect(res.body).to.have.property("message");
    } catch (err) {
      console.log(err);
    }
  });
  it("Merubah status verifikasi pembayaran", done => {
    const verify = ["Terferivikasi", "Belum Terferivikasi"];
    chai
      .request(server)
      .put(`/api/transactions/${transId}/verify`)
      .set("Authorization", "Bearer " + adminAccessToken)
      .send({
        status: verify[0]
      })
      .end((err, res) => {
        expect(err).to.be.null;
        expect(res).to.have.status(200);
        expect(res).to.be.json;
        expect(res.body).to.have.property("message");
        done();
      });
  });
});
<file_sep>/server/routes/courier.js
// Routes for shipping-cost lookups (mounted under /api/courier).
const express = require("express");
const router = express.Router();
const { checkAuth } = require("../middlewares/authentication");
const { getCost, getProvince, getCity } = require("../controllers/courier");
const { provinceCache, cityCache } = require("../middlewares/cache");
// Province/city lists pass through a cache middleware before the controller.
router.post("/cost", checkAuth, getCost);
router.get("/province", checkAuth, provinceCache, getProvince);
router.get("/city", checkAuth, cityCache, getCity);
module.exports = router;
<file_sep>/server/routes/products.js
// Routes for the product catalogue (mounted under /api/products).
const express = require("express");
const router = express.Router();
const {
  get,
  getAll,
  create,
  update,
  deleteProduct
} = require("../controllers/products");
const upload = require("../middlewares/upload");
const { checkAuth } = require("../middlewares/authentication");
// Prefix : products
// Reads are public; create/update/delete require authentication, and
// create additionally accepts a single "image" upload.
router.get("/", getAll);
router.get("/:id", get);
router.post("/", checkAuth, upload.single("image"), create);
router.put("/:id", checkAuth, update);
router.delete("/:id", checkAuth, deleteProduct);
module.exports = router;
<file_sep>/src/controllers/UserController.js
const User = require('../models/User');
const bcrypt = require('bcryptjs');
const jwt = require('jsonwebtoken');
module.exports = {
  // POST /users: find-or-create a user by email; passwords are hashed with
  // bcrypt (cost 10) before storage.
  async store(req, res){
    const { email, password } = req.body;
    let user = await User.findOne({ email });
    if (!user){
      const hashedPassword = await bcrypt.hash(password, 10);
      user = await User.create({ email, password: hashedPassword });
    }
    // NOTE(review): redacted token below — presumably this blanks the
    // password before echoing the user back; confirm the original source.
    user.password = <PASSWORD>;
    return res.json(user);
  },
  // POST /users/login: verify credentials and return a 7-day JWT.
  async login(req, res) {
    const { email, password } = req.body;
    const user = await User.findOne({ email });
    if (!user) {
      return res.status(404).json({ errors: [ 'O e-mail não foi encontrado!']});
    }
    const isPasswordCorrect = await bcrypt.compare(password, user.password);
    if (!isPasswordCorrect) {
      return res.status(400).json({ errors: ['Senha inválida.']})
    }
    const payload = {
      id: user.id,
      email: user.email
    };
    // NOTE(review): falls back to a hard-coded secret when SECRET is unset —
    // unsafe anywhere outside local development.
    const token = jwt.sign(payload, process.env.SECRET || "s3cr3t", {
      expiresIn: "7d"
    });
    return res.json({
      token,
      ...payload
    });
  }
};<file_sep>/tests/address.test.js
// Integration tests for the address endpoints (mocha + chai-http).
const chai = require("chai");
const chaiHttp = require("chai-http");
const expect = chai.expect;
chai.use(chaiHttp);
const server = require("../server/app");
// Fixture ids captured in before() and shared by the cases below.
let userId = "";
let accessToken = "";
describe("Address Rest API Test", () => {
  before(async () => {
    // Create User Object
    const res = await chai
      .request(server)
      .post("/api/users/register")
      .send({
        firstName: "Product",
        lastName: "<NAME>",
        email: "<EMAIL>",
        address: "User untuk product test",
        password: "<PASSWORD>",
        password_confirm: "<PASSWORD>"
      });
    userId = res.body.data._id;
    accessToken = res.body.access_token;
  });
  it("Should Get Address", done => {
    chai
      .request(server)
      .get(`/api/address/${userId}`)
      .set({
        Authorization: `Bearer ${accessToken}`
      })
      .end((err, res) => {
        expect(err).to.be.null;
        expect(res).to.have.status(200);
        expect(res).to.be.json;
        // expect(res).to.have.property("message");
        done();
      });
  });
  it("Should Update Address", done => {
    chai
      .request(server)
      .put(`/api/address/${userId}`)
      .set({
        Authorization: `Bearer ${accessToken}`
      })
      .send({
        phoneNumber: "532532423",
        province: 12,
        city: 153,
        address: "Example Address At Address Number Address"
      })
      .end((err, res) => {
        expect(err).to.be.null;
        expect(res).to.have.status(200);
        expect(res).to.be.json;
        // expect(res).to.have.property("message");
        done();
      });
  });
});
<file_sep>/server/controllers/address.js
const Address = require("../models/Address");
exports.getUserAddress = async (req, res) => {
try {
const { userId } = req.params;
const accessTokenUserId = req.userId;
// Check if given user id has access to the data
if (userId !== `${accessTokenUserId}`)
return res.status(403).json({ message: "Forbidden" });
const address = await Address.findOne({ user: userId });
if (!address)
return res
.status(404)
.json({ message: "Address with the given user id not found" });
res.status(200).json({ message: "Success", data: address });
} catch (error) {
res.json({ error });
}
};
exports.updateUserAddress = async (req, res) => {
try {
const { userId } = req.params;
const accessTokenUserId = req.userId;
// Check if given user id has access to the data
if (userId !== `${accessTokenUserId}`)
return res.status(403).json({ message: "Forbidden" });
const { phoneNumber, province, city, address } = req.body;
const updatedAddress = await Address.findOneAndUpdate(
{ user: userId },
{
phoneNumber: phoneNumber,
province: province,
city: city,
address: address
},
{ new: true }
);
res.status(200).json({ message: "Success Update", data: updatedAddress });
} catch (error) {
res.json({ err: err.message });
}
};
<file_sep>/server/controllers/courier.js
const axios = require("axios");
const redis = require("redis");
// NOTE(review): this reads PORT — the HTTP server port — as the Redis port;
// presumably a dedicated REDIS_PORT env variable was intended. Confirm.
const REDIS_PORT = process.env.PORT || 6379;
const client = redis.createClient(REDIS_PORT);
// POST /courier/cost — proxy a RajaOngkir shipping-cost calculation.
// NOTE(review): the API key is hard-coded in three places below; move it to
// configuration/environment and rotate the exposed key.
exports.getCost = (req, res) => {
  const { origin, destination, weight, courier } = req.body;
  axios
    .post(
      "https://api.rajaongkir.com/starter/cost",
      {
        origin: origin,
        destination: destination,
        weight: weight,
        courier: courier
      },
      {
        headers: {
          key: "2e15faa98e205ad0f21e1a4fb97432c4"
        }
      }
    )
    .then(result => {
      return res.status(200).json({ message: "Success", data: result.data });
    })
    .catch(err => {
      return res.status(500).json({ message: "Error", err: err });
    });
};
// GET /courier/province — fetch the province list and cache it for 1 hour.
exports.getProvince = (req, res) => {
  axios
    .get("https://api.rajaongkir.com/starter/province", {
      headers: {
        key: "2e15faa98e205ad0f21e1a4fb97432c4"
      }
    })
    .then(result => {
      client.setex("province", 3600, JSON.stringify(result.data));
      return res.status(200).json({ message: "Success", data: result.data });
    })
    .catch(err => {
      return res.status(500).json({ message: "Error", err: err });
    });
};
// GET /courier/city — fetch the city list and cache it for 1 hour.
exports.getCity = (req, res) => {
  axios
    .get("https://api.rajaongkir.com/starter/city", {
      headers: {
        key: "2e15faa98e205ad0f21e1a4fb97432c4"
      }
    })
    .then(result => {
      client.setex("city", 3600, JSON.stringify(result.data));
      return res.status(200).json({ message: "Success", data: result.data });
    })
    .catch(err => {
      return res.status(500).json({ message: "Error", err: err });
    });
};
<file_sep>/server/controllers/carts.js
const Cart = require("../models/Cart");
const Product = require("../models/Product");
exports.getCart = async (req, res) => {
try {
const { userId } = req.params;
if (req.isAdmin)
return res.status(404).json({ message: "Only konsumen can access " });
let cart = await Cart.findOne({ user: userId });
if (!cart) throw new Error("Invalid User Id");
let products = await Product.find();
for (let i = 0; i < cart.products.length; i++) {
for (let y = 0; y < products.length; y++) {
let cartId = `${cart.products[i]._id}`;
let productId = `${products[y]._id}`;
if (productId == cartId) {
let data = {
_id: cart.products[i]._id,
quantity: cart.products[i].quantity,
code: products[y].code,
name: products[y].name,
price: products[y].price,
image: products[y].image,
color: products[y].color,
stock: products[y].stock,
material: products[y].material,
width: products[y].width,
description: products[y].description,
status: products[y].status
};
cart.products[i] = data;
}
}
}
res.status(200).json({ message: "Success", data: cart });
} catch (err) {
res.json({ err: err.message });
}
};
exports.addItemToCart = async (req, res) => {
try {
if (req.isAdmin)
return res.status(404).json({ message: "Only konsumen can access " });
const { userId } = req.params;
const { productId, quantity } = req.body;
if (!productId || !quantity)
return res.status(200).json({ message: "Format data salah" });
const cart = await Cart.findOne({ user: userId });
if (!cart) res.status(200).json({ message: "Invalid User Id" });
let isProductInCart = false;
for (let i = 0; i < cart.products.length; i++) {
if (cart.products[i]._id == productId) {
cart.products[i].quantity = cart.products[i].quantity + quantity;
isProductInCart = true;
break;
}
}
if (isProductInCart) {
const savedCart = await cart.save();
return res.status(200).json({ message: "Success", data: savedCart });
} else {
cart.products.push({ _id: productId, quantity: quantity });
const savedCart = await cart.save();
return res.status(200).json({ message: "Success", data: savedCart });
}
} catch (err) {
return res.json({ err: err.message });
}
};
exports.setQuantity = async (req, res) => {
try {
// if (req.isAdmin)
// return res.status(404).json({ message: "Only konsumen can access " });
const { userId, itemId } = req.params;
const { quantity } = req.body;
if (!itemId) throw new Error("Format parameter salah");
const cart = await Cart.findOne({ user: userId });
for (let i = 0; i < cart.products.length; i++) {
if (cart.products[i]._id == itemId) {
cart.products[i].quantity = quantity;
break;
}
}
const savedCart = await cart.save();
res.status(200).json({ message: "Success", data: savedCart });
} catch (err) {
res.json({ err });
}
};
exports.deleteItemFromCart = async (req, res) => {
try {
if (req.isAdmin)
return res.status(404).json({ message: "Only konsumen can access " });
const { userId, itemId } = req.params;
if (!userId || !itemId) throw new Error("Format parameter salah");
const cart = await Cart.findOne({ user: userId });
for (let i = 0; i < cart.products.length; i++) {
if (cart.products[i]._id == itemId) {
cart.products.splice(i, 1);
break;
}
}
const savedCart = await cart.save();
res.status(200).json({ message: "Success", data: savedCart });
} catch (err) {
res.json({ err: err.message });
}
};
<file_sep>/server/middlewares/cache.js
const redis = require("redis");
const client = redis.createClient(6379);
exports.provinceCache = (req, res, next) => {
client.get("province", (err, data) => {
if (err) throw err;
if (data !== null) {
res.status(200).json({ message: "Success", data: JSON.parse(data) });
} else {
next();
}
});
};
exports.cityCache = (req, res, next) => {
client.get("city", (err, data) => {
if (err) throw err;
if (data !== null) {
res.status(200).json({ message: "Success", data: JSON.parse(data) });
} else {
next();
}
});
};
<file_sep>/server/models/Transaction.js
const mongoose = require("mongoose");
const Product = require("./Product");
const Schema = mongoose.Schema;
// Order document: product lines, payment info, courier service, shipping
// address and status tracking for one checkout.
const transactionSchema = new Schema({
  products: [
    {
      _id: {
        type: mongoose.Schema.Types.ObjectId,
        ref: "Product",
        required: true
      },
      // Unit price captured at purchase time.
      price: { type: Number, required: true },
      quantity: { type: Number, required: true }
    }
  ],
  // Grand total of the order.
  total: {
    type: Number,
    required: true,
    default: 0
  },
  // 1.Menunggu pembayaran, 2.Menunggu konfirmasi admin, 3. Pesanan Dikirim, 4.Pesanan diterima
  // 5. Pesanan ditolak,
  processStatus: {
    type: String,
    required: true
  },
  // Shipping tracking ("resi") number, set once the order ships.
  resiNumber: {
    type: String
  },
  payments: {
    isPaidOff: {
      type: Boolean,
      required: true,
      default: false
    },
    method: {
      type: String,
      required: true
    },
    rekNumber: {
      type: Number
    },
    total: {
      type: Number,
      required: true,
      default: 0
    },
    // Proof-of-payment image reference.
    image: {
      type: String
    },
    verify: {
      type: String
    },
    paymentDate: {
      type: Date,
      required: false
    }
  },
  courService: {
    courier: {
      type: String,
      required: true
    },
    service: {
      type: String,
      required: true
    },
    cost: {
      type: Number,
      // NOTE(review): `require` (not `required`) — Mongoose will not enforce
      // this; confirm whether `required: true` was intended.
      require: true
    },
    etd: {
      type: String,
      required: true
    },
    note: {
      type: String,
      // NOTE(review): same `require` vs `required` typo as above.
      require: false
    }
  },
  shippingAddress: {
    phoneNumber: {
      type: String,
      required: true
    },
    province: {
      type: String,
      required: true
    },
    city: {
      type: String,
      required: true
    },
    address: {
      type: String,
      required: true
    }
  },
  createdAt: {
    type: Date,
    required: true,
    default: Date.now
  },
  updatedAt: {
    type: Date,
    required: true,
    default: Date.now
  },
  // Relation with Users
  user: {
    type: mongoose.Schema.Types.ObjectId,
    ref: "User"
  }
});
// Reduce Product Quantity
transactionSchema.post("save", async doc => {
try {
// Loop though products in transactions
// to reduce stock
const products = doc.products;
for (let i = 0; products.length; i++) {
let product = await Product.findById(products[i]._id);
product.stock = product.stock - products[i].quantity;
product.save();
}
// Clear cart data
const cart = await Cart.findOne({ user: doc.user });
cart.products = [];
cart.save();
} catch (error) {
return;
}
});
module.exports = mongoose.model("Transaction", transactionSchema);
<file_sep>/README.md
# 🏄 Ecomn Back - Express E-commerce Server
By <NAME> and Felipe
[](https://github.com/LucasHiago/ecomn_back)
[](https://github.com/KumarAbhirup/Emoji-Log/)
## 📦 Setup
### 🖥️ Development environment
- Run
```bash
$ git clone https://github.com/LucasHiago/ecomn_back <PROJECT_NAME> # to clone project
$ cd <PROJECT_NAME> # enter in the project
$ yarn # install modules
$ yarn dev # run development server
```
- Visit `http://localhost:3333/products/list` or `http://localhost:3333`
### ⚒️ Linting
#### In VSCode
Just run `yarn lint` from the integrated terminal, or install the ESLint extension to see lint errors inline as you type.
#### In Any other IDE
- Run `yarn lint` to check for linting errors.
- Run `yarn lint:fix` to fix the linting errors.
## 🦄 Info
- Give pulls to this project, if you like the idea.
## 📝 License
**MIT - Source code by [Lucas Hiago](https://lucashiago.com)**
_Follow me 👋 **on codepen**_ → [](https://codepen.io/lucashiagodsf)
<file_sep>/server/models/User.js
const mongoose = require("mongoose");
const bcrypt = require("bcryptjs");
const Cart = require("./Cart");
const Address = require("./Address");
const Schema = mongoose.Schema;
// Account record; `password` is stored hashed (see the pre-save hook below
// in this module).
const userSchema = new Schema({
  firstName: {
    type: String,
    required: true
  },
  lastName: {
    type: String,
    required: true
  },
  // Login identifier; uniqueness enforced via a unique index.
  email: {
    type: String,
    unique: true,
    required: true
  },
  address: {
    type: String,
    required: true
  },
  password: {
    type: String,
    required: true
  },
  createdAt: {
    type: Date,
    default: Date.now
  }
});
// Hash Password
userSchema.pre("save", async function(next) {
try {
// Generate Salt
const salt = await bcrypt.genSalt(10);
// Generate Password Hash (salt + hash)
const passwordHash = await bcrypt.hash(this.password, salt);
// Set this.password
this.password = <PASSWORD>;
next();
} catch (error) {
next(error);
}
});
// Create Cart Object
userSchema.post("save", async function(doc) {
try {
const cart = Cart.create({ user: doc._id });
if (!cart) throw new Error("Fail while creating cart object");
const address = Address.create({ user: doc._id });
if (!address) throw new Error("Fail while creating address object");
} catch (error) {
return;
}
});
module.exports = mongoose.model("User", userSchema);
<file_sep>/server/middlewares/upload.js
const multer = require("multer");
// Multer instance with no storage engine configured (files kept in memory)
// and a 5 MB per-file size cap.
const upload = multer({
  limits: {
    fileSize: 1024 * 1024 * 5
  }
});

module.exports = upload;
<file_sep>/server/controllers/users.js
const User = require("../models/User");
exports.getAllUser = (req, res) => {
User.find()
.then(users => {
if (!users) res.status(404).json({ message: "No users available" });
res.status(200).json({ message: "Success", data: users });
})
.catch(err => res.status(500).json({ message: err.message }));
};
exports.getUser = async (req, res) => {
try {
const { userId } = req.params;
if (!userId) return res.status(422).json({ message: "Invalid Format" });
const user = await User.findOne({ _id: userId });
if (!user) return res.status(404).json({ message: "User not found" });
res.status(200).json({
message: "Success",
data: {
firstName: user.firstName,
lastName: user.lastName,
email: user.email
}
});
} catch (err) {
res.json({ err: err.message });
}
};
exports.updateUser = async (req, res) => {
try {
const { firstName, lastName } = req.body;
const { userId } = req.params;
if (!firstName || !lastName || !userId)
throw new Error("Struktur data salah");
const updatedUser = await User.findOneAndUpdate(
{ _id: userId },
{ firstName, lastName },
{ new: true }
);
res.status(200).json({ message: "Success Update", data: updatedUser });
} catch (err) {
res.json({ err: err.message });
}
};
<file_sep>/server/routes/carts.js
const express = require("express");
const { checkAuth } = require("../middlewares/authentication");
const {
getCart,
addItemToCart,
deleteItemFromCart,
setQuantity
} = require("../controllers/carts");
const router = express.Router();
// Prefix /api/carts
/* Get Cart Info */
router.get("/:userId", checkAuth, getCart);
/* Add A Product to Cart */
router.put("/:userId", checkAuth, addItemToCart);
/* Set Product Quantity in Cart */
router.put("/:userId/items/:itemId/quantity", setQuantity);
/* Delete item in cart */
router.delete("/:userId/items/:itemId", checkAuth, deleteItemFromCart);
module.exports = router;
<file_sep>/tests/products.test.js
const chai = require("chai");
const expect = chai.expect;
const chaiHttp = require("chai-http");
const fs = require("fs");
chai.use(chaiHttp);
const server = require("../server/app");
let productId = "";
let userId = "";
let accessToken = "";
let adminId = "";
let adminAccessToken = "";
describe("Products Endpoint", () => {
before(async () => {
const res = await chai
.request(server)
.post("/api/users/register")
.send({
firstName: "Product",
lastName: "<NAME>",
email: "<EMAIL>",
address: "User untuk product test",
password: "<PASSWORD>",
password_confirm: "<PASSWORD>"
});
userId = res.body.data._id;
accessToken = res.body.access_token;
const admin = await chai
.request(server)
.post("/api/admin/register")
.send({
username: "admin123",
password: "<PASSWORD>",
password_confirm: "<PASSWORD>"
});
adminId = admin.body._id;
adminAccessToken = admin.body.access_token;
});
it("Mengambil seluruh produk", async () => {
const res = await chai
.request(server)
.get("/api/products")
.set("Authorization", "Bearer " + accessToken);
expect(res).to.have.status(200);
expect(res).to.be.json;
expect(res.body).to.have.property("message");
});
it("Mencoba menyimpan dengan data tidak lengkap", async () => {
try {
const res = await chai
.request(server)
.post("/api/products")
.set({
"Content-Type": "application/x-www-form-urlencoded",
Authorization: `Bearer ${adminAccessToken}`
})
.field("code", "12345")
.field("name", "Kain ABCD")
.field("price", 232424)
.field("width", 5)
.field("stock", 6)
.field("color", "merah")
.field("description", "1235")
.attach(
"image",
fs.readFileSync(__dirname + "/tiosaputra.jpg"),
"tiosaputra.jpg"
);
expect(res).to.have.status(400);
expect(res.body).to.have.property("message");
} catch (err) {
expect(err).to.be.an("object");
}
});
it("Membuat produk baru", async () => {
try {
const res = await chai
.request(server)
.post("/api/products")
.set({
"Content-Type": "application/x-www-form-urlencoded",
Authorization: `Bearer ${adminAccessToken}`
})
.field("code", "12345")
.field("name", "<NAME>")
.field("price", 232424)
.field("material", "blah blah")
.field("width", 5)
.field("stock", 6)
.field("color", "merah")
.field("description", "1235")
.attach(
"image",
fs.readFileSync(__dirname + "/tiosaputra.jpg"),
"tiosaputra.jpg"
);
productId = res.body.data._id;
expect(res).to.have.status(201);
expect(res).to.be.json;
expect(res.body).to.have.property("message");
} catch (err) {
console.log(err);
}
});
it("Mangambil produk berdasarkan id", async () => {
try {
const res = await chai
.request(server)
.get(`/api/products/${productId}`)
.set("Authorization", "Bearer " + accessToken);
expect(res).to.have.status(200);
expect(res.body).to.have.property("message");
} catch (err) {
console.log(err);
}
});
it("Mengubah produk berdsarkan id", done => {
const updateProduct = {
code: "12345",
name: "Kain Mulus bersih berkilau",
price: 4353454,
material: "Softly Gently",
width: 5,
stock: 5,
description: "This is my not my recomendation"
};
chai
.request(server)
.put(`/api/products/${productId}`)
.send(updateProduct)
.set("Authorization", "Bearer " + adminAccessToken)
.end((err, res) => {
expect(err).to.be.null;
expect(res).to.have.status(200);
// expect(res.body.data.code).to.equal(updateProduct.code);
// expect(res.body.data.name).to.equal(updateProduct.name);
// expect(res.body.data.price).to.equal(updateProduct.price);
// expect(res.body.data.material).to.equal(updateProduct.material);
// expect(res.body.data.width).to.equal(updateProduct.width);
// expect(res.body.data.stock).to.equal(updateProduct.stock);
// expect(res.body.data.description).to.equal(updateProduct.description);
done();
});
});
it("Menghapus produk berdasarkan id", done => {
chai
.request(server)
.del(`/api/products/${productId}`)
.set("Authorization", "Bearer " + adminAccessToken)
.end((err, res) => {
expect(err).to.be.null;
expect(res).to.have.status(200);
expect(res).to.be.json;
expect(res.body).to.have.property("data");
expect(res.body).to.have.property("message");
done();
});
});
});
|
1de045a4b4673a5d7c92f60081830cd2a44996de
|
[
"JavaScript",
"Markdown"
] | 24
|
JavaScript
|
LucasHiago/ecomn_back
|
0c0b224adf76ee10d07d0eb9656da8e393857ec2
|
9766f4e7e04fcb7310bcb17f94c3ce05a66052f4
|
refs/heads/master
|
<file_sep>import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { HomeComponent } from './home/home.component';
import { PanelComponent } from './panel/panel.component';
import { UsuariosComponent } from './usuarios/usuarios.component';
import { RegistroComponent } from './registro/registro.component';
import { LoginComponent } from './login/login.component';
import { AuthGuard } from './guards/auth.guard';
// Route table for the app; unknown paths fall back to the registration page.
// NOTE(review): only 'home' is protected by AuthGuard — 'panel' and
// 'usuarios/:id' are reachable without it. Confirm that is intentional.
const routes: Routes = [
  { path: 'home' , component: HomeComponent, canActivate: [ AuthGuard] },
  { path: 'panel' , component: PanelComponent},
  { path: 'usuarios/:id' , component: UsuariosComponent},
  { path: 'registro', component: RegistroComponent },
  { path: 'login' , component: LoginComponent },
  {path: '**', redirectTo: 'registro' }
];

@NgModule({
  imports: [RouterModule.forRoot(routes)],
  exports: [RouterModule]
})
export class AppRoutingModule { }
<file_sep>import { Component, OnInit } from '@angular/core';
import { HeroesService } from '../services/heroes.service';
import { HereoModel } from '../models/heroe.model';
import Swal from 'sweetalert2';
import { UsuarioModel } from '../models/usuario.model';
@Component({
  selector: 'app-panel',
  templateUrl: './panel.component.html',
  styleUrls: ['./panel.component.sass']
})
export class PanelComponent implements OnInit {

  usuarios: UsuarioModel[] = [];
  heroes: HereoModel[] = [];

  // Loading flag shown while the hero list request is in flight.
  cargando = false;

  constructor( private heroesService: HeroesService) { }

  // Fetch the hero list on startup and clear the loading flag once it lands.
  ngOnInit() {
    this.cargando = true;
    this.heroesService.getHeroes()
      .subscribe( resp => {
        this.heroes = resp;
        this.cargando = false;
      });
  }

  // Ask the user for confirmation, then remove the hero from the local list
  // and fire the delete request.
  // NOTE(review): the hero is removed from the UI before the server confirms
  // the deletion, and the subscribe has no error handler — a failed delete
  // would leave the UI out of sync. Confirm this optimistic behavior.
  borrarHeroe( heroe: HereoModel, i: number ){
    Swal.fire({
      title: 'Estas seguro?',
      text: `Estas seguro que desea borrar a ${heroe.nombre }`,
      type: 'question',
      showConfirmButton: true,
      showCancelButton: true
    }).then( resp => {
      if ( resp.value ) {
        this.heroes.splice(i, 1);
        this.heroesService.borrarHeroe( heroe.id ).subscribe();
      }
    });
  }
}
|
dd413b0a445762a2cfc773f83585e09e51ed750c
|
[
"TypeScript"
] | 2
|
TypeScript
|
Pako-007/login
|
75e82c7016e75d5a140bfb65681eb1bb776ee0a6
|
172098cf521e6cf692d59d219247341ba2a75532
|
refs/heads/main
|
<repo_name>joaogojr/searchPipeAngular<file_sep>/search.pipe.ts
import { Pipe, PipeTransform } from '@angular/core';
/**
* Usage
*
* <div *ngFor="let item of items | search:'id,text':valueToSearch">{{item.text}}</div>
*/
@Pipe({
  name: 'search'
})
export class SearchPipe implements PipeTransform {
  /**
   * Filters `values` down to items matching `term` (case-insensitive
   * substring match).
   * - For string arrays (or when `keys` is empty): match the element itself.
   * - For object arrays: match when any of the comma-separated `keys`
   *   properties matches.
   */
  transform(values: Array<any>, keys: string, term: string) {
    if (!term) {
      return values;
    }
    if (!values || !values.length) {
      return values;
    }
    // BUG FIX: the term was interpolated into a RegExp verbatim, so user
    // input such as "(" or "[" threw a SyntaxError. Escape metacharacters
    // and build the pattern once instead of per element ('i' only — the old
    // 'g' flag had no effect on per-element test() calls).
    const escaped = term.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    const pattern = new RegExp(escaped, 'i');
    if (!keys || typeof values[0] == 'string') {
      return values.filter(value => pattern.test(value));
    }
    const keyList = keys.split(',');
    return (values || []).filter(item =>
      keyList.some(key => item.hasOwnProperty(key) && pattern.test(item[key]))
    );
  }
}
<file_sep>/README.md
# searchPipeAngular
Custom Pipe to search in Angular List or Select
|
83b5205d518b610485dd4735cd1ef1ba843a50e0
|
[
"Markdown",
"TypeScript"
] | 2
|
TypeScript
|
joaogojr/searchPipeAngular
|
ec2188356de43d6bdf82a00f85c6cc72f56856b0
|
7694b7114df522c281e322082a61dbf892e13c70
|
refs/heads/master
|
<repo_name>matclone/DoubleLucky<file_sep>/src/com/matclone/DoubleLucky/ui/BasicFrame.java
package com.matclone.DoubleLucky.ui;
import java.awt.Dimension;
import java.awt.Point;
import java.awt.Toolkit;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import javax.swing.JFrame;
/**
 * Fixed-size (800x450), non-resizable frame that centers itself on the
 * primary screen and can be repositioned with the mouse.
 */
public class BasicFrame extends JFrame {

    private final int frame_width = 800;
    private final int frame_height = 450;
    // Current top-left corner of the frame on screen.
    private Point currentLocation = new Point();

    public BasicFrame(String title) {
        super(title);
        this.setSize(frame_width, frame_height);
        this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        this.setResizable(false);
        // Center the frame on the primary screen.
        Toolkit toolkit = this.getToolkit();
        Dimension screen = toolkit.getScreenSize();
        int screen_width = screen.width;
        int screen_height = screen.height;
        this.currentLocation.x = (screen_width - frame_width) / 2;
        this.currentLocation.y = (screen_height - frame_height) / 2;
        this.setLocation(this.currentLocation);
        this.addListeners();
    }

    /**
     * Adds a mouse listener implementing window dragging: the offset between
     * the press point and the release point is applied to the frame's
     * location.
     * NOTE(review): only press/release are handled (no mouseDragged), so the
     * frame jumps after release rather than following the pointer — confirm
     * this is the intended behavior.
     */
    private void addListeners() {
        Point beforePoint = new Point();
        Point afterPoint = new Point();
        this.addMouseListener((new MouseAdapter() {
            public void mousePressed(MouseEvent e) {
                beforePoint.x = e.getX();
                beforePoint.y = e.getY();
            }

            public void mouseReleased(MouseEvent e) {
                afterPoint.x = e.getX();
                afterPoint.y = e.getY();
                Point offsetPoint = new Point();
                offsetPoint.x = afterPoint.x - beforePoint.x;
                offsetPoint.y = afterPoint.y - beforePoint.y;
                BasicFrame.this.currentLocation.x += offsetPoint.x;
                BasicFrame.this.currentLocation.y += offsetPoint.y;
                BasicFrame.this.setLocation(currentLocation);
            }
        }));
    }
}
<file_sep>/src/com/matclone/DoubleLucky/exception/IllegalBlueBallException.java
package com.matclone.DoubleLucky.exception;
/**
 * Thrown when a blue-ball value outside the legal range is supplied.
 */
public class IllegalBlueBallException extends Exception {
    public IllegalBlueBallException() {
        // BUG FIX: the original message was GBK-encoded Chinese mis-decoded
        // as Latin-1 ("·Ç·¨µÄÀ¶ÇòÖµ"); restored to the intended text,
        // "illegal blue ball value".
        super("非法的蓝球值");
    }
}
|
80bdc5ad49352e332bf7fa67cb6965545b91e844
|
[
"Java"
] | 2
|
Java
|
matclone/DoubleLucky
|
c59c797a32bb0622795514202dade93232fd36cc
|
21875cba499beddd5c4806e91d4250002754691c
|
refs/heads/master
|
<repo_name>luis11181/sia-scraper<file_sep>/index.js
const puppeteer = require('puppeteer');
const fs = require('fs');
const courseTree = require("./tree");
// Walk the location -> level -> faculty -> careers tree and return the first
// [level, faculty, career] triple whose components contain the given search
// fragments (undefined when nothing matches).
function getSelection(inputLevel, inputCareer, inputLocation) {
  for (const [location, levels] of Object.entries(courseTree)) {
    if (!location.includes(inputLocation)) continue;
    for (const [level, faculties] of Object.entries(levels)) {
      if (!level.includes(inputLevel)) continue;
      for (const [faculty, careers] of Object.entries(faculties)) {
        const match = careers.find(career => career.includes(inputCareer));
        if (match) {
          return [level, faculty, match];
        }
      }
    }
  }
}
// End-to-end scrape of the public SIA course catalog: open the catalog page,
// drive the level/faculty/location drop-downs, run the search, then visit
// each course link and parse groups, professors, seats and schedules out of
// the rendered page text. Writes the aggregate to response.json.
async function getInfo(){
  // Init browser
  const browser = await puppeteer.launch({
    headless:true,
    ignoreHTTPSErrors:true,
    timeout:0,
    defaultViewport: {
      width: 1366,
      height: 768
    }
  });
  // Accumulates { courseCode: courseInfo } across all visited courses.
  let response = {};
  // Set courseName to the course you want to search, it'll search for all non-elective courses
  const courseName = "";
  // You HAVE TO set selection, just fill it once with keywords, it's not necessary to write the whole word
  // Career and location MUST be in CAPITAL LETTERS.
  // EXAMPLES OF USAGE: [Pregrado|Doctorado|Postgrados y másteres,COMP|QUÍM|ADM|...,BOG|MEDELLÍN|PAZ|...]
  const selection = getSelection("Pregrado","SISTEMAS Y COMP","BOG");
  // Escaped CSS ids of the three cascading <select> elements on the page.
  const selectIds = [`#pt1\\:r1\\:0\\:soc1\\:\\:content`,`#pt1\\:r1\\:0\\:soc2\\:\\:content`,`#pt1\\:r1\\:0\\:soc3\\:\\:content`];
  const url = 'https://sia.unal.edu.co/ServiciosApp/facespublico/public/servicioPublico.jsf?taskflowId=task-flow-AC_CatalogoAsignaturas';
  // Go to catalog
  const page = await browser.newPage();
  console.time("open");
  await page.goto(url,{
    timeout:0
  });
  console.timeEnd("open");
  // Select drop-list options; each later select is disabled until the
  // previous choice repopulates it, hence the waitForFunction guards.
  for (const option of selection) {
    console.time(option);
    let selectionOptions = null;
    // Wait until options loaded
    switch(selection.indexOf(option)){
      case 0:
        selectionOptions = await page.evaluate(()=>document.getElementById("pt1:r1:0:soc1::content").innerText);
        break;
      case 1:
        await page.waitForFunction(()=>!document.querySelector(`#pt1\\:r1\\:0\\:soc2\\:\\:content`).disabled,{
          timeout:0
        });
        selectionOptions = await page.evaluate(()=>document.getElementById("pt1:r1:0:soc2::content").innerText);
        break;
      case 2:
        await page.waitForFunction(()=>!document.querySelector(`#pt1\\:r1\\:0\\:soc3\\:\\:content`).disabled,{
          timeout:0
        });
        selectionOptions = await page.evaluate(()=>document.getElementById("pt1:r1:0:soc3::content").innerText);
        break;
    }
    // Get available options (one per line of the select's innerText)
    selectionOptions = selectionOptions.split("\n");
    const selectValue = `${selectionOptions.indexOf(option)}`;
    // Select correct option
    const selectElement = await page.$(selectIds[selection.indexOf(option)]);
    await selectElement.click();
    selectElement.select(selectValue);
    console.timeEnd(option);
  }
  // Type course name
  await page.type(`#pt1\\:r1\\:0\\:it11\\:\\:content`,courseName);
  // Click button to execute search
  await page.waitForFunction(()=>!document.querySelector(".af_button.p_AFDisabled"));
  const button = await page.$(".af_button_link");
  button.click();
  // Wait for results to load (fixed delay; the results table has no
  // reliable readiness selector)
  await page.waitFor(6000);
  let courses = await page.$$(".af_commandLink");
  const size = courses.length-1;
  console.log(`${size} courses found!`);
  for (let i = 0; i < size; i++) {
    console.time(i);
    // Re-query the links each iteration: navigating back invalidates the
    // previously captured element handles.
    courses = await page.$$(".af_commandLink");
    // Visit
    const element = courses[i];
    try{
      await element.click();
    }catch{
      console.error("Couldn't click on course link");
    }
    // Load course info
    try {
      await page.waitForSelector(".af_showDetailHeader_content0",{
        timeout: 3000,
      });
      // Get raw content of the whole detail page as plain text
      const rawContent = await page.evaluate(() => document.querySelector('#d1').innerText);
      let regex = /(.*)\((.*)\)/,m;
      // Get course info: "Name (CODE)" -> name + code
      const courseInfo = regex.exec(rawContent)[0].split(" (");
      const name = courseInfo[0];
      const code = courseInfo[1].replace(")","");
      // Init course
      let course = {
        name,
        groups: []
      };
      // Get group numbers: lines like "(N) Grupo ..." repeated per group
      regex = /\([0-9]*\)(.*)Grupo(.*)([0-9]*)(.*)+/g;
      do {
        m = regex.exec(rawContent);
        if (m) {
          course.groups.push({number: Number(m[0].substring(m[0].indexOf("(")+1,m[0].indexOf(")"))), name: m[0].split(") ")[1]});
        }
      } while (m);
      // Get professor — matches appear in the same order as the groups,
      // so one exec per group lines them up positionally.
      regex = /Profesor:(.*)/g;
      for(const obj of course.groups){
        m = regex.exec(rawContent);
        if (m) {
          obj.professor = m[0].split("Profesor: ")[1];
        }
      }
      // Get seats (same positional pairing as above)
      regex = /Cupos disponibles:(.*)/g;
      for(const obj of course.groups){
        m = regex.exec(rawContent);
        if (m) {
          obj.seats = Number(m[0].split("Cupos disponibles: ")[1]);
        }
      }
      // Get schedule: strip labels/rooms/punctuation, then split the block
      // into per-day "DAY HHMM HHMM" strings.
      regex = /Fecha:(.*)(\n*)(.*)(\n*)(.*)(\n*)(.*)(\n*)(.*)(\n*)(.*)(\n*)(.*)(\n*)(.*)(\n*)(.*)Duración:/g;
      for(const obj of course.groups){
        m = regex.exec(rawContent);
        if (m) {
          m[0] = m[0].replace(/(Fecha:(.*))|Duración/g,"").replace(/(SALA|SALON|SALÓN|LABORATORIO|AUDITORIO)(.*)/g,"@").replace(/\n|\./g,"").replace(/de |a |\:/g,"").split("@").filter(v => v.includes(" "));
          obj.schedule = m[0];
        }
      }
      // Save response
      response[`${code}`] = course
    }catch (e){
      console.log(`${i}: NO INFO FOUND!`);
      // console.error(e)
    }
    // Go back to course list
    // await page.waitForSelector(`.af_button`);
    const backButton = await page.$(`.af_button`);
    await backButton.click();
    try{
      await page.waitForSelector(".af_selectBooleanCheckbox_native-input",{
        timeout: 5000
      });
    }catch{
      // Navigation back occasionally fails silently; retry once.
      await backButton.click();
    }
    console.timeEnd(i);
  }
  // Log final file
  response = JSON.stringify(response,null,2);
  fs.writeFile('response.json', response, (err) => {
    if (err) throw err;
    console.log('Data written to file');
  });
  await browser.close();
}
getInfo();<file_sep>/readme.md
# SIA scraper
Simple scraper for getting useful info on schedules, professors and seats. (Keep in mind that for large queries could take long)
## Installation
Clone the repo and install ```node_modules```.
## Usage
Run using ```npm start```, or in development with ```npm run develop``` to run continuously. Then set ```courseName``` to the course you want to search; it'll search for all non-elective courses otherwise.
```
const courseName = "algoritmos";
```
After that, you *have to* set selection, just fill it once with keywords (or code), it's not necessary to write the whole word. Career and location **MUST be in CAPITAL LETTERS¨**.
```
*EXAMPLES OF USAGE*
Level: [Pre(grado)|Doc(torado)|Post(grados y másteres)]
Career: [2879|QUÍMICA|ECON|...]
Location: [BOG|1102|TUMACO|...]
```
Full detail of these configs can be found on ```tree.json``` file.
|
9a16d3ab2d9f6e92254ad63dd30e10a55340dff1
|
[
"JavaScript",
"Markdown"
] | 2
|
JavaScript
|
luis11181/sia-scraper
|
e557dac6c26de46f3ed50ea2d2b37e3d214143aa
|
85b2438e8e8496de7bdbd4df6b086812cb4c5012
|
refs/heads/master
|
<repo_name>pangkun/csdn<file_sep>/src/main/java/controller/Dao.java
package controller;
import javabean.Article;
import javabean.Author;
import util.DBUtil;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * Plain-JDBC data-access helpers for the {@code author1} and {@code article1}
 * tables. Statements and connections are closed via try-with-resources
 * (the original closed them inconsistently or not at all).
 */
public class Dao {

    /** Inserts the author's URL into {@code author1}; id is auto-generated. */
    public static void saveAuthor(Author author) {
        try (Connection connection = DBUtil.getConnection();
             PreparedStatement preparedStatement =
                     connection.prepareStatement("insert into author1 values(0,?)")) {
            preparedStatement.setString(1, author.getUrl());
            preparedStatement.execute();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /** Inserts one article row (title, author, date, time, url) into {@code article1}. */
    public static void saveArticle(Article article) {
        try (Connection connection = DBUtil.getConnection();
             PreparedStatement preparedStatement =
                     connection.prepareStatement("insert into article1 values(0,?,?,?,?,?)")) {
            preparedStatement.setString(1, article.getTitle());
            preparedStatement.setString(2, article.getAuthor());
            preparedStatement.setString(3, article.getDate());
            preparedStatement.setString(4, article.getTime());
            preparedStatement.setString(5, article.getUrl());
            preparedStatement.execute();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /**
     * Looks up an author row by URL.
     *
     * @return the matching {@link Author}, or {@code null} if none exists
     */
    public static Author findAuthor(String authorName) {
        try (Connection connection = DBUtil.getConnection();
             PreparedStatement preparedStatement =
                     connection.prepareStatement("select * from author1 where url = ?")) {
            preparedStatement.setString(1, authorName);
            try (ResultSet resultSet = preparedStatement.executeQuery()) {
                if (resultSet.next()) {
                    // BUG FIX: the original left `author` null and then called
                    // setUrl/setId on it, throwing NullPointerException.
                    Author author = new Author();
                    author.setUrl(resultSet.getString("url"));
                    author.setId(resultSet.getInt("id"));
                    return author;
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Returns the author stored immediately after the given one (id + 1),
     * or {@code null} when the given author or its successor does not exist.
     */
    public static Author findNextAuthor(String authorName) {
        try (Connection connection = DBUtil.getConnection();
             PreparedStatement current =
                     connection.prepareStatement("select * from author1 where url = ?")) {
            current.setString(1, authorName);
            int id;
            try (ResultSet resultSet = current.executeQuery()) {
                if (!resultSet.next()) {
                    return null;
                }
                id = resultSet.getInt("id");
            }
            // BUG FIX: the original called PreparedStatement.execute(String)
            // (illegal on a PreparedStatement), queried the wrong table
            // ("author" instead of "author1"), re-executed the first query,
            // and dereferenced a null Author.
            try (PreparedStatement next =
                         connection.prepareStatement("select * from author1 where id = ?")) {
                next.setInt(1, id + 1);
                try (ResultSet resultSet = next.executeQuery()) {
                    if (resultSet.next()) {
                        Author author = new Author();
                        author.setId(id + 1);
                        author.setUrl(resultSet.getString("url"));
                        return author;
                    }
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return null;
    }
}
|
cb1bb48afd6c5997a73831b61c62c671c2b76e03
|
[
"Java"
] | 1
|
Java
|
pangkun/csdn
|
4f57dcb0d550a8ed7e2614fe9f9e39c6c4c1614d
|
3256a804e123327ad5493332a62e293140e9cc6c
|
refs/heads/master
|
<repo_name>YBill/VirtualViewTest<file_sep>/settings.gradle
rootProject.name='VirtualViewTest'
include ':app'
<file_sep>/app/src/main/java/com/bill/virtualviewtest/ui/LocalParserOutActivity.java
package com.bill.virtualviewtest.ui;
import android.graphics.Color;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.View;
import android.widget.LinearLayout;
import com.bill.virtualviewtest.MyApplication;
import com.bill.virtualviewtest.R;
import com.bill.virtualviewtest.base.BaseActivity;
import com.bill.virtualviewtest.util.Utils;
import com.tmall.wireless.vaf.framework.VafContext;
import com.tmall.wireless.vaf.framework.ViewManager;
import com.tmall.wireless.vaf.virtualview.core.IContainer;
import com.tmall.wireless.vaf.virtualview.core.Layout;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
public class LocalParserOutActivity extends BaseActivity {
private static final String NAME = "MyTest";
private static final String TEMPLATE = "template/MyTest.out";
private static final String DATA = "data/MyTest.json";
private LinearLayout mLinearLayout;
private VafContext sVafContext;
private ViewManager sViewManager;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_local_parser);
        mLinearLayout = findViewById(R.id.container);
        // VirtualView context/manager are shared application-wide singletons.
        sVafContext = ((MyApplication) getApplication()).getVafContext();
        sViewManager = ((MyApplication) getApplication()).getViewManager();
        load();
    }
    // Load the compiled template, then render it with the sample JSON data.
    private void load() {
        loadTemplates(TEMPLATE);
        JSONObject json = getJSONDataFromAsset(DATA);
        preview(NAME, json);
    }
private void loadTemplates(String template) {
File file = getApplicationContext().getExternalFilesDir(TEMPLATE);
if (file != null) {
String path = file.getAbsolutePath();
int ret = sViewManager.loadBinFileSync(path);
if (ret > 0) {
Utils.bottomToast(mLinearLayout, "Load with " + path, Color.GREEN);
return;
}
}
byte[] b = getTemplateFromAsset(template);
int ret = sViewManager.loadBinBufferSync(b);
if (ret > 0) {
Utils.bottomToast(mLinearLayout, "Load with assets/" + template, Color.YELLOW);
return;
}
Utils.bottomToast(mLinearLayout, "Load failed", Color.RED);
// 加载不出来,看loadBinFileSync内部源码其内部是通过读本地文件的,不是读assets中的文件
// sViewManager.loadBinFileSync("file:///android_asset/template/MyTest.out");
}
private void preview(String templateName, JSONObject jsonData) {
if (TextUtils.isEmpty(templateName)) {
Utils.toast("Template name should not be empty!!!!");
return;
}
View mContainer = sVafContext.getContainerService().getContainer(templateName, true);
IContainer iContainer = (IContainer) mContainer;
if (jsonData != null) {
iContainer.getVirtualView().setVData(jsonData);
}
Layout.Params p = iContainer.getVirtualView().getComLayoutParams();
LinearLayout.LayoutParams marginLayoutParams = new LinearLayout.LayoutParams(p.mLayoutWidth, p.mLayoutHeight);
marginLayoutParams.leftMargin = p.mLayoutMarginLeft;
marginLayoutParams.topMargin = p.mLayoutMarginTop;
marginLayoutParams.rightMargin = p.mLayoutMarginRight;
marginLayoutParams.bottomMargin = p.mLayoutMarginBottom;
mLinearLayout.removeAllViews();
mLinearLayout.addView(mContainer, marginLayoutParams);
}
private byte[] getTemplateFromAsset(String name) {
try {
InputStream inputStream = getAssets().open(name);
int length = inputStream.available();
byte[] buf = new byte[length];
inputStream.read(buf);
inputStream.close();
return buf;
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
private JSONObject getJSONDataFromAsset(String name) {
try {
InputStream inputStream = getAssets().open(name);
BufferedReader inputStreamReader = new BufferedReader(new InputStreamReader(inputStream));
StringBuilder sb = new StringBuilder();
String str;
while ((str = inputStreamReader.readLine()) != null) {
sb.append(str);
}
inputStreamReader.close();
return new JSONObject(sb.toString());
} catch (IOException e) {
e.printStackTrace();
} catch (JSONException e) {
e.printStackTrace();
}
return null;
}
}
<file_sep>/app/src/main/java/com/bill/virtualviewtest/ui/RealtimeListLoadActivity.java
package com.bill.virtualviewtest.ui;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Base64;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.LinearLayout;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.bill.virtualviewtest.MyApplication;
import com.bill.virtualviewtest.R;
import com.bill.virtualviewtest.base.BaseActivity;
import com.bill.virtualviewtest.util.HttpUtil;
import com.bill.virtualviewtest.util.ThreadUtils;
import com.bill.virtualviewtest.util.Utils;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.tmall.wireless.vaf.framework.VafContext;
import com.tmall.wireless.vaf.framework.ViewManager;
import com.tmall.wireless.vaf.virtualview.core.IContainer;
import com.tmall.wireless.vaf.virtualview.core.Layout;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
/**
 * Fetches the directory listing from the local template-preview server, then
 * downloads, registers and renders every "Layout*" template in sorted order.
 */
public class RealtimeListLoadActivity extends BaseActivity {
    private VafContext sVafContext;
    private ViewManager sViewManager;
    private LinearLayout mLinearLayout;
    // Fix: one shared client. OkHttp clients own their own connection and
    // thread pools, so allocating a fresh instance per request (as the old
    // code did in both network methods) wastes resources.
    private final OkHttpClient mHttpClient = new OkHttpClient();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_local_parser);
        mLinearLayout = findViewById(R.id.container);
        sVafContext = ((MyApplication) getApplication()).getVafContext();
        sViewManager = ((MyApplication) getApplication()).getViewManager();
        getListData();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_preview, menu);
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Single menu entry: "refresh" re-fetches the template list.
        if (item.getItemId() == R.id.menu_refresh) {
            getListData();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Clears the container, then (on a worker thread) asks the server for its
     * directory listing and renders each matching template.
     */
    private void getListData() {
        mLinearLayout.removeAllViews();
        ThreadUtils.runOnWork(new Runnable() {
            @Override
            public void run() {
                Request request = new Request.Builder()
                        .url(HttpUtil.getHostUrl() + ".dir")
                        .build();
                try {
                    Response response = mHttpClient.newCall(request).execute();
                    if (response.isSuccessful() && response.body() != null) {
                        String string = response.body().string();
                        final String[] dirs = new Gson().fromJson(string, String[].class);
                        if (dirs != null) {
                            for (String name : handleName(dirs)) {
                                // Sequential synchronous fetch per template.
                                refreshByUrl(name);
                            }
                        } else {
                            Utils.toast("No templates!");
                        }
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                    Utils.toast("Server is not running!");
                }
            }
        });
    }

    /** Keeps only the "Layout*" entries and sorts them alphabetically. */
    private List<String> handleName(String[] names) {
        List<String> list = new ArrayList<>(names.length);
        for (String name : names) {
            if (name.startsWith("Layout"))
                list.add(name);
        }
        Collections.sort(list);
        return list;
    }

    /**
     * Downloads one template's preview payload, registers its binaries and
     * renders it on the main thread. Must run on a worker thread (performs
     * synchronous network I/O).
     */
    private void refreshByUrl(final String name) {
        final String url = HttpUtil.getHostUrl() + name + "/data.json";
        Request request = new Request.Builder()
                .url(url)
                .build();
        try {
            Response response = mHttpClient.newCall(request).execute();
            if (response.isSuccessful() && response.body() != null) {
                String string = response.body().string();
                final RealtimeDetailActivity.PreviewData previewData = new Gson().fromJson(string, RealtimeDetailActivity.PreviewData.class);
                if (previewData != null) {
                    loadTemplates(previewData.templates);
                    // View creation must happen on the main thread.
                    ThreadUtils.runOnMain(new Runnable() {
                        @Override
                        public void run() {
                            JsonObject json = previewData.data;
                            if (json != null) {
                                try {
                                    // Bridge Gson's JsonObject to fastjson,
                                    // which the VirtualView binding expects.
                                    JSONObject mJsonData = JSON.parseObject(json.toString());
                                    preview(name, mJsonData);
                                } catch (Exception e) {
                                    e.printStackTrace();
                                }
                            }
                        }
                    });
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
            Utils.toast("Server is not running!");
        }
    }

    /** Decodes and registers each Base64-encoded template binary. */
    private void loadTemplates(ArrayList<String> templates) {
        for (String temp : templates) {
            sViewManager.loadBinBufferSync(Base64.decode(temp, Base64.DEFAULT));
        }
    }

    /** Builds the native view for a template, binds data and appends it. */
    private void preview(String templateName, com.alibaba.fastjson.JSONObject jsonData) {
        if (TextUtils.isEmpty(templateName)) {
            Utils.toast("Template name should not be empty!!!!");
            return;
        }
        View mContainer = sVafContext.getContainerService().getContainer(templateName, true);
        IContainer iContainer = (IContainer) mContainer;
        if (jsonData != null) {
            iContainer.getVirtualView().setVData(jsonData);
        }
        // Copy the virtual view's layout params onto native LayoutParams.
        Layout.Params p = iContainer.getVirtualView().getComLayoutParams();
        LinearLayout.LayoutParams marginLayoutParams = new LinearLayout.LayoutParams(p.mLayoutWidth, p.mLayoutHeight);
        marginLayoutParams.leftMargin = p.mLayoutMarginLeft;
        marginLayoutParams.topMargin = p.mLayoutMarginTop;
        marginLayoutParams.rightMargin = p.mLayoutMarginRight;
        marginLayoutParams.bottomMargin = p.mLayoutMarginBottom;
        mLinearLayout.addView(mContainer, marginLayoutParams);
    }
}
<file_sep>/app/src/main/java/custom/TagTextView.java
package custom;
import android.content.Context;
import android.util.AttributeSet;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.widget.AppCompatTextView;
import com.tmall.wireless.vaf.virtualview.core.IView;
/**
 * author : Bill
 * date : 2021/3/11
 * description : Native TextView that implements VirtualView's IView so it can
 * be embedded in a virtual-view layout; every IView method simply delegates
 * to the corresponding android.view.View measure/layout method.
 */
public class TagTextView extends AppCompatTextView implements IView {
    public TagTextView(@NonNull Context context) {
        super(context);
    }
    public TagTextView(@NonNull Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
    }
    public TagTextView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }
    // --- IView: straight delegation to the native View lifecycle ---
    @Override
    public void measureComponent(int widthMeasureSpec, int heightMeasureSpec) {
        this.measure(widthMeasureSpec, heightMeasureSpec);
    }
    @Override
    public void comLayout(int l, int t, int r, int b) {
        this.layout(l, t, r, b);
    }
    @Override
    public void onComMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        this.onMeasure(widthMeasureSpec, heightMeasureSpec);
    }
    @Override
    public void onComLayout(boolean changed, int l, int t, int r, int b) {
        this.onLayout(changed, l, t, r, b);
    }
    @Override
    public int getComMeasuredWidth() {
        return this.getMeasuredWidth();
    }
    @Override
    public int getComMeasuredHeight() {
        return this.getMeasuredHeight();
    }
}
<file_sep>/app/src/main/java/com/bill/virtualviewtest/event/ClickProcessorImpl.java
package com.bill.virtualviewtest.event;
import android.app.Activity;
import android.content.Intent;
import android.util.Log;
import com.bill.virtualviewtest.util.ActivityManager;
import com.bill.virtualviewtest.util.Utils;
import com.bill.virtualviewtest.web.DetailActivity;
import com.tmall.wireless.vaf.virtualview.event.EventData;
import com.tmall.wireless.vaf.virtualview.event.IEventProcessor;
import org.json.JSONException;
import org.json.JSONObject;
/**
 * author : Bill
 * date : 2021/3/12
 * description : Handles click events dispatched by the VirtualView engine.
 * When the clicked view's "action" string is a JSON object, its "url" field
 * opens DetailActivity; otherwise the action text is shown as a toast.
 */
public class ClickProcessorImpl implements IEventProcessor {
    @Override
    public boolean process(EventData data) {
        Log.d("IEventProcessor", "ClickProcessorImpl-> event " + data.mVB.getAction() + " " + data.mVB.getId());
        String action = data.mVB.getAction();
        // No action configured on the view: event is still consumed.
        if (action == null)
            return true;
        // A leading '{' marks a JSON action payload.
        if (action.startsWith("{")) {
            try {
                JSONObject obj = new JSONObject(action);
                // optString returns "" when "url" is missing; DetailActivity
                // presumably handles an empty url -- TODO confirm.
                String url = obj.optString("url");
                gotoDetail(url);
            } catch (JSONException e) {
                e.printStackTrace();
            }
        } else {
            Utils.toast(action);
        }
        return true;
    }
    /** Opens DetailActivity on top of the current activity with the given URL. */
    private void gotoDetail(String url) {
        Activity currentAct = ActivityManager.getInstance().getTopActivity();
        if (currentAct != null) {
            Intent intent = new Intent(currentAct, DetailActivity.class);
            intent.putExtra("url", url);
            currentAct.startActivity(intent);
        }
    }
}
<file_sep>/app/src/main/java/com/bill/virtualviewtest/ui/LocalLoadActivity.java
package com.bill.virtualviewtest.ui;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import com.bill.virtualviewtest.R;
import com.bill.virtualviewtest.base.BaseActivity;
/**
 * Menu screen for the three "local template" demos. The handle* methods are
 * presumably wired via android:onClick in activity_local_load.xml -- confirm
 * against the layout file.
 */
public class LocalLoadActivity extends BaseActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_local_load);
    }
    /** Opens the demo that loads a template from Java byte arrays. */
    public void handleLoadJavaBytes(View view) {
        startActivity(new Intent(this, LocalParserBytesActivity.class));
    }
    /** Opens the demo that loads a template from a string constant. */
    public void handleLoadMd5String(View view) {
        startActivity(new Intent(this, LocalParserMd5StrActivity.class));
    }
    /** Opens the demo that loads a compiled ".out" template file. */
    public void handleLoadOut(View view) {
        startActivity(new Intent(this, LocalParserOutActivity.class));
    }
}
<file_sep>/app/src/main/java/custom/LocalAndNetNetImage.java
package custom;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import com.bill.virtualviewtest.glide.GlideApp;
import com.libra.TextUtils;
import com.libra.Utils;
import com.libra.expr.common.StringSupport;
import com.tmall.wireless.vaf.framework.VafContext;
import com.tmall.wireless.vaf.virtualview.core.ViewBase;
import com.tmall.wireless.vaf.virtualview.core.ViewCache;
/**
 * author : Bill
 * date : 2021/3/10
 * description : Custom-component exercise: a simple virtual view that loads
 * an image from the network (or a local drawable by name) and applies a
 * rotation. (Translated from the original Chinese comment.)
 */
public class LocalAndNetNetImage extends ViewBase {
    // Backing native view that actually renders the image.
    private LocalAndNetImageView mLocalAndNetImageView;
    // String-table ids resolved for the supported attributes.
    private int degreeId;
    private int urlId;
    private int localResId;
    // Parsed attribute values.
    private float degree;
    private String url;
    private String localRes;
    public LocalAndNetNetImage(VafContext context, ViewCache viewCache) {
        super(context, viewCache);
        mLocalAndNetImageView = new LocalAndNetImageView(context.forViewConstruction());
        StringSupport mStringSupport = context.getStringLoader();
        // Resolve attribute-name ids once at construction time.
        degreeId = mStringSupport.getStringId(CustomKey.NET_IMAGE_ATTRS_DEGREE, false);
        urlId = mStringSupport.getStringId(CustomKey.NET_IMAGE_ATTRS_URL, false);
        localResId = mStringSupport.getStringId(CustomKey.NET_IMAGE_ATTRS_LOCAL_RES, false);
    }
    // --- Measure/layout: delegated to the native image view ---
    @Override
    public void onComMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        mLocalAndNetImageView.onComMeasure(widthMeasureSpec, heightMeasureSpec);
    }
    @Override
    public void onComLayout(boolean changed, int l, int t, int r, int b) {
        mLocalAndNetImageView.onComLayout(changed, l, t, r, b);
    }
    @Override
    public void comLayout(int l, int t, int r, int b) {
        super.comLayout(l, t, r, b);
        mLocalAndNetImageView.comLayout(l, t, r, b);
    }
    @Override
    public View getNativeView() {
        return mLocalAndNetImageView;
    }
    @Override
    public int getComMeasuredWidth() {
        return mLocalAndNetImageView.getComMeasuredWidth();
    }
    @Override
    public int getComMeasuredHeight() {
        return mLocalAndNetImageView.getComMeasuredHeight();
    }
    /** Float attribute: only the rotation degree is handled here. */
    @Override
    protected boolean setAttribute(int key, float value) {
        boolean ret = true;
        if (key == degreeId) {
            degree = value;
        } else {
            ret = super.setAttribute(key, value);
        }
        return ret;
    }
    /**
     * String attribute: expression-language (EL) values are deferred via the
     * view cache; literal values are stored in the corresponding field.
     */
    @Override
    protected boolean setAttribute(int key, String stringValue) {
        boolean ret = true;
        if (key == localResId) {
            if (Utils.isEL(stringValue)) {
                // NOTE(review): cached as TYPE_FLOAT although localRes is a
                // string value -- looks like it should be TYPE_STRING; confirm.
                mViewCache.put(this, localResId, stringValue, ViewCache.Item.TYPE_FLOAT);
            } else {
                localRes = stringValue;
            }
        } else if (key == degreeId) {
            if (Utils.isEL(stringValue)) {
                mViewCache.put(this, degreeId, stringValue, ViewCache.Item.TYPE_FLOAT);
            }
            // Non-EL strings are ignored here; the float overload above
            // handles literal degree values.
        } else if (key == urlId) {
            if (Utils.isEL(stringValue)) {
                mViewCache.put(this, urlId, stringValue, ViewCache.Item.TYPE_STRING);
            } else {
                url = stringValue;
            }
        } else {
            ret = super.setAttribute(key, stringValue);
        }
        return ret;
    }
    /** Cancels any pending Glide load when the view is recycled. */
    @Override
    public void reset() {
        super.reset();
        GlideApp.with(mContext.getApplicationContext()).clear(mLocalAndNetImageView);
    }
    /**
     * All attributes parsed: load from the network when no local resource is
     * set, otherwise resolve the drawable by name; then apply the rotation.
     */
    @Override
    public void onParseValueFinished() {
        super.onParseValueFinished();
        if (TextUtils.isEmpty(localRes)) {
            mLocalAndNetImageView.setScaleType(ImageView.ScaleType.CENTER_CROP);
            GlideApp.with(mContext.getApplicationContext())
                    .load(url)
                    .into(mLocalAndNetImageView);
        } else {
            mLocalAndNetImageView.setScaleType(ImageView.ScaleType.CENTER);
            // Resolve a drawable resource id from its name at runtime.
            int id = mContext.getApplicationContext().getResources().getIdentifier(
                    localRes, "drawable", mContext.getApplicationContext().getPackageName());
            if (id > 0)
                mLocalAndNetImageView.setImageResource(id);
            else
                Log.e("LocalAndNetNetImage", "Not found the image in drawable, name is :" + localRes);
        }
        mLocalAndNetImageView.setRotation(degree);
    }
    /** Factory used by the VirtualView engine to instantiate this component. */
    public static class Builder implements ViewBase.IBuilder {
        @Override
        public ViewBase build(VafContext context, ViewCache viewCache) {
            return new LocalAndNetNetImage(context, viewCache);
        }
    }
}
<file_sep>/app/src/main/java/custom/LocalAndNetImageView.java
package custom;
import android.content.Context;
import android.util.AttributeSet;
import androidx.annotation.Nullable;
import com.tmall.wireless.vaf.virtualview.core.IView;
/**
 * author : Bill
 * date : 2021/3/10
 * description : Native ImageView backing LocalAndNetNetImage; implements
 * VirtualView's IView by delegating each call to the corresponding
 * android.view.View measure/layout method.
 */
public class LocalAndNetImageView extends androidx.appcompat.widget.AppCompatImageView implements IView {
    public LocalAndNetImageView(Context context) {
        super(context);
    }
    public LocalAndNetImageView(Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
    }
    public LocalAndNetImageView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }
    // --- IView: straight delegation to the native View lifecycle ---
    @Override
    public void measureComponent(int widthMeasureSpec, int heightMeasureSpec) {
        this.measure(widthMeasureSpec, heightMeasureSpec);
    }
    @Override
    public void comLayout(int l, int t, int r, int b) {
        this.layout(l, t, r, b);
    }
    @Override
    public void onComMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        this.onMeasure(widthMeasureSpec, heightMeasureSpec);
    }
    @Override
    public void onComLayout(boolean changed, int l, int t, int r, int b) {
        this.onLayout(changed, l, t, r, b);
    }
    @Override
    public int getComMeasuredWidth() {
        return this.getMeasuredWidth();
    }
    @Override
    public int getComMeasuredHeight() {
        return this.getMeasuredHeight();
    }
}
<file_sep>/app/src/main/java/com/bill/virtualviewtest/MainActivity.java
package com.bill.virtualviewtest;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import com.bill.virtualviewtest.base.BaseActivity;
import com.bill.virtualviewtest.ui.LocalLoadActivity;
import com.bill.virtualviewtest.ui.NetLoadActivity;
import com.bill.virtualviewtest.ui.RealtimeListActivity;
/**
 * Entry screen: routes to the three demo groups (local templates, realtime
 * server preview, network loading). The handle* methods are presumably wired
 * via android:onClick in activity_main.xml -- confirm against the layout.
 */
public class MainActivity extends BaseActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
    }
    /** Opens the local-template demos. */
    public void handleLocal(View view) {
        startActivity(new Intent(this, LocalLoadActivity.class));
    }
    /** Opens the realtime server-preview demos. */
    public void handleRealtime(View view) {
        startActivity(new Intent(this, RealtimeListActivity.class));
    }
    /** Opens the network-loading demos. */
    public void handleNet(View view) {
        startActivity(new Intent(this, NetLoadActivity.class));
    }
}
<file_sep>/app/src/main/java/com/bill/virtualviewtest/util/HttpUtil.java
package com.bill.virtualviewtest.util;
/**
 * author : Bill
 * date : 2021/3/3
 * description : Address helpers for the local template-preview server used
 * by the realtime demos.
 */
public class HttpUtil {

    /** IP address of the development machine running the preview server. */
    public static String getHostIp() {
        return "10.0.2.83";
    }

    /** Base URL of the preview server, e.g. "http://10.0.2.83:7788/". */
    public static String getHostUrl() {
        return String.format("http://%s:%d/", getHostIp(), 7788);
    }
}
<file_sep>/app/src/main/java/com/bill/virtualviewtest/ui/LocalParserMd5StrActivity.java
package com.bill.virtualviewtest.ui;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Base64;
import android.view.View;
import android.widget.LinearLayout;
import com.bill.virtualviewtest.MyApplication;
import com.bill.virtualviewtest.R;
import com.bill.virtualviewtest.base.BaseActivity;
import com.bill.virtualviewtest.bytes.MYTEST;
import com.bill.virtualviewtest.util.Utils;
import com.tmall.wireless.vaf.framework.VafContext;
import com.tmall.wireless.vaf.framework.ViewManager;
import com.tmall.wireless.vaf.virtualview.core.IContainer;
import com.tmall.wireless.vaf.virtualview.core.Layout;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
/**
 * Loads a VirtualView template from the Base64 string constant MYTEST.STR
 * plus its JSON data from assets, and renders it into the container layout.
 * NOTE(review): near-duplicate of LocalParserOutActivity -- consider sharing
 * the preview/JSON helpers in a common base class.
 */
public class LocalParserMd5StrActivity extends BaseActivity {
    /** Template name registered inside the compiled binary. */
    private static final String NAME = "MyTest";
    /** Base64-encoded compiled template (string constant MYTEST.STR). */
    private static final String TEMPLATE = MYTEST.STR;
    /** Asset path of the JSON data bound to the template. */
    private static final String DATA = "data/MyTest.json";
    private LinearLayout mLinearLayout;
    private VafContext sVafContext;
    private ViewManager sViewManager;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_local_parser);
        mLinearLayout = findViewById(R.id.container);
        sVafContext = ((MyApplication) getApplication()).getVafContext();
        sViewManager = ((MyApplication) getApplication()).getViewManager();
        load();
    }
    /** Registers the template, reads the JSON data and renders the view. */
    private void load() {
        loadTemplates(TEMPLATE);
        JSONObject json = getJSONDataFromAsset(DATA);
        preview(NAME, json);
    }
    /** Decodes the Base64 template and registers it with the view manager. */
    private void loadTemplates(String template) {
        sViewManager.loadBinBufferSync(Base64.decode(template, Base64.DEFAULT));
    }
    /** Builds the native view for the template, binds data and attaches it. */
    private void preview(String templateName, JSONObject jsonData) {
        if (TextUtils.isEmpty(templateName)) {
            Utils.toast("Template name should not be empty!!!!");
            return;
        }
        View mContainer = sVafContext.getContainerService().getContainer(templateName, true);
        IContainer iContainer = (IContainer) mContainer;
        if (jsonData != null) {
            iContainer.getVirtualView().setVData(jsonData);
        }
        // Copy the virtual view's layout params onto native LayoutParams.
        Layout.Params p = iContainer.getVirtualView().getComLayoutParams();
        LinearLayout.LayoutParams marginLayoutParams = new LinearLayout.LayoutParams(p.mLayoutWidth, p.mLayoutHeight);
        marginLayoutParams.leftMargin = p.mLayoutMarginLeft;
        marginLayoutParams.topMargin = p.mLayoutMarginTop;
        marginLayoutParams.rightMargin = p.mLayoutMarginRight;
        marginLayoutParams.bottomMargin = p.mLayoutMarginBottom;
        mLinearLayout.removeAllViews();
        mLinearLayout.addView(mContainer, marginLayoutParams);
    }
    /** Reads an asset as text and parses it into a JSONObject; null on error. */
    private JSONObject getJSONDataFromAsset(String name) {
        try {
            InputStream inputStream = getAssets().open(name);
            BufferedReader inputStreamReader = new BufferedReader(new InputStreamReader(inputStream));
            StringBuilder sb = new StringBuilder();
            String str;
            while ((str = inputStreamReader.readLine()) != null) {
                sb.append(str);
            }
            inputStreamReader.close();
            return new JSONObject(sb.toString());
        } catch (IOException e) {
            e.printStackTrace();
        } catch (JSONException e) {
            e.printStackTrace();
        }
        return null;
    }
}
<file_sep>/app/src/main/java/com/bill/virtualviewtest/widget/ImageType.java
package com.bill.virtualviewtest.widget;
import androidx.annotation.IntDef;
/**
 * author : Bill
 * date : 2021/3/10
 * description : Image display modes; the constant names suggest no shaping,
 * rounded-rect clipping and circular clipping -- confirm at the usage sites.
 */
@IntDef({ImageType.MODE_NONE, ImageType.MODE_ROUND_RECT, ImageType.MODE_CIRCLE})
public @interface ImageType {
    int MODE_NONE = 0;
    int MODE_ROUND_RECT = 1;
    int MODE_CIRCLE = 2;
}
|
454b42b0272633f382e6b108f4c7ce49ed8fa624
|
[
"Java",
"Gradle"
] | 12
|
Gradle
|
YBill/VirtualViewTest
|
216d133957acc6cc4ff6423826905a366aadb9d4
|
b868ee30247b3ffcb99ed36f21627ae73f43d614
|
refs/heads/master
|
<file_sep>package cn.bdqn;
import cn.bdqn.dao.CustomerDao;
import cn.bdqn.entity.Customer;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.jpa.repository.Query;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
@RunWith(SpringJUnit4ClassRunner.class) // Marks this class as a Spring test class
// Load the Spring configuration file
@ContextConfiguration(locations = "classpath:applicationContext.xml")
public class CustomerDaoTest {
    // Inject the DAO under test
    @Autowired
    private CustomerDao customerDao;
    // Persist a new customer to the database
    @Test
    public void testSave(){
        Customer customer = new Customer();
        customer.setCustName("测试");
        customerDao.save(customer);
    }
    /**
     * Update the customer with id 19.
     * save() performs either an update or an insert:
     * if the passed object has no id it inserts,
     * if it has an id it updates.
     */
    @Test
    public void testSave2(){
        // Load the existing customer by id
        Customer customer = customerDao.findOne(19L);
        customer.setCustName("修改数据测试");
        // save() queries the database before updating; if the fetched data
        // equals the new data, no update statement is issued.
        customerDao.save(customer);
    }
    // Delete a row by id
    @Test
    public void delete(){
        customerDao.delete(19L);
    }
    // Query a single row by id
    @Test
    public void findOne(){
        Customer customer = customerDao.findOne(9L);
        System.err.println(customer.getCustName());
    }
    // Query all rows
    @Test
    public void findAll(){
        List<Customer> list = customerDao.findAll();
        for (Customer customer : list) {
            System.err.println(customer.getCustName());
        }
    }
    // Lazy loading with Spring Data JPA:
    // fetch a customer by id using a lazily-loaded proxy.
    @Test
    @Transactional // Required: without an open transaction, lazy loading throws
    public void getById(){
        // getOne(): returns a lazy proxy; the database is hit on first access
        Customer customer = customerDao.getOne(9L);
        System.err.println("------------------------------------------------------");
        System.err.println(customer.getCustName());
    }
    /**
     * Query a customer by exact name.
     */
    @Test
    public void findByCustName(){
        Customer customer = customerDao.findByCustName("青鸟");
        System.err.println(customer.getCustName());
    }
    /**
     * Fuzzy query by customer name AND address (LIKE patterns).
     */
    @Test
    public void findByCustNameLikeAndCustAddressLike(){
        Customer customer = customerDao.findByCustNameLikeAndCustAddressLike("%青鸟%", "%州%");
        System.err.println(customer.getCustName() + " " + customer.getCustAddress());
    }
}
|
dacf78aa3df1e2fd54f1087a2c8aabde6c12f2d4
|
[
"Java"
] | 1
|
Java
|
XieChangHao/Test
|
a66c803035c72c9fe0095fd85c1f4f417f481f32
|
c75e03bea3ea6dbe038188c7240396099f912f05
|
refs/heads/master
|
<file_sep>// (function () {
// angular.module('app', ['ngRoute']).controller('myCtrl', function ($scope, $http) {
// $http.get("https://pokeapi.co/api/v2/pokemon").then(function (response) {
// $scope.data = response.data.results;
// })
// })
// })()
// AngularJS Pokemon browser: a list route ('/') and a details route
// ('/:pokemonName'), both backed by the public PokeAPI.
(function () {
    var app = angular.module('app', ['ngRoute']);
    app.config(function ($routeProvider) {
        $routeProvider
            .when('/', {
                templateUrl: "list.html",
                controller: 'myCtrl'
            })
            .when('/:pokemonName', {
                templateUrl: "details.html",
                controller: 'detailsController'
            })
    })
    // List controller: loads one page of pokemon and exposes the API's
    // next/previous page URLs for pagination.
    app.controller('myCtrl', function ($rootScope, $scope, $http) {
        var self = this;
        self.$onInit = onInit;
        function onInit() {
            $scope.previous = null;
        }
        $http({
            method: 'GET',
            url: 'https://pokeapi.co/api/v2/pokemon/'
        }).then(function (response) {
            // Stored on $rootScope so the details route can reuse the list.
            $rootScope.pokeList = response.data.results;
            $scope.next = response.data.next;
            $scope.previous = response.data.previous;
        });
        // Loads an adjacent page; currUrl is the API's next/previous link.
        $scope.getDetail = function (currUrl) {
            $http({
                method: 'GET',
                url: currUrl
            }).then(function (response) {
                $rootScope.pokeList = response.data.results;
                $scope.next = response.data.next;
                $scope.previous = response.data.previous;
            });
        }
    });
    // Details controller: finds the route's pokemon in the cached list and
    // fetches its abilities, height, weight and sprites.
    app.controller('detailsController', function ($rootScope, $scope, $http, $routeParams) {
        function details() {
            for (var i in $rootScope.pokeList) {
                if ($rootScope.pokeList[i].name == $routeParams.pokemonName) {
                    $http({
                        method: 'GET',
                        url: $rootScope.pokeList[i].url
                    }).then(function (response) {
                        $scope.pokeAbilities = response.data.abilities;
                        $scope.pokeHeight = response.data.height;
                        $scope.pokeWeight = response.data.weight;
                        $scope.pokeSprites = response.data.sprites;
                    })
                }
            }
        }
        // On a deep link (page refresh) the list cache is empty, so fetch
        // the first page before resolving the details.
        if (!$rootScope.pokeList) {
            $http({
                method: 'GET',
                url: "https://pokeapi.co/api/v2/pokemon"
            }).then(function (response) {
                $rootScope.pokeList = response.data.results;
                $scope.next = response.data.next;
                $scope.previous = response.data.previous;
                details();
            });
        } else {
            details();
        }
    });
})()
|
5519d1d069b846adba05350bd15cd791fe5ef0e3
|
[
"JavaScript"
] | 1
|
JavaScript
|
naomiamelia/Pokemon
|
203d1d69e68573afb354c8ad80fa9137215fd4eb
|
a55ab417091e635f6fb3a5f5b2bc3e3f9731b9ee
|
refs/heads/master
|
<file_sep>var myParticles = [];
// p5.js sketch: dragging the mouse spawns particles that drift and bounce
// off the canvas edges; the '#clear_button' element removes them all.
function setup () {
    createCanvas ( 1000, 1000 );
    var cButton = select ( '#clear_button' );
    // cButton.mousePressed ( clearParticles() );  <-- would CALL the handler
    // once instead of registering it; the line below passes the reference.
    cButton.mousePressed ( clearParticles );
}
// Repaint the background and advance/draw every particle each frame.
function draw () {
    background ( 220 );
    //for(var i=0; i < myParticles; i++){
    for ( var i = 0; i < myParticles.length; i++ ){
        //myParticles[i].move;
        myParticles[i].move();
        myParticles[i].render()
    }
}
// Button handler: drop all particles.
function clearParticles () { myParticles = []; }
// Spawn one particle at the cursor for every drag event.
function mouseDragged () {
    var tempParticle = new Particle ( mouseX, mouseY );
    myParticles.push ( tempParticle );
}
// A circle with a random velocity, color and diameter that bounces off
// the canvas edges.
class Particle {
    constructor ( mX, mY ) {
        // this.x = x;
        this.x = mX;
        // this.y = y;
        this.y = mY;
        this.speedX = random ( -3, 3 );
        this.speedY = random ( -3, 3 );
        this.col = color ( random ( 255 ), random ( 255 ), random ( 255 ) );
        this.diameter = random ( 3, 15 );
    }
    move () {
        this.x += this.speedX;
        this.y += this.speedY;
        // if the particle reaches a wall, reverse that velocity component
        if ( this.x > width || this.x < 0 ) this.speedX *= -1;
        if ( this.y > height || this.y < 0 ) this.speedY *= -1;
    }
    render () {
        noStroke ();
        fill ( this.col );
        // ellipse (x, y, this.diameter, this.diameter );
        ellipse ( this.x, this.y, this.diameter, this.diameter );
    }
}
|
53a83a6aac8306184dc597e63184f30511347776
|
[
"JavaScript"
] | 1
|
JavaScript
|
marija2/acc-hw2
|
1d46ed478207613f43a7ae181340c36576dfd0a0
|
a47702c4435afad1ca995057d67ab721a825c562
|
refs/heads/master
|
<file_sep><?php
// Station => cumulative distance (km) from the Charbagh origin; the trip
// distance is the absolute difference of the two entries.
$locations = array(
    'Charbagh' => 0,
    'Indira Nagar' => 10,
    'BBD' => 30,
    'Barabanki' => 60,
    'Basti' => 150,
    'Gorakhpur' => 210,
    'Faizabad' => 100
);

// Request parameters (untrusted user input -- escaped before echoing below).
$pickup = $_REQUEST['pickup'];
$drop = $_REQUEST['drop'];
// TODO(review): the original same-pickup/drop validation was commented out;
// decide whether identical locations should be rejected.
$cabtype = $_REQUEST['cabtype'];
$luggage = $_REQUEST['luggage'];

// Per-km rates for the four distance slabs (0-10, 10-60, 60-160, 160+ km)
// and the flat booking fee for each cab type. The slab boundaries and the
// resulting fares are identical to the original per-type branches.
$rateTable = array(
    'CedMicro' => array('rates' => array(13.5, 12, 10.20, 8.50), 'fee' => 50),
    'CedMini' => array('rates' => array(14.5, 13, 11.20, 9.50), 'fee' => 150),
    'CedRoyal' => array('rates' => array(15.5, 14, 12.20, 10.50), 'fee' => 200),
    'CedSUV' => array('rates' => array(16.5, 15, 13.20, 11.50), 'fee' => 250)
);

/**
 * Slab-based distance fare: the first 10 km bill at $rates[0], km 10-60 at
 * $rates[1], km 60-160 at $rates[2], anything beyond at $rates[3]; the flat
 * booking fee is added on top.
 */
function computeDistanceFare($dis, $rates, $fee)
{
    if ($dis <= 10) {
        $fare = $dis * $rates[0];
    } elseif ($dis <= 60) {
        $fare = 10 * $rates[0] + ($dis - 10) * $rates[1];
    } elseif ($dis <= 160) {
        $fare = 10 * $rates[0] + 50 * $rates[1] + ($dis - 60) * $rates[2];
    } else {
        $fare = 10 * $rates[0] + 50 * $rates[1] + 100 * $rates[2] + ($dis - 160) * $rates[3];
    }
    return $fare + $fee;
}

$dis = abs(($locations[$pickup]) - ($locations[$drop]));
$originaldis = $dis;

// Unknown cab types produce a zero distance fare, matching the original
// switch's default branch (the luggage surcharge still applies below).
$fare = 0;
if (isset($rateTable[$cabtype])) {
    $fare = computeDistanceFare($dis, $rateTable[$cabtype]['rates'], $rateTable[$cabtype]['fee']);
}

// Luggage surcharge: SUVs pay double the standard surcharge per bracket.
// Bracket boundaries are kept exactly as in the original code.
$isSuv = ($cabtype == 'CedSUV');
if ($luggage <= 10 && $luggage != 0) {
    $fare += $isSuv ? 100 : 50;
} elseif ($luggage > 10 && $luggage <= 20) {
    $fare += $isSuv ? 200 : 100;
} elseif ($luggage > 20) {
    $fare += $isSuv ? 400 : 200;
}

// Fix: htmlspecialchars() prevents reflected XSS -- $cabtype comes straight
// from the request and was previously echoed verbatim.
echo '<p> Your Total Fare : <strong>Rs. ' . htmlspecialchars($fare) . '</strong></p>';
echo '<p> Your Total Distance : <strong> ' . htmlspecialchars($originaldis) . ' km</strong></p>';
echo '<p> Your CAB Type : <strong> ' . htmlspecialchars($cabtype) . '</strong></p>';
?>
|
704ccab30bbc11b176f0cad2f33f239d262b9938
|
[
"PHP"
] | 1
|
PHP
|
skyt864/cedcab
|
100dc0fb4e84436ea2eaa6cd30e1106c80e533d6
|
d034c63747442e820005159ba01c645a9d5456cb
|
refs/heads/master
|
<repo_name>eng-somaia/fewpjs-oo-static-methods-lab-re-coded_sanaa_web001<file_sep>/index.js
// String-formatting helpers exposed as static methods.
class Formatter {
  // Uppercase the first character, leaving the rest of the string untouched.
  static capitalize(string) {
    return string.slice(0, 1).toUpperCase() + string.slice(1);
  }

  // Strip every character except letters, digits, hyphens, apostrophes
  // and spaces.
  static sanitize(string) {
    return string.replace(/[^A-Za-z0-9-' ]+/g, '');
  }

  // Title-case: capitalize each word except listed "small" words, but the
  // first word is always capitalized.
  static titleize(string) {
    const SMALL_WORDS = ['the', 'a', 'an', 'but', 'of', 'and', 'for', 'at', 'by', 'from'];
    return string
      .split(' ')
      .map((word, index) =>
        index === 0 || !SMALL_WORDS.includes(word) ? this.capitalize(word) : word
      )
      .join(' ');
  }
}
|
04dd6406e193f97bf7cf660f5a7f4cab94a79560
|
[
"JavaScript"
] | 1
|
JavaScript
|
eng-somaia/fewpjs-oo-static-methods-lab-re-coded_sanaa_web001
|
dd2d8ec3f93fdd270a6fb24d1a6be138e01579bb
|
042dda84a9d036bc668b9cd789002e71107ed36e
|
refs/heads/master
|
<repo_name>tolislagers/ProgrammingAssignment2<file_sep>/cachematrix.R
## makeCacheMatrix stores a matrix in cache
## ---------------------------------------------------------------------------
## Testing makeCacheMatrix and it's operation
## a <- matrix(1:4, 2, 2) -> this will be our testing square matrix
## Run the makeCacheMatrix function
## cm <- makeCacheMatrix(a) -> to make all functions available via cm$
## ia <- solve(a) -> to create an inverted matrix of a
## cm$getsolve() -> see that nothing is stored yet
## cm$setsolve(ia) -> store the inverted matrix
## cm$getsolve() -> check if the inverted matrix was well stored
makeCacheMatrix <- function(x = matrix()) {
    # Cached inverse; NULL means "not computed yet".
    s <- NULL
    # Replace the stored matrix and invalidate the cached inverse.
    set <- function(y) {
        x <<- y
        s <<- NULL
    }
    # Return the stored matrix.
    get <- function() x
    # Store / retrieve the cached inverse.
    setsolve <- function(solve) s <<- solve
    getsolve <- function() s
    # Expose the four accessors as a named list.
    list(set = set, get = get,
         setsolve = setsolve,
         getsolve = getsolve)
}
## cacheSolve displays the inverted/solved version of the matrix
## stored in makeCacheMatrix
## ---------------------------------------------------------------------------
## cacheSolve can be tested by running the function and then
## cacheSolve(cm)
## It will return the solved value of a
## try
## cm2 <- makeCacheMatrix(a)
## followed by
## cacheSolve(cm2)
## to check for non-cached behaviour
cacheSolve <- function(x, ...) {
    # Return the cached inverse if one was already computed.
    s <- x$getsolve()
    if(!is.null(s)) {
        message("getting cached data")
        return(s)
    }
    # Otherwise compute the inverse with solve(), cache it, and return it.
    data <- x$get()
    s <- solve(data, ...)
    x$setsolve(s)
    s
}
|
9cf7ce3a147a14496812773ec8e4e98483cc5abf
|
[
"R"
] | 1
|
R
|
tolislagers/ProgrammingAssignment2
|
4e0b02da4c9e7e7c193dfd355e218f62b8c5622a
|
3a29b046681a061f4590e126df4ab54f65ee665d
|
refs/heads/main
|
<file_sep>const ceaser = (text, key) => {
const newArray = text.split('').map((x) => x.codePointAt(0));
const encrypted = newArray.map((x) => {
//encrypt the original array
if (x >= 97 && x <= 122) {
// for small letters
if (x + key > 122) {
return 96 + x + key - 122;
} else return x + key;
} else if (x >= 65 && x <= 90) {
// for capital letters
if (x + key > 90) {
return 64 + x + key - 90;
} else return x + key;
} else return x; // for everything else
});
//turn new code into letters
const encryptedText = encrypted.map((x) => String.fromCharCode(x)).join('');
return encryptedText;
};
module.exports = ceaser;
<file_sep>const reverse = require('./reverse');
// Spec for reverse(): checks that the string is mirrored.
// NOTE(review): xtest registers the case with Jest but SKIPS it at run
// time — rename to test() to actually execute it.
xtest('reverse vasil to lisav ', () => {
  expect(reverse('vasil')).toBe('lisav');
});
<file_sep>const calculator = require('./calculator');
// Specs for the four calculator operations (add/substract/divide/multiply).
// NOTE(review): every case uses xtest, so the whole suite is SKIPPED by
// Jest — rename to test() to enable.
xtest('add 1 plus 2 to equal 3 ', () => {
  expect(calculator.add(1, 2)).toBe(3);
});
xtest('substract 5 minus 3 to equal 2 ', () => {
  expect(calculator.substract(5, 3)).toBe(2);
});
xtest('divide 5 with 5 to equal 1 ', () => {
  expect(calculator.divide(5, 5)).toBe(1);
});
xtest('multiply 5 with 5 to equal 25 ', () => {
  expect(calculator.multiply(5, 5)).toBe(25);
});
|
82a93c5dc5c11e2148c74b3d525a647ab5a2f275
|
[
"JavaScript"
] | 3
|
JavaScript
|
vbit27/testing
|
7287aa0b986bb421d2008c805c327f05db661b7c
|
2afa6c44a143333e1fb62495c1faa2a8e19bffed
|
refs/heads/master
|
<file_sep>from data.raw_data import *
# Months known to contain invalid/placeholder statistics; drop every record
# whose StatMonth falls in this list.
invalid_month = [197001, 197002, 201412, 201501, 201502, 201607]
gov_vehicle_alarm_monthly = gov_vehicle_alarm_monthly[gov_vehicle_alarm_monthly['StatMonth'].map(lambda x: x not in invalid_month)]

# Over-speed driving: collapse all the individual over-speed alarm columns
# (presumably speed-band x duration-band, day and night variants — TODO
# confirm against the export's schema) into one total per record.
overspeed_columns = ['Overspeed2_5', 'Overspeed2_510', 'Overspeed2_10',
                     'Overspeed25_510', 'Overspeed25_10', 'Overspeed25_5', 'Overspeed5_5',
                     'Overspeed5_510', 'Overspeed5_10', 'NightOverspeed2_5',
                     'NightOverspeed2_510', 'NightOverspeed2_10', 'NightOverspeed25_5',
                     'NightOverspeed25_510', 'NightOverspeed25_10', 'NightOverspeed5_5',
                     'NightOverspeed5_510', 'NightOverspeed5_10']
gov_vehicle_alarm_monthly['overspeed_all'] = 0
for column in overspeed_columns:
    gov_vehicle_alarm_monthly['overspeed_all'] = gov_vehicle_alarm_monthly['overspeed_all'] + gov_vehicle_alarm_monthly[column]

# Keep only the columns needed downstream.
need_columns = ['VehicleID', 'StatMonth', 'Exigency', 'overspeed_all', 'FatigueDrive', 'VehicleType', 'ZoneID', 'CompanyID']
gov_vehicle_alarm_monthly = gov_vehicle_alarm_monthly[need_columns]
# Commented-out one-hot / dummy-encoding experiments, kept for reference:
# gov_vehicle_alarm_monthly['month'] = gov_vehicle_alarm_monthly['StatMonth'].apply(lambda m : str(m)[-2:])
# print(len(gov_vehicle_alarm_monthly['VehicleType'].unique()))
# print(len(gov_vehicle_alarm_monthly['ZoneID'].unique()))
# print(len(gov_vehicle_alarm_monthly['CompanyID'].unique()))
# print(len(gov_vehicle_alarm_monthly['month'].unique()))
# gov_vehicle_alarm_monthly = pd.get_dummies(gov_vehicle_alarm_monthly,prefix='vehicle_type', columns=['VehicleType'])
# gov_vehicle_alarm_monthly = pd.get_dummies(gov_vehicle_alarm_monthly, prefix='zone_id', columns=['ZoneID'])
# gov_vehicle_alarm_monthly = pd.get_dummies(gov_vehicle_alarm_monthly,prefix='company_id', columns=['CompanyID'])
# gov_vehicle_alarm_monthly = pd.get_dummies(gov_vehicle_alarm_monthly,prefix='month', columns=['month'])
print(gov_vehicle_alarm_monthly.columns)
# Persist the trimmed table sorted by vehicle and month; the streaming pass
# below relies on this ordering to group rows per vehicle.
gov_vehicle_alarm_monthly.sort_values(['VehicleID', 'StatMonth'], axis=0).to_csv(data_dir + 'gov_vehicle_alarm_monthly_need_data.csv', index=False)

# Counting backwards from 201606: vehicles with fewer than 8 consecutive
# valid months are discarded.
months = [201503, 201504, 201505, 201506, 201507, 201508, 201509, 201510, 201511, 201512, 201601, 201602,
          201603, 201604, 201605, 201606]
gov_vehicle_alarm_monthly_need_data_normal = open(data_dir + 'gov_vehicle_alarm_monthly_need_data_normal_only_alarm.csv', mode='w')


def deal_one_user_records(single_user_records):
    # Collect the longest suffix of this vehicle's records whose StatMonth
    # values line up, month by month, with the tail of `months`.
    # NOTE(review): range(-1, -len, -1) never reaches index -len, so the
    # OLDEST record is never examined — confirm this off-by-one is intended.
    valid_single_user_recoreds = []
    for i in range(-1, -1 * len(single_user_records), -1):
        if str(months[i]) == single_user_records[i][1]:
            valid_single_user_recoreds.append(','.join(single_user_records[i]))
        else:
            break
    # Only vehicles with at least 8 matching months are written out:
    # oldest month first, months joined by ';' on a single line.
    if len(valid_single_user_recoreds) >= 8:
        valid_single_user_recoreds.reverse()
        gov_vehicle_alarm_monthly_need_data_normal.write(';'.join(valid_single_user_recoreds) + '\n')


# Stream the per-month CSV (sorted by VehicleID,StatMonth above), grouping
# consecutive rows that share a VehicleID and flushing each completed group
# through deal_one_user_records.
with open(data_dir + 'gov_vehicle_alarm_monthly_need_data.csv') as gov_vehicle_alarm_monthly_need_data_file:
    single_user_records = []
    current_user = ''
    gov_vehicle_alarm_monthly_need_data_file.readline()  # skip the header row
    lines = gov_vehicle_alarm_monthly_need_data_file.readlines()
    count = 0
    for line in lines:
        count += 1
        parts = line.strip().split(',')
        if current_user == '':
            current_user = parts[0]
            single_user_records.append(parts)
        else:
            if current_user == parts[0]:
                single_user_records.append(parts)
                # last line of the file: flush the final group explicitly
                # NOTE(review): if the very last line starts a NEW vehicle,
                # its single-row group is dropped — harmless here because a
                # one-month group can never reach the 8-month threshold.
                if count == len(lines):
                    deal_one_user_records(single_user_records)
            else:
                deal_one_user_records(single_user_records)
                current_user = parts[0]
                single_user_records = [parts]
gov_vehicle_alarm_monthly_need_data_normal.close()
<file_sep>import pandas as pd
import os
import sys

# Local cache directory for trimmed CSV copies, and the directory holding
# the full original exports (Windows paths; machine-specific).
data_dir = r'D:\document\program\ml\machine-learning-databases\wangqing\\'
origional_data_dir = r'G:\partofdata\\'


def load_data(filename, nrows):
    """Return `filename` as a pandas DataFrame.

    On first use, copy the first `nrows` rows of the original export into
    `data_dir`, so later runs read the (smaller, local) cached file instead
    of the full export.
    """
    if not os.path.exists(data_dir + filename):
        pd.read_csv(origional_data_dir + filename, nrows=nrows).to_csv(data_dir + filename, index=False)
    return pd.read_csv(data_dir + filename)


# Monthly alarm table; sys.maxsize as nrows effectively loads every row.
gov_vehicle_alarm_monthly = load_data('GOVVehicleAlarmMonthly.csv', sys.maxsize)
# Exploratory peeks at other exports, kept for reference:
# pd.read_csv(origional_data_dir + 'GOV6BanVehicleDaily.csv', nrows=10)
#
# pd.read_csv(origional_data_dir + 'GOVExigencyDetail.csv', nrows=10)
#
# pd.read_csv(origional_data_dir + 'GOVFatigueDriveDetail.csv', nrows=10)
#
# pd.read_csv(origional_data_dir + 'GOVOverspeedDetail.csv', nrows=10)
<file_sep>import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
from preprocess.gov_vehicle_alarm_monthly_preprocess_overspeed_for_lstm import overspeed_data
from data.raw_data import data_dir
save_mode_dir = 'model/overspeed/'
predict_file_path = data_dir + 'overspeed_data_predict.csv'
train = False  # True: fit the model; False: restore the checkpoint and predict

# ----- build the training set -----
# constants / hyper-parameters
time_step = 6      # time steps fed to the RNN
rnn_unit = 100     # hidden layer units
batch_size = 60    # samples per training batch
input_size = 1     # input dimension (overwritten below from the data)
output_size = 1    # output dimension
lr = 0.0001        # learning rate
epoch = 10

# ----- load the data -----
data_train = []
valid_data = []
for one_sample in overspeed_data:
    # hold out the last two months of every series for validation
    input_size = len(one_sample[0])
    data_train.append(one_sample[:-2])
    valid_data.append(one_sample[-2:])

# Slide a window of `time_step` months over every series; the target is the
# same window shifted one month forward, first feature only (the overspeed
# count — presumably; confirm against the preprocessing that built the file).
train_x, train_y = [], []  # training set
for one_sample in data_train:
    for i in range(len(one_sample) - time_step - 1):
        x = one_sample[i:i + time_step]
        y = [[month[0]] for month in one_sample[i + 1:i + time_step + 1]]
        train_x.append(x)
        train_y.append(y)
# Standardise the inputs; NaNs produced by zero-variance features become 0.
train_x_array = np.array(train_x)
mean = np.mean(train_x, axis=0)
std = np.std(train_x, axis=0)
train_x = np.nan_to_num(((train_x_array - mean) / std)).tolist()

# ----- network variables (TF1 graph mode) -----
X = tf.placeholder(tf.float32, [None, time_step, input_size])   # batch input tensor
Y = tf.placeholder(tf.float32, [None, time_step, output_size])  # matching labels
# input/output layer weights and biases
weights = {
    'in': tf.Variable(tf.random_normal([input_size, rnn_unit])),
    'out': tf.Variable(tf.random_normal([rnn_unit, 1]))
}
biases = {
    'in': tf.Variable(tf.constant(0.1, shape=[rnn_unit, ])),
    'out': tf.Variable(tf.constant(0.1, shape=[1, ]))
}
def lstm(batch):
    """Build the recurrent network graph.

    batch: number of sequences per batch (used to size the initial state).
    Returns (pred, final_states): pred has one prediction row per time step
    of every sequence in the batch.
    """
    w_in = weights['in']
    b_in = biases['in']
    # flatten to 2-D for the input projection, then back to 3-D for the cell
    input = tf.reshape(X, [-1, input_size])
    input_rnn = tf.matmul(input, w_in) + b_in
    input_rnn = tf.reshape(input_rnn, [-1, time_step, rnn_unit])
    # cell=tf.nn.rnn_cell.BasicLSTMCell(rnn_unit)
    cell = tf.nn.rnn_cell.GRUCell(rnn_unit)  # a GRU cell, despite the function name
    init_state = cell.zero_state(batch, dtype=tf.float32)
    # output_rnn holds every step's output; final_states is the last cell state
    output_rnn, final_states = tf.nn.dynamic_rnn(cell, input_rnn, initial_state=init_state, dtype=tf.float32)
    output = tf.reshape(output_rnn, [-1, rnn_unit])  # input to the output layer
    w_out = weights['out']
    b_out = biases['out']
    pred = tf.matmul(output, w_out) + b_out
    return pred, final_states
# ----- training -----
def train_lstm():
    """Train the network on train_x/train_y and checkpoint to save_mode_dir."""
    global batch_size
    pred, _ = lstm(batch_size)
    # mean-squared-error loss over every time step of the batch
    loss = tf.reduce_mean(tf.square(tf.reshape(pred, [-1]) - tf.reshape(Y, [-1])))
    train_op = tf.train.AdamOptimizer(lr).minimize(loss)
    saver = tf.train.Saver(tf.global_variables())
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        # epochs
        for i in range(epoch):
            step = 0
            start = 0
            end = start + batch_size
            while (end < len(train_x)):
                _, loss_ = sess.run([train_op, loss], feed_dict={X: train_x[start:end], Y: train_y[start:end]})
                start += batch_size
                end = start + batch_size
                # log and checkpoint every 10 steps
                # (message text "保存模型:" = "model saved"; left untranslated
                # because it is a runtime string)
                if step % 10 == 0:
                    print(i, step, loss_)
                    print("保存模型:", saver.save(sess, save_mode_dir + 'lstm.model'))
                step += 1
# ----- prediction -----
def prediction():
    """Restore the latest checkpoint and roll each training series forward
    3 steps, writing the per-series predictions (space-separated, one line
    per series) to predict_file_path."""
    pred, _ = lstm(1)  # at inference time feed one [1, time_step, input_size] sample
    saver = tf.train.Saver(tf.global_variables())
    with tf.Session() as sess:
        # restore parameters from the newest checkpoint
        module_file = tf.train.latest_checkpoint(save_mode_dir)
        saver.restore(sess, module_file)
        predicts = []
        sample_index = 0
        for sample in data_train:
            prev_seq = np.array(sample)
            prev_seq = prev_seq[-1 * time_step:]  # last window of the series
            predict = []
            # produce the next predictions one step at a time
            for i in range(3):
                next_seq = sess.run(pred, feed_dict={X: [prev_seq]})
                predict += next_seq[-1].tolist()
                # Slide the window forward, feeding the held-out ground truth
                # while it is available (valid_data holds 2 months, so the
                # guard skips the slide on the final iteration).
                if i < len(valid_data[0]):
                    prev_seq = np.vstack((prev_seq[1:], valid_data[sample_index][i]))
            predict = [str(num) for num in predict]
            predicts.append(predict)
            sample_index += 1
        with open(predict_file_path, mode='w', encoding='utf-8') as predict_file:
            for predict in predicts:
                predict_file.write(' '.join(predict) + '\n')
if train:
train_lstm()
else:
prediction()<file_sep>from data.raw_data import data_dir
# Column layout of each month-record in the normalised file (reference only).
columns = ['VehicleID', 'StatMonth', 'Exigency', 'overspeed_all', 'FatigueDrive', 'VehicleType', 'ZoneID', 'CompanyID']

# Each line of the normalised file is one vehicle: months separated by ';',
# features within a month separated by ','. Keep FatigueDrive (index 4) plus
# the trailing categorical columns, converted to float.
fatigue_drive_data = []
with open(data_dir + 'gov_vehicle_alarm_monthly_need_data_normal.csv') as data_normal_file:
    for line in data_normal_file:
        one_sample = [[element for element in month.split(',')] for month in line.split(';')]
        one_sample = [[month[4]] + month[5:] for month in one_sample]
        fatigue_drive_data.append([[float(feature) for feature in month] for month in one_sample])
<file_sep>from data.raw_data import data_dir
# Column layout of each month-record in the normalised file (reference only).
columns = ['VehicleID', 'StatMonth', 'Exigency', 'overspeed_all', 'FatigueDrive', 'VehicleType', 'ZoneID', 'CompanyID']
test = False            # True: stop after test_sample_num vehicles (fast dev runs)
test_sample_num = 500

# Each line of the normalised file is one vehicle: months separated by ';',
# features within a month separated by ','. Keep Exigency (index 2) plus the
# trailing categorical columns, converted to float.
exigency_data = []
sample_count = 0
with open(data_dir + 'gov_vehicle_alarm_monthly_need_data_normal.csv') as data_normal_file:
    for line in data_normal_file:
        if test and sample_count >= test_sample_num:
            break
        one_sample = [[element for element in month.split(',')] for month in line.split(';')]
        one_sample = [[month[2]] + month[5:] for month in one_sample]
        exigency_data.append([[float(feature) for feature in month] for month in one_sample])
        sample_count += 1
|
a551d4428ab0e63b07bba3e581b97f37f4607d6e
|
[
"Python"
] | 5
|
Python
|
l294265421/wang_qing
|
cfd2d9b6c62dc948df80c291d938db826f21b5de
|
9ef8863d11a81b2cee4ccbdf404adb24a5dc4e53
|
refs/heads/master
|
<file_sep><?php
/**
*
* BMG
* © GroSoft, 2016
*
* Classe technique
* Fournit des méthodes statiques permettant l'affichage de parties génériques
*
* @package default
* @author dk
* @version 1.0
*/
class AdminRender {
/**
* Constantes
*/
// icones et messages d'erreur
const MSG_SUCCESS = 'b-alert-success f-alert-success';
const MSG_WARNING = 'b-alert-warning f-alert-warning';
const MSG_INFO = 'b-alert-info f-alert-info';
const MSG_ERROR = 'b-alert-error f-alert-error';
const MSG_QUESTION = 'b-alert-question f-alert-question';
const ICON_SUCCESS = 'fa fa-check-circle-o';
const ICON_WARNING = 'fa fa-exclamation-circle';
const ICON_INFO = 'fa fa-info-circle';
const ICON_QUESTION = 'fa fa-question-circle';
const ICON_ERROR = 'fa fa-exclamation-triangle';
/*
* composant d'affichage d'un message d'erreur
* @param $message : le message à afficher
* @param $boxStyle : style de massage, voir constantes MSG_
* @param $inconStyle : icone, voir contrantes ICON_
*/
static function showMessage($message,$boxStyle,$iconStyle) {
$component = '';
$component .= '<div class="';
$component .= $boxStyle.'">';
$component .= '<div class="b-remaining">';
$component .= '<i class="';
$component .= $iconStyle;
$component .= '"></i> ';
$component .= $message;
$component .= '</div></div>';
return $component;
}
    /**
     * Echoes every pending session notification as an alert box, mapping
     * the notification type (INFO/ERROR/SUCCESS/WARNING constants) to the
     * matching MSG_* / ICON_* styles, then clears the queue via
     * Application::resetNotifications().
     * Reads $_SESSION['notifications']; each entry exposes getType()/getMsg().
     */
    public static function showNotifications() {
        if (Application::nbNotifications() > 0) {
            foreach ($_SESSION['notifications'] as $notification) {
                switch($notification->getType()) {
                    case INFO : {
                        $typeMsg = self::MSG_INFO;
                        $icon = self::ICON_INFO;
                    } break;
                    case ERROR : {
                        $typeMsg = self::MSG_ERROR;
                        $icon = self::ICON_ERROR;
                    } break;
                    case SUCCESS : {
                        $typeMsg = self::MSG_SUCCESS;
                        $icon = self::ICON_SUCCESS;
                    } break;
                    case WARNING : {
                        $typeMsg = self::MSG_WARNING;
                        $icon = self::ICON_WARNING;
                    } break;
                }
                // render this notification with the resolved styles
                echo self::showMessage($notification->getMsg(),$typeMsg,$icon);
            }
            Application::resetNotifications();
        }
    }
    /**
     * Echoes a <select> element built from a result set of {id, label} rows.
     * @param array  $tab      two-column array: [0] => id, [1] => label
     * @param string $classe   CSS class applied to the element
     * @param string $id       id (and name) of the select element
     * @param int    $size     size attribute of the select element
     * @param string $idSelect id of the entry to pre-select; when empty,
     *                         defaults to the first row's id
     * @param string $onchange JavaScript handler for the onchange() event
     * @return string the id that ended up pre-selected
     */
    public static function displayList ($tab, $classe, $id, $size, $idSelect, $onchange) {
        // render the opening <select> tag
        // NOTE(review): the id attribute is emitted twice — browsers ignore
        // the duplicate, but it is invalid HTML and worth cleaning up.
        echo '<select class="'.$classe.'" id="'.$id.'" name="'.$id.'" id="'.$id.'" size="'
            .$size.'" onchange="'.$onchange . '">';
        if (count($tab) && (empty($idSelect))) {
            $idSelect = $tab[0][0];
        }
        foreach ($tab as $ligne) {
            // the entry matching $idSelect is pre-selected
            if ($ligne[0] != $idSelect) {
                echo '<option value="'.$ligne[0].'">'.$ligne[1].'</option>';
            }
            else {
                echo '<option selected value="'.$ligne[0].'">'.$ligne[1].'</option>';
            }
        }
        echo '</select>';
        return $idSelect;
    }
/**
* Retourne une balise img
* @param string $dir : le nom du dossier
* @param int $id : le numéro de la photo
* @param string $class : le nom d'une classe CSS
* @param int $maxWidth : largeur max de l'image
* @param int $maxHeight : hauteur max de l'image
* @return string
*/
public static function getImage($dir, $id, $class) {
$img = '<img class="'.$class.'" src="';
$imgName = $dir.$id.'.jpg';
if (file_exists($imgName)) {
$img .= $imgName.'" alt=""';
}
else {
$img .= NOT_FOUND_IMG.'" alt="Image indisponible"';
}
$img .= ' />';
return $img;
}
}<file_sep><header id="main-header">
<hr />
<div>
    <span class="titre-entete">
        Bibliothèque municipale de Groville
    </span>
</div>
<hr />
<div id="infos-util">
    <?php
    // Show the logged-in user's first and last name (session values
    // presumably set at login — confirm against the auth controller).
    echo 'Connecté : '
        .$_SESSION['prenom']." "
        .$_SESSION['nom'];
    ?>
</div>
</header>
</header><file_sep><?php
/**
 * CAG application landing page.
 * Static view: displays the application title, a restricted-access notice
 * and the welcome logo. No server-side logic beyond this header.
 * @author
 * @package default
 */
?>
<div>
<div id="titre-accueil">
<span class="gro-titre">
BMG<br />
Intranet
</span>
<span class="erreur">Accès réservé</span>
</div>
<div id="logo_accueil">
<img src="img/logo_accueil.jpg" width="250" height="250" alt="" />
</div>
</div>
<file_sep><?php
/**
* Contrôleur secondaire chargé de la gestion des ouvrages
* @author dk
* @package default (mission 4)
*/
// Secondary controller for book (ouvrage) management.
// Determine the requested action; default to listing all books.
if (isset($_GET["action"])) {
    $action = $_GET["action"];
}
else {
    $action = 'listerOuvrages';
}
// page title used by the views
$titrePage = 'Gestion des ouvrages';
// error accumulator: label => message/value pairs rendered by
// v_afficherErreurs.php when $hasErrors is true
$tabErreurs = array();
$hasErrors = false;
// open a database connection (released by disconnectDB at the end of
// this controller)
$cnx = connectDB();
// dispatch to the view matching the user's choice
switch ($action) {
case 'consulterOuvrage' : {
if (isset($_GET["id"])) {
$intID = intval(htmlentities($_GET["id"]));
// récupération des valeurs dans la base
$strSQL = "SELECT no_ouvrage as ID, "
."titre, "
."acquisition, "
."lib_genre, "
."salle, "
."rayon, "
."dernier_pret, "
."disponibilite, "
."auteur "
."FROM v_ouvrages "
."WHERE no_ouvrage = ".$intID;
try {
$lOuvrage = getRows($cnx, $strSQL, array($intID));
if ($lOuvrage) {
$strTitre = $lOuvrage[0][1];
$strAcquisition = $lOuvrage[0][2];
$strGenre = $lOuvrage[0][3];
$strSalle = $lOuvrage[0][4];
$strRayon = $lOuvrage[0][5];
$strDernierPret = $lOuvrage[0][6];
$strDispo = $lOuvrage[0][7];
$strAuteur = $lOuvrage[0][8];
}
else {
$tabErreurs["Erreur"] = "Cet ouvrage n'existe pas !";
$tabErreurs["ID"] = $intID;
$hasErrors = true;
}
}
catch (PDOException $e) {
$tabErreurs["Erreur"] = $e->getMessage();
$hasErrors = true;
}
}
else {
// pas d'id dans l'url ni clic sur Valider : c'est anormal
$tabErreurs["Erreur"] =
"Aucun ouvrage n'a été transmis pour consultation !";
$hasErrors = true;
}
if ($hasErrors) {
$msg = "Une erreur s'est produite :";
include 'vues/v_afficherErreurs.php';
}
else {
include 'vues/v_consulterOuvrage.php';
}
} break;
case 'ajouterOuvrage' : {
// initialisation des variables
$strTitre = '';
$intSalle = 1;
$strRayon = '';
$strGenre = '';
$strDate = '';
// traitement de l'option : saisie ou validation ?
if (isset($_GET["option"])) {
$option = htmlentities($_GET["option"]);
}
else {
$option = 'saisirOuvrage';
}
switch($option) {
case 'saisirOuvrage' : {
$strSQL = "SELECT code_genre, lib_genre FROM genre";
$lesGenres = getRows($cnx, $strSQL, array());
include 'vues/v_ajouterOuvrage.php';
} break;
case 'validerOuvrage' : {
// tests de gestion du formulaire
if (isset($_POST["cmdValider"])) {
// récupération des valeurs saisies
if (!empty($_POST["txtTitre"])) {
$strTitre = ucfirst($_POST["txtTitre"]);
}
$intSalle = $_POST["rbnSalle"];
if (!empty($_POST["txtRayon"])) {
$strRayon = ucfirst($_POST["txtRayon"]);
}
$strGenre = $_POST["cbxGenres"];
if (!empty($_POST["txtRayon"])) {
$strDate = $_POST["txtDate"];
}
// test zones obligatoires
if (!empty($strTitre) and
!empty($strRayon) and
!empty($strDate)) {
// tests de cohérence
// test de la date d'acquisition
$dateAcquisition = new DateTime($strDate);
$curDate = new DateTime(date('Y-m-d'));
if ($dateAcquisition > $curDate) {
// la date d'acquisition est postérieure à la date du jour
$tabErreurs["Erreur date"] = "La date d'acquisition doit être antérieure ou égale à la date du jour";
$tabErreurs["Date"] = $strDate;
$hasErrors = true;
}
// contrôle du rayon
if (!rayonValide($strRayon)) {
$tabErreurs["Erreur rayon"] = "Le rayon n'est pas valide, il doit comporter une lettre et un chiffre !";
$tabErreurs["Rayon"] = $strRayon;
$hasErrors = true;
}
}
else {
if (empty($strTitre)) {
$tabErreurs["Titre"] = "Le titre doit être renseigné !";
}
if (empty($strRayon)) {
$tabErreurs["Rayon"] = "Le rayon doit être renseigné !";
}
if (empty($strDate)) {
$tabErreurs["Acqisition"] = "La date d'acquisition doit être renseignée !";
}
$hasErrors = true;
}
if (!$hasErrors) {
// ajout dans la base de données
$strSQL = "INSERT INTO ouvrage (titre, salle, rayon, code_genre, date_acquisition) "
. "VALUES (?,?,?,?,?)";
try {
$res = execSQL(
$cnx, $strSQL, array(
$strTitre,
$intSalle,
$strRayon,
$strGenre,
$strDate
)
);
if ($res) {
$msg = '<span class="info">L\'ouvrage '
.$strTitre.' a été ajouté</span>';
// récupération du numéro (auto-incrément)
$strSQL = "SELECT MAX(no_ouvrage) FROM ouvrage";
$intID = getValue($cnx, $strSQL, array());
// récupération des valeurs dans la base
$strSQL = "SELECT no_ouvrage as ID, "
."titre, "
."acquisition, "
."lib_genre, "
."salle, "
."rayon, "
."dernier_pret, "
."disponibilite, "
."auteur "
."FROM v_ouvrages "
."WHERE no_ouvrage = ".$intID;
$lOuvrage = getRows($cnx, $strSQL, array($intID));
if ($lOuvrage) {
$strTitre = $lOuvrage[0][1];
$strAcquisition = $lOuvrage[0][2];
$strGenre = $lOuvrage[0][3];
$strSalle = $lOuvrage[0][4];
$strRayon = $lOuvrage[0][5];
$strDernierPret = $lOuvrage[0][6];
$strDispo = $lOuvrage[0][7];
$strAuteur = $lOuvrage[0][8];
}
else {
$msg = "Cet ouvrage n'existe pas !";
}
include 'vues/v_consulterOuvrage.php';
}
else {
$tabErreurs["Erreur"] = "Une erreur s'est produite
dans l'opération d'ajout !";
$tabErreurs["Titre"] = $strTitre;
$tabErreurs["Salle"] = $intSalle;
$tabErreurs["Rayon"] = $strRayon;
$tabErreurs["Genre"] = $strGenre;
$tabErreurs["Acquisition"] = $strDate;
$hasErrors = true;
}
}
catch (PDOException $e) {
$tabErreurs["Erreur"] =
"Une exception PDO a été levée !";
$hasErrors = true;
}
}
else {
$msg = "L'opération d'ajout n'a pas pu être menée
à terme en raison des erreurs suivantes :";
$lien = '<a href="index.php?uc=gererOuvrages&action=ajouterOuvrage">Retour à la saisie</a>';
include 'vues/v_afficherErreurs.php';
}
}
} break;
}
} break;
case 'modifierOuvrage' : {
// initialisation des variables
$strNom = '';
$strPrenom = '';
$strAlias = '';
$strNotes = '';
// traitement de l'option : saisie ou validation ?
if (isset($_GET["option"])) {
$option = htmlentities($_GET["option"]);
}
else {
$option = 'saisirOuvrage';
}
switch($option) {
case 'saisirOuvrage' : {
// récupération du code
if (isset($_GET["id"])) {
$intID = intval(htmlentities($_GET["id"]));
// récupération des données dans la base
$strSQL = "SELECT titre, salle, rayon, code_genre, date_acquisition "
."FROM ouvrage "
."WHERE no_ouvrage = ".$intID;
$lOuvrage = getRows($cnx, $strSQL, array($intID));
if (count($lOuvrage) == 1) {
$strTitre = $lOuvrage[0][0];
$intSalle = $lOuvrage[0][1];
$strRayon = $lOuvrage[0][2];
$strGenre = $lOuvrage[0][3];
$strDate = $lOuvrage[0][4]; }
else {
$tabErreurs["Erreur"] = "Cet ouvrage n'existe pas !";
$tabErreurs["ID"] = $intID;
$hasErrors = true;
}
}
include 'vues/v_modifierOuvrage.php';
} break;
case 'validerOuvrage' : {
// si on a cliqué sur Valider
if (isset($_POST["cmdValider"])) {
// mémoriser les données pour les réafficher dans le formulaire
$intID = intval($_POST["txtID"]);
// récupération des valeurs saisies
if (!empty($_POST["txtTitre"])) {
$strTitre = ucfirst($_POST["txtTitre"]);
}
$intSalle = $_POST["rbnSalle"];
if (!empty($_POST["txtRayon"])) {
$strRayon = ucfirst($_POST["txtRayon"]);
}
$strGenre = $_POST["cbxGenres"];
if (!empty($_POST["txtRayon"])) {
$strDate = $_POST["txtDate"];
}
// test zones obligatoires
if (!empty($strTitre) and
!empty($strRayon) and
!empty($strDate)) {
// tests de cohérence
// test de la date d'acquisition
$dateAcquisition = new DateTime($strDate);
$curDate = new DateTime(date('Y-m-d'));
if ($dateAcquisition > $curDate) {
// la date d'acquisition est postérieure à la date du jour
$tabErreurs["Erreur date"] = 'La date d\'acquisition doit être antérieure ou égale à la date du jour';
$tabErreurs["Date"] = $strDate;
$hasErrors = true;
}
// contrôle du rayon
if (!rayonValide($strRayon)) {
$tabErreurs["Erreur rayon"] = 'Le rayon n\'est pas valide, il doit comporter une lettre et un chiffre !';
$tabErreurs["Rayon"] = $strRayon;
$hasErrors = true;
}
}
else {
if (empty($strTitre)) {
$tabErreurs["Titre"] = "Le titre doit être renseigné !";
}
if (empty($strRayon)) {
$tabErreurs["Rayon"] = "Le rayon doit être renseigné !";
}
if (empty($strDate)) {
$tabErreurs["Acqisition"] = "La date d'acquisition doit être renseignée !";
}
$hasErrors = true;
}
if (!$hasErrors) {
// mise à jour dans la base de données
$strSQL = "UPDATE ouvrage SET titre = ?,"
."salle = ?,"
."rayon = ?,"
."code_genre = ?,"
."date_acquisition = ? "
."WHERE no_ouvrage = ?";
try {
$res = execSQL($cnx, $strSQL, array(
$strTitre,
$intSalle,
$strRayon,
$strGenre,
$strDate,
$intID
)
);
if ($res) {
$msg = '<span class="info">L\'ouvrage '
.$strTitre.' a été modifié</span>';
// récupération des valeurs dans la base
$strSQL = "SELECT no_ouvrage as ID, "
."titre, "
."acquisition, "
."lib_genre, "
."salle, "
."rayon, "
."dernier_pret, "
."disponibilite, "
."auteur "
."FROM v_ouvrages "
."WHERE no_ouvrage = ".$intID;
$lOuvrage = getRows($cnx, $strSQL, array($intID));
if ($lOuvrage) {
$strTitre = $lOuvrage[0][1];
$strAcquisition = $lOuvrage[0][2];
$strGenre = $lOuvrage[0][3];
$strSalle = $lOuvrage[0][4];
$strRayon = $lOuvrage[0][5];
$strDernierPret = $lOuvrage[0][6];
$strDispo = $lOuvrage[0][7];
$strAuteur = $lOuvrage[0][8];
}
else {
$msg = "Cet ouvrage n'existe pas !";
}
include 'vues/v_consulterOuvrage.php';
}
else {
$tabErreurs["Erreur"] = 'Une erreur s\'est produite lors de l\'opération de mise à jour !';
$tabErreurs["ID"] = $intID;
$tabErreurs["Titre"] = $strTitre;
$tabErreurs["Salle"] = $intSalle;
$tabErreurs["Rayon"] = $strRayon;
$tabErreurs["Genre"] = $strGenre;
$tabErreurs["Date"] = $strDate;
// en phase de test, on peut ajouter le SQL :
$tabErreurs["SQL"] = $strSQL;
$hasErrors = true;
}
}
catch (PDOException $e) {
$tabErreurs["Erreur"] = 'Une exception a été levée !';
$hasErrors = true;
}
}
}
else {
// pas d'id dans l'url ni clic sur Valider : c'est anormal
$tabErreurs["Erreur"] = "Aucun ouvrage n'a été transmis pour modification !";
$hasErrors = true;
}
}
}
// affichage des erreurs
if ($hasErrors) {
$msg = "Une erreur s'est produite :";
include 'vues/v_afficherErreurs.php';
include 'vues/v_modifierOuvrage.php';
}
} break;
case 'supprimerOuvrage' : {
// récupération de l'identifiant du ouvrage passé dans l'URL
if (isset($_GET["id"])) {
$intID = intval(htmlentities($_GET["id"]));
// récupération des données dans la base
$strSQL = "SELECT nom_ouvrage, prenom_ouvrage, alias "
."FROM ouvrage "
."WHERE id_ouvrage = ?";
$lOuvrage = getRows($cnx, $strSQL, array($intID));
if (count($lOuvrage) == 1) {
$strNom = $lOuvrage[0][0];
$strPrenom = $lOuvrage[0][1];
$strAlias = $lOuvrage[0][2];
}
else {
$tabErreurs["Erreur"] = "Cet ouvrage n'existe pas !";
$tabErreurs["Code"] = $intID;
$hasErrors = true;
}
if (!$hasErrors) {
// rechercher des prêts de cet ouvrage
$strSQL = "SELECT COUNT(*) "
."FROM pret "
."WHERE no_ouvrage = ?";
try {
$prets = getValue($cnx, $strSQL, array($intID));
if ($prets == 0) {
// c'est bon, on peut le supprimer
$strSQL = "DELETE FROM ouvrage WHERE no_ouvrage = ?";
try {
$res = execSQL($cnx, $strSQL, array($intID));
if ($res) {
$msg = '<span class="info">L\'ouvrage '
.$strNom.' a été supprimé';
include 'vues/v_afficherMessage.php';
} }
catch (PDOException $e) {
$tabErreurs["Erreur"] =
"Une exception PDO a été levée !";
$tabErreurs["Message"] = $e->getMessage();
$hasErrors = true;
}
}
else {
$tabErreurs["Erreur"] = "Cet ouvrage est référencé par des prêts, suppression impossible !";
$hasErrors = true;
}
}
catch (PDOException $e) {
$tabErreurs["Erreur"] = $e->getMessage();
}
}
}
// affichage des erreurs
if ($hasErrors) {
$msg = "Une erreur s'est produite :";
$lien = '<a href="index.php?uc=gererOuvrages&action=consulterOuvrage&id='
.$intID.'">Retour à la consultation</a>';
include 'vues/v_afficherErreurs.php';
}
} break;
case 'listerOuvrages' : {
// récupérer les ouvrages
$strSQL = "SELECT no_ouvrage as ID, "
."titre, "
."lib_genre, "
."auteur, "
."salle, "
."rayon, "
."dernier_pret, "
."disponibilite "
."FROM v_ouvrages "
."ORDER BY titre;";
$lesOuvrages = getRows($cnx, $strSQL, array());
// afficher le nombre de ouvrages
$nbOuvrages = count($lesOuvrages);
include 'vues/v_listeOuvrages.php';
} break;
}
// Release the database connection opened at the top of this controller.
disconnectDB($cnx);
<file_sep><?php
/**
*
* BMG
* © GroSoft, 2016
*
* Business Logic Layer
*
* Utilise les services des classes de la bibliothèque Reference
* Utilise les services de la classe GenreDal
* Utilise les services de la classe Application
*
* @package default
* @author dk
* @version 1.0
*/
/*
* ====================================================================
* Classe Genres : fabrique d'objets Genre
* ====================================================================
*/
// sollicite les méthodes de la classe GenreDal
require_once ('./modele/Dal/GenreDal.class.php');
// sollicite les services de la classe Application
require_once ('./modele/App/Application.class.php');
// sollicite la référence
require_once ('./modele/Reference/Genre.class.php');
class Genres {
    /**
     * Public methods — business-logic factory for Genre objects, built on
     * top of GenreDal (data access) and Application (validation helpers).
     */

    /**
     * Loads every genre available for the works catalogue.
     * @param int $mode 0 == raw associative rows, 1 == array of Genre objects
     * @return array|NULL array in the requested shape, or NULL when the
     *                    DAL result fails Application::dataOK()
     */
    public static function chargerLesGenres($mode) {
        $tab = GenreDal::loadGenres(1);
        if (Application::dataOK($tab)) {
            if ($mode == 1) {
                // wrap each raw row into a Genre object
                $res = array();
                foreach ($tab as $ligne) {
                    $unGenre = new Genre(
                        $ligne->code_genre,
                        $ligne->lib_genre
                    );
                    array_push($res, $unGenre);
                }
                return $res;
            }
            else {
                return $tab;
            }
        }
        return NULL;
    }

    /**
     * Checks whether a genre exists.
     * @param string $code code of the genre to check
     * @return int 1 when the genre exists, 0 otherwise
     *             (int rather than bool — callers rely on truthiness)
     */
    public static function genreExiste($code) {
        $values = GenreDal::loadGenreByID($code, 1);
        if (Application::dataOK($values)) {
            return 1;
        }
        return 0;
    }

    /**
     * Inserts a new genre and returns it re-read from the database.
     * @param array $valeurs [0] => code, [1] => label
     * @return Genre|NULL the freshly stored genre (NULL if the re-read fails)
     */
    public static function ajouterGenre($valeurs) {
        $id = GenreDal::addGenre(
            $valeurs[0],
            $valeurs[1]
        );
        return self::chargerGenreParID($id);
    }

    /**
     * Persists changes to an existing genre.
     * @param Genre $genre the genre carrying the new values
     * @return mixed the DAL's update result
     */
    public static function modifierGenre($genre) {
        return GenreDal::setGenre(
            $genre->getCode(),
            $genre->getLibelle()
        );
    }

    /**
     * Deletes a genre by code.
     * @param string $code code of the genre to delete
     * @return mixed the DAL's delete result
     */
    public static function supprimerGenre($code) {
        return GenreDal::delGenre($code);
    }

    /**
     * Loads one genre by its code.
     * @param string $id the genre code
     * @return Genre|NULL a Genre object, or NULL when not found
     */
    public static function chargerGenreParId($id) {
        $values = GenreDal::loadGenreByID($id, 1);
        if (Application::dataOK($values)) {
            $libelle = $values[0]->lib_genre;
            return new Genre ($id, $libelle);
        }
        return NULL;
    }

    /**
     * Counts the works referencing a genre (used to block deletion of a
     * genre that is still in use).
     * @param string $code the genre code
     * @return int number of works
     */
    public static function nbOuvragesParGenre($code) {
        return GenreDal::countOuvragesGenre($code);
    }
}
<file_sep><?php
/**
* Contrôleur secondaire chargé de la gestion des genres
* @author dk
* @package default (mission 4)
*/
// bibliothèques à utiliser
require_once ('modele/App/Application.class.php');
require_once ('modele/App/Notification.class.php');
require_once ('modele/Render/AdminRender.class.php');
require_once ('modele/Bll/Genres.class.php');
// Determine the requested action; default to listing all genres.
if (isset($_GET["action"])) {
    $action = $_GET["action"];
}
else {
    $action = 'listerGenres';
}
// If an id was passed (GET or POST), load the matching Genre object for
// consultation, update or deletion.
// NOTE(review): $unGenre stays undefined when no id is supplied, yet the
// consult/modify/delete cases below use it — confirm those actions are
// always reached with an id in the request.
if (isset($_REQUEST["id"])) {
    $id = $_REQUEST["id"];
    $unGenre = Genres::chargerGenreParID($id);
}
// charger la vue en fonction du choix de l'utilisateur
switch ($action) {
case 'listerGenres' : {
// récupérer les genres
$lesGenres = Genres::chargerLesGenres(1);
// afficher le nombre de genres
$nbGenres = count($lesGenres);
include 'vues/v_listeGenres.php';
} break;
case 'consulterGenre' : {
if ($unGenre == NULL) {
Application::addNotification(new Notification("Ce genre n'existe pas !", ERROR));
}
else {
include 'vues/v_consulterGenre.php';
}
} break;
case 'ajouterGenre' : {
// initialisation des variables
$hasErrors = false;
$strCode = '';
$strLibelle = '';
// traitement de l'option : saisie ou validation ?
if (isset($_GET["option"])) {
$option = htmlentities($_GET["option"]);
}
else {
$option = 'saisirGenre';
}
switch($option) {
case 'saisirGenre' : {
include 'vues/v_ajouterGenre.php';
} break;
case 'validerGenre' : {
// tests de gestion du formulaire
if (isset($_POST["cmdValider"])) {
// test zones obligatoires
if (!empty($_POST["txtCode"]) and !empty($_POST["txtLibelle"])) {
// les zones obligatoires sont présentes
$strLibelle = ucfirst(htmlentities(
$_POST["txtLibelle"])
);
$strCode = strtoupper(htmlentities(
$_POST["txtCode"])
);
// tests de cohérence
// contrôle d'existence d'un genre avec le même code
if (Genres::genreExiste($strCode)) {
// signaler l'erreur
Application::addNotification(new Notification("Il existe déjà un genre avec ce code !", ERROR));
$hasErrors = true;
}
}
else {
// une ou plusieurs valeurs n'ont pas été saisies
if (empty($strCode)) {
Application::addNotification(new Notification("Le code doit être renseigné !", ERROR));
}
if (empty($strLibelle)) {
Application::addNotification(new Notification("Le libellé doit être renseigné !", ERROR));
}
$hasErrors = true;
}
if (!$hasErrors) {
// ajout dans la base de données
$unGenre = Genres::ajouterGenre(array($strCode,$strLibelle));
Application::addNotification(new Notification("Le genre a été ajouté !", SUCCESS));
include 'vues/v_consulterGenre.php';
}
else {
include 'vues/v_ajouterGenre.php';
}
}
} break;
}
} break;
case 'modifierGenre' : {
// initialisation des variables
$hasErrors = false;
$strLibelle = '';
// traitement de l'option : saisie ou validation ?
if (isset($_GET["option"])) {
$option = htmlentities($_GET["option"]);
}
else {
$option = 'saisirGenre';
}
switch($option) {
case 'saisirGenre' : {
// récupération du code
if (isset($_GET["id"])) {
include("vues/v_modifierGenre.php");
}
else {
Application::addNotification(new Notification("Le genre est inconnu !", ERROR));
include("vues/v_listeGenres.php");
}
} break;
case 'validerGenre' : {
// si on a cliqué sur Valider
if (isset($_POST["cmdValider"])) {
// mémoriser les valeurs pour les réafficher
// test zones obligatoires
if (!empty($_POST["txtLibelle"])) {
// les zones obligatoires sont présentes
$strLibelle = ucfirst(htmlentities($_POST["txtLibelle"]));
// tests de cohérence
}
else {
if (empty($strLibelle)) {
Application::addNotification(new Notification("Le libellé est obligatoire !", ERROR));
}
$hasErrors = true;
}
if (!$hasErrors) {
// mise à jour dans la base de données
$unGenre->setLibelle($strLibelle);
$res = Genres::modifierGenre($unGenre);
Application::addNotification(new Notification("Le genre a été modifié !", SUCCESS));
include 'vues/v_consulterGenre.php';
}
else {
include("vues/v_modifierGenre.php");
}
}
}
}
} break;
case 'supprimerGenre' : {
// rechercher des ouvrages de ce genre
if (Genres::nbOuvragesParGenre($unGenre->getCode()) > 0) {
// il y a des ouvrages référencés, suppression impossible
Application::addNotification(new Notification("Il existe des ouvrages qui référencent ce genre, suppression impossible !", ERROR));
include 'vues/v_consulterGenre.php';
}
else {
// supprimer le genre
Genres::supprimerGenre($unGenre->getCode());
Application::addNotification(new Notification("Le genre a été supprimé !", SUCCESS));
// afficher la liste
$lesGenres = Genres::chargerLesGenres(1);
$nbGenres = count($lesGenres);
include 'vues/v_listeGenres.php';
}
} break;
}
<file_sep><?php
/**
* BMG
* © GroSoft, 2015
*
* Data Access Layer
* Classe d'accès aux données
*
* Utilise les services de la classe PdoDao
*
* @package default
* @author dk
* @version 1.0
*/
// sollicite les services de la classe PdoDao
require_once ('PdoDao.class.php');
class GenreDal {
    /**
     * Récupère tous les genres.
     * @param int $style 0 == tableau associatif, 1 == objets
     * @return mixed les lignes lues, ou PDO_EXCEPTION_VALUE en cas d'erreur PDO
     */
    public static function loadGenres($style) {
        $dao = new PdoDao();
        $lignes = $dao->getRows('SELECT * FROM genre', array(), $style);
        if (is_a($lignes, 'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return $lignes;
    }
    /**
     * Charge un genre à partir de son code.
     * @param string $id le code du genre
     * @return mixed les lignes lues (style objet), ou PDO_EXCEPTION_VALUE
     */
    public static function loadGenreByID($id) {
        $dao = new PdoDao();
        $lignes = $dao->getRows('SELECT * FROM genre WHERE code_genre = ?', array($id), 1);
        return is_a($lignes, 'PDOException') ? PDO_EXCEPTION_VALUE : $lignes;
    }
    /**
     * Ajoute un genre.
     * @param string $code le code du genre à ajouter
     * @param string $libelle le libellé du genre à ajouter
     * @return mixed le code du genre ajouté, ou PDO_EXCEPTION_VALUE
     */
    public static function addGenre(
        $code,
        $libelle
    ) {
        $dao = new PdoDao();
        $retour = $dao->execSQL('INSERT INTO genre VALUES (?,?)', array($code, $libelle));
        return is_a($retour, 'PDOException') ? PDO_EXCEPTION_VALUE : $code;
    }
    /**
     * Modifie le libellé d'un genre.
     * @param string $code le code du genre à modifier
     * @param string $libelle le nouveau libellé
     * @return mixed le résultat de l'exécution, ou PDO_EXCEPTION_VALUE
     */
    public static function setGenre(
        $code,
        $libelle
    ) {
        $dao = new PdoDao();
        $retour = $dao->execSQL(
            'UPDATE genre SET lib_genre = ? WHERE code_genre = ?',
            array($libelle, $code)
        );
        return is_a($retour, 'PDOException') ? PDO_EXCEPTION_VALUE : $retour;
    }
    /**
     * Supprime un genre.
     * @param string $code le code du genre à supprimer
     * @return mixed le résultat de l'exécution, ou PDO_EXCEPTION_VALUE
     */
    public static function delGenre($code) {
        $dao = new PdoDao();
        $retour = $dao->execSQL('DELETE FROM genre WHERE code_genre = ?', array($code));
        return is_a($retour, 'PDOException') ? PDO_EXCEPTION_VALUE : $retour;
    }
    /**
     * Compte les ouvrages rattachés à un genre.
     * @param string $code le code du genre
     * @return mixed le nombre d'ouvrages, ou PDO_EXCEPTION_VALUE
     */
    public static function countOuvragesGenre($code) {
        $dao = new PdoDao();
        $nb = $dao->getValue('SELECT COUNT(*) FROM ouvrage WHERE code_genre = ?', array($code));
        return is_a($nb, 'PDOException') ? PDO_EXCEPTION_VALUE : $nb;
    }
}
<file_sep><?php
/**
* BMG
* © GroSoft, 2016
*
* Classe Utilities : fonctions utilitaires à portée globale
*
* @package default
* @author dk
* @version 1.0
*/
class Utilities {
    /** Noms complets des mois, indexés 1..12. */
    private static $MOIS_COMPLETS = array(
        1 => 'Janvier', 2 => 'Février', 3 => 'Mars', 4 => 'Avril',
        5 => 'Mai', 6 => 'Juin', 7 => 'Juillet', 8 => 'Août',
        9 => 'Septembre', 10 => 'Octobre', 11 => 'Novembre', 12 => 'Décembre'
    );
    /** Noms abrégés des mois, indexés 1..12. */
    private static $MOIS_ABREGES = array(
        1 => 'Jan', 2 => 'Fév', 3 => 'Mar', 4 => 'Avr',
        5 => 'Mai', 6 => 'Juin', 7 => 'Jul', 8 => 'Aout',
        9 => 'Sep', 10 => 'Oct', 11 => 'Nov', 12 => 'Dec'
    );
    /** Jours de la semaine, indexés 0 == Lundi ... 6 == Dimanche. */
    private static $JOURS = array(
        'Lundi', 'Mardi', 'Mercredi', 'Jeudi', 'Vendredi', 'Samedi', 'Dimanche'
    );
    /**
     * Indique si une valeur ne contient que des chiffres (entier positif ou nul).
     * NOTE(review) : retourne aussi vrai pour une chaîne vide (comportement
     * historique conservé) — à confirmer si c'est voulu.
     * @param mixed $valeur la valeur à tester
     * @return bool
     */
    public static function isPosInt($valeur) {
        return preg_match("/[^0-9]/", $valeur) == 0;
    }
    /*
     * Dates
     */
    /**
     * Convertit une date (chaîne ou objet \DateTime) en objet \DateTime.
     * @param mixed $date
     * @return \DateTime
     */
    private static function toDateTime($date) {
        return (gettype($date) == 'string') ? new \DateTime($date) : $date;
    }
    /**
     * Retourne le jour du mois d'une date, sur deux chiffres.
     * @param mixed $date une chaîne date ou un objet \DateTime
     * @return string le jour ('01'..'31')
     */
    public static function getDay($date) {
        return self::toDateTime($date)->format('d');
    }
    /**
     * Retourne l'année d'une date.
     * @param mixed $date une chaîne date ou un objet \DateTime
     * @return string l'année sur 4 chiffres
     */
    public static function getYear($date) {
        return self::toDateTime($date)->format('Y');
    }
    /**
     * Retourne le mois d'une date, exprimé en français.
     * @param mixed $date un numéro de mois (int), une chaîne date ou un \DateTime
     * @param int $mode 1 == nom complet, autre == nom abrégé
     * @return string le mois, ou 'Ooops !' si le mois est invalide
     */
    public static function getMonth($date, $mode) {
        switch (gettype($date)) {
            case 'integer' : {
                $month = intval($date);
            } break;
            case 'string' : {
                $month = intval((new \DateTime($date))->format('m'));
            } break;
            case 'object' : {
                $month = intval($date->format('m'));
            } break;
            default : {
                // BUG corrigé : l'ancienne version laissait $month indéfini
                // pour un type inattendu (notice PHP) ; on force un mois invalide.
                $month = -1;
            }
        }
        if ($month < 1 || $month > 12) {
            return 'Ooops !';
        }
        return ($mode == 1) ? self::$MOIS_COMPLETS[$month] : self::$MOIS_ABREGES[$month];
    }
    /**
     * Retourne le nom français d'un jour à partir de sa position dans la
     * semaine (0 == Lundi, ..., 6 == Dimanche).
     * @param int $day la position du jour (0..6)
     * @return string le jour, ou 'Oops' si la position est invalide
     */
    public static function getStrDay($day) {
        if ($day >= 0 and $day < 7) {
            return self::$JOURS[$day];
        }
        return "Oops";
    }
    /**
     * Retourne la position (0 == Lundi) d'un jour donné par son nom
     * français ou anglais.
     * @param string $day le nom du jour
     * @return int|string la position, 'Oops !' si le nom est inconnu,
     *                    'Oops' si le paramètre n'est pas une chaîne
     */
    public static function getIntDay($day) {
        if (!is_string($day)) {
            return "Oops";
        }
        $positions = array(
            'Lundi' => 0, 'Monday' => 0,
            'Mardi' => 1, 'Tuesday' => 1,
            'Mercredi' => 2, 'Wednesday' => 2,
            'Jeudi' => 3, 'Thursday' => 3,
            'Vendredi' => 4, 'Friday' => 4,
            'Samedi' => 5, 'Saturday' => 5,
            'Dimanche' => 6, 'Sunday' => 6
        );
        return isset($positions[$day]) ? $positions[$day] : "Oops !";
    }
    /**
     * Retourne le nom français d'un jour donné par son numéro
     * (1 == Lundi, ..., 6 == Samedi, 0 == Dimanche) ou par son nom anglais.
     * BUG corrigé : 'Friday' retournait 'Vendedi' (faute de frappe).
     * @param int|string $day
     * @return string le jour en français, ou 'Oops !' si inconnu
     */
    public static function getJourFrancais($day) {
        $parNomAnglais = array(
            'Monday' => 'Lundi', 'Tuesday' => 'Mardi', 'Wednesday' => 'Mercredi',
            'Thursday' => 'Jeudi', 'Friday' => 'Vendredi', 'Saturday' => 'Samedi',
            'Sunday' => 'Dimanche'
        );
        $parNumero = array(
            1 => 'Lundi', 2 => 'Mardi', 3 => 'Mercredi', 4 => 'Jeudi',
            5 => 'Vendredi', 6 => 'Samedi', 0 => 'Dimanche'
        );
        if (isset($parNomAnglais[$day])) {
            return $parNomAnglais[$day];
        }
        if (isset($parNumero[$day])) {
            return $parNumero[$day];
        }
        return "Oops !";
    }
    /**
     * Retourne une date au format français 'jj Mois aaaa'.
     * @param mixed $date une chaîne date ou un objet \DateTime
     * @return string
     */
    public static function getDateFrancais($date) {
        return self::getDay($date).' '.self::getMonth($date, 1).' '.self::getYear($date);
    }
    /**
     * Retourne une date au format français 'Mois aaaa'.
     * BUG corrigé : getMonth() était appelé sans son second paramètre $mode
     * (obligatoire), ce qui provoquait une erreur d'argument manquant.
     * @param mixed $date une chaîne date ou un objet \DateTime
     * @return string
     */
    public static function getDateFrancaisMoisAnnee($date) {
        return self::getMonth($date, 1).' '.self::getYear($date);
    }
    /**
     * Retourne le dernier jour d'un mois pour une année donnée.
     * @param int $annee l'année (pour février bissextile)
     * @param int $mois le mois (1..12)
     * @return int le dernier jour, ou 0 si le mois est invalide
     *             (l'ancienne version laissait la variable indéfinie)
     */
    public static function getDernierJourMois($annee, $mois) {
        switch ($mois) {
            case 1 :
            case 3 :
            case 5 :
            case 7 :
            case 8 :
            case 10 :
            case 12 : return 31;
            case 4 :
            case 6 :
            case 9 :
            case 11 : return 30;
            case 2 : return self::anneeEstBissextile($annee) ? 29 : 28;
            default : return 0;
        }
    }
    /**
     * Vérifie si une année est bissextile (règle grégorienne).
     * @param int $annee l'année
     * @return bool
     */
    public static function anneeEstBissextile($annee) {
        return ($annee % 4 == 0 && $annee % 100 != 0) || ($annee % 400 == 0);
    }
    /*
     * Gestion de fichiers
     */
    /**
     * Retourne le nombre de fichiers (hors sous-dossiers) dans un dossier.
     * BUG corrigé : l'ancienne boucle `while ($entry = readdir(...))`
     * s'arrêtait sur une entrée nommée "0" (valeur falsy) ; on compare
     * désormais strictement à false.
     * @param string $dir le chemin du dossier
     * @return int le nombre de fichiers
     */
    public static function nbFiles($dir) {
        $nb = 0;
        $dir_handle = opendir($dir);
        while (($entry = readdir($dir_handle)) !== false) {
            if (is_file($dir.'/'.$entry)) {
                $nb++;
            }
        }
        closedir($dir_handle);
        return $nb;
    }
}
<file_sep><?php
/**
* BMG
* © GroSoft, 2016
*
* Application
* Classe technique pour l'application
*
* @package default
* @author dk
* @version 1.0
*/
/*
* ====================================================================
* Classe Application : fournit des services génériques
* ====================================================================
*/
class Application {
    /**********************************************
     * Accès aux données
     ***********************************************/
    /**
     * Vérifie si un getRows() ou un getValue() a retourné un résultat
     * exploitable.
     * @param mixed $value un tableau ou une valeur quelconque
     * @return bool faux si la valeur est vide (comparaison non stricte)
     *              ou vaut PDO_EXCEPTION_VALUE
     */
    public static function dataOK($value) {
        if ($value == NULL) {
            return false;
        }
        return $value != PDO_EXCEPTION_VALUE;
    }
    /**********************************************
     * Gestion des notifications
     ***********************************************/
    /**
     * Ajoute une notification au tableau des notifications en session.
     * @param Notification $notification la notification à mémoriser
     */
    public static function addNotification($notification) {
        if (isset($_SESSION['notifications'])) {
            $_SESSION['notifications'][] = $notification;
        }
        else {
            $_SESSION['notifications'] = array($notification);
        }
    }
    /**
     * Retourne le nombre de notifications en attente.
     * @return int 0 si aucune notification n'a été enregistrée
     */
    public static function nbNotifications() {
        return isset($_SESSION['notifications']) ? count($_SESSION['notifications']) : 0;
    }
    /** Vide le tableau des notifications de la session. */
    public static function resetNotifications() {
        unset($_SESSION['notifications']);
    }
}
<file_sep><?php
/**
* BMG
* © GroSoft, 2015
*
* Data Access Layer
* Classe d'accès aux données
*
* Utilise les services de la classe PdoDao
*
* @package default
* @author Andriolo & Collin , 28/09/2016
* @version 1.0
*/
// sollicite les services de la classe PdoDao
require_once ('PdoDao.class.php');
class PretDal {
    /**
     * Récupère tous les prêts.
     * @param int $style 0 == tableau associatif, 1 == objets
     * @return mixed les lignes lues, ou PDO_EXCEPTION_VALUE en cas d'erreur PDO
     */
    public static function loadPret($style) {
        $dao = new PdoDao();
        $lignes = $dao->getRows('SELECT * FROM pret', array(), $style);
        return is_a($lignes, 'PDOException') ? PDO_EXCEPTION_VALUE : $lignes;
    }
    /**
     * Chargement générique de prêts via la procédure stockée sp_load_prets.
     * @param int $mode 0 == tous les prêts (état optionnel),
     *                  1 == un prêt par id,
     *                  2 == les prêts d'un client (état optionnel)
     * @param array $arrayParams mode 0 : [etat] (optionnel) ;
     *                           mode 1 : [id de prêt] (obligatoire) ;
     *                           mode 2 : [no client] ou [no client, etat]
     * @return mixed les prêts lus, false si les paramètres sont invalides,
     *               ou PDO_EXCEPTION_VALUE en cas d'erreur PDO
     */
    public static function loadGenericPret($mode, $arrayParams) {
        $dao = new PdoDao();
        $id = null;
        $etat = null;
        $nbParams = count($arrayParams);
        if ($mode == 0) {
            if ($nbParams == 1) {
                $etat = $arrayParams[0];
            }
        }
        elseif ($mode == 1) {
            if ($nbParams != 1) {
                return false;
            }
            $id = $arrayParams[0];
        }
        elseif ($mode == 2) {
            if ($nbParams == 2) {
                $id = $arrayParams[0];
                $etat = $arrayParams[1];
            }
            elseif ($nbParams == 1) {
                $id = $arrayParams[0];
            }
            else {
                return false;
            }
        }
        // NOTE(review) : un mode inconnu interroge quand même la procédure
        // avec id et etat à null (comportement historique conservé).
        $lignes = $dao->getRows('CALL sp_load_prets(?,?,?)', array($mode, $id, $etat), 1);
        return is_a($lignes, 'PDOException') ? PDO_EXCEPTION_VALUE : $lignes;
    }
}
<file_sep><?php
/**
*
* BMG
* © GroSoft, 2016
*
* References
* Classes métier
*
*
* @package default
* @author dk
* @version 1.0
*/
/*
* ====================================================================
* Classe Genre : représente un genre d'ouvrage
* ====================================================================
*/
class Genre {
    // Code identifiant le genre (clé)
    private $_code;
    // Libellé affichable du genre
    private $_libelle;

    /**
     * Construit un genre à partir de son code et de son libellé.
     * @param string $p_code le code du genre
     * @param string $p_libelle le libellé du genre
     */
    public function __construct($p_code, $p_libelle) {
        $this->setCode($p_code);
        $this->setLibelle($p_libelle);
    }

    /** @return string le code du genre */
    public function getCode() {
        return $this->_code;
    }

    /** @return string le libellé du genre */
    public function getLibelle() {
        return $this->_libelle;
    }

    /** @param string $p_code le nouveau code */
    public function setCode($p_code) {
        $this->_code = $p_code;
    }

    /** @param string $p_libelle le nouveau libellé */
    public function setLibelle($p_libelle) {
        $this->_libelle = $p_libelle;
    }
}
<file_sep><?php
/**
*
* BMG
* © GroSoft, 2016
*
* Business Logic Layer
*
* Utilise les services des classes de la bibliothèque Reference
* Utilise les services de la classe GenreDal
* Utilise les services de la classe Application
*
* @package default
* @author dk
* @version 1.0
*/
/*
* ====================================================================
* Classe Ouvrages : fabrique d'objets Ouvrage
* ====================================================================
*/
// sollicite les méthodes de la classe OuvrageDal
require_once ('./modele/Dal/OuvrageDal.class.php');
// sollicite les services de la classe Application
require_once ('./modele/App/Application.class.php');
// sollicite la référence
require_once ('./modele/Reference/Ouvrage.class.php');
class Ouvrages {
    /**
     * Récupère tous les ouvrages.
     * @param int $mode 0 == lignes brutes (objets), 1 == tableau d'objets Ouvrage
     * @return array|NULL les ouvrages, ou NULL si la lecture a échoué
     */
    public static function chargerLesOuvrages($mode) {
        $lignes = OuvrageDal::loadOuvrages(1);
        if (!Application::dataOK($lignes)) {
            return NULL;
        }
        if ($mode != 1) {
            return $lignes;
        }
        $lesOuvrages = array();
        foreach ($lignes as $l) {
            $lesOuvrages[] = new Ouvrage(
                $l->no_ouvrage,
                $l->titre,
                $l->salle,
                $l->rayon,
                new Genre($l->code_genre, $l->lib_genre),
                $l->acquisition
            );
        }
        return $lesOuvrages;
    }
    /**
     * Vérifie si un ouvrage existe.
     * @param mixed $id le numéro de l'ouvrage à contrôler
     * @return int 1 si l'ouvrage existe, 0 sinon
     */
    public static function ouvrageExists($id) {
        $lignes = OuvrageDal::loadOuvrageByID($id, 1);
        return Application::dataOK($lignes) ? 1 : 0;
    }
    /**
     * Ajoute un ouvrage puis le recharge depuis la base.
     * @param array $valeurs [titre, salle, rayon, id auteur, code genre, date]
     * @return Ouvrage l'ouvrage créé
     */
    public static function ajouterOuvrage($valeurs) {
        list($titre, $salle, $rayon, $idAuteur, $codeGenre, $date) = $valeurs;
        $no = OuvrageDal::addOuvrage($titre, $salle, $rayon, $idAuteur, $codeGenre, $date);
        return self::chargerOuvrageParID($no);
    }
    /**
     * Met à jour un ouvrage en base.
     * @param Ouvrage $unOuvrage l'ouvrage à enregistrer
     * @return mixed le résultat de la couche DAL
     */
    public static function modifierOuvrage($unOuvrage) {
        return OuvrageDal::setOuvrage(
            $unOuvrage->getNo(),
            $unOuvrage->getTitre(),
            $unOuvrage->getSalle(),
            $unOuvrage->getRayon(),
            $unOuvrage->getGenre()->getCode(),
            $unOuvrage->getDateAcqui()
        );
    }
    /**
     * Supprime un ouvrage.
     * @param mixed $code le numéro de l'ouvrage
     * @return mixed le résultat de la couche DAL
     */
    public static function supprimerOuvrage($code) {
        return OuvrageDal::delOuvrage($code);
    }
    /**
     * Charge un ouvrage à partir de son numéro.
     * @param mixed $no le numéro de l'ouvrage
     * @return Ouvrage l'ouvrage lu
     */
    public static function chargerOuvrageParId($no) {
        $lignes = OuvrageDal::loadOuvrageByID($no);
        $l = $lignes[0];
        return new Ouvrage(
            $l->no_ouvrage,
            $l->titre,
            $l->salle,
            $l->rayon,
            new Genre($l->code_genre, $l->lib_genre),
            $l->acquisition
        );
    }
    /**
     * Récupère le nombre d'ouvrages pour un genre.
     * NOTE(review) : délègue à GenreDal, qui n'apparaît pas dans les
     * require_once visibles de ce fichier — vérifier qu'il est bien chargé
     * par ailleurs.
     * @param string $code le code du genre
     * @return int le nombre d'ouvrages
     */
    public static function nbOuvragesParGenre($code) {
        return GenreDal::countOuvragesGenre($code);
    }
    /**
     * Associe un auteur à un ouvrage.
     * @param mixed $id_ouvrage le numéro de l'ouvrage
     * @param mixed $id_auteur l'identifiant de l'auteur à associer
     * @return mixed le résultat de la couche DAL
     */
    public static function ajouterAuteurOuvrage($id_ouvrage, $id_auteur) {
        return OuvrageDal::addAuteurOuvrage($id_ouvrage, $id_auteur);
    }
}
<file_sep><?php
/**
* Page de gestion des prets
* @author pv
* @package default
*/
?>
<div id="content">
<h2>Gestion des prets</h2>
<?php AdminRender::showNotifications();?>
<a href="index.php?uc=gererPrets&action=ajouterPret" title="Ajouter">
Ajouter un pret
</a>
<div class="corps-form">
<fieldset>
<legend>Prets</legend>
<div id="object-list">
                <?php
                // Vue : liste des prêts.
                // NOTE(review) : attend du contrôleur $nbPrets (int) et
                // $lesPrets (objets exposant getId(), getNoClient(),
                // getNoOuvrage(), getDateEmp(), getDateRet(), getPenalite())
                // — à confirmer auprès du contrôleur gererPrets.
                // afficher le compteur avec accord singulier/pluriel
                if($nbPrets>1)
                {
                    echo '<span>'.$nbPrets.' prets trouvés'
                            . '</span><br /><br />';
                }
                else{
                    echo '<span>'.$nbPrets.' pret trouvé'
                            . '</span><br /><br />';
                }
                // afficher un tableau des prets
                if ($nbPrets > 0) {
                    // création du tableau
                    echo '<table>';
                    // affichage de l'entête du tableau
                    echo '<tr>'
                        .'<th>ID</th>'
                        .'<th>numéro client</th>'
                        .'<th>numéro ouvrage</th>'
                        .'<th>date emprunt</th>'
                        .'<th>date retour</th>'
                        .'<th>pénalité</th>'
                        .'</tr>';
                    // affichage des lignes du tableau
                    // $n sert à alterner les classes CSS pair/impair
                    $n = 0;
                    foreach($lesPrets as $ligne) {
                        if (($n % 2) == 1) {
                            echo '<tr class="impair">';
                        }
                        else {
                            echo '<tr class="pair">';
                        }
                        // afficher la colonne 1 dans un hyperlien
                        echo '<td><a href="index.php?uc=gererPrets&action=consulterPret&id='
                            .$ligne->getId().'">'.$ligne->getId().'</a></td>';
                        // afficher les colonnes suivantes
                        echo '<td><center>'.$ligne->getNoClient().'</center></td>';
                        echo '<td><center><a href="index.php?uc=gererOuvrages&action=consulterOuvrage&id='
                            .$ligne->getNoOuvrage().'">'.$ligne->getNoOuvrage().'</a></center></td>';
                        echo '<td>'.$ligne->getDateEmp().' </td>';
                        echo '<td>'.$ligne->getDateRet().' </td>';
                        // comparaison non stricte : 0 ou '' affichent aussi
                        // "Pas de pénalité"
                        if ($ligne->getPenalite() == NULL) {
                            echo '<td class="erreur">Pas de pénalité</td>';
                        }
                        else {
                            echo '<td>'.$ligne->getPenalite().'</td>';
                        }
                        echo '</tr>';
                        $n++;
                    }
                    echo '</table>';
                }
                else {
                    // NOTE(review) : le message parle d'« ouvrage » alors que
                    // la page liste des prêts — libellé à revoir (runtime
                    // string, non modifiée ici).
                    echo "Aucun ouvrage trouvé !";
                }
                ?>
</div>
</fieldset>
</div>
</div><file_sep><?php
/**
* BMG
* © GroSoft, 2015
*
* Data Access Layer
* Classe d'accès aux données
*
* Utilise les services de la classe PdoDao
*
* @package default
* @author pv
* @version 1.0
*/
// sollicite les services de la classe PdoDao
require_once ('PdoDao.class.php');
class OuvrageDal {
    /**
     * Récupère tous les ouvrages (vue v_ouvrages).
     * @param int $style 0 == tableau associatif, 1 == objets
     * @return mixed les lignes lues, ou PDO_EXCEPTION_VALUE en cas d'erreur PDO
     */
    public static function loadOuvrages($style) {
        $cnx = new PdoDao();
        $qry = 'SELECT * FROM v_ouvrages';
        $res = $cnx->getRows($qry, array(), $style);
        if (is_a($res, 'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return $res;
    }
    /**
     * Charge un ouvrage à partir de son numéro.
     * @param mixed $id le numéro de l'ouvrage
     * @return mixed les lignes lues (objets), ou PDO_EXCEPTION_VALUE
     */
    public static function loadOuvrageByID($id) {
        $cnx = new PdoDao();
        $qry = 'SELECT * FROM v_ouvrages WHERE no_ouvrage = ?';
        $res = $cnx->getRows($qry, array($id), 1);
        if (is_a($res, 'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return $res;
    }
    /**
     * Charge les identifiants des auteurs d'un ouvrage.
     * @param mixed $no le numéro de l'ouvrage
     * @return mixed les lignes lues (objets), ou PDO_EXCEPTION_VALUE
     */
    public static function loadAuteursByOuvrage($no) {
        $cnx = new PdoDao();
        $qry = 'SELECT id_auteur FROM auteur_ouvrage WHERE no_ouvrage = ?';
        $res = $cnx->getRows($qry, array($no), 1);
        if (is_a($res, 'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return $res;
    }
    /**
     * Ajoute un ouvrage puis l'associe à un auteur.
     * @param string $strTitre le titre de l'ouvrage
     * @param int $intSalle la salle
     * @param string $strRayon le rayon
     * @param mixed $idAuteur l'identifiant de l'auteur à associer
     * @param string $strGenre le code du genre
     * @param string $strDate la date d'acquisition
     * @return mixed le numéro du nouvel ouvrage, ou PDO_EXCEPTION_VALUE
     */
    public static function addOuvrage(
        $strTitre,
        $intSalle,
        $strRayon,
        $idAuteur,
        $strGenre,
        $strDate
    ) {
        $cnx = new PdoDao();
        // insertion de l'ouvrage
        $qry = 'INSERT INTO ouvrage (titre, salle, rayon,code_genre, date_acquisition) VALUES (?,?,?,?,?)';
        $res = $cnx->execSQL($qry,array(
            $strTitre,
            $intSalle,
            $strRayon,
            $strGenre,
            $strDate)
        );
        if (is_a($res,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        // NOTE(review) : MAX(no_ouvrage) n'est pas sûr en cas d'insertions
        // concurrentes ; préférer PDO::lastInsertId() si PdoDao l'expose.
        $qry = "SELECT MAX(no_ouvrage) FROM ouvrage";
        $id = $cnx->getValue($qry, array());
        // association ouvrage / auteur
        $qry = 'INSERT INTO auteur_ouvrage VALUES(?,?)';
        $rq = $cnx->execSQL($qry, array($id,$idAuteur));
        if (is_a($rq,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return $id;
    }
    /**
     * Met à jour un ouvrage.
     * @param mixed $no le numéro de l'ouvrage à modifier
     * @param string $titre le titre
     * @param int $salle la salle
     * @param string $rayon le rayon
     * @param string $codeGenre le code du genre
     * @param string $date la date d'acquisition
     * @return mixed 1 en cas de succès, ou PDO_EXCEPTION_VALUE
     */
    public static function setOuvrage(
        $no,
        $titre,
        $salle,
        $rayon,
        $codeGenre,
        $date
    ) {
        $cnx = new PdoDao();
        // BUG corrigé : il manquait un espace avant WHERE, la requête
        // générée contenait "... date_acquisition = ?WHERE no_ouvrage = ?".
        $qry = 'UPDATE ouvrage SET titre = ?,'
                . ' salle = ?,'
                . ' rayon = ?,'
                . ' code_genre = ?,'
                . ' date_acquisition = ?'
                . ' WHERE no_ouvrage = ?';
        $res = $cnx->execSQL($qry,array(
            $titre,
            $salle,
            $rayon,
            $codeGenre,
            $date,
            $no
        ));
        if (is_a($res,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return 1;
    }
    /**
     * Supprime un ouvrage ainsi que ses associations auteur/ouvrage.
     * @param mixed $code le numéro de l'ouvrage à supprimer
     * @return mixed le résultat de la suppression, ou PDO_EXCEPTION_VALUE
     */
    public static function delOuvrage($code) {
        $cnx = new PdoDao();
        // supprimer d'abord les liens auteur_ouvrage (intégrité référentielle)
        $qry = 'DELETE FROM auteur_ouvrage WHERE no_ouvrage = ?';
        $res = $cnx->execSQL($qry,array($code));
        if (is_a($res,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        // BUG corrigé : l'ancienne version construisait une requête figée
        // ('... no_ouvrage = 1') puis réexécutait la requête précédente :
        // la ligne de la table ouvrage n'était jamais supprimée.
        $qry = 'DELETE FROM ouvrage WHERE no_ouvrage = ?';
        $res = $cnx->execSQL($qry,array($code));
        if (is_a($res,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return $res;
    }
    /**
     * Calcule le nombre d'ouvrages pour un genre.
     * @param string $code le code du genre
     * @return mixed le nombre d'ouvrages, ou PDO_EXCEPTION_VALUE
     */
    public static function countOuvragesGenre($code) {
        $cnx = new PdoDao();
        $qry = 'SELECT COUNT(*) FROM ouvrage WHERE code_genre = ?';
        $res = $cnx->getValue($qry,array($code));
        if (is_a($res,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return $res;
    }
    /**
     * Associe un auteur à un ouvrage.
     * @param mixed $id_ouvrage le numéro de l'ouvrage
     * @param mixed $id_auteur l'identifiant de l'auteur
     * @return mixed la valeur max de no_ouvrage, ou PDO_EXCEPTION_VALUE.
     *         NOTE(review) : MAX(no_ouvrage) sur la table d'association ne
     *         retourne pas l'identifiant de la ligne insérée — usage à
     *         vérifier côté appelants (comportement historique conservé).
     */
    public static function addAuteurOuvrage(
        $id_ouvrage,
        $id_auteur
    ) {
        $cnx = new PdoDao();
        $qry = 'INSERT INTO auteur_ouvrage VALUES (?,?)';
        $res = $cnx->execSQL($qry,array(
            $id_ouvrage,
            $id_auteur)
        );
        if (is_a($res,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        $qry = "SELECT MAX(no_ouvrage) FROM auteur_ouvrage";
        $id = $cnx->getValue($qry, array());
        return $id;
    }
}
<file_sep><?php
/**
* BMG
* © GroSoft, 2015
*
* Data Access Layer
* Classe d'accès aux données
*
* Utilise les services de la classe PdoDao
*
* @package default
* @author pv
* @version 1.0
*/
// sollicite les services de la classe PdoDao
require_once ('PdoDao.class.php');
class AuteurDal {
    /**
     * Récupère tous les auteurs.
     * @param int $style 0 == tableau associatif, 1 == objets
     * @return mixed les lignes lues, ou PDO_EXCEPTION_VALUE en cas d'erreur PDO
     */
    public static function loadAuteurs($style) {
        $cnx = new PdoDao();
        $qry = 'SELECT * FROM auteur';
        $res = $cnx->getRows($qry,array(),$style);
        if (is_a($res,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return $res;
    }
    /**
     * Charge un auteur à partir de son identifiant.
     * @param mixed $id l'identifiant de l'auteur
     * @return mixed les lignes lues (objets), ou PDO_EXCEPTION_VALUE
     */
    public static function loadAuteurById($id) {
        $cnx = new PdoDao();
        $qry = 'SELECT * FROM auteur WHERE id_auteur = ?';
        $res = $cnx->getRows($qry,array($id),1);
        if (is_a($res,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return $res;
    }
    /**
     * Ajoute un auteur.
     * @param string $strNom le nom de l'auteur
     * @param string $strPrenom le prénom de l'auteur
     * @param string $strAlias l'alias de l'auteur
     * @param string $strNotes les notes sur l'auteur
     * @return mixed l'identifiant du nouvel auteur, ou PDO_EXCEPTION_VALUE
     */
    public static function addAuteur(
        $strNom,
        $strPrenom,
        $strAlias,
        $strNotes
    ) {
        $cnx = new PdoDao();
        $qry = 'INSERT INTO auteur (nom_auteur, prenom_auteur, alias, notes) VALUES (?,?,?,?)';
        $res = $cnx->execSQL($qry,array(
            $strNom,
            $strPrenom,
            $strAlias,
            $strNotes)
        );
        if (is_a($res,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        // NOTE(review) : MAX(id_auteur) n'est pas sûr en cas d'insertions
        // concurrentes ; préférer PDO::lastInsertId() si PdoDao l'expose.
        $strSQL = "SELECT MAX(id_auteur) FROM auteur";
        $id = $cnx->getValue($strSQL, array());
        if (is_a($id,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return $id;
    }
    /**
     * Modifie un auteur.
     * @param mixed $intID l'identifiant de l'auteur à modifier
     * @param string $strNom le nom de l'auteur
     * @param string $strPrenom le prénom de l'auteur
     * @param string $strAlias l'alias de l'auteur
     * @param string $strNotes les notes sur l'auteur
     * @return mixed le résultat de l'exécution, ou PDO_EXCEPTION_VALUE
     */
    public static function setAuteur(
        $intID,
        $strNom,
        $strPrenom,
        $strAlias,
        $strNotes
    ) {
        $cnx = new PdoDao();
        $qry = 'UPDATE auteur SET nom_auteur = ?,'
                . ' prenom_auteur = ?,'
                . ' alias = ?,'
                . ' notes = ? '
                . 'WHERE id_auteur = ?';
        $res = $cnx->execSQL($qry,array(
            $strNom,
            $strPrenom,
            $strAlias,
            $strNotes,
            $intID
        ));
        if (is_a($res,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return $res;
    }
    /**
     * Supprime un auteur.
     * @param mixed $int l'identifiant de l'auteur à supprimer
     * @return mixed le résultat de l'exécution, ou PDO_EXCEPTION_VALUE
     */
    public static function delAuteur($int) {
        $cnx = new PdoDao();
        $qry = 'DELETE FROM auteur WHERE id_auteur = ?';
        $res = $cnx->execSQL($qry,array($int));
        if (is_a($res,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return $res;
    }
    /**
     * Calcule le nombre d'ouvrages pour un auteur.
     * @param mixed $id l'identifiant de l'auteur
     * @return mixed le nombre d'ouvrages, ou PDO_EXCEPTION_VALUE
     */
    public static function countOuvragesAuteur($id) {
        $cnx = new PdoDao();
        $qry = 'SELECT COUNT(*) FROM auteur_ouvrage WHERE id_auteur = ?';
        $res = $cnx->getValue($qry,array($id));
        if (is_a($res,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return $res;
    }
    /**
     * Récupère les auteurs sélectionnables pour un ouvrage, en excluant
     * ceux déjà associés.
     * Amélioration : l'ancienne version concaténait les identifiants dans
     * le SQL via une sous-requête redondante ; on utilise désormais une
     * liste de marqueurs '?' (NOT IN paramétré), résultat identique.
     * NOTE(review) : sans exclusion, la liste n'est pas triée
     * (comportement historique conservé).
     * @param int $style 0 == tableau associatif, 1 == objets
     * @param array $tabException objets exposant getId() à exclure
     * @return mixed les lignes lues, ou PDO_EXCEPTION_VALUE
     */
    public static function loadAuteursOuvrage($style, $tabException) {
        $cnx = new PdoDao();
        if (empty($tabException)) {
            $qry = 'SELECT id_auteur, nom_auteur FROM auteur';
            $params = array();
        }
        else {
            $params = array();
            foreach ($tabException as $ligne) {
                $params[] = $ligne->getId();
            }
            $marqueurs = implode(',', array_fill(0, count($params), '?'));
            $qry = 'SELECT id_auteur, nom_auteur FROM auteur'
                 . ' WHERE id_auteur NOT IN ('.$marqueurs.')'
                 . ' ORDER BY nom_auteur';
        }
        $res = $cnx->getRows($qry,$params,$style);
        if (is_a($res,'PDOException')) {
            return PDO_EXCEPTION_VALUE;
        }
        return $res;
    }
}
<file_sep><?php
/**
* Contrôleur secondaire chargé de la gestion des auteurs
* @author dk
* @package default (mission 4)
*/
// récupération de l'action à effectuer
if (isset($_GET["action"])) {
$action = $_GET["action"];
}
else {
$action = 'listerAuteurs';
}
// variables pour la gestion des messages
$titrePage = 'Gestion des auteurs';
// variables pour la gestion des erreurs
$tabErreurs = array();
$hasErrors = false;
// ouvrir une connexion
$cnx = connectDB();
// charger la vue en fonction du choix de l'utilisateur
switch ($action) {
case 'consulterAuteur' : {
if (isset($_GET["id"])) {
$intID = intval(htmlentities($_GET["id"]));
// récupération des valeurs dans la base
$strSQL = "SELECT nom_auteur, prenom_auteur, alias, notes "
."FROM auteur "
."WHERE id_auteur = ?";
try {
$lAuteur = getRows($cnx, $strSQL, array($intID));
if ($lAuteur) {
$strNom = $lAuteur[0][0];
$strPrenom = $lAuteur[0][1];
$strAlias = $lAuteur[0][2];
$strNotes = $lAuteur[0][3];
}
else {
$tabErreurs["Erreur"] = "Cet auteur n'existe pas !";
$tabErreurs["ID"] = $intID;
$hasErrors = true;
}
}
catch (PDOException $e) {
$tabErreurs["Erreur"] = $e->getMessage();
$hasErrors = true;
}
}
else {
// pas d'id dans l'url ni clic sur Valider : c'est anormal
$tabErreurs["Erreur"] =
"Aucun auteur n'a été transmis pour consultation !";
$hasErrors = true;
}
if ($hasErrors) {
$msg = "Une erreur s'est produite :";
include 'vues/v_afficherErreurs.php';
}
else {
include 'vues/v_consulterAuteur.php';
}
} break;
case 'ajouterAuteur' : {
// initialisation des variables
$strNom = '';
$strPrenom = '';
$strAlias = '';
$strNotes = '';
// traitement de l'option : saisie ou validation ?
if (isset($_GET["option"])) {
$option = htmlentities($_GET["option"]);
}
else {
$option = 'saisirAuteur';
}
switch($option) {
case 'saisirAuteur' : {
include 'vues/v_ajouterAuteur.php';
} break;
case 'validerAuteur' : {
// tests de gestion du formulaire
if (isset($_POST["cmdValider"])) {
// récupération des valeurs saisies
if (!empty($_POST["txtNom"])) {
$strNom = ucfirst($_POST["txtNom"]);
}
if (!empty($_POST["txtPrenom"])) {
$strPrenom = ucfirst($_POST["txtPrenom"]);
}
if (!empty($_POST["txtAlias"])) {
$strAlias = ucfirst($_POST["txtAlias"]);
}
if (!empty($_POST["txtNotes"])) {
$strNotes = ucfirst($_POST["txtNotes"]);
}
// test zones obligatoires
if (!empty($strNom)) {
// les zones obligatoires sont présentes
}
else {
if (empty($strNom)) {
$tabErreurs["Nom"] = "Le nom doit être renseigné !";
}
$hasErrors = true;
}
if (!$hasErrors) {
// ajout dans la base de données
$strSQL = "INSERT INTO auteur (
nom_auteur, prenom_auteur, alias, notes
) VALUES (?,?,?,?)";
try {
$res = execSQL(
$cnx, $strSQL, array(
$strNom,$strPrenom,$strAlias,$strNotes
)
);
if ($res) {
$msg = '<span class="info">L\'auteur '
.$strNom.' '
.$strPrenom.' a été ajouté</span>';
// récupération du numéro (auto-incrément)
$strSQL = "SELECT MAX(id_auteur) FROM auteur";
$intID = getValue($cnx, $strSQL, array());
include 'vues/v_consulterAuteur.php';
}
else {
$tabErreurs["Erreur"] = "Une erreur s'est produite
dans l'opération d'ajout !";
$tabErreurs["Nom"] = $strNom;
$tabErreurs["Prenom"] = $strPrenom;
$tabErreurs["Alias"] = $strAlias;
$tabErreurs["Notes"] = $strNotes;
$hasErrors = true;
}
}
catch (PDOException $e) {
$tabErreurs["Erreur"] =
"Une exception PDO a été levée !";
$hasErrors = true;
}
}
else {
$msg = "L'opération d'ajout n'a pas pu être menée
à terme en raison des erreurs suivantes :";
$lien = '<a href="index.php?uc=gererAuteurs&action=ajouterAuteur">Retour à la saisie</a>';
include 'vues/v_afficherErreurs.php';
}
}
} break;
}
} break;
case 'modifierAuteur' : {
// initialisation des variables
$strNom = '';
$strPrenom = '';
$strAlias = '';
$strNotes = '';
// traitement de l'option : saisie ou validation ?
if (isset($_GET["option"])) {
$option = htmlentities($_GET["option"]);
}
else {
$option = 'saisirAuteur';
}
switch($option) {
case 'saisirAuteur' : {
// récupération du code
if (isset($_GET["id"])) {
$intID = intval(htmlentities($_GET["id"]));
// récupération des données dans la base
$strSQL = "SELECT nom_auteur, prenom_auteur, alias, notes "
."FROM auteur "
."WHERE id_auteur = ?";
$lAuteur = getRows($cnx, $strSQL, array($intID));
if (count($lAuteur) == 1) {
$strNom = $lAuteur[0][0];
$strPrenom = $lAuteur[0][1];
$strAlias = $lAuteur[0][2];
$strNotes = $lAuteur[0][3];
}
else {
$tabErreurs["Erreur"] = "Cet auteur n'existe pas !";
$tabErreurs["ID"] = $intID;
$hasErrors = true;
}
}
include 'vues/v_modifierAuteur.php';
} break;
case 'validerAuteur' : {
// si on a cliqué sur Valider
if (isset($_POST["cmdValider"])) {
// mémoriser les données pour les réafficher dans le formulaire
$intID = intval($_POST["txtID"]);
// récupération des valeurs saisies
if (!empty($_POST["txtNom"])) {
$strNom = ucfirst($_POST["txtNom"]);
}
if (!empty($_POST["txtPrenom"])) {
$strPrenom = ucfirst(($_POST["txtPrenom"]));
}
if (!empty($_POST["txtAlias"])) {
$strAlias = ucfirst(($_POST["txtAlias"]));
}
if (!empty($_POST["txtNotes"])) {
$strNotes = ucfirst($_POST["txtNotes"]);
}
// test zones obligatoires
if (!empty($strNom)) {
// les zones obligatoires sont présentes
// tests de cohérence
}
else {
if (empty($strNom)) {
$tabErreurs["Nom"] = "Le nom doit être renseigné !";
}
$hasErrors = true;
}
if (!$hasErrors) {
// mise à jour dans la base de données
$strSQL = "UPDATE auteur SET nom_auteur = ?,"
."prenom_auteur = ?,"
."alias = ?,"
."notes = ? "
."WHERE id_auteur = ?";
try {
$res = execSQL($cnx,$strSQL,array(
$strNom,
$strPrenom,
$strAlias,
$strNotes,
$intID
)
);
if ($res) {
$msg = '<span class="info">L\'auteur '
.$strNom.' '
.$strPrenom.' a été modifié</span>';
include 'vues/v_consulterAuteur.php';
}
else {
$tabErreurs["Erreur"] = 'Une erreur s\'est produite lors de l\'opération de mise à jour !';
$tabErreurs["ID"] = $intID;
$tabErreurs["Nom"] = $strNom;
$tabErreurs["Prenom"] = $strPrenom;
$tabErreurs["Alias"] = $strAlias;
$tabErreurs["Notes"] = $strNotes;
// en phase de test, on peut ajouter le SQL :
$tabErreurs["SQL"] = $strSQL;
$hasErrors = true;
}
}
catch (PDOException $e) {
$tabErreurs["Erreur"] = 'Une exception a été levée !';
$hasErrors = true;
}
}
}
else {
// pas d'id dans l'url ni clic sur Valider : c'est anormal
$tabErreurs["Erreur"] = "Aucun auteur n'a été transmis pour modification !";
$hasErrors = true;
}
}
}
// affichage des erreurs
if ($hasErrors) {
$msg = "Une erreur s'est produite :";
include 'vues/v_afficherErreurs.php';
include 'vues/v_modifierAuteur.php';
}
} break;
case 'supprimerAuteur' : {
// récupération de l'identifiant du auteur passé dans l'URL
if (isset($_GET["id"])) {
$intID = intval(htmlentities($_GET["id"]));
// récupération des données dans la base
$strSQL = "SELECT nom_auteur, prenom_auteur, alias "
."FROM auteur "
."WHERE id_auteur = ?";
$lAuteur = getRows($cnx, $strSQL, array($intID));
if (count($lAuteur) == 1) {
$strNom = $lAuteur[0][0];
$strPrenom = $lAuteur[0][1];
$strAlias = $lAuteur[0][2];
}
else {
$tabErreurs["Erreur"] = "Cet auteur n'existe pas !";
$tabErreurs["Code"] = $intID;
$hasErrors = true;
}
if (!$hasErrors) {
// rechercher des ouvrages de ce auteur
$strSQL = "SELECT COUNT(*) "
."FROM auteur_ouvrage "
."WHERE id_auteur = ?";
try {
$ouvragesAuteur = getValue($cnx, $strSQL, array($intID));
if ($ouvragesAuteur == 0) {
// c'est bon, on peut le supprimer
$strSQL = "DELETE FROM auteur WHERE id_auteur = ?";
try {
$res = execSQL($cnx, $strSQL, array($intID));
if ($res) {
$msg = '<span class="info">L\'auteur '
.$strNom.' a été supprimé';
include 'vues/v_afficherMessage.php';
} }
catch (PDOException $e) {
$tabErreurs["Erreur"] =
"Une exception PDO a été levée !";
$tabErreurs["Message"] = $e->getMessage();
$hasErrors = true;
}
}
else {
$tabErreurs["Erreur"] = "Cet auteur est référencé par des ouvrages, suppression impossible !";
$tabErreurs["ID"] = $intID;
$tabErreurs["Nom"] = $strNom;
$tabErreurs["Prénom"] = $strPrenom;
$tabErreurs["Ouvrages"] = $ouvragesAuteur;
$hasErrors = true;
}
}
catch (PDOException $e) {
$tabErreurs["Erreur"] = $e->getMessage();
}
}
}
// affichage des erreurs
if ($hasErrors) {
$msg = "Une erreur s'est produite :";
$lien = '<a href="index.php?uc=gererAuteurs&action=consulterAuteur&id='
.$intID.'">Retour à la consultation</a>';
include 'vues/v_afficherErreurs.php';
}
} break;
case 'listerAuteurs' : {
// récupérer les auteurs
$strSQL = "SELECT id_auteur as ID, "
." nom AS Nom "
."FROM v_auteurs ";
$lesAuteurs = getRows($cnx, $strSQL, array());
// afficher le nombre de auteurs
$nbAuteurs = count($lesAuteurs);
include 'vues/v_listeAuteurs.php';
} break;
// déconnexion
disconnectDB($cnx);
}
<file_sep><?php
/**
*
* BMG
* © GroSoft, 2016
*
* Business Logic Layer
*
* Utilise les services des classes de la bibliothèque Reference
* Utilise les services de la classe PretDal
* Utilise les services de la classe Application
*
* @package default
* @author <NAME> Alicia ,28/09/2016
* @version 1.0
*/
/*
* ====================================================================
* Classe Pret : fabrique d'objets Pret
* ====================================================================
*/
// sollicite les méthodes de la classe PretDal
require_once ('./modele/Dal/PretDal.class.php');
// sollicite les services de la classe Application
require_once ('./modele/App/Application.class.php');
// sollicite la référence de la classe Pret
require_once ('./modele/Reference/Pret.class.php');
class Prets {

    /**
     * Loads the loans currently in progress.
     * @param $mode 0 == associative rows straight from the DAL, 1 == array of Pret objects
     * @return array|NULL rows/objects on success, NULL when the DAL returned nothing usable
     */
    public static function chargerLesPrets($mode) {
        // PRETS_EN_COURS is an application-level constant — assumed defined
        // by an included configuration file (TODO confirm).
        $tab = PretDal::loadGenericPret(0, array(PRETS_EN_COURS));
        if (Application::dataOK($tab)) {
            if ($mode == 1) {
                $res = array();
                foreach ($tab as $ligne) {
                    $unPret = new Pret(
                        $ligne->id_pret,
                        $ligne->no_client,
                        $ligne->no_ouvrage,
                        $ligne->date_emp,
                        $ligne->date_ret,
                        $ligne->penalite
                    );
                    array_push($res, $unPret);
                }
                return $res;
            }
            else {
                return $tab;
            }
        }
        return NULL;
    }

    /**
     * Checks whether a loan exists.
     * @param $id the loan id to check
     * @return int 1 when the loan exists, 0 otherwise
     */
    public static function pretExiste($id) {
        $values = PretDal::loadPretByID($id, 1);
        if (Application::dataOK($values)) {
            return 1;
        }
        return 0;
    }

    /**
     * Inserts a new loan and returns it as a Pret object.
     * @param $valeurs positional values: no_client, no_ouvrage, date_emp, date_ret, penalite
     * @return Pret|NULL the freshly inserted loan
     */
    public static function ajouterPret($valeurs) {
        $id = PretDal::addPret(
            $valeurs[0],
            $valeurs[1],
            $valeurs[2],
            $valeurs[3],
            $valeurs[4]
        );
        return self::chargerPretParID($id);
    }

    /**
     * Persists the state of an existing Pret object.
     * @param $pret a Pret instance
     * @return mixed whatever PretDal::setPret returns
     */
    public static function modifierPret($pret) {
        return PretDal::setPret(
            $pret->getId(),
            $pret->getNoClient(),
            $pret->getNoOuvrage(),
            $pret->getDateEmp(),
            $pret->getDateRet(),
            $pret->getPenalite()
        );
    }

    /**
     * Deletes a loan by id.
     * @param $id the loan id
     * @return mixed whatever PretDal::delPret returns
     */
    public static function supprimerPret($id) {
        return PretDal::delPret($id);
    }

    /**
     * Loads the characteristics of one loan.
     * @param $id the loan id
     * @return Pret|NULL a Pret object, or NULL when not found
     */
    public static function chargerPretParId($id) {
        $values = PretDal::loadGenericPret(1, array($id));
        if (Application::dataOK($values)) {
            $id = $values[0]->id_pret;
            $no_client = $values[0]->no_client;
            $no_ouvrage = $values[0]->no_ouvrage;
            $date_emp = $values[0]->date_emp;
            $date_ret = $values[0]->date_ret;
            $penalite = $values[0]->penalite;
            // BUG FIX: this used to build `new Auteur(...)` with six loan
            // arguments (Auteur's constructor takes five author fields);
            // a Pret must be returned by this factory.
            return new Pret($id, $no_client, $no_ouvrage, $date_emp, $date_ret, $penalite);
        }
        return NULL;
    }
}
<file_sep><?php
/**
* Contrôleur secondaire chargé de la gestion des prets
* @author pv
* @package default (mission 4)
*/
// bibliothèques à utiliser
require_once ('modele/App/Application.class.php');
require_once ('modele/App/Notification.class.php');
require_once ('modele/Render/AdminRender.class.php');
require_once ('modele/Bll/Ouvrages.class.php');
require_once ('modele/Bll/Genres.class.php');
require_once ('modele/Bll/Auteurs.class.php');
require_once ('modele/Bll/Prets.class.php');
// récupération de l'action à effectuer
if (isset($_GET["action"])) {
$action = $_GET["action"];
}
else {
$action = 'listerPrets';
}
// si un id est passé en paramètre, créer un objet (pour consultation, modification ou suppression)
if (isset($_REQUEST["id"])) {
$id = $_REQUEST["id"];
$unPret = Prets::chargerPretParId($id);
}
// charger la vue en fonction du choix de l'utilisateur
switch ($action) {
case 'listerPrets' : {
// récupérer les ouvrages
$lesPrets = Prets::chargerLesPrets(1);
// afficher le nombre de ouvrages
$nbPrets = count($lesPrets);
include 'vues/v_listePrets.php';
} break;
case 'consulterOuvrage' : {
if ($unPret == NULL) {
Application::addNotification(new Notification("Cet ouvrage n'existe pas !", ERROR));
$lesOuvrages = Ouvrages::chargerLesOuvrages(0);
// afficher le nombre de ouvrages
$nbOuvrages = count($lesOuvrages);
include 'vues/v_listeOuvrages.php';
}
else {
$no = $unOuvrage->getNo();
$strTitre = $unOuvrage->getTitre();
$strAuteur = $unOuvrage->getAuteurs();
$strAcquisition = $unOuvrage->getDateAcqui();
$strGenre = $unOuvrage->getGenre()->getLibelle();
$strSalle = $unOuvrage->getSalle();
$strRayon = $unOuvrage->getRayon();
$strDernierPret = $unOuvrage->getDernierPret();
$strDispo = $unOuvrage->getDispo();
include 'vues/v_consulterOuvrage.php';
}
} break;
case 'ajouterOuvrage' : {
// initialisation des variables
$hasErrors = false;
$strTitre = '';
$intSalle = 1;
$strRayon = '';
$strDate = '';
$rq = Genres::chargerLesGenres(0);
$lesGenres = array();
foreach($rq as $values)
{
$lesGenres[$values->code_genre] = $values->lib_genre;
}
$strGenre = $rq[1]->code_genre;
// traitement de l'option : saisie ou validation ?
if (isset($_GET["option"])) {
$option = htmlentities($_GET["option"]);
}
else {
$option = 'saisirOuvrage';
}
switch($option) {
case 'saisirOuvrage' : {
include 'vues/v_ajouterOuvrage.php';
} break;
case 'validerOuvrage' : {
// tests de gestion du formulaire
if (isset($_POST["cmdValider"])) {
// test zones obligatoires
if (!empty($_POST["txtTitre"]) and
!empty($_POST["rbnSalle"]) and
!empty($_POST["txtRayon"]) and
!empty($_POST["cbxGenres"])and
!empty($_POST["txtDate"])) {
// les zones obligatoires sont présentes
$strTitre = htmlspecialchars($_POST["txtTitre"]);
if(preg_match("#^[0-2]$#", $intSalle))
{
$intSalle = htmlspecialchars($_POST["rbnSalle"]);
}
else{
$hasErrors = true;
}
if(rayonValide($_POST['txtRayon']))
{
$strRayon = htmlspecialchars($_POST['txtRayon']);
}
else{
$hasErrors = true;
}
if(Genres::genreExiste($_POST["cbxGenres"]))
{
$strGenre = $_POST["cbxGenres"];
}
else{
$hasErrors = true;
}
if(strtotime($_POST["txtDate"]) <= strtotime(date('Y-m-d')))
{
$strDate = $_POST["txtDate"];
}
else{
$hasErrors = true;
}
}
else {
// une ou plusieurs valeurs n'ont pas été saisies
if (empty($_POST["txtTitre"])) {
Application::addNotification(new Notification("Le titre doit être renseigné !", ERROR));
}
if (empty($_POST["rbnSalle"])) {
Application::addNotification(new Notification("La salle doit être renseigné !", ERROR));
}
if (empty($_POST["txtRayon"])) {
Application::addNotification(new Notification("Le rayon doit être renseigné !", ERROR));
}
if (empty($_POST["cbxGenres"])) {
Application::addNotification(new Notification("Le genre doit être renseigné !", ERROR));
}
if(empty($_POST["txtDate"])){
Application::addNotification(new Notification("La date doit être renseigné !", ERROR));
}
$hasErrors = true;
}
if (!$hasErrors) {
// ajout dans la base de données
$unOuvrage = Ouvrages::ajouterOuvrage(array($strTitre,$intSalle,$strRayon,$strGenre,$strDate));
Application::addNotification(new Notification("L'ouvrage à été ajouté !", SUCCESS));
$no = $unOuvrage->getNo();
$strTitre = $unOuvrage->getTitre();
$strAuteur = $unOuvrage->getAuteurs();
$strAcquisition = $unOuvrage->getDateAcqui();
$strGenre = $unOuvrage->getGenre()->getLibelle();
$strSalle = $unOuvrage->getSalle();
$strRayon = $unOuvrage->getRayon();
$strDernierPret = $unOuvrage->getDernierPret();
$strDispo = $unOuvrage->getDispo();
include 'vues/v_consulterOuvrage.php';
}
else {
if(!preg_match("#^([0-2])$#", $_POST["rbnSalle"]))
{
Application::addNotification(new Notification("La salle doit être 1 ou 2 !", ERROR));
}
if(!empty($_POST["txtRayon"]) && !rayonValide($_POST["txtRayon"]))
{
Application::addNotification(new Notification("Le rayon doit être au format suivant : 1 lettre majuscule et 1 chiffre !", ERROR));
}
if(!empty($_POST["cbxGenres"]) && !Genres::genreExiste($_POST["cbxGenres"]))
{
Application::addNotification(new Notification("Le genre :".$_POST["cbxGenres"]." séléctionné n'existe pas !", ERROR));
}
if(!empty($_POST["txtDate"]) && (strtotime($_POST["txtDate"]) > strtotime(date('Y-m-d'))))
{
Application::addNotification(new Notification("La date doit être inférieur à ".date('Y-m-d')." !", ERROR));
}
include 'vues/v_ajouterOuvrage.php';
}
}
} break;
}
} break;
case 'modifierOuvrage' : {
// initialisation des variables
$hasErrors = false;
// traitement de l'option : saisie ou validation ?
if (isset($_GET["option"])) {
$option = htmlentities($_GET["option"]);
}
else {
$option = 'saisirOuvrage';
}
switch($option) {
case 'saisirOuvrage' : {
// récupération de l'id
if (isset($_GET["id"]) && $unOuvrage) {
//initialisation des données
$no = $unOuvrage->getNo();
$strTitre = $unOuvrage->getTitre();
$strAcquisition = $unOuvrage->getDateAcqui();
$strGenre = $unOuvrage->getGenre()->getCode();
$intSalle = $unOuvrage->getSalle();
$strRayon = $unOuvrage->getRayon();
$strDernierPret = $unOuvrage->getDernierPret();
$strDispo = $unOuvrage->getDispo();
//Création d'un tableau de genre afin de formater celui-ci en vue de l'utiliser en paramét
$lesGenres = Genres::chargerLesGenres(0);
$lesGenres = array();
foreach($rq as $values)
{
$lesGenres[$values->code_genre] = $values->lib_genre;
}
include("vues/v_modifierOuvrage.php");
}
else {
Application::addNotification(new Notification("L'ouvrage est inconnu !", ERROR));
$lesOuvrages = Ouvrages::chargerLesOuvrages(0);
// afficher le nombre de ouvrages
$nbOuvrages = count($lesOuvrages);
include("vues/v_listeOuvrages.php");
}
} break;
case 'validerOuvrage' : {
// si on a cliqué sur Valider
if (isset($_POST["cmdValider"])) {
// mémoriser les valeurs pour les réafficher
// test zones obligatoires
if (!empty($_POST["txtLibelle"])) {
// les zones obligatoires sont présentes
$strLibelle = ucfirst(htmlentities($_POST["txtLibelle"]));
// tests de cohérence
}
else {
if (empty($strLibelle)) {
Application::addNotification(new Notification("Le libellé est obligatoire !", ERROR));
}
$hasErrors = true;
}
if (!$hasErrors) {
// mise à jour dans la base de données
$unGenre->setLibelle($strLibelle);
$res = Genres::modifierGenre($unGenre);
Application::addNotification(new Notification("Le genre a été modifié !", SUCCESS));
include 'vues/v_consulterGenre.php';
}
else {
include("vues/v_modifierGenre.php");
}
}
}
}
} break;
case 'supprimerGenre' : {
// rechercher des ouvrages de ce genre
if (Genres::nbOuvragesParGenre($unGenre->getCode()) > 0) {
// il y a des ouvrages référencés, suppression impossible
Application::addNotification(new Notification("Il existe des ouvrages qui référencent ce genre, suppression impossible !", ERROR));
include 'vues/v_consulterGenre.php';
}
else {
// supprimer le genre
Genres::supprimerGenre($unGenre->getCode());
Application::addNotification(new Notification("Le genre a été supprimé !", SUCCESS));
// afficher la liste
$lesGenres = Genres::chargerLesGenres(1);
$nbGenres = count($lesGenres);
include 'vues/v_listeGenres.php';
}
} break;
}<file_sep><?php
/**
*
* Application Schuman
* © Vincent, 2016
*
* Name
*
* Utilise les services de
*
* @package default
* @author pv
* @version 1.0
* @link
*/
/**
 * Domain class representing a book ("ouvrage").
 * Wraps the book's own columns plus a Genre object; author, last-loan and
 * availability data are fetched lazily through the DAL classes.
 */
class Ouvrage{
// NOTE(review): "_date_acuisition" looks like a typo of "_date_acquisition";
// it is private and used consistently, so behaviour is unaffected.
private $_no_ouvrage;
private $_titre;
private $_salle;
private $_rayon;
private $_obj_genre;//A Genre object, not a bare code
private $_date_acuisition;
/**
 * Builds a book from its columns and an already-constructed Genre object.
 */
public function __construct($p_no_ouvrage,
$p_titre,
$p_salle,
$p_rayon,
$p_obj_genre,
$p_date_acquisition) {
$this->setNo($p_no_ouvrage);
$this->setTitre($p_titre);
$this->setSalle($p_salle);
$this->setRayon($p_rayon);
// setGenre rebuilds a fresh Genre from the code/label of the one passed in
$this->setGenre($p_obj_genre->getCode(),$p_obj_genre->getLibelle());
$this->setDateAcqui($p_date_acquisition);
}
// Read accessors
public function getNo()
{
return $this->_no_ouvrage;
}
public function getTitre()
{
return $this->_titre;
}
public function getSalle()
{
return $this->_salle;
}
public function getRayon()
{
return $this->_rayon;
}
/** Returns the Genre object (not the raw code). */
public function getGenre()
{
return $this->_obj_genre;
}
public function getDateAcqui()
{
return $this->_date_acuisition;
}
// Write accessors
public function setNo($p_no)
{
$this->_no_ouvrage = $p_no;
}
public function setTitre($p_titre)
{
$this->_titre = $p_titre;
}
public function setSalle($p_salle)
{
$this->_salle = $p_salle;
}
public function setRayon($p_rayon)
{
$this->_rayon = $p_rayon;
}
/** Replaces the Genre with a new instance built from code + label. */
public function setGenre($p_code_genre,$p_lib_genre)
{
$this->_obj_genre = new Genre($p_code_genre,$p_lib_genre);
}
public function setDateAcqui($p_date)
{
$this->_date_acuisition = $p_date;
}
/**
 * getAuteurs returns the authors of this book.
 * Hits the database (OuvrageDal + AuteurDal) on every call.
 * @return array $res an array of Auteur objects
 */
public function getAuteurs()
{
$tab = OuvrageDal::loadAuteursByOuvrage($this->getNo());
$res = array();
foreach ($tab as $ligne) {
$unAuteur = AuteurDal::loadAuteurByID($ligne->id_auteur);
$lAuteur = new Auteur($ligne->id_auteur,
$unAuteur[0]->nom_auteur,
$unAuteur[0]->prenom_auteur,
$unAuteur[0]->alias,
$unAuteur[0]->notes);
array_push($res, $lAuteur);
}
return $res;
}
/**
 * affichAuteursThisOuvrage returns a string containing, for each author of
 * the current book, the name followed by the first name.
 * @param int $nivDetail author detail level (0 => terse, 1 => detailed)
 * @return string rendered author list (delegated to AdminRender)
 */
public function affichAuteursThisOuvrage($nivDetail)
{
return AdminRender::affichAuteurs($this->getAuteurs(),$nivDetail);
}
/**
 * getDernierPret fetches the date of the last loan of this book.
 * Hits the database on every call.
 * @return string $date a date
 */
public function getDernierPret()
{
return OuvrageDal::loadOuvrageByID($this->_no_ouvrage)[0]->dernier_pret;
}
/** Fetches the availability flag from the database on every call. */
public function getDispo()
{
return OuvrageDal::loadOuvrageByID($this->_no_ouvrage)[0]->disponibilite;
}
}<file_sep><?php
/**
*
* BMG
* © GroSoft, 2016
*
* Business Logic Layer
*
* Utilise les services des classes de la bibliothèque Reference
* Utilise les services de la classe AuteurDal
* Utilise les services de la classe Application
*
* @package default
* @author pv
* @version 1.0
*/
/*
* ====================================================================
* Classe Genres : fabrique d'objets Genre
* ====================================================================
*/
// sollicite les méthodes de la classe GenreDal
require_once ('./modele/Dal/AuteurDal.class.php');
// sollicite les services de la classe Application
require_once ('./modele/App/Application.class.php');
// sollicite la référence
require_once ('./modele/Reference/Auteur.class.php');
class Auteurs {

    /**
     * Loads every author known to the data layer.
     * @param $mode 0 == associative rows, 1 == array of Auteur objects
     * @return array|NULL rows/objects on success, NULL when the DAL returned nothing usable
     */
    public static function chargerLesAuteurs($mode) {
        $lignes = AuteurDal::loadAuteurs(1);
        if (!Application::dataOK($lignes)) {
            return NULL;
        }
        if ($mode != 1) {
            return $lignes;
        }
        $auteurs = array();
        foreach ($lignes as $uneLigne) {
            array_push($auteurs, new Auteur(
                $uneLigne->id_auteur,
                $uneLigne->nom_auteur,
                $uneLigne->prenom_auteur,
                $uneLigne->alias,
                $uneLigne->notes
            ));
        }
        return $auteurs;
    }

    /**
     * Checks whether an author exists.
     * @param $id the author id to check
     * @return int 1 when the author exists, 0 otherwise
     */
    public static function AuteurExiste($id) {
        $resultat = AuteurDal::loadAuteurByID($id, 1);
        return Application::dataOK($resultat) ? 1 : 0;
    }

    /**
     * Inserts a new author and returns it as an Auteur object.
     * @param $valeurs positional values: nom, prenom, alias, notes
     * @return Auteur|NULL the freshly inserted author
     */
    public static function ajouterAuteur($valeurs) {
        $nouvelId = AuteurDal::addAuteur(
            $valeurs[0],
            $valeurs[1],
            $valeurs[2],
            $valeurs[3]
        );
        return self::chargerAuteurParID($nouvelId);
    }

    /**
     * Persists the state of an existing Auteur object.
     * @param $auteur an Auteur instance
     * @return mixed whatever AuteurDal::setAuteur returns
     */
    public static function modifierAuteur($auteur) {
        return AuteurDal::setAuteur(
            $auteur->getId(),
            $auteur->getNom(),
            $auteur->getPrenom(),
            $auteur->getAlias(),
            $auteur->getNotes()
        );
    }

    /**
     * Deletes an author by id.
     * @param $id the author id
     * @return mixed whatever AuteurDal::delAuteur returns
     */
    public static function supprimerAuteur($id) {
        return AuteurDal::delAuteur($id);
    }

    /**
     * Loads the characteristics of one author.
     * @param $id the author id
     * @return Auteur|NULL an Auteur object, or NULL when not found
     */
    public static function chargerAuteurParId($id) {
        $lignes = AuteurDal::loadAuteurByID($id, 1);
        if (!Application::dataOK($lignes)) {
            return NULL;
        }
        $premiere = $lignes[0];
        return new Auteur(
            $id,
            $premiere->nom_auteur,
            $premiere->prenom_auteur,
            $premiere->alias,
            $premiere->notes
        );
    }

    /**
     * Counts the books attributed to one author.
     * @param $id the author id
     * @return int the number of books
     */
    public static function nbOuvragesParAuteurs($id) {
        return AuteurDal::countOuvragesAuteur($id);
    }

    /**
     * Lists authors, excluding the ids given in $tabException.
     * @param $tabException ids to exclude
     * @return array raw rows from the DAL
     */
    public static function listeAuteurEx($tabException) {
        return AuteurDal::loadAuteursOuvrage(0, $tabException);
    }
}
<file_sep><?php
/**
*
* Application Schuman
* © Vincent, 2016
*
* Name
*
* Utilise les services de
*
* @package default
* @author pv
* @version 1.0
* @link
*/
/*
* ====================================================================
* Classe Auteur : représente un auteur
* ====================================================================
*/
/**
 * Domain class representing an author.
 * Plain value holder: id, last name, first name, alias and free-text notes.
 */
class Auteur{
private $_id_auteur;
private $_nom_auteur;
private $_prenom_auteur;
private $_alias;
private $_notes;
/** Builds an author from its five columns. */
public function __construct($p_id_auteur,
$p_nom_auteur,
$p_prenom_auteur,
$p_alias,
$p_notes) {
$this->setId($p_id_auteur);
$this->setNom($p_nom_auteur);
$this->setPrenom($p_prenom_auteur);
$this->setAlias($p_alias);
$this->setNotes($p_notes);
}
// Read accessors
public function getId()
{
return $this->_id_auteur;
}
public function getNom()
{
return $this->_nom_auteur;
}
public function getPrenom()
{
return $this->_prenom_auteur;
}
public function getAlias()
{
return $this->_alias;
}
public function getNotes()
{
return $this->_notes;
}
// Write accessors
public function setId($p_id)
{
$this->_id_auteur = $p_id;
}
public function setNom($p_nom)
{
$this->_nom_auteur = $p_nom;
}
public function setPrenom($p_prenom)
{
$this->_prenom_auteur = $p_prenom;
}
public function setAlias($p_alias)
{
$this->_alias = $p_alias;
}
public function setNotes($p_notes)
{
$this->_notes = $p_notes;
}
/** Returns "lastname firstname" for display purposes. */
public function decrireAuteur()
{
return $this->getNom()." ".$this->getPrenom();
}
}<file_sep><?php
/**
*
* BMG
* © GroSoft, 2016
*
* Notification
* Classe technique pour l'application
*
* @package default
* @author dk
* @version 1.0
*/
/**
 * Lightweight message/type pair used to surface notifications in the UI.
 * The type is expected to be one of the application's severity constants
 * (e.g. ERROR / SUCCESS).
 */
class Notification {

    private $_msg;   // text shown to the user
    private $_type;  // severity of the notification

    /** Builds a notification from its text and severity. */
    public function __construct($message, $severite) {
        $this->setMsg($message);
        $this->setType($severite);
    }

    // Write accessors
    public function setMsg($message) {
        $this->_msg = $message;
    }

    public function setType($severite) {
        $this->_type = $severite;
    }

    // Read accessors
    public function getMsg() {
        return $this->_msg;
    }

    public function getType() {
        return $this->_type;
    }
}
<file_sep><?php
/**
*
* BMG
* © GroSoft, 2016
*
* References
* Classes métier
*
*
* @package default
* @author <NAME>, <NAME> 28/09/2016
* @version 1.0
*/
/*
* ====================================================================
* Classe Pret : représente un pret
* ====================================================================
*/
/**
 * Domain class representing a loan ("pret"): which client borrowed which
 * book, when it was borrowed and returned, and the penalty amount.
 */
class Pret {

    private $_id;         // loan identifier
    private $_no_client;  // borrower number
    private $_no_ouvrage; // borrowed book number
    private $_date_emp;   // borrow date
    private $_date_ret;   // return date
    private $_penalite;   // penalty amount

    /**
     * Builds a loan from its six columns.
     */
    public function __construct(
        $p_id,
        $p_no_client,
        $p_no_ouvrage,
        $p_date_emp,
        $p_date_ret,
        $p_penalite
    ) {
        $this->setId($p_id);
        $this->setNoClient($p_no_client);
        $this->setNoOuvrage($p_no_ouvrage);
        $this->setDateEmp($p_date_emp);
        $this->setDateRet($p_date_ret);
        $this->setPenalite($p_penalite);
    }

    /**
     * Read accessors
     */
    public function getId () {
        return $this->_id;
    }
    public function getNoClient () {
        return $this->_no_client;
    }
    public function getNoOuvrage () {
        return $this->_no_ouvrage;
    }
    public function getDateEmp () {
        return $this->_date_emp;
    }
    public function getDateRet () {
        return $this->_date_ret;
    }
    public function getPenalite () {
        return $this->_penalite;
    }

    /**
     * Write accessors
     */
    public function setId ($p_id) {
        $this->_id = $p_id;
    }
    public function setNoClient ($p_noClient) {
        $this->_no_client = $p_noClient;
    }
    // CONSISTENCY FIX: was declared "setnoOuvrage" (lowercase n) while every
    // other mutator uses UpperCamel segments; PHP method names are
    // case-insensitive, so existing callers are unaffected by the rename.
    public function setNoOuvrage ($p_noOuvrage) {
        $this->_no_ouvrage = $p_noOuvrage;
    }
    public function setDateEmp ($p_dateEmp) {
        $this->_date_emp = $p_dateEmp;
    }
    public function setDateRet ($p_dateRet) {
        $this->_date_ret = $p_dateRet;
    }
    public function setPenalite ($p_penalite) {
        $this->_penalite = $p_penalite;
    }
}
<file_sep><?php
/**
 * Book-management view: form attaching an author to a book.
 * Expects $unOuvrage, $lesAuteurs and $strAuteur (and the afficherListe()
 * helper) to be provided by the calling controller — TODO confirm.
 * NOTE(review): the legend says "Ajouter un ouvrage" but the form posts the
 * ajouterAuteur/validerAuteur action — the legend text looks wrong; verify.
 * @author pv
 * @package default
 */
?>
<div id="content">
<h2>Gestion des ouvrages</h2>
<?php AdminRender::showNotifications(); ?>
<div id="object-list">
<!-- posts the selected author to the book identified in the URL -->
<form action="index.php?uc=gererOuvrages&action=ajouterAuteur&option=validerAuteur&id=<?php echo($unOuvrage->getNo()) ?>" method="post">
<div class="corps-form">
<fieldset>
<legend>Ajouter un ouvrage</legend>
<table>
<tr>
<td>
<label for="cbxAuteurs">
Auteur :
</label>
</td>
<td>
<?php
afficherListe($lesAuteurs,"cbxAuteurs",$strAuteur,"");
?>
</td>
</tr>
</table>
</fieldset>
</div>
<div class="pied-form">
<p>
<input id="cmdValider" name="cmdValider"
type="submit"
value="Ajouter"
/>
</p>
</div>
</form>
</div>
</div>
|
6af269e698e806f6e2b7ec513944ea3ef58ca1c3
|
[
"PHP"
] | 24
|
PHP
|
BTSSIO2LRS/bmg
|
d6733be27362075c0c9ce6830c767ae9d7092b6c
|
934a81289721ddd2f35d5b854f3242afbdfabb04
|
refs/heads/main
|
<repo_name>alessandroghizzardi/academy-ionic<file_sep>/src/app/recipes/recipe-detail/recipe-detail.page.ts
import { Component, OnInit } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import { AlertController } from '@ionic/angular';
import { Recipe } from '../recipe.model';
import { RecipesService } from '../recipes.service';
@Component({
  selector: 'app-recipe-detail',
  templateUrl: './recipe-detail.page.html',
  styleUrls: ['./recipe-detail.page.scss'],
})
export class RecipeDetailPage implements OnInit {
  recipe: Recipe;

  constructor(
    private activatedRoute: ActivatedRoute,
    private recipesService: RecipesService,
    // TYPO FIX: was "rouder"; private property, so the rename is internal only
    private router: Router,
    private alertController: AlertController
  ) { }

  /** Resolves the recipe matching the :recipeId route parameter. */
  ngOnInit() {
    this.activatedRoute.paramMap.subscribe(paramMap => {
      if (!paramMap.has('recipeId')) {
        // TODO(review): redirect to the recipe list instead of silently returning
        return;
      }
      const recipeId = paramMap.get('recipeId');
      this.recipe = this.recipesService.getRecipe(recipeId);
    });
  }

  /**
   * Asks the user for confirmation; on confirm, deletes the current recipe,
   * shows a success alert and navigates back to the list.
   * FIX: the original held the un-awaited create() promise in an unused
   * variable; the alert is now awaited and presented explicitly.
   */
  async onDeleteRecipe() {
    const alert = await this.alertController.create({
      header: 'Are you sure?',
      message: 'Do you really want to delete this recipe?',
      buttons: [
        {
          text: 'Cancel',
          role: 'cancel'
        },
        {
          text: 'Confirm',
          role: 'confirm',
          handler: async () => {
            this.recipesService.deleteRecipe(this.recipe.id);
            await this.presentAlert('Recipe deleted', `Recipe for ${this.recipe.title} has been successfully deleted`);
            this.router.navigate(['/recipes']);
          }
        }
      ]
    });
    await alert.present();
  }

  /** Shows a simple OK alert and logs the role it was dismissed with. */
  async presentAlert(alertHeader: string, alertMessage: string) {
    const alert = await this.alertController.create({
      header: alertHeader,
      message: alertMessage,
      buttons: ['OK']
    });
    await alert.present();
    const { role } = await alert.onDidDismiss();
    console.log('onDidDismiss resolved with role', role);
  }
}
|
a913926271c26e3a198f4f41bf0ae39052492885
|
[
"TypeScript"
] | 1
|
TypeScript
|
alessandroghizzardi/academy-ionic
|
783372934532be847376605646529393719fb4b1
|
87b46f2bbc6d956aa4abf420e899fc14af4bcdc6
|
refs/heads/master
|
<repo_name>xdbr/TextMate-Bundle-yUML<file_sep>/README.md
yuml.tmbundle
=============
yuml.me TextMate Bundle with Snippets.
Snippets
--------
currently available Snippets are (all snippets are to be followed by TAB):
### Classes and Interfaces
* Interface: `interface`
* Class (simple): `class`
* Class: `class`
### Connections
* Aggregation (to-n): `connection`
* Aggregation: `connection`
* Cardinality: `connection`
* Composition: `connection`
* Dependency: `connection`
* Directional Association: `connection`
* Inheritance: `connection`
* Interface Inheritance: `connection`
* Simple Association: `connection`
* interface: `connection`
Commands
--------
* *Preview* (Command-R)
Installation
------------
### Textmate
```
git clone https://github.com/xdbr/TextMate-Bundle-yUML.git ~/Library/Application\ Support/TextMate/Bundles/yuml.tmbundle || cd $_ && git pull; cd -
```
### Sublime Text 2
```
git clone https://github.com/xdbr/TextMate-Bundle-yUML.git ~/Library/Application\ Support/Sublime\ Text\ 2/Packages/yuml.tmbundle || cd $_ && git pull; cd -
```
The latter is reported to work for Sublime Text 2 (which I have not tried myself); the former is the setup I use with TextMate.
Contributing
------------
1) `git clone` this repo regularly:
git clone https://github.com/xdbr/TextMate-Bundle-yUML.git
2) Use TextMate's Bundle-Manager to create new snippets, etc.
3) then use `import-new-snippets-from-textmate.sh` to get the current bundle from TextMate in
4) `git commit`
Author
------
xdbr
License
-------
Beerware License
<file_sep>/import-new-snippets-from-textmate.sh
#!/bin/sh
# Syncs the bundle as edited inside TextMate back into this repository.
# NOTE(review): the original used `set +x` (which *disables* command tracing);
# presumably `set -x` was intended — kept as-is to preserve behaviour.
set +x
cd ..
# FIX: quote ${OLDPWD} so paths containing spaces survive word splitting
cp -r ~/Library/Application\ Support/TextMate/Bundles/yuml.tmbundle/* "${OLDPWD}"
cd -
|
f20638b9bd9fb3dafee71a9225daf6f2f3ab3b41
|
[
"Markdown",
"Shell"
] | 2
|
Markdown
|
xdbr/TextMate-Bundle-yUML
|
156659599f2f0aea61b3cb280827f18187d94909
|
39b8ac01fec8f378365ce44902470448ba174514
|
refs/heads/master
|
<repo_name>Jobayerdev/micro-front-end<file_sep>/packages/container/src/@shared/utils/util-function.ts
import jwt_decode from "jwt-decode"
/**
 * Serialises a flat options object into a query-string fragment
 * ("key=value" pairs joined with "&"). Values are interpolated as-is
 * (no URL encoding), exactly as callers expect.
 */
export const concatFilterQuery = (options: any): string => {
  const pairs: string[] = []
  for (const key of Object.keys(options)) {
    pairs.push(`${key}=${options[key]}`)
  }
  return pairs.join("&")
}
const storagePrefix = "jd_react_"

/**
 * Thin wrapper around window.localStorage for the auth token and misc data.
 * NOTE(review): `getDate` looks like a typo for `getData`, but it is part of
 * the public surface, so the name is kept for backward compatibility.
 */
export const storage = {
  /**
   * Returns the stored token, or `false` when none is stored.
   * FIX: now matches the declared `string | false` type — previously a
   * missing token made `JSON.parse` yield `null`.
   */
  getToken: (): string | false => {
    const item = localStorage.getItem(`${storagePrefix}token`)
    if (!item) {
      return false
    }
    return JSON.parse(item) as string
  },
  /**
   * Decodes the stored JWT.
   * FIX: returns `undefined` instead of letting jwt_decode throw when no
   * token is stored.
   */
  getDecodedToken: () => {
    const item = localStorage.getItem(`${storagePrefix}token`)
    return item ? jwt_decode(item) : undefined
  },
  setToken: (token: string) => {
    localStorage.setItem(`${storagePrefix}token`, JSON.stringify(token))
  },
  /** Clears ALL of localStorage, not only this app's keys. */
  clear: () => {
    localStorage.clear()
  },
  /** Stores arbitrary data as JSON under the given key. */
  setData(data: any, key: string) {
    localStorage.setItem(key, JSON.stringify(data))
  },
  /** Reads JSON data stored under the given key; undefined when absent. */
  getDate(key: string) {
    const item = localStorage.getItem(key)
    if (!item) {
      return
    }
    return JSON.parse(item)
  },
  removeData(key: string) {
    localStorage.removeItem(key)
  },
}
<file_sep>/packages/auth/src/@shared/assets/index.ts
// Static image assets bundled with the auth remote (resolved by webpack's file-loader).
export const IMAGES = {
  textureBg: require("./images/auth-bg.jpg").default,
}
<file_sep>/packages/auth/src/ENV.ts
// Environment configuration for the auth remote.
// NOTE(review): CORE_END_POINT is empty here — presumably injected/overridden
// per environment at build time; verify before relying on it.
export const ENV = {
  CORE_END_POINT: "",
}
<file_sep>/packages/auth/config/webpack.common.js
// Webpack settings shared by the dev and prod builds of the "auth" package.
const HtmlWebpackPlugin = require("html-webpack-plugin")
const path = require("path")
module.exports = {
module: {
rules: [
// Transpile TS/TSX/JS/JSX through Babel (no type-checking in this pipeline).
{
test: /\.(js|jsx|ts|tsx)$/,
exclude: /node_modules/,
use: {
loader: "babel-loader",
options: {
presets: [
"@babel/preset-react",
"@babel/preset-env",
"@babel/preset-typescript",
],
plugins: ["@babel/plugin-transform-runtime"],
},
},
},
// Style pipeline: style-loader <- css-loader <- postcss <- sass <- less.
// The less-loader customises Ant Design theme variables.
{
test: /\.(css|scss|less)$/,
exclude: /node_modules/,
use: [
"style-loader",
"css-loader",
"postcss-loader",
"sass-loader",
{
loader: "less-loader",
options: {
lessOptions: {
modifyVars: {
"primary-color": "#fd683e",
"link-color": "#fd683e",
"border-radius-base": "2px",
},
javascriptEnabled: true,
},
},
},
],
},
// Emit image files referenced via require()/import.
{
test: /\.(png|jpe?g|gif)$/i,
use: [
{
loader: "file-loader",
},
],
},
],
},
resolve: {
extensions: [".tsx", ".ts", ".js"],
// Path aliases matching the tsconfig "@shared"/"@modules" mappings.
alias: {
"@shared": path.resolve(__dirname, "../src/@shared/"),
"@modules": path.resolve(__dirname, "../src/@modules/"),
},
},
plugins: [
// Inject the built bundles into the HTML template.
new HtmlWebpackPlugin({
template: "./public/index.html",
}),
],
}
<file_sep>/packages/auth/src/@shared/utils/index.ts
export * from "./util-function"
export * from "./util-jsx"
<file_sep>/packages/container/src/@shared/config/axios/core-axios-instantance.ts
import { ENV } from "src/ENV"
import axios from "axios"
import { message } from "antd"
import { storage } from "@shared/utils"
// Default headers sent with every request.
// NOTE(review): X-Request-ID / X-Client-Version are hard-coded numbers —
// presumably placeholders; confirm against the backend contract.
const headers: any = {
  "Content-Type": "application/json",
  "X-Request-ID": 124,
  "X-Country-Code": "BD",
  "X-Client-Name": "CRM",
  "X-Client-Version": 123,
  Authorization: `Bearer ${storage?.getToken()}`,
}

/** Axios instance for the core API (60-second timeout). */
export const CoreAxiosInstance = axios.create({
  baseURL: ENV.CORE_END_POINT,
  timeout: 60000,
  headers,
})

// Refresh the bearer token on every request — the header captured above is
// only the token value at module-load time.
CoreAxiosInstance.interceptors.request.use(
  (config: any) => {
    config.headers["Authorization"] = `Bearer ${storage?.getToken()}`
    return config
  },
  (error: any) => {
    return Promise.reject(error)
  }
)

CoreAxiosInstance.interceptors.response.use(
  (response: any) => {
    return response
  },
  (error: any) => {
    if (error?.response?.status === 401) {
      // Session expired: drop local state and bounce to the login page.
      storage.clear()
      window.location.assign(window.location.origin as unknown as string)
    } else if (error.response?.data?.success === false) {
      error.response?.data?.errorMessages?.map((x: string) => {
        return message.error(x)
      })
    }
    // BUG FIX: returning `error` from a rejection handler turns every failed
    // request into a *fulfilled* promise, so callers' .then() received the
    // error object as if it were a response; re-reject to keep it a failure.
    return Promise.reject(error)
  }
)
<file_sep>/packages/container/src/@shared/config/index.ts
export * from "./axios/core-axios-instantance"
export * from "./react-query/react-query"
export * from "./redux/rootReducer"
export * from "./redux/store"
<file_sep>/packages/container/src/@shared/assets/index.ts
// Static image assets for the container shell.
// NOTE(review): `logo` points at the placehold.it placeholder service —
// presumably temporary; replace with a real asset before release.
export const IMAGES = {
  textureBg: require("./images/auth-bg.jpg").default,
  logo: "http://placehold.it/50x50",
}
|
971c9bff05861feb4406125f7f4dcf467179da5a
|
[
"JavaScript",
"TypeScript"
] | 8
|
TypeScript
|
Jobayerdev/micro-front-end
|
c1b082fde0e6e6519ce8e4b8497ab24db3cbcc4a
|
147fb373507f10d2c3f6ea96d53d93bcd8911ee7
|
refs/heads/master
|
<repo_name>alexandermikuta/DragDrop-POC<file_sep>/poc/apps/frontend/src/app/tree-view/tree-view.component.ts
// tslint:disable: member-ordering
import {
ChangeDetectionStrategy,
Component,
Input,
OnInit
} from '@angular/core';
import { FlatTreeControl } from '@angular/cdk/tree';
import {
MatTreeFlatDataSource,
MatTreeFlattener
} from '@angular/material/tree';
import { CdkDragDrop, CdkDrag, CdkDropList } from '@angular/cdk/drag-drop';
import { DOCUMENT } from '@angular/common';
/** Flattened tree node consumed by the MatTree flat data source. */
interface ExampleFlatNode {
  expandable: boolean; // true when the source node has children
  name: string;        // display label
  level: number;       // depth in the tree (0 = root)
}
// Tree view built on Angular Material's flat-tree widgets, with CDK
// drag & drop support for moving test cases and user stories between
// containers. OnPush change detection: inputs are treated as immutable.
@Component({
  selector: 'poc-tree-view',
  templateUrl: './tree-view.component.html',
  styleUrls: ['./tree-view.component.css'],
  changeDetection: ChangeDetectionStrategy.OnPush
})
export class TreeViewComponent implements OnInit {
  // Hierarchical source data; untyped — presumably nodes carry `name`,
  // `type` and optional `children` (see _transformer) — TODO confirm.
  @Input() treeData: any;

  // Maps a nested source node to the flat node the tree control consumes.
  private _transformer = (node: any, level: number) => {
    return {
      expandable: !!node.children && node.children.length > 0,
      name: node.name,
      level: level,
      nodeData: node // keep a reference to the original node for drag/drop
    };
  };

  treeControl = new FlatTreeControl<ExampleFlatNode>(
    node => node.level,
    node => node.expandable
  );

  treeFlattener = new MatTreeFlattener(
    this._transformer,
    node => node.level,
    node => node.expandable,
    node => node.children
  );

  dataSource = new MatTreeFlatDataSource(this.treeControl, this.treeFlattener);

  // Drop handler: currently only logs the source/target container ids.
  public dropped(event: CdkDragDrop<string[]>) {
    console.table({ from: event.previousContainer.id, to: event.container.id });
  }

  constructor() {}

  ngOnInit() {
    this.dataSource.data = this.treeData;
  }

  // True when the flat node can be expanded (has children).
  hasChild = (_: number, node: ExampleFlatNode) => node.expandable;

  // Highlights every container that may legally receive the dragged node by
  // adding a CSS class to elements tagged with matching data-type attributes.
  public startDragDrop(nodeType: string) {
    if (nodeType === 'testcase') {
      const freetestcaseNodes = document.querySelectorAll('[data-type="freetestcases"]');
      freetestcaseNodes.forEach(node => node.classList.add('possibleTargetContainer'));
      const userstoryNodes = document.querySelectorAll('[data-type="userstory"]');
      userstoryNodes.forEach(node => node.classList.add('possibleTargetContainer'));
    }
    if (nodeType === 'userstory') {
      const freeuserstoryNodes = document.querySelectorAll('[data-type="freeuserstories"]');
      freeuserstoryNodes.forEach(node => node.classList.add('possibleTargetContainer'));
      const epicNodes = document.querySelectorAll('[data-type="epic"]');
      epicNodes.forEach(node => node.classList.add('possibleTargetContainer'));
    }
  }

  // Removes all drag-target highlights added by startDragDrop().
  public endDragDrop() {
    document.querySelectorAll('.possibleTargetContainer').forEach(node => node.classList.remove('possibleTargetContainer'));
  }

  // Drop predicate: user stories may land on epics or the free-user-story
  // area; test cases on user stories or the free-test-case area.
  public checkDrop(drag?: CdkDrag, drop?: CdkDropList) {
    if (!drag || !drop) {
      return false;
    }
    const dragType = drag.data.nodeData.type;
    // NOTE(review): CdkDropList.id is declared as a string; indexing it with
    // 'type' only works if an object is bound to [id] in the template —
    // confirm this is intended.
    const dropType = drop.id['type'];
    if (
      dragType === 'userstory' &&
      (dropType === 'epic' || dropType === 'freeuserstories')
    ) {
      return true;
    }
    if (
      dragType === 'testcase' &&
      (dropType === 'userstory' || dropType === 'freetestcases')
    ) {
      return true;
    }
    return false;
  }
}
|
681f223be4ad2756675705d2ea95f08a9a804e1b
|
[
"TypeScript"
] | 1
|
TypeScript
|
alexandermikuta/DragDrop-POC
|
8d00635ef070d4fa858ac2cb058f10d0e1595213
|
87fecab0372ca8205521524123e297de7d802fd5
|
refs/heads/master
|
<repo_name>Esben-code/V1code<file_sep>/V1/VS-Code/HCAnderson/README.md
# <NAME> info webside
Den første hjemmeside, jeg lavede i V1.
Brugt til læring om HTML, HTML-tags og semantiske HTML-tags.
Siden blev også brugt til at undervise i at skabe et GitHub-repository

<file_sep>/V1/VS-Code/Kogebog/bagvaerk.html
<!DOCTYPE html>
<html lang="da">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <link href="stylesheets/style.css" rel="stylesheet" type="text/css">
    <!-- FIX: page content is Danish (lang="da"), and the editor-default
         title "Document" is replaced with a page-specific one. -->
    <title>Bagværk</title>
</head>
<body>
<header>
<nav>
<a href="index.html">Startside </a>
<a href="aftensmad.html">Opskrifter til Aftensmad</a>
<a href="bagvaerk.html">Opskrifter til Bagværk</a>
<a href="dessert.html">Opskrifter til Desserter</a>
</nav>
</header>
<main>
<h1>Bagværk</h1>
<section>
<article>
<h2>Blåbærmuffins</h2>
<img src="img/blaabaermuffins.jpg" alt="Blåbærmuffins på et fad">
<p>12 stk.</p>
<h3>Ingredienser</h3>
<ul>
<li>100 g smør, blødt</li>
<li>100 g rørsukker</li>
<li>75 g brun farin</li>
<li>3 æg</li>
<li>175 g hvedemel</li>
<li>50 g hvedemel, fuldkorn</li>
<li>2 tsk bagepulver</li>
<li>1 tsk kanel</li>
<li>1/2 tsk vaniljepulver</li>
<li>1/2 tsk salt</li>
<li>225 g blåbær, frosne</li>
</ul>
<p>Tid: 35 minutter</p>
<h3>Fremgangsmåde</h3>
<p>Pisk smør, rørsukker og brun farin sammen til det er luftigt. Pisk derefter æggene i lidt efter lidt.</p>
<p>I en skål for sig vendes mel, fuldkornsmel, bagepulver, kanel, vaniljepulver og salt godt sammen. Vend til sidst blåbær i melblandingen og rør derefter melblandingen sammen med den sammenpiskede æg, sukker og smørblanding.</p>
<p>Fordel dejen i 12 muffinsforme og bag i en forvarmet ovn ved 175 grader varmluft i 22-25 minutter.</p>
</article>
<article>
<h2>Græskarkage</h2>
<img src="img/graeskarkage.jpg" alt="Græskarkage på fad, Græskar">
<p>1 kage</p>
<h3>Ingredienser</h3>
<ul>
<li>300 g hokkaido græskar, groftrevet</li>
<li>175 g smør, stuetempereret</li>
<li>100 g sukker</li>
<li>75 g brun farin</li>
<li>175 g hvedemel</li>
<li>3 æg</li>
<li>2 tsk bagepulver</li>
<li>1 tsk kanel</li>
<li>1/2 tsk kardemomme, stødt</li>
<li>1 knivspids nellike, stødt</li>
<li>1/2 tsk salt</li>
<li>Frosting</li>
<li>200 g flødeost, naturel</li>
<li>3 spsk ahornsirup</li>
<li>1 øko appelsin, fintrevet skal</li>
<li>50 g valnødder, hakkede</li>
<li>30 g pistaciekerner, hakkede</li>
</ul>
<p>Tid: 60 minutter</p>
<h3>Fremgangsmåde</h3>
<p>Pisk smør, sukker og brun farin sammen, rør æg og groftrevet græskar i dejen. I en anden skål blandes mel, bagepulver, krydderier og salt.</p>
<p>Rør melblandingen i skålen med smør, sukker, græskar og æg-blandingen.</p>
<p>Fordel dejen i en smurt springform (ca. 24 cm i diameter), evt. beklædt med bagepapir og bag græskarkagen i en forvarmet ovn ved 175 grader varmluft i cirka 35-40 minutter.</p>
<p>Græskarkagen køles helt af, inden den pyntes med frosting.</p>
<h3>Frosting</h3>
<p>Rør flødeost med ahornsirup.</p>
<p>Smør frostingen på kagen, drys med appelsinskal, grofthakkede valnødder og pistaciekerner. Dryp en smule ahornsirup over, inden servering.</p>
<p>Opskriften er til 6 personer</p>
</article>
<article>
<h2>Kardemommekage</h2>
<img src="img/kardemommekage.jpg" alt="Kardemommekage">
<p>1 kage</p>
<h3>Ingredienser</h3>
<ul>
<li>220 g rørsukker</li>
<li>150 g smør, blødt</li>
<li>200 g marcipan</li>
<li>4 æg</li>
<li>225 g hvedemel</li>
<li>2 tsk bagepulver</li>
<li>2 tsk kardemomme, stødt</li>
<li>1 tsk vaniljepulver</li>
<li>Glasur og pynt</li>
<li>200 g flødeost, naturel</li>
<li>150 g flormelis</li>
<li>1 spsk kakaonibs</li>
<li>1 spsk pistaciekerner</li>
</ul>
<p>Tid: 1 time og 15 minutter</p>
<h3>Fremgangsmåde</h3>
<p>Pisk smør og sukker sammen til en jævn masse. Skær marcipanen i tern og pisk den i sammen med æg til blandingen er jævn.</p>
<p>Rør mel, kardemomme, vaniljepulver og bagepulver sammen i en ren skål, og rør melblandingen i dejen.</p>
<p>Fordel dejen i en smurt springform, min er ca 22 cm i diameter.</p>
<p>Bag kagen i en forvarmet ovn ved 175 grader varmluft i cirka 50-60 minutter.</p>
<p>Opskriften er til 8 personer</p>
</article>
</section>
</main>
<footer>
<nav>
<a href="index.html">Startside </a>
<a href="aftensmad.html">Opskrifter til Aftensmad</a>
<a href="bagvaerk.html">Opskrifter til Bagværk</a>
<a href="dessert.html">Opskrifter til Desserter</a>
</nav>
</footer>
</body>
</html><file_sep>/V2/JavaScript Array Opgave/js/script.js
var bank = 'nordea' //ids = jysk, dansk, nordea
var jyskeBank =
{
bankNavn: 'Jyske Bank',
bankAdr: 'Skattelyvej 1',
bankZip: '2222',
bankBy: 'Solsiden',
bankTlfNr: '+45 1234 1234',
bankMail: '<EMAIL>',
bankSender: '<NAME>',
bankSenderTitle: 'Administrerende direktør',
bankImg: 'img/logo_jb.png',
bankImgAltTxt: 'Jyske Bank Logo'
}
var nordea =
{
bankNavn: 'Nordea',
bankAdr: 'Købtgade 6',
bankZip: '1548',
bankBy: 'Gåserød',
bankTlfNr: '+45 8484 4848',
bankMail: '<EMAIL>',
bankSender: '<NAME>',
bankSenderTitle: 'CCO',
bankImg: 'img/logo_nd.png',
bankImgAltTxt: 'Nordea Logo'
}
var danskeBank =
{
bankNavn: 'Danske Bank',
bankAdr: 'Tæppetovet 7',
bankZip: '2000',
bankBy: 'Pjort',
bankTlfNr: '+45 7777 7778',
bankMail: '<EMAIL>',
bankSender: '<NAME>',
bankSenderTitle: 'Administrerende direktør',
bankImg: 'img/logo_db.png',
bankImgAltTxt: 'Danske Bank Logo'
}
var firmanavn = document.querySelectorAll('.bankNavn');
var firmaadresse = document.querySelectorAll('.bankAdr');
var firmapost = document.querySelectorAll('.bankZip');
var firmaby = document.querySelectorAll('.bankBy');
var firmatlf = document.querySelectorAll('.bankTlfNr');
var firmamail = document.querySelectorAll('.bankMail');
var firmalogo = document.querySelectorAll('.bankImg');
var firmasender = document.querySelectorAll('.bankSender')
var firmatitle = document.querySelectorAll('.bankSenderTitle')
firmanavn.forEach(udskiftBankNavn)
firmaadresse.forEach(udskiftBankAdr)
firmaby.forEach(udskiftBankBy)
firmamail.forEach(udskiftBankMail)
firmapost.forEach(udskiftBankZip)
firmasender.forEach(udskiftBankSender)
firmatitle.forEach(udskiftBankSenderTitle)
firmatlf.forEach(udskiftBankTlfNr)
firmalogo.forEach(udskiftBankImg)
// Replaces the element's content with the selected bank's name.
// Reads the globals `bank`, `jyskeBank`, `danskeBank` and `nordea`
// declared above; an unknown selector leaves the element untouched.
function udskiftBankNavn(element)
{
    switch (bank)
    {
        case "jysk":
            element.innerHTML = jyskeBank.bankNavn;
            break;
        case "dansk":
            element.innerHTML = danskeBank.bankNavn;
            break;
        case "nordea":
            element.innerHTML = nordea.bankNavn;
            break;
    }
}
// Replaces the element's content with the selected bank's street address.
// Reads the globals `bank`, `jyskeBank`, `danskeBank` and `nordea`;
// an unknown selector leaves the element untouched.
function udskiftBankAdr(element)
{
    switch (bank)
    {
        case "jysk":
            element.innerHTML = jyskeBank.bankAdr;
            break;
        case "dansk":
            element.innerHTML = danskeBank.bankAdr;
            break;
        case "nordea":
            element.innerHTML = nordea.bankAdr;
            break;
    }
}
// Replaces the element's content with the selected bank's postal code.
// Reads the globals `bank`, `jyskeBank`, `danskeBank` and `nordea`;
// an unknown selector leaves the element untouched.
function udskiftBankZip(element)
{
    switch (bank)
    {
        case "jysk":
            element.innerHTML = jyskeBank.bankZip;
            break;
        case "dansk":
            element.innerHTML = danskeBank.bankZip;
            break;
        case "nordea":
            element.innerHTML = nordea.bankZip;
            break;
    }
}
// Replaces the element's content with the selected bank's city.
// Reads the globals `bank`, `jyskeBank`, `danskeBank` and `nordea`;
// an unknown selector leaves the element untouched.
function udskiftBankBy(element)
{
    switch (bank)
    {
        case "jysk":
            element.innerHTML = jyskeBank.bankBy;
            break;
        case "dansk":
            element.innerHTML = danskeBank.bankBy;
            break;
        case "nordea":
            element.innerHTML = nordea.bankBy;
            break;
    }
}
// Replaces the element's content with the selected bank's phone number.
// Reads the globals `bank`, `jyskeBank`, `danskeBank` and `nordea`;
// an unknown selector leaves the element untouched.
function udskiftBankTlfNr(element)
{
    switch (bank)
    {
        case "jysk":
            element.innerHTML = jyskeBank.bankTlfNr;
            break;
        case "dansk":
            element.innerHTML = danskeBank.bankTlfNr;
            break;
        case "nordea":
            element.innerHTML = nordea.bankTlfNr;
            break;
    }
}
// Replaces the element's content with the selected bank's e-mail address.
// Reads the globals `bank`, `jyskeBank`, `danskeBank` and `nordea`;
// an unknown selector leaves the element untouched.
function udskiftBankMail(element)
{
    switch (bank)
    {
        case "jysk":
            element.innerHTML = jyskeBank.bankMail;
            break;
        case "dansk":
            element.innerHTML = danskeBank.bankMail;
            break;
        case "nordea":
            element.innerHTML = nordea.bankMail;
            break;
    }
}
// Replaces the element's content with the selected bank's sender name.
// Reads the globals `bank`, `jyskeBank`, `danskeBank` and `nordea`;
// an unknown selector leaves the element untouched.
function udskiftBankSender(element)
{
    switch (bank)
    {
        case "jysk":
            element.innerHTML = jyskeBank.bankSender;
            break;
        case "dansk":
            element.innerHTML = danskeBank.bankSender;
            break;
        case "nordea":
            element.innerHTML = nordea.bankSender;
            break;
    }
}
// Replaces the element's content with the selected bank's sender title.
// Reads the globals `bank`, `jyskeBank`, `danskeBank` and `nordea`;
// an unknown selector leaves the element untouched.
function udskiftBankSenderTitle(element)
{
    switch (bank)
    {
        case "jysk":
            element.innerHTML = jyskeBank.bankSenderTitle;
            break;
        case "dansk":
            element.innerHTML = danskeBank.bankSenderTitle;
            break;
        case "nordea":
            element.innerHTML = nordea.bankSenderTitle;
            break;
    }
}
function udskiftBankImg(element)
{
if(bank == "jysk")
{
element.setAttribute("src", jyskeBank.bankImg);
element.setAttribute("alt", jyskeBank.bankImgAltTxt);
}
else if(bank == "dansk")
{
element.setAttribute("src", danskeBank.bankImg);
element.setAttribute("alt", danskeBank.bankImgAltTxt);
}
else if(bank == "nordea")
{
element.setAttribute("src", nordea.bankImg);
element.setAttribute("alt", nordea.bankImgAltTxt);
}
}<file_sep>/V2/JavaScript Event Opgaver/js/script.js
"use strict"
let i = 0; //this counter is important for the CreatBreakLine() function
// Appends a <p> full of slashes to <main> as a visual section divider.
// Increments the module-global counter `i` (declared above) and stashes both
// the paragraph and its text node on `window` under numbered keys
// (breakerLine1, breakerTxt1, ...). NOTE(review): nothing visible reads these
// window globals back — confirm they are actually needed.
function CreateBreakerLine() //this function makes p tag with lines of slashes
{
    i++;
    document.querySelector('main').appendChild(window['breakerLine'+i] = document.createElement('p'));
    window['breakerLine'+i].appendChild(window['breakerTxt'+i] = document.createTextNode("//////////////////////////////////////"));
}
////////////////////////////////////////////////////////////
document.querySelector("button").addEventListener("click", () => {console.dir("hello world"); alert("Hello world!")});
////////////////////////////////////////////////////////////
document.querySelectorAll("li").forEach(x => { x.addEventListener("click", () => {console.dir(x.innerHTML); alert(x.innerHTML)})});
////////////////////////////////////////////////////////////
document.querySelector(".colordiv").addEventListener("click", () => { document.querySelector(".colordiv").style.backgroundColor = "yellow" });
////////////////////////////////////////////////////////////
let formPlus = document.createElement('form');
let input1 = document.createElement('input');
let input2 = document.createElement('input');
let input3 = document.createElement('input');
document.querySelector('main').appendChild(formPlus);
formPlus.appendChild(input1).setAttribute("id", "number1");
formPlus.appendChild(input2).setAttribute("id", "number2");
formPlus.appendChild(input3).setAttribute("id", "submitPlus");
formPlus.querySelector('#number1').setAttribute('type', "number");
formPlus.querySelector('#number2').setAttribute('type', "number");
formPlus.querySelector('#submitPlus').setAttribute('type', "submit");
formPlus.querySelector('#submitPlus').addEventListener("click",
(x) => {
x.preventDefault();
let nr1 = Number(formPlus.querySelector('#number1').value);
console.log(nr1);
let nr2 = Number(formPlus.querySelector('#number2').value);
console.log(nr2);
alert(nr1 + nr2);
})
CreateBreakerLine(); //calls the function.
////////////////////////////////////////////////////////////
let formMoms = document.createElement('form');
let inputMoms1 = document.createElement('input');
let inputMoms2 = document.createElement('input');
document.querySelector('main').appendChild(formMoms);
formMoms.appendChild(inputMoms1).setAttribute("id", "numberMoms1");
formMoms.appendChild(inputMoms2).setAttribute("id", "submitMoms");
formMoms.querySelector('#numberMoms1').setAttribute('type', "number");
formMoms.querySelector('#submitMoms').setAttribute('type', "submit");
formMoms.querySelector('#submitMoms').addEventListener("click",
(x) => {
x.preventDefault();
let baseNr = Number(formMoms.querySelector('#numberMoms1').value);
let moms = Number(baseNr * 0.25);
let discountedNr = Number(baseNr * 0.9);
console.log('Base price: ' + baseNr);
console.log('Moms: ' + moms);
console.log('Discounted: ' + discountedNr);
console.log('total price: ' + (discountedNr + moms));
});
CreateBreakerLine();
////////////////////////////////////////////////////////////
let formMail = document.createElement('form');
let inputMailTxt = document.createElement('input');
let inputMailSubmit = document.createElement('input');
document.querySelector('main').appendChild(formMail);
formMail.appendChild(inputMailTxt).setAttribute("id", "inputMailTxt");
formMail.appendChild(inputMailSubmit).setAttribute("id", "inputSubmitMail");
formMail.querySelector('#inputMailTxt').setAttribute('type', "text");
formMail.querySelector('#inputSubmitMail').setAttribute('type', "submit");
formMail.querySelector('#inputSubmitMail').addEventListener(
"click", (x) => {
x.preventDefault();
let mail = formMail.querySelector('#inputMailTxt').value;
let re = /\w{2,99}@.+\..+/
console.log(mail);
console.log(re);
if (mail.search(re) == -1) {
alert('This mail is NOT valid.');
}
else
{
alert('This mail is valid.');
}
});
CreateBreakerLine();
////////////////////////////////////////////////////////////
let gal = document.querySelector("#gallery")
gal.addEventListener(
"click", (e) => {
e.preventDefault();
e.target
});
<file_sep>/V1/VS-Code/fantasybooks/sh-hpofo.html
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link href="https://fonts.googleapis.com/css2?family=Fira+Sans&family=Lato&family=Roboto+Slab:wght@500&display=swap" rel="stylesheet">
<link href="css/style.css" rel="stylesheet" type="text/css">
<title>Fantasybooks</title>
</head>
<body>
<header>
<nav>
<ul>
<li><a href="index.html">Forside</a></li>
<li><a href="secondhand.html">Secondhand Store</a></li>
<li><a href="bogklub.html">Bogklub</a></li>
<li><a href="top10.html">Top 10 Lister</a></li>
<li><a href="kontakt.html">Kontakt Os</a></li>
</ul>
</nav>
</header>
<main>
<h1></h1>
<section>
<article>
<NAME> og Fønixordenen
Format: Bog, indbundet
Sprog: Dansk
Sidetal: 246
Forfattere: <NAME>
Første salgsdato: 19-05-2016
ISBN13: 9788702173260
Pris: 75,-
Hermione og Ron har været meget hemmelighedsfulde i løbet af sommeren og det har undret Harry. Da sommerferien lakker mod enden finder han dog ud af hvad det har handlet om, og det 5. år bliver bestemt ikke let. Alt har ændret sig og Voldemort bliver stærkere og stærkere.
<NAME> står bag "<NAME> og Fønixordenen". Harry og Dumbledore gør hvad de kan for at overbevise Ministeret for Magi om at Voldemort er tilbage, men Ministeret gør alt for at få dem til at tie stille. En større katastrofe truer snart skolen, Dumbledore og ikke mindst Harry og hans venner.
I "<NAME> og Fønixordenen" ændrer alt sig.
</article>
</section>
</main>
<footer>
<span class="footer-left">
<p>Egelykke 3, st. <br> 8000 Århus C. <br> Tlf: 12 34 56 78 <br> Mail: <EMAIL></p>
</span>
<span class="footer-center">
<p>Åbningstider <br> Mandag - fredag: 10:00 - 18:00 <br> Lørdag: 10:00 - 13:00</p>
</span>
<span class="footer-right">
<p>Fantasybooks på de sociale medier:</p>
<a href="https://www.facebook.com"><img src="img/FacebookIcon.png" alt="Fantasybooks facebook side"></a>
<a href="https://www.instagram.com"><img src="img/InstagramIcon.png" alt="Fantasybooks Instagram side"></a>
<a href="https://www.twitter.com"><img src="img/TwitterIcon.png" alt="Fantasybooks twitter side"></a>
</span>
</footer>
</body>
</html><file_sep>/V2/JavaScript Menu/menuScript.js
var menu = {
'home': 'home.html',
'products': 'products.html',
'pricing': 'pricing.html',
'vision': 'vision.html',
'about': 'about.html',
'contact': 'contact.html'
}
var nav = document.createElement('nav');
var ul = document.createElement('ul');
document.querySelector('body').appendChild(nav);
nav.appendChild(ul);
// Build one <li><a> menu entry per page in the `menu` map declared above.
// FIX: `key` was assigned without a declaration, creating an implicit global
// (and a ReferenceError under strict mode) — declare it with `var`.
for (var key in menu) {
    var li = document.createElement('li');
    var a = document.createElement('a');
    var txt = document.createTextNode(key);
    var link = menu[key];
    ul.appendChild(li);
    li.appendChild(a);
    a.appendChild(txt);
    a.href = link;
    a.classList.add('menuLinks');
    li.classList.add('menuListItems');
}
nav.classList.add('menu')
ul.classList.add('menuUl')
// var nav = document.createElement('nav');
// var ul = document.createElement('ul');
// var homeli = document.createElement('li');
// var homea = document.createElement('a');
// var productsli = document.createElement('li');
// var productsa = document.createElement('a');
// var pricingli = document.createElement('li');
// var pricinga = document.createElement('a');
// var visionli = document.createElement('li');
// var visiona = document.createElement('a');
// var aboutli = document.createElement('li');
// var abouta = document.createElement('a');
// var contactli = document.createElement('li');
// var contacta = document.createElement('a');
// // var li = document.createElement('li');
// // var a = document.createElement('a');
// var hometxt = document.createTextNode('Home');
// var productstxt = document.createTextNode('Products');
// var pricingtxt = document.createTextNode('Pricing');
// var visiontxt = document.createTextNode('Vision');
// var abouttxt = document.createTextNode('About');
// var contacttxt = document.createTextNode('Contact');
<file_sep>/V2/Array-letter-løsning/array-letter.js
var bank = "jb"; // Vælg hvilken bank brevet skal være fra. Valgmuligheder er jb for Jyske Bank, nb for Nordea Bank og db for Danske Bank
// Tre associative arrays med bank-informationer, som kan skiftes ud i brevet
var jyskeBank = {firmanavn: "Jyske Bank", firmaadresse: "Skattelyvej 1", firmapost: "2222", firmaby: "Solsiden", firmatlf: "+45 1234 1234", firmamail: "<EMAIL>", firmalogo: "logo_jb.png"};
var danskeBank = {firmanavn: "Danske Bank", firmaadresse: "Mangepengevej 5", firmapost: "3333", firmaby: "Storby", firmatlf: "+45 3333 5566", firmamail: "<EMAIL>", firmalogo: "logo_db.png"};
var nordeaBank = {firmanavn: "Nordea Bank", firmaadresse: "Bankvej 34", firmapost: "4444", firmaby: "Lilleby", firmatlf: "+45 1234 5678", firmamail: "<EMAIL>", firmalogo: "logo_nd.png"};
// Associative array med kundeinformationer (til fremtidig brug)
var kunde = {kundefornavn: "Anders", kundeefternavn: "And", kundeadresse: "Paradisæblevej 111", kundepostby: "1000 Andeby"};
// Hent HTML-elementer med klasserne firmanavn, firmaadresse, firmapost, firmaby, firmatlf, firmamail og firmalogo
var firmanavn = document.querySelectorAll('.firmanavn');
console.log(firmanavn); // Udskriv alle de hentede elementer, som har klassen "firmanavn"
var firmaadresse = document.querySelectorAll('.firmaadresse');
var firmapost = document.querySelectorAll('.firmapost');
var firmaby = document.querySelectorAll('.firmaby');
var firmatlf = document.querySelectorAll('.firmatlf');
var firmamail = document.querySelectorAll('.firmamail');
var firmalogo = document.querySelectorAll('.firmalogo');
console.log(firmalogo);
// På hver af de nodelists (arrays), som bliver dannet med de forskellige elementer fra forskellige klasser, skal køres den tilsvarende callback funktion (en callback-function er en funktion, der bliver sendt som argument for en anden funktion/metode)
firmanavn.forEach(udskiftFirmaNavn); // For hvert element i klassen firmanavn, skal køres callback funktionen udskiftFirmanavn. Det aktuelle HTML-element bliver overført til callback-funktionen som argument (så hvis første element, som har klassen firmanavn er et p-element, så bliver det overført til callback-funktionen)
firmaadresse.forEach(udskiftFirmaAdresse);
firmapost.forEach(udskiftFirmaPost);
firmaby.forEach(udskiftFirmaBy);
firmatlf.forEach(udskiftFirmaTlf);
firmamail.forEach(udskiftFirmaMail);
firmalogo.forEach(udskiftFirmaLogo);
// callback-funktionen udskiftFirmaNavn, som modtager det aktuelle HTML-element som argument og herefter ændrer innerHTML (som er teksten mellem start og slut-tag) til teksten fra det relevante associative array på index firmanavn
function udskiftFirmaNavn(element)
{
if(bank == "jb")
{
element.innerHTML = jyskeBank.firmanavn;
}
else if(bank == "db")
{
element.innerHTML = danskeBank.firmanavn;
}
else if(bank == "nb")
{
element.innerHTML = nordeaBank.firmanavn;
}
}
function udskiftFirmaAdresse(element)
{
if(bank == "jb")
{
element.innerHTML = jyskeBank.firmaadresse;
}
else if(bank == "db")
{
element.innerHTML = danskeBank.firmaadresse;
}
else if(bank == "nb")
{
element.innerHTML = nordeaBank.firmaadresse;
}
}
function udskiftFirmaPost(element)
{
if(bank == "jb")
{
element.innerHTML = jyskeBank.firmapost;
}
else if(bank == "db")
{
element.innerHTML = danskeBank.firmapost;
}
else if(bank == "nb")
{
element.innerHTML = nordeaBank.firmapost;
}
}
function udskiftFirmaBy(element)
{
if(bank == "jb")
{
element.innerHTML = jyskeBank.firmaby;
}
else if(bank == "db")
{
element.innerHTML = danskeBank.firmaby;
}
else if(bank == "nb")
{
element.innerHTML = nordeaBank.firmaby;
}
}
function udskiftFirmaTlf(element)
{
if(bank == "jb")
{
element.innerHTML = jyskeBank.firmatlf;
}
else if(bank == "db")
{
element.innerHTML = danskeBank.firmatlf;
}
else if(bank == "nb")
{
element.innerHTML = nordeaBank.firmatlf;
}
}
function udskiftFirmaMail(element)
{
if(bank == "jb")
{
element.innerHTML = jyskeBank.firmamail;
}
else if(bank == "db")
{
element.innerHTML = danskeBank.firmamail;
}
else if(bank == "nb")
{
element.innerHTML = nordeaBank.firmamail;
}
}
// Funktionen udskiftFirmaLogo ændrer ikke i innerHTML som de øvrige callback-funktioner. Den ændrer i de to attributter src og alt, så der bliver vist det korrekte billede og alt-tekst afhængigt af, hvilken bank, der er valgt i linje 1
function udskiftFirmaLogo(element)
{
if(bank == "jb")
{
element.setAttribute("src", jyskeBank.firmalogo);
element.setAttribute("alt", "Jyske Bank logo");
}
else if(bank == "db")
{
element.setAttribute("src", danskeBank.firmalogo);
element.setAttribute("alt", "Danske Bank logo");
}
else if(bank == "nb")
{
element.setAttribute("src", nordeaBank.firmalogo);
element.setAttribute("alt", "Nordea Bank logo");
}
}<file_sep>/V2/V2 Eksamensopgave - Kopi/NikeProShop/Kontakt.html
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="keywords" content="Nike, Nike Pro Shop, Sneakers">
<link href="https://fonts.googleapis.com/css2?family=Dela+Gothic+One&family=Montserrat&family=Open+Sans&display=swap" rel="stylesheet">
<link rel="stylesheet" href="css/style.css" type="text/css">
<title>Kontakt Os</title>
</head>
<body>
<header>
<a href="Index.html"><img src="img/Nike-logo-edges.png" alt="Nike Logo"></a>
<nav>
<!-- Checkbox til at lave "checkbox tricket" - se forklaring i teksten nedenfor -->
<input class="menu-btn" type="checkbox">
<!-- Hamburger menu ikon -->
<img src="img/bars-solid.svg" alt="hamburger menu icon" class="menuicon">
<!-- Luk hamburger menu ikon -->
<img src="img/times-solid.svg" alt="hamburger close icon" class="closeicon">
<ul>
<li>
<a href="NikeHistorie.html">Nikes Historie</a>
</li>
<li>
<a href="About.html">Om Nike Pro Shop</a>
</li>
<li>
<a href="Kontakt.html">Kontakt Os</a>
</li>
</ul>
</nav>
</header>
<main>
<section>
<article><p>Kundeservice
Vi sidder klar ved telefonen samt chatsupport alle ugens dage, hvis du skulle have brug for hjælp.
Vores åbningstider på telefonen er:
Mandag-Fredag: 9-18
Lørdag-Søndag: 10-14
Har du brug for vores hjælp uden for dette tidsrum, er du meget velkommen til at sende os en mail eller efterlade en besked i den røde boks nede i højre hjørne. Vi glæder os til at høre fra dig.
Du kan kontakte os på nedenstående telefonnummer i ovenstående tidsrum, eller på mail hvor vi svarer indenfor 24 timer på alle hverdage.
Telefon: (+45) 33 91 18 18
E-mail: <EMAIL>
Fortrydelsesret
Du har 30 dages fortrydelsesret. Det gør sig både gældende ved køb i vores fysiske shops og på vores webshop. Fortrydelsesretten løber fra den dag, du foretager dit køb i butikken eller modtager din webordre.
Bytte- og returregler
• Bytteret på alle varer i op til 180 dage
• Fuld returret på alle varer i op til 30 dage
• Alle varer som er købt online, kan sendes gratis retur med vedlagt returlabel
• Alle varer som er købt online, kan også byttes/returneres i vores fysiske butikker*
*Det er kun muligt at returnere/ombytte en vare i Magasin, hvis den oprindeligt blev købt der.
Gratis retur
Vi vedlægger altid en gratis returlabel i alle vores forsendelser i Danmark, så du helt uden omkostninger kan sende din webordre retur til vores lager, hvis varerne mod forventning ikke skulle passe dig. Du klistrer blot returlabelen på den kasse (ikke den originale skoæske), du modtog varerne i og afleverer pakken hos din lokale pakkeshop eller postbutik.
*Husk at få en kvittering for indleveringen af pakken med fra pakkeshoppen, da det er din dokumentation for, at du har sendt pakken korrekt retur.
</p></article>
</section>
</main>
<footer></footer>
<script src="js/script.js"></script>
</body>
</html><file_sep>/V2/V2 Eksamensopgave - Kopi/NikeProShop/js/script.js
// FIX: the directive must be exactly "use strict" (lowercase) to enable
// strict mode; "Use strict" is an inert string expression. All variables in
// this file are declared with var, so enabling strict mode is safe.
"use strict";
var gallery = document.querySelector(".gallery"); // Hent elementet med klassen "gallery"
console.dir(gallery);
gallery.addEventListener("click", function(e) {
console.log(e); // Udskriv klikeventet i konsollen for at se, hvilke properties man kan arbejde med
if(e.target.tagName == "IMG") // Hvis brugeren har klikket på et element med tagget "IMG"
{
var newSrc = e.target.src.split("-"); // Tag src-atributten fra det element, der er klikket på (billedets sti/navn) og split det ved _ tegnet. Herefter lægges det over i variablet newSrc som et array, hvor fx thumb placeres på index 0 og abril.jpg placeres på index 1
var overlay = document.createElement("div"); // Opret et nyt div-element, som skal bruges som overlay
overlay.classList.add("overlay"); // Tilføj klassen .overlay på det nye div-element
document.body.appendChild(overlay); // Sæt det nye div-element ind i din HTML-kode ved at tilføje det under body-elementet
var newImage = document.createElement("img"); // Opret et nyt img-element
newImage.setAttribute("src", newSrc[0] + "-" + newSrc[1] + ".jpg"); // Giv det nye img-element en src-attribut, som du tager fra array'et, hvor du splittede navnet fra den thumbnail, der blev klikket på (se linje 9)
overlay.appendChild(newImage); // Tilføj det nye img-element til det div-element, som blev oprettet i linje 10
overlay.addEventListener("click", function() // Tilføj en eventListener til det nye div-element for at holde øje med, om brugeren klikker på overlayet
{
if (overlay) // Hvis overlay variablen eksisterer og indeholder noget (der er lavet et overlay)
{
overlay.remove(); // Fjern overlayet igen/slet det div-element, som blev oprettet i linje 10
// overlay.parentNode.removeChild(overlay); - gammel metode, som man er sikker på virker i alle browsere
}
});
}
})
var slideshow = document.querySelector(".slideshow"); // Hent articlen som indeholder slideshowet
var slides = slideshow.querySelectorAll("img"); // Hent alle billeder som er på siden (det kunne også være i slideshow-articlen)
var dots = document.querySelector(".dotArea"); // Hent det område, som dots'ene skal være i
var i = 0; // Start tæller
// Løkke til at oprette det samme antal dots, som jeg har billeder
for (var j = 0; j < slides.length; j++) // For alle de billeder, som ligger på HTML siden
{
var newDot = document.createElement("div"); // Opret en nyt div, som skal blive til en dot
newDot.classList.add("dot"); // Giv den klassen dot
dots.appendChild(newDot); // Tilføj den til dot-området
}
var allDots = document.querySelectorAll(".dot"); // Hent alle de dots, som er blevet oprettet
allDots[0].classList.add("active"); // Sæt den første dot til aktiv
setInterval(slideshowSlider, 5000); // Kald funktionen slideshowSlider med 5000 milisekunders mellemrum
// Move the slideshow one image forward and keep the active dot in sync.
function slideshowSlider()
{
    var previous = i;                          // Slide/dot that is visible right now
    i = (i == slides.length - 1) ? 0 : i + 1;  // Step forward, wrapping after the last slide

    slides[i].classList.add("currentImg");     // Show the new slide (CSS fades opacity to 1)
    allDots[i].classList.add("active");        // Highlight the matching dot

    slides[previous].classList.remove("currentImg"); // Hide the slide we just left
    allDots[previous].classList.remove("active");    // Dim the dot we just left
}
<file_sep>/V1/VS-Code/Kogebog/kylling.html
<!DOCTYPE html>
<html lang="da">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link href="stylesheets/style.css" rel="stylesheet" type="text/css">
    <title>Kylling retter</title>
</head>
<body>
<header>
<nav>
<a href="index.html">Startside </a>
<a href="aftensmad.html">Opskrifter til Aftensmad</a>
<a href="bagvaerk.html">Opskrifter til Bagværk</a>
<a href="dessert.html">Opskrifter til Desserter</a>
</nav>
</header>
<main>
<h1>Kylling retter</h1>
<section>
<article>
<h2>Bagt kylling med kartofler og krydderurter - lørdagskylling deluxe</h2>
<img src="img/bagtkyllingmedkartoflerkrydderurter.jpg" alt="Bagt kylling med kartofler og krydderurter">
<p>4 pers.</p>
<h3>Ingredienser</h3>
<ul>
<li>1 kg kylling, overlår og evt bryst med skind</li>
<li>800 g kartofler, skrubbede</li>
<li>4 gulerødder, skrællede og skiveskåret</li>
<li>1 tsk timian, tørret</li>
<li>1 spsk frisk rosmarin, finthakket</li>
<li>1 dl grøntsagsbouillon</li>
<li>flagesalt</li>
<li>sort peber, friskkværnet</li>
<li>1 spsk smør, til at smøre fadet</li>
</ul>
<p>Tid: 60 minutter</p>
<h3>Fremgangsmåde</h3>
<p>Smør et ovnfast fad grundigt med smør. Skær kartoflerne i halve og kom kartofler, gulerødder og boullion i fadet, sammen med krydderurterne. Vend det godt sammen.</p>
<p>Krydre kyllingestykkerne godt med salt og peber, læg dem øverst i fadet og bag i en forvarmet ovn ved 175 grader varmluft i ca. 45 minutter eller til kartoflerne er møre og kyllingen gennemstegt og med sprødt skind.</p>
<p>Drys med friske krydderurter inden servering og server med en god blandet salat og lækkert brød.</p>
</article>
<article>
<h2>Kyllinge Quesadillas med krydret tomatsauce og spinat</h2>
<img src="img/kyllingequesadillas.jpg" alt="Kyllinge quesadillas">
<p>4 pers.</p>
<h3>Ingredienser</h3>
<ul>
<li>1 løg, finthakket</li>
<li>4 fed hvidløg, finthakket</li>
<li>1 spsk olivenolie</li>
<li>2 kyllingebryst</li>
<li>1 tsk koriander, stødt</li>
<li>1 tsk spidskommen</li>
<li>1/2 dl grøntsagsbouillon</li>
<li>1 dåse hakkede tomater</li>
<li>1/2 rød chili, finthakket (mild/mellem styrke)</li>
<li>salt</li>
<li>sort peber, friskkværnet</li>
</ul>
<h4>Dertil</h4>
<ul>
<li>1 pakke tortillas pandekager, (8 stk)</li>
<li>200 g cheddar ost, friskrevet</li>
<li>125 g baby spinat</li>
</ul>
<h4>Tilbehør</h4>
<ul>
<li>1 avocado</li>
<li>1/2 rød chili, skåret i ringe</li>
<li>cremefraiche 18 %</li>
<li>sriracha chilisauce</li>
<li>2 lime</li>
<li>2 håndfulde frisk koriander</li>
</ul>
<p>Tid: 40 minutter</p>
<h3>Fremgangsmåde</h3>
<h4>Pulled kylling i krydret tomatsauce</h4>
<p>Sauter løg og hvidløg i olivenolie til de er bløde og klare. Tilsæt spidskommen og koriander, som varmes godt igennem, hvorefter kylling, hakkede tomater, chili og grøntsagsboullion tilsættes. Lad det simre uden låg i 30 minutter.</p>
<p>Tag kyllingen op af gryden og riv det møre kyllingekød i trevler med to gafler. Lad imens saucen koge yderligere ind uden låg, til væden fordamper og saucen tyknes.</p>
<p>Vend det møre kyllingekød med saucen.</p>
<h4>Samle Quesadilla</h4>
<p>Smør en tortilla pandekage med kylling og tomatsauce, drys med cheddar og lidt spinat, læg en tortilla pandekager over og steg den på begge sider på panden ved høj varme på begge sider, til pandekagen er lidt sprød og osten er smeltet.</p>
<p>Fortsæt til der er tilberedt 4 quesadillas.</p>
<p>Skær ud i trekanter, eller anret som her i halve med de skønneste toppings.</p>
</article>
<article>
<h2>Kyllingesatay</h2>
<img src="img/kyllingesatay.jpg" alt="Kyllingesatay">
<p>4 pers.</p>
<h3>Ingredienser</h3>
<ul>
<li>400 g kyllingefilet</li>
<li>1 dl kokosmælk</li>
<li>60 g peanuts</li>
<li>1 tsk ingefær, friskrevet</li>
<li>1 fed hvidløg, presset</li>
<li>chiliflager, efter smag</li>
<li>1 tsk soja</li>
</ul>
<p>Tid: 1 time og 15 minutter</p>
<h3>Fremgangsmåde</h3>
<p>Kom kokosmælk, peanuts, ingefær, chiliflager og soya i en blender eller minihakker og kør til peanuts er finthakket.</p>
<p>Skær kyllingefileterne ud i 8 aflange strimler, som vendes i marinaden. Stil på køl i en skål med låg i en time eller natten over.</p>
<p>Sæt kyllingerne på iblødsatte* træspyd. Grill de lækre kyllingesatay spyd i omkring 5 minutter, vend dem undervejs.</p>
<p>Har man ikke en grill kan de også tilberedes ved 175 grader varmluft i ovn i ca 18-20 minutter og på en pande stegt i lidt olie til de er gennemstegt.</p>
<p>*hvis grillspydene er lavet af træ, så læg dem i et fad med vand i en times tid, så de ikke brænder på på grillen.</p>
</article>
</section>
</main>
<footer>
<nav>
<a href="index.html">Startside </a>
<a href="aftensmad.html">Opskrifter til Aftensmad</a>
<a href="bagvaerk.html">Opskrifter til Bagværk</a>
<a href="dessert.html">Opskrifter til Desserter</a>
</nav>
</footer>
</body>
</html><file_sep>/V1/Test/README.md
# Test mappe.
Denne mappe indeholder små testfiler for forskellige formater og andet.
<file_sep>/V1/VS-Code/Undervisning/README.md
# CSS Undervisnings webside
Dette er en webside, som både er et projekt og en forklaring på, hvordan man bruger CSS og HTML.
TL;DR: Grim side, som vil lære dig at gøre den pæn.
<file_sep>/V2/Hanne ImageCarousel/js/js-slideshow.js
var slideshow = document.querySelector(".slideshow"); // The article that holds the slideshow
var slides = slideshow.querySelectorAll("img");       // Every image inside the slideshow article
var dots = document.querySelector(".dotArea");        // Container the indicator dots go into
var i = 0;                                            // Index of the slide currently shown

// Create exactly one indicator dot per slide.
for (var j = 0; j < slides.length; j++)
{
    var dot = document.createElement("div");
    dot.classList.add("dot");
    dots.appendChild(dot);
}

var allDots = document.querySelectorAll(".dot"); // All the dots created above
allDots[0].classList.add("active");              // The first dot starts out highlighted

setInterval(slideshowSlider, 5000); // Advance the slideshow every 5 seconds
function slideshowSlider()
{
if (i == slides.length - 1) // Hvis tælleren er nået til antal billeder
{
i = 0; // Nulstil tæller
}
else
{
i++; // Ellers tæl tæller en op
}
slides[i].classList.add("currentImg"); // Sæt klassen currentImg på det aktuelle billede (sætter opacity til 1)
allDots[i].classList.add("active"); // Sæt klassen active på den aktuelle dot (sætter farven lidt mørkere)
if (i == 0)
{
slides[slides.length - 1].classList.remove("currentImg"); // Hvis tælleren er 0, skal currentImg fjernes fra det sidste billede (det som var currentImg lige før)
allDots[slides.length - 1].classList.remove("active"); // Hvis tælleren er 0, skal active fjernes fra den sidste dot (som var active lige før)
}
else
{
slides[i-1].classList.remove("currentImg"); // Fjern currentImg fra det forrige billede, som var currentImg lige før
allDots[i-1].classList.remove("active"); // Fjern active fra den forrige dot, som var active lige før
}
}<file_sep>/V2/JavaScript Conditions/script.js
var moreThen50 = 73;
if (moreThen50 > 50)
{
console.log('Tallet er større end 50.');
}
var minAge = 23;
if (minAge >= 18)
{
console.log(minAge + ' er større end eller lig med 18.');
}
var firstName = null;
if (firstName == null)
{
console.log('Du glemte at indtaste dit fornavn');
}
var gender = 'male';
if (gender == 'male')
{
console.log('Personen er en mand');
}
else
{
console.log('Personen er en kvinde');
}
var counter = 25;
if (counter =! 50)
{
console.log('Tallet er godkendt');
}
else
{
console.log('Tallet er 50');
}
var myAge = 28;
if (myAge <= 18)
{
console.log('Du er et barn');
}
else if (18 < myAge < 62)
{
console.log('Du er voksen');
}
else
{
console.log('Du er pensionist');
}<file_sep>/V1/README.md
# V1 kode mappe main folder
Alt kode og de projekter, som jeg har lavet i første modul af visualisering.
<file_sep>/README.md
# Visualisering Kode
Dette er alt det kode og de opgaver, jeg har lavet i faget "Visualisering" under min uddannelse på AspIT Sønderjylland.
Dette indeholder:
* HTML
* CSS
* SEO
* QA
* Designtænkning
* Designprincipper - Farvelære, komposition, typografi og billedlære.
Læs mere om AspIT og Visualisering [her](https://aspit.dk/).
<file_sep>/V2/DOMmanipulation/js/script.js
"use strict";
let menu = document.getElementById('menu');
console.log(menu);
console.log("////////////");
////////////
let activeMenu = document.getElementsByClassName('active');
console.log(activeMenu);
console.log("////////////");
////////////
let allh = document.querySelectorAll('h1');
allh.forEach(element => { console.log(element);
});
console.log("////////////");
////////////
let allp = document.querySelectorAll('p');
allp.forEach(element => { console.log(element);
})
console.log("////////////");
////////////
let allImg = document.querySelectorAll('img');
allImg.forEach(element => { element.setAttribute("alt", "Billedet er underlagt copyright");
})
console.log("////////////");
////////////
document.querySelector('h1').innerHTML = "Overskriften er udskifted";
console.log("////////////");
////////////
document.querySelectorAll('h1').forEach(x => { x.classList.add("title")});
console.log("////////////");
////////////
document.querySelectorAll("li").forEach(x => { console.log(x)});
console.log("////////////");
////////////
let playground = document.querySelector('ID#playground')
////////////
////////////
////////////
////////////
////////////
////////////
|
00f7006292e6d86404e1c9ca763b00467a4a2803
|
[
"Markdown",
"JavaScript",
"HTML"
] | 17
|
Markdown
|
Esben-code/V1code
|
3059540b7db57824706cfaf0b250a080affae053
|
99d14d6f2cb9b840d5178d096ead93d502d40f7c
|
refs/heads/master
|
<repo_name>FreeCodeCampProjects/Build-a-Pomodoro-Clock<file_sep>/js/animations.js
/* CSS animation to add to container.
From "animate.css" library.
*******************************************/
$('.container').addClass('animated flipInY');<file_sep>/js/addThenRemoveClass.js
/* Add class(es) and then remove them several seconds later. This is
intented for animation classes (from animate.css library) — we
need to add, remove and re-add them to re-animate elements.
********************************************************************/
addThenRemoveClass = function addThenRemoveClass(element, classes) {
$(element).addClass(classes);
setTimeout(function() {
$(element).removeClass(classes);
}, config.TIME_INTERVAL.UPDATE_ANIMATION);
};<file_sep>/js/pauseFormatUpdate.js
/* Helper for addThenRemoveClass: applies it to every
   [element, classes] pair in addRemoveClasses.
   NOTE(review): the "o" parameter is unused, and the loop counter "i"
   is the shared global declared in app.js — confirm nothing relies on
   its value after this call.
********************************************/
addThenRemoveClasses = function (o, addRemoveClasses) {
    for (i = 0; i < addRemoveClasses.length; i++) {
        addThenRemoveClass(addRemoveClasses[i][0], addRemoveClasses[i][1]);
    }
};
/* Initiate a specially-formatted pause screen when the user pauses the
   app: swap in the pause text (o.mode.html), recolour the mode and
   countdown elements, and run the configured add-then-remove animation
   classes.
***********************************************************************/
pauseFormatUpdate = function pauseFormatUpdate(o) {
    setHTML({ element: mode, html: o.mode.html });
    setCSS([
        [mode, ['color', o.mode.color]],
        [countdown, ['color', o.countdown.color]],
        [countdown, ['font-size', o.countdown.fontSize]]
    ]);
    addThenRemoveClasses(o, o.animationElements.addThenRemoveClasses);
};<file_sep>/js/appModeCalls.js
/* Build the per-mode data (configured length, remaining seconds and the
   colour styles for "session" and "break") and pass it to checkAppMode:
*********************************************************************/
checkAppModeFunc = function checkAppModeFunc() {
    checkAppMode({
        session: {
            session: true,               // flag: this entry describes the session mode
            lengthVal: sessionLengthVal, // configured session length (minutes)
            timeValue: timeSessionSecs,  // session seconds still remaining
            styles: {
                backgroundColor: config.STYLES.SESSION.BKG,
                border: config.STYLES.SESSION.BORDER
            }
        },
        break: {
            session: false,
            lengthVal: breakLengthVal,   // configured break length (minutes)
            timeValue: timeBreakSecs,    // break seconds still remaining
            styles: {
                backgroundColor: config.STYLES.BREAK.BKG,
                border: config.STYLES.BREAK.BORDER
            }
        }
    });
};<file_sep>/js/updateValues.js
/* After the configured interval, apply the mode-specific colours in
   o.styles to the fill element (#child) and the outer circle's border.
**********************************/
setStyleTimeout = function (o) {
    setTimeout(function() {
        setCSS([
            [child, ['background-color', o.styles.backgroundColor]],
            [outerCircle, ['border', o.styles.border]]
        ]);
    }, config.TIME_INTERVAL.UPDATE_FORMAT);
};
/* Update time values: recompute the globals start / current / end and the
   integer percentage "completed" for the mode described by o.
**********************************/
updateTimeValues = function (o) {
    start = convertMinutesToSeconds(o.lengthVal); // full mode length, in seconds
    current = o.timeValue;                        // seconds remaining
    end = 0;                                      // countdown target
    // Percentage of the countdown already elapsed, truncated to an integer.
    completed = parseInt(((current - start) / (end - start)) * 100);
}
/* Update values and format colours: refresh the percentage globals, render
   the countdown, tick the active mode's seconds down, append the completed
   percentage, mirror the status into the page title and schedule the
   mode-specific colour styles.
**********************************/
updateValues = function updateValues(o) {
    updateTimeValues(o);
    formatTime({ outputElement: countdown, timeValue: o.timeValue });
    o.session ? timeSessionSecs-- : timeBreakSecs--; // tick down the active mode
    $(countdown).html($(countdown).html() + config.TEXT.DIVIDER + completed + config.TEXT.PERCENT);
    document.title = $(mode).html() + config.TEXT.DIVIDER + $(countdown).html();
    setStyleTimeout(o);
};<file_sep>/js/app.js
/* Global variables.
   Constructors in TitleCase,
   Constants in UPPERCASE,
   All other (block-scope) variables in camelCase.
***************************************************/
let addThenRemoveClass, addThenRemoveClasses,
    appModeIndex, appModesArr,
    breakLength, breakLengthDecrement, breakLengthIncrement, breakLengthVal,
    checkAppMode, checkAppModeFunc,
    child,
    completed,
    config,
    ConstructEventListener,
    countdown, current, date, end,
    formatTime,
    group,
    mode,
    i,
    outerCircle,
    pauseCheck, pauseFormatUpdate, pauseOff, pauseOn, pauseUpdate, paused,
    reconfigTimeValues,
    sessionLength, sessionLengthDecrement, sessionLengthIncrement, sessionLengthVal,
    setCSS, setFeedbackText, setHTML, setMode, setTimeVals, setStyleTimeout,
    si,
    start,
    timeBreak, timeSessionSecs,
    timerVisual,
    updateLengthVals, updateMode, updateTimeVals, updateTimeValues, updateTimeValueSwitch, updateValues;

/* Get HTML elements by ID and cache as variables:
**************************************************/
sessionLengthDecrement = $('#session-length-decrement')[0];
sessionLengthIncrement = $('#session-length-increment')[0];
breakLengthDecrement = $('#break-length-decrement')[0];
breakLengthIncrement = $('#break-length-increment')[0];
outerCircle = $('#outer-circle')[0];
timerVisual = $('#timer-visual')[0];

/* Pause state. If "paused" is true, the app is paused.
**********************************************************/
paused = false;

/* List of modes our app will use; appModeIndex holds the CURRENT mode
   name (starts as 'session'):
**********************************/
appModesArr = ['session', 'break'];
appModeIndex = appModesArr[0];

/* Cache the main display elements, show the initial mode name, and
   read the configured session length from the page:
**************************************************/
child = $('#child')[0];
countdown = $('#countdown')[0];
mode = $('#mode')[0];
$(mode).html(appModeIndex);
sessionLength = $('#session-length')[0];
sessionLengthVal = $(sessionLength).html();

/* Set an initial break mode time value (minutes):
**************************************************/
timeBreak = 5;

/* Cache the break mode element and its value:
**************************************************/
breakLength = $('#break-length')[0];
breakLengthVal = $(breakLength).html();
/* When the user changes the length of a session or break, re-read the
   length elements from the page and refresh the cached element/value
   globals accordingly.
**************************************************************/
updateLengthVals = function () {
    sessionLength = $('#session-length')[0];
    sessionLengthVal = $(sessionLength).html();
    breakLength = $('#break-length')[0];
    breakLengthVal = $(breakLength).html(); // stray double semicolon removed
};
/* Helper: write o.html into o.element via jQuery.
**************************************************/
setHTML = function(o) {
    var target = o.element;
    var content = o.html;
    $(target).html(content);
};
/* Helper for setting (multiple) CSS values; each entry of arr is
   [element, [property, value]].
   NOTE(review): the loop counter "i" is the shared global declared in
   app.js — confirm nothing depends on its value after this call.
*****************************************************/
setCSS = function(arr) {
    for (i = 0; i < arr.length; i++) {
        $(arr[i][0]).css(arr[i][1][0], arr[i][1][1]);
    }
};<file_sep>/js/checkAppMode.js
/* Refresh the on-screen feedback: show the current mode name in the mode
   element and move the fill element (#child) so its top offset reflects
   the percentage completed.
****************************************************/
setFeedbackText = function() {
    setHTML({element: mode, html: appModeIndex });
    setCSS([
        [child, ['top', 100 - completed + config.TEXT.PERCENT]],
    ]);
}
/* Check which mode is enabled and update accordingly: while the active
   mode still has seconds left keep updating its values; once it hits
   zero switch over to the other mode via updateMode. Either way, refresh
   the feedback text afterwards.
*****************************************************/
checkAppMode = function checkAppMode(o) {
    // If mode is "session" pass session data; at zero, switch to break.
    if (appModeIndex == appModesArr[0]) timeSessionSecs > 0 ? updateValues(o.session) : updateMode(o.break, 1, breakLengthVal);
    // If mode is "break" pass break data; at zero, switch to session.
    else if (appModeIndex == appModesArr[1]) timeBreakSecs > 0 ? updateValues(o.break) : updateMode(o.session, 0, sessionLengthVal)
    setFeedbackText();
};<file_sep>/js/ConstructEventListener.js
/* Function for adding event listeners that works with both the
"addEventListener" method and the older "attachEvent" method seen in IE.
***************************************************************************/
ConstructEventListener = function ConstructEventListener (elem, event, fn, useCapture) {
window.addEventListener ?
elem.addEventListener(event, fn, useCapture) :
elem.attachEvent('on' + event, fn, useCapture);
};<file_sep>/js/convertMinutesToSeconds.js
/* Update time (in seconds) variables for different modes: derive
   timeSessionSecs / timeBreakSecs from the cached minute values.
**********************************************************/
updateTimeVals = function updateTimeVals () {
    timeSessionSecs = convertMinutesToSeconds(sessionLengthVal);
    timeBreakSecs = convertMinutesToSeconds(breakLengthVal);
};
/* Convert minutes to seconds:
**********************************/
convertMinutesToSeconds = function convertMinutesToSeconds(m) { return m * 60; };
/* Convert session / break minute values to seconds, then swap the mode
   element's content for an AJAX loader image; o names the element/HTML
   to set and the attribute to fill with the loader's source URL.
**********************************************************/
setTimeVals = function (o) {
    updateTimeVals();
    setHTML({ element: o.html.element, html: o.html.htmlContent });
    $(o.attr.element).attr(o.attr.attribute, config.IMG.AJAX_LOADER);
};

// Initialise the timers and show the loader image in the mode element.
setTimeVals({
    html: {
        element: mode,
        htmlContent: '<img id="ajax-loader"/>'
    },
    attr: {
        element: '#ajax-loader',
        attribute: 'src'
    }
});<file_sep>/js/setMode.js
/* Set the app mode by index into appModesArr:
   0 = "session", 1 = "break".
   NOTE(review): some callers pass a second argument (e.g. setMode(ind, 'p')
   in updateMode.js) which is silently ignored here — confirm intent.
*********************************************/
setMode = function setMode(index) {
    appModeIndex = appModesArr[index];
};<file_sep>/js/setInterval.js
/* Drive the app: every UPDATE_FORMAT milliseconds re-check the current
   mode and update the countdown values/feedback.
********************************************/
si = setInterval(function() {
    checkAppModeFunc();
}, config.TIME_INTERVAL.UPDATE_FORMAT);<file_sep>/js/config.js
/* Configuration object. Here we centralise important app properties.
*********************************************************************/
config = {
    // Classes from "animate.css" library.
    ANIMATION_CLASSES: {
        ROTATE_IN: 'animated rotateIn',
        HEADSHAKE: 'animated headShake',
        JELLO: 'animated jello',
        PULSE: 'animated pulse'
    },
    // Image sources (linked to my Dropbox).
    IMG: {
        AJAX_LOADER: 'https://dl.dropboxusercontent.com/u/7797721/FreeCodeCamp/Pomodoro%20Clock/img/ajax-loader-green.gif'
    },
    // Styles for "session" and "break" modes.
    STYLES: {
        SESSION: {
            BKG: '#0E3E22',
            BORDER: '4px solid #00FF00'
        },
        BREAK: {
            BKG: '#D35400',
            BORDER: '4px solid #F1C40F'
        },
        // Text colours/sizes for the running and paused states.
        TEXT_CONTENT: {
            UNPAUSED: {
                MODE: {
                    COLOR: '#FFF',
                    FONT_SIZE: '26px'
                },
                COUNTDOWN: {
                    COLOR: '#00FF00'
                }
            },
            PAUSED: {
                MODE: {
                    COLOR: '#FFF',
                    FONT_SIZE: '18px'
                },
                COUNTDOWN: {
                    COLOR: '#FFF'
                }
            }
        }
    },
    // Fixed text fragments used when composing feedback strings.
    TEXT: {
        BREAK: 'break',
        PAUSED: '[paused]',
        SESSION: 'session',
        DIVIDER: ' : ',
        PERCENT: '%'
    },
    // Time increments (milliseconds) used for "setInterval"/"setTimeout" calls.
    TIME_INTERVAL: {
        UPDATE_FORMAT: 1000,
        UPDATE_ANIMATION: 1000,
        UPDATE_VALUES: 1000
    }
};<file_sep>/js/updateMode.js
/* Run one final value update for the mode that just finished, switch the
   app mode, and reset the countdown of the finished mode.
   Params: m      - mode data object passed through to updateValues
           ind    - index of the NEW mode in appModesArr (0 = session, 1 = break)
           brkLen - length in minutes used to reset the corresponding timer
****************************************************/
updateMode = function updateMode(m, ind, brkLen) {
    updateValues(m);
    setMode(ind);
    // Bug fix: decide which timer to reset from the target mode index rather
    // than by value comparison — the original (brkLen == breakLengthVal) test
    // picked the wrong timer whenever session and break lengths were equal.
    if (ind === 1) {
        timeBreakSecs = convertMinutesToSeconds(brkLen);
    } else {
        timeSessionSecs = convertMinutesToSeconds(brkLen);
    }
};
|
35f55d5811870cf2c3f0cd2542b856be1aef68b7
|
[
"JavaScript"
] | 13
|
JavaScript
|
FreeCodeCampProjects/Build-a-Pomodoro-Clock
|
ce18de06b9ffeb584e85e17b1b301aff57c0f82a
|
8951918540a53b6efce13655a7477ccc83801b5f
|
refs/heads/master
|
<repo_name>Gertrido/Research-Work<file_sep>/js/rs.js
"use strict";
(function() {
let timerMouse, timerVisibility;
let eventMouse, eventVisibility;
let eventDelayMouse, eventDelayVisibility;
eventMouse = function() {
let container = document.querySelector('.activity-mouse');
let count_activityMouse = 0;
count_activityMouse += 1;
container.children[1].innerHTML = count_activityMouse;
}
eventDelayMouse = 2;
function activityMouse(eventMouse, eventDelayMouse) {
clearInterval(timerMouse);
setInterval(eventMouse, eventDelayMouse * 1000);
//event();
}
function handlerVisibilityChange(event = eventVisibility, delay = eventDelayVisibility) {
clearInterval(timerVisibility);
if (document['hidden'])
setInterval(event, delay * 1000);
else {
//alternative
}
}
function clock() {
let container = document.querySelector(".clock");
let date = new Date();
let hour = date.getHours();
if (hour < 10)
hour = '0' + hour;
let min = date.getMinutes();
if (min < 10)
min = '0' + min;
let sec = date.getSeconds();
if (sec < 10)
sec = '0' + sec;
container.children[0].innerHTML = hour;
container.children[1].innerHTML = min;
container.children[2].innerHTML = sec;
}
let timerClock;
function start() {
timerClock = setInterval(clock,1000);
clock();
activityMouse(event);
}
function stop() {
clearInterval(timerClock);
}
//events
window.onload = start;
//document.addEventListener('mousemove', activityMouse, false);
//document.addEventListener('visibilityChange', handlerVisibilityChange, false);
//global variables
window.rs_eventMouse = eventMouse;
window.rs_eventVisibility = eventVisibility;
window.rs_delayMouse = eventDelayMouse;
window.rs_delayVisibility = eventDelayVisibility;
}());
// function sendNotification(title, options) {
// // Проверим, поддерживает ли браузер HTML5 Notifications
// if (!("Notification" in window)) {
// alert('Ваш браузер не поддерживает HTML Notifications, его необходимо обновить.');
// }
// // Проверим, есть ли права на отправку уведомлений
// else if (Notification.permission === "granted") {
// // Если права есть, отправим уведомление
// var notification = new Notification(title, options);
//
// function clickFunc() {notification.close(); }
//
// notification.onclick = clickFunc;
// }
// // Если прав нет, пытаемся их получить
// else if (Notification.permission !== 'denied') {
// Notification.requestPermission(function (permission) {
// // Если права успешно получены, отправляем уведомление
// if (permission === "granted") {
// var notification = new Notification(title, options);
// } else {
// alert('Вы запретили показывать уведомления'); // Юзер отклонил наш запрос на показ уведомлений
// }
// });
// } else {
// // Пользователь ранее отклонил наш запрос на показ уведомлений
// // В этом месте мы можем, но не будем его беспокоить. Уважайте решения своих пользователей.
// }
// }
|
7c340574c6e291de0c79d8a96f8f1507cf2207dc
|
[
"JavaScript"
] | 1
|
JavaScript
|
Gertrido/Research-Work
|
c2768774f850f639c80ff4745dfee16b8000eae7
|
2aca7252dfd56494fbe8dc41b8ed077b520be075
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.