branch_name stringclasses 149 values | text stringlengths 23 89.3M | directory_id stringlengths 40 40 | languages listlengths 1 19 | num_files int64 1 11.8k | repo_language stringclasses 38 values | repo_name stringlengths 6 114 | revision_id stringlengths 40 40 | snapshot_id stringlengths 40 40 |
|---|---|---|---|---|---|---|---|---|
refs/heads/master | <file_sep>package com.dreamalta.geekydiscountcal;
import android.os.Bundle;
import android.app.Activity;
import android.view.View;
import com.dreamalta.geekydiscountcal.R.id;
import com.dreamalta.geekydiscountcal.R.layout;
import com.dreamalta.geekydiscountcal.R.menu;
import android.graphics.Color;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View.OnTouchListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
public class MainActivity extends Activity implements View.OnClickListener, OnTouchListener { // handles all button clicks and swipe touches itself
//char [][] num = new char[20][14]; //2 Dimensional array: 20 numbers of up to 13 digits (abandoned design, kept for reference)
int nCounter = 0; //number counter (only referenced from commented-out code)
int dCounter = 0; //digit counter: characters currently typed on the main display
int rdCounter = 0; //temporary storage to restore dCounter when an operator is deleted
int dotCounter = 0; //0 = current number has no decimal point yet, 1 = it already has one
int rdotCounter = 0; //temporary storage to restore dotCounter when an operator is deleted
int strWarn = 0; //1 = display holds a warning/Infinity string; blocks delete and operators until cleared
int disV = 0,
taxV = 0,
tipV = 0; //1 while the discount/tax/tip field is being edited (set by its button, cleared by '=')
Double disVal = 0.0,
taxVal = 0.0,
tipVal = 0.0; //percentage values entered by the user for discount, tax and tip
int dttDotCounter = 0; //dot flag for the discount/tax/tip field currently being edited
int rDttDotCounter = 0; //temporary storage for dttDotCounter (currently unused)
int dttDCounter = 0; //dis, tax, tip digit counter (max 4 characters per field)
int rDttDCounter = 0; //temporary storage for dttDCounter (currently unused)
TextView display, include, tvDis, tvTax, tvTip, youSave, taxAndTip, total;
Button bDot, b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, qu, eq, div, mul, sub, add, dis, tax, tip;
View swipe;
float x1,x2; //gesture start/end X coordinates for swipe detection
float y1, y2; //gesture start/end Y coordinates for swipe detection
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    // Output views.
    display = (TextView) findViewById(R.id.textView1);
    tvDis = (TextView) findViewById(R.id.tvDis);
    tvTax = (TextView) findViewById(R.id.tvTax);
    tvTip = (TextView) findViewById(R.id.tvTip);
    youSave = (TextView) findViewById(R.id.youSave);
    taxAndTip = (TextView) findViewById(R.id.taxAndTip);
    total = (TextView) findViewById(R.id.total);

    // Keypad and function buttons.
    bDot = (Button) findViewById(R.id.bDot);
    b0 = (Button) findViewById(R.id.b0);
    b1 = (Button) findViewById(R.id.b1);
    b2 = (Button) findViewById(R.id.b2);
    b3 = (Button) findViewById(R.id.b3);
    b4 = (Button) findViewById(R.id.b4);
    b5 = (Button) findViewById(R.id.b5);
    b6 = (Button) findViewById(R.id.b6);
    b7 = (Button) findViewById(R.id.b7);
    b8 = (Button) findViewById(R.id.b8);
    b9 = (Button) findViewById(R.id.b9);
    qu = (Button) findViewById(R.id.qu);
    eq = (Button) findViewById(R.id.eq);
    div = (Button) findViewById(R.id.divide);
    mul = (Button) findViewById(R.id.multiply);
    sub = (Button) findViewById(R.id.subtract);
    add = (Button) findViewById(R.id.add);
    dis = (Button) findViewById(R.id.dis);
    tax = (Button) findViewById(R.id.tax);
    tip = (Button) findViewById(R.id.tip);
    swipe = (View) findViewById(R.id.view1);

    // Every button routes through this activity's onClick; registering them in
    // a loop replaces twenty near-identical setOnClickListener calls.
    Button[] clickable = {bDot, b0, b1, b2, b3, b4, b5, b6, b7, b8, b9,
            qu, eq, div, mul, sub, add, dis, tax, tip};
    for (Button button : clickable) {
        button.setOnClickListener(this);
    }

    swipe.setOnTouchListener(this);
}
@Override
public void onResume() {
// No extra work on resume; the override simply delegates to the framework.
super.onResume();
}
@Override
public void onClick(View v) {
    // Dispatch every calculator button press. The digit/operator keys all
    // shared near-identical copy-pasted bodies in the original; they now go
    // through small private helpers so each rule lives in exactly one place.
    switch (v.getId()) {
        case R.id.bDot:
            appendDot();
            break;
        case R.id.b0:
            appendDigit(b0);
            break;
        case R.id.b1:
            appendDigit(b1);
            break;
        case R.id.b2:
            appendDigit(b2);
            break;
        case R.id.b3:
            appendDigit(b3);
            break;
        case R.id.b4:
            appendDigit(b4);
            break;
        case R.id.b5:
            appendDigit(b5);
            break;
        case R.id.b6:
            appendDigit(b6);
            break;
        case R.id.b7:
            appendDigit(b7);
            break;
        case R.id.b8:
            appendDigit(b8);
            break;
        case R.id.b9:
            appendDigit(b9);
            break;
        case R.id.qu:
            // "?" button: show usage instructions in the three result lines.
            youSave.setText("[SWIPE].instruction:");
            taxAndTip.setText("[SWIPE].right to clear && [SWIPE].left to delete");
            total.setText("Developed by DreamAlta");
            break;
        case R.id.eq:
            if (disV == 1 || taxV == 1 || tipV == 1) {
                commitDttEntry();
            } else {
                evaluateExpression();
            }
            break;
        case R.id.divide:
            appendOperator("/");
            break;
        case R.id.multiply:
            appendOperator("*");
            break;
        case R.id.subtract:
            appendOperator("-");
            break;
        case R.id.add:
            appendOperator("+");
            break;
        case R.id.dis:
            // Only one of discount/tax/tip may be edited at a time.
            if (disV == 0 && taxV == 0 && tipV == 0) {
                disV = 1;
                beginDttEntry(tvDis);
            }
            break;
        case R.id.tax:
            if (disV == 0 && taxV == 0 && tipV == 0) {
                taxV = 1;
                beginDttEntry(tvTax);
            }
            break;
        case R.id.tip:
            if (disV == 0 && taxV == 0 && tipV == 0) {
                tipV = 1;
                beginDttEntry(tvTip);
            }
            break;
    }
}

/** Appends text to whichever discount/tax/tip field is currently being edited. */
private void appendToActiveDtt(CharSequence text) {
    if (disV == 1)
        tvDis.append(text);
    if (taxV == 1)
        tvTax.append(text);
    if (tipV == 1)
        tvTip.append(text);
}

/**
 * Handles a digit key. Digits go to the active discount/tax/tip field (max 4
 * characters) when one is being edited, otherwise to the main display (max 14
 * characters before the "too large" warning locks input).
 */
private void appendDigit(Button source) {
    if (disV == 1 || taxV == 1 || tipV == 1) {
        if (dttDCounter < 4) {
            appendToActiveDtt(source.getText());
            dttDCounter++;
        }
    } else if (dCounter > 13) {
        // Exceeds the maximum number of digits; warn and lock further input.
        display.setText("<num>too_large!&");
        strWarn = 1;
    } else if (strWarn == 0) {
        display.append(source.getText());
        dCounter++;
    }
}

/** Handles the decimal-point key; each number may contain at most one dot. */
private void appendDot() {
    if (disV == 1 || taxV == 1 || tipV == 1) {
        if (dttDCounter < 4 && dttDotCounter == 0) {
            appendToActiveDtt(bDot.getText());
            dttDCounter++;
            dttDotCounter = 1; // only one dot per percentage value
        }
    } else if (dCounter > 13) {
        display.setText("<num>too_large!&");
        strWarn = 1;
    } else if (dotCounter == 0 && strWarn == 0) {
        display.append(bDot.getText());
        dCounter++;
        dotCounter = 1; // only one dot per number on the main display
    }
}

/**
 * Appends a binary operator to the main display. Rejected while a
 * discount/tax/tip field is being edited, after a warning, after a trailing
 * '.', or directly after another operator.
 */
private void appendOperator(String op) {
    String current = display.getText().toString();
    if (current.isEmpty() || strWarn != 0 || disV == 1 || taxV == 1 || tipV == 1) {
        return;
    }
    char last = current.charAt(current.length() - 1);
    if (last == '.' || last == '/' || last == '*' || last == '-' || last == '+') {
        return; // no operator after '.' and no doubled operators
    }
    display.append(op);
    strWarn = 0;
    // Remember the digit/dot state so deleting this operator can restore it.
    rdotCounter = dotCounter;
    dotCounter = 0;
    rdCounter = dCounter;
    dCounter = dCounter + 1; // the operator occupies one display character
}

/** Puts a discount/tax/tip field into edit mode (red background, empty text). */
private void beginDttEntry(TextView target) {
    target.setText("");
    target.setBackgroundColor(Color.RED);
}

/**
 * "=" pressed while a discount/tax/tip field was being edited: parse and store
 * its value, mark the field green (or transparent when cleared) and leave edit
 * mode.
 */
private void commitDttEntry() {
    if (disV == 1) {
        disVal = readDttValue(tvDis);
        disV = 0;
    } else if (taxV == 1) {
        taxVal = readDttValue(tvTax);
        taxV = 0;
    } else if (tipV == 1) {
        tipVal = readDttValue(tvTip);
        tipV = 0;
    }
    dttDCounter = 0;
    dttDotCounter = 0;
}

/**
 * Parses a committed discount/tax/tip field. Empty text or a lone "." counts
 * as 0 and removes the highlight; otherwise the value is stored and the field
 * turns green.
 */
private double readDttValue(TextView source) {
    String text = source.getText().toString();
    if (text.isEmpty() || text.equals(".")) {
        source.setBackgroundColor(Color.TRANSPARENT);
        return 0.0;
    }
    source.setBackgroundColor(Color.GREEN);
    return Double.parseDouble(text);
}

/**
 * "=" pressed in normal mode: evaluate the expression on the main display,
 * then show savings, tax/tip amounts and the grand total.
 * Evaluation is strictly left-to-right with no operator precedence
 * (behavior preserved from the original implementation).
 */
private void evaluateExpression() {
    String expression = display.getText().toString();
    if (expression.isEmpty()) {
        return; // nothing to calculate
    }
    char last = expression.charAt(expression.length() - 1);
    if (strWarn != 0 || last == '.' || last == '/' || last == '*' || last == '-' || last == '+') {
        return; // refuse to evaluate warnings or a dangling '.'/operator
    }

    // Tokenize into numbers and operators in order of appearance.
    // NOTE(review): capacity of 10 carried over from the original arrays; the
    // 14-character display limit appears to keep the token count in range —
    // confirm.
    double[] numbers = new double[10];
    char[] operators = new char[10];
    int numberCount = 0;
    int operatorCount = 0;
    int start = 0; // index where the current number began
    for (int j = 0; j < expression.length(); j++) {
        char c = expression.charAt(j);
        if (c == '/' || c == '*' || c == '-' || c == '+') {
            numbers[numberCount] = Double.parseDouble(expression.substring(start, j));
            start = j + 1; // skip past the operator itself
            operators[operatorCount] = c;
            numberCount++;
            operatorCount++;
        }
    }
    numbers[numberCount] = Double.parseDouble(expression.substring(start)); // last number

    double ans = numbers[0];
    for (int x = 0; x < operatorCount; x++) {
        switch (operators[x]) {
            case '/':
                ans = ans / numbers[x + 1];
                if (numbers[x + 1] == 0) {
                    // Division by zero shows "Infinity"; flag it so the user
                    // must swipe-clear before typing anything else.
                    strWarn = 1;
                }
                break;
            case '*':
                ans = ans * numbers[x + 1];
                break;
            case '-':
                ans = ans - numbers[x + 1];
                break;
            case '+':
                ans = ans + numbers[x + 1];
                break;
        }
    }

    // Discount / tax / tip summary lines.
    youSave.setText("SAVE>>" + String.format("%.2f", (ans * (disVal / 100))));
    taxAndTip.setText("TAX>>" + String.format("%.2f", (ans * (taxVal / 100)))
            + " TIP>>" + String.format("%.2f", (ans * (tipVal / 100))));
    total.setText("TOTAL>>" + String.format("%.2f",
            (ans - (ans * (disVal / 100)) + (ans * (taxVal / 100)) + (ans * (tipVal / 100)))));

    // Main display: answer rounded to two decimal places.
    display.setText(String.format("%.2f", ans));
    dCounter = display.getText().toString().length();
    dotCounter = 1; // the formatted result always contains a dot
}
// Obtained online but modified for this project (swipe detection). Works only
// on the dedicated swipe view, not on buttons or other widgets.
// Left-to-right swipe clears; right-to-left swipe deletes the last character.
public boolean onTouch(View v, MotionEvent touchevent)
{
    switch (touchevent.getAction())
    {
        case MotionEvent.ACTION_DOWN:
            // Remember where the gesture started.
            x1 = touchevent.getX();
            y1 = touchevent.getY();
            break;
        case MotionEvent.ACTION_UP:
            x2 = touchevent.getX();
            y2 = touchevent.getY(); // vertical swipes are currently unused
            if (x1 < x2) {
                clearActiveDisplay();
            }
            if (x1 > x2) {
                deleteLastCharacter();
            }
            break;
    }
    return true;
}

/**
 * Left-to-right swipe: clears the discount/tax/tip field being edited, or in
 * normal mode clears the whole calculator display and result lines (which also
 * resets the "too large"/Infinity warning).
 */
private void clearActiveDisplay() {
    if (disV == 1 || taxV == 1 || tipV == 1) {
        if (disV == 1)
            tvDis.setText("");
        if (taxV == 1)
            tvTax.setText("");
        if (tipV == 1)
            tvTip.setText("");
        dttDCounter = 0;
        dttDotCounter = 0;
    } else {
        display.setText("");
        youSave.setText("");
        taxAndTip.setText("");
        total.setText("");
        strWarn = 0;
        dotCounter = 0;
        dCounter = 0;
    }
}

/**
 * Right-to-left swipe: deletes the last character from the field being edited
 * (or the main display), keeping the digit/dot bookkeeping consistent.
 */
private void deleteLastCharacter() {
    if (disV == 1 || taxV == 1 || tipV == 1) {
        // Exactly one of the three flags is set (enforced by the dis/tax/tip
        // button guards), so a chained ternary picks the active field.
        TextView active = (disV == 1) ? tvDis : (taxV == 1) ? tvTax : tvTip;
        String text = active.getText().toString();
        if (text.isEmpty()) {
            return;
        }
        if (text.charAt(text.length() - 1) == '.') {
            dttDotCounter = 0; // the dot may be typed again
        }
        active.setText(text.substring(0, text.length() - 1));
        dttDCounter = dttDCounter - 1;
    } else {
        String text = display.getText().toString();
        if (text.isEmpty() || strWarn != 0) {
            return; // nothing to delete, or a warning string is shown
        }
        char last = text.charAt(text.length() - 1);
        if (last == '.') {
            dotCounter = 0;
        }
        boolean removedOperator =
                (last == '/' || last == '*' || last == '-' || last == '+');
        if (removedOperator) {
            // Go back to the previous number's digit/dot state.
            // NOTE(review): rdCounter/rdotCounter hold only one level of
            // history, so deleting across several operators drifts — confirm
            // whether deeper undo is needed.
            dCounter = rdCounter;
            dotCounter = rdotCounter;
        }
        display.setText(text.substring(0, text.length() - 1));
        if (!removedOperator) {
            // Fix: the original also decremented after restoring rdCounter,
            // leaving the digit count one short when an operator was deleted.
            dCounter = dCounter - 1;
        }
    }
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
// Returning true makes the menu visible.
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // The settings entry is consumed here; everything else — including the
    // Home/Up button, provided a parent activity is declared in
    // AndroidManifest.xml — is delegated to the framework. The short-circuit
    // '||' skips the super call exactly when settings was selected, matching
    // the original if/return structure.
    return item.getItemId() == R.id.action_settings
            || super.onOptionsItemSelected(item);
}
}
| 8360c75dd96d1db3465167e0842c9338a4be7e59 | [
"Java"
] | 1 | Java | aldrinlvp/Geeky-Discount-Tax-Tip-Cal-Android-Application | 6727c8ca0027055dba1ea67808b565a23f6c3979 | 5dc7c794f498ac72fec9135a847e5f31d4d8908c |
refs/heads/main | <file_sep>'use strict';
// Dev sanity check that the script loaded.
console.log('funguju!');
// Header: root mount element plus the props rendered by the Header component.
const appElm = document.querySelector('#app');
const headerProps = {
title: 'Jogíni',
links: ['domů', 'lekce', 'náš tým', 'události', 'kontakt'],
};
// Header component: renders the site title plus one nav link per entry in
// props.links. Generalized from the original, which hard-coded exactly five
// links (links[0]..links[4]) and printed "undefined" for shorter arrays.
const Header = (props) => {
  const navLinks = props.links
    .map((link) => `<a href="#">${link}</a>`)
    .join('\n');

  return `
<header>
<h1 class="site-title">${props.title}</h1>
<nav>
${navLinks}
</nav>
</header>
`;
};
// Mount the header as the first chunk of markup.
appElm.innerHTML = Header(headerProps);
// Intro section: heading plus a welcome paragraph (content is Czech site copy).
const introProps = {
heading: 'Vítejte mezi Jogíny',
text: `Naše lekce jsou zaměřeny na potřeby klientů, kteří mají odvahu zkusit
něco nového. Cvičíme v pomalém tempu s podrobným slovním doprovodem.
Postupně se seznámíte se základními principy jógy, jak přístupovat k
sobě i ostatním. Krok za krokem objevíte a dostanete pod kontrolu
svoje tělo, pocity a emoce.`,
};
// Intro component: returns the intro markup for the given props.
const Intro = (props) => {
return `
<div class="intro">
<h2>${props.heading}</h2>
<p>${props.text}</p>
</div>`;
};
// Append the intro below the header.
appElm.innerHTML += Intro(introProps);
// Pose image: decorative yoga pose photo appended after the intro.
const poseProps = {
src: 'img/yoga-pose.jpg',
};
// Pose component: renders an <img> for the given src prop.
// NOTE(review): the src attribute is emitted unquoted; fine for this path but
// it would break on values containing spaces — consider quoting.
const Pose = (props) => {
return `<img class="pose" src=${props.src} />`;
};
appElm.innerHTML += Pose(poseProps);
| 1c589f676fc490b938467fa01feb5c6f4916fd50 | [
"JavaScript"
] | 1 | JavaScript | MichaelaRasovska/jogini | b72d1f2e89a9dba71b1bdf35ba4682b48aac4dca | 804d2a5d021e04775770e7328fe3e4c930bbfd3c |
refs/heads/master | <file_sep><?php
namespace App\Http\Controllers\Auth;
use App\Http\Controllers\Controller;
use App\User;
use Illuminate\Support\Facades\Auth;
use Socialite;
class LoginController extends Controller
{
    /**
     * Redirect the user to the GitHub authentication page.
     *
     * @return \Illuminate\Http\Response
     */
    public function redirectToProvider()
    {
        return Socialite::driver('github')->redirect();
    }

    /**
     * Obtain the user information from GitHub, log the matching local user in,
     * creating the account on first login.
     *
     * @return \Illuminate\Http\Response
     */
    public function handleProviderCallback()
    {
        $githubUser = Socialite::driver('github')->user();

        $user = User::where('email', '=', $githubUser->getEmail())->first();

        if ($user === null) {
            // First GitHub login for this e-mail: provision a local account.
            // We already verified no matching user exists, so create() is
            // sufficient; the previous firstOrCreate() re-queried on every
            // attribute (including the password column), which was redundant.
            // NOTE(review): the account is stored with an empty password, so
            // it is only usable via the OAuth flow — confirm that is intended.
            $user = User::create([
                'email' => $githubUser->getEmail(),
                'name' => $githubUser->getNickName(),
                'password' => ''
            ]);
        }

        Auth::login($user);

        return redirect()->route('home');
    }
}
<file_sep><?php
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
// Feed: view the feed and create posts (authenticated users only).
Route::get('/', 'FeedController@feed')->name('home')
->middleware('auth');
Route::post('/', 'FeedController@store')
->middleware('auth');
// GitHub OAuth routes: redirect to GitHub, then handle the provider callback.
Route::get('login/github', 'Auth\LoginController@redirectToProvider')
->name('login');
Route::get('login/github/callback', 'Auth\LoginController@handleProviderCallback');
Route::get('logout', 'Auth\LogoutController@logout')->name('logout');<file_sep><?php
namespace App\Http\Controllers;
use App\Post;
use Auth;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\DB;
use Illuminate\Validation\Validator;
class FeedController extends Controller
{
    /**
     * Show the post feed, oldest first.
     *
     * Reads via the query builder (DB::table) rather than the Post model.
     *
     * @return \Illuminate\View\View
     */
    public function feed()
    {
        $posts = DB::table('posts')
            ->orderBy('created_at', 'asc')
            ->get();

        return view('feed')
            ->with('posts', $posts);
    }

    /**
     * Validate and store a new post for the authenticated user.
     *
     * @param  \Illuminate\Http\Request  $request
     * @return \Illuminate\Http\RedirectResponse
     */
    public function store(Request $request)
    {
        $validatedData = $request->validate([
            'content' => 'required|max:500'
        ]);

        $post = new Post;
        // Fix: use the validated value instead of re-reading the raw input
        // (the validated array was previously assigned but never used).
        $post->content = $validatedData['content'];
        $post->user()->associate(Auth::user()->id);
        $post->save();

        return redirect()->route('home');
    }
}
| 5cda67e7107f45f18b0c4ea2df3ba11ab8c4e188 | [
"PHP"
] | 3 | PHP | chrispulse/social-network | c046d01dd0ed0fc73f71f81b5e0d77d88dbc9582 | e7ea606dcb0da7d38ce59abcb15821ab663a11f3 |
refs/heads/master | <file_sep>public function createThumbnail($source_folder, $thumbs_folder, $source_file, $extension, $thumbHeight){
// Pick the GD writer/reader function pair matching the file extension.
// NOTE(review): the comparison is case-sensitive, so e.g. "JPG" falls through
// and leaves $imgt/$imgcreatefrom undefined; the "if ($imgt)" below then
// truthiness-tests an undefined variable (PHP notice). Consider initialising
// $imgt = null and lower-casing $extension first.
if ($extension == 'gif') {
$imgt = "ImageGIF";
$imgcreatefrom = "ImageCreateFromGIF";
}else if($extension == 'jpg' || $extension == 'jpeg'){
$imgt = "ImageJPEG";
$imgcreatefrom = "ImageCreateFromJPEG";
}else if ($extension == 'png') {
$imgt = "ImagePNG";
$imgcreatefrom = "ImageCreateFromPNG";
}
if ($imgt) {
// Load the source image and scale it to $thumbHeight, preserving the
// aspect ratio (width is derived from the height ratio).
$img = $imgcreatefrom( $source_folder.$source_file.'.'.$extension );
$width = imagesx( $img );
$height = imagesy( $img );
// keep aspect ratio with these operations...
$new_width = floor( $width * ( $thumbHeight / $height ) );
$new_height = $thumbHeight;
$tmp_img = imagecreatetruecolor( $new_width, $new_height );
if($extension == 'png'){
// Disable alpha mixing and set alpha flag if is a png file
imagealphablending($tmp_img, false);
imagesavealpha($tmp_img, true);
}
// Resize and write the thumbnail as "<name>_<w>x<h>.<ext>" in $thumbs_folder.
imagecopyresized( $tmp_img, $img, 0, 0, 0, 0, $new_width, $new_height, $width, $height );
$imgt( $tmp_img, $thumbs_folder.($source_file.'_'.$new_width.'x'.$new_height.'.'.$extension));
}
} | 72233cc9ccefcfcd8a1b84d4728cfc1cfa5cbc37 | [
"PHP"
] | 1 | PHP | wonder-romane/wr | 8083bff54d13246fac425d304263a32f9bb1c5d7 | 3102b98a841f22a78aa716cfdea92f10c2ed427b |
refs/heads/master | <repo_name>alekslevko/iTechArt-Lab-2018<file_sep>/task_netcore/task4/Client/src/modules/Comments/views/CommentForm/styles.js
// JSS rules for the comment form (injected via withStyles); `theme` is unused
// but kept for the conventional withStyles signature.
const styles = theme => ({
// Centered wrapper that narrows at tablet and phone breakpoints.
commentFormContainer: {
maxWidth: '1000px',
margin: '20px auto',
'@media (max-width: 1070px)': {
width: '600px'
},
'@media (max-width: 650px)': {
width: '300px'
}
},
// Small submit button placed next to the input.
button: {
width: '35px',
height: '25px',
marginLeft: '5px'
},
sendIcon: {
fontSize: '18px'
},
commentForm: {
margin: '0 auto'
},
// Section heading; shrinks on phones.
title: {
fontSize: '28px',
color: '#696969',
marginBottom: '10px',
'@media (max-width: 650px)': {
fontSize: '18px'
}
}
});
export default styles;<file_sep>/task_netcore/task4/WebApi/task4/task4.WEB/Startup.cs
using System;
using System.IdentityModel.Tokens.Jwt;
using System.Text;
using AutoMapper;
using Microsoft.AspNetCore.Authentication.JwtBearer;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Identity;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.IdentityModel.Tokens;
using task4.BLL.Interfaces;
using task4.BLL.Services;
using task4.DAL.EF;
using task4.DAL.Entities;
using task4.DAL.Interfaces;
using task4.DAL.Repositories;
using static task4.DAL.Entities.User;
namespace task4.WEB
{
public class Startup
{
    public Startup(IConfiguration configuration)
    {
        Configuration = configuration;
    }

    public IConfiguration Configuration { get; }

    /// <summary>
    /// Called by the runtime; registers EF Core, ASP.NET Identity, the BLL
    /// services and JWT bearer authentication in the DI container.
    /// </summary>
    public IServiceProvider ConfigureServices(IServiceCollection services)
    {
        services.AddDbContext<ApplicationContext>(options =>
            options.UseSqlServer(Configuration.GetConnectionString("DefaultConnection")));

        // Fix: AddEntityFrameworkStores<ApplicationContext>() was chained
        // twice in the original; one registration is sufficient.
        services.AddIdentity<User, Role>()
            .AddEntityFrameworkStores<ApplicationContext>()
            .AddDefaultTokenProviders();

        // Business-layer services and the unit of work.
        services.AddTransient<IMovieService, MovieService>();
        services.AddTransient<IPhotoService, PhotoService>();
        services.AddTransient<IAccountService, AccountService>();
        services.AddTransient<ICommentService, CommentService>();
        services.AddTransient<IRatingService, RatingService>();
        services.AddTransient<IUnitOfWork, UnitOfWork>();

        services.AddMvc();
        services.AddCors();
        services.AddAutoMapper();

        // Use raw JWT claim names instead of the legacy mapped names.
        JwtSecurityTokenHandler.DefaultInboundClaimTypeMap.Clear();
        services
            .AddAuthentication(options =>
            {
                options.DefaultAuthenticateScheme = JwtBearerDefaults.AuthenticationScheme;
                options.DefaultScheme = JwtBearerDefaults.AuthenticationScheme;
                options.DefaultChallengeScheme = JwtBearerDefaults.AuthenticationScheme;
            })
            .AddJwtBearer(cfg =>
            {
                cfg.RequireHttpsMetadata = false;
                cfg.SaveToken = true;

                cfg.TokenValidationParameters = new TokenValidationParameters
                {
                    ValidateIssuer = true,
                    ValidIssuer = Configuration["JwtIssuer"],
                    ValidateAudience = true,
                    ValidAudience = Configuration["JwtIssuer"],
                    ValidateLifetime = true,
                    IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(Configuration["JwtKey"])),
                    ValidateIssuerSigningKey = true
                };
            });

        return services.BuildServiceProvider();
    }

    /// <summary>
    /// Called by the runtime; configures the HTTP request pipeline.
    /// NOTE(review): the CORS policy allows any origin together with
    /// credentials — confirm this is acceptable outside development.
    /// </summary>
    public void Configure(IApplicationBuilder app, IHostingEnvironment env)
    {
        app.UseDefaultFiles();
        app.UseStaticFiles();
        app.UseCors(builder => builder.AllowAnyOrigin().AllowAnyHeader().AllowAnyMethod().AllowCredentials());
        app.UseAuthentication();
        app.UseMvc();
    }
}
}
<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Models/RatingResultModel.cs
using System;
using System.Collections.Generic;
using System.Text;
namespace task4.BLL.Models
{
/// <summary>
/// Carries the outcome of a rating operation: the rating value, an
/// AlreadyRated flag and any error messages.
/// </summary>
public class RatingResultModel
{
/// <summary>The rating value.</summary>
public decimal Value { get; set; }
/// <summary>Presumably true when the user has already rated the movie —
/// confirm against RatingService.AddRating.</summary>
public bool AlreadyRated { get; set; }
/// <summary>Error messages produced during the operation, if any.</summary>
public List<string> Errors { get; set; }
}
}
<file_sep>/task_reactredux/task1/src/containers/SuccessContainer.js
import { connect } from 'react-redux';
import Success from '../views/Success';
// Pick the two state slices the Success view needs and expose their values as
// flat props (state.mail.mail -> props.mail, state.password.password ->
// props.password).
const mapStateToProps = ({ mail, password }) => ({
  mail: mail.mail,
  password: password.password
});
export default connect(mapStateToProps)(Success);<file_sep>/task_javascript/task2/README.md
# ItechArt-Lab-2018
2) https://jsbin.com/totekay/edit?js,console<file_sep>/task_netcore/task2/task2/Interfaces/IDataService.cs
using System.Threading.Tasks;
using task2.Models;
namespace task2.Interfaces
{
    /// <summary>
    /// Supplies starship data fetched from the external endpoint configured
    /// under the "url" setting (see DataService for the implementation).
    /// </summary>
    public interface IDataService
    {
        /// <summary>Synchronously downloads and maps the first page of results.</summary>
        ResponseModel GetInfo();
        /// <summary>Asynchronously downloads and merges every page of results.</summary>
        Task<ResponseModel> GetInfoAsync();
    }
}
<file_sep>/task_javascript/task1/README.md
# ItechArt-Lab-2018
1) https://jsbin.com/yigeled/edit?js,console<file_sep>/task_reactredux/task1/src/views/Counter/index.js
import React from 'react';
import PropTypes from 'prop-types';
import styles from './styles';
import { Typography, withStyles, Paper, Button } from '@material-ui/core';
const Counter = ({ increment, decrement, reset, count, classes}) => {
return (
<div>
<Paper className={classes.root} elevation={8}>
<Button className={classes.button} variant="contained" color="primary" onClick={() => increment()}>Increment</Button>
<Button className={classes.button} variant="contained" color="secondary" onClick={() => decrement()}>Decrement</Button>
<Button className={classes.button} variant="contained" color="default" onClick={() => reset()}>Reset </Button>
<Typography variant="display2" component="p">{count}</Typography>
</Paper>
</div>
);
}
Counter.propTypes = {
classes: PropTypes.object.isRequired,
increment: PropTypes.func,
decrement: PropTypes.func,
reset: PropTypes.func,
count: PropTypes.number
}
export default withStyles(styles)(Counter);<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Services/RatingService.cs
using AutoMapper;
using System.Collections.Generic;
using System.Linq;
using task4.BLL.Interfaces;
using task4.BLL.Models;
using task4.DAL.Entities;
using task4.DAL.Interfaces;
namespace task4.BLL.Services
{
    /// <summary>
    /// Business-logic layer for user ratings of movies: adding a rating,
    /// reading a user's own rating, and computing a movie's average.
    /// </summary>
    public class RatingService : IRatingService
    {
        private readonly IUnitOfWork _unitOfWork;
        private readonly IMapper _mapper;
        private readonly IMovieService _movieService;

        public RatingService(IUnitOfWork uow, IMapper mapper, IMovieService movieService)
        {
            _unitOfWork = uow;
            _mapper = mapper;
            _movieService = movieService;
        }

        /// <summary>
        /// Persists a new rating unless the user has already rated the movie,
        /// then refreshes the movie's aggregate rating.
        /// </summary>
        public RatingResultModel AddRating(RatingModel ratingModel)
        {
            var existingRating = _unitOfWork.RatingRepository.GetQueryableAll()
                .FirstOrDefault(r => r.User.Id == ratingModel.UserId && r.Movie.Id == ratingModel.MovieId);
            var result = new RatingResultModel();
            if (existingRating != null)
            {
                // Re-rating is not allowed; surface it as a domain error.
                result.Errors = new List<string> {"You can't rate again"};
                return result;
            }
            var newRating = _mapper.Map<RatingModel, Rating>(ratingModel);
            newRating.User = _unitOfWork.UserRepository.GetById(ratingModel.UserId);
            newRating.Movie = _unitOfWork.MovieRepository.GetById(ratingModel.MovieId);
            _unitOfWork.RatingRepository.Insert(newRating);
            _unitOfWork.Commit();
            // Keep the denormalised Movie.Rating in sync with the new vote.
            _movieService.UpdateMovieRating(newRating.Movie.Id);
            return result;
        }

        /// <summary>
        /// Returns the rating a user gave a movie; when the movie or the
        /// rating does not exist, returns AlreadyRated = false.
        /// </summary>
        public RatingResultModel GetUserRating(int userId, int movieId)
        {
            var rating = _unitOfWork.RatingRepository.GetQueryableAll()
                .FirstOrDefault(r => r.User.Id == userId && r.Movie.Id == movieId);
            var movie = _unitOfWork.MovieRepository.GetById(movieId);
            if (rating == null || movie == null)
            {
                return new RatingResultModel { AlreadyRated = false };
            }
            var result = _mapper.Map<Rating, RatingResultModel>(rating);
            result.AlreadyRated = true;
            return result;
        }

        /// <summary>Average of all rating values for a movie; 0 when it has none.</summary>
        public decimal GetAverageRating(int movieId)
        {
            var movieRatings = _unitOfWork.RatingRepository.GetQueryableAll()
                .Where(r => r.Movie.Id == movieId)
                .ToList();
            return movieRatings.Count == 0 ? 0 : movieRatings.Average(r => r.Value);
        }
    }
}
<file_sep>/task_reactredux/task1/src/views/Login/index.js
import React from 'react';
import PropTypes from 'prop-types';
import { Paper, withStyles, TextField, Button, FormControl, FormHelperText } from '@material-ui/core';
import styles from './styles'
import { errorMessagesEnum } from '../../Constants';
const Login = ({ classes, mail, password, mailValid, passwordValid, onMailChange, onPasswordChange, handleSubmit, wasSubmited }) => {
return (
<div>
<Paper >
<form onSubmit={handleSubmit} className={classes.container}>
<div>
<FormControl>
<TextField
id="mail"
error={!mailValid}
label="Почта"
className={classes.textField}
value={mail}
onChange={onMailChange}
margin="normal" />
{!mailValid && wasSubmited && <FormHelperText >{errorMessagesEnum.EmailErrorMessage}</FormHelperText>}
</FormControl>
</div>
<div>
<FormControl>
<TextField
id="password-input"
error={!passwordValid}
label="Пароль"
className={classes.textField}
onChange={onPasswordChange}
type="password"
value={password}
autoComplete="current-password"
margin="normal" />
{!passwordValid && wasSubmited && <FormHelperText >{errorMessagesEnum.PasswordErrorMessage}</FormHelperText>}
</FormControl>
</div>
<Button type="submit" variant="outlined" className={classes.button}>
Войти
</Button>
</form>
</Paper>
<Paper>
<p>{mail}</p>
<p>{password}</p>
</Paper>
</div>
);
}
Login.propTypes = {
classes: PropTypes.object.isRequired,
mail: PropTypes.string,
password: PropTypes.string,
mailValid: PropTypes.bool,
passwordValid: PropTypes.bool,
onInputChange: PropTypes.func,
onPasswordChange: PropTypes.func,
handleSubmit: PropTypes.func
};
export default withStyles(styles)(Login);<file_sep>/task_netcore/task2/task2/Models/ForeignModel.cs
using System.Collections.Generic;
namespace task2.Models
{
    /// <summary>
    /// Shape of one page of the paged payload returned by the external endpoint
    /// (pagination links plus the starships on this page).
    /// </summary>
    public class ForeignModel
    {
        public string Next { get; set; }
        public string Previous { get; set; }
        public int Count { get; set; }
        // Initialised inline so Results is never null, even before deserialisation.
        public List<Starship> Results { get; set; } = new List<Starship>();
    }
}
<file_sep>/task_netcore/task4/WebApi/task4/task4.DAL/EF/ApplicationContext.cs
using Microsoft.AspNetCore.Identity.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;
using task4.DAL.Entities;
using static task4.DAL.Entities.User;
namespace task4.DAL.EF
{
    /// <summary>
    /// EF Core database context for the movie application. Extends the ASP.NET
    /// Identity context (int keys with custom User/Role and join entities) with
    /// the domain tables and their seed data.
    /// </summary>
    public class ApplicationContext : IdentityDbContext<User, Role, int, UserClaim, UserRole, UserLogin, RoleClaim, UserToken>
    {
        public DbSet<Movie> Movies { get; set; }
        public DbSet<Photo> Photos { get; set; }
        public DbSet<Comment> Comments { get; set; }
        public DbSet<Rating> Ratings { get; set; }

        public ApplicationContext(DbContextOptions<ApplicationContext> options)
            : base(options)
        { }

        protected override void OnModelCreating(ModelBuilder modelBuilder)
        {
            base.OnModelCreating(modelBuilder);
            // Cascade deletes: removing a movie deletes its photos; removing a
            // user deletes their comments and ratings.
            modelBuilder.Entity<Movie>().HasMany(m => m.Photos).WithOne(m => m.Movie).OnDelete(DeleteBehavior.Cascade);
            modelBuilder.Entity<User>().HasMany(u => u.Comments).WithOne(u => u.User).OnDelete(DeleteBehavior.Cascade);
            modelBuilder.Entity<User>().HasMany(u => u.Ratings).WithOne(u => u.User).OnDelete(DeleteBehavior.Cascade);
            // Seed the catalogue with six movies; every rating starts at 0.
            modelBuilder.Entity<Movie>().HasData(
                new Movie
                {
                    Id = 1,
                    Name = "Thor",
                    Country = "USA",
                    Producer = "<NAME>",
                    Year = 2011,
                    Rating = 0M,
                    Genre = "Fantasy",
                    PictureUrl = "https://media.kg-portal.ru/movies/t/thor/posters/thor_7.jpg",
                    Description = "An epic adventure takes place, both on our planet," +
                    " and in the fictional Kingdom of the Gods of Asgard." +
                    " In the center of history is the Mighty Thor, " +
                    "a strong but arrogant warrior, whose reckless actions," +
                    " the revival of the ancient war in Asgard. The Torah is sent into exile to the Earth," +
                    " devoid of power, and observance of ordinary people, as punishment ...",
                },
                new Movie
                {
                    Id = 2,
                    Name = "Lucy",
                    Country = "France",
                    Producer = "<NAME>",
                    Year = 2014,
                    Rating = 0M,
                    Genre = "Thriller",
                    PictureUrl = "http://lostfilm.info/images/poster/545/5447501.jpg",
                    Description = "Yesterday she was just a sexy blonde, and today is the most dangerous and" +
                    " deadly creature on the planet with supernatural abilities and intelligence." +
                    " The fact that recently the best minds of the world have considered fantastic theories," +
                    " it has become a reality. And now with production, it will become a hunter." +
                    " Her name is Lucy..."
                },
                new Movie
                {
                    Id = 3,
                    Name = "Unknown",
                    Country = "UK",
                    Producer = "<NAME>",
                    Year = 2011,
                    Rating = 0M,
                    Genre = "Thriller",
                    PictureUrl = "https://www.movieposter.com/posters/archive/main/119/MPW-59897",
                    Description = "This film tells about a man who, after awakening from a coma," +
                    " discovers that his personality is appropriated to another," +
                    " and understands that no one, even his own wife, believes him. And then," +
                    " with the help of an unknown young woman-taxi driver," +
                    " the hero rushes to prove who he is."
                },
                new Movie
                {
                    Id = 4,
                    Name = "Suits",
                    Country = "USA",
                    Producer = "<NAME>",
                    Year = 2011,
                    Rating = 0M,
                    Genre = "Drama",
                    PictureUrl = "https://st.kp.yandex.net/im/poster/2/4/0/kinopoisk.ru-Suits-2405451.jpg",
                    Description = "Raging after an unsuccessful attempt to sell drugs," +
                    " self-taught lawyer <NAME>, posing as a graduate of Harvard," +
                    " gets to interview one of the best lawyers for New York deals," +
                    " Harvey Spectrum."
                },
                new Movie
                {
                    Id = 5,
                    Name = "Three Billboards Outside Ebbing, Missouri",
                    Country = "USA",
                    Producer = "<NAME>",
                    Year = 2017,
                    Rating = 0M,
                    Genre = "Drama",
                    PictureUrl = "https://fanart.tv/fanart/movies/359940/movieposter/three-billboards-outside-ebbing-missouri-5a84165c374d4.jpg",
                    Description = "A few months after the murder of the daughter of <NAME>," +
                    " the criminals were never found. The desperate woman decides to take a bold step," +
                    " renting three billboards at the entrance to the city with a message" +
                    " to the authoritative head of the police, <NAME>. When the deputy sheriff," +
                    " an infantile mama's son with a violent inclination, officer Dixon is involved in the situation," +
                    " the struggle between Mildred and the city authorities is only aggravated."
                },
                new Movie
                {
                    Id = 6,
                    Name = "The second life of Uwe",
                    Country = "Sweden",
                    Producer = "<NAME>",
                    Year = 2015,
                    Rating = 0M,
                    Genre = "Drama",
                    PictureUrl = "http://cinecinema.org/uploads/posts/2016-07/1469737425_en-man-som-heter-ove.jpg",
                    Description = "Who is he, this Uwe? Aging thorough grumbler, reaching neighbors with endless trailers." +
                    " He falls into a rage at the sight of a garbage or an improperly standing machine." +
                    " And the light is on what light stands a frivolous family of new settlers," +
                    " in which the father and nail can not drive. But despite all of the above," +
                    " Uwe can do everything: masterfully cram his saab between the porch and the mailbox," +
                    " repair the battery, puncture the discount from the very tight-fisted shopkeeper."
                }
            );
            // Seed four stills per movie. Anonymous objects are used so the
            // MovieId foreign key can be set directly (HasData requires keys,
            // not navigation properties).
            modelBuilder.Entity<Photo>().HasData(
                new
                {
                    Id = 1,
                    MovieId = 1,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/48244/208943.jpg"
                },
                new
                {
                    Id = 2,
                    MovieId = 1,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/48244/572581.jpg"
                },
                new
                {
                    Id = 3,
                    MovieId = 1,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/48244/208942.jpg"
                },
                new
                {
                    Id = 4,
                    MovieId = 1,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/48244/208940.jpg"
                },
                new
                {
                    Id = 5,
                    MovieId = 2,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/108477/739108.jpg"
                },
                new
                {
                    Id = 6,
                    MovieId = 2,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/108477/739110.jpg"
                },
                new
                {
                    Id = 7,
                    MovieId = 2,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/108477/739111.jpg"
                },
                new
                {
                    Id = 8,
                    MovieId = 2,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/108477/739113.jpg"
                },
                new
                {
                    Id = 9,
                    MovieId = 3,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/91794/190675.jpg"
                },
                new
                {
                    Id = 10,
                    MovieId = 3,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/91794/190674.jpg"
                },
                new
                {
                    Id = 11,
                    MovieId = 3,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/91794/190673.jpg"
                },
                new
                {
                    Id = 12,
                    MovieId = 3,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/91794/190670.jpg"
                },
                new
                {
                    Id = 13,
                    MovieId = 4,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/96533/245756.jpg"
                },
                new
                {
                    Id = 14,
                    MovieId = 4,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/96533/554509.jpg"
                },
                new
                {
                    Id = 15,
                    MovieId = 4,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/96533/554512.jpg"
                },
                new
                {
                    Id = 16,
                    MovieId = 4,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/96533/554510.jpg"
                },
                new
                {
                    Id = 17,
                    MovieId = 5,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/125987/751751.jpg"
                },
                new
                {
                    Id = 18,
                    MovieId = 5,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/125987/751750.jpg"
                },
                new
                {
                    Id = 19,
                    MovieId = 5,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/125987/751752.jpg"
                },
                new
                {
                    Id = 20,
                    MovieId = 5,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/125987/751749.jpg"
                },
                new
                {
                    Id = 21,
                    MovieId = 6,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/120115/665900.jpg"
                },
                new
                {
                    Id = 22,
                    MovieId = 6,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/120115/665899.jpg"
                },
                new
                {
                    Id = 23,
                    MovieId = 6,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/120115/665898.jpg"
                },
                new
                {
                    Id = 24,
                    MovieId = 6,
                    PictureUrl = "https://www.kino-teatr.ru/movie/kadr/120115/665897.jpg"
                }
            );
        }
    }
}
<file_sep>/task_netcore/task4/WebApi/task4/task4.DAL/Migrations/20180829113843_Initial.cs
using System;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.EntityFrameworkCore.Migrations;
namespace task4.DAL.Migrations
{
    // NOTE(review): this is an EF Core scaffolded migration (see the
    // 20180829113843_Initial.cs filename). It should not be hand-edited;
    // schema changes belong in the model followed by a new migration.
    public partial class Initial : Migration
    {
        // Creates the Identity tables (AspNet*), the domain tables
        // (Movies, Photos, Comments, Ratings), seeds the catalogue,
        // and builds the supporting indexes.
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.CreateTable(
                name: "AspNetRoles",
                columns: table => new
                {
                    Id = table.Column<int>(nullable: false)
                        .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
                    Name = table.Column<string>(maxLength: 256, nullable: true),
                    NormalizedName = table.Column<string>(maxLength: 256, nullable: true),
                    ConcurrencyStamp = table.Column<string>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_AspNetRoles", x => x.Id);
                });
            migrationBuilder.CreateTable(
                name: "AspNetUsers",
                columns: table => new
                {
                    Id = table.Column<int>(nullable: false)
                        .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
                    UserName = table.Column<string>(maxLength: 256, nullable: true),
                    NormalizedUserName = table.Column<string>(maxLength: 256, nullable: true),
                    Email = table.Column<string>(maxLength: 256, nullable: true),
                    NormalizedEmail = table.Column<string>(maxLength: 256, nullable: true),
                    EmailConfirmed = table.Column<bool>(nullable: false),
                    PasswordHash = table.Column<string>(nullable: true),
                    SecurityStamp = table.Column<string>(nullable: true),
                    ConcurrencyStamp = table.Column<string>(nullable: true),
                    PhoneNumber = table.Column<string>(nullable: true),
                    PhoneNumberConfirmed = table.Column<bool>(nullable: false),
                    TwoFactorEnabled = table.Column<bool>(nullable: false),
                    LockoutEnd = table.Column<DateTimeOffset>(nullable: true),
                    LockoutEnabled = table.Column<bool>(nullable: false),
                    AccessFailedCount = table.Column<int>(nullable: false)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_AspNetUsers", x => x.Id);
                });
            migrationBuilder.CreateTable(
                name: "Movies",
                columns: table => new
                {
                    Id = table.Column<int>(nullable: false)
                        .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
                    Name = table.Column<string>(nullable: true),
                    Country = table.Column<string>(nullable: true),
                    Year = table.Column<int>(nullable: false),
                    Genre = table.Column<string>(nullable: true),
                    PictureUrl = table.Column<string>(nullable: true),
                    Rating = table.Column<decimal>(nullable: false),
                    Producer = table.Column<string>(nullable: true),
                    Description = table.Column<string>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_Movies", x => x.Id);
                });
            migrationBuilder.CreateTable(
                name: "AspNetRoleClaims",
                columns: table => new
                {
                    Id = table.Column<int>(nullable: false)
                        .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
                    RoleId = table.Column<int>(nullable: false),
                    ClaimType = table.Column<string>(nullable: true),
                    ClaimValue = table.Column<string>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_AspNetRoleClaims", x => x.Id);
                    table.ForeignKey(
                        name: "FK_AspNetRoleClaims_AspNetRoles_RoleId",
                        column: x => x.RoleId,
                        principalTable: "AspNetRoles",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Cascade);
                });
            migrationBuilder.CreateTable(
                name: "AspNetUserClaims",
                columns: table => new
                {
                    Id = table.Column<int>(nullable: false)
                        .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
                    UserId = table.Column<int>(nullable: false),
                    ClaimType = table.Column<string>(nullable: true),
                    ClaimValue = table.Column<string>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_AspNetUserClaims", x => x.Id);
                    table.ForeignKey(
                        name: "FK_AspNetUserClaims_AspNetUsers_UserId",
                        column: x => x.UserId,
                        principalTable: "AspNetUsers",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Cascade);
                });
            migrationBuilder.CreateTable(
                name: "AspNetUserLogins",
                columns: table => new
                {
                    LoginProvider = table.Column<string>(nullable: false),
                    ProviderKey = table.Column<string>(nullable: false),
                    ProviderDisplayName = table.Column<string>(nullable: true),
                    UserId = table.Column<int>(nullable: false),
                    // ExpiresIn is a custom column beyond the default Identity
                    // login schema (declared on the UserLogin entity).
                    ExpiresIn = table.Column<int>(nullable: false)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_AspNetUserLogins", x => new { x.LoginProvider, x.ProviderKey });
                    table.ForeignKey(
                        name: "FK_AspNetUserLogins_AspNetUsers_UserId",
                        column: x => x.UserId,
                        principalTable: "AspNetUsers",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Cascade);
                });
            migrationBuilder.CreateTable(
                name: "AspNetUserRoles",
                columns: table => new
                {
                    UserId = table.Column<int>(nullable: false),
                    RoleId = table.Column<int>(nullable: false),
                    // NOTE(review): the RoleId1/UserId1 shadow columns suggest
                    // duplicated navigations on the UserRole entity — confirm
                    // the mappings rather than keeping the extra FKs.
                    RoleId1 = table.Column<int>(nullable: true),
                    UserId1 = table.Column<int>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_AspNetUserRoles", x => new { x.UserId, x.RoleId });
                    table.ForeignKey(
                        name: "FK_AspNetUserRoles_AspNetRoles_RoleId",
                        column: x => x.RoleId,
                        principalTable: "AspNetRoles",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Cascade);
                    table.ForeignKey(
                        name: "FK_AspNetUserRoles_AspNetRoles_RoleId1",
                        column: x => x.RoleId1,
                        principalTable: "AspNetRoles",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Restrict);
                    table.ForeignKey(
                        name: "FK_AspNetUserRoles_AspNetUsers_UserId",
                        column: x => x.UserId,
                        principalTable: "AspNetUsers",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Cascade);
                    table.ForeignKey(
                        name: "FK_AspNetUserRoles_AspNetUsers_UserId1",
                        column: x => x.UserId1,
                        principalTable: "AspNetUsers",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Restrict);
                });
            migrationBuilder.CreateTable(
                name: "AspNetUserTokens",
                columns: table => new
                {
                    UserId = table.Column<int>(nullable: false),
                    LoginProvider = table.Column<string>(nullable: false),
                    Name = table.Column<string>(nullable: false),
                    Value = table.Column<string>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_AspNetUserTokens", x => new { x.UserId, x.LoginProvider, x.Name });
                    table.ForeignKey(
                        name: "FK_AspNetUserTokens_AspNetUsers_UserId",
                        column: x => x.UserId,
                        principalTable: "AspNetUsers",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Cascade);
                });
            migrationBuilder.CreateTable(
                name: "Comments",
                columns: table => new
                {
                    Id = table.Column<int>(nullable: false)
                        .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
                    Message = table.Column<string>(nullable: true),
                    MovieId = table.Column<int>(nullable: true),
                    UserId = table.Column<int>(nullable: true),
                    Date = table.Column<string>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_Comments", x => x.Id);
                    table.ForeignKey(
                        name: "FK_Comments_Movies_MovieId",
                        column: x => x.MovieId,
                        principalTable: "Movies",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Restrict);
                    table.ForeignKey(
                        name: "FK_Comments_AspNetUsers_UserId",
                        column: x => x.UserId,
                        principalTable: "AspNetUsers",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Cascade);
                });
            migrationBuilder.CreateTable(
                name: "Photos",
                columns: table => new
                {
                    Id = table.Column<int>(nullable: false)
                        .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
                    PictureUrl = table.Column<string>(nullable: true),
                    MovieId = table.Column<int>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_Photos", x => x.Id);
                    table.ForeignKey(
                        name: "FK_Photos_Movies_MovieId",
                        column: x => x.MovieId,
                        principalTable: "Movies",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Cascade);
                });
            migrationBuilder.CreateTable(
                name: "Ratings",
                columns: table => new
                {
                    Id = table.Column<int>(nullable: false)
                        .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
                    Value = table.Column<decimal>(nullable: false),
                    MovieId = table.Column<int>(nullable: true),
                    UserId = table.Column<int>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_Ratings", x => x.Id);
                    table.ForeignKey(
                        name: "FK_Ratings_Movies_MovieId",
                        column: x => x.MovieId,
                        principalTable: "Movies",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Restrict);
                    table.ForeignKey(
                        name: "FK_Ratings_AspNetUsers_UserId",
                        column: x => x.UserId,
                        principalTable: "AspNetUsers",
                        principalColumn: "Id",
                        onDelete: ReferentialAction.Cascade);
                });
            // Seed data generated from the HasData calls in ApplicationContext.
            migrationBuilder.InsertData(
                table: "Movies",
                columns: new[] { "Id", "Country", "Description", "Genre", "Name", "PictureUrl", "Producer", "Rating", "Year" },
                values: new object[,]
                {
                    { 1, "USA", "An epic adventure takes place, both on our planet, and in the fictional Kingdom of the Gods of Asgard. In the center of history is the Mighty Thor, a strong but arrogant warrior, whose reckless actions, the revival of the ancient war in Asgard. The Torah is sent into exile to the Earth, devoid of power, and observance of ordinary people, as punishment ...", "Fantasy", "Thor", "https://media.kg-portal.ru/movies/t/thor/posters/thor_7.jpg", "<NAME>", 0m, 2011 },
                    { 2, "France", "Yesterday she was just a sexy blonde, and today is the most dangerous and deadly creature on the planet with supernatural abilities and intelligence. The fact that recently the best minds of the world have considered fantastic theories, it has become a reality. And now with production, it will become a hunter. Her name is Lucy...", "Thriller", "Lucy", "http://lostfilm.info/images/poster/545/5447501.jpg", "<NAME>", 0m, 2014 },
                    { 3, "UK", "This film tells about a man who, after awakening from a coma, discovers that his personality is appropriated to another, and understands that no one, even his own wife, believes him. And then, with the help of an unknown young woman-taxi driver, the hero rushes to prove who he is.", "Thriller", "Unknown", "https://www.movieposter.com/posters/archive/main/119/MPW-59897", "<NAME>", 0m, 2011 },
                    { 4, "USA", "Raging after an unsuccessful attempt to sell drugs, self-taught lawyer <NAME>, posing as a graduate of Harvard, gets to interview one of the best lawyers for New York deals, Harvey Spectrum.", "Drama", "Suits", "https://st.kp.yandex.net/im/poster/2/4/0/kinopoisk.ru-Suits-2405451.jpg", "<NAME>", 0m, 2011 },
                    { 5, "USA", "A few months after the murder of the daughter of <NAME>, the criminals were never found. The desperate woman decides to take a bold step, renting three billboards at the entrance to the city with a message to the authoritative head of the police, <NAME>. When the deputy sheriff, an infantile mama's son with a violent inclination, officer Dixon is involved in the situation, the struggle between Mildred and the city authorities is only aggravated.", "Drama", "Three Billboards Outside Ebbing, Missouri", "https://fanart.tv/fanart/movies/359940/movieposter/three-billboards-outside-ebbing-missouri-5a84165c374d4.jpg", "<NAME>", 0m, 2017 },
                    { 6, "Sweden", "Who is he, this Uwe? Aging thorough grumbler, reaching neighbors with endless trailers. He falls into a rage at the sight of a garbage or an improperly standing machine. And the light is on what light stands a frivolous family of new settlers, in which the father and nail can not drive. But despite all of the above, Uwe can do everything: masterfully cram his saab between the porch and the mailbox, repair the battery, puncture the discount from the very tight-fisted shopkeeper.", "Drama", "The second life of Uwe", "http://cinecinema.org/uploads/posts/2016-07/1469737425_en-man-som-heter-ove.jpg", "<NAME>", 0m, 2015 }
                });
            migrationBuilder.InsertData(
                table: "Photos",
                columns: new[] { "Id", "MovieId", "PictureUrl" },
                values: new object[,]
                {
                    { 1, 1, "https://www.kino-teatr.ru/movie/kadr/48244/208943.jpg" },
                    { 22, 6, "https://www.kino-teatr.ru/movie/kadr/120115/665899.jpg" },
                    { 21, 6, "https://www.kino-teatr.ru/movie/kadr/120115/665900.jpg" },
                    { 20, 5, "https://www.kino-teatr.ru/movie/kadr/125987/751749.jpg" },
                    { 19, 5, "https://www.kino-teatr.ru/movie/kadr/125987/751752.jpg" },
                    { 18, 5, "https://www.kino-teatr.ru/movie/kadr/125987/751750.jpg" },
                    { 17, 5, "https://www.kino-teatr.ru/movie/kadr/125987/751751.jpg" },
                    { 16, 4, "https://www.kino-teatr.ru/movie/kadr/96533/554510.jpg" },
                    { 15, 4, "https://www.kino-teatr.ru/movie/kadr/96533/554512.jpg" },
                    { 14, 4, "https://www.kino-teatr.ru/movie/kadr/96533/554509.jpg" },
                    { 13, 4, "https://www.kino-teatr.ru/movie/kadr/96533/245756.jpg" },
                    { 12, 3, "https://www.kino-teatr.ru/movie/kadr/91794/190670.jpg" },
                    { 11, 3, "https://www.kino-teatr.ru/movie/kadr/91794/190673.jpg" },
                    { 10, 3, "https://www.kino-teatr.ru/movie/kadr/91794/190674.jpg" },
                    { 9, 3, "https://www.kino-teatr.ru/movie/kadr/91794/190675.jpg" },
                    { 8, 2, "https://www.kino-teatr.ru/movie/kadr/108477/739113.jpg" },
                    { 7, 2, "https://www.kino-teatr.ru/movie/kadr/108477/739111.jpg" },
                    { 6, 2, "https://www.kino-teatr.ru/movie/kadr/108477/739110.jpg" },
                    { 5, 2, "https://www.kino-teatr.ru/movie/kadr/108477/739108.jpg" },
                    { 4, 1, "https://www.kino-teatr.ru/movie/kadr/48244/208940.jpg" },
                    { 3, 1, "https://www.kino-teatr.ru/movie/kadr/48244/208942.jpg" },
                    { 2, 1, "https://www.kino-teatr.ru/movie/kadr/48244/572581.jpg" },
                    { 23, 6, "https://www.kino-teatr.ru/movie/kadr/120115/665898.jpg" },
                    { 24, 6, "https://www.kino-teatr.ru/movie/kadr/120115/665897.jpg" }
                });
            migrationBuilder.CreateIndex(
                name: "IX_AspNetRoleClaims_RoleId",
                table: "AspNetRoleClaims",
                column: "RoleId");
            migrationBuilder.CreateIndex(
                name: "RoleNameIndex",
                table: "AspNetRoles",
                column: "NormalizedName",
                unique: true,
                filter: "[NormalizedName] IS NOT NULL");
            migrationBuilder.CreateIndex(
                name: "IX_AspNetUserClaims_UserId",
                table: "AspNetUserClaims",
                column: "UserId");
            migrationBuilder.CreateIndex(
                name: "IX_AspNetUserLogins_UserId",
                table: "AspNetUserLogins",
                column: "UserId");
            migrationBuilder.CreateIndex(
                name: "IX_AspNetUserRoles_RoleId",
                table: "AspNetUserRoles",
                column: "RoleId");
            migrationBuilder.CreateIndex(
                name: "IX_AspNetUserRoles_RoleId1",
                table: "AspNetUserRoles",
                column: "RoleId1");
            migrationBuilder.CreateIndex(
                name: "IX_AspNetUserRoles_UserId1",
                table: "AspNetUserRoles",
                column: "UserId1");
            migrationBuilder.CreateIndex(
                name: "EmailIndex",
                table: "AspNetUsers",
                column: "NormalizedEmail");
            migrationBuilder.CreateIndex(
                name: "UserNameIndex",
                table: "AspNetUsers",
                column: "NormalizedUserName",
                unique: true,
                filter: "[NormalizedUserName] IS NOT NULL");
            migrationBuilder.CreateIndex(
                name: "IX_Comments_MovieId",
                table: "Comments",
                column: "MovieId");
            migrationBuilder.CreateIndex(
                name: "IX_Comments_UserId",
                table: "Comments",
                column: "UserId");
            migrationBuilder.CreateIndex(
                name: "IX_Photos_MovieId",
                table: "Photos",
                column: "MovieId");
            migrationBuilder.CreateIndex(
                name: "IX_Ratings_MovieId",
                table: "Ratings",
                column: "MovieId");
            migrationBuilder.CreateIndex(
                name: "IX_Ratings_UserId",
                table: "Ratings",
                column: "UserId");
        }

        // Reverts the migration: dependent tables are dropped before their
        // principals (claims/logins/roles/tokens before Roles/Users,
        // Comments/Photos/Ratings before Movies/Users).
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.DropTable(
                name: "AspNetRoleClaims");
            migrationBuilder.DropTable(
                name: "AspNetUserClaims");
            migrationBuilder.DropTable(
                name: "AspNetUserLogins");
            migrationBuilder.DropTable(
                name: "AspNetUserRoles");
            migrationBuilder.DropTable(
                name: "AspNetUserTokens");
            migrationBuilder.DropTable(
                name: "Comments");
            migrationBuilder.DropTable(
                name: "Photos");
            migrationBuilder.DropTable(
                name: "Ratings");
            migrationBuilder.DropTable(
                name: "AspNetRoles");
            migrationBuilder.DropTable(
                name: "Movies");
            migrationBuilder.DropTable(
                name: "AspNetUsers");
        }
    }
}
<file_sep>/task_reactredux/task1/src/views/Menu/index.js
import React from 'react';
import PropTypes from 'prop-types';
import { withStyles, Tab } from '@material-ui/core';
import AppBar from '@material-ui/core/AppBar';
import Tabs from '@material-ui/core/Tabs';
import styles from './styles';
import { Link } from 'react-router-dom';
import { applicationRoutes } from '../../Constants';
const Menu = ({ value, showMenu, handleChange, classes }) => {
return (
<div className={classes.root}>
{showMenu && <AppBar position="static">
<Tabs
value={value}
onChange = {handleChange} >
<Tab label='О нас' component={Link} to={applicationRoutes.aboutRoute} />
<Tab label='Счетчики' component={Link} to={applicationRoutes.countersRoute} />
<Tab label='Войти' component={Link} to={applicationRoutes.loginRoute} />
<Tab label='Войти c Redux' component={Link} to={applicationRoutes.loginReduxRoute} />
<Tab label='Войти c Redux-form' component={Link} to={applicationRoutes.loginReduxFormRoute} />
</Tabs>
</AppBar>
}
</div>);
}
Menu.propTypes = {
classes: PropTypes.object.isRequired,
handleChange: PropTypes.func
};
export default withStyles(styles)(Menu);<file_sep>/task_netcore/task2/task2/Services/DataService.cs
using AutoMapper;
using Microsoft.Extensions.Configuration;
using Newtonsoft.Json;
using System.Net;
using System.Threading.Tasks;
using task2.Interfaces;
using task2.Models;
namespace task2.Services
{
    /// <summary>
    /// Fetches starship data from the external endpoint configured under "url"
    /// and maps it onto the API's response model.
    /// </summary>
    public class DataService : IDataService
    {
        // Configured entry point of the external paged feed. Readonly so the
        // service stays usable across repeated calls (see GetInfoAsync fix).
        private readonly string _url;
        private readonly IMapper _mapper;

        public DataService(IMapper mapper, IConfiguration configuration)
        {
            _mapper = mapper;
            _url = configuration["url"];
        }

        /// <summary>Synchronously downloads the first page and maps it.</summary>
        public ResponseModel GetInfo()
        {
            ForeignModel foreignModel;
            using (var client = new WebClient())
            {
                var content = client.DownloadString(_url);
                foreignModel = JsonConvert.DeserializeObject<ForeignModel>(content);
            }
            var responseModel = _mapper.Map<ForeignModel, ResponseModel>(foreignModel);
            SetIndexField(responseModel);
            return responseModel;
        }

        /// <summary>
        /// Asynchronously walks every page of the feed (following the Next
        /// links) and returns the merged result.
        /// </summary>
        public async Task<ResponseModel> GetInfoAsync()
        {
            var foreignModel = new ForeignModel();
            using (var client = new WebClient())
            {
                // BUG FIX: the original advanced the url *field* through the
                // pages and left it null afterwards, so any later call to
                // GetInfo/GetInfoAsync started from null and returned nothing.
                // A local cursor keeps the configured URL intact.
                var next = _url;
                while (next != null)
                {
                    var nextContent = await client.DownloadStringTaskAsync(next);
                    ForeignModel nextInfo = JsonConvert.DeserializeObject<ForeignModel>(nextContent);
                    next = nextInfo.Next;
                    foreignModel.Results.AddRange(nextInfo.Results);
                }
                foreignModel.Count = foreignModel.Results.Count;
            }
            var responseModel = _mapper.Map<ForeignModel, ResponseModel>(foreignModel);
            SetIndexField(responseModel);
            return responseModel;
        }

        /// <summary>Assigns a 1-based display index to each result row.</summary>
        private void SetIndexField(ResponseModel model)
        {
            for (var i = 0; i < model.Results.Count; i++)
            {
                model.Results[i].Index = i + 1;
            }
        }
    }
}
import React from 'react';
import { Paper, TextField, Button, FormControl, FormHelperText, withStyles } from '@material-ui/core';
import { Field, reduxForm } from 'redux-form';
import PropTypes from 'prop-types';
import styles from './styles';
import { applicationRoutes } from '../../../Constants';
const renderField = ({ input, label, type, meta: { touched, error }, margin }) => (
<div>
<FormControl>
<div>
<TextField label={label} type={type} error={error && touched} margin={margin} {...input} />
<div>
{touched &&
(error && <FormHelperText>{error}</FormHelperText>)}
</div>
</div>
</FormControl>
</div>
);
let AccountForm = ({ handleSubmit, classes, currentPath, errorMessage, haveAccountErrors }) => {
return (
<div>
<Paper className={classes.paperContainer} >
<form onSubmit={handleSubmit} className={classes.formContainer} >
<div className={classes.fieldContainer}>
<Field name="userName" component={renderField} label="Name" type="text" margin="normal" />
<Field name="password" component={renderField} label="Password" type="<PASSWORD>" margin="normal" />
</div>
<Button type="submit" variant="outlined">
{currentPath === applicationRoutes.registerFormRoute && 'Register'}
{currentPath === applicationRoutes.loginFormRoute && 'Login'}
</Button>
</form>
</Paper>
{
haveAccountErrors && <Paper className={classes.errorMessage}>
{errorMessage}
</Paper>
}
</div>
);
}
AccountForm.propTypes = {
currentPath: PropTypes.string,
handleSubmit: PropTypes.func,
classes: PropTypes.object.isRequired
};
AccountForm = reduxForm({
form: 'register'
})(AccountForm);
export default withStyles(styles)(AccountForm);<file_sep>/task_netcore/task4/WebApi/task4/task4.DAL/Entities/Comment.cs
namespace task4.DAL.Entities
{
/// <summary>
/// A user's comment attached to a movie.
/// </summary>
public class Comment
{
    public int Id { get; set; }
    // Comment body entered by the user.
    public string Message { get; set; }
    // Navigation properties are virtual so EF Core can lazy-load them.
    public virtual Movie Movie { get; set; }
    public virtual User User { get; set; }
    // NOTE(review): stored as a string, not DateTime — confirm the expected format.
    public string Date { get; set; }
}
}
<file_sep>/task_netcore/task4/Client/src/modules/MovieSearch/views/NotFound/styles.js
// JSS styles for the NotFound panel (consumed via Material-UI withStyles).
const styles = theme => ({
    notfound: {
        width: '600px',
        margin: '20px auto',
        padding: '20px'
    }
});
export default styles; <file_sep>/task_netcore/task4/Client/src/modules/MovieInfo/views/styles.js
// JSS styles for the MovieInfo view. Media queries shrink the layout at
// 1070px and 650px viewport widths.
const styles = theme => ({
    movieInfoContainer: {
        display: 'flex',
        justifyContent: 'center'
    },
    // Main card holding poster + textual info.
    card: {
        margin: '20px',
        paddingRight: '10px',
        width: '1000px',
        '@media (max-width: 1070px)': {
            width: '600px'
        },
        '@media (max-width: 650px)': {
            width: '300px',
            paddingRight: '0'
        }
    },
    // Poster image floated left of the description.
    picture: {
        maxWidth: '300px',
        float: 'left',
        marginRight: '20px',
        '@media (max-width: 650px)': {
            width: '100%'
        }
    },
    name: {
        margin: '10px auto',
        fontSize: '28px',
        color: '#696969'
    },
    addInfoContainer: {
        marginBottom: '10px'
    },
    addInfo: {
        fontSize: '18px',
        color: '#696969',
        textAlign: 'left',
        '@media (max-width: 650px)': {
            fontSize: '14px',
            padding: '0 10px'
        }
    },
    description: {
        fontSize: '14px',
        color: '#696969',
        textAlign: 'left',
        '@media (max-width: 650px)': {
            padding: '0 10px'
        }
    },
    // Wrapping flex row of movie photos.
    photos: {
        display: 'flex',
        flexWrap: 'wrap',
        '@media (max-width: 1070px)': {
            width: '600px'
        },
        '@media (max-width: 650px)': {
            width: '300px',
        }
    }
});
export default styles; <file_sep>/task_netcore/task4/Client/src/modules/Comments/views/CommentContent/styles.js
// JSS styles for a single comment entry (avatar + author + message + date).
const styles = theme => ({
    commentContentContainer: {
        maxWidth: '980px',
        margin: '10px auto',
        padding: '10px',
        textAlign: 'left',
        '@media (max-width: 1070px)': {
            width: '580px'
        },
        '@media (max-width: 650px)': {
            width: '280px'
        }
    },
    userName: {
        color: '#3f51b5'
    },
    message: {
        color: '#696969',
        fontSize: '18px'
    },
    date: {
        color: '#696969'
    },
    // Circular avatar floated left of the comment text.
    avatar: {
        float: 'left',
        padding: '5px',
        marginRight: '10px',
        backgroundColor: '#3f51b5'
    },
    icon: {
        fontSize: '35px'
    }
});
export default styles;<file_sep>/task_javascript/task5/README.md
# ItechArt-Lab-2018
5_1) https://jsbin.com/vazuti/edit?js,console
5_2) https://jsbin.com/quzozo/edit?js,console
5_3) https://jsbin.com/boxiyod/edit?js,console
5_4) https://jsbin.com/kohejec/edit?js,console<file_sep>/task_javascript/task1/task1.js
(function task1_1() {
"use strict";
Array.prototype.map = function (projectionFunction) {
let arr = [];
for (let i = 0; i < this.length; i++) {
arr.push(projectionFunction.call(this, this[i], i));
}
return arr;
};
console.log(JSON.stringify([1, 2, 3].map(
(x) => {
return x + 1;
})) === "[2,3,4]");
console.log(JSON.stringify([1, 2, 3].map(
(x) => {
return x + 10;
})) === "[11,12,13]");
})();
(function task1_2() {
    "use strict";
    // Exercise: project a list of release objects down to {id, title} pairs.
    let newReleases = [{
        "id": 70111470,
        "title": "Die Hard",
        "boxart": "http://cdn-0.nflximg.com/images/2891/DieHard.jpg",
        "uri": "http://api.netflix.com/catalog/titles/movies/70111470",
        "rating": [4.0],
        "bookmark": []
    }, {
        "id": 654356453,
        "title": "Bad Boys",
        "boxart": "http://cdn-0.nflximg.com/images/2891/BadBoys.jpg",
        "uri": "http://api.netflix.com/catalog/titles/movies/70111470",
        "rating": [5.0],
        "bookmark": [{ id: 432534, time: 65876586 }]
    }, {
        "id": 65432445,
        "title": "The Chamber",
        "boxart": "http://cdn-0.nflximg.com/images/2891/TheChamber.jpg",
        "uri": "http://api.netflix.com/catalog/titles/movies/70111470",
        "rating": [4.0],
        "bookmark": []
    }, {
        "id": 675465,
        "title": "Fracture",
        "boxart": "http://cdn-0.nflximg.com/images/2891/Fracture.jpg",
        "uri": "http://api.netflix.com/catalog/titles/movies/70111470",
        "rating": [5.0],
        "bookmark": [{ id: 432534, time: 65876586 }]
    }];
    // Keep only the id and title of each release.
    newReleases = newReleases.map((rel) => {
        let obj = {
            "id": rel.id,
            "title": rel.title
        };
        return obj;
    });
    console.log(JSON.stringify(newReleases));
})();
(function task1_3() {
"use strict";
Array.prototype.filter = function (predicateFunction) {
let arr = [];
for (let i = 0; i < this.length; i++) {
if (predicateFunction.call(this, this[i], i)) {
arr.push(this[i]);
}
}
return arr;
};
console.log(JSON.stringify([1, 2, 3].filter(
(fil) => {
return fil > 2;
})) === "[3]");
console.log(JSON.stringify([1, 2, 3].filter(
(fil) => {
return fil > 1;
})) === "[2,3]");
})();
(function task1_4() {
    "use strict";
    // Exercise: collect ids of all releases rated exactly 5.0.
    let newReleases = [{
        "id": 70111470,
        "title": "Die Hard",
        "boxart": "http://cdn-0.nflximg.com/images/2891/DieHard.jpg",
        "uri": "http://api.netflix.com/catalog/titles/movies/70111470",
        "rating": [4.0],
        "bookmark": []
    }, {
        "id": 654356453,
        "title": "Bad Boys",
        "boxart": "http://cdn-0.nflximg.com/images/2891/BadBoys.jpg",
        "uri": "http://api.netflix.com/catalog/titles/movies/70111470",
        "rating": [5.0],
        "bookmark": [{ id: 432534, time: 65876586 }]
    }, {
        "id": 65432445,
        "title": "The Chamber",
        "boxart": "http://cdn-0.nflximg.com/images/2891/TheChamber.jpg",
        "uri": "http://api.netflix.com/catalog/titles/movies/70111470",
        "rating": [4.0],
        "bookmark": []
    }, {
        "id": 675465,
        "title": "Fracture",
        "boxart": "http://cdn-0.nflximg.com/images/2891/Fracture.jpg",
        "uri": "http://api.netflix.com/catalog/titles/movies/70111470",
        "rating": [5.0],
        "bookmark": [{ id: 432534, time: 65876586 }]
    }];
    // Filter to 5.0-rated releases, then project to bare ids.
    newReleases = newReleases.filter((rel) => {
        return rel.rating.includes(5.0);
    })
        .map((rel) => {
            return rel.id;
        });
    console.log(JSON.stringify(newReleases));
})();
(function task1_5() {
    "use strict";
    // Exercise: flatten nested movie lists and pick the 150x200 box art
    // for each video. ("<NAME>" below is a sanitization placeholder left
    // in the exercise data — harmless here.)
    let movieLists = [{
        name: "<NAME>",
        videos: [{
            "id": 70111470,
            "title": "Die Hard",
            "boxarts": [{
                width: 150,
                height: 200,
                url: "http://cdn-0.nflximg.com/images/2891/DieHard150.jpg"
            }, {
                width: 200,
                height: 200,
                url: "http://cdn-0.nflximg.com/images/2891/DieHard200.jpg"
            }],
            "url": "http://api.netflix.com/catalog/titles/movies/70111470",
            "rating": 4.0,
            "bookmark": []
        }, {
            "id": 654356453,
            "title": "Bad Boys",
            "boxarts": [{
                width: 200,
                height: 200,
                url: "http://cdn-0.nflximg.com/images/2891/BadBoys200.jpg"
            }, {
                width: 150,
                height: 200,
                url: "http://cdn-0.nflximg.com/images/2891/BadBoys150.jpg"
            }],
            "url": "http://api.netflix.com/catalog/titles/movies/70111470",
            "rating": 5.0,
            "bookmark": [{ id: 432534, time: 65876586 }]
        }]
    }, {
        name: "New Releases",
        videos: [{
            "id": 65432445,
            "title": "The Chamber",
            "boxarts": [{
                width: 150,
                height: 200,
                url: "http://cdn-0.nflximg.com/images/2891/TheChamber150.jpg"
            }, {
                width: 200,
                height: 200,
                url: "http://cdn-0.nflximg.com/images/2891/TheChamber200.jpg"
            }],
            "url": "http://api.netflix.com/catalog/titles/movies/70111470",
            "rating": 4.0,
            "bookmark": []
        }, {
            "id": 675465,
            "title": "Fracture",
            "boxarts": [{
                width: 200,
                height: 200,
                url: "http://cdn-0.nflximg.com/images/2891/Fracture200.jpg"
            }, {
                width: 150,
                height: 200,
                url: "http://cdn-0.nflximg.com/images/2891/Fracture150.jpg"
            }, {
                width: 300,
                height: 200,
                url: "http://cdn-0.nflximg.com/images/2891/Fracture300.jpg"
            }],
            "url": "http://api.netflix.com/catalog/titles/movies/70111470",
            "rating": 5.0,
            "bookmark": [{ id: 432534, time: 65876586 }]
        }]
    }];
    movieLists = movieLists.map((x) => {
        return x.videos;
    });
    // Flatten the two video lists into one.
    let movie = movieLists[0].concat(movieLists[1]);
    movie = movie.map((mov) => {
        // Strict equality (===) instead of loose == — values are numbers.
        let boxart = mov.boxarts.filter((size) => {
            return size.width === 150 && size.height === 200;
        });
        // Every video in this data set has a 150x200 box art, so [0] is safe.
        let obj = {
            "id": mov.id,
            "title": mov.title,
            "boxart": boxart[0].url
        };
        return obj;
    });
    console.log(JSON.stringify(movie));
})();
(function task1_6() {
"use strict";
Array.prototype.reduce = function (combiner, initialValue) {
let res = initialValue;
if (res === undefined) { res = null; }
for (let i = 0; i < this.length; i++) {
res = combiner.call(null, res, this[i], i, this);
}
return res;
};
console.log([1, 2, 3].reduce((memo, item) => {
return memo + item;
}) === 6);
console.log([1, 2, 3].reduce((memo, item) => {
return memo + item;
}, 10) === 16);
})();
(function task1_7() {
    "use strict";
    // Highest rating via reduce: keep whichever of the running best and
    // the current rating is larger.
    let ratings = [2, 3, 1, 4, 5];
    let max = ratings.reduce((best, rating) => {
        return rating > best ? rating : best;
    });
    console.log(max);
})();
(function task1_8() {
    "use strict";
    let boxarts = [{
        width: 200,
        height: 200,
        url: "http://cdn-0.nflximg.com/images/2891/Fracture200.jpg"
    }, {
        width: 150,
        height: 200,
        url: "http://cdn-0.nflximg.com/images/2891/Fracture150.jpg"
    }, {
        width: 300,
        height: 200,
        url: "http://cdn-0.nflximg.com/images/2891/Fracture300.jpg"
    }, {
        width: 425,
        height: 150,
        url: "http://cdn-0.nflximg.com/images/2891/Fracture425.jpg"
    }];
    // Largest box art by pixel area. The previous version declared
    // prev_sq but never assigned it (the guard `prev_sq !== undefined`
    // was always false), so the accumulator was ignored and the last
    // element always won. Now the previous winner's area is computed
    // explicitly; the falsy check also tolerates a null seed from the
    // custom reduce defined in task1_6.
    let url_max_sq = boxarts.reduce((p, c) => {
        let curr_sq = c.width * c.height;
        let prev_sq = p ? p.width * p.height : -Infinity;
        return prev_sq > curr_sq ? p : c;
    }).url;
    console.log(url_max_sq);
})();
(function task1_9() {
    "use strict";
    // Exercise: turn a list of {id, title} into an id -> title lookup map.
    let videos = [{
        "id": 65432445,
        "title": "The Chamber"
    }, {
        "id": 675465,
        "title": "Fracture"
    }, {
        "id": 70111470,
        "title": "Die Hard"
    }, {
        "id": 654356453,
        "title": "Bad Boys"
    }];
    // Accumulate into an initially empty object: key = id, value = title.
    videos = videos.reduce((p, c) => {
        p[c.id] = c.title;
        return p;
    }, {});
    console.log(JSON.stringify(videos));
})();
<file_sep>/task_javascript/task4/README.md
# ItechArt-Lab-2018
4_1) https://jsbin.com/qifavuk/edit?js,console
4_2) https://jsbin.com/rinasaq/edit?js,console
4_3) https://jsbin.com/wipahef/edit?js,console
4_4) https://jsbin.com/quyarup/edit?js,console<file_sep>/task_netcore/task4/Client/src/modules/Photos/containers/PhotosContainer.js
import React from 'react';
import Photos from '../views';
class PhotoContainer extends React.Component {
render() {
const { photos } = this.props;
return (
<Photos
photos={photos} />
);
}
}
export default PhotoContainer;<file_sep>/task_netcore/task3/task3/Data/MoviesContext.cs
using Microsoft.EntityFrameworkCore;
using task3.Data.Entities;
namespace task3.Data
{
    /// <summary>
    /// EF Core context exposing the Movies set and seeding two sample rows.
    /// </summary>
    public class MoviesContext : DbContext
    {
        public DbSet<Movie> Movies { get; set; }
        public MoviesContext(DbContextOptions<MoviesContext> options)
            : base(options)
        { }
        protected override void OnModelCreating(ModelBuilder modelBuilder)
        {
            // Seed data applied by migrations.
            // NOTE(review): "<NAME>" looks like a redacted placeholder —
            // confirm the intended producer names for the seed rows.
            modelBuilder.Entity<Movie>().HasData(
                new Movie {
                    Id = 1,
                    Name = "Suits",
                    Country = "USA",
                    Producer = "<NAME>",
                    Year = 2011
                },
                new Movie {
                    Id = 2,
                    Name = "Thor",
                    Country = "USA",
                    Producer = "<NAME>",
                    Year = 2011
                }
            );
        }
    }
}
<file_sep>/task_netcore/task4/Client/src/modules/Comments/views/CommentForm/index.js
import React from 'react';
import PropTypes from 'prop-types';
import styles from './styles';
import SendIcon from '@material-ui/icons/Send';
import { Typography, withStyles, Card, Button, TextField } from '@material-ui/core';
// Presentational comment form: a required text field plus a send button.
// Submission and change handling are supplied by the container.
const CommentForm = ({ classes, message, onSubmit, onCommentChange }) => {
    return (
        <Card className={classes.commentFormContainer}>
            <Typography className={classes.title}>
                Tell your opinion about the film
            </Typography>
            <form onSubmit={onSubmit} className={classes.commentForm}>
                <TextField
                    onChange={onCommentChange}
                    value={message}
                    required
                    id='comment'
                    label='Comment' />
                <Button color='primary' variant='outlined' className={classes.button} type="submit">
                    <SendIcon className={classes.sendIcon} />
                </Button>
            </form>
        </Card>
    );
}
CommentForm.propTypes = {
    classes: PropTypes.object.isRequired,
    message: PropTypes.string,
    onSubmit: PropTypes.func,
    onCommentChange: PropTypes.func
};
export default withStyles(styles)(CommentForm);<file_sep>/task_reactredux/task1/src/reducers/FormReducer.js
import { HANDLE_SUBMIT } from '../actions/types';
// The form starts out not yet submitted.
const initialState = {
    wasSubmited: false
};

// Tracks whether the login form has been submitted at least once.
const formReducer = (state = initialState, action) => {
    if (action.type === HANDLE_SUBMIT) {
        return { ...state, wasSubmited: true };
    }
    return state;
};
export default (formReducer);<file_sep>/task_netcore/task4/Client/src/modules/MoviesList/actions/index.js
import { webApiRoutes } from '../../../Constants';
import axios from 'axios';
import { REQUESTED_MOVIES, REQUESTED_MOVIES_SUCCEEDED, REQUESTED_MOVIES_FAILED } from './types';
export const requestMovies = () => {
return {
type: REQUESTED_MOVIES
}
};
export const requestMoviesSuccess = (movies) => {
return {
type: REQUESTED_MOVIES_SUCCEEDED,
movies
}
};
export const requestMoviesError = (errorMessage) => {
return {
type: REQUESTED_MOVIES_FAILED,
errorMessage
}
};
export const getMovies = (dispatch) => {
dispatch(requestMovies());
return axios.get(webApiRoutes.loadMoviesRoute)
.then(response => {
dispatch(requestMoviesSuccess(response.data));
})
.catch(errors => {
dispatch(requestMoviesError(errors.response.data));
})
};<file_sep>/task_reactredux/task1/src/containers/MenuContainer.js
import React from 'react';
import { withRouter } from 'react-router-dom';
import Menu from '../views/Menu';
import { applicationRoutes } from '../Constants';
// Maps each application route to its tab-bar state:
// `value` = index of the highlighted tab (false = none),
// `showMenu` = whether the tab bar is rendered at all.
const tabsActiveLinkStates = {
    [applicationRoutes.aboutRoute]: {
        value: 0,
        showMenu: true
    },
    [applicationRoutes.countersRoute]: {
        value: 1,
        showMenu: true
    },
    [applicationRoutes.loginRoute]: {
        value: 2,
        showMenu: true
    },
    [applicationRoutes.loginReduxRoute]: {
        value: 3,
        showMenu: true
    },
    [applicationRoutes.loginReduxSuccessRoute]: {
        value: 3,
        showMenu: true
    },
    [applicationRoutes.loginReduxFormRoute]: {
        value: 4,
        showMenu: true
    },
    [applicationRoutes.loginReduxFormSuccessRoute]: {
        value: 4,
        showMenu: true
    },
    [applicationRoutes.startPageRoute]: {
        value: false,
        showMenu: true
    },
    [applicationRoutes.defaultRoute]: {
        value: false,
        showMenu: true
    },
    // The error page hides the menu entirely.
    [applicationRoutes.errorRoute]: {
        value: false,
        showMenu: false
    }
}
class MenuContainer extends React.Component {
constructor(props) {
super(props);
this.state = {
...(tabsActiveLinkStates[this.props.history.location.pathname] ||
tabsActiveLinkStates[applicationRoutes.errorRoute])
};
}
handleChange = (event, value) => {
this.setState({ value });
};
render() {
return (
<Menu
handleChange={this.handleChange}
value={this.state.value}
showMenu={this.state.showMenu} />
);
}
}
export default withRouter(MenuContainer);<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Models/RatingModel.cs
using System;
using System.Collections.Generic;
using System.Text;
namespace task4.BLL.Models
{
/// <summary>
/// Service-layer model for a single user's rating of a movie.
/// </summary>
public class RatingModel
{
    public int MovieId { get; set; }
    // Rating value; decimal to allow fractional averages downstream.
    public decimal Value { get; set; }
    public int UserId { get; set; }
}
}
<file_sep>/task_netcore/task4/Client/src/modules/Rating/actions/types.js
// Redux action type constants for the Rating module: error banner
// control, plus request/success/failure triples for the average rating,
// the current user's rating, and rating submission.
export const CLEAR_ERROR_MESSAGE = 'CLEAR_ERROR_MESSAGE';
export const SHOW_ERROR_MESSAGE = 'SHOW_ERROR_MESSAGE';
export const LOAD_USER_RATING = 'LOAD_USER_RATING';
export const REQUESTED_AVERAGE_RATING_SUCCEEDED = 'REQUESTED_AVERAGE_RATING_SUCCEEDED';
export const REQUESTED_AVERAGE_RATING_FAILED = 'REQUESTED_AVERAGE_RATING_FAILED';
export const REQUESTED_AVERAGE_RATING = 'REQUESTED_AVERAGE_RATING';
export const REQUESTED_USER_RATING = 'REQUESTED_USER_RATING';
export const REQUESTED_USER_RATING_SUCCEEDED = 'REQUESTED_USER_RATING_SUCCEEDED';
export const REQUESTED_USER_RATING_FAILED = 'REQUESTED_USER_RATING_FAILED';
export const REQUESTED_SEND_RATING = 'REQUESTED_SEND_RATING';
export const REQUESTED_SEND_RATING_FAILED = 'REQUESTED_SEND_RATING_FAILED';
export const REQUESTED_SEND_RATING_SUCCEEDED = 'REQUESTED_SEND_RATING_SUCCEEDED';<file_sep>/task_netcore/task4/WebApi/task4/task4.WEB/Controllers/CommentController.cs
using System;
using AutoMapper;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using task4.BLL.Interfaces;
using task4.BLL.Models;
using task4.WEB.Common;
using task4.WEB.Models;
namespace task4.WEB.Controllers
{
[Route("[controller]/[action]")]
public class CommentController : Controller
{
    private readonly ICommentService _commentService;
    private readonly IMapper _mapper;
    public CommentController(ICommentService commentService, IMapper mapper)
    {
        _commentService = commentService;
        _mapper = mapper;
    }
    /// <summary>
    /// Returns all comments for the movie with the given id,
    /// or 404 when the service yields null.
    /// </summary>
    [HttpGet("{id}")]
    public IActionResult GetComments(int id)
    {
        var comments = _commentService.GetCommentsByMovieId(id);
        if (comments == null)
        {
            return NotFound();
        }
        return Ok(comments);
    }
    /// <summary>
    /// Adds a comment for the authenticated user; user id and timestamp
    /// are set server-side.
    /// </summary>
    [Authorize]
    [HttpPost]
    public IActionResult AddComment([FromBody] CommentViewModel commentViewModel)
    {
        if (!ModelState.IsValid)
        {
            return BadRequest(ModelState);
        }
        var comment = _mapper.Map<CommentViewModel, CommentModel>(commentViewModel);
        comment.UserId = Convert.ToInt32(HttpContext.GetUserIdByHttpContext());
        // NOTE(review): DateTime.Now.ToString() is culture-sensitive —
        // the stored format depends on the server locale; confirm whether
        // an invariant/ISO-8601 format is expected by consumers.
        comment.Date = DateTime.Now.ToString();
        _commentService.AddComment(comment);
        return Ok(comment);
    }
}
}<file_sep>/task_netcore/task4/WebApi/task4/task4.DAL/Entities/User.cs
using Microsoft.AspNetCore.Identity;
using System.Collections.Generic;
namespace task4.DAL.Entities
{
/// <summary>
/// Application user with int keys. Also hosts the Identity type
/// specializations (Role, UserRole, claims, login, token) as nested
/// classes so the whole int-keyed identity model lives in one place.
/// </summary>
public class User: IdentityUser<int>
{
    public ICollection<Rating> Ratings { get; set; }
    public ICollection<Comment> Comments { get; set; }
    public class Role : IdentityRole<int>
    {
        public Role()
        {
        }
        public Role(string name) { Name = name; }
    }
    public class UserRole : IdentityUserRole<int>
    {
        // Virtual navigation properties for EF lazy loading.
        public virtual Role Role { get; set; }
        public virtual User User { get; set; }
    }
    public class UserClaim : IdentityUserClaim<int> { }
    public class UserLogin : IdentityUserLogin<int>
    {
        // Extra login metadata: token lifetime in seconds (presumably —
        // TODO confirm against the token issuing code).
        public int ExpiresIn { get; set; }
    }
    public class RoleClaim : IdentityRoleClaim<int> { }
    public class UserToken : IdentityUserToken<int> { }
}
}<file_sep>/task_reactredux/task1/src/containers/LoginReduxFormContainer.js
import React from 'react';
import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import { saveMailFromForm, savePasswordFromForm, loginClearState, loginReduxFormSuccess } from '../actions/index';
import { validateMail, validatePassword } from '../Validation';
import LoginReduxForm from '../views/LoginReduxForm';
import { errorMessagesEnum } from '../Constants';
import { applicationRoutes } from '../Constants';
class LoginReduxFormContainer extends React.Component {
handleSubmit = (values) => {
this.props.saveMailFromForm(values.mail);
this.props.savePasswordFromForm(values.password);
this.props.loginClearState();
this.props.loginReduxFormSuccess();
this.props.history.push(
applicationRoutes.loginReduxFormSuccessRoute);
};
Validation = (values) => {
const errors = {};
if (!values.mail) {
errors.mail = errorMessagesEnum.FieldIsRequired;
} else if (!validateMail(values.mail)) {
errors.mail = errorMessagesEnum.EmailErrorMessage;
}
if (!values.password) {
errors.password = errorMessagesEnum.FieldIsRequired;
} else if (!validatePassword(values.password)) {
errors.password = errorMessagesEnum.PasswordErrorMessage;
}
return errors;
};
render() {
let { mail, password } = this.props.formState.values
? this.props.formState.values : '';
return (
<LoginReduxForm
onSubmit={this.handleSubmit}
mail={mail}
password={password}
validate={this.Validation} />
);
}
}
const mapDispatchToProps = (dispatch) => {
return {
saveMailFromForm: bindActionCreators(saveMailFromForm, dispatch),
savePasswordFromForm: bindActionCreators(savePasswordFromForm, dispatch),
loginClearState: bindActionCreators(loginClearState, dispatch),
loginReduxFormSuccess: bindActionCreators(loginReduxFormSuccess, dispatch)
}
};
const mapStateToProps = (state) => {
return {
formState: {...state.form.login},
}
}
export default connect(mapStateToProps, mapDispatchToProps)(LoginReduxFormContainer);<file_sep>/task_netcore/task4/Client/src/modules/Rating/views/styles.js
// JSS styles for the Rating widget: normal info text and a red error
// variant, both shrinking on narrow viewports.
const styles = theme => ({
    ratingInfo: {
        fontSize: '18px',
        color: '#696969',
        textAlign: 'left',
        '@media (max-width: 650px)': {
            fontSize: '14px',
            padding: '0 10px'
        }
    },
    errorMessage: {
        fontSize: '18px',
        color: 'red',
        textAlign: 'left',
        '@media (max-width: 650px)': {
            fontSize: '14px',
            padding: '0 10px'
        }
    }
});
export default styles;<file_sep>/task_netcore/task4/WebApi/task4/task4.WEB/Controllers/MovieController.cs
using Microsoft.AspNetCore.Mvc;
using task4.BLL.Interfaces;
namespace task4.WEB.Controllers
{
[Route("[controller]/[action]")]
public class MovieController : Controller
{
    private readonly IMovieService _movieService;
    public MovieController(IMovieService movieService)
    {
        _movieService = movieService;
    }
    /// <summary>Returns the full movie list.</summary>
    [HttpGet]
    public IActionResult GetMovies()
    {
        return Ok(_movieService.GetMovies());
    }
    /// <summary>
    /// Searches movies by name; 400 with a message when nothing matches.
    /// </summary>
    [HttpGet("{movieName}")]
    public IActionResult GetMoviesByName(string movieName)
    {
        var movies = _movieService.GetMoviesByName(movieName);
        if (movies.Count == 0)
        {
            return BadRequest("No one movie was found:(");
        }
        return Ok(movies);
    }
    /// <summary>Returns detailed info for one movie, or 404.</summary>
    [HttpGet("{id}")]
    public IActionResult GetMovie(int id)
    {
        var movie = _movieService.GetMovieInfoById(id);
        if (movie == null)
        {
            return NotFound();
        }
        return Ok(movie);
    }
}
}<file_sep>/task_netcore/task2/task2/Controllers/DataController.cs
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using task2.Interfaces;
namespace task2.Controllers
{
[Route("api/[controller]")]
public class DataController : Controller
{
    private readonly IDataService _dataService;
    public DataController(IDataService dataService)
    {
        _dataService = dataService;
    }
    /// <summary>Synchronous variant of the info endpoint (GET api/data).</summary>
    [HttpGet]
    public ActionResult SyncAction()
    {
        return Ok(_dataService.GetInfo());
    }
    /// <summary>Asynchronous variant (GET api/data/async).</summary>
    [HttpGet("async")]
    public async Task<ActionResult> AsyncAction()
    {
        return Ok(await _dataService.GetInfoAsync());
    }
}
}<file_sep>/task_reactredux/task1/src/views/PrivateRoute/index.js
import React from 'react';
import { Route, Redirect } from 'react-router-dom';
import { connect } from 'react-redux';
const PrivateRoute = ({ IsAuthorized, ...props }) => {
return IsAuthorized ? <Route {...props} /> : <Redirect to={props.redirect} />
}
const MapStateToProps = (state) => {
return {
IsAuthorized: state.authorizationReducer
}
};
export default connect(MapStateToProps)(PrivateRoute);<file_sep>/task_netcore/task4/Client/src/modules/MovieSearch/views/NotFound/index.js
import React from 'react';
import PropTypes from 'prop-types';
import styles from './styles'
import { Paper, Typography, withStyles } from '@material-ui/core';
const NotFound = ({ classes, errorMessage, haveMovieSearchErrors }) => {
return (
<Paper className={classes.notfound}>
{
!haveMovieSearchErrors ? <Typography variant="display1">
404 – страница не найдена
</Typography> : <Typography variant="display1">
{errorMessage}
</Typography>
}
</Paper>
);
}
NotFound.propTypes = {
classes: PropTypes.object.isRequired
};
export default withStyles(styles)(NotFound);<file_sep>/task_netcore/task4/WebApi/task4/task4.DAL/Entities/Movie.cs
using System.Collections.Generic;
namespace task4.DAL.Entities
{
/// <summary>
/// Movie entity with its related comments, photos and ratings.
/// </summary>
public class Movie
{
    public int Id { get; set; }
    public string Name { get; set; }
    public string Country { get; set; }
    public int Year { get; set; }
    public string Genre { get; set; }
    public string PictureUrl { get; set; }
    // Aggregate (presumably average) rating — TODO confirm how it is
    // maintained relative to the Ratings collection.
    public decimal Rating { get; set; }
    public string Producer { get; set; }
    public string Description { get; set; }
    // Virtual navigation collections for EF lazy loading.
    public virtual ICollection<Comment> Comments { get; set; }
    public virtual ICollection<Photo> Photos { get; set; }
    public virtual ICollection<Rating> Ratings { get; set; }
}
}<file_sep>/task_netcore/task4/Client/src/modules/Comments/containers/CommentContentContainer.js
import React from 'react';
import CommentContent from '../views/CommentContent';
class CommentContentContainer extends React.Component {
initCommentComponent = i => {
return (
<CommentContent
userName={i.userName}
message={i.message}
date={i.date}
key={i + i.date} />
);
};
render() {
return (
<div>
{this.props.comments.map(this.initCommentComponent)}
</div>
)
}
}
export default CommentContentContainer;<file_sep>/task_netcore/task4/Client/src/modules/MoviesList/views/styles.js
// JSS styles for a movie-list card; the 550px media queries switch the
// card to a narrow vertical layout.
const styles = theme => ({
    card: {
        margin: 20,
        width: '500px',
        height: '300px',
        position: 'relative',
        '@media (max-width: 550px)': {
            width: '230px',
            height: '400px'
        }
    },
    // Poster fills the left column (full width on narrow screens).
    picture: {
        width: '40%',
        height: '100%',
        float: 'left',
        marginRight: '20px',
        '@media (max-width: 550px)': {
            width: '100%',
            height: '70%'
        }
    },
    name: {
        margin: '10px auto',
        fontSize: '24px',
        color: '#696969',
        '@media (max-width: 550px)': {
            fontSize: '18px'
        }
    },
    // "More" button pinned to the card's bottom-right corner.
    button: {
        margin: 'auto',
        position: 'absolute',
        bottom: '20px',
        right: '100px',
        '@media (max-width: 550px)': {
            bottom: '10px',
            right: '10px'
        }
    },
    addInfo: {
        fontSize: '18px',
        color: '#696969',
        textAlign: 'left',
        '@media (max-width: 550px)': {
            fontSize: '14px',
            marginLeft: '10px'
        }
    },
    link: {
        textDecoration: 'none',
        color: '#fff'
    },
});
export default styles; <file_sep>/task_netcore/task4/WebApi/task4/task4.DAL/Repositories/Repository.cs
using Microsoft.EntityFrameworkCore;
using System.Linq;
using task4.DAL.EF;
using task4.DAL.Interfaces;
namespace task4.DAL.Repositories
{
/// <summary>
/// Generic EF Core repository over a single entity set.
/// </summary>
public class Repository<T> : IRepository<T> where T : class
{
    // The set is resolved once from the context and never reassigned.
    private readonly DbSet<T> _dbSet;

    public Repository(ApplicationContext context)
    {
        _dbSet = context.Set<T>();
    }

    /// <summary>Raw queryable access to the entity set (deferred execution).</summary>
    public IQueryable<T> Entities => _dbSet;

    /// <summary>Returns the whole set as an IQueryable (deferred execution).</summary>
    public IQueryable<T> GetQueryableAll()
    {
        return _dbSet;
    }

    /// <summary>Finds an entity by primary key, or null when absent.</summary>
    public T GetById(int id)
    {
        return _dbSet.Find(id);
    }

    // NOTE(review): the parameter name "TEntity" reads like a type
    // parameter; kept as-is to avoid breaking named-argument callers,
    // but a rename to "entity" would match C# conventions.
    public void Insert(T TEntity)
    {
        _dbSet.Add(TEntity);
    }

    public void Update(T TEntity)
    {
        _dbSet.Update(TEntity);
    }

    public void Delete(T TEntity)
    {
        _dbSet.Remove(TEntity);
    }
}
}
<file_sep>/task_netcore/task4/Client/src/modules/AccountForm/containers/AccountFormContainer.js
import React from 'react';
import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import { clearErrorMessage, login, register } from '../actions';
import { validateUserName, validatePassword } from '../../../Validation';
import AccountForm from '../views';
import { withRouter } from 'react-router-dom';
import { applicationRoutes, errorMessagesEnum } from '../../../Constants';
// Container for the shared login/registration form. The mounted route
// decides whether submit triggers `register` or `login`.
class AccountFormContainer extends React.Component {
    componentDidMount() {
        // Drop any stale error banner from a previous attempt.
        this.props.clearErrorMessage();
    }
    handleSubmit = (values) => {
        const user = {
            userName: values.userName,
            passWord: values.password
        };
        if (this.getCurrentPath() === applicationRoutes.registerFormRoute) {
            register(this.props.dispatch, user, this.props.history);
        }
        if (this.getCurrentPath() === applicationRoutes.loginFormRoute) {
            login(this.props.dispatch, user, this.props.history);
        }
    };
    getCurrentPath = () => {
        return this.props.history.location.pathname;
    }
    // redux-form validator: maps field names to error messages.
    validate = (values) => {
        const errors = {};
        if (!values.userName) {
            errors.userName = errorMessagesEnum.FieldIsRequired;
        } else if (!validateUserName(values.userName)) {
            errors.userName = errorMessagesEnum.UserNameErrorMessage;
        }
        if (!values.password) {
            errors.password = errorMessagesEnum.FieldIsRequired;
        } else if (!validatePassword(values.password)) {
            errors.password = errorMessagesEnum.PasswordErrorMessage;
        }
        return errors;
    };
    render() {
        const { haveAccountErrors, errorMessage } = this.props;
        return (
            <AccountForm
                haveAccountErrors={haveAccountErrors}
                errorMessage={errorMessage}
                currentPath={this.getCurrentPath()}
                onSubmit={this.handleSubmit}
                validate={this.validate} />
        );
    }
}
// `dispatch` is exposed directly because register/login are thunk-style
// helpers that take it as their first argument.
const mapDispatchToProps = (dispatch) => {
    return {
        clearErrorMessage: bindActionCreators(clearErrorMessage, dispatch),
        dispatch
    }
};
const mapStateToProps = (state) => {
    return {
        formState: { ...state.form.register },
        ...state.isAuth,
        ...state.account
    }
}
export default withRouter(connect(mapStateToProps, mapDispatchToProps)(AccountFormContainer));<file_sep>/task_netcore/task4/Client/src/modules/MoviesList/actions/types.js
// Redux action types for the movie-list request lifecycle.
export const REQUESTED_MOVIES = 'REQUESTED_MOVIES';
export const REQUESTED_MOVIES_SUCCEEDED = 'REQUESTED_MOVIES_SUCCEEDED';
export const REQUESTED_MOVIES_FAILED = 'REQUESTED_MOVIES_FAILED';<file_sep>/task_netcore/task4/WebApi/task4/task4.WEB/Controllers/AccountController.cs
using System.Threading.Tasks;
using AutoMapper;
using Microsoft.AspNetCore.Mvc;
using task4.BLL.Interfaces;
using task4.BLL.Models;
using task4.WEB.Models;
namespace task4.WEB.Controllers
{
[Route("[controller]/[action]")]
public class AccountController : Controller
{
    private readonly IAccountService _accountService;
    private readonly IMapper _mapper;
    public AccountController(IAccountService accountService, IMapper mapper)
    {
        _accountService = accountService;
        _mapper = mapper;
    }
    /// <summary>
    /// Registers a new user; returns the auth token and user name on
    /// success, or the service's error list as 400.
    /// </summary>
    [HttpPost]
    public async Task<ActionResult> Register([FromBody] AccountViewModel accountViewModel)
    {
        if (!ModelState.IsValid)
        {
            return BadRequest(ModelState);
        }
        var accountModel = _mapper.Map<AccountViewModel, AccountModel>(accountViewModel);
        var resultModel = await _accountService.Register(accountModel);
        if (resultModel.Errors != null)
        {
            return BadRequest(resultModel.Errors);
        }
        return Ok(new { resultModel.Token, accountViewModel.UserName });
    }
    /// <summary>
    /// Authenticates an existing user; same response contract as Register.
    /// </summary>
    [HttpPost]
    public async Task<ActionResult> Login([FromBody] AccountViewModel accountViewModel)
    {
        if (!ModelState.IsValid)
        {
            return BadRequest(ModelState);
        }
        var accountModel = _mapper.Map<AccountViewModel, AccountModel>(accountViewModel);
        var resultModel = await _accountService.Login(accountModel);
        if (resultModel.Errors != null)
        {
            return BadRequest(resultModel.Errors);
        }
        return Ok(new { resultModel.Token, accountViewModel.UserName });
    }
}
}<file_sep>/task_reactredux/task1/src/actions/types.js
// Redux action type constants for the login / redux-form flows.
export const ON_MAIL_CHANGE = 'ON_MAIL_CHANGE';
// Added the missing trailing semicolon for consistency with the rest.
export const ON_PASSWORD_CHANGE = 'ON_PASSWORD_CHANGE';
export const HANDLE_SUBMIT = 'HANDLE_SUBMIT';
export const SAVE_MAIL_FROM_FORM = 'SAVE_MAIL_FROM_FORM';
export const SAVE_PASSWORD_FROM_FORM = 'SAVE_PASSWORD_FROM_FORM';
export const LOGIN_CLEAR_STATE = 'LOGIN_CLEAR_STATE';
export const LOGIN_REDUX_FORM_SUCCESS = 'LOGIN_REDUX_FORM_SUCCESS';<file_sep>/task_netcore/task4/Client/src/modules/Comments/containers/CommentFormContainer.js
import React from 'react';
import CommentForm from '../views/CommentForm';
import { bindActionCreators } from 'redux';
import { withRouter } from 'react-router-dom';
import { connect } from 'react-redux';
import { onCommentChange, loadComments, sendComment } from '../actions'
import { applicationRoutes } from '../../../Constants';
import CommentContentContainer from '../../Comments/containers/CommentContentContainer';
// Container wiring the comment form and the comment list for one movie.
// Unauthenticated submitters are redirected to the login form.
class CommentFormContainer extends React.Component {
    // NOTE(review): copying props.id into state freezes the movie id at
    // mount time — later prop changes are ignored; confirm intended.
    state = {
        id: this.props.id
    };
    onCommentChange = (event) => {
        this.props.onCommentChange(event.target.value);
    };
    componentDidMount() {
        // Fetch existing comments for this movie.
        loadComments(this.props.dispatch, this.state.id);
    }
    onSubmit = (event) => {
        event.preventDefault();
        const { isAuth, message } = this.props;
        const comment = {
            message: message,
            movieid: this.state.id
        }
        if (!isAuth) {
            this.props.history.push(applicationRoutes.loginFormRoute);
        }
        else {
            sendComment(this.props.dispatch, comment, this.state.id);
        }
    }
    render() {
        const { message, comments } = this.props;
        return (
            <div>
                <CommentForm
                    onSubmit={this.onSubmit}
                    onCommentChange={this.onCommentChange}
                    message={message} />
                <CommentContentContainer
                    comments={comments} />
            </div>
        )
    }
}
// `dispatch` is exposed directly because loadComments/sendComment are
// thunk-style helpers taking it as their first argument.
const mapDispatchToProps = (dispatch) => {
    return {
        onCommentChange: bindActionCreators(onCommentChange, dispatch),
        dispatch
    }
};
const mapStateToProps = (state) => {
    return {
        ...state.isAuth,
        ...state.commentForm,
        ...state.comments
    }
}
export default withRouter(connect(mapStateToProps, mapDispatchToProps)(CommentFormContainer));<file_sep>/task_netcore/task4/WebApi/task4/task4.WEB/Profiles/MapperProfile.cs
using AutoMapper;
using task4.BLL.Models;
using task4.WEB.Models;
namespace task4.WEB.Profiles
{
/// <summary>
/// AutoMapper profile: maps incoming view models to service-layer models.
/// </summary>
public class MapperProfile: Profile
{
    public MapperProfile()
    {
        CreateMap<CommentViewModel, CommentModel>();
        CreateMap<AccountViewModel, AccountModel>();
        CreateMap<RatingViewModel, RatingModel>();
    }
}
}
<file_sep>/task_netcore/task3/task3/Models/MovieModel.cs
using System.ComponentModel.DataAnnotations;
namespace task3.Models
{
/// <summary>
/// Input/output model for a movie; all descriptive fields are required
/// so model binding rejects incomplete payloads.
/// </summary>
public class MovieModel
{
    public int Id { get; set; }
    [Required]
    public string Name { get; set; }
    [Required]
    public int Year { get; set; }
    [Required]
    public string Country { get; set; }
    [Required]
    public string Producer { get; set; }
}
}
<file_sep>/task_reactredux/task1/src/Validation.js
import { MIN_PASSWORD_LENGTH, REG_EXPR } from './Constants';
// True when the lower-cased e-mail matches the shared REG_EXPR pattern.
export const validateMail = (email) => {
    return REG_EXPR.test(String(email).toLowerCase());
}
// True when the password is strictly longer than MIN_PASSWORD_LENGTH.
// NOTE(review): `>` excludes passwords of exactly MIN_PASSWORD_LENGTH
// characters — confirm whether `>=` was intended.
export const validatePassword = (password) => {
    return password.length > MIN_PASSWORD_LENGTH;
}<file_sep>/task_netcore/task4/Client/src/modules/MovieSearch/views/MovieSearchForm/styles.js
// JSS styles for the movie search form; media queries narrow the form
// at 1100px and 620px viewport widths.
const styles = theme => ({
    movieSearchFormContainer: {
        maxWidth: '1000px',
        margin: '20px auto',
        '@media (max-width: 1100px)': {
            width: '600px'
        },
        '@media (max-width: 620px)': {
            width: '300px'
        }
    },
    // Compact submit button with a small send icon.
    button: {
        width: '35px',
        height: '25px',
        marginLeft: '5px'
    },
    sendIcon: {
        fontSize: '18px'
    },
    movieSearchForm: {
        margin: '0 auto'
    },
    title: {
        fontSize: '28px',
        color: '#696969',
        marginBottom: '10px',
        '@media (max-width: 620px)': {
            fontSize: '18px'
        }
    }
});
export default styles;<file_sep>/task_netcore/task3/task3/Attributes/ExceptionLoggerAttribute.cs
using Microsoft.AspNetCore.Mvc.Filters;
using task3.Interfaces;
namespace task3.Attributes
{
/// <summary>
/// MVC exception filter that forwards unhandled action exceptions to the
/// injected <see cref="IActionLogger"/> and marks them handled, suppressing
/// the default error propagation.
/// </summary>
public class ExceptionLoggerAttribute: ExceptionFilterAttribute
{
private readonly IActionLogger _logger;
public ExceptionLoggerAttribute(IActionLogger logger)
{
_logger = logger;
}
public override void OnException(ExceptionContext context)
{
_logger.Log(context);
// Swallow the exception after logging; the response pipeline continues.
context.ExceptionHandled = true;
}
}
}<file_sep>/task_netcore/task4/Client/src/modules/Rating/reducers/UserRatingReducer.js
import { REQUESTED_USER_RATING, REQUESTED_USER_RATING_FAILED, REQUESTED_USER_RATING_SUCCEEDED } from '../actions/types';
// Default slice: the current user has not rated the movie and no error occurred.
const InitialState = {
    rating: {
        alreadyRated: false,
        value: 0
    },
    error: false
}

// Tracks the signed-in user's own rating for the currently viewed movie.
const userRatingReducer = (state = InitialState, action) => {
    if (action.type === REQUESTED_USER_RATING) {
        // Request started: reset to defaults while waiting for the server.
        return { ...InitialState };
    }
    if (action.type === REQUESTED_USER_RATING_SUCCEEDED) {
        return { rating: action.rating, error: false };
    }
    if (action.type === REQUESTED_USER_RATING_FAILED) {
        return { rating: { alreadyRated: false, value: 0 }, error: true };
    }
    return state;
}
export default (userRatingReducer);<file_sep>/task_javascript/task3/README.md
# ItechArt-Lab-2018
3) https://jsbin.com/quxiday/edit?html,js,console,output<file_sep>/task_reactredux/task1/src/views/About/index.js
import React from 'react';
import { Typography, withStyles } from '@material-ui/core';
import styles from './styles';
import PropTypes from 'prop-types';
// Static "About" page: renders the company blurb (Russian copy, rendered
// verbatim) using the injected JSS classes.
const About = ({classes}) => {
return (<Typography variant="display1" className= {classes.about}>
iTechArt Group – это команда Remarkable People, профессионалов в
сфере разработки, тестирования, сервисной поддержки программных продуктов,
модернизации и интеграции бизнес-приложений.
Мы страстно любим дело, которым занимаемся, и
стремимся к совершенству в решении любых задач.
</Typography>);
}
About.propTypes = {
classes: PropTypes.object.isRequired,
};
export default withStyles(styles)(About);<file_sep>/task_reactredux/task1/src/actions/index.js
import { ON_MAIL_CHANGE, ON_PASSWORD_CHANGE, HANDLE_SUBMIT, SAVE_MAIL_FROM_FORM, SAVE_PASSWORD_FROM_FORM, LOGIN_CLEAR_STATE, LOGIN_REDUX_FORM_SUCCESS } from './types';
// Action creators for the login pages (plain, redux and redux-form variants).

// Fired on every e-mail input change.
export const onMailChange = (mail) => ({ type: ON_MAIL_CHANGE, mail });

// Fired on every password input change.
export const onPasswordChange = (password) => ({ type: ON_PASSWORD_CHANGE, password });

// Marks the form as submitted so validation errors become visible.
export const handleSubmit = (wasSubmited) => ({ type: HANDLE_SUBMIT, wasSubmited });

// Persists the e-mail entered through the redux-form login variant.
export const saveMailFromForm = (mail) => ({ type: SAVE_MAIL_FROM_FORM, mail });

// Persists the password entered through the redux-form login variant.
export const savePasswordFromForm = (password) => ({ type: SAVE_PASSWORD_FROM_FORM, password });

// Resets the redux-form login slice back to its pristine state.
export const loginClearState = () => ({ type: LOGIN_CLEAR_STATE });

// Signals a successful redux-form login submission.
export const loginReduxFormSuccess = () => ({ type: LOGIN_REDUX_FORM_SUCCESS });
using System.Collections.Generic;
namespace task4.BLL.Models
{
/// <summary>
/// Detailed movie model: the list-view fields plus description, producer and
/// the movie's photo gallery.
/// </summary>
public class MovieInfoModel: MovieModel
{
public string Description { get; set; }
public string Producer { get; set; }
public ICollection<PhotoModel> Photos { get; set; }
}
}
<file_sep>/task_netcore/task4/Client/src/modules/Comments/actions/index.js
import { webApiRoutes } from '../../../Constants';
import axios from 'axios';
import {
REQUESTED_SEND_COMMENT, REQUESTED_SEND_COMMENT_SUCCEEDED, REQUESTED_SEND_COMMENT_FAILED, CLEAR_COMMENT_FIELD, ON_COMMENT_CHANGE,
REQUESTED_COMMENTS, REQUESTED_COMMENTS_FAILED, REQUESTED_COMMENTS_SUCCEEDED
} from './types';
import { SessionService } from '../../../Services/SessionService';
// Synchronous action creators for the comment form and the send-comment flow.

// Empties the comment input after a successful post.
export const clearCommentField = () => ({ type: CLEAR_COMMENT_FIELD });

// Fired on every keystroke in the comment field.
export const onCommentChange = (message) => ({ type: ON_COMMENT_CHANGE, message });

// Marks the start of a send-comment request.
export const requestSendComment = () => ({ type: REQUESTED_SEND_COMMENT });

// Marks a successful send-comment request.
export const requestSendCommentSuccess = () => ({ type: REQUESTED_SEND_COMMENT_SUCCEEDED });

// Marks a failed send-comment request, carrying the server error message.
export const requestSendCommentError = (errorMessage) => ({
    type: REQUESTED_SEND_COMMENT_FAILED,
    errorMessage
});
// Posts a new comment for the movie identified by `id`, authorising with the
// JWT stored in the session, then reloads the movie's comment list.
// Dispatches request/success/failure actions around the HTTP call and returns
// the axios promise so callers may await it.
export const sendComment = (dispatch, comment, id) => {
    dispatch(requestSendComment());
    return axios.post(webApiRoutes.addCommentRoute, comment, {
        headers: {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer ' + SessionService.getJsonItem('account').token
        }
    })
        .then(response => {
            dispatch(requestSendCommentSuccess());
            dispatch(clearCommentField());
            loadComments(dispatch, id);
        })
        .catch(error => {
            // Fix: axios rejects with an Error whose server payload lives at
            // `error.response.data`; the old `errors.data` was always undefined.
            // Fall back to the generic message when no response arrived.
            dispatch(requestSendCommentError(error.response ? error.response.data : error.message));
        })
};
// Synchronous action creators for the comment-list loading flow.

// Marks the start of a comments fetch.
export const requestComments = () => ({ type: REQUESTED_COMMENTS });

// Delivers the fetched comment list.
export const requestCommentsSuccess = (comments) => ({
    type: REQUESTED_COMMENTS_SUCCEEDED,
    comments
});

// Marks a failed comments fetch, carrying the error message.
export const requestCommentsError = (errorMessage) => ({
    type: REQUESTED_COMMENTS_FAILED,
    errorMessage
});
// Fetches all comments for the movie identified by `id`, dispatching
// request/success/failure actions around the HTTP call.
export const loadComments = (dispatch, id) => {
dispatch(requestComments());
return axios.get(webApiRoutes.loadCommentsRoute + id)
.then(response => {
dispatch(requestCommentsSuccess(response.data));
})
.catch(errors => {
// NOTE(review): axios puts the server payload on `errors.response.data`;
// `errors.data` is most likely always undefined here — confirm and fix
// (same pattern as sendComment above).
dispatch(requestCommentsError(errors.data));
})
};<file_sep>/task_reactredux/task1/src/containers/LoginContainer.js
import React from 'react';
import Login from '../views/Login/index';
import { validateMail, validatePassword } from '../Validation';
// Pristine login-form state; frozen so accidental mutation throws in strict mode.
const formDefaultValues = Object.freeze({
mail: '',
password: '',
mailValid: false,
passwordValid: false,
wasSubmited: false
});
// Plain-React login form container: keeps field values and their validity in
// local component state, validates on every change, and on submit alerts the
// credentials and resets the form when both fields are valid.
class LoginContainer extends React.Component {
constructor(props) {
super(props);
this.state = formDefaultValues;
}
// Re-validates the e-mail on every keystroke.
onMailChange = (e) => {
let value = e.target.value;
this.setState({
mail: value,
mailValid: validateMail(value)
});
}
// Re-validates the password on every keystroke.
onPasswordChange = (e) => {
let value = e.target.value;
this.setState({
password: value,
passwordValid: validatePassword(value)
});
}
// Marks the form submitted (so errors render) and, if valid, shows the
// entered credentials and resets the form to its defaults.
handleSubmit = (e) => {
e.preventDefault();
let { mail, password, mailValid, passwordValid } = this.state;
this.setState({
wasSubmited: true
});
if (mailValid && passwordValid) {
alert(JSON.stringify({ mail: mail, password: password }));
this.setState(formDefaultValues);
}
}
render() {
return (
<Login
handleSubmit={this.handleSubmit}
onMailChange={this.onMailChange}
onPasswordChange={this.onPasswordChange}
wasSubmited={this.state.wasSubmited}
mail={this.state.mail}
password={this.state.password}
mailValid={this.state.mailValid}
passwordValid={this.state.passwordValid} />
);
}
}
export default LoginContainer;<file_sep>/task_netcore/task4/WebApi/task4/task4.DAL/Interfaces/IRepository.cs
using System.Linq;
namespace task4.DAL.Interfaces
{
/// <summary>
/// Generic repository abstraction over a single entity set.
/// </summary>
/// <typeparam name="T">Entity type handled by the repository.</typeparam>
public interface IRepository<T> where T: class
{
    /// <summary>Returns the whole entity set as a deferred-execution query.</summary>
    IQueryable<T> GetQueryableAll();

    /// <summary>Returns the entity with the given primary key, or null when absent.</summary>
    T GetById(int id);

    // Fix: parameter renamed from `TEntity` (type-parameter naming convention)
    // to the conventional camelCase `entity`; positional callers are unaffected.

    /// <summary>Registers the entity for insertion.</summary>
    void Insert(T entity);

    /// <summary>Registers the entity as modified.</summary>
    void Update(T entity);

    /// <summary>Registers the entity for deletion.</summary>
    void Delete(T entity);
}
}
<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Models/CommentModel.cs
namespace task4.BLL.Models
{
/// <summary>
/// BLL model for a movie comment: message text, a preformatted date string,
/// and the related movie/user identifiers plus the author's display name.
/// </summary>
public class CommentModel
{
public string Message { get; set; }
public string Date { get; set; }
public int MovieId { get; set; }
public int UserId { get; set; }
public string UserName { get; set; }
}
}<file_sep>/task_netcore/task4/WebApi/task4/task4.DAL/Entities/Rating.cs
namespace task4.DAL.Entities
{
/// <summary>
/// Rating entity: one user's rating value for one movie. Navigation
/// properties are virtual to allow lazy loading.
/// </summary>
public class Rating
{
public int Id { get; set; }
public decimal Value { get; set; }
public virtual Movie Movie { get; set; }
public virtual User User { get; set; }
}
}<file_sep>/task_reactredux/task1/src/reducers/PasswordReducer.js
import { validatePassword } from '../Validation';
import { ON_PASSWORD_CHANGE, SAVE_PASSWORD_FROM_FORM } from '../actions/types';
// Default password slice: empty field, not yet valid.
const initialState = {
    password: '',
    passwordValid: false
};

// Holds the password value and its validity flag for the login forms.
const passwordReducer = (state = initialState, action) => {
    switch (action.type) {
        case ON_PASSWORD_CHANGE:
            // Re-validate on every keystroke.
            return {
                ...state,
                password: action.password,
                passwordValid: validatePassword(action.password)
            }
        case SAVE_PASSWORD_FROM_FORM:
            // Fix: `action.<PASSWORD>` was an invalid token (placeholder/scrubbing
            // artifact) that breaks the build; the reducer must read
            // `action.password`, mirroring SAVE_MAIL_FROM_FORM in MailReducer.
            return {
                ...state,
                password: action.password,
                passwordValid: true
            }
        default:
            return state;
    }
}
export default (passwordReducer);<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Services/PhotoService.cs
using AutoMapper;
using System.Collections.Generic;
using System.Linq;
using task4.BLL.Interfaces;
using task4.BLL.Models;
using task4.DAL.Entities;
using task4.DAL.Interfaces;
namespace task4.BLL.Services
{
/// <summary>
/// BLL service returning the photo gallery of a movie, mapped from DAL
/// entities to <see cref="PhotoModel"/>s via AutoMapper.
/// </summary>
public class PhotoService: IPhotoService
{
private readonly IUnitOfWork _unitOfWork;
private readonly IMapper _mapper;
public PhotoService(IUnitOfWork uow, IMapper mapper)
{
_unitOfWork = uow;
_mapper = mapper;
}
/// <summary>Returns all photos attached to the given movie (empty list when none).</summary>
public IList<PhotoModel> GetPhotosByMovie(int movieId)
{
var photos = _unitOfWork.PhotoRepository.GetQueryableAll().Where(p => p.Movie.Id == movieId).ToList();
var photosModel = _mapper.Map<IList<Photo>, IList<PhotoModel>>(photos);
return photosModel;
}
}
}
<file_sep>/task_netcore/task4/WebApi/task4/task4.DAL/Repositories/UnitOfWork.cs
using Microsoft.AspNetCore.Identity;
using System;
using task4.DAL.EF;
using task4.DAL.Entities;
using task4.DAL.Interfaces;
namespace task4.DAL.Repositories
{
/// <summary>
/// Unit-of-work over the EF Core context: exposes lazily created repositories
/// for each aggregate plus the ASP.NET Identity managers, and commits all
/// pending changes in a single SaveChanges call.
/// </summary>
public class UnitOfWork : IUnitOfWork
{
private readonly ApplicationContext _context;
// Repositories are created on first access and then cached.
private IRepository<Movie> _movieRepository;
private IRepository<User> _userRepository;
private IRepository<Photo> _photoRepository;
private IRepository<Comment> _commentRepository;
private IRepository<Rating> _ratingRepository;
public UnitOfWork (ApplicationContext context,
UserManager<User> userManager,
SignInManager<User> signInManager)
{
_context = context;
UserManager = userManager;
SignInManager = signInManager;
}
public UserManager<User> UserManager { get; }
public SignInManager<User> SignInManager { get; }
public IRepository<Movie> MovieRepository =>
_movieRepository ?? (_movieRepository = new Repository<Movie>(_context));
public IRepository<User> UserRepository =>
_userRepository ?? (_userRepository = new Repository<User>(_context));
public IRepository<Photo> PhotoRepository =>
_photoRepository ?? (_photoRepository = new Repository<Photo>(_context));
public IRepository<Comment> CommentRepository =>
_commentRepository ?? (_commentRepository = new Repository<Comment>(_context));
public IRepository<Rating> RatingRepository =>
_ratingRepository ?? (_ratingRepository = new Repository<Rating>(_context));
/// <summary>Persists all pending changes tracked by the shared context.</summary>
public void Commit()
{
_context.SaveChanges();
}
private bool _disposed = false;
// Standard dispose pattern: disposes the context exactly once.
public virtual void Dispose(bool disposing)
{
if (!_disposed)
{
if (disposing)
{
_context.Dispose();
}
_disposed = true;
}
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
}
<file_sep>/task_netcore/task3/task3/Services/DataService.cs
using Microsoft.AspNetCore.Mvc;
using System.Collections.Generic;
using System.Linq;
using task3.Data;
using task3.Data.Entities;
using task3.Interfaces;
namespace task3.Services
{
/// <summary>
/// CRUD service over the movies table; each mutating call saves immediately.
/// </summary>
public class DataService: IDataService
{
private readonly MoviesContext _context;
public DataService(MoviesContext context)
{
_context = context;
}
/// <summary>Returns the movie set as a deferred-execution query.</summary>
public IQueryable<Movie> GetData()
{
IQueryable<Movie> movies = _context.Movies;
return movies;
}
/// <summary>Returns the movie with the given id, or null when absent.</summary>
public Movie GetDataById(int id)
{
var movie = _context.Movies.FirstOrDefault(x => x.Id == id);
return movie;
}
/// <summary>Inserts the movie and returns it (with its generated id).</summary>
public Movie AddData(Movie movie)
{
_context.Movies.Add(movie);
_context.SaveChanges();
return movie;
}
/// <summary>Updates the movie and returns it.</summary>
public Movie UpdateData(Movie movie)
{
_context.Update(movie);
_context.SaveChanges();
return movie;
}
/// <summary>Deletes the movie with the given id; returns it, or null when absent.</summary>
public Movie DeleteData(int id)
{
var movie = _context.Movies.FirstOrDefault(x => x.Id == id);
if(movie == null)
{
return null;
}
_context.Movies.Remove(movie);
_context.SaveChanges();
return movie;
}
}
}
<file_sep>/task_netcore/task4/Client/src/modules/Rating/reducers/AverageRatingReducer.js
import { REQUESTED_AVERAGE_RATING, REQUESTED_AVERAGE_RATING_FAILED, REQUESTED_AVERAGE_RATING_SUCCEEDED } from '../actions/types';
// Default slice: no average rating yet, not loading, no error.
const InitialState = {
    averageRating: 0,
    isLoading: false,
    error: false
}

// Tracks loading of a movie's average rating (all fields are primitives, so
// spreading InitialState yields the same fresh values as the literals did).
const averageRatingReducer = (state = InitialState, action) => {
    switch (action.type) {
        case REQUESTED_AVERAGE_RATING:
            // Request started: reset the value and raise the loading flag.
            return { ...InitialState, isLoading: true };
        case REQUESTED_AVERAGE_RATING_SUCCEEDED:
            return { ...InitialState, averageRating: action.averageRating };
        case REQUESTED_AVERAGE_RATING_FAILED:
            return { ...InitialState, error: true };
        default:
            return state;
    }
}
export default (averageRatingReducer);<file_sep>/task_netcore/task4/Client/src/modules/MoviesList/reducers/MovieReducer.js
import { REQUESTED_MOVIES, REQUESTED_MOVIES_SUCCEEDED, REQUESTED_MOVIES_FAILED } from '../actions/types';
// Default slice: no movies loaded yet, not loading, no error.
const InitialState = {
    movies: [],
    isLoading: false,
    error: false
}

// Tracks loading of the movie list. A fresh `movies: []` array is created per
// transition, matching the original object literals.
const movieReducer = (state = InitialState, action) => {
    if (action.type === REQUESTED_MOVIES) {
        return { movies: [], isLoading: true, error: false };
    }
    if (action.type === REQUESTED_MOVIES_SUCCEEDED) {
        return { movies: action.movies, isLoading: false, error: false };
    }
    if (action.type === REQUESTED_MOVIES_FAILED) {
        return { movies: [], isLoading: false, error: true };
    }
    return state;
}
export default (movieReducer);<file_sep>/task_reactredux/task1/src/reducers/MailReducer.js
import { validateMail } from '../Validation';
import { ON_MAIL_CHANGE, SAVE_MAIL_FROM_FORM } from '../actions/types';
// Default e-mail slice: empty field, not yet valid.
const initialState = {
    mail: '',
    mailValid: false
};

// Holds the e-mail value and its validity flag for the login forms.
const mailReducer = (state = initialState, action) => {
    if (action.type === ON_MAIL_CHANGE) {
        // Re-validate on every keystroke.
        return { ...state, mail: action.mail, mailValid: validateMail(action.mail) };
    }
    if (action.type === SAVE_MAIL_FROM_FORM) {
        // Values restored from the form are trusted as already validated.
        return { ...state, mail: action.mail, mailValid: true };
    }
    return state;
}
export default (mailReducer);<file_sep>/task_reactredux/task1/src/views/Router/index.js
import React from 'react'
import { Switch, Route, Redirect } from 'react-router-dom'
import About from '../About/index';
import CountersParentContainer from '../../containers/CountersParentContainer';
import NotFound from '../NotFound';
import LoginContainer from '../../containers/LoginContainer';
import LoginReduxContainer from '../../containers/LoginReduxContainer';
import LoginReduxFormContainer from '../../containers/LoginReduxFormContainer';
import SuccessContainer from '../../containers/SuccessContainer';
import PrivateRoute from '../PrivateRoute';
import { applicationRoutes } from '../../Constants';
// Application route table. Order matters: '/' redirects to the start page,
// the redux-form success page sits behind a PrivateRoute, and any unmatched
// path falls through to the 404 redirect at the bottom.
const Main = () => (
<main>
<Switch>
<Route exact path={applicationRoutes.defaultRoute} render={() => <Redirect to={applicationRoutes.startPageRoute} />} />
<Route exact path={applicationRoutes.startPageRoute} component={null} />
<Route path={applicationRoutes.aboutRoute} component={About} />
<Route path={applicationRoutes.countersRoute} component={CountersParentContainer} />
<Route path={applicationRoutes.loginRoute} component={LoginContainer} />
<Route path={applicationRoutes.loginReduxSuccessRoute} component={SuccessContainer} />
<Route path={applicationRoutes.loginReduxRoute} component={LoginReduxContainer} />
<PrivateRoute path={applicationRoutes.loginReduxFormSuccessRoute} redirect={applicationRoutes.loginReduxFormRoute} component={SuccessContainer} />
<Route path={applicationRoutes.loginReduxFormRoute} component={LoginReduxFormContainer} />
<Route path={applicationRoutes.notFoundRoute} component={NotFound} />
<Route path={applicationRoutes.errorRoute} >
<Redirect to={applicationRoutes.notFoundRoute} />
</Route>
</Switch>
</main>
);
export default (Main);<file_sep>/task_netcore/task4/Client/src/modules/MovieSearch/containers/MovieSearchResultContainer.js
import React from 'react';
import Movie from '../../MoviesList/views';
import { connect } from 'react-redux';
import NotFound from '../views/NotFound';
import { clearErrorMessage } from '../actions';
import { bindActionCreators } from 'redux';
// Presents movie-search results: one Movie card per result, or a NotFound
// view when the search produced an error. Any stale error message is cleared
// on mount.
class MovieSearchResultContainer extends React.Component {
componentDidMount() {
this.props.clearErrorMessage();
}
// Builds one Movie card from a single search-result item.
initMovieComponent = i => {
return (
<Movie
name={i.name}
picture={i.pictureUrl}
id={i.id}
country={i.country}
rating={i.rating}
genre={i.genre}
year={i.year}
description={i.description}
key={i + i.id} />
);
};
render() {
const { haveMovieSearchErrors, errorMessage } = this.props
return (
<div>
{
!haveMovieSearchErrors ? <div className='movieContainer'>
{this.props.movies.map(this.initMovieComponent)}
</div> : <NotFound
haveMovieSearchErrors={haveMovieSearchErrors}
errorMessage={errorMessage} />
}
</div>
)
}
}
// Exposes the movieSearch slice as flat props.
const mapStateToProps = (state) => {
    const { movieSearch } = state;
    return Object.assign({}, movieSearch);
}

// Binds clearErrorMessage to the store dispatch.
const mapDispatchToProps = (dispatch) => ({
    clearErrorMessage: bindActionCreators(clearErrorMessage, dispatch)
});
export default connect(mapStateToProps, mapDispatchToProps)(MovieSearchResultContainer);<file_sep>/README.md
# ItechArt-Lab-2018<file_sep>/task_reactredux/task1/src/reducers/index.js
import { combineReducers } from 'redux';
import formReducer from './FormReducer';
import passwordReducer from './PasswordReducer';
import mailReducer from './MailReducer';
import authorizationReducer from './AuthorizationReducer';
import { reducer as reduxFormReducer } from 'redux-form';
import { LOGIN_CLEAR_STATE } from '../actions/types';
// Root reducer: application slices plus redux-form's reducer, extended so the
// `login` form can be reset to its pristine state via LOGIN_CLEAR_STATE.
const reducer = combineReducers({
    formReducer,
    password: passwordReducer,
    mail: mailReducer,
    authorizationReducer,
    form: reduxFormReducer.plugin({
        login: (state, action) => {
            // Returning undefined makes redux-form rebuild the slice from scratch.
            return action.type === LOGIN_CLEAR_STATE ? undefined : state;
        }
    })
});
export default reducer;<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Models/AccountResultModel.cs
using System;
using System.Collections.Generic;
using System.Text;
namespace task4.BLL.Models
{
/// <summary>
/// Result of a register/login attempt: a JWT on success, otherwise a list of
/// error descriptions (Errors is null when the operation succeeded).
/// </summary>
public class AccountResultModel
{
public string Token { get; set; }
public List<string> Errors { get; set; }
}
}
<file_sep>/task_netcore/task4/WebApi/task4/task4.DAL/Interfaces/IUnitOfWork.cs
using Microsoft.AspNetCore.Identity;
using System;
using task4.DAL.Entities;
namespace task4.DAL.Interfaces
{
/// <summary>
/// Unit-of-work contract: per-aggregate repositories sharing one context,
/// the ASP.NET Identity managers, and a single commit point.
/// </summary>
public interface IUnitOfWork: IDisposable
{
UserManager<User> UserManager { get; }
SignInManager<User> SignInManager { get; }
IRepository<User> UserRepository { get; }
IRepository<Movie> MovieRepository { get; }
IRepository<Photo> PhotoRepository { get; }
IRepository<Comment> CommentRepository { get; }
IRepository<Rating> RatingRepository { get; }
/// <summary>Persists all pending changes in one SaveChanges call.</summary>
void Commit();
}
}
<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Services/AccountService.cs
using Microsoft.Extensions.Configuration;
using Microsoft.IdentityModel.Tokens;
using System;
using System.Collections.Generic;
using System.IdentityModel.Tokens.Jwt;
using System.Linq;
using System.Security.Claims;
using System.Text;
using System.Threading.Tasks;
using task4.BLL.Interfaces;
using task4.BLL.Models;
using task4.DAL.Entities;
using task4.DAL.Interfaces;
namespace task4.BLL.Services
{
/// <summary>
/// Handles account registration and login through ASP.NET Identity and issues
/// HMAC-SHA256-signed JWT bearer tokens for authenticated users.
/// </summary>
public class AccountService : IAccountService
{
private readonly IUnitOfWork _unitOfWork;
private readonly IConfiguration _configuration;
public AccountService(IUnitOfWork uow, IConfiguration configuration)
{
_unitOfWork = uow;
_configuration = configuration;
}
/// <summary>
/// Creates a new user account; on success signs the user in and returns a
/// JWT, otherwise returns the Identity error descriptions.
/// </summary>
public async Task<AccountResultModel> Register(AccountModel accountModel)
{
var user = new User
{
UserName = accountModel.UserName
};
var result = await _unitOfWork.UserManager.CreateAsync(user, accountModel.Password);
var accountResultModel = new AccountResultModel();
if (result.Succeeded)
{
// isPersistent: false — session-scoped sign-in only.
await _unitOfWork.SignInManager.SignInAsync(user, false);
accountResultModel.Token = GenerateJwtToken(accountModel.UserName, user);
return accountResultModel;
}
accountResultModel.Errors = new List<string>();
foreach (var error in result.Errors)
{
accountResultModel.Errors.Add(error.Description);
}
return accountResultModel;
}
/// <summary>
/// Validates the credentials and returns a JWT on success; otherwise a single
/// generic error (deliberately not revealing which credential was wrong).
/// </summary>
public async Task<AccountResultModel> Login(AccountModel accountModel)
{
var result = await _unitOfWork.SignInManager.PasswordSignInAsync(accountModel.UserName, accountModel.Password, false, false);
var accountResultModel = new AccountResultModel();
if (result.Succeeded)
{
var appUser = _unitOfWork.UserManager.Users.SingleOrDefault(r => r.UserName == accountModel.UserName);
accountResultModel.Token = GenerateJwtToken(accountModel.UserName, appUser);
return accountResultModel;
}
accountResultModel.Errors = new List<string>();
accountResultModel.Errors.Add("Incorrect login or password");
return accountResultModel;
}
// Builds a signed JWT whose subject is the user name and whose NameIdentifier
// claim carries the user id; issuer, signing key and lifetime come from
// configuration (JwtIssuer / JwtKey / JwtExpireDays).
private string GenerateJwtToken(string userName, User user)
{
var claims = new List<Claim>
{
new Claim(JwtRegisteredClaimNames.Sub, userName),
new Claim(JwtRegisteredClaimNames.Jti, Guid.NewGuid().ToString()),
new Claim(ClaimTypes.NameIdentifier, user.Id.ToString())
};
var key = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(_configuration["JwtKey"]));
var creds = new SigningCredentials(key, SecurityAlgorithms.HmacSha256);
var expires = DateTime.Now.AddDays(Convert.ToDouble(_configuration["JwtExpireDays"]));
var token = new JwtSecurityToken(
_configuration["JwtIssuer"],
_configuration["JwtIssuer"],
claims,
expires: expires,
signingCredentials: creds
);
return new JwtSecurityTokenHandler().WriteToken(token);
}
}
}<file_sep>/task_reactredux/task1/src/views/Success/index.js
import React from 'react';
import PropTypes from 'prop-types';
import { Paper, Typography } from '@material-ui/core';
// Post-login success page: simply echoes the submitted mail and password.
// NOTE(review): rendering a password on screen looks like demo/training code —
// confirm this is intentional.
const Success = ({ mail, password }) => {
return (
<div>
<Paper>
<Typography variant='display1'>
{mail}
</Typography>
<Typography variant='display1'>
{password}
</Typography>
</Paper>
</div>
);
}
Success.propTypes = {
mail: PropTypes.string,
password: PropTypes.string
};
export default (Success);<file_sep>/task_netcore/task3/task3/Interfaces/IDataService.cs
using Microsoft.AspNetCore.Mvc;
using System.Collections.Generic;
using System.Linq;
using task3.Data.Entities;
namespace task3.Interfaces
{
/// <summary>
/// CRUD contract over the movie store used by the task3 controllers.
/// </summary>
public interface IDataService
{
IQueryable<Movie> GetData();
/// <summary>Returns the movie with the given id, or null when absent.</summary>
Movie GetDataById(int id);
Movie AddData(Movie movie);
Movie UpdateData(Movie movie);
/// <summary>Deletes the movie with the given id; returns it, or null when absent.</summary>
Movie DeleteData(int id);
}
}
<file_sep>/task_netcore/task4/WebApi/task4/task4.WEB/Controllers/RatingController.cs
using AutoMapper;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using task4.BLL.Interfaces;
using task4.BLL.Models;
using task4.WEB.Models;
using task4.WEB.Common;
using System;
namespace task4.WEB.Controllers
{
[Route("[controller]/[action]")]
/// <summary>
/// Endpoints for movie ratings: submitting a rating (authorised), reading the
/// current user's rating, and reading a movie's average rating (anonymous).
/// </summary>
public class RatingController : Controller
{
    private readonly IRatingService _ratingService;
    private readonly IMapper _mapper;

    public RatingController(IRatingService ratingService, IMapper mapper)
    {
        _ratingService = ratingService;
        _mapper = mapper;
    }

    /// <summary>Stores the signed-in user's rating; 400 with the error list on failure.</summary>
    [Authorize]
    [HttpPost]
    public IActionResult AddRating([FromBody] RatingViewModel ratingViewModel)
    {
        var rating = _mapper.Map<RatingViewModel, RatingModel>(ratingViewModel);
        // The user id comes from the JWT's NameIdentifier claim, never the client body.
        rating.UserId = Convert.ToInt32(HttpContext.GetUserIdByHttpContext());

        var ratingResult = _ratingService.AddRating(rating);
        return ratingResult.Errors != null ? (IActionResult)BadRequest(ratingResult.Errors) : Ok();
    }

    /// <summary>Returns the signed-in user's rating for the movie with the given id.</summary>
    [Authorize]
    [HttpGet("{id}")]
    public IActionResult GetUserRating(int id)
    {
        var currentUserId = Convert.ToInt32(HttpContext.GetUserIdByHttpContext());
        var userRating = _ratingService.GetUserRating(currentUserId, id);
        return Ok(userRating);
    }

    /// <summary>Returns the average rating of the movie with the given id.</summary>
    [HttpGet("{id}")]
    public IActionResult GetAverageRating(int id)
    {
        return Ok(_ratingService.GetAverageRating(id));
    }
}
}<file_sep>/task_netcore/task4/Client/src/modules/Header/containers/HeaderContainer.js
import React from 'react';
import { withRouter } from 'react-router-dom';
import Header from '../views';
import { bindActionCreators } from 'redux';
import { connect } from 'react-redux';
import { handleHeaderClose, handleHeaderMenu, logOut } from '../actions';
import { SessionService } from '../../../Services/SessionService';
// Container for the app header: wires the user menu open/close handlers,
// exposes the current route, and on logout clears the stored account from
// session storage before dispatching the logout action.
class HeaderContainer extends React.Component {
getCurrentPath = () => {
return this.props.history.location.pathname;
}
// Opens the user menu anchored at the clicked element.
handleHeaderMenu = (event) => {
let value = event.currentTarget;
this.props.handleHeaderMenu(value);
};
handleHeaderClose = () => {
this.props.handleHeaderClose();
};
logOut = () => {
// Remove the persisted credentials first so the UI and storage stay in sync.
SessionService.removeItem('account');
this.props.logOut();
}
render() {
const { anchorEl, isAuth } = this.props;
// The menu is open exactly when an anchor element is set.
const open = Boolean(anchorEl);
let userName = '';
const acсount = SessionService.getJsonItem('account');
if (acсount) {
userName = acсount.userName;
}
return (
<Header
handleMenu={this.handleHeaderMenu}
handleClose={this.handleHeaderClose}
logOut={this.logOut}
anchorEl={anchorEl}
open={open}
userName={userName}
isAuth={isAuth}
path={this.getCurrentPath()} />
);
}
}
// Binds the header menu handlers and logout action to the store dispatch.
const mapDispatchToProps = (dispatch) => ({
    handleHeaderClose: bindActionCreators(handleHeaderClose, dispatch),
    handleHeaderMenu: bindActionCreators(handleHeaderMenu, dispatch),
    logOut: bindActionCreators(logOut, dispatch)
});

// Flattens the header and auth slices into props (later slices win on clashes).
const mapStateToProps = (state) => {
    const { header, isAuth } = state;
    return Object.assign({}, header, isAuth);
}
export default withRouter(connect(mapStateToProps, mapDispatchToProps)(HeaderContainer));<file_sep>/task_netcore/task4/WebApi/task4/task4.DAL/Entities/Photo.cs
namespace task4.DAL.Entities
{
/// <summary>
/// Photo entity: one gallery image URL belonging to a movie. The navigation
/// property is virtual to allow lazy loading.
/// </summary>
public class Photo
{
public int Id { get; set; }
public string PictureUrl { get; set; }
public virtual Movie Movie { get; set; }
}
}
<file_sep>/task_netcore/task4/Client/src/modules/Comments/reducers/CommentFormReducer.js
import { CLEAR_COMMENT_FIELD, ON_COMMENT_CHANGE } from '../actions/types';
// Default comment-form slice: empty message.
const InitialState = {
    message: ''
}

// Holds the in-progress comment text; cleared after a successful post.
const commentFormReducer = (state = InitialState, action) => {
    if (action.type === ON_COMMENT_CHANGE) {
        return { ...state, message: action.message };
    }
    if (action.type === CLEAR_COMMENT_FIELD) {
        return { ...InitialState };
    }
    return state;
}
export default (commentFormReducer);<file_sep>/task_netcore/task4/Client/src/modules/MovieInfo/reducers/MovieInfoReducer.js
import { REQUESTED_MOVIE_INFO, REQUESTED_MOVIE_INFO_FAILED, REQUESTED_MOVIE_INFO_SUCCEEDED } from '../actions/types';
// Default slice: an empty movie-info shell with an empty photo gallery.
const InitialState = {
    movieInfo: {
        photos: []
    },
    isLoading: false,
    error: false
}

// Tracks loading of a single movie's detail view.
const movieReducer = (state = InitialState, action) => {
    if (action.type === REQUESTED_MOVIE_INFO) {
        // Fresh shell object per request, matching the original literals.
        return { movieInfo: { photos: [] }, isLoading: true, error: false };
    }
    if (action.type === REQUESTED_MOVIE_INFO_SUCCEEDED) {
        return { movieInfo: action.movieInfo, isLoading: false, error: false };
    }
    if (action.type === REQUESTED_MOVIE_INFO_FAILED) {
        // Note: on failure movieInfo is a bare object (no `photos` array).
        return { movieInfo: {}, isLoading: false, error: true };
    }
    return state;
}
export default (movieReducer);<file_sep>/task_netcore/task4/Client/src/modules/Rating/containers/RatingContainer.js
import React from 'react';
import { withRouter } from 'react-router-dom';
import Rating from '../views';
import { bindActionCreators } from 'redux';
import { connect } from 'react-redux';
import {clearErrorMessage, getAverageRating, sendRating, getUserRating } from '../actions';
import { applicationRoutes } from '../../../Constants';
// Container for the star-rating widget on the movie page: loads the average
// rating on mount (plus the user's own rating when signed in), and submits a
// new rating — redirecting anonymous users to the login page first.
class RatingContainer extends React.Component {
state = {
// Movie id taken from the /movies/:id route parameter.
id: this.props.match.params.id
};
componentDidMount() {
getAverageRating(this.props.dispatch, this.state.id);
if (this.props.isAuth) {
getUserRating(this.props.dispatch, this.state.id);
}
// Drop any stale rating error left over from a previous visit.
this.props.clearErrorMessage();
}
onRatingChange = (newRating) => {
if (!this.props.isAuth) {
// Only authenticated users may rate; send the visitor to the login form.
this.props.history.push(applicationRoutes.loginFormRoute);
return;
}
const rating = {
movieId: this.state.id,
value: newRating
}
sendRating(this.props.dispatch, rating, this.state.id)
}
render() {
const { value, alreadyRated } = this.props.rating;
const { averageRating, isLoading } = this.props;
const { haveRatingErrors, errorMessage } = this.props;
return (
<Rating
haveRatingErrors={haveRatingErrors}
errorMessage={errorMessage}
onRatingChange={this.onRatingChange}
value={value}
isLoading={isLoading}
alreadyRated={alreadyRated}
averageRating={averageRating} />
);
}
}
// Binds clearErrorMessage and exposes raw dispatch for the async rating helpers.
const mapDispatchToProps = (dispatch) => ({
    clearErrorMessage: bindActionCreators(clearErrorMessage, dispatch),
    dispatch
});

// Flattens the auth and all rating-related slices into props
// (later slices win on key collisions, matching the original spread order).
const mapStateToProps = (state) => {
    const { isAuth, userRating, averageRating, sendRating } = state;
    return Object.assign({}, isAuth, userRating, averageRating, sendRating);
}
export default withRouter(connect(mapStateToProps, mapDispatchToProps)(RatingContainer));<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Interfaces/IPhotoService.cs
using System.Collections.Generic;
using task4.BLL.Models;
namespace task4.BLL.Interfaces
{
/// <summary>
/// BLL contract for reading a movie's photo gallery.
/// </summary>
public interface IPhotoService
{
/// <summary>Returns all photos attached to the given movie.</summary>
IList<PhotoModel> GetPhotosByMovie(int movieId);
}
}
<file_sep>/task_netcore/task4/Client/src/modules/Header/views/styles.js
// JSS styles for the app header: spaced layout, link colors, and the
// hamburger-button offsets.
const styles = theme => ({
headerContainer: {
display: 'flex',
justifyContent: 'space-between'
},
menuButton: {
marginLeft: -12,
marginRight: 20,
},
loginItems: {
textDecoration: 'none',
color: '#696969',
border: 'none'
},
toMovieLink: {
textDecoration: 'none',
color: '#fff'
}
});
export default styles;<file_sep>/task_netcore/task4/Client/src/modules/Rating/views/index.js
import React from 'react';
import PropTypes from 'prop-types';
import styles from './styles';
import { Typography, withStyles } from '@material-ui/core';
import ReactStars from 'react-stars';
import CircularProgress from '@material-ui/core/CircularProgress';
const Rating = ({ classes, onRatingChange, alreadyRated, value, averageRating, haveRatingErrors, errorMessage, isLoading }) => {
return (
<div>
{
isLoading ? <CircularProgress /> :
<div>
<Typography className={classes.ratingInfo}>
Raiting: { averageRating === 0 ? 'No Rating' : averageRating }
</Typography>
<ReactStars
count={10}
onChange={onRatingChange}
size={24}
value={averageRating}
color2={'#ffd700'} />
{
alreadyRated && <Typography className={classes.ratingInfo}>
Your rating is: {value}
</Typography>
}
{
haveRatingErrors && <Typography className={classes.errorMessage}>
{errorMessage}
</Typography>
}
</div>
}
</div>
);
}
// Fix: the previous propTypes were copied from the Movie card (genre, name,
// year, picture, ...) and did not match this component's actual props; they
// now mirror the destructured props of Rating above.
Rating.propTypes = {
    classes: PropTypes.object.isRequired,
    onRatingChange: PropTypes.func,
    alreadyRated: PropTypes.bool,
    value: PropTypes.number,
    averageRating: PropTypes.number,
    haveRatingErrors: PropTypes.bool,
    errorMessage: PropTypes.string,
    isLoading: PropTypes.bool
};
export default withStyles(styles)(Rating);<file_sep>/task_netcore/task4/WebApi/task4/task4.WEB/Common/Common.cs
using Microsoft.AspNetCore.Http;
using System.Linq;
using System.Security.Claims;
namespace task4.WEB.Common
{
/// <summary>
/// HttpContext extension helpers shared by the Web controllers.
/// </summary>
public static class Common
{
    /// <summary>
    /// Extracts the authenticated user's id (the NameIdentifier claim) from
    /// the current HTTP context.
    /// </summary>
    /// <param name="context">HTTP context carrying the authenticated principal.</param>
    /// <returns>The user id claim value, or null when the claim is absent.</returns>
    public static string GetUserIdByHttpContext(this HttpContext context)
    {
        // Fix: null-conditional access so a missing claim (e.g. an anonymous or
        // malformed token) yields null instead of a NullReferenceException.
        return context.User.Claims.FirstOrDefault(claim => claim.Type == ClaimTypes.NameIdentifier)?.Value;
    }
}
}<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Interfaces/IRatingService.cs
using task4.BLL.Models;
namespace task4.BLL.Interfaces
{
/// <summary>
/// BLL contract for movie ratings: submitting, reading a user's own rating,
/// and computing a movie's average rating.
/// </summary>
public interface IRatingService
{
RatingResultModel AddRating(RatingModel rating);
RatingResultModel GetUserRating(int userId, int movieId);
decimal GetAverageRating(int movieId);
}
}<file_sep>/task_netcore/task4/Client/src/modules/MoviesList/views/index.js
import React from 'react';
import PropTypes from 'prop-types';
import styles from './styles'
import { Typography, withStyles, Card, Button } from '@material-ui/core';
import { Link } from 'react-router-dom';
const Movie = ({ classes, year, name, genre, rating, picture, country, id }) => {
return (
<Card className={classes.card}>
<img src={picture} className={classes.picture} alt='Poster' />
<Typography className={classes.name}>{name}</Typography>
<Typography className={classes.addInfo}>Genre: {genre}</Typography>
<Typography className={classes.addInfo}>Year: {year}</Typography>
<Typography className={classes.addInfo}>Country: {country}</Typography>
<Typography className={classes.addInfo}>Raiting: {rating === 0 && 'No rating'} {rating !== 0 && rating}</Typography>
<Link to={`/movies/${id}`} className={classes.link}>
<Button color='primary' variant='contained' className={classes.button}>
More
</Button>
</Link>
</Card>
);
}
// Fix: `country` is destructured and rendered by Movie but was missing from
// the propTypes declaration.
Movie.propTypes = {
    classes: PropTypes.object.isRequired,
    genre: PropTypes.string,
    name: PropTypes.string,
    year: PropTypes.number,
    rating: PropTypes.number,
    picture: PropTypes.string,
    country: PropTypes.string,
    id: PropTypes.number
};
export default withStyles(styles)(Movie);<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Models/PhotoModel.cs
using System;
using System.Collections.Generic;
using System.Text;
namespace task4.BLL.Models
{
/// <summary>
/// BLL model carrying a single movie-gallery image URL.
/// </summary>
public class PhotoModel
{
public string PictureUrl { get; set; }
}
}<file_sep>/task_netcore/task3/task3/Interfaces/IActionLogger.cs
using Microsoft.AspNetCore.Mvc.Filters;
namespace task3.Interfaces
{
public interface IActionLogger
{
void Log(ActionExecutingContext context);
void Log(ExceptionContext context);
}
}
<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Interfaces/ICommentService.cs
using System.Collections.Generic;
using task4.BLL.Models;
namespace task4.BLL.Interfaces
{
public interface ICommentService
{
IList<CommentModel> GetCommentsByMovieId(int movieId);
void AddComment(CommentModel commentModel);
}
}
<file_sep>/task_netcore/task4/Client/src/Constants.js
export const PASSWORD_VALIDATION_EXP = /^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[^\da-zA-Z]).{6,}$/;
export const MIN_USERNAME_LENGTH = 3;
export const errorMessagesEnum = Object.freeze({
FieldIsRequired: 'Field is required',
UserNameErrorMessage: 'Login should be longer than 3 symbols',
PasswordErrorMessage: 'Password should be longer than 6 symbols, it should contains at least one number, one special character, one lowercase and uppercase Latin lettter ',
});
export const applicationRoutes = {
registerFormRoute: '/account/register',
loginFormRoute: '/account/login',
moviesRoute: '/movies',
moviesSearchResultRoute: '/movie-results',
movieInfoRoute: '/movies/:id',
notFoundRoute: '/404',
defaultRoute: '/',
errorRoute: '/*'
};
const domainName = 'http://localhost:49448';
export const webApiRoutes = {
loadMoviesRoute: domainName + '/movie/getmovies',
loadMovieInfoRoute: domainName + '/movie/getmovie/',
registerRoute: domainName + '/account/register',
loginRoute: domainName + '/account/login',
loadCommentsRoute: domainName + '/comment/getcomments/',
addCommentRoute: domainName + '/comment/addcomment',
addRatingRoute: domainName + '/rating/addrating',
loadAverageRatingRoute: domainName + '/rating/getaveragerating/',
loadUserRatingRoute: domainName + '/rating/getuserrating/',
searchMoviesRoute: domainName + '/movie/getmoviesbyname/'
}<file_sep>/task_netcore/task4/Client/src/modules/Comments/actions/types.js
export const REQUESTED_SEND_COMMENT = 'REQUESTED_SEND_COMMENT';
export const REQUESTED_SEND_COMMENT_SUCCEEDED = 'REQUESTED_SEND_COMMENT_SUCCEEDED';
export const REQUESTED_SEND_COMMENT_FAILED = 'REQUESTED_SEND_COMMENT_FAILED';
export const CLEAR_COMMENT_FIELD = 'CLEAR_COMMENT_FIELD';
export const ON_COMMENT_CHANGE = 'ON_COMMENT_CHANGE';
export const REQUESTED_COMMENTS_SUCCEEDED = 'REQUESTED_COMMENTS_SUCCEEDED';
export const REQUESTED_COMMENTS = 'REQUESTED_COMMENTS';
export const REQUESTED_COMMENTS_FAILED = 'REQUESTED_COMMENTS_FAILED';<file_sep>/task_netcore/task4/WebApi/task4/task4.WEB/Controllers/PhotoController.cs
using Microsoft.AspNetCore.Mvc;
using task4.BLL.Interfaces;
namespace task4.WEB.Controllers
{
[Route("[controller]/[action]")]
public class PhotoController : Controller
{
private readonly IPhotoService _photoService;
public PhotoController(IPhotoService photoService)
{
_photoService = photoService;
}
[HttpGet("{id}")]
public IActionResult GetPhotos(int id)
{
var photos = _photoService.GetPhotosByMovie(id);
if (photos == null)
{
return NotFound();
}
return Ok(photos);
}
}
}<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Interfaces/IMovieService.cs
using System.Collections.Generic;
using task4.BLL.Models;
namespace task4.BLL.Interfaces
{
public interface IMovieService
{
IList<MovieModel> GetMovies();
IList<MovieModel> GetMoviesByName(string movieName);
MovieInfoModel GetMovieInfoById(int movieId);
decimal UpdateMovieRating(int movieId);
}
}<file_sep>/task_netcore/task4/Client/src/modules/AccountForm/reducers/AccountReducer.js
import { REQUESTED_AUTHENTICATION, REQUESTED_AUTHENTICATION_FAILED, REQUESTED_AUTHENTICATION_SUCCEEDED, CLEAR_ERROR_MESSAGE } from '../actions/types';
const InitialState = {
token: '',
haveAccountErrors: false,
errorMessage: ''
}
const movieReducer = (state = InitialState, action) => {
switch (action.type) {
case REQUESTED_AUTHENTICATION:
return {
...InitialState
};
case REQUESTED_AUTHENTICATION_SUCCEEDED:
return {
token: action.token,
haveAccountErrors: false,
errorMessage: ''
};
case REQUESTED_AUTHENTICATION_FAILED:
return {
token: '',
haveAccountErrors: true,
errorMessage: action.errorMessage
};
case CLEAR_ERROR_MESSAGE: {
return {
haveAccountErrors: false,
errorMessage: ''
}
}
default:
return state;
}
}
export default (movieReducer);<file_sep>/task_netcore/task4/Client/src/modules/AccountForm/views/styles.js
const styles = theme => ({
formContainer: {
padding: '20px'
},
paperContainer: {
maxWidth: '600px',
margin: '20px auto'
},
fieldContainer: {
margin: '10px'
},
errorMessage: {
display: 'inline-block',
backgroundColor: 'red',
padding: '10px 20px'
}
});
export default styles; <file_sep>/task_netcore/task3/task3/Loggers/ActionLogger.cs
using Microsoft.AspNetCore.Mvc.Filters;
using Microsoft.Extensions.Configuration;
using System;
using System.IO;
using task3.Interfaces;
namespace task3.Loggers
{
public class ActionLogger : IActionLogger
{
private string fileName;
public ActionLogger(IConfiguration configuration)
{
fileName = configuration["FileName"];
}
public void Log(ActionExecutingContext context)
{
using (StreamWriter streamWriter = new StreamWriter(fileName, true))
{
streamWriter.WriteLine($"Request type: {context.HttpContext.Request.Method}");
streamWriter.WriteLine($"Action name: {context.ActionDescriptor.DisplayName}");
streamWriter.WriteLine($"Action time: {DateTime.Now.TimeOfDay}");
streamWriter.WriteLine("---------------------------------------------------");
}
}
public void Log(ExceptionContext context)
{
Exception ex = context.Exception;
using (StreamWriter streamWriter = new StreamWriter(fileName, true))
{
streamWriter.WriteLine($"Exeption message: {ex.Message}");
streamWriter.WriteLine("---------------------------------------------------");
}
}
}
}<file_sep>/task_netcore/task2/task2/Models/ResponseModel.cs
using System.Collections.Generic;
namespace task2.Models
{
public class ResponseModel
{
public int Count { get; set; }
public List<Starship> Results { get; set; }
}
}<file_sep>/task_netcore/task4/Client/src/Router/index.js
import React from 'react'
import { Switch, Route, Redirect } from 'react-router-dom'
import NotFound from '../modules/MovieSearch/views/NotFound';
import AccountFormContainer from '../modules/AccountForm/containers/AccountFormContainer';
import { applicationRoutes } from '../Constants';
import MovieSearchResultContainer from '../modules/MovieSearch/containers/MovieSearchResultContainer';
import MoviesRouter from './MovieRouter';
const Main = () => (
<main>
<Switch>
<Route exact path={applicationRoutes.defaultRoute} render={() => <Redirect to={applicationRoutes.moviesRoute} />} />
<Route path={applicationRoutes.registerFormRoute} component={AccountFormContainer} />
<Route path={applicationRoutes.loginFormRoute} component={AccountFormContainer} />
<Route path={applicationRoutes.notFoundRoute} component={NotFound} />
<Route path={applicationRoutes.moviesRoute} component={MoviesRouter} />
<Route path={applicationRoutes.moviesSearchResultRoute} component={MovieSearchResultContainer} />
<Route path={applicationRoutes.errorRoute} >
<Redirect to={applicationRoutes.notFoundRoute} />
</Route>
</Switch>
</main>
);
export default (Main);<file_sep>/task_netcore/task2/task2/Models/Starship.cs
using System;
namespace task2.Models
{
public class Starship
{
public string Name { get; set; }
public string Model { get; set; }
public int Index { get; set; }
public DateTime Created { get; set; }
public DateTime Edited { get; set; }
public string Url { get; set; }
}
}
<file_sep>/task_javascript/task2/task2.js
(function task2() {
let Robot = function (name) {
this.name = name;
}
function add(op1, op2) {
this.name = this.name || "Human";
return this.name + " can count to " + (op1 + op2);
}
let voltron = new Robot("Voltron");
(function task2_1__4() {
console.log(add(0, 1));
console.log(add.call(voltron, 1, 2));
console.log(add.apply(voltron, [20, 30]));
console.log(add.bind(voltron)("drinking", "beer"));
})();
showName = function () {
let that = this;
let name = this.name;
function show(obj) {
return obj.name;
}
setTimeout(function () {
console.log(show(voltron));
console.log(show.apply(this, [voltron]));
console.log(show.call(this, voltron));
console.log(show.bind(this)(voltron));
console.log(name);
console.log(that.name);
}, 1000);
}.bind(voltron)
showName();
})();<file_sep>/task_netcore/task4/Client/src/modules/Photos/views/styles.js
const styles = theme => ({
galeryContainer: {
margin: '0 auto',
width: '1000px',
maxHeight: '200px',
'@media (max-width: 1070px)': {
width: '600px'
},
'@media (max-width: 650px)': {
width: '300px'
}
}
});
export default styles;<file_sep>/task_reactredux/task1/src/views/LoginReduxForm/index.js
import React from 'react';
import { Paper, TextField, Button, FormControl, FormHelperText } from '@material-ui/core';
import { Field, reduxForm } from 'redux-form';
import PropTypes from 'prop-types';
const renderField = ({ input, label, type, meta: { touched, error }, margin }) => (
<div>
<FormControl>
<div>
<TextField label={label} type={type} error={error && touched} margin={margin} {...input} />
<div>
{touched &&
(error && <FormHelperText>{error}</FormHelperText>)}
</div>
</div>
</FormControl>
</div>
);
let LoginReduxForm = ({ handleSubmit, mail, password }) => {
return (
<div>
<Paper>
<form onSubmit={handleSubmit} >
<Field name="mail" component={renderField} label="Почта" type="text" margin="normal" />
<Field name="password" component={renderField} label="Пароль" type="password" margin="normal" />
<Button type="submit" variant="outlined">
Войти
</Button>
</form>
</Paper>
<Paper>
<p>{mail}</p>
<p>{password}</p>
</Paper>
</div>
);
}
LoginReduxForm.propTypes = {
mail: PropTypes.string,
password: PropTypes.string,
handleSubmit: PropTypes.func
};
LoginReduxForm = reduxForm({
form: 'login'
})(LoginReduxForm);
export default (LoginReduxForm);<file_sep>/task_reactredux/task1/src/views/About/styles.js
const styles = theme => ({
about: {
width: '700px',
margin: 'auto'
}
});
export default styles; <file_sep>/task_netcore/task4/WebApi/task4/task4.WEB/Models/CommentViewModel.cs
using System.ComponentModel.DataAnnotations;
namespace task4.WEB.Models
{
public class CommentViewModel
{
[Required]
public string Message { get; set; }
[Required]
public int MovieId { get; set; }
}
}
<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Models/MovieModel.cs
using System;
using System.Collections.Generic;
using System.Text;
namespace task4.BLL.Models
{
public class MovieModel
{
public int Id { get; set; }
public string Name { get; set; }
public string Country { get; set; }
public int Year { get; set; }
public string Genre { get; set; }
public string PictureUrl { get; set; }
public decimal Rating { get; set; }
}
}
<file_sep>/task_netcore/task1/task1/Controllers/ValuesController.cs
using Microsoft.AspNetCore.Mvc;
using task1.Models;
namespace task1.Controllers
{
[Route("api/[controller]")]
public class ValuesController : Controller
{
[HttpGet]
public ActionResult Get(SumRequestViewModel model)
{
if (ModelState.IsValid && model.A.HasValue && model.B.HasValue)
{
var a = model.A.Value;
var b = model.B.Value;
return Ok(new SumResponseModel { A = a, B = b, Sum = a + b });
}
return BadRequest(ModelState);
}
}
}
<file_sep>/task_netcore/task1/task1/Models/SumResponseModel.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace task1.Models
{
public class SumResponseModel
{
public int A { get; set; }
public int B { get; set; }
public int Sum { get; set; }
}
}
<file_sep>/task_netcore/task4/Client/src/modules/Photos/views/index.js
import React from 'react';
import PropTypes from 'prop-types';
import { withStyles } from '@material-ui/core/styles';
import styles from './styles';
import Gallery from 'react-grid-gallery';
const Photos = ({ photos, classes }) => {
return (
<div className={classes.galeryContainer}>
<Gallery
enableImageSelection={false}
images={[...photos].map(photo => ({
src: photo.pictureUrl,
thumbnail: photo.pictureUrl,
thumbnailHeight: 180
}))} />
</div>
);
}
Photos.propTypes = {
classes: PropTypes.object.isRequired,
photos: PropTypes.array
};
export default withStyles(styles)(Photos);<file_sep>/task_reactredux/task1/src/Constants.js
const commandsEnum = Object.freeze({
start: "start",
add: "add",
del: "delete",
def: "default"
});
export default (commandsEnum);
export const MIN_PASSWORD_LENGTH = 5;
export const REG_EXPR = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
export const errorMessagesEnum = Object.freeze({
FieldIsRequired: 'Поле должно быть заполнено',
EmailErrorMessage: 'Некорректно введена почта',
PasswordErrorMessage: 'Пароль минимум 6 символов'
});
export const applicationRoutes = {
aboutRoute: '/iTechArt-Lab-2018/about',
countersRoute: '/iTechArt-Lab-2018/counters',
loginRoute: '/iTechArt-Lab-2018/login',
loginReduxRoute: '/iTechArt-Lab-2018/login-redux',
loginReduxSuccessRoute: '/iTechArt-Lab-2018/login-redux/success',
loginReduxFormRoute: '/iTechArt-Lab-2018/login-redux-form',
loginReduxFormSuccessRoute: '/iTechArt-Lab-2018/login-redux-form/success',
startPageRoute: '/iTechArt-Lab-2018/',
notFoundRoute: '/iTechArt-Lab-2018/404',
defaultRoute: '/',
errorRoute: '/*'
};<file_sep>/task_netcore/task4/Client/src/Services/SessionService.js
export class SessionService {
static getItem(type) {
return sessionStorage.getItem(type);
};
static setItem(type, value) {
sessionStorage.setItem(type, value);
};
static removeItem(type) {
return sessionStorage.removeItem(type);
}
static hasItem(type) {
return !!sessionStorage.getItem(type);
}
static setJsonItem(type, value) {
sessionStorage.setItem(type, JSON.stringify(value));
};
static getJsonItem(type) {
return JSON.parse(sessionStorage.getItem(type));
};
}<file_sep>/task_netcore/task3/task3/Controllers/DataController.cs
using System.Collections.Generic;
using AutoMapper;
using Microsoft.AspNetCore.Mvc;
using task3.Attributes;
using task3.Data.Entities;
using task3.Interfaces;
using task3.Models;
namespace task3.Controllers
{
[ServiceFilter(typeof(ActionLoggerAttribute))]
[ServiceFilter(typeof(ExceptionLoggerAttribute))]
[Route("api/[controller]")]
public class DataController : Controller
{
private readonly IDataService _dataService;
private readonly IMapper _mapper;
public DataController(IDataService dataService, IMapper mapper)
{
_dataService = dataService;
_mapper = mapper;
}
[HttpGet]
public IActionResult Get()
{
return Ok(_dataService.GetData());
}
[HttpGet("{id}")]
public IActionResult Get(int id)
{
var movie = _dataService.GetDataById(id);
if(movie != null)
{
return Ok(movie);
}
return NotFound();
}
[HttpPost]
public IActionResult Post([FromBody]MovieModel movieModel)
{
if(!ModelState.IsValid)
{
return BadRequest(ModelState);
}
var movie = _mapper.Map<MovieModel, Movie>(movieModel);
_dataService.AddData(movie);
return Ok(movie);
}
[HttpPut]
public IActionResult Put([FromBody]MovieModel movieModel)
{
if (!ModelState.IsValid)
{
return BadRequest(ModelState);
}
var movie = _mapper.Map<MovieModel, Movie>(movieModel);
_dataService.UpdateData(movie);
return Ok(movie);
}
[HttpDelete("{id}")]
public IActionResult Delete(int id)
{
var movie = _dataService.DeleteData(id);
if (movie != null)
{
return Ok(movie);
}
return BadRequest();
}
}
}<file_sep>/task_csshtml/README.md
# ItechArt-Lab-2018
https://alekslevko.github.io/Task_2_public/<file_sep>/task_javascript/task4/task4.js
(function task4_1() {
"use strict";
function delay(duration) {
return new Promise((resolve) => {
setTimeout(resolve, duration);
});
}
function logHi() {
console.log('hi');
}
delay(2000).then(logHi);
})();
(function task4_2() {
"use strict";
new Promise((resolve) => {
setTimeout(() => {
resolve(10);
}, 3000);
})
.then((result) => {
console.log("Получено " + result);
return result + 2;
})
.then((result) => {
console.log("Получено " + result);
return new Promise((resolve) => {
setTimeout(() => {
resolve(result + 2);
}, 2000);
})
})
.then((result) => {
console.log("В итоге " + result);
});
})();
(function task4_3() {
"use strict";
function getRandomInt(min, max) {
return Math.floor(Math.random() * (max - min + 1)) + min;
}
let promise = new Promise((resolve, reject) => {
let time = getRandomInt(1, 4);
setTimeout(() => {
time <= 2 ? resolve("Время выполнения меньше 2 секунд") : reject("Время выполнения больше 2 секунд");
}, time * 1000);
});
promise
.then((result) => {
console.log(result);
})
.catch((error) => {
console.error(error);
});
})();
(function task4_4() {
"use strict";
function getRandomInt(min, max) {
return Math.floor(Math.random() * (max - min + 1)) + min;
}
let count = getRandomInt(1, 10);
let promises = [];
let sec_arr = [];
for (let i = 0; i < count; i++) {
let sec = getRandomInt(1, 10);
sec_arr.push(sec);
promises.push(new Promise((resolve) => {
setTimeout(() => {
resolve(console.log("Номер " + (i + 1) + " выполнялся " + sec + " секунд"));
}, sec * 1000);
}))
}
Promise.all(promises)
.then(() => {
console.log("Время выполнения " + (Math.max.apply(null, sec_arr)) + " секунд");
});
})();<file_sep>/task_netcore/task1/task1/Models/SumRequestViewModel.cs
using System;
using System.ComponentModel.DataAnnotations;
namespace task1.Models
{
public class SumRequestViewModel
{
[Required(ErrorMessage = "Parameter is required")]
[Range(1, Int32.MaxValue, ErrorMessage = "Parameter should be a positive")]
public int? A { get; set; }
[Required(ErrorMessage = "Parameter is required")]
[Range(Int32.MinValue, -1, ErrorMessage = "Parameter should be a negative")]
public int? B { get; set; }
}
}
<file_sep>/task_javascript/task5/task5.js
//Сommented functions are with errors!!!
//task5_1
function loadVideosAsync() {
return "Load video";
}
function loadMetaAsync() {
return "Load meta";
}
function DoSomething(fromVideo, fromMeta) {
return console.log(fromVideo + " " + fromMeta);
}
/*
loadVideosAsync().then((videos) => {
loadMetaAsync().then((meta) => {
DoSomething(videos, meta);
});
});
*/
Promise.all([loadVideosAsync(), loadMetaAsync()])
.then((results) => {
DoSomething(results[0], results[1]);
});
//task5_2
function doSomethingAsync() {
return new Promise((resolve) => {
resolve("1");
});
}
function somethingComplicated(res) {
return console.log("done" + " " + res);
}
/*
function anAsyncCall() {
var promise = doSomethingAsync();
promise.then(() => {
somethingComplicated();
});
return promise;
}
*/
function anAsyncCall() {
var promise = doSomethingAsync();
return promise.then((res) => {
somethingComplicated(res);
});
}
anAsyncCall();
// task5_3
/*
db.getAllDocs().then((result) => {
result.rows.forEach((row) => {
db.remove(row.doc);
});
})
.then(() => {
// All docs must be removed!
});
*/
db.getAllDocs().then((result) => {
return Promise.all(result.rows.forEach((row) => {
db.remove(row.doc);
}));
})
.then(() => {
// All docs are removed!
});
// task5_4
function doAsync() {
return new Promise((resolve, reject) => {
reject("Error!!!");
});
}
/*
doAsync()
.then(() => {
throw new Error('nope');
}, (err) => {
// I didn't catch your error! :(
});
*/
doAsync()
.then(() => {
throw new Error('nope');
})
.catch((err) => {
console.error(err);
});<file_sep>/task_netcore/task4/WebApi/task4/task4.WEB/Models/RatingViewModel.cs
using System.ComponentModel.DataAnnotations;
namespace task4.WEB.Models
{
public class RatingViewModel
{
[Required]
public int MovieId { get; set; }
[Required]
public decimal Value { get; set; }
}
}
<file_sep>/task_netcore/task4/Client/src/modules/MovieInfo/views/index.js
import React from 'react';
import PropTypes from 'prop-types';
import styles from './styles';
import { Typography, withStyles, Card } from '@material-ui/core';
import RatingContainer from '../../Rating/containers/RatingContainer';
const MovieInfo = ({ classes, year, name, genre, picture, country, producer, description, id }) => {
return (
<div className={classes.movieInfoContainer}>
<Card className={classes.card}>
<img src={picture} className={classes.picture} alt='Poster' />
<Typography className={classes.name}>{name}</Typography>
<div className={classes.addInfoContainer}>
<Typography className={classes.addInfo}>Genre: {genre}</Typography>
<Typography className={classes.addInfo}>Year: {year}</Typography>
<Typography className={classes.addInfo}>Country: {country}</Typography>
<Typography className={classes.addInfo}>Producer: {producer}</Typography>
<RatingContainer
id={id} />
</div>
<Typography className={classes.description}>{description}</Typography>
</Card>
</div>
);
}
MovieInfo.propTypes = {
classes: PropTypes.object.isRequired,
genre: PropTypes.string,
name: PropTypes.string,
year: PropTypes.number,
rating: PropTypes.number,
picture: PropTypes.string,
description: PropTypes.string,
producer: PropTypes.string,
id: PropTypes.number,
ratingChanged: PropTypes.func
};
export default withStyles(styles)(MovieInfo);<file_sep>/task_reactredux/task1/src/containers/LoginReduxContainer.js
import React from 'react';
import { onMailChange, onPasswordChange, handleSubmit } from '../actions';
import { bindActionCreators } from 'redux';
import { connect } from 'react-redux';
import Login from '../views/Login';
import { applicationRoutes } from '../Constants';
class LoginReduxContainer extends React.Component{
onMailChange = (e) => {
let value = e.target.value;
this.props.onMailChange(value);
}
onPasswordChange = (e) => {
let value = e.target.value;
this.props.onPasswordChange(value);
}
handleSubmit = (e) => {
e.preventDefault();
this.props.handleSubmit(true);
const {mailValid, passwordValid} = this.props;
if(mailValid && passwordValid){
this.props.history.push(
applicationRoutes.loginReduxSuccessRoute);
}
}
render() {
return (<Login
handleSubmit = {this.handleSubmit}
onMailChange = {this.onMailChange}
onPasswordChange = {this.onPasswordChange}
mail = {this.props.mail}
wasSubmited = {this.props.wasSubmited}
password = {<PASSWORD>}
mailValid = {this.props.mailValid}
passwordValid = {this.props.passwordValid} />
);
}
}
const mapStateToProps = (state) => {
return {
...state.mail,
...state.password,
...state.formReducer
}
};
const mapDispatchToProps = (dispatch) => {
return {
onMailChange: bindActionCreators(onMailChange, dispatch),
onPasswordChange: bindActionCreators(onPasswordChange, dispatch),
handleSubmit: bindActionCreators(handleSubmit, dispatch)
};
};
export default connect(mapStateToProps, mapDispatchToProps)(LoginReduxContainer);<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Services/CommentService.cs
using AutoMapper;
using System.Collections.Generic;
using System.Linq;
using task4.BLL.Interfaces;
using task4.BLL.Models;
using task4.DAL.Entities;
using task4.DAL.Interfaces;
namespace task4.BLL.Services
{
public class CommentService: ICommentService
{
private readonly IUnitOfWork _unitOfWork;
private readonly IMapper _mapper;
public CommentService(IUnitOfWork uow, IMapper mapper)
{
_unitOfWork = uow;
_mapper = mapper;
}
public IList<CommentModel> GetCommentsByMovieId(int movieId)
{
var comments = _unitOfWork.CommentRepository.GetQueryableAll().Where(c => c.Movie.Id == movieId).Select(x => new Comment
{
Date = x.Date,
Message = x.Message,
User = x.User
}).ToList();
return _mapper.Map<IList<Comment>, IList<CommentModel>>(comments);
}
public void AddComment(CommentModel commentModel)
{
var user = _unitOfWork.UserRepository.GetById(commentModel.UserId);
var movie = _unitOfWork.MovieRepository.GetById(commentModel.MovieId);
var comment = _mapper.Map<CommentModel, Comment>(commentModel);
comment.User = user;
comment.Movie = movie;
_unitOfWork.CommentRepository.Insert(comment);
_unitOfWork.Commit();
}
}
}
<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Services/MovieService.cs
using AutoMapper;
using System;
using System.Collections.Generic;
using System.Linq;
using task4.BLL.Interfaces;
using task4.BLL.Models;
using task4.DAL.Entities;
using task4.DAL.Interfaces;
namespace task4.BLL.Services
{
public class MovieService: IMovieService
{
private readonly IUnitOfWork _unitOfWork;
private readonly IMapper _mapper;
public MovieService(IUnitOfWork uow, IMapper mapper)
{
_unitOfWork = uow;
_mapper = mapper;
}
public IList<MovieModel> GetMovies()
{
var movies = _unitOfWork.MovieRepository.GetQueryableAll().ToList();
return _mapper.Map<IList<Movie>, IList<MovieModel>>(movies);
}
public IList<MovieModel> GetMoviesByName(string movieName)
{
var movies = _unitOfWork.MovieRepository.GetQueryableAll().Where(m => m.Name.Contains(movieName)).ToList();
return _mapper.Map<IList<Movie>, IList<MovieModel>>(movies); ;
}
public MovieInfoModel GetMovieInfoById(int movieId)
{
var movie = _unitOfWork.MovieRepository.GetQueryableAll().Select(x => new Movie()
{
Id = x.Id,
Country = x.Country,
Description = x.Description,
Genre = x.Genre,
Name = x.Name,
PictureUrl = x.PictureUrl,
Photos = x.Photos,
Producer = x.Producer,
Rating = x.Rating,
Year = x.Year
}).FirstOrDefault(x => x.Id == movieId);
return _mapper.Map<Movie, MovieInfoModel>(movie); ;
}
public decimal UpdateMovieRating(int movieId)
{
var movie = _unitOfWork.MovieRepository.GetById(movieId);
var rating = _unitOfWork.RatingRepository.GetQueryableAll().Where(r => r.Movie.Id == movieId).Average(r => r.Value);
if (movie != null)
{
movie.Rating = (rating != 0) ? Math.Round(rating, 1) : 0;
}
_unitOfWork.Commit();
return Math.Round(movie.Rating, 1);
}
}
}
<file_sep>/task_netcore/task3/task3/Attributes/ActionLoggerAttribute.cs
using Microsoft.AspNetCore.Mvc.Filters;
using task3.Interfaces;
namespace task3.Attributes
{
public class ActionLoggerAttribute: ActionFilterAttribute
{
private readonly IActionLogger _logger;
public ActionLoggerAttribute(IActionLogger logger)
{
_logger = logger;
}
public override void OnActionExecuting(ActionExecutingContext context)
{
_logger.Log(context);
}
}
}
<file_sep>/task_netcore/task4/Client/src/modules/MovieInfo/actions/types.js
export const REQUESTED_MOVIE_INFO = 'REQUESTED_MOVIE_INFO';
export const REQUESTED_MOVIE_INFO_SUCCEEDED = 'REQUESTED_MOVIE_INFO_SUCCEEDED';
export const REQUESTED_MOVIE_INFO_FAILED = 'REQUESTED_MOVIE_INFO_FAILED';<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Interfaces/IAccountService.cs
using System.Threading.Tasks;
using task4.BLL.Models;
namespace task4.BLL.Interfaces
{
public interface IAccountService
{
Task<AccountResultModel> Register(AccountModel model);
Task<AccountResultModel> Login(AccountModel model);
}
}
<file_sep>/task_netcore/task4/WebApi/task4/task4.BLL/Profiles/MapperProfile.cs
using AutoMapper;
using task4.BLL.Models;
using task4.DAL.Entities;
namespace task4.BLL.Profiles
{
public class MapperProfile: Profile
{
public MapperProfile()
{
CreateMap<CommentModel, Comment>();
CreateMap<Comment, CommentModel>().ForMember(c => c.UserName, c => c.MapFrom(u => u.User.UserName));
CreateMap<Movie, MovieModel>();
CreateMap<Movie, MovieInfoModel>();
CreateMap<Photo, PhotoModel>();
CreateMap<Rating, RatingResultModel>();
CreateMap<RatingModel, Rating>();
}
}
}
| c9e79b050af1536b71d6534f277f25ee1339700d | [
"JavaScript",
"C#",
"Markdown"
] | 129 | JavaScript | alekslevko/iTechArt-Lab-2018 | e3eb8056126e70529aa835ef97cbbc70daa25c01 | 5ebbe4f8e7b12cf5dccfbc57dbc8296890470935 |
refs/heads/master | <file_sep>import Vue from 'vue'
import Vuex from 'vuex'
Vue.use(Vuex);
// Root Vuex state tree: every value here is globally readable via the
// matching getter and mutated only through the matching mutation.
// Many entries are "route payload" slots: a page stashes an object here
// right before navigating, and the destination page reads it back.
const state = {
    // --- login / session ---
    token: '',              // auth token returned by the login endpoint
    username: '',
    isLogin: '',
    userLoginVO: {},        // full user view-object returned at login
    // hide part of the UI while on the login screen
    app_visible: false,
    // id of the "introduction" entry whose detail is being shown
    newTec_detail: 0,
    // payload passed when jumping from the feedback list to its detail page
    feedback_detailInfo: {},
    // payload for viewing a failed-inspection report from purchasing stats
    reportData: {},
    // whether the "back" affordance is shown on these list pages
    testMould_ifAll: true,
    sampleCount_ifAll: true,
    warn_ifAll: true,
    // --- project view (@fjs) ---
    itemCheck_change: true,             // toggles the list display mode
    itemCheck_testMouldDetail: {},      // mold-trial statistics detail
    itemCheck_report: {},               // purchasing-progress list -> detail payload
    itemCheck_item: '',                 // whole project object passed on navigation
    projectsPointManage_item: '',       // project passed to the machining-schedule page
    // --- project info, customer side (@fjs) ---
    customItems_detail: {},             // project-statistics -> detail payload
    // type + detail passed when "view" is clicked on the progress page
    customItems_detailType: {type: 1, detail: {}},
    // --- reminders (@fjs) ---
    warn_change: {},                    // reminder being edited
    // --- project view (multi mold-number) -> detail payloads (@fjs) ---
    projects_info: {},                  // payload for the project-view detail page
    projectInfoDetail: {},              // payload for the engineering task-sheet detail
    projectInfoDetailEdit: {},          // payload for editing that task sheet
    // --- table management: sales plan (@fjs) ---
    salePlan: {},
};
// Read-only accessors over `state`; each simply mirrors one state field
// so components can react to the latest value. None of them use `this`,
// so they are written as arrow-function properties.
const getters = {
    getToken: (state) => state.token,
    userLoginVO: (state) => state.userLoginVO,
    getUsername: (state) => state.username,
    getIsLogin: (state) => state.isLogin,
    getFeedbackDetailInfo: (state) => state.feedback_detailInfo,
    getReportData: (state) => state.reportData,
    newTec_detail: (state) => state.newTec_detail,
    testMould_ifAll: (state) => state.testMould_ifAll,
    sampleCount_ifAll: (state) => state.sampleCount_ifAll,
    warn_ifAll: (state) => state.warn_ifAll,
    app_visible: (state) => state.app_visible,
    // project view (@fjs)
    itemCheck_change: (state) => state.itemCheck_change,
    itemCheck_testMouldDetail: (state) => state.itemCheck_testMouldDetail,
    itemCheck_report: (state) => state.itemCheck_report,
    itemCheck_item: (state) => state.itemCheck_item,
    projectsPointManage_item: (state) => state.projectsPointManage_item,
    // project info, customer side (@fjs)
    customItems_detail: (state) => state.customItems_detail,
    customItems_detailType: (state) => state.customItems_detailType,
    // reminders (@fjs)
    warn_change: (state) => state.warn_change,
    // project view (multi mold-number) detail payloads (@fjs)
    projects_info: (state) => state.projects_info,
    projectInfoDetail: (state) => state.projectInfoDetail,
    projectInfoDetailEdit: (state) => state.projectInfoDetailEdit,
    // table management: sales plan (@fjs)
    salePlan: (state) => state.salePlan,
};
// Mutations: the only place state is written. Each handler receives the
// state object plus a payload and stores the payload into one field.
// No handler uses `this`, so arrow-function properties are safe here.
const mutations = {
    token: (state, payload) => { state.token = payload; },
    userLoginVO: (state, payload) => { state.userLoginVO = payload; },
    username: (state, payload) => { state.username = payload; },
    isLogin: (state, payload) => { state.isLogin = payload; },
    feedbackDetailInfo: (state, payload) => { state.feedback_detailInfo = payload; },
    reportData: (state, payload) => { state.reportData = payload; },
    newTec_detail: (state, payload) => { state.newTec_detail = payload; },
    testMould_ifAll: (state, payload) => { state.testMould_ifAll = payload; },
    sampleCount_ifAll: (state, payload) => { state.sampleCount_ifAll = payload; },
    warn_ifAll: (state, payload) => { state.warn_ifAll = payload; },
    app_visible: (state, payload) => { state.app_visible = payload; },
    // project view (@fjs)
    itemCheck_change: (state, payload) => { state.itemCheck_change = payload; },
    itemCheck_testMouldDetail: (state, payload) => { state.itemCheck_testMouldDetail = payload; },
    itemCheck_report: (state, payload) => { state.itemCheck_report = payload; },
    itemCheck_item: (state, payload) => { state.itemCheck_item = payload; },
    projectsPointManage_item: (state, payload) => { state.projectsPointManage_item = payload; },
    // project info, customer side (@fjs)
    customItems_detail: (state, payload) => { state.customItems_detail = payload; },
    customItems_detailType: (state, payload) => { state.customItems_detailType = payload; },
    // reminders (@fjs)
    warn_change: (state, payload) => { state.warn_change = payload; },
    // project view (multi mold-number) detail payloads (@fjs)
    projects_info: (state, payload) => { state.projects_info = payload; },
    projectInfoDetail: (state, payload) => { state.projectInfoDetail = payload; },
    projectInfoDetailEdit: (state, payload) => { state.projectInfoDetailEdit = payload; },
    // table management: sales plan (@fjs)
    salePlan: (state, payload) => { state.salePlan = payload; },
};
// Actions -- async-capable wrappers that simply commit the matching
// mutation with the given payload. Produced by a factory; note several
// action names carry a getNewXxx prefix while committing the plain
// mutation type (e.g. getNewToken commits 'token').
const commitOnly = type => (context, item) => context.commit(type, item);
const actions = {
  getNewToken: commitOnly('token'),
  userLoginVO: commitOnly('userLoginVO'),
  getNewUsername: commitOnly('username'),
  getNewIsLogin: commitOnly('isLogin'),
  getNewFeedbackDetailInfo: commitOnly('feedbackDetailInfo'),
  getNewReportData: commitOnly('reportData'),
  newTec_detail: commitOnly('newTec_detail'),
  testMould_ifAll: commitOnly('testMould_ifAll'),
  sampleCount_ifAll: commitOnly('sampleCount_ifAll'),
  warn_ifAll: commitOnly('warn_ifAll'),
  app_visible: commitOnly('app_visible'),
  /** project view @fjs */
  itemCheck_change: commitOnly('itemCheck_change'),
  itemCheck_testMouldDetail: commitOnly('itemCheck_testMouldDetail'),
  itemCheck_report: commitOnly('itemCheck_report'),
  itemCheck_item: commitOnly('itemCheck_item'),
  projectsPointManage_item: commitOnly('projectsPointManage_item'),
  /** project info -- customer @fjs */
  customItems_detail: commitOnly('customItems_detail'),
  customItems_detailType: commitOnly('customItems_detailType'),
  /** reminders @fjs */
  warn_change: commitOnly('warn_change'),
  /** project view (multi-mould): parameters passed to the detail view @fjs */
  projects_info: commitOnly('projects_info'),
  projectInfoDetail: commitOnly('projectInfoDetail'),
  projectInfoDetailEdit: commitOnly('projectInfoDetailEdit'),
  /** table management -- sales plan @fjs */
  salePlan: commitOnly('salePlan'),
};
// Assemble the single application store from the pieces defined above and
// expose it as this module's default export.
const store = new Vuex.Store({ state, getters, mutations, actions });
export default store;
<file_sep>// The Vue build version to load with the `import` command
// (runtime-only or standalone) has been set in webpack.base.conf with an alias.
import Vue from 'vue'
import App from './App'
import ElementUI from 'element-ui'
import '../theme/index.css'
import router from './router/index'
import store from './vuex/index'
// Keep devtools enabled. NOTE(review): consider gating this on NODE_ENV so
// it is off in production builds.
Vue.config.devtools = true
// import axios from 'axios'
// import Qs from 'qs'
// Qs ships inside the axios package; no separate npm install is needed.
// Vue.use(axios)
// Vue.use(Qs) // note: this usage is wrong -- axios does not support the Vue.use() plugin style
Vue.use(ElementUI)
// NOTE(review): Vue.use expects a plugin (an object with install(), or a
// function). `store` and `router` here are instances, so the two calls
// below appear to be silent no-ops -- the instances are actually wired in
// through the root Vue options further down. Confirm and remove.
Vue.use(store)
Vue.use(router)
//
// Vue.prototype.$axios = axios;
// Vue.prototype.$qs = Qs; // define axios globally so all components can reach it
// Navigation helper: push the given path onto the router history.
Vue.prototype.$goto = function (targetPath) {
  this.$router.push(targetPath);
};
// Navigation helper: go back one entry in the router history.
// Fixed: the original reached for the module-level `vm` instance -- a
// fragile forward reference that throws (temporal dead zone) if the helper
// is invoked before the root instance exists; use `this` like $goto does.
Vue.prototype.$goback = function () {
  this.$router.go(-1);
};
// Dispatch a store action and optionally navigate afterwards.
// @param {string|null} path  route to push after dispatching (skipped when falsy)
// @param {string} arg        name of the store action to dispatch
// @param {*} data            payload for the action
Vue.prototype.$setVuex = function (path, arg, data) {
  // Fixed: use the calling component's injected $store/$router instead of
  // the module-level `vm` forward reference (which throws before the root
  // instance is created). Also removed a leftover debug console.log of the
  // getter value.
  this.$store.dispatch(arg, data);
  if (path) {
    this.$router.push(path);
  }
};
/**
 * Serialize a row index into a global, 1-based sequence number for a
 * paginated table.
 * @param {number} index zero-based row index on the current page
 * @param {number} cur   current page number (1-based)
 * @param {number} size  page size
 * @returns {number} the row's overall sequence number
 */
Vue.prototype.$indexS = function (index, cur, size) {
  return index + (cur - 1) * size + 1;
};
// Convert a { label: id } map (the object form returned by the project
// stage/status endpoints) into an array of { id, label } entries.
Vue.prototype.$objToOthers = function (obj) {
  const entries = [];
  for (const key of Object.keys(obj)) {
    entries.push({ id: obj[key], label: key });
  }
  return entries;
};
// One-click copy of a path to the clipboard, with a toast reporting the result.
// @param {string} addr  the PDCA file path to copy; a falsy value shows a warning toast
Vue.prototype.$clip = function (addr) {
if (addr) {
// Build a temporary DOM node holding the text, select it, and copy the
// selection. The node must be attached to the document for the selection
// (and therefore the copy) to work.
var s = window.getSelection();
if(s.rangeCount > 0) s.removeAllRanges();
var range = document.createRange();
let tNode = document.createElement("p");
tNode.innerHTML = addr;
document.body.appendChild(tNode);
range.selectNode(tNode);
s.addRange(range);
try {
// NOTE(review): document.execCommand('copy') is deprecated; consider
// navigator.clipboard.writeText when browser support allows.
var msg = document.execCommand('copy') ? '成功' : '失败';
console.log('复制内容 ' + msg);
this.$message({showClose: true, type: 'success', message: 'PDCA文件路径已复制成功,请在新标签页的地址栏中粘贴链接并打开!'})
} catch (err) {
console.log('请在Google浏览器打开,以便获得更好的体验,谢谢!');
}
// Always remove the helper node, whether or not the copy succeeded.
document.body.removeChild(tNode);
} else {
this.$message({showClose: true, type: 'warning', message: 'PDCA文件路径暂无!'})
}
};
// Initialise a G2 Gantt-style chart that plots planned vs. actual time
// spans per project node.
// @param chart   G2 chart instance created by the caller
// @param source  rows of { nodeName, planStartTime, planEndTime,
//                actualStartTime, actualEndTime }
// @param times   { startTime, endTime } overall window for the time axis
// @param type    0 = compact/statistics variant (bigger cells, axes hidden)
Vue.prototype.$chartInit = function (chart, source, times, type) {
  let data = source;
  /**
   * Whether a year is a leap year.
   * @param {Number} year
   */
  function isLeapYear(year) {
    return (year % 400 === 0) || (year % 4 === 0 && year % 100 !== 0);
  }
  /**
   * Days in a given month of a given year (0-based month index).
   * @param {Number} year
   * @param {Number} month
   */
  function getMonthDays(year, month) {
    return [31, null, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31][month] || (isLeapYear(year) ? 29 : 28);
  }
  /**
   * Week-of-year of a date.
   * @param {Number} y
   * @param {Number} m 1-based month
   * @param {Number} d
   * @returns {String} zero-padded two-digit week number, e.g. "07"
   */
  function getWeekNumber(y, m, d) {
    var now = new Date(y, m - 1, d),
      year = now.getFullYear(),
      month = now.getMonth(),
      days = now.getDate();
    // day-of-year of the given date
    for (var i = 0; i < month; i++) {
      days += getMonthDays(year, i);
    }
    // weekday of January 1st (Sunday counted as 7)
    var yearFirstDay = new Date(year, 0, 1).getDay() || 7;
    var week = null;
    if (yearFirstDay === 1) {
      week = Math.ceil(days / 7);
    } else {
      days -= (7 - yearFirstDay + 1);
      week = Math.ceil(days / 7);
    }
    week = (week < 10 ? "0" : "") + week;
    return week;
  }
  let s = new Date(times.startTime);
  let e = new Date(times.endTime);
  let startYear = s.getFullYear();
  let endYear = e.getFullYear();
  const tickInterval = 24 * 60 * 60 * 1000; // one day in milliseconds
  if (times.startTime && times.endTime) {
    let min = s.getTime();
    let max = e.getTime();
    let ticks = [];
    let cha = (max - min) / tickInterval; // span of the window, in days
    if (cha > 630) {
      // Very long window (~2 years or more): monthly ticks over whole years.
      min = new Date(startYear + '-01-01').getTime();
      max = new Date(endYear + '-12-31').getTime();
      for (let i = startYear; i <= endYear; i++) {
        ticks.push(`${i}年1月`, `${i}年2月`,`${i}年3月`,`${i}年4月`,`${i}年5月`,`${i}年6月`,`${i}年7月`,`${i}年8月`,`${i}年9月`,`${i}年10月`,`${i}年11月`,`${i}年12月`);
      }
    } else if (cha > 90) {
      // Medium window (a quarter to ~2 years): weekly ticks.
      for (let i = min; i <= max; i += 7 * tickInterval) {
        ticks.push(i);
      }
    } else {
      // Short window: daily ticks.
      for (let i = min; i <= max; i += tickInterval) {
        ticks.push(i);
      }
    }
    /* Canvas size derived from the tick and row counts. */
    let size = {
      width: 40 * ticks.length + 150,
      height: 40 * data.length + 160,
    };
    if (type === 0) {
      size = {
        width: 120 * ticks.length,
        height: 100 * data.length,
      };
      if (100 * data.length < 200) {
        size.height = 200;
      }
    }
    chart.changeSize(size.width, size.height);
    // Range of the value axis: reserve margins proportional to tick density.
    // Fixed: the original tested `cha > 90` before `else if (cha > 630)`,
    // which made the > 630 branch unreachable; test the larger span first,
    // mirroring the tick-generation branches above.
    let t = (1 - (size.width - 120) / size.width) / 2;
    let rangeY = [0, 1 - 2 * t];
    if (cha > 630) {
      t = (1 - (size.width - 120) / size.width) / 30;
      rangeY = [0, 1 - 2 * t];
    } else if (cha > 90) { // weekly scale
      t = (1 - (size.width - 120) / size.width) / 14;
      rangeY = [0, 1 - 2 * t];
    }
    const defs = {
      range: {
        type: 'time',
        min: min,
        max: max,
        ticks: ticks,
        mask: 'MM/DD',
        range: rangeY,
        nice: false,
      },
      nodeName: {
        type: 'cat',
        alias: '加工零件', // display alias
        nice: false, // do not round min/max
      }
    };
    /*-- axis configuration --*/
    chart.axis('nodeName', {
      line: {
        lineWidth: 1,
        stroke: '#aaa',
      },
      grid: null,
      label: {
        offset: 180, // distance between the label text and the axis line
        textStyle: {
          textAlign: 'center', // start | middle | end
          fill: 'red',
          fontSize: '8',
          fontWeight: 'bold',
          textBaseline: 'middle' // top | middle | bottom (default middle)
        },
        htmlTemplate: value => {
          return `<p class="out" style="width:150px;height:40px;padding-left:5px;box-sizing:border-box;font-size:8px;line-height:40px;margin:0;color:#666;text-align:right;" title="${value}">${value}</p>`;
        },
      },
    });
    chart.axis('range', {
      label: {
        offset: 10, // distance between the label text and the axis line
        textStyle: {
          textAlign: 'center',
          fill: '#404040',
          fontSize: '8',
          fontWeight: 'bold',
          textBaseline: 'top'
        },
        htmlTemplate: value => {
          if (cha > 200) {
            // Weekly scale: render the week number instead of the raw date.
            // (Simplified from an IIFE that also computed several unused
            // locals.)
            var now = new Date(value);
            var year = now.getFullYear();
            var month = ((now.getMonth() + 1) < 10 ? "0" : "") + (now.getMonth() + 1);
            var date = (now.getDate() < 10 ? "0" : "") + now.getDate();
            value = getWeekNumber(year, month, date);
            return '<p class="timeLine" style="margin:0;font-size:8px;">'
              + '<span></span>W' + value +
              '</p>';
          } else {
            return '<p class="timeLine" style="margin:0;font-size:8px;">'
              + '<span></span>' + value +
              '</p>';
          }
        },
      },
      grid: null,
    });
    if (type === 0) {
      chart.axis(false);
    }
    var values = ['计划时间', '实际时间'];
    // Build two rows per node: one planned span, one actual span.
    let arr = [];
    for (let ii = 0; ii < data.length; ii++) {
      let n = {}, m = {};
      m.nodeName = n.nodeName = data[ii].nodeName;
      n.status = values[1];
      m.status = values[0];
      if (data[ii].planStartTime && data[ii].planEndTime) {
        // The end date is inclusive, so extend the bar by one day.
        m.range = [new Date(data[ii].planStartTime).getTime(), new Date(data[ii].planEndTime).getTime() + tickInterval];
      } else {
        m.range = [];
      }
      if (data[ii].actualStartTime && data[ii].actualEndTime) {
        n.range = [new Date(data[ii].actualStartTime).getTime(), new Date(data[ii].actualEndTime).getTime() + tickInterval];
      } else {
        n.range = [];
      }
      arr.push(m);
      arr.push(n);
    }
    /** load data */
    chart.source(arr, defs);
    chart.coord().transpose().scale(1, -1);
    // Legend keyed by the 'status' field.
    // Fixed: the original passed `values[status]`, but `status` is not
    // declared anywhere in this scope (it resolved to window.status, which
    // is normally ''), so the field argument was effectively undefined; the
    // intended field name is 'status'.
    chart.legend('status', { position: "bottom-left", offsetX: 160, marker: 'circle' });
    chart.tooltip({
      triggerOn: 'mousemove', // how the tooltip is triggered (default mousemove)
      showTitle: true,
      offset: 0, // tooltip offset from the pointer
      containerTpl: '<div class="g2-tooltip">'
        + '<div class="g2-tooltip-title" style="margin:0 0 10px 0;"></div>'
        + '<ul class="g2-tooltip-list" style="min-width:200px;"></ul></div>', // tooltip container template
      itemTpl: '<li data-index={index} style="border-top:1px solid #eee;"><span style="background-color:{color};width:8px;height:8px;border-radius:50%;display:inline-block;margin-right:8px;"></span>{status}{range}</li>', // per-record template
      inPlot: true, // keep the tooltip inside the plot area
      follow: true, // tooltip follows the pointer
      shared: true, // false would show a single record only
    });
    chart.interval().position('nodeName*range').color('status', ['#66b1ff', 'rgba(229,28,35,0.5)']).size(12).tooltip('status*range', (status, range) => {
      if (range.length) {
        // Undo the one-day inclusive-end extension before displaying.
        if (status === '计划时间') {
          return {
            status: '<span>计划时间</span>',
            range: '<p class="margin0">开始时间:' + this.$format(range[0]) + '</p><p class="margin0">结束时间:' + this.$format(range[1] - tickInterval) + '</p>',
          };
        } else {
          return {
            status: '<span>实际时间</span>',
            range: '<p class="margin0">开始时间:' + this.$format(range[0]) + '</p><p class="margin0">结束时间:' + this.$format(range[1] - tickInterval) + '</p>',
          };
        }
      } else {
        return { range: '暂无数据!' }
      }
    });
  }
};
Vue.config.productionTip = false;
/* eslint-disable no-new */
// Root Vue instance. Exported at the bottom of this module and referenced
// by the navigation guard below and by the API layer.
const vm = new Vue({
el: '#app',
router,
store,
render: h => h(App),
});
// Global navigation guard: toggles the app chrome (app_visible) and handles
// token-based auth redirects. ----------- still marked "needs optimising".
router.beforeEach((to, from, next) => {
  if (to.path !== '/login') {
    vm.$store.dispatch('app_visible', true);
    if (localStorage.getItem('token') !== '' && localStorage.getItem('token') !== null) {
      // Does the source route carry a query? (i.e. is this a redirect target)
      if (Object.keys(from.query).length === 0) {
        if (from.path === '/login') {
          // Fixed: the original compared `to.path !== 'login'` (no leading
          // slash). Route paths always start with '/', so that comparison
          // was always true and the /projects fallback was unreachable.
          if (to.path !== '/login') {
            next(); // avoids an infinite loop when an expired-token login carried a query
          } else {
            next({ path: '/projects' });
          }
        } else {
          next();
        }
      } else {
        let redirect = from.query.redirect; // the source route has a query
        if (to.path === redirect) { // prevents next() from looping forever
          next();
        } else {
          next({ path: redirect }); // jump to the intended route
        }
      }
    } else {
      if (!to.meta.unAuth) { // logged out and the route requires auth
        localStorage.removeItem('token');
        localStorage.removeItem('username');
        localStorage.removeItem('isLogin');
        localStorage.removeItem('userLoginVO');
        vm.$store.dispatch('getNewIsLogin', localStorage.getItem('isLogin'));
        next();
      } else {
        console.log('login登录');
        // Fixed: `this` is undefined inside a module-level arrow function,
        // so `this.$store.dispatch(...)` threw a TypeError here; dispatch
        // through the root instance like the other branches do.
        vm.$store.dispatch('app_visible', false);
        next();
      }
    }
  } else {
    vm.$store.dispatch('app_visible', false);
    next();
  }
});
/*-- Date <-> timestamp conversion helpers --*/
// Zero-pad a value below 10 (returns a string in that case, otherwise the
// input unchanged).
function add0(m) {
  if (m < 10) {
    return '0' + m;
  }
  return m;
}
// Format a millisecond timestamp as 'YYYY-MM-DD' (local time).
// @param {number} shijianchuo millisecond timestamp -- must already be a
//        number; callers holding a string should parseInt it first.
// @returns {string}
Vue.prototype.$format = function format(shijianchuo) {
  var time = new Date(shijianchuo);
  var y = time.getFullYear();
  var m = time.getMonth() + 1;
  var d = time.getDate();
  // Removed the unused hour/minute/second locals left over from a longer
  // 'YYYY-MM-DD HH:mm:ss' format.
  return y + '-' + add0(m) + '-' + add0(d);
};
// console.log('%c打印VM:','color:red;font-size:30px;',vm);
// Expose the root instance so other modules (e.g. the API layer) can reach
// the router and store through it.
export default vm;
<file_sep>/** api-doc*/
import $ajax from './request'
import vm from '../main.js'
// let mark;
// if(vm) {
// mark = vm.$store.getters.itemCheck_item.mark;
// }
// Payload placement depends on each endpoint's HTTP method below:
// POST requests send the payload as `data`, GET requests as `params`.
// (Removed a leftover debug `console.log($ajax)` that ran on every import
// of this module.)
/**
 * login-controller -------------- user login / logout
 * @fjs
 * */
// Log in
export const login = data => {return $ajax({url: '/api/login', method: 'post', data: data,})};
// Delete a user
// NOTE(review): URL contains a doubled 'user/user' segment -- confirm against the backend.
export const deleteUser = data => {return $ajax({url: '/api/user/user/deleteUser', method: 'get', params: data,})};
// Log out
export const loginOut = data => {return $ajax({url: '/api/login/loginOut', method: 'post', data: data,})};
/**
 * user-controller -------------- user management
 * @fjs
 * */
// Get the user list by search criteria
export const getUserList = data => {return $ajax({url: '/api/user/getUserList', method: 'get', params: data,})};
// Role options for the user search box
export const getRoleList = data => {return $ajax({url: '/api/user/getRoleList', method: 'get', params: data,})};
// Update a user account's status: enabled / disabled
export const updateStatus = data => {return $ajax({url: '/api/user/updateStatus', method: 'post', data: data,})};
// Update user information
export const updateUser = data => {return $ajax({url: '/api/user/updateUser', method: 'post', data: data, meta: 1,})};
// Add a user
export const addUser = data => {return $ajax({url: '/api/user/addUser', method: 'post', data: data, meta: 1,})};
// Change password
export const changePwd = data => {return $ajax({url: '/api/user/changePwd', method: 'post', data: data,})};
// Update email address and contact details
export const changeUserInfo = data => {return $ajax({url: '/api/user/changeUserInfo', method: 'post', data: data, meta: 1,})};
/**
 * project-see-controller -------------- create / update project
 * @fjs
 * */
// Basic project information by mould number
export const getMouldBasicInfo = data => {return $ajax({url: '/api/project/getMouldBasicInfo', method: 'get', params: data,})};
// Project node management list
export const getProjectNodeManageList = data => {return $ajax({url: '/api/project/getProjectNodeManageList', method: 'get', params: data,})};
// Project status dropdown options
export const getProjectStatusList = data => {return $ajax({url: '/api/project/getProjectStatusList', method: 'get', params: data,})};
// Project stage dropdown options
export const getProjectStrageList = data => {return $ajax({url: '/api/project/getProjectStrageList', method: 'get', params: data,})};
// Create / update a project node (create-update page -- project node management)
export const addProjectNodeManage = data => {return $ajax({url: '/api/project/addProjectNodeManage', method: 'post', data: data, meta: 1,})};
// Update a project (entered via the edit button on the project detail view)
export const updateProjectInfo = data => {return $ajax({url: '/api/project/addOrUpdateProjectInfo?mark=2', method: 'post', data: data, meta: 1,})};
// Update a project (entered via the detail button in project node management)
export const updateProjectInfoDetail = data => {return $ajax({url: '/api/project/addOrUpdateProjectInfo?mark=3', method: 'post', data: data, meta: 1,})};
// Create a project manually
export const addProjectInfo = data => {return $ajax({url: '/api/project/addOrUpdateProjectInfo', method: 'post', data: data, meta: 1,})};
// After creating or updating a project -- call the design/machining progress refresh endpoint
export const queryProjectInformationByMouldCodeNew = data => {return $ajax({url: '/api/getPmSystemItemPart/queryProjectInformationByMouldCodeNew', method: 'get', params: data,})};
// Add or update a project description (create-update page and project view 2)
export const addOrUpdateProjectDescribe = data => {return $ajax({url: '/api/project/addOrUpdateProjectDescribe', method: 'post', data: data, meta: 1,})};
// Project description list (create-update page and project view 2)
export const getProjectDescribeList = data => {return $ajax({url: '/api/project/getProjectDescribeList', method: 'get', params: data,})};
// Update project reminders
export const updateWarnByMouldAndUser = data => {return $ajax({url: '/api/project/updateWarnByMouldAndUser', method: 'post', data: data,})};
/**
 * project-see-controller -------------- project allocation
 * @fjs
 * */
// Project allocation list (filterable by search criteria)
export const getProjectAllotList = data => {return $ajax({url: '/api/project/getProjectAllotList', method: 'get', params: data,})};
// Edit and save a project allocation
export const updateProjectAllot = data => {return $ajax({url: '/api/project/updateProjectAllot', method: 'post', data: data, meta: 1,})};
// Role dropdown options in the edit dialog
export const getUserListByRoleType = data => {return $ajax({url: '/api/project/getUserListByRoleType', method: 'get', params: data,})};
/**
 * PM-system-controller -------------- project statistics module
 * @fjs
 * */
// Design node information
export const pmProjectPlanInformationQueryAndInsert = data => {return $ajax({url: '/api/webService/pmProjectPlanInformationQueryAndInsert', method: 'get', params: data,})};
// Part attribute progress by trial count and mould number
export const queryItemPartAttributeByTryCountAndMouldCode = data => {return $ajax({url: '/api/webService/queryItemPartAttributeByTryCountAndMouldCode', method: 'get', params: data,})};
// Design node progress by trial count and mould number
export const queryProjectPlanInformationeByTryCountAndMouldCode = data => {return $ajax({url: '/api/webService/queryProjectPlanInformationeByTryCountAndMouldCode', method: 'get', params: data,})};
// Project statistics
export const getProjectStatistics = data => {return $ajax({url: '/api/projectInfo/getProjectStatistics', method: 'get', params: data,})};
// Part counts by mould number
export const queryItemPartCountByMouldCode = data => {return $ajax({url: '/api/getPmSystemItemPart/queryItemPartCountByMouldCode', method: 'get', params: data,})};
/**
 * feed-back-controller -------------- feedback module
 * @fjs
 * */
// Create feedback (feedback creation page)
export const addFeedBack = data => {return $ajax({url: '/api/feedBack/addFeedBack', method: 'post', data: data, meta: 1,})};
// Add a follow-up (feedback detail -- customer view passes mark=1)
export const againAddFeedBack = data => {return $ajax({url: '/api/feedBack/againAddFeedBack?mark=1', method: 'post', data: data, meta: 1,})};
// Add a follow-up (feedback detail -- engineer view omits mark)
export const againAddFeedBackInner = data => {return $ajax({url: '/api/feedBack/againAddFeedBack', method: 'post', data: data, meta: 1,})};
// Feedback list (engineer view)
export const getFeedBack2List = data => {return $ajax({url: '/api/feedBack/getFeedBack2List', method: 'get', params: data,})};
// Feedback detail list (feedback detail page)
export const getFeedBackDetailList = data => {return $ajax({url: '/api/feedBack/getFeedBackDetailList', method: 'get', params: data,})};
// Feedback list (customer view)
export const getFeedBackList = data => {return $ajax({url: '/api/feedBack/getFeedBackList', method: 'get', params: data,})};
/**
 * erp-purchase-controller -------------- ERP purchasing data
 * @fjs
 * */
// Long-lead-time purchases
export const erpLongTimeMessage = data => {return $ajax({url: '/api/erpInformationProcess/erpLongTimeMessage', method: 'get', params: data,})};
// Purchases past their planned date and not yet arrived
export const erpNoArrivedTimeMessage = data => {return $ajax({url: '/api/erpInformationProcess/erpNoArrivedTimeMessage', method: 'get', params: data,})};
// Outsourced purchase orders
export const erpOutsourcingListTimeMessage = data => {return $ajax({url: '/api/erpInformationProcess/erpOutsourcingListTimeMessage', method: 'get', params: data,})};
// Purchases already received into the warehouse
export const getWarehousedPurchaseInformation = data => {return $ajax({url: '/api/erpInformationProcess/getWarehousedPurchaseInformation', method: 'get', params: data,})};
// Main materials
export const purchaseIngredientsInformation = data => {return $ajax({url: '/api/erpInformationProcess/purchaseIngredientsInformation', method: 'get', params: data,})};
// Purchase non-conformance reports
export const selectUnqualifiedFormByMouldCode = data => {return $ajax({url: '/api/erpInformationProcess/selectUnqualifiedFormByMouldCode', method: 'get', params: data,})};
// Purchase statistics -- published statistics
export const getCountPurchase = data => {return $ajax({url: '/api/erpInformationProcess/getCountPurchase', method: 'get', params: data,})};
/**
 * oa-system-controller -------------- non-conformance report lookup
 * @fjs
 * */
// Non-conformance reports by mould number
// export const oaUnqualifiedForm = data => {return $ajax({url: '/api/oaWebService/oaUnqualifiedForm', method: 'post', params: data,})};
// Non-conformance reports by source (1 = in-process) -- machining progress
export const oaUnqualifiedFormByResource = data => {return $ajax({url: '/api/oaWebService/oaUnqualifiedFormByResource', method: 'get', params: data,})};
/**
 * project-see-controller -------------- project view
 * @fjs
 * */
// Project view list
export const getProjectViewList = data => {return $ajax({url: '/api/project/getProjectViewList', method: 'get', params: data,})};
// Project view -- update status and stage
export const updateProjectStatusAndStage = data => {return $ajax({url: '/api/project/updateProjectStatusAndStage', method: 'post', data: data,})};
// Project view -- published design statistics
export const getDesignStatisticsInfoByMouldNo = data => {return $ajax({url: '/api/project/getDesignStatisticsInfoByMouldNo', method: 'get', params: data,})};
// // Project view -- published statistics inside machining progress
// export const queryItemPartCountByMouldCode = data => {return $ajax({url: '/api/webService/queryItemPartCountByMouldCode', method: 'get', params: data,})};
// Project info -- machining progress table data
export const queryItemPartMouldCode = data => {return $ajax({url: '/api/getPmSystemItemPart/queryItemPartMouldCode', method: 'get', params: data,})};
// Trial moulds
export const getTestMouldInfoList = data => {return $ajax({url: '/api/testMould/getTestMouldInfoList', method: 'get', params: data,})};
// Sampling / mould-move statistics -- create
export const addSampleMoveMould = data => {return $ajax({url: '/api/sample/addSampleMoveMould', method: 'post', data: data, meta: 1,})};
// Sampling / mould-move statistics -- fetch (the mould-move view button passes mark=2)
export const getSampleMoveMouldList = data => {return $ajax({url: '/api/sample/getSampleMoveMouldList', method: 'get', params: data,})};
// Sampling / mould-move statistics -- update time, location and related info
export const addPositionInfo = data => {return $ajax({url: '/api/sample/addPositionInfo', method: 'post', data: data, meta: 1,})};
/** Legacy endpoints */
// Machining progress table data
export const queryItemPartAttributeByMouldCodeNew = data => {return $ajax({url: '/api/webService/queryItemPartAttributeByMouldCodeNew', method: 'get', params: data,})};
// Design progress table data
export const queryProjectPlanInformatinByMouldCodeNew = data => {return $ajax({url: '/api/webService/queryProjectPlanInformatinByMouldCodeNew', method: 'get', params: data,})};
/** New endpoints */
// Machining progress table data
export const queryItemPartAttributeByMouldCodeBasis = data => {return $ajax({url: '/api/getPmSystemItemPart/queryItemPartAttributeByMouldCodeBasis', method: 'get', params: data,})};
// Design progress table data
export const queryProjectPlanInformatinByMouldCode = data => {return $ajax({url: '/api/getPmSystemItemPart/queryProjectPlanInformatinByMouldCode', method: 'get', params: data,})};
/**
 * project-see-controller -------------- project information (customer)
 * @fjs
 * */
// Project drawing list
export const getDrawingList = data => {return $ajax({url: '/api/projectInfo/getDrawingList', method: 'get', params: data,})};
// Project detail list
export const getProjectInfoList = data => {return $ajax({url: '/api/projectInfo/getProjectInfoList', method: 'get', params: data,})};
// Project progress detail
export const getProjectProgressInfo = data => {return $ajax({url: '/api/projectInfo/getProjectProgressInfo', method: 'get', params: data,})};
// Sampling and mould-move information
export const getSampleMoveMouldLists = data => {return $ajax({url: '/api/projectInfo/getSampleMoveMouldList', method: 'get', params: data,})};
// Trial mould detail
export const getTestMouldDetail = data => {return $ajax({url: '/api/projectInfo/getTestMouldDetail', method: 'get', params: data,})};
// Machined part table detail -- dialog data
export const queryItemByPartId = data => {return $ajax({url: '/api/getPmSystemItemPart/queryItemByPartId', method: 'get', params: data,})};
/**
 * warn-controller -------------- reminders
 * @fjs
 * */
// Reminder list -- the reminder settings entry in configuration management
export const getWarnList = data => {return $ajax({url: '/api/warn/getWarnList', method: 'get', params: data,})};
// Create or update a reminder setting
export const createAndUpdateWarn = data => {return $ajax({url: '/api/warn/createAndUpdateWarn', method: 'post', data: data, meta: 1,})};
// Default reminder node types / newly added node reminders
export const getWarnNodeList = data => {return $ajax({url: '/api/warn/getWarnNodeList', method: 'get', params: data,})};
// Reminder node list by id
export const getWarnNodeListById = data => {return $ajax({url: '/api/warn/getWarnNodeListById', method: 'get', params: data,})};
// Project abnormality reminders
export const getWarnProjectAbnormalReminder = data => {return $ajax({url: '/api/warn/getWarnProjectAbnormalReminder', method: 'get', params: data,})};
// Project status change reminders
export const getProjectStatisticsChange = data => {return $ajax({url: '/api/warn/getProjectStatisticsChange', method: 'get', params: data,})};
/**
 * Project view -- after the change, one project can own several mould numbers
 * @fjs
 * @param: page - page number, size - page size
 * */
export const projectInfoSeeList = data => {return $ajax({url: '/api/projectInfo/projectInfoSeeList', method: 'get', params: data,})};
export const getProjectSchedule = data => {return $ajax({url: '/api/project/getProjectSchedule', method: 'get', params: data,})};
// /** -------------------------------------------------------------- plain form submits do not work for file import/export; binary streams need special handling
// * Overall progress import
// * @fjs
// * @param: file type restricted to .xlsx
// * */
// export const projectRateImport = data => {return $ajax({url: '/api/project/import/projectRateImport', method: 'post', data: data,})};
//
/**
 * Overall progress export
 * @fjs
 * @param: file type restricted to .xlsx
 * */
export const projectRateExport = (data,arg) => {return $ajax({url: '/api/project/export/projectRateExport', method: 'get', params: data, responseType: 'blob', args: arg,})};
/**
 * Engineering task book export
 * @fjs
 * @param: file type restricted to .xlsx
 * */
export const engineeringTaskBookExport = (data,arg) => {return $ajax({url: '/api/project/export/engineeringTaskBookExport', method: 'get', params: data, responseType: 'blob', args: arg, })};
/**
 * Contract review table management and engineering task book management
 * @fjs
 * @param:
 * page
 * size
 * projectName
 * contractNo
 * createTime1: Chinese standard time value
 * createTime2: Chinese standard time value
 * */
export const queryContractInformation = data => {return $ajax({url: '/api/project/import/queryContractInformation', method: 'get', params: data, })};
export const queryProjectInformation = data => {return $ajax({url: '/api/project/import/queryProjectInformation', method: 'get', params: data, })};
/**
 * Engineering task book -- table data inside the detail view
 * @fjs
 * @param:
 * page
 * size
 * id: id of the task book management list entry
 * */
export const queryProjectInformationDetails = data => {return $ajax({url: '/api/project/import/queryProjectInformationDetails', method: 'get', params: data, })};
/**
 * Engineering task book -- save edits to the detail form
 * @fjs
 * @param:
 * page
 * size
 * id: id of the task book management list entry
 * */
export const updateProjectInformationDetails = data => {return $ajax({url: '/api/project/updateProjectInformationDetails', method: 'post', data: data, meta: 1,})};
/**
 * Engineering task book -- save from the edit page opened from the detail table
 * @fjs
 * @param: application/json
 * */
export const updateProjectInformation = data => {return $ajax({url: '/api/project/updateProjectInformation', method: 'post', data: data, meta: 1,})};
export const queryProcessDetailsByMouldCode = data => {return $ajax({url: 'api/getPmSystemItemPart/queryProcessDetailsByMouldCode', method: 'get', params: data,})};
/**
 * Non-conforming parts
 * @fjs
 * @param: application/json
 * */
export const queryFuzzyUnqualifiedForm = data => {return $ajax({url: '/api/oaWebService/queryFuzzyUnqualifiedForm', method: 'get', params: data,})};
/**
 * Machining progress plan table
 * @fjs
 * @param {Object} data
 * */
// Table data.
// Fixed: the URL was missing its leading slash ('api/...'), so the request
// resolved relative to the current route path instead of the API root like
// every other endpoint in this module.
export const queryProcessDetailsByMouldCodeAndBigTypeCode = data => {return $ajax({url: '/api/getPmSystemItemPart/queryProcessDetailsByMouldCodeAndBigTypeCode', method: 'get', params: data,})};
// Delete an added process step
export const deleteProcessDetails = data => {return $ajax({url: '/api/getPmSystemItemPart/deleteProcessDetails', method: 'post', data: data,})};
// Change the order of process steps
export const changeOrderProcessDetails = data => {return $ajax({url: '/api/getPmSystemItemPart/changeOrderProcessDetails', method: 'post', data: data,})};
// Add a process step
export const insertProcessDetails = data => {return $ajax({url: '/api/getPmSystemItemPart/insertProcessDetails', method: 'post', data: data, meta: 1,})};
// Edit machining node settings
export const updateProcessDetails = data => {return $ajax({url: '/api/getPmSystemItemPart/updateProcessDetails', method: 'post', data: data, meta: 1,})};
// Export to Excel
export const processDetailsExport = (data,arg) => {return $ajax({url: '/api/getPmSystemItemPart/export/processDetailsExport', method: 'get', params: data, responseType: 'blob', args: arg,})};
// Machining progress table home page
export const queryProcessTable = data => {return $ajax({url: '/api/getPmSystemItemPart/queryProcessTable', method: 'get', params: data,})};
/**
 * Sales plan table
 * @fjs
 * @param {Object} data
 * */
// Sales plan table data.
// Fixed: the URL was missing its leading slash ('api/...'), so the request
// resolved relative to the current route path instead of the API root like
// every other endpoint in this module.
export const getSalePlanList = data => {return $ajax({url: '/api/sale/getSalePlanList', method: 'post', data: data,})};
// Save edits to a sales plan entry
export const updateSalePlan = data => {return $ajax({url: '/api/sale/updateSalePlan', method: 'post', data: data,meta: 1,})};
// Sales plan export
export const salesPlanDetailsExport = data => {return $ajax({url: '/api/sale/export/salesPlanDetailsExport', method: 'get', params: data, responseType: 'blob',exportType: 1,})};
| 3660fcc1e7395bef49ed82ba87f08771ce705b51 | [
"JavaScript"
] | 3 | JavaScript | jiangshanFan/moulds | f01ddac51c746b5b51007779d12793616492971c | 80d20b6d3ef2a71ba40e8ed8c4cae62b399ff33c |
refs/heads/master | <repo_name>Thibault-Brocheton/eco-mod-citeperdue<file_sep>/InfiniteYieldAttribute.cs
namespace Eco.Gameplay.Items
{
using System;
using System.Collections.Generic;
using System.Linq;
using Eco.Gameplay.DynamicValues;
using Eco.Shared.Localization;
using Eco.Shared.Utils;
using Eco.Gameplay.Systems.TextLinks;
    public class InfiniteYieldAttribute : ItemAttribute
    {
        // Skill-scaled harvest yield attached to an item type; registered with
        // SkillModifiedValueManager so the bonus appears in skill/item tooltips.
        public SkillModifiedValue Yield { get; private set; }

        // Linear-strategy variant with a base yield of 0.
        // NOTE(review): this overload uses base 0 + LinearStrategy while the
        // multiplier overload below uses base 1 + InfiniteMultiplicativeStrategy
        // -- confirm the asymmetry is intentional.
        public InfiniteYieldAttribute(Type beneficiary, Type skillType)
            : this(beneficiary, Item.Get(beneficiary).UILink(), skillType)
        { }
        public InfiniteYieldAttribute(Type beneficiary, LocString beneficiaryText, Type skillType)
        {
            this.Yield = new SkillModifiedValue(0, new LinearStrategy(), skillType, Localizer.DoStr("harvest yield"), typeof(Yield));
            SkillModifiedValueManager.AddSkillBenefit(beneficiary, beneficiaryText, this.Yield);
            SkillModifiedValueManager.AddBenefitForObject(beneficiary, beneficiaryText, this.Yield);
        }

        // Multiplier-table variant: base yield 1 scaled by per-level factors;
        // InfiniteMultiplicativeStrategy reuses the table's last entry for
        // levels beyond its length.
        public InfiniteYieldAttribute(Type beneficiary, Type skillType, float[] multipliers)
            : this(beneficiary, Item.Get(beneficiary).UILink(), skillType, multipliers)
        { }
        public InfiniteYieldAttribute(Type beneficiary, LocString beneficiaryText, Type skillType, float[] multipliers)
        {
            this.Yield = new SkillModifiedValue(1, new InfiniteMultiplicativeStrategy(multipliers), skillType, Localizer.DoStr("harvest yield"), typeof(Yield));
            SkillModifiedValueManager.AddSkillBenefit(beneficiary, beneficiaryText, this.Yield);
            SkillModifiedValueManager.AddBenefitForObject(beneficiary, beneficiaryText, this.Yield);
        }
    }
}<file_sep>/AutoGen/Tech/Tailor.cs
namespace Eco.Mods.TechTree
{
// [DoNotLocalize]
using System;
using System.Collections.Generic;
using System.Linq;
using Eco.Core.Utils;
using Eco.Core.Utils.AtomicAction;
using Eco.Gameplay.Components;
using Eco.Gameplay.DynamicValues;
using Eco.Gameplay.Items;
using Eco.Gameplay.Players;
using Eco.Gameplay.Property;
using Eco.Gameplay.Skills;
using Eco.Gameplay.Systems.TextLinks;
using Eco.Shared.Localization;
using Eco.Shared.Serialization;
using Eco.Shared.Services;
using Eco.Shared.Utils;
using Gameplay.Systems.Tooltip;
}
<file_sep>/CitePerdueMod.cs
/**
* File: CitePerdueMod.cs
* Eco Version: 8.2.4
* Mod Version: 1.0.0
*
* Author: <NAME>: Zangdar
*
*
* CitePerdue Mod Collection
*
*/
namespace Eco.Mods.CavRn.CitePerdue
{
using System;
using System.Linq;
using Eco.Core.Plugins.Interfaces;
using Eco.Core.Utils;
using Eco.Gameplay.Components;
using Eco.Gameplay.DynamicValues;
using Eco.Gameplay.Items;
using Eco.Gameplay.Objects;
using Eco.Gameplay.Players;
using Eco.Gameplay.Skills;
using Eco.Gameplay.Systems.Chat;
using Eco.Gameplay.Systems.TextLinks;
using Eco.Gameplay.Systems.Tooltip;
using Eco.Mods.TechTree;
using Eco.Shared.Localization;
using Eco.Shared.Serialization;
using System.Threading.Tasks;
using Eco.Core.Plugins;
using Eco.Shared.Utils;
using Eco.Shared.Services;
}
<file_sep>/README.md
# eco-mod-citeperdue
Some mods developed for the La Cité Perdue French Eco server.
Use these files at your own risk.
<file_sep>/HappyRestartMod.cs
namespace Eco.Mods.CavRn.HappyRestart
{
using System;
using System.Linq;
using Eco.Core.Plugins.Interfaces;
using Eco.Core.Utils;
using Eco.Gameplay.Components;
using Eco.Gameplay.DynamicValues;
using Eco.Gameplay.Items;
using Eco.Gameplay.Objects;
using Eco.Gameplay.Players;
using Eco.Gameplay.Skills;
using Eco.Gameplay.Systems.Chat;
using Eco.Gameplay.Systems.TextLinks;
using Eco.Gameplay.Systems.Tooltip;
using Eco.Mods.TechTree;
using Eco.Shared.Localization;
using Eco.Shared.Serialization;
using System.Threading.Tasks;
using Eco.Core.Plugins;
using Eco.Shared.Utils;
using Eco.Shared.Services;
public class HappyRestartMod : IModKitPlugin, IInitializablePlugin
{
public static string ModVersion = "1.0";
public string GetStatus()
{
return String.Empty;
}
public void Initialize(TimedTask timer)
{
DateTime dateNow = DateTime.Now;
Log.Write(new LocString(string.Format("CitePerdue - Current date is: " + dateNow.ToString())));
Console.WriteLine();
WarnRestart("05:56:00", "3");
WarnRestart("05:58:00", "1");
SaveAndKick("05:59:00");
WarnRestart("11:56:00", "3");
WarnRestart("11:58:00", "1");
SaveAndKick("11:59:00");
WarnRestart("17:56:00", "3");
WarnRestart("17:58:00", "1");
SaveAndKick("17:59:00");
WarnRestart("23:56:00", "3");
WarnRestart("23:58:00", "1");
SaveAndKick("23:59:00");
}
public void WarnRestart(string DailyTime, string minutes)
{
//Time when method needs to be called
var timeParts = DailyTime.Split(new char[1] { ':' });
var dateNow = DateTime.Now;
var date = new DateTime(dateNow.Year, dateNow.Month, dateNow.Day,
int.Parse(timeParts[0]), int.Parse(timeParts[1]), int.Parse(timeParts[2]));
TimeSpan ts;
if (date > dateNow)
ts = date - dateNow;
else
{
date = date.AddDays(1);
ts = date - dateNow;
}
//waits certan time and run the code
Task.Delay(ts).ContinueWith((x) => WarnRestartAction(minutes));
}
public void WarnRestartAction(string minutes)
{
Log.Write(new LocString(string.Format("CitePerdue - Server will restart in " + minutes + " Minutes !!")));
ChatManager.ServerMessageToAll(new LocString(Text.Color(Color.Red, "Server will restart in " + minutes + " Minutes !!")), false, DefaultChatTags.Notifications);
}
public void SaveAndKick(string DailyTime)
{
//Time when method needs to be called
var timeParts = DailyTime.Split(new char[1] { ':' });
var dateNow = DateTime.Now;
var date = new DateTime(dateNow.Year, dateNow.Month, dateNow.Day, int.Parse(timeParts[0]), int.Parse(timeParts[1]), int.Parse(timeParts[2]));
TimeSpan ts;
if (date > dateNow)
ts = date - dateNow;
else
{
date = date.AddDays(1);
ts = date - dateNow;
}
//waits certan time and run the code
Task.Delay(ts).ContinueWith((x) => SaveAndKickAction());
}
public async Task SaveAndKickAction()
{
Log.Write(new LocString(string.Format("CitePerdue - Kicking players...")));
Console.WriteLine();
UserManager.Users.ToList().ForEach(user => {
var player = user.Player;
if (player != null)
{
player.Client.Disconnect("Server will restart in 1 minute. Please don't reconnect in this interval !", "");
Log.Write(new LocString(string.Format("CitePerdue - " + user.Name + " has been kicked.")));
Console.WriteLine();
}
});
Log.Write(new LocString(string.Format("CitePerdue - Saving the game...")));
Console.WriteLine();
await Task.Run(() =>
{
try
{
var time = TickTimeUtil.TimeSubprocess(StorageManager.SaveAndFlush);
Log.Write(new LocString(string.Format("CitePerdue - Game saved !")));
Console.WriteLine();
}
catch (Exception e)
{
Log.Write(new LocString(string.Format("CitePerdue - Error during same game ! " + e.ToString())));
Console.WriteLine();
}
});
}
}
}
<file_sep>/InfiniteStrategies.cs
namespace Eco.Gameplay.DynamicValues
{
using System;
using Eco.Shared;
using Eco.Shared.Utils;
public class InfiniteMultiplicativeStrategy : ModificationStrategy
{
public float[] Factors;
public InfiniteMultiplicativeStrategy(float[] factors) { this.Factors = factors; }
public override float ModifiedValue(float value, int level) {
int currentLevel = level;
if (level >= this.Factors.Length) {
currentLevel = this.Factors.Length - 1;
}
return Mathf.Round(value * this.Factors[currentLevel], 2);
}
public override string StyleBonusValue(float bonusValue) {
return Text.Percent(bonusValue);
}
public override float BonusValue(int level) {
int currentLevel = level;
if (level >= this.Factors.Length) {
currentLevel = this.Factors.Length - 1;
}
return Mathf.Abs(1 - this.Factors[currentLevel]);
}
public override bool Increases() {
return this.Factors[1] > 1;
}
}
public class InfiniteAdditiveStrategy : ModificationStrategy
{
public float[] Additions;
public InfiniteAdditiveStrategy(float[] additions) { this.Additions = additions; }
public override float ModifiedValue(float value, int level) {
int currentLevel = level;
if (level >= this.Additions.Length) {
currentLevel = this.Additions.Length - 1;
}
return value + this.Additions[currentLevel];
}
public override string StyleBonusValue(float bonusValue) {
return Text.Num(bonusValue);
}
public override float BonusValue(int level) {
int currentLevel = level;
if (level >= this.Additions.Length) {
currentLevel = this.Additions.Length - 1;
}
return Mathf.Abs(this.Additions[currentLevel]);
}
public override bool Increases() {
return this.Additions[2] > 0;
}
}
}
<file_sep>/AutoGen/Tech/LockSkills.cs
namespace Eco.Mods.TechTree
{
using System;
using System.Collections.Generic;
using System.Linq;
using Eco.Core.Utils;
using Eco.Core.Utils.AtomicAction;
using Eco.Gameplay.Components;
using Eco.Gameplay.DynamicValues;
using Eco.Gameplay.Items;
using Eco.Gameplay.Players;
using Eco.Gameplay.Property;
using Eco.Gameplay.Skills;
using Eco.Gameplay.Systems.TextLinks;
using Eco.Shared.Localization;
using Eco.Shared.Serialization;
using Eco.Shared.Services;
using Eco.Shared.Utils;
using Gameplay.Systems.Tooltip;
using Eco.Mods.TechTree;
    // Librarian
    // Gatekeeper profession: every skill-book recipe in this file requires
    // LibrarianSkill at level 0, so only players who take this skill can
    // craft books for the other professions.
    [Serialized]
    [RequiresSkill(typeof(SurvivalistSkill), 0)]
    public class LibrarianSkill : Skill
    {
        public override LocString DisplayName { get { return Localizer.DoStr("Librarian"); } }
        public override LocString DisplayDescription { get { return Localizer.DoStr("The Librarian can create skill books."); } }
        // Intentionally empty: the skill is capped at level 0 (see MaxLevel),
        // so no per-level effects ever apply.
        public override void OnLevelUp(User user)
        {
        }
        // Single-entry neutral strategies (x1 multiplier, +0 additive): the
        // skill grants no scaling bonuses.
        public static ModificationStrategy MultiplicativeStrategy = new MultiplicativeStrategy(new float[] { 1 });
        public override ModificationStrategy MultiStrategy { get { return MultiplicativeStrategy; } }
        public static ModificationStrategy AdditiveStrategy = new AdditiveStrategy(new float[] { 0 });
        public override ModificationStrategy AddStrategy { get { return AdditiveStrategy; } }
        // Empty cost table + MaxLevel 0 means the skill can never be leveled;
        // both point accessors fall through their bounds checks to 0.
        public static int[] SkillPointCost = {};
        public override int RequiredPoint { get { return this.Level < SkillPointCost.Length ? SkillPointCost[this.Level] : 0; } }
        public override int PrevRequiredPoint { get { return this.Level - 1 >= 0 && this.Level - 1 < SkillPointCost.Length ? SkillPointCost[this.Level - 1] : 0; } }
        public override int MaxLevel { get { return 0; } }
        public override int Tier { get { return 1; } }
    }
// Access via Book and Scroll
[Serialized]
public partial class LibrarianSkillBook : SkillBook<LibrarianSkill, LibrarianSkillScroll>
{
public override LocString DisplayName { get { return Localizer.DoStr("Librarian Skill Book"); } }
}
[Serialized]
public partial class LibrarianSkillScroll : SkillScroll<LibrarianSkill, LibrarianSkillBook>
{
public override LocString DisplayName { get { return Localizer.DoStr("Librarian Skill Scroll"); } }
}
// Logging
[Serialized]
public partial class LoggingSkillBook : SkillBook<LoggingSkill, LoggingSkillScroll>
{
public override LocString DisplayName { get { return Localizer.DoStr("Logging Skill Book"); } }
}
[Serialized]
public partial class LoggingSkillScroll : SkillScroll<LoggingSkill, LoggingSkillBook>
{
public override LocString DisplayName { get { return Localizer.DoStr("Logging Skill Scroll"); } }
}
[RequiresSkill(typeof(LibrarianSkill), 0)]
public partial class LoggingSkillBookRecipe : Recipe
{
public LoggingSkillBookRecipe()
{
this.Products = new CraftingElement[]
{
new CraftingElement<LoggingSkillBook>(),
};
this.Ingredients = new CraftingElement[]
{
new CraftingElement<WoodPulpItem>(10)
};
this.CraftMinutes = new ConstantValue(5);
this.Initialize(Localizer.DoStr("Logging Skill Book"), typeof(LoggingSkillBookRecipe));
CraftingComponent.AddRecipe(typeof(ResearchTableObject), this);
}
}
//Hewing
[Serialized]
public partial class HewingSkillBook : SkillBook<HewingSkill, HewingSkillScroll>
{
public override LocString DisplayName { get { return Localizer.DoStr("Hewing Skill Book"); } }
}
[Serialized]
public partial class HewingSkillScroll : SkillScroll<HewingSkill, HewingSkillBook>
{
public override LocString DisplayName { get { return Localizer.DoStr("Hewing Skill Scroll"); } }
}
[RequiresSkill(typeof(LibrarianSkill), 0)]
public partial class HewingSkillBookRecipe : Recipe
{
public HewingSkillBookRecipe()
{
this.Products = new CraftingElement[]
{
new CraftingElement<HewingSkillBook>(),
};
this.Ingredients = new CraftingElement[]
{
new CraftingElement<LogItem>(10)
};
this.CraftMinutes = new ConstantValue(5);
this.Initialize(Localizer.DoStr("Hewing Skill Book"), typeof(HewingSkillBookRecipe));
CraftingComponent.AddRecipe(typeof(ResearchTableObject), this);
}
}
//Mining
[Serialized]
public partial class MiningSkillBook : SkillBook<MiningSkill, MiningSkillScroll>
{
public override LocString DisplayName { get { return Localizer.DoStr("Mining Skill Book"); } }
}
[Serialized]
public partial class MiningSkillScroll : SkillScroll<MiningSkill, MiningSkillBook>
{
public override LocString DisplayName { get { return Localizer.DoStr("Mining Skill Scroll"); } }
}
[RequiresSkill(typeof(LibrarianSkill), 0)]
public partial class MiningSkillBookRecipe : Recipe
{
public MiningSkillBookRecipe()
{
this.Products = new CraftingElement[]
{
new CraftingElement<MiningSkillBook>(),
};
this.Ingredients = new CraftingElement[]
{
new CraftingElement<StoneItem>(10)
};
this.CraftMinutes = new ConstantValue(5);
this.Initialize(Localizer.DoStr("Mining Skill Book"), typeof(MiningSkillBookRecipe));
CraftingComponent.AddRecipe(typeof(ResearchTableObject), this);
}
}
//Mortaring
[Serialized]
public partial class MortaringSkillBook : SkillBook<MortaringSkill, MortaringSkillScroll>
{
public override LocString DisplayName { get { return Localizer.DoStr("Mortaring Skill Book"); } }
}
[Serialized]
public partial class MortaringSkillScroll : SkillScroll<MortaringSkill, MortaringSkillBook>
{
public override LocString DisplayName { get { return Localizer.DoStr("Mortaring Skill Scroll"); } }
}
[RequiresSkill(typeof(LibrarianSkill), 0)]
public partial class MortaringSkillBookRecipe : Recipe
{
public MortaringSkillBookRecipe()
{
this.Products = new CraftingElement[]
{
new CraftingElement<MortaringSkillBook>(),
};
this.Ingredients = new CraftingElement[]
{
new CraftingElement<LimestoneItem>(10),
new CraftingElement<SandstoneItem>(10),
new CraftingElement<GraniteItem>(10),
new CraftingElement<ShaleItem>(10),
new CraftingElement<GneissItem>(10),
new CraftingElement<BasaltItem>(10),
};
this.CraftMinutes = new ConstantValue(5);
this.Initialize(Localizer.DoStr("Mortaring Skill Book"), typeof(MortaringSkillBookRecipe));
CraftingComponent.AddRecipe(typeof(ResearchTableObject), this);
}
}
//Gathering
[Serialized]
public partial class GatheringSkillBook : SkillBook<GatheringSkill, GatheringSkillScroll>
{
public override LocString DisplayName { get { return Localizer.DoStr("Gathering Skill Book"); } }
}
[Serialized]
public partial class GatheringSkillScroll : SkillScroll<GatheringSkill, GatheringSkillBook>
{
public override LocString DisplayName { get { return Localizer.DoStr("Gathering Skill Scroll"); } }
}
[RequiresSkill(typeof(LibrarianSkill), 0)]
public partial class GatheringSkillBookRecipe : Recipe
{
public GatheringSkillBookRecipe()
{
this.Products = new CraftingElement[]
{
new CraftingElement<GatheringSkillBook>(),
};
this.Ingredients = new CraftingElement[]
{
new CraftingElement<PlantFibersItem>(10)
};
this.CraftMinutes = new ConstantValue(5);
this.Initialize(Localizer.DoStr("Gathering Skill Book"), typeof(GatheringSkillBookRecipe));
CraftingComponent.AddRecipe(typeof(ResearchTableObject), this);
}
}
//AdvancedCampfireCooking
[Serialized]
public partial class AdvancedCampfireCookingSkillBook : SkillBook<AdvancedCampfireCookingSkill, AdvancedCampfireCookingSkillScroll>
{
public override LocString DisplayName { get { return Localizer.DoStr("AdvancedCampfireCooking Skill Book"); } }
}
[Serialized]
public partial class AdvancedCampfireCookingSkillScroll : SkillScroll<AdvancedCampfireCookingSkill, AdvancedCampfireCookingSkillBook>
{
public override LocString DisplayName { get { return Localizer.DoStr("AdvancedCampfireCooking Skill Scroll"); } }
}
[RequiresSkill(typeof(LibrarianSkill), 0)]
public partial class AdvancedCampfireCookingSkillBookRecipe : Recipe
{
public AdvancedCampfireCookingSkillBookRecipe()
{
this.Products = new CraftingElement[]
{
new CraftingElement<AdvancedCampfireCookingSkillBook>(),
};
this.Ingredients = new CraftingElement[]
{
new CraftingElement<TomatoItem>(10)
};
this.CraftMinutes = new ConstantValue(5);
this.Initialize(Localizer.DoStr("AdvancedCampfireCooking Skill Book"), typeof(AdvancedCampfireCookingSkillBookRecipe));
CraftingComponent.AddRecipe(typeof(ResearchTableObject), this);
}
}
//Hunting
[Serialized]
public partial class HuntingSkillBook : SkillBook<HuntingSkill, HuntingSkillScroll>
{
public override LocString DisplayName { get { return Localizer.DoStr("Hunting Skill Book"); } }
}
[Serialized]
public partial class HuntingSkillScroll : SkillScroll<HuntingSkill, HuntingSkillBook>
{
public override LocString DisplayName { get { return Localizer.DoStr("Hunting Skill Scroll"); } }
}
[RequiresSkill(typeof(LibrarianSkill), 0)]
public partial class HuntingSkillBookRecipe : Recipe
{
public HuntingSkillBookRecipe()
{
this.Products = new CraftingElement[]
{
new CraftingElement<HuntingSkillBook>(),
};
this.Ingredients = new CraftingElement[]
{
new CraftingElement<RawFishItem>(10)
};
this.CraftMinutes = new ConstantValue(5);
this.Initialize(Localizer.DoStr("Hunting Skill Book"), typeof(HuntingSkillBookRecipe));
CraftingComponent.AddRecipe(typeof(ResearchTableObject), this);
}
}
}
| fa41980e7a8be79ef0328cc960d90c634ebd2c77 | [
"Markdown",
"C#"
] | 7 | C# | Thibault-Brocheton/eco-mod-citeperdue | aef6c73f414b3b5b477de28049a2b857fd4540b4 | f89f5ac80cc6b6a50fae6716558e98fd66b882fa |
refs/heads/master | <file_sep>#####(rTensor) Tensor Algebra and Statistical Models####
# Location of the rTensor source files. Defaults to the original author's
# machine-specific path but can be overridden via the RTENSOR_SRC_DIR
# environment variable so this demo script is portable.
rtensor_src_dir <- Sys.getenv("RTENSOR_SRC_DIR", "/Users/jamesyili/tensors")
source(file.path(rtensor_src_dir, "rTensor_Classes.R"))
source(file.path(rtensor_src_dir, "rTensor_Generics.R"))
source(file.path(rtensor_src_dir, "rTensor_BasicMethods.R"))
source(file.path(rtensor_src_dir, "rTensor_MatrixFunctions.R"))
source(file.path(rtensor_src_dir, "rTensor_SpecialTensors.R"))
source(file.path(rtensor_src_dir, "rTensor_Unfoldings.R"))
source(file.path(rtensor_src_dir, "rTensor_Decompositions.R"))
#####Compilation of Tests
###Classes
##Tensor creation
#using new
tnsr <- new("Tensor",3L,c(10L,20L,30L),letters[1:3],data=runif(6000))
#from vectors
vec <- runif(100)
vecT <- as.tensor(vec)
object.size(vec); object.size(vecT) #Really noticeable for small vectors
#from matrices
mat <- matrix(runif(1000),nrow=100,ncol=10)
matT <- as.tensor(mat)
object.size(mat); object.size(matT)
#from arrays
indices <- c(10,30,100,300)
arr <- array(runif(prod(indices)), dim = indices)
arrT <- as.tensor(arr)
object.size(arr); object.size(arrT)
###BasicMethods
tnsr <- new("Tensor",3L,c(3L,4L,5L),data=runif(60))
#getters, show, print, head, tail
getModes(tnsr)
getNumModes(tnsr)
getModenames(tnsr)
getData(tnsr)
tnsr
print(tnsr)
head(tnsr)
tail(tnsr)
#element-wise operation
tnsr2 <- new("Tensor",3L,c(3L,4L,5L),data=runif(60))
tnsrsum <- tnsr + tnsr2
tnsrdiff <- tnsr - tnsr2
tnsrelemprod <- tnsr * tnsr2
tnsrelemquot <- tnsr / tnsr2
for (i in 1:3L){
for (j in 1:4L){
for (k in 1:5L){
stopifnot(tnsrsum[i,j,k]==tnsr[i,j,k]+tnsr2[i,j,k])
stopifnot(tnsrdiff[i,j,k]==tnsr[i,j,k]-tnsr2[i,j,k])
stopifnot(tnsrelemprod[i,j,k]==tnsr[i,j,k]*tnsr2[i,j,k])
stopifnot(tnsrelemquot[i,j,k]==tnsr[i,j,k]/tnsr2[i,j,k])
}
}
}
#subsetting
tnsr[1,2,3]
tnsr[3,1,]
tnsr[,,5]
#modeSum
modeSum(tnsr,3)
modeMean(tnsr,1)
#sweep
sweep(tnsr,m=c(2,3),stat=1,func='-')
sweep(tnsr,m=1,stat=10,func='/')
#fnorm
fnorm(tnsr)
#inner product
innerProd(tnsr,tnsr2)
###Unfoldings
#unfolds
matT1<-cs_unfold(tnsr,m=3)
matT2<-rs_unfold(tnsr,m=2)
identical(matT1,unfold(tnsr,rs=c(1,2),cs=c(3)))
identical(matT2,unfold(tnsr,rs=2,cs=c(1,3)))
matT3<-unfold(tnsr,rs=2,cs=c(3,1))
#folds
identical(cs_fold(matT1,m=3,modes=c(3,4,5)),tnsr)
identical(rs_fold(matT2,m=2,modes=c(3,4,5)),tnsr)
identical(fold(matT3,rs=2,cs=c(3,1),modes=c(3,4,5)),tnsr)
###Operations
tnsr <- new("Tensor",3L,c(3L,4L,5L),data=runif(60))
#ttm
mat <- matrix(runif(50),ncol=5)
ttm(tnsr,mat,m=3)
#ttv
vec <- runif(4)
ttv(tnsr,vec,m=2)
#ttl
lizt <- list('mat1' = matrix(runif(30),ncol=3), 'mat2' = matrix(runif(40),ncol=4),'mat3' = matrix(runif(50),ncol=5))
ttl(tnsr,lizt,ms=c(1,2,3))
#t
identical(t(tnsr)@data[,,1],t(tnsr@data[,,1]))
identical(t(tnsr)@data[,,2],t(tnsr@data[,,5]))
identical(t(t(tnsr)),tnsr)
#%*%
tnsr2 <- new("Tensor",3L,c(4L,3L,5L),data=runif(60))
tnsr%*%tnsr2
###MatrixFunctions
#hamadard_list
lizt <- list('mat1' = matrix(runif(40),ncol=4), 'mat2' = matrix(runif(40),ncol=4),'mat3' = matrix(runif(40),ncol=4))
dim(hamadard_list(lizt))
#kronecker_list
smalllizt <- list('mat1' = matrix(runif(12),ncol=4), 'mat2' = matrix(runif(12),ncol=4),'mat3' = matrix(runif(12),ncol=4))
dim(kronecker_list(smalllizt))
#khartri_rao
dim(khatri_rao(matrix(runif(12),ncol=4),matrix(runif(12),ncol=4)))
#khartri_rao_list
dim(khatri_rao_list(smalllizt))
#circ_mat
circ_mat(1:10L)
###Decompositions
tnsr <- new("Tensor",3L,c(60L,70L,80L),data=runif(336000))
smalltnsr <- new("Tensor",3L,c(10L,10L,10L),data=runif(1000))
#hosvd
hosvdD <-hosvd(tnsr)
hosvdD$resid
hosvdD2 <-hosvd(tnsr,ranks=c(6L,7L,8L))
hosvdD2$resid
#cp_als
cpD <- cp_als(tnsr,num_components=30) #(30^3)/(60*70*80) = 0.08035714
cpD$conv #did not converge with 500 iterations
cpD$norm_percent # 51%
plot(cpD$resids)
smallcpD <- cp_als(smalltnsr,num_components=5)
smallcpD$conv
smallcpD$norm_percent # 57%
plot(smallcpD$resids)
#tucker_als
tuckerD <- tucker_als(tnsr,ranks=c(30,35,40))
tuckerD$conv #did not converge with 500 iterations
tuckerD$norm_percent #56%
plot(tuckerD$resids/fnorm(tnsr))
smalltuckerD <- tucker_als(smalltnsr,ranks=c(5,6,7))
smalltuckerD$conv
smalltuckerD$norm_percent #63%
plot(smalltuckerD$resids)
#mpca_als
mpcaD <- mpca_als(tnsr,ranks=c(30,30))
mpcaD$conv #converged
mpcaD$norm_percent #56%
plot(mpcaD$resids)
smallmpcaD <- mpca_als(smalltnsr,ranks=c(5,5))
smallmpcaD$conv
smallmpcaD$norm_percent #63%
plot(smallmpcaD$resids)
#tsvd3d
tsvdD <- t_svd3d(tnsr)
1 - fnorm(t_svd_reconstruct(tsvdD)-tnsr)/fnorm(tnsr) #98.5%
smalltsvdD <- t_svd3d(smalltnsr)
1 - fnorm(t_svd_reconstruct(smalltsvdD)-smalltnsr)/fnorm(smalltnsr)
###SpecialTensors
#random tensor
rand_tensor()
rand_tensor(c(8,2,100,4))
#superdiagonal tensor
superdiagonal_tensor(3,4)@data
#identity tensor
identity_tensor3d(c(3,3,10))@data
<file_sep>//
// multi_array_demo.h
//
//
// Created by <NAME> on 7/19/13.
//
//
#ifndef ____multi_array_demo__
#define ____multi_array_demo__
#include <iostream>
#endif /* defined(____multi_array_demo__) */
<file_sep>###Tensor times matrix (m-mode product)
ttm<-function(tnsr,mat,m=NULL){
stopifnot(is.matrix(mat))
if(is.null(m)) stop("m must be specified")
mat_dims <- dim(mat)
modes_in <- tnsr@modes
stopifnot(modes_in[m]==mat_dims[2])
modes_out <- modes_in
modes_out[m] <- mat_dims[1]
tnsr_m <- rs_unfold(tnsr,m=m)@data
retarr_m <- mat%*%tnsr_m
rs_fold(retarr_m,m=m,modes=modes_out)
}
###Tensor times vector (contracted m-mode product)
ttv<-function(tnsr,vec,m=NULL){
if(is.null(m)) stop("m must be specified")
vec_dim <- length(vec)
modes_in <- tnsr@modes
stopifnot(modes_in[m]==vec_dim)
modes_out <- modes_in
modes_out[m] <- 1
tnsr_m <- rs_unfold(tnsr,m=m)@data
retarr_m <- vec%*%tnsr_m
rs_fold(retarr_m,m=m,modes=modes_out)
}
###Tensor times a list of matrices
ttl<-function(tnsr,list_mat,ms=NULL){
if(is.null(ms)||!is.vector(ms)) stop ("m modes must be specified as a vector")
if(length(ms)!=length(list_mat)) stop("m modes length does not match list_mat length")
num_mats <- length(list_mat)
if(length(unique(ms))!=num_mats) warning("consider pre-multiplying matrices for the same m for speed")
mat_nrows <- vector("list", num_mats)
mat_ncols <- vector("list", num_mats)
for(i in 1:num_mats){
mat <- list_mat[[i]]
m <- ms[i]
mat_dims <- dim(mat)
modes_in <- tnsr@modes
stopifnot(modes_in[m]==mat_dims[2])
modes_out <- modes_in
modes_out[m] <- mat_dims[1]
tnsr_m <- rs_unfold(tnsr,m=m)@data
retarr_m <- mat%*%tnsr_m
tnsr <- rs_fold(retarr_m,m=m,modes=modes_out)
}
tnsr
}
###Tensor Transpose
setMethod("t",signature="Tensor",
definition=function(x){
if(x@num_modes!=3) stop("Tensor Transpose currently only implemented for 3d Tensors")
modes <- x@modes
new_arr <- array(apply(x@data[,,c(1L,modes[3]:2L)],MARGIN=3,FUN=t),dim=modes[c(2,1,3)])
as.tensor(new_arr)
})
#Inverse FFT returning a real-valued result: R's fft(inverse = TRUE) is
#unnormalized, so the output is divided by length(x); imaginary parts
#(numerical noise for real signals) are discarded by as.numeric, with the
#coercion warning suppressed.
ifft <- function(x){
scaled <- fft(x, inverse = TRUE) / length(x)
suppressWarnings(as.numeric(scaled))
}
###Tensor Multiplication (only defined for 3-d so far)
#FFT-based product of two 3-way tensors: transform both along mode 3,
#multiply the frontal slices pairwise, inverse-transform back.
#Requires dim(x)[2] == dim(y)[1] and matching third modes; the result is
#dim(x)[1] x dim(y)[2] x n3.
tensor_product3d <- function(x,y){
if((x@num_modes!=3)||(y@num_modes!=3)) stop("Tensor Multiplication currently only implemented for 3d Tensors")
modes_x <- x@modes
modes_y <- y@modes
if(modes_x[2]!=modes_y[1]) stop("Mode 2 of x and Mode 1 of y must match")
n3 <- modes_x[3]
if(n3!=modes_y[3]) stop("Modes 3 of x and y must match")
#fft's for x and y
#apply over MARGIN=1:2 transforms each mode-3 fiber but returns it in the
#first dimension, so aperm restores the original (i, j, k) ordering
fft_x <- aperm(apply(x@data,MARGIN=1:2,fft),c(2,3,1))
fft_y <- aperm(apply(y@data,MARGIN=1:2,fft),c(2,3,1))
#multiply the faces (this is terribad! TO-DO: think of better way!)
fft_ret <- array(0,dim=c(modes_x[1],modes_y[2],n3))
for(i in 1:n3){
fft_ret[,,i]<-fft_x[,,i]%*%fft_y[,,i]
}
#ifft and return as Tensor
as.tensor(aperm(apply(fft_ret,MARGIN=1:2,ifft),c(2,3,1)))
}
#Overloading %*% for Tensor class
setMethod("%*%",signature=c("Tensor","Tensor"),
definition=function(x,y){
tensor_product3d(x,y)
})
<file_sep>//
// tensor.cpp
//
//
// Created by <NAME> on 8/16/13.
//
//
#include <Rcpp.h>
//#include <vector>
//#include <algorithm>
// Placeholder validity check for n-d tensor data coming from R.
// Currently a stub: always reports "Pass"; dimension/content checks are
// still to be implemented (see the inline note below).
//[[Rcpp::export]]
Rcpp::CharacterVector ndTensor_validate(Rcpp::NumericVector x){
//<need to implement checking>
return Rcpp::wrap("Pass");
}
/*******************************************************************************
#include <marray.hxx>
namespace marray = andres;
//[[Rcpp::export]]
Rcpp::List marrayC (Rcpp::NumericVector x, Rcpp::IntegerVector modes_in){
// Determine number of dimensions
size_t num_modes = modes_in.size();
//Copy IntegerVector modes_in into a vector
//modes = Rcpp::as<std::vector<size_t> >(modes_in);
//Initialize marray with dimensions from
//std::vector<size_t>::iterator itr = modes.begin();
Rcpp::IntegerVector::iterator itr = modes_in.begin();
marray::Marray<double> a(marray::SkipInitialization, itr, itr+num_modes, marray::FirstMajorOrder);
//Fill in marray with passed-in numeric vector
std::copy(x.begin(), x.end(), a.begin());
std::vector<size_t> modes (num_modes);
for (size_t j=0; j != num_modes; ++j){
modes[j] = a.shape(j);
}
return Rcpp::List::create(Rcpp::_["data"] = Rcpp::wrap(a), Rcpp::_["modes"] = Rcpp::wrap(modes), Rcpp::_["num_modes"] = a.dimension());
}
*******************************************************************************/<file_sep>#####(rTensor) Tensor Algebra and Statistical Models####
#####Functions that operate on Matrices and Arrays
###Hamadard (element-wise) product of a list of matrices
#Element-wise (Hadamard) product across all matrices/vectors in L; entries
#must be conformable. The name keeps the original "hamadard" spelling for
#backward compatibility.
#Fixed: a length-1 list now returns its single element; the previous
#2:length(L) loop evaluated c(2, 1) and failed in that case.
hamadard_list <- function(L){
isvecORmat <- function(x){is.matrix(x) || is.vector(x)}
stopifnot(length(L) >= 1L)
stopifnot(all(vapply(L, isvecORmat, logical(1L))))
Reduce(`*`, L)
}
###Kronecker product of a list of matrices
#Left-to-right Kronecker product of every matrix/vector in L.
#Fixed: a length-1 list now returns its single element; the previous
#2:length(L) loop evaluated c(2, 1) and indexed out of bounds.
kronecker_list <- function(L){
isvecORmat <- function(x){is.matrix(x) || is.vector(x)}
stopifnot(length(L) >= 1L)
stopifnot(all(vapply(L, isvecORmat, logical(1L))))
Reduce(kronecker, L)
}
###Khatri Rao product of matrices
#Column-wise Kronecker (Khatri-Rao) product: column j of the result is
#kronecker(x[, j], y[, j]); x and y must be matrices with the same number
#of columns. Result is (nrow(x) * nrow(y)) x ncol(x), stored as doubles.
khatri_rao <- function(x,y){
if (!(is.matrix(x)&&is.matrix(y))) stop("Arguments must be matrices.")
if (ncol(x)!=ncol(y)) stop("Arguments must have same number of columns.")
vapply(seq_len(ncol(x)),
function(j) as.numeric(kronecker(x[,j],y[,j])),
numeric(nrow(x)*nrow(y)))
}
###Khatri Rao product of a list of matrices
#Column-wise Khatri-Rao product over a list: column j of the result is the
#Kronecker product of the j-th columns of every matrix in L (processed in
#reversed order when reverse=TRUE). All matrices must share the same
#number of columns.
#Fixed: works for a single-matrix list; the old code delegated to
#kronecker_list, whose 2:length(L) loop breaks when given one element.
khatri_rao_list <- function(L,reverse=FALSE){
stopifnot(length(L) >= 1L)
stopifnot(all(vapply(L, is.matrix, logical(1L))))
ncols <- vapply(L, ncol, integer(1L))
stopifnot(length(unique(ncols))==1)
ncols <- ncols[1]
nrows <- vapply(L, nrow, integer(1L))
retmat <- matrix(0,nrow=prod(nrows),ncol=ncols)
if (reverse) L <- rev(L)
for(j in seq_len(ncols)){
retmat[,j] <- Reduce(kronecker, lapply(L,function(x) x[,j]))
}
retmat
}
###Norm of vectors
#One-norm (sum of absolute values) of a vector: base norm() applied to a
#single-column matrix with its default type "O", here made explicit.
#NOTE(review): given the name, the Euclidean (2-)norm may have been the
#intent -- confirm against callers before changing the type.
norm_vec <- function(vec){
norm(as.matrix(vec), type = "O")
}
###Circulant Matrix from a vector
#Builds the n x n circulant matrix C with C[i, j] = vec[((i - j) mod n) + 1]:
#vec is the first column and each subsequent column is the previous one
#rotated down by one. Replaces the original recycling one-liner with an
#explicit index matrix (same result, no suppressed warnings needed).
circ_mat <- function(vec){
stopifnot(is.vector(vec))
n <- length(vec)
shifts <- (outer(seq_len(n), seq_len(n), `-`) %% n) + 1L
matrix(vec[shifts], n, n)
}
#####(rTensor) Tensor Algebra and Statistical Models####
#####Special Tensors
###Create a Random Tensor
#Returns a Tensor with the given modes filled with iid Uniform(0,1) draws.
rand_tensor <- function(modes=c(3,4,5)){
as.tensor(array(runif(prod(modes)), dim=modes))
}
###Create a Superdiagonal Tensor
#Builds a num_modes-way tensor of side length len whose superdiagonal
#entries arr[i, i, ..., i] equal `elements` (recycled when scalar) and all
#other entries are zero.
#Rewritten to use matrix indexing -- row i of idx addresses the i-th
#superdiagonal entry -- instead of the previous eval(parse(...)) loop.
superdiagonal_tensor <- function(num_modes,len,elements=1L){
modes <- rep(len,num_modes)
arr <- array(0, dim = modes)
if(length(elements)==1) elements <- rep(elements,len)
idx <- matrix(seq_len(len), nrow = len, ncol = num_modes)
arr[idx] <- elements
as.tensor(arr)
}
###Create an 3d Identity Tensor (3D only)
#Builds an n x n x k tensor whose first frontal slice is the n x n
#identity matrix and whose remaining slices are zero. `modes` must be
#c(n, n, k) (first two modes equal). Presumably serves as the identity
#for the fft-based product in tensor_product3d -- confirm with callers.
identity_tensor3d <- function(modes){
if(length(modes)!=3L) stop("identity tensor only implemented for 3d so far")
n <- modes[1]
stopifnot(n==modes[2])
arr <- array(0,dim=modes)
arr[,,1] <- diag(1,n,n)
as.tensor(arr)
}
<file_sep>//
// multi_array_demo.cpp
//
//
// Created by <NAME> on 7/19/13.
//
//
#include "multi_array_demo.h"
#include <boost/multi_array.hpp>
#include <boost/array.hpp>
#include <iostream>
#include <cassert>
int main (){
// Create a 3D array that is 3x4x2x6
typedef boost::multi_array<double, 3>array_type;
typedef array_type::index index;
boost::array<array_type::index, 3> shape = {{ 3, 4, 2 }};
array_type A(shape);
// // Assign values to the elements
// int values =0;
// for (index i = 0; i != 3; ++i){
// for (index j = 0; j !=4; ++j){
// for (index k = 0; k != 2; ++k){
// for(index l = 0; l != 6; ++l){
// A[i][j][k][l] = values++;
// }
// }
// }
// }
//
// // Verify and print values to console
// int verify = 0;
// for (index i = 0; i != 3; ++i){
// for (index j = 0; j!= 4; ++j){
// for (index k = 0; k != 2; ++k){
// for (index l = 0; l != 6; ++l){
// assert(A[i][j][k][l]==verify++);
// // std::cout << A[i][j][k][l] << " ";
// }
// }
// }
// }
return 0;
}
<file_sep>#####(rTensor) Tensor Algebra and Statistical Models####
#####Generic Definitions
setGeneric(name="getModes",
def=function(x){standardGeneric("getModes")})
setGeneric(name="getNumModes",
def=function(x){standardGeneric("getNumModes")})
setGeneric(name="getModenames",
def=function(x){standardGeneric("getModenames")})
setGeneric(name="getData",
def=function(x){standardGeneric("getData")})
setGeneric(name="rs_unfold",
def=function(x,...){standardGeneric("rs_unfold")})
setGeneric(name="cs_unfold",
def=function(x,...){standardGeneric("cs_unfold")})
setGeneric(name="unfold",
def=function(x,...){standardGeneric("unfold")})
setGeneric(name="rs_fold",
def=function(x,...){standardGeneric("rs_fold")})
setGeneric(name="cs_fold",
def=function(x,...){standardGeneric("cs_fold")})
setGeneric(name="fold",
def=function(x,...){standardGeneric("fold")})
setGeneric(name="modeSum",
def=function(x,...){standardGeneric("modeSum")})
setGeneric(name="modeMean",
def=function(x,...){standardGeneric("modeMean")})
setGeneric(name="fnorm",
def=function(x){standardGeneric("fnorm")})
setGeneric(name="innerProd",
def=function(x1,x2){standardGeneric("innerProd")})
#setGeneric(name="hosvd",
#def=function(x,...){standardGeneric("hosvd")})
#setGeneric(name="cp_als",
#def=function(x,...){standardGeneric("cp_als")})
#setGeneric(name="tucker_als",
#def=function(x,...){standardGeneric("tucker_als")})
#setGeneric(name="mpca_als",
#def=function(x,...){standardGeneric("mpca_als")})
#setGeneric(name="fft_svd",
#def=function(x,...){standardGeneric("fft_svd")})
<file_sep>#####(rTensor) Tensor Algebra and Statistical Models####
#####Method Definitions
#modes getter
setMethod(f="getModes",
signature="Tensor",
definition=function(x){
x@modes
})
#num_modes getter
setMethod(f="getNumModes",
signature="Tensor",
definition=function(x){
x@num_modes
})
#modenames getter
setMethod(f="getModenames",
signature="Tensor",
definition=function(x){
x@modenames
})
#data getter
setMethod(f="getData",
signature="Tensor",
definition=function(x){
if(x@num_modes==1) return(as.vector(x@data))
return(x@data)
})
###Show and Print
setMethod(f="show",
signature="Tensor",
definition=function(x){
cat("Numeric Tensor of", x@num_modes, "Modes\n", sep=" ")
cat("Modes: ", x@modes, "\n", sep=" ")
modenames <-x@modenames
if (is.null(modenames)){
cat("Modenames: <empty>\n")
}else{
cat("Modenames: ", modenames, "\n", sep=" ")
}
cat("Data: \n")
print(head(x@data))
})
#
setMethod(f="print",
signature="Tensor",
definition=function(x,...){
show(x)
})
###Head and Tail
setMethod(f="head",
signature="Tensor",
definition=function(x,...){
head(x@data,...)
})
#
setMethod(f="tail",
signature="Tensor",
definition=function(x,...){
tail(x@data,...)
})
###Ops
setMethod("Ops", signature(e1="Tensor", e2="Tensor"),
definition=function(e1,e2){
e1@data<-callGeneric(e1@data, e2@data)
validObject(e1)
e1
})
#
setMethod("Ops", signature(e1="Tensor", e2="array"),
definition=function(e1,e2){
e1@data<-callGeneric(e1@data,e2)
validObject(e1)
e1
})
#
setMethod("Ops", signature(e1="array", e2="Tensor"),
definition=function(e1,e2){
e2@data<-callGeneric(e1,e2@data)
validObject(e2)
e2
})
#
setMethod("Ops", signature(e1="Tensor", e2="numeric"),
definition=function(e1,e2){
e1@data<-callGeneric(e1@data,e2)
validObject(e1)
e1
})
#
setMethod("Ops", signature(e1="numeric", e2="Tensor"),
definition=function(e1,e2){
e2@data<-callGeneric(e1,e2@data)
validObject(e2)
e2
})
#####Subsetting Methods ('[' defined for the array data)
setMethod("[", signature="Tensor",
definition=function(x,i,j,...){
as.tensor(`[`(x@data,i,j,...))
})
###Sum/mean aross a given mode
setMethod("modeSum",signature="Tensor",
definition=function(x,m=NULL){
if(is.null(m)) stop("must specify mode m")
num_modes <- x@num_modes
if(m<1||m>num_modes) stop("m out of bounds")
perm <- c(m,(1L:num_modes)[-m])
arr <- colSums(aperm(x@data,perm),dims=1L)
as.tensor(arr)
})
#
setMethod("modeMean",signature="Tensor",
definition=function(x,m=NULL){
if(is.null(m)) stop("must specify mode m")
num_modes <- x@num_modes
if(m<1||m>num_modes) stop("m out of bounds")
perm <- c(m,(1L:num_modes)[-m])
arr <- colSums(aperm(x@data,perm),dims=1L)
modes <- x@modes
as.tensor(arr/modes[m])
})
###Sweep
setMethod("sweep", signature="Tensor",
definition=function(x,m=NULL,stats=NULL,func=NULL,...){
if(is.null(m)) stop("must specify mode m")
as.tensor(sweep(x@data,MARGIN=m,STATS=stats,FUN=func,...))
})
###Norm and Inner Product
setMethod("fnorm",signature="Tensor",
definition=function(x){
arr<-x@data
sqrt(sum(arr*arr))
})
#
setMethod("innerProd",signature=c(x1="Tensor", x2="Tensor"),
definition=function(x1,x2){
stopifnot(x1@modes==x2@modes)
arr1 <- x1@data
arr2 <- x2@data
sum(as.numeric(arr1*arr2))
})
############OLD;REQUIRES ABIND##################
# setMethod("getSubtensor", signature="Tensor",
# definition=function(x,indices=NULL,dims=seq(len=max(getNumModes(x),1)),drop=NULL){
# stopifnot(require(abind))
# if(!is.vector(indices)) stop("indices must be a vector")
# idx <- as.list(indices)
# null_ind <- which(idx < 0)
# if(length(null_ind)!=0) idx[null_ind] <- list(NULL)
# subTensor <- tensor(abind::asub(getData(x),idx,dims=dims,drop=drop),modenames=getModenames(x))
# subTensor
# })
# #
# setMethod("getFiber", signature="Tensor",
# definition=function(x,indices=NULL,asTensor=FALSE){
# stopifnot(require(abind))
# if(!is.vector(indices)) stop("indices must be a vector")
# num_modes <- getNumModes(x)
# if(num_modes==1){
# if(asTensor) return(x)
# return(getData(x))
# }
# if(length(indices)!=num_modes) stop ("indices must have length N")
# idx <- as.list(indices)
# null_ind <- which(idx < 0)
# if(length(null_ind)!=1) stop("there must be exactly 1 negative index")
# idx[null_ind] <- list(NULL)
# fiber <- abind::asub(getData(x),idx=idx,drop=TRUE)
# if(asTensor) fiber <- as.tensor(fiber)
# fiber
# })
# #
# setMethod("getSlice", signature="Tensor",
# definition=function(x,indices=NULL,asTensor=FALSE){
# stopifnot(require(abind))
# if(!is.vector(indices)) stop("indices must be a vector")
# num_modes <- getNumModes(x)
# if(num_modes==1) stop("Tensor has only 1 mode")
# if(num_modes==2){
# if(asTensor) return(x)
# return(getData(x))
# }
# if(length(indices)!=num_modes) stop ("indices must have length N")
# idx <- as.list(indices)
# null_ind <- which(idx < 0)
# if(length(null_ind)!=2) stop("there must be exactly 2 negative indice")
# idx[null_ind] <- list(NULL)
# slice <- abind::asub(getData(x),idx=idx,drop=TRUE)
# if (asTensor) slice <- as.tensor(slice)
# slice
# })<file_sep>#Timing and Percent norm recovered
source("/Users/jamesyili/tensors/rTensor_Classes.R")
source("/Users/jamesyili/tensors/rTensor_Generics.R")
source("/Users/jamesyili/tensors/rTensor_BasicMethods.R")
source("/Users/jamesyili/tensors/rTensor_MatrixFunctions.R")
source("/Users/jamesyili/tensors/rTensor_SpecialTensors.R")
source("/Users/jamesyili/tensors/rTensor_Unfoldings.R")
source("/Users/jamesyili/tensors/rTensor_Decompositions.R")
#tic toc functions
tic <- function (gcFirst = TRUE,overwrite=TRUE) {
if(gcFirst) gc(FALSE)
tic <- proc.time()
ticExists <- ".tic"%in%ls(all.names=TRUE,envir=baseenv())
if(overwrite||!ticExists){
assign(".tic", tic, envir=baseenv())
}
else{
stop("Another timing function running")
}
invisible(tic)
}
toc <- function (kill=TRUE,pr=FALSE) {
toc <- proc.time()
tic <- get(".tic", envir=baseenv())
if(pr) print(toc - tic)
if(kill) rm(.tic, envir=baseenv())
invisible(toc - tic)
}
#1000 x 1000 x 100
tnsr <- new("Tensor",3L,c(1000L,1000L,100L),data=runif(1e08))
object.size(tnsr) #800 001 296 bytes (800 MB)
save.image(file="firstcomparisons.RData")
#hosvd
tic()
hosvdD <- hosvd(tnsr)
time.hosvdD <- toc()
save.image(file="firstcomparisons.RData")
#cp
tic()
cpD <- cp_als(tnsr)
time.cpD <- toc()
save.image(file="firstcomparisons.RData")
#tucker
tic()
tuckerD <- tucker_als(tnsr)
time.tukcerD <- toc()
save.image(file="firstcomparisons.RData")
#mpca
tic()
mpcaD <- mpca_als(tnsr)
time.mpcaD <- toc()
save.image(file="firstcomparisons.RData")
#t_svd
tic()
tsvdD <- t_svd(tnsr)
time.tsvdD <- toc()
save.image(file="firstcomparisons.RData")
<file_sep>#####(rTensor) Tensor Algebra and Statistical Models####
#####Tensor Unfoldings
###General Unfolding
setMethod("unfold", signature="Tensor",
definition=function(x,rs=NULL,cs=NULL){
#checks
if(is.null(rs)||is.null(cs)) stop("row space and col space indices must be specified")
num_modes <- x@num_modes
if (length(rs) + length(cs) != num_modes) stop("incorrect number of indices")
if(any(rs<1) || any(rs>num_modes) || any(cs < 1) || any(cs>num_modes)) stop("illegal indices specified")
perm <- c(rs,cs)
if (any(sort(perm,decreasing=TRUE) != num_modes:1)) stop("missing and/or repeated indices")
modes <- x@modes
mat <- x@data
new_modes <- c(prod(modes[rs]),prod(modes[cs]))
#rearranges into a matrix
mat <- aperm(mat,perm)
dim(mat) <- new_modes
as.tensor(mat)
})
###Row Space Unfolding in the m mode - aka Matricization (Kolda et. al)
setMethod("rs_unfold", signature="Tensor",
definition=function(x,m=NULL){
if(is.null(m)) stop("mode m must be specified")
num_modes <- x@num_modes
rs <- m
cs <- (1:num_modes)[-m]
unfold(x,rs=rs,cs=cs)
})
###Column Space Unfolding in the m mode (Martin et. al)
setMethod("cs_unfold", signature="Tensor",
definition=function(x,m=NULL){
if(is.null(m)) stop("mode m must be specified")
num_modes <- x@num_modes
rs <- (1:num_modes)[-m]
cs <- m
unfold(x,rs=rs,cs=cs)
})
#####Matrix Foldings
###General folding (inverse function to unfold)
fold <- function(mat, rs = NULL, cs = NULL, modes=NULL){
#checks
if(is.null(rs)||is.null(cs)) stop("row space and col space indices must be specified")
if(is.null(modes)) stop("Tensor modes must be specified")
if(!is(mat,"Tensor")){
if(!is.matrix(mat)) stop("mat must be of class 'matrix'")
}else{
stopifnot(mat@num_modes==2)
mat <- mat@data
}
num_modes <- length(modes)
stopifnot(num_modes==length(rs)+length(cs))
mat_modes <- dim(mat)
if((mat_modes[1]!=prod(modes[rs])) || (mat_modes[2]!=prod(modes[cs]))) stop("matrix nrow/ncol does not match Tensor modes")
#rearranges into array
iperm <- match(1:num_modes,c(rs,cs))
as.tensor(aperm(array(mat,dim=c(modes[rs],modes[cs])),iperm))
}
###Row Space Folding (inverse funtion to rs_unfold) in the m mode
rs_fold <- function(mat,m=NULL,modes=NULL){
if(is.null(m)) stop("mode m must be specified")
if(is.null(modes)) stop("Tensor modes must be specified")
num_modes <- length(modes)
rs <- m
cs <- (1:num_modes)[-m]
fold(mat,rs=rs,cs=cs,modes=modes)
}
###Col Space Folding (inverse function to cs_unfold) in the m mode
cs_fold <- function(mat,m=NULL,modes=NULL){
if(is.null(m)) stop("mode m must be specified")
if(is.null(modes)) stop("Tensor modes must be specified")
num_modes <- length(modes)
cs <- m
rs <- (1:num_modes)[-m]
fold(mat,rs=rs,cs=cs,modes=modes)
}
###OLD DEFN of rs_unfold: DO NOT USE###
# setMethod("m_unfold", signature="Tensor",
# definition=function(x,m=NULL,asTensor=FALSE){
# if(is.null(m)) stop("mode m must be specified")
# num_modes <- getNumModes(x)
# if(m < 1 || m > num_modes) stop("mode m incorrectly specified")
# modes <- getModes(x)
# mat <- getData(x)
# new_modes <- c(modes[m],prod(modes[-m]))
# if(m == 1) {
# dim(mat) <- new_modes
# if(asTensor) mat <- as.tensor(mat)
# return(mat)
# }
# if(m == num_modes){
# perm <- c(m,1:(m-1))
# }else {
# perm <- c(m,1:(m-1),(m+1):num_modes)
# }
# mat <- aperm(mat,perm)
# dim(mat) <- new_modes
# if(asTensor) mat <- as.tensor(mat)
# mat
# })<file_sep>#####(rTensor) Tensor Algebra and Statistical Models####
#####Class Defintions (Only 1 class in version 0.1)
setClass("Tensor",
representation(num_modes = "integer", modes = "integer", modenames = "ANY", data="array"),
validity = function(object){
num_modes <- object@num_modes
modes <- object@modes
modenames <- object@modenames
errors <- character()
if (any(modes <= 0)){
msg <- "'modes' must contain strictly positive values; if any mode is 1, consider a smaller num_modes"
errors <- c(errors, msg)
}
if (!is.null(modenames) && (length(modenames)!=num_modes)){
msg <- "warning: 'modenames' length does not match number of modes. recycling"
errors <- c(errors, msg)
}
if(length(errors)==0) TRUE else errors
})
#####Initialization
setMethod(f="initialize",
signature="Tensor",
definition = function(.Object, num_modes=NULL, modes=NULL, modenames=NULL, data=NULL){
if(is.null(num_modes)){
if (is.vector(data)) num_modes <- 1L
else{num_modes <- length(dim(data))}
}
if(is.null(modes)){
if (is.vector(data)) modes <- length(data)
else{modes <- dim(data)}
}
if(is.null(modenames)&&!is.null(dimnames(.Object))){
modenames <- dimnames(data)
}
.Object@num_modes <- num_modes
.Object@modes <- modes
.Object@modenames <- modenames
.Object@data <- array(data,dim=modes,dimnames=modenames)
validObject(.Object)
.Object
})
#####Creation of ndTensor from an array/matrix/vector
as.tensor <- function(x, mode=NULL, modenames=NULL,drop=TRUE){
stopifnot(is.array(x)||is.vector(x))
if (is.vector(x)){
modes <- c(length(x))
num_modes <- 1L
}else{
modes <- dim(x)
num_modes <- length(modes)
if(is.null(modenames)&&!is.null(dimnames(x))){
modenames <- dimnames(data)
}
dim1s <- which(modes==1)
if(drop && (length(dim1s)>0)){
modes <- modes[-dim1s]
if(!is.null(modenames[[1]])) modenames <- modenames[-dim1s]
num_modes <- num_modes-length(dim1s)
}
}
new("Tensor",num_modes,modes,modenames,data=array(x,dim=modes,dimnames=modenames))
}
<file_sep>#####(rTensor) Tensor Algebra and Statistical Models####
#####Various Tensor Decompositions
###(Truncated-)Higher-order SVD
#Input: Tensor tnsr, ranks = k_1 ,..., k_M (optional)
#Output: List containing a core tensor Z (all-orthogonal), and a list of matrices of sizes I_m x k_M if ranks provided, o/w I_m x I_m
hosvd <- function(tnsr,ranks=NULL){
#stopifnot(is(tnsr,"Tensor"))
num_modes <- tnsr@num_modes
#no truncation if ranks not provided
if(is.null(ranks)){
cat("!ranks not provided so left singular matrices will not be truncated.\n")
ranks <- tnsr@modes
}
#progress bar
pb <- txtProgressBar(min=0,max=num_modes,style=3)
#loops through and performs SVD on mode-m matricization of tnsr
U_list <- vector("list",num_modes)
for(m in 1:num_modes){
U_list[[m]] <- (svd(rs_unfold(tnsr,m=m)@data)$u)[,1:ranks[m]]
setTxtProgressBar(pb,m)
}
close(pb)
#computes the core tensor
Z <- ttl(tnsr,lapply(U_list,t),ms=1:num_modes)
resid <- fnorm(ttl(Z,U_list,ms=1:num_modes)-tnsr)/fnorm(tnsr)
#put together the return list, and returns
list(Z=Z,U=U_list,resid=resid)
}
###CP Decomp ALS
#Input: Tensor, number of components to decompose the tensor (k)
#Output: List containing weights lambda (vector of length k), and a list of matrices U's, were each U_m is of size I_m x k
cp_als <- function(tnsr, num_components=NULL,max_iter=500, tol=1e-6){
if(is.null(num_components)) stop("num_components must be specified")
stopifnot(is(tnsr,"Tensor"))
#initialization via truncated hosvd
num_modes <- tnsr@num_modes
modes <- tnsr@modes
U_list <- vector("list",num_modes)
unfolded_mat <- vector("list",num_modes)
for(m in 1:num_modes){
unfolded_mat[[m]] <- rs_unfold(tnsr,m=m)@data
U_list[[m]] <- matrix(rnorm(modes[m]*num_components), nrow=modes[m], ncol=num_components)
}
est <- tnsr
curr_iter <- 1
converged <- FALSE
#set up convergence check
fnorm_resids <- rep(0, max_iter)
CHECK_CONV <- function(est){
curr_resid <- fnorm(tnsr - est)
fnorm_resids[curr_iter] <<- curr_resid
if (curr_iter==1) return(FALSE)
if (abs(curr_resid-fnorm_resids[curr_iter-1]) < tol) return(TRUE)
FALSE
}
#progress bar
pb <- txtProgressBar(min=0,max=max_iter,style=3)
#main loop (until convergence or max_iter)
while((curr_iter < max_iter) && (!converged)){
setTxtProgressBar(pb,curr_iter)
for(m in 1:num_modes){
V <- hamadard_list(lapply(U_list[-m],function(x) {t(x)%*%x}))
V_inv <- solve(V)
tmp <- unfolded_mat[[m]]%*%khatri_rao_list(U_list[-m],rev=TRUE)%*%V_inv
lambdas <- apply(tmp,2,norm_vec)
U_list[[m]] <- sweep(tmp,2,lambdas,"/")
Z <- superdiagonal_tensor(num_modes=num_modes,len=num_components,elements=lambdas)
est <- ttl(Z,U_list,ms=1:num_modes)
}
#checks convergence
if(CHECK_CONV(est)){
converged <- TRUE
setTxtProgressBar(pb,max_iter)
}else{
curr_iter <- curr_iter + 1
}
}
close(pb)
#end of main loop
#put together return list, and returns
fnorm_resids <- fnorm_resids[fnorm_resids!=0]
norm_percent<-1-tail(fnorm_resids,1)/fnorm(tnsr)
invisible(list(lambdas=lambdas, U=U_list, conv=converged, norm_percent=norm_percent, resids=fnorm_resids))
}
###Tucker Decomp ALS
#Input: Tensor of size I_1,...,I_M, ranks k_1,...,k_M, where M is the number of modes to Tensor
#Output: List containing a core Tensor Z and and a list of matrices U's, were each U_m is of size I_m x k_m
tucker_als <- function(tnsr,ranks=NULL,max_iter=50,tol=1e-6){
stopifnot(is(tnsr,"Tensor"))
if(is.null(ranks)) stop("ranks must be specified")
#initialization via truncated hosvd
num_modes <- tnsr@num_modes
U_list <- vector("list",num_modes)
for(m in 1:num_modes){
U_list[[m]] <- (svd(rs_unfold(tnsr,m=m)@data)$u)[,1:ranks[m]]
}
curr_iter <- 1
converged <- FALSE
#set up convergence check
fnorm_resids <- rep(0, max_iter)
CHECK_CONV <- function(Z,U_list){
est <- ttl(Z,U_list,ms=1:num_modes)
curr_resid <- fnorm(tnsr - est)
fnorm_resids[curr_iter] <<- curr_resid
if (curr_iter==1) return(FALSE)
if (abs(curr_resid-fnorm_resids[curr_iter-1]) < tol) return(TRUE)
FALSE
}
#progress bar
pb <- txtProgressBar(min=0,max=max_iter,style=3)
#main loop (until convergence or max_iter)
while((curr_iter < max_iter) && (!converged)){
setTxtProgressBar(pb,curr_iter)
modes_seq <- 1:num_modes
for(m in modes_seq){
#core Z minus mode m
X <- ttl(tnsr,lapply(U_list[-m],t),ms=modes_seq[-m])
#truncated SVD of X
U_list[[m]] <- (svd(rs_unfold(X,m=m)@data)$u)[,1:ranks[m]]
}
#compute core tensor Z
Z <- ttm(X,mat=t(U_list[[num_modes]]),m=num_modes)
#checks convergence
if(CHECK_CONV(Z, U_list)){
converged <- TRUE
setTxtProgressBar(pb,max_iter)
}else{
curr_iter <- curr_iter + 1
}
}
close(pb)
#end of main loop
#put together return list, and returns
fnorm_resids <- fnorm_resids[fnorm_resids!=0]
norm_percent<-1-tail(fnorm_resids,1)/fnorm(tnsr)
invisible(list(Z=Z, U=U_list, conv=converged, norm_percent = norm_percent, resids=fnorm_resids))
}
###MPCA
#Input: Tensor of size I_1,...,I_M, where m=1 is the measurement mode, ranks k_2,...,k_M, where M is the number of modes to Tensor
#Output: List containing an extended core Tensor Z_ext and and a list of matrices U's, were each U_m is of size I_m x k_m for m = 2, ..., M
mpca_als <- function(tnsr, ranks = NULL, max_iter = 500, tol=1e-6){
if(is.null(ranks)) stop("ranks must be specified")
stopifnot(is(tnsr,"Tensor"))
#initialization via hosvd of M-1 modes
num_modes <- tnsr@num_modes
stopifnot(length(ranks)==(num_modes-1))
ranks <- c(1,ranks)
modes <- tnsr@modes
U_list <- vector("list",num_modes)
unfolded_mat <- vector("list",num_modes)
for(m in 2:num_modes){
unfolded_mat <- rs_unfold(tnsr,m=m)@data
mode_m_cov <- unfolded_mat%*%t(unfolded_mat)
U_list[[m]] <- (svd(mode_m_cov)$u)[,1:ranks[m]]
}
Z_ext <- ttl(tnsr,lapply(U_list[-1],t),ms=2:num_modes)
curr_iter <- 1
converged <- FALSE
#set up convergence check
fnorm_resids <- rep(0, max_iter)
CHECK_CONV <- function(Z_ext,U_list){
est <- ttl(Z_ext,U_list[-1],ms=2:num_modes)
curr_resid <- fnorm(tnsr - est)
fnorm_resids[curr_iter] <<- curr_resid
if (curr_iter==1) return(FALSE)
if (abs(curr_resid-fnorm_resids[curr_iter-1]) < tol) return(TRUE)
FALSE
}
#progress bar
pb <- txtProgressBar(min=0,max=max_iter,style=3)
#main loop (until convergence or max_iter)
while((curr_iter < max_iter) && (!converged)){
setTxtProgressBar(pb,curr_iter)
modes_seq <- 2:num_modes
for(m in modes_seq){
#extended core Z minus mode m
X <- ttl(tnsr,lapply(U_list[-c(1,m)],t),ms=modes_seq[-(m-1)])
#truncated SVD of X
U_list[[m]] <- (svd(rs_unfold(X,m=m)@data)$u)[,1:ranks[m]]
}
#compute core tensor Z_ext
Z_ext <- ttm(X,mat=t(U_list[[num_modes]]),m=num_modes)
#checks convergence
if(CHECK_CONV(Z_ext, U_list)){
converged <- TRUE
setTxtProgressBar(pb,max_iter)
}else{
curr_iter <- curr_iter + 1
}
}
close(pb)
#end of main loop
#put together return list, and returns
fnorm_resids <- fnorm_resids[fnorm_resids!=0]
norm_percent<-1-tail(fnorm_resids,1)/fnorm(tnsr)
invisible(list(Z_ext=Z_ext, U=U_list, conv=converged, norm_percent = norm_percent, resids=fnorm_resids))
}
###T_SVD3d
#Input: A 3d tensor of n1 x n2 x n
#Output: List containing the U array (n1xn1xn3), V array (n2xn2xn3), S-mat (n3 x m), where m = min(n1,n2). The rows of S-mat
t_svd3d<-function(tnsr){
if(tnsr@num_modes!=3) stop("T-SVD only implemented for 3d so far")
modes <- tnsr@modes
n1 <- modes[1]
n2 <- modes[2]
n3 <- modes[3]
#progress bar
pb <- txtProgressBar(min=0,max=n3,style=3)
#fft for each of the n1n2 vectors (of length n3) along mode 3
fftz <- aperm(apply(tnsr@data,MARGIN=1:2,fft),c(2,3,1))
#svd for each face (svdz is a list of the results)
U_arr <- array(0,dim=c(n1,n1,n3))
V_arr <- array(0,dim=c(n2,n2,n3))
m <- min(n1,n2)
S_mat <- matrix(0,nrow=m,ncol=n3)
#Think of a way to avoid a loop in the beginning
#Problem is that svd returns a list but ideally we want 3 arrays
#Even with unlist this doesn't seem possible
for (j in 1:n3){
setTxtProgressBar(pb,j)
decomp <- svd(fftz[,,j],nu=n1,nv=n2)
U_arr[,,j] <- decomp$u
V_arr[,,j] <- decomp$v
S_mat[,j] <- decomp$d #length is min(n1,n2)
}
close(pb)
#for each svd result, we want to apply ifft
U <- as.tensor(aperm(apply(U_arr,MARGIN=1:2,ifft),c(2,3,1)))
V <- as.tensor(aperm(apply(V_arr,MARGIN=1:2,ifft),c(2,3,1)))
S <- as.tensor(apply(S_mat,MARGIN=1,ifft))
invisible(list(U=U,V=V,S=S))
}
###reconstruct
t_svd_reconstruct <- function(L){
Umodes <- L$U@modes
n1 <- Umodes[1]
n2 <- L$V@modes[1]
n3 <- Umodes[3]
S_fdiagonal <- array(0,c(n1,n2,n3))
S <- L$S@data
for (i in 1:n3){
S_fdiagonal[,,i] <- diag(S[i,],nrow=n1,ncol=n2)
}
S_fdiagonal <- as.tensor(S_fdiagonal)
# diagcol <- function(col,n1,n2){diag(col,nrow=n1,ncol=n2)}
# S_fdiag <- aperm(apply(S@data,MARGIN=2,diagcol,n1,n2),c(1,2,3))
L$U%*%S_fdiagonal%*%t(L$V)
}
###T_SVD_decomp3d
#Input: A 3d tensor of n1 x n2 x n3, cutoffs k1, k2
#Output:
# t_svd_approx3d<-function(tnsr,ranks=NULL){
# if(getNumModes(tnsr)!=3) stop("T-SVD approx only implemented for 3d so far")
# if(is.null(ranks)||length(ranks)!=2) stop("ranks needs to be a vector of length 2")
# modes <- tnsr@modes
# mat <- modeSum(tnsr,m=3)@data
# full <- svd(mat,nu=modes[1],nv=modes[2])
# U_trunc_t <- t(full$u[,1:ranks[1]])
# V_trunc <- full$v[,1:ranks[2]]
# arr <- tnsr@data
# ret_arr <- array(0,dim=c(ranks,modes[3]))
# for (i in 1L:modes[3]){
# ret_arr[,,i] <- U_trunc_t%*%arr[,,i]%*%V_trunc
# }
# as.tensor(ret_arr)
# }
###MICA - Requires W, the mixing matrix, to be specified (TO-DO)
#Input: Tensor of size I_1,...,I_M, where m=1 is the measurement mode, ranks k_2,...,k_M, where M is the number of modes to Tensor
#Output: List containing an extended core Tensor Z_ext and and a list of orthogonal matrices U's, were each U_m is of size I_m x k_m for m = 2, ..., M
# mica_als <- function(tnsr,ranks = NULL, max_iter = 500, tol=1e-6){
# require(MASS)
# if(is.null(ranks)) stop("ranks must be specified")
# stopifnot(is(tnsr,"Tensor"))
# #initialization via hosvd of M-1 modes
# num_modes <- getNumModes(tnsr)
# stopifnot(length(ranks)==(num_modes-1))
# ranks <- c(1,ranks)
# modes <- getModes(tnsr)
# U_list <- vector("list",num_modes)
# unfolded_mat <- vector("list",num_modes)
# for(m in 2:num_modes){
# unfolded_mat <- m_unfold(tnsr,m=m)
# mode_m_cov <- unfolded_mat%*%t(unfolded_mat)
# U_list[[m]] <- (svd(mode_m_cov)$u)[,1:ranks[m]]
# }
# Z_ext <- ttl(tnsr,lapply(U_list[-1],solve),ms=2:num_modes)
# curr_iter <- 1
# converged <- FALSE
# #set up convergence check
# fnorm_resids <- rep(0, max_iter)
# CHECK_CONV <- function(Z_ext,U_list){
# est <- ttl(Z_ext,U_list[-1],ms=2:num_modes)
# curr_resid <- fnorm(tnsr - est)
# cat("residual: ",curr_resid,"\n")
# fnorm_resids[curr_iter] <<- curr_resid
# if (curr_iter==1) return(FALSE)
# if (abs(curr_resid-fnorm_resids[curr_iter-1]) < tol) return(TRUE)
# FALSE
# }
# #main loop (until convergence or max_iter)
# while((curr_iter < max_iter) && (!converged)){
# cat("iteration: ",curr_iter,"\t")
# modes_seq <- 2:num_modes
# for(m in modes_seq){
# #extended core Z minus mode m
# X <- ttl(tnsr,lapply(U_list[-c(1,m)],solve),ms=modes_seq[-(m-1)])
# #truncated SVD of X
# U_list[[m]] <- (svd(m_unfold(X,m=m))$u)[,1:ranks[m]]
# }
# #compute core tensor Z_ext
# Z_ext <- ttm(X,mat=solve(U_list[[num_modes]]),m=num_modes)
# #checks convergence
# if(CHECK_CONV(Z_ext, U_list)){
# converged <- TRUE
# }else{
# curr_iter <- curr_iter + 1
# }
# }
# #end of main loop
# #put together return list, and returns
# fnorm_resids <- fnorm_resids[fnorm_resids!=0]
# norm_percent<-1-tail(fnorm_resids,1)/fnorm(tnsr)
# retL <- list(Z_ext=Z_ext, U=U_list, conv=converged, norm_percent = norm_percent, resids=fnorm_resids)
# invisible(retL)
# }
<file_sep>tensors
=======
R package for multi-linear statistics.
A collaboration between <NAME> and <NAME>
##################################<to-do-list>######################################
#########################<to be implemented MUCH later>#############################
#implement tensor normal
#(Make Tensor virtual so that dense tensor & sparse tensor implementations could easily inherit basic tensor properties)
#Example: Dense Tensor Classes (Non-virtual)
### Numeric Dense Tensor
#setClass("ndTensor", representation(data = "array"), contains = "Tensor"
#validity = function(object){
# errors <- character()
# ndtv <- "Pass" #ndtv <- ndTensor_validate(object@data)
# if(ndtv!="Pass"){
# errors <- c(errors, ndtv)
# }
#})
### Integer Dense Tensor
#setClass("idTensor", representation(data = "integer"), contains = c("Tensor", "array"), validity = function(object) idTensor_validate(object))
### Logical Dense Tensor
#setClass("ldTensor", representation(data = "logical"), contains = c("Tensor", "array"), validity = function(object) ldTensor_validate(object))
### Sparse tensor
### Initialization Functions
#setMethod("initialize", "ndTensor", function(.Object){
# modes <-
# .Object@data <- .Internal(array(data,modes,modenames)))
# }
### Rcpp & marry.hxx
#require(Rcpp)
#Sys.setenv("PKG_CXXFLAGS"="-I /Users/jamesyili/cpp_include/")
#sourceCpp(file="/Users/jamesyili/Dropbox/Advanced R/tensor.cpp")
#
#a <- array(1:32, dim=rep(2,5))
#b <- marrayC(a, dim(a))
#
#dims = c(1,24,5,12,12,9,8)
#size = prod(dims)
#a <- array(runif(size), dim = dims)
###Subset setters (NEED refClass??)
# setMethod("setSubtensor", signature="ndTensor",
# definition=function(x, dim=seq(len=max(getNumModes(x),1))){
# })
# setMethod("setFiber", signature="ndTensor",
# definition=function(x, dim=seq(len=max(getNumModes(x),1))){
# })
# setMethod("setSlice", signature="ndTensor",
# definition=function(x, dim=seq(len=max(getNumModes(x),1))){
# })
#################################################################################### | 338cd31f2a93d99df9e9abefa1fa623fbc16c53a | [
"Markdown",
"R",
"C++"
] | 14 | R | dandrews/tensors | 84100d5da7b969e06a4d6e0487cc501c869ceb64 | 9cdf4371f842d58e032750d6fffc1486338ac342 |
refs/heads/master | <repo_name>abddul29karim/lat3<file_sep>/src/main/resources/application.properties
spring.mvc.view.prefix=/WEB-INF/view/
spring.mvc.view.suffix=.jsp
spring.datasource.driver-class-name=com.microsoft.sqlserver.jdbc.SQLServerDriver
spring.datasource.url=jdbc:sqlserver://localhost;databaseName=ecommerce
spring.datasource.username=sa
spring.datasource.password=<PASSWORD>
spring.jpa.show-sql=true
spring.jpa.hibernate.ddl-auto=update
spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.SQLServerDialect
spring.jpa.properties.hibernate.current_session_context_class=org.springframework.orm.hibernate5.SpringSessionContext
spring.jpa.properties.hibernate.id.new_generator_mappings = false
spring.jpa.properties..entitymanager.packagesToScan = com
logging.level.org.hibernate.sql = debug
logging.level.org.hibernate.type.descriptor.sql.BasicBinder=trace<file_sep>/src/main/java/com/btpns/training/latihan3/service/UserService.java
package com.btpns.training.latihan3.service;
import com.btpns.training.latihan3.entity.UserEntity;
import java.util.List;
public interface UserService {
public UserEntity findById(int userId);
public UserEntity findByName(String userName);
public List<UserEntity> findByRoleId(int roleId);
public void insertUser(UserEntity userEntity);
}
<file_sep>/src/main/java/com/btpns/training/latihan3/dao/DaoR.java
package com.btpns.training.latihan3.dao;
import com.btpns.training.latihan3.entity.RoleEntity;
public interface DaoR {
RoleEntity findById(int roleId);
RoleEntity findByName(String roleName);
void insertRole(RoleEntity roleEntity);
}
<file_sep>/src/main/java/com/btpns/training/latihan3/service/UserDetailServiceImpl.java
package com.btpns.training.latihan3.service;
import com.btpns.training.latihan3.entity.UserEntity;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
@Service
public class UserDetailServiceImpl implements UserDetailsService {
@Autowired
private UserService userService;
@Override
public UserDetails loadUserByUsername(String s) throws UsernameNotFoundException {
UserEntity user = userService.findByName(s);
List<GrantedAuthority> grantedAuthorities = new ArrayList<GrantedAuthority>();
grantedAuthorities.add(new SimpleGrantedAuthority(user.getFullName()));
return new User(String.valueOf(user.getUserId()), user.getPassword(),grantedAuthorities);
}
}
<file_sep>/src/main/java/com/btpns/training/latihan3/service/RoleServiceImpl.java
package com.btpns.training.latihan3.service;
import com.btpns.training.latihan3.dao.Dao;
import com.btpns.training.latihan3.dao.DaoR;
import com.btpns.training.latihan3.entity.RoleEntity;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@Service
public class RoleServiceImpl implements RoleService {
@Autowired
private DaoR daoR;
@Override
public RoleEntity findById(int roleId) {
return daoR.findById(roleId);
}
@Override
public RoleEntity findByName(String roleName) {
return daoR.findByName(roleName);
}
@Override
public void insertRole(RoleEntity roleEntity) {
daoR.insertRole(roleEntity);
}
}
| 62bea491422e6d87aa47bcba56295b9923071382 | [
"Java",
"INI"
] | 5 | INI | abddul29karim/lat3 | 26fdb8fc1ae25c31a780e2514612c9818d338e31 | 7c7a863f24a67b268456c2021cdc6a735970a7d8 |
refs/heads/main | <file_sep>#datos
units<-3000
timeA<-3 #min
timeB<-5 #min
Ea<-.95
Eb<-.95
reliabilityA<-.95
reliabilityB<- .9
dA<-.02
dB<-.05
#operaciones
Ib<-units/(1-dB)
Ib
Ia<-Ib/(1-dA)
Ia
minsPerWeek<-5*18*60 #5dyas, 18hrs, 60mins
totA<-timeA*Ia/(reliabilityA*Ea)
totA
totB<-timeB*Ib/(reliabilityB*Eb)
F<-totA+totB+(30*Ia/500)
F
F/minsPerWeek
#ejercicio 2.16
output=5000
5000/(.9)
| e2de46a28e99c3a999f9f50cba5bb8b30b61f32b | [
"R"
] | 1 | R | oaguilarca/Simulacion | b48853fd746b3a1ddee96de011132008faa0acb4 | 0be96620ddcf3768ee06231a8d792bf5df51ed92 |
refs/heads/master | <file_sep>from ec2.EC2Client import EC2Client
from ec2.EC2Instances import EC2Instances
from ec2.EC2Waiter import EC2Waiter
from performance.Neo4jHAConf import Neo4jHAConf
from scripts.ansible.AnsibleRunner import AnsibleRunner
class InstanceConfigurer:
def __init__(self):
self.aws_client = EC2Client()
self.neo4jInstances = EC2Instances()
self.neo4jInstancesIds = []
self.applicationInstances = EC2Instances()
self.applicationInstancesIds = []
self.testDriverInstancesIds = []
self.testDriverInstances = EC2Instances()
self.config = None
self.neo4jHAconfig = None
def load_existing_instances(self):
instances = self.aws_client.getInstances().instances
if len(instances) == 0:
print("No running instances found")
return
self.__save_neo4j_instances(instances)
self.__save_service_instances(instances)
self.__save_test_driver_instances(instances)
print("loaded Neo4j instances: {}".format(self.neo4jInstances.instances))
print("loaded Service instances: {}".format(self.applicationInstances.instances))
print("loaded test driver instances: {}".format(self.testDriverInstances.instances))
def prepare_instances(self, config):
print("Preparing instances...")
self.config = config
self.createNeo4jInstances(config["neo4j"])
self.createApplicationInstances(config["service"])
self.createTestDriverInstances(config["test-driver"])
def createNeo4jInstances(self, neo4j_config):
if neo4j_config["count"] > 0:
instances_to_start = neo4j_config["count"] - len(self.neo4jInstancesIds) if neo4j_config["count"] - len(self.neo4jInstancesIds) > 0 else 0
if instances_to_start > 0:
ids = self.createInstances(neo4j_config["instance-type"], instances_to_start, "neo4j")
self.neo4jInstancesIds = self.neo4jInstancesIds + ids
def createApplicationInstances(self, service_config):
if service_config["count"] > 0 and len(self.applicationInstancesIds) == 0:
ids = self.createInstances(service_config["instance-type"], service_config["count"], "service")
self.applicationInstancesIds = ids
def createTestDriverInstances(self, test_driver_config):
if test_driver_config["count"] > 0 and len(self.testDriverInstancesIds) == 0:
ids = self.createInstances(test_driver_config["instance-type"], test_driver_config["count"], "test-driver")
self.testDriverInstancesIds = ids
def createInstances(self, instance_type, count, purpose):
instances_ids = self.aws_client.createInstances(instance_type, count, purpose)
return instances_ids
def wait_for_instances(self):
all_instances = self.__get_all_ids()
print("Waiting for {} instances...".format(str(all_instances)))
EC2Waiter.waitForRunningState(all_instances)
self.neo4jInstances = self.aws_client.getInstances(self.neo4jInstancesIds, explicit=True)
self.applicationInstances = self.aws_client.getInstances(self.applicationInstancesIds, explicit=True)
self.testDriverInstances = self.aws_client.getInstances(self.testDriverInstancesIds, explicit=True)
    def run_apps(self, dryRun=False):
        """Deploy neo4j, the service, and the test driver (no-op on dryRun)."""
        if not dryRun:
            self.runNeoOnInstances(self.neo4jInstances.ips())
            self.runServices(self.applicationInstances.ips(), self.neo4jInstances.private_ips())
            self.prepareTestDriver(self.testDriverInstances.ips(), self.applicationInstances.private_ips())
    def runNeoOnInstances(self, ips):
        """Start neo4j — as an HA cluster when configured, else a single node."""
        print("Running neo4j on nodes with ips: {}".format(str(ips)))
        if self.config["neo4j"]["cluster"] == "HA":
            print("Running Neo4j in cluster state")
            self.runNeoHAOnInstances()
        else:
            self.runNeoOnSingleInstance(ips[0])
    def runServices(self, nodes_ips, neo4j_node_ips):
        """Deploy the recommender service; in HA mode it reaches neo4j
        through HAproxy, otherwise it talks to the single neo4j node."""
        if len(nodes_ips) == 0:
            return
        print("Running service on nodes with ips: {} and neo4j_node_ips: {}".format(str(nodes_ips), str(neo4j_node_ips)))
        if self.config['neo4j']['cluster'] == "HA":
            AnsibleRunner.runApplicationWithHAproxy(nodes_ips, neo4j_node_ips)
        else:
            AnsibleRunner.runApplication(nodes_ips, neo4j_node_ips[0])
    def prepareTestDriver(self, testDriverIp, service_nodes_ips):
        """Provision the first test-driver node, pointing it at the service
        nodes. `testDriverIp` is a list; only the first ip is used."""
        if len(testDriverIp) == 0:
            return
        print("Preparing test driver node with ip: {} and service nodes ips: {}".format(str(testDriverIp),
                                                                                        str(service_nodes_ips)))
        AnsibleRunner.prepare_test_driver(testDriverIp[0], service_nodes_ips)
    def runNeoOnSingleInstance(self, instanceIp):
        """(Re)start a standalone neo4j with the ml-100k database."""
        AnsibleRunner.remote_restart_neo4j(instanceIp, "ml-100k", True)
    def instances(self):
        """Return all tracked instance groups keyed by purpose."""
        return {"neo4j": self.neo4jInstances, "service": self.applicationInstances,
                "test-driver": self.testDriverInstances}
    def service_ips(self):
        """Public ips of all service instances."""
        return self.applicationInstances.ips()
def killAllInstances(self):
all_instances = self.__get_all_ids()
self.aws_client.killAllInstances(all_instances)
self.neo4jInstances = EC2Instances()
self.neo4jInstancesIds = []
self.applicationInstances = EC2Instances()
self.applicationInstancesIds = []
    def __get_all_ids(self):
        """All tracked instance ids across the three purposes."""
        return self.neo4jInstancesIds + self.applicationInstancesIds + self.testDriverInstancesIds
    def __save_neo4j_instances(self, instances):
        """Cache instances tagged purpose=neo4j from a full instance list."""
        neo4j_instances = list(filter(lambda i: "purpose" in i.tags.keys() and i.tags["purpose"] == "neo4j", instances))
        self.neo4jInstancesIds = list(map(lambda x: x.instanceId, neo4j_instances))
        self.neo4jInstances = EC2Instances(neo4j_instances)
    def __save_service_instances(self, instances):
        """Cache instances tagged purpose=service from a full instance list."""
        service_instances = list(
            filter(lambda i: "purpose" in i.tags.keys() and i.tags["purpose"] == "service", instances))
        self.applicationInstancesIds = list(map(lambda x: x.instanceId, service_instances))
        self.applicationInstances = EC2Instances(service_instances)
    def __save_test_driver_instances(self, instances):
        """Cache instances tagged purpose=test-driver from a full instance list."""
        test_driver_instances = list(
            filter(lambda i: "purpose" in i.tags.keys() and i.tags["purpose"] == "test-driver", instances))
        self.testDriverInstancesIds = list(map(lambda x: x.instanceId, test_driver_instances))
        self.testDriverInstances = EC2Instances(test_driver_instances)
    def test_driver_ip(self):
        """Public ip of the first test-driver instance (assumes one exists)."""
        return self.testDriverInstances.ips()[0]
    def runNeoHAOnInstances(self):
        """Start a Neo4j HA cluster: first instance is master, the rest slaves."""
        master_node = self.neo4jInstances.instances[0]
        slave_nodes = self.neo4jInstances.instances[1:]
        print("Neo4j HA master node: {}".format(master_node))
        print("Neo4j HA slave nodes: {}".format(str(slave_nodes)))
        self.neo4jHAconfig = Neo4jHAConf(master_node, slave_nodes)
        AnsibleRunner.runNeo4jHAMaster(master_node.publicIp, "ml-100k", list(map(lambda x: x.privateIp, slave_nodes)), master_node.privateIp)
        # Slave ids start at 2 — presumably id 1 belongs to the master; TODO confirm.
        for x in range(1, len(slave_nodes) + 1):
            slave_ip = slave_nodes[x - 1].publicIp
            id = x + 1
            AnsibleRunner.runNeo4jHASlave(slave_ip, id, master_node.privateIp, "ml-100k",
                                          list(map(lambda node: node.privateIp, slave_nodes)))
<file_sep>package org.miejski.movies.recommender.api.user.dto
data class UsersIdsResponse(val usersIds: List<Int>)<file_sep>from neo4j_state.assertions.simple_cypher_state_assertion import SimpleCypherStateAssertion
class CosineSimilarityAssertion(SimpleCypherStateAssertion):
    """Ensures cosine similarity relations exist, computing them when absent."""

    def play(self, Neo4jCypherExecutor):
        # Execute the cypher file that materialises cosine similarities.
        self.execute_query(Neo4jCypherExecutor, self.query_to_execute(), self.arguments())

    def is_ok(self, Neo4jCypherExecutor):
        # A single Similarity edge carrying a `cosine` value is proof enough.
        rows = Neo4jCypherExecutor.invoke(self.__check_query())
        return len(rows) > 0

    def query_to_execute(self):
        return "similarity_cosine.cypher"

    def arguments(self):
        return {}

    def __check_query(self):
        return "MATCH (p:Person)-[s:Similarity]->(p2:Person) where exists(s.cosine) return s limit 1"
<file_sep>rootProject.name = 'recommender-performance-tests'
<file_sep>from metrics_plot.metrics_result import MetricsResult
class PageCacheMetric:
    """Parses SAR paging output into a MetricsResult for plotting.

    Left axis: major faults and page steals per second; right axis: %vmeff.
    """

    def __init__(self):
        self.metric_name = "pagecache"
        # Plot colour per series.
        self.colors = {
            "majflt/s": "r",
            "pgsteal/s": "b",
            "%vmeff": 'g'
        }
        # Axis labels for the plotter.
        self.labels = {
            "x": 'time',
            'y_left': 'faults/steals per second',
            "y_right": 'page reclaim efficiency'
        }
        # Which series go on which y-axis.
        self.double_axis = {
            "left": ["majflt/s", "pgsteal/s"],
            "right": ["%vmeff"]
        }

    def read_metrics(self, metrics_dir):
        """SAR based metrics. Return MetricsResult object"""
        with open("{}/{}.log".format(metrics_dir, self.metric_name)) as metrics_file:
            keys, metrics = self.__read_metrics(metrics_file)
            return MetricsResult(self.metric_name, keys, metrics, self.colors, self.labels, self.double_axis)

    def __read_metrics(self, metrics_file):
        # Drop SAR header lines ("Linux ..." banner and the column-name row
        # containing "pgpgin") plus empty lines.
        metrics = metrics_file.readlines()
        metrics = list(filter(lambda x: "pgpgin" not in x and "Linux" not in x, metrics))
        metrics = list(filter(lambda x: len(x) > 0, map(
            lambda y: y.replace('\n',''), metrics
        )))
        # Split each row on whitespace into columns.
        metrics_split = list(
            map(lambda x: list(filter(lambda p: len(p) > 0, x.replace(" ", "\t").split("\t"))), metrics))
        # First column is the timestamp; "PM" suffixes are stripped.
        # NOTE(review): "AM" suffixes are not stripped — confirm the SAR
        # locale/time format used on the measured hosts.
        keys = list(map(lambda x: x[0].strip().replace("PM", ""), metrics_split))
        # Column positions assume the SAR -B output layout — TODO confirm
        # against the sar version producing pagecache.log.
        majflt = self.__to_floats(list(map(lambda x: x[4], metrics_split)))
        pgsteal = self.__to_floats(list(map(lambda x: x[-2], metrics_split)))
        vmeff = self.__to_floats(list(map(lambda x: x[-1], metrics_split)))
        metrics = {
            "majflt/s": majflt,
            "pgsteal/s": pgsteal,
            "%vmeff": vmeff
        }
        return keys, metrics

    def __to_floats(self, l):
        # Convert a list of numeric strings into floats.
        return list(map(lambda x: float(x.strip()), l))
<file_sep>After copying the data from MovieLens, convert it to use the proper delimiter:
cat ratings.dat | sed 's/::/ /g' > full.data
all folds stay in ./cross_validation/ folder
<file_sep>class Neo4jHAConf:
    def __init__(self, masterNode, slave_nodes):
        # Placeholder: HA configuration assembly is not implemented yet.
        pass
class Neo4jHANode:
    """Pairs an EC2 instance with its role label inside the HA cluster."""

    def __init__(self, ec2Instance, label):
        self.label = label
        self.ec2Instance = ec2Instance
<file_sep>package org.miejski.movies.recommender.infrastructure.configuration
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.filter.Filter;
import ch.qos.logback.core.spi.FilterReply;
class ClassLoggingFilter : Filter<ILoggingEvent>() {
override fun decide(event: ILoggingEvent): FilterReply {
if (event.loggerName.contains("org.neo4j.ogm.drivers.http.request")) {
return FilterReply.DENY
}
return FilterReply.ACCEPT
}
}<file_sep>package org.miejski.movies.recommender.infrastructure.dbstate
import org.miejski.movies.recommender.domain.queries.QueriesLoader
import org.miejski.movies.recommender.infrastructure.dbstate.assertions.AssertionsContainer
import org.springframework.beans.factory.InitializingBean
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Component
@Component
class Neo4jStarStateAsserter @Autowired constructor(val cypherExecutor: CypherExecutor,
val assertions: AssertionsContainer) : InitializingBean {
    override fun afterPropertiesSet() {
        // On startup, find every unsatisfied database-state assertion and
        // replay its cypher script. NOTE(review): `filter` is used for its
        // logging side effect as well as for selection.
        assertions.assertions().filter {
            println("Checking assertion: ${it.name()}")
            !it.isOK()
        }
            .forEach {
                println("Executing query for assertion: ${it.name()}")
                val cypher = QueriesLoader().loadCypherQuery(it.queryToExecute())
                cypherExecutor.execute(cypher, it.queryToExecuteParams())
            }
    }
    // Manual trigger that reuses the startup assertion pass.
    fun run() {
        afterPropertiesSet()
    }
}<file_sep>package org.miejski.movies.recommender.infrastructure.dbstate.assertions
import org.miejski.movies.recommender.infrastructure.dbstate.Neo4jStateAssertion
import org.neo4j.ogm.session.Session
import java.util.*
class Similarity_1_Assertion(val session: Session) : Neo4jStateAssertion {
    // Human-readable assertion identifier used in startup logs.
    override fun name(): String {
        return "Similarity_1_Assertion"
    }
    // Cypher script that computes Pearson similarities when the check fails.
    override fun queryToExecute(): String {
        return "start_state/similarity_pearson.cypher"
    }
override fun isOK(): Boolean {
return session.query("Match (p:Person)-[s:Similarity]-(p2:Person) where exists(s.similarity) return s limit 1", HashMap<String, Object>()).count() > 0
}
}<file_sep>package org.miejski.movies.recommender.api.user.dto
import com.fasterxml.jackson.annotation.JsonIgnoreProperties
@JsonIgnoreProperties(ignoreUnknown = true)
data class MovieRatingRequest constructor(val movieId: Long? = null, val rating: Double? = null) {
constructor() : this(null, null)
}<file_sep>package org.miejski.movies.recommender.infrastructure.repositories
import org.miejski.movies.recommender.domain.movie.Movie
import org.springframework.data.neo4j.annotation.Query
import org.springframework.data.neo4j.repository.GraphRepository
import org.springframework.stereotype.Repository
@Repository
open interface MovieRepository : GraphRepository<Movie> {
@Query("MATCH (n:Movie) return n.movie_id")
fun getAllIds(): List<Int>
}<file_sep>from neo4j_state.assertions.simple_cypher_state_assertion import SimpleCypherStateAssertion
class AverageRatingAssertion(SimpleCypherStateAssertion):
    """Ensures each Person node carries an average rating, computing it if absent."""

    def play(self, Neo4jCypherExecutor):
        # Run the cypher file that stores avg_rating on Person nodes.
        self.execute_query(Neo4jCypherExecutor, self.query_to_execute(), self.arguments())

    def is_ok(self, Neo4jCypherExecutor):
        # A single Person with avg_rating set satisfies the assertion.
        rows = Neo4jCypherExecutor.invoke(self.__check_query())
        return len(rows) > 0

    def query_to_execute(self):
        return "average_rating.cypher"

    def arguments(self):
        return {}

    def __check_query(self):
        return "Match (p:Person) where exists(p.avg_rating) return p limit 1"
<file_sep>package org.miejski.movies.recommender.domain
import org.neo4j.ogm.annotation.GraphId
import org.neo4j.ogm.annotation.NodeEntity
@NodeEntity
open class AbstractEntity(@GraphId var id: Long? = null)<file_sep>import os
import requests
class PrecisionRecallMetric():
    def __init__(self, result_folder="/tmp/magisterka/metrics/precisionAndRecall/"):
        # result_folder must end with a path separator; test subfolders are
        # appended directly to it.
        self.result_folder = result_folder
    def run(self, testFilePath, test_name):
        """Kick off the precision/recall computation on the local service."""
        print("Running precision metrics: " + test_name)
        requests.post('http://localhost:8080/metrics/precision',
                      json={'testFilePath': testFilePath, 'testName': test_name}, )
    def finish(self, test_name):
        """Fetch the computed result from the service, persist and return precision."""
        print("Finishing precision and recall metrics for " + test_name)
        response = requests.get('http://localhost:8080/metrics/precision/result')
        response_json = response.json()
        # 'first'/'second' mirror the Kotlin Pair serialization of the service.
        precision = response_json["result"]['first']
        recall = response_json["result"]['second']
        time = response_json["timeInSeconds"]
        print("Precision = {}\nRecall = {}\nTotal time in seconds: {}".format(precision, recall, time))
        self.save_result(test_name, precision, recall, time)
        return precision
def save_result(self, test_name, precision, recall, time):
if not os.path.exists(self.result_folder + test_name):
os.makedirs(self.result_folder + test_name)
with open(self.result_folder + test_name + "/precision.log", mode="w") as result_file:
result_file.write("Final precision = {}\n".format(precision))
result_file.write("Final recall = {}\n".format(recall))
result_file.write("Total time in seconds: {0:.2f}s\n".format(time))
# Ad-hoc manual invocation against a locally running service.
if __name__ == "__main__":
    metrics = PrecisionRecallMetric()
    test_name = "testNameMote"
    metrics.run(
        "/Users/grzegorz.miejski/home/workspaces/datasets/movielens/prepared/ml-100k/cross_validation/ml-100k_test_0",
        test_name)
    metrics.finish(test_name)
<file_sep>package org.miejski.movies.recommender.infrastructure.dbstate.assertions
import org.miejski.movies.recommender.infrastructure.dbstate.Neo4jStateAssertion
class MovieIndexAssertion : Neo4jStateAssertion {
    // Index presence is never probed, so the index script always re-runs.
    override fun isOK(): Boolean {
        return false
    }
    // Cypher script that (re)creates the Movie index.
    override fun queryToExecute(): String {
        return "start_state/movie_index.cypher"
    }
override fun name(): String {
return "MovieIndexAssertion"
}
}<file_sep>unlink graph.db
# Link graph.db to the named database directory, seeding a fresh copy from
# empty_base.db on first use.
curr_dir=$(pwd)
echo "$curr_dir"
if [ $# -eq 0 ]
then
    echo "Usage: $0 [db_name] "
    exit 1
fi
target="$curr_dir/$1"
echo "$target"
if [ ! -d "$target" ]; then
    # First run for this db name: start from the pristine empty database.
    cp -r "$curr_dir/empty_base.db" "$target"
fi
# BUG FIX: the link command was wrapped in backticks, which executed the
# command's (empty) output as another command; also quote paths with spaces.
ln -s "$target" graph.db
class PersonIndexAssertion(SimpleCypherStateAssertion):
    """Creates the Person index; always replays since indexes are not probed."""

    def play(self, Neo4jCypherExecutor):
        self.execute_query(Neo4jCypherExecutor, self.query_to_execute(), self.arguments())

    def is_ok(self, Neo4jCypherExecutor):
        # Index presence is never verified, so the script always re-runs.
        return False

    def query_to_execute(self):
        return "person_index.cypher"

    def arguments(self):
        return {}
<file_sep>from neo4j_state.assertions.simple_cypher_state_assertion import SimpleCypherStateAssertion
class MovieIndexAssertion(SimpleCypherStateAssertion):
    """Creates the Movie index; always replays since indexes are not probed."""

    def play(self, Neo4jCypherExecutor):
        self.execute_query(Neo4jCypherExecutor, self.query_to_execute(), self.arguments())

    def is_ok(self, Neo4jCypherExecutor):
        # Index presence is never verified, so the script always re-runs.
        return False

    def query_to_execute(self):
        return "movie_index.cypher"

    def arguments(self):
        return {}
<file_sep>import collections
import itertools
class RatingsInCommon:
    """Computes, for every pair of users, how many movies both have rated."""

    def results(self, data, resuts_file_path):
        # Map each user to the set of movies she rated; rows are
        # (user, movie, ...) tuples.
        movies_by_user = collections.defaultdict(set)
        pair_counts = []
        for row in data:
            movies_by_user[row[0]].add(row[1])
        # Every unordered pair of distinct users, in insertion order.
        for p1, p2 in itertools.combinations(movies_by_user.keys(), 2):
            shared = movies_by_user[p1] & movies_by_user[p2]
            pair_counts.append((p1, p2, len(shared)))
        self.save_results_to_file(pair_counts, resuts_file_path)

    def save_results_to_file(self, results, resuts_file_path):
        # Tab-separated output with a header row.
        with open(resuts_file_path, mode="w") as file:
            file.write("p1\tp2\tcount\n")
            for p1, p2, count in results:
                file.write("{}\t{}\t{}\n".format(p1, p2, count))
<file_sep>package org.miejski.movies.recommender.domain.recommendations
import org.miejski.movies.recommender.helper.castTo
import org.neo4j.ogm.session.Session
import org.slf4j.LoggerFactory
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@Service
open class RecommendationsService @Autowired constructor(
    val session: Session,
    val recommendationsQuery: RecommendationsQuery)
: RecommendationsServiceI {

    private val logger = LoggerFactory.getLogger(RecommendationsService::class.java)

    /**
     * Returns up to 100 movie recommendations for the user, computed with the
     * given similarity method. With a non-null neighboursCount the
     * best-N-neighbours query variant is used.
     */
    override fun findRecommendedMovies(userId: Long, minSimilarity: Double, similarityMethod: String, neighboursCount: Int?): List<MovieRecommendation> {
        val queryTemplate = if (neighboursCount != null) {
            recommendationsQuery.getRecommendationWIthNBestNeighboursQuery()
        } else recommendationsQuery.getRecommendationQuery()
        // NOTE(review): when neighboursCount is null the placeholder is
        // replaced with the literal "null"; presumably the plain query has no
        // {n_best_neighbours} placeholder — confirm.
        val cypherQuery = queryTemplate.replace("{similarity_method}", similarityMethod)
            .replace("{n_best_neighbours}", neighboursCount.toString())
        val result = session.query(cypherQuery, mapOf(
            Pair("userId", userId),
            Pair("min_similarity", minSimilarity)))
            .castTo(MoviesPredictionScore::class.java)
        return findBestRecommendations(result).take(100)
    }

    // Maps raw prediction rows to the public recommendation shape.
    private fun findBestRecommendations(neighboursPredictionScores: List<MoviesPredictionScore>): List<MovieRecommendation> {
        return neighboursPredictionScores.map {
            MovieRecommendation(it.movieId, it.prediction, it.movieNeighboursRatings.toDouble())
        }
    }

    /**
     * Predicted rating of the movie for the user, or -1.0 when the query
     * yields no prediction.
     */
    override fun predictedRating(userId: Long, movieId: Long): Double {
        val cypherQuery = recommendationsQuery.getPredictionQuery()
        val query = session.query(cypherQuery, mapOf(
            Pair("userId", userId),
            Pair("movieId", movieId)))
        val get = query.firstOrNull()?.get("prediction")?.toString()?.toDouble()
        if (get != null) {
            // NOTE(review): `get` is already a Double — the extra
            // toString().toDouble() round-trip is redundant.
            return get.toString().toDouble()
        }
        logger.info("Predicting rating - returning default value for user {} and movie {}", userId, movieId)
        return -1.0
    }
}
// Raw projection row returned by the recommendation cypher query.
data class MoviesPredictionScore(val movieId: Long, val prediction: Double, val movieNeighboursRatings: Long)
// Final recommendation item exposed to API callers.
data class MovieRecommendation(val movieId: Long, val prediction: Double, val score: Double)
interface RecommendationsServiceI {
fun findRecommendedMovies(userId: Long, minSimilarity: Double = 0.6, similarityMethod: String = "similarity", neighboursCount: Int? = null): List<MovieRecommendation>
fun predictedRating(userId: Long, movieId: Long): Double
}<file_sep>import math
class AccMetrics():
    """Accuracy metrics over (prediction, actual_rating) pairs."""

    @staticmethod
    def calculate_rmse(predictions_and_ratings):
        """Root mean squared error; 0 for an empty input."""
        if not predictions_and_ratings:
            return 0
        squared_errors = [(p - r) ** 2 for p, r in predictions_and_ratings]
        return math.sqrt(sum(squared_errors) / float(len(predictions_and_ratings)))

    @staticmethod
    def calculate_mae(predictions_and_ratings):
        """Mean absolute error; 0 for an empty input."""
        if not predictions_and_ratings:
            return 0
        absolute_errors = [abs(p - r) for p, r in predictions_and_ratings]
        return sum(absolute_errors) / float(len(predictions_and_ratings))
# Quick self-check when run directly.
if __name__ == "__main__":
    mae = AccMetrics.calculate_mae([(4, 5), (3, 5), (2, 5)])
    assert mae == 2
    print(mae)
    rmse = AccMetrics.calculate_rmse([(4, 5), (3, 5), (2, 5)])
    print(rmse)
def print_instances(instance_configurer):
    """Dump id and public ip of every instance, grouped by purpose."""
    grouped = instance_configurer.instances()
    print("**************************************************************************")
    for purpose, group in grouped.items():
        print("{} instances:".format(purpose))
        for inst in group.instances:
            print("Id: {}, publicIp: {}".format(inst.instanceId, inst.publicIp))
        print("--------------------------------------------------------------------------")
    print("**************************************************************************")
# Script entry: list all currently running instances known to AWS.
from performance.InstanceConfigurer import InstanceConfigurer

instance_configurer = InstanceConfigurer()
instance_configurer.load_existing_instances()
print_instances(instance_configurer)
<file_sep>import sys
import os
from metrics_plot.cpu_metric import CPUMetric
from metrics_plot.disk_metric import DiskUtilityMetric
from metrics_plot.pagecache_metric import PageCacheMetric
from metrics_plot.plot_metrics import MetricsPlotter
from shutil import copyfile
def print_usage(scriptName):
    # CLI help shown when the arguments are wrong.
    print("Usage: python " + scriptName + " path_to_logs [os_metrics_result_path]")
def get_latest_sim_folder(files_list):
    """Return the entry holding the most recent "simulation-<timestamp>" run."""
    sim_names = [name for name in files_list if "simulation" in name]
    newest = max(int(name.split("-")[-1]) for name in sim_names)
    # First entry containing the newest timestamp, mirroring the original scan.
    matching = [name for name in files_list if str(newest) in name]
    return matching[0]
def find_png_files(os_metrics_path):
    """List plot image files in the metrics directory.

    BUG FIX: match on the ".png" extension instead of the substring "png",
    which also matched unrelated names such as "png_notes.txt".
    """
    files = os.listdir(os_metrics_path)
    return [name for name in files if name.endswith(".png")]
def copy_plots_to_last_reco_result(simulations_dir, os_metrics_path, folder_to_save_plots=None):
    """Copy the generated .png plots into the newest simulation result folder.

    When folder_to_save_plots is given, plots land in that subfolder of the
    simulation directory (created on demand); otherwise they go to its root.
    """
    path = simulations_dir
    name_list = os.listdir(path)
    lastest_file_name = get_latest_sim_folder(name_list)
    target_simulation_dir = "{}/{}".format(simulations_dir, lastest_file_name)
    png_files_paths = find_png_files(os_metrics_path)
    for png_file in png_files_paths:
        png_file_path = "{}/{}".format(os_metrics_path, png_file)
        if folder_to_save_plots is not None:
            png_target_file_path = "{}/{}/{}".format(target_simulation_dir, folder_to_save_plots, png_file )
            png_target_file_path_folder = "{}/{}".format(target_simulation_dir, folder_to_save_plots )
            if not os.path.exists(png_target_file_path_folder):
                print("Creating folder : {}".format(png_target_file_path_folder))
                os.makedirs(png_target_file_path_folder)
        else:
            png_target_file_path = "{}/{}".format(target_simulation_dir, png_file)
        copyfile(png_file_path, png_target_file_path)
# Script entry: plot OS metrics and attach the plots to the latest simulation.
# argv[1] (optional): metrics input dir; argv[2] (optional): target subfolder.
if __name__ == "__main__":
    print(sys.argv)
    args = sys.argv
    os_metrics_path = "/Users/grzegorz.miejski/magisterka/perf/os_metrics"
    folder_to_save_plots = None
    if len(args) == 2:
        os_metrics_path = args[1]
    if len(args) == 3:
        os_metrics_path = args[1]
        folder_to_save_plots = args[2]
    metrics_plotter = MetricsPlotter(os_metrics_path)
    # PageCacheMetric is currently disabled.
    # metrics = [CPUMetric(), DiskUtilityMetric(), PageCacheMetric()]
    metrics = [CPUMetric(), DiskUtilityMetric()]
    for metric in metrics:
        metric_result = metric.read_metrics(os_metrics_path)
        metrics_plotter.plot(metric_result)
    copy_plots_to_last_reco_result("/Users/grzegorz.miejski/magisterka/perf", os_metrics_path, folder_to_save_plots)
<file_sep>from neo4j_state.assertions.simple_cypher_state_assertion import SimpleCypherStateAssertion
class DataLoadedAssertion(SimpleCypherStateAssertion):
    """Imports the training data file when no Person nodes exist yet."""

    def __init__(self, train_file):
        self.train_file = train_file

    def play(self, Neo4jCypherExecutor):
        # Load the training data via the import cypher script.
        self.execute_query(Neo4jCypherExecutor, self.query_to_execute(), self.arguments())

    def is_ok(self, Neo4jCypherExecutor):
        # Data counts as loaded once at least one Person node exists.
        rows = Neo4jCypherExecutor.invoke(self.__check_query())
        return len(rows) > 0

    def query_to_execute(self):
        return "import_data.cypher"

    def arguments(self):
        return {"trainingDataFile": "file://{}".format(self.train_file)}

    def __check_query(self):
        return "MATCH (a:Person) return a limit 1"
<file_sep># movies-recommender-api
# movies-recommender-api/recommender-performance-tests
Running given scenario example:
./gradlew clean loadTest -Psimulation=org.miejski.movies.recommender.performance.RatingsSimulation -PapplicationUrl=http://172.16.17.32:8080
# deployment
All scripts that run the performance and metrics tests are based on Ansible playbooks and Python.<file_sep>import unittest
from moto import mock_ec2
from ec2.EC2Client import EC2Client
from performance.InstanceConfigurer import InstanceConfigurer
class InstanceConfigurerTest(unittest.TestCase):
    """Integration-style tests for InstanceConfigurer against moto's mock EC2."""

    @mock_ec2
    def test_prepare_instances(self):
        # One neo4j node, two service nodes, one test driver.
        config = {
            "neo4j": {
                "count": 1,
                "cluster": None,
                "instance-type": "t2.micro"
            },
            "service": {
                "count": 2,
                "instance-type": "t2.micro"
            },
            "test-driver": {
                "count": 1,
                "instance-type": "t2.micro"
            }
        }
        instance_configurer = InstanceConfigurer()
        instance_configurer.prepare_instances(config)
        instance_configurer.wait_for_instances()
        instances = instance_configurer.instances()
        self.assertEqual(len(instances["neo4j"].instances), 1)
        self.assertEqual(len(instances["service"].instances), 2)

    @mock_ec2
    def test_retrieve_running_instances(self):
        # A fresh configurer should rediscover instances started by another one.
        config = {
            "neo4j": {
                "count": 1,
                "cluster": None,
                "instance-type": "t2.micro"
            },
            "service": {
                "count": 1,
                "instance-type": "t2.micro"
            },
            "test-driver": {
                "count": 1,
                "instance-type": "t2.micro"
            }
        }
        instance_configurer = InstanceConfigurer()
        instance_configurer.prepare_instances(config)
        instance_configurer.wait_for_instances()
        instance_configurer = InstanceConfigurer()
        instance_configurer.load_existing_instances()
        # shitty moto doesn't support tags
        # instances = instance_configurer.instances()
        # self.assertEqual(len(instances["neo4j"].instances), 1)
        # self.assertEqual(len(instances["service"].instances), 1)

    @mock_ec2
    def test_scale_neo4j_instances(self):
        # Raising the neo4j count should start only the missing instance.
        config = {
            "neo4j": {
                "count": 1,
                "cluster": None,
                "instance-type": "t2.micro"
            },
            "service": {
                "count": 0,
                "instance-type": "t2.micro"
            },
            "test-driver": {
                "count": 0,
                "instance-type": "t2.micro"
            }
        }
        instance_configurer = InstanceConfigurer()
        instance_configurer.prepare_instances(config)
        instance_configurer.wait_for_instances()
        # then
        config['neo4j']['count'] = 2
        instance_configurer = InstanceConfigurer()
        instance_configurer.load_existing_instances()
        instance_configurer.prepare_instances(config)
        instance_configurer.wait_for_instances()
        instances = instance_configurer.instances()
        self.assertEqual(len(instances["neo4j"].instances), 2)
if __name__ == '__main__':
unittest.main()<file_sep>#!/usr/bin/env bash
sim_methods=(cosine pearson_with_sw)
top_n=(10 15 20 25 30 35 40 45 50 100)
for sim_method in ${sim_methods[*]}; do
for n in ${top_n[*]}; do
./gradlew loadTest -Psimulation=org.miejski.movies.recommender.performance.RecommendationsSimulation -PapplicationUrl=http://localhost:8080 -Pmin_similarity=0.0 -Psimilarity_method=${sim_method} -PneighboursCount=${n} >> /tmp/magisterka/perf/${sim_method}_${n}
done
done<file_sep>class EC2Instance:
@staticmethod
def fromJson(json):
return EC2Instance(json['InstanceId'],
json['PublicIpAddress'],
EC2Instance.__to_tags(json['Tags']),
json['PrivateIpAddress'])
    def __init__(self, instanceId, publicIp, tags, privateIp):
        # tags is a plain {key: value} dict (already converted from AWS form).
        self.instanceId = instanceId
        self.publicIp = publicIp
        self.tags = tags
        self.privateIp = privateIp
@staticmethod
def __to_tags(tags_list):
tags = {}
for tag_pair in tags_list:
tags[tag_pair["Key"]] = tag_pair["Value"]
return tags
    def __repr__(self):
        # Debug-friendly representation used in deployment logs.
        return "EC2Instance(instanceId = {}, publicId = {}, privateIp = {}, tags = {})".format(self.instanceId,
                                                                                               self.publicIp,
                                                                                               self.privateIp,
                                                                                               str(self.tags))
<file_sep>import boto3
class EC2Waiter:
ec2client = boto3.client('ec2')
@staticmethod
def waitForState(ids, state):
waiter = EC2Waiter.ec2client.get_waiter(state)
try:
waiter.wait(
DryRun=False,
InstanceIds=ids,
)
except Exception as e:
print("Error waiting for instances turn into state: " + state)
print("instances: " + str(ids) + " turned into state: " + state)
    @staticmethod
    def waitForRunningState(ids):
        # Uses 'instance_status_ok' (status checks passed), which is stricter
        # than merely 'running'.
        EC2Waiter.waitForState(ids, 'instance_status_ok')
    @staticmethod
    def waitForTerminatedState(ids):
        # Blocks until all given instances are fully terminated.
        EC2Waiter.waitForState(ids, 'instance_terminated')
<file_sep>import collections
from collections import defaultdict
from itertools import chain
def flatten(listOfLists):
    """Concatenate one level of nesting: [[a], [b, c]] -> [a, b, c]."""
    return [item for sublist in listOfLists for item in sublist]
class FoldsCreator():
    def __init__(self, folds_filename_prefix="", output_directory=".", with_asserts=True):
        # Empty prefix means "derive it from the input file name" in create().
        self.folds_filename_prefix = folds_filename_prefix
        self.output_directory = output_directory
        self.data_separator = "\t"
        # Toggle for the train/test split sanity check.
        self.with_asserts = with_asserts
def create(self, ratings_file_path, k=5):
if self.folds_filename_prefix is "":
self.folds_filename_prefix = ratings_file_path[ratings_file_path.rindex("/"):]
with open(ratings_file_path) as ratingsFile:
usersRatings = collections.defaultdict(list)
for line in ratingsFile:
if line.startswith("user_id"):
continue
split = line.replace("\n", '').split(self.data_separator)
user_hash = split[0].__hash__() % k
usersRatings[user_hash].append(split)
print("Number of ratings in each hash bucket")
print(list(map(lambda x: len(x[1]), usersRatings.items())))
print("Number of users in each hash bucket")
print(list(map(lambda y: len(collections.Counter(list(map(lambda t: t[0], y))).keys()),
map(lambda x: x[1], usersRatings.items()))))
self.createFolds(usersRatings, k)
    def createFolds(self, hashed_users_ratings, k, query_ratio=0.8):
        """For each bucket i, write a time-consistent train/test fold pair.

        The test bucket is split per user into "query" (older) and held-out
        (newer) ratings; training data is the other buckets truncated to
        before the newest query timestamp, plus the query ratings.
        """
        users_ratings_buckets = list(map(lambda x: x[1], hashed_users_ratings.items()))
        for i in range(0, k):
            query_ratings, unsorted_real_test_ratings = self.split_test_for_query(hashed_users_ratings[i], query_ratio)
            real_test_ratings = sorted(unsorted_real_test_ratings, key=lambda x: x[3])
            sorted_query_ratings = sorted(query_ratings, key=lambda x: x[3])
            # Newest query timestamp — the "present" for this fold.
            point_in_time = int(sorted_query_ratings[-1][3])
            base_training_ratings = flatten(users_ratings_buckets[:i] + users_ratings_buckets[i + 1:])
            self.assert_proper_split(query_ratings, real_test_ratings)
            real_training_ratings = list(
                filter(lambda r: int(r[3]) < point_in_time, base_training_ratings)) + sorted_query_ratings
            self.write_training_data(real_training_ratings, i)
            self.write_test_data(real_test_ratings, i)
def split_test_for_query(self, test_set, query_ratio):
real_test_set = []
query_ratings = []
users_ratings = defaultdict(list)
for rating in test_set:
users_ratings[rating[0]].append(rating)
for user, ratings in users_ratings.items():
ratings_sorted_by_date = list(sorted(ratings, key=lambda x: x[3]))
query_index = int(query_ratio * len(ratings_sorted_by_date))
user_query_ratings = ratings_sorted_by_date[:query_index]
user_real_test_ratings = ratings_sorted_by_date[query_index:]
real_test_set += user_real_test_ratings
query_ratings += user_query_ratings
return query_ratings, real_test_set
    def get_output_file(self, output_file_name):
        """Open an output file for writing, inside output_directory if set."""
        if self.output_directory != "":
            return open(self.output_directory + "/" + output_file_name, mode="w")
        return open(output_file_name, mode="w")
    def write_training_data(self, real_training_ratings, i):
        """Write fold i's training ratings to <prefix>_train_<i>."""
        output_file_name = self.folds_filename_prefix + "_train_" + str(i)
        self.write_data(output_file_name, real_training_ratings)
def write_data(self, output_file_name, real_training_ratings):
with self.get_output_file(output_file_name) as ratingsFile:
ratingsFile.write("user_id\tmovie_id\trating\ttimestamp\n")
for x in real_training_ratings:
ratingsFile.write(self.data_separator.join(x) + "\n")
    def write_test_data(self, real_training_ratings, i):
        """Write fold i's test ratings to <prefix>_test_<i>."""
        output_file_name = self.folds_filename_prefix + "_test_" + str(i)
        self.write_data(output_file_name, real_training_ratings)
def assert_proper_split(self, query_ratings, real_test_ratings):
if self.with_asserts:
latest_query = max(map(lambda x: int(x[3]), query_ratings))
latest_test = max(map(lambda x: int(x[3]), real_test_ratings))
assert latest_query <= latest_test
# Manual invocation: build cross-validation folds for the chosen dataset size.
if __name__ == "__main__":
    prefix = "ml-100k"
    # prefix = "ml-1m"
    # prefix = "ml-10m"
    # prefix = "ml-20m"
    FoldsCreator(prefix,
                 "/Users/grzegorz.miejski/home/workspaces/datasets/movielens/prepared/" + prefix + "/cross_validation") \
        .create("/Users/grzegorz.miejski/home/workspaces/datasets/movielens/prepared/" + prefix + "/full.data")
<file_sep>package org.miejski.movies.recommender.api.user
import org.miejski.movies.recommender.api.user.dto.MovieRatingRequest
import org.miejski.movies.recommender.domain.user.Person
import org.miejski.movies.recommender.domain.user.UsersService
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.http.MediaType.APPLICATION_JSON_VALUE
import org.springframework.web.bind.annotation.PathVariable
import org.springframework.web.bind.annotation.RequestBody
import org.springframework.web.bind.annotation.RequestMapping
import org.springframework.web.bind.annotation.RequestMethod.POST
import org.springframework.web.bind.annotation.RestController
@RestController
class UsersController @Autowired constructor(val usersService: UsersService) {
    // Fetch a single user by id.
    @RequestMapping(value = "/users/{userId}")
    fun getUser(@PathVariable("userId") userId: Long): Person {
        return usersService.findUserById(userId)
    }
@RequestMapping(value = "/users")
fun getAllUsers(): List<Person> {
return emptyList()
return usersService.findAll()
}
    // Lightweight listing of all user ids (used by the test driver).
    @RequestMapping(value = "/users/ids")
    fun getAllUsersIds(): List<Int> {
        return usersService.findAllIds()
    }
    // Persist a new movie rating for the given user.
    @RequestMapping(value = "/users/{userId}/ratings",
        method = arrayOf(POST),
        consumes = arrayOf(APPLICATION_JSON_VALUE))
    fun rateMovie(@RequestBody movieRating: MovieRatingRequest, @PathVariable("userId") userId: Long) {
        return usersService.rateMovie(userId, movieRating)
    }
}<file_sep>package org.miejski.movies.recommender.domain.recommendations
import org.miejski.movies.recommender.domain.queries.QueriesLoader
import org.springframework.stereotype.Component
@Component
class RecommendationsQuery : QueriesLoader() {
fun getRecommendationQuery(): String {
return loadCypherQuery("similiarity_neighbours_recommendation.cypher")
}
fun getRecommendationWIthNBestNeighboursQuery(): String {
return loadCypherQuery("similiarity_neighbours_recommendation_best_neighbours.cypher")
}
fun getPredictionQuery(): String {
return loadCypherQuery("similarity_predicted_rating.cypher")
}
}<file_sep>package org.miejski.movies.recommender.domain.metrics.decisionSupport
import org.miejski.movies.recommender.api.metrics.MetricsResult
import org.miejski.movies.recommender.domain.metrics.MetricsService
import org.miejski.movies.recommender.domain.metrics.accuracy.RealRating
import org.miejski.movies.recommender.domain.recommendations.MovieRecommendation
import org.miejski.movies.recommender.domain.recommendations.RecommendationsServiceI
import org.miejski.movies.recommender.domain.user.UsersService
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@Service
open class PrecisionAndRecallService @Autowired constructor(
val recommendationsService: RecommendationsServiceI,
val userService: UsersService)
: MetricsService<Pair<Double, Double>>() {
private var precisionSupportAccumulator: DecisionSupportAccumulator = DecisionSupportAccumulator()
/**
 * Computes decision-support metrics (precision & recall) over one fold of
 * real ratings and stores the per-fold averages in the accumulator.
 * Returns the mean per-user precision of this fold.
 */
override fun run(realRatings: List<RealRating>): Double {
    start()
    // Group the fold's ratings by user id.
    val userRatings = realRatings.groupBy { it.person }
    // For every user, fetch recommendations concurrently (no neighbour cap).
    val usersWithRatingsAndReco: List<Triple<Long, List<RealRating>, List<MovieRecommendation>>> = runAsyncAndGather(
        userRatings.toList(), { Triple(it.first, it.second, recommendationsService.findRecommendedMovies(it.first, neighboursCount = null)) })
    val recoMapByUser = usersWithRatingsAndReco.map { Pair(it.first, it.third) }.toMap()
    // Mean rating per user, also fetched concurrently.
    val usersMeanRatings: Map<Long, Double> = runAsyncAndGather(
        userRatings.keys.toList(),
        { Pair(it, userService.getMeanRating(it)) })
        .toMap()
    // "Liked" = rated above the user's own mean rating.
    val moviesLikedByUsers = getMoviesLikedByUsers(userRatings, usersMeanRatings)
    val goodRecommendationsCountPerUser: Map<Long, Int> = calculateGoodRecommendationsCount(
        moviesLikedByUsers,
        usersWithRatingsAndReco)
    // precision = liked-and-recommended / recommended.
    // NOTE(review): unlike recall below, precision is not guarded against NaN
    // (a user with zero recommendations yields 0/0) - confirm that cannot happen.
    val precisionPerUser = recoMapByUser.map { goodRecommendationsCountPerUser.get(it.key)!!.toDouble() / recoMapByUser.get(it.key)!!.count() }
    // recall = liked-and-recommended / liked, with 0/0 mapped to 0.0.
    val recallPerUser = recoMapByUser
        .map { Pair(it.key, goodRecommendationsCountPerUser.get(it.key)!!.toDouble() / moviesLikedByUsers.get(it.key)!!.count()) }
        .map { Pair(it.first, if (it.second.equals(Double.NaN)) 0.0 else it.second) }
    precisionSupportAccumulator.saveResult(precisionPerUser.average(), recallPerUser.map { it.second }.average(), timeInSeconds())
    return precisionPerUser.average()
}
/**
 * For every user, collects the ids of movies rated above that user's mean
 * rating. Users missing from [usersMeanRatings] fall back to a mean of 1.0.
 */
private fun getMoviesLikedByUsers(userRatings: Map<Long, List<RealRating>>, usersMeanRatings: Map<Long, Double>): Map<Long, List<Long>> {
    val liked = mutableMapOf<Long, List<Long>>()
    for ((user, ratings) in userRatings) {
        val mean = usersMeanRatings.getOrElse(user, { 1.0 })
        liked[user] = ratings.filter { it.rating > mean }.map { it.movie }
    }
    return liked
}
/** Counts, per user, how many recommended movies fall into that user's "liked" set. */
private fun calculateGoodRecommendationsCount(moviesLikedByUsers: Map<Long, List<Long>>,
                                              usersWithRatingsAndReco: List<Triple<Long, List<RealRating>, List<MovieRecommendation>>>): Map<Long, Int> {
    val hits = mutableMapOf<Long, Int>()
    for ((user, _, recommendations) in usersWithRatingsAndReco) {
        val liked = moviesLikedByUsers[user]!!
        hits[user] = recommendations.map { it.movieId }.count { liked.contains(it) }
    }
    return hits
}
/**
 * Precision@n / recall@n: same computation as [run] but restricted to each
 * user's top [n] recommendations.
 * NOTE(review): currently not called from [run]; it duplicates that logic
 * with a take(n) cap - candidate for consolidation.
 */
private fun precAndRecallAtN(moviesLikedByUsers: Map<Long, List<Long>>, usersWithRatingsAndReco: List<Triple<Long, List<RealRating>, List<MovieRecommendation>>>, n: Int): Pair<Double, Double> {
    val recoMapByUser = usersWithRatingsAndReco.map { Pair(it.first, it.third.take(n)) }.toMap()
    // Hits = top-n recommended movies the user actually liked.
    val goodRecommendationsCountPerUser = usersWithRatingsAndReco.map { userRecord ->
        Pair(
            userRecord.first,
            userRecord.third.take(n).map { it.movieId }.count { moviesLikedByUsers[userRecord.first]!!.contains(it) })
    }.toMap()
    val precisionPerUser = recoMapByUser.map { goodRecommendationsCountPerUser.get(it.key)!!.toDouble() / recoMapByUser.get(it.key)!!.count() }
    // Recall guards against 0/0 (user with no liked movies) by mapping NaN to 0.0.
    val recallPerUser = recoMapByUser
        .map { Pair(it.key, goodRecommendationsCountPerUser.get(it.key)!!.toDouble() / moviesLikedByUsers.get(it.key)!!.count()) }
        .map { Pair(it.first, if (it.second.equals(Double.NaN)) 0.0 else it.second) }
    return Pair(precisionPerUser.average(), recallPerUser.map { it.second }.average())
}
/**
 * Builds the final metric (mean precision, mean recall) over all accumulated
 * folds, then resets the accumulator so the service can be reused.
 */
override fun finish(): MetricsResult<Pair<Double, Double>> {
    val accumulator = precisionSupportAccumulator
    val meanPrecision = accumulator.precisionResults.average()
    val meanRecall = accumulator.recallResults.average()
    val totalTime = accumulator.times.sum()
    precisionSupportAccumulator = DecisionSupportAccumulator()
    return MetricsResult(Pair(meanPrecision, meanRecall), totalTime, mapOf())
}
}<file_sep>#!/usr/bin/env bash
# copy dataset to remote (NOTE: the host "192.168.127.1264" below is not a
# valid IPv4 address - last octet is 1264; verify the real host before use):
scp -r -i /Users/grzegorz.miejski/.ssh/movies-recommender-service.pem ml-100k/ ec2-user@192.168.127.1264:/home/ec2-user/programming/neo4j-community-3.0.4/data/databases/<file_sep>buildscript {
ext.spring_boot_version = '1.4.0.RELEASE'
repositories {
mavenCentral()
}
dependencies {
classpath "org.springframework.boot:spring-boot-gradle-plugin:$spring_boot_version"
classpath 'com.github.rholder:gradle-one-jar:1.0.4'
}
}
apply plugin: 'gradle-one-jar'
apply plugin: 'java'
apply plugin: 'idea'
apply plugin: 'spring-boot'
apply plugin: 'groovy'
apply plugin: 'application'
repositories {
mavenCentral()
}
dependencies {
compile project(":movies-recommender-service")
compile 'org.neo4j.driver:neo4j-java-driver:1.0.1'
}
sourceCompatibility = 1.8
targetCompatibility = 1.8
// Assembles the runnable metrics jar: a fat jar that also bundles every
// compile-scope dependency (the 'from' block unzips each dependency archive in).
jar {
    manifest {
        attributes 'Main-Class': 'org.miejski.movies.recommender.MainClass'
    }
    baseName = 'movies-recommender-metrics'
    version = '0.1.0'
    // Merge all compile dependencies into this archive; .pom files are
    // skipped since they are metadata, not classes.
    from {
        configurations.compile.filter { !it.name.endsWith(".pom") }.collect { it.isDirectory() ? it : zipTree(it) }
    }
}
group = 'gmiejski'
mainClassName = "org.miejski.movies.recommender.MainClass"
sourceSets {
test {
groovy.srcDirs = ['src/test/groovy']
}
integration {
groovy.srcDirs = ['src/integration/groovy']
resources.srcDir 'src/test/resources'
compileClasspath += main.output + test.output
runtimeClasspath += main.output + test.output
}
}
configurations {
integrationCompile.extendsFrom testCompile
integrationRuntime.extendsFrom testRuntime
}
task integration(type: Test, description: 'Runs the integration tests.', group: 'Verification') {
testClassesDir = sourceSets.integration.output.classesDir
classpath = sourceSets.integration.runtimeClasspath
}
task fatJar(type: Jar) {
manifest {
attributes 'Main-Class': 'org.miejski.movies.recommender.MainClass'
}
baseName = project.name + '-all'
from {
configurations.compile.filter { !it.name.endsWith(".pom") }.collect { it.isDirectory() ? it : zipTree(it) }
} {
exclude "META-INF/*.SF"
exclude "META-INF/*.DSA"
exclude "META-INF/*.RSA"
}
with jar
}
task myjar(type: OneJar) {
mainClass = 'org.miejski.movies.recommender.MainClass'
}<file_sep>package org.miejski.movies.recommender.api
import org.miejski.movies.recommender.domain.movie.Movie
import org.miejski.movies.recommender.infrastructure.repositories.MovieRepository
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.http.MediaType.APPLICATION_JSON_VALUE
import org.springframework.web.bind.annotation.PathVariable
import org.springframework.web.bind.annotation.RequestBody
import org.springframework.web.bind.annotation.RequestMapping
import org.springframework.web.bind.annotation.RequestMethod.POST
import org.springframework.web.bind.annotation.RestController
@RestController
open class MovieController @Autowired constructor(val movieRepository: MovieRepository) {
/** Looks up a single movie by id; returns null when no such movie exists. */
@RequestMapping(value = "/movies/{movieId}")
fun getMovie(@PathVariable("movieId") movieId: Long): Movie? =
    movieRepository.findOne(movieId)
/** Creates (or overwrites) a movie from the JSON request body. */
@RequestMapping(value = "/movies/{movieId}",
        method = arrayOf(POST),
        consumes = arrayOf(APPLICATION_JSON_VALUE))
fun createMovie(@RequestBody movie: Movie) {
    // BUG FIX: the endpoint declares consumes = application/json, but the
    // movie parameter was not annotated with @RequestBody, so Spring bound it
    // from query parameters instead of deserializing the JSON payload.
    movieRepository.save(movie)
}
/** Returns the ids of every movie in the repository. */
@RequestMapping(value = "/movies/ids")
fun ids(): List<Int> = movieRepository.getAllIds()
}<file_sep>#!/bin/bash
source /Users/grzegorz.miejski/home/workspaces/private/magisterka/movies-recommender-api/deployment/scripts/runner/bin/activate
python metrics_local_test_main.py $@
<file_sep>#!/usr/bin/env bash
set -o xtrace
# Swaps the neo4j "graph.db" symlink to a fresh copy of the empty template
# database, optionally deleting the previously linked database.
# Usage: $0 neo4jHomeFolder [db_name] [remove_prev(1 if yes)]
db_name="new_empty.db"
if [ $# -eq 0 ]
then
    echo "Usage: $0 neo4jHomeFolder [db_name] [remove_prev(1 if yes)]"
    exit 1
fi
if [ $# -gt 1 ]
then
    db_name=$2
fi
neo4jHomeFolder=$1
neo4j_command=${neo4jHomeFolder}"/bin/neo4j"
# Stop the server before touching the database directory.
neo4j_stop=`${neo4j_command} stop`
databaseDirectory=${neo4jHomeFolder}"/data/databases/"
symlinkName="graph.db"
# Pristine template database copied for every new db_name.
empty_base_directory=${databaseDirectory}empty_base.db
if [ -d "$neo4jHomeFolder" ]; then # database exists
    if [ -d "$databaseDirectory$symlinkName" ]; then # clear previous symlink
        original=`readlink ${databaseDirectory}${symlinkName}`
        unlink ${databaseDirectory}${symlinkName}
        # Third arg == 1 requests deletion of the database the old link pointed at.
        if [ $# -eq 3 ] && [ $3 -eq 1 ]; then
            echo "Removing " ${original}
            rm -rf ${original}
        else
            echo "Not removing" ${original}
        fi
    fi
    cp -r ${empty_base_directory} ${databaseDirectory}${db_name}
    ln -s ${databaseDirectory}${db_name} ${databaseDirectory}${symlinkName} # symlink to use given database
    echo "Cleared neo4j DB - new name = "${db_name}
else
    echo "No neo4j basic folder exists at $neo4jHomeFolder"
    exit 1
fi
neo4j_start=`${neo4j_command} start`<file_sep>package org.miejski.movies.recommender.infrastructure.dbstate
import org.miejski.movies.recommender.infrastructure.dbstate.assertions.AssertionsContainer
import org.neo4j.ogm.session.Session
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Component
@Component
open class AppAssertionsContainer @Autowired constructor(val session: Session) : AssertionsContainer {
/** Production container: ships no database-state assertions. */
override fun assertions(): List<Neo4jStateAssertion> = emptyList()
}<file_sep>from performance.InstanceConfigurer import InstanceConfigurer
from performance.InstanceStateChecker import InstanceStateChecker
from ansible.AnsibleRunner import AnsibleRunner
config = {
"neo4j": {
"count": 2,
# "cluster": None,
"cluster": "HA",
# "cluster": "Casual",
"instance-type": "t2.micro"
},
"service": {
"count": 1,
"instance-type": "t2.micro"
},
"test-driver": {
"count": 1,
"instance-type": "t2.small"
}
}
instance_configurer = InstanceConfigurer()
instance_configurer.load_existing_instances()
instance_configurer.prepare_instances(config)
instance_configurer.wait_for_instances()
# instance_configurer.run_apps(dryRun=True)
instance_configurer.run_apps(dryRun=False)
service_checker = InstanceStateChecker(instance_configurer.service_ips())
service_checker.wait_for_services()
warmup_reco_config = {
"max_users": 100,
"wait_interval": 500,
"run_time": 2
}
warmup_ratings_config = {
"max_users": 500,
"wait_interval": 50,
"run_time": 2
}
# AnsibleRunner.run_warmup_on_driver(instance_configurer.test_driver_ip(), "RatingsSimulation", warmup_ratings_config)
# AnsibleRunner.run_warmup_on_driver(instance_configurer.test_driver_ip(), "RecommendationsSimulation", warmup_reco_config)
AnsibleRunner.start_collecting_metrics(instance_configurer.neo4jInstances.ips())
reco_config = {
"max_users": 200,
"wait_interval": 1000,
"run_time": 1
}
ratings_config = {
"max_users": 10,
"wait_interval": 50,
"run_time": 1
}
# AnsibleRunner.run_tests_on_driver(instance_configurer.test_driver_ip(), "RecommendationsSimulation", reco_config)
AnsibleRunner.run_tests_on_driver(instance_configurer.test_driver_ip(), "RatingsSimulation", ratings_config)
AnsibleRunner.download_os_metrics(instance_configurer.neo4jInstances.ips())
print("Reco -> Rq/s : {}".format(1000 / reco_config['wait_interval'] * reco_config['max_users']))
print("Ratings -> Rq/s : {}".format(1000 / ratings_config['wait_interval'] * ratings_config['max_users']))
<file_sep>#!/usr/bin/env bash
set -o xtrace
`./clear_local_neo4j.sh /Users/grzegorz.miejski/programming/neo4j/neo4j-community-3.0.4 metrics.db`
# populate database with specific
<file_sep>package org.miejski.movies.recommender.infrastructure.dbstate.assertions
import org.miejski.movies.recommender.infrastructure.dbstate.Neo4jStateAssertion
interface AssertionsContainer {
fun assertions(): List<Neo4jStateAssertion>
}<file_sep>class NeighboursCountBasedDetails:
def __init__(self, neighbours_min_similarity, similarity_method, top_n_neighbours):
self.neighbours_min_similarity = neighbours_min_similarity
self.similarity_method = similarity_method
self.top_n_neighbours = top_n_neighbours
def prepare_metric_cypher(self, testFilePath):
prediction_cypher = "/Users/grzegorz.miejski/home/workspaces/private/magisterka/movies-recommender-api/movies-recommender-service/src/main/resources/cypher/similarity_predicted_rating_best_neighbours_for_metric.cypher"
cypher_template = self.load_file(prediction_cypher)
prefix = """LOAD CSV WITH HEADERS FROM 'file://{}' AS line FIELDTERMINATOR '\\t'
WITH TOINT(line.user_id) as user, TOINT(line.movie_id) as movie, TOFLOAT(line.rating) as original_rating
""".format(testFilePath)
ready_cypher = prefix + cypher_template.replace("{neighbours_min_similarity}", str(self.neighbours_min_similarity)) \
.replace("{similarity_method}", self.similarity_method) \
.replace("{n_best_neighbours}", str(self.top_n_neighbours))
return ready_cypher
def load_file(self, prediction_cypher):
with open(prediction_cypher) as c:
cypher = c.readlines()
return " ".join(cypher)
def get_result_file_name(self):
    """Relative path of the accuracy log for this parameter combination."""
    template = ("top_neighbours_based_prediction/"
                "accuracy_similarityMethod:{}_neighboursMinSimilarity:{}_topN:{}.log")
    return template.format(self.similarity_method, self.neighbours_min_similarity, self.top_n_neighbours)
def __str__(self):
return "NeighboursCountBasedDetails(neighbours_min_similarity={}, similarity_method={}, top_n_neighbours={})".format(
self.neighbours_min_similarity, self.similarity_method, self.top_n_neighbours)
<file_sep>import time
from requests import get
class InstanceStateChecker:
def __init__(self, service_ips):
self.service_ips = service_ips
def wait_for_services(self):
    """Block until every service ip answers GET /users/ids with HTTP 200.

    Re-polls the not-yet-responding instances once per second round and
    prints a status report after each round.
    """
    responding = []
    not_responding = self.service_ips
    while True:
        # BUG FIX: the original removed items from `not_responding` while
        # iterating over the same list; Python's list iterator then skips the
        # element following every removal, so some instances were only
        # checked every other round. Iterate over a snapshot instead.
        for ip in list(not_responding):
            response = None
            try:
                response = get("http://{}:8080/users/ids".format(ip), timeout=3)
            except Exception:
                print("Timeout on {}".format(ip))
            if response is not None and response.status_code == 200:
                responding.append(ip)
                not_responding.remove(ip)
        self.print_status(responding, not_responding)
        if len(not_responding) == 0:
            break
        else:
            time.sleep(1)
def print_status(self, responding, not_responding):
print("###############################################################")
print("Instance state checker report:")
for i in responding:
print("Responding: {}".format(i))
for i in not_responding:
print("Not responding: {}".format(i))
print("###############################################################")
<file_sep>import collections
import os
import time
from neo4j_state.assertions.average_rating_assertion import AverageRatingAssertion
from neo4j_state.assertions.cosine_similarity_assertion import CosineSimilarityAssertion
from neo4j_state.assertions.movie_index_assertion import MovieIndexAssertion
from neo4j_state.assertions.movies_in_common_count import MoviesInCommonAssertion
from neo4j_state.assertions.neo4j_data_assertion import DataLoadedAssertion
from neo4j_state.assertions.pearson_similarity_assertion import PearsonSimilarityAssertion
from neo4j_state.assertions.pearson_similarity_with_sw import PearsonWithSWAssertion
from neo4j_state.assertions.person_index_assertion import PersonIndexAssertion
from neo4j_state.neo4j_cypher_executor import Neo4jCypherExecutor
from neo4j_state.neo4j_state import Neo4jStateAssertions
from ansible.AnsibleRunner import AnsibleRunner
from metrics.AccuracyMetricsRunner import AccuracyMetricsRunner
from metrics.acc_metrics_details.n_best_neighbours_details import NeighboursCountBasedDetails
class LocalAccuracyMetricsRunner:
def __init__(self, metrics_runners):
self.metrics_runners = metrics_runners
def find_cross_validation_folds_datas(self, folds_folder):
    """Pair up the train/test files of each cross-validation fold.

    Expects `folds_folder` to contain matching numbers of files with
    "train" / "test" in their names. Returns one dict per fold with full
    paths under "test"/"train" and the train file name as "fold".
    """
    print(folds_folder)
    # BUG FIX: os.listdir() returns entries in arbitrary, platform-dependent
    # order, so a train file could be paired with the wrong fold's test file.
    # Sorting makes the fold pairing deterministic.
    files = sorted(os.listdir(folds_folder))
    train_file_names = [f for f in files if "train" in f]
    test_file_names = [f for f in files if "test" in f]
    number_of_folds = len(test_file_names)
    cross_validation_folds = []
    for x in range(0, number_of_folds):
        cross_validation_folds.append({
            "test": folds_folder + test_file_names[x],
            "train": folds_folder + train_file_names[x],
            "fold": train_file_names[x]})
    return cross_validation_folds
def neo4j_assertions(self, fold_data):
    # Database-state assertions that must hold before metrics can run on a
    # fold: indexes exist, the fold's training data is loaded, and the
    # average-rating / similarity precomputations are present.
    # NOTE(review): order likely matters (indexes before data, data before
    # similarities) - confirm before reordering.
    return [MovieIndexAssertion(), PersonIndexAssertion(), DataLoadedAssertion(fold_data["train"]),
            AverageRatingAssertion(), PearsonSimilarityAssertion(),
            MoviesInCommonAssertion(fold_data["train"]),
            PearsonWithSWAssertion(fold_data["train"], fold_data["fold"], rerun=False),
            CosineSimilarityAssertion()
            ]
def play(self, folds_directory, dataset):
    """Run every metric over every cross-validation fold, then summarize.

    For each fold: restart the local neo4j on that fold's database, verify
    the database-state assertions, and run all configured metric runners
    against the fold's test file.
    """
    crossValidationDatas = self.find_cross_validation_folds_datas(folds_directory)
    for fold_data in crossValidationDatas:
        print("Start working on fold_data: {}".format(fold_data))
        AnsibleRunner.restartLocalNeo4j(fold_data["fold"])
        Neo4jStateAssertions(Neo4jCypherExecutor(), self.neo4j_assertions(fold_data)).run_assertions()
        for metric in self.metrics_runners:
            metric.run(fold_data["test"], dataset, fold_data["fold"])
    # All folds done: let each runner aggregate its per-fold results.
    for metric in self.metrics_runners:
        metric.finish(dataset)
    self.save_summary(dataset)
def save_summary(self, dataset):
    """Write one CSV summary per similarity method with the final
    rmse/mae/percentage-found results of every metric runner.

    NOTE(review): the output path hardcodes "ml-100k" even though a
    `dataset` argument is passed in - presumably a leftover; confirm.
    """
    # Bucket final results by similarity method.
    general_results = collections.defaultdict(list)
    for metric in self.metrics_runners:
        tuple = metric.get_final_result_and_details()
        general_results[tuple[1].similarity_method].append(tuple)
    now = time.time()
    for similarity_method in general_results.keys():
        with open("/tmp/magisterka/metrics/accuracy/ml-100k/summary_{}_{}.csv".format(
                similarity_method, now), "w") as summary_file:
            summary_file.write("rmse,mae,percentage_found,neighbours_min_similarity,top_n\n")
            for result in general_results[similarity_method]:
                fr = result[0]
                details = result[1]
                summary_file.write("{},{},{:.2f},{}".format(fr.rmse, fr.mae, fr.ratings_found_percentage,
                                                            details.neighbours_min_similarity))
                # Not all details types carry a top-N; write 0 as a placeholder.
                if hasattr(details, 'top_n_neighbours'):
                    summary_file.write(",{}".format(details.top_n_neighbours))
                else:
                    summary_file.write(",0")
                summary_file.write("\n")
def generate_metrics_to_run():
    """Build the cartesian product of metric parameter combinations to evaluate."""
    # Parameter grids - commented-out alternatives kept from earlier experiments.
    # similarities = [0.0, 0.02, 0.04, 0.06, 0.08, 0.1, 0.12, 0.14, 0.16]
    similarities = [0.0]
    similarity_methods = ["pearson_with_sw"]
    # similarity_methods = ["similarity", "pearson_with_sw", "cosine"]
    # top_n = [10, 15, 20, 25, 30, 35, 40, 45, 50, 100]
    top_n = [10, 15, 20, 25, 30]
    # top_n = [35, 40, 45, 50, 100]
    # top_n = [40]
    result_details = []
    for similarity in similarities:
        for method in similarity_methods:
            # result_details.append(SimilarityBasedDetails(, m))
            for n in top_n:
                result_details.append(NeighboursCountBasedDetails(similarity, method, n))
    # result_details.append(AverageRatingBasedDetails())
    return result_details
if __name__ == "__main__":
# runner = AccuracyMetricsRunner(SimilarityBasedDetails(0.05, "cosine"))
# runner.fold_results = [PartialResult(0.1, 0.1, 100, 90, 2)]
#
# runner2 = AccuracyMetricsRunner(SimilarityBasedDetails(0.05, "pearson"))
# runner2.fold_results = [PartialResult(0.2, 0.2, 100, 91, 3)]
#
# runner3 = AccuracyMetricsRunner(NeighboursCountBasedDetails(0.0, "cosine", 10))
# runner3.fold_results = [PartialResult(0.3, 0.3, 100, 92, 4)]
#
# runner4 = AccuracyMetricsRunner(NeighboursCountBasedDetails(0.0, "pearson_with_sw", 10))
# runner4.fold_results = [PartialResult(0.4, 0.4, 100, 93, 5)]
#
# LocalAccuracyMetricsRunner([runner, runner2, runner3, runner4]).save_summary("ml-1k")
metrics_details = generate_metrics_to_run()
metrics_runners = list(map(lambda x: AccuracyMetricsRunner(x), metrics_details))
basic_directory = "/Users/grzegorz.miejski/home/workspaces/datasets/movielens/prepared/"
dataset = "ml-100k"
print("Datasets = {}".format(dataset))
folds = (dataset, basic_directory + dataset + "/cross_validation/")
LocalAccuracyMetricsRunner(metrics_runners).play(folds[1], folds[0])
print("Finished")
<file_sep>package org.miejski.movies.recommender.infrastructure.configuration
import org.neo4j.ogm.config.Configuration
import org.neo4j.ogm.session.SessionFactory
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Profile
import org.springframework.data.neo4j.repository.config.EnableNeo4jRepositories
import org.springframework.data.neo4j.transaction.Neo4jTransactionManager
import org.springframework.transaction.annotation.EnableTransactionManagement
@EnableNeo4jRepositories(basePackages = arrayOf("org.miejski.movies.recommender.infrastructure.repositories"))
@EnableTransactionManagement
@Profile("integration")
@org.springframework.context.annotation.Configuration
open class Neo4jConfigIntegration {
lateinit @Autowired var neo4JConfigProperties: Neo4jConfigProperties
/** OGM configuration using the embedded driver (integration profile only). */
@Bean
open fun configuration(): Configuration {
    val config = Configuration()
    config.driverConfiguration()
        .setDriverClassName("org.neo4j.ogm.drivers.embedded.driver.EmbeddedDriver")
    return config
}
@Bean
open fun sessionFactory(): SessionFactory {
return SessionFactory(configuration(), "org.miejski.movies.recommender.domain")
}
@Bean
open fun neo4jTransactionManager(): Neo4jTransactionManager {
return Neo4jTransactionManager(sessionFactory())
}
}<file_sep>package org.miejski.movies.recommender.infrastructure.configuration
import com.codahale.metrics.MetricRegistry
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
@Configuration
open class MetricsConfiguration {
/** Single shared Dropwizard metrics registry for the application. */
@Bean
open fun metricRegistry(): MetricRegistry = MetricRegistry()
}<file_sep>class AverageRatingBasedDetails:
def prepare_metric_cypher(self, testFilePath):
prediction_cypher = "/Users/grzegorz.miejski/home/workspaces/private/magisterka/movies-recommender-api/movies-recommender-service/src/main/resources/cypher/average_predicted_rating_for_metric.cypher"
cypher_template = self.load_file(prediction_cypher)
prefix = """LOAD CSV WITH HEADERS FROM 'file://{}' AS line FIELDTERMINATOR '\\t'
WITH TOINT(line.user_id) as user, TOINT(line.movie_id) as movie, TOFLOAT(line.rating) as original_rating
""".format(testFilePath)
return prefix + cypher_template
def load_file(self, prediction_cypher):
with open(prediction_cypher) as c:
cypher = c.readlines()
return " ".join(cypher)
def get_result_file_name(self):
return "average_rating_based_prediction/accuracy.log"
def __str__(self):
return "AverageRatingBasedDetails()"<file_sep>package org.miejski.movies.recommender.infrastructure.dbstate.assertions
import org.miejski.movies.recommender.infrastructure.dbstate.Neo4jStateAssertion
import org.neo4j.ogm.session.Session
import java.util.*
class StrictDataAssertion(val session: Session) : Neo4jStateAssertion {
/**
 * Fails hard (throws) when the database contains no Person nodes at all;
 * returns true otherwise.
 */
override fun isOK(): Boolean {
    // Idiom fix: use Kotlin's Any instead of java.lang.Object as the map's
    // value type; behavior is unchanged.
    if (session.query("Match (p:Person) return p limit 1", HashMap<String, Any>()).count() == 0) {
        throw RuntimeException("StrictDataAssertion fail - no Persons in database!")
    }
    return true
}
override fun queryToExecute(): String {
TODO("Shouldn't be called at all")
}
override fun name(): String {
return "StrictDataAssertion"
}
}<file_sep>package org.miejski.movies.recommender.domain.user
import org.miejski.movies.recommender.domain.AbstractEntity
import org.miejski.movies.recommender.domain.rating.Rating
import org.neo4j.ogm.annotation.Relationship
class Person(
id: Long? = null,
var user_id: Long = -1,
@Relationship(type = "Rated", direction = Relationship.OUTGOING)
var ratedMovies: List<Rating> = emptyList(),
var avg_rating: Double = -1.0
) : AbstractEntity(id)<file_sep>package org.miejski.movies.recommender.domain.rating
import com.fasterxml.jackson.annotation.JsonIgnore
import org.miejski.movies.recommender.domain.movie.Movie
import org.miejski.movies.recommender.domain.user.Person
import org.neo4j.ogm.annotation.*
@RelationshipEntity(type = "Rated")
class Rating(
@GraphId
var id: Long? = null,
@StartNode
@JsonIgnore
var person: Person? = null,
@EndNode
var movie: Movie? = null,
@Property
var rating: Double? = null
)<file_sep>from neo4j.v1 import GraphDatabase, basic_auth
class Neo4jCypherExecutor:
def __init__(self, localhost="bolt://localhost:7699"):
self.driver = GraphDatabase.driver(localhost, auth=basic_auth("neo4j", "neo4j1234"))
def invoke(self, cypher, args = {}):
print( "Executing cypher : {}".format(cypher))
session = self.driver.session()
result = session.run(cypher, args)
list_result = list(result)
return list_result<file_sep>package org.miejski.movies.recommender.neo4j;
import org.neo4j.driver.v1.AuthTokens;
import org.neo4j.driver.v1.Driver;
import org.neo4j.driver.v1.GraphDatabase;
import org.neo4j.driver.v1.Record;
import org.neo4j.driver.v1.Session;
import org.neo4j.driver.v1.StatementResult;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Thin synchronous wrapper around the neo4j bolt driver.
 * Opens a single shared {@code Driver}; call {@link #close()} when done.
 */
public class CypherExecutor {
    private final Driver driver;

    public CypherExecutor() {
        // NOTE(review): the credentials look like scrubbed placeholders
        // ("<PASSWORD>"); real values must come from configuration.
        driver = GraphDatabase.driver("bolt://localhost:7699", AuthTokens.basic("<PASSWORD>", "<PASSWORD>"));
    }

    /**
     * Runs the query with the given parameters and returns the fully
     * materialized result records.
     */
    public List<Record> execute(String query, Map<String, Object> params) {
        // BUG FIX: the session was only closed on the success path; if
        // session.run(...) or run.list() threw, the session leaked.
        // Session is AutoCloseable, so use try-with-resources.
        try (Session session = driver.session()) {
            StatementResult run = session.run(query, params);
            return run.list();
        }
    }

    /** Convenience overload for parameterless queries. */
    public List<Record> execute(String query) {
        Map<String, Object> params = new HashMap<>();
        return execute(query, params);
    }

    /** Releases the underlying driver and all of its connections. */
    public void close() {
        driver.close();
    }
}
<file_sep>package org.miejski.movies.recommender.domain.metrics
import org.miejski.movies.recommender.api.metrics.MetricsResult
import org.miejski.movies.recommender.domain.metrics.accuracy.RealRating
import java.nio.file.Files
import java.nio.file.Paths
import java.util.concurrent.Callable
import java.util.concurrent.Executors
abstract class MetricsService<T> {
private var start: Long? = null
fun start() {
start = System.currentTimeMillis()
}
/**
 * Seconds elapsed since [start] was called.
 * @throws IllegalStateException when the timer was never started.
 */
fun timeInSeconds(): Double {
    val startedAt = start ?: throw IllegalStateException("Cannot measure time, without starting it first!")
    return (System.currentTimeMillis().toDouble() - startedAt.toDouble()) / 1000.0
}
/**
 * Loads a tab-separated ratings test file (skipping the header row) and
 * delegates to the metric implementation.
 */
fun run(testFilePath: String?): Double {
    // Robustness: fail fast with a clear message instead of an NPE inside
    // Paths.get when no path was supplied.
    requireNotNull(testFilePath) { "testFilePath must not be null" }
    val allLines = Files.readAllLines(Paths.get(testFilePath))
    val tasks = allLines.drop(1)
        .map { toRating(it) }
    return run(tasks)
}
/** Parses one tab-separated line: userId, movieId, rating, timestamp. */
protected fun toRating(line: String): RealRating {
    val (user, movie, rating, timestamp) = line.split("\t")
    return RealRating(user.toLong(), movie.toLong(), rating.toDouble(), timestamp.toLong())
}
/**
 * Runs [f] over every element of [inputList] on a temporary fixed thread
 * pool and returns the results in input order.
 */
protected fun <S, R> runAsyncAndGather(inputList: List<S>,
                                       f: (S) -> (R)): List<R> {
    val tasks = inputList.map {
        Callable<R>({ f(it) })
    }
    // NOTE(review): a fresh 15-thread pool is created per call; fine for a
    // batch metrics run, but consider a shared executor if called in a loop.
    val newFixedThreadPool = Executors.newFixedThreadPool(15)
    try {
        // Robustness: shut the pool down even when invokeAll or a task's
        // get() throws; otherwise its threads leak.
        val predictions = newFixedThreadPool.invokeAll(tasks)
        return predictions.map { it.get() }
    } finally {
        newFixedThreadPool.shutdown()
    }
}
abstract fun run(realRatings: List<RealRating>): Double
abstract fun finish(): MetricsResult<T>
}<file_sep>package org.miejski.movies.recommender.infrastructure.dbstate.assertions
import org.miejski.movies.recommender.infrastructure.dbstate.Neo4jStateAssertion
class PersonIndexAssertion : Neo4jStateAssertion {
override fun isOK(): Boolean {
return false
}
override fun queryToExecute(): String {
return "start_state/person_index.cypher"
}
override fun name(): String {
return "PersonIndexAssertion"
}
}<file_sep>rootProject.name = 'movies-recommender-api'
include 'movies-recommender-service'
include 'recommender-performance-tests'
include 'movies-recommender-metrics'
<file_sep>class MetricsResult:
def __init__(self, metric_name, keys, metrics, colors, labels, double_axis={}):
self.metric_name = metric_name
self.keys = keys
self.metrics = metrics
self.colors = colors
self.labels = labels
self.double_axis = double_axis
<file_sep>group 'org.miejski.recommender'
version '1.0-SNAPSHOT'
apply plugin: 'java'
apply plugin: 'scala'
sourceCompatibility = 1.8
targetCompatibility = 1.8
repositories {
mavenCentral()
}
ext {
gatling = '2.2.4'
}
dependencies {
compile "io.gatling:gatling-http:$gatling"
compile "io.gatling:gatling-app:$gatling"
compile "io.gatling.highcharts:gatling-charts-highcharts:$gatling"
compile "io.gatling:jsonpath_2.10:0.6.6"
compile "org.apache.httpcomponents:httpclient:4.5.2"
compile 'com.stackmob:newman_2.10:1.3.5'
compile "org.json4s:json4s-native_2.10:3.2.11"
compile "org.json4s:json4s-ext_2.10:3.2.11"
}
[compileJava, compileTestJava]*.options*.encoding = "UTF-8"
task loadTest(type: JavaExec) {
println name
if (project.hasProperty("simulation") && project.hasProperty("applicationUrl")) {
println project.properties
println "TestProperty: Simulation: $simulation"
println "TestProperty: Application url: $applicationUrl"
dependsOn testClasses
description = "load test with Gatling"
group = "Load Test"
classpath = sourceSets.test.runtimeClasspath
def minSimilarity = null
if (!project.hasProperty("min_similarity")) {
minSimilarity = "0.0"
} else {
minSimilarity = "$min_similarity"
}
def similarityMethod = null
if (!project.hasProperty("similarity_method")) {
similarityMethod = "similarity"
} else {
similarityMethod = "$similarity_method"
}
def neighbours = null
if (project.hasProperty("neighboursCount")) {
neighbours = "$neighboursCount"
}
def maxUsers = null
if (project.hasProperty("max_users")) {
maxUsers = "$max_users"
}
def waitInterval = null
if (project.hasProperty("wait_interval")) {
waitInterval = "$wait_interval"
}
def runTime = null
if (project.hasProperty("run_time")) {
runTime = "$run_time"
}
println "TestProperty: MinSimilarity: $minSimilarity"
println "TestProperty: SimilarityMethod: $similarityMethod"
println "TestProperty: neighboursCount: $neighbours"
println "TestProperty: maxUsers: $maxUsers"
println "TestProperty: waitInterval: $waitInterval"
println "TestProperty: runTime: $runTime"
jvmArgs = [
// workaround for https://github.com/gatling/gatling/issues/2689
"-Dgatling.core.directory.binaries=${sourceSets.test.output.classesDir.toString()}",
"-DapplicationUrl=$applicationUrl",
"-DminSimilarity=$minSimilarity",
"-DsimilarityMethod=$similarityMethod",
"-DneighboursCount=$neighbours",
"-DmaxUsers=$maxUsers",
"-DwaitInterval=$waitInterval",
"-DrunTime=$runTime",
'-XX:+UseG1GC',
'-Xmx8000m',
'-XX:-PrintGC',
'-XX:-PrintGCDetails',
'-XX:-PrintGCTimeStamps',
// '-Xloggc:/tmp/magisterka/perf/gc.log'
]
main = "io.gatling.app.Gatling"
args = [
"--simulation", simulation,
"--results-folder", "${buildDir}/gatling-results",
"--binaries-folder", sourceSets.test.output.classesDir.toString(), // ignored because of above bug
"--bodies-folder", sourceSets.test.resources.srcDirs.toList().first().toString() + "/gatling/bodies",
]
} else {
println "No simulation specified or no applicationUrl - quitting now!"
}
}
<file_sep>class Neo4jStateAssertions():
def __init__(self, neo4j_query_executor, assertions, ):
self.assertions = assertions
self.neo4j_query_executor = neo4j_query_executor
def run_assertions(self):
    """Execute every assertion whose precondition does not yet hold."""
    for assertion in self.assertions:
        if assertion.is_ok(self.neo4j_query_executor):
            continue
        print("Executing assertion: " + type(assertion).__name__)
        assertion.play(self.neo4j_query_executor)
<file_sep>import subprocess
class AnsibleRunner:
ansible_home = "/Users/grzegorz.miejski/home/workspaces/private/magisterka/movies-recommender-api/deployment"
application_home = "/Users/grzegorz.miejski/home/workspaces/private/magisterka/movies-recommender-api"
@staticmethod
def runApplication(ips, neo4j_host):
process = subprocess.Popen(['ansible-playbook', 'install-application.yaml',
'-i', AnsibleRunner.create_ips_argument(ips),
'-vvv',
'--extra-vars', AnsibleRunner.prepare_extra_variables(neo4j_host)],
cwd=AnsibleRunner.ansible_home,
stderr=subprocess.STDOUT,
env=AnsibleRunner.__get_env())
process.communicate()
return
@staticmethod
def runLocalApplication():
process = subprocess.Popen(['ansible-playbook', 'run-application-local.yaml',
'-vvv'],
# cwd=AnsibleRunner.ansible_home,
stderr=subprocess.STDOUT,
env=AnsibleRunner.__get_env())
process.communicate()
return
@staticmethod
def create_ips_argument(ips):
    """Render an ansible inventory string: comma-joined ips plus a trailing comma."""
    return "{},".format(",".join(ips))
@staticmethod
def prepare_extra_variables(neo4j_host):
return "neo4j_host=" + neo4j_host
@staticmethod
def start_performance_tests(application_ips):
host = 'http://' + application_ips[0] + ':8080'
process = subprocess.Popen(['./gradlew', 'loadTest',
'-Psimulation=org.miejski.movies.recommender.performance.RecommendationsSimulation',
'-PapplicationUrl=' + host,
'-Pmin_similarity=0.0',
'-Psimilarity_method=cosine',
'-PneighboursCount=30'],
cwd=AnsibleRunner.application_home,
stderr=subprocess.STDOUT,
env=AnsibleRunner.__get_env())
process.communicate()
return
@staticmethod
def killLocalApplication():
process = subprocess.Popen(['ansible-playbook', 'kill-application-local.yaml',
'-vvv'],
cwd=AnsibleRunner.ansible_home,
stderr=subprocess.STDOUT,
env=AnsibleRunner.__get_env())
process.communicate()
return
    @staticmethod
    def restartLocalNeo4j(db_name, verbose=False):
        """Restart the local Neo4j, pointing it at the given database folder."""
        command = ['ansible-playbook', 'restart-neo4j.yaml', '--extra-vars', "neo4j_db_folder=" + db_name]
        if verbose:
            command.append('-vvv')
        process = subprocess.Popen(
            command,
            cwd=AnsibleRunner.ansible_home,
            stderr=subprocess.STDOUT,
            env=AnsibleRunner.__get_env())
        process.communicate()
        return

    @staticmethod
    def runAccuracyMetricCypher(dataset, fold_name, cypher, verbose=False):
        """Execute the given cypher through the neo4j-shell playbook, writing
        results to `fold_name` for the given dataset.

        Raises:
            Exception: when ansible-playbook exits with a non-zero code.
        """
        # cypher = "MATCH (p:Person) return p.user_id limit 10"
        command = ['ansible-playbook', 'neo4j-shell-cypher.yaml', '--extra-vars',
                   AnsibleRunner._to_extra_vars({"dataset": dataset, "result_file": fold_name, "cypher": cypher})]
        if verbose:
            command.append('-vvv')
        process = subprocess.Popen(
            command,
            cwd=AnsibleRunner.ansible_home,
            stderr=subprocess.STDOUT,
            env=AnsibleRunner.__get_env())
        process.communicate()
        code = process.returncode
        if code != 0:
            # NOTE(review): this also fires for negative return codes (process
            # killed by a signal), despite the "greater than 0" wording.
            raise Exception("Return code greater than 0")
        return
    @staticmethod
    def __get_env():
        """Environment for every spawned subprocess.

        NOTE(review): the PATH is hard-coded to one developer machine; deriving
        it from os.environ (plus the venv bin dir) would make this portable.
        """
        return {
            "PATH": "/Users/grzegorz.miejski/home/workspaces/private/magisterka/movies-recommender-api/deployment/scripts/runner/bin:/Library/Frameworks/Python.framework/Versions/3.4/bin:/Users/grzegorz.miejski/programming/spark/spark-1.6.0-bin-hadoop2.6/bin:/Library/Frameworks/Python.framework/Versions/3.4/bin:/usr/local/go/bin/:/Users/grzegorz.miejski/programming/apache-cassandra-2.1.7/bin:/Users/grzegorz.miejski/programming/scala-2.11.4/bin:/Users/grzegorz.miejski/home/programs/apache-storm-0.9.4/bin:/usr/local/heroku/bin:/Users/grzegorz.miejski/home/programs/apache-storm-0.9.4/bin:/Users/grzegorz.miejski/home/maven/bin:/Users/grzegorz.miejski/home/mongodb/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/Users/grzegorz.miejski/.fzf/bin:/usr/local/sbin:/Users/grzegorz.miejski/programming/drivers"}
@staticmethod
def _to_extra_vars(params):
"returns prepared ansible extra args from dict"
result = ""
for k, v in params.items():
v = str(v)
value = v if " " not in v else "'{}'".format(v)
result += "{}={} ".format(k, value)
return result
    @staticmethod
    def remote_restart_neo4j(instanceIp, dataset, verbose=False):
        """Restart Neo4j on a remote instance with the given dataset folder."""
        command = ['ansible-playbook', 'remote-restart-neo4j.yaml',
                   '-i', '{},'.format(instanceIp),
                   '--extra-vars', AnsibleRunner._to_extra_vars({"neo4j_db_folder": dataset})]
        if verbose:
            command.append('-vvv')
        process = subprocess.Popen(
            command,
            cwd=AnsibleRunner.ansible_home,
            stderr=subprocess.STDOUT,
            env=AnsibleRunner.__get_env())
        process.communicate()
        return

    @staticmethod
    def prepare_test_driver(testDriverIp, service_nodes_ips, verbose=True):
        """Provision the Gatling test-driver host with the service node list."""
        ips = ";".join(service_nodes_ips)
        # NOTE(review): "'\'{}'\'" evaluates to ''<ips>'' (doubled single
        # quotes, no backslash survives) — confirm this is the quoting the
        # playbook actually expects.
        service_nodes = "'\'{}'\'".format(ips)
        command = ['ansible-playbook', 'prepare-test-driver.yaml',
                   '-i', '{},'.format(testDriverIp),
                   '--extra-vars', AnsibleRunner._to_extra_vars({"service_nodes_ips": service_nodes})]
        if verbose:
            command.append('-vvv')
        process = subprocess.Popen(
            command,
            cwd=AnsibleRunner.ansible_home,
            stderr=subprocess.STDOUT,
            env=AnsibleRunner.__get_env())
        process.communicate()
        return

    @staticmethod
    def run_tests_on_driver(testDriverIp, simulation_name, simulation_config, verbose=True):
        """Run the named simulation on the test driver; simulation_config is
        copied (not mutated) and forwarded via --extra-vars."""
        config = simulation_config.copy()
        config.update({"simulation_name": simulation_name})
        command = ['ansible-playbook', 'run-test-on-test-driver.yaml',
                   '-i', '{},'.format(testDriverIp),
                   '--extra-vars', AnsibleRunner._to_extra_vars(config)]
        if verbose:
            command.append('-vvv')
        process = subprocess.Popen(
            command,
            cwd=AnsibleRunner.ansible_home,
            stderr=subprocess.STDOUT,
            env=AnsibleRunner.__get_env())
        process.communicate()
        return
    @staticmethod
    def start_collecting_metrics(ips, verbose=True):
        """Start OS-metric (sar) collection on every given host.

        Raises:
            Exception: when no IPs are given or the playbook fails.
        """
        if len(ips) == 0:
            raise Exception("Cannot start collecting metrics without instance IP specified!")
        command = ['ansible-playbook', 'collect-os-metrics.yaml',
                   '-i', AnsibleRunner.create_ips_argument(ips), ]
        if verbose:
            command.append('-vvv')
        process = subprocess.Popen(
            command,
            cwd=AnsibleRunner.ansible_home,
            stderr=subprocess.STDOUT,
            env=AnsibleRunner.__get_env())
        process.communicate()
        if process.returncode > 0:
            raise Exception("Error running command: {}".format(str(command)))
        return

    @staticmethod
    def download_os_metrics(neo4j_node_ips, verbose=True):
        """Fetch collected OS metrics from each node, one playbook run per IP.

        neo4j_node_ips must be an iterable of IP strings (a bare string would
        be iterated character by character).
        """
        for nodeIp in neo4j_node_ips:
            command = ['ansible-playbook', 'download-os-metrics.yaml',
                       '-i', '{},'.format(nodeIp),
                       '--extra-vars', AnsibleRunner._to_extra_vars({'os_metrics_folder': nodeIp})
                       ]
            if verbose:
                command.append('-vvv')
            process = subprocess.Popen(
                command,
                cwd=AnsibleRunner.ansible_home,
                stderr=subprocess.STDOUT,
                env=AnsibleRunner.__get_env())
            process.communicate()
        return

    @staticmethod
    def run_warmup_on_driver(testDriverIp, simulation_name, warmup_config, verbose=True):
        """Run the warm-up simulation on the test driver before real tests."""
        config = warmup_config.copy()
        config.update({"simulation_name": simulation_name})
        command = ['ansible-playbook', 'run-warmup-on-test-driver.yaml',
                   '-i', '{},'.format(testDriverIp),
                   '--extra-vars', AnsibleRunner._to_extra_vars(config)]
        if verbose:
            command.append('-vvv')
        process = subprocess.Popen(
            command,
            cwd=AnsibleRunner.ansible_home,
            stderr=subprocess.STDOUT,
            env=AnsibleRunner.__get_env())
        process.communicate()
        return
    @staticmethod
    def runApplicationWithHAproxy(nodes_ips, neo4j_nodes_ips, verbose=False):
        """Deploy the application configured for an HA Neo4j cluster.

        The first neo4j node is treated as master, the rest as slaves.
        """
        master = neo4j_nodes_ips[0]
        slaves = neo4j_nodes_ips[1:]
        # NOTE(review): slaves are joined with the literal letter "a" while
        # prepare_test_driver joins node lists with ";" — this looks like a
        # typo; confirm against what install-application-HA-Neo4j.yaml expects.
        slave_nodes_ips = "a".join(slaves)
        slave_nodes_ips = "'\'{}'\'".format(slave_nodes_ips)
        command = ['ansible-playbook', 'install-application-HA-Neo4j.yaml',
                   '-i', AnsibleRunner.create_ips_argument(nodes_ips),
                   '--extra-vars',
                   AnsibleRunner._to_extra_vars({'master_node_ip': master,
                                                 'slave_ips': slave_nodes_ips})]
        if verbose:
            command.append('-vvv')
        process = subprocess.Popen(
            command,
            cwd=AnsibleRunner.ansible_home,
            stderr=subprocess.STDOUT,
            env=AnsibleRunner.__get_env())
        process.communicate()
        return
@staticmethod
def getNeo4jHAInitialHostsPort():
return "5001"
@staticmethod
def get_all_hosts_ips_string(all_hosts):
return ",".join(list(map(lambda x: "{}:{}".format(x, AnsibleRunner.getNeo4jHAInitialHostsPort()), all_hosts)))
@staticmethod
def runNeo4jHAMaster(master_node_ip, dataset, slave_nodes_ips, master_node_priv_ip, verbose=False):
service_id = 1
all_hosts = [master_node_priv_ip] + slave_nodes_ips
all_nodes_ips = AnsibleRunner.get_all_hosts_ips_string(all_hosts)
return AnsibleRunner.runNeo4jHANode(master_node_ip, all_nodes_ips, dataset, service_id, False, True)
@staticmethod
def runNeo4jHASlave(slave_ip, service_id, master_node_ip, dataset, slave_nodes_ips, ):
all_hosts = [master_node_ip] + slave_nodes_ips
all_nodes_ips = AnsibleRunner.get_all_hosts_ips_string(all_hosts)
return AnsibleRunner.runNeo4jHANode(slave_ip, all_nodes_ips, dataset, service_id, True, True)
    @staticmethod
    def runNeo4jHANode(node_public_ip, all_nodes_ips_string, dataset, service_id, is_slave, verbose=False):
        """(Re)start one Neo4j HA cluster member via ansible.

        all_nodes_ips_string: comma-separated `ip:port` pairs of every member.
        is_slave: passed to the playbook as is_slave_only.
        """
        command = ['ansible-playbook', 'remote-restart-neo4j-HA.yaml',
                   '-i', '{},'.format(node_public_ip),
                   '--extra-vars',
                   AnsibleRunner._to_extra_vars({'neo4j_db_folder': dataset,
                                                 'initial_hosts': all_nodes_ips_string,
                                                 'server_id': service_id,
                                                 'is_slave_only': is_slave})]
        if verbose:
            command.append('-vvv')
        process = subprocess.Popen(
            command,
            cwd=AnsibleRunner.ansible_home,
            stderr=subprocess.STDOUT,
            env=AnsibleRunner.__get_env())
        process.communicate()
        return
<file_sep>import datetime
import os
from ansible.AnsibleRunner import AnsibleRunner
from metrics import formatter
from metrics.AccMetrics import AccMetrics
class AccuracyMetrics:
    """Computes cross-validation accuracy metrics (RMSE/MAE) by running a
    prediction cypher directly against Neo4j through AnsibleRunner."""

    def __init__(self, result_folder="/tmp/magisterka/metrics/accuracy/", similarity=0.1,
                 similarity_method="similarity"):
        self.fold_results = []  # one PartialResult per processed fold
        self.result_folder = result_folder
        self.similarity = similarity  # minimal similarity substituted into the cypher
        self.similarity_method = similarity_method

    def run(self, testFilePath, dataset, fold):
        """Run the metric cypher for one fold and append its PartialResult."""
        print("Running accuracy metrics for dataset {} and fold {}".format(dataset, fold))
        start = datetime.datetime.now().replace(microsecond=0)
        test_ratings_count = self.__total_ratings_to_predict(testFilePath)
        prepared_cypher = self.__prepare_metric_cypher(testFilePath)
        AnsibleRunner.runAccuracyMetricCypher(dataset, fold,
                                              prepared_cypher)  # TODO inject mean common ratings instead of magic number 18 in cypher
        results = self.read_results(dataset, fold)
        end = datetime.datetime.now().replace(microsecond=0)
        # NOTE(review): PartialResult below receives five values (rmse, mae,
        # counts, time) but this module's PartialResult.__init__ declares only
        # four parameters — this raises TypeError at runtime; confirm the
        # intended signature (AccuracyMetricsRunner's PartialResult takes mae).
        self.fold_results.append(PartialResult(
            AccMetrics.calculate_rmse(results), AccMetrics.calculate_mae(results),
            test_ratings_count, len(results), (end - start).seconds)
        )
        print("Finished accuracy metrics for dataset {} and fold {}".format(dataset, fold))

    def finish(self, dataset):
        """Aggregate fold results, persist the summary, and return final RMSE."""
        print("Finishing accuracy metrics for dataset {}".format(dataset))
        result = FinalResult(self.fold_results)
        self.save_result(dataset, result.rmse, result.total_time, result.ratings_found_percentage)
        return result.rmse

    def save_result(self, test_name, rmse, time, percentageOfRatingsFound):
        """Write per-fold and final metrics under result_folder/test_name."""
        if not os.path.exists(self.result_folder + test_name):
            os.makedirs(self.result_folder + test_name)
        result_file_name = "/accuracy_method:{}_similarity:{}.log".format(self.similarity_method, self.similarity)
        result_path = "{}{}{}".format(self.result_folder, test_name, result_file_name)
        with open(result_path, mode="w") as result_file:
            result_file.write("Folds results = " + ",".join(map(lambda x: str(x), self.fold_results)) + '\n')
            result_file.write("Final RMSE = {}\n".format(rmse))
            result_file.write("Ratings found for movies: {0:.2f}%\n".format(percentageOfRatingsFound))
            result_file.write("Total time in seconds: {}\n".format(formatter.strfdelta(time, inputtype="s")))

    def __prepare_metric_cypher(self, testFilePath):
        """Prefix the prediction-cypher template with a LOAD CSV over the test
        file and substitute the similarity placeholders."""
        # NOTE(review): absolute path to the cypher resource is machine-specific.
        prediction_cypher = "/Users/grzegorz.miejski/home/workspaces/private/magisterka/movies-recommender-api/movies-recommender-service/src/main/resources/cypher/similarity_predicted_rating_for_metric.cypher"
        cypher_template = self.load_file(prediction_cypher)
        prefix = """LOAD CSV WITH HEADERS FROM 'file://{}' AS line FIELDTERMINATOR '\t'
WITH TOINT(line.user_id) as user, TOINT(line.movie_id) as movie, TOFLOAT(line.rating) as original_rating
""".format(testFilePath)
        ready_cypher = prefix + cypher_template.replace("{userId}", "user") \
            .replace("{movieId}", "movie") \
            .replace("{similarity}", str(self.similarity)) \
            .replace("{similarity_method}", self.similarity_method)
        return ready_cypher

    def load_file(self, prediction_cypher):
        """Read a cypher file and join its lines with single spaces."""
        with open(prediction_cypher) as c:
            cypher = c.readlines()
        return " ".join(cypher)

    def read_results(self, dataset, fold):
        """Parse the cypher result file (skipping the header) into a list of
        (value, value) float pairs taken from columns 2 and 3."""
        with open("{}{}/{}".format(self.result_folder, dataset, fold)) as results:
            line = results.readlines()[1:]
            a = map(lambda x: x.replace('"', ''), line)
            b = map(self.__parse_result_line, a)
            return list(b)

    def __parse_result_line(self, line):
        # Columns 2 and 3 hold the two rating values being compared.
        split = line.split(',')
        return float(split[2]), float(split[3])

    def __total_ratings_to_predict(self, testFilePath):
        """Count the lines in the test file — the number of ratings to predict."""
        with open(testFilePath) as f:
            for i, l in enumerate(f):
                pass
            return i + 1
class PartialResult:
    """Accuracy metrics collected for a single cross-validation fold."""

    def __init__(self, rmse, mae, test_ratings_count, ratings_predicted, time_in_seconds):
        # Fix: `mae` was missing from the signature even though the caller in
        # AccuracyMetrics.run passes five values (rmse, mae, counts, time),
        # which raised TypeError. The signature now matches both the call site
        # and the AccuracyMetricsRunner module's variant of this class.
        self.rmse = rmse
        self.mae = mae
        self.test_ratings_count = test_ratings_count
        self.ratings_predicted = ratings_predicted
        self.time_in_seconds = time_in_seconds

    def __str__(self):
        return "PartialResult(RMSE={}, ratings_predicted={}, time={})".format(self.rmse, self.ratings_predicted, self.time_in_seconds)
class FinalResult:
    """Aggregates per-fold PartialResults: mean RMSE, total time, coverage %."""

    def __init__(self, partial_results):
        if not partial_results:
            # Fail with a clear message instead of an opaque ZeroDivisionError
            # when no fold produced any results.
            raise ValueError("FinalResult requires at least one partial result")
        self.rmse = sum(map(lambda x: x.rmse, partial_results)) / float(len(partial_results))
        self.total_time = sum(map(lambda x: x.time_in_seconds, partial_results))
        ratings_found = sum(map(lambda x: x.ratings_predicted, partial_results))
        ratings_to_find = sum(map(lambda x: x.test_ratings_count, partial_results))
        # Percentage of test ratings for which a prediction was produced.
        self.ratings_found_percentage = ratings_found / ratings_to_find * 100
if __name__ == "__main__":
    # Manual run against a locally prepared ml-100k cross-validation fold.
    # NOTE(review): the test-file path is machine-specific.
    metrics = AccuracyMetrics(similarity=0.1)
    test_name = "ml-100k"
    metrics.run(
        "/Users/grzegorz.miejski/home/workspaces/datasets/movielens/prepared/ml-100k/cross_validation/ml-100k_test_0",
        test_name, "ml-100k_train_0")
    metrics.finish(test_name)
<file_sep>from performance.InstanceConfigurer import InstanceConfigurer
# Terminate every EC2 instance previously recorded by the instance configurer.
instance_configurer = InstanceConfigurer()
instance_configurer.load_existing_instances()
instance_configurer.killAllInstances()
<file_sep>import sys
# Render the nginx upstream config for the recommender API: argv[1] is a
# semicolon-separated list of node IPs, substituted into the ${servers}
# placeholder of the config template.
node_ips = sys.argv[1].split(';')
upstream_lines = ["server {}:8080;".format(ip) for ip in node_ips]
servers_block = "\n".join(upstream_lines) + "\n"
with open("/etc/nginx/conf.d/movie_recommender_api.conf.template") as template:
    rendered = template.read().replace("${servers}", servers_block)
with open("/etc/nginx/conf.d/movie_recommender_api.conf", mode="w") as target:
    target.write(rendered)
<file_sep>package org.miejski.movies.recommender
import org.springframework.boot.SpringApplication
import org.springframework.boot.autoconfigure.SpringBootApplication
import org.springframework.boot.autoconfigure.data.neo4j.Neo4jDataAutoConfiguration
import org.springframework.context.annotation.Configuration
/**
 * Spring Boot entry point. Neo4j data auto-configuration is excluded here;
 * a manual Neo4j/OGM configuration class exists elsewhere in the project.
 */
@SpringBootApplication(exclude = arrayOf(Neo4jDataAutoConfiguration::class))
@Configuration
open class Application {

    companion object {
        @JvmStatic fun main(args: Array<String>) {
            SpringApplication.run(Application::class.java, *args)
        }
    }
}<file_sep>from ansible.AnsibleRunner import AnsibleRunner
if __name__ == "__main__":
    # Fix: download_os_metrics iterates over its argument; passing a bare
    # string made it loop character-by-character ('1', '9', '2', ...), running
    # the playbook once per character. The IP must be wrapped in a list.
    AnsibleRunner.download_os_metrics(['192.168.3.11'])
<file_sep>package org.miejski.movies.recommender.api.metrics
import com.fasterxml.jackson.annotation.JsonIgnoreProperties
/** Request body for the metric-run endpoints; unknown JSON fields are ignored. */
@JsonIgnoreProperties(ignoreUnknown = true)
data class RunMetricsRequest constructor(val testFilePath: String?, val testName: String?) {
    // Secondary no-arg constructor so Jackson can instantiate the class.
    constructor() : this(null, null)
}<file_sep>from metrics_plot.metrics_result import MetricsResult
class CPUMetric:
    """Parses sar-style CPU logs into a MetricsResult for plotting."""

    def __init__(self):
        # Plot color per CPU series.
        self.colors = {
            "%user": "r",
            "%system": "b",
            "%iowait": 'g',
            "%idle": 'k'
        }
        # Axis labels used by the plotting layer.
        self.labels = {
            "x": 'time',
            "y": 'CPU usage %'
        }

    def read_metrics(self, metrics_dir):
        """SAR based metrics. Return MetricsResult object"""
        with open("{}/cpu.log".format(metrics_dir)) as metrics_file:
            keys, metrics = self.__read_metrics(metrics_file)
        return MetricsResult("cpu", keys, metrics, self.colors, self.labels)

    def __read_metrics(self, metrics_file):
        """Keep only the aggregate 'all'-CPU rows and split each into columns.

        NOTE(review): the column indices (2, 4, 5, 7) and the "PM" stripping
        assume a particular sar output format/locale — confirm against the
        collecting side before reusing elsewhere.
        """
        metrics = metrics_file.readlines()
        metrics = list(filter(lambda x: "all" in x, metrics))
        metrics_split = list(
            map(lambda x: list(filter(lambda p: len(p) > 0, x.replace(" ", "\t").split("\t"))), metrics))
        keys = list(map(lambda x: x[0].replace(" ", "").replace("PM", ""), metrics_split))
        user_cpu = self.__to_floats(list(map(lambda x: x[2], metrics_split)))
        system_cpu = self.__to_floats(list(map(lambda x: x[4], metrics_split)))
        iowait = self.__to_floats(list(map(lambda x: x[5], metrics_split)))
        idle = self.__to_floats(list(map(lambda x: x[7], metrics_split)))
        metrics = {
            "%user": user_cpu,
            "%system": system_cpu,
            "%iowait": iowait,
            "%idle": idle
        }
        return keys, metrics

    def __to_floats(self, l):
        # Convert space-padded numeric strings to floats.
        return list(map(lambda x: float(x.replace(' ', '')), l))
<file_sep>import datetime
import os
from ansible.AnsibleRunner import AnsibleRunner
from metrics import formatter
from metrics.AccMetrics import AccMetrics
class AccuracyMetricsRunner:
    """Runs accuracy metrics (RMSE/MAE) per fold using a pluggable
    metric_details object that supplies the cypher and result file name."""

    def __init__(self, metric_details, result_folder="/tmp/magisterka/metrics/accuracy/"):
        self.fold_results = []  # one PartialResult per processed fold
        self.metric_details = metric_details
        self.result_folder = result_folder

    def run(self, testFilePath, dataset, fold):
        """Execute the metric cypher for one fold and record its PartialResult."""
        print("Running accuracy metrics {} for dataset {} and fold {}".format(self.metric_details, dataset, fold))
        start = datetime.datetime.now().replace(microsecond=0)
        test_ratings_count = self.__total_ratings_to_predict(testFilePath)
        prepared_cypher = self.metric_details.prepare_metric_cypher(testFilePath)
        AnsibleRunner.runAccuracyMetricCypher(dataset, fold,
                                              prepared_cypher, verbose=False)
        results = self.read_results(dataset, fold)
        if len(results) == 0:
            # NOTE(review): reached when the cypher produced no result rows;
            # the message deserves clearer wording and probably a hard failure.
            print("Shit happened")
        end = datetime.datetime.now().replace(microsecond=0)
        self.fold_results.append(PartialResult(
            AccMetrics.calculate_rmse(results), AccMetrics.calculate_mae(results),
            test_ratings_count, len(results), (end - start).seconds)
        )
        print("Finished accuracy metrics for dataset {} and fold {}".format(dataset, fold))

    def finish(self, dataset):
        """Aggregate all folds, persist the summary, and return final RMSE."""
        print("Finishing accuracy metrics {} for dataset {}".format(self.metric_details, dataset))
        result = FinalResult(self.fold_results)
        self.save_result(dataset, result.rmse, result.mae, result.total_time, result.ratings_found_percentage)
        return result.rmse

    def save_result(self, test_name, rmse, mae, time, percentageOfRatingsFound):
        """Write the per-fold and final metrics to the metric-specific file."""
        result_folder = self.result_folder + test_name + "/"
        result_file_name = self.metric_details.get_result_file_name()
        full_result_path = "{}{}".format(result_folder, result_file_name)
        result_directory = os.path.dirname(full_result_path)
        if not os.path.exists(result_directory):
            os.makedirs(result_directory)
        with open(full_result_path, mode="w") as result_file:
            result_file.write("Folds results = " + ",".join(map(lambda x: str(x), self.fold_results)) + '\n')
            result_file.write("Final RMSE = {}\n".format(rmse))
            result_file.write("Final MAE = {}\n".format(mae))
            result_file.write("Ratings found for movies: {0:.2f}%\n".format(percentageOfRatingsFound))
            result_file.write("Total time in seconds: {}\n".format(formatter.strfdelta(time, inputtype="s")))

    def get_final_result_and_details(self):
        """Return (FinalResult, metric_details) for programmatic consumption."""
        fr = FinalResult(self.fold_results)
        return fr, self.metric_details

    def read_results(self, dataset, fold):
        """Parse the cypher result file (skipping the header) into float pairs."""
        with open("{}{}/{}".format(self.result_folder, dataset, fold)) as results:
            line = results.readlines()[1:]
            a = map(lambda x: x.replace('"', ''), line)
            b = map(self.__parse_result_line, a)
            return list(b)

    def __parse_result_line(self, line):
        # Columns 2 and 3 hold the two rating values being compared.
        split = line.split(',')
        return float(split[2]), float(split[3])

    def __total_ratings_to_predict(self, testFilePath):
        """Count the lines in the test file — ratings we need predictions for."""
        with open(testFilePath) as f:
            for i, l in enumerate(f):
                pass
            return i + 1
class PartialResult:
    """Per-fold accuracy result: error metrics, counts and wall-clock time."""

    def __init__(self, rmse, mae, test_ratings_count, ratings_predicted, time_in_seconds):
        self.rmse = rmse
        self.mae = mae
        self.test_ratings_count = test_ratings_count
        self.ratings_predicted = ratings_predicted
        self.time_in_seconds = time_in_seconds

    def __str__(self):
        template = "PartialResult(RMSE={}, ratings_predicted={}, time={})"
        return template.format(self.rmse, self.ratings_predicted, self.time_in_seconds)
class FinalResult:
    """Aggregate over all folds: mean RMSE/MAE, total runtime, coverage %."""

    def __init__(self, partial_results):
        if not partial_results:
            # Fail with a clear message instead of an opaque ZeroDivisionError
            # when no fold produced any results.
            raise ValueError("FinalResult requires at least one partial result")
        fold_count = float(len(partial_results))
        self.rmse = sum(map(lambda x: x.rmse, partial_results)) / fold_count
        self.mae = sum(map(lambda x: x.mae, partial_results)) / fold_count
        self.total_time = sum(map(lambda x: x.time_in_seconds, partial_results))
        ratings_found = sum(map(lambda x: x.ratings_predicted, partial_results))
        ratings_to_find = sum(map(lambda x: x.test_ratings_count, partial_results))
        # Percentage of test ratings for which a prediction was produced.
        self.ratings_found_percentage = ratings_found / ratings_to_find * 100
if __name__ == "__main__":
    # NOTE(review): AccuracyMetrics is not defined in this module (the class
    # here is AccuracyMetricsRunner, whose __init__ takes metric_details, not
    # similarity) — running this block raises NameError. Confirm whether this
    # demo should target the AccuracyMetrics module instead.
    metrics = AccuracyMetrics(similarity=0.1)
    test_name = "ml-100k"
    metrics.run(
        "/Users/grzegorz.miejski/home/workspaces/datasets/movielens/prepared/ml-100k/cross_validation/ml-100k_test_0",
        test_name, "ml-100k_train_0")
    metrics.finish(test_name)
<file_sep>package org.miejski.movies.recommender.infrastructure.dbstate
interface Neo4jStateAssertion {
    /** Human-readable assertion name, used for logging. */
    fun name(): String

    /** Classpath-relative cypher file executed to reach the desired DB state. */
    fun queryToExecute(): String

    // NOTE(review): idiomatic Kotlin would use Any instead of java.lang.Object here.
    fun queryToExecuteParams(): Map<String, Object> = emptyMap()

    /** Default implementation only logs and reports the state as NOT satisfied. */
    fun isOK(): Boolean {
        println("Neo4jStateAssertion: ${name()}")
        return false
    }
}<file_sep>from neo4j_state.assertions.simple_cypher_state_assertion import SimpleCypherStateAssertion
class PearsonSimilarityAssertion(SimpleCypherStateAssertion):
    """DB-state assertion for Pearson similarity relationships between Persons."""

    def play(self, Neo4jCypherExecutor):
        # Runs the similarity cypher to (re)create Similarity relationships.
        self.execute_query(Neo4jCypherExecutor, self.query_to_execute(), self.arguments())

    def is_ok(self, Neo4jCypherExecutor):
        # Satisfied as soon as at least one Similarity relationship exists.
        result = Neo4jCypherExecutor.invoke(self.__check_query())
        return len(result) > 0

    def query_to_execute(self):
        return "similarity_pearson.cypher"

    def arguments(self):
        # The pearson cypher takes no parameters.
        return {}

    def __check_query(self):
        return "MATCH (p:Person)-[s:Similarity]->(p2:Person) return s limit 1"  # TODO change for exists(...)
<file_sep>#!/usr/bin/env bash
# Restarts the local Neo4j against a named database directory by repointing
# the graph.db symlink, then launching Neo4j in the background.
# TODO check if necessary
baseDirectory="$NEO4J_HOME/data/databases/"
kill -9 `ps aux | grep neo4j | grep -v grep | grep /usr/bin/java | awk 'NR==1{print $2}'` # kill any existing neo4j instance (first matching java process)
if [ $# -eq 0 ]
then
    echo "Usage: $0 databaseNameToUse"
    exit 1
fi
databaseDirectory=$1
symlinkName="graph.db"
if [ -d "$baseDirectory$databaseDirectory" ]; then # database exists
    if [ -d "$baseDirectory$symlinkName" ]; then # clear previous symlink
        unlink $baseDirectory$symlinkName
    fi
    ln -s $baseDirectory$databaseDirectory $baseDirectory$symlinkName # symlink to use given database
    echo "Neo4j database in use: $databaseDirectory"
else
    echo "No database exists at $baseDirectory$databaseDirectory"
    exit 1
fi
nohup $NEO4J_HOME/bin/neo4j console > neo4j-start.out 2> neo4j-start.err < /dev/null &<file_sep>buildscript {
// Build configuration: Kotlin + Spring Boot service with Neo4j/OGM and
// Spock/Groovy tests, plus a dedicated integration-test source set.
buildscript {
    ext.kotlin_version = "1.0.0"
    ext.spring_boot_version = '1.5.0.RELEASE'
    repositories {
        mavenCentral()
    }
    dependencies {
        classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
        classpath "org.springframework.boot:spring-boot-gradle-plugin:$spring_boot_version"
    }
}

apply plugin: 'java'
apply plugin: 'idea'
apply plugin: 'kotlin'
apply plugin: 'spring-boot'
apply plugin: 'groovy'
apply plugin: 'application'

repositories {
    mavenCentral()
}

// Exclude conflicting logging bindings from every configuration.
configurations {
    all*.exclude module: 'spring-boot-starter-logging'
    all*.exclude module: 'slf4j-nop'
}

dependencies {
    compile "org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version"
    compile("org.springframework.boot:spring-boot-starter-web")
    compile('org.springframework.data:spring-data-neo4j:4.2.3.RELEASE')
    compile 'com.google.guava:guava:19.0'
    compile "org.neo4j:neo4j-ogm:$ogm_version"
    compile group: "com.fasterxml.jackson.core", name: 'jackson-databind', version: jackson
    compile group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-jsr310', version: jackson
    compile group: 'com.fasterxml.jackson.datatype', name: 'jackson-datatype-jdk8', version: jackson
    compile group: 'io.dropwizard', name: 'dropwizard-core', version: '1.0.2'
    compile "org.neo4j.app:neo4j-server:$neo4j"
    testCompile group: 'org.spockframework', name: 'spock-core', version: spock
    testCompile group: 'org.spockframework', name: 'spock-spring', version: spock
    testCompile('com.jayway.restassured:rest-assured:2.6.0') {
        exclude group: "org.codehaus.groovy", module: "groovy-all"
    }
    testCompile "org.neo4j:neo4j-kernel:$neo4j"
    testCompile group: 'org.neo4j', name: 'neo4j-ogm-api', version: ogm_version
    testCompile group: 'org.neo4j', name: 'neo4j-ogm-test', version: ogm_version
    testCompile group: 'org.neo4j', name: 'neo4j-ogm-embedded-driver', version: ogm_version
    testCompile group: 'org.neo4j.test', name: 'neo4j-harness', version: "$neo4j"
    testCompile group: 'org.springframework.boot', name: 'spring-boot-starter-test', version: "$spring_boot_version"
    testCompile group: 'org.springframework', name: 'spring-test', version: '4.3.2.RELEASE'
    testCompile group: 'cglib', name: 'cglib-nodep', version: '2.2'
}

jar {
    baseName = 'movies-recommender-service'
    version = '0.1.0'
}

group = 'gmiejski'
mainClassName = "org.miejski.movies.recommender.Application"

// Integration tests live in src/integration/groovy and reuse the test resources.
sourceSets {
    test {
        groovy.srcDirs = ['src/test/groovy']
    }
    integration {
        groovy.srcDirs = ['src/integration/groovy']
        resources.srcDir 'src/test/resources'
        compileClasspath += main.output + test.output
        runtimeClasspath += main.output + test.output
    }
}

// NOTE(review): second `configurations` block — Gradle merges it with the one above.
configurations {
    integrationCompile.extendsFrom testCompile
    integrationRuntime.extendsFrom testRuntime
}

task integration(type: Test, description: 'Runs the integration tests.', group: 'Verification') {
    testClassesDir = sourceSets.integration.output.classesDir
    classpath = sourceSets.integration.runtimeClasspath
}<file_sep>package org.miejski.movies.recommender.infrastructure.configuration
import org.neo4j.ogm.config.Configuration
import org.neo4j.ogm.session.SessionFactory
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.context.annotation.Bean
import org.springframework.data.neo4j.repository.config.EnableNeo4jRepositories
import org.springframework.data.neo4j.transaction.Neo4jTransactionManager
import org.springframework.stereotype.Component
import org.springframework.transaction.annotation.EnableTransactionManagement
@EnableNeo4jRepositories(basePackages = arrayOf("org.miejski.movies.recommender.infrastructure.repositories"))
@EnableTransactionManagement
@org.springframework.context.annotation.Configuration
@Component
open class Neo4jConfig {

    // Populated by Spring after construction (field injection).
    lateinit @Autowired var neo4JConfigProperties: Neo4jConfigProperties

    /** OGM configuration using the HTTP driver against the configured host. */
    @Bean
    open fun getConfiguration(): Configuration {
        val config: Configuration = Configuration()
        val buildUri = getBuildUri()
        config
            .driverConfiguration()
            .setDriverClassName("org.neo4j.ogm.drivers.http.driver.HttpDriver")
            .setURI(buildUri)
        return config
    }

    /** Builds the Neo4j URI; credentials are omitted only when BOTH user and password are blank. */
    private fun getBuildUri(): String {
        if (neo4JConfigProperties.user.isNullOrBlank() && neo4JConfigProperties.password.isNullOrBlank()) {
            return "http://${neo4JConfigProperties.host}:${neo4JConfigProperties.port}"
        }
        return "http://${neo4JConfigProperties.user}:${neo4JConfigProperties.password}@${neo4JConfigProperties.host}:${neo4JConfigProperties.port}"
    }

    /** Session factory scanning the domain package for mapped entities. */
    @Bean
    open fun getSessionFactory(): SessionFactory {
        return SessionFactory(getConfiguration(), "org.miejski.movies.recommender.domain")
    }

    @Bean
    open fun neo4jTransactionManager(): Neo4jTransactionManager {
        return Neo4jTransactionManager(getSessionFactory())
    }
}<file_sep>import boto3
import paramiko
from ec2.EC2Instances import EC2Instances
from ec2.EC2Waiter import EC2Waiter
from scripts.ansible.AnsibleRunner import AnsibleRunner
class EC2Client:
    """Thin wrapper over the boto3 EC2 client/resource used by the test harness."""

    def __init__(self):
        self.ec2client = boto3.client('ec2')
        self.ec2 = boto3.resource('ec2')

    def createInstances(self, instance_type, count, purpose):
        """Launch `count` instances of `instance_type`, tagged with `purpose`.

        Returns the list of created instance ids.
        """
        response = self.ec2client.run_instances(
            DryRun=False,
            ImageId='ami-27dd7848',
            MinCount=count,
            MaxCount=count,
            KeyName='movies-recommender-service',
            SecurityGroups=[
                'movies-recommender-service-sg',
            ],
            InstanceType=instance_type,
            TagSpecifications=[
                {
                    'ResourceType': 'instance',
                    'Tags': [
                        {
                            'Key': 'purpose',
                            'Value': purpose
                        },
                    ]
                },
            ]
        )
        return [instance['InstanceId'] for instance in response['Instances']]

    def getInstances(self, ids=None, explicit=False):
        """Describe pending/running instances, optionally restricted to `ids`.

        If explicit - return empty instances list when ids argument is empty.
        """
        ids = [] if ids is None else ids  # avoid a shared mutable default argument
        if explicit and len(ids) == 0:
            return EC2Instances()
        response = self.ec2client.describe_instances(
            InstanceIds=ids,
            Filters=[
                {
                    'Name': 'instance-state-name',
                    'Values': [
                        'pending', 'running'
                    ]
                },
            ],
        )
        return EC2Instances.fromJson(response)

    def killAllInstances(self, ids=None):
        """Terminate the given instances and block until termination completes."""
        ids = [] if ids is None else ids  # avoid a shared mutable default argument
        self.ec2client.terminate_instances(InstanceIds=ids)
        EC2Waiter.waitForTerminatedState(ids)
<file_sep>import os
import requests
class HttpAccuracyMetrics():
    """Drives the accuracy-metric HTTP endpoints of a locally running service."""

    def __init__(self, result_folder="/tmp/magisterka/metrics/accuracy/"):
        self.result_folder = result_folder
        self.fold_results = []  # raw JSON responses, one per fold

    def run(self, testFilePath, test_name):
        """POST one fold's test file to /metrics/accuracy and record the response."""
        print("Running accuracy metrics: " + test_name)
        response = requests.post('http://localhost:8080/metrics/accuracy',
                                 json={'testFilePath': testFilePath, 'testName': test_name}, )
        response_json = response.json()
        self.fold_results.append(response_json)
        print("Fold result = " + str(response_json))

    def finish(self, test_name):
        """Fetch the aggregated result, persist it, and return the final RMSE."""
        print("Finishing accuracy metrics for " + test_name)
        response = requests.get('http://localhost:8080/metrics/accuracy/result')
        response_json = response.json()
        rmse = response_json["result"]
        time = response_json["timeInSeconds"]
        percentageOfRatingsFound = response_json["others"]["percentageOfFoundRatings"]
        print("Total RMSE = {}\nRatings found for movies: {}%\nTotal time in seconds: {}".format(rmse,
                                                                                                percentageOfRatingsFound,
                                                                                                time))
        self.save_result(test_name, rmse, time, percentageOfRatingsFound)
        return rmse

    def save_result(self, test_name, rmse, time, percentageOfRatingsFound):
        """Write per-fold and final metrics under result_folder/test_name."""
        if not os.path.exists(self.result_folder + test_name):
            os.makedirs(self.result_folder + test_name)
        with open(self.result_folder + test_name + "/accuracy.log", mode="w") as result_file:
            result_file.write("Folds results = " + ",".join(map(lambda x: str(x), self.fold_results)) + '\n')
            result_file.write("Final RMSE = {}\n".format(rmse))
            result_file.write("Ratings found for movies: {0:.2f}%\n".format(percentageOfRatingsFound))
            result_file.write("Total time in seconds: {0:.2f}s\n".format(time))
if __name__ == "__main__":
    # Manual smoke test against a locally running service.
    # NOTE(review): the test-file path is machine-specific.
    metrics = HttpAccuracyMetrics()
    test_name = "testNameMote"
    metrics.run(
        "/Users/grzegorz.miejski/home/workspaces/datasets/movielens/prepared/ml-100k/cross_validation/ml-100k_test_0",
        test_name)
    metrics.finish(test_name)
<file_sep>package org.miejski.movies.recommender.state;
import org.jetbrains.annotations.NotNull;
import org.miejski.movies.recommender.infrastructure.dbstate.Neo4jStateAssertion;
import org.miejski.movies.recommender.neo4j.CypherExecutor;
import java.util.HashMap;
import java.util.Map;
/**
 * Neo4j state assertion checking that Person nodes carry a precomputed
 * {@code avg_rating} property, produced by start_state/average_rating.cypher.
 */
public class AvgRatingStateAssertion implements Neo4jStateAssertion {

    private final CypherExecutor executor;

    public AvgRatingStateAssertion(CypherExecutor cypherExecutor) {
        this.executor = cypherExecutor;
    }

    @NotNull
    @Override
    public String name() {
        return "AvgRatingStateAssertion";
    }

    @NotNull
    @Override
    public String queryToExecute() {
        return "start_state/average_rating.cypher";
    }

    @NotNull
    @Override
    public Map<String, Object> queryToExecuteParams() {
        // The average-rating cypher takes no parameters.
        return new HashMap<>();
    }

    @Override
    public boolean isOK() {
        // Satisfied as soon as a single Person with avg_rating exists.
        return !executor.execute("Match (p:Person) where exists(p.avg_rating) return p limit 1").isEmpty();
    }
}
<file_sep>package org.miejski.movies.recommender.infrastructure.configuration
import org.springframework.boot.context.properties.ConfigurationProperties
import org.springframework.stereotype.Component
/** Binds the `neo4j.*` application properties (host, port, credentials). */
@Component
@ConfigurationProperties(prefix = "neo4j")
class Neo4jConfigProperties {
    // lateinit: populated by Spring's configuration binding after construction.
    lateinit var host: String
    lateinit var port: String
    lateinit var user: String
    lateinit var password: String
}
<file_sep>package org.miejski.movies.recommender.helper
import com.google.common.primitives.Primitives
import org.neo4j.ogm.model.Result
class QueryResultMapper {
    /**
     * Maps each row of an OGM [Result] onto an instance of [type]: for every
     * declared field the row value under the field's name is taken, falling
     * back to a default-constructed instance of the (boxed) field type when
     * absent; the values are then passed positionally to the first constructor.
     *
     * NOTE(review): this relies on declaredFields order matching the first
     * constructor's parameter order — confirm that holds for all mapped classes.
     */
    fun <T> convert(type: Class<T>, result: Result): List<T> {
        val a = result.map { singleResult -> type.declaredFields.map { field -> Pair(field.name, field.type) }
            .map { field -> singleResult.getOrElse(field.first, {Primitives.wrap(field.second).newInstance()}) } }
            .map { args -> type.constructors.first().newInstance(*args.toTypedArray())
            }
        return a.toList() as List<T>
    }
}

/** Extension sugar: `result.castTo(Foo::class.java)`. */
fun <T> Result.castTo(castedClass: Class<T>) : List<T> {
    return QueryResultMapper().convert(castedClass, this)
}<file_sep>from ec2.EC2Instance import EC2Instance
class EC2Instances():
    """View over a list of EC2Instance objects with convenience accessors."""

    def __init__(self, instances=None):
        # Fix: the previous `instances=[]` default was a single list shared by
        # every default-constructed EC2Instances; mutating one object's
        # `.instances` leaked into all the others. Each instance now gets its
        # own fresh list.
        self.instances = [] if instances is None else instances

    def ips(self):
        """Public IPs of all held instances."""
        return list(map(lambda x: x.publicIp, self.instances))

    def ids(self):
        """Instance ids of all held instances."""
        return list(map(lambda x: x.instanceId, self.instances))

    def private_ips(self):
        """Private (VPC) IPs of all held instances."""
        return list(map(lambda x: x.privateIp, self.instances))

    @staticmethod
    def fromJson(json):
        """Build EC2Instances from a describe_instances response dict."""
        if len(json['Reservations']) == 0:
            return EC2Instances()
        result = []
        for reservation in json['Reservations']:
            instances = reservation['Instances']
            for instance in instances:
                result.append(EC2Instance.fromJson(instance))
        return EC2Instances(result)
<file_sep>package org.miejski.movies.recommender.domain.queries
import java.util.stream.Collectors
open class QueriesLoader {
    companion object {
        // All cypher resources live under this classpath prefix.
        val CYPHER_FILES_PATH: String = "cypher/"
    }

    /** Loads a cypher resource from the classpath, collapsed to a single line. */
    open fun loadCypherQuery(name: String): String {
        val toPath = toPath(name)
        return this.javaClass.classLoader.getResourceAsStream(toPath)
            .bufferedReader()
            .lines()
            .collect(Collectors.joining(" "))
    }

    private fun toPath(cypherFileName: String): String {
        return CYPHER_FILES_PATH + cypherFileName
    }
}<file_sep>package org.miejski.movies.recommender.domain.movie
import org.miejski.movies.recommender.domain.AbstractEntity
/**
 * Graph node for a movie; `movie_id` is the external dataset id
 * (defaults to -1 — presumably a "not set" sentinel; confirm with callers).
 */
class Movie(
    id: Long? = null,
    var movie_id: Long = -1
) : AbstractEntity(id)
) : AbstractEntity(id)<file_sep>package org.miejski.movies.recommender.api.metrics
import org.miejski.movies.recommender.domain.metrics.accuracy.AccuracyMetricService
import org.miejski.movies.recommender.domain.metrics.decisionSupport.PrecisionAndRecallService
import org.miejski.movies.recommender.domain.metrics.rank.RankMetricService
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.http.MediaType
import org.springframework.http.ResponseEntity
import org.springframework.web.bind.annotation.RequestBody
import org.springframework.web.bind.annotation.RequestMapping
import org.springframework.web.bind.annotation.RequestMethod
import org.springframework.web.bind.annotation.RestController
@RestController
open class MetricsController @Autowired constructor(val accuracyMetricService: AccuracyMetricService,
val rankMetricService: RankMetricService,
val precisionAndRecallService: PrecisionAndRecallService) {
@RequestMapping(
value = "/metrics/accuracy",
method = arrayOf(RequestMethod.POST),
consumes = arrayOf(MediaType.APPLICATION_JSON_VALUE))
open fun accuracyMetrics(@RequestBody runMetricsRequest: RunMetricsRequest): ResponseEntity<Double> {
val resultRmse = accuracyMetricService.run(runMetricsRequest.testFilePath)
return ResponseEntity.ok(resultRmse)
}
@RequestMapping(
value = "/metrics/accuracy/result",
method = arrayOf(RequestMethod.GET)
)
open fun getAccuracyMetricResult(): ResponseEntity<MetricsResult<Double>> {
return ResponseEntity.ok(accuracyMetricService.finish())
}
@RequestMapping(
value = "/metrics/precision",
method = arrayOf(RequestMethod.POST),
consumes = arrayOf(MediaType.APPLICATION_JSON_VALUE)
)
open fun rankMetric(@RequestBody runMetricsRequest: RunMetricsRequest): ResponseEntity<Double> {
return ResponseEntity.ok(precisionAndRecallService.run(runMetricsRequest.testFilePath))
}
@RequestMapping(
value = "/metrics/precision/result",
method = arrayOf(RequestMethod.GET)
)
open fun getDecisionSupportResult(): ResponseEntity<MetricsResult<Pair<Double, Double>>> {
return ResponseEntity.ok(precisionAndRecallService.finish())
}
}<file_sep># deployment
All scripts running performance tests and metrics tests based on Ansible playbooks and python
source /Users/grzegorz.miejski/home/workspaces/private/magisterka/movies-recommender-api/deployment/scripts/runner/bin/activate
## APPLICATION METRICS
### Prepare datasets for local run
use scripts/kcrossvalidation/FoldsCreator.py
### Running metrics locally:
`./run-metrics-local.sh [dataset]`
example:
`./run-metrics-local.sh ml-100k`
Or to run all metrics for given settings run:
`python local_accuraty_metrics_runner.py`
## Running performance tests:
### local run:
<file_sep>package org.miejski.movies.recommender.domain.metrics.decisionSupport
import java.util.*
class DecisionSupportAccumulator() {
val times = ArrayList<Double>()
val precisionResults = ArrayList<Double>()
val recallResults = ArrayList<Double>()
fun saveResult(precisionResult: Double, recallResult: Double, timeInSeconds: Double) {
precisionResults.add(precisionResult)
recallResults.add(recallResult)
times.add(timeInSeconds)
}
}<file_sep>package org.miejski.movies.recommender;
import org.miejski.movies.recommender.api.metrics.MetricsResult;
import org.miejski.movies.recommender.domain.metrics.accuracy.AccuracyMetricService;
import org.miejski.movies.recommender.infrastructure.dbstate.Neo4jStarStateAsserter;
import org.miejski.movies.recommender.infrastructure.dbstate.assertions.MovieIndexAssertion;
import org.miejski.movies.recommender.infrastructure.dbstate.assertions.PersonIndexAssertion;
import org.miejski.movies.recommender.neo4j.CypherExecutor;
import org.miejski.movies.recommender.ratings.PredictionerService;
import org.miejski.movies.recommender.state.AvgRatingStateAssertion;
import org.miejski.movies.recommender.state.DataImportedStateAssertion;
import org.miejski.movies.recommender.state.Similarity1StateAssertion;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
public class MainClass {

    /**
     * Standalone runner: asserts that the Neo4j database is in the expected
     * start state (indexes, imported data, averages, similarities) and then
     * computes the accuracy (RMSE) metric for a train/test split.
     *
     * <p>Expected arguments: args[0] = train data file path, args[1] = test
     * data file path.
     */
    public static void main(String[] args) throws IOException {
        // TODO(review): hard-coded debug override of the CLI arguments —
        // remove before running this entry point with real parameters.
        args = new String[]{
                "/Users/grzegorz.miejski/home/workspaces/datasets/movielens/prepared/ml-100k/cross_validation/ml-100k_train_0",
                "/Users/grzegorz.miejski/home/workspaces/datasets/movielens/prepared/ml-100k/cross_validation/ml-100k_test_0"};
        Logger logger = LoggerFactory.getLogger(MainClass.class);
        logger.info("START");
        // Fix: the original check was inverted (args.length != 0) and did not
        // actually abort; require both paths and quit when they are missing.
        if (args.length < 2) {
            logger.info("no args! quit");
            return;
        }
        String trainDataPath = args[0];
        String testDataPath = args[1];
        final CypherExecutor cypherExecutor = new CypherExecutor();
        try {
            logger.info("START - state assertions");
            new Neo4jStarStateAsserter((cypher, queryToExecuteParams) -> {
                HashMap<String, Object> stringHashMap = new HashMap<>(queryToExecuteParams);
                cypherExecutor.execute(cypher, stringHashMap);
            },
                    () -> Arrays.asList(
                            new PersonIndexAssertion(),
                            new MovieIndexAssertion(),
                            new DataImportedStateAssertion(trainDataPath, cypherExecutor),
                            new AvgRatingStateAssertion(cypherExecutor),
                            new Similarity1StateAssertion(cypherExecutor)))
                    .run();
            logger.info("START - metric calculation");
            AccuracyMetricService accuracyMetricService = new AccuracyMetricService(new PredictionerService(cypherExecutor));
            accuracyMetricService.run(testDataPath);
            MetricsResult<Double> finish = accuracyMetricService.finish();
            logger.info(finish.getResult().toString());
            logger.info(Arrays.toString(args));
        } finally {
            // Always release the Neo4j driver resources, even when an
            // assertion or the metric run fails.
            cypherExecutor.close();
        }
        logger.info("END");
    }
}
<file_sep>package org.miejski.movies.recommender.domain.metrics.accuracy
import java.util.*
class AccuracyAccumulator {
val times = ArrayList<Double>()
val results = ArrayList<Double>()
val foundRatingsCounts = ArrayList<Int>()
val orderedPredictiosCounts = ArrayList<Int>()
fun saveResult(result: Double, timeInSeconds: Double, predictedRatings: Int, orderedRatings: Int) {
results.add(result)
times.add(timeInSeconds)
foundRatingsCounts.add(predictedRatings)
orderedPredictiosCounts.add(orderedRatings)
}
}<file_sep>package org.miejski.movies.recommender.domain.metrics.accuracy
open class RMSEMetric {
companion object {
fun calculate(predictedRatings: List<Pair<Double, Double>>): Double {
val mse = predictedRatings
.map { (it.first - it.second) * (it.first - it.second) }
.sum() / predictedRatings.size.toDouble()
return Math.sqrt(mse)
}
}
}<file_sep>class Neo4jInstances:
    def __init__(self, instances):
        # Plain container; `instances` is stored as-is (the expected element
        # type is not visible here — presumably Neo4j node descriptors).
        self.instances = instances
<file_sep>
class SimpleCypherStateAssertion:
    """Base class for assertions that apply a start-state Cypher script."""

    # Folder containing the start-state Cypher scripts.
    query_folder = "/Users/grzegorz.miejski/home/workspaces/private/magisterka/movies-recommender-api/movies-recommender-service/src/main/resources/cypher/start_state/"

    def execute_query(self, neo4j_query_executor, query_to_execute, arguments=None):
        """Reads <query_folder>/<query_to_execute> and runs it via the executor.

        Fix: the default for `arguments` was a shared mutable dict ({});
        use None so state cannot leak between calls if a callee mutates it.
        """
        if arguments is None:
            arguments = {}
        cypher_file = "{}{}".format(SimpleCypherStateAssertion.query_folder, query_to_execute)
        cypher = self.__read_cypher_from_file(cypher_file)
        neo4j_query_executor.invoke(cypher, arguments)

    def __read_cypher_from_file(self, file):
        # Joins the raw lines with spaces; trailing newlines are kept.
        with open(file) as f:
            text = f.readlines()
        return ' '.join(text)
<file_sep>class AvailableSimilarities():
    @staticmethod
    def available_methods():
        # Names of the similarity relationship variants the service supports.
        return ["similarity", "pearson_with_sw", "cosine"]

    @staticmethod
    def available_methods_string():
        # Human-readable form, e.g. "[similarity,pearson_with_sw,cosine]".
        return "[{}]".format(",".join(AvailableSimilarities.available_methods()))

    @staticmethod
    def is_available(method):
        # True when `method` is one of the supported similarity names.
        return method in AvailableSimilarities.available_methods()
<file_sep>package org.miejski.movies.recommender.api
import org.miejski.movies.recommender.domain.recommendations.MovieRecommendation
import org.miejski.movies.recommender.domain.recommendations.RecommendationsService
import org.slf4j.LoggerFactory
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.web.bind.annotation.PathVariable
import org.springframework.web.bind.annotation.RequestMapping
import org.springframework.web.bind.annotation.RequestParam
import org.springframework.web.bind.annotation.RestController
@RestController
class RecommendationsController @Autowired constructor(val recommendationsService: RecommendationsService) {
private val logger = LoggerFactory.getLogger(RecommendationsController::class.java)
@RequestMapping(value = "/recommendations/user/{userId}")
fun getRecommendedMovies(@PathVariable("userId") userId: Long,
@RequestParam(name = "minSimilarity", required = false) minSimilarity: Double?,
@RequestParam(name = "similarityMethod", required = false) similarityMethod: String?,
@RequestParam(name = "neighboursCount", required = false) neighboursCount: Int?): List<MovieRecommendation> {
val minSim = minSimilarity ?: 0.0
val simMethod = similarityMethod ?: "similarity"
val recommendations = recommendationsService.findRecommendedMovies(userId, minSim, simMethod, neighboursCount)
logger.info("Movies: ${recommendations.size}, minSimilarity: $minSim, similarityMethod: $simMethod, neighboursCount: $neighboursCount, userId: $userId")
if ( recommendations.isEmpty()) {
throw RuntimeException("Error - recommendations are empty for user $userId!")
}
return recommendations
}
}<file_sep>package org.miejski.movies.recommender.api.metrics
import com.fasterxml.jackson.annotation.JsonIgnoreProperties
@JsonIgnoreProperties(ignoreUnknown = true)
data class MetricsResult<T> constructor(val result: T, val timeInSeconds: Double, val others: Map<String, Any>) {
}<file_sep>package org.miejski.movies.recommender.domain.metrics.accuracy
import org.miejski.movies.recommender.api.metrics.MetricsResult
import org.miejski.movies.recommender.domain.metrics.MetricsService
import org.miejski.movies.recommender.domain.recommendations.RecommendationsServiceI
import org.slf4j.LoggerFactory
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@Service
open class AccuracyMetricService @Autowired constructor(val recommendationsService: RecommendationsServiceI) : MetricsService<Double>() {
    private val logger = LoggerFactory.getLogger(AccuracyMetricService::class.java)
    // Collects per-run results until finish() aggregates and resets them.
    private var accuracyAccumulator = AccuracyAccumulator()

    /**
     * Computes RMSE for one batch of real ratings: each rating is paired with
     * the predicted rating, pairs without a prediction (predicted <= 0) are
     * dropped, and the run's result and timing are recorded in the accumulator.
     */
    override fun run(realRatings: List<RealRating>): Double {
        start()
        logger.info("Looking for predictions for {} ratings.", realRatings.size)
        // Pair layout: (actual rating, predicted rating).
        val predictedRatings = runAsyncAndGather(realRatings,
                { Pair(it.rating, recommendationsService.predictedRating(it.person, it.movie)) })
                .filter { it.second > 0 }
        val result = RMSEMetric.calculate(predictedRatings)
        val timeInSeconds = timeInSeconds()
        accuracyAccumulator.saveResult(result, timeInSeconds, predictedRatings.size, realRatings.size)
        logger.info("Found rating for {} movies", predictedRatings.size)
        logger.info("Resulting rmse = {} in time: {} seconds", result, timeInSeconds)
        return result
    }

    /**
     * Aggregates all recorded runs (mean RMSE, total time, percentage of
     * ratings a prediction was found for) and resets the accumulator.
     * NOTE(review): calling finish() before any run() yields NaN averages —
     * confirm callers never do that.
     */
    override fun finish(): MetricsResult<Double> {
        val result = MetricsResult(accuracyAccumulator.results.average(),
                accuracyAccumulator.times.sum(),
                mapOf(Pair("percentageOfFoundRatings", accuracyAccumulator.foundRatingsCounts.sum().toDouble() / accuracyAccumulator.orderedPredictiosCounts.sum().toDouble() * 100.0)))
        accuracyAccumulator = AccuracyAccumulator()
        return result
    }
}
data class RealRating(val person: Long, val movie: Long, val rating: Double, val timestamp: Long) {}<file_sep>boto3
requests
ansible
neo4j-driver
locustio
pyzmq
moto
matplotlib<file_sep>group 'org.miejski.recommender'
version '1.0-SNAPSHOT'
buildscript {
repositories {
mavenCentral()
maven {
url "https://plugins.gradle.org/m2/"
}
}
dependencies {
classpath "gradle.plugin.com.github.lkishalmi.gradle:gradle-gatling-plugin:0.3.1"
}
}
apply plugin: "com.github.lkishalmi.gatling"
ext {
spock = '1.1-groovy-2.4-rc-3'
neo4j = '3.2.0'
jackson = '2.6.1'
ogm_version = '2.1.3'
}
task wrapper(type: Wrapper) {
gradleVersion = '2.14'
}<file_sep>import os
from neo4j_state.assertions.simple_cypher_state_assertion import SimpleCypherStateAssertion
from neo4j_state.neo4j_cypher_executor import Neo4jCypherExecutor
from ratings_in_common.ratings_in_common import RatingsInCommon
class MoviesInCommonAssertion(SimpleCypherStateAssertion):
    """Ensures Similarity relationships carry a movies_in_common property.

    Computes common-movie counts from the raw train file, writes them to a
    temporary file next to it, loads them into Neo4j via a Cypher script,
    then removes the temporary file.
    """

    def __init__(self, train_file):
        # Path to the tab-separated train ratings file.
        self.train_file = train_file

    # NOTE(review): the parameter name `Neo4jCypherExecutor` shadows the
    # imported class of the same name; it is actually an executor *instance*.
    def play(self, Neo4jCypherExecutor):
        data = MoviesInCommonAssertion.loadRatingsData(self.train_file)
        # Writes the per-pair counts to the side file consumed by the query.
        RatingsInCommon().results(data, self.__movies_in_common_file_path())
        self.execute_query(Neo4jCypherExecutor, self.query_to_execute(), self.arguments())
        self.__remove_common_movies_file()

    def is_ok(self, Neo4jCypherExecutor):
        # Considered applied as soon as one Similarity rel has the property.
        result = Neo4jCypherExecutor.invoke(self.__check_query())
        return len(result) > 0

    def query_to_execute(self):
        # Script name, resolved against SimpleCypherStateAssertion.query_folder.
        return "movies_in_common.cypher"

    def arguments(self):
        # The Cypher script loads the generated counts file via a file:// URI.
        return {"moviesInCommonFile": "file://{}".format(self.__movies_in_common_file_path())}

    def __check_query(self):
        return "MATCH (p:Person)-[s:Similarity]->(p2:Person) where exists(s.movies_in_common) return s limit 1"

    def __movies_in_common_file_path(self):
        return "{}-movies_in_common".format(self.train_file)

    @staticmethod
    def loadRatingsData(path):
        # Returns pairs of the first two tab-separated columns, skipping the
        # header line (lines[1:]). Presumably (user_id, movie_id) — confirm
        # against the dataset format.
        with open(path) as f:
            lines = f.readlines()
        return list(
            map(lambda line_splited: (line_splited[0], line_splited[1]),
                map(lambda x: x.split("\t"),
                    lines[1:])))

    def __remove_common_movies_file(self):
        os.remove(self.__movies_in_common_file_path())
if __name__ == "__main__":
    # Fix: MoviesInCommonAssertion.__init__ takes a single argument (the
    # train file path); the previous second argument raised a TypeError.
    a = MoviesInCommonAssertion(
        "/Users/grzegorz.miejski/home/workspaces/datasets/movielens/prepared/ml-100k/cross_validation/ml-100k_train_0")
    executor = Neo4jCypherExecutor(localhost="bolt://localhost:7687")
    if not a.is_ok(executor):
        a.play(executor)
<file_sep>package org.miejski.movies.recommender.infrastructure.repositories
import org.miejski.movies.recommender.domain.user.Person
import org.springframework.data.neo4j.annotation.Query
import org.springframework.data.neo4j.repository.GraphRepository
import org.springframework.data.repository.query.Param
import org.springframework.stereotype.Repository
@Repository
interface UsersRepository : GraphRepository<Person> {
    // All user_id property values in the graph.
    @Query("MATCH (n:Person) return n.user_id")
    fun getAllIds(): List<Int>

    // Upserts a Rated relationship between user and movie, setting its rating.
    @Query("MATCH (n:Person {user_id: {userId}}),(m:Movie {movie_id: {movieId}}) merge (n)-[r:Rated]-(m) SET r.rating={ratingValue} return n")
    fun addMovieRating(@Param("userId") userId: Long, @Param("ratingValue") rating: Double, @Param("movieId") movieId: Long)

    // Average of every rating the user has given.
    @Query("match (p:Person)-[r:Rated]->(m:Movie) where p.user_id = {userId} return avg(r.rating)")
    fun meanRating(@Param("userId") userId: Long): Double

    // Person matched by the user_id property, together with rated movies (if any).
    @Query("MATCH (p:Person) where p.user_id = {userId} optional match (p)-[r:Rated]->(m:Movie) return p,r,m")
    fun findOneByUserId(@Param("userId") userId: Long): Person
}<file_sep>from metrics_plot.metrics_result import MetricsResult
class DiskUtilityMetric:
    """Parses iostat-style disk logs into a MetricsResult (r/s, w/s, %util)."""

    def __init__(self):
        self.metric_name = "disk"
        # Matplotlib colour codes per series.
        self.colors = {
            "r/s": "r",
            "w/s": "b",
            "%util": 'k'
        }
        self.labels = {
            "x": 'time',
            'y_left': 'r/w per second',
            "y_right": 'disk utility %'
        }
        # r/s and w/s share the left axis; %util goes on the right axis.
        self.double_axis = {
            "left": ["r/s", "w/s"],
            "right": ["%util"]
        }

    def read_metrics(self, metrics_dir):
        """iostat based metrics. Return MetricsResult object"""
        with open("{}/{}.log".format(metrics_dir, self.metric_name)) as metrics_file:
            keys, metrics = self.__read_metrics(metrics_file)
        return MetricsResult(self.metric_name, keys, metrics, self.colors, self.labels, self.double_axis)

    def __parse_date(self, str):
        # Returns the HH:MM:SS portion of a timestamp line, or None for any
        # line that is not a timestamp (IndexError/strptime errors swallowed).
        # NOTE(review): `str` shadows the builtin and `datetime_object` is
        # unused; parsing is only used as a "is this a timestamp?" test.
        import time
        try:
            t = str.split(' ')[1]
            datetime_object = time.strptime(t, '%H:%M:%S')
            return t
        except Exception as e:
            return None

    def __read_metrics(self, metrics_file):
        # Walks the log expecting (timestamp, "Device" header, metric) triplets.
        keys = []
        metrics_results = {
            "r/s": [],
            "w/s": [],
            "%util": []
        }
        metrics = metrics_file.readlines()
        metrics = list(map(lambda x: x.replace('\n', '').replace(' ', '\t'), metrics))
        for index in range(0, len(metrics)):
            line = metrics[index]
            if "Linux" in line or len(line) == 0:
                continue
            log_time = self.__parse_date(line)
            if log_time is not None:
                keys.append(log_time)
                # NOTE(review): incrementing the loop variable of a range-based
                # for does NOT skip iterations — the header/metric lines are
                # revisited later, but produce no output because they fail the
                # timestamp test above. Confirm this is acceptable.
                index += 1
                second_line = metrics[index]
                if "Device" not in second_line:
                    raise Exception()
                index+=1
                metric_line = metrics[index]
                r = list(filter(lambda x: len(x) > 0, metric_line.replace(" ","\t").split('\t')))
                r = list(map(lambda x: x.strip(), r))
                # Field positions assume iostat -x layout: col 3 = r/s,
                # col 4 = w/s, last col = %util — TODO confirm against the
                # exact iostat invocation used to produce these logs.
                metrics_results["r/s"].append(r[3])
                metrics_results["w/s"].append(r[4])
                metrics_results["%util"].append(self.__get_util(r[-1]))
        return keys, metrics_results

    def __to_floats(self, l):
        # Converts numeric strings to floats; appears unused in this class.
        return list(map(lambda x: float(x.replace(' ', '')), l))

    def __get_util(self, param):
        """argument like: '0.89 0.89 0.16'"""
        return list(filter(lambda x: len(x) > 0, param.split(" ")))[-1]
<file_sep>import sys
args = sys.argv

# The playbook passes node IPs joined with the letter 'a' (safe for dotted
# IPv4 addresses); split them back into lists here.
master_node_ip = args[1].split('a')
slave_ips = [ip for ip in args[2].split('a') if len(ip) > 0]

# Fix: the log file was opened and never closed; a context manager closes it
# even when templating fails.
with open("/home/ec2-user/programming/haproxy/templater.log", mode='w') as logs:
    logs.writelines("Master nodes: {}\n".format(master_node_ip))
    logs.writelines("Slave nodes: {}\n".format(str(slave_ips)))
    logs.flush()

all_nodes = master_node_ip + slave_ips
static_port = 7474
# One upstream entry per node, all on the same HTTP port. (The previous
# zipped node-id list was never used in the output.)
prepared_strings = ["server {}:{};".format(node, static_port) for node in all_nodes]
text = "\n".join(prepared_strings) + "\n"

# Substitute the server list into the nginx config template.
with open("/etc/nginx/conf.d/nginx.conf.template") as template:
    lines = template.readlines()
final = ''.join(lines).replace("${servers}", text)
with open("/etc/nginx/conf.d/nginx.conf", mode="w") as target:
    target.write(final)
<file_sep>package org.miejski.movies.recommender.infrastructure.dbstate
interface CypherExecutor {
    // Executes a single Cypher statement with the given parameter map.
    // NOTE(review): the map value type is java.lang.Object rather than
    // Kotlin's Any; changing it would break every implementer, so it is
    // only flagged here.
    fun execute(cypher: String, queryToExecuteParams: Map<String, Object>)
}<file_sep>package org.miejski.movies.recommender.domain.user
import org.miejski.movies.recommender.api.user.dto.MovieRatingRequest
import org.miejski.movies.recommender.infrastructure.repositories.UsersRepository
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@Service
//@Transactional
open class UsersService @Autowired constructor(val usersRepository: UsersRepository) {
    // NOTE(review): findOne resolves by graph node id, while findUserById
    // below matches the domain user_id property — confirm callers use the
    // intended one.
    fun findUser(userId: Long): Person {
        return usersRepository.findOne(userId)
    }

    // All persons, materialized into a list.
    fun findAll(): List<Person> = usersRepository.findAll().toList()

    // The user_id property of every Person node.
    fun findAllIds(): List<Int> = usersRepository.getAllIds()
fun rateMovie(userId: Long, movieRating: MovieRatingRequest) {
if (movieRating.movieId != null && movieRating.rating != null) {
usersRepository.addMovieRating(userId, movieRating.rating, movieRating.movieId)
}
}
    // Average of all ratings the user has given (delegates to the repository query).
    fun getMeanRating(userId: Long): Double {
        return usersRepository.meanRating(userId)
    }

    // Person matched by the user_id property; the query also returns the
    // user's rated movies when present.
    fun findUserById(userId: Long): Person {
        return usersRepository.findOneByUserId(userId)
    }
fun save(person: Person): Person {
val save = usersRepository.save(person)
return save
}
}<file_sep>import matplotlib.pyplot as plt
from metrics_plot.cpu_metric import CPUMetric
from metrics_plot.disk_metric import DiskUtilityMetric
from metrics_plot.pagecache_metric import PageCacheMetric
class MetricsPlotter:
    """Renders collected OS metric series to PNG files under metrics_dir."""

    def __init__(self, metrics_dir):
        self.metrics_dir = metrics_dir
        # Roughly how many x-axis labels to keep readable on a plot.
        self.keys_count = 10

    def plot(self, metric_result):
        """Plots a MetricsResult and saves it as <metrics_dir>/<metric_name>.png."""
        metric_name = metric_result.metric_name
        keys = metric_result.keys
        metrics = metric_result.metrics
        colors = metric_result.colors
        labels = metric_result.labels
        double_axis = metric_result.double_axis
        # A non-empty double_axis mapping means the metric mixes two value
        # scales (e.g. ops/s on the left axis, percent on the right).
        if len(double_axis.keys()) == 0:
            f = self.__create_figure(keys, metrics, colors, labels)
        else:
            f = self.__create_double_axis_figure(keys, metrics, colors, labels, double_axis)
        self.__save_plot(metric_name, self.metrics_dir, f)

    def __save_plot(self, metric_name, metrics_dir, figure):
        figure.savefig("{}/{}.png".format(metrics_dir, metric_name), bbox_inches='tight')

    def __create_figure(self, keys, metrics, colors, labels):
        # Single-axis plot: every series shares one y scale.
        f = plt.figure()
        plt.ylabel(labels['y'])
        plt.xlabel('time')
        limited__x_keys = self.prepare_keys(keys)
        x = range(0, len(limited__x_keys))
        for key, values in metrics.items():
            plt.xticks(x, limited__x_keys, rotation=90)
            plt.plot(x, values, colors[key], label=key, linewidth=2.0)
        plt.legend(metrics.keys())
        return f

    def __create_double_axis_figure(self, keys, metrics, colors, labels, double_axis):
        # Two-axis plot: double_axis["left"]/["right"] name the series per axis.
        fig, ax1 = plt.subplots()
        limited__x_keys = self.prepare_keys(keys)
        x = range(0, len(limited__x_keys))
        plt.xticks(x, limited__x_keys, rotation=90)
        for left in double_axis["left"]:
            m = metrics[left]
            ax1.plot(x, m, colors[left], label=left, linewidth=2.0)
            # NOTE(review): the axis label/tick colour ends up being that of
            # the last series plotted on this axis.
            ax1.set_ylabel(labels["y_left"], color=colors[left])
            ax1.tick_params('y', colors=colors[left])
        ax1.set_xlabel('time')
        ax1.legend(loc=2)
        ax2 = ax1.twinx()
        for right in double_axis["right"]:
            m = metrics[right]
            ax2.plot(x, m, colors[right], label=right, linewidth=2.0)
            ax2.set_ylabel(labels["y_right"], color=colors[right])
            ax2.tick_params('y', color=colors[right])
        ax2.legend(loc=1)
        return fig

    def prepare_keys(self, keys):
        """Blanks out most x labels, keeping roughly `keys_count` of them.

        Fix: the original divided by a hard-coded 10 and raised
        ZeroDivisionError (`x % 0`) whenever fewer than 10 keys were given;
        the step is now clamped to at least 1 (and uses self.keys_count).
        """
        result = []
        important_keys_index_diff = max(1, len(keys) // self.keys_count)
        for x in range(0, len(keys)):
            if x % important_keys_index_diff == 0:
                result.append(keys[x])
            else:
                result.append("")
        return result
if __name__ == "__main__":
    # Ad-hoc local driver: plots the page-cache metric from a hard-coded dir;
    # the commented lines toggle the other metric types.
    metrics_dir = "/Users/grzegorz.miejski/magisterka/perf/os_metrics"
    # MetricsPlotter(metrics_dir).plot(CPUMetric().read_metrics(metrics_dir))
    # MetricsPlotter(metrics_dir).plot(DiskUtilityMetric().read_metrics(metrics_dir))
    MetricsPlotter(metrics_dir).plot(PageCacheMetric().read_metrics(metrics_dir))
<file_sep>package org.miejski.movies.recommender.domain.metrics.rank
import org.miejski.movies.recommender.api.metrics.MetricsResult
import org.miejski.movies.recommender.domain.metrics.MetricsService
import org.miejski.movies.recommender.domain.metrics.accuracy.RealRating
import org.miejski.movies.recommender.domain.recommendations.RecommendationsServiceI
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@Service
open class RankMetricService @Autowired constructor(val recommendationsService: RecommendationsServiceI) : MetricsService<Double>() {
    /** Not implemented: rank-metric aggregation is still a stub. */
    override fun finish(): MetricsResult<Double> {
        throw UnsupportedOperationException("not implemented")
    }

    /** Not implemented: rank-metric computation is still a stub. */
    override fun run(realRatings: List<RealRating>): Double {
        throw UnsupportedOperationException("not implemented")
    }
}<file_sep>package org.miejski.movies.recommender.infrastructure.configuration
import org.miejski.movies.recommender.infrastructure.dbstate.CypherExecutor
import org.neo4j.ogm.session.Session
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
@Configuration
open class CypherExecutorConfiguration {

    /**
     * Adapts the Neo4j OGM [Session] to the project's [CypherExecutor]
     * abstraction so database-state code does not depend on OGM directly.
     */
    @Bean
    open fun cypherExecutor(session: Session): CypherExecutor {
        val sessionBackedExecutor = object : CypherExecutor {
            override fun execute(cypher: String, queryToExecuteParams: Map<String, Object>) {
                session.query(cypher, queryToExecuteParams)
            }
        }
        return sessionBackedExecutor
    }
}
"Markdown",
"Gradle",
"Java",
"Python",
"Text",
"Kotlin",
"Shell"
] | 105 | Python | gmiejski/movies-recommender-api | d27654192efc7063e5d691efae9626775cb91940 | 12114f426cf047d915dfae6c3369f2074bd93907 |
refs/heads/main | <file_sep>//
// ViewController.swift
// BMI
//
// Created by Artur on 13/02/21.
// Copyright © 2021 Artur. All rights reserved.
//
import UIKit
class ViewController: UIViewController {

    @IBOutlet weak var weight: UITextField!
    @IBOutlet weak var height: UITextField!
    @IBOutlet weak var resLabel: UILabel!
    @IBOutlet weak var resImage: UIImageView!
    @IBOutlet weak var resView: UIView!

    /// Last computed body mass index (weight / height², i.e. kg / m²).
    var BMI: Double = 0

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    /// Reads weight and height from the text fields and shows the BMI category.
    @IBAction func calculate(_ sender: Any) {
        // Ignore taps while either field is empty/non-numeric, and guard
        // against a zero height (division by zero).
        if let weight = Double(weight.text ?? ""), let height = Double(height.text ?? ""), height > 0 {
            BMI = weight / (height * height)
            showResult()
        }
    }

    func showResult() {
        var result: String = ""
        var image: String = ""
        switch BMI {
        case 0..<16:
            result = "Thin"
            image = "magreza"
        // Fix: was 16.1..<18.5, which left 16.0...16.1 falling through to the
        // default (obesity) branch.
        case 16..<18.5:
            result = "Under ideal weight"
            image = "abaixo"
        case 18.5..<25:
            result = "Ideal Weight"
            image = "ideal"
        case 25..<30:
            result = "Overweight"
            image = "sobre"
        default:
            // Fix: BMI >= 30 was mislabeled "Thin" while showing the obesity
            // image ("obesidade").
            result = "Obese"
            image = "obesidade"
        }
        resLabel.text = result
        resImage.image = UIImage(named: image)
        resView.isHidden = false
    }
}
<file_sep># Body Mass Index App
## Project Description
Small iOS application made while studying. The user can enter his height and weight to see which body mass index (BMI) range he is in.
| acd86604fcbeee450fa9fca3ad4e40eb308f9a8b | [
"Swift",
"Markdown"
] | 2 | Swift | arturcpinheiro/BMI_Swift | fd4b550f0fe254f69a26ea3f6464a81aaf3af46a | 6bbc3298e64b90f0f6a3b74ec7b6257c231843e0 |
refs/heads/main | <file_sep>const GradientTags = (props) => {
const { tags, filter, setFilter } = props
return (
<div className="mt-3">
{tags.sort().map((el) => (
<button
type="button"
className="btn btn-sm me-2 mb-2 bg-dark text-white"
filter={filter}
onClick={() => setFilter(el)}
key={el}
disabled={el === filter}
>
{el}
</button>
))}
</div>
)
}
export default GradientTags
<file_sep>import Gradient from "./Gradient"
const GradientsList = (props) => {
let { filter, gradients, setFilter } = props
if (filter !== "all") {
gradients = gradients.filter((el) => el.tags.includes(filter))
}
return (
<ul className="row list-unstyled">
{gradients.map((el) => (
<Gradient
colorStart={el.start}
colorEnd={el.end}
name={el.name}
tags={el.tags}
key={el}
setFilter={setFilter}
filter={filter}
/>
))}
</ul>
)
}
export default GradientsList
<file_sep>import GradientsList from "./GradientsList"
import GradientsSelect from "./GradientsSelect"
import React from "react"
import { gradients } from "../gradients"
import Footer from "./Footer"
// Top-level component: owns the active tag filter and passes it down to the
// select control and the gradient list.
const GradientsApp = () => {
  // "all" shows every gradient; any other value is a tag name to filter by.
  const [filter, setFilter] = React.useState("all")
  return (
    <>
      <h1 className="text-center my-4">Alyra Gradients</h1>
      <main className="container">
        <GradientsSelect filter={filter} setFilter={setFilter} />
        <GradientsList
          filter={filter}
          setFilter={setFilter}
          gradients={gradients}
        />
      </main>
      <Footer />
    </>
  )
}
export default GradientsApp
| 6e1cb21b8a2da56d8bcc5702c423fe1c3927ce10 | [
"JavaScript"
] | 3 | JavaScript | Gregory-Alexis/gradients-project-start | 2c0deb3b1e628d0922ffce239bca61fc71a20dff | 2df323625e1032a4e4886b857e22078f31b9d754 |
refs/heads/master | <repo_name>raytroop/pratical_shell<file_sep>/kaggle_carvana_segmentation/README.md
Fork from [https://github.com/asanakoy/kaggle_carvana_segmentation](https://github.com/asanakoy/kaggle_carvana_segmentation)
<br>
# Kaggle Carvana Image Masking Challenge
Code for the 1st place solution in [Carvana Image Masking Challenge on car segmentaion](https://www.kaggle.com/c/carvana-image-masking-challenge/leaderboard).
We used CNNs to segment a car in the image.
To achieve best results we use an ensemble of several differnet networks (Linknet, Unet-like CNN with custom encoder, several types of Unet-like CNNs with VGG11 encoder).
Our team:
- <NAME> ([linkedin](https://www.linkedin.com/in/sanakoev/))
- <NAME> ([linkedin](https://www.linkedin.com/in/al-buslaev/))
- <NAME> ([linkedin](https://www.linkedin.com/in/iglovikov/))
Blogpost explaining the solution: http://blog.kaggle.com/2017/12/22/carvana-image-masking-first-place-interview/
# Requirements
To train final models you will need the following:
- OS: Ubuntu 16.04
- Required hardware:
- Any decent modern computer with x86-64 CPU,
- 32 GB RAM
- Powerful GPU: Nvidia Titan X (12Gb VRAM) or Nvidia GeForce GTX 1080 Ti. The more the better.
### Main software for training neural networks
- Cuda 8.0
- Python 2.7 and Python 3.5
- Pytorch 0.2.0
## Install
1. Install required OS and Python
2. Install packages with `pip install -r requirements.txt`
3. Set your paths in [congif/config.json](congif/config.json) :
- `input_data_dir`: path to the folder with input images (`train_hq`, `test_hq`), masks (`train_masks`) and `sample_submission.csv`
- `submissions_dir`: path to the folder which will be used to store predicted probability maps and submission files
- `models_dir`: path to the dir which will be used to store model snapshots. You should put downloaded model weights in this folder.
# Train all and predict all
If you want to train all the models and generate predicts:
- Run `bash train_and_predict.sh`
# Train models
We have several separate neural networks in our solution which we then combine in a final ensemble.
To train all the necessary networks:
- Run `bash train.sh`
After training finishes trained weights are saved in `model_dir` directory and can be used by prediction scripts.
Or you can directly use downloaded weights and skip the training procedure.
**Required time:** *It may require quite a long time depending on hardware used. Takes about 30-60 min per epoch depending on the network on a single Titan X Pascal GPU. Total time needed is about 2140 hours, which is ~90 days on a single Titan X Pascal. The required time can be reduced if you use more GPUs in parallel.*
# Predict
- Run `bash predict.sh`
It may take considerable amount of time to generate all predictions as there are a lot of data in test and we need to generate prediction for every single model and then average them. Some of the models use test time augmentation for the best model performance. Each single model takes about 5 hours to predict on all test images on a single Titan X GPU.
When all predictions are done they will be merged in a single file for submit.
File `ens_scratch2(1)_v1-final(1)_al27(1)_te27(1).csv.gz` that contains final predicted masks for all test images will be saved in `submissions_dir`.
**Required time:** *It may require quite a long time depending on hardware used. Takes from 4 to 8 hours per model to generate predictions on a single Titan X Pascal GPU. Total time needed is about 320 hours, which is ~13 days on a single Titan X Pascal. The required time can be reduced if you use more GPUs in parallel.*
# Remarks
Please, keep in mind that this isn't production-ready code but a very specific solution for the particular competition, created in a short time frame and with a lot of other constraints (limited training data, scarce computing resources and a small number of attempts to check for improvements).
Also, inherent stochasticity of neural networks training on many different levels (random initialization of weights, random augmentations and so on) makes it impossible to reproduce exact submission from scratch.
<file_sep>/main.sh
#!/bin/bash

# Demo: $PWD reflects the caller's working directory, not the location of
# the script being executed.
echo "\$PWD:"
echo $PWD

# pushd/popd: run sub.sh with subdir as the working directory.
pushd subdir > /dev/null
bash sub.sh
popd > /dev/null
echo

# $PWD depend on where you run the program rather than where the program is
bash subdir/sub.sh
exit 0<file_sep>/README.md
- **2>&1** and **tee**
LightGBM/lightgbm config=lightgbm.conf data=../data/higgs.train valid=../data/higgs.test objective=binary metric=auc **2>&1 | tee** lightgbm_higgs_accuracy.log
> `2>&1` 的意思就是将标准错误重定向到标准输出<br>
> `tee` 把输出保存到文件中,又在屏幕上看到输出内容
[https://github.com/guolinke/boosting_tree_benchmarks/blob/master/lightgbm/test_accuracy.sh](https://github.com/guolinke/boosting_tree_benchmarks/blob/master/lightgbm/test_accuracy.sh)
- `$PWD` depend on where you run the program rather than where the program is;
`mkdir` ... is relative to current working directory.
```bash
localhost:work$ bash main.sh
$PWD:
~/work
$PWD:
~/work/subdir
You are running sub.sh
$PWD:
~/work
You are running subdir/sub.sh
```
<br>
- `&&` and `||` operators cheatsheet
- `A ; B` Run A and then B, regardless of success of A
- `A && B` Run B if A succeeded
- `A || B` Run B if A failed
- `A &` Run A in background.
<br>
```
-d file 检查file是否存在并是一个目录
-e file 检查file是否存在
-f file 检查file是否存在并是一个文件
```
<br>
`-d`测试会检查指定的目录是否存在于系统中。如果你打算将文件写入目录或是准备切换到某
个目录中,先进行测试总是件好事情。
```bash
$ cat test11.sh
#!/bin/bash
# Look before you leap
#
jump_directory=/home/arthur
#
if [ -d $jump_directory ]
then
echo "The $jump_directory directory exists"
cd $jump_directory
ls
else
echo "The $jump_directory directory does not exist"
fi
#
```
<br>
- 向bash启动的python脚本传递参数
```bash
#! /bin/bash
python -c "import sys; print([sys.argv[i] for i in range(len(sys.argv))])" $@
echo "------------------------------"
python main.py $@
```
```python
# main.py
import sys
for arg in sys.argv:
print(arg)
```
运行bash script的输出:
```
['-c', '1', '2', '3']
------------------------------
main.py
1
2
3
```
**NOTE:** 注意上述差异
- [shell中各种括号()、(())、[]、[[]]、{}的作用](https://www.cnblogs.com/nkwy2012/p/9171414.html)
- 字符串比较用双中括号`[[ ]]`
- 算数比较用单中括号`[ ]`——**左右留空格**
- 算数运算用双小括号`(( ))`
- shell命令及输出用小括号`( )`——**左右不留空格**
- 快速替换用花括号`{ }`——**左右留空格**
- 反单引号起着命令替换的作用\` \`
<br>
- [shell 中 exit0 exit1 的区别](https://blog.csdn.net/super_gnu/article/details/77099395)
- exit(0):正常运行程序并退出程序;
- exit(1):非正常运行导致退出程序;
`exit 0`可以告知你的程序的使用者:你的程序是正常结束的。如果`exit`非`0`值,那么你的程序的使用者通常会认为你的程序产生了一个错误。
在 shell 中调用完你的程序之后,用`echo $?`命令就可以看到你的程序的`exit`值。在shell脚本中,通常会根据上一个命令的`$?`值来进行一些流程控制。
#### credits:
- [kaggle_carvana_segmentation](kaggle_carvana_segmentation/README.md)
<file_sep>/kaggle_carvana_segmentation/requirements.txt
matplotlib
numpy
opencv_python==3.2.0.7
pandas
scipy
tqdm==4.8.4
pathlib2==2.3.0
tqdm==4.8.4
joblib==0.7.1
pathlib2==2.3.0
Pillow==4.3.0
scikit_learn==0.19.0
tensorboardX==0.8
<file_sep>/kaggle_carvana_segmentation/ternaus/train.sh
#!/usr/bin/env bash
# Train a UNet11 model on each of the five cross-validation folds.
set -e  # abort if any command fails

#source activate py35_ternaus

# Build the per-fold train/val directory layout first.
python src/prepare_folds.py

# One full training run per fold (0..4).
for fold in 0 1 2 3 4; do
    python src/train.py --size 1280x1920 --device-ids 0,1,2,4 --batch-size 4 --fold "$fold" --workers 12 --lr 0.0001 --n-epochs 52
done

#source deactivate
<file_sep>/kaggle_carvana_segmentation/ternaus/src/train.py
import argparse
import json
from pathlib import Path
from typing import Dict, Tuple
import cv2
import numpy as np
import torch
import torch.backends.cudnn
import utils
from PIL import Image
from torch import nn
from torch.optim import Adam
from torch.utils.data import DataLoader, Dataset
from unet_models import Loss, UNet11
img_cols, img_rows = 1280, 1920
Size = Tuple[int, int]
class CarvanaDataset(Dataset):
    """Dataset of (normalized image tensor, binary mask tensor) pairs.

    Images are read from ``root/images/*.jpg`` and masks from
    ``root/masks/*``; the two lists are matched purely by sorted order.
    When ``to_augment`` is true, a random augmentation is applied per item.
    """

    def __init__(self, root: Path, to_augment=False):
        # TODO This potentially may lead to bug.
        # (images and masks are paired only by sorted filename order)
        self.image_paths = sorted((root / 'images').glob('*.jpg'))
        self.mask_paths = sorted((root / 'masks').glob('*'))
        self.to_augment = to_augment

    def __len__(self):
        return len(self.image_paths)

    def __getitem__(self, index):
        image = load_image(self.image_paths[index])
        mask = load_mask(self.mask_paths[index])
        if self.to_augment:
            image, mask = augment(image, mask)
        mask_tensor = torch.from_numpy(np.expand_dims(mask, 0))
        return utils.img_transform(image), mask_tensor
def grayscale_aug(img, mask):
    """Return a copy of *img* whose masked (car) region is converted to grayscale.

    The single-channel *mask* selects the car pixels; everything outside the
    mask keeps its original color.
    """
    masked_pixels = (cv2.cvtColor(mask, cv2.COLOR_GRAY2RGB) * img).astype(np.uint8)
    gray = cv2.cvtColor(masked_pixels, cv2.COLOR_RGB2GRAY)
    gray_rgb = cv2.cvtColor(gray, cv2.COLOR_GRAY2RGB)
    result = img.copy()
    selection = gray_rgb > 0
    result[selection] = gray_rgb[selection]
    return result
def augment(img, mask):
    """Randomly flip image+mask horizontally, then (half the time) apply
    either an HSV jitter or the grayscale-car augmentation to the image.

    The random draws happen in a fixed order so results are reproducible
    under a seeded RNG.
    """
    do_flip = np.random.random() < 0.5
    if do_flip:
        img = np.flip(img, axis=1)
        mask = np.flip(mask, axis=1)
    if np.random.random() < 0.5:
        use_hsv_jitter = np.random.random() < 0.5
        if use_hsv_jitter:
            img = random_hue_saturation_value(img,
                                              hue_shift_limit=(-50, 50),
                                              sat_shift_limit=(-5, 5),
                                              val_shift_limit=(-15, 15))
        else:
            img = grayscale_aug(img, mask)
    # Copies because np.flip returns views, which torch.from_numpy rejects.
    return img.copy(), mask.copy()
def random_hue_saturation_value(image,
                                hue_shift_limit=(-180, 180),
                                sat_shift_limit=(-255, 255),
                                val_shift_limit=(-255, 255)):
    """Jitter hue, saturation and value of a BGR image by uniform random shifts.

    Each ``*_limit`` is a (low, high) pair for the corresponding channel shift.
    Returns a new BGR image.
    """
    hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
    h, s, v = cv2.split(hsv)
    # Draw order (hue, then saturation, then value) is kept fixed for
    # reproducibility under a seeded RNG.
    h = cv2.add(h, np.random.uniform(hue_shift_limit[0], hue_shift_limit[1]))
    s = cv2.add(s, np.random.uniform(sat_shift_limit[0], sat_shift_limit[1]))
    v = cv2.add(v, np.random.uniform(val_shift_limit[0], val_shift_limit[1]))
    return cv2.cvtColor(cv2.merge((h, s, v)), cv2.COLOR_HSV2BGR)
def load_image(path: Path):
    """Read *path* as an RGB uint8 image, reflect-padding 1px on left/right.

    The 1px reflect border widens 1918px Carvana frames to 1920, a multiple
    of 32 as the network requires.
    """
    bgr = cv2.imread(str(path))
    padded = cv2.copyMakeBorder(bgr, 0, 0, 1, 1, cv2.BORDER_REFLECT_101)
    rgb = cv2.cvtColor(padded, cv2.COLOR_BGR2RGB)
    return rgb.astype(np.uint8)
def load_mask(path):
    """Load a binary segmentation mask as float32 in {0, 1}.

    GIF masks are binarized as "any nonzero pixel"; other formats are
    thresholded at half of 255. Width is reflect-padded by 1px on each side
    to match :func:`load_image`.
    """
    with open(path, 'rb') as f:
        with Image.open(f) as pil_image:
            if '.gif' in str(path):
                binary = np.asarray(pil_image) > 0
            else:
                binary = np.asarray(pil_image) > 255 * 0.5
    padded = cv2.copyMakeBorder(binary.astype(np.uint8), 0, 0, 1, 1, cv2.BORDER_REFLECT_101)
    return padded.astype(np.float32)
def validation(model: nn.Module, criterion, valid_loader) -> Dict[str, float]:
    """Evaluate *model* over *valid_loader*; return mean loss and mean dice.

    NOTE(review): written against the pre-0.4 PyTorch API
    (``volatile=True``, ``loss.data[0]``) — porting is required to run on
    modern PyTorch.
    """
    model.eval()
    losses = []
    dice = []
    for inputs, targets in valid_loader:
        # volatile=True disables autograd bookkeeping for inference (torch < 0.4).
        inputs = utils.variable(inputs, volatile=True)
        targets = utils.variable(targets)
        outputs = model(inputs)
        loss = criterion(outputs, targets)
        losses.append(loss.data[0])
        # Binarize predictions at 0.5 before measuring dice overlap.
        dice += [get_dice(targets, (outputs > 0.5).float()).data[0]]
    valid_loss = np.mean(losses)  # type: float
    valid_dice = np.mean(dice)
    print('Valid loss: {:.5f}, dice: {:.5f}'.format(valid_loss, valid_dice))
    # NOTE(review): the dice *score* is reported under the key 'dice_loss'.
    metrics = {'valid_loss': valid_loss, 'dice_loss': valid_dice}
    return metrics
def get_dice(y_true, y_pred):
epsilon = 1e-15
intersection = (y_pred * y_true).sum(dim=-2).sum(dim=-1)
union = y_true.sum(dim=-2).sum(dim=-1) + y_pred.sum(dim=-2).sum(dim=-1) + epsilon
return 2 * (intersection / union).mean()
def main():
    """Parse CLI args, build UNet11 and fold-specific loaders, and train."""
    parser = argparse.ArgumentParser()
    arg = parser.add_argument
    arg('--dice-weight', type=float)
    arg('--nll-weights', action='store_true')
    arg('--device-ids', type=str, help='For example 0,1 to run on two GPUs')
    arg('--fold', type=int, help='fold', default=0)
    arg('--size', type=str, default='1280x1920', help='Input size, for example 288x384. Must be multiples of 32')
    utils.add_args(parser)
    args = parser.parse_args()

    model_name = 'unet_11'
    # Checkpoints and logs live under MODEL_PATH/<model_name>.
    args.root = str(utils.MODEL_PATH / model_name)
    root = Path(args.root)
    root.mkdir(exist_ok=True, parents=True)

    model = UNet11()
    # NOTE(review): --device-ids has no default; omitting it crashes on split().
    device_ids = list(map(int, args.device_ids.split(',')))
    model = nn.DataParallel(model, device_ids=device_ids).cuda()
    loss = Loss()

    def make_loader(ds_root: Path, to_augment=False, shuffle=False):
        # Factory so train/val loaders share worker and batch-size settings.
        return DataLoader(
            dataset=CarvanaDataset(ds_root, to_augment=to_augment),
            shuffle=shuffle,
            num_workers=args.workers,
            batch_size=args.batch_size,
            pin_memory=True
        )

    # Per-fold directory layout produced by prepare_folds.py.
    train_root = utils.DATA_ROOT / str(args.fold) / 'train'
    valid_root = utils.DATA_ROOT / str(args.fold) / 'val'
    valid_loader = make_loader(valid_root)
    train_loader = make_loader(train_root, to_augment=True, shuffle=True)

    # Persist the full run configuration next to the checkpoints.
    root.joinpath('params.json').write_text(
        json.dumps(vars(args), indent=True, sort_keys=True))

    utils.train(
        init_optimizer=lambda lr: Adam(model.parameters(), lr=lr),
        args=args,
        model=model,
        criterion=loss,
        train_loader=train_loader,
        valid_loader=valid_loader,
        validation=validation,
        fold=args.fold
    )
# Entry point when executed as a script (see ternaus/train.sh).
if __name__ == '__main__':
    main()
<file_sep>/kaggle_carvana_segmentation/ternaus/src/utils.py
import json
import random
import shutil
from datetime import datetime
from itertools import islice
from pathlib import Path
import numpy as np
import torch
import tqdm
from torch import nn
from torch.autograd import Variable
from torchvision.transforms import ToTensor, Normalize, Compose
# Load repo configuration (data/submission/model directories) from
# config/config.json.
# NOTE(review): Path('__file__') is the *literal string* "__file__", so this
# resolves relative to the current working directory's parent, not relative
# to this module. Presumably it works because the scripts are launched from
# a fixed directory — confirm; Path(__file__) may have been intended.
config = json.loads(open(str(Path('__file__').absolute().parent.parent / 'config' / 'config.json')).read())
DATA_ROOT = Path(config['input_data_dir']).expanduser()
SUBMISSION_PATH = Path(config['submissions_dir']).expanduser()
MODEL_PATH = Path(config['models_dir']).expanduser()

cuda_is_available = torch.cuda.is_available()

# Standard ImageNet channel normalization applied to every input image.
img_transform = Compose([
    ToTensor(),
    Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
])
def variable(x, volatile=False):
    """Wrap ``x`` (or each element of a list/tuple) in an autograd Variable,
    moved to the GPU when available.

    ``volatile`` is the pre-0.4 PyTorch inference flag; lists/tuples are
    returned as a list of wrapped elements.
    """
    if isinstance(x, (list, tuple)):
        return [variable(y, volatile=volatile) for y in x]
    return cuda(Variable(x, volatile=volatile))
def cuda(x):
    """Move ``x`` to the GPU (non-blocking) when CUDA is available; otherwise
    return it unchanged.

    Fix: the original used ``x.cuda(async=True)`` — ``async`` became a
    reserved keyword in Python 3.7, making that line a SyntaxError.
    ``non_blocking=True`` is the equivalent argument (torch >= 0.4).
    """
    return x.cuda(non_blocking=True) if cuda_is_available else x
def write_event(log, step: int, **data):
    """Append one JSON line describing a training event to the *log* stream.

    The record carries every keyword argument plus the step number and an
    ISO-8601 timestamp; the stream is flushed immediately.
    """
    record = dict(data)
    record['step'] = step
    record['dt'] = datetime.now().isoformat()
    log.write(json.dumps(record, sort_keys=True) + '\n')
    log.flush()
def add_args(parser):
    """Register the training options shared by every script on *parser*."""
    add = parser.add_argument
    add('--root', default='models/unet_11', help='checkpoint root')
    add('--batch-size', type=int, default=24)
    add('--n-epochs', type=int, default=100)
    add('--lr', type=float, default=0.0001)
    add('--workers', type=int, default=8)
    add('--clean', action='store_true')
    add('--epoch-size', type=int)
def cyclic_lr(epoch, init_lr=1e-4, num_epochs_per_cycle=5, cycle_epochs_decay=2, lr_decay_factor=0.5):
    """Cyclic step-decay learning-rate schedule.

    The rate restarts at ``init_lr`` every ``num_epochs_per_cycle`` epochs
    and is multiplied by ``lr_decay_factor`` after each
    ``cycle_epochs_decay`` epochs within the cycle.
    """
    position_in_cycle = epoch % num_epochs_per_cycle
    decay_steps = position_in_cycle // cycle_epochs_decay
    return init_lr * lr_decay_factor ** decay_steps
def train(args, model: nn.Module, criterion, *, train_loader, valid_loader,
          validation, init_optimizer, fold=None, save_predictions=None, n_epochs=None):
    """Resumable training loop with cyclic LR, JSON-line logging and
    best-model tracking.

    Resumes from ``root/model_<fold>.pt`` if present; after each epoch the
    model is checkpointed, validated, and copied to ``best-model_<fold>.pt``
    when validation loss improves. Ctrl+C saves a snapshot and returns.
    Uses the pre-0.4 PyTorch API (``loss.data[0]``).
    """
    n_epochs = n_epochs or args.n_epochs
    root = Path(args.root)
    model_path = root / 'model_{fold}.pt'.format(fold=fold)
    best_model_path = root / 'best-model_{fold}.pt'.format(fold=fold)
    if model_path.exists():
        # Resume: restore weights plus epoch/step/best-loss bookkeeping.
        state = torch.load(str(model_path))
        epoch = state['epoch']
        step = state['step']
        best_valid_loss = state['best_valid_loss']
        model.load_state_dict(state['model'])
        print('Restored model, epoch {}, step {:,}'.format(epoch, step))
    else:
        epoch = 1
        step = 0
        best_valid_loss = float('inf')
    # Checkpoint helper; captures step/best_valid_loss from the enclosing scope.
    save = lambda ep: torch.save({
        'model': model.state_dict(),
        'epoch': ep,
        'step': step,
        'best_valid_loss': best_valid_loss
    }, str(model_path))
    report_each = 10  # batches between log records
    save_prediction_each = report_each * 20
    log = root.joinpath('train_{fold}.log'.format(fold=fold)).open('at', encoding='utf8')
    valid_losses = []
    for epoch in range(epoch, n_epochs + 1):
        # A fresh optimizer per epoch implements the cyclic LR schedule.
        lr = cyclic_lr(epoch)
        optimizer = init_optimizer(lr)
        model.train()
        random.seed()
        tq = tqdm.tqdm(total=(args.epoch_size or
                              len(train_loader) * args.batch_size))
        tq.set_description('Epoch {}, lr {}'.format(epoch, lr))
        losses = []
        tl = train_loader
        if args.epoch_size:
            # Optionally truncate the epoch to roughly epoch_size samples.
            tl = islice(tl, args.epoch_size // args.batch_size)
        try:
            mean_loss = 0
            for i, (inputs, targets) in enumerate(tl):
                inputs, targets = variable(inputs), variable(targets)
                outputs = model(inputs)
                loss = criterion(outputs, targets)
                optimizer.zero_grad()
                batch_size = inputs.size(0)
                step += 1
                tq.update(batch_size)
                losses.append(loss.data[0])
                # Running mean over the last `report_each` batches.
                mean_loss = np.mean(losses[-report_each:])
                tq.set_postfix(loss='{:.5f}'.format(mean_loss))
                # Gradients are scaled by batch size — presumably to undo the
                # criterion's size-averaging; NOTE(review): confirm against Loss.
                (batch_size * loss).backward()
                optimizer.step()
                if i and i % report_each == 0:
                    write_event(log, step, loss=mean_loss)
                    if save_predictions and i % save_prediction_each == 0:
                        # Rotate debug predictions through 5 slots.
                        p_i = (i // save_prediction_each) % 5
                        save_predictions(root, p_i, inputs, targets, outputs)
            write_event(log, step, loss=mean_loss)
            tq.close()
            # Saved epoch is the *next* one, so a restart resumes correctly.
            save(epoch + 1)
            valid_metrics = validation(model, criterion, valid_loader)
            write_event(log, step, **valid_metrics)
            valid_loss = valid_metrics['valid_loss']
            valid_losses.append(valid_loss)
            if valid_loss < best_valid_loss:
                best_valid_loss = valid_loss
                shutil.copy(str(model_path), str(best_model_path))
        except KeyboardInterrupt:
            # Graceful interrupt: snapshot the current epoch and stop.
            tq.close()
            print('Ctrl+C, saving snapshot')
            save(epoch)
            print('done.')
            return
def batches(lst, n):
    """Yield successive chunks of *lst*, each holding at most *n* items."""
    start = 0
    while start < len(lst):
        yield lst[start:start + n]
        start += n
<file_sep>/kaggle_carvana_segmentation/ternaus/src/prepare_folds.py
from pathlib import Path
import shutil
import pandas as pd
from tqdm import tqdm
import utils
if __name__ == '__main__':
    # Build per-fold train/val directory trees, then copy each car's 16
    # photos into its own fold's val split and into every other fold's
    # train split.
    global_data_path = utils.DATA_ROOT
    local_data_path = Path('.').absolute()
    local_data_path.mkdir(exist_ok=True)

    train_path = global_data_path / 'train_hq'
    mask_path = global_data_path / 'train_masks'
    train_file_list = train_path.glob('*')  # NOTE(review): unused — confirm before removing.

    # folds_csv.csv maps car id -> fold number.
    folds = pd.read_csv('src/folds_csv.csv')
    num_folds = folds['fold'].nunique()

    # Carvana frames are named <car_id>_<angle>.jpg with angles 01..16.
    angles = ['0' + str(x) for x in range(1, 10)] + [str(x) for x in range(10, 17)]

    # Create the <fold>/{train,val}/{images,masks} skeleton.
    for fold in range(num_folds):
        (local_data_path / str(fold) / 'train' / 'images').mkdir(exist_ok=True, parents=True)
        (local_data_path / str(fold) / 'train' / 'masks').mkdir(exist_ok=True, parents=True)
        (local_data_path / str(fold) / 'val' / 'images').mkdir(exist_ok=True, parents=True)
        (local_data_path / str(fold) / 'val' / 'masks').mkdir(exist_ok=True, parents=True)

    for i in tqdm(folds.index):
        car_id = folds.loc[i, 'id']
        fold = folds.loc[i, 'fold']
        # This car's frames go to its assigned fold's validation split...
        for angle in angles:
            old_image_path = train_path / (car_id + '_' + angle + '.jpg')
            new_image_path = local_data_path / str(fold) / 'val' / 'images' / (car_id + '_' + angle + '.jpg')
            shutil.copy(str(old_image_path), str(new_image_path))
            old_mask_path = mask_path / (car_id + '_' + angle + '_mask.gif')
            new_mask_path = local_data_path / str(fold) / 'val' / 'masks' / (car_id + '_' + angle + '_mask.gif')
            shutil.copy(str(old_mask_path), str(new_mask_path))
        # ...and to the training split of every other fold.
        for t_fold in range(num_folds):
            if t_fold == fold:
                continue
            for angle in angles:
                old_image_path = train_path / (car_id + '_' + angle + '.jpg')
                new_image_path = local_data_path / str(t_fold) / 'train' / 'images' / (car_id + '_' + angle + '.jpg')
                shutil.copy(str(old_image_path), str(new_image_path))
                old_mask_path = mask_path / (car_id + '_' + angle + '_mask.gif')
                new_mask_path = local_data_path / str(t_fold) / 'train' / 'masks' / (car_id + '_' + angle + '_mask.gif')
                shutil.copy(str(old_mask_path), str(new_mask_path))
<file_sep>/subdir/sub.sh
#!/bin/bash
# Demo: print the working directory and the script path, then make sure
# ./oneDir exists (relative to wherever the script is invoked from).
echo "\$PWD:"
echo $PWD
echo "You are running $0"
[ -d oneDir ] || mkdir oneDir
exit 0
"Markdown",
"Python",
"Text",
"Shell"
] | 9 | Markdown | raytroop/pratical_shell | cb6c84c95bf40675804fa143a736dadd54a32b24 | 9edc1b54cf348b9961454d66d623c298083f1301 |
refs/heads/master | <repo_name>30Apps30Days/04-pointmecompass<file_sep>/README.md
# Point Me Compass
[Google Play] - [Issues]
_Basic compass._
[Google Play]: https://play.google.com/store/apps/details?id=com.metaist.pointmecompass
[Issues]: https://github.com/30Apps30Days/04-pointmecompass/issues
## Build & Run
```bash
$ cordova platform add android --save
$ cordova build
$ cordova run
```
## License
Licensed under [MIT License].
[MIT License]: http://opensource.org/licenses/MIT
<file_sep>/www/app/index.js
function noop() {}
function bindEvents(thisArg, events) {
Object.keys(events).forEach(function (selector) {
Object.keys(events[selector]).forEach(function (event) {
var handler = events[selector][event].bind(thisArg);
if('document' === selector) {
document.addEventListener(event, handler, false);
} else if ('window' === selector) {
window.addEventListener(event, handler, false);
} else {
document.querySelectorAll(selector).forEach(function (dom) {
dom.addEventListener(event, handler, false);
});
}
});
}); // all events bound
}
function f(name, params) {
params = Array.prototype.slice.call(arguments, 1, arguments.length);
return name + '(' + params.join(', ') + ')';
}
// https://en.wikipedia.org/wiki/Points_of_the_compass
var CARDINAL_POINTS = {
8: [
'N', 'NE',
'E', 'SE',
'S', 'SW',
'W', 'NW'
],
16: [
'N', 'NNE', 'NE', 'ENE',
'E', 'ESE', 'SE', 'SSE',
'S', 'SSW', 'SW', 'WSW',
'W', 'WNW', 'NW', 'NNW'
],
32: [
'N', 'NbE', 'NNE', 'NEbN', 'NE', 'NEbE', 'ENE', 'EbN',
'E', 'EbS', 'ESE', 'SEbE', 'SE', 'SEbS', 'SSE', 'SbE',
'S', 'SbW', 'SSW', 'SWbS', 'SW', 'SWbW', 'WSW', 'WbS',
'W', 'WbN', 'WNW', 'NWbW', 'NW', 'NWbN', 'NNW', 'NbW'
]
};
function cardinalPoint(degrees, numPoints) {
numPoints = numPoints || 8;
var result = '';
var names = CARDINAL_POINTS[numPoints];
var slice = 360 / names.length;
for(var i = 0; i < names.length; i++) {
var name = names[i];
var min = (slice * i) - (slice / 2);
var max = (slice * i) + (slice / 2);
if ('N' === name && (degrees >= min + 360 || degrees <= max)) {
result = name;
break;
}//end if: special check for North
if (degrees >= min && degrees <= max) {
result = name;
break;
}//end if: bounds checked
}//end for: all points checked
if('' === result) {
console.error('ERROR: ' + degrees);
result = '—'
}//end if: check for errors
return result;
}
var app = {
// options
prefs: null,
frequency: 500, // milliseconds
numPoints: 8,
// internal
watch_id: null,
degrees: null, // degrees off North
orientation: 'portrait-primary',
$heading: null,
$compass: null,
$direction: null,
$orientation: null,
$frequency: null,
init: function () {
bindEvents(this, {
'document': {'deviceready': this.ready},
'window': {'orientationchange': this.orient},
'form input': {'change': this.change},
'#frequency': {'input': this.change}
});
return this;
},
ready: function () {
this.$heading = document.querySelector('#heading');
this.$compass = document.querySelector('#compass');
this.$direction = document.querySelector('#direction');
this.$orientation = document.querySelector('#orientation');
this.$frequency = document.querySelector('#frequency');
this.prefs = plugins.appPreferences;
this.prefs.fetch('frequency').then(function (value) {
this.frequency = value || 500;
this.$frequency.MaterialSlider.change(this.frequency);
}.bind(this));
this.prefs.fetch('numPoints').then(function (value) {
this.numPoints = value || 8;
document.querySelector('#numPoints-' + this.numPoints)
.parentElement.MaterialRadio.check();
}.bind(this));
this.orient();
this.start();
return this;
},
change: function () {
var freq = parseInt(this.$frequency.value, 10);
if (freq !== this.frequency) {
this.frequency = freq;
this.stop();
this.start();
}//end if: watch restarted
this.numPoints =
parseInt(document.querySelector('[name="numPoints"]:checked').value, 10);
this.prefs.store(noop, noop,'frequency', this.frequency);
this.prefs.store(noop, noop, 'numPoints', this.numPoints);
return this;
},
orient: function () {
this.orientation = screen.orientation.type;
return this;
},
render: function () {
var degrees = this.degrees || 0;
this.$direction.innerHTML = cardinalPoint(this.degrees, this.numPoints);
this.$heading.innerText = degrees;
this.$orientation.innerText = this.orientation;
this.$compass.style.transform =
'translateY(-50%) translateX(-50%) ' +
f('rotate', degrees + 'deg');
return this;
},
stop: function () {
if(!this.watch_id) { return this; }//nothing to do
navigator.compass.clearWatch(this.watch_id);
return this;
},
update_heading: function (heading) {
this.degrees = heading.trueHeading || 0;
switch(this.orientation) {
case 'portrait-primary':
break;
case 'landscape-primary':
this.degrees += 90;
break;
case 'landscape-secondary':
this.degrees -= 90;
break;
case 'portrait-secondary':
this.degrees += 180;
break;
}//end switch: adjustments made
this.degrees = Math.abs(this.degrees % 360).toFixed(2);
return this.render();
},
start: function () {
navigator.compass.getCurrentHeading(this.update_heading.bind(this));
this.watch_id = navigator.compass.watchHeading(
this.update_heading.bind(this), noop, {frequency: this.frequency}
);
return this.render();
}
};
app.init();
<file_sep>/CHANGELOG.md
# 1.0.0 (April 6, 2017)
Initial release.
| cce5751cd184f2bf971a46df1d87a98cd362fc90 | [
"Markdown",
"JavaScript"
] | 3 | Markdown | 30Apps30Days/04-pointmecompass | d2cfb7aa7064f4db4a415c6c213b251ca5607d5e | 3d9b799e4a61cac4e05c82adb629dde9b587cdd0 |
refs/heads/master | <file_sep>
/*
* get API
*/
var task = require('../models/task');
exports.task = function(req, res){
process.nextTick(function(){
var query = task.find({'fbId': req.user.fbId});
query.exec(function(err, tasks){
res.send(tasks);
});
});
};
exports.delTask = function (request, response) {
task.find({ _id:request.body._id },function(err,docs){
if(err)
{
response.send(404);
}
else{
console.log('found one. Deleting...');
docs.forEach(function(doc){
doc.remove();
});
response.send(200);
}
});
};
exports.addTask = function (request, response) {
var data = request.body;
var newTask = new task();
newTask.title = data.title || 'Default title';
newTask.text = data.description || 'Default description';
newTask.createdDate = data.createdDate || new Date();
newTask.dueDate = data.dueDate || new Date();
newTask.completed = data.completed || false;
newTask.fbId = request.user.fbId || 0;
newTask.save(function(err){
if(err){
throw err;
}
console.log("New task " + newTask.title + " was created");
response.send(200, newTask);
});
};
//OBS
//currently only updated dueDate and completed
exports.updateTask = function(request, response){
var data = request.body;
task.findOne({ _id:data._id },function(err,doc){
if(err)
{
response.send(404);
}
else{
console.log('found one. Updating...');
doc.completed = data.completed;
doc.dueDate = data.dueDate;
doc.save(function(err){
if(err){
throw err;
}
console.log("Updated task " + doc.title );
response.send(200, doc);
});
}
});
}
<file_sep>module.exports = {
dev:{
fb:{
appId: '',
appSecret: '',
url: 'http://kaizen.menmo.se/'
},
dbUrl: 'mongodb://localhost/kaizen'
}
} | 839e3180cff7b6ce1b681393f6c7a7f6a6090c23 | [
"JavaScript"
] | 2 | JavaScript | p41nfu11/Kaizen | 8645a5940b47d918b2a0202581c367478a1f684c | bee60ae27f1b4b67130b1d0b2c1decede43a7fc4 |
refs/heads/master | <file_sep># -*- coding: utf-8 -*-
import scrapy
class QuotesSpider(scrapy.Spider):
name = 'quotes' #项目唯一标识,用于初始化项目
allowed_domains = ['quotes.toscrape.com'] #定义要爬取得域名,不是该域名下的网址不处理
start_urls = ['http://quotes.toscrape.com/'] #要爬取得url列表,可以多个
def parse(self, response):
#print(response)
#pass
for qu in response.css('.quotes'):
text = qu.css('.text::text').extract_first() #::text 代表.text()
author = qu.css('.author::text').extract_first()
tags = qu.css('.tags .tag::text').extract()
print(text)
item = ScrapytestItem()
item['text'] = text
item['author'] = author
item['tags'] = tags
yield item
| 2df566077aac4f94239ab081f392ef426ec0ca15 | [
"Python"
] | 1 | Python | 3449708385/scrapyTest | 9c0065fa2a26dc84109cbe81e2c5cdab98769617 | 2d0e335d046b10c1b76f280527f54f3220b96669 |
refs/heads/master | <repo_name>jde-maga/crawler-test<file_sep>/src/crawler.js
const HCCrawler = require('headless-chrome-crawler');
const fs = require('fs');
const crawler = HCCrawler.launch({
evaluatePage: (() => ({
data: $('.table').html(),
})),
onSuccess: (result => {
const csv = String(result.result.data)
.replace(/\s/g, "")
.replace(/<\/td>/g, ";")
.replace(/<\/th>/g, ";")
.replace(/<\/tr>/g, "\n")
.replace(/<(.*?)>/g, "")
.replace(/;\n/g, "\n")
fs.writeFile("tmp/result.csv", csv, (err) => {
if(err) {
return console.log(err);
}
console.log("CSV saved");
});
}),
});
module.exports = crawler;<file_sep>/index.js
const HCCrawler = require('headless-chrome-crawler');
const fs = require('fs');
// (async () => {
// const crawler = await HCCrawler.launch({
// evaluatePage: (() => ({
// data: $('.table').html(),
// })),
// onSuccess: (result => {
// const csv = String(result.result.data)
// .replace(/\s/g, "")
// .replace(/<\/td>/g, ";")
// .replace(/<\/th>/g, ";")
// .replace(/<\/tr>/g, "\n")
// .replace(/<(.*?)>/g, "")
// .replace(/;\n/g, "\n")
// fs.writeFile("tmp/result.csv", csv, (err) => {
// if(err) {
// return console.log(err);
// }
// console.log("CSV saved");
// });
// }),
// });
// crawler.queue('https://coinmarketcap.com/currencies/tether/historical-data/?start=20180401&end=20180410');
// await crawler.onIdle();
// await crawler.close();
// })();
(async () => {
const crawler = await HCCrawler.launch({
evaluatePage: (() => ({
data: $('.currency-name-container'),
})),
onSuccess: (result => {
const coins = String(result.result.data)
console.log(coins);
}),
});
crawler.queue('https://coinmarketcap.com/');
await crawler.onIdle();
await crawler.close();
})(); | 10bc4f45827faa4620d2a22d1e7c4087cdac6f91 | [
"JavaScript"
] | 2 | JavaScript | jde-maga/crawler-test | c4f6830bcc556810276c1b0e4b9113af16c4ea91 | 2c378d2356c2ce518a619b23efb59b749dc3426c |
refs/heads/master | <repo_name>tapiamcclung/teTra-Red<file_sep>/README.md
# teTra-Red
Scripts de R para taller de análisis de dependencia y autocorrelación espacial
## Requisitos
Instalar [R](https://cran.r-project.org/)
Instalar [RStudio](https://www.rstudio.com/products/rstudio/download/)
<file_sep>/Nociones básicas de R.R
# TALLER DE ANÁLISIS DE DEPENDENCIA Y AUTOCORRELACIÓN ESPACIAL.
# UNA APROXIMACIÓN UTILIZANDO R
# Nociones básicas de R
# <NAME> / <EMAIL>
# <NAME> / <EMAIL>
## Contenido
# Preliminares
# Objetos en R
# Paquetes y librerías
# Importar datos
# Funciones
# Explorar una base de datos
# Subscripts
# Pegado de bases de datos
# Gráficos
#### PRELIMINARES ####
## Comentarios
# Todo aquello que se escriba a la derecha del signo numeral (#) se
# coloreará de verde pálido y será tomado por R como un comentario.
## Ejecutar una instrucción: <Ctrl> + <R>
# Ubicar el cursor al inicio de la línea de comando o seleccionar un
# conjunto de líneas de comandos y oprimir las teclas <Ctrl> y <R>.
## La Consola
# El signo '>' al final de la consola significa que R está listo para
# ejecutar la siguiente tarea.
# Un signo de '+' al final es indicativo de que la instrucción
# ejecutada está incompleta.
## Operadores
# Aritméticos: +, -, *, / y ^.
# Relacionales: >, >=, <, <=, == y !=.
# Lógicos: & y |.
#### OBJETOS EN R ####
# Un objeto en R puede ser una tabla con datos, una base de datos,
# una variable o un valor.
# Con el operador '<-' se asigna un valor a un objeto. Los objetos
# aparecen en la ventana superior de la derecha.
## Objetos numéricos
x <- 2
## Objetos de caracteres
aqui <- "ENES-UNAM"
## Vector numérico
cm <- c(167, 172, 153, 164, 182, 147)
kg <- c(48, NA, 55, 63, 71, 49)
## Vector de caracteres
nivel <- c("A", "B", "C", "D", "E", "F")
## Matrices
mv <- matrix(cm, nrow=3, ncol=2)
mh <- matrix(cm, nrow=3, ncol=2, byrow=TRUE)
## Llamar a los objetos
mv
mh
## Factor
# Objeto que almacena el valor de una variable categórica.
sexo <- factor(c("H", "M", "M", "M", "H", "M"))
summary(sexo)
## Data frame
# Un 'data frame' es más general que una matriz. Las columnas pueden
# tener diferentes clases de objetos (numéricos, factores, etc).
datos <- data.frame(nivel, sexo,cm, kg)
View(datos)
## Borrar objetos del workspace
rm(x, aqui) # Sólo algunos objetos
rm(list = ls()) # Todos los objetos
#### PAQUETES Y LIBRERÍAS ####
# En la Red existe un sin número de paquetes y están disponibles al
# público de manera gratuita. Para usar estos recursos hay que:
# 1o. Descargar e instalar el paquete de interés.
# 2o. Cargar el paquete a la sesión de trabajo.
# Ejemplo. Pirámide de población.
install.packages("pyramid")
library(pyramid)
# Población en localidades rurales. Michoacón, 2015.
hombres <- c(227088, 221051, 222669, 208826, 200237, 164498, 150676,
144043, 135905, 108809, 102534, 83350, 68458, 170317)
mujeres <- c(218558, 218376, 219155, 215099, 214932, 188959, 170648,
161999, 151192, 127456, 118636, 96193, 76053, 195867)
edad <-c("0-4", "5-9", "10-14", "15-19", "20-24", "25-29", "30-34",
"35-39", "40-44", "45-49", "50-54", "55-59", "60-64", "65 y +")
mich15 <- data.frame(hombres, mujeres, edad)
pyramid(mich15)
## Carpeta de trabajo
getwd()
# Cambiar carpeta de trabajo
setwd("C:/Users/marius/Desktop/DATOS/CURSOS")
#### IMPORTAR DATOS ####
# En la práctica es común encontrar/tener la información almacenada
# en varios formatos. Los más comunes son: dbf, csv, dta, sav y dat
# R puede cargar/abrir cualquier base de datos, no importa el
# formato; sólo se necesita la librería 'foreign'.
install.packages("foreign")
library(foreign)
# Ejemplo: Cargar datos de un archivo de *.csv
mich10.csv <- read.csv("data/Distr edad MICH2010.csv")
enut <- read.dta("data/ENUT.dta")
## Guardar una base de datos o una tabla en formato *.RData.
save(enut, file = "data/ENUT2014.RData")
rm(list=ls())
# Para cargar los datos utilizamos la función 'load()'.
load("data/ENUT2014.RData")
#### FUNCIONES ####
# Las funciones tienen nombre, argumentos y entregan un resultado
# (valor, gráfico, archivo, ...).
cm <- c(167, 172, 153, 164, 182, 147)
kg <- c(48, NA, 55, 63, 71, 49)
mean(cm)
## Funciones con argumentos
sd(kg)
sd(kg, na.rm=TRUE)
## Crear una función
df <- read.dta("data/DFper.dta")
piramide <- function(df){
df$edad[df$edad>130] <- NA
df$edad[df$edad>=100] <- 104
m <- min(df$edad, na.rm=TRUE)
M <- max(df$edad, na.rm=TRUE)
df$g5 <- cut(df$edad, c(seq(from = (m-1), to = M, by=5)))
aux <- data.frame(table(df$g5, df$sexo))
aux2 <- data.frame(aux[1:(nrow(aux)/2),3],
aux[((nrow(aux)/2)+1):nrow(aux),3],
aux[1:(nrow(aux)/2),1])
pyramid (aux2, Llab="Hombres", Rlab="Mujeres", Clab="Edad",
Cstep=1, Cgap=0.15, AxisFM="fg",
AxisBM=",", Csize=1, Lcol="tomato", Rcol="cyan")
}
# Ejemplo. Distribución por edad y sexo de la ENUT
enut <- read.dta("data/ENUT.dta")
piramide(enut)
#### EXPLORAR UNA BASE DE DATOS ####
# ¿Qué variables tiene la ENUT?
names(enut)
# p 7.3: "En general, ¿qué tan feliz diría que es usted?"
# Para cambiar el nombre a una variable usamos la función 'rename'
# (se encuentra en el paquete 'reshape').
install.packages("reshape")
library(reshape)
## Renombrar la variable p7_3
enut <- rename(enut, c(p7_3 = "felicidad"))
names(enut)
## Selección de variables
# La forma de acceder a las variables en R es mediante el nombre del
# base (objeto), seguido del signo "$" y el nombre de la variable.
# Desplegar los primeros valores de la variable 'edad'.
head(enut$edad)
## Crear una variable
# Tiempo dedicado a la limpieza del hogar
enut$limpiar <- enut$p6_5_2_2 + (enut$p6_5_2_3/60)
## Resumen de datos
## Tabla de frecuencias
# Distribución de los individuos según nivel de felicidad
table(enut$felicidad)
# 1 Nada; 2 Poco feliz; 3 Más o menos; 4 Feliz; y 5 Muy feliz
# Distribución incluyendo los valores perdidos
table(enut$felicidad, useNA = "always")
# Distribución de los individuos por 'felicidad' y 'sexo'
table(enut$felicidad, enut$sexo)
# Frecuencia relativa de los individuos por 'felicidad' y 'sexo'
# Por renglón (prop. hombres + prop. mujeres = 1)
prop.table(table(enut$felicidad, enut$sexo), 1)
# Por columna (prop. nada + ... + prop. muy feliz = 1)
prop.table(table(enut$felicidad, enut$sexo), 2)*100
## Función 'aggregate'
# Felicidad media por nivel de escolaridad (niv)
aggregate(enut$felicidad, by = list(enut$niv),
FUN = mean, na.rm = TRUE)
## Función 'summarySE'
install.packages("Rmisc")
library(Rmisc)
summarySE(enut, measurevar="limpiar", groupvars=c("sexo"),
na.rm = TRUE)
#### SUBSCRIPTS ####
# En ocasiones sólo se requiere aplicar las funciones a determinados
# elementos de un vector. Esto se consigue con los "subscripts".
# Vector con las edades de las mujeres.
edad.m <- enut$edad[enut$sexo==2]
# Edad promedio de los hombres según el nivel de felicidad
aggregate(enut$edad[enut$sexo == 1],
by = list(enut$felicidad[enut$sexo == 1]),
FUN = mean, na.rm = TRUE)
# Cuando hay dos subscripts operando sobre un objeto, el 1o actúa
# sobre los renglones y el segundo sobre las columnas.
# El valor de una celda específica: registro 3, 7a variable (sexo).
enut[3,7]
# Todos los valores del 3er individuo (todo el renglón).
enut[3, ]
# Valores de ciertas variables del 10mo individuo.
enut[10,5:7]
# El sexo de ciertas personas
enut[c(19,113,217), 7]
# Valores de una variable usando el nombre de la variable.
enut[c(5:9),"sexo"]
## Selección de casos y/o variables
# Ejemplo 1. Seleccionar 5 variables de la ENUT
names(enut)
temp <- enut[ ,c("control","viv_sel", "hogar", "id_hog", "n_ren")]
head(temp)
temp.bis <- enut[enut$edad < 30,
c("control","viv_sel", "hogar", "id_hog", "n_ren")]
# También podemos utilizar la posición de la variable:
temp2 <- enut[ , c(1:5)]
head(temp2)
# Ejemplo 2. Crear una base con sólo la información de los hombres
enut_h <- enut[enut$sexo == 1, ]
head(enut_h)
# Ejemplo 2.1 Base con información* de las mujeres menores de 20.
# * edad, parentesco (paren) y si asiste a la escuela (asiste_esc)
enut.m20 <- enut[enut$sexo == 2 & enut$edad<20,
c("edad", "paren", "asiste_esc")]
# Ejemplo 3. Crear una base sólo con las variables identificador
# y edad
enut2 <- subset(enut, select = c("control","viv_sel","hogar",
"id_hog" ,"n_ren" ,"edad"))
head(enut2)
# Ejemplo 4. Base de datos con la información de los mayores de 60,
# pero sin las últimas 3 variables.
enut_60 <- subset(enut, edad>60, select=-c(299:301))
table(enut_60$edad, useNA="always")
names(enut_60)
#### PEGADO DE BASES DE DATOS ####
# Existen varias formas de pegar dos bases de datos. La más segura
# es con la función 'merge()'.
# Es necesario tener una variable identificador (o un conjunto de
# ellas). Esta variable permite articular las bases de interés.
names(enut)
base1 <- enut_60[,c("control", "viv_sel", "hogar", "id_hog", "n_ren",
"edad")]
base2 <- enut_h[,c("control", "viv_sel", "hogar", "id_hog", "n_ren",
"sexo")]
head(base1)
head(base2)
base3 <- merge(base1, base2, by=c("control", "viv_sel", "hogar",
"id_hog", "n_ren"), all=TRUE)
# Con 'all=TRUE'se mantienen todos los casos.
head(base3)
# Con 'all=FALSE' se excluyen aquellos que no tienen las mismas
# variables identificador.
base4 <- merge(base1, base2, by=c("control", "viv_sel", "hogar",
"id_hog", "n_ren"), all=FALSE)
head(base4)
# Otra forma sería:
base5 <- merge(base1, base2, by=intersect(names(base1), names(base2)),
all=TRUE)
rm(list = ls())
#### GRÁFICOS ####
load("data/ENUT2014.RData")
enut$limpiar <- enut$p6_5_2_2 + (enut$p6_5_2_3/60)
## De línea
# Ejemplo. Tiempo promedio dedicado a la limpieza del hogar por edad
limpieza <- aggregate(enut$limpiar, by = list(enut$edad),
FUN = mean, na.rm = TRUE)
head(limpieza)
names(limpieza) <- c("edad","media")
head(limpieza)
plot(limpieza$edad ,limpieza$media, type="l", xlab="Edad",
ylab="Tiempo promedio")
## Histogramas
# Ejemplo. Tiempo dedicado a cocinar
# Mujeres
hist(enut$p6_4_3_2[enut$sexo == 2], freq = FALSE,
ylab = "Frec. rel.", xlab = "Horas", breaks = 20,
ylim = c(0, 0.4), col = "purple")
# Hombres
hist(enut$p6_4_3_2[enut$sexo == 1], freq = FALSE,
ylab = "Frec. rel.", xlab = "Horas", breaks = 20,
ylim = c(0, 0.4), col = "cyan", add=TRUE)
## Gráfica de caja
boxplot(enut$limpiar ~ enut$sexo,
main = "Tiempo dedicado a limpiar")
enut$sexof <- factor(enut$sexo, levels = c(1,2),
labels = c("Hombres", "Mujeres"))
boxplot(enut$limpiar ~ enut$sexof,
main = "Tiempo dedicado a limpiar")
## Guardar en el escritorio las imágenes como un archivo *.png
getwd()
setwd("C:/Users/marius/Desktop")
png("Limpiar.png")
plot(limpieza$edad ,limpieza$media, type="l", xlab="Edad",
ylab="Tiempo promedio")
dev.off()
# Varias gráficas en una imagen
png("Arreglo de gráficas - 2 en 1.png", width = 700, height = 800)
par(mfrow = c(2,1))
boxplot(enut$escoacum ~ enut$p7_3,
main="Escolaridad por nivel de felicidad",
xlab="Nivel de felicidad", ylab="Años de escolaridad",
col="cyan")
plot(limpieza$edad ,limpieza$media, type="l",
main="Tiempo promedio dedicado a la \n limpieza del hogar por edad",
xlab="Edad", ylab="Media de felicidad")
par(mfrow = c(1,1))
dev.off()
| 196a390870226334e0570c76a7386dfe7ca3f5f6 | [
"Markdown",
"R"
] | 2 | Markdown | tapiamcclung/teTra-Red | 0ef49df8140c2c03d0d8ccd5131b6d4e5dce45fd | 7fac7f9a5f5b40dd0b249e44cf7143578f4fd771 |
refs/heads/master | <file_sep><?php
/**
 * Default data helper for the Refund_Retriever module.
 *
 * Intentionally empty: Magento 1 requires a module helper class to exist
 * so that Mage::helper('...') and translation calls resolve.
 */
class Refund_Retriever_Helper_Data extends Mage_Core_Helper_Abstract
{
}
?>
<file_sep><?php
/**
 * Adminhtml controller that renders the Refund Retriever admin page.
 */
class Refund_Retriever_Adminhtml_RetrieverController extends Mage_Adminhtml_Controller_action
{
    /**
     * Render the Refund Retriever page.
     *
     * Flushes the config cache first so the freshest auth-key/endpoint
     * settings are read when the page builds.
     */
    public function indexAction()
    {
        Mage::app()->getCacheInstance()->cleanType('config');
        $this->_title($this->__('Refund Retriever'));
        $this->loadLayout();
        $this->_setActiveMenu('retriever');
        //$this->_addBreadcrumb(Mage::helper('adminhtml')->__('Dashboard'), Mage::helper('adminhtml')->__('Dashboard'));
        $this->renderLayout();
    }
}
<file_sep><?php
/**
 * Adminhtml grid block for the Refund Retriever admin page.
 */
class Refund_Retriever_Block_Adminhtml_Retriever_Grid extends Mage_Adminhtml_Block_Widget_Grid
{
    /**
     * Configure the grid identity, default sorting and filter persistence.
     */
    public function __construct()
    {
        parent::__construct();
        $this->setId('printerGrid');
        $this->setDefaultSort('ID');
        $this->setDefaultDir('ASC');
        // Remember applied filters/sorting across admin page views.
        $this->setSaveParametersInSession(true);
        // BUG FIX: removed stray debug output (echo "hi") left in the
        // constructor, which corrupted every response rendering this grid.
    }
}
<file_sep><?php
/**
 * Admin system-config frontend renderer for the (read-only) endpoint field.
 */
class Refund_Retriever_Block_Endpoint_Renderer extends Mage_Adminhtml_Block_System_Config_Form_Field
{
    /**
     * Assign the default template that renders the disabled endpoint field.
     *
     * @return Refund_Retriever_Block_Endpoint_Renderer
     */
    protected function _prepareLayout()
    {
        parent::_prepareLayout();
        if (!$this->getTemplate()) {
            $this->setTemplate('retriever/system/config/disabled_endpoint.phtml');
        }
        return $this;
    }

    /**
     * Render without scope/"Use Default" controls: the endpoint is a
     * single global value, not configurable per website/store.
     *
     * @param Varien_Data_Form_Element_Abstract $element
     * @return string
     */
    public function render(Varien_Data_Form_Element_Abstract $element)
    {
        $element->unsScope()->unsCanUseWebsiteValue()->unsCanUseDefaultValue();
        return parent::render($element);
    }

    /**
     * Build the field HTML, preferring the endpoint stored in the session
     * and falling back to the element's saved value.
     *
     * @param Varien_Data_Form_Element_Abstract $element
     * @return string
     */
    protected function _getElementHtml(Varien_Data_Form_Element_Abstract $element)
    {
        $value = Mage::getSingleton('core/session')->getRetriever_endpoint();
        // NOTE(review): "<= 0" compares a URL string numerically (an empty
        // string loosely equals 0) -- confirm this empty-check is intended.
        if($value <= 0){
            $value = $element->getEscapedValue();
        }
        $originalData = $element->getOriginalData();
        $this->addData(array(
            'my_value' => $value,
            'html_id' => $element->getHtmlId(),
        ));
        return $this->_toHtml();
    }
}
<file_sep><?php
/**
 * Observer for the Refund Retriever module.
 *
 * Snapshots shipping/carrier configuration before an admin config save,
 * registers the store with the Refund Retriever API when the module is
 * enabled, and forwards shipping-setting / carrier-credential changes.
 */
class Refund_Retriever_Model_Observer
{
    // Values captured by beforesave(), used to detect what changed.
    private $countryCode_before;
    private $regionCode_before;
    private $shipping_postcode_before;
    private $shipping_city_before;
    private $shipping_street2_before;
    private $shipping_street1_before;
    private $fedex_account_id_before;
    private $fedex_meter_no_before;
    private $fedex_key_before;
    private $fedex_password_before;
    private $ups_gateway_url_before;
    private $ups_shipment_requesttype_before;
    private $ups_container_before;
    private $ups_dest_type_before;
    private $ups_pickup_method_before;
    private $ups_allowed_methods_before;
    private $ups_gateway_xml_url_before;
    private $ups_tracking_xml_url_before;
    private $ups_shipaccept_xml_url_before;
    private $ups_username_before;
    private $ups_access_license_number_before;
    private $ups_password_before;

    /**
     * Capture shipping-origin settings and carrier credentials before the
     * admin save is written, so the *details() observers can diff
     * old vs. new values.
     */
    public function beforesave()
    {
        $this->countryCode_before = Mage::getStoreConfig('shipping/origin/country_id');
        $this->regionCode_before = Mage::getStoreConfig('shipping/origin/region_id');
        $this->shipping_postcode_before = Mage::getStoreConfig('shipping/origin/postcode');
        $this->shipping_city_before = Mage::getStoreConfig('shipping/origin/city');
        $this->shipping_street1_before = Mage::getStoreConfig('shipping/origin/street_line1');
        $this->shipping_street2_before = Mage::getStoreConfig('shipping/origin/street_line2');
        $this->fedex_account_id_before = Mage::getStoreConfig('carriers/fedex/account');
        $this->fedex_meter_no_before = Mage::getStoreConfig('carriers/fedex/meter_number');
        $this->fedex_key_before = Mage::getStoreConfig('carriers/fedex/key');
        $this->fedex_password_before = Mage::getStoreConfig('carriers/fedex/password');
        // BUG FIX: these two paths used underscores instead of slashes
        // ('carriers_ups_gateway_url', 'carriers_ups_allowed_methods'),
        // so getStoreConfig() always returned null and change detection
        // for those fields never fired.
        $this->ups_gateway_url_before = Mage::getStoreConfig('carriers/ups/gateway_url');
        $this->ups_shipment_requesttype_before = Mage::getStoreConfig('carriers/ups/shipment/requesttype');
        $this->ups_container_before = Mage::getStoreConfig('carriers/ups/container');
        $this->ups_dest_type_before = Mage::getStoreConfig('carriers/ups/dest/type');
        $this->ups_pickup_method_before = Mage::getStoreConfig('carriers/ups/pickup');
        $this->ups_allowed_methods_before = Mage::getStoreConfig('carriers/ups/allowed_methods');
        $this->ups_gateway_xml_url_before = Mage::getStoreConfig('carriers/ups/gateway_xml_url');
        $this->ups_tracking_xml_url_before = Mage::getStoreConfig('carriers/ups/tracking_xml_url');
        $this->ups_shipaccept_xml_url_before = Mage::getStoreConfig('carriers/ups/shipaccept_xml_url');
        $this->ups_username_before = Mage::getStoreConfig('carriers/ups/username');
        $this->ups_access_license_number_before = Mage::getStoreConfig('carriers/ups/access_license_number');
        $this->ups_password_before = Mage::getStoreConfig('carriers/ups/password');
    }

    /**
     * Fired after the "retrieverconfig" admin section is saved: registers
     * the store + admin user with the Refund Retriever API, stores the
     * returned auth token, then uploads current shipping/carrier details.
     */
    public function adminSystemConfigChangedSectionretrieverconfig()
    {
        $status = Mage::getStoreConfig('retrieverconfig/options/auto_retriever_status');
        Mage::getSingleton('admin/session')->getData();
        $user = Mage::getSingleton('admin/session');
        $userEmail = $user->getUser()->getEmail();
        $userFirstname = $user->getUser()->getFirstname();
        $userLastname = $user->getUser()->getLastname();
        $userUsername = $user->getUser()->getUsername();
        $store = Mage::app()->getStore();
        $storeName = $store->getName();
        $HomeUrl = Mage::getBaseUrl();
        $storePhone = Mage::getStoreConfig('general/store_information/phone');
        $countryCode = Mage::getStoreConfig('general/store_information/merchant_country');
        $storeAddress = Mage::getStoreConfig('general/store_information/address');
        if ($status == 1) { // only act when the Refund Retriever module is enabled
            $data = array(
                "site_url" => $HomeUrl,
                "store_name" => $storeName,
                "store_contact_no" => $storePhone,
                "country" => $countryCode,
                "store_address" => $storeAddress,
                "email" => $userEmail,
                "admin_username" => $userUsername,
                "admin_first_name" => $userFirstname,
                "admin_last_name" => $userLastname
            );
            /**if($storePhone == NULL || $store_address == NULL)
            {
            Mage::getSingleton('core/session')->addError('Please enter and save the "Store Information" in "General" Tab');
            }else{*/
            // Registration endpoint; the response carries the auth token.
            $url = "https://partners.refundretriever.com/magento/auth";
            $result = $this->curlrequest($data, $url);
            $data = json_decode($result, TRUE);
            $result = $data['AUTH_TOKEN'];
            $setkey = new Mage_Core_Model_Config();
            $setkey->saveConfig('retrieverconfig/options/auth_key', $result, 'default', 0);
            $setkey->saveConfig('retrieverconfig/options/endpointurl', $url, 'default', 0);
            Mage::getSingleton('core/session')->setRetrieverkey($result);
            Mage::getSingleton('core/session')->setRetriever_endpoint($url);
            // NOTE(review): AUTH_TOKEN is a string; "> 0" relies on PHP's
            // loose numeric comparison -- confirm an "is the token
            // non-empty" check is what was intended here.
            if ($result > 0) {
                // Initial upload of shipping settings + carrier credentials.
                $data = array(
                    "shipping_setting" => array(
                        "Country" => $this->countryCode_before,
                        "State" => $this->regionCode_before,
                        "Zipcode" => $this->shipping_postcode_before,
                        "City" => $this->shipping_city_before,
                        "Street1" => $this->shipping_street1_before,
                        "Street2" => $this->shipping_street2_before
                    ),
                    "Shipping_method" => array(
                        "fedex_account_id" => $this->fedex_account_id_before,
                        "fedex_meter_no" => $this->fedex_meter_no_before,
                        "fedex_key" => $this->fedex_key_before,
                        // BUG FIX: restored the credential value that had
                        // been replaced by a "<PASSWORD>" placeholder.
                        "fedex_password" => $this->fedex_password_before,
                    )
                );
                $authkey = Mage::getStoreConfig('retrieverconfig/options/auth_key');
                $url = "https://partners.refundretriever.com/magento/carrierAccount/u/" . $authkey;
                $result = $this->curlrequest($data, $url);
            }
        }
        //}
    }

    /**
     * Push the shipping-origin settings to the API when any of them changed
     * relative to the beforesave() snapshot.
     */
    public function shippingSettingdetails()
    {
        $countryCode = Mage::getStoreConfig('shipping/origin/country_id');
        $shipping_county = Mage::getModel('directory/country')->load($countryCode)->getName();
        $regionCode = Mage::getStoreConfig('shipping/origin/region_id');
        $shipping_region = Mage::getModel('directory/region')->load($regionCode)->getName();
        $shipping_postcode = Mage::getStoreConfig('shipping/origin/postcode');
        $shipping_city = Mage::getStoreConfig('shipping/origin/city');
        $shipping_street1 = Mage::getStoreConfig('shipping/origin/street_line1');
        $shipping_street2 = Mage::getStoreConfig('shipping/origin/street_line2');
        // Only call out to the API when something actually changed.
        if ($countryCode != $this->countryCode_before || $regionCode != $this->regionCode_before || $shipping_postcode != $this->shipping_postcode_before || $shipping_city != $this->shipping_city_before || $shipping_street1 != $this->shipping_street1_before || $shipping_street2 != $this->shipping_street2_before) {
            $data = array(
                "Country" => $shipping_county,
                "State" => $shipping_region,
                "Zipcode" => $shipping_postcode,
                "City" => $shipping_city,
                "Street1" => $shipping_street1,
                "Street2" => $shipping_street2
            );
            $authkey = Mage::getStoreConfig('retrieverconfig/options/auth_key');
            $url = "https://partners.refundretriever.com/magento/carrierAccount/u/" . $authkey;
            $result = $this->curlrequest($data, $url);
        }
    }

    /**
     * Push changed carrier credentials (UPS / FedEx) to the API.
     */
    public function shippingmethoddetails()
    {
        // UPS settings as saved just now.
        $ups_active = Mage::getStoreConfig('carriers/ups/active');
        $ups_type = Mage::getStoreConfig('carriers/ups/type');
        $ups_title = Mage::getStoreConfig('carriers/ups/title');
        // BUG FIX: 'carriers_ups_gateway_url' / 'carriers_ups_allowed_methods'
        // used underscores instead of slashes and always returned null.
        $ups_gateway_url = Mage::getStoreConfig('carriers/ups/gateway_url');
        $ups_shipment_requesttype = Mage::getStoreConfig('carriers/ups/shipment/requesttype');
        $ups_container = Mage::getStoreConfig('carriers/ups/container');
        $ups_dest_type = Mage::getStoreConfig('carriers/ups/dest/type');
        $ups_pickup_method = Mage::getStoreConfig('carriers/ups/pickup');
        $ups_allowed_methods = Mage::getStoreConfig('carriers/ups/allowed_methods');
        $ups_gateway_xml_url = Mage::getStoreConfig('carriers/ups/gateway_xml_url');
        $ups_tracking_xml_url = Mage::getStoreConfig('carriers/ups/tracking_xml_url');
        $ups_shipaccept_xml_url = Mage::getStoreConfig('carriers/ups/shipaccept_xml_url');
        $ups_username = Mage::getStoreConfig('carriers/ups/username');
        $ups_access_license_number = Mage::getStoreConfig('carriers/ups/access_license_number');
        $ups_password = Mage::getStoreConfig('carriers/ups/password');
        if ($ups_active == 1) {
            if ($ups_type == 'UPS_XML') {
                // Credentials relevant to the "UPS XML" integration type.
                $data = array(
                    "ups_gateway_xml_url" => $ups_gateway_xml_url,
                    "ups_tracking_xml_url" => $ups_tracking_xml_url,
                    "ups_shipaccept_xml_url" => $ups_shipaccept_xml_url,
                    "ups_username" => $ups_username,
                    "ups_access_license_number" => $ups_access_license_number,
                    // BUG FIX: restored the value replaced by a placeholder.
                    "ups_password" => $ups_password,
                );
                if ($ups_gateway_xml_url != $this->ups_gateway_xml_url_before || $ups_tracking_xml_url != $this->ups_tracking_xml_url_before || $ups_shipaccept_xml_url != $this->ups_shipaccept_xml_url_before || $ups_username != $this->ups_username_before || $ups_access_license_number != $this->ups_access_license_number_before || $ups_password != $this->ups_password_before) {
                    $authkey = Mage::getStoreConfig('retrieverconfig/options/auth_key');
                    $url = "https://partners.refundretriever.com/magento/carrierAccount/u/" . $authkey;
                    // NOTE(review): the request below was commented out in
                    // the original -- confirm whether UPS XML uploads are
                    // intentionally disabled.
                    //$result = $this->curlrequest($data,$url);
                }
            }
            else if ($ups_type == 'UPS') {
                // Settings relevant to the plain "UPS" integration type.
                $data = array(
                    "ups_gateway_url" => $ups_gateway_url,
                    "ups_shipment_requesttype" => $ups_shipment_requesttype,
                    "ups_container" => $ups_container,
                    "ups_dest_type" => $ups_dest_type,
                    "ups_pickup_method" => $ups_pickup_method,
                    "ups_allowed_methods" => $ups_allowed_methods,
                );
                if ($ups_gateway_url != $this->ups_gateway_url_before || $ups_shipment_requesttype != $this->ups_shipment_requesttype_before || $ups_container != $this->ups_container_before || $ups_dest_type != $this->ups_dest_type_before || $ups_pickup_method != $this->ups_pickup_method_before) {
                    $authkey = Mage::getStoreConfig('retrieverconfig/options/auth_key');
                    $url = "https://partners.refundretriever.com/magento/carrierAccount/u/" . $authkey;
                    // NOTE(review): commented out in the original as well.
                    // $result = $this->curlrequest($data,$url);
                }
            }
        }
        // FedEx credentials as saved just now.
        $fedex_active = Mage::getStoreConfig('carriers/fedex/active');
        $fedex_account_id = Mage::getStoreConfig('carriers/fedex/account');
        $fedex_meter_no = Mage::getStoreConfig('carriers/fedex/meter_number');
        $fedex_key = Mage::getStoreConfig('carriers/fedex/key');
        $fedex_password = Mage::getStoreConfig('carriers/fedex/password');
        if ($fedex_active == 1) {
            if ($fedex_account_id != $this->fedex_account_id_before || $fedex_meter_no != $this->fedex_meter_no_before || $fedex_key != $this->fedex_key_before || $fedex_password != $this->fedex_password_before) {
                $data = array(
                    "fedex_account_id" => $fedex_account_id,
                    "fedex_meter_no" => $fedex_meter_no,
                    "fedex_key" => $fedex_key,
                    // BUG FIX: restored the value replaced by a placeholder.
                    "fedex_password" => $fedex_password,
                );
                $authkey = Mage::getStoreConfig('retrieverconfig/options/auth_key');
                $url = "https://partners.refundretriever.com/magento/carrierAccount/u/" . $authkey;
                $result = $this->curlrequest($data, $url);
            }
        }
    }

    /**
     * POST ``$data`` as JSON to ``$url`` and return the raw response body.
     *
     * @param array  $data payload to JSON-encode
     * @param string $url  target endpoint
     * @return string|false response body, or false on curl failure
     */
    public function curlrequest($data, $url)
    {
        $data_string = json_encode($data);
        $ch = curl_init($url);
        curl_setopt($ch, CURLOPT_CUSTOMREQUEST, "POST");
        curl_setopt($ch, CURLOPT_POSTFIELDS, $data_string);
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
        // NOTE(review): SSL peer verification is disabled -- credentials
        // travel over an unverified TLS channel; consider enabling it.
        curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);
        curl_setopt($ch, CURLOPT_HTTPHEADER, array(
            'Content-Type: application/json',
            'Content-Length: ' . strlen($data_string))
        );
        $result = curl_exec($ch);
        return $result;
    }
}
<file_sep><?php
/**
 * Renders the Refund Retriever dashboard iframe in the admin
 * system-configuration page, authenticated via a short-lived session token.
 */
class Refund_Retriever_Block_Iframe_Renderer extends Mage_Adminhtml_Block_System_Config_Form_Field {
    /**
     * POST ``$data`` as JSON to ``$url`` and return the raw response body.
     *
     * @param array  $data payload to JSON-encode
     * @param string $url  target endpoint
     * @return string|false response body, or false on curl failure
     */
    public function curlrequest($data, $url)
    {
        $json_setting = json_encode($data);
        // NOTE(review): this logs the payload -- including the auth token --
        // to system.log; consider removing it in production.
        Mage::log('jsondata = ' . $json_setting);
        $ch = curl_init($json_setting ? $url : $url); // initialize curl
        $options = array(
            CURLOPT_RETURNTRANSFER => true,
            CURLOPT_HTTPHEADER => array("Content-type: application/json"),
            CURLOPT_POSTFIELDS => $json_setting,
            CURLOPT_SSL_VERIFYPEER => false,
        );
        curl_setopt_array($ch, $options);
        $result = curl_exec($ch);
        curl_close($ch);
        return $result;
    }

    /**
     * Exchange the stored auth token for a session token and build the
     * dashboard iframe markup.
     *
     * BUG FIX: the original echoed the iframe directly and indexed
     * SESSION_TOKEN without checking the JSON decode result;
     * _getElementHtml() must RETURN its markup so Magento can place it
     * inside the form, and a failed decode must not raise a notice.
     *
     * @param Varien_Data_Form_Element_Abstract $element
     * @return string
     */
    protected function _getElementHtml(Varien_Data_Form_Element_Abstract $element)
    {
        $url = Mage::getStoreConfig('retrieverconfig/options/endpointurl');
        $authkey = Mage::getStoreConfig('retrieverconfig/options/auth_key');
        $Authurl = $url . "/u/" . $authkey;
        $key = array("AUTH_TOKEN" => $authkey);
        $result = $this->curlrequest($key, $Authurl);
        $data = json_decode($result, TRUE);
        // Guard against curl failure / malformed JSON responses.
        $session_token = (is_array($data) && isset($data['SESSION_TOKEN'])) ? $data['SESSION_TOKEN'] : '';
        return "<iframe src='https://partners.refundretriever.com/magento/auth/s/" . $session_token . "' width:600 style='width:100%;height:900px;position:relative;top:-50px;background-color:#fff'></iframe>";
    }
}
| f43536e8a760d73e2ed0fca09ec368633287d786 | [
"PHP"
] | 6 | PHP | OpenMageModuleFostering/refund_retriever | 683f31e9b31f8528359167ed91b2c9b61ae73d2e | 5f633bae7881c310e89363069cb0bca5b18c9c82 |
refs/heads/master | <repo_name>happyin3/spider-cus<file_sep>/examples/test/unittest_equality.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
测试相等性
"""
import unittest
class Equality_Test(unittest.TestCase):
def test_expect_equal(self):
self.failUnlessEqual(1, 3-2)
def test_expect_equal_fails(self):
self.failUnlessEqual(2, 3-2)
def test_expect_not_equal(self):
self.failIfEqual(1, 3-2)
if __name__ == '__main__':
unittest.main()
<file_sep>/examples/spider/work_manager.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
任务管理器
"""
import threading
import Queue
class Worker(threading.Thread):
    """Daemon thread that executes jobs pulled from ``work_queue`` and
    pushes each job's return value onto ``result_queue``.

    A job is a ``(callable, args, kwargs)`` triple.
    """

    def __init__(self, work_queue, result_queue, **kwargs):
        threading.Thread.__init__(self, **kwargs)
        # Daemon thread: must not keep the interpreter alive at shutdown.
        self.setDaemon(True)
        self.work_queue = work_queue
        self.result_queue = result_queue

    def run(self):
        # Drain the queue; the non-blocking get raises Queue.Empty once
        # there is no work left, which ends the thread.
        while True:
            try:
                job, job_args, job_kwargs = self.work_queue.get(False)
                outcome = job(*job_args, **job_kwargs)
                self.result_queue.put(outcome)
            except Queue.Empty:
                break
class Worker_Manager(object):
    """A simple thread pool: queue jobs, run them on Worker threads, and
    collect each job's return value in a result queue.
    """

    def __init__(self, num_of_workers=10):
        # Pending (callable, args, kwargs) jobs.
        self.work_queue = Queue.Queue()
        # Return values of completed jobs.
        self.result_queue = Queue.Queue()
        self.workers = []
        self.num_of_workers = num_of_workers

    def _recruit_threads(self, num_of_workers):
        """Create ``num_of_workers`` Worker threads bound to the queues."""
        for i in range(num_of_workers):
            worker = Worker(self.work_queue, self.result_queue)
            self.workers.append(worker)

    def start(self):
        """Recruit a fresh batch of workers and start them.

        Threads cannot be restarted, so each call creates new Worker
        instances; this allows repeated start()/wait_for_complete() cycles.
        """
        self._recruit_threads(self.num_of_workers)
        for w in self.workers:
            w.start()

    def wait_for_complete(self):
        """Block until every worker finished and the work queue is drained."""
        while len(self.workers):
            worker = self.workers.pop()
            worker.join()
            # BUG FIX: the original tested ``worker.isAlive`` -- the bound
            # method object itself, which is always truthy -- instead of
            # calling it, so already-finished workers were re-queued
            # whenever work remained, looping forever.
            if worker.isAlive() and not self.work_queue.empty():
                # Still running and work remains: keep tracking it.
                self.workers.append(worker)

    def add_job(self, callable, *args, **kwargs):
        """Queue ``callable(*args, **kwargs)`` for execution by a worker."""
        self.work_queue.put((callable, args, kwargs))

    def get_result(self, *args, **kwargs):
        """Pop one job result (Queue.get semantics)."""
        return self.result_queue.get(*args, **kwargs)

    def get_all_result(self, *args, **kwargs):
        """Return the underlying result queue itself."""
        return self.result_queue
<file_sep>/examples/spider/spider_basic.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import Queue
import time
from requests import (ConnectionError, HTTPError, Timeout)
from bs4 import BeautifulSoup
import config
import bloomfilter
requests.packages.urllib3.disable_warnings()
# Crawl configuration: seed URL and BFS depth.
url = "http://www.sina.com.cn"
deep = 2
# Pseudo-links ("javascript:;" etc.) that must never be enqueued.
not_url_list = config.not_url_list
# Bloom filter used to de-duplicate visited URLs.
bf = bloomfilter.init_bf()
def requester(url, timeout=3):
res = None
try:
res = requests.get(url, timeout=timeout)
except ConnectionError as e:
pass
except HTTPError as e:
pass
except Timeout as e:
pass
except Exception as e:
print e
finally:
return res
def parser(text, text_encoding):
    """Build a BeautifulSoup tree from ``text`` using the lxml backend,
    passing ``text_encoding`` as the decoding hint."""
    return BeautifulSoup(text, "lxml", from_encoding=text_encoding)
def spider(requester, parser, url):
    """Fetch ``url`` and return the href of every <a> element.

    :requester: callable(url) -> response or falsy on failure
    :parser: callable(text, encoding) -> soup
    Returns None when the fetch failed, otherwise a (possibly empty)
    list of href attribute values.
    """
    res = requester(url)
    if not res:
        return None
    soup = parser(res.text, res.encoding)
    anchors = soup.find_all("a")
    if not anchors:
        return []
    return [anchor.get("href") for anchor in anchors]
# Breadth-first crawl driver (module-level script).
request_queue = Queue.Queue()   # URLs still to fetch at the current level
request_queue.put(url)
result_queue = Queue.Queue()    # per-page lists of extracted hrefs
all_links = []                  # every unique link seen so far
all_links.append(url)
bf.add(url)                     # mark the seed as visited
now = time.time()
for i in range(deep):
    # Fetch every URL queued at the previous depth level.
    while not request_queue.empty():
        request_url = request_queue.get()
        result_list = spider(requester, parser, request_url)
        result_queue.put(result_list)
    # Collect results and enqueue unseen links for the next level; the
    # Bloom filter gives O(1) membership checks (small false-positive
    # rate means a few links may be skipped, never crawled twice).
    while not result_queue.empty():
        result_list = result_queue.get()
        if result_list:
            for link in result_list:
                if link not in bf and\
                   link not in not_url_list and\
                   link is not None:
                    request_queue.put(link)
                    all_links.append(link)
                    bf.add(link)
    print "{deep} done, len of all_links: {len}".format(
        deep=i, len=len(all_links))
# Report total wall-clock crawl time.
cost = time.time() - now
print cost
<file_sep>/examples/threading/threading_condition.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
同步线程
除了使用Event,还可以通过使用一个Condition对象来同步线程。由
于Condition使用了一个Lock,它可以绑定到一个共享资源,允许多
个线程等待资源更新。
"""
<file_sep>/examples/spider/spider.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
爬虫
"""
import Queue
import work_manager
import requester
import parser
import bloomfilter
import logginger
def spider(requester, parser, url, key):
    """Fetch ``url``, optionally filter by keyword, and return all <a> hrefs.

    :requester: callable(url) -> response or falsy on failure
    :parser: callable(text, encoding) -> soup
    :url: page to fetch
    :key: keyword the page text must contain; None disables the filter
    Returns the list of href values, or None when the fetch failed or
    the keyword is absent from the page.
    """
    res = requester(url)
    if not res:
        return None
    page_text = res.text
    if key is not None and key not in page_text:
        return None
    soup = parser(page_text, res.encoding)
    return [anchor.get("href") for anchor in soup.find_all("a")]
def main(url,
         deep,
         key,
         threadpool_num,
         not_url_list,
         loglevel,
         logfile,
         ):
    """Crawl ``url`` breadth-first up to ``deep`` levels using a thread pool.

    :url: seed URL to start from
    :deep: crawl depth (number of BFS levels)
    :key: keyword a page must contain for its links to be followed
    :threadpool_num: number of worker threads in the pool
    :not_url_list: href values that must never be enqueued
    :loglevel: log level key ("1"-"5", see logginger.LOG_LEVELS)
    :logfile: path of the log file
    """
    # Set up the logger (file + console handlers).
    logger = logginger.init_logger(loglevel, logfile)
    logger.debug("This is a debug message")
    logger.warning("This is a warning message")
    # Bloom filter used for O(1) "have we seen this URL" checks.
    bf = bloomfilter.init_bf()
    request_queue = Queue.Queue()
    result_queue = Queue.Queue()
    request_queue.put(url)
    result_list = []
    result_list.append(url)
    bf.add(url)
    wm = work_manager.Worker_Manager(threadpool_num)
    # One iteration per BFS depth level.
    for i in range(deep):
        # Queue a crawl job for every URL discovered at the previous level.
        while not request_queue.empty():
            request_url = request_queue.get()
            wm.add_job(spider,
                       requester.requester,
                       parser.parser,
                       request_url,
                       key)
        # NOTE(review): start() recruits a fresh batch of threads on each
        # iteration (threads cannot be restarted) -- confirm that growing
        # a new batch per level is the intended pool behavior.
        wm.start()
        wm.wait_for_complete()
        # Merge this level's results, skipping pseudo-links and URLs the
        # Bloom filter has already recorded.
        result_queue = wm.get_all_result()
        while not result_queue.empty():
            result = result_queue.get()
            if result is not None:
                for link in result:
                    if link not in not_url_list and\
                       link is not None and\
                       link not in bf:
                        result_list.append(link)
                        request_queue.put(link)
                        bf.add(link)
        print "{deep} done, len of result_list: {len}".\
            format(deep=i, len=len(result_list))
<file_sep>/examples/threading/threading_threadpool_cus.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
线程池模式的简单实现
"""
# import sys
import os
import threading
import Queue
import urllib2
class Worker(threading.Thread):
    """Daemon thread processing (callable, args, kwds) jobs from the pool's
    work queue; each job's return value goes onto the result queue."""

    def __init__(self, work_queue, result_queue, **kwds):
        threading.Thread.__init__(self, **kwds)
        # Daemon: must not keep the interpreter alive at shutdown.
        self.setDaemon(True)
        self.work_queue = work_queue
        self.result_queue = result_queue

    def run(self):
        # Keep pulling jobs; the non-blocking get raises Queue.Empty when
        # no work remains, which ends this thread.
        while True:
            try:
                job, job_args, job_kwds = self.work_queue.get(False)
                self.result_queue.put(job(*job_args, **job_kwds))
            except Queue.Empty:
                break
class WorkerManager(object):
"""线程池管理器
"""
def __init__(self, num_of_workers=10):
self.work_queue = Queue.Queue() # 请求队列
self.result_queue = Queue.Queue() # 输出结果的队列
self.workers = []
self._recruit_threads(num_of_workers)
def _recruit_threads(self, num_of_workers):
for i in range(num_of_workers):
# 创建工作线程
worker = Worker(self.work_queue, self.result_queue)
# 加入线程队列中
self.workers.append(worker)
def start(self):
"""启动线程
"""
for w in self.workers:
w.start()
def wait_for_complete(self):
while len(self.workers):
worker = self.workers.pop()
worker.join()
if worker.isAlive() and not self.work_queue.empty():
# 重新加入线程池中
self.workers.append(worker)
print "All jobs were completed."
def add_job(self, callable, *args, **kwds):
self.work_queue.put((callable, args, kwds))
def get_result(self, *args, **kwds):
return self.result_queue.get(*args, **kwds)
def download_file(url):
print "begin download", url
url_handler = urllib2.urlopen(url)
fname = os.path.basename(url) + ".html"
with open(fname, "wb") as f:
while True:
chunk = url_handler.read(1024)
if not chunk:
break
f.write(chunk)
# Demo driver: download each URL using a pool of 2 worker threads.
urls = ['http://www.baidu.com']
# urls = ['http://www.baidu.com',
#         'http://www.sina.com.cn']
wm = WorkerManager(2)
for i in urls:
    wm.add_job(download_file, i)
wm.start()
wm.wait_for_complete()
<file_sep>/examples/queue/queue_test.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Queue
"""
import threading
import time
import Queue
class Producer(threading.Thread):
    # Producer thread: pushes up to 100 items into the shared queue each
    # second, but skips producing while the backlog exceeds 1000 items.
    # Runs forever; the demo is stopped manually.
    def run(self):
        global queue
        count = 0
        while True:
            for i in xrange(100):
                if queue.qsize() > 1000:
                    # Backlog too large: skip this production slot.
                    pass
                else:
                    count += 1
                    msg = '生成产品 %s' % count
                    queue.put(msg)
                    print msg
            time.sleep(1)
class Consumer(threading.Thread):
    # Consumer thread: takes up to 3 items per second from the shared
    # queue, but only while at least 50 items are buffered.  Runs forever.
    def run(self):
        global queue
        while True:
            for i in xrange(3):
                if queue.qsize() < 50:
                    # Buffer too small: let producers get ahead first.
                    pass
                else:
                    msg = self.name + "消费了" + queue.get()
                    print msg
            time.sleep(1)
# Shared work queue consumed by the Producer/Consumer threads above.
queue = Queue.Queue()


def main():
    # Pre-fill the queue with 100 items, then start 2 producers and
    # 5 consumers; all threads loop forever (stop the script manually).
    for i in xrange(100):
        msg = "初始化产品 %s" % i
        print msg
        queue.put(msg)
    for i in xrange(2):
        p = Producer()
        p.start()
    for i in xrange(5):
        c = Consumer()
        c.start()


if __name__ == "__main__":
    main()
<file_sep>/examples/spider/bloomfilter.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
布隆过滤器
"""
from pybloomfilter import BloomFilter
def init_bf():
    """Create a fresh on-disk Bloom filter used to de-duplicate URLs."""
    # Arguments: capacity, acceptable error rate, backing file path.
    url_filter = BloomFilter(1000000, 0.01, 'filter.bloom')
    # Start from a clean slate: drop whatever a previous run stored.
    url_filter.clear_all()
    return url_filter
<file_sep>/docs/requests.md
# requests库记录
### 响应内容
Requests会自动解码来自服务器的内容。
>>> import requests
>>> r = requests.get(url)
请求发出后,Requests会基于HTTP头部对响应的编码作出有根据的推测,并将内容解码成unicode。当你访问r.text之时,Requests会使用其推测的文本编码进行编码。
可以通过r.encoding属性来找出Requests使用了什么编码,通过修改它来改变编码。
>>> r.encoding
'utf-8'
>>> r.encoding = 'GB2312'
改变编码后,每当访问r.text,Requests都将会使用r.encoding的新值。
<file_sep>/examples/test/unittest_almostequal.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
近似相等
除了严格相等性外,还可以使用failIfAlmostEqual()和failUnlessAlmostEqual()
测试浮点数的近似相等性。
"""
import unittest
class Almost_Equal_Test(unittest.TestCase):
def test_equal(self):
self.failUnlessEqual(1.1, 3.3-2.2)
def test_almost_equal(self):
self.failUnlessAlmostEqual(1.1, 3.3-2.2, places=1)
def test_not_almost_equal(self):
self.failIfAlmostEqual(1.1, 3.3-2.0, places=1)
if __name__ == '__main__':
unittest.main()
<file_sep>/README.md
# spider-cus
知道创宇-爬虫题实现。
<file_sep>/docs/bugs.md
### Ubuntu 14.04 virtualenv中安装lxml报错
>
> `error: command 'x86_64-linux-gnu-gcc' failed with exit status 1`
>
> **原因**
>>
>> 缺少依赖包
> **解决方案**
>>
>> 安装缺少的系统依赖后重新安装 lxml:`sudo apt-get install libxml2-dev libxslt1-dev python-dev zlib1g-dev`
### requests请求HTTPS网页式错误SNIMissingWarning
>
> `SNIMissingWarning: An HTTPS request has been made...`
>
> **参考**
>>
>> [解决Python爬取HTTPS网页时的错误](http://blog.bbzhh.com/index.php/archives/111.html)
<file_sep>/examples/get_resources_with_requests.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Practice script for the requests + BeautifulSoup libraries:
fetch a page, override its encoding, and list every <a href>.
"""
import requests
from bs4 import BeautifulSoup
# Can be used to guess a document's encoding:
# from bs4 import UnicodeDammit
# Fetch the page.
# Other candidates: www.people.com.cn, www.sina.com.cn
res = requests.get('http://www.sina.com.cn')
res_encoding = res.encoding
# Override the encoding that requests guessed from the headers.
res.encoding = "utf-8"
# Guess the encoding -- expected: gb2312, observed: None (unicode)
# dammit = UnicodeDammit(res.text)
# print dammit.original_encoding
# Parse with the lxml backend, passing the (overridden) encoding hint.
# from_encoding
soup = BeautifulSoup(res.text, "lxml", from_encoding=res.encoding)
soup_encoding = soup.original_encoding
# Print every hyperlink target on the page.
for link in soup.find_all('a'):
    print link.get('href')
# print res.text
print res.encoding, soup.original_encoding
<file_sep>/examples/test/unittest_outcomes.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
测试结果
ok:测试通过
FAIL:测试没有通过,产生一个AssertionError异常
ERROR:测试产生AssertionError以外的某个异常
"""
import unittest
class Outcomes_Test(unittest.TestCase):
def test_pass(self):
return
def test_fail(self):
self.failIf(True, 'failure message goes here')
def test_error(self):
raise RuntimeError('Test error!')
if __name__ == '__main__':
unittest.main()
<file_sep>/examples/test/unittest_truth.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
断言真值
"""
import unittest
class Truth_Test(unittest.TestCase):
    """Demonstrates truthiness assertions.

    assertTrue/assertFalse replace the legacy failUnless/failIf aliases,
    which were deprecated and removed in Python 3.12.
    """

    def test_fail_unless(self):
        self.assertTrue(True)

    def test_assert_true(self):
        self.assertTrue(True)

    def test_fail_if(self):
        self.assertFalse(False)

    def test_assert_false(self):
        self.assertFalse(False)


if __name__ == '__main__':
    unittest.main()
<file_sep>/examples/spider/requester.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
抓取器
"""
import requests
from requests import (ConnectionError, HTTPError, Timeout)
# 关闭SNIMissingWarning和InsecurePlatformWarning警告
requests.packages.urllib3.disable_warnings()
def requester(url, timeout=3):
"""抓取器
:url : 地址
:timeout : 响应等待时间
"""
res = None
try:
res = requests.get(url, timeout=timeout)
except ConnectionError as e:
pass
except HTTPError as e:
pass
except Timeout as e:
pass
except Exception as e:
print e
finally:
return res
<file_sep>/docs/think.md
### 线程安全
***
### Python的list和dict是否是线程安全的
>
> [python的list和dict是否是线程安全的讨论](http://blog.csdn.net/b_h_l/article/details/17756865)
***
### 线程池
>
> [Python实现线程池](http://blog.csdn.net/pi9nc/article/details/17056961)
>
>>
>
> [线程池的研究与实现](http://www.cnblogs.com/coser/archive/2012/03/10/2389264.html)
>>
>> **什么是线程池**
>>
>> 线程池是预先创建线程的一种技术。线程池在还没有任务到来之前,创建一定数量的线程,放入空闲队列中。这些线程都是处于睡眠状态,即均未启动,不消耗CPU,而只是占用较小的内存空间。当请求到来之后,缓冲池给这次请求分配一个空闲线程,把请求传入此线程中运行,进行处理。当预先创建的线程都处于运行状态,即预制线程不够,线程池可以自由创建一定数量的新线程,用于处理更多的请求。当系统比较闲的时候,也可以通过移除一部分一直处于停用状态的线程。
>>
>> **线程池的注意事项**
>>
>> 虽然线程池是构建多线程应用程序的强大机制,但使用它并不是没有风险的。在使用线程池时需注意线程池大小与性能的关系,注意并发风险、死锁、资源不足和线程泄露等问题。
>> 1. 线程池大小。多线程应用并非线程越多越好,需要根据系统运行的软硬件环境以及应用本身的特点决定线程池的大小。一般来说,如果代码结构合理的话,线程数目与CPU数量相适合即可。如果线程运行时可能出现阻塞现象,可相应增加池的大小;如果必要可采用自适应算法来动态调整线程池的大小,以提高CPU的有效利用率和系统的整体性能。
>> 2. 并发错误。多线程应用要特别注意并发错误,要从逻辑上保证程序的正确性,注意避免死锁现象的发生。
>> 3. 线程泄露。这是线程池应用中一个严重的问题,当任务执行完毕而线程没能返回池中就会发生线程泄露现象。
<file_sep>/examples/spider/parser.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from bs4 import BeautifulSoup
def parser(text, text_encoding="UTF-8"):
    """Parse ``text`` into a BeautifulSoup tree (lxml backend).

    :text: document to parse
    :text_encoding: decoding hint passed to BeautifulSoup
    """
    return BeautifulSoup(text, "lxml", from_encoding=text_encoding)
<file_sep>/examples/threading/threading_basic.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
"""
import threading
from time import sleep, ctime
# Seconds each demo thread sleeps.
loops = [2, 4]


def loop(nloop, nsec):
    # Worker body: announce start, sleep nsec seconds, announce end.
    print 'start loop', nloop, 'at', ctime()
    sleep(nsec)
    print 'loop', nloop, 'done at:', ctime()


def main():
    # Demonstrates Thread.start()/join() semantics.  Calling join() on an
    # already-finished thread is a harmless no-op, which is why
    # threads[0].join() can be invoked repeatedly below.
    print 'starting at:', ctime()
    threads = []
    nloops = range(len(loops))
    for i in nloops:
        t = threading.Thread(target=loop, args=(i, loops[i]))
        threads.append(t)
    for i in nloops:
        threads[i].start()
    threads[0].join()
    threads[0].join()
    threads[0].join()
    print "0 join done, 0 alive %s" % threads[0].isAlive()
    threads[1].join()
    print "0 join done, 0 alive %s" % threads[0].isAlive()
    print "1 join done, 0 alive %s" % threads[1].isAlive()
    print 'all Done at: ', ctime()


if __name__ == '__main__':
    main()
<file_sep>/docs/bs4.md
# beautifulsoup4记录
### original_encoding is None
编码为unicode时,original_encoding返回None。
<file_sep>/examples/args_parse_with_argparse.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
argparse
"""
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("square",
help="display a square of a given number",
type=int)
parser.add_argument("-v",
"--verbose",
help="increase output verbosity",
action="store_true")
parser.add_argument("-c",
"--choice",
help="make a choice",
type=int,
choices=[0, 1, 2])
args = parser.parse_args()
if args.verbose:
print "verbosity turned on"
if args.choice == 0:
print "your choice is 0"
if args.choice == 1:
print "your choice is 1"
elif args.choice == 2:
print "your choice is 2"
else:
print "have your choice"
print args.square**2
<file_sep>/examples/spider/spider-temp.py
#!/usr/bin/env python
# -*_ coding: utf-8 -*-
"""
爬虫
"""
import argparse
import requests
import Queue
import threading
from requests import (ConnectionError, HTTPError, Timeout)
from bs4 import BeautifulSoup
# 关闭SNIMissingWarning和InsecurePlatformWarning警告
requests.packages.urllib3.disable_warnings()
# Work queue of URLs still to crawl, shared by all worker threads.
queue_req = Queue.Queue()
# NOTE(review): queue_next is never used anywhere in this file -- dead code?
queue_next = Queue.Queue()
# Every unique link discovered so far (protected by ``lock``).
link_list = []
# Lock guarding link_list membership checks and related queue updates.
lock = threading.Lock()
def requester(url):
"""抓取器
@param url: 请求地址
@return: 抓取到的资源,如果有异常,返回None
@rtype: str
"""
res = None
try:
res = requests.get(url, timeout=3)
except ConnectionError, e:
pass
except HTTPError, e:
pass
except Timeout, e:
pass
except Exception as e:
print e
finally:
return res
def parser(text, text_encoding):
    """Parse ``text`` and return every <a> element found in it.

    @param text: document to parse
    @param text_encoding: decoding hint passed to BeautifulSoup
    @return: list of <a> tag objects
    """
    soup = BeautifulSoup(text, "lxml", from_encoding=text_encoding)
    return soup.find_all("a")
def spider(url, requester, parser):
    """Fetch ``url``; return the href of each parsed <a>, or None on failure.

    @param url: address to crawl
    @param requester: callable(url) -> response or falsy on failure
    @param parser: callable(text, encoding) -> list of <a> elements
    """
    res = requester(url)
    if not res:
        return None
    anchors = parser(res.text, res.encoding)
    return [anchor.get("href") for anchor in anchors]
def worker():
    """Worker-thread body: crawl URLs from ``queue_req`` until it is empty.

    Shares the module-level ``queue_req``, ``link_list`` and ``lock``
    with every other worker thread.
    """
    while not queue_req.empty():
        url = queue_req.get()
        result_links = spider(url, requester, parser)
        if result_links:
            for link in result_links:
                # Guard the shared link_list / queue while checking
                # membership and appending, so two workers cannot both
                # enqueue the same link.
                lock.acquire()
                try:
                    if link not in link_list:
                        queue_req.put(link)
                        link_list.append(link)
                finally:
                    lock.release()
        else:
            # NOTE(review): a failed URL is re-queued unconditionally, so
            # a permanently dead link keeps workers busy forever --
            # confirm whether a retry limit was intended.
            queue_req.put(url)
def main():
# 创建解析器
parser = argparse.ArgumentParser()
# 添加参数
parser.add_argument("-u", help="种子链接", dest="url", required=True)
parser.add_argument("-d", help="爬虫深度",
dest="deep", type=int, required=True)
parser.add_argument("--key", help="页面关键词")
parser.add_argument("--thread",
help="线程池大小,默认为10",
type=int, default=10)
# 解析参数
args = parser.parse_args()
spider_url = ("http://" + args.url) if args.url else args.url
# spider_deep = args.deep
# spider_key = args.key
spider_threadpool_num = args.thread
if spider_url:
print "your request url is: %s" % spider_url
queue_req.put(spider_url)
threadpool = []
for i in range(spider_threadpool_num):
t = threading.Thread(name=("thread " + i),
target=worker)
threadpool.append(t)
for t in threadpool:
t.start()
t.join()
if __name__ == "__main__":
main()
<file_sep>/examples/spider/config.py
# href values that must never be treated as crawlable URLs: JavaScript
# pseudo-links and placeholders scraped from <a href="..."> attributes.
# NOTE(review): the first entry is the *string* "None"; the crawlers also
# check ``link is not None`` separately -- confirm both are intended.
not_url_list = ["None",
                "#jump0",
                "javascript:;",
                "javascript:void",
                "javascript:void 0;",
                "javascript:void(0)"]
<file_sep>/examples/spider/run.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import spider
import config
import time
import logging
# CLI log-level keys ("5" most verbose .. "1" most severe) mapped to the
# stdlib logging levels.
# NOTE(review): this table is never referenced anywhere in this module and
# duplicates logginger.LOG_LEVELS -- confirm and remove one of the copies.
LOG_LEVELS = {"5": logging.DEBUG,
              "4": logging.INFO,
              "3": logging.WARNING,
              "2": logging.ERROR,
              "1": logging.CRITICAL,
              }
def run():
    """Entry point: parse CLI options and hand them to the spider module.

    NOTE(review): spider.main is invoked below with seven positional
    arguments, but spider.main() in examples/spider/spider.py is defined
    with no parameters -- one of the two is out of date; confirm before
    running.
    """
    # build the argument parser
    parser = argparse.ArgumentParser()
    # register the command-line options (help texts are user-facing Chinese)
    parser.add_argument("-u", help="请求地址", dest="url", required=True)
    parser.add_argument("-d", help="爬取深度",
                        dest="deep", type=int, required=True)
    parser.add_argument("--key", help="页面关键词")
    parser.add_argument("--thread",
                        help="线程池大小,默认为10",
                        type=int, default=10)
    parser.add_argument("-l", help="日志等级", dest="loglevel", default="3")
    parser.add_argument("-f", help="日志文件",
                        dest="logfile", default="spider-log.log")
    # parse the arguments
    args = parser.parse_args()
    # prefix the scheme; args.url is required, so it is always truthy here
    spider_url = ("http://" + args.url) if args.url else args.url
    spider_deep = args.deep
    spider_key = args.key
    spider_threadpool_num = args.thread
    spider_loglevel = args.loglevel
    spider_logfile = args.logfile
    if spider_url:
        not_url_list = config.not_url_list
        now = time.time()
        spider.main(spider_url,
                    spider_deep,
                    spider_key,
                    spider_threadpool_num,
                    not_url_list,
                    spider_loglevel,
                    spider_logfile,
                    )
        # Python 2 print statement; reports total wall-clock crawl time
        print "cost: %s" % (time.time()-now)


if __name__ == "__main__":
    run()
<file_sep>/examples/spider/logginger.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
日志
"""
import logging
# CLI log-level keys ("5" most verbose .. "1" most severe) mapped to the
# stdlib logging levels; consumed by init_logger() below.
LOG_LEVELS = {"5": logging.DEBUG,
              "4": logging.INFO,
              "3": logging.WARNING,
              "2": logging.ERROR,
              "1": logging.CRITICAL,
              }
def init_logger(loglevel,
                logfile,
                logformatter=None,
                logname="applogger"):
    """Create and configure a named logger writing to a file and the console.

    :loglevel    : log-level key, one of the LOG_LEVELS keys ("1".."5")
    :logfile     : path of the log file to append to
    :logformatter: optional format string; a sensible default otherwise
    :logname     : name of the logger to configure
    :return      : the configured logging.Logger instance
    """
    # default record layout
    # bug fix: this string was previously assembled with backslash
    # continuations, which baked the source indentation into every record
    formatter_default = ("%(asctime)s - %(name)s - "
                         "%(levelname)s - %(message)s")
    if logformatter:
        formatter_default = logformatter
    # resolve the requested level (NOTSET for unknown keys)
    level = LOG_LEVELS.get(loglevel, logging.NOTSET)
    logger = logging.getLogger(logname)
    # bug fix: logging.basicConfig() was called here, which installed a root
    # StreamHandler; together with the explicit console handler below and
    # propagation to root, every record was printed twice.  Configure only
    # this logger instead.
    logger.setLevel(level)
    # bug fix: guard against stacking duplicate handlers when init_logger is
    # called more than once for the same name
    if not logger.handlers:
        formatter = logging.Formatter(formatter_default)
        # handler writing to the log file
        fh = logging.FileHandler(logfile)
        fh.setFormatter(formatter)  # bug fix: formatter was never applied
        # handler writing to the console
        ch = logging.StreamHandler()
        ch.setFormatter(formatter)
        logger.addHandler(fh)
        logger.addHandler(ch)
    return logger
<file_sep>/docs/solutions.md
# 技术选型
### requests
>
> [中文文档](http://cn.python-requests.org/zh_CN/latest/index.html)
***
### Beautiful Soup4
>
> [中文文档](http://beautifulsoup.readthedocs.io/zh_CN/v4.4.0/)
***
### argparse
>
> **参考资料**
>>
>> * [Argparse简易教程](http://blog.ixxoo.me/argparse.html)
>> * [Python标准库01:使用argparse解析命令行参数](http://www.readthecodes.com/2014/9/6/python-lib-argparse.html)
***
### Queue
>
> **参考资料**
>>
>> * [python 之queue](http://www.dbafree.net/?p=1118)
>> * [Queue – A thread-safe FIFO implementation](https://pymotw.com/2/Queue/)
***
### pybloomfilter(布隆过滤器)
>
> 布隆过滤器
>
> **参考资料**
>>
>> [pybloomfiltermmap](https://github.com/axiak/pybloomfiltermmap)
***
### logging
>
> 日志记录
>
> **参考资料**
>>
>> [python 的日志logging模块学习](http://www.cnblogs.com/dkblog/archive/2011/08/26/2155018.html)
>> [使用python的logging模块](http://bbs.chinaunix.net/thread-3590256-1-1.html)
>> [python logging ](http://blog.csdn.net/balderfan/article/details/7644807)
>> [每个 Python 程序员都要知道的日志实践](http://python.jobbole.com/81666/)
***
### unittest
>
> 自动测试框架
>
> **参考资料**
>> [提高你的Python能力:理解单元测试](http://blog.jobbole.com/55180/)
<file_sep>/docs/queue.md
# Queue
### Queue
队列长度可为无限或者有限。可通过Queue的构造函数的可选参数maxsize来设定队列长度。如果maxsize小于1就表示队列长度无限。
***
Queue模块有三种队列:
1. FIFO队列,先进先出。
2. LIFO队列,先进后出。
3. 优先级队列,级别越低越先出。
***
常用方法:
* Queue.qsize(),返回队列的大小;
* Queue.empty(),如果队列为空,返回True,反之False;
* Queue.full(),如果队列满了,返回True,反之False;
* Queue.get([block[,timeout]]),获取队列,timeout等待时间;
* Queue.get_nowait(),相当于Queue.get(False);
* Queue.put(item),写入队列;
* Queue.put_nowait(item),相当于Queue.put(item, False);
* Queue.task_done(),在完成一项工作之后,函数向任务已经完成的队列发送一个信号;
* Queue.join(),等到队列为空,再执行别的操作。
| 253e68bf64a3ba7e25f0f3bf549df39c6819fcc9 | [
"Markdown",
"Python"
] | 27 | Python | happyin3/spider-cus | 839f856c6a6958bc658840a0506ae0fc2acc313a | f79613cb3f19dea18955b2aae67442f75f6a5ea5 |
refs/heads/master | <repo_name>garretreichenbach/AtlasCore<file_sep>/src/thederpgamer/atlascore/AtlasCore.java
package thederpgamer.atlascore;
import api.mod.StarMod;
import com.sun.webpane.platform.ConfigManager;
import java.util.logging.LogManager;
/**
 * Main class for the AtlasCore mod; the entry point registered with the
 * StarLoader API ({@link api.mod.StarMod}).
 *
 * @author TheDerpGamer
 * @since 06/17/2021
 */
public class AtlasCore extends StarMod {

    // Singleton instance, assigned when the loader calls onEnable().
    private static AtlasCore instance;

    /** @return the singleton mod instance (null until onEnable() has run). */
    public static AtlasCore getInstance() {
        return instance;
    }

    public AtlasCore() {
    }

    public static void main(String[] args) {
    }

    @Override
    public void onEnable() {
        instance = this;
        // NOTE(review): ConfigManager is imported from com.sun.webpane.platform,
        // a JDK-internal WebKit class with no initialize(StarMod) method --
        // this almost certainly should be a mod-local ConfigManager; confirm
        // the import.
        ConfigManager.initialize(this);
        // NOTE(review): java.util.logging.LogManager has no no-arg initialize()
        // method either -- presumably a mod-local LogManager was intended.
        LogManager.initialize();
        registerFastListeners();
        //registerListeners(); Todo: Fix DisplayScreen orientation
    }
}
<file_sep>/src/thederpgamer/atlascore/api/manager/ResourceManager.java
package thederpgamer.atlascore.api.manager;
/**
 * Stub for mod resource management.
 *
 * NOTE(review): currently an empty class, but
 * thederpgamer.atlascore.api.elements.blocks.Factory already calls
 * ResourceManager.getTexture(...), which nothing in this project defines
 * yet -- TODO implement the texture lookup (or fix Factory's reference).
 *
 * @author TheDerpGamer
 * @since 06/17/2021
 */
public class ResourceManager {
}
<file_sep>/src/thederpgamer/atlascore/api/elements/ElementManager.java
package thederpgamer.atlascore.api.elements;
/**
 * Manages mod blocks and items.
 *
 * NOTE(review): currently an empty stub, but Factory calls
 * ElementManager.addFactory(this) -- TODO implement that registration
 * method before Factory subclasses can be constructed.
 *
 * @author TheDerpGamer
 * @since 06/17/2021
 */
public class ElementManager {
}
<file_sep>/README.md
# AtlasCore
<file_sep>/src/thederpgamer/atlascore/api/elements/blocks/Factory.java
package thederpgamer.atlascore.api.elements.blocks;
import api.config.BlockConfig;
import org.schema.game.common.data.element.ElementCategory;
import org.schema.game.common.data.element.ElementInformation;
import thederpgamer.atlascore.api.elements.ElementManager;
/**
 * Abstract base class for custom Factory blocks: resolves one texture id per
 * cube face from the block name plus per-side suffixes, registers the block
 * via BlockConfig and adds it to the ElementManager.
 *
 * @author TheDerpGamer
 * @since 06/17/2021
 */
public abstract class Factory {

    // Vanilla factory tiers a block can map onto.
    public enum FactoryType {NONE, CAPSULE_REFINERY, MICRO_ASSEMBLER, BASIC_FACTORY, STANDARD_FACTORY, ADVANCED_FACTORY}

    // Engine-side block definition created during construction.
    protected ElementInformation blockInfo;

    /**
     * @param name      display name of the block; also the texture-name prefix
     * @param category  element category the block is listed under
     * @param sideNames per-side texture-name suffixes (up to 6, one per face)
     */
    public Factory(String name, ElementCategory category, String... sideNames) {
        short[] textureIds = new short[6];
        String replace = name.toLowerCase().trim().replace(" ", "-");
        int i;
        // First pass: one texture id per provided side name (at most 6).
        // NOTE(review): ResourceManager.getTexture is not imported here, and
        // the project's ResourceManager class is currently empty -- confirm
        // where this helper is meant to live.
        for(i = 0; i < textureIds.length && i < sideNames.length; i ++) {
            String sideName = sideNames[i].toLowerCase().trim().replace(" ", "-");
            String textureName = replace + "-" + sideName;
            textureIds[i] = (short) ResourceManager.getTexture(textureName).getTextureId();
        }
        // Second pass: if fewer side names than faces were supplied, reuse the
        // side names from the start for the remaining faces.
        // NOTE(review): the guard is i < 5, so when exactly 5 side names are
        // given the sixth face keeps texture id 0 -- possible off-by-one,
        // confirm the intended behaviour.
        if(i < 5) {
            for(int j = 0; i < textureIds.length && j < sideNames.length; i ++) {
                String sideName = sideNames[j].toLowerCase().trim().replace(" ", "-");
                String textureName = replace + "-" + sideName;
                textureIds[i] = (short) ResourceManager.getTexture(textureName).getTextureId();
                j ++;
            }
        }
        // NOTE(review): DerpsDecor is not imported and appears to belong to a
        // different mod -- this looks like a copy/paste leftover; presumably
        // AtlasCore.getInstance() was intended here.
        blockInfo = BlockConfig.newFactory(DerpsDecor.getInstance(), name, textureIds);
        BlockConfig.setElementCategory(blockInfo, category);
        ElementManager.addFactory(this);
    }

    /** @return the engine block definition backing this factory block */
    public final ElementInformation getBlockInfo() {
        return blockInfo;
    }

    /** @return the registered element/block id */
    public final short getId() {
        return blockInfo.getId();
    }

    // Subclass-specific setup hook; its exact responsibilities are not
    // evident from this file -- see concrete Factory implementations.
    public abstract void initialize();
} | 948667797e932966824bd8312e52caa89c7edf2e | [
"Markdown",
"Java"
] | 5 | Java | garretreichenbach/AtlasCore | bf597c4a8873540de8a3ae57920618ee4bb1323f | 3f7358b4110dd7a63205ba6183dc76d7d4530a90 |
refs/heads/master | <repo_name>ebbitten/OSBuddy<file_sep>/old ideas/ToDo.txt
close browsers afterwards
Print out the buy/sell prices for results of high alch analyses
get rid of "rsItem" in high alch analysis
group together items in high alch analaysis
Have some sort of filter by trade volume as a function of total gp
Fix selenium<file_sep>/README.md
# OSBuddy
APIs to fetch and analyze information from OSBuddy
<file_sep>/old ideas/OSbuddyRun.py
# highAlchBest('prices.txt')
import OSBRequests
import OSBAlgos
OSBRequests.populateCurrentOpenOrders()
OSBAlgos.betterMatchMaking()<file_sep>/OSBRequests.py
'''
This will be a library to create different json objects
Need one to for creating a snapshot of prices
Need at least one for creating objects that have a time history
'''
#TODO: Will create text files, OSBFunctions should be able to take text files and object them and pickle them
#TODO put all text file names in globals at the top
#TODO figure out a way to be able to pause and resume the long queries
import json
import requests
from selenium import webdriver
import time
import datetime
from OSBFunctions import open_json
import functools
import csv
import pickle
# 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36',
HEADERS = {
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:32.0) Gecko/20100101 Firefox/32.0',
'cookie': 'redacted cloudflare cookie'
}
#TODO put all true global filepaths somewhere very global
RSBUDDY_EXCHANGE_NAMES_URL = 'https://rsbuddy.com/static/exchange/names.json'
RSBUDDY_EXCHANGE_ITEM_ID_PRICE_URL = 'https://api.rsbuddy.com/grandExchange'
GE_EXCHANGE_URL = 'http://services.runescape.com/m=itemdb_oldschool/api/graph/'
GE_HISTORIC_JSON_PRICE_FILE = 'data/historicPrice.json'
GE_HISTORIC_CSV_PRICE_FILE = 'data/historic.csv'
OSB_HISTORIC_JSON_PRICE_FILE = 'data/OSBhistoricPrice.json'
OSB_HISTORIC_CSV_PRICE_FILE = 'data/osbHistoric.csv'
OSB_HISTORIC_CSV_FILE_PATH = 'C:/Users/adamh/Google Drive/Post College/Programming/Python/OSBuddy/OSBuddy/csv/'
def get_id(name, names):
for k, v in names.items():
if v["name"] == name:
return k
def getOSBuddySummary():
'''
:return: Javascript Object of the following form but for all items:
{"2": {"sp": 5, "name": "Cannonball", "buy_average": 205, "id": 2,
"overall_average": 207, "sell_average": 206, "members": true}}
'''
names = getElementByBrowser(RSBUDDY_EXCHANGE_NAMES_URL)
items_file = open('item.txt', 'w')
items_file.write(names)
items_file.close()
base_prices = getElementByBrowser(RSBUDDY_EXCHANGE_ITEM_ID_PRICE_URL)
prices_file = open('old ideas/prices.txt', 'w')
prices_file.write(base_prices)
prices_file.close()
def getElementByBrowser(url=RSBUDDY_EXCHANGE_NAMES_URL, element='/html/body'):
browserObj = webdriver.WebDriver()
browserObj.get(url)
time.sleep(5)
print(browserObj)
elem = browserObj.find_element_by_xpath(element)
return elem.text
def queryPrice(url):
'''
takes in a url, returns a JSON object
:param url:
:return:
'''
print(url)
try:
price = json.loads(requests.get(url,headers=HEADERS).text)
return price
except :
#TODO update this so that it can be used to return an error log that can be autoreplayed
print("Failed with ")
print(url)
return "Delete"
def getPrice(itemID, type='graph', startTime=0, frequency=1440):
'''
:param itemID: ID to pass in
:param type: 'graph' to get timestamped based prices
, 'guidePrice' to get current buy/sell and quantities
:param startTime: miliseconds since 1/1 1970. time.time() returns current milliseconds
:param frequency: how many minutes
:return:js Obbject with properties depdendent on the type parameter.
'guidePrice' returns:
{"overall":206,"buying":207,"buyingQuantity":598437,"selling":206,"sellingQuantity":564859}
'graph' returns:{"ts":ttt,"buyingPrice":209,"buyingCompleted":589394,"sellingPrice":208,
"sellingCompleted":404612,"overallPrice":208,"overallCompleted":994006}]
'''
startTime=int(startTime)
url = RSBUDDY_EXCHANGE_ITEM_ID_PRICE_URL + '?a=' +str(type) + '&start=' +str(startTime) + \
'&g=' + str(frequency) +'&i=' +str(itemID)
return queryPrice(url)
def getPriceGE(itemID):
'''
Gets the price from the OSRS GE, benefit is that it can go back six months
:param itemID: ID to pass in
:return: JSON object of the form "ts":price
'''
url = GE_EXCHANGE_URL + str(itemID) + ".json"
return queryPrice(url)
def ts2date(ts):
dateObj = datetime.date.fromtimestamp(ts/1000)
print(dateObj.ctime())
def fillJSONfromFunction(dictObj, functionObj, timeSleep = .5, tries = 3):
'''
Takes in a dictionary object to populate data from a given source using the function object
:param dictObj: dictionary that gets filled with information based on the requestor function passed in
:param functionObj: requestor function to query websites for prices
:param timeSleep: how long to sleep between requests
:param tries: how many times to try to get the same item populated, necessary to prevent infinite loop
:return: Nothing, mutates the dictObj though
'''
items = open_json('data/items.txt')
rerun = []
previousItem = ""
count = 0
for i in items:
dictObj[i] = functionObj(i)
if dictObj[i] == "Delete":
rerun.append(i)
count += 3 #once we get to retrying, try 3 as often as your total failures
time.sleep(timeSleep)
while len(rerun)>0:
currentItem = rerun.pop(0)
print("Retrying " + str(currentItem))
dictObj[currentItem] = functionObj(currentItem)
if dictObj[currentItem] == "Delete":
rerun.append(currentItem)
time.sleep(timeSleep)
count -= 1
if count ==0:
break
def populateHistoricalJSON(startTime=0, frequency=1440, timeSleep = 4, tries = 3, source = "GE"):
items = open_json('data/items.txt')
historicals = {}
if source == "GE":
pricerequestor = functools.partial(getPriceGE)
else:
pricerequestor = functools.partial(getPrice, startTime=startTime, frequency=frequency)
fillJSONfromFunction(historicals, pricerequestor, timeSleep)
if source == "GE":
historic_file = open(GE_HISTORIC_JSON_PRICE_FILE, 'w')
f = open('pickledumpGE', 'wb')
pickle.dump(historicals, f)
f.close()
else:
historic_file = open(OSB_HISTORIC_JSON_PRICE_FILE, 'w')
f = open('pickledumpOSB', 'wb')
pickle.dump(historicals, f)
f.close()
historic_file.write(json.dumps(historicals))
historic_file.close()
def populateCurrentOpenOrders(timeSleep=.5):
items = open_json('items.txt')
currentOpen = {}
OSBcurrentRequestor = functools.partial(get_price, type ='guidePrice')
fillJSONfromFunction(currentOpen, OSBcurrentRequestor, timeSleep)
currentOpen_file = open('data/currentOpen', 'w')
currentOpen_file.write(json.dumps(currentOpen))
currentOpen_file.close()
print("Completed populating current orders!")
def createCSVfromJSON(JSONfile, csvFile, encoding, parserData, parserTS):
'''First row will have the encoding format for how data will be stored
in each cell in the first column (decided by the encoding parameter
, and all the timestamps in the rest of the columns. The parsing parameter
will be a function that assists with decoding the JSON. Columns should go from oldest to newest
Ultimately this should let me work with Pandas and do things in terms of arrays
parserData and parserTS should both take in a JSON obj and an item id
'''
items = open_json('data/items.txt')
#make hook to remove any ']' or '[' from JSON for OSB
with open(JSONfile, 'r') as f:
filestr = f.read()
print(filestr)
#jsonstr = filestr.translate(None, "[]""[]")
#print(jsonstr)
JSONObj = json.loads(filestr)
firstLine = True
csvObj = open(csvFile, 'w', newline='')
csvwriter = csv.writer(csvObj)
for i in items:
dataLine = parserData(i, JSONObj) #This should either be a list with the same number of elements as tsLine or a dict
if firstLine:
tsLine = parserTS(i, JSONObj) # Can control the order of a list when printing to CSV which makes it attractive
header = tsLine
header.insert(0, encoding)
csvwriter.writerow(header)
firstLine = False
currentLine = dataLine[:]
currentLine.insert(0, i) #also insert the item number in the 0th column
csvwriter.writerow(currentLine)
csvObj.close()
def updateCSVfromJSON():
pass
def makeHistoricCSVfromGE(jsonFile = GE_HISTORIC_JSON_PRICE_FILE, csvFile = GE_HISTORIC_CSV_PRICE_FILE, pullData = None):
#TODO consider putting all of these functions into classes and methods
encoding = "Daily average price; six month trend"
def GEJSONparserData(i, JSONObj):
dailyitemObj = JSONObj[i]["daily"]
runAvgItemObj = JSONObj[i]["average"]
timeStamps = dailyitemObj.keys()
rtimeStamps = sorted(timeStamps)
prices = []
for ts in rtimeStamps:
prices.append(str(dailyitemObj[ts]) + ";" + str(runAvgItemObj[ts]))
return prices
def GEJSONparserTS(i, JSONObj):
itemObj = JSONObj[i]["daily"]
timeStamps = itemObj.keys()
rtimeStamps = sorted(timeStamps)
return rtimeStamps
if pullData == "full":
populateHistoricalJSON(source = "GE")
parserData = functools.partial(GEJSONparserData) #TODO Do I actually need these or could I just pass the function directly?
parserTS = functools.partial(GEJSONparserTS)
createCSVfromJSON(jsonFile, csvFile, encoding, parserData, parserTS)
def makeHistoricCSVfromOSB(jsonFile = OSB_HISTORIC_JSON_PRICE_FILE, csvFilePath = OSB_HISTORIC_CSV_FILE_PATH, pullData = "full"):
allEncoding = "buyingPrice;buyingCompleted;sellingPrice;sellingCompleted;overallPrice;overallCompleted"
if pullData == "full":
populateHistoricalJSON(source="OSB")
def OSBJSONparserTS(i, JSONObj):
allts = {}
items = open_json('items.txt')
maxlen = 0
index = 0
for item in items:
ts = []
for line in JSONObj[item]:
ts.append(line['ts'])
allts[item] = ts
curlen = len(ts)
if curlen > maxlen:
maxlen = curlen
index = item
return allts[index]
for encoder in allEncoding.split(";"):
def OSBJSONparserData(i, JSONObj):
row = []
for line in JSONObj[i]:
cell = ""
try:
value = line[encoder]
except KeyError:
value = 'NaN'
cell += str(value)
row.append(cell)
return row
csvFile = csvFilePath + str(encoder) + '.csv'
encoding = 'Item number for ' + str(encoder)
createCSVfromJSON(jsonFile, csvFile, encoding, OSBJSONparserData, OSBJSONparserTS)
# makeHistoricCSVfromGE()
# populateHistoricalJSON(timeSleep=1, tries=3, source = "OSB")
# print(getPrice(5321,'guidePrice'))
# makeHistoricCSVfromOSB()
<file_sep>/Makefile
ssh-desktop:
ssh -N -f -L localhost:8888:localhost:8888 adamh@192.168.1.73<file_sep>/old ideas/OSBFunctions.py
import json
import operator
class NotTraded(Exception):
pass
def get_id(name, names):
for k, v in names.items():
if v["name"]==name:
return k
def open_json(fileLoc):
file = open(fileLoc,"r")
Obj = ""
for line in file:
Obj += line
Obj = json.JSONDecoder().decode(Obj) #TODO figure out why I can't use json.loads instead of JSONDecoder/all this
return Obj
def compare_items_create_list (curList, pricesObj, comparisonKey, maxLen):
'''
:param curList: list that's manipulated in place
:param pricesObj: JSON object that has all of the price data for a snapshot
:param comparisonKey: function that only accepts rsItem as a parameter (may assume priceObj)
and should be used to compare
:param maxLen: how long you want the list to be
:return: Nothing
'''
for rsItem in pricesObj.items:
try:
metric = comparisonKey(rsItem)
except NotTraded:
continue
if len(curList) < maxLen:
curList.append([rsItem, metric])
else:
curMinMetricItem = min(curList, key=operator.itemgetter(1))
curMinMetric = curMinMetricItem[1]
if metric > curMinMetric:
curList.remove(curMinMetricItem)
curList.append([rsItem, metric])
curList.sort(key=operator.itemgetter(1))
class rsItem (dict):
def __init__(self,ID,name):
#comes from items.txt
self.ID = ID
self.name = name
#comes from currentOpen.txt
self.buyingQuantity=0
self.buying=0
self.selling=0
self.sellingQuantity=0
self.overall = 0
#Is populated druing runs
self.profit = 0
self.metric = 0
#TODO figure out why I can't populate any of the above at run time, also why can it only be called in bracket notation?
class pricesDict(object):
def __init__(self):
self.items = []
items = open_json('data/items.txt')
for i in items:
self.items.append(rsItem(i,items[i]['name']))
def addOpen(self):
'''
:return: Populates rsItems with "BuyingQuantity", "Buying" (price), "Selling" (price), "Selling Quantity", and
"Overall" (Price)
'''
currentOpen = open_json('data/currentOpen')
for item in self.items:
if item.ID in currentOpen:
for key in currentOpen[str(item.ID)]:
item[key] = currentOpen[item.ID][key]
def createPandasFromCSV(csv, pandasDataFrame):
pass<file_sep>/old ideas/triBotEx.py
import json
import requests
HEADERS = {
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36',
'cookie': 'redacted cloudflare cookie'
}
RSBUDDY_EXCHANGE_NAMES_URL = 'https://rsbuddy.com/static/exchange/names.json'
RSBUDDY_EXCHANGE_ITEM_ID_PRICE_URL = 'https://api.rsbuddy.com/grandExchange?a=guidePrice&i='
def get_id(name, names):
for k, v in names.items():
if v['name'] == name:
return k
def get_price(item_id):
price = json.loads(requests.get(RSBUDDY_EXCHANGE_ITEM_ID_PRICE_URL + str(item_id), headers=HEADERS).text)
return price['overall']
def main():
names = json.loads(requests.get(RSBUDDY_EXCHANGE_NAMES_URL, headers=HEADERS).text)
items_file = open('../data/items.txt', 'r')
prices_file = open('prices.txt', 'w')
for line in items_file:
line = line.replace('\n', '')
prices_file.write(line + ':' + str(get_price(get_id(line, names))) + '\n')
items_file.close()
prices_file.close()
main()<file_sep>/old ideas/OSBAlgos.py
import functools
from OSBFunctions import open_json, get_id, compare_items_create_list, pricesDict
def highAlchBest(pricesSummaryLoc = "ItemSummary1_23.json", maxLen = 10,
priceKeys = ["overall_average", "buy_average", "sell_average"]):
#open files
pricesObj = open_json(pricesSummaryLoc)
#arrays for top 10
top10Dict ={}
for key in priceKeys:
top10Dict[key] = {}
for key in top10Dict:
top10Dict[key]["List"] = []
natureRunePrice = (pricesObj[get_id("Nature rune", pricesObj)]["overall_average"])
#make our compare function
def compareHighAlch(priceKey,rsItem):
storePrice = pricesObj[rsItem]["sp"]
highAlch = .6 * int(storePrice)
exchangePrice = pricesObj[rsItem][priceKey]
if exchangePrice == 0:
raise notTraded
cost = exchangePrice + natureRunePrice
profit = highAlch - cost
return profit
#loop to populate our lists
for key in top10Dict:
curList = top10Dict[key]["List"]
compareFunc = functools.partial(compareHighAlch, key)
compare_items_create_list(curList, pricesObj, compareFunc, maxLen)
#print out the results
print("Printing out the result of High Alch analysis")
for key in top10Dict:
print("Top 10 items ranked give",key,"assumption")
curList = top10Dict[key]["List"]
for rsItem in curList:
print(str(key)+ " " + str(pricesObj[rsItem[0]]["name"]) + " rsItem profit " + str(rsItem[1]))
def findMatchMaking(pricesSummaryLoc = "ItemSummary1_23.json", maxlen = 10):
# TODO: get some measure of liquidity in here
'''
:param pricesSummaryLoc: snapshot (currenlty, should have some history for sanity checks)
:param maxlen: how many items you want to consider
:return: nothing, currently just prints to terminal
'''
pricesObj = open_json(pricesSummaryLoc)
curList = []
def compareMatchMaking(rsItem):
rsItemObj = pricesObj[rsItem]
averagePrice = rsItemObj["overall_average"]
buyingPrice = rsItemObj["buy_average"]
sellingPrice = rsItemObj["sell_average"]
if min(averagePrice,buyingPrice,sellingPrice) <= 0:
raise notTraded
#TODO remove this and replace with better logic for checking if it's actually traded as well as price prediction
# if averagePrice>(sellingPrice*1.2):
# raise notTraded
metric = ((sellingPrice*.95 -1) - (buyingPrice*1.05 + 1))/averagePrice
return metric
compare_items_create_list(curList, pricesObj, compareMatchMaking, maxlen)
for rsItem in curList:
print("Potential Profit for item", pricesObj[rsItem[0]]["name"], "Is", round(rsItem[1],5), "buy price",
pricesObj[rsItem[0]]["buy_average"], "sell price", pricesObj[rsItem[0]]["sell_average"],
"average", pricesObj[rsItem[0]]["overall_average"])
def betterMatchMaking( price_file = 'currentOpen', maxlen = 10, minProfit = 200000, maxSpending = 40000000, volLimit=10):
pricesObj = pricesDict()
pricesObj.addOpen()
curList = []
def compareMatchMaking(minProfit, maxSpending, rsItem):
try:
buyingPrice = rsItem['buying']
sellingPrice = rsItem['selling']
volume = min(rsItem['buyingQuantity'], rsItem['sellingQuantity'])
except KeyError:
return 0
profitPer = (sellingPrice-1) - (buyingPrice+1)
if profitPer<0:
return 0
if volume<volLimit:
return 0
if volume * profitPer < minProfit:
return 0
if (minProfit/(profitPer))*sellingPrice>maxSpending:
return 0
else:
metric = profitPer/buyingPrice
rsItem['profit'] = profitPer
return metric
compareFunc = functools.partial(compareMatchMaking,minProfit,maxSpending)
compare_items_create_list(curList, pricesObj, compareFunc, maxlen)
for group in curList:
rsItem = group[0]
print("Potential profit for ", rsItem.name, " is ", rsItem['profit'],
" Buy Price ", rsItem['buying'], "Sell Price ", rsItem['selling'], "buy Quantity ",
rsItem['buyingQuantity'], " Sell Quantity ", rsItem['sellingQuantity']," ID is ", rsItem.ID)
betterMatchMaking()<file_sep>/requirements.txt
apipkg==1.5
attrs==19.3.0
backcall==0.2.0
beautifulsoup4==4.9.1
bleach==3.1.5
blis==0.4.1
boto3==1.14.16
botocore==1.17.16
Bottleneck==1.3.2
catalogue==1.0.0
certifi==2020.6.20
chardet==3.0.4
colorama==0.4.3
coverage==5.2
cycler==0.10.0
cymem==2.0.3
decorator==4.4.2
defusedxml==0.6.0
distro==1.5.0
docopt==0.6.2
docutils==0.15.2
EasyProcess==0.3
entrypoint2==0.2.1
entrypoints==0.3
execnet==1.7.1
fastai==1.0.61
fastai2==0.0.17
fastcore==0.1.18
fastprogress==0.2.3
future==0.18.2
gitdb==4.0.5
GitPython==3.1.7
idna==2.10
importlib-metadata==1.7.0
ipykernel==5.3.1
ipython==7.16.1
ipython-genutils==0.2.0
ipywidgets==7.5.1
jedi==0.17.1
Jinja2==2.11.2
jmespath==0.10.0
joblib==0.16.0
jsonschema==3.2.0
jupyter==1.0.0
jupyter-client==6.1.5
jupyter-console==6.1.0
jupyter-contrib-core==0.3.3
jupyter-contrib-nbextensions==0.5.1
jupyter-core==4.6.3
jupyter-highlight-selected-word==0.2.0
jupyter-latex-envs==1.4.6
jupyter-nbextensions-configurator==0.4.1
kiwisolver==1.2.0
llvmlite==0.33.0
lxml==4.5.2
MarkupSafe==1.1.1
matplotlib==3.2.2
mistune==0.8.4
more-itertools==8.4.0
murmurhash==1.0.2
nbconvert==5.6.1
nbdime==2.0.0
nbformat==5.0.7
notebook==6.0.3
numba==0.50.1
numexpr==2.7.1
numpy==1.19.0
nvidia-ml-py3==7.352.0
packaging==20.4
pandas==1.0.5
pandocfilters==1.4.2
parso==0.7.0
patsy==0.5.1
pexpect==4.8.0
pickleshare==0.7.5
Pillow==7.2.0
pipreqs==0.4.10
plac==1.1.3
pluggy==0.13.1
preshed==3.0.2
prometheus-client==0.8.0
prompt-toolkit==3.0.5
psutil==5.7.0
ptyprocess==0.6.0
py==1.9.0
Pygments==2.6.1
pyparsing==2.4.7
pyrsistent==0.16.0
pytest==5.4.3
pytest-forked==1.2.0
pytest-xdist==1.33.0
python-dateutil==2.8.1
python-dotenv==0.14.0
pytz==2020.1
pyunpack==0.2.1
PyWavelets==1.1.1
PyYAML==5.3.1
pyzmq==19.0.1
qtconsole==4.7.5
QtPy==1.9.0
requests==2.24.0
responses==0.10.15
s3transfer==0.3.3
scikit-learn==0.23.1
scipy==1.5.1
Send2Trash==1.5.0
six==1.15.0
sktime==0.4.1
smmap==3.0.4
soupsieve==2.0.1
spacy==2.3.0
srsly==1.0.2
statsmodels==0.11.1
terminado==0.8.3
testpath==0.4.4
thinc==7.4.1
threadpoolctl==2.1.0
torch==1.5.1
torchvision==0.6.1
tornado==6.0.4
tqdm==4.47.0
traitlets==4.3.3
tsai==0.1.0
urllib3==1.25.9
wasabi==0.7.0
wcwidth==0.2.5
webencodings==0.5.1
widgetsnbextension==3.5.1
yarg==0.1.9
zipp==3.1.0
| abd6ceb30244aebbd487809d4ade660d361fba34 | [
"Markdown",
"Python",
"Text",
"Makefile"
] | 9 | Text | ebbitten/OSBuddy | e1956a3cd1e0020bccc3d1d8ce6c46029cce570c | a751d0e9ac52af1784a5121f32a664bd25a169eb |
refs/heads/master | <file_sep>from flask import Flask, render_template
app = Flask(__name__)
posts=[
{
"name":"santosh",
"age":12
},
{
"name":"maya",
"age":"9"
}
]
@app.route('/')
def hello_world():
return render_template('home.html', posts=posts)
@app.route('/about')
def about():
return render_template('about.html')
| 9b362912582902514690a58f298ac3c74a32bd4d | [
"Python"
] | 1 | Python | santoshbishnoi/flask_app | c43f1b7535593cbd71f5b5958676a9eeb8cc3020 | 365180287340b17151e4e9b3a9dc750187ce0749 |
refs/heads/master | <file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package model;
/**
*
* @author it3530108
*/
public class LoginBean {
// These correspond to the form elements
private String firstName;
private String lastName;
private String userID;
private String password;
private String confirmPwd;
private String emailID;
private String street;
private String city;
private String state;
private String country;
private String securityQue;
private String securityAns;
private String securityQue1;
private String securityAns1;
public LoginBean() {
}
public LoginBean(String firstName, String lastName, String userID, String password, String confirmPwd, String emailID,String street,String city,String state,String country, String securityQue, String securityAns, String securityQue1, String securityAns1) {
this.firstName = firstName;
this.lastName = lastName;
this.userID = userID;
this.password = <PASSWORD>;
this.confirmPwd = <PASSWORD>;
this.emailID = emailID;
this.securityQue = securityQue;
this.securityAns = securityAns;
}
/**
* @return the firstName
*/
public String getFirstName() {
return firstName;
}
/**
* @param firstName the firstName to set
*/
public void setFirstName(String firstName) {
this.firstName = firstName;
}
/**
* @return the lastName
*/
public String getLastName() {
return lastName;
}
/**
* @param lastName the lastName to set
*/
public void setLastName(String lastName) {
this.lastName = lastName;
}
/**
* @return the userID
*/
public String getUserID() {
return userID;
}
/**
* @param userID the userID to set
*/
public void setUserID(String userID) {
this.userID = userID;
}
/**
* @return the password
*/
public String getPassword() {
return password;
}
/**
* @param password the password to set
*/
public void setPassword(String password) {
this.password = <PASSWORD>;
}
/**
* @return the confirmPwd
*/
public String getConfirmPwd() {
return confirmPwd;
}
/**
* @param confirmPwd the confirmPwd to set
*/
public void setConfirmPwd(String confirmPwd) {
this.confirmPwd = confirmPwd;
}
/**
* @return the emailID
*/
public String getEmailID() {
return emailID;
}
/**
* @param emailID the emailID to set
*/
public void setEmailID(String emailID) {
this.emailID = emailID;
}
/**
* @return the securityQue
*/
public String getSecurityQue() {
return securityQue;
}
/**
* @param securityQue the securityQue to set
*/
public void setSecurityQue(String securityQue) {
this.securityQue = securityQue;
}
/**
* @return the securityAns
*/
public String getSecurityAns() {
return securityAns;
}
/**
* @param securityAns the securityAns to set
*/
public void setSecurityAns(String securityAns) {
this.securityAns = securityAns;
}
/**
* @return the street
*/
public String getStreet() {
return street;
}
/**
* @param street the street to set
*/
public void setStreet(String street) {
this.street = street;
}
/**
* @return the city
*/
public String getCity() {
return city;
}
/**
* @param city the city to set
*/
public void setCity(String city) {
this.city = city;
}
/**
* @return the state
*/
public String getState() {
return state;
}
/**
* @param state the state to set
*/
public void setState(String state) {
this.state = state;
}
/**
* @return the country
*/
public String getCountry() {
return country;
}
/**
* @param country the country to set
*/
public void setCountry(String country) {
this.country = country;
}
/**
* @return the securityQue1
*/
public String getSecurityQue1() {
return securityQue1;
}
/**
* @param securityQue1 the securityQue1 to set
*/
public void setSecurityQue1(String securityQue1) {
this.securityQue1 = securityQue1;
}
/**
* @return the securityAns1
*/
public String getSecurityAns1() {
return securityAns1;
}
/**
* @param securityAns1 the securityAns1 to set
*/
public void setSecurityAns1(String securityAns1) {
this.securityAns1 = securityAns1;
}
/**
* @return the clientName
*/
}
<file_sep>//$(document).ready(function () {
// var canvas = document.getElementById("ex1");
// var ctx = canvas.getContext("2d");
// var image = new Image();
// image.src = "resources/images/sample.jpg";
// $(image).load(function () {
// ctx.drawImage(image, 0, 0, 1000, 1000);
// ctx.fillStyle = "white";
// ctx.fillRect(0, 0, 1000, 1000);
//// ctx.fillRect(0, 202, 1000, 400);
//// ctx.fillRect(100, 402, 900, 398);
//// ctx.fillRect(0, 802, 1000, 400);
// });
// $(canvas).click(function (e) {
// var canvasOffset = $(canvas).offset();
// var canvasX = Math.floor(e.pageX - canvasOffset.left);
// var canvasY = Math.floor(e.pageY - canvasOffset.top);
//
// var imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
// var pixels = imageData.json1;
// var pixelRedIndex = ((canvasY - 1) * (imageData.width * 4)) + ((canvasX - 1) * 4);
// console.log(pixelRedIndex);
// var pixelcolor = "rgba(" + pixels[pixelRedIndex] + ", " + pixels[pixelRedIndex + 1] + ", " + pixels[pixelRedIndex + 2] + ", " + pixels[pixelRedIndex + 3] + ")";
//
// ctx.drawImage(image,canvasX, canvasY, 100, 100, canvasX, canvasY, 100, 100);
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//// $("body").css("backgroundColor", pixelcolor);
// });
//});
$(document).ready(function () {
    // Source image used as the fill pattern for the pixel-map canvas.
    var imageObj = new Image();
    imageObj.src = 'resources/images/sample.jpg';
    imageObj.width = 1000;
    imageObj.height = 1000;
    imageObj.id = "image1";

    var canvas = document.getElementById('ex1');
    var ctx = canvas.getContext('2d');
    var imageWidth = imageObj.width;
    var imageHeight = imageObj.height;

    // Canvas position in the viewport, used to translate mouse (client)
    // coordinates into canvas coordinates. Recomputed on scroll/resize.
    var offsetX, offsetY;
    function reOffset() {
        var mouse = canvas.getBoundingClientRect();
        offsetX = mouse.left;
        offsetY = mouse.top;
    }
    reOffset();
    window.onscroll = function (e) {
        reOffset();
    };
    window.onresize = function (e) {
        reOffset();
    };

    $("#ex1").mousemove(function (e) {
        handleMouseChange(e);
    });
    $("#ex1").mouseout(function (e) {
        // Clear the name label when the pointer leaves the canvas.
        $("#nm").html("");
    });

    imageObj.onload = function () {
        canvas.width = imageWidth;
        canvas.height = imageHeight;
        // Fill the per-segment rectangles with the matching image pixels.
        ctx.fillStyle = ctx.createPattern(this, "no-repeat");
        makeRect();
    };

    // Scans the #json1 segment list; when (mouseX, mouseY) falls inside one of
    // its 1px-high segments, shows that segment's revealName in #nm.
    // NOTE: re-parses #json1 on every call, so edits to the embedded JSON are
    // picked up live (at the cost of a parse per mousemove).
    function defineNames(mouseX, mouseY) {
        var CoOrX = 0;
        var CoOrY = 0;
        var iniCoordX = 0;
        var iniCoordY = 0;
        var q = JSON.parse($('#json1').html());
        ctx.beginPath();
        ctx.moveTo(0, 0);
        jQuery.each(q, function (i, val) {
            CoOrX = parseInt(q[i].pixelNumber);
            CoOrY = 1;
            ctx.rect(iniCoordX, iniCoordY, CoOrX, CoOrY);
            ctx.fill();
            ctx.closePath();
            if (mouseX >= iniCoordX && mouseX <= (CoOrX + iniCoordX) && mouseY == (iniCoordY)) {
                $("#nm").html(q[i].revealName);
                return false; // stop jQuery.each — matching segment found
            }
            iniCoordX += CoOrX;
            if (iniCoordX >= 1000) {
                // Wrap to the next 1px-high row of the 1000px-wide map.
                iniCoordX = 0;
                iniCoordY += 1;
            }
        });
    }

    // Draws every segment described in #json1 as a filled 1px-high rectangle,
    // building the path that isPointInPath() is later tested against.
    function makeRect() {
        var q = JSON.parse($('#json1').html());
        var CoOrX = 0;
        var CoOrY = 0;
        var iniCoordX = 0;
        var iniCoordY = 0;
        ctx.beginPath();
        ctx.moveTo(0, 0);
        jQuery.each(q, function (i, val) {
            CoOrX = parseInt(q[i].pixelNumber);
            CoOrY = 1;
            ctx.rect(iniCoordX, iniCoordY, CoOrX, CoOrY);
            ctx.fill();
            ctx.closePath();
            iniCoordX += CoOrX;
            if (iniCoordX >= 1000) {
                iniCoordX = 0;
                iniCoordY += 1;
            }
        });
    }

    function handleMouseChange(e) {
        // Tell the browser we're handling this event.
        e.preventDefault();
        e.stopPropagation();
        // FIX: mouseX/mouseY were implicit globals; declare them locally.
        var mouseX = parseInt(e.clientX - offsetX);
        var mouseY = parseInt(e.clientY - offsetY);
        // FIX: defineNames() was previously invoked unconditionally AND again
        // inside the isPointInPath branch, running the full segment scan twice
        // per mousemove — and the else branch could immediately wipe the label
        // the first call had just set. makeRect() (run at image load) already
        // built the equivalent path that isPointInPath() tests against.
        if (ctx.isPointInPath(mouseX, mouseY)) {
            defineNames(mouseX, mouseY);
        } else {
            $("#nm").html("");
        }
    }
});
<file_sep>$(document).ready(function () {
// Demo: draw the sample image, cover regions with white rectangles, then set
// the page background to the color of whichever canvas pixel is clicked.
var canvas = document.getElementById("ex1");
var ctx = canvas.getContext("2d");
var image = new Image();
image.src = "resources/images/sample.jpg";
$(image).load(function () {
// Paint the image first, then the white cover rectangles on top.
ctx.drawImage(image, 0, 0, 1000, 1000);
ctx.fillStyle = "white";
ctx.fillRect(0, 0, 1000, 200);
ctx.fillRect(0, 202, 1000, 400);
ctx.fillRect(100, 402, 900, 398);
ctx.fillRect(0, 802, 1000, 400);
});
$(canvas).click(function (e) {
// Translate the page click position into canvas coordinates.
var canvasOffset = $(canvas).offset();
var canvasX = Math.floor(e.pageX - canvasOffset.left);
var canvasY = Math.floor(e.pageY - canvasOffset.top);
var imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
var pixels = imageData.data;
// RGBA = 4 bytes per pixel; index of the red byte of the clicked pixel.
var pixelRedIndex = ((canvasY - 1) * (imageData.width * 4)) + ((canvasX - 1) * 4);
var pixelcolor = "rgba(" + pixels[pixelRedIndex] + ", " + pixels[pixelRedIndex + 1] + ", " + pixels[pixelRedIndex + 2] + ", " + pixels[pixelRedIndex + 3] + ")";
$("body").css("backgroundColor", pixelcolor);
});
});<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package model;
/**
*
* @author it3530108
*/
/**
 * Form-backing bean holding a pixel count and a display name.
 */
public class PixelBean {

    // These correspond to the form elements.
    private String pixelNumber;
    private String displayName;

    /** Creates an empty bean; both fields start as {@code null}. */
    public PixelBean() {
    }

    /** Creates a bean with only the pixel count populated. */
    public PixelBean(String pixelNumber) {
        this.pixelNumber = pixelNumber;
    }

    /** Creates a fully populated bean. */
    public PixelBean(String dName, String pNum) {
        this.displayName = dName;
        this.pixelNumber = pNum;
    }

    /** @return the pixelNumber */
    public String getPixelNumber() {
        return this.pixelNumber;
    }

    /** @param pixelNumber the pixelNumber to set */
    public void setPixelNumber(String pixelNumber) {
        this.pixelNumber = pixelNumber;
    }

    /** @return the displayName */
    public String getDisplayName() {
        return this.displayName;
    }

    /** @param displayName the displayName to set */
    public void setDisplayName(String displayName) {
        this.displayName = displayName;
    }
}
| 0357bfcbc0b1cb04a987c277963ede20fa354ec3 | [
"JavaScript",
"Java"
] | 4 | Java | adit02/Project_353 | 1a4b0e99c40afa98a44b9eaa154bcc592761238c | 1edde802b68a3d986fd921512314365bb2cd5f2e |
refs/heads/master | <repo_name>iascchen/react-tfjs-playground<file_sep>/src/components/mnist/MnistDatasetPng.ts
/**
* @license
* Copyright 2018 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '@tensorflow/tfjs'
import { IMnistDataset, IMAGE_H, IMAGE_W, IMAGE_SIZE, NUM_CLASSES } from './mnistConsts'
// Total examples packed into the sprite image.
const NUM_DATASET_ELEMENTS = 65000
// Train/test split sizes; only the first NUM_TEST_ELEMENTS of the test slice
// are ever sampled (see testIndices in MnistDatasetPng.loadData).
const NUM_TRAIN_ELEMENTS = 35000
const NUM_TEST_ELEMENTS = 7000

// Assets are served from the app's public folder.
const BASE_URL = '/preload/data/mnist'
const MNIST_IMAGES_SPRITE_PATH = `${BASE_URL}/mnist_images.png`
const MNIST_LABELS_PATH = `${BASE_URL}/mnist_labels_uint8`
/**
* A class that fetches the sprited MNIST dataset and returns shuffled batches.
*
* NOTE: This will get much easier. For now, we do data fetching and
* manipulation manually.
*/
export class MnistDatasetPng implements IMnistDataset {
    // Flattened pixel data, IMAGE_SIZE floats in [0, 1] per example.
    trainImages!: Float32Array
    testImages!: Float32Array
    // One-hot label bytes, NUM_CLASSES per example.
    trainLabels!: Uint8Array
    testLabels!: Uint8Array

    // Pre-shuffled example indices used to draw random batches.
    trainIndices!: Uint32Array
    testIndices!: Uint32Array
    // Cursors into the shuffled index arrays; they wrap around when exhausted.
    shuffledTrainIndex: number
    shuffledTestIndex: number

    constructor () {
        this.shuffledTrainIndex = 0
        this.shuffledTestIndex = 0
    }

    // Fetches the MNIST sprite PNG and the packed label file, decodes the
    // sprite through an offscreen canvas, and splits both into train/test.
    loadData = async (): Promise<void> => {
        let datasetImages: Float32Array

        // Make a request for the MNIST sprited image.
        const img = new Image()
        const canvas = document.createElement('canvas')
        const ctx = canvas.getContext('2d')
        const imgRequest = new Promise((resolve, reject) => {
            img.crossOrigin = ''
            img.onload = () => {
                img.width = img.naturalWidth
                img.height = img.naturalHeight

                // One float (4 bytes) per pixel for the entire dataset.
                const datasetBytesBuffer =
                    new ArrayBuffer(NUM_DATASET_ELEMENTS * IMAGE_SIZE * 4)

                // Decode the sprite in chunks of 5000 rows to bound canvas size.
                const chunkSize = 5000
                canvas.width = img.width
                canvas.height = chunkSize

                for (let i = 0; i < NUM_DATASET_ELEMENTS / chunkSize; i++) {
                    const datasetBytesView = new Float32Array(
                        datasetBytesBuffer, i * IMAGE_SIZE * chunkSize * 4,
                        IMAGE_SIZE * chunkSize)
                    ctx?.drawImage(
                        img, 0, i * chunkSize, img.width, chunkSize, 0, 0, img.width,
                        chunkSize)

                    const imageData = ctx?.getImageData(0, 0, canvas.width, canvas.height)
                    const length = imageData?.data.length ?? 0
                    for (let j = 0; j < length / 4; j++) {
                        // All channels hold an equal value since the image is grayscale, so
                        // just read the red channel.
                        const v = imageData?.data[j * 4] ?? 0
                        // Normalize bytes to [0, 1].
                        datasetBytesView[j] = v / 255
                    }
                }
                datasetImages = new Float32Array(datasetBytesBuffer)

                resolve()
            }
            img.src = MNIST_IMAGES_SPRITE_PATH
        })

        const labelsRequest = fetch(MNIST_LABELS_PATH)
        // Wait for both the sprite decode and the label download.
        const [_, labelsResponse] = await Promise.all([imgRequest, labelsRequest])

        const datasetLabels = new Uint8Array(await (labelsResponse as Response).arrayBuffer())

        // Create shuffled indices into the train/test set for when we select a
        // random dataset element for training / validation.
        this.trainIndices = tf.util.createShuffledIndices(NUM_TRAIN_ELEMENTS)
        this.testIndices = tf.util.createShuffledIndices(NUM_TEST_ELEMENTS)

        // Slice the images and labels into train and test sets. (The test slice
        // receives every example after the train split; only the first
        // NUM_TEST_ELEMENTS of it are ever sampled via testIndices.)
        this.trainImages = datasetImages!.slice(0, IMAGE_SIZE * NUM_TRAIN_ELEMENTS)
        this.testImages = datasetImages!.slice(IMAGE_SIZE * NUM_TRAIN_ELEMENTS)
        this.trainLabels = datasetLabels.slice(0, NUM_CLASSES * NUM_TRAIN_ELEMENTS)
        this.testLabels = datasetLabels.slice(NUM_CLASSES * NUM_TRAIN_ELEMENTS)
    }

    /**
     * Get all training data as a data tensor and a labels tensor.
     *
     * @param {number} numExamples Optional number of examples to get. If not provided,
     *   all training examples will be returned.
     * @returns
     *   xs: The data tensor, of shape `[numTestExamples, 28, 28, 1]`.
     *   ys: labels. The one-hot encoded labels tensor, of shape `[numTestExamples, 10]`.
     */
    getTrainData = (numExamples?: number): tf.TensorContainerObject => {
        let xs = tf.tensor4d(
            this.trainImages,
            [this.trainImages.length / IMAGE_SIZE, IMAGE_H, IMAGE_W, 1])
        let labels = tf.tensor2d(
            this.trainLabels, [this.trainLabels.length / NUM_CLASSES, NUM_CLASSES])

        if (numExamples != null) {
            xs = xs.slice([0, 0, 0, 0], [numExamples, IMAGE_H, IMAGE_W, 1])
            labels = labels.slice([0, 0], [numExamples, NUM_CLASSES])
        }
        return { xs, ys: labels }
    }

    /**
     * Get all test data as a data tensor and a labels tensor.
     *
     * @param {number} numExamples Optional number of examples to get. If not provided,
     *   all test examples will be returned.
     * @returns
     *   xs: The data tensor, of shape `[numTestExamples, 28, 28, 1]`.
     *   ys: labels. The one-hot encoded labels tensor, of shape `[numTestExamples, 10]`.
     */
    getTestData = (numExamples?: number): tf.TensorContainerObject => {
        let xs = tf.tensor4d(this.testImages,
            [this.testImages.length / IMAGE_SIZE, IMAGE_H, IMAGE_W, 1])
        let labels = tf.tensor2d(
            this.testLabels, [this.testLabels.length / NUM_CLASSES, NUM_CLASSES])

        if (numExamples != null) {
            xs = xs.slice([0, 0, 0, 0], [numExamples, IMAGE_H, IMAGE_W, 1])
            labels = labels.slice([0, 0], [numExamples, NUM_CLASSES])
        }
        return { xs, ys: labels }
    }

    // Draws the next random training batch, advancing the shuffled-index cursor.
    nextTrainBatch = (batchSize: number): tf.TensorContainerObject => {
        return this.nextBatch(batchSize, [this.trainImages, this.trainLabels],
            () => {
                this.shuffledTrainIndex = (this.shuffledTrainIndex + 1) % this.trainIndices.length
                return this.trainIndices[this.shuffledTrainIndex]
            })
    }

    // Draws the next random test batch, advancing the shuffled-index cursor.
    nextTestBatch = (batchSize: number): tf.TensorContainerObject => {
        return this.nextBatch(batchSize, [this.testImages, this.testLabels],
            () => {
                this.shuffledTestIndex = (this.shuffledTestIndex + 1) % this.testIndices.length
                return this.testIndices[this.shuffledTestIndex]
            })
    }

    // Copies `batchSize` examples (picked by the `index` callback) out of the
    // flat image/label arrays and wraps them as tensors.
    nextBatch = (batchSize: number, data: [Float32Array, Uint8Array], index: Function): tf.TensorContainerObject => {
        const batchImagesArray = new Float32Array(batchSize * IMAGE_SIZE)
        const batchLabelsArray = new Uint8Array(batchSize * NUM_CLASSES)

        for (let i = 0; i < batchSize; i++) {
            const idx = index() as number

            const image = data[0].slice(idx * IMAGE_SIZE, idx * IMAGE_SIZE + IMAGE_SIZE)
            batchImagesArray.set(image, i * IMAGE_SIZE)

            const label = data[1].slice(idx * NUM_CLASSES, idx * NUM_CLASSES + NUM_CLASSES)
            batchLabelsArray.set(label, i * NUM_CLASSES)
        }

        const xs = tf.tensor4d(batchImagesArray, [batchSize, IMAGE_H, IMAGE_W, 1])
        const labels = tf.tensor2d(batchLabelsArray, [batchSize, NUM_CLASSES])

        return { xs, ys: labels }
    }
}
<file_sep>/node/src/utils.ts
import * as tf from '@tensorflow/tfjs-node'
import * as zlib from 'zlib'
// import fetch from 'isomorphic-fetch'
import * as fs from 'fs'
import * as util from 'util'
// Shorthand logger used across the node scripts.
export const logger = console.log

// A tf.data dataset of arbitrary tensor containers.
export type IDataSet = tf.data.Dataset<tf.TensorContainer>
// Loosely-typed row array consumed by the splitting helpers below.
export type IArray = any[]
// Lifecycle states reported by the data-loading / training widgets.
export enum STATUS {
    INIT = 'Init',
    LOADING = 'Loading',
    LOADED = 'Loaded',
    TRAINING = 'Training',
    TRAINED = 'Trained',
}
// One training-progress record: an optional iteration number plus the tf.Logs
// (loss/metric values) reported by a fit callback.
export interface ITrainInfo {
    iteration?: number
    logs: tf.Logs
}
// Flattened description of one sample tensor.
// NOTE(review): field semantics inferred from names only — shapeSize is
// presumably the product of the shape dims; confirm at the producer.
export interface ISampleInfo {
    data: number[]
    shape: number[]
    shapeStr: string
    shapeSize: number
    length: number
}
// export const range = (from: number, to = 0): number[] => {
// return [...Array(Math.abs(to - from)).keys()].map(v => v + from)
// }
/**
 * Split rows into [train, test] portions by the `testSplit` ratio.
 *
 * @param shuffled source rows (assumed pre-shuffled unless `shuffle` is true)
 * @param testSplit fraction of rows to reserve for the test set (0..1, rounded)
 * @param shuffle when true, rows are shuffled before splitting
 * @returns a two-element array: [trainRows, testRows]
 */
export const splitDataSet = (shuffled: IArray, testSplit: number, shuffle = false): IArray[] => {
    // FIX: shuffle a shallow copy — tf.util.shuffle works in place, and the
    // original implementation silently reordered the caller's array.
    let rows = shuffled
    if (shuffle) {
        rows = shuffled.slice()
        tf.util.shuffle(rows)
    }

    const totalRecord = rows.length
    // Split the data into training and testing portions.
    const numTestExamples = Math.round(totalRecord * testSplit)
    const numTrainExamples = totalRecord - numTestExamples

    const train = rows.slice(0, numTrainExamples)
    const test = rows.slice(numTrainExamples)
    return [train, test]
}
// Empties the given array in place (so every existing reference observes the
// cleared state); tolerates null/undefined input, doing nothing in that case.
export const arrayDispose = (_array: any[]): void => {
    if (_array) {
        _array.splice(0, _array.length)
    }
}
// Promisified fs.readFile so fetchLocal can use async/await.
const readFile = util.promisify(fs.readFile)

// Downloads a test file only once and returns the buffer for the file.
// NOTE(review): despite the comment above, there is no caching here — every
// call re-reads the file from disk.
export const fetchLocal = async (filename: string, isUnzip?: boolean): Promise<Buffer | undefined> => {
    logger('fetchLocal dirname : ', __dirname)
    const buf = await readFile(filename)
    // const buf = await response.arrayBuffer()
    // const buf = Buffer.from(response)
    if (isUnzip) {
        // Inflate gzip-compressed dataset files (e.g. the MNIST .gz archives).
        logger('unzip...', filename)
        return zlib.unzipSync(Buffer.from(buf))
    } else {
        return Buffer.from(buf)
    }
}
/**
 * Read an uploaded File/Blob and resolve with its base64 data-URL string.
 *
 * @param blob the uploaded File or Blob
 * @returns the data-URL representation of the blob's content
 * @throws Error when `blob` is undefined
 */
export const getUploadFileBase64 = async (blob: File | Blob | undefined): Promise<string> => {
    if (!blob) {
        throw (new Error('File Blob is undefined'))
    }

    return new Promise((resolve, reject) => {
        const reader = new FileReader()
        // FIX: removed a leftover debug console.log — JSON.stringify on a
        // File/Blob serializes to '{}' anyway, and the sibling
        // getUploadFileArray has no such log.
        reader.readAsDataURL(blob)
        reader.onload = () => {
            const text = reader.result?.toString()
            // logger('getUploadFileBase64', text)
            resolve(text)
        }
        reader.onerror = error => reject(error)
    })
}
/**
 * Read an uploaded File/Blob and resolve with its raw bytes as a Buffer.
 * Rejects with the FileReader error if reading fails; throws when `blob`
 * is undefined.
 */
export const getUploadFileArray = async (blob: File | Blob | undefined): Promise<Buffer> => {
    if (!blob) {
        throw (new Error('File Blob is undefined'))
    }

    return new Promise((resolve, reject) => {
        const reader = new FileReader()
        reader.readAsArrayBuffer(blob)
        reader.onload = () => {
            // Wrap the ArrayBuffer in a Node-style Buffer for downstream use.
            const buffer = reader.result as ArrayBuffer
            // logger('getUploadFileBase64', text)
            resolve(Buffer.from(buffer))
        }
        reader.onerror = error => reject(error)
    })
}
/**
* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
/**
* Utility functions for the visualize-convnet demo.
*/
import * as tf from '@tensorflow/tfjs'
import * as jimp from 'jimp'
// const jimp = require('jimp')
// const tf = require('@tensorflow/tfjs')
/**
 * Read an image file as a TensorFlow.js tensor.
 *
 * Image resizing is performed with tf.image.resizeBilinear.
 *
 * @param {string} filePath Path to the input image file.
 * @param {number} height Desired height of the output image tensor, in pixels.
 * @param {number} width Desired width of the output image tensor, in pixels.
 * @return {tf.Tensor4D} The read float32-type tf.Tensor of shape
 *   `[1, height, width, 3]`
 */
export const readImageTensorFromFile = async (filePath: string, height: number, width: number): Promise<tf.Tensor> => {
    return new Promise((resolve, reject) => {
        jimp.read(filePath, (err, image) => {
            if (err) {
                reject(err)
            } else {
                const h = image.bitmap.height
                const w = image.bitmap.width
                const buffer: tf.TensorBuffer<tf.Rank.R4> = tf.buffer([1, h, w, 3], 'float32')
                // Copy the R, G and B channels into the buffer; the alpha
                // channel of jimp's RGBA bitmap is skipped.
                image.scan(0, 0, w, h, function (x, y, index) {
                    buffer.set(image.bitmap.data[index], 0, y, x, 0)
                    buffer.set(image.bitmap.data[index + 1], 0, y, x, 1)
                    buffer.set(image.bitmap.data[index + 2], 0, y, x, 2)
                })
                // Resize to the requested size; tf.tidy frees the intermediate tensor.
                resolve(tf.tidy(
                    () => tf.image.resizeBilinear(buffer.toTensor(), [height, width])))
            }
        })
    })
}
// /**
// * Write an image tensor to a image file.
// *
// * @param {tf.Tensor} imageTensor The image tensor to write to file.
// * Assumed to be an int32-type tensor with value in the range 0-255.
// * @param {string} filePath Destination file path.
// */
// export const writeImageTensorToFile = async (imageTensor: tf.Tensor, filePath: string) => {
// const imageH = imageTensor.shape[1] ?? 0
// const imageW = imageTensor.shape[2] ?? 0
// const imageData = imageTensor.dataSync()
//
// const bufferLen = imageH * imageW * 4
// const buffer = new Uint8Array(bufferLen)
// let index = 0
// for (let i = 0; i < imageH; ++i) {
// for (let j = 0; j < imageW; ++j) {
// const inIndex = 3 * (i * imageW + j)
// buffer.set([Math.floor(imageData[inIndex])], index++)
// buffer.set([Math.floor(imageData[inIndex + 1])], index++)
// buffer.set([Math.floor(imageData[inIndex + 2])], index++)
// buffer.set([255], index++)
// }
// }
//
// return new Promise((resolve, reject) => {
// jimp.new(
// { data: Buffer.from(buffer), width: imageW, height: imageH },
// (err, img) => {
// if (err) {
// reject(err)
// } else {
// img.write(filePath)
// resolve()
// }
// })
// })
// }
// 64 x 3 RGB colormap, flattened row-major as [r0, g0, b0, r1, g1, b1, ...].
// This is used to convert a 1-channel (grayscale) image into a color
// (RGB) one. The color map is based on the output of the "parula" colormap
// command in MATLAB.
const RGB_COLORMAP: number[] = [
    0.2422, 0.1504, 0.6603, 0.25039, 0.165, 0.70761, 0.25777,
    0.18178, 0.75114, 0.26473, 0.19776, 0.79521, 0.27065, 0.21468,
    0.83637, 0.27511, 0.23424, 0.87099, 0.2783, 0.25587, 0.89907,
    0.28033, 0.27823, 0.9221, 0.28134, 0.3006, 0.94138, 0.28101,
    0.32276, 0.95789, 0.27947, 0.34467, 0.97168, 0.27597, 0.36668,
    0.9829, 0.26991, 0.3892, 0.9906, 0.26024, 0.41233, 0.99516,
    0.24403, 0.43583, 0.99883, 0.22064, 0.46026, 0.99729, 0.19633,
    0.48472, 0.98915, 0.1834, 0.50737, 0.9798, 0.17864, 0.52886,
    0.96816, 0.17644, 0.5499, 0.95202, 0.16874, 0.57026, 0.93587,
    0.154, 0.5902, 0.9218, 0.14603, 0.60912, 0.90786, 0.13802,
    0.62763, 0.89729, 0.12481, 0.64593, 0.88834, 0.11125, 0.6635,
    0.87631, 0.09521, 0.67983, 0.85978, 0.068871, 0.69477, 0.83936,
    0.029667, 0.70817, 0.81633, 0.0035714, 0.72027, 0.7917, 0.0066571,
    0.73121, 0.76601, 0.043329, 0.7411, 0.73941, 0.096395, 0.75,
    0.71204, 0.14077, 0.7584, 0.68416, 0.1717, 0.76696, 0.65544,
    0.19377, 0.77577, 0.6251, 0.21609, 0.7843, 0.5923, 0.24696,
    0.7918, 0.55674, 0.29061, 0.79729, 0.51883, 0.34064, 0.8008,
    0.47886, 0.3909, 0.80287, 0.43545, 0.44563, 0.80242, 0.39092,
    0.5044, 0.7993, 0.348, 0.56156, 0.79423, 0.30448, 0.6174,
    0.78762, 0.26124, 0.67199, 0.77927, 0.2227, 0.7242, 0.76984,
    0.19103, 0.77383, 0.7598, 0.16461, 0.82031, 0.74981, 0.15353,
    0.86343, 0.7406, 0.15963, 0.90354, 0.73303, 0.17741, 0.93926,
    0.72879, 0.20996, 0.97276, 0.72977, 0.23944, 0.99565, 0.74337,
    0.23715, 0.99699, 0.76586, 0.21994, 0.9952, 0.78925, 0.20276,
    0.9892, 0.81357, 0.18853, 0.97863, 0.83863, 0.17656, 0.96765,
    0.8639, 0.16429, 0.96101, 0.88902, 0.15368, 0.95967, 0.91346,
    0.14226, 0.9628, 0.93734, 0.12651, 0.96911, 0.96063, 0.10636,
    0.9769, 0.9839, 0.0805
]
/**
 * Convert an input monocolor image to color by applying a color map.
 *
 * @param {tf.Tensor4d} x Input monocolor image, assumed to be of shape
 *   `[1, height, width, 1]`.
 * @returns Color image, of shape `[1, height, width, 3]`.
 */
export const applyColorMap = (x: tf.Tensor4D): tf.Tensor => {
    tf.util.assert(
        x.rank === 4, () => `Expected rank-4 tensor input, got rank ${x.rank}`)
    tf.util.assert(
        x.shape[0] === 1,
        () => `Expected exactly one example, but got ${x.shape[0]} examples`)
    tf.util.assert(
        x.shape[3] === 1,
        () => `Expected exactly one channel, but got ${x.shape[3]} channels`)

    return tf.tidy(() => {
        // Get normalized x. EPSILON keeps the division finite for constant
        // images (range 0) and keeps the normalized maximum strictly below 1,
        // so `row` below never reaches colorMapSize (64) and stays in bounds.
        const EPSILON = 1e-5
        const xRange = x.max().sub(x.min())
        const xNorm = x.sub(x.min()).div(xRange.add(EPSILON))
        const xNormData = xNorm.dataSync()

        const h = x.shape[1]
        const w = x.shape[2]
        const buffer = tf.buffer([1, h, w, 3])

        // Map each normalized grayscale value onto a row of the 64-entry table.
        const colorMapSize = RGB_COLORMAP.length / 3
        for (let i = 0; i < h; ++i) {
            for (let j = 0; j < w; ++j) {
                const pixelValue = xNormData[i * w + j]
                const row = Math.floor(pixelValue * colorMapSize)
                buffer.set(RGB_COLORMAP[3 * row], 0, i, j, 0)
                buffer.set(RGB_COLORMAP[3 * row + 1], 0, i, j, 1)
                buffer.set(RGB_COLORMAP[3 * row + 2], 0, i, j, 2)
            }
        }
        return buffer.toTensor()
    })
}
<file_sep>/Dockerfile
# FROM node
FROM node:alpine

# MAINTAINER is deprecated; record the maintainer as an OCI label instead.
LABEL maintainer="IascCHEN"

# Switch the Alpine package mirror to a domestic (TUNA / Tsinghua) mirror for
# faster installs from inside China.
RUN echo "https://mirror.tuna.tsinghua.edu.cn/alpine/v3.11/main/" > /etc/apk/repositories

# python/make/g++ are needed to build tfjs-node's native addon via node-gyp.
RUN apk update \
    && apk add --no-cache ca-certificates \
    && update-ca-certificates \
    && apk add --no-cache --virtual .gyp bash bash-doc bash-completion vim wget python make g++

# npm registry mirror; overridable at build time with --build-arg.
ARG NPM_REGISTRY="https://registry.npm.taobao.org"

RUN mkdir -p /opt/app/node

EXPOSE 3000
CMD ["yarn", "start"]

# use changes to package.json to force Docker not to use the cache
# when we change our application's nodejs dependencies:
RUN yarn config set registry ${NPM_REGISTRY}
RUN yarn config get registry

# Install the node-side dependencies first so they are cached independently.
WORKDIR /opt/app/node
COPY node/package.json /opt/app/node/package.json
COPY node/yarn.lock /opt/app/node/yarn.lock
RUN yarn

# Then the web-app dependencies.
WORKDIR /opt/app
COPY package.json /opt/app/package.json
COPY yarn.lock /opt/app/yarn.lock
RUN yarn

# Finally copy the sources — the most frequently changing layer goes last.
COPY . /opt/app
/// <reference types="react-scripts" />

// Ambient module declarations for packages that ship without TypeScript typings.
declare module '@matejmazur/react-mathjax'
declare module 'remark-math'
# Docker
Notes on running and cleaning up Docker containers. The cleanup commands below
are adapted from <https://note.qidong.name/2017/06/26/docker-clean/>.
清理所有停止运行的容器:
docker container prune
清理所有悬挂(<none>)镜像:
docker image prune
清理所有无用数据卷:
docker volume prune
<file_sep>/public/docs/dev/mnist-core-api.md
# MNIST CNN 的 Core API 实现
## 数据分片加载
下面的代码引用自 `./src/componenets/mnist/MnistDatasetPng.ts`。使用 `tf.util.createShuffledIndices` 来实现按批次随机抽取数据分片的目的。
loadData = async (): Promise<void> => {
...
// Create shuffled indices into the train/test set for when we select a
// random dataset element for training / validation.
this.trainIndices = tf.util.createShuffledIndices(NUM_TRAIN_ELEMENTS)
this.testIndices = tf.util.createShuffledIndices(NUM_TEST_ELEMENTS)
...
}
nextTrainBatch = (batchSize: number): tf.TensorContainerObject => {
return this.nextBatch(batchSize, [this.trainImages, this.trainLabels],
() => {
this.shuffledTrainIndex = (this.shuffledTrainIndex + 1) % this.trainIndices.length
return this.trainIndices[this.shuffledTrainIndex]
})
}
...
nextBatch = (batchSize: number, data: [Float32Array, Uint8Array], index: Function): tf.TensorContainerObject => {
const batchImagesArray = new Float32Array(batchSize * IMAGE_SIZE)
const batchLabelsArray = new Uint8Array(batchSize * NUM_CLASSES)
for (let i = 0; i < batchSize; i++) {
const idx = index() as number
const image = data[0].slice(idx * IMAGE_SIZE, idx * IMAGE_SIZE + IMAGE_SIZE)
batchImagesArray.set(image, i * IMAGE_SIZE)
const label = data[1].slice(idx * NUM_CLASSES, idx * NUM_CLASSES + NUM_CLASSES)
batchLabelsArray.set(label, i * NUM_CLASSES)
}
const xs = tf.tensor4d(batchImagesArray, [batchSize, IMAGE_H, IMAGE_W, 1])
const labels = tf.tensor2d(batchLabelsArray, [batchSize, NUM_CLASSES])
return { xs, ys: labels }
}
## 使用 Tensorflow.js 的 Core API 构造深度神经网络
相关代码在 `./src/componenets/mnist/modelCoreApi.ts`。
### 等价的 Layers API 实现
本节采用 Core API 实现的 CNN 模型,等价于下面这段 Layers API 所描述的网络。
const model = tf.sequential()
model.add(tf.layers.conv2d({
inputShape: [IMAGE_H, IMAGE_W, 1], kernelSize: 5, filters: 8, activation: 'relu', padding: 'same'
}))
model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 }))
model.add(tf.layers.conv2d({ kernelSize: 5, filters: 16, activation: 'relu', padding: 'same'}))
model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 }))
model.add(tf.layers.flatten({}))
model.add(tf.layers.dense({ units: 10, activation: 'softmax' }))
### 卷积模型的权重参数
// Variables that we want to optimize
const conv1OutputDepth = 8
const conv1Weights = tf.variable(tf.randomNormal([5, 5, 1, conv1OutputDepth], 0, 0.1))
const conv2InputDepth = conv1OutputDepth
const conv2OutputDepth = 16
const conv2Weights = tf.variable(tf.randomNormal([5, 5, conv2InputDepth, conv2OutputDepth], 0, 0.1))
const fullyConnectedWeights = tf.variable(
tf.randomNormal([7 * 7 * conv2OutputDepth, NUM_CLASSES], 0,
1 / Math.sqrt(7 * 7 * conv2OutputDepth)))
const fullyConnectedBias = tf.variable(tf.zeros([NUM_CLASSES]))
* conv1Weights 的形状 [5, 5, 1, conv1OutputDepth] 所对应的维度含义是 [filterHeight, filterWidth, inDepth, outDepth],描述了此卷基使用 5 * 5 的卷积核,输入数据深度为 1,输出数据深度为 8。
* conv2Weights 的形状 [5, 5, conv1OutputDepth, conv2OutputDepth],描述了此卷基使用 5 * 5 的卷积核,输入数据深度为 8,输出数据深度为 16。
* 输出层计算时,将 layer2 的输出,被扁平化为长度 784 的一维向量,输出长度为 10 的结果向量。
### 卷积模型的前向传播计算过程
接下来,我们看看输入一组训练数据 xs 后,在模型中是如何计算的。
export const model = (inputXs: tf.Tensor): tf.Tensor => {
const xs = inputXs.as4D(-1, IMAGE_H, IMAGE_W, 1)
const strides = 2
const pad = 0
// Conv 1
const layer1 = tf.tidy(() => {
return xs.conv2d(conv1Weights as tf.Tensor4D, 1, 'same')
.relu()
.maxPool([2, 2], strides, pad)
})
// Conv 2
const layer2 = tf.tidy(() => {
return layer1.conv2d(conv2Weights as tf.Tensor4D, 1, 'same')
.relu()
.maxPool([2, 2], strides, pad)
})
// Final layer
return layer2.as2D(-1, fullyConnectedWeights.shape[0])
.matMul(fullyConnectedWeights as tf.Tensor)
.add(fullyConnectedBias)
}
1. 输入为形状 [-1, 28, 28, 1] 的 Tensor4D 数据 xs。shape[0] 为 `-1` 表示使用 Tensor 在这个维度上的实际值。
2. 经过 layer1 计算。先做了卷积核为 5 * 5 的卷积核,`'same'` 参数表示,卷积后输出数据的 shape 保持与输入一致, 输出形状为 [-1, 28, 28, 8]。
3. 使用 relu 做激活。
4. 使用 maxPool 进行 [2,2] 池化。输出形状为 [-1, 14, 14, 8]。
5. 经过 layer2 计算。输出形状为 [-1, 14, 14, 16]。
6. 使用 relu 做激活。
7. 使用 maxPool 进行 [2,2] 池化。输出形状为 [-1, 7, 7, 16]
8. Final 层计算,首先将 [-1, 7, 7, 16] 的数据扁平化为 [-1, 784] 的向量表达
9. 经过 matMul 和 add 计算后,形成 [-1, 10] 的输出 One-Hot 结果。
### 模型的训练——被隐藏的梯度下降和反向传播
选用对输出的 One-Hot 结果经过 softmax 计算后的交叉熵为 Loss 值。
// Loss function
const loss = (labels: tf.Tensor, ys: tf.Tensor): tf.Scalar => {
return tf.losses.softmaxCrossEntropy(labels, ys).mean()
}
Tensorflow 对于模型的自动求导是靠各式各样的 Optimizer 类进行的,我们只需要在程序中构建前向图,然后加上Optimizer,再调用minimize()方法就可以完成梯度的反向传播。
// Train the model.
export const train = async (data: IMnistDataSet, log: Function,
steps: number, batchSize: number, learningRate: number): Promise<void> => {
const returnCost = true
const optimizer = tf.train.adam(learningRate)
for (let i = 0; i < steps; i++) {
const cost = optimizer.minimize(() => {
const batch = data.nextTrainBatch(batchSize)
const _labels = batch.ys as tf.Tensor
const _xs = batch.xs as tf.Tensor
return loss(_labels, model(_xs))
}, returnCost)
log(i, cost?.dataSync())
await tf.nextFrame()
}
}
Optimizer class是所有 Optimizer 的基类,整个反向传播过程可分为三步,这三步仅需通过一个minimize()函数完成:
1. 计算每一个部分的梯度,compute_gradients()
2. 根据需要对梯度进行处理
3. 把梯度更新到参数上,apply_gradients()
<file_sep>/public/preload/data/download_mnist_data.sh
#!/bin/bash
# Download the MNIST and Fashion-MNIST assets used by the demos into ./mnist
# and ./fashion (run from public/preload/data).
set -e

# FIX: the original script had `mkdir mnist; cd mnist` commented out while the
# Fashion section still did `cd ..`, so MNIST files landed in the current
# directory and the fashion/ folder escaped it. Restore the intended layout.
mkdir -p mnist
cd mnist

#################
# MNIST for Web Example (sprite image + packed one-hot labels)
#################
BASE_URL="https://storage.googleapis.com/learnjs-data/model-builder"
MNIST_IMAGES_SPRITE_PATH="${BASE_URL}/mnist_images.png"
MNIST_LABELS_PATH="${BASE_URL}/mnist_labels_uint8"

wget $MNIST_IMAGES_SPRITE_PATH
wget $MNIST_LABELS_PATH

#################
# MNIST in GZ (idx-format archives)
#################
BASE_URL="https://storage.googleapis.com/cvdf-datasets/mnist"
TRAIN_IMAGES_FILE="${BASE_URL}/train-images-idx3-ubyte.gz"
TRAIN_LABELS_FILE="${BASE_URL}/train-labels-idx1-ubyte.gz"
TEST_IMAGES_FILE="${BASE_URL}/t10k-images-idx3-ubyte.gz"
TEST_LABELS_FILE="${BASE_URL}/t10k-labels-idx1-ubyte.gz"

wget $TRAIN_IMAGES_FILE
wget $TRAIN_LABELS_FILE
wget $TEST_IMAGES_FILE
wget $TEST_LABELS_FILE

#################
# MNIST Fashion in GZ
#################
cd ..
mkdir -p fashion
cd fashion

BASE_URL="http://fashion-mnist.s3-website.eu-central-1.amazonaws.com"
TRAIN_IMAGES_FILE="${BASE_URL}/train-images-idx3-ubyte.gz"
TRAIN_LABELS_FILE="${BASE_URL}/train-labels-idx1-ubyte.gz"
TEST_IMAGES_FILE="${BASE_URL}/t10k-images-idx3-ubyte.gz"
TEST_LABELS_FILE="${BASE_URL}/t10k-labels-idx1-ubyte.gz"

wget $TRAIN_IMAGES_FILE
wget $TRAIN_LABELS_FILE
wget $TEST_IMAGES_FILE
wget $TEST_LABELS_FILE
# react-tfjs-camp 全栈 AI 训练
这是一个以学习 tensorflow.js 为目的的项目。使用 React Hooks + Typescript 实现主要界面。
BTW,这是个平民 AI 教程,可以不使用 GPU 完成。
## 技术栈
最近版本,在以下技术栈上运行。
* TypeScript (v3.7.2)
* React (v16.12) : react-scripts, react-hooks, react-router
* Tensorflow.js (v1.7.1)
* AntD(v4.1.0) & AntV/bizcharts
### 快速开始
更多内容,请移步后面的 “开发教程 Develop Tutorial” 部分
* 下载代码
$ git clone https://github.com/iascchen/react-tfjs-camp.git
$ cd react-tfjs-camp
* Web APP 服务
$ yarn
$ yarn start
Open [http://localhost:3000](http://localhost:3000)
* 使用 Node.js 的代码部分
$ cd node
$ yarn
$ yarn run ts-node ./src/*/train.ts ...
* 使用 Docker
考虑到在国内安装 tensorflow.js 的 node、node-gpu 部分比较慢,所以做了一个 Docker Image,也算是个开箱即用的实验环境。
目前,编译好的 Docker Image 目前还没上传(待 Release 版本之后再传)。
下面的脚本,有兴趣的同学可以打开看看。
* 使用 Dockerfile Build,会生成 `iasc/react-tfjs-capm` 的 Docker Image。
$ ./docker_build.sh
* 运行 Docker。这个脚本为 Docker 环境中的 `/public/model`、`/public/data` 两个目录做了本地映射,便于集成您自己使用的数据和训练的模型。
$ ./docker_run.sh
### 关键目录
.
|____node tfjs-node 代码,用于部分模型训练,比在浏览器中快不少。
|____src React Tensorflow.js Camp 的 Web App 代码
|____public
| |____model 用于存放你自己的模型,可以直接在 Web App 中使用 fetch('/model/...') 获取
| |____data 用于存放你自己的数据,可以直接在 Web App 中使用 fetch('/data/...') 获取
| |____preload 预先下载好的数据和模型,因为体积太大,并不放在 git 里。打包在 Docker Image 中。
| | |____model Web App中使用 fetch('/preload/model/...') 获取到。node.js 中使用文件访问相对路径获取,如:fetchLocal('../public/preload/model')
| | |____data
## AI 概念 AI Concept
AI 概念相关的文档内容会放在 `public/docs/ai` 目录下。这些个目录下的内容,在Web APP 程序运行时,会展示在“问题”标签页面下。
学习新的技术,仅仅看看文档,跑几个例子,基本上也就能够了解了。不过要想真正深入了解这个技术的优缺点,在初步学习的基础上,还得按照自己的想法做点东西。
这个项目的开始比较随性,2020 年的 COVID-19 疫情宅家期间,列了个 AI 技术实践点的列表,起了个 Repo,只是想着把 TFJS Example 的典型例子重刷一遍,能够留下些学习的记录,希望对大家有些帮助。
做例子的过程,以前用 Python 也曾经撸过一遍,学习后个人的体会是:各个例子比较散,这样去学习 TF 与 AI,往往是关注模型,多过关注问题本身,难于形成全局的概念。而且,算法和模型天天都在进化,日日都有新论文发出来,要理解 AI 相关的技术,还是要去思考和理解一些更本质的东西。因此,在做了几个 TFJS 的例子之后,我开始对 Web APP 的内容和结构做调整,几经调整之后,成了这个程序现在的模样。
一年多前,曾有个为人工智能的初学者提供一个“端到端”的学习和体验的平台的想法。

所谓端到端,就是从领域问题出发,思考所需的数据,运用相应的模型,执行训练,验证推理,形成新应用。这个过程,我自己理解是“从数据出发”的科学探究方法的延伸,就像是古人观测天象、记录物理现象一样。而未来和过去不一样的地方在于,我们可以利用计算机和 AI ,处理更大量的数据,发现隐藏的更深的规律。
从这个起点出发,甄选了一些 AI 学习的例子,在展现上,将对 AI 概念解释、与参数交互直接体验结合起来,最终形成了当前 React-tfjs-camp 的模样。

### 代码部分
- [x] 第一部分,AI 来做加减乘除。从传统的实验数据记录处理问题出发,以曲线拟合、经典的 IRIS 鸢尾花为例,介绍了 Tensor、神经元模型、线性回归、以及多层感知机。
- [x] 第二部分,AI 识数。用 AI 学习的 Hello World 程序 MNIST,介绍了这个例子的数据加载、体验使用不同模型计算的差别,还提供了一个手写数字识别画板,可以直接看到学习前后,模型给出结果的差异。对于 MNIST 的训练学习部分,利用 Tensorflow 的 高级模型 LayerModel 和基础模型 Graph 分别做了实现。实现的过程并不太顺利,因为太多的数据放在内存之中,会导致内存溢出,程序不可用,这也许是目前在浏览器中做 AI 训练不可逾越的困难吧。不过最终,还是通过调整参数,规避了这个问题。
- [x] 第三部分,迁移学习。以 Mobilenet 模型为基础,重点讨论的是如何使用预训练的模型,介绍了四个例子。分别是:直接使用 Mobilenet 执行图片分类、Mobilenet + 机器学习 KNN 实现 Teachable Machine、在 Mobilenet 的基础上进行模型扩展实现个性化的图片分类器、以及扩展 Mobilenet以进行简单的对象识别的例子。之所以选择这些例子,是因为这些例子更多的体现了边缘计算 AI 应用的特点,即:在后台大数据计算形成的基础模型之上,利用用户端的计算能力,完成个性化的新任务。
- [x] 第四部分,处理连续数据。语言、音乐、文字、侦测信号,都是连续数据的典型应用。这部分集成了:Jena 气象预测,以展示时序数据和 RNN 的使用;IMDB 数据的语义分析,为了介绍对于文本处理的基本办法,例如 Multihot 或 词嵌入;使用 LSTM 进行文本,初步介绍文本生成模型。这几个例子,在浏览器端“爬”得比较吃力,所以,也提供了基于 TFJS Example 的 Node.js 例子的 TypeScript 版本,可以较快的完成训练。
- [x] 第五部分,预训练模型的使用。集成了 Handpose、Facemesh、Posenet 三个有趣的模型,展示的主要是如何通过摄像头和预训练的 AI 模型交互。
这五个部分,是结合 Tensorflow.js 学习 AI 的最基础内容,所有的内容,都包括如何使用 JS 完成相应的界面的交互。希望这个 Repo 能够成为 AI 学习的练习场,在实现过程中,也会穿插所需要的前后端技术,能够为想了解全栈开发技术的同学提供帮助,也可以形成一个 AI 工具集。
此项目使用 MIT 许可。不过,此项目的部分代码源自于Google 的 tfjs-examples 和 tfjs-model,根据项目需要做了改写。在代码中,我保留了 Google 的 Apache v2 License 声明,使用时请了解。
参考书目:
* 《Deep Learning with Python》 Manning
* 《Deep Learning with JavaScript》 Manning
* 《Deep Learning》 花书
* 逐渐补充...
### 文档部分
- [x] [AI Concept 1 曲线拟合](./public/docs/ai/curve.md)
- [x] [AI Concept 2 鸢尾花](./public/docs/ai/iris.md)
- [ ] [AI Concept 3 MNIST Keras 模型](public/docs/ai/mnist-layers-api.md)
- [ ] [AI Concept 4 MNIST Core API 实现](./public/docs/ai/mnist-core-api.md)
- [ ] [AI Concept 5 Mobilenet](./public/docs/ai/mobilenet.md)
- [ ] [AI Concept 6 Mobilenet + KNN](./public/docs/ai/mobilenet-knn.md)
- [ ] [AI Concept 7 Mobilenet 迁移学习](./public/docs/ai/mobilenet-transfer.md)
- [ ] [AI Concept 8 Mobilenet 简单对象检测](./public/docs/ai/mobilenet-obj-detector.md)
- [ ] [AI Concept 9 Jena 气象预报](./public/docs/ai/jena.md)
- [ ] [AI Concept 10 IMDB 语义分类](./public/docs/ai/sentiment-imdb.md)
- [ ] [AI Concept 11 LSTM 文本生成](./public/docs/ai/lstm-txt-gen.md)
- [ ] [AI Concept 12 和摄像头交互的预训练模型](./public/docs/ai/model-with-stream.md)
## 开发教程 Develop Tutorial
这部分内容放在 `public/docs/dev` 目录下,主要是 Step by Step 的开发教程。
尽量少说废话,不做过多展开,向 MIT Fablab 学习,提供参考链接供您深入了解。
### 构建开发框架
- [x] [Develop Tutorial 1 从零开始](./public/docs/dev/start-from-scratch.md)
* 环境安装
* 安装 Node 环境
* 安装 yarn 工具
* React 和 React Hooks
* 创建 React 应用
* 创建一个新的 React 项目
* React 项目目录简述
- [x] [Develop Tutorial 2 构建 React 开发框架](./public/docs/dev/dev-structure.md)
* React-tfjs-camp 的目录结构
* public 目录结构
* src 目录结构
* node 目录结构
* 规范代码语法和风格检查
* tsconfig.json
* .eslintrc.js
* 改造页面布局
* React 函数化组件
* 使用 Ant Design 构建页面框架
* 在项目中使用 AntD
* 页面布局
* 边栏菜单导航
* AntD Layout Sider
* 使用 React Hooks 的 useState 管理边栏状态
* 用 React-Route 实现页面路由跳转
* ErrorBoundary
- [x] [Develop Tutorial 3 搭建展示端到端 AI 概念的舞台](./public/docs/dev/ai-process-panel.md)
* 端到端的 AI 概念
* AIProcessTabs
* 带参数的 React 函数组件
* 使用 React Hooks 的 useEffect 处理组件内的数据依赖
* 处理需要隐藏的 TabPane
* Sticky 的使用
* MarkdownWidget
### 操练 Tensorflow.js
- [x] [Develop Tutorial 4 初步了解 Tensorflow.js](./public/docs/dev/tfjs-intro.md)
* 使用 Tensorflow.js 的几点须知
* Backend —— 为什么我的 tfjs 运行很慢?
* 内存管理 —— 这样避免我的程序内存溢出?
* tfjs 安装
* tfjs 加载
* 使用 Tensorflow.js 和 React 生成数据集
* 随机生成 a, b, c 三个参数
* 实现公式计算 & useCallback
* 训练集和测试集的生成
* 函数数据可视化
* 使用 Tensorflow.js 创建人工神经网络
* 实现一个简单的多层人工神经网络
* 窥探一下 LayerModel 的内部
* 模型训练
* 调整 LearningRate 观察对训练的影响
* 模型训练 model.fit
* 及时停止模型训练 —— useRef Hook 登场
* 模型推理
- [x] [Develop Tutorial 5 用 Tensorflow.js 处理按数据分类问题](./public/docs/dev/data-classifier.md)
* 分类问题的数据表述
* 标签编码
* One-Hot
* 用 tf.data.Dataset 构造训练集和测试集
* 按比例分配数据集
* 了解 tf.data.Dataset
* 初始化数据集
* SampleDataVis 展示数据样本
* 使用 useEffect 构建细粒度的数据驱动渲染
* AntD Table 的使用
* 全联接网络模型
* 训练
* 调整训练参数:注意一下 Loss 函数
* 使用 Model.fitDataset 训练
* 展示训练过程 —— 在 useState 中使用数组
- [x] [Develop Tutorial 6 MNIST CNN 的 Layer Model 实现](public/docs/dev/mnist-layer-api.md)
* MNIST 的数据集
* MNIST 的数据集的两种格式—— PNG 和 GZ
* 预先下载数据集到本地
* PNG 格式数据的加载和使用
* GZ 格式数据的加载和使用
* 使用 fetch 加载数据文件
* 数据的加载
* 修改 SampleDataVis 以显示图片
* 组件 RowImageWidget—— 使用 useRef 访问 HTML Element
* CNN 网络模型
* 将 tfjs-vis 集成到 React
* 模型训练
* 推理
* canvas 数字手写板的实现 —— DrawPanelWidget
* 使用 Tfjs 将 canvas 位图转化为 Tensor
- [x] [Develop Tutorial 7 MNIST CNN 的 Core API 实现](./public/docs/dev/mnist-core-api.md)
* 数据分片加载
* 使用 Tensorflow.js 的 Core API 构造深度神经网络
* 等价的 Layers API 实现
* 卷积模型的权重参数
* 卷积模型的前向传播计算过程
* 模型的训练——被隐藏的梯度下降和反向传播
- [ ] [Develop Tutorial 8 站在预训练模型的肩上——以 MobileNet 为例 ](./public/docs/dev/mobilenet-basic.md)
- [ ] 待续
<file_sep>/public/preload/model/download_mobilenet_model.py
'''
Tools to download tensorflow.js models
Authot : iascchen
'''
import os
import requests
import json
BASE_FOLDER = "."
# MOBILENET
folders = [
"mobilenet/mobilenet_v1_0.25_224",
"mobilenet/mobilenet_v1_0.50_224",
"mobilenet/mobilenet_v1_0.75_224",
"mobilenet/mobilenet_v1_1.0_224",
]
models_json = [
"https://storage.googleapis.com/tfjs-models/tfjs/mobilenet_v1_0.25_224",
"https://storage.googleapis.com/tfjs-models/tfjs/mobilenet_v1_0.50_224",
"https://storage.googleapis.com/tfjs-models/tfjs/mobilenet_v1_0.75_224",
"https://storage.googleapis.com/tfjs-models/tfjs/mobilenet_v1_1.0_224",
]
models_fn = "model.json"
def get_remote_target(folder, file_name):
    """Join a remote base URL and a file name with a single '/'."""
    return f"{folder}/{file_name}"
def get_target(folder, file_name=None):
    """Build a local path.

    With no file_name: BASE_FOLDER/<folder>.
    With a file_name: <folder>/<file_name> -- in this case `folder` is
    expected to already include BASE_FOLDER (see download_model).
    """
    if file_name is None:
        return f"{BASE_FOLDER}/{folder}"
    return f"{folder}/{file_name}"
def download_model(remote_target, target):
    """Download a tfjs model manifest plus every weight shard it lists.

    remote_target -- base URL of the remote model folder
    target        -- local folder (relative to BASE_FOLDER) to write into
    """
    local_folder = get_target(target)
    if not os.path.exists(local_folder):
        os.makedirs(local_folder)
    # Fetch the manifest first; it enumerates the weight shard files.
    manifest_path = get_target(local_folder, models_fn)
    download_file(get_remote_target(remote_target, models_fn), manifest_path)
    # Then fetch each shard referenced by the manifest.
    for shard_name in parse_model_json(manifest_path):
        download_file(get_remote_target(remote_target, shard_name),
                      get_target(local_folder, shard_name))
def parse_model_json(josn_file):
    """Return the list of weight-shard file names from a tfjs model.json.

    josn_file -- path to a downloaded model.json manifest.
                 (NOTE: parameter name keeps the original 'josn' typo so any
                 keyword callers stay compatible.)
    """
    print("Parsing ==>", josn_file)
    # Context manager closes the handle; the original `open(...).read()`
    # leaked the file descriptor.
    with open(josn_file) as fp:
        data = json.load(fp)
    files = []
    # Each weightsManifest entry carries a "paths" list of shard file names.
    for value in data["weightsManifest"]:
        print("paths ==> ", value["paths"])
        files += value["paths"]
    return files
def download_file(url, target):
    """Stream a remote file to a local path.

    url    -- remote file URL
    target -- local file path to write
    """
    print("Downloading ==>", url, target)
    # stream=True keeps requests from buffering the whole (multi-MB weight)
    # file in memory before iter_content sees the first chunk.
    r = requests.get(url, stream=True)
    # Context manager guarantees the handle is closed even if a chunk write
    # fails; the original leaked the handle on error.
    with open(target, 'wb') as f:
        for chunk in r.iter_content(chunk_size=512 * 1024):
            if chunk:  # filter out keep-alive new chunks
                f.write(chunk)
    return
# Entry point: download every MobileNet variant listed above into its
# matching local folder (models_json and folders are parallel lists).
for remote_url, local_folder in zip(models_json, folders):
    print("Begin ==>", remote_url)
    download_model(remote_url, local_folder)
print("End ==>")
<file_sep>/src/components/iris/data.ts
/**
* @license
* Copyright 2018 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '@tensorflow/tfjs'
import { splitDataSet } from '../../utils'
// The three iris species; the index of each name is the integer class label
// stored in the last column of IRIS_RAW_DATA.
export const IRIS_CLASSES = ['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']
export const IRIS_NUM_CLASSES = IRIS_CLASSES.length
// Each sample row carries 4 numeric features (columns 0-3) followed by the
// class label (column 4). NOTE(review): feature order assumed to follow the
// UCI source below (sepal len, sepal wid, petal len, petal wid) -- confirm.
export const IRIS_NUM_FEATURES = 4
// Iris flowers data. Source:
// https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data
export const IRIS_RAW_DATA = [
    [5.1, 3.5, 1.4, 0.2, 0], [4.9, 3.0, 1.4, 0.2, 0], [4.7, 3.2, 1.3, 0.2, 0],
    [4.6, 3.1, 1.5, 0.2, 0], [5.0, 3.6, 1.4, 0.2, 0], [5.4, 3.9, 1.7, 0.4, 0],
    [4.6, 3.4, 1.4, 0.3, 0], [5.0, 3.4, 1.5, 0.2, 0], [4.4, 2.9, 1.4, 0.2, 0],
    [4.9, 3.1, 1.5, 0.1, 0], [5.4, 3.7, 1.5, 0.2, 0], [4.8, 3.4, 1.6, 0.2, 0],
    [4.8, 3.0, 1.4, 0.1, 0], [4.3, 3.0, 1.1, 0.1, 0], [5.8, 4.0, 1.2, 0.2, 0],
    [5.7, 4.4, 1.5, 0.4, 0], [5.4, 3.9, 1.3, 0.4, 0], [5.1, 3.5, 1.4, 0.3, 0],
    [5.7, 3.8, 1.7, 0.3, 0], [5.1, 3.8, 1.5, 0.3, 0], [5.4, 3.4, 1.7, 0.2, 0],
    [5.1, 3.7, 1.5, 0.4, 0], [4.6, 3.6, 1.0, 0.2, 0], [5.1, 3.3, 1.7, 0.5, 0],
    [4.8, 3.4, 1.9, 0.2, 0], [5.0, 3.0, 1.6, 0.2, 0], [5.0, 3.4, 1.6, 0.4, 0],
    [5.2, 3.5, 1.5, 0.2, 0], [5.2, 3.4, 1.4, 0.2, 0], [4.7, 3.2, 1.6, 0.2, 0],
    [4.8, 3.1, 1.6, 0.2, 0], [5.4, 3.4, 1.5, 0.4, 0], [5.2, 4.1, 1.5, 0.1, 0],
    [5.5, 4.2, 1.4, 0.2, 0], [4.9, 3.1, 1.5, 0.1, 0], [5.0, 3.2, 1.2, 0.2, 0],
    [5.5, 3.5, 1.3, 0.2, 0], [4.9, 3.1, 1.5, 0.1, 0], [4.4, 3.0, 1.3, 0.2, 0],
    [5.1, 3.4, 1.5, 0.2, 0], [5.0, 3.5, 1.3, 0.3, 0], [4.5, 2.3, 1.3, 0.3, 0],
    [4.4, 3.2, 1.3, 0.2, 0], [5.0, 3.5, 1.6, 0.6, 0], [5.1, 3.8, 1.9, 0.4, 0],
    [4.8, 3.0, 1.4, 0.3, 0], [5.1, 3.8, 1.6, 0.2, 0], [4.6, 3.2, 1.4, 0.2, 0],
    [5.3, 3.7, 1.5, 0.2, 0], [5.0, 3.3, 1.4, 0.2, 0], [7.0, 3.2, 4.7, 1.4, 1],
    [6.4, 3.2, 4.5, 1.5, 1], [6.9, 3.1, 4.9, 1.5, 1], [5.5, 2.3, 4.0, 1.3, 1],
    [6.5, 2.8, 4.6, 1.5, 1], [5.7, 2.8, 4.5, 1.3, 1], [6.3, 3.3, 4.7, 1.6, 1],
    [4.9, 2.4, 3.3, 1.0, 1], [6.6, 2.9, 4.6, 1.3, 1], [5.2, 2.7, 3.9, 1.4, 1],
    [5.0, 2.0, 3.5, 1.0, 1], [5.9, 3.0, 4.2, 1.5, 1], [6.0, 2.2, 4.0, 1.0, 1],
    [6.1, 2.9, 4.7, 1.4, 1], [5.6, 2.9, 3.6, 1.3, 1], [6.7, 3.1, 4.4, 1.4, 1],
    [5.6, 3.0, 4.5, 1.5, 1], [5.8, 2.7, 4.1, 1.0, 1], [6.2, 2.2, 4.5, 1.5, 1],
    [5.6, 2.5, 3.9, 1.1, 1], [5.9, 3.2, 4.8, 1.8, 1], [6.1, 2.8, 4.0, 1.3, 1],
    [6.3, 2.5, 4.9, 1.5, 1], [6.1, 2.8, 4.7, 1.2, 1], [6.4, 2.9, 4.3, 1.3, 1],
    [6.6, 3.0, 4.4, 1.4, 1], [6.8, 2.8, 4.8, 1.4, 1], [6.7, 3.0, 5.0, 1.7, 1],
    [6.0, 2.9, 4.5, 1.5, 1], [5.7, 2.6, 3.5, 1.0, 1], [5.5, 2.4, 3.8, 1.1, 1],
    [5.5, 2.4, 3.7, 1.0, 1], [5.8, 2.7, 3.9, 1.2, 1], [6.0, 2.7, 5.1, 1.6, 1],
    [5.4, 3.0, 4.5, 1.5, 1], [6.0, 3.4, 4.5, 1.6, 1], [6.7, 3.1, 4.7, 1.5, 1],
    [6.3, 2.3, 4.4, 1.3, 1], [5.6, 3.0, 4.1, 1.3, 1], [5.5, 2.5, 4.0, 1.3, 1],
    [5.5, 2.6, 4.4, 1.2, 1], [6.1, 3.0, 4.6, 1.4, 1], [5.8, 2.6, 4.0, 1.2, 1],
    [5.0, 2.3, 3.3, 1.0, 1], [5.6, 2.7, 4.2, 1.3, 1], [5.7, 3.0, 4.2, 1.2, 1],
    [5.7, 2.9, 4.2, 1.3, 1], [6.2, 2.9, 4.3, 1.3, 1], [5.1, 2.5, 3.0, 1.1, 1],
    [5.7, 2.8, 4.1, 1.3, 1], [6.3, 3.3, 6.0, 2.5, 2], [5.8, 2.7, 5.1, 1.9, 2],
    [7.1, 3.0, 5.9, 2.1, 2], [6.3, 2.9, 5.6, 1.8, 2], [6.5, 3.0, 5.8, 2.2, 2],
    [7.6, 3.0, 6.6, 2.1, 2], [4.9, 2.5, 4.5, 1.7, 2], [7.3, 2.9, 6.3, 1.8, 2],
    [6.7, 2.5, 5.8, 1.8, 2], [7.2, 3.6, 6.1, 2.5, 2], [6.5, 3.2, 5.1, 2.0, 2],
    [6.4, 2.7, 5.3, 1.9, 2], [6.8, 3.0, 5.5, 2.1, 2], [5.7, 2.5, 5.0, 2.0, 2],
    [5.8, 2.8, 5.1, 2.4, 2], [6.4, 3.2, 5.3, 2.3, 2], [6.5, 3.0, 5.5, 1.8, 2],
    [7.7, 3.8, 6.7, 2.2, 2], [7.7, 2.6, 6.9, 2.3, 2], [6.0, 2.2, 5.0, 1.5, 2],
    [6.9, 3.2, 5.7, 2.3, 2], [5.6, 2.8, 4.9, 2.0, 2], [7.7, 2.8, 6.7, 2.0, 2],
    [6.3, 2.7, 4.9, 1.8, 2], [6.7, 3.3, 5.7, 2.1, 2], [7.2, 3.2, 6.0, 1.8, 2],
    [6.2, 2.8, 4.8, 1.8, 2], [6.1, 3.0, 4.9, 1.8, 2], [6.4, 2.8, 5.6, 2.1, 2],
    [7.2, 3.0, 5.8, 1.6, 2], [7.4, 2.8, 6.1, 1.9, 2], [7.9, 3.8, 6.4, 2.0, 2],
    [6.4, 2.8, 5.6, 2.2, 2], [6.3, 2.8, 5.1, 1.5, 2], [6.1, 2.6, 5.6, 1.4, 2],
    [7.7, 3.0, 6.1, 2.3, 2], [6.3, 3.4, 5.6, 2.4, 2], [6.4, 3.1, 5.5, 1.8, 2],
    [6.0, 3.0, 4.8, 1.8, 2], [6.9, 3.1, 5.4, 2.1, 2], [6.7, 3.1, 5.6, 2.4, 2],
    [6.9, 3.1, 5.1, 2.3, 2], [5.8, 2.7, 5.1, 1.9, 2], [6.8, 3.2, 5.9, 2.3, 2],
    [6.7, 3.3, 5.7, 2.5, 2], [6.7, 3.0, 5.2, 2.3, 2], [6.3, 2.5, 5.0, 1.9, 2],
    [6.5, 3.0, 5.2, 2.0, 2], [6.2, 3.4, 5.4, 2.3, 2], [5.9, 3.0, 5.1, 1.8, 2]
]
/**
* Converts an integer into its one-hot representation and returns
* the data as a JS Array.
*/
export const flatOneHot = (idx: number): number[] => {
// TODO(bileschi): Remove 'Array.from' from here once tf.data supports typed
// arrays https://github.com/tensorflow/tfjs/issues/1041
// TODO(bileschi): Remove '.dataSync()' from here once tf.data supports
// datasets built from tensors.
// https://github.com/tensorflow/tfjs/issues/1046
return Array.from(tf.oneHot([idx], 3).dataSync())
}
/**
 * Splits IRIS_RAW_DATA into train/test tf.data.Datasets of `{ xs, ys }` pairs.
 *
 * @param testSplit Fraction of rows assigned to the test set.
 * @param isOntHot  When true, labels are one-hot encoded via flatOneHot;
 *                  otherwise the raw class index is used.
 *                  NOTE(review): parameter name keeps the original 'isOntHot'
 *                  typo ('isOneHot') for caller compatibility.
 * @param shuffle   Whether to shuffle the rows before splitting.
 * @returns [trainDataset, testDataset]
 */
export const getIrisData = (testSplit: number, isOntHot = true,
    shuffle = true): Array<tf.data.Dataset<tf.TensorContainer>> => {
    // Shuffle a copy of the raw data.
    const shuffled = IRIS_RAW_DATA.slice()
    const [train, test] = splitDataSet(shuffled, testSplit, shuffle)
    // Split the data into X & y: columns 0-3 are the four features,
    // column 4 is the class label.
    const trainX = tf.data.array(train.map(r => r.slice(0, 4)))
    const testX = tf.data.array(test.map(r => r.slice(0, 4)))
    let trainY: tf.data.Dataset<number[]>
    let testY: tf.data.Dataset<number[]>
    if (isOntHot) {
        // TODO(we should be able to just directly use tensors built from oneHot here
        // instead of converting to tensor and back using datasync & Array.from.
        // This causes an internal disposal error however.
        // See https://github.com/tensorflow/tfjs/issues/1071
        // trainY = tf.data.array(train.map(r => tf.oneHot([r[4]], 3)))
        // testY = tf.data.array(test.map(r => tf.oneHot([r[4]], 3)))
        trainY = tf.data.array(train.map(r => flatOneHot(r[4])))
        testY = tf.data.array(test.map(r => flatOneHot(r[4])))
    } else {
        trainY = tf.data.array(train.map(r => [r[4]]))
        testY = tf.data.array(test.map(r => [r[4]]))
    }
    // Recombine the X and y portions of the data.
    const trainDataset = tf.data.zip({ xs: trainX, ys: trainY })
    const testDataset = tf.data.zip({ xs: testX, ys: testY })
    return [trainDataset, testDataset]
}
<file_sep>/.eslintrc.js
// ESLint configuration for the TypeScript + React (hooks) codebase.
module.exports = {
    root: true, // stop ESLint from looking for configs above this directory
    parser: '@typescript-eslint/parser',
    plugins: [
        '@typescript-eslint', "react", 'react-hooks', 'eslint-comments'
    ],
    extends: [
        "react-app",
        'eslint:recommended',
        'plugin:@typescript-eslint/eslint-recommended',
        'plugin:@typescript-eslint/recommended',
        "plugin:react/recommended",
        'standard-with-typescript',
    ],
    parserOptions: {
        project: "./tsconfig.json", // required by type-aware @typescript-eslint rules
        sourceType: 'module', // Allows for the use of imports
    },
    rules: {
        "react-hooks/rules-of-hooks": "error",
        "react-hooks/exhaustive-deps": "warn",
        "@typescript-eslint/interface-name-prefix": ["error", {"prefixWithI": "always"}],
        // 4-space indent project-wide, switch cases indented one level
        "@typescript-eslint/indent": ["error", 4, { 'SwitchCase': 1 }],
        "jsx-quotes": ["error", "prefer-single"],
        '@typescript-eslint/no-unused-vars': ['error', {
            'vars': 'all',
            'args': 'none', // unused callback args are common in React handlers
            'ignoreRestSiblings': true,
        }],
        "@typescript-eslint/strict-boolean-expressions": 0,
    },
    settings: {
        react: {
            version: 'detect', // Tells eslint-plugin-react to automatically detect the version of React to use
        },
    }
};
<file_sep>/public/docs/blog/20200312.md
## 20200312 缘起
> 20200312: 应该是从 20200215 正式确定写这个 Repo。眨眼就一个月了,每天吃吃睡睡、宅家锻炼、处理些公司里的事、做做实验,与龟速的机器和越来越高的墙作斗争。接下来应该还能有半个月的空闲时间,先把第一季各部分的教程写完,并重新优化代码。
多灾多难的 2020 年,必定会在历史上记一笔。春节以来,COVID-19 疫情日益严重,全国人民都在家隔离。憋在家里的这段日子,每天被各种坏消息和好消息蹂躏来,蹂躏去。
时间久了,也渐渐应激疲劳,静下来,找点事,排遣一下时间。
最近几年,我一直在关注如何让用户拥有通过私有数据训练,实现 “私有AI” 能力的方法。不过没有找到特别理想的方案,直到看到 Tensorflow.js 的最近更新。
我们先来看看当前经典的 AI 使能架构:
[前端AI应用+模型Serving+后端计算的示意图]
你的所有数据都会被传送到后端服务器,再使用服务端资源进行训练。在大数据和 AI 面前,你完全没有隐私可言,隐私换智(fang)能(bian),就是当前的现状。
另外,对于小型的 AI 创新服务企业而言,这种模式需要更多的资源才能实施,一是数据传输量大,二是服务端计算资源要求也不老少。
相对理想的架构是将一些的 AI 计算放在客户端,也就是大家常说的“边缘计算”。在这种模式下,客户端在本地上具有足够的“智能”,除了能够进行必要的事件处理,过滤“无用(也许吧)”的数据,更妙的是在客户端可以进行“小数据”个性化训练,实现“私有 AI”。在这种架构下,服务端负责提供在“大数据”上预先训练好的基础模型,客户端进行“小数据”上的私人订制,既可以保持隐私,也能够享受到更智能私人服务。
### Tensorflow.js
Tensorflow.js 在 2月4日 除夕那天发布了 v1.5.2,能够在浏览器端做训练了!!!能直接在浏览器端做训练,即使只是“简单”的训练,是个很有意义的大进步。可以在用户(浏览器)端进行个性化的训练,进行隐私信息的脱敏,再和服务端更强大的AI能力相配合,相信不久之后,我们能够看到更多既能保持个人数据隐私,也足够智能灵活的新型AI应用。Google也提供了“联邦计算”的例子,也许这是“私人AI”时代的曙光。
[后端预训练+前端个性化训练+联邦计算的示意图]
除此之外,模型加载、存储和转换相关功能基本完备,能够很顺畅地与 Tensorflow v2.0 以上版本训练所产生的模型进行对接。
### React Hooks
因为工作性质的关系,我自己的主要时间都放在了团队管理、技术预研和架构设计方面,虽然也动手做一部分代码,但是对于不少新的技术特性细节,都得靠团队小伙伴们的鼎力支持了。
我的团队主要使用基于 JavaScript/TypeScript 的 React + Redux + Express 进行全栈开发,除了人工智能后端用 Python。
使用 React 已经是相当舒适的编程体验了,不过如果涉及到复杂的组件间信息交互,以及多层父子之间的数据共享,就必须使用高阶组件+Context、或者 Redux Connect 等形式传递数据,这样的代码风格,并不简洁和优美。
React Hooks 是 React 在 16.8 之后支持的新的特性,发布至今已经一年了吧。因为当前工作中的项目里一直使用的是 Component 的对象形式,一直没有太仔细的学习和使用。React Hooks 更容易将组件的 UI 与状态分离,不用再写很看起来不爽的 setState,不用担心在 componentDidMount、componentWillUnmount 忘了做这做那。使用 React Hooks 开发,可以用很直观的数据变化驱动界面变化的思路来组织代码,实际使用,还真香。
### TypeScript
工作中,只要求团队在后端开发使用 TypeScript。
作为一个资深老 Java,最近几年,使用 JavaScript 和 Python 居多,确实有一种放飞自我的感觉。源自于 Java 开发养成的代码习惯,加上 JS 和 Python 的灵活方便,大大提高了我将创意和想法快速开发实现的效率。不过,因为经常在多种语言之间切来切去,不少语法细节到用时也会经常感到费解和奇怪。
趁这个时间,把 TypeScript 的语法细节也捋了一下,实践中,也遇到了不少以前忽略的 TypeScript 使用细节。
<file_sep>/src/routers.ts
import { RouteConfig } from 'react-router-config'
import Home from './components/common/Home'
import Curve from './components/curve/Curve'
import Iris from './components/iris/Iris'
import MnistLayersApiImpl from './components/mnist/MnistLayersApiImpl'
import MnistCoreApiImpl from './components/mnist/MnistCoreApiImpl'
import MobilenetClassifier from './components/mobilenet/MobilenetClassifier'
import MobilenetKnnClassifier from './components/mobilenet/MobilenetKnnClassifier'
import MobilenetTransfer from './components/mobilenet/MobilenetTransfer'
import MobilenetObjDetector from './components/mobilenet/MobilenetObjDetector'
import JenaWeather from './components/rnn/JenaWeather'
import SentimentImdb from './components/rnn/SentimentImdb'
import TextGenLstm from './components/rnn/TextGenLstm'
import HandPosePanel from './components/pretrained/HandPosePanel'
import FaceMeshPanel from './components/pretrained/FaceMeshPanel'
import PoseNetPanel from './components/pretrained/PoseNetPanel'
import FetchWidget from './components/sandbox/FetchWidget'
import TypedArrayWidget from './components/sandbox/TypedArrayWidget'
import TfvisWidget from './components/sandbox/TfvisWidget'
// Route table consumed by react-router-config. Order matters: the
// catch-all '*' entry must stay last.
const routes: RouteConfig[] = [
    { path: '/', exact: true, component: Home },
    // Basic tutorials: curve fitting & iris classification
    { path: '/curve', component: Curve },
    { path: '/iris', component: Iris },
    // MNIST handwritten-digit examples (Layers API vs Core API)
    { path: '/mnist/layers', component: MnistLayersApiImpl },
    { path: '/mnist/core', component: MnistCoreApiImpl },
    // MobileNet-based examples
    { path: '/mobilenet/basic', component: MobilenetClassifier },
    { path: '/mobilenet/knn', component: MobilenetKnnClassifier },
    { path: '/mobilenet/transfer', component: MobilenetTransfer },
    { path: '/mobilenet/objdetector', component: MobilenetObjDetector },
    // RNN examples
    { path: '/rnn/jena', component: JenaWeather },
    { path: '/rnn/sentiment', component: SentimentImdb },
    { path: '/rnn/lstm', component: TextGenLstm },
    // Pre-trained model demos
    { path: '/pretrained/handpose', component: HandPosePanel },
    { path: '/pretrained/facemesh', component: FaceMeshPanel },
    { path: '/pretrained/posenet', component: PoseNetPanel },
    // Sandbox widgets
    { path: '/sandbox/fetch', component: FetchWidget },
    { path: '/sandbox/array', component: TypedArrayWidget },
    { path: '/sandbox/tfvis', component: TfvisWidget },
    // Fallback: any unknown path renders Home
    { path: '*', component: Home }
]
export default routes
<file_sep>/src/components/mobilenet/modelTransfer.ts
import * as tf from '@tensorflow/tfjs'
import { MOBILENET_MODEL_PATH } from './mobilenetUtils'
import { logger } from '../../utils'
/**
 * Loads the pre-trained MobileNet and truncates it at the 'conv_pw_13_relu'
 * activation, yielding a frozen feature extractor for transfer learning.
 *
 * @returns A non-trainable LayersModel mapping MobileNet inputs to the
 *          internal 'conv_pw_13_relu' activation.
 */
export const createTruncatedMobileNet = async (): Promise<tf.LayersModel> => {
    const mobilenet = await tf.loadLayersModel(MOBILENET_MODEL_PATH)
    // Return a model that outputs an internal activation.
    const layer = mobilenet.getLayer('conv_pw_13_relu')
    const truncatedMobileNet = tf.model({ inputs: mobilenet.inputs, outputs: layer.output })
    // Freeze the base so only the new head (see createModel) is trained.
    truncatedMobileNet.trainable = false
    return truncatedMobileNet
}
/**
 * Builds the trainable classification head that consumes the truncated
 * MobileNet's activation.
 *
 * @param truncatedMobileNet Model from createTruncatedMobileNet; only its
 *        output shape is read here (batch dimension stripped).
 * @param outputClasses Number of classes the new head predicts.
 * @param denseUnits Size of the hidden dense layer (default 10).
 * @returns A two-layer sequential softmax classifier.
 */
export const createModel = (truncatedMobileNet: tf.LayersModel, outputClasses: number, denseUnits = 10): tf.LayersModel => {
    const inputShape = truncatedMobileNet.outputs[0].shape.slice(1)
    logger('inputShape', inputShape)
    const modelWillTrained = tf.sequential({
        layers: [
            // Flattens the input to a vector so we can use it in a dense layer. While
            // technically a layer, this only performs a reshape (and has no training
            // parameters).
            tf.layers.flatten({ inputShape }),
            // Layer 1.
            tf.layers.dense({
                units: denseUnits, activation: 'relu', kernelInitializer: 'varianceScaling', useBias: true
            }),
            // Layer 2. The number of units of the last layer should correspond
            // to the number of classes we want to predict.
            tf.layers.dense({
                units: outputClasses, kernelInitializer: 'varianceScaling', useBias: false, activation: 'softmax'
            })
        ]
    })
    return modelWillTrained
}
<file_sep>/public/docs/ai/jena.md
# 处理连续数据的模型:循环神经网络
## Jena 天气预报
### 数据
**注意**
* 如果您要在本地环境运行这个例子,最好预先下载数据文件。并将数据文件放在此项目的 `./public/data` 目录下。
[https://storage.googleapis.com/learnjs-data/jena_climate/jena_climate_2009_2016.csv](https://storage.googleapis.com/learnjs-data/jena_climate/jena_climate_2009_2016.csv)
* 所需的数据大约有 41.2MB。
* 刷新页面,会丢失已经加载的数据。
这个例子里最重要的部分是构建训练数据集的部分,参考如下相关代码。
getNextBatchFunction = (shuffle: boolean, lookBack: number, delay: number, batchSize: number, step: number, minIndex: number, maxIndex: number, normalize: boolean,
includeDateTime: boolean): any => {
let startIndex = minIndex + lookBack
const lookBackSlices = Math.floor(lookBack / step)
return {
next: () => {
const rowIndices = []
let done = false // Indicates whether the dataset has ended.
if (shuffle) {
// If `shuffle` is `true`, start from randomly chosen rows.
const range = maxIndex - (minIndex + lookBack)
for (let i = 0; i < batchSize; ++i) {
const row = minIndex + lookBack + Math.floor(Math.random() * range)
rowIndices.push(row)
}
} else {
// If `shuffle` is `false`, the starting row indices will be sequential.
let r = startIndex
for (; r < startIndex + batchSize && r < maxIndex; ++r) {
rowIndices.push(r)
}
if (r >= maxIndex) {
done = true
}
}
const numExamples = rowIndices.length
startIndex += numExamples
const featureLength =
includeDateTime ? this.numColumns + 2 : this.numColumns
const samples = tf.buffer([numExamples, lookBackSlices, featureLength])
const targets = tf.buffer([numExamples, 1])
// Iterate over examples. Each example contains a number of rows.
for (let j = 0; j < numExamples; ++j) {
const rowIndex = rowIndices[j]
let exampleRow = 0
// Iterate over rows in the example.
for (let r = rowIndex - lookBack; r < rowIndex; r += step) {
let exampleCol = 0
// Iterate over features in the row.
for (let n = 0; n < featureLength; ++n) {
let value
if (n < this.numColumns) {
value = normalize ? this.normalizedData[r][n] : this.data[r][n]
} else if (n === this.numColumns) {
// Normalized day-of-the-year feature.
value = this.normalizedDayOfYear[r]
} else {
// Normalized time-of-the-day feature.
value = this.normalizedTimeOfDay[r]
}
samples.set(value, j, exampleRow, exampleCol++)
}
const value = normalize
? this.normalizedData[r + delay][this.tempCol]
: this.data[r + delay][this.tempCol]
targets.set(value, j, 0)
exampleRow++
}
}
return {
value: { xs: samples.toTensor(), ys: targets.toTensor() },
done
}
}
}
}
### 模型
提供以下神经网络模型,供比较。
* linear-regression 单层线性回归模型
* mlp 多层感知机
* mlp-l2 多层感知机,使用 L2 正则化
* mlp-dropout 多层感知机,使用 Dropout 处理过拟合
* simpleRnn 简单的 RNN 模型
* gru GRU 模型
https://zhuanlan.zhihu.com/p/32481747
模型的输入 Shape 和所加载的特征数据的列数有关。(=14)
#### RNN
循环神经网络(Recurrent Neural Network,RNN)是一种用于处理序列数据的神经网络。
相比一般的神经网络来说,它能够处理序列变化的数据。比如某个单词的意思会因为上文提到的内容不同而有不同的含义,RNN就能够很好地解决这类问题。
https://zhuanlan.zhihu.com/p/32085405
RNN 的伪代码
y=0
for x in input_sequence:
y = f(dot(W, x) + dot(U, y))
#### GRU
GRU(Gate Recurrent Unit)是循环神经网络(Recurrent Neural Network, RNN)的一种。和LSTM(Long-Short Term Memory)一样,也是为了解决长期记忆和反向传播中的梯度等问题而提出来的。
GRU和LSTM在很多情况下实际表现上相差无几,那么为什么我们要使用新人GRU(2014年提出)而不是相对经受了更多考验的LSTM(1997提出)呢。
"我们在我们的实验中选择GRU是因为它的实验效果与LSTM相似,但是更易于计算。"
简单来说就是贫穷限制了我们的计算能力...
相比LSTM,使用GRU能够达到相当的效果,并且相比之下更容易进行训练,能够很大程度上提高训练效率,因此很多时候会更倾向于使用GRU。
GRU 的伪代码
h=0
for x_i in input_sequence:
z = sigmoid(dot(W_z, x) + dot(U_z, h))
r = sigmoid(dot(W_r, x) + dot(W_r, h))
h_prime = tanh(dot(W, x) + dot(r, dot(U, h)))
h = dot(1 - z, h) + dot(z, h_prime)
#### LSTM
https://zhuanlan.zhihu.com/p/74034891
### 使用 RNN 进行语义分析
对文章进行 multi-hot 分析,对 IMDB 数据进行分析。
更高效的技术:词嵌入
我们已经了解了如何使用和构建自己的 CNN 网络。
接下来,我们来认识另外一个重要的深度神经网络模型:循环神经网络 RNN
RNN 被用于处理文本理解、语音识别等场景。这类场景的共同特点是:
**问题结果与过去的几个输入数据都相关**
简单说来就是 y = f( Wx + Uy)
RNN
LSTM
中文分词
作诗、作文、作曲Magenta
生成模型:图像风格迁移,
seq-to-seq 的其他应用,DNA 碱基对,代码生成。。。
BERT
使用 RNN 处理视频、音频。。。
## 第九章 声音的处理:对信号的AI处理
傅立叶变换将信号转成频谱图 -> 图像处理
声音的输入
基于 LSTM 的 语音识别
## GAN 对抗生成网络
<file_sep>/public/docs/ai/mnist-core-api.md
# 手写数字识别 MNIST
## 知识点
这个例子涉及以下的 AI 知识点:
* 卷积神经网络
* MaxPooling
* Dropout
* Tensorflow Layers API
* Tensorflow Core API
## 问题
## 数据
MNIST 数据:
* gz 压缩格式的数据,包括 60000 个样本的训练集和测试集数据及其标注:下载地址为 [https://storage.googleapis.com/cvdf-datasets/mnist/](https://storage.googleapis.com/cvdf-datasets/mnist/)
* png 数据,为 Web 版本预处理的图片数据,包括 55000 个数据
## 模型
* 用 LayerModel 构建的多层感知机 MLP 模型
* CNN + 池化 模型
* CNN + Dropout 模型
* 使用 Tensorflow 核心 API 构建的计算网络
## 训练
观察数据,以及随着训练模型参数的变化,观察测试集的推理结果正确情况。
## 推理
在画板上,手写输入数字,观察在其推理输出结果。
## 使用 tfjs-node 加速训练
ts-node --project tsconfig.node.json ./node/mnist/main.ts
<file_sep>/docker_build.sh
docker build -t iasc/react-tfjs-capm .
<file_sep>/node/src/tfjsEnv.ts
import * as tf from '@tensorflow/tfjs-node'
// Sanity-check script: prints the active tfjs-node backend and the
// environment's feature flags.
console.log('Backend : ', tf.getBackend())
console.log('Env.features : ', tf.ENV.features)
<file_sep>/node/src/simpleObjDetector/dataObjDetector.ts
/**
* @license
* Copyright 2018 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
/**
* Module for synthesizing images to be used for training and testing the
* simple object-detection model.
*
* This module is written in a way that can be used in both the Node.js-based
* training pipeline (train.ts) and the browser-based testing environment
* (index.js).
*/
import * as tf from '@tensorflow/tfjs'
import {Canvas} from 'canvas'
// let tf // tensorflowjs module passed in for browser/node compatibility.
/**
* Generate a random color style for canvas strokes and fills.
*
* @returns {string} Style string in the form of 'rgb(100,200,250)'.
*/
/**
 * Generate a random color style for canvas strokes and fills.
 *
 * @returns {string} Style string in the form of 'rgb(100,200,250)'.
 */
const generateRandomColorStyle = (): string => {
    // Each RGB channel is an independent uniform random integer in [0, 255].
    const channel = (): number => Math.round(Math.random() * 255)
    return `rgb(${channel()},${channel()},${channel()})`
}
/**
* Synthesizes images for simple object recognition.
*
* The synthesized images consist of
* - a white background
* - a configurable number of circles of random radii and random color
* - a configurable number of line segments of random starting and ending
* points and random color
* - Target object: a rectangle or a triangle, with configurable probabilities.
* - If a rectangle, the side lengths are random and so is the color
* - If a triangle, it is always equilateral. The side length and the color
* is random and the triangle is rotated by a random angle.
*/
class ObjectDetectionImageSynthesizer {
    // Min and max of circles' radii.
    CIRCLE_RADIUS_MIN = 5
    CIRCLE_RADIUS_MAX = 20
    // Min and max of rectangle side lengths.
    RECTANGLE_SIDE_MIN = 40
    RECTANGLE_SIDE_MAX = 100
    // Min and max of triangle side lengths.
    TRIANGLE_SIDE_MIN = 50
    TRIANGLE_SIDE_MAX = 100
    // Canvas dimensions.
    canvas: Canvas
    w: number
    h: number

    /**
     * Constructor of ObjectDetectionImageSynthesizer.
     *
     * @param {} canvas An HTML canvas object or node-canvas object.
     */
    constructor (canvas: Canvas) {
        this.canvas = canvas
        // Canvas dimensions.
        this.w = this.canvas.width
        this.h = this.canvas.height
    }

    /**
     * Generate a single image example.
     *
     * @param {number} numCircles Number of circles (background object type)
     *     to include.
     * @param {number} numLines Number of line segments (background object
     *     type) to include
     * @param {number} triangleProbability The probability of the target
     *     object being a triangle (instead of a rectangle). Must be a number
     *     >= 0 and <= 1. Default: 0.5.
     * @returns {Object} An object with the following fields:
     *     - image: A [w, h, 3]-shaped tensor for the pixel content of the image.
     *       w and h are the width and height of the canvas, respectively.
     *     - target: A [5]-shaped tensor. The first element is a 0-1 indicator
     *       for whether the target is a triangle (0) or a rectangle (1).
     *       The remaining four elements are the bounding box of the shape:
     *       [left, right, top, bottom], in the unit of pixels.
     *     Returns undefined when no 2D context can be obtained from the canvas.
     */
    generateExample = async (numCircles: number, numLines: number, triangleProbability: number): Promise<tf.TensorContainerObject | void> => {
        // Fall back to 0.5 when the caller passes null/undefined.
        const _triangleProbability = triangleProbability ?? 0.5
        // tf.util.assert(
        //     triangleProbability >= 0 && triangleProbability <= 1,
        //     'triangleProbability must be a number >= 0 and <= 1, but got ' +
        //     `${triangleProbability}`)
        const ctx = this.canvas.getContext('2d')
        if (!ctx) {
            return
        }
        ctx.clearRect(0, 0, this.w, this.h) // Clear canvas.
        // Draw circles (1st half).
        for (let i = 0; i < numCircles / 2; ++i) {
            this.drawCircle(ctx)
        }
        // Draw lines segments (1st half).
        for (let i = 0; i < numLines / 2; ++i) {
            this.drawLineSegment(ctx)
        }
        // Draw the target object: a rectangle or an equilateral triangle.
        // Determine whether the target is a rectangle or a triangle.
        const isRectangle = Math.random() > _triangleProbability
        let boundingBox: number[]
        ctx.fillStyle = generateRandomColorStyle()
        ctx.beginPath()
        if (isRectangle) {
            // Draw a rectangle.
            // Both side lengths of the rectangle are random and independent of
            // each other.
            const rectangleW =
                Math.random() * (this.RECTANGLE_SIDE_MAX - this.RECTANGLE_SIDE_MIN) +
                this.RECTANGLE_SIDE_MIN
            const rectangleH =
                Math.random() * (this.RECTANGLE_SIDE_MAX - this.RECTANGLE_SIDE_MIN) +
                this.RECTANGLE_SIDE_MIN
            const centerX = (this.w - rectangleW) * Math.random() + (rectangleW / 2)
            const centerY = (this.h - rectangleH) * Math.random() + (rectangleH / 2)
            // NOTE(review): drawRectangle is declared as (ctx, centerX, centerY,
            // w, h) but is called here with (..., rectangleH, rectangleW) --
            // width/height appear swapped relative to the center computation
            // above, which may let the shape extend past the canvas edge.
            // The returned bounding box still matches the drawn shape. Confirm.
            boundingBox = this.drawRectangle(ctx, centerX, centerY, rectangleH, rectangleW)
        } else {
            // Draw an equilateral triangle, rotated by a random angle.
            // The distance from the center of the triangle to any of the three
            // vertices.
            const side = this.TRIANGLE_SIDE_MIN +
                (this.TRIANGLE_SIDE_MAX - this.TRIANGLE_SIDE_MIN) * Math.random()
            const centerX = (this.w - side) * Math.random() + (side / 2)
            const centerY = (this.h - side) * Math.random() + (side / 2)
            // Rotate the equilateral triangle by a random angle uniformly
            // distributed between 0 and 120 degrees.
            const angle = Math.PI / 3 * 2 * Math.random() // 0 - 120 degrees.
            boundingBox = this.drawTriangle(ctx, centerX, centerY, side, angle)
        }
        ctx.fill()
        // Draw circles (2nd half).
        for (let i = numCircles / 2; i < numCircles; ++i) {
            this.drawCircle(ctx)
        }
        // Draw lines segments (2nd half).
        for (let i = numLines / 2; i < numLines; ++i) {
            this.drawLineSegment(ctx)
        }
        // tf.tidy disposes any intermediate tensors created while building
        // the returned image/target pair.
        return tf.tidy(() => {
            // @ts-ignore
            const imageTensor = tf.browser.fromPixels(this.canvas)
            const shapeClassIndicator = isRectangle ? 1 : 0
            const targetTensor = tf.tensor1d([shapeClassIndicator].concat(boundingBox))
            return { image: imageTensor, target: targetTensor }
        })
    }

    // Draws one filled circle; any omitted argument is randomized within the
    // canvas / radius bounds.
    drawCircle = (ctx: CanvasRenderingContext2D, centerX?: number, centerY?: number, radius?: number): void => {
        centerX = centerX == null ? this.w * Math.random() : centerX
        centerY = centerY == null ? this.h * Math.random() : centerY
        radius = radius == null ? this.CIRCLE_RADIUS_MIN +
            (this.CIRCLE_RADIUS_MAX - this.CIRCLE_RADIUS_MIN) * Math.random()
            : radius
        ctx.fillStyle = generateRandomColorStyle()
        ctx.beginPath()
        ctx.arc(centerX, centerY, radius, 0, Math.PI * 2)
        ctx.fill()
    }

    // Draws one stroked line segment; any omitted endpoint coordinate is
    // randomized within the canvas bounds.
    drawLineSegment = (ctx: CanvasRenderingContext2D, x0?: number, y0?: number, x1?: number, y1?: number): void => {
        x0 = x0 == null ? Math.random() * this.w : x0
        y0 = y0 == null ? Math.random() * this.h : y0
        x1 = x1 == null ? Math.random() * this.w : x1
        y1 = y1 == null ? Math.random() * this.h : y1
        ctx.strokeStyle = generateRandomColorStyle()
        ctx.beginPath()
        ctx.moveTo(x0, y0)
        ctx.lineTo(x1, y1)
        ctx.stroke()
    }

    /**
     * Draw a rectangle.
     *
     * A rectangle is a target object in the simple object detection task here.
     * Therefore, its bounding box is returned.
     *
     * Unlike drawTriangle, this method does not set fillStyle or call
     * beginPath/fill itself -- the caller (generateExample) does that.
     *
     * @param {} ctx Canvas context.
     * @param {number} centerX Center x-coordinate of the rectangle.
     * @param {number} centerY Center y-coordinate of the rectangle.
     * @param {number} w Width of the rectangle.
     * @param {number} h Height of the rectangle.
     * @returns {[number, number, number, number]} Bounding box of the rectangle:
     *     [left, right, top, bottom].
     */
    drawRectangle = (ctx: CanvasRenderingContext2D, centerX: number, centerY: number, w: number, h: number): number[] => {
        ctx.moveTo(centerX - w / 2, centerY - h / 2)
        ctx.lineTo(centerX + w / 2, centerY - h / 2)
        ctx.lineTo(centerX + w / 2, centerY + h / 2)
        ctx.lineTo(centerX - w / 2, centerY + h / 2)
        return [centerX - w / 2, centerX + w / 2, centerY - h / 2, centerY + h / 2]
    }

    /**
     * Draw an equilateral triangle.
     *
     * A triangle is a target object in the simple object detection task here.
     * Therefore, its bounding box is returned.
     *
     * NOTE(review): unlike drawRectangle, this method sets its own fillStyle
     * and calls beginPath, overriding the style chosen in generateExample --
     * confirm this asymmetry is intended.
     *
     * @param {} ctx Canvas context.
     * @param {number} centerX Center x-coordinate of the triangle.
     * @param {number} centerY Center y-coordinate of the triangle.
     * @param {number} side Length of the side.
     * @param {number} angle Angle that the triangle is rotated for, in radians.
     * @returns {[number, number, number, number]} Bounding box of the triangle,
     *     with the rotation taken into account: [left, right, top, bottom].
     */
    drawTriangle = (ctx: CanvasRenderingContext2D, centerX: number, centerY: number, side: number, angle: number): number[] => {
        // Distance from the centroid to each vertex of an equilateral triangle.
        const ctrToVertex = side / 2 / Math.cos(30 / 180 * Math.PI)
        ctx.fillStyle = generateRandomColorStyle()
        ctx.beginPath()
        // The three vertices are 120 degrees apart, starting at angle + 90deg.
        const alpha1 = angle + Math.PI / 2
        const x1 = centerX + Math.cos(alpha1) * ctrToVertex
        const y1 = centerY + Math.sin(alpha1) * ctrToVertex
        const alpha2 = alpha1 + Math.PI / 3 * 2
        const x2 = centerX + Math.cos(alpha2) * ctrToVertex
        const y2 = centerY + Math.sin(alpha2) * ctrToVertex
        const alpha3 = alpha2 + Math.PI / 3 * 2
        const x3 = centerX + Math.cos(alpha3) * ctrToVertex
        const y3 = centerY + Math.sin(alpha3) * ctrToVertex
        ctx.moveTo(x1, y1)
        ctx.lineTo(x2, y2)
        ctx.lineTo(x3, y3)
        const xs = [x1, x2, x3]
        const ys = [y1, y2, y3]
        return [Math.min(...xs), Math.max(...xs), Math.min(...ys), Math.max(...ys)]
    }

    /**
     * Generate a number (i.e., batch) of examples.
     *
     * @param {number} batchSize Number of example image in the batch.
     * @param {number} numCircles Number of circles (background object type)
     *     to include.
     * @param {number} numLines Number of line segments (background object type)
     *     to include.
     * @param {number} triangleProbability Probability of the target being a
     *     triangle; defaults to 0.5 when omitted.
     * @returns {Object} An object with the following fields:
     *     - images: A [batchSize, w, h, 3]-shaped tensor for the pixel content
     *       of the images. w and h are the width and height of the canvas,
     *       respectively.
     *     - targets: A [batchSize, 5]-shaped tensor. The first column is a 0-1
     *       indicator for whether the target is a triangle (0) or a rectangle
     *       (1). The remaining four columns are the bounding box of the shape:
     *       [left, right, top, bottom], in the unit of pixels.
     */
    generateExampleBatch = async (batchSize: number, numCircles: number, numLines: number, triangleProbability?: number): Promise<tf.TensorContainerObject | void> => {
        if (triangleProbability == null) {
            triangleProbability = 0.5
        }
        const imageTensors: tf.Tensor[] = []
        const targetTensors: tf.Tensor[] = []
        for (let i = 0; i < batchSize; ++i) {
            const { image, target } = await this.generateExample(numCircles, numLines, triangleProbability) as tf.TensorContainerObject
            imageTensors.push(image as tf.Tensor)
            targetTensors.push(target as tf.Tensor)
        }
        const images = tf.stack(imageTensors)
        const targets = tf.stack(targetTensors)
        // Free the per-example tensors now that they are stacked into batches.
        tf.dispose([imageTensors, targetTensors])
        return { images, targets }
    }

    dispose = (): void => {
        // TODO
    }
}
export default ObjectDetectionImageSynthesizer
<file_sep>/src/components/mobilenet/modelObjDetector.ts
import * as tf from '@tensorflow/tfjs'
import { MOBILENET_IMAGE_SIZE, MOBILENET_MODEL_PATH } from './mobilenetUtils'
// Scales the first (0-1 shape indicator) element of each 5-element label so
// its loss contribution is comparable to the pixel-valued bounding-box
// elements -- see customLossFunction below.
const LABEL_MULTIPLIER = [MOBILENET_IMAGE_SIZE, 1, 1, 1, 1]
// Name prefixes of layers that will be unfrozen during fine-tuning.
const topLayerGroupNames = ['conv_pw_9', 'conv_pw_10', 'conv_pw_11']
// Name of the layer that will become the top layer of the truncated base.
const topLayerName = `${topLayerGroupNames[topLayerGroupNames.length - 1]}_relu`

// A layers model bundled with the subset of its layers that may be
// re-enabled for fine-tuning.
interface IModelWithFineTuning {
    model: tf.LayersModel
    fineTuningLayers: tf.layers.Layer[]
}
/**
 * Loads MobileNet, truncates it at `topLayerName`, freezes every layer, and
 * collects the layers whose names match a `topLayerGroupNames` prefix for
 * later fine-tuning.
 */
const loadTruncatedBase = async (): Promise<IModelWithFineTuning> => {
    const mobilenet = await tf.loadLayersModel(MOBILENET_MODEL_PATH)
    // Return a model that outputs an internal activation.
    const fineTuningLayers: tf.layers.Layer[] = []
    const layer = mobilenet.getLayer(topLayerName)
    const truncatedBase = tf.model({ inputs: mobilenet.inputs, outputs: layer.output })
    // Freeze the model's layers.
    for (const layer of truncatedBase.layers) {
        layer.trainable = false
        for (const groupName of topLayerGroupNames) {
            // Match by name prefix, e.g. 'conv_pw_9...' belongs to 'conv_pw_9'.
            if (layer.name.indexOf(groupName) === 0) {
                fineTuningLayers.push(layer)
                break
            }
        }
    }
    return { model: truncatedBase, fineTuningLayers }
}
/**
 * Builds the replacement head that sits on top of the truncated MobileNet base.
 *
 * Output layout (5 units):
 *   - unit 0: shape indicator — predicts triangle vs. rectangle
 *   - units 1-4: bounding box [left, right, top, bottom] in pixels
 */
const buildNewHead = (inputShape: tf.Shape): tf.LayersModel => {
    const head = tf.sequential({
        layers: [
            tf.layers.flatten({ inputShape }),
            tf.layers.dense({ units: 200, activation: 'relu' }),
            tf.layers.dense({ units: 5 })
        ]
    })
    return head
}
// Assembles the full object-detection model: the frozen, truncated MobileNet
// base followed by a freshly initialized dense head. Also returns the base
// layers that may be unfrozen later for fine-tuning.
export const buildObjectDetectionModel = async (): Promise<IModelWithFineTuning> => {
    const { model: truncatedBase, fineTuningLayers } = await loadTruncatedBase()
    // Build the new head model. slice(1) drops the batch dimension from the
    // base output shape to get the head's inputShape.
    const newHead = buildNewHead(truncatedBase.outputs[0].shape.slice(1))
    const newOutput = newHead.apply(truncatedBase.outputs[0])
    const model = tf.model({ inputs: truncatedBase.inputs, outputs: newOutput as tf.SymbolicTensor })
    return { model, fineTuningLayers }
}
// Custom loss for the object detector: mean squared error between the scaled
// true labels and the raw predictions.
export const customLossFunction = (yTrue: tf.Tensor, yPred: tf.Tensor): tf.Tensor => {
    return tf.tidy(() => {
        // Scale the first column (0-1 shape indicator) of `yTrue` in order
        // to ensure balanced contributions to the final loss value
        // from shape and bounding-box predictions.
        return tf.metrics.meanSquaredError(yTrue.mul(LABEL_MULTIPLIER), yPred)
    })
}
<file_sep>/src/components/mnist/mnistConsts.ts
/**
* @license
* Copyright 2018 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import { TensorContainerObject } from '@tensorflow/tfjs'
// MNIST images are 28x28 grayscale.
export const IMAGE_H = 28
export const IMAGE_W = 28
// Pixels per flattened image.
export const IMAGE_SIZE = IMAGE_H * IMAGE_W
// Digit classes 0-9.
export const NUM_CLASSES = 10
// Common interface implemented by the MNIST dataset loaders.
export interface IMnistDataset {
    loadData: () => Promise<void>
    // numExamples / batchSize select how many examples the returned
    // tensor container holds.
    getTrainData: (numExamples?: number) => TensorContainerObject
    getTestData: (numExamples?: number) => TensorContainerObject
    nextTrainBatch: (batchSize: number) => TensorContainerObject
    nextTestBatch: (batchSize: number) => TensorContainerObject
}
<file_sep>/src/utils.ts
import * as tf from '@tensorflow/tfjs'
import * as zlib from 'zlib'
import { UploadFile } from 'antd/es/upload/interface'
// Project-wide logging shim; currently just console.log.
export const logger = console.log
// Logs an error. The antd message popup is intentionally disabled
// (see the commented-out line below).
export const loggerError = (e: any): void => {
    logger(e)
    // eslint-disable-next-line @typescript-eslint/no-floating-promises
    // message.error(e)
}
// Shared type aliases and interfaces used across the app's components.
export type IDataSet = tf.data.Dataset<tf.TensorContainer>
export type IArray = any[]
// Lifecycle states shown in the UI status fields.
export enum STATUS {
    INIT = 'Init',
    READY = 'Ready',
    WAITING = 'Waiting',
    LOADED = 'Loaded',
    TRAINED = 'Trained',
    PREDICTED = 'Predicted',
    CALCULATED = 'Calculated',
    STOPPED = 'Stopped',
}
// Training progress snapshot passed to visualization widgets.
export interface ITrainInfo {
    iteration?: number
    logs: tf.Logs
}
// One labeled image; may carry its pixels as a tensor, a base64 string, or both.
export interface ILabeledImage {
    uid: string
    name: string
    tensor?: tf.Tensor3D | undefined
    img?: string | undefined // base64 of image
}
// A label together with the images belonging to it.
export interface ILabeledImageSet {
    label: string
    imageList?: ILabeledImage[]
}
// JSON file format for persisted labeled-image sets.
export interface ILabeledImageFileJson {
    keys?: number[]
    labeledImageSetList: ILabeledImageSet[]
}
// Result of a KNN classifier prediction: winning label plus per-label confidences.
export interface IKnnPredictResult {
    label: string
    classIndex: number
    confidences: {
        [label: string]: number
    }
}
// Option entry for the layer-selection dropdowns.
export interface ILayerSelectOption {
    name: string
    index: number
}
// Maps a class index to its display label.
export interface ILabelMap {
    [index: number]: string
}
// Builds a plain number array via tf.range.
// NOTE(review): with the default `to = 0`, a call like range(5) becomes
// tf.range(5, 0), which tf.js rejects for the default positive step —
// callers appear to always pass `to` explicitly; confirm before relying
// on the single-argument form.
export const range = (from: number, to = 0): number[] => {
    // return [...Array(Math.abs(to - from)).keys()].map(v => v + from)
    return Array.from(tf.range(from, to).dataSync())
}
// Splits the records into [train, test], where the test portion holds
// `testSplit` (rounded) of the data. When `shuffle` is true the input
// array is shuffled in place first.
export const splitDataSet = (shuffled: IArray, testSplit: number, shuffle = false): IArray[] => {
    if (shuffle) {
        tf.util.shuffle(shuffled)
    }
    const total = shuffled.length
    const testCount = Math.round(total * testSplit)
    // Everything before the split point is training data, the rest is test data.
    const splitPoint = total - testCount
    return [shuffled.slice(0, splitPoint), shuffled.slice(splitPoint)]
}
// Empties the given array in place so existing references observe the change.
// Safe to call with null/undefined.
export const arrayDispose = (_array: any[]): void => {
    if (_array) {
        _array.length = 0
    }
}
// Fetches a binary resource and returns it as a Node-style Buffer,
// optionally gunzipping it first (used for .gz dataset files).
// Note: relies on the `zlib` and `Buffer` polyfills bundled for the browser.
export const fetchResource = async (url: string, isUnzip?: boolean): Promise<Buffer> => {
    const response = await fetch(url)
    const buf = await response.arrayBuffer()
    if (isUnzip) {
        logger('unzip...', url)
        return zlib.unzipSync(Buffer.from(buf))
    } else {
        return Buffer.from(buf)
    }
}
// const base64ToArray = (base64: string): [Uint8Array, string] => {
// const arr = base64.split(',')
// const mime = arr[0].match(/:(.*?);/)[1]
// const bstr = atob(arr[1])
// let n = bstr.length
// const u8arr = new Uint8Array(n)
//
// while (n--) {
// u8arr[n] = bstr.charCodeAt(n)
// }
// return [u8arr, mime]
// }
//
// export const base64ToFile = (base64: string, filename: string): File => {
// const [u8arr, mime] = base64ToArray(base64)
// return new File([u8arr], filename, { type: mime })
// }
//
// export const base64ToBlob = (base64: string): Blob => {
// const [u8arr, mime] = base64ToArray(base64)
// return new Blob([u8arr], { type: mime })
// }
// Reads an uploaded File/Blob and resolves with its base64 data URL.
// Throws synchronously when no blob is given; rejects on reader errors.
export const getUploadFileBase64 = async (blob: File | Blob | undefined): Promise<string> => {
    if (!blob) {
        throw (new Error('File Blob is undefined'))
    }
    return new Promise((resolve, reject) => {
        const reader = new FileReader()
        reader.onload = () => {
            const text = reader.result?.toString()
            // Fix: previously a null/undefined reader result was resolved,
            // violating the Promise<string> contract; reject explicitly instead.
            if (text == null) {
                reject(new Error('FileReader returned an empty result'))
                return
            }
            resolve(text)
        }
        reader.onerror = error => reject(error)
        reader.readAsDataURL(blob)
    })
}
// Reads an uploaded File/Blob into a Node-style Buffer via an ArrayBuffer read.
// Throws synchronously when no blob is given; rejects on reader errors.
export const getUploadFileArray = async (blob: File | Blob | undefined): Promise<Buffer> => {
    if (!blob) {
        throw (new Error('File Blob is undefined'))
    }
    return new Promise((resolve, reject) => {
        const reader = new FileReader()
        reader.onerror = error => reject(error)
        reader.onload = () => {
            const raw = reader.result as ArrayBuffer
            resolve(Buffer.from(raw))
        }
        reader.readAsArrayBuffer(blob)
    })
}
// Decodes a base64/data-URL image and returns its raw RGBA pixel data.
export const getImageDataFromBase64 = async (imgBase64: string): Promise<ImageData> => {
    const img = new Image()
    const canvas = document.createElement('canvas')
    const ctx = canvas.getContext('2d')
    return new Promise((resolve, reject) => {
        img.crossOrigin = ''
        img.onload = () => {
            img.width = img.naturalWidth
            img.height = img.naturalHeight
            // Fix: size the canvas to the image. A fresh canvas defaults to
            // 300x150, so without this the returned ImageData was cropped
            // and/or padded instead of covering the whole image.
            canvas.width = img.width
            canvas.height = img.height
            if (!ctx) {
                reject(new Error('2d canvas context is not available'))
                return
            }
            ctx.drawImage(img, 0, 0, img.width, img.height)
            // Fix: resolve with a guaranteed ImageData instead of a possibly
            // undefined value from optional chaining.
            resolve(ctx.getImageData(0, 0, canvas.width, canvas.height))
        }
        // Fix: propagate decode failures instead of leaving the promise pending.
        img.onerror = () => reject(new Error('Failed to load image from base64 string'))
        img.src = imgBase64
    })
}
// Returns how many files in the list have NOT yet reached the 'done'
// upload status (0 means every upload has finished).
export const checkUploadDone = (fileList: UploadFile[]): number => {
    const unload = fileList.filter((item) => item.status !== 'done').length
    logger('waiting checkUploadDone : ', fileList.length, unload)
    return unload
}
// Formats every element of a tensor as a fixed-point string with
// `floatFixed` decimal places. Returns [] for a null/undefined tensor.
export const formatTensorToStringArray = (tensor: tf.Tensor, floatFixed = 0): string[] => {
    if (!tensor) {
        return []
    }
    const _array = Array.from(tensor.dataSync())
    return _array.map(v => v.toFixed(floatFixed))
}
// Maps each tensor to a label string: multi-element tensors are treated as
// class-score vectors (argMax picks the winning index), single-element
// tensors are rendered as their raw value. Returns [] for a null input.
export const getTensorLabel = (tensorArray: tf.Tensor[]): string[] => {
    if (!tensorArray) {
        return []
    }
    return tf.tidy(() => {
        const labels = tensorArray.map((t) => {
            if (t.dataSync().length > 1) {
                return t.argMax(-1).dataSync().toString()
            } else {
                return t.dataSync().toString()
            }
        })
        return labels
    })
}
<file_sep>/src/constant.ts
// AntD <Form> layout presets shared across the app (24-column grid).
// Standard 8/16 label/control split.
export const layout = {
    labelCol: { span: 8 },
    wrapperCol: { span: 16 }
}
// Aligns trailing items (e.g. submit buttons) under the control column.
export const tailLayout = {
    wrapperCol: { offset: 8, span: 16 }
}
// Full-width control, no label column.
export const normalLayout = {
    wrapperCol: { span: 24 }
}
// Full-width controls on all responsive breakpoints.
export const formItemLayout = {
    wrapperCol: {
        xs: { span: 24 },
        sm: { span: 24 }
    }
}
<file_sep>/node/README.md
# 执行 tf-node 程序
##
## 执行
cd node
yarn
ts-node ./src/**/*.ts
说明:`ts-node` 会查找离当前运行目录最近的 `tsconfig.json` 文件。因此,如果在此项目的根目录下运行这些程序,会因为使用的 tsconfig 不对而报错。
<file_sep>/src/components/mnist/modelLayersApi.ts
import * as tf from '@tensorflow/tfjs'
import { IMAGE_H, IMAGE_W } from './mnistConsts'
// Adds a deeper convolutional feature extractor with dropout:
// (conv-conv-pool) x 2, then flatten -> dropout -> dense -> dropout.
// The final classification layer is added by the caller.
export const addCovDropoutLayers = (model: tf.Sequential): void => {
    model.add(tf.layers.conv2d({
        inputShape: [IMAGE_H, IMAGE_W, 1], filters: 32, kernelSize: 3, activation: 'relu'
    }))
    model.add(tf.layers.conv2d({ filters: 32, kernelSize: 3, activation: 'relu' }))
    model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 }))
    model.add(tf.layers.conv2d({ filters: 64, kernelSize: 3, activation: 'relu' }))
    model.add(tf.layers.conv2d({ filters: 64, kernelSize: 3, activation: 'relu' }))
    model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 }))
    model.add(tf.layers.flatten())
    // Dropout mitigates overfitting on the small dense head.
    model.add(tf.layers.dropout({ rate: 0.25 }))
    model.add(tf.layers.dense({ units: 512, activation: 'relu' }))
    model.add(tf.layers.dropout({ rate: 0.5 }))
}
// Adds a minimal dense-only (MLP) feature extractor: flatten -> dense(42).
// The final classification layer is added by the caller.
export const addDenseLayers = (model: tf.Sequential): void => {
    model.add(tf.layers.flatten({ inputShape: [IMAGE_H, IMAGE_W, 1] }))
    model.add(tf.layers.dense({ units: 42, activation: 'relu' }))
}
// Adds a classic conv/pool feature extractor (LeNet-style):
// conv-pool-conv-pool-conv, then flatten -> dense(64).
// The final classification layer is added by the caller.
export const addCovPoolingLayers = (model: tf.Sequential): void => {
    model.add(tf.layers.conv2d({
        inputShape: [IMAGE_H, IMAGE_W, 1], kernelSize: 3, filters: 16, activation: 'relu'
    }))
    model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 }))
    model.add(tf.layers.conv2d({ kernelSize: 3, filters: 32, activation: 'relu' }))
    model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 }))
    model.add(tf.layers.conv2d({ kernelSize: 3, filters: 32, activation: 'relu' }))
    model.add(tf.layers.flatten({}))
    model.add(tf.layers.dense({ units: 64, activation: 'relu' }))
}
<file_sep>/public/docs/dev/mobilenet-basic.md
# 使用预训练模型——MobileNet 图片分类器
Google 提供了预训练的 MobileNet 图片分类器,我们可以直接使用它。
## MobileNet 使用的数据集 —— ImageNet
### ImageNet 的 1000 个分类
MobileNet 的训练数据集为 ImageNet,包括 1000 个分类。让我们看看这 1000 个分类是什么,参考代码为 `./src/components/mobilenet/ImageNetClasses.ts`:
export interface ILabelMap {
[index: number]: string
}
export const ImageNetClasses: ILabelMap = {
0: 'tench, Tinca tinca',
1: 'goldfish, Carassius auratus',
2: 'great white shark, white shark, man-eater, man-eating shark, ' +
'Carcharodon carcharias',
3: 'tiger shark, Galeocerdo cuvieri',
4: 'hammerhead, hammerhead shark',
5: 'electric ray, crampfish, numbfish, torpedo',
6: 'stingray',
7: 'cock',
...
}
### 使用 AntD Tags 展示分类
参考代码为 `./src/components/mobilenet/ImageNetTagsWidget.tsx`:
const ImageNetTagsWidget = (): JSX.Element => {
...
return (
...
{Object.keys(ImageNetClasses).map((key, index) => {
const tag = ImageNetClasses[index]
const isLongTag = tag.length > 20
const tagElem = (
<Tag key={tag}>
{isLongTag ? `${tag.slice(0, 20)}...` : tag}
</Tag>
)
return isLongTag ? (
<Tooltip title={tag} key={tag}>
{tagElem}
</Tooltip>
) : (
tagElem
)
})}
...
)
}
## MobileNet 图片分类器模型
参考代码为 `./src/components/mobilenet/MobileNetClassifier.tsx`
### 预训练模型下载
在使用 MobileNet 模型前,需要对预训练模型进行下载。
* 我们可以使用 URL 直接从 Google 下载。
export const MOBILENET_MODEL_PATH = 'https://storage.googleapis.com/tfjs-models/tfjs/mobilenet_v1_0.25_224/model.json'
* 也可以提前下载,然后本地加载。
执行下面的语句下载预训练的模型。下载完成后,可以进入对应的目录,观察一下用于 Tfjs 的模型到底是什么模样。
$ cd ./public/preload/model/
$ python3 ./tfjs_mobilenet_model_downloader.py
使用本地 URL 加载:
export const MOBILENET_MODEL_PATH = '/preload/model/mobilenet/mobilenet_v1_0.25_224/model.json'
### 用 tf.loadLayersModel 模型加载
useEffect(() => {
logger('init model ...')
tf.backend()
setTfBackend(tf.getBackend())
setStatus(STATUS.WAITING)
let model: tf.LayersModel
tf.loadLayersModel(MOBILENET_MODEL_PATH).then(
(mobilenet) => {
model = mobilenet
// Warmup the model. This isn't necessary, but makes the first prediction
// faster. Call `dispose` to release the WebGL memory allocated for the return
// value of `predict`.
const temp = model.predict(tf.zeros([1, MOBILENET_IMAGE_SIZE, MOBILENET_IMAGE_SIZE, 3])) as tf.Tensor
temp.dispose()
setModel(model)
const layerOptions: ILayerSelectOption[] = model?.layers.map((l, index) => {
return { name: l.name, index }
})
setLayersOption(layerOptions)
setStatus(STATUS.LOADED)
},
loggerError
)
return () => {
logger('Model Dispose')
model?.dispose()
}
}, [])
* 使用 `tf.loadLayersModel(MOBILENET_MODEL_PATH)` 加载预训练的 MobileNet 模型及权重。
* 加载后,可以做下模型预热,并非必须,不过可以提升第一次 predict 的速度。
// Warmup the model. This isn't necessary, but makes the first prediction
// faster. Call `dispose` to release the WebGL memory allocated for the return
// value of `predict`.
const temp = model.predict(tf.zeros([1, MOBILENET_IMAGE_SIZE, MOBILENET_IMAGE_SIZE, 3])) as tf.Tensor
temp.dispose()
* 提取模型的 Layers 信息,用于详细观察 Layers 的具体情况。
const layerOptions: ILayerSelectOption[] = model?.layers.map((l, index) => {
return { name: l.name, index }
})
setLayersOption(layerOptions)
### 模型的展示
<TabPane tab=' ' key={AIProcessTabPanes.MODEL}>
<Row>
<Col span={12}>
<Card title='MobileNet Model Info' style={{ margin: '8px' }} size='small'>
<TfvisModelWidget model={sModel}/>
</Card>
</Col>
<Col span={12}>
<Card title='Show Layer' style={{ margin: '8px' }} size='small'>
<Form {...layout} initialValues={{
layer: 0
}}>
<Form.Item name='layer' label='Show Layer'>
<Select onChange={handleLayerChange}>
{sLayersOption?.map((v) => {
return <Option key={v.index} value={v.index}>{v.name}</Option>
})}
</Select>
</Form.Item>
<Form.Item {...tailLayout}>
<p>status: {sStatus}</p>
<p>backend: {sTfBackend}</p>
</Form.Item>
</Form>
</Card>
<Card title='Layer Info' style={{ margin: '8px' }} size='small'>
<TfvisLayerWidget layer={sCurLayer}/>
</Card>
</Col>
</Row>
</TabPane>
* `TfvisModelWidget` 是集成了 tfjs-vis 的 React 组件,能够方便的展示模型的更多信息。
* `TfvisLayerWidget` 是集成了 tfjs-vis 的 React 组件,能够方便的展示各个 Layer 的更多信息。**已知问题** 对于无参数的 Layer 选择 'Show Values Distribution for' 可能会抛出异常。

## 推理
### 图片上传显示组件
#### ImageUploadWidget
import React, { useEffect, useRef, useState } from 'react'
import * as tf from '@tensorflow/tfjs'
import { Button, Row } from 'antd'
import PicturesWall from '../../common/PicturesWall'
import { ImageNetClasses } from '../../mobilenet/ImageNetClasses'
import { IKnnPredictResult, ILabelMap, logger } from '../../../utils'
interface IProps {
prediction?: tf.Tensor | IKnnPredictResult
labelsMap?: ILabelMap
onSubmit?: (tensor: tf.Tensor) => void
}
const IMAGE_HEIGHT = 360
const ImageUploadWidget = (props: IProps): JSX.Element => {
const [sImgViewSrc, setImgViewSrc] = useState<string>('/images/cat.jpg')
const [sLabel, setLabel] = useState<string>()
const imageViewRef = useRef<HTMLImageElement>(null)
useEffect(() => {
if (!props.prediction) {
return
}
const knnRet = props.prediction as IKnnPredictResult
if (knnRet.label) {
const knnRet = props.prediction as IKnnPredictResult
setLabel(`${knnRet.label} : ${JSON.stringify(knnRet.confidences)}`)
} else {
// ImageNet Classes
const imagenetPred = props.prediction as tf.Tensor
const labelIndex = imagenetPred.arraySync() as number
logger('labelIndex', labelIndex)
const _label = props.labelsMap ? props.labelsMap[labelIndex] : ImageNetClasses[labelIndex]
setLabel(`${labelIndex.toString()} : ${_label}`)
}
}, [props.prediction])
const handlePreview = (file: string): void => {
// logger('handlePreview', file)
setImgViewSrc(file)
}
const handleSubmit = (): void => {
if (!imageViewRef.current) {
return
}
const _tensor = tf.browser.fromPixels(imageViewRef.current).toFloat()
props.onSubmit && props.onSubmit(_tensor)
}
/***********************
* Render
***********************/
return (
<>
<Row className='centerContainer'>
<img src={sImgViewSrc} height={IMAGE_HEIGHT} ref={imageViewRef} />
</Row>
<Row className='centerContainer'>
<Button onClick={handleSubmit} type='primary' style={{ width: '30%', margin: '8px' }}>Predict</Button>
</Row>
<Row className='centerContainer' >
{sLabel && (
<span>{sLabel}</span>
)}
</Row>
<PicturesWall onPreview={handlePreview} />
</>
)
}
export default ImageUploadWidget
#### PicturesWall
import React, { useEffect, useReducer, useState } from 'react'
import { Modal, Upload } from 'antd'
import { PlusOutlined } from '@ant-design/icons'
import { RcFile, UploadChangeParam, UploadFile } from 'antd/es/upload/interface'
import { checkUploadDone, getUploadFileBase64, logger } from '../../utils'
interface IProps {
onPreview?: (file: string) => void
}
const PicturesWall = (props: IProps): JSX.Element => {
const [previewImage, setPreviewImage] = useState<string>()
const [imageList, setImageList] = useState<UploadFile[]>([])
const [modelDisplay, setModalDisplay] = useState(false)
const [waitingPush, forceWaitingPush] = useReducer((x: number) => x + 1, 0)
useEffect(() => {
// eslint-disable-next-line @typescript-eslint/no-misused-promises
const timer = setInterval(async (): Promise<void> => {
logger('Waiting upload...')
if (checkUploadDone(imageList) > 0) {
forceWaitingPush()
} else {
clearInterval(timer)
const _file = imageList[imageList.length - 1]
if (_file) {
await handlePreview(_file)
}
}
}, 10)
return () => {
clearInterval(timer)
}
}, [waitingPush])
const handleCancel = (): void => {
setModalDisplay(false)
}
const handlePreview = async (file: UploadFile): Promise<void> => {
// logger('handlePreview', file)
let imgSrc = file.url ?? file.preview
if (!imgSrc) {
const result = await getUploadFileBase64(file.originFileObj)
file.preview = result
imgSrc = file.preview
}
if (imgSrc) {
setPreviewImage(imgSrc)
// setModalDispaly(true)
props.onPreview && props.onPreview(imgSrc)
}
}
const handleChange = ({ fileList }: UploadChangeParam): void => {
// logger('handleChange', fileList)
setImageList(fileList)
        // Must wait until all file status is 'done', then push them to LabeledImageWidget
forceWaitingPush()
}
const handleUpload = async (file: RcFile): Promise<string> => {
// logger(file)
return getUploadFileBase64(file)
}
const uploadButton = (
<div>
<PlusOutlined />
<div className='ant-upload-text'>Upload</div>
</div>
)
return (
<div className='clearfix'>
<Upload action={handleUpload} fileList={imageList} onPreview={handlePreview} onChange={handleChange}
listType='picture-card'>
{imageList.length >= 8 ? null : uploadButton}
</Upload>
<Modal visible={modelDisplay} footer={null} onCancel={handleCancel}>
<img alt='example' style={{ width: '100%' }} src={previewImage} />
</Modal>
</div>
)
}
export default PicturesWall
### 摄像头拍照组件 —— WebCamera
import React, { forwardRef, Ref, useEffect, useImperativeHandle, useRef, useState } from 'react'
import * as tf from '@tensorflow/tfjs'
import { WebcamIterator } from '@tensorflow/tfjs-data/dist/iterators/webcam_iterator'
import { Button, Row, Col } from 'antd'
import { CameraOutlined } from '@ant-design/icons'
import { IKnnPredictResult, ILabelMap, logger, loggerError } from '../../../utils'
import { ImageNetClasses } from '../../mobilenet/ImageNetClasses'
import TensorImageThumbWidget from './TensorImageThumbWidget'
const VIDEO_SHAPE = [480, 360] // [width, height]
const IMAGE_HEIGHT = 86
const DEFAULT_CONFIG = {
// facingMode: 'user',
// resizeWidth: VIDEO_SHAPE[0],
// resizeHeight: VIDEO_SHAPE[1],
centerCrop: false
}
export interface IWebCameraHandler {
capture: () => Promise<tf.Tensor3D | void>
}
interface IProps {
prediction?: tf.Tensor | IKnnPredictResult
isPreview?: boolean
labelsMap?: ILabelMap
config?: tf.data.WebcamConfig
onSubmit?: (tensor: tf.Tensor) => void
}
const WebCamera = (props: IProps, ref: Ref<IWebCameraHandler>): JSX.Element => {
const [sLabel, setLabel] = useState<string>()
const [sPreview, setPreview] = useState<tf.Tensor3D>()
const [sCamera, setCamera] = useState<WebcamIterator>()
const videoRef = useRef<HTMLVideoElement>(null)
useImperativeHandle(ref, (): IWebCameraHandler => ({
capture
}))
useEffect(() => {
if (!videoRef.current) {
return
}
let _cam: WebcamIterator
const config = props.config ? props.config : DEFAULT_CONFIG
tf.data.webcam(videoRef.current, config).then(
(cam) => {
_cam = cam
setCamera(cam)
},
loggerError
)
return () => {
_cam?.stop()
}
}, [videoRef, props.config])
useEffect(() => {
if (!props.prediction) {
return
}
const knnRet = props.prediction as IKnnPredictResult
if (knnRet.label) {
const knnRet = props.prediction as IKnnPredictResult
setLabel(`${knnRet.label} : ${JSON.stringify(knnRet.confidences)}`)
} else {
// ImageNet Classes
const imagenetRet = props.prediction as tf.Tensor
const labelIndex = imagenetRet.arraySync() as number
logger('labelIndex', labelIndex)
const _label = props.labelsMap ? props.labelsMap[labelIndex] : ImageNetClasses[labelIndex]
setLabel(`${labelIndex.toString()} : ${_label}`)
}
}, [props.prediction])
const capture = async (): Promise<tf.Tensor3D | void> => {
if (!sCamera) {
return
}
return sCamera.capture()
}
const handleCapture = async (): Promise<void> => {
const imgTensor = await capture()
props.isPreview && setPreview(imgTensor as tf.Tensor3D)
}
const handleSubmit = async (): Promise<void> => {
const imgTensor = await capture()
if (imgTensor) {
props.isPreview && setPreview(imgTensor)
props.onSubmit && props.onSubmit(imgTensor)
}
}
/***********************
* Render
***********************/
return (
<>
<Row className='centerContainer'>
<video autoPlay muted playsInline width={VIDEO_SHAPE[0]} height={VIDEO_SHAPE[1]} ref={videoRef}
style={{ backgroundColor: 'lightgray' }}/>
</Row>
<Row className='centerContainer'>
<div style={{ width: 500, padding: '8px' }}>
{props.isPreview && (
<Button style={{ width: '30%', margin: '0 10%' }} icon={<CameraOutlined />}
onClick={handleCapture} >Capture</Button>
)}
{props.onSubmit && (
<Button onClick={handleSubmit} type='primary' style={{ width: '30%', margin: '0 10%' }}>Predict</Button>
)}
</div>
</Row>
<Row >
{props.isPreview && (
<Col span={12}>
<div className='centerContainer'>Captured Images</div>
<div className='centerContainer'>
{sPreview && <TensorImageThumbWidget height={IMAGE_HEIGHT} data={sPreview}/>}
</div>
</Col>
)}
{sLabel && (
<Col span={12}>
<div className='centerContainer'> Prediction Result </div>
<div className='centerContainer' style={{ margin: '8px' }}>{sLabel}</div>
</Col>
)}
</Row>
</>
)
}
export default forwardRef(WebCamera)
<file_sep>/node/src/jena/train.ts
/**
* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
/**
* Train recurrent neural networks (RNNs) for temperature prediction.
*
* This script drives the RNN training process in the Node.js environment
* using tfjs-node or tfjs-node-gpu (see the `--gpu` flag).
*
* - See [data.js](./data.js) for how the Jena weather dataset is loaded.
* - See [models.js](./train.ts) for the detailed model creation and training
* logic.
*/
import { ArgumentParser } from 'argparse'
import { JenaWeatherData } from './dataJena'
import { buildModel, getBaselineMeanAbsoluteError, trainModel } from './modelJena'
import { logger } from '../utils'
import { Callback } from '@tensorflow/tfjs-node'
// global.fetch = require('node-fetch')
// Parses the CLI flags for the Jena weather training script.
// Returns the argparse namespace object (hence the `any` return type).
const parseArguments = (): any => {
    const parser =
        new ArgumentParser({ description: 'Train RNNs for Jena weather problem' })
    parser.addArgument('--modelType', {
        type: 'string',
        defaultValue: 'gru',
        optionStrings: ['baseline', 'gru', 'simpleRNN'],
        // TODO(cais): Add more model types, e.g., gru with recurrent dropout.
        help: 'Type of the model to train. Use "baseline" to compute the ' +
            'commonsense baseline prediction error.'
    })
    parser.addArgument('--gpu', {
        action: 'storeTrue',
        help: 'Use GPU'
    })
    parser.addArgument('--lookBack', {
        type: 'int',
        defaultValue: 10 * 24 * 6,
        help: 'Look-back period (# of rows) for generating features'
    })
    parser.addArgument('--step', {
        type: 'int',
        defaultValue: 6,
        help: 'Step size (# of rows) used for generating features'
    })
    parser.addArgument('--delay', {
        type: 'int',
        defaultValue: 24 * 6,
        help: 'How many steps (# of rows) in the future to predict the ' +
            'temperature for'
    })
    parser.addArgument('--normalize', {
        defaultValue: true,
        help: 'Used normalized feature values (default: true)'
    })
    parser.addArgument('--includeDateTime', {
        action: 'storeTrue',
        help: 'Used date and time features (default: false)'
    })
    parser.addArgument(
        '--batchSize',
        { type: 'int', defaultValue: 128, help: 'Batch size for training' })
    parser.addArgument(
        '--epochs',
        { type: 'int', defaultValue: 20, help: 'Number of training epochs' })
    parser.addArgument('--earlyStoppingPatience', {
        type: 'int',
        defaultValue: 2,
        help: 'Optional patience number for EarlyStoppingCallback'
    })
    parser.addArgument('--logDir', {
        type: 'string',
        help: 'Optional tensorboard log directory, to which the loss and ' +
            'accuracy will be logged during model training.'
    })
    parser.addArgument('--logUpdateFreq', {
        type: 'string',
        defaultValue: 'batch',
        optionStrings: ['batch', 'epoch'],
        help: 'Frequency at which the loss and accuracy will be logged to ' +
            'tensorboard.'
    })
    return parser.parseArgs()
}
// Entry point: loads the Jena weather data, then either computes the
// commonsense baseline error or builds and trains the requested RNN model
// with optional tensorboard logging and early stopping.
const main = async (): Promise<void> => {
    const args = parseArguments()
    let tfn
    // The tfjs-node backend is chosen at runtime so the GPU package is only
    // required when --gpu is passed.
    if (args.gpu) {
        console.log('Using GPU for training.')
        tfn = require('@tensorflow/tfjs-node-gpu')
    } else {
        console.log('Using CPU for training.')
        tfn = require('@tensorflow/tfjs-node')
    }
    const jenaWeatherData = new JenaWeatherData()
    console.log('Loading Jena weather data...')
    await jenaWeatherData.loadCsv()
    jenaWeatherData.loadDataColumnNames()
    await jenaWeatherData.load()
    logger('modelType', args.modelType)
    if (args.modelType === 'baseline') {
        console.log('Calculating commonsense baseline mean absolute error...')
        const baselineError = await getBaselineMeanAbsoluteError(
            jenaWeatherData, args.normalize, args.includeDateTime, args.lookBack,
            args.step, args.delay)
        console.log(
            'Commonsense baseline mean absolute error: ' + `${baselineError.toFixed(6)}`)
    } else {
        const numFeatures = jenaWeatherData.getDataColumnNames().length
        const model = buildModel(
            args.modelType, Math.floor(args.lookBack / args.step), numFeatures)
        const callback: Callback[] = []
        if (args.logDir != null) {
            console.log('Logging to tensorboard. ' + 'Use the command below to bring up tensorboard server:\n' +
                ` tensorboard --logdir ${args.logDir}`)
            callback.push(tfn.node.tensorBoard(args.logDir, {
                updateFreq: args.logUpdateFreq
            }))
        }
        if (args.earlyStoppingPatience != null) {
            console.log('Using earlyStoppingCallback with patience ' + `${args.earlyStoppingPatience}.`)
            callback.push(tfn.callbacks.earlyStopping({
                patience: args.earlyStoppingPatience
            }))
        }
        await trainModel(model, jenaWeatherData, args.normalize, args.includeDateTime,
            args.lookBack, args.step, args.delay, args.batchSize, args.epochs,
            callback)
    }
}
// Run only when executed directly (not when imported as a module).
if (require.main === module) {
    main().then(
        () => {
            logger('Finished')
        },
        (e) => {
            // Fix: Error objects expose `message`, not `msg`; the old code
            // always logged `undefined` on failure.
            logger(e.message)
        }
    )
}
// yarn run ts-node ./src/jena/train.ts --logDir ./logs/jena
<file_sep>/public/docs/dev/dev-structure.md
# 构建 React 开发框架
在上一篇中,我们使用 React-Scripts 创建了一个新的 React APP。现在开始装修改造。
## React-tfjs-camp 的目录结构
React-tfjs-camp 对目录结构作了如下调整:
.
|____.eslintrc.js 使用 eslint 进行代码格式检查的配置文件
|____.git
|____.gitignore
|____README.md
|____LICENSE 此项目使用 MIT LICENSE 的说明
|____node_modules
|____yarn.lock
|____package.json
|____tsconfig.json
|____public 静态资源目录
|____src Web APP 代码目录
|____node Node.js 代码目录
|____Dockerfile 用于构建 Docker Image
|____docker_build.sh Docker 构建脚本
|____docker_run.sh Docker 运行脚本
### public 目录结构
.
|____favicon.ico
|____index.html
|____404.html
|____manifest.json
|____robots.txt
|____images 根目录的静态图片目录
|____docs 文档目录
| |____images 文档所使用的图片
| |____ai AI Concept 相关文档
| |____dev Develop Tutorial 相关文档
|____model 用户通过 Web App 或者 Node.js 训练后的模型,可以拷贝到这里,供下次使用
|____data 用户代码中使用的个性化数据,可以放置在此处,供下次加载使用
|____preload 项目使用的各种数据集和预训练模型文件,下载一次之后,减少不必要的网络延迟
| |____model
| | |____download_model.sh 下载模型的脚本
| |____data
| | |____download_data.sh 下载数据集的脚本
### src 目录结构
.
|____index.tsx React 入口,整个项目的 Web 渲染由此开始
|____index.css
|____App.tsx App Root 组件
|____App.css App 组件相关格式 css
|____serviceWorker.ts Service Worker 相关,未修改
|____routers.ts 使用 React-Router 集中处理页面路由
|____constant.ts 一些常量
|____utils.ts 一些工具常数和函数
|____App.test.tsx APP Unit Test 入口,未修改
|____setupTests.ts 未修改
|____components 主要代码在这里,定义了 Web APP 所用到的页面组件
| |____common
| | |____visulization
| | |____tensor
| | |____tfvis
| |____curve
| |____iris
| |____mnist
| |____mobilenet
| |____rnn
| |____pretrained
|____react-app-env.d.ts 一些没有声明类型的 npm 包,需要放在这里,才可以被 Typescript 正确编译
|____typescript_logo.svg
|____react_logo.svg
### node 目录结构
.
|____README.md
|____package.json 使用 Node.js 代码的 Package.json
|____tsconfig.json 使用 Node.js 代码的 Typescript 语法特性设置
|____node_modules
|____src Node.js 代码目录
| |____jena
| |____sentiment
| |____simpleObjDetector
| |____textGenLstm
| |____utils.ts 一些工具常数和函数
|____logs 用于存放训练产生的 logs
## 规范代码语法和风格检查
这部分内容非常重要,不过却往往被各种开发教程忽略,可以让我们避免使用在 JS 中广受诟病的那些陈旧语法和奇技淫巧,提高代码的可读性,减少代码的漏洞。
### tsconfig.json
使用 tsconfig.json 对 Typescript 语法特性设置。这个文件会在使用 `tsc` 进行 TypeScript 语法检查和编译时起作用。
在 React-tfjs-camp 的 Web APP 中,使用了 React 和 ES6 语法特性,设置如下。其中 `...` 略去的部分主要对代码格式进行规范化限制的部分。简单来说,这些设置使得我们能够使用诸如:import/export、箭头函数、async/await 等较新的 JS 语法。
{
"compilerOptions": {
"allowJs": false,
"module": "esnext",
"jsx": "react",
"target": "es6",
"lib": [
"dom",
"dom.iterable",
"es6",
"es7",
"esnext"
],
...
},
"include": [
"src"
],
"exclude": [
"node_modules"
]
针对于 Node.js 的代码,使用的配置有如下修改:
{
"compilerOptions": {
"allowJs": true,
"module": "commonjs",
"target": "es6",
...
}
### .eslintrc.js
.eslintrc.js 是 eslint 的配置文件,被用于进行代码风格检查,在开发的 IDE 中使用。下面的设置,集成了常用的 Typescript、React 推荐代码风格检查规则。
在一些文档中,你还会看到使用 tslint 进行 Typescript 的代码检查。当前,Typescript 官方已经推荐使用的是 eslint。
module.exports = {
root: true,
parser: '@typescript-eslint/parser',
plugins: [
'@typescript-eslint', "react", 'react-hooks', 'eslint-comments'
],
extends: [
"react-app",
'eslint:recommended',
'plugin:@typescript-eslint/eslint-recommended',
'plugin:@typescript-eslint/recommended',
"plugin:react/recommended",
'standard-with-typescript',
],
parserOptions: {
project: "./tsconfig.json",
sourceType: 'module', // Allows for the use of imports
},
rules: {
"react-hooks/rules-of-hooks": "error",
"react-hooks/exhaustive-deps": "warn",
"@typescript-eslint/interface-name-prefix": ["error", {"prefixWithI": "always"}],
"@typescript-eslint/indent": ["error", 4, { 'SwitchCase': 1 }],
"jsx-quotes": ["error", "prefer-single"],
'@typescript-eslint/no-unused-vars': ['error', {
'vars': 'all',
'args': 'none',
'ignoreRestSiblings': true,
}],
"@typescript-eslint/strict-boolean-expressions": 0,
},
settings: {
react: {
version: 'detect', // Tells eslint-plugin-react to automatically detect the version of React to use
},
}
};
## 改造页面布局
`/src/App.tsx` 是 React App 中常用的根页面组件。
### React 函数化组件
我们先来看一下 App.tsx 页面的结构,这是一个最简单的 React 函数组件的例子。
import React, { ... } from 'react'
const App = (): JSX.Element => {
...
return (
<Layout>
...
</Layout>
)
}
export default App
* `import React, { ... } from 'react'` 语句声明了当前组件所依赖 React 包。
* `const App = (): JSX.Element => { ... }` 声明了这是一个名为 `App` 的页面函数组件,这个组件的返回值是 JSX.Element 类型。
* `return (<Layout>...</Layout>)` 这段代码展示的是具体返回的 JSX.Element 由哪些页面组件和元素组成。
* `export default App` export 输出的内容,才能够被其他组件引用。非 default 的输出,需要在 import 时放在 `{}` 中。
### 使用 Ant Design 构建页面框架
Ant Design 是蚂蚁金服体验技术部推出的一个服务于企业级产品的设计体系。如果您对于交互界面没有特殊的视觉效果设计要求,使用 AntD 是个不错的选择。在实际应用中,AntD 常常被用于 Web 应用的管理后台开发,能够非常快的搞定交互界面。
Ant Design v4 于 2020 年的 2 月 28 日正式发布,当前(码字时)版本已经升级到 v4.1.2 了。Ant Design 4 有较大的提升,最重要的更新在于全面支持 React Hooks,重写了 Form、Table 等关键组件的实现,使用起来代码优美了不少。
AntD 的文档非常易于理解和使用。参考链接 [https://ant.design/index-cn](https://ant.design/index-cn)
参照 AntD 的官方文档,很快就能够搭出 React-tfjs-camp 的页面框架。
#### 在项目中使用 AntD
在项目根目录中,执行以下命令,安装 AntD 包。安装完成之后,package.json 中会自动增加 `"antd": "^4.1.2",` 的依赖包条目。
$ yarn add antd
打开 `/src/index.tsx`, 在文件头部的声明部分增加 antd.css,以使用 AntD 定义的页面风格资源。
import 'antd/dist/antd.css'
在需要的页面 import 所需使用 AntD 组件即可。
import { ... } from 'antd'
### 页面布局
使用 AntD 的 Layout 组件,能够帮助我们非常容易的构建出各种结构的应用框架。React-tfjs-camp 采用左右结构的页面布局,如下图所示。

左侧的菜单条被封装在 SideBar 里,页面主体被封装在 BodyContainer 里。修改 `/src/App.tsx` 如下:
import React, { useState } from 'react'
import { Layout } from 'antd'
import './App.css'
import SideBar from './components/common/SideBar'
import BodyContainer from './components/common/BodyContainer'
...
const { Content, Header, Sider, Footer } = Layout
const App = (): JSX.Element => {
...
return (
<Layout>
...
<Sider collapsible collapsed={sCollapsed} onCollapse={onCollapse}>
<SideBar/>
</Sider>
<Layout className='site-layout'>
<Header style={{ background: '#fff', padding: '0' }}>
...
</Header>
<Content style={{ margin: '16px' }}>
<BodyContainer/>
</Content>
<Footer style={{ textAlign: 'center' }}>©2020 Created by <NAME>(<EMAIL>)</Footer>
</Layout>
...
</Layout>
)
}
...
## 边栏菜单导航
### AntD Layout Sider
边栏菜单的实现使用了 AntD Layout 中的 Sider 组件。
<Sider collapsible collapsed={sCollapsed} onCollapse={onCollapse}>
<SideBar/>
</Sider>
* `collapsible` 属性说明了它可以折叠与展开
* `collapsed` 指示折叠状态,它的值被设定为 sCollapsed
* `onCollapse` 函数是对应折叠按钮点击的响应方法
### 使用 React Hooks 的 useState 管理边栏状态
App.tsx 需要保存 Sider 组件的折叠状态。这里用到了 Hooks 的 useState。
import React, { useState } from 'react'
...
const [sCollapsed, setCollapsed] = useState(true)
...
const onCollapse = (): void => {
setCollapsed(collapsed => !collapsed)
}
* `const [sCollapsed, setCollapsed] = useState(true)` 声明了一个名为 `sCollapsed` 的状态变量,对其进行赋值的函数为 `setCollapsed`,这个状态的初始值为 `true`
* `setCollapsed`的参数,可以是具体的一个值,也可以是一个回调函数。如果新的 state 需要通过使用先前的 state 计算得出,那么可以将回调函数当做参数传递给 setState。该回调函数将接收先前的 state,并返回一个更新后的值。
* 个人的 Tips:将所有的 State 变量,以 `s` 开头命名,在引用的时候便于和局部变量区分。
**请注意:**
useState 和后面介绍的其他的 React Hooks 声明一样,都需要放在组件函数的**前部**,才能被正确使用,这是由 Hooks 使用队列实现的原理决定的。更多使用 Hooks 的规则细节请参考[Invalid Hook Call Warning](https://reactjs.org/warnings/invalid-hook-call-warning.html)。
### 用 React-Route 实现页面路由跳转
使用 React 构建的单页面应用,要实现页面间的跳转,需要使用页面路由切换——React-Route。
在项目中增加 React-Route 相关的包,后两个 @types 包是 TypeScript 的需要:
$ yarn add react-router-config react-router-dom @types/react-router-config @types/react-router-dom
`/src/App.tsx` 使用 BrowserRouter 将需要进行路由的页面部分包起来。
import React, { useState } from 'react'
import { BrowserRouter as Router } from 'react-router-dom'
...
const App = (): JSX.Element => {
...
return (
<Layout>
<Router>
<Sider collapsible collapsed={sCollapsed} onCollapse={onCollapse}>
<SideBar/>
</Sider>
<Layout className='site-layout'>
...
<BodyContainer/>
...
</Layout>
</Router>
</Layout>
)
}
...
`/src/components/common/SideBar.tsx` 使用了 AntD 的 Menu 组件设置边栏菜单格式,用 react-route-dom 的 Link 设置页面之间的路由关系。
import React from 'react'
import { Link } from 'react-router-dom'
import { Menu } from 'antd'
...
const { Item, SubMenu } = Menu
const SideBar = (): JSX.Element => {
return (
<div>
<header className='App-header'>
<Link to='/'>
<img src={logo} className='App-logo' alt='logo'/><h2 style={{ color: 'white' }}>RTCamp</h2>
</Link>
</header>
<Menu theme='dark' mode='inline'>
<SubMenu title={<span><LineChartOutlined/><span>逻辑回归 Logistic </span></span>}>
<Item key='1.1'>
<Link to='/curve'><span> 曲线拟合 Curve </span></Link>
</Item>
...
</SubMenu>
...
</Menu>
</div>
)
}
...
`/src/components/common/BodyContainer.tsx` 使用 react-router-config 包里的 renderRoutes,我们可以将集中设置在 routers.ts 中的路由映射,对应到页面框架里的 BodyContainer 里。
import React from 'react'
import { renderRoutes } from 'react-router-config'
import { Alert } from 'antd'
import routes from '../../routers'
const { ErrorBoundary } = Alert
const BodyContainer = (): JSX.Element => {
return (
<div style={{ padding: 24, background: '#ffffff', minHeight: '80vh' }}>
<ErrorBoundary>
{renderRoutes(routes)}
</ErrorBoundary>
</div>
)
}
export default BodyContainer
所有的路由映射都被定义在 `/src/routers.ts` 中。这么做的好处是便于维护管理,让组件可以专注于自己的功能逻辑。
import { RouteConfig } from 'react-router-config'
import Home from './components/common/Home'
import Curve from './components/curve/Curve'
...
const routes: RouteConfig[] = [
{ path: '/', exact: true, component: Home },
{ path: '/curve', component: Curve },
...
{ path: '*', component: Home }
]
* 设置 `'/'` 映射时,使用 `exact: true` 以表明不会“误杀”其他以 `'/'` 开头的路由设置。你可以试试 `exact: false` ,或者把这个设置去掉,看看会出现什么结果。
* 设置 `'*'` 映射,对无效的页面路由统一处理,都映射到 Home。试试去掉这个映射,看看会出现什么😄
我们只用到了 React-Route 的一点点基础部分。关于 React-Route 的更多内容可以参考:
* [官方Github](https://github.com/ReactTraining/react-router)
* [React-Router 的 Hooks 实现](https://blog.csdn.net/weixin_43870742/article/details/102966040)
## ErrorBoundary
你有没有注意到,在 `/src/components/common/BodyContainer.tsx` 中,我们为 Content 封装一个 ErrorBoundary,用于截获在 Web APP 运行时,没能被 Catch 到的异常和错误,对它们进行统一的显示。
目前,React 官方还没有实现 getDerivedStateFromError、componentDidCatch 这些用于错误和异常处理的函数,所以只能够采用 React 类组件来完成这个功能。参考文档 [错误边界](https://zh-hans.reactjs.org/docs/error-boundaries.html)
AntD 对 React 官方文档中的 ErrorBoundary 做了封装,我们可以直接使用。
**请注意** 在开发模式下,ErrorBoundary 显不出效果。
<file_sep>/public/docs/ai/curve.md
# 曲线拟合 Curve
## 知识点
这个例子涉及以下的 AI 知识点:
* Tensor及其计算
* 神经元模型
* 多层感知机网络
* 激活函数
## 问题
每隔不久,朋友圈里都会流传一些幼儿园考题,撩骚大家重温幼儿园无忧无虑的幸福时光,还会让你自卑地抱(jie)怨(chao),智商比不过小学生。
 
让我们来猜猜下面数据的关系:
X = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
Y = [15, 39, 77, 129, 195, 275, 369, 477, 599, 735]
x = 11, y = ?
似乎不太好猜。
我们让 AI 也来做类似的试题:
$$ Y = f(X) $$
给出一组 X 和 Y 的对应“训练数据”,让 AI 学习其中的规律,然后随机给出一个数字 x ,让 AI 计算应该得什么 y。
## 数据
我们先用程序生成一组 X, Y 的 对应关系。
$$ y = a x^2 + b x + c $$
a, b, c 三个参数随机生成。tfjs 里面有好几种用于生成随机数的方法,用起来非常简便。下面的代码生成了 0 到 10 之间的三个随机数,我们取整之后,用作 a, b, c。
import * as tf from '@tensorflow/tfjs-node'
let params = tf.randomUniform([3], 0, 10).toInt()
params.print()
let [a, b, c] = Array.from(params.dataSync())
console.log(a, b, c)
上面题目中的数据,就使用以下的代码计算生成 a, b, c 分别取值 7, 3, 5。需要注意的是,这种链式的调用仅仅与顺序有关,没有先乘除后加减的计算符的优先级。
import * as tf from '@tensorflow/tfjs'
let x = tf.range(1, 12)
let y = x.pow(2).mul(a).add( x.mul(b) ).add(c)
x.print()
y.print()
切换到命令行下,在项目目录中,试试执行这些代码吧:
$ cd node
$ yarn
$ yarn run ts-node
> import * as tf from '@tensorflow/tfjs-node' {}
> let params = tf.randomUniform([3], -10, 10).toInt()
> params.print()
Tensor
[8, 5, 4]
> let [a, b, c] = Array.from(params.dataSync())
> console.log(a, b, c)
8 5 4
>
> let x = tf.range(1, 12)
> let y = x.pow(2).mul(a).add( x.mul(b) ).add(c)
undefined
> x.print()
Tensor
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
> y.print()
[17, 46, 91, 152, 229, 322, 431, 556, 697, 854, 1027]
>
### Tensor
Tensor就是一个数据单元的通用术语,也是 Tensorflow 的基础概念——张量。简单说来,就是多维数据量。
下图介绍了张量的维度(秩):Rank/Order

#### Tensor的典型属性
1. 数据类型dtype:d是data的首字母,type是类型的意思。tensor里每一个元素的数据类型是一样的。类似于Numpy中ndarray.dtype,tensorflow里的数据类型可以有很多种,比方说tf.float32就是32位的浮点数,tf.int8就是8位的整型,tf.uint8就是8位的无符号整型,tf.string为字符串等等。
2. 形状Shape:比方说一个2行3列的二维矩阵,他的形状就是2行3列。Tensor的形状可以通过 Reshape 等函数进行变换。Shape的描述顺序是由外到内(最左边的Shape是最外层的维度,reshape时,最外层可以设置为 -1,表示按照实际计算返回)。
### 为模型训练准备数据
有了公式,我们能够为模型训练提供足够的数据。这些数据通常被分为三个集合:
* 训练集:确定模型后,用于训练参数,注意训练的是普通参数(每多加入一个数据对模型进行训练,模型中就会受到影响的参数,通过多次迭代不断更新,是一个梯度下降的过程)而不是超参数(超参数是指训练开始之前设置的参数,超参数的选择与训练过程实际上是独立的,训练过程不会影响超参数。但是训练结束后可以根据训练结果考虑超参数是否可优化,可优化的话就调整超参数的值开始下一次训练)
* 验证集:用训练集对模型训练完毕后,再用验证集对模型测试,测试模型是否准确而不是训练模型的参数
* 测试集:虽然验证集没有对模型的参数产生影响,但是我们却根据验证集的测试结果的准确度来调整参数(这里调整超参数),所以验证集对结果还是有影响的,即使得模型在验证集上达到最优。在很多个模型中,验证集选择了代价函数最小的一个模型。虽然在这个模型上代价很小,但并不代表在其他数据上代价也小。所以需要一个完全没有经过训练的测试集来再最后测试模型的准确率。
### 代码中的实现
用下面的代码,为模型训练准备了训练集和测试集。在大多数场景下,我们会从训练集中抽出一部分数据作为验证集。
**注意** 我们生成的 X 是 (-1, 1) 的浮点数,而不是像前面的例子那样,直接生成整数变量。这是在机器学习中非常重要的一种手段——归一化。对变量按照每个维度做归一化,将他们变换到 (-1, 1) 或者 (0, 1) 之间,能够使不同维度的数据“公平竞争”,并减少数据溢出的风险。
const calc = useCallback((x: tf.Tensor) => {
const [a, b, c] = sCurveParams // = a * x^2 + b * x + c
return x.pow(2).mul(a).add(x.mul(b)).add(c)
}, [sCurveParams])
logger('init data set ...')
// train set
const _trainTensorX = tf.randomUniform([totalRecord], -1, 1)
const _trainTensorY = calc(_trainTensorX)
setTrainSet({ xs: _trainTensorX, ys: _trainTensorY })
// test set
const _testTensorX = tf.randomUniform([testRecord], -1, 1)
const _testTensorY = calc(_testTensorX)
setTestSet({ xs: _testTensorX, ys: _testTensorY })
## 模型
### 人工神经元模型
人工神经网络是一种从信息处理角度模仿人脑神经元的数学模型,最初是由生物学家大约在1943年提出来的(爷爷辈儿的理论),是一种仿生类的模型,生物学中的神经元模型通常是由树突、轴突、细胞核等组成,其基本结构如图所示。

在人工神经网络中,拥有数量非常多的神经元,它们之间相连组成神经网络,并且神经元之间都有连接权值,称为权重,是模仿人脑中“记忆”机制,神经网络中的每一个节点都代表着一种特定的输出,称为“激励函数”,其大致结构如图所示:

神经网络从两个方面模拟大脑:
1. 神经网络获取的知识是从外界环境中学习得来的。
2. 内部神经元的连接强度,即突触权值,用于储存获取的知识。
神经网络系统由能够处理人类大脑不同部分之间信息传递的由大量神经元连接形成的拓扑结构组成,依赖于这些庞大的神经元数目和它们之间的联系,人类的大脑能够收到输入的信息的刺激由分布式并行处理的神经元相互连接进行非线性映射处理,从而实现复杂的信息处理和推理任务。
对于某个处理单元(神经元)来说,假设来自其他处理单元(神经元)i的信息为Xi,它们与本处理单元的互相作用强度即连接权值为Wi, i=0,1,…,n-1,处理单元的内部阈值为θ。那么本处理单元(神经元)的输入为:
$$ \sum_{i=0}^{n-1} w_i x_i $$
而处理单元的输出为:
$$ y = f( \sum_{i=0}^{n-1} ( w_i x_i - \theta )) $$
f称为激活函数或作用函数,它决定节点(神经元)的输出。θ表示隐含层神经节点的阈值。最经典的激活函数是 Sigmoid 函数。
### 多层人工神经网络
不太严格来说,当前人工智能最红的深度神经网络,可以被理解为就是“更多层”的人工神经网络。
人工神经网络包括:输入层、输出层、以及两者之间的隐藏层。每一层网络包括 n 个神经元,这些神经元,也可以有不同的激活函数。
在随机梯度下降算法和反向传播算法完善之后,神经网络曾经有个快速发展的时期。

简单的浅层的网络已经能够完成一部分工作,手写数字识别准确率可以达到了98%以上。但对于更复杂的挑战,浅层网络就表现不佳了。
如何改进?直觉上来说,我们会觉得增加包含更多层隐藏层的深度网络会更好。

但是,在尝试使用主力学习算法——随机梯度下降算法时,深度网络没有表现的比浅层网络更好。这个失败的结果确实很意外。经过仔细观察后发现,深度网络中的不同层学习的速度差别很大。具体来说就是,当网络后面的层学习很好的时候,前面的层的学习经常会卡住,几乎学不到任何东西。相反的现象也会发生:前面层的学习很好,后面的层学习会卡住。这个问题与运气无关,跟基于梯度的学习算法有关。在深度网络中,基于梯度下降的学习算法有一种内在的不稳定性。这种不稳定性导致前面或则后面层的学习卡住。

要训练深度网络,就必须解决梯度不稳定的问题。这个问题卡了10多年。2010年Glorot和Bengio发现sigmoid激活函数会导致最后一层隐藏层的输出在0附近饱和,导致学习变慢的问题。他们建议使用一些替换的激活函数。2013年Sutskever, Martens, Dahl 和 Hinton研究了随机权重初始化和动量梯度下降对深度学习的影响。研究的结果是:训练的梯度不稳定跟所用的激活函数、权重初始化的方式、甚至梯度下降的具体实现形式都有关系。当然网络的结构,其他超参的取值也很重要,原因是多方面的,不过随后的发展,开发出的各种方法某种程度上克服了或则是绕过了这些障碍,最终促进了深度神经网络的成功。
Google 提供了一个非常直观的理解多层神经网络的工具 NN PlayGround 。
[NN PlayGround](http://playground.tensorflow.org/)
### 激活函数
常见的激活函数有:Sigmoid、ReLU、Tanh 等。
#### Sigmoid S型生长曲线
$$ sigmoid(x) = \frac{1}{1+e^{-x}} $$

Sigmoid函数曾被广泛地应用,也是非常经典的logic函数。
优点:
* Sigmoid函数的输出映射在(0,1)之间,单调连续,输出范围有限,优化稳定,可以用作输出层
* 求导容易
缺点:
* 由于其软饱和性,容易产生梯度消失,导致训练出现问题
* 其输出并不是以0为中心的。
#### ReLU
$$ relu(x) = max(0, x) $$

优点是:
* 可以更加简单的实现
* 相比起Sigmoid和tanh能够在 SGD 中快速收敛
* 有效缓解了梯度消失的问题
* 在没有无监督预训练的时候也能有较好的表现
* 提供了神经网络的稀疏表达能力
缺点是:
* 随着训练的进行,可能会出现神经元死亡,权重无法更新的情况。如果发生这种情况,那么流经神经元的梯度从这一点开始将永远是0。也就是说,ReLU神经元在训练中不可逆地死亡了。如果使用 ReLU,要小心设置 learning rate,注意不要让网络出现很多 “dead” 神经元,如果不好解决,可以试试 Leaky ReLU、PReLU 或者 Maxout.
ReLU 有不少变形算法。

#### Tanh 双曲正切
$$ tanh(x) = \frac{1 - e^{-2x}}{1 + e^{-2x}} $$

优点:
* 比Sigmoid函数收敛速度更快。
* 相比Sigmoid函数,其输出以0为中心。
缺点:
* 还是没有改变Sigmoid函数的最大问题——由于饱和性产生的梯度消失。
### 代码中的实现
在代码中,你可以选择体验的隐藏层数量、隐藏层内神经元数量、以及激活函数的不同,给训练带来的影响。
激活函数的设定和 Layer 在一起,例如:
const model = tf.sequential()
model.add(tf.layers.dense({ inputShape: [1], units: sDenseUnits, activation: sActivation as any }))
for (let i = sLayerCount - 2; i > 0; i--) {
model.add(tf.layers.dense({ units: sDenseUnits, activation: sActivation as any }))
}
model.add(tf.layers.dense({ units: 1 }))
## 训练
在神经元网络模型确定之后,还需要设置优化器,才能进行训练。以 SGD 算法为例,通过调整 Learning Rate 参数,会改变学习的收敛速度,以及学习精度。
const optimizer = tf.train.sgd(sLearningRate)
model.compile({ loss: 'meanSquaredError', optimizer })
训练时,还需要指定下面的参数:
* epochs 迭代次数
* batchSize 因为计算环境资源有限,每次取用合适的数据量,以避免内存溢出等问题。
* validationSplit 从训练集中挑选验证集数据的比率
model.fit(trainSet.xs as tf.Tensor, trainSet.ys as tf.Tensor, {
epochs: NUM_EPOCHS,
batchSize: BATCH_SIZE,
validationSplit: VALIDATE_SPLIT,
callbacks: {
onEpochEnd: (epoch: number) => {
...
if (stopRef.current) {
logger('Checked stop', stopRef.current)
statusRef.current = STATUS.STOPPED
model.stopTraining = stopRef.current
}
}
}
}).then(
() => {
statusRef.current = STATUS.TRAINED
}
)
## 推理
模型训练好之后,可以通过 model.predict 或 model.evaluate 来检验训练结果。
const pred = model.predict(testSet.xs as tf.Tensor) as tf.Tensor
setTestP(pred)
const evaluate = model.evaluate(testSet.xs as tf.Tensor, testSet.ys as tf.Tensor) as tf.Scalar
setTestV(evaluate)
## 补充内容
[参考链接 Tensorflow.js API 文档](https://js.tensorflow.org/api/latest/?hl=zh-cn)
#### Tensor 的常用运算
| API | 说明 |
|---|---|
| tf.add | a + b |
| tf.sub | a - b |
| tf.mul | a * b |
| tf.div | a / b |
| tf.addN | a + [b, c, d...] |
| tf.divNoNan | 相除分母为0时,返回0 |
| tf.floorDiv | 相除结果取下整 |
| tf.maximum | a, b直接对应元素取大值 |
| tf.minimum | a, b直接对应元素取小值 |
| tf.mod | 对应元素取模 |
| tf.pow | 对应元素取幂 |
| tf.squaredDifference | (a - b) * (a - b) |
| tf.dot | 点乘 |
tf 还有各种数学函数可以使用。
#### Tensorflow.js 的随机数生成 API
| API | 说明 |
|---|---|
| tf.randomUniform | 均匀分布采样 |
| tf.randomNormal | 正态分布采样 |
| tf.multinomial | 多项式分布采样 |
| tf.randomGamma | Gamma分布采样 |
#### Tensor 的常用变形
| API | 说明 |
|---|---|
| tf.reshape | 根据给出的 Shape 变形 |
| tf.reshapeAs | 根据给出的 Tensor 变形 |
| tf.expandDims | 增加一个维度 |
| tf.flatten | 把Tensor转换成 1 维数组 |
#### 更多激活函数
[参考链接 wikipedia](https://en.wikipedia.org/wiki/Activation_function)




## 拓展阅读
关于 Tensor
* [CSDN 穿秋裤的兔子的文章](https://blog.csdn.net/kansas_lh/article/details/79321234)
* [机器学习的敲门砖:手把手教你TensorFlow初级入门](https://yq.aliyun.com/articles/64410?utm_content=m_32686)
人工神经元模型
* [参考链接,简书](https://www.jianshu.com/p/f73f5985cda4)
* [参考链接,简书](https://www.jianshu.com/p/3d8802fe7853)
多层人工神经网络
* [神经网络与深度学习(五):深度网络训练难点](https://blog.csdn.net/gaofeipaopaotang/article/details/80002590)
激活函数
* [参考链接](https://www.jiqizhixin.com/graph/technologies/1697e627-30e7-48a6-b799-39e2338ffab5)
* [参考链接: 不会停的蜗牛](https://www.jianshu.com/p/22d9720dbf1a)
《神经网络和深度学习简史》
* [神经网络和深度学习简史(第一部分):从感知机到BP算法](https://www.jianshu.com/p/f90d923b73b5)
* [神经网络和深度学习简史(第二部分):BP算法之后的又一突破——信念网络](https://www.jianshu.com/p/9dc4c2320732)
* [神经网络和深度学习简史(第三部分):90年代的兴衰——强化学习与递归神经网络](https://www.jianshu.com/p/5db8170d4bcb)
* [神经网络和深度学习简史(第四部分):深度学习终迎伟大复兴](https://www.jianshu.com/p/e1bac195f06d)
《神经网络和深度学习简史》
* [神经网络和深度学习简史(一)](https://www.jianshu.com/p/c9a2a0c446d4)
* [深度学习和神经网络简史(二)](https://www.jianshu.com/p/558a2c0a5b9b)
* [神经网络和深度学习简史(三)](https://www.jianshu.com/p/70209952de90)
* [神经网络和深度学习简史(四)](https://www.jianshu.com/p/757c5a57c5d2)
<file_sep>/node/src/textGenLstm/train.ts
/**
* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
/**
* Training of a next-char prediction model.
*/
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import * as argparse from 'argparse';
import {maybeDownload, TEXT_DATA_URLS, TextData} from './data';
import {compileModel, createModel, fitModel, generateText} from './model';
/**
 * Build the command-line interface and parse the process arguments.
 *
 * Registers exactly the same flags — with the same types, defaults and help
 * strings, in the same order — as declaring each one inline; the argument
 * definitions are simply kept in a table and registered in a loop.
 *
 * @returns The parsed arguments object produced by argparse.
 */
function parseArgs() {
  const cliParser = new argparse.ArgumentParser({
    description: 'Train an lstm-text-generation model.'
  });
  // [argument name, argparse options] tuples, in registration order.
  const argSpecs: Array<[string, any]> = [
    ['textDatasetName', {
      type: 'string',
      choices: Object.keys(TEXT_DATA_URLS),
      help: 'Name of the text dataset'
    }],
    ['--gpu', {
      action: 'storeTrue',
      help: 'Use CUDA GPU for training.'
    }],
    ['--sampleLen', {
      type: 'int',
      defaultValue: 60,
      help: 'Sample length: Length of each input sequence to the model, ' +
          'in number of characters.'
    }],
    ['--sampleStep', {
      type: 'int',
      defaultValue: 3,
      help: 'Step length: how many characters to skip between one example ' +
          'extracted from the text data to the next.'
    }],
    ['--learningRate', {
      type: 'float',
      defaultValue: 1e-2,
      help: 'Learning rate to be used during training'
    }],
    ['--epochs', {
      type: 'int',
      defaultValue: 150,
      help: 'Number of training epochs'
    }],
    ['--examplesPerEpoch', {
      type: 'int',
      defaultValue: 10000,
      help: 'Number of examples to sample from the text in each training epoch.'
    }],
    ['--batchSize', {
      type: 'int',
      defaultValue: 128,
      help: 'Batch size for training.'
    }],
    ['--validationSplit', {
      type: 'float',
      defaultValue: 0.0625,
      help: 'Validation split for training.'
    }],
    ['--displayLength', {
      type: 'int',
      defaultValue: 120,
      help: 'Length of the sampled text to display after each epoch of training.'
    }],
    ['--savePath', {
      type: 'string',
      help: 'Path to which the model will be saved (optional)'
    }],
    ['--lstmLayerSize', {
      type: 'string',
      defaultValue: '128,128',
      help: 'LSTM layer size. Can be a single number or an array of numbers ' +
          'separated by commas (E.g., "256", "256,128")'
    }]  // TODO(cais): Support
  ];
  for (const [name, options] of argSpecs) {
    cliParser.addArgument(name, options);
  }
  return cliParser.parseArgs();
}
/**
 * Entry point: train a next-char prediction LSTM on the selected text corpus.
 *
 * Steps: select CPU/GPU backend, download and load the text corpus, build and
 * compile the model, fit it (printing sample generations after training), and
 * optionally save the trained model to `--savePath`.
 */
async function main() {
  const args = parseArgs();

  // The backend package must be required before any model code runs.
  if (args.gpu) {
    console.log('Using GPU');
    require('@tensorflow/tfjs-node-gpu');
  } else {
    console.log('Using CPU');
    require('@tensorflow/tfjs-node');
  }

  // Create the text data object (corpus is cached in the OS temp dir).
  const textDataURL = TEXT_DATA_URLS[args.textDatasetName].url;
  const localTextDataPath = path.join(os.tmpdir(), path.basename(textDataURL));
  await maybeDownload(textDataURL, localTextDataPath);
  const text = fs.readFileSync(localTextDataPath, { encoding: 'utf-8' });
  const textData =
      new TextData('text-data', text, args.sampleLen, args.sampleStep);

  // Convert lstmLayerSize from string to number array before handing it
  // to `createModel()`. Explicit radix avoids parseInt surprises.
  const lstmLayerSize = args.lstmLayerSize.indexOf(',') === -1 ?
      Number.parseInt(args.lstmLayerSize, 10) :
      args.lstmLayerSize.split(',').map(x => Number.parseInt(x, 10));

  const model = createModel(
      textData.sampleLen(), textData.charSetSize(), lstmLayerSize);
  compileModel(model, args.learningRate);

  // Get a seed text for display in the course of model training.
  const [seed, seedIndices] = textData.getRandomSlice();
  console.log(`Seed text:\n"${seed}"\n`);

  const DISPLAY_TEMPERATURES = [0, 0.25, 0.5, 0.75];
  let epochCount = 0;
  await fitModel(
      model, textData, args.epochs, args.examplesPerEpoch, args.batchSize,
      args.validationSplit, {
        onTrainBegin: async () => {
          epochCount++;
          console.log(`Epoch ${epochCount} of ${args.epochs}:`);
        },
        onTrainEnd: async () => {
          // BUGFIX: the original used `forEach(async temperature => ...)`,
          // which kicks off all generations concurrently: their console
          // output can interleave and the pending promises escape the
          // callback's lifetime. Await each generation sequentially instead.
          for (const temperature of DISPLAY_TEMPERATURES) {
            const generated = await generateText(
                model, textData, seedIndices, args.displayLength, temperature);
            console.log(
                `Generated text (temperature=${temperature}):\n` +
                `"${generated}"\n`);
          }
        }
      });

  if (args.savePath != null && args.savePath.length > 0) {
    await model.save(`file://${args.savePath}`);
    console.log(`Saved model to ${args.savePath}`);
  }
}

// Surface async failures with a non-zero exit code instead of leaving an
// unhandled promise rejection.
main().catch(err => {
  console.error(err);
  process.exit(1);
});
<file_sep>/public/docs/dev/data-classifier.md
# 用 Tensorflow.js 处理按数据分类问题
## 分类问题的数据表述
鸢尾花原始的数据,类似这样:
4.8,3.0,1.4,0.3,Iris-setosa
5.1,3.8,1.6,0.2,Iris-setosa
4.6,3.2,1.4,0.2,Iris-setosa
5.7,3.0,4.2,1.2,Iris-versicolor
5.7,2.9,4.2,1.3,Iris-versicolor
6.2,2.9,4.3,1.3,Iris-versicolor
6.3,3.3,6.0,2.5,Iris-virginica
5.8,2.7,5.1,1.9,Iris-virginica
7.1,3.0,5.9,2.1,Iris-virginica
为了便于计算处理,需要对分类结果进行转换处理。
常见的处理分类目标数据的方法有:标签编码 和 One-Hot
### 标签编码
使用 int 类型, 对三种分类进行编号替换,就形成了整数标签目标数据:
* 0 :Iris setosa(山鸢尾)
* 1 :Iris versicolor(杂色鸢尾)
* 2 :Iris virginica(维吉尼亚鸢尾)
上面的数据被转换成(为了便于观察,在数据中增加了空格,以区分特征数据和目标分类数据):
4.8,3.0,1.4,0.3, 0
5.1,3.8,1.6,0.2, 0
4.6,3.2,1.4,0.2, 0
5.7,3.0,4.2,1.2, 1
5.7,2.9,4.2,1.3, 1
6.2,2.9,4.3,1.3, 1
6.3,3.3,6.0,2.5, 2
5.8,2.7,5.1,1.9, 2
7.1,3.0,5.9,2.1, 2
标签编码的类别值从0开始(因为大多数计算机系统如此计数),所以,如果有N个类别,类别值为 0 至 N-1 的。
鸢尾花数据量不大,直接按照整数标签编码,在代码中定义为一个多维数组。
export const IRIS_RAW_DATA = [
[5.1, 3.5, 1.4, 0.2, 0], [4.9, 3.0, 1.4, 0.2, 0], [4.7, 3.2, 1.3, 0.2, 0],
[4.6, 3.1, 1.5, 0.2, 0], [5.0, 3.6, 1.4, 0.2, 0], [5.4, 3.9, 1.7, 0.4, 0],
[4.6, 3.4, 1.4, 0.3, 0], [5.0, 3.4, 1.5, 0.2, 0], [4.4, 2.9, 1.4, 0.2, 0],
...
[6.9, 3.1, 5.1, 2.3, 2], [5.8, 2.7, 5.1, 1.9, 2], [6.8, 3.2, 5.9, 2.3, 2],
[6.7, 3.3, 5.7, 2.5, 2], [6.7, 3.0, 5.2, 2.3, 2], [6.3, 2.5, 5.0, 1.9, 2],
[6.5, 3.0, 5.2, 2.0, 2], [6.2, 3.4, 5.4, 2.3, 2], [5.9, 3.0, 5.1, 1.8, 2]
]
**标签编码的适用场景**:
* 如果原本的标签编码是有序意义的,例如评分等级,使用标签编码就是一个更好的选择。
* 不过,如果标签编码是和鸢尾数据类似的无顺序数据,在计算中,更高的标签数值会给计算带来不必要的附加影响。这时候更好的方案是使用 one-hot 编码方式。
在上面的数据中。在进行标签编码的数据集中有
$$ virginica(2) > versicolor(1) > setosa(0) $$
比方说,假设模型内部计算平均值(神经网络中有大量加权平均运算),那么0 + 2 = 2,2 / 2 = 1. 这意味着:virginica 和 setosa 平均一下是 versicolor。如果不对Loss 函数作些变化,该模型的预测也许会有大量误差。
### One-Hot
One-Hot 编码是将类别变量转换为机器学习算法易于利用的一种形式的过程。One-Hot 将 n 个分类,表示为一个 只含有 0,1 数值的向量。向量的位置表示了对应的分类。
例如,采用 One-Hot 编码,上面的数据就应该编码成:
4.8,3.0,1.4,0.3, [1,0,0]
5.1,3.8,1.6,0.2, [1,0,0]
4.6,3.2,1.4,0.2, [1,0,0]
5.7,3.0,4.2,1.2, [0,1,0]
5.7,2.9,4.2,1.3, [0,1,0]
6.2,2.9,4.3,1.3, [0,1,0]
6.3,3.3,6.0,2.5, [0,0,1]
5.8,2.7,5.1,1.9, [0,0,1]
7.1,3.0,5.9,2.1, [0,0,1]
tfjs 里也提供了将标签编码转化成 One-Hot 的函数 `tf.oneHot`,使用起来很方便。
## 用 tf.data.Dataset 构造训练集和测试集
### 按比例分配数据集
export const splitDataSet = (shuffled: IArray, testSplit: number, shuffle = false): IArray[] => {
if (shuffle) {
tf.util.shuffle(shuffled)
}
const totalRecord = shuffled.length
// Split the data into training and testing portions.
const numTestExamples = Math.round(totalRecord * testSplit)
const numTrainExamples = totalRecord - numTestExamples
const train = shuffled.slice(0, numTrainExamples)
const test = shuffled.slice(numTrainExamples)
return [train, test]
}
* 利用 ES6 的数组函数 `Array.slice()` 简单粗暴的按照数组下标将原始数据分拆成训练集和测试集。
* `tf.util.shuffle` 被用于打乱数组数据的顺序。这是在做数据处理时经常用到的方法。
> ES6 里还有个很形似 `Array.slice()` 的函数,`Array.splice()`,不小心的话容易混淆,需要注意区分一下。
>
> * slice()方法返回数组中被选中的元素,作为一个新的数组对象。splice()方法返回数组中被删除的项。
* slice()方法不改变原来的数组,而splice()方法改变了原来的数组。
* slice()方法可以接受2个参数。splice()方法可以接受n个参数。
### 了解 tf.data.Dataset
有两种方法可以训练LayersModel :
* 使用 `model.fit()` 并将数据作为一个大张量提供。
* 使用 `model.fitDataset()` 并通过 Dataset 对象提供数据.
在 Curve 的例子中,我们已经使用 Tensor 作为数据,对模型进行了训练。如果您的数据集能够被放进内存,并且可以作为单个张量使用,则可以通过调用 fit() 方法来训练模型。
而如果数据不能完全放入内存或正在流式传输,则可以通过使用 Dataset 对象的 fitDataset() 来训练模型.
Dataset 表示一个有序的元素集合对象,这个对象能够通过链式方法完成一系列加载和转换,返回另一个 Dataset。数据加载和转换是以一种懒加载和流的方式完成。数据集可能会被迭代多次;并且每次迭代都会从头开始进行。例如:
const processedDataset = rawDataset.filter(...).map(...).batch(...)
下面的代码被用于生成鸢尾花的 DataSet,来自 `./src/components/iris/data.ts`。
export const getIrisData = (testSplit: number, isOntHot = true,
shuffle = true): Array<tf.data.Dataset<tf.TensorContainer>> => {
// Shuffle a copy of the raw data.
const shuffled = IRIS_RAW_DATA.slice()
const [train, test] = splitDataSet(shuffled, testSplit, shuffle)
// Split the data into into X & y and apply feature mapping transformations
const trainX = tf.data.array(train.map(r => r.slice(0, 4)))
const testX = tf.data.array(test.map(r => r.slice(0, 4)))
let trainY: tf.data.Dataset<number[]>
let testY: tf.data.Dataset<number[]>
if (isOntHot) {
trainY = tf.data.array(train.map(r => flatOneHot(r[4])))
testY = tf.data.array(test.map(r => flatOneHot(r[4])))
} else {
trainY = tf.data.array(train.map(r => [r[4]]))
testY = tf.data.array(test.map(r => [r[4]]))
}
// Recombine the X and y portions of the data.
const trainDataset = tf.data.zip({ xs: trainX, ys: trainY })
const testDataset = tf.data.zip({ xs: testX, ys: testY })
return [trainDataset, testDataset]
}
* 将每条鸢尾花数据的前四个元素作为 X。
// Split the data into into X & y and apply feature mapping transformations
const trainX = tf.data.array(train.map(r => r.slice(0, 4)))
const testX = tf.data.array(test.map(r => r.slice(0, 4)))
* Y 则根据所使用的编码方式而发生变化。
if (isOntHot) {
trainY = tf.data.array(train.map(r => flatOneHot(r[4])))
testY = tf.data.array(test.map(r => flatOneHot(r[4])))
} else {
trainY = tf.data.array(train.map(r => [r[4]]))
testY = tf.data.array(test.map(r => [r[4]]))
}
* 将整数标签编码转换成 OneHot 编码的函数如下。
export const flatOneHot = (idx: number): number[] => {
return Array.from(tf.oneHot([idx], 3).dataSync())
}
### 初始化数据集
我们需要根据用户选择的 sTargetEncode 更新训练和测试数据集,这样的代码,放在 useEffect 里很适合,来自 `./src/components/iris/Iris.tsx`。
useEffect(() => {
if (!sTargetEncode) {
return
}
logger('encode dataset ...')
const isOneHot = sTargetEncode === ONE_HOT
const [tSet, vSet] = data.getIrisData(VALIDATE_SPLIT, isOneHot)
// Batch datasets.
setTrainSet(tSet.batch(BATCH_SIZE))
setValidSet(vSet.batch(BATCH_SIZE))
...
}, [sTargetEncode])
* 这里有个前面强调过的知识点,Dataset.batch 会在最后训练发生时,才会去迭代执行,产生数据片段。
### SampleDataVis 展示数据样本
为了便于观察数据样本,构造了 SampleDataVis 组件,来自 `./src/components/common/tensor/SampleDataVis.tsx`。
#### 使用 useEffect 构建细粒度的数据驱动渲染
在 `SampleDataVis.tsx` 中,我们设计了如下的数据变化 useEffect,以避免不必要的 UI 渲染:
1. [] => SampleDataVis 组件创建时,创建 AntD Table 的数据列,这个操作只做一次。
2. 展示太多数据,对我们了解数据集情况意义不大。因此,属性数据 props.*Dataset 变化时,我们仅取出 sSampleCount 个数据用于显示。
* [props.xDataset, sSampleCount] => 因此,属性数据 props.xDataset 变化时,仅取出 sSampleCount 个数据用于显示,将取出的数据结果放在 xData 中。
* [props.yDataset, sSampleCount] , [props.pDataset, sSampleCount] => 和 props.xDataset 处理类似,多一步将计算出的 OneHot 向量转化为 Label 的步骤,便于在显示是对比 Predict 值是否正确。
3. SampleDataVis 组件内部的数据按需渲染。
* [xData, yData] => 有变化时,及时渲染。
* [pData] => 每一次使用模型进行 Predict 之后,都会更改 props.pDataset 数据,引起 pData 的变化。而此时 props.xDataset、props.yDataset 并不变化。所以我们只修改 pData 的显示即可,不必重复渲染 xData, yData。
#### AntD Table 的使用
SampleDataVis 组件使用 AntD Table 进行数据显示。
* 如果 Y 数据是 Label,会直接显示数值。

* 如果 Y 数据是 OneHot,则显示 OneHot 数组以及对应的 Label。

下面的代码展示了 AntD 中 Table 的用法,通过构建 columns 数组来描述每列的数据及其渲染,其中每一行展示的数据格式如 interface IDataRecord 的定义。
interface IDataRecord {
key: number
x: tf.Tensor
y: tf.Tensor
p?: tf.Tensor
yLabel?: string
pLabel?: string
}
...
useEffect(() => {
const _columns = [
{
title: 'X',
dataIndex: 'x',
render: (text: string, record: tf.TensorContainerObject): JSX.Element => {
return <span>{formatX(record.x as tf.Tensor)}</span>
}
},
{
title: 'Y',
dataIndex: 'y',
render: (text: string, record: tf.TensorContainerObject): JSX.Element => {
const yArray = formatTensorToStringArray(record.y as tf.Tensor, 0)
const yStr = yArray.length > 1 ? `[${yArray.join(', ')}] => ${record.yLabel}` : yArray.join(', ')
const color = record.yLabel === record.pLabel ? 'green' : 'red'
return <span style={{ color: color }}>{yStr}</span>
}
},
{
title: 'P',
dataIndex: 'p',
render: (text: string, record: tf.TensorContainerObject): JSX.Element => {
const pArray = formatTensorToStringArray(record.p as tf.Tensor, 2)
const pStr = pArray.length > 1 ? `[${pArray.join(', ')}] => ${record.pLabel}` : pArray.join(', ')
const color = record.yLabel === record.pLabel ? 'green' : 'red'
return pStr ? <span style={{ color: color }}>{pStr}</span> : <></>
}
}]
setColumns(_columns)
}, [])
...
return (
<div>
...
<Table columns={columns} dataSource={sData as object[]} pagination={{ pageSize: props.pageSize ?? DEFAULT_PAGE_SIZE }}/>
</div>
)
让我们来看看,列的数据是如何展示的。
{
title: 'X',
dataIndex: 'x',
render: (text: string, record: tf.TensorContainerObject): JSX.Element => {
return <span>{formatX(record.x as tf.Tensor)}</span>
}
},
* `dataIndex` 为列数据在 IDataRecord 数据项中对应的路径,支持通过数组查询嵌套路径。
* `render` 生成复杂数据的渲染函数,参数分别为当前行的值,当前行数据,行索引,@return 里面可以设置表格行/列合并。`Function(text, record, index) {}`
更多 AntD Table 的信息请参考 [https://ant.design/components/table-cn/](https://ant.design/components/table-cn/)
## 全联接网络模型
鸢尾花分类采用全联接网络,参考代码实现如下。其中激活函数、输入层的神经元数量都可以在页面上直接调整。
const model = tf.sequential()
model.add(tf.layers.dense({
units: sDenseUnits,
activation: sActivation as any,
inputShape: [data.IRIS_NUM_FEATURES]
}))
model.add(tf.layers.dense({ units: 3, activation: 'Softmax' }))
* 输入层的 inputShape 是和特征数据相关的,是个 4 元向量。
* 因为要输出三个分类,所以输出层的神经元数量设置为 3。
* 多分类问题的输出层,激活函数使用 Softmax。如果是二分类问题,激活函数可以使用 Sigmoid。
* `sActivation` 激活函数可以选择 `['sigmoid', 'relu', 'tanh']`, 感受一下不同激活函数对于分类的影响。
* `sDenseUnits` 全联接网络的神经元数量可调。
## 训练
### 调整训练参数:注意一下 Loss 函数
调整训练参数,体会一下对于训练过程有什么影响。
useEffect(() => {
if (!sModel || !sLearningRate || !sOptimizer || !sLoss) {
return
}
logger('init optimizer ...')
let optimizer: tf.Optimizer
switch (sOptimizer) {
case 'SGD' :
optimizer = tf.train.sgd(sLearningRate)
break
case 'RMSProp' :
optimizer = tf.train.rmsprop(sLearningRate)
break
case 'Adam' :
default:
optimizer = tf.train.adam(sLearningRate)
break
}
sModel.compile({ optimizer: optimizer, loss: sLoss, metrics: ['accuracy'] })
// setModel(model)
return () => {
logger('Optimizer Dispose')
optimizer?.dispose()
}
}, [sModel, sLearningRate, sOptimizer, sLoss])
* sLearningRate 调整。
* sOptimizer 训练时的优化器可以选择 `['SGD', 'Adam', 'RMSProp']` 三种算法
* sLoss 必须要注意一下,这个 Loss 函数的选择是和目标数据编码方式相关的。
* 如果选择 Label 编码,则需要使用 `sparseCategoricalCrossentropy`,
* 如果选择 OneHot 编码,则需要使用 `categoricalCrossentropy`
### 使用 Model.fitDataset 训练
const beginMs = performance.now()
model.fitDataset(trainDataset, {
epochs: EPOCHS,
validationData: validDataset,
callbacks: {
onEpochEnd: async (epoch, logs) => {
logger('onEpochEnd', epoch)
logs && addTrainInfo({ iteration: epoch, logs })
predictModel(model, sPredictSet?.xs)
await tf.nextFrame()
},
onBatchEnd: async () => {
if (stopRef.current) {
logger('onBatchEnd Checked stop', stopRef.current)
setStatus(STATUS.STOPPED)
model.stopTraining = stopRef.current
}
await tf.nextFrame()
}
}
}).then(
() => {
setStatus(STATUS.TRAINED)
const secPerEpoch = (performance.now() - beginMs) / (1000 * EPOCHS)
logger(secPerEpoch)
},
loggerError
)
### 展示训练过程 —— 在 useState 中使用数组
在 onEpochEnd 或 onBatchEnd 回调函数被调用时,可以得到当前训练的 logs 信息,结合我们在上一张中使用过的 BizChart 工具,可以自己定义训练过程的展示组件 HistoryWidget。HistoryWidget 的实现和画曲线类似,不做过多赘述。
不过,这一次和以前使用 useState 的场景略有不一样,需要注意一下。先看看代码:
const [sTrainInfos, setTrainInfos] = useState<ITrainInfo[]>([])
const addTrainInfo = (info: ITrainInfo): void => {
sTrainInfos.push(info)
setTrainInfos([...sTrainInfos])
}
...
<HistoryWidget infos={sTrainInfos} totalIterations={EPOCHS}/>
你发现了没有?我们在 useState 时定义了一个**数组**。而且,当我们在往 sTrainInfos 里 push 数据时,还增加了一个**看似无用**的 `setTrainInfos([...sTrainInfos])` 语句。
你可以去掉它试试,会发现 log 数据就显示不出来了。这是因为React Hooks 函数组件每次因刷新渲染而被调用时,都会重新创建内部的对象。因此,在这里只能再次强行赋值一遍。
<file_sep>/src/components/rnn/dataSentiment.ts
/**
* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '@tensorflow/tfjs'
import * as path from 'path'
import { fetchResource, logger } from '../../utils'
import { OOV_INDEX, padSequences } from './sequenceUtils'
// Base URL (relative to the web app's public root) of the preprocessed IMDB data files.
export const DATA_BASE_URL = '/preload/data/imdb'
/**
 * Load IMDB data features from a local file.
 *
 * The file is a flat stream of little-endian int32 values. The value `1`
 * marks the start of a new sequence; every other value is a word index.
 *
 * @param {string} filePath Data file on local filesystem.
 * @param {number} numWords Number of words in the vocabulary. Word indices
 *   that exceed this limit will be marked as `OOV_INDEX`.
 * @param {number} maxLen Length of each sequence. Longer sequences will be
 *   pre-truncated; shorter ones will be pre-padded.
 * @param {boolean} multihot Whether to use multi-hot encoding of the words.
 *   Default: `false`.
 * @return {tf.Tensor} If `multihot` is `false` (default), the dataset
 *   represented as a 2D `tf.Tensor` of shape `[numExamples, maxLen]` and
 *   dtype `int32`. Else, the dataset represented as a 2D `tf.Tensor` of
 *   shape `[numExamples, numWords]` and dtype `float32`.
 */
const loadFeatures = async (filePath: string, numWords: number, maxLen: number,
    multihot = false): Promise<tf.Tensor> => {
    const buffer = await fetchResource(filePath, false)
    const numBytes = buffer.byteLength
    const sequences = []
    let seq = []
    let index = 0
    // Scan the buffer one int32 (4 bytes) at a time.
    while (index < numBytes) {
        const value = buffer.readInt32LE(index)
        if (value === 1) {
            // A new sequence has started.
            // (index > 0 guard: the very first marker opens, not closes, a sequence.)
            if (index > 0) {
                sequences.push(seq)
            }
            seq = []
        } else {
            // Sequence continues.
            // Clamp out-of-vocabulary word indices to the shared OOV_INDEX.
            seq.push(value >= numWords ? OOV_INDEX : value)
        }
        index += 4
    }
    // Flush the trailing sequence — no start-marker follows the last one.
    if (seq.length > 0) {
        sequences.push(seq)
    }

    // Get some sequence length stats.
    let minLength = Infinity
    let maxLength = -Infinity
    sequences.forEach(seq => {
        const length = seq.length
        if (length < minLength) {
            minLength = length
        }
        if (length > maxLength) {
            maxLength = length
        }
    })
    logger(`Sequence length: min = ${minLength}; max = ${maxLength}`)

    if (multihot) {
        // If requested by the arg, encode the sequences as multi-hot vectors:
        // one row per sequence, 1.0 at each in-vocabulary word index.
        const buf = tf.buffer([sequences.length, numWords])
        sequences.forEach((seq, i) => {
            seq.forEach(wordIndex => {
                if (wordIndex !== OOV_INDEX) {
                    buf.set(1, i, wordIndex)
                }
            })
        })
        return buf.toTensor()
    } else {
        // Otherwise pre-pad/pre-truncate every sequence to a uniform maxLen.
        const paddedSequences =
            padSequences(sequences, maxLen, 'pre', 'pre')
        return tf.tensor2d(paddedSequences, [paddedSequences.length, maxLen], 'int32')
    }
}
/**
 * Load IMDB targets from a file.
 *
 * Each byte in the file holds one label: 1 for a positive example and
 * 0 for a negative one.
 *
 * @param {string} filePath Path to the binary targets file.
 * @return {tf.Tensor} The targets as `tf.Tensor` of shape `[numExamples, 1]`
 *   and dtype `float32`. It has 0 or 1 values.
 */
const loadTargets = async (filePath: string): Promise<tf.Tensor2D> => {
    const buf = await fetchResource(filePath, false)
    const labels: number[] = []
    let positives = 0
    let negatives = 0
    // One label per byte; tally the class balance while collecting.
    for (let offset = 0; offset < buf.byteLength; ++offset) {
        const label = buf.readUInt8(offset)
        if (label === 1) {
            positives++
        } else {
            negatives++
        }
        labels.push(label)
    }
    logger(`Loaded ${positives} positive examples and ${negatives} negative examples.`)
    // Column vector of 0/1 values, one row per example.
    return tf.tensor2d(labels, [labels.length, 1], 'float32')
}
/**
 * Load data by downloading and extracting files if necessary.
 *
 * The four data files (train/test features and targets) are fetched and
 * decoded concurrently rather than one after another.
 *
 * @param {number} numWords Number of words in the vocabulary.
 * @param {number} len Length of each sequence. Longer sequences will
 *   be pre-truncated and shorter ones will be pre-padded.
 * @param {boolean} multihot Whether to multi-hot encode the features.
 *   Default: `false`.
 * @return
 *   xTrain: Training data as a `tf.Tensor` of shape
 *     `[numExamples, len]` and `int32` dtype.
 *   yTrain: Targets for the training data, as a `tf.Tensor` of
 *     `[numExamples, 1]` and `float32` dtype. The values are 0 or 1.
 *   xTest: The same as `xTrain`, but for the test dataset.
 *   yTest: The same as `yTrain`, but for the test dataset.
 */
export const loadData = async (numWords: number, len: number, multihot = false): Promise<tf.TensorContainerObject> => {
    const dataDir = `${DATA_BASE_URL}/`
    // Fetch and decode all four files in parallel; results keep their order.
    const [xTrain, xTest, yTrain, yTest] = await Promise.all([
        loadFeatures(path.join(dataDir, 'imdb_train_data.bin'), numWords, len, multihot),
        loadFeatures(path.join(dataDir, 'imdb_test_data.bin'), numWords, len, multihot),
        loadTargets(path.join(dataDir, 'imdb_train_targets.bin')),
        loadTargets(path.join(dataDir, 'imdb_test_targets.bin'))
    ])
    // Sanity check: features and targets must pair up one example per row.
    tf.util.assert(
        xTrain.shape[0] === yTrain.shape[0],
        () => 'Mismatch in number of examples between xTrain and yTrain')
    tf.util.assert(
        xTest.shape[0] === yTest.shape[0],
        () => 'Mismatch in number of examples between xTest and yTest')
    return { xTrain, yTrain, xTest, yTest }
}
<file_sep>/public/docs/ai/mobilenet-transfer.md
# 图像识别 MobileNet
## 数据
MobileNet 使用 ImageNet 的数据集,进行了训练,能够识别 1000 个分类。
在此基础之上,我们可以通过迁移学习,用较少的数据集,就能够作出个性化的定制训练。
## 模型
* 图片分类:直接使用 MobileNet 预训练模型
* Teachable Machine:使用 MobileNet 输出特征,再通过 KNN 方法识别分类。非常适合较小的训练集。
* 迁移学习,图片分类:使用 MobileNet 输出特征,增加分类层,形成一个新的复合模型,进行训练。
* 迁移学习,对象识别:在 MobileNet 的输出特征基础上,增加用于对象识别的层,进行训练。
## 训练
观察数据,以及随着训练模型参数的变化,观察测试集的推理结果正确情况。
## 推理
通过上传图片,或者摄像头拍摄,进行推理验证。
<file_sep>/public/docs/dev/README.md
# 开发教程 Develop Tutorial
## 构建开发框架
- [x] [Develop Tutorial 1 从零开始](./start-from-scratch.md)
* 环境安装
* 安装 Node 环境
* 安装 yarn 工具
* React 和 React Hooks
* 创建 React 应用
* 创建一个新的 React 项目
* React 项目目录简述
- [x] [Develop Tutorial 2 构建 React 开发框架](./dev-structure.md)
* React-tfjs-camp 的目录结构
* public 目录结构
* src 目录结构
* node 目录结构
* 规范代码语法和风格检查
* tsconfig.json
* .eslintrc.js
* 改造页面布局
* React 函数化组件
* 使用 Ant Design 构建页面框架
* 在项目中使用 AntD
* 页面布局
* 边栏菜单导航
* AntD Layout Sider
* 使用 React Hooks 的 useState 管理边栏状态
* 用 React-Route 实现页面路由跳转
* ErrorBoundary
- [x] [Develop Tutorial 3 搭建展示端到端 AI 概念的舞台](./ai-process-panel.md)
* 端到端的 AI 概念
* AIProcessTabs
* 带参数的 React 函数组件
* 使用 React Hooks 的 useEffect 处理组件内的数据依赖
* 处理需要隐藏的 TabPane
* Sticky 的使用
* MarkdownWidget
### 操练 Tensorflow.js
- [x] [Develop Tutorial 4 初步了解 Tensorflow.js](./tfjs-intro.md)
* 使用 Tensorflow.js 的几点须知
* Backend —— 为什么我的 tfjs 运行很慢?
    * 内存管理 —— 怎样避免我的程序内存溢出?
* tfjs 安装
* tfjs 加载
* 使用 Tensorflow.js 和 React 生成数据集
* 随机生成 a, b, c 三个参数
* 实现公式计算 & useCallback
* 训练集和测试集的生成
* 函数数据可视化
* 使用 Tensorflow.js 创建人工神经网络
* 实现一个简单的多层人工神经网络
* 窥探一下 LayerModel 的内部
* 模型训练
* 调整 LearningRate 观察对训练的影响
* 模型训练 model.fit
* 及时停止模型训练 —— useRef Hook 登场
* 模型推理
- [x] [Develop Tutorial 5 用 Tensorflow.js 处理按数据分类问题](./data-classifier.md)
* 分类问题的数据表述
* 标签编码
* One-Hot
* 用 tf.data.Dataset 构造训练集和测试集
* 按比例分配数据集
* 了解 tf.data.Dataset
* 初始化数据集
* SampleDataVis 展示数据样本
* 使用 useEffect 构建细粒度的数据驱动渲染
* AntD Table 的使用
* 全联接网络模型
* 训练
* 调整训练参数:注意一下 Loss 函数
* 使用 Model.fitDataset 训练
* 展示训练过程 —— 在 useState 中使用数组
- [x] [Develop Tutorial 6 MNIST CNN 的 Layers API 实现](mnist-layer-api.md)
* MNIST 的数据集
* MNIST 的数据集的两种格式—— PNG 和 GZ
* 预先下载数据集到本地
* PNG 格式数据的加载和使用
* GZ 格式数据的加载和使用
* 使用 fetch 加载数据文件
* 数据的加载
* 修改 SampleDataVis 以显示图片
* 组件 RowImageWidget—— 使用 useRef 访问 HTML Element
* CNN 网络模型
* 将 tfjs-vis 集成到 React
* 模型训练
* 推理
* 数字手写板的实现 —— 在 React 中使用 canvas 绘图
* 使用 Tfjs 将 canvas 位图转化为 Tensor
- [x] [Develop Tutorial 7 MNIST CNN 的 Core API 实现](./mnist-core-api.md)
* 数据分片加载
* 使用 Tensorflow.js 的 Core API 构造深度神经网络
* 等价的 Layers API 实现
* 卷积模型的权重参数
* 卷积模型的前向传播计算过程
* 模型的训练——被隐藏的梯度下降和反向传播
- [ ] [Develop Tutorial 8 站在预训练模型的肩上——以 MobileNet 为例 ](./mobilenet-basic.md)
- [ ] 待续
===========================
[ ] 使用 td-node 执行训练
[ ] 卷积层的可视化
### MobileNet 图片分类:使用预训练的模型,进行迁移学习
#### 模型
[x] 使用预训练的MobileNet模型. 获得模型和加载 Weights
[x] 使用预训练的MobileNet模型 -> 特征 -> 机器学习算法 KNN [teachable-machine](https://github.com/googlecreativelab/teachable-machine-boilerplate.git)
[x] 使用预训练的MobileNet模型 -> 扩展模型 -> 仅训练靠后扩展的几层 -> 新的可用模型
#### 数据和模型的保存
[x] 模型存储和上传加载
[x] 数据存储和上传加载
#### 交互设计和实现
[x] 图片上传显示组件
[x] 图片分类标注组件
[x] 摄像头组件,拍照上传
[ ] 对视频流的处理
#### 构建一个模型服务器
构建一个 Model Serving 的 Web 服务器(Docker基础)
使用 tfjs.convertor 进行模型转换
### 简单对象识别,基于 MobileNet 扩展
#### 标注数据的生成
#### 在后台用 Python/Node.js 和 GPU 训练
#### 对象识别
#### 对象计数等
### 处理连续数据:Jena 天气预报
### RNN 文本情感分析 —— RNN和词嵌入
### RNN 文本生成 —— LSTM
### 其他备忘
作诗、作文、作曲Magenta
生成模型:图像风格迁移,
seq-to-seq 的其他应用,DNA 碱基对,代码生成。。。
BERT
使用 RNN 处理视频、音频。。。
### 声音的处理:对信号的AI处理
傅立叶变换将信号转成频谱图 -> 图像处理
声音的输入
基于 LSTM 的 语音识别
<file_sep>/public/docs/dev/mnist-layer-api.md
# MNIST CNN 的 Layers API 实现
## MNIST 的数据集
### MNIST 的数据集的两种格式—— PNG 和 GZ
MNIST 的数据集有以下两种格式:
* GZ 格式数据集:将训练集、测试集,以及它们各自的标注,分别存放在 gz 压缩文件中。需要下载并展开后方能使用。Python、Node.js 中使用这种格式比较方便。
* PNG 格式数据集:将所有的小图片都合成到一个大的 PNG 图片中,便于 Web 端加载。在浏览器端使用此格式更方便。
React-tfjs-camp 实现了对这两种格式数据的处理,将 Mnist 数据集封装在 `interface IMnistDataSet` 中。
import { TensorContainerObject } from '@tensorflow/tfjs'
export const IMAGE_H = 28
export const IMAGE_W = 28
export const IMAGE_SIZE = IMAGE_H * IMAGE_W
export const NUM_CLASSES = 10
export interface IMnistDataSet {
loadData: () => Promise<void>
getTrainData: (numExamples?: number) => TensorContainerObject
getTestData: (numExamples?: number) => TensorContainerObject
nextTrainBatch: (batchSize: number) => TensorContainerObject
nextTestBatch: (batchSize: number) => TensorContainerObject
}
* TensorContainerObject 是封装 Tensor 常用的方式,其形式为 Json。
* getTrainData 和 getTestData,用于 tfjs LayerModel 形式的 model.fit 训练。
* nextTrainBatch 和 nextTestBatch 被用于 tfjs core API 形式的训练。
使用 useEffect 在用户修改数据集时,进行相应加载的 useEffect 如下,用完数据及时使用 tf.dispose 释放是个好习惯。
useEffect(() => {
logger('init data set ...')
setStatus(STATUS.WAITING)
let mnistDataset: IMnistDataSet
if (sDataSourceName === 'mnist' || sDataSourceName === 'fashion') {
mnistDataset = new MnistDatasetGz(sDataSourceName)
} else {
mnistDataset = new MnistDatasetPng()
}
let tSet: tf.TensorContainerObject
let vSet: tf.TensorContainerObject
mnistDataset.loadData().then(
() => {
tSet = mnistDataset.getTrainData()
vSet = mnistDataset.getTestData(SHOW_SAMPLE)
setTrainSet(tSet)
setTestSet(vSet)
setStatus(STATUS.LOADED)
},
loggerError
)
return () => {
logger('Data Set Dispose')
tf.dispose([tSet.xs, tSet.ys])
tf.dispose([vSet.xs, vSet.ys])
}
}, [sDataSourceName])
### 预先下载数据集到本地
要完成 MNIST 实验,需要下载对应的数据集。在国内下载速度比较慢(或者需要科学上网),为了减少不必要的等待,我们先将这些数据集下载到本地,以便多次使用。
在命令行中使用以下命令,下载数据。
$ cd ./public/preload/data
$ ./download_mnist_data.sh
如果不能执行的话,请检查一下系统是否已经安装 `wget` 。
### PNG 格式数据的加载和使用
PNG 格式数据加载的主要代码请参考 `./src/components/mnist/MnistDatasetPng.ts`。
* 所依赖的数据集需要预先被下载到 `/preload/data/mnist` 目录下。
const BASE_URL = '/preload/data/mnist'
const MNIST_IMAGES_SPRITE_PATH = `${BASE_URL}/mnist_images.png`
const MNIST_LABELS_PATH = `${BASE_URL}/mnist_labels_uint8`
* 原始数据集大约有 65000 个数据。在 Web 端使用时,如果 `NUM_TRAIN_ELEMENTS`、`NUM_TEST_ELEMENTS` 设置过大,会导致数据图片不能够被正常加载。你可以将 `NUM_TRAIN_ELEMENTS` 调大为 55000 试试,并在界面上的SampleDataVis 组件中观察变化。
const NUM_DATASET_ELEMENTS = 65000
const NUM_TRAIN_ELEMENTS = 35000
const NUM_TEST_ELEMENTS = 7000
* 使用了浏览器的 Canvas 对已经加载的大 PNG 文件进行分割,这是个很常用的技巧。
loadData = async (): Promise<void> => {
let datasetImages: Float32Array
// Make a request for the MNIST sprited image.
const img = new Image()
const canvas = document.createElement('canvas')
const ctx = canvas.getContext('2d')
const imgRequest = new Promise((resolve, reject) => {
img.crossOrigin = ''
img.onload = () => {
img.width = img.naturalWidth
img.height = img.naturalHeight
const datasetBytesBuffer =
new ArrayBuffer(NUM_DATASET_ELEMENTS * IMAGE_SIZE * 4)
const chunkSize = 5000
canvas.width = img.width
canvas.height = chunkSize
for (let i = 0; i < NUM_DATASET_ELEMENTS / chunkSize; i++) {
const datasetBytesView = new Float32Array(
datasetBytesBuffer, i * IMAGE_SIZE * chunkSize * 4,
IMAGE_SIZE * chunkSize)
ctx?.drawImage(
img, 0, i * chunkSize, img.width, chunkSize, 0, 0, img.width,
chunkSize)
const imageData = ctx?.getImageData(0, 0, canvas.width, canvas.height)
...
}
datasetImages = new Float32Array(datasetBytesBuffer)
resolve()
}
img.src = MNIST_IMAGES_SPRITE_PATH
})
...
}
* 对图像数据需要做个预处理,将颜色值从 int 值(0-255) 转化成 float。
const length = imageData?.data.length ?? 0
for (let j = 0; j < length / 4; j++) {
// All channels hold an equal value since the image is grayscale, so
// just read the red channel.
const v = imageData?.data[j * 4] ?? 0
datasetBytesView[j] = v / 255
}
* 构建训练数据集,返回形如 `{xs, ys}` 的训练数据。xs 是一个个的小图片,ys 则是对应的 One-Hot 向量。
getTrainData = (numExamples?: number): tf.TensorContainerObject => {
let xs = tf.tensor4d(
this.trainImages,
[this.trainImages.length / IMAGE_SIZE, IMAGE_H, IMAGE_W, 1])
let labels = tf.tensor2d(
this.trainLabels, [this.trainLabels.length / NUM_CLASSES, NUM_CLASSES])
if (numExamples != null) {
xs = xs.slice([0, 0, 0, 0], [numExamples, IMAGE_H, IMAGE_W, 1])
labels = labels.slice([0, 0], [numExamples, NUM_CLASSES])
}
return { xs, ys: labels }
}
### GZ 格式数据的加载和使用
GZ 数据的加载和 PNG 格式略有不同。主要代码请参考 `./src/components/mnist/MnistDatasetGz.ts`。
#### 使用 fetch 加载数据文件
加载数据的代码会在多地多次使用,放在 `./src/utils.ts` 中,将 URL 所指示的资源文件,加载到 Buffer 对象中。为了处理 gz 文件,使用了 `zlib` 包。
import * as zlib from 'zlib'
...
export const fetchResource = async (url: string, isUnzip?: boolean): Promise<Buffer> => {
const response = await fetch(url)
const buf = await response.arrayBuffer()
if (isUnzip) {
logger('unzip...', url)
return zlib.unzipSync(Buffer.from(buf))
} else {
return Buffer.from(buf)
}
}
#### 数据的加载
* GZ 格式数据可以支持加载手写数字数据集,也可以支持 MNIST-Fashion 数据集。对应数据需要预先下载到 `/preload/data/${source}` 目录下。source 取值为 `mnist` 或 `fashion`。
constructor (source: string) {
this.source = source
this.baseUrl = `/preload/data/${source}`
this.trainImagesFileUrl = `${this.baseUrl}/train-images-idx3-ubyte.gz`
this.trainLabelsFileUrl = `${this.baseUrl}/train-labels-idx1-ubyte.gz`
this.testImagesFileUrl = `${this.baseUrl}/t10k-images-idx3-ubyte.gz`
this.testLabelsFileUrl = `${this.baseUrl}/t10k-labels-idx1-ubyte.gz`
}
* 读取文件后,需要跳过文件头中的一些描述数据。
const IMAGE_HEADER_BYTES = 16
const LABEL_HEADER_BYTES = 8
const LABEL_RECORD_BYTE = 1
const loadHeaderValues = (buffer: Buffer, headerLength: number): number[] => {
const headerValues = []
for (let i = 0; i < headerLength / 4; i++) {
// Header data is stored in-order (aka big-endian)
headerValues[i] = buffer.readUInt32BE(i * 4)
}
return headerValues
}
* 加载图片
const loadImages = async (url: string): Promise<Float32Array[]> => {
const buffer = await fetchResource(url, true)
const headerBytes = IMAGE_HEADER_BYTES
const recordBytes = IMAGE_SIZE
// skip header
const headerValues = loadHeaderValues(buffer, headerBytes)
logger('image header', headerValues)
const images = []
let index = headerBytes
while (index < buffer.byteLength) {
const array = new Float32Array(recordBytes)
for (let i = 0; i < recordBytes; i++) {
// Normalize the pixel values into the 0-1 interval, from
// the original 0-255 interval.
array[i] = buffer.readUInt8(index++) / 255.0
}
images.push(array)
}
logger('Load images :', `${images.length.toString()} / ${headerValues[1].toString()}`)
return images
}
* 加载 Labels 的代码基本相似
const loadLabels = async (url: string): Promise<Uint8Array[]> => {
const buffer = await fetchResource(url, true)
const headerBytes = LABEL_HEADER_BYTES
const recordBytes = LABEL_RECORD_BYTE
// skip header
const headerValues = loadHeaderValues(buffer, headerBytes)
logger('label header', headerValues)
const labels = []
let index = headerBytes
while (index < buffer.byteLength) {
const array = new Uint8Array(recordBytes)
for (let i = 0; i < recordBytes; i++) {
array[i] = buffer.readUInt8(index++)
}
labels.push(array)
}
logger('Load labels :', `${labels.length.toString()} / ${headerValues[1].toString()}`)
return labels
}
* 返回数据集的部分参考如下。
getTrainData = (numExamples = NUM_TRAIN_ELEMENTS): tf.TensorContainerObject => {
return this.getData_(this.trainImages, this.trainLabels, numExamples)
}
...
getData_ = (imageSet: Float32Array[], labelSet: Uint8Array[], numExamples?: number): tf.TensorContainerObject => {
const size = imageSet.length
// Only create one big array to hold batch of images.
const imagesShape: [number, number, number, number] = [size, IMAGE_H, IMAGE_W, 1]
const images = new Float32Array(tf.util.sizeFromShape(imagesShape))
const labels = new Int32Array(tf.util.sizeFromShape([size, 1]))
let imageOffset = 0
let labelOffset = 0
for (let i = 0; i < size; ++i) {
images.set(imageSet[i], imageOffset)
labels.set(labelSet[i], labelOffset)
imageOffset += IMAGE_SIZE
labelOffset += 1
}
let xs = tf.tensor4d(images, imagesShape)
let ys = tf.oneHot(tf.tensor1d(labels, 'int32'), NUM_CLASSES)
if (numExamples != null) {
xs = xs.slice([0, 0, 0, 0], [numExamples, IMAGE_H, IMAGE_W, 1])
ys = ys.slice([0, 0], [numExamples, NUM_CLASSES])
}
return { xs, ys }
}
## 修改 SampleDataVis 以显示图片
MNIST 数据集的 X 为图片,我们修改 SampleDataVis 以获得更直观的展示。
* 为 SampleDataVis 组件增加新的属性,以说明对 X 数据使用图片方式预览。
interface IProps {
...
xIsImage?: boolean
...
}
const formatX = useCallback((sampleInfo: tf.Tensor) => {
return props.xIsImage
? formatImage(sampleInfo)
: formatTensorToStringArray(sampleInfo, props?.xFloatFixed).join(', ')
}, [props.xFloatFixed, props.xIsImage])
* 创建 RowImageWidget 组件用于显示图片。
const formatImage = (sampleInfo: tf.Tensor): JSX.Element => {
const data = Array.from(sampleInfo.dataSync())
const shapeArg = sampleInfo.shape.slice(1, 3) as [number, number]
return <RowImageWidget data={data} shape={shapeArg}/>
}
### 组件 RowImageWidget—— 使用 useRef 访问 HTML Element
组件 RowImageWidget 的代码位于 `./src/componenets/common/tensor/RowImageWidget.tsx`。
import React, { useEffect, useRef, useState } from 'react'
const draw = (canvas: HTMLCanvasElement | null, data: number[] | undefined, shape: number[]): void => {
if (!canvas || !data || data.length === 0) {
return
}
const [width, height] = shape
canvas.width = width
canvas.height = height
const ctx = canvas.getContext('2d')
const imageData = new ImageData(width, height)
// const data = image.dataSync()
for (let i = 0; i < height * width; ++i) {
const j = i * 4
imageData.data[j] = data[i] * 255
imageData.data[j + 1] = data[i] * 255
imageData.data[j + 2] = data[i] * 255
imageData.data[j + 3] = 255
}
ctx?.putImageData(imageData, 0, 0)
}
interface IProps {
data?: number[]
shape?: number[]
}
const RowImageWidget = (props: IProps): JSX.Element => {
const [shape, setShape] = useState<number[]>([28, 28])
const rowCanvasRef = useRef<HTMLCanvasElement>(null)
useEffect(() => {
if (props.shape) {
setShape(props.shape)
}
}, [props.shape])
useEffect(() => {
if (!props.data || !rowCanvasRef) {
return
}
draw(rowCanvasRef.current, props.data, shape)
}, [props.data, shape])
return <canvas width={shape[0]} height={shape[1]} ref={rowCanvasRef} />
}
export default RowImageWidget
* 这里我们使用了 useRef 访问 HTML canvas,这是 useRef 另外一种常用的使用场景。
* **注意** canvas 的 width 和 height **必须用属性来指定**。如果用 style 来指定,会被放大两倍。
* 在 draw 函数中,我们把图片数据乘了 255, 将浮点数(0-1)的颜色值转成整数(0-255),用于 canvas 的显示。

## CNN 网络模型
在代码实现中提供了从简单到复杂的三种参考实现。
* `dense` 最简单的两层全联接网络
* `cnn-pooling` 带卷积、带 MaxPolling 层
* `cnn-dropout` 带卷积、MaxPolling 层、以及 dropout 层
useEffect(() => {
logger('init model ...')
tf.backend()
setTfBackend(tf.getBackend())
// Create a sequential neural network model. tf.sequential provides an API
// for creating "stacked" models where the output from one layer is used as
// the input to the next layer.
const model = tf.sequential()
switch (sModelName) {
case 'dense' :
addDenseLayers(model)
break
case 'cnn-pooling' :
addCovPoolingLayers(model)
break
case 'cnn-dropout' :
addCovDropoutLayers(model)
break
}
model.add(tf.layers.dense({ units: 10, activation: 'softmax' }))
setModel(model)
...
return () => {
logger('Model Dispose')
model?.dispose()
}
}, [sModelName])
下面的代码展示了其中最复杂的一个模型 —— 带 maxPooling 和 dropout 的 CNN,使用 Layers API 构建的顺序深度神经网络模型:
const model = tf.sequential()
model.add(tf.layers.conv2d({
inputShape: [IMAGE_H, IMAGE_W, 1], filters: 32, kernelSize: 3, activation: 'relu'
}))
model.add(tf.layers.conv2d({ filters: 32, kernelSize: 3, activation: 'relu' }))
model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 }))
model.add(tf.layers.conv2d({ filters: 64, kernelSize: 3, activation: 'relu' }))
model.add(tf.layers.conv2d({ filters: 64, kernelSize: 3, activation: 'relu' }))
model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 }))
model.add(tf.layers.flatten())
model.add(tf.layers.dropout({ rate: 0.25 }))
model.add(tf.layers.dense({ units: 512, activation: 'relu' }))
model.add(tf.layers.dropout({ rate: 0.5 }))
model.add(tf.layers.dense({ units: 10, activation: 'softmax' }))
### 将 tfjs-vis 集成到 React
在前面的内容里,出于理解和学习的目的,我们曾创建了一些简单的模型可视化和数据可视化组件。
Tensorflow.js 提供了一个更强大的 tfjs-vis,能够在浏览器中对模型和数据进行可视化展示。
关于 tfjs-vis 的 API 说明,可以参考 [https://js.tensorflow.org/api_vis/latest/](https://js.tensorflow.org/api_vis/latest/)
为了展示如何将 React Hooks 和 tfjs-vis 集成在一起,单独写了个用来做测试和验证的 `TfvisWidget.tsx`。代码位置 `./src/componenets/sandbox/TfvisWidget.tsx`。
* 在 React 中引入 tfjs-vis,需要使用 require 语句,不能通过 import 语句加载。
// eslint-disable-next-line @typescript-eslint/no-var-requires
const tfvis = require('@tensorflow/tfjs-vis')
* 使用 useRef 绑定需要 tfjs-vis 渲染的 HTML div 元素。
const logs = {
history: { loss: [1, 2, 1.5], val_loss: [1.5, 2.5, 2.8] }
}
...
const canvasRef = useRef<HTMLDivElement>(null)
...
const drawDiv1 = (): void => {
if (!canvasRef.current) {
return
}
tfvis.show.history(canvasRef.current, logs, ['loss', 'val_loss'])
}
* 也可以使用 tfvis.visor 展示数据。
const headers = ['DataSet', 'Shape', 'dType', 'stride', 'json']
const tensor = tf.tensor1d([0, 0, 0, 0, 2, 3, 4])
const values = [
['xs', tensor.shape, tensor.dtype, tensor.strides, JSON.stringify(tensor)], // xs
['ys', tensor.shape, tensor.dtype, tensor.strides, JSON.stringify(tensor)] // ys
]
const data = [
{ index: 0, value: 50 },
{ index: 1, value: 100 },
{ index: 2, value: 150 }
]
...
const drawSurface1 = (): void => {
// Render to visor
const surface2 = { name: 'Bar chart', tab: 'My Tab1' }
tfvis.render.barchart(surface2, data)
}
const drawSurface2 = (): void => {
const suffer = tfvis.visor().surface({
tab: 'My Tab2',
name: 'Custom Height 2',
styles: {
height: 300
}
})
tfvis.render.table(suffer, { headers, values })
}
React-tfjs-camp 的代码中集成了几个常用的 tfjs-vis API,都放在 `./src/componenets/tfvis` 目录下。
下面的代码展示了如何使用 `TfvisModelWidget` 和 `TfvisLayerWidget` 显示模型和层信息。
<TabPane tab=' ' key={AIProcessTabPanes.MODEL}>
<Row>
<Col span={8}>
{modelAdjustCard()}
<Card title='Show Layer' style={{ margin: '8px' }} size='small'>
<Form {...layout} initialValues={{
layer: 0
}}>
<Form.Item name='layer' label='Show Layer'>
<Select onChange={handleLayerChange} >
{sLayersOption?.map((v) => {
return <Option key={v.index} value={v.index}>{v.name}</Option>
})}
</Select>
</Form.Item>
</Form>
</Card>
</Col>
<Col span={16}>
<Card title='Model Info' style={{ margin: '8px' }} size='small'>
<TfvisModelWidget model={sModel}/>
</Card>
<Card title='Layer Info' style={{ margin: '8px' }} size='small'>
<TfvisLayerWidget layer={sCurLayer}/>
</Card>
</Col>
</Row>
</TabPane>
> 已知问题:使用 require 语句引入的 tfjs-vis 组件,会导致 ErrorBoundary 失效,在使用 `TfvisLayerWidget` 选择可视化权重分布时,有些操作会导致异常。
## 模型训练
使用 model.fit 进行模型训练,这部分以前介绍过。
您可以调整 sLearningRate ,修改 model.compile 的相关参数,观察训练结果。
useEffect(() => {
if (!sModel) {
return
}
logger('init model optimizer...', sLearningRate)
const optimizer = tf.train.adam(sLearningRate)
sModel.compile({ optimizer, loss: 'categoricalCrossentropy', metrics: ['accuracy'] })
}, [sModel, sLearningRate])
还可以修改 Epochs 和 Batch 等训练相关的参数。将检查停止状态的代码,调整放在 `onBatchBegin` 回调函数中。
setStatus(STATUS.WAITING)
stopRef.current = false
const beginMs = performance.now()
let trainBatchCount = 0
let iteration = 0
model.fit(trainDataset.xs as tf.Tensor, trainDataset.ys as tf.Tensor, {
epochs: sEpochs,
batchSize: sBatchSize,
validationSplit: VALID_SPLIT,
callbacks: {
onEpochEnd: async (epoch, logs) => {
logger('onEpochEnd', epoch)
logs && addTrainInfo({ iteration: iteration++, logs })
predictModel(model, validDataset.xs as tf.Tensor)
await tf.nextFrame()
},
onBatchEnd: async (batch, logs) => {
trainBatchCount++
logs && addTrainInfo({ iteration: iteration++, logs })
if (batch % 10 === 0) {
logger(`onBatchEnd: ${batch.toString()} / ${trainBatchCount.toString()}`)
predictModel(model, validDataset.xs as tf.Tensor)
}
await tf.nextFrame()
},
onBatchBegin: async () => {
if (stopRef.current) {
logger('Checked stop', stopRef.current)
setStatus(STATUS.STOPPED)
model.stopTraining = stopRef.current
}
await tf.nextFrame()
}
}
}).then(
() => {
setStatus(STATUS.TRAINED)
const secSpend = (performance.now() - beginMs) / 1000
logger(`Spend : ${secSpend.toString()}s`)
},
loggerError
)
## 推理
既然做手写数字识别,必须要支持“手写”。通过数字手写板,可以对模型的训练结果做出最直观的验证。
训练前后分别做一下,感受一下相应的手写识别正确率差距吧。
### 数字手写板的实现 —— 在 React 中使用 canvas 绘图
DrawPanelWidget 使用 canvas 实现鼠标画图,并将其作为手写数字识别的输入。相关代码在 `./src/componenets/common/tensor/DrawPanelWidget.tsx`。
* 在 canvas 创建后,为其增加鼠标行为相关的 EventListener,并在页面销毁时解除侦听。
const panelRef = useRef<HTMLCanvasElement>(null)
...
useEffect(() => {
const _canvas = panelRef.current
if (!_canvas) {
return
}
logger('canvasRef init')
// Note: this implementation is a bit simplified
_canvas?.addEventListener('mousemove', handleWindowMouseMove)
_canvas?.addEventListener('mousedown', handleWindowMouseDown)
_canvas?.addEventListener('mouseup', handleWindowMouseup)
_canvas?.addEventListener('mouseleave', handleWindowMouseup)
return () => {
logger('Dispose canvasRef')
_canvas?.removeEventListener('mousemove', handleWindowMouseMove)
_canvas?.removeEventListener('mousedown', handleWindowMouseDown)
_canvas?.removeEventListener('mouseup', handleWindowMouseup)
_canvas?.removeEventListener('mouseleave', handleWindowMouseup)
}
}, [panelRef])
* 及时获得鼠标位置。
const handleWindowMouseMove = (e: MouseEvent): void => {
const _pos = getMousePos(e)
_pos && setCurrPos(_pos)
}
* 从 canvas 获取鼠标位置坐标。
const getMousePos = (e: MouseEvent): IPoint | null => {
const _canvas = panelRef.current
const bbox = _canvas?.getBoundingClientRect()
return bbox ? {
x: e.clientX - bbox?.left,
y: e.clientY - bbox?.top
} : null
}
* 鼠标按下,进入绘画状态。
const handleWindowMouseDown = (e: MouseEvent): void => {
setDrawing(true)
const _pos = getMousePos(e)
_pos && setCurrPos(_pos)
}
const handleWindowMouseup = (e: MouseEvent): void => {
setDrawing(false)
const _pos = getMousePos(e)
_pos && setCurrPos(_pos)
}
* 在鼠标位置绘制
const DrawPanelWidget = (props: IProps): JSX.Element => {
...
const draw = (from: IPoint | undefined): void => {
const _canvas = panelRef.current
const _ctx = _canvas?.getContext('2d')
if (!_ctx || !sCurrPos || !from) {
return
}
_ctx.beginPath()
_ctx.lineWidth = 10
_ctx.strokeStyle = 'white'
_ctx.fillStyle = 'white'
_ctx.arc(from.x, from.y, 8, 0, 2 * Math.PI, false)
_ctx.fill()
_ctx.stroke()
_ctx.closePath()
}
...
if (sDrawing && sCurrPos) {
draw(sCurrPos)
}
return (
...
<canvas width={CANVAS_WIDTH} height={CANVAS_HEIGHT} style={{ backgroundColor: 'black' }} ref={panelRef}/>
...
)
}
export default DrawPanelWidget
* 清除 canvas
const handleClear = (): void => {
const _canvas = panelRef.current
if (!_canvas) {
return
}
const _ctx = _canvas.getContext('2d')
_ctx?.clearRect(0, 0, _canvas.width, _canvas.height)
}
### 使用 Tfjs 将 canvas 位图转化为 Tensor
在向 MNIST CNN Model 提交手写数据时,需要将 canvas 的图片数据转换成 Tensor。
const handleSubmit = (): void => {
const _canvas = panelRef.current
if (!_canvas) {
return
}
const _ctx = _canvas.getContext('2d')
const imageData = _ctx?.getImageData(0, 0, CANVAS_WIDTH, CANVAS_HEIGHT)
if (imageData && props.onSubmit) {
// logger('imageData', imageData)
const _tensor = tf.browser.fromPixels(imageData, 1)
const _sample = tf.image.resizeBilinear(_tensor, [28, 28])
setMiniSample(Array.from(_sample.dataSync()))
props.onSubmit(_sample.expandDims(0))
}
}
* Tensorflow.js 提供了将 canvas 图像数据转化为 Tensor 的工具,并提供 resize。这些操作返回的是 Tensor3D 对象。
const _tensor = tf.browser.fromPixels(imageData, 1)
const _sample = tf.image.resizeBilinear(_tensor, [28, 28])
* Reshape,为推理提交 Tensor4D 对象。
props.onSubmit(_sample.expandDims(0))
<file_sep>/public/docs/dev/tfjs-intro.md
# 初步了解 Tensorflow.js
TensorFlow.js 是 Google Tensorflow 的 JS 版本,将高性能机器学习能力带到 JS 世界。
通过《曲线拟合》这个例子,我们对 Tensorflow.js 进行初步了解。在这个例子中,我们要构建一个一元二次方程,并观察使用人工神经网络模型求解的过程。

## 使用 Tensorflow.js 的几点须知
官方文档 [平台和环境](https://www.tensorflow.org/js/guide/platform_environment)
中,描述了使用 tfjs 的须知,下面的内容列举了其中必须了解的几点。在此,我们先做介绍,稍后在代码中体现。
### Backend —— 为什么我的 tfjs 运行很慢?
慢这件事,到底什么原因造成的,确实不好讲。不过了解 Tensorflow.js 运行 backend 的一些背景,会有些帮助。
TensorFlow.js 开发的程序运行时,所有的配置被统称为环境。它包含一个全局的backend,以及一些可以精确控制 TensorFlow.js 特性的标记。
TensorFlow.js 有两种工作平台:浏览器和 Node.js。不同平台有很多不同的配置,平台间的差异影响着基于平台的应用开发。
* 在浏览器平台上,TensorFlow.js 既支持移动设备,也支持台式设备,使用 WebGL API 作为 backend,自动检测并做相应的优化配置。你可以检查一下,浏览器中是否已开启“硬件加速模式”。

* 在 Node.js 平台上,TensorFlow.js 支持直接使用 TensorFlow 的 C 语言 API 来加速操作,它会尽可能使用机器的 GPU 硬件加速模块,如 CUDA。也支持更慢的 CPU 环境。
### 内存管理 —— 怎样避免我的程序内存溢出?
使用 WebGL backend 时,**需要显式管理内存**。因为存储Tensor的WebGL纹理,不会被浏览器的垃圾收集机制自动清理。
* 调用dispose()清理tf.Tensor占用的内存
* 在应用中,经常需要把多个操作组合起来。TensorFlow.js提供tf.tidy()方法,可以将 多个操作组合封装在函数中。函数返回时,会清理不再需要的tf.Tensor,这就好像函数执行后,本地变量都会被清理一样。
在一些非 WebGL 环境,有自动垃圾回收机制,在这些环境下使用dispose()或tidy()没有副作用。不过,主动调用通常会比垃圾回收的清理带来更好的性能。
### tfjs 安装
Web 端安装
$ yarn add @tensorflow/tfjs
Node.js 使用 TensorFlow.js 原生 C++ 加速。**坑**:MAC OS上,安装时会对原生 C++ 依赖包进行下载编译,慢,执行 gyp 需要使用 python v2 环境。
$ yarn add @tensorflow/tfjs-node
Node.js 使用 TensorFlow.js GPU 加速( Linux Only)
$ yarn add @tensorflow/tfjs-node-gpu
### tfjs 加载
基于浏览器的版本,加载 @tensorflow/tfjs 使用 tensorflow.js。如果是在 Node.js 环境中使用,需要引入 `@tensorflow/tfjs-node` 或 `@tensorflow/tfjs-gpu`
import * as tf from '@tensorflow/tfjs'
## 使用 Tensorflow.js 和 React 生成数据集
下面的代码引用自 `./src/components/curve/curve.tsx`

$$ y = a x^2 + b x + c $$
### 随机生成 a, b, c 三个参数
const [sCurveParams, setCurveParams] = useState<number[] | string>(INIT_PARAMS)
const genCurveParams = (): number[] => {
return tf.tidy(() => {
const params = tf.randomUniform([3], -10, 10).toInt()
return Array.from(params.dataSync())
})
}
const Curve = (): JSX.Element => {
...
useEffect(() => {
const [a, b, c] = genCurveParams()
setCurveParams([a, b, c])
}, [])
...
}
* `genCurveParams` 是个单纯的本地功能函数,放在 Curve 之外;使用它的 `useEffect` 被放在 Curve 组件的前部位置,符合我们前面说的使用 Hooks 的规则。
* `genCurveParams` 通过 `tf.randomUniform` 生成了 -10 到 10 之间的三个随机数,取整之后,用作 a, b, c。tfjs 里面还有好几种用于生成随机数的方法,用起来非常容易,可以根据问题需要使用。
* 这段代码被封装在 tf.tidy 中,以及时回收不用的内存。
* useEffect 第二个参数设置为 `[]`,表示在组件加载时调用。
### 实现公式计算 & useCallback
const calc = useCallback((x: tf.Tensor) => {
return tf.tidy(() => {
const [a, b, c] = sCurveParams
// = a * x^2 + b * x + c
return x.pow(2).mul(a).add(x.mul(b)).add(c)
})
}, [sCurveParams])
* tf.Tensor 提供了很多用于张量计算的函数,使用函数式编程的链式调用用起来也比较方便。需要注意的是,这种链式的调用仅仅与顺序有关,没有“先乘除,后加减”的计算符的优先级。
* 假如在此处使用普通的 JS 函数实现,每一次 Curve 组件渲染都会生成一个新的 calc 函数实例。
* `useCallback` 是我们所用到的第三类 React Hook。`useCallback` 会返回一个 memoized 的函数,用来对函数进行缓存,防止总是重复的生成新的函数。calc 函数被封装到了 `useCallback` 之后,只有当触发条件 [sCurveParams] 被修改时,才会触发回调函数 calc 发生改变,创建新实例。
### 训练集和测试集的生成
const [sTrainSet, setTrainSet] = useState<tf.TensorContainerObject>()
const [sTestSet, setTestSet] = useState<tf.TensorContainerObject>()
...
useEffect(() => {
logger('init data set ...')
// train set
const trainTensorX = tf.randomUniform([TOTAL_RECORD], -1, 1)
const trainTensorY = calc(trainTensorX)
setTrainSet({ xs: trainTensorX, ys: trainTensorY })
// test set
const testTensorX = tf.randomUniform([TEST_RECORD], -1, 1)
const testTensorY = calc(testTensorX)
setTestSet({ xs: testTensorX, ys: testTensorY })
return () => {
logger('Train Data Dispose')
// Specify how to clean up after this effect:
trainTensorX?.dispose()
trainTensorY?.dispose()
testTensorX?.dispose()
testTensorY?.dispose()
}
}, [calc])
* 仅在当 calc 由于参数改变而发生改变时,才触发对于训练集和测试集的更新。
* 随机生成了 1000 个 (-1,1) 之间的浮点数,作为训练集 trainTensorX。随机生成了 200 个 (-1,1) 之间的浮点数,作为测试集 testTensorX。
* 初始化数据的 `useEffect` 函数和以前的用法相比,有了返回值。在 effect 中返回一个函数是 effect 的清除机制。每个 effect 都可以返回一个清除函数,它们都属于 effect 的一部分。对于 tfjs 应用来说,正好可以在这里清除不用的 tf.Tensor 对象,React Hooks 和 Tensorflow.js 真是相得益彰。React 会在执行当前 effect 之前对上一个 effect 进行清除。
return () => {
logger('Train Data Dispose')
// Specify how to clean up after this effect:
trainTensorX?.dispose()
trainTensorY?.dispose()
testTensorX?.dispose()
testTensorY?.dispose()
}
### 运用 AntD From 实现参数调整
AntD v4 的 Form 做了较大的修改,我们一起来看看。
import React, { useCallback, useEffect, useRef, useState } from 'react'
...
import { Button, Card, Col, Form, Slider, Row, Select, Tabs } from 'antd'
const Curve = (): JSX.Element => {
...
const [formData] = Form.useForm()
...
const handleResetCurveParams = (): void => {
const [a, b, c] = genCurveParams()
formData.setFieldsValue({ a, b, c })
setCurveParams([a, b, c])
}
const handleCurveParamsChange = (): void => {
const values = formData.getFieldsValue()
// logger('handleParamsFormChange', values)
const { a, b, c } = values
setCurveParams([a, b, c])
}
const curveParam = (): JSX.Element => {
return <Slider min={-10} max={10} marks={{ '-10': -10, 0: 0, 10: 10 }} />
}
const dataAdjustCard = (): JSX.Element => {
return (
<Card title='Adjust Data' style={{ margin: '8px' }} size='small'>
<Form {...layout} form={formData} onFieldsChange={handleCurveParamsChange}
initialValues={{
a: sCurveParams[0],
b: sCurveParams[1],
c: sCurveParams[2]
}}>
<Form.Item name='a' label='Curve param a'>
{curveParam()}
</Form.Item>
...
<Form.Item {...tailLayout} >
<Button onClick={handleResetCurveParams} style={{ width: '60%', margin: '0 20%' }}> Random a,b,c </Button>
...
</Form.Item>
</Form>
</Card>
)
}
...
* 在 Curve 组件的前部,使用 `const [formData] = Form.useForm()` 定义 Form 的数据域引用。`useForm` 只能用于函数组件。
* 在 Form 表单定义部分,使用 `form={formData}` 与数据域引用相关联。使用 `initialValues` 属性定义标点数据初始值。
<Form {...layout} form={formData} onFieldsChange={handleCurveParamsChange}
initialValues={{
a: sCurveParams[0],
b: sCurveParams[1],
c: sCurveParams[2]
}}>
* Form 内的各数据项使用 Form.Item 装饰。 其 `name`属性为 Form 内变量名称。
<Form.Item name='a' label='Curve param a'>
{curveParam()}
</Form.Item>
* 在界面上调整 Slider 组件时,会触发由 `onFieldsChange={handleCurveParamsChange}` 定义的回调函数。利用 `const values = formData.getFieldsValue()` 读取 Form 中的数据值。
* 点击 Button 时,`onClick={handleResetCurveParams}` 定义的回调函数会采用 `formData.setFieldsValue({ a, b, c })` 设置 Form 中的数据值。
<Button onClick={handleResetCurveParams} style={{ width: '60%', margin: '0 20%' }}> Random a,b,c </Button>
* 在 Form 中用 `onFinish` 函数设置 Form 的 Submit。
## 函数数据可视化
要对训练集和测试集数据进行直观的观察,我们使用了阿里巴巴的前端领域通用图表组件库 Bizchart。Bizchart 的功能相当强大,在这个项目中只使用了九牛一毛。[BizCharts参考链接](https://bizcharts.net/)
`/src/components/curve/CurveVis.tsx` 封装了函数曲线可视化的组件。
<CurveVis xDataset={sTrainSet.xs as tf.Tensor} yDataset={sTrainSet.ys as tf.Tensor}
sampleCount={TOTAL_RECORD}/>
CurveVis 的实现要点如下:
import React, { useEffect, useState } from 'react'
...
import { Axis, Chart, Geom, Legend, Tooltip } from 'bizcharts'
import { arrayDispose, logger } from '../../utils'
const MAX_SAMPLES_COUNT = 100
...
interface IChartData {
x: number
y: number
type: string
}
interface IProps {
xDataset: Tensor
yDataset: Tensor
pDataset?: Tensor
sampleCount?: number
debug?: boolean
}
const CurveVis = (props: IProps): JSX.Element => {
const [xData, setXData] = useState<number[]>([])
const [yData, setYData] = useState<number[]>([])
const [pData, setPData] = useState<number[]>([])
const [data, setData] = useState()
const [sampleCount] = useState(props.sampleCount)
...
useEffect(() => {
logger('init sample data [p] ...')
const _data: IChartData[] = []
pData?.forEach((v: number, i: number) => {
_data.push({ x: xData[i], y: yData[i], type: 'y' })
_data.push({ x: xData[i], y: v, type: 'p' })
})
setData(_data)
return () => {
logger('Dispose sample data [p] ...')
arrayDispose(_data)
}
}, [pData])
return (
<Card>
<Chart height={400} data={data} padding='auto' forceFit>
<Axis name='X'/>
<Axis name='Y'/>
<Legend/>
<Tooltip/>
<Geom type='line' position='x*y' size={2} color={'type'} shape={'smooth'}/>
</Chart>
Sample count : {props.sampleCount}
{props.debug ? JSON.stringify(data) : ''}
</Card>
)
}
export default CurveVis
* 需要将从属性设置的 X、Y、P Tensor 转化成格式如 IChartData 的数组。使用 IChartData.type 区分不同的曲线。
interface IChartData {
x: number
y: number
type: string
}
...
const _data: IChartData[] = []
pData?.forEach((v: number, i: number) => {
_data.push({ x: xData[i], y: yData[i], type: 'y' })
_data.push({ x: xData[i], y: v, type: 'p' })
})
setData(_data)
* 使用如下方式绘制函数曲线。
return (
<Card>
<Chart height={400} data={data} padding='auto' forceFit>
<Axis name='X'/>
<Axis name='Y'/>
<Legend/>
<Tooltip/>
<Geom type='line' position='x*y' size={2} color={'type'} shape={'smooth'}/>
</Chart>
Sample count : {props.sampleCount}
{props.debug ? JSON.stringify(data) : ''}
</Card>
)
## 使用 Tensorflow.js 创建人工神经网络

### 实现一个简单的多层人工神经网络
通过 formModel 调整,可以调整人工神经网络的层数 sLayerCount、每层的神经元数 sDenseUnits、以及激活函数 sActivation。当这几个值改变的时候,Curve.tsx 会相应调整人工神经网络模型。
useEffect(() => {
logger('init model ...')
tf.backend()
setTfBackend(tf.getBackend())
// The linear regression model.
const model = tf.sequential()
model.add(tf.layers.dense({ inputShape: [1], units: sDenseUnits, activation: sActivation as any }))
for (let i = sLayerCount - 2; i > 0; i--) {
model.add(tf.layers.dense({ units: sDenseUnits, activation: sActivation as any }))
}
model.add(tf.layers.dense({ units: 1 }))
setModel(model)
return () => {
logger('Model Dispose')
model.dispose()
}
}, [sActivation, sLayerCount, sDenseUnits])
* 使用 tf.sequential 很容易构建出顺序多层神经网络。最简单的顺序全联接网络。tf.Sequential 是 LayerModel 的一个实例。
const model = tf.sequential()
* 为网络增加输入层,因为 X 为一维向量,所以 `inputShape: [1]`。
model.add(tf.layers.dense({ inputShape: [1], units: sDenseUnits, activation: sActivation as any }))
* 中间根据用户选择,增加多个隐藏层。
for (let i = sLayerCount - 2; i > 0; i--) {
model.add(tf.layers.dense({ units: sDenseUnits, activation: sActivation as any }))
}
* 输出层,因为只输出一维的 Y 值,所以 `{ units: 1 }`。
model.add(tf.layers.dense({ units: 1 }))
### 窥探一下 LayerModel 的内部
* 使用 `model.summary()`是最常用的观察模型的方法,不过只能够在浏览器的 Console 里显示结果。
* 实现一个简单的模型展示组件 `/src/components/common/tensor/ModelInfo.tsx`,看看模型的层次和权重相关的信息。
import React from 'react'
import * as tf from '@tensorflow/tfjs'
interface IProps {
model: tf.LayersModel
}
const ModelInfo = (props: IProps): JSX.Element => {
const { model } = props
return (
<>
<div>
<h2>Layers</h2>
{model.layers.map((l, index) => <div key={index}>{l.name}</div>)}
</div>
<div>
<h2>Weights</h2>
{model.weights.map((w, index) => <div key={index}>{w.name}, [{w.shape.join(', ')}]</div>)}
</div>
</>
)
}
export default ModelInfo
## 模型训练

在模型训练这个 Tab 中,我们可以对数据、模型、以及训练参数进行调整,以观察参数变化的影响。
### 调整 LearningRate 观察对训练的影响
useEffect(() => {
if (!sModel) {
return
}
logger('init optimizer ...')
const optimizer = tf.train.sgd(sLearningRate)
sModel.compile({ loss: 'meanSquaredError', optimizer })
return () => {
logger('Optimizer Dispose')
optimizer.dispose()
}
}, [sModel, sLearningRate])
* 调整 “随机梯度下降 SGD” 优化器的 sLearningRate,需要通过 `sModel.compile` 使之生效。
const optimizer = tf.train.sgd(sLearningRate)
sModel.compile({ loss: 'meanSquaredError', optimizer })
* 生成了新的优化器后,可以对老的优化器做清除。
return () => {
logger('Optimizer Dispose')
optimizer.dispose()
}
### 模型训练 model.fit
const trainModel = (model: tf.LayersModel, trainSet: tf.TensorContainerObject, testSet: tf.TensorContainerObject): void => {
if (!model || !trainSet || !testSet) {
return
}
setStatus(STATUS.WAITING)
stopRef.current = false
model.fit(trainSet.xs as tf.Tensor, trainSet.ys as tf.Tensor, {
epochs: NUM_EPOCHS,
batchSize: BATCH_SIZE,
validationSplit: VALIDATE_SPLIT,
callbacks: {
onEpochEnd: async (epoch: number) => {
const trainStatus = `${(epoch + 1).toString()}/${NUM_EPOCHS.toString()} = ${((epoch + 1) / NUM_EPOCHS * 100).toFixed(0)} %`
setTrainStatusStr(trainStatus)
if (epoch % 10 === 0) {
evaluateModel(model, testSet)
}
if (stopRef.current) {
logger('Checked stop', stopRef.current)
setStatus(STATUS.STOPPED)
model.stopTraining = stopRef.current
}
await tf.nextFrame()
}
}
}).then(
() => {
setStatus(STATUS.TRAINED)
},
loggerError
)
}
* `model.fit` 是模型训练的函数。训练时,还需要指定下面的参数。和 Python 不同,这些参数需要封装在 JS 对象中传递:
* epochs 迭代次数
* batchSize 因为计算环境资源有限,每次取用合适的数据量,以避免内存溢出等问题。
* validationSplit 从训练集中挑选验证集数据的比率
model.fit(trainSet.xs as tf.Tensor, trainSet.ys as tf.Tensor, {
epochs: NUM_EPOCHS,
batchSize: BATCH_SIZE,
validationSplit: VALIDATE_SPLIT,
callbacks: {...}
})
* 一般来讲,训练一次会需要花费较长的时间。通过设置回调函数,我们能够及时了解训练过程的中间状态。`onEpochEnd` 函数在每个 Epoch 迭代结束时被调用,下面的代码展示的是,每 10 个 Epoch,使用当前模型里的 Weights 值,进行一次推理验证,并将结果推送出来。
onEpochEnd: async (epoch: number) => {
const trainStatus = `${(epoch + 1).toString()}/${NUM_EPOCHS.toString()} = ${((epoch + 1) / NUM_EPOCHS * 100).toFixed(0)} %`
setTrainStatusStr(trainStatus)
if (epoch % 10 === 0) {
evaluateModel(model, testSet)
}
...
await tf.nextFrame()
}
### 及时停止模型训练 —— useRef Hook 登场
想中止训练,可以通过 `model.stopTraining = true` 语句来完成。
我们在 `onEpochEnd` 增加了一段,试一下,看看这两段看起来功能相似的代码,执行结果有何不同?
const stopRef = useRef(false)
const [sStop, setStop] = useState<boolean>(false)
...
onEpochEnd: async (epoch: number) => {
...
// Compare useRef with useState
if (sStop) {
logger('Checked stop by useState', sStop)
setStatus(STATUS.STOPPED)
model.stopTraining = sStop
}
if (stopRef.current) {
logger('Checked stop by useRef', stopRef.current)
setStatus(STATUS.STOPPED)
model.stopTraining = stopRef.current
}
await tf.nextFrame()
}
* 第一段实现使用了 `const [sStop, setStop] = useState<boolean>(false)`。为停止训练设置了 sStop,如果 sStop 为 true,则停止训练。当用户点击相应按钮时,`setStop(true)`
const handleTrainStopState = (): void => {
logger('handleTrainStopState')
setStop(true)
}
* 第二段实现使用了 `const stopRef = useRef(false)`。当用户点击相应按钮时,`stopRef.current = true`
const handleTrainStop = (): void => {
logger('handleTrainStop')
stopRef.current = true
}
你实验出差别了吗?Why?
1. `setState` 用于状态的更新,state 不能存储跨渲染周期的数据,因为 state 的保存会触发组件重渲染。`onEpochEnd` 函数在训练开始时被创建,它用到的是当时的 sStop 实例 sStop_1。而当 sStop 变化之后,触发了页面渲染,在新的渲染中,sStop 已经变成了一个新实例 sStop_2。这就是为什么 setState 不起作用的原因。
2. 而 `useRef` 则返回一个可变的 ref 对象,返回的 ref 对象在组件的整个生命周期内保持不变。所以,Ref 可以用于在渲染周期之间共享数据的存储,对它修改也 **不会** 引起组件渲染。也就是说,stopRef.current 随时都指向的“那个”对象的当前值。
官方文档中关于 useRef 有个 setInterval 的例子,在一定程度上有利于理解这个问题。[Is there something like instance variables?](https://reactjs.org/docs/hooks-faq.html#is-there-something-like-instance-variables)
`useRef` 还被用于获取DOM元素的节点、获取子组件的实例、以及在渲染周期之间共享数据的存储等场景。
关于 useRef 的更多信息:[官方文档 useRef](https://reactjs.org/docs/hooks-reference.html#useref)
## 模型推理
可以通过 model.predict 或 model.evaluate 来检验训练结果。
const evaluateModel = (model: tf.LayersModel, testSet: tf.TensorContainerObject): void => {
if (!model || !testSet) {
return
}
const pred = model.predict(testSet.xs as tf.Tensor) as tf.Tensor
setTestP(pred)
const evaluate = model.evaluate(testSet.xs as tf.Tensor, testSet.ys as tf.Tensor) as tf.Scalar
setTestV(evaluate)
}
<file_sep>/public/docs/ai/iris.md
# 鸢尾花分类 IRIS
## 知识点
这个例子涉及以下的 AI 知识点:
* 分类问题的处理思路:标签整数张量,one-hot 编码方式
* Softmax 激活函数
* 损失函数
* 优化器算法
## 问题

鸢【音:yuān】尾花(Iris)是单子叶百合目花卉。在北京植物园最北端的宿根花卉区,种植有40余个品种的鸢尾,最佳观赏时间大约在4月下旬至5月中下旬。也就是说,再过一个多月,北京的鸢尾花就应该开了。想必到那时候,新冠疫情应该已经结束,可以和家人朋友一起出去浪,拍几张照片,换换这里网上搜来的照片。

鸢尾花数据集最初由 <NAME> 测量得到,而后在著名的统计学家和生物学家 <NAME> 于 1936 年发表的文章「The use of multiple measurements in taxonomic problems」中被使用,用其作为线性判别分析(Linear Discriminant Analysis)的一个例子,证明分类的统计方法,从此而被众人所知。
鸢尾花数据集由 3 种不同类型的鸢尾花的各 50 个样本数据构成。每个样本包含了4个属性,特征数值都是正浮点数,单位为厘米:
* Sepal.Length(花萼长度)
* Sepal.Width(花萼宽度)
* Petal.Length(花瓣长度)
* Petal.Width(花瓣宽度)
预测变量目标值为鸢尾花的分类为三类,其中的一个种类与另外两个种类是线性可分离的,后两个种类是非线性可分离的:
* Iris Setosa(山鸢尾)
* Iris Versicolour(杂色鸢尾)
* Iris Virginica(维吉尼亚鸢尾)
本节的内容,就是使用测量得到的特征数据,对目标进行分类,是个非常典型的场景。
## 数据
鸢尾花原始的数据,类似这样:
4.8,3.0,1.4,0.3,Iris-setosa
5.1,3.8,1.6,0.2,Iris-setosa
4.6,3.2,1.4,0.2,Iris-setosa
5.7,3.0,4.2,1.2,Iris-versicolor
5.7,2.9,4.2,1.3,Iris-versicolor
6.2,2.9,4.3,1.3,Iris-versicolor
6.3,3.3,6.0,2.5,Iris-virginica
5.8,2.7,5.1,1.9,Iris-virginica
7.1,3.0,5.9,2.1,Iris-virginica
为了便于计算处理,需要对分类结果进行转换处理。
常见的处理分类目标数据的方法有:标签编码 和 One-Hot
### 标签编码
使用 int 类型, 对三种分类进行编号替换,就形成了整数标签目标数据:
* 0 :Iris setosa(山鸢尾)
* 1 :Iris versicolor(杂色鸢尾)
* 2 :Iris virginica(维吉尼亚鸢尾)
上面的数据被转换成(为了便于观察,在数据中增加了空格,以区分特征数据和目标分类数据):
4.8,3.0,1.4,0.3, 0
5.1,3.8,1.6,0.2, 0
4.6,3.2,1.4,0.2, 0
5.7,3.0,4.2,1.2, 1
5.7,2.9,4.2,1.3, 1
6.2,2.9,4.3,1.3, 1
6.3,3.3,6.0,2.5, 2
5.8,2.7,5.1,1.9, 2
7.1,3.0,5.9,2.1, 2
标签编码的类别值从 0 开始(因为大多数计算机系统从 0 开始计数),所以,如果有 N 个类别,类别值为 0 至 N-1。
**标签编码的适用场景**:
* 如果原本的标签编码是有序意义的,例如评分等级,使用标签编码就是一个更好的选择。
* 不过,如果标签编码是和鸢尾数据类似的无顺序数据,在计算中,更高的标签数值会给计算带来不必要的附加影响。这时候更好的方案是使用 one-hot 编码方式。

在上面的数据中。在进行标签编码的数据集中有
$$ virginica(2) > versicolor(1) > setosa(0) $$
比方说,假设模型内部计算平均值(神经网络中有大量加权平均运算),那么0 + 2 = 2,2 / 2 = 1. 这意味着:virginica 和 setosa 平均一下是 versicolor。该模型的预测会有大量误差。
### One-Hot
One-Hot 编码是将类别变量转换为机器学习算法易于利用的一种形式的过程。One-Hot 将 n 个分类,表示为一个 只含有 0,1 数值的向量。向量的位置表示了对应的分类。
例如,采用 One-Hot 编码,上面的数据就应该编码成:
4.8,3.0,1.4,0.3, [1,0,0]
5.1,3.8,1.6,0.2, [1,0,0]
4.6,3.2,1.4,0.2, [1,0,0]
5.7,3.0,4.2,1.2, [0,1,0]
5.7,2.9,4.2,1.3, [0,1,0]
6.2,2.9,4.3,1.3, [0,1,0]
6.3,3.3,6.0,2.5, [0,0,1]
5.8,2.7,5.1,1.9, [0,0,1]
7.1,3.0,5.9,2.1, [0,0,1]
tfjs 里也提供了将标签编码转化成 One-Hot 的函数 `tf.oneHot`,使用起来很方便。
## 模型
鸢尾花的计算模型使用的是两层的全联接网络。
参考代码实现如下。其中激活函数、输入层的神经元数量都可以在页面上直接调整。
const model = tf.sequential()
model.add(tf.layers.dense({
units: sDenseUnits,
activation: sActivation as any,
inputShape: [data.IRIS_NUM_FEATURES]
}))
model.add(tf.layers.dense({ units: 3, activation: 'softmax' }))
* 输入层的 inputShape 是和特征数据相关的,是个 4 元向量。
* 因为要输出三个分类,所以输出层的神经元数量设置为 3。
* 多分类问题的输出层,激活函数使用 Softmax。如果是二分类问题,激活函数可以使用 Sigmoid
### Softmax 激活函数
$$ Softmax(z)_i = \frac{ e^{z_i} }{ \sum_{j=1}^{J} e^{z_j} } \quad for\ i = 1 \ldots J $$

Softmax用于多分类神经网络输出. 如果某一个 zj 大过其他 z, 那这个映射的分量就逼近于 1,其他就逼近于 0。
Sigmoid 将一个实数映射到(0,1)的区间,用来做二分类。而 Softmax 把一个 k 维的实数向量(a1,a2,a3,a4…)映射成一个(b1,b2,b3,b4…)其中 bi 是一个 0~1 的常数,输出神经元之和为 1.0,所以相当于概率值,然后可以根据 bi 的概率大小来进行多分类的任务。二分类问题时 Sigmoid 和 Softmax 是一样的,求的都是**交叉熵损失(cross entropy loss)**,而 Softmax 可以用于多分类问题。Softmax 是 Sigmoid 的扩展,因为,当类别数 k=2 时,Softmax 回归退化为 logistic 回归。
## 训练
这次训练除了能够调整 Learning Rate 参数,还能够调整优化算法。
const optimizer = tf.train.adam(sLearningRate)
model.compile({ optimizer: sOptimizer, loss: sLoss, metrics: ['accuracy'] })
### Loss 函数的选择
Loss 函数对于训练非常重要。
在这个例子里,根据目标数据编码形式的不同,需要选用不同的 Loss 函数。
* 标签编码: sparseCategoricalCrossentropy
* One-Hot: categoricalCrossentropy
### 优化器算法
#### SGD
这是最基础的梯度下降算法,更新权重W,不多解释。

其中 α是learning rate(学习速率)。我们可以把下降的损失函数看成一个机器人,由于在下降的时候坡度不是均匀的,机器人会左右摇摆,所以下降速度会比较慢,有时候遇到局部最优,还可能在原地徘徊好长时间。

#### RMSProp
RMSprop 是 <NAME> 提出的一种自适应学习率方法。Hinton 建议设定 γ 为 0.9, 学习率 η 为 0.001。

#### Adam
Adam是目前用得最广的优化算法。这个算法是一种计算每个参数的自适应学习率的方法。和 RMSprop 一样存储了过去梯度的平方 vt 的指数衰减平均值 ,也保持了过去梯度 mt 的指数衰减平均值。相当于给机器人一个惯性,同时还让它穿上了防止侧滑的鞋子,当然就相当好用啦。
建议 β1 = 0.9,β2 = 0.999,ϵ = 10e−8。实践表明,Adam 比其他适应性学习方法效果要好。



#### 不同优化算法下降速度的差距


## 扩展阅读
### 鸢尾花数据集
[鸢尾花数据集](https://www.jianshu.com/p/6ada344f91ce)
### One-Hot
[什么是one hot编码?为什么要使用one hot编码?](https://zhuanlan.zhihu.com/p/37471802)
### 优化器
[关于深度学习优化器 optimizer 的选择,你需要了解这些](https://www.leiphone.com/news/201706/e0PuNeEzaXWsMPZX.html)
[Tensorflow中的Optimizer(优化器)](https://www.jianshu.com/p/8f9247bc6a9a)
<file_sep>/node/src/jena/dataJena.ts
/**
* @license
* Copyright 2018 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
/**
* Data object for Jena Weather data.
*
* The data used in this demo is the
* [Jena weather archive
* dataset](https://www.kaggle.com/pankrzysiu/weather-archive-jena).
*
* This file is used to load the Jena weather data in both
* - the browser: see [index.js](./index.js), and
* - the Node.js backend environment: see [train-jena.js](./train-jena.js).
*/
import * as tf from '@tensorflow/tfjs-node'
import { fetchLocal, logger } from '../utils'
// Location of the locally cached Jena weather CSV, relative to the
// Node.js process working directory (the remote URL below is the origin).
const BASE_URL = '../public/preload/data'
const LOCAL_JENA_WEATHER_CSV_PATH = `${BASE_URL}/jena_climate_2009_2016.csv`
// const REMOTE_JENA_WEATHER_CSV_PATH =
// 'https://storage.googleapis.com/learnjs-data/jena_climate/jena_climate_2009_2016.csv'
// NOTE(review): SAMPLE_OFFSET / SAMPLE_LEN are only referenced from the
// commented-out csvDataset code path inside JenaWeatherData; kept for
// when that path is restored.
const SAMPLE_OFFSET = 50
const SAMPLE_LEN = 3
/**
 * Result of parsing one "dd.mm.yyyy HH:MM:SS" date-time string from the
 * Jena weather CSV (see JenaWeatherData.parseDateTime_).
 */
interface IParsedDate {
date: Date
normalizedDayOfYear: number // normalizedDayOfYear: Day of the year, normalized between 0 and 1.
normalizedTimeOfDay: number // normalizedTimeOfDay: Time of the day, normalized between 0 and 1.
}
/**
 * A class that fetches and processes the Jena weather archive data.
 *
 * Typical usage order: `loadCsv()` -> `loadDataColumnNames()` -> `load()`.
 * After `load()` completes, per-column means/stddevs and the normalized
 * data cache are populated.
 *
 * It also provides a method to create a function that iterates over
 * batches of training or validation data.
 */
export class JenaWeatherData {
// CSV column names, excluding the leading "Date Time" column.
dataColumnNames: string[] = []
// Index of the "Date Time" column in the raw CSV header.
dateTimeCol = 0
// Index of the temperature column 'T (degC)' within dataColumnNames;
// used as the prediction target in getNextBatchFunction.
tempCol = 0
// Number of parsed data rows (header excluded).
numRows = 0
// Number of numeric columns per data row.
numColumns = 0
numColumnsExcludingTarget = 0
// Parsed timestamp of each row, same order as `data`.
dateTime: Date[] = []
// Day of the year data, normalized between 0 and 1.
normalizedDayOfYear: number[] = []
// Time of the day, normalized between 0 and 1.
normalizedTimeOfDay: number[] = []
data: number[][] = [] // Unnormalized data.
// Per-column mean / standard deviation, same order as dataColumnNames.
means: number[] = []
stddevs: number[] = []
// Cached z-score normalized rows: (value - mean) / stddev.
normalizedData: number[][] = []
dataset?: tf.TensorContainer[]
sampleData?: tf.TensorContainerObject
// Raw CSV lines, including the header at index 0. Filled by loadCsv().
csvLines: string[] = []
constructor () {
// this.csvDataset = tf.data.csv(LOCAL_JENA_WEATHER_CSV_PATH)
}
/**
 * Read the local CSV file into memory and split it into lines.
 * If the file cannot be fetched, csvLines is left unchanged.
 */
loadCsv = async (): Promise<void> => {
const buffer = await fetchLocal(LOCAL_JENA_WEATHER_CSV_PATH)
if (!buffer) {
return
}
const csvData = buffer.toString()
this.csvLines = csvData.split('\n')
}
/**
 * Parse the CSV header row: strip the quotes around each column name,
 * locate the "Date Time" and 'T (degC)' columns, and keep the remaining
 * column names in dataColumnNames. Requires loadCsv() to have run first.
 */
loadDataColumnNames = (): void => {
// Parse header.
const columnNames = this.csvLines[0].split(',')
for (let i = 0; i < columnNames.length; ++i) {
// Discard the quotes around the column name.
columnNames[i] = columnNames[i].slice(1, columnNames[i].length - 1)
}
this.dateTimeCol = columnNames.indexOf('Date Time')
// tf.util.assert(this.dateTimeCol === 0, 'Unexpected date-time column index')
this.dataColumnNames = columnNames.slice(1)
this.tempCol = this.dataColumnNames.indexOf('T (degC)')
// tf.util.assert(this.tempCol >= 1, 'Unexpected T (degC) column index')
}
/**
 * Parse every CSV data row into dateTime / data / normalized day-of-year
 * and time-of-day arrays, then compute means and stddevs and cache the
 * normalized data. Requires loadCsv() (and loadDataColumnNames() for the
 * column-dependent statistics) to have run first.
 */
load = async (): Promise<void> => {
// Parse CSV file. will spend 10+ sec
// const beginMs = performance.now()
this.dateTime = []
this.data = [] // Unnormalized data.
this.normalizedDayOfYear = [] // Day of the year data, normalized between 0 and 1.
this.normalizedTimeOfDay = [] // Time of the day, normalized between 0 and 1.
for (let i = 1; i < this.csvLines.length; ++i) {
const line = this.csvLines[i].trim()
if (line.length === 0) {
continue
}
const items = line.split(',')
const parsed = this.parseDateTime_(items[0])
const newDateTime: Date = parsed.date
if (this.dateTime.length > 0 &&
newDateTime.getTime() <=
this.dateTime[this.dateTime.length - 1].getTime()) {
// NOTE(review): empty branch — this detects a non-increasing timestamp
// but takes no action (an assertion was presumably removed), so
// out-of-order rows are silently accepted. Confirm whether this
// should warn or throw.
}
this.dateTime.push(newDateTime)
this.data.push(items.slice(1).map(x => +x))
this.normalizedDayOfYear.push(parsed.normalizedDayOfYear)
this.normalizedTimeOfDay.push(parsed.normalizedTimeOfDay)
if ((i % 100) === 0) {
// Progress indicator: one dot per 100 rows parsed.
logger('.')
}
}
this.numRows = this.data.length
this.numColumns = this.data[0].length
this.numColumnsExcludingTarget = this.data[0].length - 1
logger(`this.numColumnsExcludingTarget = ${this.numColumnsExcludingTarget}`)
await this.calculateMeansAndStddevs_()
// logger('spend time: ', (performance.now() - beginMs) / 1000)
// logger(this.normalizedData)
}
// loadDataColumnNames = async (): Promise<void> => {
// // Parse header.
// const columnNames = await this.csvDataset.columnNames()
// this.dataColumnNames = columnNames.slice(1)
// this.tempCol = 0
// // logger(columnNames)
// }
// load = async (): Promise<void> => {
// // // will spend 130+ sec
// // // Headers
// // const cName = ['Date Time', 'p (mbar)', 'T (degC)', 'Tpot (K)', 'Tdew (degC)', 'rh (%)',
// // 'VPmax (mbar)', 'VPact (mbar)', 'VPdef (mbar)', 'sh (g/kg)', 'H2OC (mmol/mol)',
// // 'rho (g/m**3)', 'wv (m/s)', 'max. wv (m/s)', 'wd (deg)']
//
// const beginMs = performance.now()
//
// // const sampleObjs = csvDataset.skip(SAMPLE_OFFSET).take(SAMPLE_LEN)
//
// this.dateTime = []
// this.data = [] // Unnormalized data.
// this.normalizedDayOfYear = [] // Day of the year data, normalized between 0 and 1.
// this.normalizedTimeOfDay = [] // Time of the day, normalized between 0 and 1.
//
// // await allData.forEachAsync((row: any) => {
// await this.csvDataset.forEachAsync((row: any) => {
// const rowValues = Object.values(row)
// // logger(rowValues)
//
// const parsed: any = this.parseDateTime_(rowValues[0] as string)
// const newDateTime = parsed.date
//
// this.dateTime.push(newDateTime)
// this.data.push(rowValues.slice(1).map(x => Number(x)))
// this.normalizedDayOfYear.push(parsed.normalizedDayOfYear)
// this.normalizedTimeOfDay.push(parsed.normalizedTimeOfDay)
// })
//
// this.numRows = this.data.length
// this.numColumns = this.data[0].length
// this.numColumnsExcludingTarget = this.data[0].length - 1
//
// await this.calculateMeansAndStddevs_()
//
// // this.sampleData = {
// // data: tf.tensor2d(this.data, [this.numRows, this.numColumns]),
// // normalizedTimeOfDay: tf.tensor2d(this.normalizedTimeOfDay, [this.numRows, 1])
// // }
//
// logger('spend time: ', (performance.now() - beginMs) / 1000)
// logger(this.normalizedData)
// }
/**
 * Parse one date-time string into a Date plus day-of-year and
 * time-of-day values normalized to [0, 1).
 *
 * NOTE(review): `date` is built with Date.UTC while yearOnset/dayOnset
 * use the local-time Date constructor, so the normalized values may be
 * offset by the local timezone — confirm this is intentional.
 */
parseDateTime_ = (str: string): IParsedDate => {
// The date time string with a format that looks like: "17.01.2009 22:10:00"
const items = str.split(' ')
const dateStr = items[0]
const dateStrItems = dateStr.split('.')
const day = +dateStrItems[0]
const month = +dateStrItems[1] - 1 // month is 0-based in JS `Date` class.
const year = +dateStrItems[2]
const timeStrItems = items[1].split(':')
const hours = +timeStrItems[0]
const minutes = +timeStrItems[1]
const seconds = +timeStrItems[2]
const date = new Date(Date.UTC(year, month, day, hours, minutes, seconds))
const yearOnset = new Date(year, 0, 1)
// normalizedDayOfYear: Day of the year, normalized between 0 and 1.
const normalizedDayOfYear = (date.getTime() - yearOnset.getTime()) / (366 * 1000 * 60 * 60 * 24)
const dayOnset = new Date(year, month, day)
// normalizedTimeOfDay: Time of the day, normalized between 0 and 1.
const normalizedTimeOfDay = (date.getTime() - dayOnset.getTime()) / (1000 * 60 * 60 * 24)
return { date, normalizedDayOfYear, normalizedTimeOfDay }
}
/**
 * Calculate the means and standard deviations of every column.
 *
 * TensorFlow.js is used for acceleration.
 * Statistics are computed from the first 6 * 24 * 365 rows only (one year
 * of 10-minute samples), then used to normalize the full dataset.
 */
calculateMeansAndStddevs_ = async (): Promise<void> => {
tf.tidy(() => {
// Instead of doing it on all columns at once, we do it
// column by column, as doing it all at once causes WebGL OOM
// on some machines.
this.means = []
this.stddevs = []
for (const columnName of this.dataColumnNames) {
const data = tf.tensor1d(this.getColumnData(columnName).slice(0, 6 * 24 * 365))
const moments = tf.moments(data)
this.means.push(moments.mean.dataSync()[0])
this.stddevs.push(Math.sqrt(moments.variance.dataSync()[0]))
}
// console.log('means:', this.means)
// console.log('stddevs:', this.stddevs)
})
// Cache normalized values.
this.normalizedData = []
for (let i = 0; i < this.numRows; ++i) {
const row = []
for (let j = 0; j < this.numColumns; ++j) {
row.push((this.data[i][j] - this.means[j]) / this.stddevs[j])
}
this.normalizedData.push(row)
}
}
// Names of the numeric data columns (date-time column excluded).
getDataColumnNames = (): string[] => {
return this.dataColumnNames
}
// Number of parsed data rows.
getDataLength = (): number => {
return this.data.length
}
// Returns sampleData; undefined unless something has assigned it.
getSampleData = (): tf.TensorContainerObject | undefined => {
// logger('sampleData', this.sampleData)
return this.sampleData
}
// Timestamp of the row at `index`.
getTime = (index: number): Date => {
return this.dateTime[index]
}
/** Get the mean and standard deviation of a data column. */
getMeanAndStddev = (dataColumnName: string): any => {
if (this.means == null || this.stddevs == null) {
throw new Error('means and stddevs have not been calculated yet.')
}
const index = this.getDataColumnNames().indexOf(dataColumnName)
if (index === -1) {
throw new Error(`Invalid data column name: ${dataColumnName}`)
}
return {
mean: this.means[index], stddev: this.stddevs[index]
}
}
/**
 * Extract one column as an array.
 *
 * @param columnName name of the column (must be in dataColumnNames;
 *   an unknown name yields index -1 and undefined values — the original
 *   assertion is commented out below).
 * @param includeTime when true, each element is { x: epochMillis, y: value }.
 * @param normalize when true, values come from normalizedData.
 * @param beginIndex first row (default 0).
 * @param length number of rows (default: to the end of the data).
 * @param stride row step (default 1).
 */
getColumnData = (columnName: string, includeTime?: boolean, normalize?: boolean,
beginIndex?: number, length?: number, stride?: number): any[] => {
const columnIndex = this.dataColumnNames.indexOf(columnName)
// tf.util.assert(columnIndex >= 0, `Invalid column name: ${columnName}`)
if (beginIndex == null) {
beginIndex = 0
}
if (length == null) {
length = this.numRows - beginIndex
}
if (stride == null) {
stride = 1
}
const out = []
for (let i = beginIndex; i < beginIndex + length && i < this.numRows; i += stride) {
let value: any = normalize ? this.normalizedData[i][columnIndex] : this.data[i][columnIndex]
if (includeTime) {
value = { x: this.dateTime[i].getTime(), y: value as number }
}
out.push(value)
}
return out
}
/**
 * Build an iterator-like object whose next() returns one batch of
 * examples: xs of shape [numExamples, lookBack/step, featureLength] and
 * ys of shape [numExamples, 1] (the temperature column, `delay` rows
 * ahead).
 *
 * @param shuffle when true, rows are drawn at random from
 *   [minIndex + lookBack, maxIndex); when false, sequentially (done
 *   becomes true once maxIndex is reached).
 * @param lookBack number of past rows that make up one example.
 * @param delay offset (in rows) of the prediction target.
 * @param batchSize max examples per batch.
 * @param step sampling stride within an example.
 * @param minIndex / maxIndex row range to draw from.
 * @param normalize use normalizedData instead of raw data.
 * @param includeDateTime append the two normalized day/time features.
 */
getNextBatchFunction = (shuffle: boolean, lookBack: number, delay: number, batchSize: number, step: number, minIndex: number, maxIndex: number, normalize: boolean,
includeDateTime: boolean): any => {
// startIndex is captured by the closure: sequential batches resume here.
let startIndex = minIndex + lookBack
const lookBackSlices = Math.floor(lookBack / step)
return {
next: () => {
const rowIndices = []
let done = false // Indicates whether the dataset has ended.
if (shuffle) {
// If `shuffle` is `true`, start from randomly chosen rows.
const range = maxIndex - (minIndex + lookBack)
for (let i = 0; i < batchSize; ++i) {
const row = minIndex + lookBack + Math.floor(Math.random() * range)
rowIndices.push(row)
}
} else {
// If `shuffle` is `false`, the starting row indices will be sequential.
let r = startIndex
for (; r < startIndex + batchSize && r < maxIndex; ++r) {
rowIndices.push(r)
}
if (r >= maxIndex) {
done = true
}
}
const numExamples = rowIndices.length
startIndex += numExamples
const featureLength =
includeDateTime ? this.numColumns + 2 : this.numColumns
const samples = tf.buffer([numExamples, lookBackSlices, featureLength])
const targets = tf.buffer([numExamples, 1])
// Iterate over examples. Each example contains a number of rows.
for (let j = 0; j < numExamples; ++j) {
const rowIndex = rowIndices[j]
let exampleRow = 0
// Iterate over rows in the example.
for (let r = rowIndex - lookBack; r < rowIndex; r += step) {
let exampleCol = 0
// Iterate over features in the row.
for (let n = 0; n < featureLength; ++n) {
let value
if (n < this.numColumns) {
value = normalize ? this.normalizedData[r][n] : this.data[r][n]
} else if (n === this.numColumns) {
// Normalized day-of-the-year feature.
value = this.normalizedDayOfYear[r]
} else {
// Normalized time-of-the-day feature.
value = this.normalizedTimeOfDay[r]
}
samples.set(value, j, exampleRow, exampleCol++)
}
// NOTE(review): targets.set runs once per row of the example, so
// each iteration overwrites the previous one and only the last
// row's value (at r + delay) sticks — confirm it should not be
// set once per example, outside this row loop.
const value = normalize
? this.normalizedData[r + delay][this.tempCol]
: this.data[r + delay][this.tempCol]
targets.set(value, j, 0)
exampleRow++
}
}
return {
value: { xs: samples.toTensor(), ys: targets.toTensor() },
done
}
}
}
}
dispose = (): void => {
// todo
}
}
<file_sep>/public/docs/dev/ai-process-panel.md
# 展示端到端 AI 概念的舞台
## 端到端的 AI 概念

所谓端到端,就是从领域问题出发,思考所需的数据,运用相应的模型,执行训练,验证推理,形成新应用。这个过程,我自己理解是“从数据出发”的科学探究方法的延伸,就像是古人观测天象、记录物理现象一样。而未来和过去不一样的地方在于,我们可以利用计算机和 AI ,处理更大量的数据,发现隐藏的更深的规律。
在 React-tfjs-camp 的实现中,展示了从 `问题 --> 数据 --> 模型 --> 训练 --> 推理` 的依赖顺序关系。

## AIProcessTabs
为了从视觉上能够展示端到端的前后依赖关系,将 AntD 的 Step 和 Tabs 做了结合,体现为 AIProcessTabs 组件, `/src/components/common/AIProcessTabs.tsx`。
### 带参数的 React 函数组件
AIProcessTabs 比前面介绍的 APP、SiderBar 等组件要复杂些,需要接受上层传递的属性参数。
import React, { useEffect, useState } from 'react'
...
export enum AIProcessTabPanes {
INFO = '1',
DATA = '2',
MODEL = '3',
TRAIN = '4',
PREDICT = '5'
}
const INVISIBLE_PANES: AIProcessTabPanes[] = []
interface IProps {
children?: JSX.Element[]
title?: string | JSX.Element
current?: number
invisiblePanes?: AIProcessTabPanes[]
onChange?: (current: number) => void
}
const AIProcessTabs = (props: IProps): JSX.Element => {
...
const handleChange = (current: number): void => {
setCurrent(current)
props.onChange && props.onChange(current + 1)
}
}
export default AIProcessTabs
* 使用 interface 能够约束所传递属性的结构。`?` 表示这是个非必须属性。
interface IProps {
children?: JSX.Element[]
title?: string | JSX.Element
current?: number
invisiblePanes?: AIProcessTabPanes[]
onChange?: (current: number) => void
}
* 使用 `const AIProcessTabs = (props: IProps): JSX.Element => {` 限定传入组件的属性。
* 使用诸如 `props.onChange` 的形式使用属性。由于 `props.onChange` 是非必须的,所以,加上检查条件再使用 `props.onChange && props.onChange(...)`,能够增加代码的鲁棒性。
非常容易理解,不是吗?
### 使用 React Hooks 的 useEffect 处理组件内的数据依赖
我们用 Steps 来展示从 `问题 --> 数据 --> 模型 --> 训练 --> 推理` 的依赖顺序关系。使用 Tabs 容纳每个步骤的具体页面内容。对 Tabs 的子元素 TabPane,我们使用了枚举类型 enum 来限制其关键属性 key 只能使用规定的这五类。
export enum AIProcessTabPanes {
INFO = '1',
DATA = '2',
MODEL = '3',
TRAIN = '4',
PREDICT = '5'
}
...
<TabPane tab=' ' key={AIProcessTabPanes.INFO}>
<MarkdownWidget url={'/docs/ai/curve.md'}/>
</TabPane>
我们想达到的效果是,点击 Steps 中的某一步,能够自动跳转到对应的 TabPane 去。也就是说,Steps 和 Tabs 之间,必须能够保持与内容一致的联动。
AntD 的 Steps 组件的子元素 Step 下标从 `0` 起始,而此时对应的 TabPane 的 key 属性是 `'1'`。为了这个差距,AIProcessTabs 使用了 React Hooks 中另一个重要的 Hook —— useEffect。
import React, { useEffect, useState } from 'react'
import { Steps, Tabs } from 'antd'
...
const { Step } = Steps
const AIProcessTabs = (props: IProps): JSX.Element => {
const [sCurrent, setCurrent] = useState<number>(0)
const [sInvisiblePanes, setInvisiblePanes] = useState<AIProcessTabPanes[]>(INVISIBLE_PANES)
useEffect(() => {
logger('init current', props.current)
props.current && setCurrent(props.current - 1)
}, [props.current])
useEffect(() => {
props.invisiblePanes && setInvisiblePanes(props.invisiblePanes)
}, [props.invisiblePanes])
const handleChange = (current: number): void => {
setCurrent(current)
props.onChange && props.onChange(current + 1)
}
...
return (
...
<Steps current={sCurrent} onChange={handleChange}>
...
</Steps>
...
<Tabs activeKey={(sCurrent + 1).toString()} tabBarStyle={{ height: 0 }}>
{props.children}
</Tabs>
...
)
}
export default AIProcessTabs
`useEffect()` 接受两个参数。第一个参数是一个函数,异步操作的代码放在里面。第二个参数是一个数组,用于给出 Effect 的依赖项,只要这个数组发生变化,useEffect()就会执行。
* 使用 useEffect 实现将 Props 参数和组件状态之间的映射。考虑到外部使用此组件时,开发者感知到的是 TabPane 的 key,所以,这个 props.current 参数也是参考 TabPane key 所对应的数值设置。例如:要激活 AIProcessTabPanes.INFO 时,设置此 `props.current = 1`。而进入了组件内部,这个 current 需要影响的是 Steps 的 current 属性,即需要对应 `<Steps current={sCurrent} ...>` 的 `sCurrent = 0`。
useEffect(() => {
logger('init current', props.current)
props.current && setCurrent(props.current - 1)
}, [props.current])
* `useEffect(() => {...}, [props.current])` 放置于第二个参数 [] 中的部分表示:一旦 props.current 有所变化,执行 useEffect 的函数部分。熟悉 Redux 的读者,立刻会理解这个操作正是 `mapPropsToState` 所做的。
* 如果将第二个参数设置为 `[]` 空数组,则表示仅仅在组件初始化时执行(类似于 React 生命周期函数 componentDidMount)。
* 如果省略第二个参数,每次组件渲染时都会执行useEffect()。
### 处理需要隐藏的 TabPane
在 React-tfjs-camp 的开发过程中,有些 AI 概念内容并不具备完整个五个步骤,所以增加了 `props.invisiblePanes`, 以隐藏相应的 TabPane。在视觉上采用实心图标表示此步为可用,空心图标则不可用。下图显示了在 MobileNet Calssifier 例子里,由于使用的是预训练模型,不需要展示训练过程。

import React, { useEffect, useState } from 'react'
import { Steps, Tabs } from 'antd'
...
import {
BoxPlotFilled,
BoxPlotOutlined,
ControlFilled,
ControlOutlined,
DashboardFilled,
DashboardOutlined,
FileTextFilled,
FileTextOutlined,
TrophyFilled,
TrophyOutlined
} from '@ant-design/icons'
import { logger } from '../../utils'
const { Step } = Steps
export enum AIProcessTabPanes {
INFO = '1',
DATA = '2',
MODEL = '3',
TRAIN = '4',
PREDICT = '5'
}
const INVISIBLE_PANES: AIProcessTabPanes[] = []
interface IProps {
...
invisiblePanes?: AIProcessTabPanes[]
...
}
const AIProcessTabs = (props: IProps): JSX.Element => {
...
const [sInvisiblePanes, setInvisiblePanes] = useState<AIProcessTabPanes[]>(INVISIBLE_PANES)
...
useEffect(() => {
props.invisiblePanes && setInvisiblePanes(props.invisiblePanes)
}, [props.invisiblePanes])
...
const getStepProps = (disabled: boolean, iconEnabled: JSX.Element, iconDisabled: JSX.Element): object => {
if (disabled) {
return { disabled, icon: iconDisabled }
} else {
return { icon: iconEnabled }
}
}
return (
...
<Steps current={sCurrent} onChange={handleChange}>
<Step title='问题' description='要解决的问题背景'
{...getStepProps(sInvisiblePanes?.includes(AIProcessTabPanes.INFO), <FileTextFilled/>, <FileTextOutlined/>)}
/>
<Step title='数据' description='加载和准备所需数据'
{...getStepProps(sInvisiblePanes?.includes(AIProcessTabPanes.DATA), <BoxPlotFilled/>, <BoxPlotOutlined/>)}
/>
...
</Steps>
...
### Sticky 的使用
内容页可能会比较长,有时候会需要向下滚屏。下面的代码展示了使用 Sticky 包裹标题和 Steps,以使 AIProcessTabs 保持在页面顶部。

import React, { useEffect, useState } from 'react'
...
import { Sticky, StickyContainer } from 'react-sticky'
...
const AIProcessTabs = (props: IProps): JSX.Element => {
...
return (
<StickyContainer>
<Sticky>{
({ style }) => {
const _style = { zIndex: 1, backgroundColor: 'white', ...style }
return (<div style={_style}>
<h1>{props.title}</h1>
<Steps current={sCurrent} onChange={handleChange}>
...
</Steps>
</div>)
}
}
</Sticky>
<Tabs activeKey={(sCurrent + 1).toString()} tabBarStyle={{ height: 0 }}>
{props.children}
</Tabs>
</StickyContainer>
)
}
...
## MarkdownWidget
在 AIProcessTabs 中,问题描述部分会有较多的富文本内容,为了将内容和代码分开,增加了 Markdown 展示组件 MarkdownWidget,`/src/components/common/MarkdownWidget.tsx`。
import React, { useEffect, useState } from 'react'
import { message } from 'antd'
import ReactMarkdown from 'react-markdown'
import MathJax from '@matejmazur/react-mathjax'
import RemarkMathPlugin from 'remark-math'
import { fetchResource, logger } from '../../utils'
const DEFAULT_INFO = 'Please set props url or source'
const loadMD = async (url: string): Promise<string> => {
const buffer = await fetchResource(url, false)
return buffer.toString()
}
const math = (p: {value: string}): JSX.Element => {
return <MathJax.Node>{p.value}</MathJax.Node>
}
const inlineMath = (p: {value: string}): JSX.Element => {
return <MathJax.Node inline>{p.value}</MathJax.Node>
}
const renderers = {
math, inlineMath
}
interface IProps {
source?: string
url?: string
imgPathPrefix?: string
}
const MarkdownWidget = (props: IProps): JSX.Element => {
const [sSource, setSource] = useState<string>(DEFAULT_INFO)
useEffect(() => {
if (!props.url) {
return
}
logger('Load MD from url: ', props.url)
// Fetch and load MD content
loadMD(props.url).then(
(src) => {
const prefix = props.imgPathPrefix ?? '/docs'
const _src = src.replace(/.\/images/g, `${prefix}/images`)
setSource(_src)
}, (e) => {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
message.error(e.message)
})
}, [props.url])
useEffect(() => {
props.source && setSource(props.source)
}, [props.source])
return (
<MathJax.Context>
<ReactMarkdown source={sSource} escapeHtml={true} plugins={[RemarkMathPlugin]} renderers={renderers}/>
</MathJax.Context>
)
}
export default MarkdownWidget
* source 属性,直接渲染 source 属性中的 MD 文本串。
import React, { useEffect, useState } from 'react'
import ReactMarkdown from 'react-markdown'
...
const MarkdownWidget = (props: IProps): JSX.Element => {
const [sSource, setSource] = useState<string>(DEFAULT_INFO)
...
useEffect(() => {
props.source && setSource(props.source)
}, [props.source])
...
return (
<MathJax.Context>
<ReactMarkdown source={sSource} escapeHtml={true} .../>
</MathJax.Context>
)
}
* url 属性,下载网络 MD 文件进行渲染,会覆盖 source 属性。如果无法加载 url 资源,返回错误消息。此处用到的 `fetchResource` 函数,起到的作用就是从 URL 获取相应的文件,稍后再介绍。
const loadMD = async (url: string): Promise<string> => {
const buffer = await fetchResource(url, false)
return buffer.toString()
}
...
useEffect(() => {
if (!props.url) {
return
}
logger('Load MD from url: ', props.url)
// Fetch and load MD content
loadMD(props.url).then(
(src) => {
...
setSource(_src)
}, (e) => {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
message.error(e.message)
})
}, [props.url])
* 对嵌入图片链接进行处理,使之能够在 React-tfjs-camp 中正常加载,并且能够在 github 上正常显示。将文档中的图片链接相对地址 `../images`,映射成 Web APP 中的绝对地址 `/docs/images`。
loadMD(props.url).then(
(src) => {
const prefix = props.imgPathPrefix ?? '/docs'
const _src = src.replace(/.\/images/g, `${prefix}/images`)
setSource(_src)
}, (e) => {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
message.error(e.message)
})
* 支持使用 Latex 描述公式。`react-markdown` 支持使用 Latex 插件,以显示 Latex 公式。不过当前这个实现很老旧了,在页面切换时,有时会显示不出来。受实现所限,仅能支持 `$$` 形式的 Latex, 例如:`$$ relu(x) = max(0, x) $$`。如果以后找到更好的实现,再替换一下。
import React, { useEffect, useState } from 'react'
import ReactMarkdown from 'react-markdown'
import MathJax from '@matejmazur/react-mathjax'
import RemarkMathPlugin from 'remark-math'
...
const math = (p: {value: string}): JSX.Element => {
return <MathJax.Node>{p.value}</MathJax.Node>
}
const inlineMath = (p: {value: string}): JSX.Element => {
return <MathJax.Node inline>{p.value}</MathJax.Node>
}
const renderers = {
math, inlineMath
}
...
const MarkdownWidget = (props: IProps): JSX.Element => {
...
return (
<MathJax.Context>
<ReactMarkdown source={sSource} escapeHtml={true} plugins={[RemarkMathPlugin]} renderers={renderers}/>
</MathJax.Context>
)
}
...
<file_sep>/node/src/textGenLstm/dataTextGen.ts
/**
* @license
* Copyright 2018 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '@tensorflow/tfjs'
// All corpora are served locally from the app's preload directory.
const BASE_URL = '/preload/data/lstm-text-generation'

// Maps a corpus key to its download URL and a human-readable display label.
interface IKeyMap {
  [index: string]: {url: string, needle: string}
}

// Available training corpora. The commented URLs are the original remote
// sources the local files were mirrored from.
export const TEXT_DATA_URLS: IKeyMap = {
  nietzsche: {
    url: `${BASE_URL}/nietzsche.txt`,
    // 'https://storage.googleapis.com/tfjs-examples/lstm-text-generation/data/nietzsche.txt',
    needle: 'Nietzsche'
  },
  julesverne: {
    url: `${BASE_URL}/t1.verne.txt`,
    // 'https://storage.googleapis.com/tfjs-examples/lstm-text-generation/data/t1.verne.txt',
    needle: '<NAME>'
  },
  shakespeare: {
    url: `${BASE_URL}/t8.shakespeare.txt`,
    // 'https://storage.googleapis.com/tfjs-examples/lstm-text-generation/data/t8.shakespeare.txt',
    needle: 'Shakespeare'
  },
  'tfjs-code': {
    url: `${BASE_URL}/tf.txt`,
    // 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@0.11.7/dist/tf.js',
    needle: 'TensorFlow.js Code (Compiled, 0.11.7)'
  }
}
/**
 * A class for text data.
 *
 * This class manages the following:
 *
 * - Converting training data (as a string) into one-hot encoded vectors.
 * - Drawing random slices from the training data. This is useful for training
 *   models and obtaining the seed text for model-based text generation.
 */
export class TextData {
  dataIdentifier_: string
  textString_: string
  textLen_: number
  sampleLen_: number
  sampleStep_: number
  charSetSize_: number
  charSet_: string[]
  examplePosition_ = 0
  exampleBeginIndices_: number[] = []
  indices_: Uint16Array
  // PERF: O(1) character-to-index lookup. The previous implementation used
  // Array#includes / Array#indexOf per character, making construction and
  // textToIndices O(textLen * charSetSize) on multi-megabyte corpora.
  private readonly charToIndex_: Map<string, number>

  /**
   * Constructor of TextData.
   *
   * @param {string} dataIdentifier An identifier for this instance of TextData.
   * @param {string} textString The training text data.
   * @param {number} sampleLen Length of each training example, i.e., the input
   *   sequence length expected by the LSTM model.
   * @param {number} sampleStep How many characters to skip when going from one
   *   example of the training data (in `textString`) to the next.
   */
  constructor (dataIdentifier: string, textString: string, sampleLen: number, sampleStep: number) {
    tf.util.assert(
      sampleLen > 0,
      () => `Expected sampleLen to be a positive integer, but got ${sampleLen}`)
    tf.util.assert(
      sampleStep > 0,
      () => `Expected sampleStep to be a positive integer, but got ${sampleStep}`)
    if (!dataIdentifier) {
      throw new Error('Model identifier is not provided.')
    }
    this.dataIdentifier_ = dataIdentifier
    this.textString_ = textString
    this.textLen_ = textString.length
    this.sampleLen_ = sampleLen
    this.sampleStep_ = sampleStep
    // Build the character set in a single pass. Iterate by UTF-16 code unit
    // (textString[i]) to keep the original indexing semantics; insertion
    // order matches the first occurrence of each character, as before.
    const charToIndex = new Map<string, number>()
    this.charSet_ = []
    for (let i = 0; i < this.textLen_; ++i) {
      const ch = this.textString_[i]
      if (!charToIndex.has(ch)) {
        charToIndex.set(ch, this.charSet_.length)
        this.charSet_.push(ch)
      }
    }
    this.charSetSize_ = this.charSet_.length
    this.charToIndex_ = charToIndex
    // Pre-convert the entire text to indices once; reused by nextDataEpoch.
    this.indices_ = new Uint16Array(this.textToIndices(this.textString_))
  }

  /**
   * Get data identifier.
   *
   * @returns {string} The data identifier.
   */
  dataIdentifier = (): string => {
    return this.dataIdentifier_
  }

  /**
   * Get length of the training text data.
   *
   * @returns {number} Length of training text data.
   */
  textLen = (): number => {
    return this.textLen_
  }

  /**
   * Get the length of each training example.
   */
  sampleLen = (): number => {
    return this.sampleLen_
  }

  /**
   * Get the size of the character set.
   *
   * @returns {number} Size of the character set, i.e., how many unique
   *   characters there are in the training text data.
   */
  charSetSize = (): number => {
    return this.charSetSize_
  }

  /**
   * Generate the next epoch of data for training models.
   *
   * @param {number} numExamples Number examples to generate. When null or
   *   undefined, one example per begin index is generated.
   * @returns {[tf.Tensor, tf.Tensor]} `xs` and `ys` Tensors.
   *   `xs` has the shape of `[numExamples, this.sampleLen, this.charSetSize]`.
   *   `ys` has the shape of `[numExamples, this.charSetSize]`.
   */
  nextDataEpoch = (numExamples: number): tf.Tensor[] => {
    this.generateExampleBeginIndices_()
    if (numExamples == null) {
      numExamples = this.exampleBeginIndices_.length
    }
    const xsBuffer = new tf.TensorBuffer([numExamples, this.sampleLen_, this.charSetSize_], 'float32')
    const ysBuffer = new tf.TensorBuffer([numExamples, this.charSetSize_], 'float32')
    for (let i = 0; i < numExamples; ++i) {
      // Wrap around the shuffled begin indices when more examples are
      // requested than available.
      const beginIndex = this.exampleBeginIndices_[this.examplePosition_ % this.exampleBeginIndices_.length]
      for (let j = 0; j < this.sampleLen_; ++j) {
        // One-hot: set a single 1 at the character's index.
        xsBuffer.set(1, i, j, this.indices_[beginIndex + j])
      }
      // The target is the character immediately following the sample window.
      ysBuffer.set(1, i, this.indices_[beginIndex + this.sampleLen_])
      this.examplePosition_++
    }
    return [xsBuffer.toTensor(), ysBuffer.toTensor()]
  }

  /**
   * Get the unique character at given index from the character set.
   *
   * @param {number} index
   * @returns {string} The unique character at `index` of the character set.
   */
  getFromCharSet = (index: number): string => {
    return this.charSet_[index]
  }

  /**
   * Convert text string to integer indices.
   *
   * @param {string} text Input text.
   * @returns {number[]} Indices of the characters of `text`.
   */
  textToIndices = (text: string): number[] => {
    const indices = []
    for (let i = 0; i < text.length; ++i) {
      // `?? -1` mirrors Array#indexOf's "not found" result for characters
      // outside the training character set.
      indices.push(this.charToIndex_.get(text[i]) ?? -1)
    }
    return indices
  }

  /**
   * Get a random slice of text data.
   *
   * @returns {[string, number[]]} The string and index representation of the
   *   same slice.
   */
  getRandomSlice = (): [string, number[]] => {
    const startIndex =
      Math.round(Math.random() * (this.textLen_ - this.sampleLen_ - 1))
    const textSlice = this.slice_(startIndex, startIndex + this.sampleLen_)
    return [textSlice, this.textToIndices(textSlice)]
  }

  /**
   * Get a slice of the training text data.
   *
   * @param {number} startIndex
   * @param {number} endIndex
   * @returns {string} The result of the slicing.
   */
  slice_ (startIndex: number, endIndex: number): string {
    return this.textString_.slice(startIndex, endIndex)
  }

  /**
   * Generate the example-begin indices; shuffle them randomly.
   */
  generateExampleBeginIndices_ = (): void => {
    // Prepare beginning indices of examples.
    this.exampleBeginIndices_ = []
    for (let i = 0; i < this.textLen_ - this.sampleLen_ - 1;
      i += this.sampleStep_) {
      this.exampleBeginIndices_.push(i)
    }
    // Randomly shuffle the beginning indices.
    tf.util.shuffle(this.exampleBeginIndices_)
    this.examplePosition_ = 0
  }
}
<file_sep>/node/src/textGenLstm/TextGenerator.ts
/**
* @license
* Copyright 2018 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
/**
* TensorFlow.js Example: LSTM Text Generation.
*
* Inspiration comes from:
*
* -
* https://github.com/keras-team/keras/blob/master/examples/lstm_text_generation.py
* - <NAME>. "The Unreasonable Effectiveness of Recurrent Neural
* Networks" http://karpathy.github.io/2015/05/21/rnn-effectiveness/
*/
import * as tf from '@tensorflow/tfjs'
import { TextData } from './dataTextGen'
import { logger } from '../utils'
/**
 * Draw one character index from a probability distribution.
 *
 * @param probs Probability values over the character set (output of softmax).
 * @param temperature Sampling temperature; higher values flatten the
 *   distribution (more random output). Math.max guards against division by
 *   zero for temperature <= 0.
 * @returns The sampled character index.
 */
const sample = (probs: tf.Tensor, temperature: number): number => {
  // tf.tidy disposes the intermediate tensors created inside this closure.
  return tf.tidy(() => {
    const logits = tf.div(tf.log(probs), Math.max(temperature, 1e-6))
    const isNormalized = false
    // `logits` is for a multinomial distribution, scaled by the temperature.
    // We randomly draw a sample from the distribution.
    return tf.multinomial(logits as tf.Tensor1D, 1, undefined, isNormalized).dataSync()[0]
  })
}
/**
 * Class that manages LSTM-based text generation.
 *
 * This class manages the following:
 *
 * - Creating and training a LSTM model, written with the tf.layers API, to
 *   predict the next character given a sequence of input characters.
 * - Generating random text using the LSTM model.
 */
export class LSTMTextGenerator {
  textData_: TextData
  charSetSize_: number
  sampleLen_: number
  textLen_: number
  model: tf.LayersModel | undefined
  // Cooperative cancellation flag, polled between batches/epochs.
  stopFlag = false

  /**
   * Constructor of LSTMTextGenerator.
   *
   * @param {TextData} textData An instance of `TextData`.
   */
  constructor (textData: TextData) {
    this.textData_ = textData
    this.charSetSize_ = textData.charSetSize()
    this.sampleLen_ = textData.sampleLen()
    this.textLen_ = textData.textLen()
  }

  /**
   * Create LSTM model from scratch.
   *
   * Stacks one LSTM layer per entry of `lstmLayerSizes`; all but the last
   * return full sequences so they can feed the next LSTM layer. A softmax
   * dense head outputs one probability per character in the character set.
   *
   * @param {number | number[]} lstmLayerSizes Sizes of the LSTM layers, as a
   *   number or a non-empty array of numbers.
   */
  createModel = (lstmLayerSizes: number | number[]): void => {
    if (!Array.isArray(lstmLayerSizes)) {
      lstmLayerSizes = [lstmLayerSizes]
    }
    const model = tf.sequential()
    for (let i = 0; i < lstmLayerSizes.length; ++i) {
      const lstmLayerSize = lstmLayerSizes[i]
      model.add(tf.layers.lstm({
        units: lstmLayerSize,
        returnSequences: i < lstmLayerSizes.length - 1,
        // Only the first layer declares the input shape.
        inputShape: i === 0 ? [this.sampleLen_, this.charSetSize_] : undefined
      }))
    }
    model.add(tf.layers.dense({ units: this.charSetSize_, activation: 'softmax' }))
    this.model = model
  }

  /**
   * Compile model for training.
   *
   * @param {number} learningRate The learning rate to use during training.
   */
  compileModel = (learningRate: number): void => {
    if (!this.model) {
      return
    }
    // logger(`Compiled model with learning rate ${learningRate}`)
    const optimizer = tf.train.rmsprop(learningRate)
    this.model.compile({ optimizer: optimizer, loss: 'categoricalCrossentropy' })
    // model.summary()
  }

  // Internal training callback: honors stopFlag and yields to the event loop
  // once per batch so the UI stays responsive during training.
  myCallback = {
    onBatchBegin: async (batch: number) => {
      if (!this.model) {
        return
      }
      if (this.stopFlag) {
        logger('Checked stop', this.stopFlag)
        this.model.stopTraining = this.stopFlag
      }
      await tf.nextFrame()
    }
  }

  /**
   * Train the model for `numEpochs` epochs, drawing a fresh set of examples
   * from the text data before each epoch. Tensors are disposed after each
   * epoch to keep memory bounded. Training stops early when stopTrain() is
   * called.
   */
  fitModel = async (numEpochs: number, examplesPerEpoch: number, batchSize: number, validationSplit: number,
    callbacks: any[]): Promise<void> => {
    if (!this.model || !this.textData_) {
      return
    }
    const _callbacks = [this.myCallback, ...callbacks]
    this.stopFlag = false
    for (let i = 0; i < numEpochs; ++i) {
      if (this.stopFlag) {
        return
      }
      const [xs, ys] = tf.tidy(() => {
        return this.textData_.nextDataEpoch(examplesPerEpoch)
      })
      await this.model.fit(xs, ys, {
        epochs: 1,
        batchSize: batchSize,
        validationSplit,
        callbacks: _callbacks
      })
      xs.dispose()
      ys.dispose()
    }
  }

  /**
   * Generate text using the LSTM model.
   *
   * @param {number[]} sentenceIndices Seed sentence, represented as the
   *   indices of the constituent characters.
   * @param {number} length Length of the text to generate, in number of
   *   characters.
   * @param {number} temperature Temperature parameter. Must be a number > 0.
   * @returns {string} The generated text; undefined when no model is loaded.
   */
  generateText = async (sentenceIndices: number[], length: number, temperature: number): Promise<string | void> => {
    if (!this.model) {
      return
    }
    const callbacks = (char: string): void => {
      // ignore
      logger('genCallback', char)
    }
    const sampleLen = this.model.inputs[0].shape[1] as number
    const charSetSize = this.model.inputs[0].shape[2] as number
    // Avoid overwriting the original input.
    sentenceIndices = sentenceIndices.slice()
    let generated = ''
    while (generated.length < length) {
      // Encode the current input sequence as a one-hot Tensor.
      const inputBuffer = new tf.TensorBuffer([1, sampleLen, charSetSize], 'float32')
      // Make the one-hot encoding of the seeding sentence.
      for (let i = 0; i < sampleLen; ++i) {
        inputBuffer.set(1, 0, i, sentenceIndices[i])
      }
      const input = inputBuffer.toTensor()
      // Call model.predict() to get the probability values of the next
      // character.
      const output = this.model.predict(input) as tf.Tensor
      // Sample randomly based on the probability values.
      // BUGFIX: tf.squeeze() allocates a tensor OUTSIDE the tf.tidy() used
      // inside sample(), so it leaked one tensor per generated character.
      // Keep a handle and dispose it explicitly.
      const squeezed = tf.squeeze(output)
      const winnerIndex = sample(squeezed, temperature)
      squeezed.dispose()
      const winnerChar = this.textData_.getFromCharSet(winnerIndex)
      if (callbacks != null) {
        await callbacks(winnerChar)
      }
      generated += winnerChar
      // Slide the window: drop the oldest index, append the sampled one.
      sentenceIndices = sentenceIndices.slice(1)
      sentenceIndices.push(winnerIndex)
      // Memory cleanups.
      input.dispose()
      output.dispose()
    }
    return generated
  }

  // Request cooperative cancellation of an in-flight fitModel() loop.
  stopTrain = (): void => {
    this.stopFlag = true
  }

  // Load a serialized layers model from the given URL and make it current.
  loadModelFromFile = async (url: string): Promise<tf.LayersModel> => {
    this.model = await tf.loadLayersModel(url)
    return this.model
  }
}
/**
 * A subclass of LSTMTextGenerator that supports model saving and loading.
 *
 * The model is saved to and loaded from browser's IndexedDB.
 */
export class SavableLSTMTextGenerator extends LSTMTextGenerator {
  modelIdentifier_: string
  MODEL_SAVE_PATH_PREFIX_: string
  modelSavePath_: string

  /**
   * Constructor of SavableLSTMTextGenerator.
   *
   * @param {TextData} textData An instance of `TextData`.
   */
  constructor (textData: TextData) {
    super(textData)
    this.modelIdentifier_ = textData.dataIdentifier()
    this.MODEL_SAVE_PATH_PREFIX_ = 'indexeddb://lstm-text-generation'
    this.modelSavePath_ = `${this.MODEL_SAVE_PATH_PREFIX_}/${this.modelIdentifier_}`
  }

  /**
   * Get model identifier.
   *
   * @returns {string} The model identifier.
   */
  modelIdentifier = (): string => {
    return this.modelIdentifier_
  }

  /**
   * Load the LSTM model previously saved under this identifier, if present.
   *
   * @param {number} lstmLayerSizes Unused; retained for signature
   *   compatibility with existing callers.
   * @throws Error when no saved model exists at the save path.
   */
  loadModel = async (lstmLayerSizes: number): Promise<void> => {
    const modelsInfo = await tf.io.listModels()
    if (this.modelSavePath_ in modelsInfo) {
      logger('Loading existing model...')
      this.model = await tf.loadLayersModel(this.modelSavePath_)
      logger(`Loaded model from ${this.modelSavePath_}`)
    } else {
      // BUGFIX: the previous message claimed "Creating model from scratch",
      // but this branch only throws — nothing is created here.
      throw new Error(
        `Cannot find model at ${this.modelSavePath_}. ` +
        'Create and train the model first.')
    }
  }

  /**
   * Save the model in IndexedDB.
   *
   * @returns ModelInfo from the saving, if the saving succeeds.
   */
  saveModel = async (): Promise<tf.io.SaveResult> => {
    if (this.model == null) {
      throw new Error('Cannot save model before creating model.')
    } else {
      return this.model.save(this.modelSavePath_)
    }
  }

  /**
   * Remove the locally saved model from IndexedDB.
   */
  removeModel = async (): Promise<any> => {
    if (await this.checkStoredModelStatus() == null) {
      throw new Error(
        'Cannot remove locally saved model because it does not exist.')
    }
    return tf.io.removeModel(this.modelSavePath_)
  }

  /**
   * Check the status of locally saved model.
   *
   * @returns If the locally saved model exists, the model info as a JSON
   *   object. Else, `undefined`.
   */
  checkStoredModelStatus = async (): Promise<any> => {
    const modelsInfo = await tf.io.listModels()
    return modelsInfo[this.modelSavePath_]
  }

  /**
   * Get a representation of the sizes of the LSTM layers in the model.
   *
   * BUGFIX: the return type was declared `void` even though the method
   * returns the layer sizes; corrected to `number | number[]`.
   *
   * @returns {number | number[]} The sizes (i.e., number of units) of the
   *   LSTM layers that the model contains. If there is only one LSTM layer, a
   *   single number is returned; else, an Array of numbers is returned.
   */
  lstmLayerSizes = (): number | number[] => {
    if (this.model == null) {
      throw new Error('Create model first.')
    }
    // All layers except the final dense softmax head are LSTM layers.
    const numLSTMLayers = this.model.layers.length - 1
    const layerSizes = []
    for (let i = 0; i < numLSTMLayers; ++i) {
      const layer = this.model.layers[i] as any
      layerSizes.push(layer.units)
    }
    return layerSizes.length === 1 ? layerSizes[0] : layerSizes
  }
}
<file_sep>/src/components/mnist/MnistDatasetGz.ts
/**
* @license
* Copyright 2018 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '@tensorflow/tfjs'
import { fetchResource, logger } from '../../utils'
import { IMnistDataset, IMAGE_H, IMAGE_W, IMAGE_SIZE, NUM_CLASSES } from './mnistConsts'
// Number of examples sliced from the train / test files.
// NOTE(review): full MNIST ships 60000/10000 examples; these reduced counts
// presumably limit browser memory usage — confirm before raising them.
const NUM_TRAIN_ELEMENTS = 35000
const NUM_TEST_ELEMENTS = 7000
// IDX file layout: image files carry a 16-byte header, label files 8 bytes.
const IMAGE_HEADER_BYTES = 16
const LABEL_HEADER_BYTES = 8
// Each label record is a single byte (the class id).
const LABEL_RECORD_BYTE = 1
/**
* A class that fetches the sprited MNIST dataset and returns shuffled batches.
*
* NOTE: This will get much easier. For now, we do data fetching and
* manipulation manually.
*/
// Decode the fixed-size IDX header: a sequence of 32-bit big-endian
// integers packed back to back at the start of the buffer.
const loadHeaderValues = (buffer: Buffer, headerLength: number): number[] => {
  const values: number[] = []
  for (let offset = 0; offset < headerLength; offset += 4) {
    values.push(buffer.readUInt32BE(offset))
  }
  return values
}
// Download a gzipped IDX image file and decode every image record into a
// Float32Array of normalized pixel values.
const loadImages = async (url: string): Promise<Float32Array[]> => {
  const buffer = await fetchResource(url, true)
  // Decode and skip the fixed-size header.
  const headerValues = loadHeaderValues(buffer, IMAGE_HEADER_BYTES)
  logger('image header', headerValues)
  const images: Float32Array[] = []
  let offset = IMAGE_HEADER_BYTES
  while (offset < buffer.byteLength) {
    const pixels = new Float32Array(IMAGE_SIZE)
    for (let px = 0; px < IMAGE_SIZE; px++) {
      // Normalize the pixel values into the 0-1 interval, from
      // the original 0-255 interval.
      pixels[px] = buffer.readUInt8(offset++) / 255.0
    }
    images.push(pixels)
  }
  logger('Load images :', `${images.length.toString()} / ${headerValues[1].toString()}`)
  return images
}
// Download a gzipped IDX label file and decode every one-byte label record
// into its own Uint8Array.
const loadLabels = async (url: string): Promise<Uint8Array[]> => {
  const buffer = await fetchResource(url, true)
  // Decode and skip the fixed-size header.
  const headerValues = loadHeaderValues(buffer, LABEL_HEADER_BYTES)
  logger('label header', headerValues)
  const labels: Uint8Array[] = []
  let offset = LABEL_HEADER_BYTES
  while (offset < buffer.byteLength) {
    const record = new Uint8Array(LABEL_RECORD_BYTE)
    for (let b = 0; b < LABEL_RECORD_BYTE; b++) {
      record[b] = buffer.readUInt8(offset++)
    }
    labels.push(record)
  }
  logger('Load labels :', `${labels.length.toString()} / ${headerValues[1].toString()}`)
  return labels
}
/**
 * MNIST-style dataset backed by gzipped IDX files served from /preload/data.
 * Loads train/test images and labels into typed arrays, then hands out either
 * full tensors (getTrainData/getTestData) or shuffled batches
 * (nextTrainBatch/nextTestBatch).
 */
export class MnistDatasetGz implements IMnistDataset {
  // Dataset name; doubles as the sub-directory under /preload/data.
  source: string
  baseUrl: string
  trainImagesFileUrl: string
  trainLabelsFileUrl: string
  testImagesFileUrl: string
  testLabelsFileUrl: string
  // Populated by loadData(); `!` asserts definite assignment before first use.
  trainImages!: Float32Array[]
  testImages!: Float32Array[]
  trainLabels!: Uint8Array[]
  testLabels!: Uint8Array[]
  // Shuffled index orders consumed by the batch iterators below.
  trainIndices!: Uint32Array
  testIndices!: Uint32Array
  // Cursors into the shuffled index arrays (wrap around modulo length).
  shuffledTrainIndex = 0
  shuffledTestIndex = 0

  constructor (source: string) {
    this.source = source
    this.baseUrl = `/preload/data/${source}`
    // Standard IDX file names as distributed with the MNIST dataset.
    this.trainImagesFileUrl = `${this.baseUrl}/train-images-idx3-ubyte.gz`
    this.trainLabelsFileUrl = `${this.baseUrl}/train-labels-idx1-ubyte.gz`
    this.testImagesFileUrl = `${this.baseUrl}/t10k-images-idx3-ubyte.gz`
    this.testLabelsFileUrl = `${this.baseUrl}/t10k-labels-idx1-ubyte.gz`
  }

  /** Loads training and test data. */
  loadData = async (): Promise<void> => {
    // Create shuffled indices into the train/test set for when we select a
    // random dataset element for training / validation.
    this.trainIndices = tf.util.createShuffledIndices(NUM_TRAIN_ELEMENTS)
    this.testIndices = tf.util.createShuffledIndices(NUM_TEST_ELEMENTS)
    // Slice the images and labels into train and test sets.
    this.trainImages = await loadImages(this.trainImagesFileUrl)
    this.trainImages = this.trainImages.slice(0, NUM_TRAIN_ELEMENTS)
    this.trainLabels = await loadLabels(this.trainLabelsFileUrl)
    this.trainLabels = this.trainLabels.slice(0, NUM_TRAIN_ELEMENTS)
    this.testImages = await loadImages(this.testImagesFileUrl)
    this.testImages = this.testImages.slice(0, NUM_TEST_ELEMENTS)
    this.testLabels = await loadLabels(this.testLabelsFileUrl)
    this.testLabels = this.testLabels.slice(0, NUM_TEST_ELEMENTS)
  }

  // Returns { xs, ys } tensors covering up to numExamples training examples.
  getTrainData = (numExamples = NUM_TRAIN_ELEMENTS): tf.TensorContainerObject => {
    return this.getData_(this.trainImages, this.trainLabels, numExamples)
  }

  // Returns { xs, ys } tensors covering up to numExamples test examples.
  getTestData = (numExamples = NUM_TEST_ELEMENTS): tf.TensorContainerObject => {
    return this.getData_(this.testImages, this.testLabels, numExamples)
  }

  // Pack per-example typed arrays into one xs tensor ([n, H, W, 1]) and a
  // one-hot ys tensor ([n, NUM_CLASSES]); optionally slice to numExamples.
  getData_ = (imageSet: Float32Array[], labelSet: Uint8Array[], numExamples?: number): tf.TensorContainerObject => {
    const size = imageSet.length
    // Only create one big array to hold batch of images.
    const imagesShape: [number, number, number, number] = [size, IMAGE_H, IMAGE_W, 1]
    const images = new Float32Array(tf.util.sizeFromShape(imagesShape))
    const labels = new Int32Array(tf.util.sizeFromShape([size, 1]))
    let imageOffset = 0
    let labelOffset = 0
    for (let i = 0; i < size; ++i) {
      images.set(imageSet[i], imageOffset)
      labels.set(labelSet[i], labelOffset)
      imageOffset += IMAGE_SIZE
      labelOffset += 1
    }
    let xs = tf.tensor4d(images, imagesShape)
    let ys = tf.oneHot(tf.tensor1d(labels, 'int32'), NUM_CLASSES)
    if (numExamples != null) {
      xs = xs.slice([0, 0, 0, 0], [numExamples, IMAGE_H, IMAGE_W, 1])
      ys = ys.slice([0, 0], [numExamples, NUM_CLASSES])
    }
    return { xs, ys }
  }

  // Next shuffled training batch; advances the train cursor with wrap-around.
  nextTrainBatch = (batchSize: number): tf.TensorContainerObject => {
    return this.nextBatch(batchSize, [this.trainImages, this.trainLabels],
      () => {
        this.shuffledTrainIndex = (this.shuffledTrainIndex + 1) % this.trainIndices.length
        return this.trainIndices[this.shuffledTrainIndex]
      })
  }

  // Next shuffled test batch; advances the test cursor with wrap-around.
  nextTestBatch = (batchSize: number): tf.TensorContainerObject => {
    return this.nextBatch(batchSize, [this.testImages, this.testLabels],
      () => {
        this.shuffledTestIndex = (this.shuffledTestIndex + 1) % this.testIndices.length
        return this.testIndices[this.shuffledTestIndex]
      })
  }

  // Draw one batch of examples; `index` supplies the (shuffled) position of
  // each example to include.
  nextBatch = (batchSize: number, data: [Float32Array[], Uint8Array[]], index: Function): tf.TensorContainerObject => {
    const batchImagesArray = new Float32Array(batchSize * IMAGE_SIZE)
    const batchLabelsArray = new Uint8Array(batchSize * NUM_CLASSES)
    for (let i = 0; i < batchSize; i++) {
      const idx = index() as number
      const image = data[0].slice(idx, idx + 1)[0]
      batchImagesArray.set(image, i * IMAGE_SIZE)
      // NOTE(review): `label` is a one-element Uint8Array, so tf.oneHot
      // receives [[labelByte]] — appears to rely on implicit nesting
      // coercion; confirm against the tf.oneHot API.
      const label = data[1].slice(idx, idx + 1)[0]
      const ys = Array.from(tf.oneHot([label], NUM_CLASSES).dataSync())
      batchLabelsArray.set(ys, i * NUM_CLASSES)
    }
    const xs = tf.tensor4d(batchImagesArray, [batchSize, IMAGE_H, IMAGE_W, 1])
    const ys = tf.tensor2d(batchLabelsArray, [batchSize, NUM_CLASSES])
    return { xs, ys }
  }
}
<file_sep>/public/docs/ai/sentiment-imdb.md
# 语义分析 Sentiment RNN
Seq2Seq模型是输出的长度不确定时采用的模型,这种情况一般是在机器翻译的任务中出现,将一句中文翻译成英文,那么这句英文的长度有可能会比中文短,也有可能会比中文长,所以输出的长度就不确定了。如下图所示,输入的中文长度为4,输出的英文长度为2。
在网络结构中,输入一个中文序列,然后输出它对应的英文翻译,输出部分会根据前面的结果预测后面的输出。根据上面的例子,也就是先输出“machine”,将"machine"作为下一次的输入,接着输出"learning",这样就能输出任意长的序列。
机器翻译、人机对话、聊天机器人等等,这些应用在当今社会中都或多或少地运用到了我们这里所说的Seq2Seq。
举个简单的例子,当我们使用机器翻译时:输入(Hello) --->输出(你好)。再比如在人机对话中,我们问机器:“你是谁?”,机器会返回答案“我是某某某”。如下图所示为一个简单的邮件对话的场景,发送方问:“你明天是否有空”;接收方回答:“有空,怎么了?”。
## Seq2Seq结构
seq2seq属于encoder-decoder结构的一种,这里看看常见的encoder-decoder结构,基本思想就是利用两个RNN,一个RNN作为encoder,另一个RNN作为decoder。
encoder负责将输入序列压缩成指定长度的向量,这个向量就可以看成是这个序列的语义,这个过程称为编码,获取语义向量最简单的方式就是直接将最后一个输入的隐状态作为语义向量C。也可以对最后一个隐含状态做一个变换得到语义向量,还可以将输入序列的所有隐含状态做一个变换得到语义变量。
decoder则负责根据语义向量生成指定的序列,这个过程也称为解码,如下图,最简单的方式是将encoder得到的语义变量作为初始状态输入到decoder的RNN中,得到输出序列。可以看到上一时刻的输出会作为当前时刻的输入,而且其中语义向量C只作为初始状态参与运算,后面的运算都与语义向量C无关。
decoder处理方式还有另外一种,就是语义向量C参与了序列所有时刻的运算,如下图,上一时刻的输出仍然作为当前时刻的输入,但语义向量C会参与所有时刻的运算。
## 如何训练Seq2Seq模型
RNN是可以学习概率分布,然后进行预测,比如我们输入t时刻的数据后,预测t+1时刻的数据,比较常见的是字符预测例子或者时间序列预测。为了得到概率分布,一般会在RNN的输出层使用softmax激活函数,就可以得到每个分类的概率。
Softmax 在机器学习和深度学习中有着非常广泛的应用。尤其在处理多分类(C > 2)问题,分类器最后的输出单元需要Softmax 函数进行数值处理。
<file_sep>/docker_run.sh
# Run the react-tfjs-camp container interactively, removing it on exit (--rm).
# Host port 8000 is mapped to the app's port 3000 inside the container.
# Model and data directories are bind-mounted so large assets stay on the host.
# NOTE(review): the image name ends in "capm" — looks like a typo for "camp";
# confirm against the actual published image tag before changing it.
docker run -it --rm --name my_rtcamp -p 8000:3000 \
    -v $(pwd)/public/model:/opt/app/public/model \
    -v $(pwd)/public/data:/opt/app/public/data \
    iasc/react-tfjs-capm
<file_sep>/public/docs/dev/start-from-scratch.md
# 从零开始 Start from Scratch
以下操作均在 MAC OS 上验证。
参考文档尽量都看官方最新的。其他地方的文档和教程,往往会随着时间的流逝而陈旧过时,本文亦如是。
## 环境安装
### 安装 Node 环境
使用 nvm 便于管理 node.js 的版本更替。参考链接 [https://github.com/nvm-sh/nvm](https://github.com/nvm-sh/nvm)。
写此文档时 Node.js 最新的版本是 13.12。参考链接 [https://nodejs.org/](https://nodejs.org/)
在命令行中输入以下命令。其中 `$` 为命令行提示符,请不要输入。
$ curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash
$ nvm install 13
$ nvm list
-> v13.12.0
default -> v13.12.0
node -> stable (-> v13.12.0) (default)
stable -> 13.12 (-> v13.12.0) (default)
iojs -> N/A (default)
$ node --version
v13.12.0
### 安装 yarn 工具
yarn 是一个很方便的包管理工具,可以替代 npm。如果您需要快速了解 npm ,可以参考链接 [安装Node.js和npm](https://www.liaoxuefeng.com/wiki/1022910821149312/1023025597810528)
yarn 会缓存每个下载过的包,所以再次使用时无需重复下载。同时利用并行下载以最大化资源利用率,因此安装速度更快。参考链接 [https://classic.yarnpkg.com/en/docs/](https://classic.yarnpkg.com/en/docs/)
$ curl -o- -L https://yarnpkg.com/install.sh | bash
$ yarn --version
1.22.4
常用 yarn 命令有:
* `yarn` 安装 package.json 中的包,相当于 `npm install`。
* `yarn add [package]` 安装指定的 npm 包, 相当于 `npm install [package]`。
* `yarn start` 运行 package.json 中的命令脚本, 相当于 `npm run start`。
> 在国内,直接从源头安装 npm 包有时会比较慢,可以使用淘宝的 npm 镜像加速。使用下面的命令设置和查看
$ npm config set registry https://registry.npm.taobao.org
$ npm config list
; cli configs
metrics-registry = "https://registry.npm.taobao.org/"
scope = ""
user-agent = "npm/6.14.4 node/v13.12.0 darwin x64"
; userconfig /Users/chenhao/.npmrc
registry = "https://registry.npm.taobao.org/"
...
## React 和 React Hooks
使用组件构建 Web APP,是当前前端开发的最佳实践之一。从本质上说,就是将你的应用分拆成一个个功能明确的模块,每个模块之间可以通过合适的方式互相组合和联系,形成复杂的前端 Web 应用。
比较流行的组件框架有 Facebook 开源的 React,还有一个是国人尤雨溪开源的 Vue。想了解这两个框架的基本差异的同学,可以阅读一下知乎上的 [React VS Vue:谁会成为2020年的冠军](https://zhuanlan.zhihu.com/p/89416436)
自从 React 诞生后,其创建组件的方式从 ES5 时期声明式的 createClass ,到支持原生 ES6 class 的 OOP 语法,再到发展出 HOC 或 render props 的函数式写法,官方和社区一直在探索更方便合理的 React 组件化之路。随之而来的一些问题是:
* 组件往往变得嵌套过多
* 各种写法的组件随着逻辑的增长,变得难以理解
* 尤其是基于类写法的组件中,this 关键字暧昧模糊,人和机器读起来都比较懵,难以在不同的组件直接复用基于 state 的逻辑
* 人们不满足于只用函数式组件做简单的展示组件,也想把 state 和生命周期等引入其中
Hooks 是 React 16.8 之后推出的新特性,React 团队希望,组件不要变成复杂的容器,最好只是数据流的管道,开发者根据需要,组合管道即可。
这种函数化(Function Program)的编程形式,能够大大降低 React 的学习曲线。
属实讲,挺香的。
关于 React Hooks,已经有了不少中文文章。例如:阮一峰的入门介绍就写得挺好。在后续的内容中,对于一些初级使用,我不会做太多展开,重点会记录在 React-Tfjs-Camp 的实现过程中,遇到的一些典型问题,以及是如何使用合适的方式进行解决的。
参考链接:
* React Hooks 官方链接 [https://reactjs.org/docs/hooks-intro.html](https://reactjs.org/docs/hooks-intro.html)
* 阮一峰的入门介绍 [React Hooks 入门教程](https://www.ruanyifeng.com/blog/2019/09/react-hooks.html)。
* 深入剖析可以读一下 [React Hooks 深入不浅出](https://segmentfault.com/a/1190000017182184)
### 创建 React 应用
下面的内容是使用 React-Scripts 创建一个全新的 React 项目。这些内容记录了如何从零开始,一步一步创建 React-Tfjs-Camp 的主要过程。
参考链接:React-Scripts [https://create-react-app.dev/](https://create-react-app.dev/)
#### 创建一个新的 React 项目
`yarn create react-app` 用于创建 React App,等于 npm 原生命令的 `npx create-react-app`。
`--template typescript` 的参数,表明使用 typescript 作为编程语言。
$ yarn create react-app react-tfjs-new --template typescript
$ cd react-tfjs-new
执行 `yarn start` 之后,就能够通过 [http://localhost:3000](http://localhost:3000) 访问这个新的项目了。
$ yarn start
Compiled successfully!
You can now view react-tfjs-new in the browser.
Local: http://localhost:3000
你还可以尝试一下其它的命令。
$ yarn test
$ yarn build
#### React 项目目录简述
使用 `ls -la` 能够看到项目中生成一些文件。
.
|____.git 使用 Git 的配置信息目录
|____.gitignore 哪些文件不需要被 Git 进行版本管理
|____README.md
|____node_modules 下载和安装的 npm 包。运行 `yarn` 时检查package.json 的信息来安装
|____yarn.lock 使用 `yarn` 安装生成的 npm 包依赖文件。当项目中发生依赖包冲突的时候,可以通过修改和调整它来解决
|____package.json Node.js 项目中最重要的文件,关于依赖包、运行脚本,统统放在这里
|____tsconfig.json 使用 TypeScript 所需的配置文件,用于设置 TypeScript 支持的语法特性
|____public 存放 Web APP 中的静态页面和资源文件
|____src Web APP 源代码
<file_sep>/node/src/sentiment/train.ts
/**
* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '@tensorflow/tfjs-node'
import { ArgumentParser } from 'argparse'
import * as fs from 'fs'
import * as path from 'path'
import { loadData, loadMetadataTemplate } from './dataSentiment'
import { writeEmbeddingMatrixAndLabels } from './embedding'
// import * as shelljs from 'shelljs'
/**
 * Create a model for IMDB sentiment analysis.
 *
 * @param {string} modelType Type of the model to be created. One of:
 *   'multihot' | 'flatten' | 'cnn' | 'simpleRNN' | 'lstm' | 'bidirectionalLSTM'.
 * @param {number} maxLen Maximum sentence length in words; used as the
 *   embedding layer's input length (ignored for 'multihot').
 * @param {number} vocabularySize Input vocabulary size.
 * @param {number} embeddingSize Embedding vector size, used to
 *   configure the embedding layer.
 * @returns An uncompiled instance of `tf.Model`.
 * @throws Error when `modelType` is not one of the supported types.
 */
export const buildModel = (modelType: string, maxLen: number, vocabularySize: number, embeddingSize: number): tf.LayersModel => {
  const model = tf.sequential()
  if (modelType === 'multihot') {
    // A 'multihot' model takes a multi-hot encoding of all words in the
    // sentence and uses dense layers with relu and sigmoid activation functions
    // to classify the sentence.
    model.add(tf.layers.dense({
      units: 16,
      activation: 'relu',
      inputShape: [vocabularySize]
    }))
    model.add(tf.layers.dense({
      units: 16,
      activation: 'relu'
    }))
  } else {
    // All other model types use word embedding.
    model.add(tf.layers.embedding({
      inputDim: vocabularySize,
      outputDim: embeddingSize,
      inputLength: maxLen
    }))
    if (modelType === 'flatten') {
      // Flatten the embedded sequence straight into the classifier head.
      model.add(tf.layers.flatten())
    } else if (modelType === 'cnn') {
      // 1-D convolution over the word embeddings, then global max pooling.
      model.add(tf.layers.dropout({ rate: 0.5 }))
      model.add(tf.layers.conv1d({
        filters: 250,
        kernelSize: 5,
        strides: 1,
        padding: 'valid',
        activation: 'relu'
      }))
      model.add(tf.layers.globalMaxPool1d({}))
      model.add(tf.layers.dense({ units: 250, activation: 'relu' }))
    } else if (modelType === 'simpleRNN') {
      model.add(tf.layers.simpleRNN({ units: 32 }))
    } else if (modelType === 'lstm') {
      model.add(tf.layers.lstm({ units: 32 }))
    } else if (modelType === 'bidirectionalLSTM') {
      // Runs an LSTM over the sequence in both directions, concatenating
      // the two final states.
      model.add(tf.layers.bidirectional({
        layer: tf.layers.lstm({ units: 32 }) as tf.layers.RNN,
        mergeMode: 'concat'
      }))
    } else {
      throw new Error(`Unsupported model type: ${modelType}`)
    }
  }
  // Binary sentiment head: probability of the positive class.
  model.add(tf.layers.dense({ units: 1, activation: 'sigmoid' }))
  return model
}
/**
 * Parse the command-line arguments for this training script.
 *
 * Fixed: `optionStrings` is not a recognized `addArgument` option for
 * restricting allowed values in the argparse npm package — the correct key
 * is `choices`. Previously any `modelType` string was accepted here and only
 * rejected later inside `buildModel`, and `--logUpdateFreq` was unrestricted.
 */
const parseArguments = (): any => {
  const parser = new ArgumentParser(
    { description: 'Train a model for IMDB sentiment analysis' })
  parser.addArgument('modelType', {
    type: 'string',
    choices: [
      'multihot', 'flatten', 'cnn', 'simpleRNN', 'lstm', 'bidirectionalLSTM'],
    help: 'Model type'
  })
  parser.addArgument('--numWords', {
    type: 'int',
    defaultValue: 10000,
    help: 'Number of words in the vocabulary'
  })
  parser.addArgument('--maxLen', {
    type: 'int',
    defaultValue: 100,
    help: 'Maximum sentence length in number of words. ' +
      'Shorter sentences will be padded; longers ones will be truncated.'
  })
  parser.addArgument('--embeddingSize', {
    type: 'int',
    defaultValue: 128,
    help: 'Number of word embedding dimensions'
  })
  parser.addArgument(
    '--gpu', { action: 'storeTrue', help: 'Use GPU for training' })
  parser.addArgument('--optimizer', {
    type: 'string',
    defaultValue: 'adam',
    help: 'Optimizer to be used for model training'
  })
  parser.addArgument(
    '--epochs',
    { type: 'int', defaultValue: 10, help: 'Number of training epochs' })
  parser.addArgument(
    '--batchSize',
    { type: 'int', defaultValue: 128, help: 'Batch size for training' })
  parser.addArgument('--validationSplit', {
    type: 'float',
    defaultValue: 0.2,
    help: 'Validation split for training'
  })
  parser.addArgument('--modelSaveDir', {
    type: 'string',
    defaultValue: 'dist/resources',
    help: 'Optional path for model saving.'
  })
  parser.addArgument('--embeddingFilesPrefix', {
    type: 'string',
    defaultValue: '',
    help: 'Optional path prefix for saving embedding files that ' +
      'can be loaded in the Embedding Projector ' +
      '(https://projector.tensorflow.org/). For example, if this flag ' +
      'is configured to the value /tmp/embed, then the embedding vectors ' +
      'file will be written to /tmp/embed_vectors.tsv and the labels ' +
      'file will be written to /tmp/embed_label.tsv'
  })
  parser.addArgument('--logDir', {
    type: 'string',
    help: 'Optional tensorboard log directory, to which the loss and ' +
      'accuracy will be logged during model training.'
  })
  parser.addArgument('--logUpdateFreq', {
    type: 'string',
    defaultValue: 'batch',
    choices: ['batch', 'epoch'],
    help: 'Frequency at which the loss and accuracy will be logged to ' +
      'tensorboard.'
  })
  return parser.parseArgs()
}
// Entry point: parse args, load IMDB data, build/train/evaluate the model,
// then optionally save model artifacts and embedding-projector files.
const main = async (): Promise<void> => {
  const args = parseArguments()
  // NOTE(review): `tf` is statically imported from '@tensorflow/tfjs-node' at
  // the top of this file, so training runs on that backend regardless of
  // --gpu; the `tfn` handle chosen here is only used for the tensorBoard
  // callback below. Confirm whether --gpu actually switches backends.
  let tfn
  if (args.gpu) {
    console.log('Using GPU for training')
    tfn = require('@tensorflow/tfjs-node-gpu')
  } else {
    console.log('Using CPU for training')
    tfn = require('@tensorflow/tfjs-node')
  }
  console.log('Loading data...')
  // 'multihot' uses a bag-of-words encoding instead of padded index sequences.
  const multihot = args.modelType === 'multihot'
  const { xTrain, yTrain, xTest, yTest } = await loadData(args.numWords, args.maxLen, multihot)
  console.log('Building model...')
  const model = buildModel(
    args.modelType, args.maxLen, args.numWords, args.embeddingSize)
  model.compile({
    loss: 'binaryCrossentropy',
    optimizer: args.optimizer,
    metrics: ['acc']
  })
  model.summary()
  console.log('Training model...')
  await model.fit(xTrain as tf.Tensor, yTrain as tf.Tensor, {
    epochs: args.epochs,
    batchSize: args.batchSize,
    validationSplit: args.validationSplit,
    // Only attach tensorboard logging when a log directory was requested.
    callbacks: args.logDir == null ? null : tfn.node.tensorBoard(args.logDir, {
      updateFreq: args.logUpdateFreq
    })
  })
  console.log('Evaluating model...')
  const [testLoss, testAcc] = model.evaluate(xTest as tf.Tensor, yTest as tf.Tensor, { batchSize: args.batchSize }) as tf.Scalar[]
  console.log(`Evaluation loss: ${(await testLoss.data())[0].toFixed(4)}`)
  console.log(`Evaluation accuracy: ${(await testAcc.data())[0].toFixed(4)}`)
  // Save model.
  let metadata: any
  if (args.modelSaveDir != null && args.modelSaveDir.length > 0) {
    if (multihot) {
      // The multihot variant has no embedding layer; saving is unsupported.
      console.warn(
        'Skipping saving of multihot model, which is not supported.')
    } else {
      // Create base directory first.
      // NOTE(review): mkdir is disabled, so saving fails unless modelSaveDir
      // already exists — confirm the directory is pre-created elsewhere.
      // shelljs.mkdir('-p', args.modelSaveDir)
      // Load metadata template.
      console.log('Loading metadata template...')
      metadata = await loadMetadataTemplate()
      // Record the actual training configuration in the metadata.
      metadata.epochs = args.epochs
      metadata.embedding_size = args.embeddingSize
      metadata.max_len = args.maxLen
      metadata.model_type = args.modelType
      metadata.batch_size = args.batchSize
      metadata.vocabulary_size = args.numWords
      const metadataPath = path.join(args.modelSaveDir, 'metadata.json')
      fs.writeFileSync(metadataPath, JSON.stringify(metadata))
      console.log(`Saved metadata to ${metadataPath}`)
      // Save model artifacts.
      await model.save(`file://${args.modelSaveDir}`)
      console.log(`Saved model to ${args.modelSaveDir}`)
    }
  }
  if (args.embeddingFilesPrefix != null &&
    args.embeddingFilesPrefix.length > 0) {
    if (metadata == null) {
      metadata = await loadMetadataTemplate()
    }
    // Export embedding matrix + labels for the TensorBoard Embedding Projector.
    await writeEmbeddingMatrixAndLabels(
      model, args.embeddingFilesPrefix, metadata.word_index, metadata.index_from)
  }
}
// eslint-disable-next-line @typescript-eslint/no-floating-promises
main().then()
<file_sep>/src/components/mnist/modelCoreApi.ts
/**
* @license
* Copyright 2018 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '@tensorflow/tfjs'
import { IMAGE_H, IMAGE_W, IMnistDataset, NUM_CLASSES } from './mnistConsts'
// ///////////////////////////////////
// // Same With follow Layers API Implement
//
// const model = tf.sequential()
//
// model.add(tf.layers.conv2d({
// inputShape: [IMAGE_H, IMAGE_W, 1], kernelSize: 5, filters: 8, activation: 'relu', padding: 'same'
// }))
// model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 }))
// model.add(tf.layers.conv2d({ kernelSize: 5, filters: 16, activation: 'relu', padding: 'same'}))
// model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 }))
// model.add(tf.layers.flatten({}))
//
// model.add(tf.layers.dense({ units: 10, activation: 'softmax' }))
// ///////////////////////////////////
// Variables that we want to optimize. These are the hand-managed weights of
// the Core-API model below, mirroring the commented-out Layers-API version.
const conv1OutputDepth = 8
// Conv 1 kernel: 5x5, 1 input channel (grayscale) -> 8 output channels.
const conv1Weights = tf.variable(tf.randomNormal([5, 5, 1, conv1OutputDepth], 0, 0.1))
const conv2InputDepth = conv1OutputDepth
const conv2OutputDepth = 16
// Conv 2 kernel: 5x5, 8 -> 16 channels.
const conv2Weights = tf.variable(tf.randomNormal([5, 5, conv2InputDepth, conv2OutputDepth], 0, 0.1))
// Fully-connected head: flattened 7x7 feature maps -> NUM_CLASSES logits.
// NOTE(review): the 7*7 assumes two 2x downsamplings of a 28x28 input —
// confirm IMAGE_H/IMAGE_W are 28 in mnistConsts.
const fullyConnectedWeights = tf.variable(
  tf.randomNormal([7 * 7 * conv2OutputDepth, NUM_CLASSES], 0,
    1 / Math.sqrt(7 * 7 * conv2OutputDepth)))
const fullyConnectedBias = tf.variable(tf.zeros([NUM_CLASSES]))
// Loss function: softmax cross-entropy, averaged over the batch.
const loss = (labels: tf.Tensor, ys: tf.Tensor): tf.Scalar => {
  return tf.losses.softmaxCrossEntropy(labels, ys).mean()
}
// Our actual model: a 2-conv-layer CNN built with the Core API.
// Returns unnormalized logits (softmax is applied inside the loss).
export const model = (inputXs: tf.Tensor): tf.Tensor => {
  // Reshape the (possibly flattened) input batch into NHWC images.
  const xs = inputXs.as4D(-1, IMAGE_H, IMAGE_W, 1)
  const strides = 2
  const pad = 0
  // Conv 1: stride-1 'same' convolution -> ReLU -> 2x2 max-pool (stride 2).
  const layer1 = tf.tidy(() => {
    return xs.conv2d(conv1Weights as tf.Tensor4D, 1, 'same')
      .relu()
      .maxPool([2, 2], strides, pad)
  })
  // Conv 2: same structure, widening 8 -> 16 channels.
  const layer2 = tf.tidy(() => {
    return layer1.conv2d(conv2Weights as tf.Tensor4D, 1, 'same')
      .relu()
      .maxPool([2, 2], strides, pad)
  })
  // Final layer: flatten the feature maps and apply the fully-connected
  // weights and bias.
  return layer2.as2D(-1, fullyConnectedWeights.shape[0])
    .matMul(fullyConnectedWeights as tf.Tensor)
    .add(fullyConnectedBias)
}
// Train the model.
// `log(step, cost)` is invoked once per optimization step so callers can
// render progress; `tf.nextFrame()` yields between steps to keep the UI
// responsive when running in the browser.
export const train = async (data: IMnistDataset, log: Function,
  steps: number, batchSize: number, learningRate: number): Promise<void> => {
  const returnCost = true
  const optimizer = tf.train.adam(learningRate)
  for (let i = 0; i < steps; i++) {
    // minimize() draws a fresh batch, evaluates the loss, and applies
    // gradients to the tf.variable()s captured by `model`.
    const cost = optimizer.minimize(() => {
      const batch = data.nextTrainBatch(batchSize)
      const _labels = batch.ys as tf.Tensor
      const _xs = batch.xs as tf.Tensor
      return loss(_labels, model(_xs))
    }, returnCost)
    log(i, cost?.dataSync())
    await tf.nextFrame()
  }
}
// Predict the digit number from a batch of input images.
// Wrapped in tidy() so intermediate tensors are disposed.
export const predict = (x: tf.Tensor): tf.Tensor => {
  return tf.tidy(() => model(x))
}
// Given a logits or label vector, return the class indices.
// argMax along axis 1 (the class axis), then read the values back.
export const classesFromLabel = (y: tf.Tensor): number[] => {
  return Array.from(y.argMax(1).dataSync())
}
<file_sep>/src/components/rnn/modelJena.ts
/**
* @Author: <EMAIL>
* @Comments:
* Adapted from some codes in Google tfjs-examples or tfjs-models.
* Refactoring to typescript for RTL(React Tensorflow.js Lab)'s needs
*/
/**
* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '@tensorflow/tfjs'
// Linear-regression baseline: flatten the input window, then one linear unit.
export const buildLinearRegressionModel = (inputShape: tf.Shape): tf.LayersModel =>
  tf.sequential({
    layers: [
      tf.layers.flatten({ inputShape }),
      tf.layers.dense({ units: 1 })
    ]
  })
// Multi-layer perceptron: flatten -> dense(32, relu) -> optional dropout ->
// dense(1). `kernelRegularizer` and `dropoutRate` are both optional.
export const buildMLPModel = (inputShape: tf.Shape,
  options: {
    kernelRegularizer?: any
    dropoutRate?: number
  } = {}): tf.LayersModel => {
  const { kernelRegularizer, dropoutRate } = options
  const hiddenConfig: any = { units: 32, activation: 'relu' }
  if (kernelRegularizer) {
    hiddenConfig.kernelRegularizer = kernelRegularizer
  }
  const layers = [
    tf.layers.flatten({ inputShape }),
    tf.layers.dense(hiddenConfig)
  ]
  if (dropoutRate && dropoutRate > 0) {
    layers.push(tf.layers.dropout({ rate: dropoutRate }))
  }
  layers.push(tf.layers.dense({ units: 1 }))
  return tf.sequential({ layers })
}
// SimpleRNN regressor: 32 recurrent units followed by a single linear output.
export const buildSimpleRNNModel = (inputShape: tf.Shape): tf.LayersModel =>
  tf.sequential({
    layers: [
      tf.layers.simpleRNN({ units: 32, inputShape }),
      tf.layers.dense({ units: 1 })
    ]
  })
// GRU regressor: 32 units with optional (recurrent) dropout, linear output.
// TODO(cais): Recurrent dropout is currently not fully working.
// Make it work and add a flag to train-rnn.js.
export const buildGRUModel = (inputShape: tf.Shape, dropout?: number, recurrentDropout?: number): tf.LayersModel =>
  tf.sequential({
    layers: [
      tf.layers.gru({
        units: 32,
        inputShape,
        dropout: dropout ?? 0,
        recurrentDropout: recurrentDropout ?? 0
      }),
      tf.layers.dense({ units: 1 })
    ]
  })
<file_sep>/public/docs/dev/static-deploy.md
# RTCamp 使用指南
## Web服务器 + 预编译静态文件
### Apache
MAC 自带 Apache:

```bash
$ httpd -v
Server version: Apache/2.4.41 (Unix)
Server built:   Feb 29 2020 02:40:57
```
启动、停止和重启。因为需要占用 80 端口,会要求你输入本机的管理员密码。

```bash
sudo apachectl start
sudo apachectl stop
sudo apachectl restart
```
启动后能够直接通过 [http://localhost](http://localhost) 访问,如果一切正常,能够看到 “It Works” 字样。
### 修改 Web Root
这一步也就是把 RTCamp 放到 Web Root 目录下。
Apache 的配置文件放置在 `/etc/apache2/httpd.conf`
系统自带的 Web Root 目录是:`/Library/WebServer/Documents`, 将 RTCamp 的 build.zip 包展开到这个目录下即可。
<file_sep>/public/docs/blog/20200316.md
## 20200316 内容组织
在 AI 目录下"抄"了不少内容,发觉想要完全讲清楚,是个工作量不小的事儿。
想想也没必要,最后还是决定参考 Fablab 文档的方式,把网上很多写得不错的内容,直接给链接就好。
AI下的内容,只保留与对应代码例子最相关的部分。
开发教程和AI内容也考虑分开了。
开发教程只讲解操作、代码、API等和开发相关的内容。省得读者一会儿东,一会儿西,抓不住重点。
<file_sep>/public/preload/data/download_jena_data.sh
#!/bin/bash
# wget https://storage.googleapis.com/learnjs-data/speech-commands/speech-commands-data-v0.02-browser.tar.gz
# tar -czvf speech-commands-data-v0.02-browser.tar.gz
wget https://storage.googleapis.com/learnjs-data/jena_climate/jena_climate_2009_2016.csv
<file_sep>/src/components/pretrained/pretrainedUtils.ts
import { logger } from '../../utils'
// Draw a filled circle of radius `r` centred at (x, y).
export const drawPoint = (ctx: CanvasRenderingContext2D, y: number, x: number, r: number): void => {
  ctx.beginPath()
  ctx.arc(x, y, r, 0, Math.PI * 2)
  ctx.fill()
}
// Stroke a polyline through `points` ([x, y] pairs), optionally closing it.
export const drawPath = (ctx: CanvasRenderingContext2D, points: number[][], closePath: boolean): void => {
  const region = new Path2D()
  const [first, ...rest] = points
  region.moveTo(first[0], first[1])
  for (const segment of rest) {
    region.lineTo(segment[0], segment[1])
  }
  if (closePath) {
    region.closePath()
  }
  ctx.stroke(region)
}
// Draw a straight 2px line segment between two [y, x] keypoints, scaling
// both endpoints by `scale`.
export const drawSegment = (ctx: CanvasRenderingContext2D, [y0, x0]: number[],
  [y1, x1]: number[], scale: number): void => {
  ctx.beginPath()
  ctx.moveTo(x0 * scale, y0 * scale)
  ctx.lineTo(x1 * scale, y1 * scale)
  ctx.lineWidth = 2
  // ctx.strokeStyle = color;
  ctx.stroke()
}
// Serialize `content` as pretty-printed JSON and trigger a browser download
// through the supplied anchor element; no-op when the anchor is missing.
export const downloadJson = (content: any, fileName: string, downloadRef: HTMLAnchorElement): void => {
  const anchor = downloadRef
  if (!anchor) {
    return
  }
  const blob = new Blob([JSON.stringify(content, null, 2)],
    { type: 'application/json' })
  const blobUrl = window.URL.createObjectURL(blob)
  logger(blobUrl)
  anchor.href = blobUrl
  anchor.download = fileName
  anchor.click()
  // Release the object URL once the download has been initiated.
  window.URL.revokeObjectURL(blobUrl)
}
<file_sep>/src/components/common/visulization/cam.ts
/**
* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
/**
* This script contains a function that performs the following operations:
*
* Get visual interpretation of which parts of the image more most
* responsible for a convnet's classification decision, using the
* gradient-based class activation map (CAM) method.
* See function `gradClassActivationMap()`.
*/
import * as tf from '@tensorflow/tfjs'
import * as utils from './camUtils'
import { logger } from '../../../utils'
/**
* Calculate class activation map (CAM) and overlay it on input image.
*
* This function automatically finds the last convolutional layer, get its
* output (activation) under the input image, weights its filters by the
* gradient of the class output with respect to them, and then collapses along
* the filter dimension.
*
* @param {tf.Sequential} model A TensorFlow.js sequential model, assumed to
* contain at least one convolutional layer.
* @param {number} classIndex Index to class in the model's final classification
* output.
* @param {tf.Tensor4d} x Input image, assumed to have shape
* `[1, height, width, 3]`.
* @param {number} overlayFactor Optional overlay factor.
* @returns The input image with a heat-map representation of the class
* activation map overlaid on top of it, as float32-type `tf.Tensor4d` of
* shape `[1, height, width, 3]`.
*/
export const gradClassActivationMap = (model: tf.LayersModel, classIndex: number, x: tf.Tensor4D,
  overlayFactor = 2.0): tf.Tensor4D => {
  // Try to locate the last conv layer of the model by scanning backwards
  // over the layer list.
  let layerIndex = model.layers.length - 1
  while (layerIndex >= 0) {
    if (model.layers[layerIndex].getClassName().startsWith('Conv')) {
      break
    }
    layerIndex--
  }
  tf.util.assert(
    layerIndex >= 0, () => 'Failed to find a convolutional layer in model')
  const lastConvLayer = model.layers[layerIndex]
  logger(
    'Located last convolutional layer of the model at ' +
    `index ${layerIndex}: layer type = ${lastConvLayer.getClassName()}; ` +
    `layer name = ${lastConvLayer.name}`)
  // Get "sub-model 1", which goes from the original input to the output
  // of the last convolutional layer.
  const lastConvLayerOutput = lastConvLayer.output as tf.SymbolicTensor
  const subModel1 =
    tf.model({ inputs: model.inputs, outputs: lastConvLayerOutput })
  // Get "sub-model 2", which goes from the output of the last convolutional
  // layer to the original output; rebuilt layer-by-layer on a fresh input
  // so gradients can be taken w.r.t. the conv activation.
  const newInput = tf.input({ shape: lastConvLayerOutput.shape.slice(1) })
  layerIndex++
  let y: tf.SymbolicTensor = newInput
  while (layerIndex < model.layers.length) {
    y = model.layers[layerIndex++].apply(y) as tf.SymbolicTensor
  }
  const subModel2 = tf.model({ inputs: newInput, outputs: y })
  return tf.tidy(() => {
    // This function runs sub-model 2 and extracts the slice of the probability
    // output that corresponds to the desired class.
    // NOTE(review): `training: true` makes training-mode layers (e.g. dropout)
    // active during this forward pass — confirm that is intended for CAM.
    const convOutput2ClassOutput = (input: tf.Tensor): tf.Tensor =>
      (subModel2.apply(input, { training: true }) as tf.Tensor).gather([classIndex], 1)
    // This is the gradient function of the output corresponding to the desired
    // class with respect to its input (i.e., the output of the last
    // convolutional layer of the original model).
    const gradFunction = tf.grad(convOutput2ClassOutput)
    // Calculate the values of the last conv layer's output.
    const lastConvLayerOutputValues = subModel1.apply(x) as tf.Tensor
    // Calculate the values of gradients of the class output w.r.t. the output
    // of the last convolutional layer.
    const gradValues = gradFunction(lastConvLayerOutputValues)
    // Pool the gradient values within each filter of the last convolutional
    // layer, resulting in a tensor of shape [numFilters].
    const pooledGradValues = tf.mean(gradValues, [0, 1, 2])
    // Scale the convolutional layer's output by the pooled gradients, using
    // broadcasting.
    const scaledConvOutputValues =
      lastConvLayerOutputValues.mul(pooledGradValues)
    // Create heat map by averaging and collapsing over all filters.
    let heatMap: tf.Tensor4D = scaledConvOutputValues.mean(-1)
    // Discard negative values from the heat map and normalize it to the [0, 1]
    // interval.
    heatMap = heatMap.relu()
    heatMap = heatMap.div(heatMap.max()).expandDims(-1)
    // Up-sample the heat map to the size of the input image.
    heatMap = tf.image.resizeBilinear(heatMap, [x.shape[1], x.shape[2]])
    // Apply an RGB colormap on the heatMap. This step is necessary because
    // the heatMap is a 1-channel (grayscale) image. It needs to be converted
    // into a color (RGB) one through this function call.
    heatMap = utils.applyColorMap(heatMap) as tf.Tensor4D
    // To form the final output, overlay the color heat map on the input image.
    heatMap = heatMap.mul(overlayFactor).add(x.div(255))
    // Rescale so the brightest value maps to 255.
    return heatMap.div(heatMap.max()).mul(255)
  })
}
<file_sep>/node/src/simpleObjDetector/train.ts
/**
* @license
* Copyright 2018 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '@tensorflow/tfjs-node'
import * as fs from 'fs'
import * as path from 'path'
import * as canvas from 'canvas'
import { ArgumentParser } from 'argparse'
import ObjectDetectionImageSynthesizer from './dataObjDetector'
const CANVAS_SIZE = 224 // Matches the input size of MobileNet.
// Name prefixes of layers that will be unfrozen during fine-tuning.
const topLayerGroupNames = ['conv_pw_9', 'conv_pw_10', 'conv_pw_11']
// Name of the layer that will become the top layer of the truncated base.
const topLayerName = `${topLayerGroupNames[topLayerGroupNames.length - 1]}_relu`
// Used to scale the first column (0-1 shape indicator) of `yTrue`
// in order to ensure balanced contributions to the final loss value
// from shape and bounding-box predictions.
const LABEL_MULTIPLIER = [CANVAS_SIZE, 1, 1, 1, 1]
// Locally-hosted MobileNet v1 (alpha=0.25, 224x224 input) weights.
export const MOBILENET_MODEL_PATH = './public/preload/model/mobilenet_v1_0.25_224/model.json'
// Result bundle returned by the model builders: the assembled model plus
// the layers eligible for unfreezing in the fine-tuning phase.
interface IModelWithFineTuning {
  model: tf.LayersModel
  fineTuningLayers: tf.layers.Layer[]
}
/**
 * Custom loss function for object detection.
 *
 * Implemented as a single meanSquaredError between `yPred` and a scaled
 * `yTrue`: the first column of `yTrue` (the 0-1 shape indicator) is
 * multiplied by CANVAS_SIZE (via LABEL_MULTIPLIER) so that its squared error
 * is on roughly the same scale as the bounding-box coordinate errors,
 * giving both balanced contributions to the final loss value.
 *
 * @param {tf.Tensor} yTrue True labels. Shape: [batchSize, 5].
 *   The first column is a 0-1 indicator for whether the shape is a triangle
 *   (0) or a rectangle (1). The remaining four columns are the bounding box
 *   for the target shape: [left, right, top, bottom], in unit of pixels.
 *   The bounding box values are in the range [0, CANVAS_SIZE).
 * @param {tf.Tensor} yPred Predicted labels. Shape: the same as `yTrue`.
 * @return {tf.Tensor} Loss scalar.
 */
export const customLossFunction = (yTrue: tf.Tensor, yPred: tf.Tensor): tf.Tensor => {
  return tf.tidy(() => {
    // Scale the first column (0-1 shape indicator) of `yTrue` in order
    // to ensure balanced contributions to the final loss value
    // from shape and bounding-box predictions.
    return tf.metrics.meanSquaredError(yTrue.mul(LABEL_MULTIPLIER), yPred)
  })
}
/**
 * Loads MobileNet, removes the top part, and freezes all the layers.
 *
 * The top removal and layer freezing are preparation for transfer learning.
 *
 * Also gets handles to the layers that will be unfrozen during the
 * fine-tuning phase of the training.
 *
 * @return {tf.Model} The truncated MobileNet, with all layers frozen.
 */
const loadTruncatedBase = async (): Promise<IModelWithFineTuning> => {
  const mobilenet = await tf.loadLayersModel(MOBILENET_MODEL_PATH)
  // Return a model that outputs an internal activation.
  const fineTuningLayers: tf.layers.Layer[] = []
  const layer = mobilenet.getLayer(topLayerName)
  const truncatedBase = tf.model({ inputs: mobilenet.inputs, outputs: layer.output })
  // Freeze the model's layers, remembering any layer whose name starts with
  // a fine-tuning prefix so it can be unfrozen in training phase 2.
  // (The loop variable shadows the `layer` declared above.)
  for (const layer of truncatedBase.layers) {
    layer.trainable = false
    for (const groupName of topLayerGroupNames) {
      if (layer.name.indexOf(groupName) === 0) {
        fineTuningLayers.push(layer)
        break
      }
    }
  }
  // NOTE(review): this sanity check is disabled; as written it also has a
  // syntax slip (missing `=>` before the template string) — restore carefully.
  // tf.util.assert(
  //   fineTuningLayers.length > 1,
  //   ()`Did not find any layers that match the prefixes ${topLayerGroupNames}`)
  return { model: truncatedBase, fineTuningLayers }
}
/**
 * Build a new head (i.e., output sub-model) that will be connected to
 * the top of the truncated base for object detection.
 *
 * Five output units:
 * - The first is a shape indicator: predicts whether the target
 *   shape is a triangle or a rectangle.
 * - The remaining four units are for bounding-box prediction:
 *   [left, right, top, bottom] in the unit of pixels.
 *
 * @param {tf.Shape} inputShape Input shape of the new model.
 * @returns {tf.Model} The new head model.
 */
const buildNewHead = (inputShape: tf.Shape): tf.LayersModel =>
  tf.sequential({
    layers: [
      tf.layers.flatten({ inputShape }),
      tf.layers.dense({ units: 200, activation: 'relu' }),
      tf.layers.dense({ units: 5 })
    ]
  })
/**
* Builds object-detection model from MobileNet.
*
* @returns {[tf.Model, tf.layers.Layer[]]}
* 1. The newly-built model for simple object detection.
* 2. The layers that can be unfrozen during fine-tuning.
*/
export const buildObjectDetectionModel = async (): Promise<IModelWithFineTuning> => {
const { model: truncatedBase, fineTuningLayers } = await loadTruncatedBase()
// Build the new head model.
const newHead = buildNewHead(truncatedBase.outputs[0].shape.slice(1))
const newOutput = newHead.apply(truncatedBase.outputs[0])
const model = tf.model({ inputs: truncatedBase.inputs, outputs: newOutput as tf.SymbolicTensor })
return { model, fineTuningLayers }
}
// Entry point: synthesize training data, run two-phase transfer learning
// (frozen base, then fine-tuning), and save the resulting model.
const main = async (): Promise<void> => {
  // Data-related settings: distractor circles/lines per synthetic image.
  const numCircles = 10
  const numLines = 10
  const parser = new ArgumentParser()
  parser.addArgument('--gpu', {
    action: 'storeTrue',
    help: 'Use tfjs-node-gpu for training (required CUDA and CuDNN)'
  })
  parser.addArgument(
    '--numExamples',
    { type: 'int', defaultValue: 2000, help: 'Number of training exapmles' })
  parser.addArgument('--validationSplit', {
    type: 'float',
    defaultValue: 0.15,
    help: 'Validation split to be used during training'
  })
  parser.addArgument('--batchSize', {
    type: 'int',
    defaultValue: 128,
    help: 'Batch size to be used during training'
  })
  parser.addArgument('--initialTransferEpochs', {
    type: 'int',
    defaultValue: 100,
    help: 'Number of training epochs in the initial transfer ' +
      'learning (i.e., 1st) phase'
  })
  parser.addArgument('--fineTuningEpochs', {
    type: 'int',
    defaultValue: 100,
    help: 'Number of training epochs in the fine-tuning (i.e., 2nd) phase'
  })
  parser.addArgument('--logDir', {
    type: 'string',
    help: 'Optional tensorboard log directory, to which the loss ' +
      'values will be logged during model training.'
  })
  parser.addArgument('--logUpdateFreq', {
    type: 'string',
    defaultValue: 'batch',
    optionStrings: ['batch', 'epoch'],
    help: 'Frequency at which the loss will be logged to tensorboard.'
  })
  const args = parser.parseArgs()
  // NOTE(review): `tf` is statically imported from '@tensorflow/tfjs-node'
  // above, so training runs on that backend regardless of --gpu; `tfn` is
  // only used for the tensorBoard callbacks below.
  let tfn
  if (args.gpu) {
    console.log('Training using GPU.')
    tfn = require('@tensorflow/tfjs-node-gpu')
  } else {
    console.log('Training using CPU.')
    tfn = require('@tensorflow/tfjs-node')
  }
  const modelSaveURL = 'file://./dist/object_detection_model'
  const tBegin = tf.util.now()
  console.log(`Generating ${args.numExamples} training examples...`)
  const synthDataCanvas = canvas.createCanvas(CANVAS_SIZE, CANVAS_SIZE)
  const synth = new ObjectDetectionImageSynthesizer(synthDataCanvas)
  const { images, targets } = await synth.generateExampleBatch(args.numExamples, numCircles, numLines) as tf.TensorContainerObject
  const { model, fineTuningLayers } = await buildObjectDetectionModel()
  model.compile({ loss: customLossFunction, optimizer: tf.train.rmsprop(5e-3) })
  model.summary()
  // Initial phase of transfer learning: only the new head trains.
  console.log('Phase 1 of 2: initial transfer learning')
  await model.fit(images as tf.Tensor, targets as tf.Tensor, {
    epochs: args.initialTransferEpochs,
    batchSize: args.batchSize,
    validationSplit: args.validationSplit,
    callbacks: args.logDir == null ? null : tfn.node.tensorBoard(args.logDir, {
      updateFreq: args.logUpdateFreq
    })
  })
  // Fine-tuning phase of transfer learning.
  // Unfreeze layers for fine-tuning; re-compile is required for the
  // trainability change to take effect (with a lower learning rate).
  for (const layer of fineTuningLayers) {
    layer.trainable = true
  }
  model.compile({ loss: customLossFunction, optimizer: tf.train.rmsprop(2e-3) })
  model.summary()
  // Do fine-tuning.
  // The batch size is reduced to avoid CPU/GPU OOM. This has
  // to do with the unfreezing of the fine-tuning layers above,
  // which leads to higher memory consumption during backpropagation.
  // NOTE(review): args.batchSize / 2 is fractional when --batchSize is odd —
  // confirm how fit() handles a non-integer batch size.
  console.log('Phase 2 of 2: fine-tuning phase')
  await model.fit(images as tf.Tensor, targets as tf.Tensor, {
    epochs: args.fineTuningEpochs,
    batchSize: args.batchSize / 2,
    validationSplit: args.validationSplit,
    callbacks: args.logDir == null ? null : tfn.node.tensorBoard(args.logDir, {
      updateFreq: args.logUpdateFreq
    })
  })
  // Save model.
  // First make sure that the base directory exists.
  // NOTE(review): mkdirSync is not recursive here, so this fails if the
  // parent of dirName is also missing.
  const modelSavePath = modelSaveURL.replace('file://', '')
  const dirName = path.dirname(modelSavePath)
  if (!fs.existsSync(dirName)) {
    fs.mkdirSync(dirName)
  }
  await model.save(modelSaveURL)
  console.log(`Model training took ${(tf.util.now() - tBegin) / 1e3} s`)
  console.log(`Trained model is saved to ${modelSaveURL}`)
  console.log('\nNext, run the following command to test the model in the browser:')
  console.log('\n  yarn watch')
}
main().then()
<file_sep>/src/components/rnn/SentimentPredictor.ts
/**
* @license
* Copyright 2018 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '@tensorflow/tfjs'
import { OOV_INDEX, padSequences } from './sequenceUtils'
import { logger } from '../../utils'
import { DATA_BASE_URL } from './dataSentiment'
// Base path for locally-hosted pretrained models.
const BASE_URL = '/preload/model'
// Locally-hosted copy of the pretrained CNN sentiment model and its metadata
// (the original Google-hosted URLs are kept commented for reference).
export const PRETRAINED_HOSTED_URLS = {
  model: `${BASE_URL}/sentiment_cnn_v1/model.json`,
  // 'https://storage.googleapis.com/tfjs-models/tfjs/sentiment_cnn_v1/model.json',
  metadata: `${BASE_URL}/sentiment_cnn_v1/metadata.json`
  // 'https://storage.googleapis.com/tfjs-models/tfjs/sentiment_cnn_v1/metadata.json'
}
// Default URL set: `model` is intentionally empty (metadata only).
const DEFAULT_METADATA_URLS = {
  model: '',
  metadata: `${DATA_BASE_URL}/metadata.json`
  // 'https://storage.googleapis.com/learnjs-data/imdb/metadata.json.zip'
}
export class SentimentPredictor {
  // URLs to load the model/metadata from; see DEFAULT_METADATA_URLS.
  urls = DEFAULT_METADATA_URLS
  // The sentiment model; undefined until init() loads one or setModel() injects one.
  model: tf.LayersModel | undefined
  // Vocabulary metadata (word_index, index_from, vocabulary_size, max_len, ...).
  metadata: any = {}

  constructor (urls = DEFAULT_METADATA_URLS) {
    this.urls = urls
  }

  /**
   * Initializes the Sentiment demo: loads the model when a model URL is
   * configured, then loads the metadata.
   * @returns the loaded model, or undefined when no model URL is configured
   *   or loading failed.
   */
  init = async (): Promise<tf.LayersModel | void> => {
    try {
      // Fixed: was `this.urls.model || this.urls.model.length > 0`, which
      // dereferences `.length` on an undefined/null model URL and throws
      // instead of skipping the model load.
      if (this.urls.model && this.urls.model.length > 0) {
        this.model = await tf.loadLayersModel(this.urls.model)
      }
      if (this.urls.metadata) {
        await this.loadMetadata(this.urls.metadata)
      }
      return this.model
    } catch (err) {
      console.error(err)
      // ui.status('Loading pretrained model failed.');
    }
  }

  /** Fetches the sentiment metadata JSON and stores a copy on this instance. */
  loadMetadata = async (metadataUrl: string): Promise<void> => {
    try {
      const metadataJson = await fetch(metadataUrl)
      const sentimentMetadata = await metadataJson.json()
      logger('sentimentMetadata.model_type', sentimentMetadata.model_type)
      this.metadata = { ...sentimentMetadata }
    } catch (err) {
      console.error(err)
      // ui.status('Loading metadata failed.')
    }
  }

  /** Injects an already-built model (e.g. one trained in the browser). */
  setModel = (model: tf.LayersModel): void => {
    this.model = model
  }

  /**
   * Merges `options` into the metadata.
   * @param options keys/values to merge.
   * @param force when true, overwrite existing keys; otherwise only fill in
   *   keys that are currently null/undefined.
   */
  updateMetadata = (options: any, force = false): void => {
    if (!this.metadata) {
      return
    }
    if (force) {
      this.metadata = { ...this.metadata, ...options }
    } else {
      const keys = Object.keys(options)
      keys.forEach(key => {
        if (this.metadata[key] == null) {
          this.metadata[key] = options[key]
        }
      })
    }
  }

  /**
   * Scores the sentiment of `text`.
   * @returns `{ score, elapsed }`, or undefined when no model is loaded.
   */
  predict = (text: string): any => {
    if (!this.model) {
      return
    }
    // Convert to lower case and remove all punctuations.
    const inputText =
      text.trim().toLowerCase().replace(/(\.|\,|\!)/g, '').split(' ')
    // Convert the words to a sequence of word indices.
    const sequence = inputText.map((word: any) => {
      // Fixed: a word missing from word_index used to yield NaN
      // (undefined + index_from), which passed the `> vocabulary_size` check
      // and fed NaN into the input tensor. Map unknown words to OOV_INDEX.
      const rawIndex = this.metadata.word_index[word]
      let wordIndex = rawIndex == null
        ? OOV_INDEX
        : rawIndex + this.metadata.index_from
      if (wordIndex > this.metadata.vocabulary_size) {
        wordIndex = OOV_INDEX
      }
      return wordIndex
    })
    // Perform truncation and padding.
    const paddedSequence = padSequences([sequence], this.metadata.max_len)
    const input = tf.tensor2d(paddedSequence, [1, this.metadata.max_len])
    const beginMs = performance.now()
    const predictOut = this.model.predict(input) as tf.Tensor
    const score = predictOut.dataSync()[0]
    // Dispose the output tensor to avoid leaking GPU/heap memory.
    predictOut.dispose()
    const endMs = performance.now()
    return { score: score, elapsed: (endMs - beginMs) }
  }
}
<file_sep>/node/src/jena/modelJena.ts
/**
* @Author: <EMAIL>
* @Comments:
* Adapted from some codes in Google tfjs-examples or tfjs-models.
* Refactoring to typescript for RTL(React Tensorflow.js Lab)'s needs
*/
/**
* @license
* Copyright 2019 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '@tensorflow/tfjs-node'
import { JenaWeatherData } from './dataJena'
// Row ranges of the training and validation data subsets.
const TRAIN_MIN_ROW = 0
const TRAIN_MAX_ROW = 200000
const VAL_MIN_ROW = 200001
const VAL_MAX_ROW = 300000
/**
* Calculate the commonsense baseline temperture-prediction accuracy.
*
* The latest value in the temperature feature column is used as the
* prediction.
*
* @param {boolean} normalize Whether to used normalized data for training.
* @param {boolean} includeDateTime Whether to include date and time features
* in training.
* @param {number} lookBack Number of look-back time steps.
* @param {number} step Step size used to generate the input features.
* @param {number} delay How many steps in the future to make the prediction
* for.
* @returns {number} The mean absolute error of the commonsense baseline
* prediction.
*/
export const getBaselineMeanAbsoluteError = async (jenaWeatherData: JenaWeatherData, normalize: boolean,
  includeDateTime: boolean, lookBack: number, step: number, delay: number): Promise<number> => {
  const batchSize = 128
  // Iterate only the validation rows, without shuffling.
  const dataset: tf.data.Dataset<tf.TensorContainerObject> = tf.data.generator(
    () => jenaWeatherData.getNextBatchFunction(false, lookBack, delay, batchSize, step,
      VAL_MIN_ROW, VAL_MAX_ROW, normalize, includeDateTime))
  const batchMeanAbsoluteErrors: tf.Tensor[] = []
  const batchSizes: number[] = []
  await dataset.forEachAsync((dataItem: tf.TensorContainerObject) => {
    const features = dataItem.xs as tf.Tensor
    const targets = dataItem.ys as tf.Tensor
    const timeSteps = features.shape[1] as number
    batchSizes.push(features.shape[0])
    // Commonsense baseline: predict using the last time step's feature at
    // column index 1 (presumably the temperature column -- see dataJena).
    batchMeanAbsoluteErrors.push(tf.tidy(
      () => tf.losses.absoluteDifference(
        targets,
        features.gather([timeSteps - 1], 1).gather([1], 2).squeeze([2]))))
  })
  // Weight each batch's MAE by its batch size (the final batch may be short).
  const meanAbsoluteError = tf.tidy(() => {
    const batchSizesTensor = tf.tensor1d(batchSizes)
    const batchMeanAbsoluteErrorsTensor = tf.stack(batchMeanAbsoluteErrors)
    return batchMeanAbsoluteErrorsTensor.mul(batchSizesTensor)
      .sum()
      .div(batchSizesTensor.sum())
  })
  // Free the per-batch tensors accumulated outside tf.tidy.
  tf.dispose(batchMeanAbsoluteErrors)
  return meanAbsoluteError.dataSync()[0]
}
/**
 * A linear regressor: flattens the [timeSteps, features] window and fits a
 * single dense unit (pure affine map) on top.
 */
export const buildLinearRegressionModel = (inputShape: tf.Shape): tf.LayersModel => {
  const linear = tf.sequential()
  const layers = [tf.layers.flatten({ inputShape }), tf.layers.dense({ units: 1 })]
  layers.forEach(layer => linear.add(layer))
  return linear
}
/**
 * A small MLP regressor: flatten -> dense(32, relu) -> [dropout] -> dense(1).
 * The hidden layer optionally carries a kernel regularizer; dropout is added
 * only for a positive dropoutRate.
 */
export const buildMLPModel = (inputShape: tf.Shape,
  options: {
    kernelRegularizer?: any
    dropoutRate?: number
  } = {}): tf.LayersModel => {
  const { kernelRegularizer, dropoutRate } = options
  const mlp = tf.sequential()
  mlp.add(tf.layers.flatten({ inputShape }))
  const hiddenConfig: any = { units: 32, activation: 'relu' }
  if (kernelRegularizer) {
    hiddenConfig.kernelRegularizer = kernelRegularizer
  }
  mlp.add(tf.layers.dense(hiddenConfig))
  if (dropoutRate && dropoutRate > 0) {
    mlp.add(tf.layers.dropout({ rate: dropoutRate }))
  }
  mlp.add(tf.layers.dense({ units: 1 }))
  return mlp
}
/** Single-layer SimpleRNN (32 units) followed by a one-unit dense head. */
export const buildSimpleRNNModel = (inputShape: tf.Shape): tf.LayersModel => {
  return tf.sequential({
    layers: [
      tf.layers.simpleRNN({ units: 32, inputShape }),
      tf.layers.dense({ units: 1 })
    ]
  })
}
/**
 * Single-layer GRU (32 units) with a one-unit dense head. Dropout and
 * recurrent dropout default to 0 when not supplied.
 * TODO(cais): Recurrent dropout is currently not fully working; make it work
 * and add a flag to train-jena.js.
 */
export const buildGRUModel = (inputShape: tf.Shape, dropout?: number, recurrentDropout?: number): tf.LayersModel => {
  const gruLayer = tf.layers.gru({
    units: 32,
    inputShape,
    dropout: dropout ?? 0,
    recurrentDropout: recurrentDropout ?? 0
  })
  const model = tf.sequential()
  model.add(gruLayer)
  model.add(tf.layers.dense({ units: 1 }))
  return model
}
/**
 * Constructs and compiles (MAE loss, rmsprop) a model for the Jena
 * temperature-prediction task. Supported types: linear-regression, mlp,
 * mlp-l2, mlp-dropout, simpleRNN, gru.
 * @throws Error for an unsupported model type.
 */
export const buildModel = (modelType: string, numTimeSteps: number, numFeatures: number): tf.LayersModel => {
  const inputShape = [numTimeSteps, numFeatures]
  console.log(`modelType = ${modelType}`)
  let model: tf.LayersModel
  switch (modelType) {
    case 'mlp':
      model = buildMLPModel(inputShape)
      break
    case 'mlp-l2':
      model = buildMLPModel(inputShape, { kernelRegularizer: tf.regularizers.l2() })
      break
    case 'linear-regression':
      model = buildLinearRegressionModel(inputShape)
      break
    case 'mlp-dropout':
      model = buildMLPModel(inputShape, { dropoutRate: 0.25 })
      break
    case 'simpleRNN':
      model = buildSimpleRNNModel(inputShape)
      break
    case 'gru':
      // TODO(cais): Add gru-dropout with recurrentDropout.
      model = buildGRUModel(inputShape)
      break
    default:
      throw new Error(`Unsupported model type: ${modelType}`)
  }
  model.compile({ loss: 'meanAbsoluteError', optimizer: 'rmsprop' })
  model.summary()
  return model
}
/**
* Train a model on the Jena weather data.
*
* @param {tf.LayersModel} model A compiled tf.LayersModel object. It is
* expected to have a 3D input shape `[numExamples, timeSteps, numFeatures].`
* and an output shape `[numExamples, 1]` for predicting the temperature value.
* @param {JenaWeatherData} jenaWeatherData A JenaWeatherData object.
* @param {boolean} normalize Whether to used normalized data for training.
* @param {boolean} includeDateTime Whether to include date and time features
* in training.
* @param {number} lookBack Number of look-back time steps.
* @param {number} step Step size used to generate the input features.
* @param {number} delay How many steps in the future to make the prediction
* for.
* @param {number} batchSize batchSize for training.
* @param {number} epochs Number of training epochs.
* @param {tf.Callback | tf.CustomCallbackArgs} customCallback Optional callback
* to invoke at the end of every epoch. Can optionally have `onBatchEnd` and
* `onEpochEnd` fields.
*/
export const trainModel =
  async (model: tf.LayersModel, jenaWeatherData: JenaWeatherData, normalize: boolean, includeDateTime: boolean,
    lookBack: number, step: number, delay: number, batchSize: number, epochs: number,
    customCallback: tf.Callback | tf.CustomCallbackArgs[]): Promise<void> => {
    // Training samples are drawn shuffled from the training row range;
    // prefetch keeps the generator ahead of the GPU.
    const trainShuffle = true
    const trainDataset = tf.data.generator(
      () => jenaWeatherData.getNextBatchFunction(
        trainShuffle, lookBack, delay, batchSize, step, TRAIN_MIN_ROW,
        TRAIN_MAX_ROW, normalize, includeDateTime)).prefetch(8)
    // Validation iterates its row range in order, unshuffled.
    const evalShuffle = false
    const valDataset = tf.data.generator(
      () => jenaWeatherData.getNextBatchFunction(
        evalShuffle, lookBack, delay, batchSize, step, VAL_MIN_ROW,
        VAL_MAX_ROW, normalize, includeDateTime))
    // 500 batches per epoch regardless of dataset size.
    await model.fitDataset(trainDataset, {
      batchesPerEpoch: 500,
      epochs,
      callbacks: customCallback,
      validationData: valDataset
    })
  }
<file_sep>/src/components/mobilenet/mobilenetUtils.ts
import * as tf from '@tensorflow/tfjs'
import {ILabeledImage, ILabeledImageSet} from '../../utils'
// export const MOBILENET_MODEL_PATH = 'https://storage.googleapis.com/tfjs-models/tfjs/mobilenet_v1_0.25_224/model.json'
export const MOBILENET_MODEL_PATH = '/preload/model/mobilenet/mobilenet_v1_0.25_224/model.json'
export const MOBILENET_IMAGE_SIZE = 224
// Resizes an image tensor to MobileNet's 224x224 input, normalizes pixel
// values from [0, 255] to [-1, 1], and adds a batch dimension of 1.
// Assumes imgTensor is a rank-3 [h, w, 3] image -- TODO confirm at callers.
export const formatImageForMobileNet = (imgTensor: tf.Tensor): tf.Tensor => {
  const sample = tf.image.resizeBilinear(imgTensor as tf.Tensor3D, [MOBILENET_IMAGE_SIZE, MOBILENET_IMAGE_SIZE])
  // logger(JSON.stringify(sample))
  const offset = tf.scalar(127.5)
  // Normalize the image from [0, 255] to [-1, 1].
  const normalized = sample.sub(offset).div(offset)
  // Reshape to a single-element batch so we can pass it to predict.
  return normalized.reshape([1, MOBILENET_IMAGE_SIZE, MOBILENET_IMAGE_SIZE, 3])
}
/**
 * Serializes each labeled image's Float32 tensor data into a base64 `img`
 * string (in place), for items that have a tensor but no encoded form yet.
 * Returns the same (mutated) array; an empty array for falsy input.
 */
export const encodeImageTensor = (labeledImgs: ILabeledImageSet[]): any[] => {
  if (!labeledImgs) {
    return []
  }
  // Note: mutates labeledImgs in place.
  labeledImgs.forEach((labeled) => {
    labeled.imageList?.forEach((imgItem: ILabeledImage) => {
      if (imgItem.tensor && !imgItem.img) {
        // Reinterpret the float32 buffer as raw bytes for base64 encoding.
        const f32Buf = new Float32Array(imgItem.tensor.dataSync())
        const ui8Buf = new Uint8Array(f32Buf.buffer)
        imgItem.img = Buffer.from(ui8Buf).toString('base64')
      }
    })
  })
  return labeledImgs
}
/**
 * Inverse of encodeImageTensor: for items carrying both a tensor placeholder
 * and a base64 `img`, decodes the bytes back into a float32 tensor (reusing
 * the stored shape/dtype) and drops the base64 form. Mutates in place and
 * returns the same array; an empty array for falsy input.
 */
export const decodeImageTensor = (labeledImgs: ILabeledImageSet[]): any[] => {
  if (!labeledImgs) {
    return []
  }
  labeledImgs.forEach((labeled) => {
    labeled.imageList?.forEach((imgItem: ILabeledImage) => {
      if (imgItem.tensor && imgItem.img) {
        // base64 -> bytes -> float32 view, then rebuild the tensor with the
        // original shape and dtype.
        const buf = Buffer.from(imgItem.img, 'base64')
        const ui8Buf = new Uint8Array(buf)
        const f32Buf = new Float32Array(ui8Buf.buffer)
        imgItem.tensor = tf.tensor3d(f32Buf, imgItem.tensor.shape, imgItem.tensor.dtype)
        delete imgItem.img
      }
    })
  })
  return labeledImgs
}
| 373526f6e9f663fd214affda956cbbd7ab444652 | [
"Markdown",
"JavaScript",
"Python",
"TypeScript",
"Dockerfile",
"Shell"
] | 58 | TypeScript | iascchen/react-tfjs-playground | c56fb812ee24c09df87de250a6563316917f0f45 | 6058e57d82684262afe29db576b3c9c99382e22f |
refs/heads/master | <repo_name>robertg/mapbox-gl-native<file_sep>/src/mbgl/util/clip_id.cpp
#include <mbgl/util/clip_id.hpp>
#include <mbgl/platform/log.hpp>
#include <mbgl/util/math.hpp>
#include <mbgl/util/std.hpp>
#include <mbgl/tile/tile.hpp>
#include <list>
#include <vector>
#include <bitset>
#include <cassert>
#include <iostream>
#include <algorithm>
#include <iterator>
namespace mbgl {
// Captures a tile's (zoom-adjusted) ID and a reference to its ClipID slot.
ClipIDGenerator::Leaf::Leaf(TileID id_, ClipID& clip_) : id(id_), clip(clip_) {}
// Registers `p` as a covering child of this leaf's tile, but only when it is
// an actual descendant and is not already covered by a previously added child.
void ClipIDGenerator::Leaf::add(const TileID &p) {
    if (p.isChildOf(id)) {
        // Ensure that no already present child is a parent of the new p.
        for (const auto& child : children) {
            if (p.isChildOf(child))
                return;
        }
        children.push_front(p);
    }
}
// Two leaves are interchangeable for clip-ID reuse when they describe the same
// tile with the same set of covering children.
bool ClipIDGenerator::Leaf::operator==(const Leaf &other) const {
    return id == other.id && children == other.children;
}
// Assigns stencil clip masks/references to a group of tiles. Tiles are sorted
// by ID so lower zooms precede their children; each becomes a Leaf recording
// which later (deeper) tiles cover part of it. Leaves identical to ones seen
// before reuse the existing ClipID; new leaves get fresh reference values
// packed into the next free bits of the 8-bit stencil mask.
void ClipIDGenerator::update(std::forward_list<Tile *> tiles) {
    tiles.sort([](const Tile *a, const Tile *b) {
        return a->id < b->id;
    });
    std::size_t size = 0;
    const auto end = tiles.end();
    for (auto it = tiles.begin(); it != end; it++) {
        if (!*it) {
            // Handle null pointers.
            continue;
        }
        Tile &tile = **it;
        // Use the actual zoom level for computing the clipping mask.
        Leaf leaf{ TileID{ tile.id.sourceZ, tile.id.x, tile.id.y, tile.id.sourceZ }, tile.clip };
        // Try to add all remaining ids as children. We sorted the tile list
        // by z earlier, so all preceding items cannot be children of the current
        // tile.
        for (auto child_it = std::next(it); child_it != end; child_it++) {
            // Use the actual zoom level for computing the clipping mask.
            const auto& childID = (*child_it)->id;
            leaf.add(TileID { childID.sourceZ, childID.x, childID.y, childID.sourceZ });
        }
        // Sort so that structurally equal leaves compare equal below.
        leaf.children.sort();
        // Loop through all existing pools and try to find a matching ClipID.
        auto existing = std::find(pool.begin(), pool.end(), leaf);
        if (existing != pool.end()) {
            leaf.clip = existing->clip;
        } else {
            // We haven't found an existing clip ID
            leaf.clip = {};
            size++;
        }
        pool.emplace_back(std::move(leaf));
    }
    if (size > 0) {
        // Bits needed to distinguish `size` new tiles plus the "no tile" state.
        const uint32_t bit_count = util::ceil_log2(size + 1);
        const std::bitset<8> mask = uint64_t(((1ul << bit_count) - 1) << bit_offset);
        // We are starting our count with 1 since we need at least 1 bit set to distinguish between
        // areas without any tiles whatsoever and the current area.
        uint8_t count = 1;
        for (auto& tile : tiles) {
            tile->clip.mask |= mask;
            // Assign only to clip IDs that have no value yet.
            if (tile->clip.reference.none()) {
                tile->clip.reference = uint32_t(count++) << bit_offset;
            }
        }
        bit_offset += bit_count;
    }
    // The stencil buffer only has 8 bits; exceeding them means too many
    // distinct clip regions were requested.
    if (bit_offset > 8) {
        Log::Error(Event::OpenGL, "stencil mask overflow");
    }
}
// Returns true when all four immediate children of `id` are present in the
// sorted `container` (directly, or recursively via their own children) within
// the same wrap (w). Used to drop tiles fully obscured by higher-zoom tiles.
template <typename Container>
bool coveredByChildren(const TileID& id, const Container& container) {
    for (const auto& child : id.children()) {
        const auto lower = container.lower_bound(child);
        if (lower == container.end() || lower->first.w != child.w ||
            (lower->first != child && !coveredByChildren(child, container))) {
            return false;
        }
    }
    // We looked at all four immediate children and verified that they're covered.
    return true;
}
// Produces the final per-tile stencil values: merges duplicate leaves,
// inherits parent clip bits into children, and removes tiles that are
// entirely covered by their children.
std::map<TileID, ClipID> ClipIDGenerator::getStencils() const {
    std::map<TileID, ClipID> stencils;
    // Merge everything.
    for (auto& leaf : pool) {
        auto res = stencils.emplace(leaf.id, leaf.clip);
        if (!res.second) {
            // Merge with the existing ClipID when there was already an element with the
            // same tile ID.
            res.first->second |= leaf.clip;
        }
    }
    for (auto it = stencils.begin(); it != stencils.end(); ++it) {
        auto& childId = it->first;
        auto& childClip = it->second;
        // Loop through all preceding stencils, and find all parents.
        for (auto parentIt = std::reverse_iterator<std::map<TileID, ClipID>::iterator>(it);
             parentIt != stencils.rend(); ++parentIt) {
            auto& parentId = parentIt->first;
            if (childId.isChildOf(parentId)) {
                // Once we have a parent, we add the bits that this ID hasn't set yet.
                const auto& parentClip = parentIt->second;
                const auto mask = ~(childClip.mask & parentClip.mask);
                childClip.reference |= mask & parentClip.reference;
                childClip.mask |= parentClip.mask;
            }
        }
    }
    // Remove tiles that are entirely covered by children.
    util::erase_if(stencils, [&] (const auto& stencil) {
        return coveredByChildren(stencil.first, stencils);
    });
    return stencils;
}
} // namespace mbgl
<file_sep>/src/mbgl/storage/resource.cpp
#include <mbgl/storage/resource.hpp>
#include <mbgl/util/string.hpp>
#include <mbgl/util/token.hpp>
#include <mbgl/util/url.hpp>
namespace mbgl {
// Builds a Resource describing a style JSON document at `url`.
Resource Resource::style(const std::string& url) {
    return Resource {
        Resource::Kind::Style,
        url
    };
}
// Builds a Resource describing a source (e.g. TileJSON) document at `url`.
Resource Resource::source(const std::string& url) {
    return Resource {
        Resource::Kind::Source,
        url
    };
}
// Builds a sprite image Resource; appends "@2x" before ".png" for
// high-density displays (pixelRatio > 1).
Resource Resource::spriteImage(const std::string& base, float pixelRatio) {
    return Resource {
        Resource::Kind::SpriteImage,
        base + (pixelRatio > 1 ? "@2x" : "") + ".png"
    };
}
// Builds a sprite metadata Resource; appends "@2x" before ".json" for
// high-density displays (pixelRatio > 1).
Resource Resource::spriteJSON(const std::string& base, float pixelRatio) {
    return Resource {
        Resource::Kind::SpriteJSON,
        base + (pixelRatio > 1 ? "@2x" : "") + ".json"
    };
}
// Builds a glyph PBF Resource by expanding {fontstack} (percent-encoded) and
// {range} ("start-end") tokens in `urlTemplate`. Unknown tokens expand empty.
Resource Resource::glyphs(const std::string& urlTemplate, const std::string& fontStack, const std::pair<uint16_t, uint16_t>& glyphRange) {
    return Resource {
        Resource::Kind::Glyphs,
        util::replaceTokens(urlTemplate, [&](const std::string& token) {
            if (token == "fontstack") {
                return util::percentEncode(fontStack);
            } else if (token == "range") {
                return util::toString(glyphRange.first) + "-" + util::toString(glyphRange.second);
            } else {
                return std::string();
            }
        })
    };
}
// Builds a tile Resource by expanding {z}/{x}/{y}/{prefix}/{ratio} tokens in
// `urlTemplate`; unknown tokens expand to the empty string. The TileData
// member records the raw template and coordinates so the request can be
// re-derived later (e.g. for offline storage).
Resource Resource::tile(const std::string& urlTemplate, float pixelRatio, int32_t x, int32_t y, int8_t z) {
    bool supportsRatio = urlTemplate.find("{ratio}") != std::string::npos;
    return Resource {
        Resource::Kind::Tile,
        util::replaceTokens(urlTemplate, [&](const std::string& token) {
            if (token == "z") {
                return util::toString(z);
            } else if (token == "x") {
                return util::toString(x);
            } else if (token == "y") {
                return util::toString(y);
            } else if (token == "prefix") {
                // BUG FIX: `std::string prefix{ 2 };` list-initialized a
                // single-character string containing '\x02' (narrowing
                // int -> char), so `prefix[1]` wrote past the end. Use the
                // (count, fill) constructor to allocate two characters.
                std::string prefix(2, '\0');
                prefix[0] = "0123456789abcdef"[x % 16];
                prefix[1] = "0123456789abcdef"[y % 16];
                return prefix;
            } else if (token == "ratio") {
                return std::string(pixelRatio > 1.0 ? "@2x" : "");
            } else {
                return std::string();
            }
        }),
        Resource::TileData {
            urlTemplate,
            uint8_t(supportsRatio && pixelRatio > 1.0 ? 2 : 1),
            x,
            y,
            z
        }
    };
}
}
<file_sep>/src/mbgl/text/glyph_store.hpp
#ifndef MBGL_TEXT_GLYPH_STORE
#define MBGL_TEXT_GLYPH_STORE
#include <mbgl/text/font_stack.hpp>
#include <mbgl/text/glyph.hpp>
#include <mbgl/util/exclusive.hpp>
#include <mbgl/util/noncopyable.hpp>
#include <mbgl/util/work_queue.hpp>
#include <exception>
#include <set>
#include <string>
#include <unordered_map>
namespace mbgl {
class FileSource;
class GlyphPBF;
// The GlyphStore manages the loading and storage of Glyphs
// and creation of FontStack objects. The GlyphStore lives
// on the MapThread but can be queried from any thread.
class GlyphStore : private util::noncopyable {
public:
    // Callbacks fired when a glyph range finishes parsing or fails to load.
    class Observer {
    public:
        virtual ~Observer() = default;
        virtual void onGlyphsLoaded(const std::string& /* fontStack */, const GlyphRange&) {};
        virtual void onGlyphsError(const std::string& /* fontStack */, const GlyphRange&, std::exception_ptr) {};
    };
    GlyphStore(FileSource&);
    ~GlyphStore();
    // Returns the FontStack for the given name, creating it on first use; the
    // exclusive wrapper holds the stacks mutex for the caller.
    util::exclusive<FontStack> getFontStack(const std::string& fontStack);
    // Returns true if the set of GlyphRanges are available and parsed or false
    // if they are not. For the missing ranges, a request on the FileSource is
    // made and when the glyph if finally parsed, it gets added to the respective
    // FontStack and a signal is emitted to notify the observers. This method
    // can be called from any thread.
    bool hasGlyphRanges(const std::string& fontStack, const std::set<GlyphRange>& glyphRanges);
    // URL template used for glyph requests (contains {fontstack}/{range}).
    void setURL(const std::string &url) {
        glyphURL = url;
    }
    std::string getURL() const {
        return glyphURL;
    }
    void setObserver(Observer* observer);
private:
    void requestGlyphRange(const std::string& fontStackName, const GlyphRange& range);
    FileSource& fileSource;
    std::string glyphURL;
    // In-flight/parsed glyph range requests, keyed by font stack name.
    std::unordered_map<std::string, std::map<GlyphRange, std::unique_ptr<GlyphPBF>>> ranges;
    std::mutex rangesMutex;
    // Parsed font stacks, keyed by font stack name.
    std::unordered_map<std::string, std::unique_ptr<FontStack>> stacks;
    std::mutex stacksMutex;
    util::WorkQueue workQueue;
    // Default no-op observer so `observer` is never null.
    Observer nullObserver;
    Observer* observer = &nullObserver;
};
} // namespace mbgl
#endif
<file_sep>/test/util/async_task.cpp
#include <mbgl/util/async_task.hpp>
#include <mbgl/util/run_loop.hpp>
#include <mbgl/util/thread.hpp>
#include "../fixtures/util.hpp"
#include <vector>
using namespace mbgl::util;
namespace {
// Worker that hammers an AsyncTask with 100k sends, to exercise coalescing
// and cross-thread safety.
class TestWorker {
public:
    TestWorker(AsyncTask *async_)
        : async(async_) {}
    void run() {
        for (unsigned i = 0; i < 100000; ++i) {
            async->send();
        }
    }
    // Same as run(), but invokes `cb` when done (used to stop the run loop).
    void runWithCallback(std::function<void()> cb) {
        for (unsigned i = 0; i < 100000; ++i) {
            async->send();
        }
        cb();
    }
private:
    AsyncTask *async;
};
} // namespace
// Multiple send() calls before the loop runs must coalesce into one callback.
TEST(AsyncTask, RequestCoalescing) {
    RunLoop loop;
    unsigned count = 0;
    AsyncTask async([&count] { ++count; });
    async.send();
    async.send();
    async.send();
    async.send();
    async.send();
    loop.runOnce();
    EXPECT_EQ(count, 1);
}
// Destroying an AsyncTask with a pending send() must not fire the callback.
TEST(AsyncTask, DestroyShouldNotRunQueue) {
    RunLoop loop;
    unsigned count = 0;
    auto async = std::make_unique<AsyncTask>([&count] { ++count; });
    async->send();
    async.reset();
    EXPECT_EQ(count, 0);
}
// send() calls from 25 worker threads must still coalesce into one callback
// once the loop runs after all workers have joined.
TEST(AsyncTask, RequestCoalescingMultithreaded) {
    RunLoop loop;
    unsigned count = 0;
    AsyncTask async([&count] { ++count; });
    std::vector<std::unique_ptr<Thread<TestWorker>>> threads;
    ThreadContext context = {"Test", ThreadType::Map, ThreadPriority::Regular};
    unsigned numThreads = 25;
    for (unsigned i = 0; i < numThreads; ++i) {
        std::unique_ptr<Thread<TestWorker>> thread =
            std::make_unique<Thread<TestWorker>>(context, &async);
        thread->invoke(&TestWorker::run);
        threads.push_back(std::move(thread));
    }
    // Join all the threads
    threads.clear();
    loop.runOnce();
    EXPECT_EQ(count, 1);
}
// Concurrent send() while the loop is running must not crash; the callback
// should fire at least once (typically more).
TEST(AsyncTask, ThreadSafety) {
    RunLoop loop;
    unsigned count = 0;
    AsyncTask async([&count] { ++count; });
    unsigned numThreads = 25;
    // Stop the loop once every worker has reported completion.
    auto callback = [&] {
        if (!--numThreads) {
            loop.stop();
        }
    };
    std::vector<std::unique_ptr<Thread<TestWorker>>> threads;
    std::vector<std::unique_ptr<mbgl::WorkRequest>> requests;
    ThreadContext context = {"Test", ThreadType::Map, ThreadPriority::Regular};
    for (unsigned i = 0; i < numThreads; ++i) {
        std::unique_ptr<Thread<TestWorker>> thread =
            std::make_unique<Thread<TestWorker>>(context, &async);
        requests.push_back(
            thread->invokeWithCallback(&TestWorker::runWithCallback, callback));
        threads.push_back(std::move(thread));
    }
    loop.run();
    // We expect here more than 1 but 1 would also be
    // a valid result, although very unlikely (I hope).
    EXPECT_GT(count, 1);
}
<file_sep>/src/mbgl/layer/circle_layer.hpp
#ifndef MBGL_CIRCLE_LAYER
#define MBGL_CIRCLE_LAYER
#include <mbgl/style/style_layer.hpp>
#include <mbgl/style/paint_property.hpp>
namespace mbgl {
// Paint properties for circle layers, initialized to style-spec defaults.
class CirclePaintProperties {
public:
    PaintProperty<float> radius { 5.0f };
    PaintProperty<Color> color { {{ 0, 0, 0, 1 }} };
    PaintProperty<float> opacity { 1.0f };
    PaintProperty<std::array<float, 2>> translate { {{ 0, 0 }} };
    PaintProperty<TranslateAnchorType> translateAnchor { TranslateAnchorType::Map };
    PaintProperty<float> blur { 0 };
    // Visible only with a positive radius and non-zero alpha and opacity.
    bool isVisible() const {
        return radius > 0 && color.value[3] > 0 && opacity > 0;
    }
};
// Style layer rendering circle features. Circles have no layout properties,
// so parseLayout is a no-op.
class CircleLayer : public StyleLayer {
public:
    CircleLayer() : StyleLayer(Type::Circle) {}
    std::unique_ptr<StyleLayer> clone() const override;
    void parseLayout(const JSValue&) override {};
    void parsePaints(const JSValue&) override;
    void cascade(const StyleCascadeParameters&) override;
    bool recalculate(const StyleCalculationParameters&) override;
    std::unique_ptr<Bucket> createBucket(StyleBucketParameters&) const override;
    CirclePaintProperties paint;
};
// RTTI-free downcast check: a StyleLayer is a CircleLayer iff its type tag
// says so.
template <>
inline bool StyleLayer::is<CircleLayer>() const {
    return type == Type::Circle;
}
} // namespace mbgl
#endif
<file_sep>/test/storage/offline.cpp
#include <mbgl/storage/offline.hpp>
#include <mbgl/source/source_info.hpp>
#include <mbgl/map/tile_id.hpp>
#include <gtest/gtest.h>
using namespace mbgl;
// Bounding boxes around San Francisco; the second uses wrapped longitudes
// (> 180 degrees) to exercise antimeridian handling.
static const LatLngBounds sanFrancisco = LatLngBounds::hull(
    { 37.6609, -122.5744 },
    { 37.8271, -122.3204 });
static const LatLngBounds sanFranciscoWrapped = LatLngBounds::hull(
    { 37.6609, 238.5744 },
    { 37.8271, 238.3204 });
// An empty region must yield an empty tile cover.
TEST(OfflineTilePyramidRegionDefinition, TileCoverEmpty) {
    OfflineTilePyramidRegionDefinition region("", LatLngBounds::empty(), 0, 20, 1.0);
    SourceInfo info;
    auto result = region.tileCover(SourceType::Vector, 512, info);
    ASSERT_TRUE(result.empty());
}
// The cover is non-empty only when the region's zoom range intersects the
// source's zoom range.
TEST(OfflineTilePyramidRegionDefinition, TileCoverZoomIntersection) {
    OfflineTilePyramidRegionDefinition region("", sanFrancisco, 2, 2, 1.0);
    SourceInfo info;
    info.minZoom = 0;
    auto resultIntersection = region.tileCover(SourceType::Vector, 512, info);
    ASSERT_EQ(1, resultIntersection.size());
    info.minZoom = 3;
    auto resultNoIntersection = region.tileCover(SourceType::Vector, 512, info);
    ASSERT_TRUE(resultNoIntersection.empty());
}
// Halving the tile size shifts the cover one zoom level deeper (1 tile at
// z0 for 512px tiles vs. 4 tiles at z1 for 256px tiles).
TEST(OfflineTilePyramidRegionDefinition, TileCoverTileSize) {
    OfflineTilePyramidRegionDefinition region("", LatLngBounds::world(), 0, 0, 1.0);
    SourceInfo info;
    auto result512 = region.tileCover(SourceType::Vector, 512, info);
    ASSERT_EQ(1, result512.size());
    ASSERT_EQ(0, result512[0].z);
    auto result256 = region.tileCover(SourceType::Vector, 256, info);
    ASSERT_EQ(4, result256.size());
    ASSERT_EQ(1, result256[0].z);
}
// Fractional zoom bounds round differently per source type: vector floors,
// raster rounds.
TEST(OfflineTilePyramidRegionDefinition, TileCoverZoomRounding) {
    OfflineTilePyramidRegionDefinition region("", sanFrancisco, 0.6, 0.7, 1.0);
    SourceInfo info;
    auto resultVector = region.tileCover(SourceType::Vector, 512, info);
    ASSERT_EQ(1, resultVector.size());
    ASSERT_EQ(0, resultVector[0].z);
    auto resultRaster = region.tileCover(SourceType::Raster, 512, info);
    ASSERT_EQ(1, resultRaster.size());
    ASSERT_EQ(1, resultRaster[0].z);
}
// Longitudes beyond 180 degrees must wrap back to canonical tile coordinates.
TEST(OfflineTilePyramidRegionDefinition, TileCoverWrapped) {
    OfflineTilePyramidRegionDefinition region("", sanFranciscoWrapped, 0, 0, 1.0);
    SourceInfo info;
    auto result = region.tileCover(SourceType::Vector, 512, info);
    ASSERT_EQ(1, result.size());
    ASSERT_EQ(0, result[0].z);
    ASSERT_EQ(0, result[0].x);
    ASSERT_EQ(0, result[0].y);
}
<file_sep>/test/storage/http_reading.cpp
#include "storage.hpp"
#include <mbgl/storage/online_file_source.hpp>
#include <mbgl/util/exception.hpp>
#include <mbgl/util/chrono.hpp>
#include <mbgl/util/run_loop.hpp>
#include <mbgl/util/thread_context.hpp>
#include <future>
// Basic 200 response: body delivered on the main thread, no cache headers.
TEST_F(Storage, HTTPTest) {
    SCOPED_TEST(HTTPTest)
    using namespace mbgl;
    util::RunLoop loop;
    OnlineFileSource fs;
    std::unique_ptr<FileRequest> req1 = fs.request({ Resource::Unknown, "http://127.0.0.1:3000/test" },
               [&](Response res) {
        req1.reset();
        EXPECT_TRUE(util::ThreadContext::currentlyOn(util::ThreadType::Main));
        EXPECT_EQ(nullptr, res.error);
        ASSERT_TRUE(res.data.get());
        EXPECT_EQ("Hello World!", *res.data);
        EXPECT_FALSE(bool(res.expires));
        EXPECT_FALSE(bool(res.modified));
        EXPECT_FALSE(bool(res.etag));
        loop.stop();
        HTTPTest.finish();
    });
    loop.run();
}
// A 404 on a non-tile resource surfaces as a NotFound error with no data.
TEST_F(Storage, HTTP404) {
    SCOPED_TEST(HTTP404)
    using namespace mbgl;
    util::RunLoop loop;
    OnlineFileSource fs;
    std::unique_ptr<FileRequest> req2 = fs.request({ Resource::Unknown, "http://127.0.0.1:3000/doesnotexist" },
               [&](Response res) {
        req2.reset();
        EXPECT_TRUE(util::ThreadContext::currentlyOn(util::ThreadType::Main));
        ASSERT_NE(nullptr, res.error);
        EXPECT_EQ(Response::Error::Reason::NotFound, res.error->reason);
        EXPECT_EQ("HTTP status code 404", res.error->message);
        EXPECT_FALSE(bool(res.data));
        EXPECT_FALSE(bool(res.expires));
        EXPECT_FALSE(bool(res.modified));
        EXPECT_FALSE(bool(res.etag));
        loop.stop();
        HTTP404.finish();
    });
    loop.run();
}
// A 404 on a *tile* resource is treated as "no content", not as an error.
TEST_F(Storage, HTTPTile404) {
    SCOPED_TEST(HTTPTile404)
    using namespace mbgl;
    util::RunLoop loop;
    OnlineFileSource fs;
    std::unique_ptr<FileRequest> req2 = fs.request({ Resource::Tile, "http://127.0.0.1:3000/doesnotexist" },
               [&](Response res) {
        req2.reset();
        EXPECT_TRUE(util::ThreadContext::currentlyOn(util::ThreadType::Main));
        EXPECT_TRUE(res.noContent);
        EXPECT_FALSE(bool(res.error));
        EXPECT_FALSE(bool(res.data));
        EXPECT_FALSE(bool(res.expires));
        EXPECT_FALSE(bool(res.modified));
        EXPECT_FALSE(bool(res.etag));
        loop.stop();
        HTTPTile404.finish();
    });
    loop.run();
}
// A 200 with an empty body yields an empty (but present) data string, not
// a "no content" response.
TEST_F(Storage, HTTP200EmptyData) {
    SCOPED_TEST(HTTP200EmptyData)
    using namespace mbgl;
    util::RunLoop loop;
    OnlineFileSource fs;
    std::unique_ptr<FileRequest> req = fs.request({ Resource::Unknown, "http://127.0.0.1:3000/empty-data" },
               [&](Response res) {
        req.reset();
        EXPECT_TRUE(util::ThreadContext::currentlyOn(util::ThreadType::Main));
        EXPECT_FALSE(res.noContent);
        EXPECT_FALSE(bool(res.error));
        EXPECT_EQ(*res.data, std::string());
        EXPECT_FALSE(bool(res.expires));
        EXPECT_FALSE(bool(res.modified));
        EXPECT_FALSE(bool(res.etag));
        loop.stop();
        HTTP200EmptyData.finish();
    });
    loop.run();
}
// A 204 No Content maps to noContent=true with neither error nor data.
TEST_F(Storage, HTTP204) {
    SCOPED_TEST(HTTP204)
    using namespace mbgl;
    util::RunLoop loop;
    OnlineFileSource fs;
    std::unique_ptr<FileRequest> req2 = fs.request({ Resource::Unknown, "http://127.0.0.1:3000/no-content" },
               [&](Response res) {
        req2.reset();
        EXPECT_TRUE(util::ThreadContext::currentlyOn(util::ThreadType::Main));
        EXPECT_TRUE(res.noContent);
        EXPECT_FALSE(bool(res.error));
        EXPECT_FALSE(bool(res.data));
        EXPECT_FALSE(bool(res.expires));
        EXPECT_FALSE(bool(res.modified));
        EXPECT_FALSE(bool(res.etag));
        loop.stop();
        HTTP204.finish();
    });
    loop.run();
}
// A 500 surfaces as a Server error with the status code in the message.
TEST_F(Storage, HTTP500) {
    SCOPED_TEST(HTTP500)
    using namespace mbgl;
    util::RunLoop loop;
    OnlineFileSource fs;
    std::unique_ptr<FileRequest> req3 = fs.request({ Resource::Unknown, "http://127.0.0.1:3000/permanent-error" },
               [&](Response res) {
        req3.reset();
        EXPECT_TRUE(util::ThreadContext::currentlyOn(util::ThreadType::Main));
        ASSERT_NE(nullptr, res.error);
        EXPECT_EQ(Response::Error::Reason::Server, res.error->reason);
        EXPECT_EQ("HTTP status code 500", res.error->message);
        EXPECT_FALSE(bool(res.data));
        EXPECT_FALSE(bool(res.expires));
        EXPECT_FALSE(bool(res.modified));
        EXPECT_FALSE(bool(res.etag));
        loop.stop();
        HTTP500.finish();
    });
    loop.run();
}
// Requesting with a null callback must throw MisuseException with the
// documented message.
TEST_F(Storage, HTTPNoCallback) {
    SCOPED_TEST(HTTPNoCallback)
    using namespace mbgl;
    util::RunLoop loop;
    OnlineFileSource fs;
    try {
        fs.request({ Resource::Unknown, "http://127.0.0.1:3000/test" },
               nullptr);
    } catch (const util::MisuseException& ex) {
        EXPECT_EQ(std::string(ex.what()), "FileSource callback can't be empty");
    } catch (const std::exception&) {
        EXPECT_TRUE(false) << "Unhandled exception.";
    }
    HTTPNoCallback.finish();
}
<file_sep>/src/mbgl/util/clip_id.hpp
#ifndef MBGL_UTIL_CLIP_IDS
#define MBGL_UTIL_CLIP_IDS
#include <mbgl/map/tile_id.hpp>
#include <bitset>
#include <string>
#include <list>
#include <set>
#include <vector>
#include <forward_list>
#include <map>
namespace mbgl {
class Tile;
// An 8-bit stencil mask/reference pair identifying the screen region a tile
// may draw into.
struct ClipID {
    inline ClipID() {}
    // Constructs from bit strings such as "00001111" (std::bitset parsing).
    inline ClipID(const std::string &mask_, const std::string &reference_) : mask(mask_), reference(reference_) {}
    std::bitset<8> mask;
    std::bitset<8> reference;
    inline bool operator==(const ClipID &other) const {
        return mask == other.mask && reference == other.reference;
    }
    // Merges another ClipID's bits into this one.
    inline ClipID& operator|=(const ClipID &other) {
        mask |= other.mask;
        reference |= other.reference;
        return *this;
    }
};
// Allocates stencil clip IDs for tiles across successive update() calls and
// computes the final per-tile stencil values via getStencils().
class ClipIDGenerator {
private:
    // One tile plus the set of deeper tiles that cover parts of it; equal
    // leaves may share a ClipID.
    struct Leaf {
        Leaf(TileID, ClipID&);
        void add(const TileID &p);
        bool operator==(const Leaf &other) const;
        const TileID id;
        std::forward_list<TileID> children;
        ClipID& clip;
    };
    // Next free bit position within the 8-bit stencil mask.
    uint8_t bit_offset = 0;
    std::vector<Leaf> pool;
public:
    void update(std::forward_list<Tile *> tiles);
    std::map<TileID, ClipID> getStencils() const;
};
} // namespace mbgl
#endif
<file_sep>/src/mbgl/layer/fill_layer.cpp
#include <mbgl/layer/fill_layer.hpp>
#include <mbgl/style/style_bucket_parameters.hpp>
#include <mbgl/renderer/fill_bucket.hpp>
#include <mbgl/util/get_geometries.hpp>
namespace mbgl {
// Polymorphic copy used when duplicating the style's layer list.
std::unique_ptr<StyleLayer> FillLayer::clone() const {
    return std::make_unique<FillLayer>(*this);
}
// Reads the fill-* paint properties from the style JSON into `paint`.
void FillLayer::parsePaints(const JSValue& layer) {
    paint.antialias.parse("fill-antialias", layer);
    paint.opacity.parse("fill-opacity", layer);
    paint.color.parse("fill-color", layer);
    paint.outlineColor.parse("fill-outline-color", layer);
    paint.translate.parse("fill-translate", layer);
    paint.translateAnchor.parse("fill-translate-anchor", layer);
    paint.pattern.parse("fill-pattern", layer);
}
// Applies class-based cascading to every fill paint property.
void FillLayer::cascade(const StyleCascadeParameters& parameters) {
    paint.antialias.cascade(parameters);
    paint.opacity.cascade(parameters);
    paint.color.cascade(parameters);
    paint.outlineColor.cascade(parameters);
    paint.translate.cascade(parameters);
    paint.translateAnchor.cascade(parameters);
    paint.pattern.cascade(parameters);
}
// Re-evaluates paint properties for the current zoom/time and derives which
// render passes this layer needs. Returns true while any transition is still
// in flight (so the map keeps animating).
bool FillLayer::recalculate(const StyleCalculationParameters& parameters) {
    bool hasTransitions = false;
    hasTransitions |= paint.antialias.calculate(parameters);
    hasTransitions |= paint.opacity.calculate(parameters);
    hasTransitions |= paint.color.calculate(parameters);
    hasTransitions |= paint.outlineColor.calculate(parameters);
    hasTransitions |= paint.translate.calculate(parameters);
    hasTransitions |= paint.translateAnchor.calculate(parameters);
    hasTransitions |= paint.pattern.calculate(parameters);
    passes = RenderPass::None;
    // Antialiased outlines are drawn in the translucent pass.
    if (paint.antialias) {
        passes |= RenderPass::Translucent;
    }
    // Patterned or non-opaque fills need blending; fully opaque fills can use
    // the cheaper opaque pass.
    if (!paint.pattern.value.from.empty() || (paint.color.value[3] * paint.opacity) < 1.0f) {
        passes |= RenderPass::Translucent;
    } else {
        passes |= RenderPass::Opaque;
    }
    return hasTransitions;
}
// Builds a FillBucket from the tile features that pass this layer's filter.
std::unique_ptr<Bucket> FillLayer::createBucket(StyleBucketParameters& parameters) const {
    auto bucket = std::make_unique<FillBucket>();
    parameters.eachFilteredFeature(filter, [&] (const auto& feature) {
        bucket->addGeometry(getGeometries(feature));
    });
    return std::move(bucket);
}
} // namespace mbgl
<file_sep>/src/mbgl/geometry/line_buffer.cpp
#include <mbgl/geometry/line_buffer.hpp>
#include <mbgl/gl/gl.hpp>
#include <cmath>
using namespace mbgl;
// Appends one line vertex (8 bytes) and returns its index. Layout:
//  - bytes 0-3: two int16 coords, each left-shifted once with the texture
//    bit (tx/ty) stored in the LSB;
//  - bytes 4-5: extrusion vector, scaled by extrudeScale and rounded;
//  - bytes 6-7: direction and line-distance-so-far, bit-packed as described
//    inline below.
GLsizei LineVertexBuffer::add(vertex_type x, vertex_type y, float ex, float ey, bool tx, bool ty, int8_t dir, int32_t linesofar) {
    GLsizei idx = index();
    void *data = addElement();
    int16_t *coords = static_cast<int16_t *>(data);
    coords[0] = (x * 2) | tx;
    coords[1] = (y * 2) | ty;
    int8_t *extrude = static_cast<int8_t *>(data);
    extrude[4] = ::round(extrudeScale * ex);
    extrude[5] = ::round(extrudeScale * ey);
    // Encode the -1/0/1 direction value into .zw coordinates of a_data, which is normally covered
    // by linesofar, so we need to merge them.
    // The z component's first bit, as well as the sign bit is reserved for the direction,
    // so we need to shift the linesofar.
    extrude[6] = ((dir < 0) ? -1 : 1) * ((dir ? 1 : 0) | static_cast<int8_t>((linesofar << 1) & 0x7F));
    extrude[7] = (linesofar >> 6) & 0x7F;
    return idx;
}
<file_sep>/src/mbgl/style/property_parsing.hpp
#ifndef MBGL_PROPERTY_PARSING
#define MBGL_PROPERTY_PARSING

#include <mbgl/style/types.hpp>
#include <mbgl/util/rapidjson.hpp>
#include <mbgl/util/optional.hpp>

#include <functional>

namespace mbgl {

// Alias for the optional type used by the style property parsers.
// NOTE(review): this previously read `using optional = optional<T>;`, a
// self-referential alias that cannot compile; restored to the external
// optional type — confirm against mbgl/util/optional.hpp.
template <typename T>
using optional = mapbox::util::optional<T>;

// Parses the JSON value of the style property `name` into a T.
// Returns an engaged optional on success, an empty one otherwise.
template <typename T>
optional<T> parseProperty(const char* name, const JSValue&);

} // namespace mbgl

#endif
<file_sep>/platform/ios/scripts/run.sh
#!/usr/bin/env bash

set -e
set -o pipefail
set -u

source ./platform/ios/scripts/setup.sh

BUILDTYPE=${BUILDTYPE:-Release}

# Pull "[publish <platform>-v<version>]" out of the commit message, if any.
PUBLISH_TAG=($(git show -s --format=%B | sed -n 's/.*\[publish \([a-z]\{1,\}\)-v\([0-9a-z.\-]\{1,\}\)\].*/\1 \2/p'))
# Use ":-" (not ",-") so a missing publish tag expands to the empty string;
# "," is the bash case-modification operator and leaves an unset array
# element fatal under `set -u`.
PUBLISH_PLATFORM=${PUBLISH_TAG[0]:-}
PUBLISH_VERSION=${PUBLISH_TAG[1]:-}

################################################################################
# Build
################################################################################

if [[ ${PUBLISH_PLATFORM} = 'ios' ]]; then
    # Line continuations restored on the deploy steps below: without the
    # trailing "\", mapbox_time was invoked with no command and publish.sh
    # ran as a separate, untimed statement.

    # default, with debug symbols
    mapbox_time "package_ios_symbols" \
    make ipackage

    mapbox_time "deploy_ios_symbols" \
    ./platform/ios/scripts/publish.sh "${PUBLISH_VERSION}" symbols

    # no debug symbols, for smaller distribution
    mapbox_time "package_ios_stripped" \
    make ipackage-strip

    mapbox_time "deploy_ios_stripped" \
    ./platform/ios/scripts/publish.sh "${PUBLISH_VERSION}"

    # dynamic, with debug symbols
    mapbox_time "package_ios_dynamic" \
    make iframework

    mapbox_time "deploy_ios_dynamic" \
    ./platform/ios/scripts/publish.sh "${PUBLISH_VERSION}" symbols-dynamic

    # dynamic, without debug symbols
    mapbox_time "package_ios_dynamic_stripped" \
    make iframework SYMBOLS=NO

    mapbox_time "deploy_ios_dynamic_stripped" \
    ./platform/ios/scripts/publish.sh "${PUBLISH_VERSION}" dynamic
else
    # build & test iOS
    mapbox_time "run_ios_tests" \
    make itest
fi
<file_sep>/gyp/android.gyp
# Thin GYP shim: pulls in the Android application build definition.
{
  'includes': [
    '../platform/android/mapboxgl-app.gypi',
  ],
}
<file_sep>/platform/linux/scripts/defaults.mk
# Default Linux build configuration; each variable can be overridden from
# the environment (e.g. `HTTP=none make linux`).
HEADLESS ?= glx
PLATFORM ?= linux
ASSET ?= fs
HTTP ?= curl
<file_sep>/src/mbgl/style/style_cascade_parameters.hpp
#ifndef STYLE_CASCADE_PARAMETERS
#define STYLE_CASCADE_PARAMETERS

#include <mbgl/util/chrono.hpp>
#include <mbgl/style/types.hpp>

#include <vector>

namespace mbgl {

class PropertyTransition;

// Inputs to a style cascade pass: the style classes currently active, the
// current time, and the transition applied when a property declares none of
// its own.
class StyleCascadeParameters {
public:
    StyleCascadeParameters(const std::vector<ClassID>& classes_,
                           const TimePoint& now_,
                           const PropertyTransition& defaultTransition_)
        : classes(classes_),
          now(now_),
          defaultTransition(defaultTransition_) {}

    std::vector<ClassID> classes;
    TimePoint now;
    // NOTE(review): held by value, so PropertyTransition must be complete
    // here despite the forward declaration above — presumably it arrives via
    // one of the includes; confirm.
    PropertyTransition defaultTransition;
};

} // namespace mbgl

#endif
<file_sep>/test/style/style_layer.cpp
#include "../fixtures/util.hpp"
#include <mbgl/style/style_layer.hpp>
#include <mbgl/layer/background_layer.hpp>
using namespace mbgl;
// A concrete layer can be created and held through the StyleLayer base.
TEST(StyleLayer, Create) {
    std::unique_ptr<StyleLayer> layer = std::make_unique<BackgroundLayer>();
    EXPECT_TRUE(reinterpret_cast<BackgroundLayer*>(layer.get()));
}
// clone() must produce a distinct object of the same concrete type.
TEST(StyleLayer, Clone) {
    std::unique_ptr<StyleLayer> layer = std::make_unique<BackgroundLayer>();
    std::unique_ptr<StyleLayer> clone = layer->clone();

    EXPECT_NE(layer.get(), clone.get());
    // Check the clone, not the original: the original was already checked by
    // the Create test, and re-checking `layer` here left the clone's type
    // unverified.
    EXPECT_TRUE(reinterpret_cast<BackgroundLayer*>(clone.get()));
}
// Properties stored on the StyleLayer base (here: id) survive cloning.
TEST(StyleLayer, CloneCopiesBaseProperties) {
    std::unique_ptr<BackgroundLayer> layer = std::make_unique<BackgroundLayer>();
    layer->id = "test";
    EXPECT_EQ("test", layer->clone()->id);
}
<file_sep>/src/mbgl/util/thread_context.hpp
#ifndef MBGL_UTIL_THREAD_CONTEXT
#define MBGL_UTIL_THREAD_CONTEXT

#include <cstdint>
#include <string>
#include <thread>

namespace mbgl {
namespace util {

// Requested scheduling priority; backed by bool since there are two levels.
enum class ThreadPriority : bool {
    Regular,
    Low,
};

// Role a thread plays inside the library.
enum class ThreadType : uint8_t {
    Main,
    Map,
    Worker,
    Unknown,
};

// Metadata describing a thread (name, role, priority). Registered via Set()
// and queried through the static accessors, which report on the calling
// thread's registered context.
struct ThreadContext {
public:
    ThreadContext(const std::string& name, ThreadType type, ThreadPriority priority);

    // Registers `context` for the calling thread.
    static void Set(ThreadContext* context);

    // True when the calling thread was registered with `type`; used for
    // thread-affinity assertions.
    static bool currentlyOn(ThreadType type);
    static std::string getName();
    static ThreadPriority getPriority();

    std::string name;
    ThreadType type;
    ThreadPriority priority;
};

} // namespace util
} // namespace mbgl

#endif
<file_sep>/src/mbgl/storage/http_context_base.hpp
#ifndef MBGL_STORAGE_HTTP_CONTEXT_BASE
#define MBGL_STORAGE_HTTP_CONTEXT_BASE

#include <mbgl/storage/http_request_base.hpp>
#include <mbgl/storage/network_status.hpp>

#include <set>

namespace mbgl {

// Platform-neutral interface to an HTTP implementation. Each platform
// backend provides createContext(); individual requests are created per
// Resource and report completion through the supplied callback.
class HTTPContextBase {
public:
    // Implemented by the platform backend.
    static std::unique_ptr<HTTPContextBase> createContext();
    // Upper bound on requests the backend runs in parallel.
    static uint32_t maximumConcurrentRequests();

    virtual ~HTTPContextBase() = default;
    // Raw pointer return: lifetime is managed by the request machinery, not
    // the caller.
    virtual HTTPRequestBase* createRequest(const Resource&, HTTPRequestBase::Callback) = 0;
};

} // namespace mbgl

#endif // MBGL_STORAGE_HTTP_CONTEXT_BASE
<file_sep>/src/mbgl/gl/texture_pool.hpp
#ifndef MBGL_UTIL_TEXTUREPOOL
#define MBGL_UTIL_TEXTUREPOOL

#include <mbgl/util/noncopyable.hpp>
#include <mbgl/gl/gl.hpp>
#include <mbgl/gl/gl_object_store.hpp>

#include <algorithm>
#include <memory>
#include <vector>

namespace mbgl {
namespace gl {

// Hands out reusable OpenGL texture IDs, allocating them in batches of
// TexturePoolHolder::TextureMax.
class TexturePool : private util::noncopyable {
public:
    GLuint getTextureID(gl::GLObjectStore&);
    void releaseTextureID(GLuint);

private:
    // One allocated batch of texture objects plus its ID list.
    // NOTE(review): `ids` appears to track the IDs available for hand-out
    // (the constructor seeds it with the full batch) — confirm against
    // texture_pool.cpp.
    class Impl : private util::noncopyable {
    public:
        Impl(gl::GLObjectStore& glObjectStore) : ids(gl::TexturePoolHolder::TextureMax) {
            pool.create(glObjectStore);
            std::copy(pool.getIDs().begin(), pool.getIDs().end(), ids.begin());
        }

        // Movable (but not copyable) so Impls can live in the vector below.
        Impl(Impl&& o) : pool(std::move(o.pool)), ids(std::move(o.ids)) {}
        Impl& operator=(Impl&& o) { pool = std::move(o.pool); ids = std::move(o.ids); return *this; }

        gl::TexturePoolHolder pool;
        std::vector<GLuint> ids;
    };

    std::vector<Impl> pools;
};

} // namespace gl
} // namespace mbgl

#endif
<file_sep>/platform/osx/scripts/install.sh
#!/usr/bin/env bash

set -e
set -o pipefail

# Fetch the Mason package-manager submodule required by the build; timed via
# the CI helper `mapbox_time` (provided by the sourced CI environment).
mapbox_time "checkout_mason" \
git submodule update --init .mason
<file_sep>/src/mbgl/gl/gl_object_store.cpp
#include <mbgl/gl/gl_object_store.hpp>
#include <cassert>
namespace mbgl {
namespace gl {
// Lazily creates the GL program object; no-op when already created.
void ProgramHolder::create(GLObjectStore& objectStore_) {
    if (id) return;
    objectStore = &objectStore_;
    id = MBGL_CHECK_ERROR(glCreateProgram());
}

// Releases ownership. Deletion is deferred: the ID is queued on the object
// store and freed later by GLObjectStore::performCleanup() (which must run
// with the GL context current).
void ProgramHolder::reset() {
    if (!id) return;
    objectStore->abandonedPrograms.push_back(id);
    id = 0;
}
// Lazily creates the GL shader object of the holder's `type`; no-op when
// already created.
void ShaderHolder::create(GLObjectStore& objectStore_) {
    if (id) return;
    objectStore = &objectStore_;
    id = MBGL_CHECK_ERROR(glCreateShader(type));
}

// Queues the shader for deferred deletion (see performCleanup()).
void ShaderHolder::reset() {
    if (!id) return;
    objectStore->abandonedShaders.push_back(id);
    id = 0;
}
// Lazily generates the GL buffer object; no-op when already created.
void BufferHolder::create(GLObjectStore& objectStore_) {
    if (id) return;
    objectStore = &objectStore_;
    MBGL_CHECK_ERROR(glGenBuffers(1, &id));
}

// Queues the buffer for deferred deletion (see performCleanup()).
void BufferHolder::reset() {
    if (!id) return;
    objectStore->abandonedBuffers.push_back(id);
    id = 0;
}
// Lazily generates the GL texture object; no-op when already created.
void TextureHolder::create(GLObjectStore& objectStore_) {
    if (id) return;
    objectStore = &objectStore_;
    MBGL_CHECK_ERROR(glGenTextures(1, &id));
}

// Queues the texture for deferred deletion (see performCleanup()).
void TextureHolder::reset() {
    if (!id) return;
    objectStore->abandonedTextures.push_back(id);
    id = 0;
}
// Lazily generates a batch of TextureMax texture objects; no-op when the
// batch already exists.
void TexturePoolHolder::create(GLObjectStore& objectStore_) {
    // The previous guard `if (bool())` value-initialized a temporary bool
    // (always false), so an existing pool was never detected and textures
    // could be regenerated and leaked. glGenTextures fills every slot at
    // once, so the first slot is a reliable created/not-created signal.
    if (ids.front() != 0) return;
    objectStore = &objectStore_;
    MBGL_CHECK_ERROR(glGenTextures(TextureMax, ids.data()));
}

// Queues every live texture in the batch for deferred deletion and clears
// the ID slots.
void TexturePoolHolder::reset() {
    // The previous guard `if (!bool())` was always true, so reset() returned
    // immediately and the batch was never queued for deletion. No early-out
    // is needed: the loop below already skips empty slots, and with nothing
    // created it touches neither objectStore nor the GL state.
    for (GLuint id : ids) {
        if (id) {
            objectStore->abandonedTextures.push_back(id);
        }
    }
    ids.fill(0);
}
// Lazily generates the vertex array object; no-op when already created.
void VAOHolder::create(GLObjectStore& objectStore_) {
    if (id) return;
    objectStore = &objectStore_;
    MBGL_CHECK_ERROR(gl::GenVertexArrays(1, &id));
}

// Queues the VAO for deferred deletion (see performCleanup()).
void VAOHolder::reset() {
    if (!id) return;
    objectStore->abandonedVAOs.push_back(id);
    id = 0;
}
// All abandoned objects must have been flushed via performCleanup() before
// destruction; otherwise the GL resources would leak silently.
GLObjectStore::~GLObjectStore() {
    assert(abandonedPrograms.empty());
    assert(abandonedShaders.empty());
    assert(abandonedBuffers.empty());
    assert(abandonedTextures.empty());
    assert(abandonedVAOs.empty());
}
// Deletes all queued (abandoned) GL objects. Issues glDelete* calls
// directly, so it must run where the owning GL context is current.
void GLObjectStore::performCleanup() {
    // Programs and shaders have no batched delete call; free one by one.
    for (GLuint id : abandonedPrograms) {
        MBGL_CHECK_ERROR(glDeleteProgram(id));
    }
    abandonedPrograms.clear();

    for (GLuint id : abandonedShaders) {
        MBGL_CHECK_ERROR(glDeleteShader(id));
    }
    abandonedShaders.clear();

    // Buffers, textures and VAOs are deleted in a single batched call each.
    if (!abandonedBuffers.empty()) {
        MBGL_CHECK_ERROR(glDeleteBuffers(int(abandonedBuffers.size()), abandonedBuffers.data()));
        abandonedBuffers.clear();
    }

    if (!abandonedTextures.empty()) {
        MBGL_CHECK_ERROR(glDeleteTextures(int(abandonedTextures.size()), abandonedTextures.data()));
        abandonedTextures.clear();
    }

    if (!abandonedVAOs.empty()) {
        MBGL_CHECK_ERROR(gl::DeleteVertexArrays(int(abandonedVAOs.size()), abandonedVAOs.data()));
        abandonedVAOs.clear();
    }
}
} // namespace gl
} // namespace mbgl
<file_sep>/test/util/geo.cpp
#include "../fixtures/util.hpp"
#include <mbgl/util/constants.hpp>
#include <mbgl/util/geo.hpp>
#include <mbgl/map/tile_id.hpp>
using namespace mbgl;
// world() spans the full WGS84 range on both axes.
TEST(LatLngBounds, World) {
    auto result = LatLngBounds::world();
    ASSERT_DOUBLE_EQ(-90, result.south());
    ASSERT_DOUBLE_EQ( 90, result.north());
    ASSERT_DOUBLE_EQ(-180, result.west());
    ASSERT_DOUBLE_EQ( 180, result.east());
}
// singleton() collapses the bounds onto a single point: every edge equals
// the point's coordinate.
TEST(LatLngBounds, Singleton) {
    auto result = LatLngBounds::singleton({1, 2});
    ASSERT_DOUBLE_EQ(1, result.south());
    ASSERT_DOUBLE_EQ(1, result.north());
    ASSERT_DOUBLE_EQ(2, result.west());
    ASSERT_DOUBLE_EQ(2, result.east());
}
// hull() must produce the same normalized bounds no matter which pair of
// opposite corners is supplied, in either order. The four cases previously
// spelled out (SW/NE, NE/SW, SE/NW, NW/SE) are driven from a table here.
TEST(LatLngBounds, Hull) {
    const double s = 1, w = 2, n = 3, e = 4;

    const LatLng corners[][2] = {
        { {s, w}, {n, e} }, // southwest, northeast
        { {n, e}, {s, w} }, // northeast, southwest
        { {s, e}, {n, w} }, // southeast, northwest
        { {n, w}, {s, e} }, // northwest, southeast
    };

    for (const auto& pair : corners) {
        const auto bounds = LatLngBounds::hull(pair[0], pair[1]);
        ASSERT_DOUBLE_EQ(s, bounds.south());
        ASSERT_DOUBLE_EQ(n, bounds.north());
        ASSERT_DOUBLE_EQ(w, bounds.west());
        ASSERT_DOUBLE_EQ(e, bounds.east());
    }
}
// empty() is empty; the world bounds are not.
TEST(LatLngBounds, Empty) {
    ASSERT_TRUE(LatLngBounds::empty().isEmpty());
    ASSERT_FALSE(LatLngBounds::world().isEmpty());
}
// Accessors of hull({1,2},{3,4}): south=1, west=2, north=3, east=4.
TEST(LatLngBounds, Center) {
    auto result = LatLngBounds::hull({1, 2}, {3, 4}).center();
    ASSERT_DOUBLE_EQ(2, result.latitude);  // midpoint of 1 and 3
    ASSERT_DOUBLE_EQ(3, result.longitude); // midpoint of 2 and 4
}

TEST(LatLngBounds, Southwest) {
    auto result = LatLngBounds::hull({1, 2}, {3, 4}).southwest();
    ASSERT_DOUBLE_EQ(1, result.latitude);
    ASSERT_DOUBLE_EQ(2, result.longitude);
}

TEST(LatLngBounds, Northeast) {
    auto result = LatLngBounds::hull({1, 2}, {3, 4}).northeast();
    ASSERT_DOUBLE_EQ(3, result.latitude);
    ASSERT_DOUBLE_EQ(4, result.longitude);
}

TEST(LatLngBounds, Southeast) {
    auto result = LatLngBounds::hull({1, 2}, {3, 4}).southeast();
    ASSERT_DOUBLE_EQ(1, result.latitude);
    ASSERT_DOUBLE_EQ(4, result.longitude);
}

TEST(LatLngBounds, Northwest) {
    auto result = LatLngBounds::hull({1, 2}, {3, 4}).northwest();
    ASSERT_DOUBLE_EQ(3, result.latitude);
    ASSERT_DOUBLE_EQ(2, result.longitude);
}
// Converting a tile ID to a LatLng yields the tile's top-left corner.
TEST(LatLng, FromTileID) {
    // The origin tile at any zoom is the world's top-left corner.
    for (int i = 0; i < 20; i++) {
        const LatLng ll{ TileID(i, 0, 0, 0) };
        ASSERT_DOUBLE_EQ(-util::LONGITUDE_MAX, ll.longitude);
        ASSERT_DOUBLE_EQ(util::LATITUDE_MAX, ll.latitude);
    }

    {
        // One tile east of the world at z0 starts at longitude +180.
        const LatLng ll{ TileID(0, 1, 0, 0) };
        ASSERT_DOUBLE_EQ(util::LONGITUDE_MAX, ll.longitude);
        ASSERT_DOUBLE_EQ(util::LATITUDE_MAX, ll.latitude);
    }

    {
        // Negative (wrapped) tile x is not clamped: x=-1 at z0 gives
        // (-1)*360 - 180 = -540.
        const LatLng ll{ TileID(0, -1, 0, 0) };
        ASSERT_DOUBLE_EQ(-540, ll.longitude);
        ASSERT_DOUBLE_EQ(util::LATITUDE_MAX, ll.latitude);
    }
}
// wrap() normalizes longitude back into range; plain assignment does not.
TEST(LatLng, Boundaries) {
    LatLng coordinate;
    // Default-constructed coordinate sits at the (0, 0) origin.
    ASSERT_DOUBLE_EQ(0, coordinate.latitude);
    ASSERT_DOUBLE_EQ(0, coordinate.longitude);

    coordinate.longitude = -180.1;
    // Assignment alone leaves the out-of-range value untouched.
    ASSERT_DOUBLE_EQ(-180.1, coordinate.longitude);
    coordinate.wrap();
    ASSERT_DOUBLE_EQ(179.90000000000001, coordinate.longitude); // 1E-14

    coordinate.longitude = 180.9;
    coordinate.wrap();
    ASSERT_DOUBLE_EQ(-179.09999999999999, coordinate.longitude);

    coordinate.longitude = -360.5;
    coordinate.wrap();
    ASSERT_DOUBLE_EQ(-0.5, coordinate.longitude);

    coordinate.longitude = 360.5;
    coordinate.wrap();
    ASSERT_DOUBLE_EQ(0.5, coordinate.longitude);

    // Wrapping also handles values many revolutions out of range.
    coordinate.longitude = 360000.5;
    coordinate.wrap();
    ASSERT_DOUBLE_EQ(0.5, coordinate.longitude);
}
// Converting a tile ID to bounds yields that tile's geographic extent.
TEST(LatLngBounds, FromTileID) {
    {
        // The single z0 tile covers the whole world.
        const LatLngBounds bounds{ TileID(0, 0, 0, 0) };
        ASSERT_DOUBLE_EQ(-util::LONGITUDE_MAX, bounds.west());
        ASSERT_DOUBLE_EQ(-util::LATITUDE_MAX, bounds.south());
        ASSERT_DOUBLE_EQ(util::LONGITUDE_MAX, bounds.east());
        ASSERT_DOUBLE_EQ(util::LATITUDE_MAX, bounds.north());
    }

    {
        // z1 (0,1): southwest quadrant.
        const LatLngBounds bounds{ TileID(1, 0, 1, 0) };
        ASSERT_DOUBLE_EQ(-util::LONGITUDE_MAX, bounds.west());
        ASSERT_DOUBLE_EQ(-util::LATITUDE_MAX, bounds.south());
        ASSERT_DOUBLE_EQ(0, bounds.east());
        ASSERT_DOUBLE_EQ(0, bounds.north());
    }

    {
        // z1 (1,1): southeast quadrant.
        const LatLngBounds bounds{ TileID(1, 1, 1, 0) };
        ASSERT_DOUBLE_EQ(0, bounds.west());
        ASSERT_DOUBLE_EQ(-util::LATITUDE_MAX, bounds.south());
        ASSERT_DOUBLE_EQ(util::LONGITUDE_MAX, bounds.east());
        ASSERT_DOUBLE_EQ(0, bounds.north());
    }

    {
        // z1 (0,0): northwest quadrant.
        const LatLngBounds bounds{ TileID(1, 0, 0, 0) };
        ASSERT_DOUBLE_EQ(-util::LONGITUDE_MAX, bounds.west());
        ASSERT_DOUBLE_EQ(0, bounds.south());
        ASSERT_DOUBLE_EQ(0, bounds.east());
        ASSERT_DOUBLE_EQ(util::LATITUDE_MAX, bounds.north());
    }
}
<file_sep>/platform/android/MapboxGLAndroidSDK/src/main/java/com/mapbox/mapboxsdk/maps/widgets/UserLocationView.java
package com.mapbox.mapboxsdk.maps.widgets;
import android.animation.ObjectAnimator;
import android.animation.ValueAnimator;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.location.Location;
import android.os.Build;
import android.os.SystemClock;
import android.support.annotation.Nullable;
import android.support.v4.content.ContextCompat;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import com.mapbox.mapboxsdk.R;
import com.mapbox.mapboxsdk.camera.CameraPosition;
import com.mapbox.mapboxsdk.camera.CameraUpdateFactory;
import com.mapbox.mapboxsdk.constants.MyBearingTracking;
import com.mapbox.mapboxsdk.constants.MyLocationTracking;
import com.mapbox.mapboxsdk.geometry.LatLng;
import com.mapbox.mapboxsdk.location.LocationListener;
import com.mapbox.mapboxsdk.location.LocationServices;
import com.mapbox.mapboxsdk.maps.MapboxMap;
import com.mapbox.mapboxsdk.maps.Projection;
import java.lang.ref.WeakReference;
/**
* UI element overlaid on a map to show the user's location.
*/
public final class UserLocationView extends View {
private MapboxMap mMapboxMap;
private Projection mProjection;

// What the marker currently shows; mStaleMarker is set when the fix is
// older than one minute (see isStale()).
private boolean mShowMarker;
private boolean mShowDirection;
private boolean mShowAccuracy;
private boolean mStaleMarker;

// Screen-space transform that positions/rotates the marker in onDraw().
private PointF mMarkerScreenPoint;
private Matrix mMarkerScreenMatrix;

// Accuracy-circle drawing state.
private Paint mAccuracyPaintFill;
private Paint mAccuracyPaintStroke;
private Path mAccuracyPath;
private RectF mAccuracyBounds;

// Marker drawables (plain / bearing arrow / stale) with origin-centered
// bounds so the matrix can place them.
private Drawable mUserLocationDrawable;
private RectF mUserLocationDrawableBoundsF;
private Rect mUserLocationDrawableBounds;

private Drawable mUserLocationBearingDrawable;
private RectF mUserLocationBearingDrawableBoundsF;
private Rect mUserLocationBearingDrawableBounds;

private Drawable mUserLocationStaleDrawable;
private RectF mUserLocationStaleDrawableBoundsF;
private Rect mUserLocationStaleDrawableBounds;

// Region invalidated for the previous marker position.
private Rect mDirtyRect;
private RectF mDirtyRectF;

// Marker state and the animators driving it.
private LatLng mMarkerCoordinate;
private ValueAnimator mMarkerCoordinateAnimator;

private float mGpsMarkerDirection;
private float mCompassMarkerDirection;
private ObjectAnimator mMarkerDirectionAnimator;

private float mMarkerAccuracy;
private ObjectAnimator mMarkerAccuracyAnimator;

// Last camera state applied, used to skip redundant camera animations.
private LatLng mCurrentMapViewCoordinate;
private double mCurrentBearing;

private boolean mPaused = false;
private Location mUserLocation;
private UserLocationListener mUserLocationListener;

private MapboxMap.OnMyLocationChangeListener mOnMyLocationChangeListener;

@MyLocationTracking.Mode
private int mMyLocationTrackingMode;

@MyBearingTracking.Mode
private int mMyBearingTrackingMode;

// Compass data
private MyBearingListener mBearingChangeListener;
// All three View constructor variants funnel through initialize().
public UserLocationView(Context context) {
    super(context);
    initialize(context);
}

public UserLocationView(Context context, AttributeSet attrs) {
    super(context, attrs);
    initialize(context);
}

public UserLocationView(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    initialize(context);
}
/**
 * One-time setup: view flags, layout params, compass listener, accuracy
 * paints, and the three user-location drawables (plain, bearing, stale)
 * with origin-centered bounds so the screen matrix can position them.
 */
private void initialize(Context context) {
    // View configuration
    setEnabled(false);
    setWillNotDraw(false);

    // Layout params
    ViewGroup.LayoutParams lp = new ViewGroup.LayoutParams(
            ViewGroup.LayoutParams.MATCH_PARENT,
            ViewGroup.LayoutParams.MATCH_PARENT);
    setLayoutParams(lp);

    // Setup sensors
    mBearingChangeListener = new MyBearingListener(context);

    // Setup the custom paint
    Resources resources = context.getResources();
    int accuracyColor = ContextCompat.getColor(context, R.color.my_location_ring);

    float density = resources.getDisplayMetrics().density;
    mMarkerCoordinate = new LatLng(0.0, 0.0);
    mMarkerScreenPoint = new PointF();
    mMarkerScreenMatrix = new Matrix();

    mAccuracyPaintFill = new Paint();
    mAccuracyPaintFill.setAntiAlias(true);
    mAccuracyPaintFill.setStyle(Paint.Style.FILL);
    mAccuracyPaintFill.setColor(accuracyColor);
    mAccuracyPaintFill.setAlpha((int) (255 * 0.25f));

    mAccuracyPaintStroke = new Paint();
    mAccuracyPaintStroke.setAntiAlias(true);
    mAccuracyPaintStroke.setStyle(Paint.Style.STROKE);
    mAccuracyPaintStroke.setStrokeWidth(0.5f * density);
    mAccuracyPaintStroke.setColor(accuracyColor);
    mAccuracyPaintStroke.setAlpha((int) (255 * 0.5f));

    mAccuracyPath = new Path();
    mAccuracyBounds = new RectF();

    // The bounds construction below was previously repeated verbatim for
    // each drawable; it is factored into the two helpers underneath.
    mUserLocationDrawable = ContextCompat.getDrawable(getContext(), R.drawable.my_location);
    mUserLocationDrawableBounds = centeredBounds(mUserLocationDrawable);
    mUserLocationDrawableBoundsF = centeredBoundsF(mUserLocationDrawable);
    mUserLocationDrawable.setBounds(mUserLocationDrawableBounds);

    mUserLocationBearingDrawable = ContextCompat.getDrawable(getContext(), R.drawable.my_location_bearing);
    mUserLocationBearingDrawableBounds = centeredBounds(mUserLocationBearingDrawable);
    mUserLocationBearingDrawableBoundsF = centeredBoundsF(mUserLocationBearingDrawable);
    mUserLocationBearingDrawable.setBounds(mUserLocationBearingDrawableBounds);

    mUserLocationStaleDrawable = ContextCompat.getDrawable(getContext(), R.drawable.my_location_stale);
    mUserLocationStaleDrawableBounds = centeredBounds(mUserLocationStaleDrawable);
    mUserLocationStaleDrawableBoundsF = centeredBoundsF(mUserLocationStaleDrawable);
    mUserLocationStaleDrawable.setBounds(mUserLocationStaleDrawableBounds);
}

/** Rect centered on the origin, sized to the drawable's intrinsic size. */
private static Rect centeredBounds(Drawable drawable) {
    int halfWidth = drawable.getIntrinsicWidth() / 2;
    int halfHeight = drawable.getIntrinsicHeight() / 2;
    return new Rect(-halfWidth, -halfHeight, halfWidth, halfHeight);
}

/** RectF centered on the origin, sized to the drawable's intrinsic size. */
private static RectF centeredBoundsF(Drawable drawable) {
    int halfWidth = drawable.getIntrinsicWidth() / 2;
    int halfHeight = drawable.getIntrinsicHeight() / 2;
    return new RectF(-halfWidth, -halfHeight, halfWidth, halfHeight);
}
/** Binds this view to the map it decorates and caches the map's projection. */
public void setMapboxMap(MapboxMap mapboxMap) {
    mMapboxMap = mapboxMap;
    mProjection = mapboxMap.getProjection();
}
/** Starts compass sensor updates when the bearing is tracked via compass. */
public void onStart() {
    if (mMyBearingTrackingMode == MyBearingTracking.COMPASS) {
        mBearingChangeListener.onStart(getContext());
    }
}

/** Stops sensor updates and halts any in-flight marker animations. */
public void onStop() {
    mBearingChangeListener.onStop();
    cancelAnimations();
}
@Override
public void onDraw(Canvas canvas) {
    super.onDraw(canvas);

    if (!mShowMarker) {
        return;
    }

    // Apply the position/rotation transform computed in update().
    canvas.concat(mMarkerScreenMatrix);

    // Pick the drawable: stale wins over bearing, bearing over the plain dot.
    Drawable dotDrawable = mShowDirection ? mUserLocationBearingDrawable : mUserLocationDrawable;
    dotDrawable = mStaleMarker ? mUserLocationStaleDrawable : dotDrawable;

    // IMPORTANT also update in update()
    RectF dotBounds = mShowDirection ? mUserLocationBearingDrawableBoundsF : mUserLocationDrawableBoundsF;
    dotBounds = mStaleMarker ? mUserLocationStaleDrawableBoundsF : dotBounds;

    // Skip drawing when both the accuracy circle and the dot fall outside
    // the clip region. Path-based quickReject on a hardware-accelerated
    // canvas requires JELLY_BEAN or newer.
    boolean willDraw = true;
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN || !canvas.isHardwareAccelerated()) {
        willDraw = mShowAccuracy && !mStaleMarker && !canvas.quickReject(mAccuracyPath, Canvas.EdgeType.AA);
    }
    willDraw |= !canvas.quickReject(dotBounds, Canvas.EdgeType.AA);

    if (willDraw) {
        if (mShowAccuracy && !mStaleMarker) {
            canvas.drawPath(mAccuracyPath, mAccuracyPaintFill);
            canvas.drawPath(mAccuracyPath, mAccuracyPaintStroke);
        }
        dotDrawable.draw(canvas);
    }
}
/**
 * Sets the location tracking mode. When entering a tracking mode with a fix
 * already available, the camera and the marker snap to it immediately.
 */
public void setMyLocationTrackingMode(@MyLocationTracking.Mode int myLocationTrackingMode) {
    mMyLocationTrackingMode = myLocationTrackingMode;

    if (myLocationTrackingMode != MyLocationTracking.TRACKING_NONE && mUserLocation != null) {
        // center map directly if we have a location fix
        mMarkerCoordinate = new LatLng(mUserLocation.getLatitude(), mUserLocation.getLongitude());
        mMapboxMap.moveCamera(CameraUpdateFactory.newLatLng(new LatLng(mUserLocation)));

        // center view directly
        mMarkerScreenMatrix.reset();
        mMarkerScreenPoint = getMarkerScreenPoint();
        mMarkerScreenMatrix.setTranslate(mMarkerScreenPoint.x, mMarkerScreenPoint.y);
    }
}
/** Enabling/disabling the view also toggles visibility and GPS updates. */
@Override
public void setEnabled(boolean enabled) {
    super.setEnabled(enabled);
    setVisibility(enabled ? View.VISIBLE : View.INVISIBLE);
    toggleGps(enabled);
}
/**
 * Recomputes the marker's screen transform, bearing rotation, and accuracy
 * circle, then invalidates only the dirty region (old + new marker areas).
 */
public void update() {
    if (isEnabled() && mShowMarker) {
        setVisibility(View.VISIBLE);

        mStaleMarker = isStale(mUserLocation);

        // compute new marker position
        // TODO add JNI method that takes existing pointf
        if (mMyLocationTrackingMode == MyLocationTracking.TRACKING_NONE) {
            // Marker moves over a static map: translate to its screen point.
            mMarkerScreenPoint = getMarkerScreenPoint();
            mMarkerScreenMatrix.reset();
            mMarkerScreenMatrix.setTranslate(
                    mMarkerScreenPoint.x,
                    mMarkerScreenPoint.y);

        } else if (mMyLocationTrackingMode == MyLocationTracking.TRACKING_FOLLOW) {
            // Map moves under a (mostly) fixed marker: animate the camera.
            double bearing;
            if (mShowDirection) {
                bearing = mMyBearingTrackingMode == MyBearingTracking.COMPASS ? mBearingChangeListener.getCompassBearing() : mUserLocation.getBearing();
            } else {
                bearing = mMapboxMap.getCameraPosition().bearing;
            }

            if (mCurrentMapViewCoordinate == null) {
                mCurrentMapViewCoordinate = mMapboxMap.getCameraPosition().target;
            }

            // only update if there is an actual change
            if ((!mCurrentMapViewCoordinate.equals(mMarkerCoordinate)) || (!(mCurrentBearing == bearing))) {
                CameraPosition cameraPosition = new CameraPosition.Builder()
                        .target(mMarkerCoordinate)
                        .bearing(bearing)
                        .build();
                mMapboxMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition), 300, null);
                mMarkerScreenMatrix.reset();
                mMarkerScreenPoint = getMarkerScreenPoint();
                mMarkerScreenMatrix.setTranslate(mMarkerScreenPoint.x, mMarkerScreenPoint.y);

                // set values for next check for actual change
                mCurrentMapViewCoordinate = mMarkerCoordinate;
                mCurrentBearing = bearing;
            }
        }

        // rotate the matrix so the arrow points at the bearing; when not
        // tracking, compensate for the map's own rotation
        if (mShowDirection) {
            if (mMyBearingTrackingMode == MyBearingTracking.COMPASS && mMyLocationTrackingMode == MyLocationTracking.TRACKING_NONE) {
                mMarkerScreenMatrix.preRotate((float) (mCompassMarkerDirection + mMapboxMap.getCameraPosition().bearing));
            } else if (mMyBearingTrackingMode == MyBearingTracking.GPS) {
                if (mMyLocationTrackingMode == MyLocationTracking.TRACKING_NONE) {
                    mMarkerScreenMatrix.preRotate((float) (mGpsMarkerDirection + mMapboxMap.getCameraPosition().bearing));
                } else {
                    mMarkerScreenMatrix.preRotate(mGpsMarkerDirection);
                }
            }
        }

        // adjust the accuracy circle: radius in pixels at the marker's latitude
        if (mShowAccuracy && !mStaleMarker) {
            mAccuracyPath.reset();
            mAccuracyPath.addCircle(0.0f, 0.0f,
                    (float) (mMarkerAccuracy / mMapboxMap.getProjection().getMetersPerPixelAtLatitude(
                            mMarkerCoordinate.getLatitude())),
                    Path.Direction.CW);

            mAccuracyPath.computeBounds(mAccuracyBounds, false);
            mAccuracyBounds.inset(-1.0f, -1.0f);
        }

        // invalidate changed pixels
        if (mDirtyRect == null) {
            mDirtyRect = new Rect();
            mDirtyRectF = new RectF();
        } else {
            // the old marker location
            invalidate(mDirtyRect);
        }

        // IMPORTANT: keep this bounds selection in sync with onDraw()
        RectF dotBounds = mShowDirection ? mUserLocationBearingDrawableBoundsF : mUserLocationDrawableBoundsF;
        dotBounds = mStaleMarker ? mUserLocationStaleDrawableBoundsF : dotBounds;
        RectF largerBounds = mShowAccuracy && !mStaleMarker && mAccuracyBounds.contains(dotBounds)
                ? mAccuracyBounds : dotBounds;
        mMarkerScreenMatrix.mapRect(mDirtyRectF, largerBounds);
        mDirtyRectF.roundOut(mDirtyRect);
        invalidate(mDirtyRect); // the new marker location
    } else {
        setVisibility(View.INVISIBLE);
    }
}
/** Returns the last received fix, or null when none has been received. */
public Location getLocation() {
    return mUserLocation;
}
/**
 * Enable / disable GPS location updates along with updating the UI.
 *
 * @param enableGps true if GPS is to be enabled, false if GPS is to be disabled
 */
private void toggleGps(boolean enableGps) {
    LocationServices locationServices = LocationServices.getLocationServices(getContext());

    if (enableGps) {
        // Set an initial location if one available
        Location lastLocation = locationServices.getLastLocation();
        if (lastLocation != null) {
            setLocation(lastLocation);
        }

        if (mUserLocationListener == null) {
            mUserLocationListener = new UserLocationListener(this);
        }

        // Register for Location Updates
        locationServices.addLocationListener(mUserLocationListener);
    } else {
        // Disable location and user dot
        setLocation(null);

        // Deregister for Location Updates. Guard against the listener never
        // having been created: setEnabled(false) can run before any enable
        // (initialize() calls it), which previously passed null here.
        if (mUserLocationListener != null) {
            locationServices.removeLocationListener(mUserLocationListener);
        }
    }

    locationServices.toggleGPS(enableGps);
}
/**
 * Sets the bearing tracking mode and starts/stops the compass accordingly.
 * In GPS mode the direction arrow shows only while the fix has a bearing.
 */
public void setMyBearingTrackingMode(@MyBearingTracking.Mode int myBearingTrackingMode) {
    mMyBearingTrackingMode = myBearingTrackingMode;

    if (myBearingTrackingMode == MyBearingTracking.COMPASS) {
        // Compass mode hides the accuracy ring and always shows the arrow.
        mShowAccuracy = false;
        mShowDirection = true;
        mBearingChangeListener.onStart(getContext());
    } else {
        mBearingChangeListener.onStop();
        if (myBearingTrackingMode == MyBearingTracking.GPS) {
            mShowDirection = (mUserLocation != null) && mUserLocation.hasBearing();
        } else {
            mShowDirection = false;
        }
    }
    update();
}
/**
 * Derives a compass bearing from the accelerometer and magnetometer and
 * forwards significant changes (>15 degrees) to setCompass().
 */
private class MyBearingListener implements SensorEventListener {

    private SensorManager mSensorManager;
    private Sensor mAccelerometer;
    private Sensor mMagnetometer;
    private float[] mLastAccelerometer = new float[3];
    private float[] mLastMagnetometer = new float[3];
    private boolean mLastAccelerometerSet = false;
    private boolean mLastMagnetometerSet = false;
    private float[] mR = new float[9];
    private float[] mOrientation = new float[3];
    private float mCurrentDegree = 0f;

    // Controls the sensor update rate in milliseconds
    private static final int UPDATE_RATE_MS = 300;

    // Compass data
    private float mCompassBearing;
    private long mCompassUpdateNextTimestamp = 0;

    public MyBearingListener(Context context) {
        mSensorManager = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
        mAccelerometer = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
        mMagnetometer = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
    }

    public void onStart(Context context) {
        mSensorManager.registerListener(this, mAccelerometer, SensorManager.SENSOR_DELAY_GAME);
        mSensorManager.registerListener(this, mMagnetometer, SensorManager.SENSOR_DELAY_GAME);
    }

    public void onStop() {
        mSensorManager.unregisterListener(this, mAccelerometer);
        mSensorManager.unregisterListener(this, mMagnetometer);
    }

    /** Last accepted compass bearing in degrees. */
    public float getCompassBearing() {
        return mCurrentDegree;
    }

    @Override
    public void onSensorChanged(SensorEvent event) {
        if (mPaused) {
            return;
        }

        // Throttle to at most one processed reading per UPDATE_RATE_MS.
        long currentTime = SystemClock.elapsedRealtime();
        if (currentTime < mCompassUpdateNextTimestamp) {
            return;
        }

        if (event.sensor == mAccelerometer) {
            System.arraycopy(event.values, 0, mLastAccelerometer, 0, event.values.length);
            mLastAccelerometerSet = true;
        } else if (event.sensor == mMagnetometer) {
            System.arraycopy(event.values, 0, mLastMagnetometer, 0, event.values.length);
            mLastMagnetometerSet = true;
        }

        // Orientation needs at least one sample from each sensor.
        if (mLastAccelerometerSet && mLastMagnetometerSet) {
            SensorManager.getRotationMatrix(mR, null, mLastAccelerometer, mLastMagnetometer);
            SensorManager.getOrientation(mR, mOrientation);
            float azimuthInRadians = mOrientation[0];

            mCompassBearing = (float) (Math.toDegrees(azimuthInRadians) + 360) % 360;
            if (mCompassBearing < 0) {
                // only allow positive degrees
                mCompassBearing += 360;
            }

            // Ignore changes smaller than 15 degrees to suppress jitter.
            if (mCompassBearing > mCurrentDegree + 15 || mCompassBearing < mCurrentDegree - 15) {
                mCurrentDegree = mCompassBearing;
                setCompass(mCurrentDegree);
            }
        }
        mCompassUpdateNextTimestamp = currentTime + UPDATE_RATE_MS;
    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) {
        // TODO add accuracy to the equation
    }
}
/**
 * Listens for fixes from LocationServices. Holds the view through a
 * WeakReference so the registered listener cannot keep a detached view
 * (and its Context) alive.
 */
private static class UserLocationListener implements LocationListener {

    private WeakReference<UserLocationView> mUserLocationView;

    public UserLocationListener(UserLocationView userLocationView) {
        mUserLocationView = new WeakReference<>(userLocationView);
    }

    /**
     * Callback method for receiving location updates from LocationServices.
     *
     * @param location The new Location data
     */
    @Override
    public void onLocationChanged(Location location) {
        UserLocationView locationView = mUserLocationView.get();
        // Drop updates when the view is gone or paused.
        if (locationView != null && !locationView.isPaused()) {
            locationView.setLocation(location);
        }
    }
}
/**
 * A fix is considered stale once it is older than one minute. In compass
 * mode staleness is ignored (the marker reflects the compass, not the fix).
 */
private boolean isStale(Location location) {
    if (location != null && mMyBearingTrackingMode != MyBearingTracking.COMPASS) {
        long ageInNanos;
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
            // Monotonic clock: immune to wall-clock adjustments.
            ageInNanos = SystemClock.elapsedRealtimeNanos() -
                    location.getElapsedRealtimeNanos();
        } else {
            // Fallback: wall-clock age, milliseconds converted to nanos.
            ageInNanos = (System.currentTimeMillis() - location.getTime()) * 1000 * 1000;
        }
        final long oneMinuteInNanos = 60L * 1000 * 1000 * 1000;
        return ageInNanos > oneMinuteInNanos;
    } else {
        return false;
    }
}
// Handles location updates from GPS. A null location hides the marker;
// otherwise the marker (or the camera, when tracking) is animated to the
// new fix and direction/accuracy animators are restarted.
private void setLocation(Location location) {
    // if null we should hide the marker
    if (location == null) {
        mShowMarker = false;
        mShowDirection = false;
        mShowAccuracy = false;

        cancelAnimations();

        mUserLocation = null;
        return;
    }

    // End any in-flight animators before starting new ones.
    if (mMarkerCoordinateAnimator != null) {
        mMarkerCoordinateAnimator.end();
        mMarkerCoordinateAnimator = null;
    }

    if (mMarkerDirectionAnimator != null) {
        mMarkerDirectionAnimator.end();
        mMarkerDirectionAnimator = null;
    }

    if (mMarkerAccuracyAnimator != null) {
        mMarkerAccuracyAnimator.end();
        mMarkerAccuracyAnimator = null;
    }

    mShowMarker = true;

    // First fix ever: animate from the new location itself (no jump).
    LatLng previousCoordinate;
    if (mUserLocation == null) {
        previousCoordinate = new LatLng(location);
    } else {
        previousCoordinate = new LatLng(mUserLocation);
    }

    if (mMyLocationTrackingMode == MyLocationTracking.TRACKING_NONE) {
        // moving marker above map
        mMarkerCoordinateAnimator = ValueAnimator.ofFloat(0.0f, 1.0f);
        mMarkerCoordinateAnimator.setDuration(1000);
        mMarkerCoordinateAnimator.addUpdateListener(new MarkerCoordinateAnimatorListener(
                previousCoordinate, new LatLng(location)
        ));
        mMarkerCoordinateAnimator.start();
        mMarkerCoordinate = new LatLng(location);
    } else {
        // moving map under the tracker
        mMarkerCoordinate = new LatLng(location);
    }

    if (mMyLocationTrackingMode == MyLocationTracking.TRACKING_NONE && mMyBearingTrackingMode == MyBearingTracking.GPS) {
        // show GPS direction
        mShowDirection = location.hasBearing();
        if (mShowDirection) {
            if (mUserLocation != null && mUserLocation.hasBearing()) {
                mGpsMarkerDirection = mUserLocation.getBearing();
            }
            float oldDir = mGpsMarkerDirection;
            float newDir = location.getBearing();

            // Rotate the short way around the circle (never more than 180).
            float diff = oldDir - newDir;
            if (diff > 180.0f) {
                newDir += 360.0f;
            } else if (diff < -180.0f) {
                newDir -= 360.f;
            }
            mMarkerDirectionAnimator = ObjectAnimator.ofFloat(this, "direction", oldDir, newDir);
            mMarkerDirectionAnimator.setDuration(1000);
            mMarkerDirectionAnimator.start();
        }
    } else if (mMyLocationTrackingMode == MyLocationTracking.TRACKING_FOLLOW && mMyBearingTrackingMode == MyBearingTracking.GPS) {
        // always show north & rotate map below
        mShowDirection = true;
        mGpsMarkerDirection = 0;
    }

    mShowAccuracy = location.hasAccuracy();
    if (mShowAccuracy) {
        if (mUserLocation != null && mUserLocation.hasAccuracy()) {
            mMarkerAccuracy = mUserLocation.getAccuracy();
        }
        mMarkerAccuracyAnimator = ObjectAnimator.ofFloat(this, "accuracy", location.getAccuracy());
        mMarkerAccuracyAnimator.setDuration(1000);
        mMarkerAccuracyAnimator.start();
    }

    mUserLocation = location;
    updateOnNextFrame();

    if (mOnMyLocationChangeListener != null) {
        mOnMyLocationChangeListener.onMyLocationChange(location);
    }
}
/**
 * Handles a compass sensor update. When the marker floats above the map
 * (TRACKING_NONE) the direction indicator is animated along the shortest
 * rotation towards the new bearing; when the map follows the user
 * (TRACKING_FOLLOW) with compass bearing tracking, the indicators are reset
 * to north and the view is updated instead.
 *
 * @param bearing the new compass bearing in degrees
 */
private void setCompass(float bearing) {
    if (mMyLocationTrackingMode == MyLocationTracking.TRACKING_NONE) {
        mShowDirection = true;
        float from = mCompassMarkerDirection;
        float to = bearing;
        // Rotate along the shortest arc between the two bearings.
        float delta = from - to;
        if (delta > 180.0f) {
            to += 360.0f;
        } else if (delta < -180.0f) {
            to -= 360.0f;
        }
        mMarkerDirectionAnimator = ObjectAnimator.ofFloat(this, "direction", from, to);
        mMarkerDirectionAnimator.setDuration(1000);
        mMarkerDirectionAnimator.start();
        mCompassMarkerDirection = bearing;
    } else if (mMyLocationTrackingMode == MyLocationTracking.TRACKING_FOLLOW) {
        cancelAnimations();
        if (mMyBearingTrackingMode == MyBearingTracking.COMPASS) {
            // Always point north; the map itself rotates underneath.
            mShowDirection = true;
            mGpsMarkerDirection = 0;
            mCompassMarkerDirection = 0;
            update();
        }
    }
}
// Requests a map redraw so marker changes become visible on the next frame.
void updateOnNextFrame() {
mMapboxMap.invalidate();
}
// Host-lifecycle hook: marks the view paused and stops GPS updates.
public void onPause() {
mPaused = true;
toggleGps(false);
}
// Host-lifecycle hook: clears the paused flag and, if the view is enabled,
// resumes GPS updates.
public void onResume() {
mPaused = false;
if (isEnabled()) {
toggleGps(true);
}
}
// Registers (or clears, when null) the callback invoked on each accepted
// location update.
public void setOnMyLocationChangeListener(@Nullable MapboxMap.OnMyLocationChangeListener listener) {
mOnMyLocationChangeListener = listener;
}
// Public only so ObjectAnimator can drive the "direction" property.
// Returns the marker direction for the active bearing-tracking source.
public float getDirection() {
    return mMyBearingTrackingMode == MyBearingTracking.COMPASS
            ? mCompassMarkerDirection
            : mGpsMarkerDirection;
}
// Public only so ObjectAnimator can drive the "direction" property.
// Stores the direction (normalized into (-360, 360) via modulo) on whichever
// field matches the active bearing-tracking source, then requests a redraw.
public void setDirection(float direction) {
    final float normalized = direction % 360.0f;
    if (mMyBearingTrackingMode == MyBearingTracking.COMPASS) {
        mCompassMarkerDirection = normalized;
    } else {
        mGpsMarkerDirection = normalized;
    }
    updateOnNextFrame();
}
// public for animator only
// Returns the accuracy value currently driven by the "accuracy" animator.
public float getAccuracy() {
return mMarkerAccuracy;
}
// public for animator only
// Stores the animated accuracy value and requests a redraw.
public void setAccuracy(float accuracy) {
mMarkerAccuracy = accuracy;
updateOnNextFrame();
}
/**
 * Interpolates the marker coordinate between two fixes while the
 * marker-move animation runs. The endpoints are captured once at
 * construction (now as immutable {@code final} fields) and the animator's
 * fraction drives a linear interpolation on every frame.
 */
private class MarkerCoordinateAnimatorListener implements ValueAnimator.AnimatorUpdateListener {

    private final double mFromLat;
    private final double mFromLng;
    private final double mToLat;
    private final double mToLng;

    /**
     * @param from coordinate the marker animates away from
     * @param to   coordinate the marker animates towards
     */
    private MarkerCoordinateAnimatorListener(LatLng from, LatLng to) {
        mFromLat = from.getLatitude();
        mFromLng = from.getLongitude();
        mToLat = to.getLatitude();
        mToLng = to.getLongitude();
    }

    @Override
    public void onAnimationUpdate(ValueAnimator animation) {
        float frac = animation.getAnimatedFraction();
        // Linear interpolation between the previous and the new coordinate.
        double latitude = mFromLat + (mToLat - mFromLat) * frac;
        double longitude = mFromLng + (mToLng - mFromLng) * frac;
        mMarkerCoordinate.setLatitude(latitude);
        mMarkerCoordinate.setLongitude(longitude);
        updateOnNextFrame();
    }
}
/**
 * Cancels any running marker animations (coordinate, direction, accuracy)
 * without letting them jump to their end values, and releases them.
 */
public void cancelAnimations() {
if (mMarkerCoordinateAnimator != null) {
mMarkerCoordinateAnimator.cancel();
mMarkerCoordinateAnimator = null;
}
if (mMarkerDirectionAnimator != null) {
mMarkerDirectionAnimator.cancel();
mMarkerDirectionAnimator = null;
}
if (mMarkerAccuracyAnimator != null) {
mMarkerAccuracyAnimator.cancel();
mMarkerAccuracyAnimator = null;
}
}
// Whether the view is currently paused (set by onPause/onResume).
public boolean isPaused() {
return mPaused;
}
/**
 * Returns the marker position in screen pixels. In TRACKING_NONE the
 * marker's geographic coordinate is projected to the screen; in any
 * tracking mode the marker sits at the padding-adjusted center of the view.
 */
public PointF getMarkerScreenPoint() {
if (mMyLocationTrackingMode == MyLocationTracking.TRACKING_NONE) {
mMarkerScreenPoint = mProjection.toScreenLocation(mMarkerCoordinate);
} else {
// NOTE(review): assumes the padding array order used here matches
// MapboxMap.getPadding() (indices 0..3) — confirm, the width term uses
// [0]/[2] while the height term uses [1]/[3].
int[] contentPadding = mMapboxMap.getPadding();
mMarkerScreenPoint = new PointF(((getMeasuredWidth() + contentPadding[0] - contentPadding[2]) / 2)
, ((getMeasuredHeight() - contentPadding[3] + contentPadding[1]) / 2));
}
return mMarkerScreenPoint;
}
}
<file_sep>/src/mbgl/style/style_layer.cpp
#include <mbgl/style/style_layer.hpp>
namespace mbgl {
// Name of the bucket this layer renders from: its own id unless the layer
// references another layer's bucket via "ref".
const std::string& StyleLayer::bucketName() const {
return ref.empty() ? id : ref;
}
// True if this layer participates in the given render pass.
bool StyleLayer::hasRenderPass(RenderPass pass) const {
return bool(passes & pass);
}
// True if the layer has at least one render pass and is not hidden.
bool StyleLayer::needsRendering() const {
return passes != RenderPass::None && visibility != VisibilityType::None;
}
} // namespace mbgl
<file_sep>/test/storage/headers.cpp
#include "storage.hpp"
#include "../fixtures/fixture_log_observer.hpp"
#include <mbgl/util/http_header.hpp>
// Exercises http::CacheControl::parse against well-formed and malformed
// Cache-Control headers: empty input, whitespace around '=', leading commas,
// negative or non-numeric max-age, unterminated quoted strings, repeated
// directives, and the must-revalidate flag.
TEST_F(Storage, HTTPHeaderParsing) {
using namespace mbgl;
http::CacheControl cc;
// Empty header: no max-age, no must-revalidate.
cc = http::CacheControl::parse(R"#()#");
ASSERT_FALSE(bool(cc.maxAge));
EXPECT_FALSE(cc.mustRevalidate);
// Whitespace around '=' is tolerated.
cc = http::CacheControl::parse(R"#(max-age =34)#");
ASSERT_TRUE(bool(cc.maxAge));
EXPECT_EQ(34, *cc.maxAge);
EXPECT_FALSE(cc.mustRevalidate);
// A leading comma does not break parsing.
cc = http::CacheControl::parse(R"#(,max-age=1)#");
ASSERT_TRUE(bool(cc.maxAge));
EXPECT_EQ(1, *cc.maxAge);
EXPECT_FALSE(cc.mustRevalidate);
// Negative and non-numeric max-age values are rejected.
cc = http::CacheControl::parse(R"#(max-age=-1)#");
ASSERT_FALSE(bool(cc.maxAge));
EXPECT_FALSE(cc.mustRevalidate);
cc = http::CacheControl::parse(R"#(max-age=foo)#");
ASSERT_FALSE(bool(cc.maxAge));
EXPECT_FALSE(cc.mustRevalidate);
// Quoted strings swallow intervening directives; the last valid value wins.
cc = http::CacheControl::parse(R"#(max-age="34,max-age="22,max-age=28)#");
ASSERT_TRUE(bool(cc.maxAge));
EXPECT_EQ(28, *cc.maxAge);
EXPECT_FALSE(cc.mustRevalidate);
cc = http::CacheControl::parse(R"#(max-age=3,max-age="34)#");
ASSERT_TRUE(bool(cc.maxAge));
EXPECT_EQ(3, *cc.maxAge);
EXPECT_FALSE(cc.mustRevalidate);
// An escaped quote keeps the whole value inside the quoted string.
cc = http::CacheControl::parse(R"#(max-age="\",max-age=4,")#");
ASSERT_FALSE(bool(cc.maxAge));
EXPECT_FALSE(cc.mustRevalidate);
// max-age is extracted even among unrelated directives.
cc = http::CacheControl::parse(R"#(private, max-age=0, no-cache)#");
ASSERT_TRUE(bool(cc.maxAge));
EXPECT_EQ(0, *cc.maxAge);
EXPECT_FALSE(cc.mustRevalidate);
cc = http::CacheControl::parse(R"#(max-age=0, no-cache, no-store)#");
ASSERT_TRUE(bool(cc.maxAge));
EXPECT_EQ(0, *cc.maxAge);
EXPECT_FALSE(cc.mustRevalidate);
cc = http::CacheControl::parse(R"#(, private , max-bar=3 , no-cache, "\,",,foo=",",,max-age=32)#");
ASSERT_TRUE(bool(cc.maxAge));
EXPECT_EQ(32, *cc.maxAge);
EXPECT_FALSE(cc.mustRevalidate);
// must-revalidate is recognized alongside max-age.
cc = http::CacheControl::parse(R"#(max-age=3600, must-revalidate)#");
ASSERT_TRUE(bool(cc.maxAge));
EXPECT_EQ(3600, *cc.maxAge);
EXPECT_TRUE(cc.mustRevalidate);
cc = http::CacheControl::parse(R"#(no-cache="Expires,Via",max-age=3600, must-revalidate)#");
ASSERT_TRUE(bool(cc.maxAge));
EXPECT_EQ(3600, *cc.maxAge);
EXPECT_TRUE(cc.mustRevalidate);
}
<file_sep>/platform/osx/INSTALL.md
# Integrating the Mapbox OS X SDK into your application
This document explains how to build the Mapbox OS X SDK and integrate it into your own Cocoa application.
### Requirements
The Mapbox OS X SDK requires the OS X 10.10.0 SDK or above.
### Build
1. [Install core dependencies](../../INSTALL.md).
1. Run `make xpackage`, which produces a `Mapbox.framework` in the `gyp/build/Release/` folder.
### Install
1. Copy `gyp/build/Release/Mapbox.framework` into your project.
1. In the project editor, select your application target, go to the General tab, and add `Mapbox.framework` to the *Embedded Binaries* section.
1. Mapbox vector tiles require a Mapbox account and API access token. In the project editor, select the application target. In the Info tab, set `MGLMapboxAccessToken` to your access token. You can obtain one from the [Mapbox account page](https://www.mapbox.com/studio/account/tokens/).
1. In a XIB or storyboard, add a Custom View and set its custom class to `MGLMapView`. If you need to manipulate the map view programmatically, import the `Mapbox` module (Swift) or `Mapbox.h` umbrella header (Objective-C).
## Use
The [Mapbox iOS SDK’s API documentation](https://www.mapbox.com/ios-sdk/api/) applies to the Mapbox OS X SDK with a few differences, mostly around features that are not yet implemented on OS X, such as user location tracking.
## Troubleshooting
If you run into build issues, try clearing the Xcode cache with `make clear_xcode_cache`.
<file_sep>/bin/offline.cpp
#include <mbgl/util/default_styles.hpp>
#include <mbgl/util/run_loop.hpp>
#include <mbgl/util/string.hpp>
#include <mbgl/util/io.hpp>
#include <mbgl/storage/default_file_source.hpp>
#include <cstdlib>
#include <iostream>
#include <csignal>
#include <atomic>
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunknown-pragmas"
#pragma GCC diagnostic ignored "-Wunused-local-typedefs"
#pragma GCC diagnostic ignored "-Wshadow"
#include <boost/program_options.hpp>
#pragma GCC diagnostic pop
namespace po = boost::program_options;
using namespace std::literals::chrono_literals;
/**
 * Command-line tool that downloads an offline region into a database file.
 * Parses a style URL, bounding box, zoom range and output path from the
 * command line, creates an offline region in a DefaultFileSource, and
 * reports download progress until completion, error, or Ctrl-C.
 */
int main(int argc, char *argv[]) {
std::string style = mbgl::util::default_styles::streets.url;
double north = 37.2, west = -122.8, south = 38.1, east = -121.7; // Bay area
double minZoom = 0.0, maxZoom = 15.0, pixelRatio = 1.0;
std::string output = "offline.db";
// getenv may return null; fall back to an empty token in that case.
const char* tokenEnv = getenv("MAPBOX_ACCESS_TOKEN");
std::string token = tokenEnv ? tokenEnv : std::string();
po::options_description desc("Allowed options");
desc.add_options()
("style,s", po::value(&style)->value_name("URL"), "Map stylesheet")
("north", po::value(&north)->value_name("degrees")->default_value(north), "North latitude")
("west", po::value(&west)->value_name("degrees")->default_value(west), "West longitude")
("south", po::value(&south)->value_name("degrees")->default_value(south), "South latitude")
("east", po::value(&east)->value_name("degrees")->default_value(east), "East longitude")
("minZoom", po::value(&minZoom)->value_name("number")->default_value(minZoom), "Min zoom level")
("maxZoom", po::value(&maxZoom)->value_name("number")->default_value(maxZoom), "Max zoom level")
("pixelRatio", po::value(&pixelRatio)->value_name("number")->default_value(pixelRatio), "Pixel ratio")
("token,t", po::value(&token)->value_name("key")->default_value(token), "Mapbox access token")
("output,o", po::value(&output)->value_name("file")->default_value(output), "Output database file name")
;
try {
po::variables_map vm;
po::store(po::parse_command_line(argc, argv, desc), vm);
po::notify(vm);
} catch(std::exception& e) {
std::cout << "Error: " << e.what() << std::endl << desc;
exit(1);
}
using namespace mbgl;
util::RunLoop loop;
DefaultFileSource fileSource(output, ".");
std::unique_ptr<OfflineRegion> region;
fileSource.setAccessToken(token);
LatLngBounds boundingBox = LatLngBounds::hull(LatLng(north, west), LatLng(south, east));
OfflineTilePyramidRegionDefinition definition(style, boundingBox, minZoom, maxZoom, pixelRatio);
OfflineRegionMetadata metadata;
// Observer that prints progress and stops the run loop on completion or
// when the download becomes inactive.
class Observer : public OfflineRegionObserver {
public:
Observer(OfflineRegion& region_, DefaultFileSource& fileSource_, util::RunLoop& loop_)
: region(region_),
fileSource(fileSource_),
loop(loop_),
start(SystemClock::now()) {
}
void statusChanged(OfflineRegionStatus status) override {
if (status.downloadState == OfflineRegionDownloadState::Inactive) {
std::cout << "stopped" << std::endl;
loop.stop();
return;
}
// Average bytes/sec since start; "-" until at least one second elapsed.
std::string bytesPerSecond = "-";
auto elapsedSeconds = (SystemClock::now() - start) / 1s;
if (elapsedSeconds != 0) {
bytesPerSecond = util::toString(status.completedResourceSize / elapsedSeconds);
}
std::cout << status.completedResourceCount << " / " << status.requiredResourceCount
<< " resources"
<< (status.requiredResourceCountIsPrecise ? "; " : " (indeterminate); ")
<< status.completedResourceSize << " bytes downloaded"
<< " (" << bytesPerSecond << " bytes/sec)"
<< std::endl;
if (status.complete()) {
std::cout << "Finished" << std::endl;
loop.stop();
}
}
void responseError(Response::Error error) override {
std::cerr << error.reason << " downloading resource: " << error.message << std::endl;
}
void mapboxTileCountLimitExceeded(uint64_t limit) override {
std::cerr << "Error: reached limit of " << limit << " offline tiles" << std::endl;
}
OfflineRegion& region;
DefaultFileSource& fileSource;
util::RunLoop& loop;
SystemTimePoint start;
};
// 'stop' is static so the captureless lambda passed to std::signal (which
// must convert to a plain function pointer) can reach it.
// NOTE(review): std::signal handlers are restricted to async-signal-safe
// operations; calling into the file source here is not strictly safe —
// confirm this is acceptable for a one-shot CLI tool.
static auto stop = [&] {
if (region) {
std::cout << "Stopping download... ";
fileSource.setOfflineRegionDownloadState(*region, OfflineRegionDownloadState::Inactive);
}
};
std::signal(SIGINT, [] (int) { stop(); });
fileSource.createOfflineRegion(definition, metadata, [&] (std::exception_ptr error, optional<OfflineRegion> region_) {
if (error) {
std::cerr << "Error creating region: " << util::toString(error) << std::endl;
loop.stop();
exit(1);
} else {
assert(region_);
region = std::make_unique<OfflineRegion>(std::move(*region_));
fileSource.setOfflineRegionObserver(*region, std::make_unique<Observer>(*region, fileSource, loop));
fileSource.setOfflineRegionDownloadState(*region, OfflineRegionDownloadState::Active);
}
});
loop.run();
return 0;
}
<file_sep>/test/storage/http_error.cpp
#include "storage.hpp"
#include <mbgl/storage/online_file_source.hpp>
#include <mbgl/storage/network_status.hpp>
#include <mbgl/util/chrono.hpp>
#include <mbgl/util/run_loop.hpp>
#include <cmath>
// A server error (HTTP 500) must be surfaced immediately, then retried
// after roughly a one second backoff; the second attempt succeeds.
TEST_F(Storage, HTTPTemporaryError) {
SCOPED_TEST(HTTPTemporaryError)
using namespace mbgl;
util::RunLoop loop;
OnlineFileSource fs;
const auto start = Clock::now();
std::unique_ptr<FileRequest> req1 = fs.request({ Resource::Unknown, "http://127.0.0.1:3000/temporary-error" }, [&](Response res) {
static int counter = 0;
switch (counter++) {
case 0: {
// First callback: the 500 error, delivered promptly.
const auto duration = std::chrono::duration<const double>(Clock::now() - start).count();
EXPECT_GT(0.2, duration) << "Initial error request took too long";
ASSERT_NE(nullptr, res.error);
EXPECT_EQ(Response::Error::Reason::Server, res.error->reason);
EXPECT_EQ("HTTP status code 500", res.error->message);
ASSERT_FALSE(bool(res.data));
EXPECT_FALSE(bool(res.expires));
EXPECT_FALSE(bool(res.modified));
EXPECT_FALSE(bool(res.etag));
} break;
case 1: {
// Second callback: the retry succeeded within the backoff window.
req1.reset();
const auto duration = std::chrono::duration<const double>(Clock::now() - start).count();
EXPECT_LT(0.99, duration) << "Backoff timer didn't wait 1 second";
EXPECT_GT(1.2, duration) << "Backoff timer fired too late";
EXPECT_EQ(nullptr, res.error);
ASSERT_TRUE(res.data.get());
EXPECT_EQ("Hello World!", *res.data);
EXPECT_FALSE(bool(res.expires));
EXPECT_FALSE(bool(res.modified));
EXPECT_FALSE(bool(res.etag));
loop.stop();
HTTPTemporaryError.finish();
} break;
}
});
loop.run();
}
// Connection failures (nothing listening on port 3001) are retried with
// exponential backoff (1s, 2s, 4s). The exact error message depends on the
// HTTP backend compiled in.
TEST_F(Storage, HTTPConnectionError) {
SCOPED_TEST(HTTPConnectionError)
using namespace mbgl;
util::RunLoop loop;
OnlineFileSource fs;
const auto start = Clock::now();
std::unique_ptr<FileRequest> req2 = fs.request({ Resource::Unknown, "http://127.0.0.1:3001/" }, [&](Response res) {
static int counter = 0;
static int wait = 0;
// Each attempt must land inside the expected backoff window.
const auto duration = std::chrono::duration<const double>(Clock::now() - start).count();
EXPECT_LT(wait - 0.01, duration) << "Backoff timer didn't wait 1 second";
EXPECT_GT(wait + 0.2, duration) << "Backoff timer fired too late";
ASSERT_NE(nullptr, res.error);
EXPECT_EQ(Response::Error::Reason::Connection, res.error->reason);
#ifdef MBGL_HTTP_NSURL
EXPECT_TRUE(res.error->message ==
"The operation couldn’t be completed. (NSURLErrorDomain error -1004.)" ||
res.error->message == "Could not connect to the server.")
<< "Full message is: \"" << res.error->message << "\"";
#elif MBGL_HTTP_CURL
const std::string prefix { "Couldn't connect to server: " };
EXPECT_STREQ(prefix.c_str(), res.error->message.substr(0, prefix.size()).c_str()) << "Full message is: \"" << res.error->message << "\"";
#else
FAIL();
#endif
ASSERT_FALSE(res.data.get());
EXPECT_FALSE(bool(res.expires));
EXPECT_FALSE(bool(res.modified));
EXPECT_FALSE(bool(res.etag));
// Stop after the third attempt (counter == 2).
if (counter == 2) {
req2.reset();
loop.stop();
HTTPConnectionError.finish();
}
// Next expected delay doubles: 1, 2, 4 seconds.
wait += (1 << counter);
counter++;
});
loop.run();
}
<file_sep>/src/mbgl/shader/outline_shader.cpp
#include <mbgl/shader/outline_shader.hpp>
#include <mbgl/shader/outline.vertex.hpp>
#include <mbgl/shader/outline.fragment.hpp>
#include <mbgl/gl/gl.hpp>
#include <cstdio>
using namespace mbgl;
// Compiles and links the "outline" shader program from its embedded
// vertex/fragment sources.
OutlineShader::OutlineShader(gl::GLObjectStore& glObjectStore)
: Shader("outline", shaders::outline::vertex, shaders::outline::fragment, glObjectStore) {
}
// Binds the position attribute: two GL_SHORTs per vertex, tightly packed
// (stride 0), starting at the given byte offset into the bound buffer.
void OutlineShader::bind(GLbyte* offset) {
MBGL_CHECK_ERROR(glEnableVertexAttribArray(a_pos));
MBGL_CHECK_ERROR(glVertexAttribPointer(a_pos, 2, GL_SHORT, false, 0, offset));
}
<file_sep>/include/mbgl/util/default_styles.hpp
#ifndef MBGL_PLATFORM_DEFAULT_STYLES
#define MBGL_PLATFORM_DEFAULT_STYLES
#include <vector>
#include <string>
namespace mbgl {
namespace util {
namespace default_styles {
// A built-in Mapbox style: its URL plus a human-readable name.
struct DefaultStyle {
const char* url;
const char* name;
};
// The individual style objects are defined elsewhere (platform/default).
extern const DefaultStyle streets;
extern const DefaultStyle emerald;
extern const DefaultStyle light;
extern const DefaultStyle dark;
extern const DefaultStyle satellite;
extern const DefaultStyle hybrid;
// NOTE(review): this header-defined array copies the extern objects during
// static initialization, so using it from another static initializer may
// observe uninitialized entries (static initialization order) — confirm.
const DefaultStyle orderedStyles[] = {
streets, emerald, light, dark, satellite, hybrid,
};
// Number of entries in orderedStyles.
const size_t numOrderedStyles = sizeof(orderedStyles) / sizeof(DefaultStyle);
} // end namespace default_styles
} // end namespace util
} // end namespace mbgl
#endif
<file_sep>/include/mbgl/annotation/point_annotation.hpp
#ifndef MBGL_ANNOTATION_POINT_ANNOTATION
#define MBGL_ANNOTATION_POINT_ANNOTATION
#include <mbgl/util/geo.hpp>
#include <string>
namespace mbgl {
// An immutable point annotation: a geographic position plus an optional
// icon name. The position is longitude-wrapped on construction.
class PointAnnotation {
public:
PointAnnotation(const LatLng& position_, const std::string& icon_ = "")
: position(position_.wrapped()), icon(icon_) {}
const LatLng position;
const std::string icon;
};
} // namespace mbgl
#endif
<file_sep>/test/storage/default_file_source.cpp
#include "storage.hpp"
#include <mbgl/storage/default_file_source.hpp>
#include <mbgl/util/run_loop.hpp>
// Fixture alias: DefaultFileSource tests reuse the Storage test environment.
class DefaultFileSourceTest : public Storage {};
// A response fetched from the network must be cached: a second request for
// the same resource returns values identical to the first response rather
// than hitting the server again.
TEST_F(DefaultFileSourceTest, CacheResponse) {
SCOPED_TEST(CacheResponse);
using namespace mbgl;
util::RunLoop loop;
DefaultFileSource fs(":memory:", ".");
const Resource resource { Resource::Unknown, "http://127.0.0.1:3000/cache" };
Response response;
std::unique_ptr<FileRequest> req1;
std::unique_ptr<FileRequest> req2;
req1 = fs.request(resource, [&](Response res) {
req1.reset();
EXPECT_EQ(nullptr, res.error);
ASSERT_TRUE(res.data.get());
EXPECT_EQ("Response 1", *res.data);
EXPECT_TRUE(bool(res.expires));
EXPECT_FALSE(bool(res.modified));
EXPECT_FALSE(bool(res.etag));
response = res;
// Now test that we get the same values as in the previous request. If we'd go to the server
// again, we'd get different values.
req2 = fs.request(resource, [&](Response res2) {
req2.reset();
EXPECT_EQ(response.error, res2.error);
ASSERT_TRUE(res2.data.get());
EXPECT_EQ(*response.data, *res2.data);
EXPECT_EQ(response.expires, res2.expires);
EXPECT_EQ(response.modified, res2.modified);
EXPECT_EQ(response.etag, res2.etag);
loop.stop();
CacheResponse.finish();
});
});
loop.run();
}
// Revalidation by ETag where the server answers 304: the second request
// first yields the cached response, then a notModified revalidation that
// carries no body but retains the cached ETag.
TEST_F(DefaultFileSourceTest, CacheRevalidateSame) {
SCOPED_TEST(CacheRevalidateSame)
using namespace mbgl;
util::RunLoop loop;
DefaultFileSource fs(":memory:", ".");
const Resource revalidateSame { Resource::Unknown, "http://127.0.0.1:3000/revalidate-same" };
std::unique_ptr<FileRequest> req1;
std::unique_ptr<FileRequest> req2;
uint16_t counter = 0;
// First request causes the response to get cached.
req1 = fs.request(revalidateSame, [&](Response res) {
req1.reset();
EXPECT_EQ(nullptr, res.error);
ASSERT_TRUE(res.data.get());
EXPECT_EQ("Response", *res.data);
EXPECT_FALSE(bool(res.expires));
EXPECT_FALSE(bool(res.modified));
EXPECT_EQ("snowfall", *res.etag);
// Second request returns the cached response, then immediately revalidates.
req2 = fs.request(revalidateSame, [&, res](Response res2) {
if (counter == 0) {
++counter;
EXPECT_FALSE(res2.notModified);
} else {
req2.reset();
EXPECT_EQ(nullptr, res2.error);
EXPECT_TRUE(res2.notModified);
ASSERT_FALSE(res2.data.get());
EXPECT_TRUE(bool(res2.expires));
EXPECT_FALSE(bool(res2.modified));
// We're not sending the ETag in the 304 reply, but it should still be there.
EXPECT_EQ("snowfall", *res2.etag);
loop.stop();
CacheRevalidateSame.finish();
}
});
});
loop.run();
}
// Revalidation by Last-Modified where the server answers 304: the cached
// modification timestamp survives the revalidation round-trip.
TEST_F(DefaultFileSourceTest, CacheRevalidateModified) {
SCOPED_TEST(CacheRevalidateModified)
using namespace mbgl;
util::RunLoop loop;
DefaultFileSource fs(":memory:", ".");
const Resource revalidateModified{ Resource::Unknown,
"http://127.0.0.1:3000/revalidate-modified" };
std::unique_ptr<FileRequest> req1;
std::unique_ptr<FileRequest> req2;
uint16_t counter = 0;
// First request causes the response to get cached.
req1 = fs.request(revalidateModified, [&](Response res) {
req1.reset();
EXPECT_EQ(nullptr, res.error);
ASSERT_TRUE(res.data.get());
EXPECT_EQ("Response", *res.data);
EXPECT_FALSE(bool(res.expires));
EXPECT_EQ(SystemClock::from_time_t(1420070400), *res.modified);
EXPECT_FALSE(res.etag);
// Second request returns the cached response, then immediately revalidates.
req2 = fs.request(revalidateModified, [&, res](Response res2) {
if (counter == 0) {
++counter;
EXPECT_FALSE(res2.notModified);
} else {
req2.reset();
EXPECT_EQ(nullptr, res2.error);
EXPECT_TRUE(res2.notModified);
ASSERT_FALSE(res2.data.get());
EXPECT_TRUE(bool(res2.expires));
EXPECT_EQ(SystemClock::from_time_t(1420070400), *res2.modified);
EXPECT_FALSE(res2.etag);
loop.stop();
CacheRevalidateModified.finish();
}
});
});
loop.run();
}
// Revalidation by ETag where the server returns NEW content: the second
// request yields the cached response first, then the fresh body and ETag.
TEST_F(DefaultFileSourceTest, CacheRevalidateEtag) {
SCOPED_TEST(CacheRevalidateEtag)
using namespace mbgl;
util::RunLoop loop;
DefaultFileSource fs(":memory:", ".");
const Resource revalidateEtag { Resource::Unknown, "http://127.0.0.1:3000/revalidate-etag" };
std::unique_ptr<FileRequest> req1;
std::unique_ptr<FileRequest> req2;
uint16_t counter = 0;
// First request causes the response to get cached.
req1 = fs.request(revalidateEtag, [&](Response res) {
req1.reset();
EXPECT_EQ(nullptr, res.error);
ASSERT_TRUE(res.data.get());
EXPECT_EQ("Response 1", *res.data);
EXPECT_FALSE(bool(res.expires));
EXPECT_FALSE(bool(res.modified));
EXPECT_EQ("response-1", *res.etag);
// Second request returns the cached response, then immediately revalidates.
req2 = fs.request(revalidateEtag, [&, res](Response res2) {
if (counter == 0) {
++counter;
EXPECT_FALSE(res2.notModified);
} else {
req2.reset();
EXPECT_EQ(nullptr, res2.error);
ASSERT_TRUE(res2.data.get());
EXPECT_NE(res.data, res2.data);
EXPECT_EQ("Response 2", *res2.data);
EXPECT_FALSE(bool(res2.expires));
EXPECT_FALSE(bool(res2.modified));
EXPECT_EQ("response-2", *res2.etag);
loop.stop();
CacheRevalidateEtag.finish();
}
});
});
loop.run();
}
<file_sep>/src/mbgl/gl/texture_pool.cpp
#include <mbgl/gl/texture_pool.hpp>
#include <mbgl/gl/gl_object_store.hpp>
#include <vector>
namespace mbgl {
namespace gl {
// Hands out an unused texture ID, allocating a new pool of IDs when every
// existing pool is exhausted.
GLuint TexturePool::getTextureID(gl::GLObjectStore& glObjectStore) {
for (auto& impl : pools) {
if (impl.ids.empty()) continue;
auto it = impl.ids.begin();
GLuint id = *it;
impl.ids.erase(it);
return id;
}
// All texture IDs are in use.
pools.emplace_back(Impl(glObjectStore));
auto it = pools.back().ids.begin();
GLuint id = *it;
pools.back().ids.erase(it);
return id;
}
// Returns a texture ID to its owning pool. A pool whose IDs are all free
// again is destroyed (presumably freeing the GL textures via Impl's
// destructor — confirm).
void TexturePool::releaseTextureID(GLuint id) {
for (auto it = pools.begin(); it != pools.end(); ++it) {
for (GLsizei i = 0; i < gl::TexturePoolHolder::TextureMax; ++i) {
if (it->pool[i] == id) {
it->ids.push_back(id);
if (GLsizei(it->ids.size()) == gl::TexturePoolHolder::TextureMax) {
// erase() invalidates 'it', but we return immediately below.
pools.erase(it);
}
return;
}
}
}
}
} // namespace gl
} // namespace mbgl
<file_sep>/src/mbgl/layer/symbol_layer.cpp
#include <mbgl/layer/symbol_layer.hpp>
#include <mbgl/renderer/symbol_bucket.hpp>
#include <mbgl/map/tile_id.hpp>
#include <mbgl/style/style_bucket_parameters.hpp>
namespace mbgl {
// Returns a copy of this layer as a generic StyleLayer.
std::unique_ptr<StyleLayer> SymbolLayer::clone() const {
return std::make_unique<SymbolLayer>(*this);
}
// Reads all symbol layout properties (placement plus the icon-* and text-*
// families) from the style JSON into this layer's layout struct.
void SymbolLayer::parseLayout(const JSValue& value) {
layout.placement.parse("symbol-placement", value);
layout.spacing.parse("symbol-spacing", value);
layout.avoidEdges.parse("symbol-avoid-edges", value);
layout.icon.allowOverlap.parse("icon-allow-overlap", value);
layout.icon.ignorePlacement.parse("icon-ignore-placement", value);
layout.icon.optional.parse("icon-optional", value);
layout.icon.rotationAlignment.parse("icon-rotation-alignment", value);
layout.icon.size.parse("icon-size", value);
layout.icon.image.parse("icon-image", value);
layout.icon.rotate.parse("icon-rotate", value);
layout.icon.padding.parse("icon-padding", value);
layout.icon.keepUpright.parse("icon-keep-upright", value);
layout.icon.offset.parse("icon-offset", value);
layout.text.rotationAlignment.parse("text-rotation-alignment", value);
layout.text.field.parse("text-field", value);
layout.text.font.parse("text-font", value);
layout.text.size.parse("text-size", value);
layout.text.maxWidth.parse("text-max-width", value);
layout.text.lineHeight.parse("text-line-height", value);
layout.text.letterSpacing.parse("text-letter-spacing", value);
layout.text.justify.parse("text-justify", value);
layout.text.anchor.parse("text-anchor", value);
layout.text.maxAngle.parse("text-max-angle", value);
layout.text.rotate.parse("text-rotate", value);
layout.text.padding.parse("text-padding", value);
layout.text.keepUpright.parse("text-keep-upright", value);
layout.text.transform.parse("text-transform", value);
layout.text.offset.parse("text-offset", value);
layout.text.allowOverlap.parse("text-allow-overlap", value);
layout.text.ignorePlacement.parse("text-ignore-placement", value);
layout.text.optional.parse("text-optional", value);
}
// Reads all symbol paint properties (icon-* and text-* color/opacity/halo/
// translate settings) from the style JSON into this layer's paint struct.
void SymbolLayer::parsePaints(const JSValue& layer) {
paint.icon.opacity.parse("icon-opacity", layer);
paint.icon.color.parse("icon-color", layer);
paint.icon.haloColor.parse("icon-halo-color", layer);
paint.icon.haloWidth.parse("icon-halo-width", layer);
paint.icon.haloBlur.parse("icon-halo-blur", layer);
paint.icon.translate.parse("icon-translate", layer);
paint.icon.translateAnchor.parse("icon-translate-anchor", layer);
paint.text.opacity.parse("text-opacity", layer);
paint.text.color.parse("text-color", layer);
paint.text.haloColor.parse("text-halo-color", layer);
paint.text.haloWidth.parse("text-halo-width", layer);
paint.text.haloBlur.parse("text-halo-blur", layer);
paint.text.translate.parse("text-translate", layer);
paint.text.translateAnchor.parse("text-translate-anchor", layer);
}
// Applies the cascade parameters (style classes/transitions) to every paint
// property of this layer.
void SymbolLayer::cascade(const StyleCascadeParameters& parameters) {
paint.icon.opacity.cascade(parameters);
paint.icon.color.cascade(parameters);
paint.icon.haloColor.cascade(parameters);
paint.icon.haloWidth.cascade(parameters);
paint.icon.haloBlur.cascade(parameters);
paint.icon.translate.cascade(parameters);
paint.icon.translateAnchor.cascade(parameters);
paint.text.opacity.cascade(parameters);
paint.text.color.cascade(parameters);
paint.text.haloColor.cascade(parameters);
paint.text.haloWidth.cascade(parameters);
paint.text.haloBlur.cascade(parameters);
paint.text.translate.cascade(parameters);
paint.text.translateAnchor.cascade(parameters);
}
// Recalculates every paint property for the given calculation parameters.
// Returns true while any property is still transitioning, and chooses the
// translucent render pass whenever icon or text would be visible.
bool SymbolLayer::recalculate(const StyleCalculationParameters& parameters) {
bool hasTransitions = false;
hasTransitions |= paint.icon.opacity.calculate(parameters);
hasTransitions |= paint.icon.color.calculate(parameters);
hasTransitions |= paint.icon.haloColor.calculate(parameters);
hasTransitions |= paint.icon.haloWidth.calculate(parameters);
hasTransitions |= paint.icon.haloBlur.calculate(parameters);
hasTransitions |= paint.icon.translate.calculate(parameters);
hasTransitions |= paint.icon.translateAnchor.calculate(parameters);
hasTransitions |= paint.text.opacity.calculate(parameters);
hasTransitions |= paint.text.color.calculate(parameters);
hasTransitions |= paint.text.haloColor.calculate(parameters);
hasTransitions |= paint.text.haloWidth.calculate(parameters);
hasTransitions |= paint.text.haloBlur.calculate(parameters);
hasTransitions |= paint.text.translate.calculate(parameters);
hasTransitions |= paint.text.translateAnchor.calculate(parameters);
// text-size and icon-size are layout properties but they also need to be evaluated as paint properties:
layout.icon.size.calculate(parameters);
layout.text.size.calculate(parameters);
paint.icon.size = layout.icon.size;
paint.text.size = layout.text.size;
passes = (paint.icon.isVisible() || paint.text.isVisible())
? RenderPass::Translucent : RenderPass::None;
return hasTransitions;
}
/**
 * Builds a SymbolBucket for one tile: snapshots this layer's layout,
 * evaluates every layout property at the tile's zoom level, parses the
 * tile's features, and — unless glyph/sprite dependencies are still
 * pending — adds the features to the bucket.
 *
 * Fixes a stray semicolon that followed the line-placement if-block.
 */
std::unique_ptr<Bucket> SymbolLayer::createBucket(StyleBucketParameters& parameters) const {
    auto bucket = std::make_unique<SymbolBucket>(parameters.tileID.overscaleFactor(),
                                                 parameters.tileID.z,
                                                 parameters.mode);
    bucket->layout = layout;

    StyleCalculationParameters p(parameters.tileID.z);
    bucket->layout.placement.calculate(p);
    if (bucket->layout.placement.value == PlacementType::Line) {
        // Line placement forces map-aligned rotation for icons and text.
        bucket->layout.icon.rotationAlignment.value = RotationAlignmentType::Map;
        bucket->layout.text.rotationAlignment.value = RotationAlignmentType::Map;
    }

    // Evaluate the remaining layout properties at the tile's zoom level.
    bucket->layout.spacing.calculate(p);
    bucket->layout.avoidEdges.calculate(p);
    bucket->layout.icon.allowOverlap.calculate(p);
    bucket->layout.icon.ignorePlacement.calculate(p);
    bucket->layout.icon.optional.calculate(p);
    bucket->layout.icon.rotationAlignment.calculate(p);
    bucket->layout.icon.image.calculate(p);
    bucket->layout.icon.padding.calculate(p);
    bucket->layout.icon.rotate.calculate(p);
    bucket->layout.icon.keepUpright.calculate(p);
    bucket->layout.icon.offset.calculate(p);
    bucket->layout.text.rotationAlignment.calculate(p);
    bucket->layout.text.field.calculate(p);
    bucket->layout.text.font.calculate(p);
    bucket->layout.text.maxWidth.calculate(p);
    bucket->layout.text.lineHeight.calculate(p);
    bucket->layout.text.letterSpacing.calculate(p);
    bucket->layout.text.maxAngle.calculate(p);
    bucket->layout.text.rotate.calculate(p);
    bucket->layout.text.padding.calculate(p);
    bucket->layout.text.ignorePlacement.calculate(p);
    bucket->layout.text.optional.calculate(p);
    bucket->layout.text.justify.calculate(p);
    bucket->layout.text.anchor.calculate(p);
    bucket->layout.text.keepUpright.calculate(p);
    bucket->layout.text.transform.calculate(p);
    bucket->layout.text.offset.calculate(p);
    bucket->layout.text.allowOverlap.calculate(p);

    // The sizes evaluated at z18 are stored as the maximum sizes, while the
    // sizes used for layout are re-evaluated one zoom level past the tile's.
    bucket->layout.icon.size.calculate(StyleCalculationParameters(18));
    bucket->layout.text.size.calculate(StyleCalculationParameters(18));
    bucket->layout.iconMaxSize = bucket->layout.icon.size;
    bucket->layout.textMaxSize = bucket->layout.text.size;
    bucket->layout.icon.size.calculate(StyleCalculationParameters(p.z + 1));
    bucket->layout.text.size.calculate(StyleCalculationParameters(p.z + 1));

    bucket->parseFeatures(parameters.layer, filter);
    if (bucket->needsDependencies(parameters.glyphStore, parameters.spriteStore)) {
        parameters.partialParse = true;
    }

    // We do not add features if the parser is in a "partial" state because
    // the layer ordering needs to be respected when calculating text
    // collisions. Although, at this point, we requested all the resources
    // needed by this tile.
    if (!parameters.partialParse) {
        bucket->addFeatures(parameters.tileUID,
                            *spriteAtlas,
                            parameters.glyphAtlas,
                            parameters.glyphStore);
    }

    return std::move(bucket);
}
} // namespace mbgl
<file_sep>/src/mbgl/shader/circle_shader.cpp
#include <mbgl/shader/circle_shader.hpp>
#include <mbgl/shader/circle.vertex.hpp>
#include <mbgl/shader/circle.fragment.hpp>
#include <mbgl/gl/gl.hpp>
#include <cstdio>
using namespace mbgl;
// Compiles and links the "circle" shader program from its embedded
// vertex/fragment sources.
CircleShader::CircleShader(gl::GLObjectStore& glObjectStore)
: Shader("circle", shaders::circle::vertex, shaders::circle::fragment, glObjectStore) {
}
// Binds the position attribute: two GL_SHORTs per vertex with an explicit
// 4-byte stride (= 2 * sizeof(GLshort)), starting at the given byte offset.
void CircleShader::bind(GLbyte* offset) {
MBGL_CHECK_ERROR(glEnableVertexAttribArray(a_pos));
MBGL_CHECK_ERROR(glVertexAttribPointer(a_pos, 2, GL_SHORT, false, 4, offset));
}
<file_sep>/test/util/tile_cover.cpp
#include <mbgl/util/tile_cover.hpp>
#include <mbgl/util/geo.hpp>
#include <mbgl/map/tile_id.hpp>
#include <gtest/gtest.h>
#include <unordered_set>
using namespace mbgl;
using set = std::unordered_set<TileID>;
// An empty bounds must yield no tiles at any zoom level.
TEST(TileCover, Empty) {
    auto result = tileCover(LatLngBounds::empty(), 0, 0);
    ASSERT_TRUE(result.empty());
}
// Bounds entirely above latitude 86 lie outside the Mercator projection's
// usable range, so no tiles are produced.
TEST(TileCover, Arctic) {
    auto result = tileCover(LatLngBounds::hull({ 86, -180 }, { 90, 180 }), 0, 0);
    ASSERT_TRUE(result.empty());
}
// Mirror of the Arctic case: bounds entirely below latitude -86 produce
// no tiles.
TEST(TileCover, Antarctic) {
    auto result = tileCover(LatLngBounds::hull({ -86, -180 }, { -90, 180 }), 0, 0);
    ASSERT_TRUE(result.empty());
}
// The whole world at zoom 0 is covered by the single root tile 0/0/0.
TEST(TileCover, WorldZ0) {
    auto result = tileCover(LatLngBounds::world(), 0, 0);
    ASSERT_EQ(1, result.size());
    ASSERT_EQ(0, result[0].z);
    ASSERT_EQ(0, result[0].x);
    ASSERT_EQ(0, result[0].y);
}
// The whole world at zoom 1 is exactly the four z1 tiles; comparison goes
// through an unordered_set because tileCover's ordering is not part of the
// contract being tested here.
TEST(TileCover, WorldZ1) {
    auto result = tileCover(LatLngBounds::world(), 1, 1);
    ASSERT_EQ(4, result.size());
    ASSERT_EQ(
        (set {{
            TileID(1, 1, 1, 1),
            TileID(1, 0, 1, 1),
            TileID(1, 1, 0, 1),
            TileID(1, 0, 0, 1)
        }}),
        (set {
            result.begin(),
            result.end()
        }));
}
//TEST(TileCover, SingletonZ0) {
// auto result = tileCover(LatLngBounds::singleton({0, 0}), 0, 0);
// ASSERT_EQ(1, result.size());
// ASSERT_EQ(0, result[0].z);
// ASSERT_EQ(0, result[0].x);
// ASSERT_EQ(0, result[0].y);
//}
//
//TEST(TileCover, SingletonZ1) {
// auto result = tileCover(LatLngBounds::singleton({0, 0}), 1, 1);
// ASSERT_EQ(1, result.size());
// ASSERT_EQ(0, result[0].z);
// ASSERT_EQ(0, result[0].x);
// ASSERT_EQ(0, result[0].y);
//}
// Fixture bounds roughly covering the city of San Francisco
// (south-west corner first, then north-east).
static const LatLngBounds sanFrancisco = LatLngBounds::hull(
    { 37.6609, -122.5744 },
    { 37.8271, -122.3204 });
// At zoom 0 San Francisco falls inside the single root tile; `w` (the world
// wrap index) must be 0 because the bounds are not longitude-wrapped.
TEST(TileCover, SanFranciscoZ0) {
    auto result = tileCover(sanFrancisco, 0, 0);
    ASSERT_EQ(1, result.size());
    ASSERT_EQ(0, result[0].w);
    ASSERT_EQ(0, result[0].z);
    ASSERT_EQ(0, result[0].x);
    ASSERT_EQ(0, result[0].y);
}
// At zoom 10 the fixture bounds straddle a 2x2 block of tiles; order-agnostic
// comparison via unordered_set.
TEST(TileCover, SanFranciscoZ10) {
    auto result = tileCover(sanFrancisco, 10, 10);
    ASSERT_EQ(4, result.size());
    ASSERT_EQ(
        (set {{
            TileID(10, 163, 395, 10),
            TileID(10, 164, 395, 10),
            TileID(10, 163, 396, 10),
            TileID(10, 164, 396, 10)
        }}),
        (set {
            result.begin(),
            result.end()
        }));
}
//TEST(TileCover, OrderedByDistanceToCenter) {
// auto result = tileCover(sanFrancisco, 12, 12);
// ASSERT_EQ(12, result.size());
// ASSERT_EQ( 12, result[0].z);
// ASSERT_EQ( 654, result[0].x);
// ASSERT_EQ(1583, result[0].y);
// ASSERT_EQ( 12, result[1].z);
// ASSERT_EQ( 655, result[1].x);
// ASSERT_EQ(1583, result[1].y);
//}
//
//static const LatLngBounds sanFranciscoWrapped = LatLngBounds::hull(
// { 37.6609, 238.5744 },
// { 37.8271, 238.3204 });
//
//TEST(TileCover, SanFranciscoZ0Wrapped) {
// auto result = tileCover(sanFranciscoWrapped, 0, 0);
// ASSERT_EQ(1, result.size());
// ASSERT_EQ(1, result[0].w);
// ASSERT_EQ(0, result[0].z);
// ASSERT_EQ(0, result[0].x);
// ASSERT_EQ(0, result[0].y);
//}
<file_sep>/src/mbgl/util/geo.cpp
#include <mbgl/util/geo.hpp>
#include <mbgl/util/constants.hpp>
#include <mbgl/map/tile_id.hpp>
#include <cmath>
namespace mbgl {
// Constructs the LatLng of the north-west corner of the given tile by
// inverting the spherical-mercator projection.
LatLng::LatLng(const TileID& id) {
    // x/2^z maps the tile column into [0, 1); scale to degrees and re-center.
    longitude = id.x / std::pow(2.0, id.z) * util::DEGREES_MAX - util::LONGITUDE_MAX;
    // 0.5 * (e^n - e^-n) == sinh(n); atan(sinh(n)) is the inverse Gudermannian
    // that recovers latitude from the mercator y coordinate.
    const double n = M_PI - 2.0 * M_PI * id.y / std::pow(2.0, id.z);
    latitude = util::RAD2DEG * std::atan(0.5 * (std::exp(n) - std::exp(-n)));
}
// Projects this coordinate into the unit square of a spherical-mercator map
// (x and y each in [0, 1], origin at the top-left).
ScreenCoordinate LatLng::project() const {
    // Longitude maps linearly onto the unit interval.
    const double px = longitude / util::DEGREES_MAX + 0.5;
    // Latitude must be clamped to the Mercator limits before projecting.
    const double clampedLat = ::fmin(::fmax(latitude, -util::LATITUDE_MAX), util::LATITUDE_MAX);
    const double s = std::sin(clampedLat * util::DEG2RAD);
    const double py = 0.5 - 0.25 * std::log((1.0 + s) / (1.0 - s)) / M_PI;
    return { px, py };
}
// Bounds of a tile: the south-west corner is the NW corner of the tile one
// row below; the north-east corner is the NW corner of the tile one column
// to the right (both via the LatLng(TileID) constructor above).
LatLngBounds::LatLngBounds(const TileID& id)
    : sw(TileID{ id.z, id.x, id.y + 1, id.sourceZ }),
      ne(TileID{ id.z, id.x + 1, id.y, id.sourceZ }) {
}
// Returns the center of a width x height viewport after the insets have been
// subtracted, i.e. the midpoint of the remaining visible rectangle.
ScreenCoordinate EdgeInsets::getCenter(uint16_t width, uint16_t height) const {
    const double centerX = left + (width - left - right) / 2.0f;
    const double centerY = top + (height - top - bottom) / 2.0f;
    return { centerX, centerY };
}
} // end namespace mbgl
<file_sep>/platform/osx/app/mapboxgl-app.gypi
# gyp target for the Mapbox GL OS X demo application bundle.
{
  'includes': [
    '../../../gyp/common.gypi',
  ],
  'targets': [
    {
      'target_name': 'osxapp',
      'product_name': 'Mapbox GL',
      'type': 'executable',
      'product_extension': 'app',
      'mac_bundle': 1,
      # Resources copied into the .app bundle.
      'mac_bundle_resources': [
        'Credits.rtf',
        'Icon.icns',
        'MainMenu.xib',
        'MapDocument.xib',
      ],
      'dependencies': [
        'osxsdk',
      ],
      'sources': [
        './AppDelegate.h',
        './AppDelegate.m',
        './DroppedPinAnnotation.h',
        './DroppedPinAnnotation.m',
        './LocationCoordinate2DTransformer.h',
        './LocationCoordinate2DTransformer.m',
        './MapDocument.h',
        './MapDocument.m',
        './TimeIntervalTransformer.h',
        './TimeIntervalTransformer.m',
        './NSValue+Additions.h',
        './NSValue+Additions.m',
        './main.m',
      ],
      'xcode_settings': {
        'CLANG_ENABLE_OBJC_ARC': 'YES',
        'INFOPLIST_FILE': '../platform/osx/app/Info.plist',
        'LD_RUNPATH_SEARCH_PATHS': [
          '\${inherited}',
          '@executable_path/../Frameworks',
        ],
        'PRODUCT_BUNDLE_IDENTIFIER': 'com.mapbox.MapboxGL',
        'SDKROOT': 'macosx',
        'SUPPORTED_PLATFORMS': 'macosx',
      },
      'configurations': {
        'Debug': {
          'xcode_settings': {
            'COPY_PHASE_STRIP': 'NO',
          },
        },
        'Release': {
          'xcode_settings': {
            'COPY_PHASE_STRIP': 'YES',
          },
        },
      },
      # Embed the Mapbox framework inside the app bundle and re-sign it.
      'copies': [
        {
          'destination': '<(PRODUCT_DIR)/${FRAMEWORKS_FOLDER_PATH}',
          'files': [
            '<(PRODUCT_DIR)/Mapbox.framework',
          ],
          'xcode_code_sign': 1,
        }
      ],
    },
  ]
}
<file_sep>/scripts/build-shaders.py
#!/usr/bin/env python
import sys, re, os, errno
# Positional arguments: the GLSL source to embed and the C++ header to emit.
input_file = sys.argv[1]
output_file = sys.argv[2]
def mkdir_p(path):
    """Create `path` and any missing parents; succeed silently if it exists.

    Python 2 compatible equivalent of `mkdir -p` (no `exist_ok` available).
    Re-raises any OSError other than "already exists as a directory".
    """
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise
# Input files are named like "circle.vertex.glsl": shader name, stage, extension.
shader_name, shader_type, extension = os.path.basename(input_file).split('.')
with open(input_file, "r") as f:
    data = f.read()
# C++ header template. Doubled braces ({{ }}) are literal braces under
# str.format(); the raw-string delimiter R"MBGL_SHADER(...)" keeps the GLSL
# source verbatim inside the generated header.
content = """// NOTE: DO NOT CHANGE THIS FILE. IT IS AUTOMATICALLY GENERATED.
#ifndef MBGL_SHADER_{NAME}_{TYPE}
#define MBGL_SHADER_{NAME}_{TYPE}
#include <mbgl/gl/gl.hpp>
namespace mbgl {{
namespace shaders {{
namespace {name} {{
#ifdef GL_ES_VERSION_2_0
constexpr const char* {type} = R"MBGL_SHADER(precision highp float;\n{data})MBGL_SHADER";
#else
constexpr const char* {type} = R"MBGL_SHADER(#version 120\n{data})MBGL_SHADER";
#endif
}} // namespace {name}
}} // namespace shaders
}} // namespace mbgl
#endif
""".format(
    name = shader_name,
    NAME = shader_name.upper(),
    type = shader_type,
    TYPE = shader_type.upper(),
    data = data,
)
# Ensure the output directory exists, then write the generated header.
mkdir_p(os.path.dirname(output_file))
with open(output_file, 'w') as f: f.write(content)
<file_sep>/src/mbgl/tile/vector_tile.hpp
#ifndef MBGL_MAP_VECTOR_TILE
#define MBGL_MAP_VECTOR_TILE
#include <mbgl/tile/geometry_tile.hpp>
#include <mbgl/map/tile_id.hpp>
#include <mbgl/util/pbf.hpp>
#include <map>
namespace mbgl {
class VectorTileLayer;
// A single feature of a vector-tile layer. Holds the raw, undecoded protobuf
// slices for tags and geometry; accessors decode them on demand.
class VectorTileFeature : public GeometryTileFeature {
public:
    VectorTileFeature(pbf, const VectorTileLayer&);

    FeatureType getType() const override { return type; }
    optional<Value> getValue(const std::string&) const override;
    GeometryCollection getGeometries() const override;
    uint32_t getExtent() const override;

private:
    const VectorTileLayer& layer; // provides the shared key/value tables
    uint64_t id = 0;
    FeatureType type = FeatureType::Unknown;
    pbf tags_pbf;      // raw tag key/value index pairs
    pbf geometry_pbf;  // raw geometry command stream
};
// One named layer inside a vector tile: the shared key/value tables plus the
// raw protobuf slice for each contained feature.
class VectorTileLayer : public GeometryTileLayer {
public:
    VectorTileLayer(pbf);

    std::size_t featureCount() const override { return features.size(); }
    util::ptr<const GeometryTileFeature> getFeature(std::size_t) const override;

private:
    friend class VectorTile;
    friend class VectorTileFeature;

    std::string name;
    uint32_t extent = 4096; // tile coordinate extent; 4096 is the spec default
    std::map<std::string, uint32_t> keys;   // tag key -> index into values
    std::vector<Value> values;
    std::vector<pbf> features; // one undecoded slice per feature
};
// A complete vector tile backed by its raw serialized bytes. Layers are
// parsed lazily; `parsed`/`layers` are mutable so getLayer() can stay const.
class VectorTile : public GeometryTile {
public:
    VectorTile(std::shared_ptr<const std::string> data);

    util::ptr<GeometryTileLayer> getLayer(const std::string&) const override;

private:
    std::shared_ptr<const std::string> data; // raw tile bytes (shared, immutable)
    mutable bool parsed = false;
    mutable std::map<std::string, util::ptr<GeometryTileLayer>> layers;
};
class TileID;
class FileSource;

// Requests and watches the network resource for one vector tile, expanding
// the URL template with the tile ID and pixel ratio.
class VectorTileMonitor : public GeometryTileMonitor {
public:
    VectorTileMonitor(const TileID&, float pixelRatio, const std::string& urlTemplate, FileSource&);

    std::unique_ptr<FileRequest> monitorTile(const GeometryTileMonitor::Callback&) override;

private:
    TileID tileID;
    float pixelRatio;
    std::string urlTemplate;
    FileSource& fileSource;
};
} // namespace mbgl
#endif
<file_sep>/src/mbgl/shader/box_shader.cpp
#include <mbgl/shader/box_shader.hpp>
#include <mbgl/shader/box.vertex.hpp>
#include <mbgl/shader/box.fragment.hpp>
#include <mbgl/gl/gl.hpp>
#include <cstdio>
using namespace mbgl;
// Links the "collisionbox" program and resolves the two extra per-vertex
// attributes this shader uses beyond the base class's a_pos.
CollisionBoxShader::CollisionBoxShader(gl::GLObjectStore& glObjectStore)
    : Shader("collisionbox", shaders::box::vertex, shaders::box::fragment, glObjectStore) {
    a_extrude = MBGL_CHECK_ERROR(glGetAttribLocation(getID(), "a_extrude"));
    a_data = MBGL_CHECK_ERROR(glGetAttribLocation(getID(), "a_data"));
}
// Binds the interleaved collision-box vertex layout (12 bytes per vertex):
//   bytes 0-3: a_pos      2 x GL_SHORT
//   bytes 4-7: a_extrude  2 x GL_SHORT
//   bytes 8-9: a_data     2 x GL_UNSIGNED_BYTE (bytes 10-11 unused)
void CollisionBoxShader::bind(GLbyte *offset) {
    static const GLsizei vertexSize = 12;
    GLbyte *posPtr = offset + 0;
    GLbyte *extrudePtr = offset + 4;
    GLbyte *dataPtr = offset + 8;

    MBGL_CHECK_ERROR(glEnableVertexAttribArray(a_pos));
    MBGL_CHECK_ERROR(glVertexAttribPointer(a_pos, 2, GL_SHORT, false, vertexSize, posPtr));

    MBGL_CHECK_ERROR(glEnableVertexAttribArray(a_extrude));
    MBGL_CHECK_ERROR(glVertexAttribPointer(a_extrude, 2, GL_SHORT, false, vertexSize, extrudePtr));

    MBGL_CHECK_ERROR(glEnableVertexAttribArray(a_data));
    MBGL_CHECK_ERROR(glVertexAttribPointer(a_data, 2, GL_UNSIGNED_BYTE, false, vertexSize, dataPtr));
}
<file_sep>/platform/android/MapboxGLAndroidSDK/src/main/java/com/mapbox/mapboxsdk/maps/MapboxMapOptions.java
package com.mapbox.mapboxsdk.maps;
import android.content.Context;
import android.content.res.TypedArray;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.Gravity;
import com.mapbox.mapboxsdk.R;
import com.mapbox.mapboxsdk.camera.CameraPosition;
import com.mapbox.mapboxsdk.constants.MapboxConstants;
/**
 * Defines configuration MapboxMapOptions for a MapboxMap. These options can be used when adding a
 * map to your application programmatically (as opposed to via XML). If you are using a MapFragment,
 * you can pass these options in using the static factory method newInstance(MapboxMapOptions).
 * If you are using a MapView, you can pass these options in using the constructor
 * MapView(Context, MapboxMapOptions). If you add a map using XML, then you can apply these options
 * using custom XML tags.
 */
public class MapboxMapOptions implements Parcelable {

    private static final float DIMENSION_SEVEN_DP = 7f;
    private static final float DIMENSION_TEN_DP = 10f;
    private static final float DIMENSION_SIXTEEN_DP = 16f;
    private static final float DIMENSION_SEVENTY_SIX_DP = 76f;

    private CameraPosition cameraPosition;

    private boolean debugActive;

    private boolean compassEnabled = true;
    private int compassGravity = Gravity.TOP | Gravity.END;
    private int[] compassMargins;

    private boolean logoEnabled = true;
    private int logoGravity = Gravity.BOTTOM | Gravity.START;
    private int[] logoMargins;

    private boolean attributionEnabled = true;
    private int attributionGravity = Gravity.BOTTOM;
    private int[] attributionMargins;

    private float minZoom = MapboxConstants.MINIMUM_ZOOM;
    private float maxZoom = MapboxConstants.MAXIMUM_ZOOM;

    private boolean rotateGesturesEnabled = true;
    private boolean scrollGesturesEnabled = true;
    private boolean tiltGesturesEnabled = true;
    private boolean zoomGesturesEnabled = true;
    private boolean zoomControlsEnabled = false;

    private boolean locationEnabled;

    private String style;
    private String accessToken;

    /**
     * Creates a new MapboxMapOptions object.
     */
    public MapboxMapOptions() {
    }

    // Reads fields in the exact order writeToParcel() writes them.
    private MapboxMapOptions(Parcel in) {
        cameraPosition = in.readParcelable(CameraPosition.class.getClassLoader());
        debugActive = in.readByte() != 0;

        compassEnabled = in.readByte() != 0;
        compassGravity = in.readInt();
        compassMargins = in.createIntArray();

        logoEnabled = in.readByte() != 0;
        logoGravity = in.readInt();
        logoMargins = in.createIntArray();

        attributionEnabled = in.readByte() != 0;
        attributionGravity = in.readInt();
        attributionMargins = in.createIntArray();

        minZoom = in.readFloat();
        maxZoom = in.readFloat();

        rotateGesturesEnabled = in.readByte() != 0;
        scrollGesturesEnabled = in.readByte() != 0;
        tiltGesturesEnabled = in.readByte() != 0;
        zoomControlsEnabled = in.readByte() != 0;
        zoomGesturesEnabled = in.readByte() != 0;

        locationEnabled = in.readByte() != 0;

        style = in.readString();
        accessToken = in.readString();
    }

    /**
     * Creates a MapboxMapOptions from the attribute set.
     *
     * @param context Context related to a map view.
     * @param attrs   Attribute set containing configuration.
     * @return The configured MapboxMapOptions.
     */
    public static MapboxMapOptions createFromAttributes(@NonNull Context context, @Nullable AttributeSet attrs) {
        MapboxMapOptions mapboxMapOptions = new MapboxMapOptions();
        float screenDensity = context.getResources().getDisplayMetrics().density;
        TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.MapView, 0, 0);
        try {
            mapboxMapOptions.debugActive(typedArray.getBoolean(R.styleable.MapView_debug_active, false));
            mapboxMapOptions.camera(new CameraPosition.Builder(typedArray).build());
            mapboxMapOptions.accessToken(typedArray.getString(R.styleable.MapView_access_token));
            mapboxMapOptions.styleUrl(typedArray.getString(R.styleable.MapView_style_url));
            mapboxMapOptions.zoomGesturesEnabled(typedArray.getBoolean(R.styleable.MapView_zoom_enabled, true));
            mapboxMapOptions.scrollGesturesEnabled(typedArray.getBoolean(R.styleable.MapView_scroll_enabled, true));
            mapboxMapOptions.rotateGesturesEnabled(typedArray.getBoolean(R.styleable.MapView_rotate_enabled, true));
            mapboxMapOptions.tiltGesturesEnabled(typedArray.getBoolean(R.styleable.MapView_tilt_enabled, true));
            mapboxMapOptions.zoomControlsEnabled(typedArray.getBoolean(R.styleable.MapView_zoom_controls_enabled, false));
            mapboxMapOptions.maxZoom(typedArray.getFloat(R.styleable.MapView_zoom_max, MapboxConstants.MAXIMUM_ZOOM));
            mapboxMapOptions.minZoom(typedArray.getFloat(R.styleable.MapView_zoom_min, MapboxConstants.MINIMUM_ZOOM));
            mapboxMapOptions.compassEnabled(typedArray.getBoolean(R.styleable.MapView_compass_enabled, true));
            mapboxMapOptions.compassGravity(typedArray.getInt(R.styleable.MapView_compass_gravity, Gravity.TOP | Gravity.END));
            // TypedArray.getDimension() already returns pixels for explicitly set dp
            // attributes; only the fallback default needs manual density scaling.
            // Fix: the "left" margin previously used getDimension(attr, DEFAULT) * density,
            // which double-scaled an explicitly set value; it now matches its siblings.
            mapboxMapOptions.compassMargins(new int[]{(int) typedArray.getDimension(R.styleable.MapView_compass_margin_left, DIMENSION_TEN_DP * screenDensity)
                    , ((int) typedArray.getDimension(R.styleable.MapView_compass_margin_top, DIMENSION_TEN_DP * screenDensity))
                    , ((int) typedArray.getDimension(R.styleable.MapView_compass_margin_right, DIMENSION_TEN_DP * screenDensity))
                    , ((int) typedArray.getDimension(R.styleable.MapView_compass_margin_bottom, DIMENSION_TEN_DP * screenDensity))});
            mapboxMapOptions.logoEnabled(typedArray.getBoolean(R.styleable.MapView_logo_visibility, true));
            mapboxMapOptions.logoGravity(typedArray.getInt(R.styleable.MapView_logo_gravity, Gravity.BOTTOM | Gravity.START));
            // NOTE(review): unlike the compass margins above, the logo/attribution
            // margins multiply the resolved attribute value by screenDensity, which
            // double-scales explicitly set dp values — confirm intent before changing.
            mapboxMapOptions.logoMargins(new int[]{(int) (typedArray.getDimension(R.styleable.MapView_logo_margin_left, DIMENSION_SIXTEEN_DP) * screenDensity)
                    , (int) (typedArray.getDimension(R.styleable.MapView_logo_margin_top, DIMENSION_SIXTEEN_DP) * screenDensity)
                    , (int) (typedArray.getDimension(R.styleable.MapView_logo_margin_right, DIMENSION_SIXTEEN_DP) * screenDensity)
                    , (int) (typedArray.getDimension(R.styleable.MapView_logo_margin_bottom, DIMENSION_SIXTEEN_DP) * screenDensity)});
            mapboxMapOptions.attributionEnabled(typedArray.getBoolean(R.styleable.MapView_attribution_visibility, true));
            mapboxMapOptions.attributionGravity(typedArray.getInt(R.styleable.MapView_attribution_gravity, Gravity.BOTTOM));
            mapboxMapOptions.attributionMargins(new int[]{(int) (typedArray.getDimension(R.styleable.MapView_attribution_margin_left, DIMENSION_SEVENTY_SIX_DP) * screenDensity)
                    , (int) (typedArray.getDimension(R.styleable.MapView_attribution_margin_top, DIMENSION_SEVEN_DP) * screenDensity)
                    , (int) (typedArray.getDimension(R.styleable.MapView_attribution_margin_right, DIMENSION_SEVEN_DP) * screenDensity)
                    , (int) (typedArray.getDimension(R.styleable.MapView_attribution_margin_bottom, DIMENSION_SEVEN_DP) * screenDensity)});
            mapboxMapOptions.locationEnabled(typedArray.getBoolean(R.styleable.MapView_my_location_enabled, false));
        } finally {
            typedArray.recycle();
        }
        return mapboxMapOptions;
    }

    /**
     * Specifies the initial camera position for the map view.
     *
     * @param cameraPosition Initial camera position
     * @return This
     */
    public MapboxMapOptions camera(CameraPosition cameraPosition) {
        this.cameraPosition = cameraPosition;
        return this;
    }

    /**
     * Specifies the access token associated with a map view.
     *
     * @param accessToken Token to be used to access the service
     * @return This
     */
    public MapboxMapOptions accessToken(String accessToken) {
        this.accessToken = accessToken;
        return this;
    }

    /**
     * Specifies the style url associated with a map view.
     *
     * @param styleUrl Url to be used to load a style
     * @return This
     */
    public MapboxMapOptions styleUrl(String styleUrl) {
        style = styleUrl;
        return this;
    }

    /**
     * Specifies the used debug type for a map view.
     *
     * @param enabled True if debug is enabled
     * @return This
     */
    public MapboxMapOptions debugActive(boolean enabled) {
        debugActive = enabled;
        return this;
    }

    /**
     * Specifies the used minimum zoom level for a map view.
     *
     * @param minZoom Zoom level to be used
     * @return This
     */
    public MapboxMapOptions minZoom(float minZoom) {
        this.minZoom = minZoom;
        return this;
    }

    /**
     * Specifies the used maximum zoom level for a map view.
     *
     * @param maxZoom Zoom level to be used
     * @return This
     */
    public MapboxMapOptions maxZoom(float maxZoom) {
        this.maxZoom = maxZoom;
        return this;
    }

    /**
     * Specifies the visibility state of a compass for a map view.
     *
     * @param enabled True and compass is shown
     * @return This
     */
    public MapboxMapOptions compassEnabled(boolean enabled) {
        compassEnabled = enabled;
        return this;
    }

    /**
     * Specifies the gravity state of compass for a map view.
     *
     * @param gravity see {@link android.view.Gravity}
     * @return This
     */
    public MapboxMapOptions compassGravity(int gravity) {
        compassGravity = gravity;
        return this;
    }

    /**
     * Specifies the margin state of compass for a map view
     *
     * @param margins 4 long array for LTRB margins
     * @return This
     */
    public MapboxMapOptions compassMargins(int[] margins) {
        compassMargins = margins;
        return this;
    }

    /**
     * Specifies the visibility state of a logo for a map view.
     *
     * @param enabled True and logo is shown
     * @return This
     */
    public MapboxMapOptions logoEnabled(boolean enabled) {
        logoEnabled = enabled;
        return this;
    }

    /**
     * Specifies the gravity state of logo for a map view.
     *
     * @param gravity see {@link android.view.Gravity}
     * @return This
     */
    public MapboxMapOptions logoGravity(int gravity) {
        logoGravity = gravity;
        return this;
    }

    /**
     * Specifies the margin state of logo for a map view
     *
     * @param margins 4 long array for LTRB margins
     * @return This
     */
    public MapboxMapOptions logoMargins(int[] margins) {
        logoMargins = margins;
        return this;
    }

    /**
     * Specifies the visibility state of a attribution for a map view.
     *
     * @param enabled True and attribution is shown
     * @return This
     */
    public MapboxMapOptions attributionEnabled(boolean enabled) {
        attributionEnabled = enabled;
        return this;
    }

    /**
     * Specifies the gravity state of attribution for a map view.
     *
     * @param gravity see {@link android.view.Gravity}
     * @return This
     */
    public MapboxMapOptions attributionGravity(int gravity) {
        attributionGravity = gravity;
        return this;
    }

    /**
     * Specifies the margin state of attribution for a map view
     *
     * @param margins 4 long array for LTRB margins
     * @return This
     */
    public MapboxMapOptions attributionMargins(int[] margins) {
        attributionMargins = margins;
        return this;
    }

    /**
     * Specifies if the rotate gesture is enabled for a map view.
     *
     * @param enabled True and gesture will be enabled
     * @return This
     */
    public MapboxMapOptions rotateGesturesEnabled(boolean enabled) {
        rotateGesturesEnabled = enabled;
        return this;
    }

    /**
     * Specifies if the scroll gesture is enabled for a map view.
     *
     * @param enabled True and gesture will be enabled
     * @return This
     */
    public MapboxMapOptions scrollGesturesEnabled(boolean enabled) {
        scrollGesturesEnabled = enabled;
        return this;
    }

    /**
     * Specifies if the tilt gesture is enabled for a map view.
     *
     * @param enabled True and gesture will be enabled
     * @return This
     */
    public MapboxMapOptions tiltGesturesEnabled(boolean enabled) {
        tiltGesturesEnabled = enabled;
        return this;
    }

    /**
     * Specifies if the zoom controls are enabled for a map view.
     *
     * @param enabled True and gesture will be enabled
     * @return This
     */
    public MapboxMapOptions zoomControlsEnabled(boolean enabled) {
        zoomControlsEnabled = enabled;
        return this;
    }

    /**
     * Specifies if the zoom gesture is enabled for a map view.
     *
     * @param enabled True and gesture will be enabled
     * @return This
     */
    public MapboxMapOptions zoomGesturesEnabled(boolean enabled) {
        zoomGesturesEnabled = enabled;
        return this;
    }

    /**
     * Specifies if the user location view is enabled for a map view.
     *
     * @param locationEnabled True and gesture will be enabled
     * @return This
     */
    public MapboxMapOptions locationEnabled(boolean locationEnabled) {
        this.locationEnabled = locationEnabled;
        return this;
    }

    /**
     * Get the current configured initial camera position for a map view.
     *
     * @return CameraPosition to be initially used.
     */
    public CameraPosition getCamera() {
        return cameraPosition;
    }

    /**
     * Get the current configured min zoom for a map view.
     *
     * @return Minimum zoom level to be used.
     */
    public float getMinZoom() {
        return minZoom;
    }

    /**
     * Get the current configured maximum zoom for a map view.
     *
     * @return Maximum zoom to be used.
     */
    public float getMaxZoom() {
        return maxZoom;
    }

    /**
     * Get the current configured visibility state for compass for a map view.
     *
     * @return Visibility state of the compass
     */
    public boolean getCompassEnabled() {
        return compassEnabled;
    }

    /**
     * Get the current configured gravity state for compass for a map view.
     *
     * @return Gravity state of the compass
     */
    public int getCompassGravity() {
        return compassGravity;
    }

    /**
     * Get the current configured margins for compass for a map view.
     *
     * @return Margins state of the compass
     */
    public int[] getCompassMargins() {
        return compassMargins;
    }

    /**
     * Get the current configured visibility state for the logo for a map view.
     *
     * @return Visibility state of the logo
     */
    public boolean getLogoEnabled() {
        return logoEnabled;
    }

    /**
     * Get the current configured gravity state for logo for a map view.
     *
     * @return Gravity state of the logo
     */
    public int getLogoGravity() {
        return logoGravity;
    }

    /**
     * Get the current configured margins for logo for a map view.
     *
     * @return Margins state of the logo
     */
    public int[] getLogoMargins() {
        return logoMargins;
    }

    /**
     * Get the current configured access token for a map view.
     *
     * @return Access token to be used.
     */
    public String getAccessToken() {
        return accessToken;
    }

    /**
     * Get the current configured style url for a map view.
     *
     * @return Style url to be used.
     */
    public String getStyle() {
        return style;
    }

    /**
     * Get the current configured rotate gesture state for a map view.
     *
     * @return True indicates gesture is enabled
     */
    public boolean getRotateGesturesEnabled() {
        return rotateGesturesEnabled;
    }

    /**
     * Get the current configured scroll gesture state for a map view.
     *
     * @return True indicates gesture is enabled
     */
    public boolean getScrollGesturesEnabled() {
        return scrollGesturesEnabled;
    }

    /**
     * Get the current configured tilt gesture state for a map view.
     *
     * @return True indicates gesture is enabled
     */
    public boolean getTiltGesturesEnabled() {
        return tiltGesturesEnabled;
    }

    /**
     * Get the current configured zoom controls state for a map view.
     *
     * @return True indicates gesture is enabled
     */
    public boolean getZoomControlsEnabled() {
        return zoomControlsEnabled;
    }

    /**
     * Get the current configured zoom gesture state for a map view.
     *
     * @return True indicates gesture is enabled
     */
    public boolean getZoomGesturesEnabled() {
        return zoomGesturesEnabled;
    }

    /**
     * Get the current configured visibility state for attribution for a map view.
     *
     * @return Visibility state of the attribution
     */
    public boolean getAttributionEnabled() {
        return attributionEnabled;
    }

    /**
     * Get the current configured gravity state for attribution for a map view.
     *
     * @return Gravity state of the attribution
     */
    public int getAttributionGravity() {
        return attributionGravity;
    }

    /**
     * Get the current configured margins for attribution for a map view.
     *
     * @return Margins state of the attribution
     */
    public int[] getAttributionMargins() {
        return attributionMargins;
    }

    /**
     * Get the current configured user location view state for a map view.
     *
     * @return True and user location will be shown
     */
    public boolean getLocationEnabled() {
        return locationEnabled;
    }

    /**
     * Get the current configured debug state for a map view.
     *
     * @return True indicates debug is enabled.
     */
    public boolean getDebugActive() {
        return debugActive;
    }

    public static final Parcelable.Creator<MapboxMapOptions> CREATOR
            = new Parcelable.Creator<MapboxMapOptions>() {
        public MapboxMapOptions createFromParcel(Parcel in) {
            return new MapboxMapOptions(in);
        }

        public MapboxMapOptions[] newArray(int size) {
            return new MapboxMapOptions[size];
        }
    };

    @Override
    public int describeContents() {
        return 0;
    }

    // Field order must stay in sync with the MapboxMapOptions(Parcel) constructor.
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeParcelable(cameraPosition, flags);
        dest.writeByte((byte) (debugActive ? 1 : 0));

        dest.writeByte((byte) (compassEnabled ? 1 : 0));
        dest.writeInt(compassGravity);
        dest.writeIntArray(compassMargins);

        dest.writeByte((byte) (logoEnabled ? 1 : 0));
        dest.writeInt(logoGravity);
        dest.writeIntArray(logoMargins);

        dest.writeByte((byte) (attributionEnabled ? 1 : 0));
        dest.writeInt(attributionGravity);
        dest.writeIntArray(attributionMargins);

        dest.writeFloat(minZoom);
        dest.writeFloat(maxZoom);

        dest.writeByte((byte) (rotateGesturesEnabled ? 1 : 0));
        dest.writeByte((byte) (scrollGesturesEnabled ? 1 : 0));
        dest.writeByte((byte) (tiltGesturesEnabled ? 1 : 0));
        dest.writeByte((byte) (zoomControlsEnabled ? 1 : 0));
        dest.writeByte((byte) (zoomGesturesEnabled ? 1 : 0));

        dest.writeByte((byte) (locationEnabled ? 1 : 0));

        dest.writeString(style);
        dest.writeString(accessToken);
    }
}
<file_sep>/src/mbgl/style/style_render_parameters.hpp
#ifndef STYLE_RENDER_PARAMETERS
#define STYLE_RENDER_PARAMETERS
namespace mbgl {
class TransformState;
// Bundles the parameters needed to render a style; currently only the
// transform state (camera/projection) for the frame being drawn.
class StyleRenderParameters {
public:
    StyleRenderParameters(const TransformState& state_)
        : state(state_) {}

    const TransformState& state;
};
}
#endif
<file_sep>/src/mbgl/shader/raster_shader.cpp
#include <mbgl/shader/raster_shader.hpp>
#include <mbgl/shader/raster.vertex.hpp>
#include <mbgl/shader/raster.fragment.hpp>
#include <mbgl/gl/gl.hpp>
#include <cstdio>
using namespace mbgl;
// Compiles and links the "raster" program from its embedded vertex/fragment
// shader sources; GL object bookkeeping is delegated to the Shader base.
RasterShader::RasterShader(gl::GLObjectStore& glObjectStore)
    : Shader("raster", shaders::raster::vertex, shaders::raster::fragment, glObjectStore) {
}
// Wires the position attribute to the currently bound vertex buffer: two
// 16-bit integers per vertex; a stride of zero tells GL they are tightly packed.
void RasterShader::bind(GLbyte* offset) {
    static const GLint componentCount = 2;
    MBGL_CHECK_ERROR(glEnableVertexAttribArray(a_pos));
    MBGL_CHECK_ERROR(glVertexAttribPointer(a_pos, componentCount, GL_SHORT, false, 0, offset));
}
<file_sep>/gyp/mbgl.gyp
# Top-level gyp file: pulls in the shared build settings and selects the
# headless-, platform-, HTTP- and asset-backend includes from build variables.
{
  'includes': [
    'common.gypi',
    'shaders.gypi',
    'version.gypi',
    'certificates.gypi',
    'standalone.gypi',
    'core.gypi',
    'none.gypi',
  ],
  'conditions': [
    ['headless_lib == "cgl" and host == "osx"', { 'includes': [ 'headless-cgl.gypi' ] } ],
    ['headless_lib == "glx" and host == "linux"', { 'includes': [ 'headless-glx.gypi' ] } ],
    ['platform_lib == "osx" and host == "osx"', { 'includes': [ 'platform-osx.gypi' ] } ],
    ['platform_lib == "ios" and host == "ios"', { 'includes': [ 'platform-ios.gypi' ] } ],
    ['platform_lib == "linux"', { 'includes': [ 'platform-linux.gypi' ] } ],
    ['platform_lib == "android" and host == "android"', { 'includes': [ 'platform-android.gypi' ] } ],
    ['http_lib == "curl"', { 'includes': [ 'http-curl.gypi' ] } ],
    ['http_lib == "nsurl" and (host == "osx" or host == "ios")', { 'includes': [ 'http-nsurl.gypi' ] } ],
    ['http_lib == "android" and host == "android"', { 'includes': [ 'http-android.gypi' ] } ],
    ['asset_lib == "fs"', { 'includes': [ 'asset-fs.gypi' ] } ],
    ['asset_lib == "zip"', { 'includes': [ 'asset-zip.gypi' ] } ],
    ['install_prefix != ""', { 'includes': ['install.gypi' ] } ],
  ],
}
<file_sep>/README.md
# Mapbox GL Native
A library for embedding interactive, customizable vector maps into native applications on multiple platforms. It takes stylesheets that conform to the [Mapbox GL Style Specification](https://github.com/mapbox/mapbox-gl-style-spec/), applies them to vector tiles that conform to the [Mapbox Vector Tile Specification](https://github.com/mapbox/vector-tile-spec), and renders them using OpenGL. [Mapbox GL JS](https://github.com/mapbox/mapbox-gl-js) is the WebGL-based counterpart, designed for use on the Web.
## The Mapbox GL ecosystem
This repository hosts the cross-platform Mapbox GL Native library, plus convenient SDKs for several platforms. The cross-platform library comes with a [GLFW](https://github.com/glfw/glfw)-based demo application for Ubuntu Linux and OS X. The SDKs target the usual languages on their respective platforms:
SDK | Languages | Build status
----|-----------|-------------
[Mapbox GL Native](INSTALL.md) | C++14 | [](https://travis-ci.org/mapbox/mapbox-gl-native/builds) [](https://coveralls.io/github/mapbox/mapbox-gl-native?branch=master)
[Mapbox Android SDK](platform/android/) | Java | [](https://www.bitrise.io/app/79cdcbdc42de4303)
[Mapbox iOS SDK](platform/ios/) | Objective-C or Swift | [](https://www.bitrise.io/app/7514e4cf3da2cc57)
[Mapbox OS X SDK](platform/osx/) | Objective-C or Swift | [](https://www.bitrise.io/app/155ef7da24b38dcd)
[node-mapbox-gl-native](platform/node/) | Node.js | [](https://travis-ci.org/mapbox/mapbox-gl-native/builds)
Additional Mapbox GL Native–based libraries are developed outside of this repository:
* [React Native Mapbox GL](https://github.com/mapbox/react-native-mapbox-gl) for React Native applications on iOS and Android
* [QMapboxGL](https://github.com/tmpsantos/qmapboxgl) for Qt-based applications
* Telerik’s [Mapbox plugin](http://plugins.telerik.com/cordova/plugin/mapbox) for Apache Cordova Hybrid applications
* Telerik’s [Mapbox plugin](http://plugins.telerik.com/nativescript/plugin/mapbox) for NativeScript Hybrid applications
* Xamarin’s [Mapbox component](https://components.xamarin.com/view/mapboxsdk) for Xamarin Hybrid applications
If your platform or hybrid application framework isn’t listed here, consider embedding [Mapbox GL JS](https://github.com/mapbox/mapbox-gl-js) using the standard Web capabilities on your platform.
<file_sep>/src/mbgl/annotation/annotation_manager.hpp
#ifndef MBGL_ANNOTATION_MANAGER
#define MBGL_ANNOTATION_MANAGER
#include <mbgl/annotation/annotation.hpp>
#include <mbgl/annotation/point_annotation_impl.hpp>
#include <mbgl/annotation/shape_annotation_impl.hpp>
#include <mbgl/sprite/sprite_store.hpp>
#include <mbgl/sprite/sprite_atlas.hpp>
#include <mbgl/util/geo.hpp>
#include <mbgl/util/noncopyable.hpp>
#include <string>
#include <vector>
#include <set>
namespace mbgl {
class PointAnnotation;
class ShapeAnnotation;
class AnnotationTile;
class AnnotationTileMonitor;
class Style;
// Central registry for point and shape annotations. Owns the annotation
// sprite store/atlas and keeps a set of tile monitors to notify.
class AnnotationManager : private util::noncopyable {
public:
    AnnotationManager(float pixelRatio);
    ~AnnotationManager();

    AnnotationIDs addPointAnnotations(const std::vector<PointAnnotation>&, const uint8_t maxZoom);
    AnnotationIDs addShapeAnnotations(const std::vector<ShapeAnnotation>&, const uint8_t maxZoom);
    void updatePointAnnotation(const AnnotationID&, const PointAnnotation&, const uint8_t maxZoom);
    void removeAnnotations(const AnnotationIDs&);

    AnnotationIDs getPointAnnotationsInBounds(const LatLngBounds&) const;

    // Icon (sprite image) management for point annotations.
    void addIcon(const std::string& name, std::shared_ptr<const SpriteImage>);
    void removeIcon(const std::string& name);
    double getTopOffsetPixelsForIcon(const std::string& name);
    SpriteAtlas& getSpriteAtlas() { return spriteAtlas; }

    void updateStyle(Style&);

    void addTileMonitor(AnnotationTileMonitor&);
    void removeTileMonitor(AnnotationTileMonitor&);

    // Well-known source/layer identifiers for the annotation layer.
    static const std::string SourceID;
    static const std::string PointLayerID;

private:
    std::unique_ptr<AnnotationTile> getTile(const TileID&);

    AnnotationID nextID = 0; // presumably the next ID handed out by add* — TODO confirm
    PointAnnotationImpl::Tree pointTree;
    PointAnnotationImpl::Map pointAnnotations;
    ShapeAnnotationImpl::Map shapeAnnotations;
    std::vector<std::string> obsoleteShapeAnnotationLayers;
    std::set<AnnotationTileMonitor*> monitors;

    SpriteStore spriteStore;
    SpriteAtlas spriteAtlas;
};
} // namespace mbgl
#endif
<file_sep>/test/util/clip_ids.cpp
#include <iostream>
#include "../fixtures/util.hpp"
#include <algorithm>
#include <mbgl/util/clip_id.hpp>
#include <mbgl/tile/tile.hpp>
using namespace mbgl;
using Stencil = std::pair<const TileID, ClipID>;
// Feeds every source's tiles into the ClipIDGenerator, one source at a time.
// Tiles are prepended one by one, so the generator receives each source's
// tiles in reverse order (same effect as std::front_inserter).
template <typename T> void generate(ClipIDGenerator& generator, const T &sources) {
    for (const auto& source : sources) {
        std::forward_list<Tile*> tilePointers;
        for (const auto& tile : source) {
            tilePointers.push_front(tile.get());
        }
        generator.update(tilePointers);
    }
}
// Debug helper: emits ready-to-paste ASSERT_EQ lines for the clip ID
// currently assigned to every tile of every source.
void print(const std::vector<std::vector<std::shared_ptr<Tile>>> &sources) {
    for (size_t sourceIndex = 0; sourceIndex < sources.size(); sourceIndex++) {
        const auto& tiles = sources[sourceIndex];
        for (size_t tileIndex = 0; tileIndex < tiles.size(); tileIndex++) {
            const auto& clip = tiles[tileIndex]->clip;
            std::cout << " ASSERT_EQ(ClipID(\"" << clip.mask << "\", \"" << clip.reference
                      << "\"), sources[" << sourceIndex << "][" << tileIndex << "]->clip);\n";
        }
    }
}
void print(const std::map<TileID, ClipID>& stencils) {
std::cout << " auto it = stencils.begin();\n";
std::cout << " ASSERT_EQ(" << stencils.size() << ", stencils.size());\n";
for (auto& stencil : stencils) {
std::cout << " ASSERT_EQ(Stencil(TileID{ " << (int)stencil.first.z << ", "
<< stencil.first.x << ", " << stencil.first.y << ", "
<< (int)stencil.first.sourceZ << " }, { \"" << stencil.second.mask.to_string()
<< "\", \"" << stencil.second.reference.to_string() << "\"}), *it++);\n";
}
std::cout << " ASSERT_EQ(stencils.end(), it);\n";
}
// One source: a zoom-0 parent plus its four zoom-1 children. All five tiles
// share one 3-bit mask; only the four children appear in the stencil map.
TEST(ClipIDs, ParentAndFourChildren) {
    const std::vector<std::vector<std::shared_ptr<Tile>>> sources = {
        {
            std::make_shared<Tile>(TileID { 1, 0, 0, 1 }),
            std::make_shared<Tile>(TileID { 1, 0, 1, 1 }),
            std::make_shared<Tile>(TileID { 1, 1, 0, 1 }),
            std::make_shared<Tile>(TileID { 1, 1, 1, 1 }),
            std::make_shared<Tile>(TileID { 0, 0, 0, 0 }),
        },
    };

    ClipIDGenerator generator;
    generate(generator, sources);
    // print(sources);
    ASSERT_EQ(ClipID("00000111", "00000010"), sources[0][0]->clip);
    ASSERT_EQ(ClipID("00000111", "00000011"), sources[0][1]->clip);
    ASSERT_EQ(ClipID("00000111", "00000100"), sources[0][2]->clip);
    ASSERT_EQ(ClipID("00000111", "00000101"), sources[0][3]->clip);
    ASSERT_EQ(ClipID("00000111", "00000001"), sources[0][4]->clip);

    const auto stencils = generator.getStencils();
    // print(stencils);
    auto it = stencils.begin();
    ASSERT_EQ(4, stencils.size());
    ASSERT_EQ(Stencil(TileID{ 1, 0, 0, 1 }, { "00000111", "00000010"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 1, 0, 1, 1 }, { "00000111", "00000011"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 1, 1, 0, 1 }, { "00000111", "00000100"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 1, 1, 1, 1 }, { "00000111", "00000101"}), *it++);
    ASSERT_EQ(stencils.end(), it);
}
// Same layout as ParentAndFourChildren but with negative (wrapped) x
// coordinates; clip ID assignment must be identical.
TEST(ClipIDs, ParentAndFourChildrenNegative) {
    const std::vector<std::vector<std::shared_ptr<Tile>>> sources = {
        {
            std::make_shared<Tile>(TileID { 1, -2, 0, 1 }),
            std::make_shared<Tile>(TileID { 1, -2, 1, 1 }),
            std::make_shared<Tile>(TileID { 1, -1, 0, 1 }),
            std::make_shared<Tile>(TileID { 1, -1, 1, 1 }),
            std::make_shared<Tile>(TileID { 0, -1, 0, 0 }),
        },
    };

    ClipIDGenerator generator;
    generate(generator, sources);
    // print(sources);
    ASSERT_EQ(ClipID("00000111", "00000010"), sources[0][0]->clip);
    ASSERT_EQ(ClipID("00000111", "00000011"), sources[0][1]->clip);
    ASSERT_EQ(ClipID("00000111", "00000100"), sources[0][2]->clip);
    ASSERT_EQ(ClipID("00000111", "00000101"), sources[0][3]->clip);
    ASSERT_EQ(ClipID("00000111", "00000001"), sources[0][4]->clip);

    const auto stencils = generator.getStencils();
    // print(stencils);
    auto it = stencils.begin();
    ASSERT_EQ(4, stencils.size());
    ASSERT_EQ(Stencil(TileID{ 1, -2, 0, 1 }, { "00000111", "00000010"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 1, -2, 1, 1 }, { "00000111", "00000011"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 1, -1, 0, 1 }, { "00000111", "00000100"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 1, -1, 1, 1 }, { "00000111", "00000101"}), *it++);
    ASSERT_EQ(stencils.end(), it);
}
// A zoom-1 parent (negative x) with zoom-2 children listed out of order;
// only the zoom-2 tiles end up in the stencil map, sorted by tile ID.
TEST(ClipIDs, NegativeParentAndMissingLevel) {
    const std::vector<std::vector<std::shared_ptr<Tile>>> sources = {
        {
            std::make_shared<Tile>(TileID { 1, -1, 0, 1 }),
            std::make_shared<Tile>(TileID { 2, -1, 0, 2 }),
            std::make_shared<Tile>(TileID { 2, -2, 1, 2 }),
            std::make_shared<Tile>(TileID { 2, -1, 1, 2 }),
            std::make_shared<Tile>(TileID { 2, -2, 0, 2 }),
        },
    };

    ClipIDGenerator generator;
    generate(generator, sources);
    // print(sources);
    ASSERT_EQ(ClipID("00000111", "00000001"), sources[0][0]->clip);
    ASSERT_EQ(ClipID("00000111", "00000100"), sources[0][1]->clip);
    ASSERT_EQ(ClipID("00000111", "00000011"), sources[0][2]->clip);
    ASSERT_EQ(ClipID("00000111", "00000101"), sources[0][3]->clip);
    ASSERT_EQ(ClipID("00000111", "00000010"), sources[0][4]->clip);

    const auto stencils = generator.getStencils();
    // print(stencils);
    auto it = stencils.begin();
    ASSERT_EQ(4, stencils.size());
    ASSERT_EQ(Stencil(TileID{ 2, -2, 0, 2 }, { "00000111", "00000010"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 2, -2, 1, 2 }, { "00000111", "00000011"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 2, -1, 0, 2 }, { "00000111", "00000100"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 2, -1, 1, 2 }, { "00000111", "00000101"}), *it++);
    ASSERT_EQ(stencils.end(), it);
}
// Seven sibling tiles on one zoom level: 3 bits suffice (values 1..7),
// and every tile gets its own stencil entry.
TEST(ClipIDs, SevenOnSameLevel) {
    const std::vector<std::vector<std::shared_ptr<Tile>>> sources = {
        {
            std::make_shared<Tile>(TileID { 2, 0, 0, 2 }),
            std::make_shared<Tile>(TileID { 2, 0, 1, 2 }),
            std::make_shared<Tile>(TileID { 2, 0, 2, 2 }),
            std::make_shared<Tile>(TileID { 2, 1, 0, 2 }),
            std::make_shared<Tile>(TileID { 2, 1, 1, 2 }),
            std::make_shared<Tile>(TileID { 2, 1, 2, 2 }),
            std::make_shared<Tile>(TileID { 2, 2, 0, 2 }),
        },
    };

    ClipIDGenerator generator;
    generate(generator, sources);
    // print(sources);
    ASSERT_EQ(ClipID("00000111", "00000001"), sources[0][0]->clip);
    ASSERT_EQ(ClipID("00000111", "00000010"), sources[0][1]->clip);
    ASSERT_EQ(ClipID("00000111", "00000011"), sources[0][2]->clip);
    ASSERT_EQ(ClipID("00000111", "00000100"), sources[0][3]->clip);
    ASSERT_EQ(ClipID("00000111", "00000101"), sources[0][4]->clip);
    ASSERT_EQ(ClipID("00000111", "00000110"), sources[0][5]->clip);
    ASSERT_EQ(ClipID("00000111", "00000111"), sources[0][6]->clip);

    const auto stencils = generator.getStencils();
    // print(stencils);
    auto it = stencils.begin();
    ASSERT_EQ(7, stencils.size());
    ASSERT_EQ(Stencil(TileID{ 2, 0, 0, 2 }, { "00000111", "00000001"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 2, 0, 1, 2 }, { "00000111", "00000010"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 2, 0, 2, 2 }, { "00000111", "00000011"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 2, 1, 0, 2 }, { "00000111", "00000100"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 2, 1, 1, 2 }, { "00000111", "00000101"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 2, 1, 2, 2 }, { "00000111", "00000110"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 2, 2, 0, 2 }, { "00000111", "00000111"}), *it++);
    ASSERT_EQ(stencils.end(), it);
}
// Tiles spanning zoom 2..4 within one source: twelve tiles need a 4-bit
// mask, and child tiles shadow their ancestors in the stencil map.
TEST(ClipIDs, MultipleLevels) {
    const std::vector<std::vector<std::shared_ptr<Tile>>> sources = {
        {
            std::make_shared<Tile>(TileID { 2, 0, 0, 2 }),
            std::make_shared<Tile>(TileID { 3, 0, 0, 3 }),
            std::make_shared<Tile>(TileID { 3, 0, 1, 3 }),
            std::make_shared<Tile>(TileID { 4, 0, 2, 4 }),
            std::make_shared<Tile>(TileID { 4, 1, 2, 4 }),
            std::make_shared<Tile>(TileID { 4, 0, 3, 4 }),
            std::make_shared<Tile>(TileID { 4, 1, 3, 4 }),
            std::make_shared<Tile>(TileID { 3, 1, 0, 3 }),
            std::make_shared<Tile>(TileID { 3, 1, 1, 3 }),
            std::make_shared<Tile>(TileID { 2, 1, 0, 2 }),
            std::make_shared<Tile>(TileID { 3, 2, 0, 3 }),
            std::make_shared<Tile>(TileID { 3, 2, 1, 3 }),
        },
    };

    ClipIDGenerator generator;
    generate(generator, sources);
    // print(sources);
    ASSERT_EQ(ClipID("00001111", "00000001"), sources[0][0]->clip);
    ASSERT_EQ(ClipID("00001111", "00000011"), sources[0][1]->clip);
    ASSERT_EQ(ClipID("00001111", "00000100"), sources[0][2]->clip);
    ASSERT_EQ(ClipID("00001111", "00001001"), sources[0][3]->clip);
    ASSERT_EQ(ClipID("00001111", "00001011"), sources[0][4]->clip);
    ASSERT_EQ(ClipID("00001111", "00001010"), sources[0][5]->clip);
    ASSERT_EQ(ClipID("00001111", "00001100"), sources[0][6]->clip);
    ASSERT_EQ(ClipID("00001111", "00000101"), sources[0][7]->clip);
    ASSERT_EQ(ClipID("00001111", "00000110"), sources[0][8]->clip);
    ASSERT_EQ(ClipID("00001111", "00000010"), sources[0][9]->clip);
    ASSERT_EQ(ClipID("00001111", "00000111"), sources[0][10]->clip);
    ASSERT_EQ(ClipID("00001111", "00001000"), sources[0][11]->clip);

    const auto stencils = generator.getStencils();
    // print(stencils);
    auto it = stencils.begin();
    ASSERT_EQ(10, stencils.size());
    ASSERT_EQ(Stencil(TileID{ 2, 1, 0, 2 }, { "00001111", "00000010"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 3, 0, 0, 3 }, { "00001111", "00000011"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 3, 1, 0, 3 }, { "00001111", "00000101"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 3, 1, 1, 3 }, { "00001111", "00000110"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 3, 2, 0, 3 }, { "00001111", "00000111"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 3, 2, 1, 3 }, { "00001111", "00001000"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 4, 0, 2, 4 }, { "00001111", "00001001"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 4, 0, 3, 4 }, { "00001111", "00001010"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 4, 1, 2, 4 }, { "00001111", "00001011"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 4, 1, 3, 4 }, { "00001111", "00001100"}), *it++);
    ASSERT_EQ(stencils.end(), it);
}
// Regression test for issue #206: a real-world mix of zoom 10/11/12 tiles
// must produce distinct stencil entries for all eleven tiles.
TEST(ClipIDs, Bug206) {
    const std::vector<std::vector<std::shared_ptr<Tile>>> sources = {
        {
            std::make_shared<Tile>(TileID { 10, 162, 395, 10 }),
            std::make_shared<Tile>(TileID { 10, 162, 396, 10 }),
            std::make_shared<Tile>(TileID { 10, 163, 395, 10 }),
            std::make_shared<Tile>(TileID { 11, 326, 791, 11 }),
            std::make_shared<Tile>(TileID { 12, 654, 1582, 12 }),
            std::make_shared<Tile>(TileID { 12, 654, 1583, 12 }),
            std::make_shared<Tile>(TileID { 12, 655, 1582, 12 }),
            std::make_shared<Tile>(TileID { 12, 655, 1583, 12 }),
            std::make_shared<Tile>(TileID { 10, 163, 396, 10 }),
            std::make_shared<Tile>(TileID { 10, 164, 395, 10 }),
            std::make_shared<Tile>(TileID { 10, 164, 396, 10 }),
        },
    };

    ClipIDGenerator generator;
    generate(generator, sources);
    // print(sources);
    ASSERT_EQ(ClipID("00001111", "00000001"), sources[0][0]->clip);
    ASSERT_EQ(ClipID("00001111", "00000010"), sources[0][1]->clip);
    ASSERT_EQ(ClipID("00001111", "00000011"), sources[0][2]->clip);
    ASSERT_EQ(ClipID("00001111", "00000111"), sources[0][3]->clip);
    ASSERT_EQ(ClipID("00001111", "00001000"), sources[0][4]->clip);
    ASSERT_EQ(ClipID("00001111", "00001001"), sources[0][5]->clip);
    ASSERT_EQ(ClipID("00001111", "00001010"), sources[0][6]->clip);
    ASSERT_EQ(ClipID("00001111", "00001011"), sources[0][7]->clip);
    ASSERT_EQ(ClipID("00001111", "00000100"), sources[0][8]->clip);
    ASSERT_EQ(ClipID("00001111", "00000101"), sources[0][9]->clip);
    ASSERT_EQ(ClipID("00001111", "00000110"), sources[0][10]->clip);

    const auto stencils = generator.getStencils();
    // print(stencils);
    auto it = stencils.begin();
    ASSERT_EQ(11, stencils.size());
    ASSERT_EQ(Stencil(TileID{ 10, 162, 395, 10 }, { "00001111", "00000001"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 10, 162, 396, 10 }, { "00001111", "00000010"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 10, 163, 395, 10 }, { "00001111", "00000011"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 10, 163, 396, 10 }, { "00001111", "00000100"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 10, 164, 395, 10 }, { "00001111", "00000101"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 10, 164, 396, 10 }, { "00001111", "00000110"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 11, 326, 791, 11 }, { "00001111", "00000111"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 12, 654, 1582, 12 }, { "00001111", "00001000"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 12, 654, 1583, 12 }, { "00001111", "00001001"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 12, 655, 1582, 12 }, { "00001111", "00001010"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 12, 655, 1583, 12 }, { "00001111", "00001011"}), *it++);
    ASSERT_EQ(stencils.end(), it);
}
// Three sources sharing the stencil bit space: each source gets its own
// (possibly overlapping) bit range, and the final stencil map combines the
// per-source masks/references for tiles that appear in several sources.
TEST(ClipIDs, MultipleSources) {
    const std::vector<std::vector<std::shared_ptr<Tile>>> sources = {
        {
            std::make_shared<Tile>(TileID { 0, 0, 0, 0 }),
            std::make_shared<Tile>(TileID { 1, 1, 1, 1 }),
            std::make_shared<Tile>(TileID { 2, 2, 1, 2 }),
            std::make_shared<Tile>(TileID { 2, 2, 2, 2 }),
        },
        {
            std::make_shared<Tile>(TileID { 0, 0, 0, 0 }),
            std::make_shared<Tile>(TileID { 1, 1, 1, 1 }),
            std::make_shared<Tile>(TileID { 2, 1, 1, 2 }),
            std::make_shared<Tile>(TileID { 2, 2, 2, 2 }),
        },
        {
            std::make_shared<Tile>(TileID { 1, 0, 0, 1 }),
            std::make_shared<Tile>(TileID { 1, 0, 1, 1 }),
            std::make_shared<Tile>(TileID { 1, 1, 0, 1 }),
            std::make_shared<Tile>(TileID { 1, 1, 1, 1 }),
            std::make_shared<Tile>(TileID { 2, 1, 1, 2 }),
        },
    };

    ClipIDGenerator generator;
    generate(generator, sources);
    // print(sources);
    ASSERT_EQ(ClipID("00000111", "00000001"), sources[0][0]->clip);
    ASSERT_EQ(ClipID("00000111", "00000010"), sources[0][1]->clip);
    ASSERT_EQ(ClipID("00000111", "00000011"), sources[0][2]->clip);
    ASSERT_EQ(ClipID("00000111", "00000100"), sources[0][3]->clip);
    ASSERT_EQ(ClipID("00011000", "00001000"), sources[1][0]->clip);
    ASSERT_EQ(ClipID("00011111", "00000010"), sources[1][1]->clip);
    ASSERT_EQ(ClipID("00011000", "00010000"), sources[1][2]->clip);
    ASSERT_EQ(ClipID("00011111", "00000100"), sources[1][3]->clip);
    ASSERT_EQ(ClipID("11100000", "00100000"), sources[2][0]->clip);
    ASSERT_EQ(ClipID("11100000", "01000000"), sources[2][1]->clip);
    ASSERT_EQ(ClipID("11100000", "01100000"), sources[2][2]->clip);
    ASSERT_EQ(ClipID("11100000", "10000000"), sources[2][3]->clip);
    ASSERT_EQ(ClipID("11111000", "00010000"), sources[2][4]->clip);

    const auto stencils = generator.getStencils();
    // print(stencils);
    auto it = stencils.begin();
    ASSERT_EQ(7, stencils.size());
    ASSERT_EQ(Stencil(TileID{ 1, 0, 0, 1 }, { "11111111", "00101001"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 1, 0, 1, 1 }, { "11111111", "01001001"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 1, 1, 0, 1 }, { "11111111", "01101001"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 1, 1, 1, 1 }, { "11111111", "10000010"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 2, 1, 1, 2 }, { "11111111", "00010001"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 2, 2, 1, 2 }, { "11111111", "01101011"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 2, 2, 2, 2 }, { "11111111", "10000100"}), *it++);
    ASSERT_EQ(stencils.end(), it);
}
// The same tile IDs appearing in multiple sources (and even duplicated
// within one source) must reuse identical clip IDs and not inflate the
// stencil map.
TEST(ClipIDs, DuplicateIDs) {
    const std::vector<std::vector<std::shared_ptr<Tile>>> sources = {
        {
            std::make_shared<Tile>(TileID { 2, 0, 0, 2 }),
            std::make_shared<Tile>(TileID { 2, 0, 1, 2 }),
        },
        {
            std::make_shared<Tile>(TileID { 2, 0, 0, 2 }),
            std::make_shared<Tile>(TileID { 2, 0, 1, 2 }),
            std::make_shared<Tile>(TileID { 2, 0, 1, 2 }),
        }
    };

    ClipIDGenerator generator;
    generate(generator, sources);
    // print(sources);
    ASSERT_EQ(ClipID("00000011", "00000001"), sources[0][0]->clip);
    ASSERT_EQ(ClipID("00000011", "00000010"), sources[0][1]->clip);
    ASSERT_EQ(ClipID("00000011", "00000001"), sources[1][0]->clip);
    ASSERT_EQ(ClipID("00000011", "00000010"), sources[1][1]->clip);
    ASSERT_EQ(ClipID("00000011", "00000010"), sources[1][2]->clip);

    const auto stencils = generator.getStencils();
    // print(stencils);
    auto it = stencils.begin();
    ASSERT_EQ(2, stencils.size());
    ASSERT_EQ(Stencil(TileID{ 2, 0, 0, 2 }, { "00000011", "00000001"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 2, 0, 1, 2 }, { "00000011", "00000010"}), *it++);
    ASSERT_EQ(stencils.end(), it);
}
// A later source contains the parent tile of an earlier source's tile:
// the combined stencil entries must merge masks across sources so the
// child still wins over its parent.
TEST(ClipIDs, SecondSourceHasParentOfFirstSource) {
    const std::vector<std::vector<std::shared_ptr<Tile>>> sources = {
        {
            std::make_shared<Tile>(TileID { 1, 0, 0, 1 }),
        },
        {
            std::make_shared<Tile>(TileID { 0, 0, 0, 0 }),
            std::make_shared<Tile>(TileID { 1, 0, 0, 1 }),
        },
        {
            std::make_shared<Tile>(TileID { 0, 0, 0, 0 }),
        }
    };

    ClipIDGenerator generator;
    generate(generator, sources);
    // print(sources);
    ASSERT_EQ(ClipID("00000001", "00000001"), sources[0][0]->clip);
    ASSERT_EQ(ClipID("00000010", "00000010"), sources[1][0]->clip);
    ASSERT_EQ(ClipID("00000011", "00000001"), sources[1][1]->clip);

    const auto stencils = generator.getStencils();
    // print(stencils);
    auto it = stencils.begin();
    ASSERT_EQ(2, stencils.size());
    ASSERT_EQ(Stencil(TileID{ 0, 0, 0, 0 }, { "00000110", "00000110"}), *it++);
    ASSERT_EQ(Stencil(TileID{ 1, 0, 0, 1 }, { "00000111", "00000101"}), *it++);
    ASSERT_EQ(stencils.end(), it);
}
<file_sep>/platform/android/MapboxGLAndroidSDK/src/main/java/com/mapbox/mapboxsdk/maps/TrackingSettings.java
package com.mapbox.mapboxsdk.maps;
import android.support.annotation.NonNull;
import android.support.annotation.UiThread;
import com.mapbox.mapboxsdk.constants.MyBearingTracking;
import com.mapbox.mapboxsdk.constants.MyLocationTracking;
import com.mapbox.mapboxsdk.maps.widgets.UserLocationView;
/**
* Settings for the user location and bearing tracking of a MapboxMap.
*/
public class TrackingSettings {

    // Collaborators are injected once by MapboxMap and never replaced.
    private final MapView mapView;
    private final UiSettings uiSettings;
    private boolean dismissTrackingOnGesture = true;

    @MyLocationTracking.Mode
    private int mMyLocationTrackingMode;

    @MyBearingTracking.Mode
    private int mMyBearingTrackingMode;

    TrackingSettings(@NonNull MapView mapView, UiSettings uiSettings) {
        this.mapView = mapView;
        this.uiSettings = uiSettings;
    }

    /**
     * <p>
     * Set the current my location tracking mode.
     * </p>
     * <p>
     * Will enable my location if not active.
     * </p>
     * See {@link MyLocationTracking} for different values.
     *
     * @param myLocationTrackingMode The location tracking mode to be used.
     * @throws SecurityException if no suitable permission is present
     * @see MyLocationTracking
     */
    @UiThread
    public void setMyLocationTrackingMode(@MyLocationTracking.Mode int myLocationTrackingMode) {
        mMyLocationTrackingMode = myLocationTrackingMode;
        mapView.setMyLocationTrackingMode(myLocationTrackingMode);
        // Tracking modes may need to disable gestures; re-evaluate.
        validateGesturesForTrackingModes();
    }

    /**
     * Returns the current user location tracking mode.
     *
     * @return The current user location tracking mode.
     * One of the values from {@link MyLocationTracking.Mode}.
     * @see MyLocationTracking.Mode
     */
    @UiThread
    @MyLocationTracking.Mode
    public int getMyLocationTrackingMode() {
        return mMyLocationTrackingMode;
    }

    /**
     * <p>
     * Set the current my bearing tracking mode.
     * </p>
     * Shows the direction the user is heading.
     * <p>
     * When location tracking is disabled the direction of {@link UserLocationView} is rotated
     * When location tracking is enabled the {@link MapView} is rotated based on bearing value.
     * </p>
     * See {@link MyBearingTracking} for different values.
     *
     * @param myBearingTrackingMode The bearing tracking mode to be used.
     * @throws SecurityException if no suitable permission is present
     * @see MyBearingTracking
     */
    @UiThread
    public void setMyBearingTrackingMode(@MyBearingTracking.Mode int myBearingTrackingMode) {
        mMyBearingTrackingMode = myBearingTrackingMode;
        mapView.setMyBearingTrackingMode(myBearingTrackingMode);
    }

    /**
     * Returns the current user bearing tracking mode.
     * See {@link MyBearingTracking} for possible return values.
     *
     * @return the current user bearing tracking mode.
     * @see MyBearingTracking
     */
    @UiThread
    // Fixed: was incorrectly annotated @MyLocationTracking.Mode, which let
    // callers assign the result to a location-mode typedef without a lint error.
    @MyBearingTracking.Mode
    public int getMyBearingTrackingMode() {
        return mMyBearingTrackingMode;
    }

    /**
     * Returns if the tracking modes will be dismissed when a gesture occurs.
     *
     * @return True to indicate the tracking modes will be dismissed.
     */
    public boolean isDismissTrackingOnGesture() {
        return dismissTrackingOnGesture;
    }

    /**
     * Set the dismissal of the tracking modes if a gesture occurs.
     *
     * @param dismissTrackingOnGesture True to dismiss the tracking modes.
     */
    public void setDismissTrackingOnGesture(boolean dismissTrackingOnGesture) {
        this.dismissTrackingOnGesture = dismissTrackingOnGesture;
        validateGesturesForTrackingModes();
    }

    /**
     * When tracking modes are not dismissed by gestures, gestures that would
     * break tracking (scroll, and rotate unless bearing tracking is off) must
     * be disabled instead.
     */
    private void validateGesturesForTrackingModes() {
        if (!dismissTrackingOnGesture) {
            int myLocationTrackingMode = getMyLocationTrackingMode();
            int myBearingTrackingMode = getMyBearingTrackingMode();

            // Enable/disable gestures based on tracking mode
            if (myLocationTrackingMode == MyLocationTracking.TRACKING_NONE) {
                uiSettings.setScrollGesturesEnabled(true);
                uiSettings.setRotateGesturesEnabled(true);
            } else {
                uiSettings.setScrollGesturesEnabled(false);
                uiSettings.setRotateGesturesEnabled((myBearingTrackingMode == MyBearingTracking.NONE));
            }
        }
    }

    /**
     * Return if location tracking is disabled
     *
     * @return True if location tracking is disabled.
     */
    public boolean isLocationTrackingDisabled() {
        return mMyLocationTrackingMode == MyLocationTracking.TRACKING_NONE;
    }
}
<file_sep>/platform/linux/scripts/after_script.sh
#!/bin/bash

set -e
set -o pipefail

# Upload render-test result images to S3. Credentials are only available on
# trusted builds (not on fork PRs), so skip the upload when they are absent.
if [ -n "${AWS_ACCESS_KEY_ID}" ] && [ -n "${AWS_SECRET_ACCESS_KEY}" ] ; then
    # Install and add awscli to PATH for uploading the results
    pip install --user awscli
    export PATH="$(python -m site --user-base)/bin:${PATH}"

    # Quote expansions to survive unusual repo slugs / job numbers.
    REPO_NAME=$(basename "${TRAVIS_REPO_SLUG}")
    aws s3 cp --recursive --acl public-read --exclude "*" --include "*/actual.png" test/fixtures \
        "s3://mapbox/${REPO_NAME}/render-tests/${TRAVIS_JOB_NUMBER}"
fi
<file_sep>/docker/clang-tidy/tidy.sh
#!/usr/bin/env bash

# Runs clang-tidy over the codebase inside the clang-tidy Docker image.
# NOTE(review): strict error handling is deliberately commented out here,
# presumably so a tidy failure does not abort the whole run — confirm intent.
# set -e
# set -o pipefail

export FLAVOR=linux
export CXX=clang++-3.8
export BUILDTYPE=Release

cd build

# before_install
source ./scripts/travis_helper.sh

# install
./platform/${FLAVOR}/scripts/install.sh

# Use the clang-tidy binary matching the compiler version above.
export CLANG_TIDY=clang-tidy-3.8
make tidy
#include "../fixtures/util.hpp"
#include <mbgl/platform/default/headless_display.hpp>
#include <mbgl/platform/default/headless_view.hpp>
#include <mbgl/storage/network_status.hpp>
#include <mbgl/storage/offline_database.hpp>
#include <mbgl/storage/default_file_source.hpp>
#include <mbgl/platform/log.hpp>
#include <mbgl/util/work_request.hpp>
#include <mbgl/util/io.hpp>
using namespace mbgl;
using namespace std::literals::chrono_literals;
using namespace std::literals::string_literals;
namespace {

// Builds a Response whose body is the given fixture file and whose expiry
// is set to the epoch, i.e. the cached entry is already stale. Offline mode
// must still serve it.
Response expiredItem(const std::string& path) {
    Response response;
    response.data = std::make_shared<std::string>(util::read_file("test/fixtures/"s + path));
    response.expires = SystemClock::from_time_t(0);
    return response;
}

// Base URL for the seeded resources; never actually contacted in this test.
const std::string prefix = "http://127.0.0.1:3000";

}
// Shared headless GL display for the test binary.
auto display = std::make_shared<mbgl::HeadlessDisplay>();

// Seeds the cache with expired copies of every resource a style needs, goes
// offline, and verifies the map still renders from the stale cache.
TEST(API, Offline) {
    HeadlessView view(display, 1);
    DefaultFileSource fileSource(":memory:", ".");

    fileSource.put(Resource::style(prefix + "/offline/style.json"), expiredItem("offline/style.json"));
    fileSource.put(Resource::source(prefix + "/offline/streets.json"), expiredItem("offline/streets.json"));
    fileSource.put(Resource::spriteJSON(prefix + "/offline/sprite", 1.0), expiredItem("offline/sprite.json"));
    fileSource.put(Resource::spriteImage(prefix + "/offline/sprite", 1.0), expiredItem("offline/sprite.png"));
    fileSource.put(Resource::tile(prefix + "/offline/{z}-{x}-{y}.vector.pbf", 1.0, 0, 0, 0), expiredItem("offline/0-0-0.vector.pbf"));
    fileSource.put(Resource::glyphs(prefix + "/offline/{fontstack}/{range}.pbf", "Helvetica", {0, 255}), expiredItem("offline/glyph.pbf"));

    NetworkStatus::Set(NetworkStatus::Status::Offline);

    Map map(view, fileSource, MapMode::Still);
    map.setStyleURL(prefix + "/offline/style.json");

    test::checkImage("test/fixtures/offline"s,
                     test::render(map),
                     0.0015,
                     0.1);

    // Restore global state for subsequent tests.
    NetworkStatus::Set(NetworkStatus::Status::Online);
}
<file_sep>/src/mbgl/tile/geojson_tile.hpp
#ifndef MBGL_ANNOTATION_GEOJSON_VT_TILE
#define MBGL_ANNOTATION_GEOJSON_VT_TILE
#include <mbgl/tile/geometry_tile.hpp>
#include <mbgl/map/tile_id.hpp>
#include <unordered_map>
namespace mapbox {
namespace geojsonvt {
class GeoJSONVT;
} // namespace geojsonvt
} // namespace mapbox
namespace mbgl {
// Implements a simple in-memory Tile type that holds GeoJSON values. A GeoJSON tile can only have
// one layer, and it is always returned regardless of which layer is requested.
// A single GeoJSON feature: a type, its geometry, and flat string tags.
class GeoJSONTileFeature : public GeometryTileFeature {
public:
    using Tags = std::unordered_map<std::string, std::string>;

    GeoJSONTileFeature(FeatureType, GeometryCollection&&, Tags&& = Tags{});
    FeatureType getType() const override;
    // Returns the tag value for the given key, if present.
    optional<Value> getValue(const std::string&) const override;
    GeometryCollection getGeometries() const override;

private:
    const FeatureType type;
    const GeometryCollection geometries;
    const Tags tags;
};
// An immutable list of GeoJSON features acting as a tile layer.
class GeoJSONTileLayer : public GeometryTileLayer {
public:
    using Features = std::vector<std::shared_ptr<const GeoJSONTileFeature>>;

    GeoJSONTileLayer(Features&&);
    std::size_t featureCount() const override;
    util::ptr<const GeometryTileFeature> getFeature(std::size_t) const override;

private:
    const Features features;
};
// A tile holding exactly one layer, returned for any requested layer name.
class GeoJSONTile : public GeometryTile {
public:
    GeoJSONTile(std::shared_ptr<GeoJSONTileLayer>);
    util::ptr<GeometryTileLayer> getLayer(const std::string&) const override;

private:
    const std::shared_ptr<GeoJSONTileLayer> layer;
};
// Watches one tile of a GeoJSONVT index and notifies its callback whenever
// the underlying index is swapped via setGeoJSONVT().
class GeoJSONTileMonitor : public GeometryTileMonitor {
public:
    // The GeoJSONVT pointer may be null; the monitor then produces no data
    // until setGeoJSONVT() provides an index.
    GeoJSONTileMonitor(mapbox::geojsonvt::GeoJSONVT*, const TileID&);
    virtual ~GeoJSONTileMonitor();

    std::unique_ptr<FileRequest> monitorTile(const GeometryTileMonitor::Callback&) override;

    void setGeoJSONVT(mapbox::geojsonvt::GeoJSONVT*);

private:
    // Re-emits the tile to the registered callback.
    void update();

public:
    const TileID tileID;

private:
    mapbox::geojsonvt::GeoJSONVT* geojsonvt = nullptr;  // not owned
    GeometryTileMonitor::Callback callback;
};
} // namespace mbgl
#endif
<file_sep>/src/mbgl/layer/circle_layer.cpp
#include <mbgl/layer/circle_layer.hpp>
#include <mbgl/style/style_bucket_parameters.hpp>
#include <mbgl/renderer/circle_bucket.hpp>
#include <mbgl/util/get_geometries.hpp>
namespace mbgl {
// Polymorphic copy: duplicates this layer including parsed paint properties.
std::unique_ptr<StyleLayer> CircleLayer::clone() const {
    return std::make_unique<CircleLayer>(*this);
}
// Reads every circle-* paint property from the layer's JSON definition.
void CircleLayer::parsePaints(const JSValue& layer) {
    paint.radius.parse("circle-radius", layer);
    paint.color.parse("circle-color", layer);
    paint.opacity.parse("circle-opacity", layer);
    paint.translate.parse("circle-translate", layer);
    paint.translateAnchor.parse("circle-translate-anchor", layer);
    paint.blur.parse("circle-blur", layer);
}
// Applies class-based cascading to every paint property.
void CircleLayer::cascade(const StyleCascadeParameters& parameters) {
    paint.radius.cascade(parameters);
    paint.color.cascade(parameters);
    paint.opacity.cascade(parameters);
    paint.translate.cascade(parameters);
    paint.translateAnchor.cascade(parameters);
    paint.blur.cascade(parameters);
}
// Evaluates all paint properties for the current zoom/time and updates the
// render passes this layer participates in.
// Returns true while any property still has an active transition.
bool CircleLayer::recalculate(const StyleCalculationParameters& parameters) {
    bool hasTransitions = false;

    hasTransitions |= paint.radius.calculate(parameters);
    hasTransitions |= paint.color.calculate(parameters);
    hasTransitions |= paint.opacity.calculate(parameters);
    hasTransitions |= paint.translate.calculate(parameters);
    hasTransitions |= paint.translateAnchor.calculate(parameters);
    hasTransitions |= paint.blur.calculate(parameters);

    // Circles are always drawn translucent when visible at all.
    passes = paint.isVisible() ? RenderPass::Translucent : RenderPass::None;

    return hasTransitions;
}
// Builds a CircleBucket containing the geometry of every feature that
// passes this layer's filter.
std::unique_ptr<Bucket> CircleLayer::createBucket(StyleBucketParameters& parameters) const {
    auto bucket = std::make_unique<CircleBucket>();

    parameters.eachFilteredFeature(filter, [&] (const auto& feature) {
        bucket->addGeometry(getGeometries(feature));
    });

    // No std::move needed: since C++14 the return of a local unique_ptr is
    // implicitly moved even across the derived-to-base conversion, and the
    // explicit move would inhibit diagnostics (-Wpessimizing-move).
    return bucket;
}
} // namespace mbgl
<file_sep>/src/mbgl/source/source_info.hpp
#ifndef MBGL_MAP_SOURCE_INFO
#define MBGL_MAP_SOURCE_INFO
#include <mbgl/style/types.hpp>
#include <mbgl/util/constants.hpp>
#include <mbgl/util/geo.hpp>
#include <array>
#include <vector>
#include <string>
#include <cstdint>
namespace mbgl {
class TileID;
// TileJSON-style metadata describing a tile source.
class SourceInfo {
public:
    // Tile URL templates ({z}/{x}/{y} placeholders).
    std::vector<std::string> tiles;
    uint8_t minZoom = 0;
    uint8_t maxZoom = 22;
    std::string attribution;
    // Suggested default camera position for this source.
    LatLng center;
    double zoom = 0;
    LatLngBounds bounds = LatLngBounds::world();
};
} // namespace mbgl
#endif // MBGL_MAP_SOURCE_INFO
<file_sep>/platform/ios/DEVELOPING.md
# Contributing to the Mapbox iOS SDK
This document explains how to build the Mapbox iOS SDK from source. It is intended for advanced developers who wish to contribute to Mapbox GL and the Mapbox iOS SDK.
### Requirements
The Mapbox iOS SDK and iosapp demo application build against the iOS 7.0 SDK and require Xcode on a computer running OS X.
### Build
[Install core dependencies](../../INSTALL.md), then run
make iproj
This will create and open an Xcode project that can build the entire library from source, as well as an Objective-C test app. Before building, use the scheme picker button in the toolbar to change the scheme from “All (ios project)” to “iosapp” and the destination from “My Mac” to one of the simulators or connected devices listed in the menu.
If you don't have an Apple Developer account, change the destination from "My Mac" to a simulator such as "iPhone 6" before you run and build the app.
`ipackage` is only one of several available `make` build targets:
* `make ipackage` builds both dynamic and static frameworks in the Release configuration for devices and the iOS Simulator.
* `make ipackage-sim` builds a dynamic framework in the Debug configuration for the iOS simulator. This is the fastest target.
* `make ipackage-strip` builds both dynamic and static frameworks in the Release configuration, stripped of debug symbols, for devices and the iOS Simulator.
* `make iframework` builds a dynamic framework in the Release configuration for devices and the iOS Simulator. The CocoaPods pod downloads the output of this target.
You can customize the build output by passing the following arguments into the `make` invocation:
* `BITCODE=NO` builds without Bitcode support.
* `BUILD_DEVICE=false` builds only for the iOS Simulator.
* `FORMAT=dynamic` builds only a dynamic framework. `FORMAT=static` builds only a static framework, for compatibility with iOS 7.x.
* `SYMBOLS=NO` strips the build output of any debug symbols, yielding much smaller binaries.
### Access Tokens
_The demo applications use Mapbox vector tiles, which require a Mapbox account and API access token. Obtain an access token on the [Mapbox account page](https://www.mapbox.com/studio/account/tokens/)._
Set up the access token by editing the scheme for the application target, then adding an environment variable with the name `MAPBOX_ACCESS_TOKEN`.


### Test
Run
make itest
This runs the included integration tests on the command line.
If you want to run the tests in Xcode instead, first `make ipackage` to create a local static library version, then open `platform/ios/test/ios-tests.xcodeproj`, and lastly `Command + U` on the `Mapbox GL Tests` application target.
### Usage
- Pan to move
- Pinch to zoom
- Use two fingers to rotate
- Double-tap to zoom in one level
- Two-finger single-tap to zoom out one level
- Double-tap, long-pressing the second, then pan up and down to "quick zoom" (iPhone only, meant for one-handed use)
- Use the debug menu to add test annotations, reset position, and cycle through the debug options.
<file_sep>/src/mbgl/style/style_layer.hpp
#ifndef MBGL_STYLE_STYLE_LAYER
#define MBGL_STYLE_STYLE_LAYER
#include <mbgl/style/types.hpp>
#include <mbgl/style/filter_expression.hpp>
#include <mbgl/renderer/render_pass.hpp>
#include <mbgl/util/noncopyable.hpp>
#include <mbgl/util/rapidjson.hpp>
#include <memory>
#include <string>
#include <limits>
namespace mbgl {
class StyleCascadeParameters;
class StyleCalculationParameters;
class StyleBucketParameters;
class Bucket;
// Abstract base class for all style layer types (fill, line, circle, …).
// Concrete subclasses implement parsing, cascading, evaluation, and bucket
// creation for their specific paint/layout properties.
class StyleLayer {
public:
    virtual ~StyleLayer() = default;

    // Check whether this layer is of the given subtype.
    template <class T>
    bool is() const;

    // Dynamically cast this layer to the given subtype.
    template <class T>
    T* as() {
        return is<T>() ? reinterpret_cast<T*>(this) : nullptr;
    }

    template <class T>
    const T* as() const {
        return is<T>() ? reinterpret_cast<const T*>(this) : nullptr;
    }

    // Create a copy of this layer.
    virtual std::unique_ptr<StyleLayer> clone() const = 0;

    virtual void parseLayout(const JSValue& value) = 0;
    virtual void parsePaints(const JSValue& value) = 0;

    // If the layer has a ref, the ref. Otherwise, the id.
    const std::string& bucketName() const;

    // Partially evaluate paint properties based on a set of classes.
    virtual void cascade(const StyleCascadeParameters&) = 0;

    // Fully evaluate cascaded paint properties based on a zoom level.
    // Returns true if any paint properties have active transitions.
    virtual bool recalculate(const StyleCalculationParameters&) = 0;

    virtual std::unique_ptr<Bucket> createBucket(StyleBucketParameters&) const = 0;

    // Checks whether this layer needs to be rendered in the given render pass.
    bool hasRenderPass(RenderPass) const;

    // Checks whether this layer can be rendered.
    bool needsRendering() const;

public:
    std::string id;
    std::string ref;          // id of the layer whose bucket this layer shares
    std::string source;
    std::string sourceLayer;
    FilterExpression filter;
    // Zoom range outside which the layer is not rendered; unbounded by default.
    float minZoom = -std::numeric_limits<float>::infinity();
    float maxZoom = std::numeric_limits<float>::infinity();
    VisibilityType visibility = VisibilityType::Visible;

protected:
    enum class Type {
        Fill,
        Line,
        Circle,
        Symbol,
        Raster,
        Background,
        Custom,
    };

    StyleLayer(Type type_) : type(type_) {}
    StyleLayer(const StyleLayer&) = default;
    StyleLayer& operator=(const StyleLayer&) = delete;

    const Type type;

    // Stores what render passes this layer is currently enabled for. This depends on the
    // evaluated StyleProperties object and is updated accordingly.
    RenderPass passes = RenderPass::None;
};
} // namespace mbgl
#endif
<file_sep>/test/storage/resource.cpp
#include <mbgl/storage/resource.hpp>
#include <gtest/gtest.h>
// Resource::style tags the URL with Kind::Style and stores it verbatim.
TEST(Resource, Style) {
    using mbgl::Resource;
    const Resource res = Resource::style("http://example.com");
    EXPECT_EQ(Resource::Kind::Style, res.kind);
    EXPECT_EQ("http://example.com", res.url);
}
// Resource::source tags the URL with Kind::Source and stores it verbatim.
TEST(Resource, Source) {
    using mbgl::Resource;
    const Resource res = Resource::source("http://example.com");
    EXPECT_EQ(Resource::Kind::Source, res.kind);
    EXPECT_EQ("http://example.com", res.url);
}
// Tile resources: the URL template is expanded with the tile coordinates,
// and {ratio} (when present) with the pixel ratio.
TEST(Resource, Tile) {
    using mbgl::Resource;

    const Resource rasterRes = Resource::tile("http://example.com/{z}/{x}/{y}{ratio}.png", 2.0, 1, 2, 3);
    EXPECT_EQ(Resource::Kind::Tile, rasterRes.kind);
    EXPECT_EQ("http://example.com/3/1/2@2x.png", rasterRes.url);
    EXPECT_EQ("http://example.com/{z}/{x}/{y}{ratio}.png", rasterRes.tileData->urlTemplate);
    EXPECT_EQ(2, rasterRes.tileData->pixelRatio);
    EXPECT_EQ(1, rasterRes.tileData->x);
    EXPECT_EQ(2, rasterRes.tileData->y);
    EXPECT_EQ(3, rasterRes.tileData->z);

    const Resource vectorRes = Resource::tile("http://example.com/{z}/{x}/{y}.mvt", 2.0, 1, 2, 3);
    EXPECT_EQ(Resource::Kind::Tile, vectorRes.kind);
    EXPECT_EQ("http://example.com/3/1/2.mvt", vectorRes.url);
    EXPECT_EQ("http://example.com/{z}/{x}/{y}.mvt", vectorRes.tileData->urlTemplate);
    // No {ratio} placeholder in the template: the ratio is not recorded.
    EXPECT_EQ(1, vectorRes.tileData->pixelRatio);
    EXPECT_EQ(1, vectorRes.tileData->x);
    EXPECT_EQ(2, vectorRes.tileData->y);
    EXPECT_EQ(3, vectorRes.tileData->z);
}
// Glyph resources: {fontstack} and {range} are substituted into the template.
TEST(Resource, Glyphs) {
    using mbgl::Resource;
    const Resource res = Resource::glyphs("http://example.com/{fontstack}/{range}", "stack", {0, 255});
    EXPECT_EQ(Resource::Kind::Glyphs, res.kind);
    EXPECT_EQ("http://example.com/stack/0-255", res.url);
}
// Sprite image resources get a @{ratio}x suffix and a .png extension.
TEST(Resource, SpriteImage) {
    using mbgl::Resource;
    const Resource res = Resource::spriteImage("http://example.com/sprite", 2.0);
    EXPECT_EQ(Resource::Kind::SpriteImage, res.kind);
    EXPECT_EQ("http://example.com/sprite@2x.png", res.url);
}
// Sprite JSON resources get a @{ratio}x suffix and a .json extension.
TEST(Resource, SpriteJSON) {
    using mbgl::Resource;
    const Resource res = Resource::spriteJSON("http://example.com/sprite", 2.0);
    EXPECT_EQ(Resource::Kind::SpriteJSON, res.kind);
    EXPECT_EQ("http://example.com/sprite@2x.json", res.url);
}
<file_sep>/platform/osx/DEVELOPING.md
# Contributing to the Mapbox OS X SDK
This document explains how to build the Mapbox OS X SDK from source. It is intended for advanced developers who wish to contribute to Mapbox GL and the Mapbox OS X SDK.
### Requirements
The Mapbox OS X SDK and the osxapp demo application run on OS X 10.10.0 and above.
### Build
1. [Install core dependencies](../../INSTALL.md).
1. Run `make xproj`.
1. Switch to the osxsdk or osxapp scheme. The former builds just the Cocoa framework, while the latter also builds a Cocoa demo application based on it.
### Access tokens
The demo applications use Mapbox vector tiles, which require a Mapbox account and API access token. Obtain an access token on the [Mapbox account page](https://www.mapbox.com/studio/account/tokens/). You will be prompted for this access token the first time you launch the demo application.
### Usage
Through the OS X SDK, the demo application supports a variety of standard gestures and keyboard shortcuts. For more details, open Mapbox GL Help from the Help menu.
You can also [integrate the Mapbox OS X SDK into your own Cocoa application](INSTALL.md).
<file_sep>/include/mbgl/annotation/shape_annotation.hpp
#ifndef MBGL_ANNOTATION_SHAPE_ANNOTATION
#define MBGL_ANNOTATION_SHAPE_ANNOTATION
#include <mbgl/annotation/annotation.hpp>
#include <mbgl/style/types.hpp>
#include <mbgl/util/geo.hpp>
#include <mapbox/variant.hpp>
namespace mbgl {
using AnnotationSegment = std::vector<LatLng>;
using AnnotationSegments = std::vector<AnnotationSegment>;
// Paint properties for a fill shape annotation.
struct FillAnnotationProperties {
    float opacity = 1;
    Color color = {{ 0, 0, 0, 1 }};
    // NOTE(review): alpha of -1 appears to act as a "no outline" sentinel —
    // confirm against the annotation renderer before relying on it.
    Color outlineColor = {{ 0, 0, 0, -1 }};
};
// Paint properties for a line shape annotation.
struct LineAnnotationProperties {
    float opacity = 1;
    float width = 1;
    Color color = {{ 0, 0, 0, 1 }};
};
// An immutable polyline/polygon annotation. Coordinates are longitude-wrapped
// on construction so downstream tiling code sees canonical values.
class ShapeAnnotation {
public:
    using Properties = mapbox::util::variant<
        FillAnnotationProperties, // creates a fill annotation
        LineAnnotationProperties, // creates a line annotation
        std::string>; // creates an annotation whose type and properties are sourced from a style layer

    ShapeAnnotation(const AnnotationSegments& segments_, const Properties& properties_)
        : segments(wrapCoordinates(segments_)), properties(properties_) {}

    const AnnotationSegments segments;
    const Properties properties;

private:
    // static so it is safe to call from the member initializer list above
    // (no members of the object under construction are touched).
    static AnnotationSegments wrapCoordinates(const AnnotationSegments& segments_) {
        AnnotationSegments wrappedSegments;
        wrappedSegments.reserve(segments_.size());

        // Wrap all segments coordinates.
        for (const auto& segment_ : segments_) {
            AnnotationSegment wrappedSegment;
            wrappedSegment.reserve(segment_.size());
            for (const auto& latLng_ : segment_) {
                wrappedSegment.push_back(latLng_.wrapped());
            }
            wrappedSegments.push_back(wrappedSegment);
        }

        return wrappedSegments;
    }
};
} // namespace mbgl
#endif
<file_sep>/src/mbgl/layer/background_layer.cpp
#include <mbgl/layer/background_layer.hpp>
#include <mbgl/renderer/bucket.hpp>
namespace mbgl {
// Polymorphic copy used when the style snapshot is cloned for the worker.
std::unique_ptr<StyleLayer> BackgroundLayer::clone() const {
    return std::make_unique<BackgroundLayer>(*this);
}
// Reads the background-* paint properties from a style JSON layer object.
void BackgroundLayer::parsePaints(const JSValue& layer) {
    paint.opacity.parse("background-opacity", layer);
    paint.color.parse("background-color", layer);
    paint.pattern.parse("background-pattern", layer);
}
// Applies class-based cascading to each paint property.
void BackgroundLayer::cascade(const StyleCascadeParameters& parameters) {
    paint.opacity.cascade(parameters);
    paint.color.cascade(parameters);
    paint.pattern.cascade(parameters);
}
// Evaluates paint properties at the current zoom. A fully transparent
// background is excluded from all render passes. Returns true while any
// property transition is still animating.
bool BackgroundLayer::recalculate(const StyleCalculationParameters& parameters) {
    bool hasTransitions = false;

    hasTransitions |= paint.opacity.calculate(parameters);
    hasTransitions |= paint.color.calculate(parameters);
    hasTransitions |= paint.pattern.calculate(parameters);

    passes = paint.opacity > 0 ? RenderPass::Translucent : RenderPass::None;

    return hasTransitions;
}
// Background layers have no per-tile geometry, so no bucket is created.
std::unique_ptr<Bucket> BackgroundLayer::createBucket(StyleBucketParameters&) const {
    return nullptr;
}
} // namespace mbgl
<file_sep>/platform/default/online_file_source.cpp
#include <mbgl/storage/online_file_source.hpp>
#include <mbgl/storage/http_context_base.hpp>
#include <mbgl/storage/network_status.hpp>
#include <mbgl/storage/response.hpp>
#include <mbgl/platform/log.hpp>
#include <mbgl/util/constants.hpp>
#include <mbgl/util/thread.hpp>
#include <mbgl/util/mapbox.hpp>
#include <mbgl/util/exception.hpp>
#include <mbgl/util/chrono.hpp>
#include <mbgl/util/async_task.hpp>
#include <mbgl/util/noncopyable.hpp>
#include <mbgl/util/timer.hpp>
#include <algorithm>
#include <cassert>
#include <list>
#include <unordered_set>
#include <unordered_map>
namespace mbgl {
// Per-request state machine living on the OnlineFileSource worker thread.
// A request cycles between waiting (timer), pending (queued for a free
// connection slot), and active (network request in flight); see the lifecycle
// comment in OnlineFileSource::Impl.
class OnlineFileRequestImpl : public util::noncopyable {
public:
    using Callback = std::function<void (Response)>;

    OnlineFileRequestImpl(FileRequest*, const Resource&, Callback, OnlineFileSource::Impl&);
    ~OnlineFileRequestImpl();

    void networkIsReachableAgain(OnlineFileSource::Impl&);
    void schedule(OnlineFileSource::Impl&, optional<SystemTimePoint> expires);
    void completed(OnlineFileSource::Impl&, Response);

    // Identity of the owning FileRequest; used as the key in Impl's maps.
    FileRequest* key;
    Resource resource;
    // Non-owning; non-null only while a network request is in flight.
    HTTPRequestBase* request = nullptr;
    // Drives the revalidation/retry delays computed in schedule().
    util::Timer timer;
    Callback callback;

    // Counts the number of times a response was already expired when received. We're using
    // this to add a delay when making a new request so we don't keep retrying immediately
    // in case of a server serving expired tiles.
    uint32_t expiredRequests = 0;

    // Counts the number of subsequent failed requests. We're using this value for exponential
    // backoff when retrying requests.
    uint32_t failedRequests = 0;
    Response::Error::Reason failedRequestReason = Response::Error::Reason::Success;
};
// Worker-thread implementation: owns all request objects and throttles the
// number of simultaneously open network connections.
class OnlineFileSource::Impl {
public:
    // Dummy parameter is a workaround for a gcc 4.9 bug.
    Impl(int) {
        NetworkStatus::Subscribe(&reachability);
    }

    ~Impl() {
        NetworkStatus::Unsubscribe(&reachability);
    }

    // Registers a new request; the OnlineFileRequestImpl constructor schedules
    // the first network fetch.
    void request(FileRequest* key, Resource resource, Callback callback) {
        allRequests[key] = std::make_unique<OnlineFileRequestImpl>(key, resource, callback, *this);
    }

    // Removes a request from whichever state it is currently in. Freeing an
    // active slot promotes the oldest pending request.
    void cancel(FileRequest* key) {
        allRequests.erase(key);
        if (activeRequests.erase(key)) {
            activatePendingRequest();
        } else {
            auto it = pendingRequestsMap.find(key);
            if (it != pendingRequestsMap.end()) {
                pendingRequestsList.erase(it->second);
                pendingRequestsMap.erase(it);
            }
        }
    }

    // Starts the request immediately when a connection slot is free;
    // otherwise appends it to the pending queue.
    void activateOrQueueRequest(OnlineFileRequestImpl* impl) {
        assert(allRequests.find(impl->key) != allRequests.end());
        assert(activeRequests.find(impl->key) == activeRequests.end());
        assert(!impl->request);

        if (activeRequests.size() >= HTTPContextBase::maximumConcurrentRequests()) {
            queueRequest(impl);
        } else {
            activateRequest(impl);
        }
    }

    // FIFO list preserves request order; the map gives O(1) removal on cancel.
    void queueRequest(OnlineFileRequestImpl* impl) {
        auto it = pendingRequestsList.insert(pendingRequestsList.end(), impl->key);
        pendingRequestsMap.emplace(impl->key, std::move(it));
    }

    // Opens the network connection. The completion lambda releases the active
    // slot and promotes a pending request *before* notifying the request, so
    // the slot is reusable even if completed() immediately re-schedules.
    void activateRequest(OnlineFileRequestImpl* impl) {
        activeRequests.insert(impl->key);
        impl->request = httpContext->createRequest(impl->resource, [=] (Response response) {
            impl->request = nullptr;
            activeRequests.erase(impl->key);
            activatePendingRequest();
            impl->completed(*this, response);
        });
    }

    // Moves the head of the pending queue into the active set, if any.
    void activatePendingRequest() {
        if (pendingRequestsList.empty()) {
            return;
        }

        FileRequest* key = pendingRequestsList.front();
        pendingRequestsList.pop_front();
        pendingRequestsMap.erase(key);

        auto it = allRequests.find(key);
        assert(it != allRequests.end());
        activateRequest(it->second.get());
    }

private:
    // Invoked (via the AsyncTask below) when NetworkStatus reports the
    // network is reachable again; lets connection-failed requests retry.
    void networkIsReachableAgain() {
        for (auto& req : allRequests) {
            req.second->networkIsReachableAgain(*this);
        }
    }

    /**
     * The lifetime of a request is:
     *
     * 1. Waiting for timeout (revalidation or retry)
     * 2. Pending (waiting for room in the active set)
     * 3. Active (open network connection)
     * 4. Back to #1
     *
     * Requests in any state are in `allRequests`. Requests in the pending state are in
     * `pendingRequests`. Requests in the active state are in `activeRequests`.
     */
    std::unordered_map<FileRequest*, std::unique_ptr<OnlineFileRequestImpl>> allRequests;
    std::list<FileRequest*> pendingRequestsList;
    std::unordered_map<FileRequest*, std::list<FileRequest*>::iterator> pendingRequestsMap;
    std::unordered_set<FileRequest*> activeRequests;

    const std::unique_ptr<HTTPContextBase> httpContext { HTTPContextBase::createContext() };
    util::AsyncTask reachability { std::bind(&Impl::networkIsReachableAgain, this) };
};
// Spins up the dedicated low-priority worker thread that owns all request
// state. The trailing 0 is the gcc 4.9 workaround dummy for Impl's ctor.
OnlineFileSource::OnlineFileSource()
    : thread(std::make_unique<util::Thread<Impl>>(
        util::ThreadContext{ "OnlineFileSource", util::ThreadType::Unknown, util::ThreadPriority::Low }, 0)) {
}
OnlineFileSource::~OnlineFileSource() = default;
// Entry point called from the client thread: canonicalizes mapbox:// URLs
// (attaching the access token where required) and posts the request to the
// worker thread. The returned handle cancels the request on destruction.
// Note: qualification normalized to `util::mapbox::` throughout — the Style
// case previously used a redundant `mbgl::` prefix, inconsistent with the
// other cases (we are already inside namespace mbgl).
std::unique_ptr<FileRequest> OnlineFileSource::request(const Resource& resource, Callback callback) {
    Resource res = resource;

    switch (resource.kind) {
    case Resource::Kind::Unknown:
        break;

    case Resource::Kind::Style:
        res.url = util::mapbox::normalizeStyleURL(resource.url, accessToken);
        break;

    case Resource::Kind::Source:
        res.url = util::mapbox::normalizeSourceURL(resource.url, accessToken);
        break;

    case Resource::Kind::Glyphs:
        res.url = util::mapbox::normalizeGlyphsURL(resource.url, accessToken);
        break;

    case Resource::Kind::SpriteImage:
    case Resource::Kind::SpriteJSON:
        res.url = util::mapbox::normalizeSpriteURL(resource.url, accessToken);
        break;

    case Resource::Kind::Tile:
        res.url = util::mapbox::normalizeTileURL(resource.url, accessToken);
        break;
    }

    // RAII handle: construction posts the request to the worker thread,
    // destruction posts the matching cancel.
    class OnlineFileRequest : public FileRequest {
    public:
        OnlineFileRequest(Resource resource_, FileSource::Callback callback_, util::Thread<OnlineFileSource::Impl>& thread_)
            : thread(thread_),
              workRequest(thread.invokeWithCallback(&OnlineFileSource::Impl::request, callback_, this, resource_)) {
        }

        ~OnlineFileRequest() {
            thread.invoke(&OnlineFileSource::Impl::cancel, this);
        }

        util::Thread<OnlineFileSource::Impl>& thread;
        std::unique_ptr<WorkRequest> workRequest;
    };

    return std::make_unique<OnlineFileRequest>(res, callback, *thread);
}
// Copies the resource (prior* validator fields are updated on responses) and
// kicks off the first fetch right away.
OnlineFileRequestImpl::OnlineFileRequestImpl(FileRequest* key_, const Resource& resource_, Callback callback_, OnlineFileSource::Impl& impl)
    : key(key_),
      resource(resource_),
      callback(std::move(callback_)) {
    // Force an immediate first request if we don't have an expiration time.
    schedule(impl, SystemClock::now());
}
// Cancels the in-flight network request, if any.
OnlineFileRequestImpl::~OnlineFileRequestImpl() {
    if (request) {
        request->cancel();
    }
}
// Computes the delay before retrying a failed request, based on the failure
// reason and the number of consecutive failures. Returns Duration::max()
// for errors that are not retriable.
static Duration errorRetryTimeout(Response::Error::Reason failedRequestReason, uint32_t failedRequests) {
    if (failedRequestReason == Response::Error::Reason::Server) {
        // Retry after one second three times, then start exponential backoff.
        // Shift a 64-bit one: `1 << 31` on a 32-bit signed int overflows
        // (undefined behavior) and would yield a negative timeout.
        return Seconds(failedRequests <= 3 ? 1 : uint64_t(1) << std::min(failedRequests - 3, 31u));
    } else if (failedRequestReason == Response::Error::Reason::Connection) {
        // Immediate exponential backoff.
        assert(failedRequests > 0);
        return Seconds(uint64_t(1) << std::min(failedRequests - 1, 31u));
    } else {
        // No error, or not an error that triggers retries.
        return Duration::max();
    }
}
// Computes the delay before revalidating a resource: exponential backoff when
// the server keeps serving already-expired responses, otherwise the time
// until the stated expiration, or Duration::max() when there is neither.
static Duration expirationTimeout(optional<SystemTimePoint> expires, uint32_t expiredRequests) {
    if (expiredRequests) {
        // 64-bit shift: `1 << 31` on a 32-bit signed int would overflow (UB).
        return Seconds(uint64_t(1) << std::min(expiredRequests - 1, 31u));
    } else if (expires) {
        return std::max(SystemDuration::zero(), *expires - SystemClock::now());
    } else {
        return Duration::max();
    }
}
// Sanitizes a server-provided expiration timestamp that is already in the
// past. Sets `expired` (triggering exponential backoff in the caller) when
// the timestamps give no useful trend; otherwise extrapolates a plausible
// future expiration from the delta between the prior and current values,
// compensating for client/server clock skew.
// `static` added for internal linkage, consistent with the sibling helpers
// errorRetryTimeout/expirationTimeout above; behavior is unchanged.
static SystemTimePoint interpolateExpiration(const SystemTimePoint& current,
                                             optional<SystemTimePoint> prior,
                                             bool& expired) {
    auto now = SystemClock::now();
    if (current > now) {
        return current;
    }

    if (!bool(prior)) {
        expired = true;
        return current;
    }

    // Expiring date is going backwards,
    // fallback to exponential backoff.
    if (current < *prior) {
        expired = true;
        return current;
    }

    auto delta = current - *prior;

    // Server is serving the same expired resource
    // over and over, fallback to exponential backoff.
    if (delta == Duration::zero()) {
        expired = true;
        return current;
    }

    // Assume that either the client or server clock is wrong and
    // try to interpolate a valid expiration date (from the client POV)
    // observing a minimum timeout.
    return now + std::max<SystemDuration>(delta, util::CLOCK_SKEW_RETRY_TIMEOUT);
}
// Arms the timer for the next fetch attempt: the sooner of the error-retry
// delay and the expiration-revalidation delay. No-op when a request is
// already in flight or when nothing is due.
void OnlineFileRequestImpl::schedule(OnlineFileSource::Impl& impl, optional<SystemTimePoint> expires) {
    if (request) {
        // There's already a request in progress; don't start another one.
        return;
    }

    // If we're not being asked for a forced refresh, calculate a timeout that depends on how many
    // consecutive errors we've encountered, and on the expiration time, if present.
    Duration timeout = std::min(errorRetryTimeout(failedRequestReason, failedRequests),
                                expirationTimeout(expires, expiredRequests));

    if (timeout == Duration::max()) {
        return;
    }

    // Emulate a Connection error when the Offline mode is forced with
    // a really long timeout. The request will get re-triggered when
    // the NetworkStatus is set back to Online.
    if (NetworkStatus::Get() == NetworkStatus::Status::Offline) {
        failedRequestReason = Response::Error::Reason::Connection;
        failedRequests = 1;
        timeout = Duration::max();
    }

    // NOTE(review): `impl` (a reference parameter) and `this` are captured by
    // reference; this relies on Impl owning this object and outliving the
    // timer — confirm against util::Timer's cancellation semantics.
    timer.start(timeout, Duration::zero(), [&] {
        impl.activateOrQueueRequest(this);
    });
}
// Handles a finished network response: carries cached validators (Modified/
// ETag) forward when absent, tracks expired/failed counters that drive the
// backoff policy, delivers the response to the caller, and schedules the
// next revalidation.
void OnlineFileRequestImpl::completed(OnlineFileSource::Impl& impl, Response response) {
    // If we didn't get various caching headers in the response, continue using the
    // previous values. Otherwise, update the previous values to the new values.
    if (!response.modified) {
        response.modified = resource.priorModified;
    } else {
        resource.priorModified = response.modified;
    }

    bool isExpired = false;

    if (response.expires) {
        auto prior = resource.priorExpires;
        resource.priorExpires = response.expires;
        // Rewrites already-past expirations; sets isExpired when the server
        // appears to be serving stale data repeatedly.
        response.expires = interpolateExpiration(*response.expires, prior, isExpired);
    }

    if (isExpired) {
        expiredRequests++;
    } else {
        expiredRequests = 0;
    }

    if (!response.etag) {
        response.etag = resource.priorEtag;
    } else {
        resource.priorEtag = response.etag;
    }

    if (response.error) {
        failedRequests++;
        failedRequestReason = response.error->reason;
    } else {
        // Success resets the backoff state.
        failedRequests = 0;
        failedRequestReason = Response::Error::Reason::Success;
    }

    callback(response);
    schedule(impl, response.expires);
}
// Called when connectivity returns; immediately retries only requests whose
// last failure was a connection error.
void OnlineFileRequestImpl::networkIsReachableAgain(OnlineFileSource::Impl& impl) {
    // We need all requests to fail at least once before we are going to start retrying
    // them, and we only immediately restart request that failed due to connection issues.
    if (failedRequestReason == Response::Error::Reason::Connection) {
        schedule(impl, SystemClock::now());
    }
}
} // namespace mbgl
<file_sep>/gyp/osx.gyp
{
'includes': [
'../platform/osx/app/mapboxgl-app.gypi',
'../platform/osx/sdk/framework-osx.gypi',
'../platform/osx/test/osxtest.gypi',
'../platform/linux/mapboxgl-app.gypi',
],
'conditions': [
['test', { 'includes': [ '../test/test.gypi' ] } ],
['offline', { 'includes': [ '../bin/offline.gypi' ] } ],
['render', { 'includes': [ '../bin/render.gypi' ] } ],
],
}
<file_sep>/scripts/coveralls.sh
#!/usr/bin/env bash
# Runs the unit tests under lcov instrumentation and uploads the coverage
# report to Coveralls. Expects HOST_SLUG/BUILDTYPE in the environment and the
# mapbox_time helper function to be defined by the CI harness.

set -e
set -o pipefail

# export so that the mason child process (and anything it spawns) sees the
# local .mason checkout; a plain assignment would not reach child processes.
export PATH="`pwd`/.mason:${PATH}" MASON_DIR="`pwd`/.mason"

mapbox_time "install_lcov" \
mason install lcov 1.12

LCOV="`mason prefix lcov 1.12`/usr/bin/lcov"

# Fail early if the mason-provided lcov binary is missing.
# Quoted so a workspace path containing spaces doesn't break the check.
mapbox_time "check_mason_lcov" \
command -v "$LCOV" > /dev/null 2>&1 || exit 1

mapbox_time "remove_coverage_info" \
rm -f "build/${HOST_SLUG}/${BUILDTYPE}/coverage.info"

# Run all unit tests
./scripts/run_tests.sh "build/${HOST_SLUG}/${BUILDTYPE}/test"

# Collect coverage data and save it into coverage.info
mapbox_time "lcov_capture" \
"$LCOV" \
    --quiet \
    --capture \
    --no-external \
    --gcov-tool "gcov-4.9" \
    --directory "src/mbgl" \
    --directory "platform" \
    --directory "include/mbgl" \
    --directory "build/${HOST_SLUG}/${BUILDTYPE}" \
    --base-directory "build/${HOST_SLUG}/${BUILDTYPE}" \
    --output-file "build/${HOST_SLUG}/${BUILDTYPE}/coverage.info"

mapbox_time "coveralls_upload" \
coveralls-lcov "build/${HOST_SLUG}/${BUILDTYPE}/coverage.info"
<file_sep>/test/api/annotations.cpp
#include "../fixtures/util.hpp"
#include <mbgl/annotation/point_annotation.hpp>
#include <mbgl/annotation/shape_annotation.hpp>
#include <mbgl/sprite/sprite_image.hpp>
#include <mbgl/map/map.hpp>
#include <mbgl/platform/default/headless_display.hpp>
#include <mbgl/platform/default/headless_view.hpp>
#include <mbgl/storage/online_file_source.hpp>
#include <mbgl/util/io.hpp>
#include <future>
#include <vector>
using namespace mbgl;
// Loads a marker PNG from the sprite fixtures directory as a 1x SpriteImage.
std::shared_ptr<SpriteImage> namedMarker(const std::string &name) {
    PremultipliedImage image = decodeImage(util::read_file("test/fixtures/sprites/" + name));
    return std::make_shared<SpriteImage>(std::move(image), 1.0);
}
namespace {

// Renders the map and compares the output against the stored fixture image
// for `name`, within the given pixel/percentage tolerances.
void checkRendering(Map& map, const char * name) {
    test::checkImage(std::string("test/fixtures/annotations/") + name + "/",
                     test::render(map), 0.0002, 0.1);
}

} // end namespace
// A single point annotation with a registered marker icon renders correctly.
TEST(Annotations, PointAnnotation) {
    auto display = std::make_shared<mbgl::HeadlessDisplay>();
    HeadlessView view(display, 1);
    OnlineFileSource fileSource;

    Map map(view, fileSource, MapMode::Still);
    map.setStyleJSON(util::read_file("test/fixtures/api/empty.json"), "");
    map.addAnnotationIcon("default_marker", namedMarker("default_marker.png"));
    map.addPointAnnotation(PointAnnotation({ 0, 0 }, "default_marker"));

    checkRendering(map, "point_annotation");
}
// A line shape annotation with custom color/width renders correctly.
TEST(Annotations, LineAnnotation) {
    auto display = std::make_shared<mbgl::HeadlessDisplay>();
    HeadlessView view(display, 1);
    OnlineFileSource fileSource;

    Map map(view, fileSource, MapMode::Still);
    map.setStyleJSON(util::read_file("test/fixtures/api/empty.json"), "");

    AnnotationSegments segments = {{ {{ { 0, 0 }, { 45, 45 } }} }};

    LineAnnotationProperties properties;
    properties.color = {{ 255, 0, 0, 1 }};
    properties.width = 5;

    map.addShapeAnnotation(ShapeAnnotation(segments, properties));

    checkRendering(map, "line_annotation");
}
// A filled polygon shape annotation renders correctly.
TEST(Annotations, FillAnnotation) {
    auto display = std::make_shared<mbgl::HeadlessDisplay>();
    HeadlessView view(display, 1);
    OnlineFileSource fileSource;

    Map map(view, fileSource, MapMode::Still);
    map.setStyleJSON(util::read_file("test/fixtures/api/empty.json"), "");

    AnnotationSegments segments = {{ {{ { 0, 0 }, { 0, 45 }, { 45, 45 }, { 45, 0 } }} }};

    FillAnnotationProperties properties;
    properties.color = {{ 255, 0, 0, 1 }};

    map.addShapeAnnotation(ShapeAnnotation(segments, properties));

    checkRendering(map, "fill_annotation");
}
// A shape annotation whose appearance comes from a named style layer
// ("annotation" in annotation.json) renders correctly.
TEST(Annotations, StyleSourcedShapeAnnotation) {
    auto display = std::make_shared<mbgl::HeadlessDisplay>();
    HeadlessView view(display, 1);
    OnlineFileSource fileSource;

    Map map(view, fileSource, MapMode::Still);
    map.setStyleJSON(util::read_file("test/fixtures/api/annotation.json"), "");

    AnnotationSegments segments = {{ {{ { 0, 0 }, { 0, 45 }, { 45, 45 }, { 45, 0 } }} }};

    map.addShapeAnnotation(ShapeAnnotation(segments, "annotation"));

    checkRendering(map, "style_sourced_shape_annotation");
}
// Adding a second annotation after an initial render keeps both visible.
TEST(Annotations, AddMultiple) {
    auto display = std::make_shared<mbgl::HeadlessDisplay>();
    HeadlessView view(display, 1);
    OnlineFileSource fileSource;

    Map map(view, fileSource, MapMode::Still);
    map.setStyleJSON(util::read_file("test/fixtures/api/empty.json"), "");
    map.addAnnotationIcon("default_marker", namedMarker("default_marker.png"));
    map.addPointAnnotation(PointAnnotation({ 0, -10 }, "default_marker"));

    test::render(map);

    map.addPointAnnotation(PointAnnotation({ 0, 10 }, "default_marker"));

    checkRendering(map, "add_multiple");
}
// Adding a shape annotation only after the first render still draws it.
TEST(Annotations, NonImmediateAdd) {
    auto display = std::make_shared<mbgl::HeadlessDisplay>();
    HeadlessView view(display, 1);
    OnlineFileSource fileSource;

    Map map(view, fileSource, MapMode::Still);
    map.setStyleJSON(util::read_file("test/fixtures/api/empty.json"), "");

    test::render(map);

    AnnotationSegments segments = {{ {{ { 0, 0 }, { 0, 45 }, { 45, 45 }, { 45, 0 } }} }};

    FillAnnotationProperties properties;
    properties.color = {{ 255, 0, 0, 1 }};

    map.addShapeAnnotation(ShapeAnnotation(segments, properties));

    checkRendering(map, "non_immediate_add");
}
// Re-registering a different image under the same icon name and forcing an
// annotation update repaints existing annotations with the new image.
TEST(Annotations, UpdateIcon) {
    auto display = std::make_shared<mbgl::HeadlessDisplay>();
    HeadlessView view(display, 1);
    OnlineFileSource fileSource;

    Map map(view, fileSource, MapMode::Still);
    map.setStyleJSON(util::read_file("test/fixtures/api/empty.json"), "");
    map.addAnnotationIcon("flipped_marker", namedMarker("default_marker.png"));
    map.addPointAnnotation(PointAnnotation({ 0, 0 }, "flipped_marker"));

    test::render(map);

    map.removeAnnotationIcon("flipped_marker");
    map.addAnnotationIcon("flipped_marker", namedMarker("flipped_marker.png"));
    map.update(Update::Annotations);

    checkRendering(map, "update_icon");
}
// Updating an existing point annotation moves it and swaps its icon.
TEST(Annotations, UpdatePoint) {
    auto display = std::make_shared<mbgl::HeadlessDisplay>();
    HeadlessView view(display, 1);
    OnlineFileSource fileSource;

    Map map(view, fileSource, MapMode::Still);
    map.setStyleJSON(util::read_file("test/fixtures/api/empty.json"), "");
    map.addAnnotationIcon("default_marker", namedMarker("default_marker.png"));
    map.addAnnotationIcon("flipped_marker", namedMarker("flipped_marker.png"));
    AnnotationID point = map.addPointAnnotation(PointAnnotation({ 0, 0 }, "default_marker"));

    test::render(map);

    map.updatePointAnnotation(point, PointAnnotation({ 0, -10 }, "flipped_marker"));

    checkRendering(map, "update_point");
}
// Removing a point annotation after a render leaves an empty map.
TEST(Annotations, RemovePoint) {
    auto display = std::make_shared<mbgl::HeadlessDisplay>();
    HeadlessView view(display, 1);
    OnlineFileSource fileSource;

    Map map(view, fileSource, MapMode::Still);
    map.setStyleJSON(util::read_file("test/fixtures/api/empty.json"), "");
    map.addAnnotationIcon("default_marker", namedMarker("default_marker.png"));
    AnnotationID point = map.addPointAnnotation(PointAnnotation({ 0, 0 }, "default_marker"));

    test::render(map);

    map.removeAnnotation(point);

    checkRendering(map, "remove_point");
}
// Removing a shape annotation after a render leaves an empty map.
TEST(Annotations, RemoveShape) {
    auto display = std::make_shared<mbgl::HeadlessDisplay>();
    HeadlessView view(display, 1);
    OnlineFileSource fileSource;

    AnnotationSegments segments = {{ {{ { 0, 0 }, { 45, 45 } }} }};

    LineAnnotationProperties properties;
    properties.color = {{ 255, 0, 0, 1 }};
    properties.width = 5;

    Map map(view, fileSource, MapMode::Still);
    map.setStyleJSON(util::read_file("test/fixtures/api/empty.json"), "");
    AnnotationID shape = map.addShapeAnnotation(ShapeAnnotation(segments, properties));

    test::render(map);

    map.removeAnnotation(shape);

    checkRendering(map, "remove_shape");
}
// Appears to be a regression test: adding and immediately removing a shape
// annotation before the style is loaded must not crash the renderer.
TEST(Annotations, ImmediateRemoveShape) {
    auto display = std::make_shared<mbgl::HeadlessDisplay>();
    HeadlessView view(display, 1);
    OnlineFileSource fileSource;
    Map map(view, fileSource, MapMode::Still);

    map.removeAnnotation(map.addShapeAnnotation(ShapeAnnotation({}, {})));
    map.setStyleJSON(util::read_file("test/fixtures/api/empty.json"), "");

    test::render(map);
}
// Annotations persist across a style reload.
TEST(Annotations, SwitchStyle) {
    auto display = std::make_shared<mbgl::HeadlessDisplay>();
    HeadlessView view(display, 1);
    OnlineFileSource fileSource;

    Map map(view, fileSource, MapMode::Still);
    map.setStyleJSON(util::read_file("test/fixtures/api/empty.json"), "");
    map.addAnnotationIcon("default_marker", namedMarker("default_marker.png"));
    map.addPointAnnotation(PointAnnotation({ 0, 0 }, "default_marker"));

    test::render(map);

    map.setStyleJSON(util::read_file("test/fixtures/api/empty.json"), "");

    checkRendering(map, "switch_style");
}
<file_sep>/src/mbgl/layer/line_layer.cpp
#include <mbgl/layer/line_layer.hpp>
#include <mbgl/style/style_bucket_parameters.hpp>
#include <mbgl/renderer/line_bucket.hpp>
#include <mbgl/map/tile_id.hpp>
#include <mbgl/util/get_geometries.hpp>
namespace mbgl {
// Polymorphic copy used when the style snapshot is cloned for the worker.
std::unique_ptr<StyleLayer> LineLayer::clone() const {
    return std::make_unique<LineLayer>(*this);
}
// Reads the line-* layout properties from a style JSON layer object.
void LineLayer::parseLayout(const JSValue& value) {
    layout.cap.parse("line-cap", value);
    layout.join.parse("line-join", value);
    layout.miterLimit.parse("line-miter-limit", value);
    layout.roundLimit.parse("line-round-limit", value);
}
// Reads the line-* paint properties from a style JSON layer object.
void LineLayer::parsePaints(const JSValue& layer) {
    paint.opacity.parse("line-opacity", layer);
    paint.color.parse("line-color", layer);
    paint.translate.parse("line-translate", layer);
    paint.translateAnchor.parse("line-translate-anchor", layer);
    paint.width.parse("line-width", layer);
    paint.gapWidth.parse("line-gap-width", layer);
    paint.offset.parse("line-offset", layer);
    paint.blur.parse("line-blur", layer);
    paint.dasharray.parse("line-dasharray", layer);
    paint.pattern.parse("line-pattern", layer);
}
// Applies class-based cascading to each paint property.
void LineLayer::cascade(const StyleCascadeParameters& parameters) {
    paint.opacity.cascade(parameters);
    paint.color.cascade(parameters);
    paint.translate.cascade(parameters);
    paint.translateAnchor.cascade(parameters);
    paint.width.cascade(parameters);
    paint.gapWidth.cascade(parameters);
    paint.offset.cascade(parameters);
    paint.blur.cascade(parameters);
    paint.dasharray.cascade(parameters);
    paint.pattern.cascade(parameters);
}
// Evaluates paint properties at the current zoom. Returns true while any
// property transition is still animating.
bool LineLayer::recalculate(const StyleCalculationParameters& parameters) {
    // for scaling dasharrays
    // The dash pattern is scaled against the line width evaluated at the
    // *floored* (integer) zoom, so the dash scale only changes on integer
    // zoom boundaries; the stashed value is kept in dashLineWidth and width
    // is re-evaluated at the true zoom below.
    StyleCalculationParameters dashArrayParams = parameters;
    dashArrayParams.z = std::floor(dashArrayParams.z);
    paint.width.calculate(dashArrayParams);
    paint.dashLineWidth = paint.width;

    bool hasTransitions = false;

    hasTransitions |= paint.opacity.calculate(parameters);
    hasTransitions |= paint.color.calculate(parameters);
    hasTransitions |= paint.translate.calculate(parameters);
    hasTransitions |= paint.translateAnchor.calculate(parameters);
    hasTransitions |= paint.width.calculate(parameters);
    hasTransitions |= paint.gapWidth.calculate(parameters);
    hasTransitions |= paint.offset.calculate(parameters);
    hasTransitions |= paint.blur.calculate(parameters);
    hasTransitions |= paint.dasharray.calculate(parameters);
    hasTransitions |= paint.pattern.calculate(parameters);

    passes = paint.isVisible() ? RenderPass::Translucent : RenderPass::None;

    return hasTransitions;
}
// Builds a LineBucket for one tile: snapshots the layout properties evaluated
// at the tile's zoom, then feeds every feature passing the layer filter into
// the bucket.
std::unique_ptr<Bucket> LineLayer::createBucket(StyleBucketParameters& parameters) const {
    auto bucket = std::make_unique<LineBucket>(parameters.tileID.overscaleFactor());

    bucket->layout = layout;

    StyleCalculationParameters p(parameters.tileID.z);
    bucket->layout.cap.calculate(p);
    bucket->layout.join.calculate(p);
    bucket->layout.miterLimit.calculate(p);
    bucket->layout.roundLimit.calculate(p);

    parameters.eachFilteredFeature(filter, [&] (const auto& feature) {
        bucket->addGeometry(getGeometries(feature));
    });

    // std::move is required: bucket is a unique_ptr<LineBucket> converted to
    // unique_ptr<Bucket>, so C++11's implicit move-on-return (which needs
    // matching types) does not apply here.
    return std::move(bucket);
}
} // namespace mbgl
<file_sep>/platform/android/MapboxGLAndroidSDK/src/main/resources/fabric/com.mapbox.mapboxsdk.mapbox-android-sdk.properties
fabric-identifier=com.mapbox.mapboxsdk.mapbox-android-sdk
fabric-version=3.2.0
fabric-build-type=binary
<file_sep>/src/mbgl/tile/tile.cpp
#include <mbgl/tile/tile.hpp>
using namespace mbgl;
<file_sep>/src/mbgl/style/function.cpp
#include <mbgl/style/function.hpp>
#include <mbgl/style/style_calculation_parameters.hpp>
#include <mbgl/util/interpolate.hpp>
#include <mbgl/util/chrono.hpp>
#include <cmath>
namespace mbgl {
// Fallback values returned by Function<T>::evaluate when a function has no
// stops at all; one explicit specialization per instantiated value type.
// Fixed here: `std:: string` typo normalized to `std::string`, and the stray
// semicolons after the later specializations (pedantic-warning fodder)
// removed. Behavior is unchanged.
template <typename T>
inline T defaultStopsValue();

template <> inline bool defaultStopsValue() { return true; }
template <> inline float defaultStopsValue() { return 1.0f; }
template <> inline Color defaultStopsValue() { return {{ 0, 0, 0, 1 }}; }
template <> inline std::vector<float> defaultStopsValue() { return {{ 1, 0 }}; }
template <> inline std::array<float, 2> defaultStopsValue() { return {{ 0, 0 }}; }
template <> inline std::string defaultStopsValue() { return {}; }

template <> inline TranslateAnchorType defaultStopsValue() { return {}; }
template <> inline RotateAnchorType defaultStopsValue() { return {}; }
template <> inline CapType defaultStopsValue() { return {}; }
template <> inline JoinType defaultStopsValue() { return {}; }
template <> inline PlacementType defaultStopsValue() { return {}; }
template <> inline TextAnchorType defaultStopsValue() { return {}; }
template <> inline TextJustifyType defaultStopsValue() { return {}; }
template <> inline TextTransformType defaultStopsValue() { return {}; }
template <> inline RotationAlignmentType defaultStopsValue() { return {}; }
// Evaluates the piecewise zoom function at parameters.z: scans for the
// nearest stops at-or-below and at-or-above z and interpolates between them
// (exponentially when base != 1). The scan does not assume sorted stops.
template <typename T>
T Function<T>::evaluate(const StyleCalculationParameters& parameters) const {
    float z = parameters.z;
    bool smaller = false;
    float smaller_z = 0.0f;
    T smaller_val = T();
    bool larger = false;
    float larger_z = 0.0f;
    T larger_val = T();

    // Track the closest stop on each side of z.
    for (uint32_t i = 0; i < stops.size(); i++) {
        float stop_z = stops[i].first;
        T stop_val = stops[i].second;
        if (stop_z <= z && (!smaller || smaller_z < stop_z)) {
            smaller = true;
            smaller_z = stop_z;
            smaller_val = stop_val;
        }
        if (stop_z >= z && (!larger || larger_z > stop_z)) {
            larger = true;
            larger_z = stop_z;
            larger_val = stop_val;
        }
    }

    if (smaller && larger) {
        // z sits exactly on a stop (or between duplicate values): no
        // interpolation needed.
        if (larger_z == smaller_z || larger_val == smaller_val) {
            return smaller_val;
        }
        const float zoomDiff = larger_z - smaller_z;
        const float zoomProgress = z - smaller_z;
        if (base == 1.0f) {
            // Linear interpolation.
            const float t = zoomProgress / zoomDiff;
            return util::interpolate(smaller_val, larger_val, t);
        } else {
            // Exponential interpolation with the configured base.
            const float t = (std::pow(base, zoomProgress) - 1) / (std::pow(base, zoomDiff) - 1);
            return util::interpolate(smaller_val, larger_val, t);
        }
    } else if (larger) {
        // z is below every stop: clamp to the first one.
        return larger_val;
    } else if (smaller) {
        // z is above every stop: clamp to the last one.
        return smaller_val;
    } else {
        // No stop defined.
        return defaultStopsValue<T>();
    }
}
// Explicit instantiations for every value type a style function may hold.
template class Function<bool>;
template class Function<float>;
template class Function<Color>;
template class Function<std::vector<float>>;
template class Function<std::array<float, 2>>;
template class Function<std::string>;
template class Function<TranslateAnchorType>;
template class Function<RotateAnchorType>;
template class Function<CapType>;
template class Function<JoinType>;
template class Function<PlacementType>;
template class Function<TextAnchorType>;
template class Function<TextJustifyType>;
template class Function<TextTransformType>;
template class Function<RotationAlignmentType>;
// Returns the index of the last stop whose zoom value is <= z. If z precedes
// the first stop, returns 0; if z is at or past the final stop, returns the
// last index. Precondition: `stops` must be non-empty (an empty vector would
// make the final `size() - 1` wrap around).
template <typename T>
inline size_t getBiggestStopLessThan(const std::vector<std::pair<float, T>>& stops, float z) {
    // Index is size_t to match stops.size() and avoid a signed/width-mismatch
    // comparison (previously uint32_t).
    for (size_t i = 0; i < stops.size(); i++) {
        if (stops[i].first > z) {
            return i == 0 ? i : i - 1;
        }
    }
    return stops.size() - 1;
}
// Evaluates a cross-faded property at parameters.z. Instead of interpolating
// values, it returns the stop values on either side of the last integer zoom
// change plus a blend factor `t`, so the renderer can cross-fade between the
// two. NOTE(review): the from-scale halving/doubling assumes adjacent integer
// zoom levels differ by a factor of two — confirm against the renderer.
template <typename T>
Faded<T> Function<Faded<T>>::evaluate(const StyleCalculationParameters& parameters) const {
Faded<T> result;
float z = parameters.z;
const float fraction = z - std::floor(z);
std::chrono::duration<float> d = parameters.defaultFadeDuration;
// Portion of the fade duration elapsed since the last integer zoom change,
// clamped to [0, 1].
float t = std::min((parameters.now - parameters.zoomHistory.lastIntegerZoomTime) / d, 1.0f);
float fromScale = 1.0f;
float toScale = 1.0f;
size_t from, to;
if (z > parameters.zoomHistory.lastIntegerZoom) {
// Zooming in: fade from the previous (lower) zoom's stop to the current one.
result.t = fraction + (1.0f - fraction) * t;
from = getBiggestStopLessThan(stops, z - 1.0f);
to = getBiggestStopLessThan(stops, z);
fromScale *= 2.0f;
} else {
// Zooming out: fade from the next (higher) zoom's stop to the current one.
result.t = 1 - (1 - t) * fraction;
to = getBiggestStopLessThan(stops, z);
from = getBiggestStopLessThan(stops, z + 1.0f);
fromScale /= 2.0f;
}
result.from = stops[from].second;
result.to = stops[to].second;
result.fromScale = fromScale;
result.toScale = toScale;
return result;
}
// Cross-faded functions are only instantiated for pattern/dash-like types.
template class Function<Faded<std::string>>;
template class Function<Faded<std::vector<float>>>;
} // namespace mbgl
<file_sep>/test/storage/http_retry_network_status.cpp
#include "storage.hpp"
#include <mbgl/storage/online_file_source.hpp>
#include <mbgl/storage/network_status.hpp>
#include <mbgl/util/chrono.hpp>
#include <mbgl/util/run_loop.hpp>
#include <mbgl/util/timer.hpp>
// Test for https://github.com/mapbox/mapbox-gl-native/issues/2123
//
// A request is made. While the request is in progress, the network status changes. This should
// trigger an immediate retry of all requests that are not in progress. This test makes sure that
// we don't accidentally double-trigger the request.
// A NetworkStatus change while this request is in flight must not re-trigger
// it; the callback below therefore expects to run exactly once (see the
// issue link in the comment above).
TEST_F(Storage, HTTPNetworkStatusChange) {
SCOPED_TEST(HTTPNetworkStatusChange)
using namespace mbgl;
util::RunLoop loop;
OnlineFileSource fs;
const Resource resource { Resource::Unknown, "http://127.0.0.1:3000/delayed" };
// This request takes 200 milliseconds to answer.
std::unique_ptr<FileRequest> req = fs.request(resource, [&](Response res) {
req.reset();
EXPECT_EQ(nullptr, res.error);
ASSERT_TRUE(res.data.get());
EXPECT_EQ("Response", *res.data);
EXPECT_FALSE(bool(res.expires));
EXPECT_FALSE(bool(res.modified));
EXPECT_FALSE(bool(res.etag));
loop.stop();
HTTPNetworkStatusChange.finish();
});
// After 50 milliseconds, we're going to trigger a NetworkStatus change.
util::Timer reachableTimer;
reachableTimer.start(Milliseconds(50), Duration::zero(), [] () {
mbgl::NetworkStatus::Reachable();
});
loop.run();
}
// Tests that a change in network status preempts requests that failed due to connection or
// reachability issues.
// Port 3001 is intentionally unreachable, so the request fails with a
// connection error. The NetworkStatus change at 400ms must preempt the normal
// retry backoff: the first failure arrives quickly, the second (preempted)
// retry must land in the ~0.4–0.6s window, and no third attempt may occur.
TEST_F(Storage, HTTPNetworkStatusChangePreempt) {
SCOPED_TEST(HTTPNetworkStatusChangePreempt)
using namespace mbgl;
util::RunLoop loop;
OnlineFileSource fs;
const auto start = Clock::now();
const Resource resource{ Resource::Unknown, "http://127.0.0.1:3001/test" };
std::unique_ptr<FileRequest> req = fs.request(resource, [&](Response res) {
// Counts how many failure callbacks we have observed so far.
static int counter = 0;
const auto duration = std::chrono::duration<const double>(Clock::now() - start).count();
if (counter == 0) {
EXPECT_GT(0.2, duration) << "Response came in too late";
} else if (counter == 1) {
EXPECT_LT(0.39, duration) << "Preempted retry triggered too early";
EXPECT_GT(0.6, duration) << "Preempted retry triggered too late";
} else if (counter > 1) {
FAIL() << "Retried too often";
}
ASSERT_NE(nullptr, res.error);
EXPECT_EQ(Response::Error::Reason::Connection, res.error->reason);
// The connection-refused message text differs per HTTP backend.
#ifdef MBGL_HTTP_NSURL
EXPECT_TRUE(res.error->message ==
"The operation couldn’t be completed. (NSURLErrorDomain error -1004.)" ||
res.error->message == "Could not connect to the server.")
<< "Full message is: \"" << res.error->message << "\"";
#elif MBGL_HTTP_CURL
const std::string prefix { "Couldn't connect to server: " };
EXPECT_STREQ(prefix.c_str(), res.error->message.substr(0, prefix.size()).c_str()) << "Full message is: \"" << res.error->message << "\"";
#else
FAIL();
#endif
ASSERT_FALSE(res.data.get());
EXPECT_FALSE(bool(res.expires));
EXPECT_FALSE(bool(res.modified));
EXPECT_FALSE(bool(res.etag));
if (counter++ == 1) {
req.reset();
loop.stop();
HTTPNetworkStatusChangePreempt.finish();
}
});
// After 400 milliseconds, we're going to trigger a NetworkStatus change.
util::Timer reachableTimer;
reachableTimer.start(Milliseconds(400), Duration::zero(), [] () {
mbgl::NetworkStatus::Reachable();
});
loop.run();
}
// While the status is Offline, the request must be held back; flipping back
// to Online after 100ms must release it. The callback verifies the response
// only arrives once we are Online again.
TEST_F(Storage, HTTPNetworkStatusOnlineOffline) {
SCOPED_TEST(HTTPNetworkStatusOnlineOffline)
using namespace mbgl;
util::RunLoop loop;
OnlineFileSource fs;
const Resource resource { Resource::Unknown, "http://127.0.0.1:3000/test" };
EXPECT_EQ(NetworkStatus::Get(), NetworkStatus::Status::Online) << "Default status should be Online";
NetworkStatus::Set(NetworkStatus::Status::Offline);
util::Timer onlineTimer;
onlineTimer.start(Milliseconds(100), Duration::zero(), [&] () {
NetworkStatus::Set(NetworkStatus::Status::Online);
});
std::unique_ptr<FileRequest> req = fs.request(resource, [&](Response res) {
req.reset();
EXPECT_EQ(nullptr, res.error);
ASSERT_TRUE(res.data.get());
EXPECT_EQ(NetworkStatus::Get(), NetworkStatus::Status::Online) << "Triggered before set back to Online";
loop.stop();
HTTPNetworkStatusOnlineOffline.finish();
});
loop.run();
}
<file_sep>/test/storage/http_timeout.cpp
#include "storage.hpp"
#include <mbgl/storage/online_file_source.hpp>
#include <mbgl/storage/network_status.hpp>
#include <mbgl/util/chrono.hpp>
#include <mbgl/util/run_loop.hpp>
// The server marks the response with max-age=1, so it expires after one
// second; the file source presumably re-requests expired resources, and the
// test stops after observing exactly 4 responses — TODO confirm the expected
// re-request cadence against OnlineFileSource.
TEST_F(Storage, HTTPTimeout) {
SCOPED_TEST(HTTPTimeout)
using namespace mbgl;
util::RunLoop loop;
OnlineFileSource fs;
int counter = 0;
const Resource resource { Resource::Unknown, "http://127.0.0.1:3000/test?cachecontrol=max-age=1" };
std::unique_ptr<FileRequest> req = fs.request(resource, [&](Response res) {
counter++;
EXPECT_EQ(nullptr, res.error);
ASSERT_TRUE(res.data.get());
EXPECT_EQ("Hello World!", *res.data);
EXPECT_TRUE(bool(res.expires));
EXPECT_FALSE(bool(res.modified));
EXPECT_FALSE(bool(res.etag));
if (counter == 4) {
req.reset();
loop.stop();
HTTPTimeout.finish();
}
});
loop.run();
EXPECT_EQ(4, counter);
}
<file_sep>/src/mbgl/storage/http_request_base.hpp
#ifndef MBGL_STORAGE_HTTP_REQUEST_BASE
#define MBGL_STORAGE_HTTP_REQUEST_BASE
#include <mbgl/storage/response.hpp>
#include <mbgl/storage/resource.hpp>
#include <mbgl/util/noncopyable.hpp>
#include <mbgl/util/chrono.hpp>
#include <mbgl/util/optional.hpp>
#include <functional>
namespace mbgl {
class Response;
// Base class for platform-specific HTTP request implementations. Holds the
// requested resource and the completion callback; cancel() only sets a flag —
// subclasses are expected to honor `cancelled` and stop notifying.
class HTTPRequestBase : private util::noncopyable {
public:
using Callback = std::function<void (Response)>;
HTTPRequestBase(const Resource& resource_, Callback notify_)
: resource(resource_)
, notify(std::move(notify_))
, cancelled(false) {
}
virtual ~HTTPRequestBase() = default;
// Marks the request as cancelled; subclasses override to abort the transfer.
virtual void cancel() { cancelled = true; };
protected:
// Parses an HTTP Cache-Control header value into an expiration time point,
// if one can be derived.
static optional<SystemTimePoint> parseCacheControl(const char *value);
Resource resource;
Callback notify;
bool cancelled;
};
} // namespace mbgl
#endif // MBGL_STORAGE_HTTP_REQUEST_BASE
<file_sep>/src/mbgl/shader/shader.hpp
#ifndef MBGL_RENDERER_SHADER
#define MBGL_RENDERER_SHADER
#include <mbgl/gl/gl.hpp>
#include <mbgl/gl/gl_object_store.hpp>
#include <mbgl/util/noncopyable.hpp>
namespace mbgl {
// Base class for GL shader programs: compiles and links the given vertex and
// fragment sources and owns the resulting GL objects via RAII holders.
class Shader : private util::noncopyable {
public:
Shader(const GLchar *name, const GLchar *vertex, const GLchar *fragment, gl::GLObjectStore&);
~Shader();
const GLchar *name;
// Returns the linked GL program object ID.
GLuint getID() const {
return program.getID();
}
// Binds the vertex attribute pointers for this shader; `offset` is the byte
// offset into the currently bound vertex buffer.
virtual void bind(GLbyte *offset) = 0;
protected:
// Attribute location of the vertex position attribute.
GLint a_pos = -1;
private:
bool compileShader(gl::ShaderHolder&, const GLchar *source[]);
gl::ProgramHolder program;
gl::ShaderHolder vertexShader = { GL_VERTEX_SHADER };
gl::ShaderHolder fragmentShader = { GL_FRAGMENT_SHADER };
};
} // namespace mbgl
#endif
<file_sep>/scripts/set_compiler.sh
#!/usr/bin/env bash
# Selects the C/C++ compilers for the build: honors the _CXX/_CC override
# variables, and when CCACHE>=1 wraps both compilers in ccache, adding
# compiler-specific flags to silence warnings caused by ccache splitting the
# compile into separate preprocess/compile steps. Prints the final choices as
# `export` lines so callers can eval this script's output.
if [ ! -z "${_CXX}" ]; then export CXX="${_CXX}" ; fi
if [ ! -z "${_CC}" ]; then export CC="${_CC}" ; fi
if [ "${CCACHE:-0}" -ge 1 ]; then
export CXX="ccache ${CXX}"
export CC="ccache ${CC}"
# ccache splits up the compile steps, so we end up with unused arguments in some steps.
# Clang also thinks that ccache isn't interactive, so we explicitly need to enable color.
if [ $(echo | ${CXX} -dM -E - | grep -c "#define __clang__ 1") -ge 1 ]; then
export CXX="${CXX} -Qunused-arguments -fcolor-diagnostics"
export CC="${CC} -Qunused-arguments -fcolor-diagnostics"
else
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=60304
# GCC normally throws this error which is in GTest, but *only* when compilation and
# preprocessing aren't combined in one step. However, when using ccache with GCC, we are
# running them in separate steps, so this warning/error is shown.
export CXX="${CXX} -Wno-conversion-null"
export CC="${CC} -Wno-conversion-null"
fi
fi
# Emit the final compiler choices for the caller to eval.
echo "export CXX=\"${CXX}\""
echo "export CC=\"${CC}\""
${CXX} --version
<file_sep>/test/storage/http_expires.cpp
#include "storage.hpp"
#include <mbgl/storage/default_file_source.hpp>
#include <mbgl/storage/online_file_source.hpp>
#include <mbgl/util/chrono.hpp>
#include <mbgl/util/run_loop.hpp>
#include <mbgl/util/timer.hpp>
// A response whose expiration timestamp is already in the past must NOT be
// re-requested in a tight loop: only one callback may occur within 500ms.
TEST_F(Storage, HTTPRetryDelayOnExpiredTile) {
SCOPED_TEST(HTTPRetryDelayOnExpiredTile)
using namespace mbgl;
util::RunLoop loop;
OnlineFileSource fs;
int counter = 0;
const Resource resource { Resource::Unknown, "http://127.0.0.1:3000/test?expires=10000" };
std::unique_ptr<FileRequest> req = fs.request(resource, [&](Response res) {
counter++;
EXPECT_EQ(nullptr, res.error);
// expires=10000 is a 1970-era timestamp, i.e. already expired.
EXPECT_GT(SystemClock::now(), res.expires);
});
util::Timer timer;
timer.start(Milliseconds(500), Duration::zero(), [&] () {
loop.stop();
});
loop.run();
EXPECT_EQ(1, counter);
HTTPRetryDelayOnExpiredTile.finish();
}
// When the server clock is skewed (expiration in the past on first response),
// the second response's expiration should be adjusted/interpolated to roughly
// 60 seconds in the future rather than taken at face value.
TEST_F(Storage, HTTPRetryOnClockSkew) {
SCOPED_TEST(HTTPRetryOnClockSkew)
using namespace mbgl;
util::RunLoop loop;
DefaultFileSource fs(":memory:", ".");
int counter = 0;
const Resource resource { Resource::Unknown, "http://127.0.0.1:3000/clockskew" };
std::unique_ptr<FileRequest> req1 = fs.request(resource, [&](Response res) {
switch (counter++) {
case 0: {
EXPECT_EQ(nullptr, res.error);
EXPECT_GT(SystemClock::now(), res.expires);
} break;
case 1: {
EXPECT_EQ(nullptr, res.error);
auto now = SystemClock::now();
EXPECT_LT(now + Seconds(40), res.expires) << "Expiration not interpolated to 60s";
EXPECT_GT(now + Seconds(80), res.expires) << "Expiration not interpolated to 60s";
loop.stop();
} break;
}
});
loop.run();
HTTPRetryOnClockSkew.finish();
}
<file_sep>/platform/node/scripts/install.sh
#!/usr/bin/env bash
# CI install step for the Node.js bindings: checks out the mason package
# manager, installs mesa on Linux (headless GL), and installs/activates the
# requested Node version via nvm. `mapbox_time` is a CI helper that times the
# wrapped command — TODO confirm it is exported by the CI environment.
set -e
set -o pipefail
mapbox_time "checkout_mason" \
git submodule update --init .mason
export PATH="`pwd`/.mason:${PATH}" MASON_DIR="`pwd`/.mason"
if [ ${TRAVIS_OS_NAME} == "linux" ]; then
mapbox_time "install_mesa" \
mason install mesa 10.4.3
fi
# Install nvm if it is not already present on this worker.
if [ ! -d ~/.nvm ]; then
curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.28.0/install.sh | bash
fi
source ~/.nvm/nvm.sh
mapbox_time $NODE_VERSION \
nvm install $NODE_VERSION
nvm alias default $NODE_VERSION
node --version
npm --version
<file_sep>/src/mbgl/style/paint_property.hpp
#ifndef MBGL_PAINT_PROPERTY
#define MBGL_PAINT_PROPERTY
#include <mbgl/style/class_dictionary.hpp>
#include <mbgl/style/property_parsing.hpp>
#include <mbgl/style/function.hpp>
#include <mbgl/style/property_transition.hpp>
#include <mbgl/style/style_cascade_parameters.hpp>
#include <mbgl/style/style_calculation_parameters.hpp>
#include <mbgl/util/interpolate.hpp>
#include <mbgl/util/std.hpp>
#include <mbgl/util/rapidjson.hpp>
#include <map>
#include <utility>
namespace mbgl {
// A paint property with per-class values and animated transitions between
// them. T is the stored value type; Result is the evaluated type (defaults
// to T; e.g. Faded<T> for cross-faded properties).
template <typename T, typename Result = T>
class PaintProperty {
public:
using Fn = Function<Result>;
explicit PaintProperty(T fallbackValue)
: value(fallbackValue) {
values.emplace(ClassID::Fallback, Fn(fallbackValue));
}
// Copying intentionally drops the in-flight transition state (`cascaded`).
PaintProperty(const PaintProperty& other)
: values(other.values),
transitions(other.transitions) {
}
// Parses this property (and its "-transition" companion) from the layer's
// "paint" and "paint.<class>" JSON objects. Previously parsed non-fallback
// values are discarded first so re-parsing replaces the old state.
void parse(const char* name, const JSValue& layer) {
mbgl::util::erase_if(values, [] (const auto& p) { return p.first != ClassID::Fallback; });
std::string transitionName = { name };
transitionName += "-transition";
for (auto it = layer.MemberBegin(); it != layer.MemberEnd(); ++it) {
const std::string paintName { it->name.GetString(), it->name.GetStringLength() };
if (paintName.compare(0, 5, "paint") != 0)
continue;
// "paint.<class>" selects a named style class; bare "paint" is Default.
bool isClass = paintName.compare(0, 6, "paint.") == 0;
if (isClass && paintName.length() <= 6)
continue;
ClassID classID = isClass ? ClassDictionary::Get().lookup(paintName.substr(6)) : ClassID::Default;
if (it->value.HasMember(name)) {
auto v = parseProperty<Fn>(name, it->value[name]);
if (v) {
values.emplace(classID, *v);
}
}
if (it->value.HasMember(transitionName.c_str())) {
auto v = parseProperty<PropertyTransition>(name, it->value[transitionName.c_str()]);
if (v) {
transitions.emplace(classID, *v);
}
}
}
}
// Applies the first class from parameters.classes that has a value (first
// match wins), pushing a new CascadedValue that transitions from the
// previous one using the class's transition settings or the defaults.
void cascade(const StyleCascadeParameters& parameters) {
Duration delay = *parameters.defaultTransition.delay;
Duration duration = *parameters.defaultTransition.duration;
for (auto classID : parameters.classes) {
if (values.find(classID) == values.end())
continue;
if (transitions.find(classID) != transitions.end()) {
const PropertyTransition& transition = transitions[classID];
if (transition.delay) delay = *transition.delay;
if (transition.duration) duration = *transition.duration;
}
cascaded = std::make_unique<CascadedValue>(std::move(cascaded),
parameters.now + delay,
parameters.now + delay + duration,
values.at(classID));
break;
}
assert(cascaded);
}
// Re-evaluates the cascaded value at the given zoom/time. Returns true
// while a transition is still in progress (a prior value remains blended).
bool calculate(const StyleCalculationParameters& parameters) {
assert(cascaded);
value = cascaded->calculate(parameters);
return cascaded->prior.operator bool();
}
// Sets the default-class value programmatically (bypassing JSON parsing).
void operator=(const T& v) { values.emplace(ClassID::Default, Fn(v)); }
operator T() const { return value; }
std::map<ClassID, Fn> values;
std::map<ClassID, PropertyTransition> transitions;
// Node in a singly linked list of pending transitions; each node blends
// from the recursively evaluated prior value into its own value.
struct CascadedValue {
CascadedValue(std::unique_ptr<CascadedValue> prior_,
TimePoint begin_,
TimePoint end_,
Fn value_)
: prior(std::move(prior_)),
begin(begin_),
end(end_),
value(std::move(value_)) {
}
Result calculate(const StyleCalculationParameters& parameters) {
Result final = value.evaluate(parameters);
if (!prior) {
// No prior value.
return final;
} else if (parameters.now >= end) {
// Transition from prior value is now complete.
prior.reset();
return final;
} else {
// Interpolate between recursively-calculated prior value and final.
float t = std::chrono::duration<float>(parameters.now - begin) / (end - begin);
return util::interpolate(prior->calculate(parameters), final, t);
}
}
std::unique_ptr<CascadedValue> prior;
TimePoint begin;
TimePoint end;
Fn value;
};
std::unique_ptr<CascadedValue> cascaded;
Result value;
};
} // namespace mbgl
#endif
<file_sep>/src/mbgl/text/glyph_pbf.cpp
#include <mbgl/text/glyph_pbf.hpp>
#include <mbgl/storage/file_source.hpp>
#include <mbgl/storage/resource.hpp>
#include <mbgl/storage/response.hpp>
#include <mbgl/text/font_stack.hpp>
#include <mbgl/text/glyph_store.hpp>
#include <mbgl/util/exception.hpp>
#include <mbgl/util/pbf.hpp>
#include <mbgl/util/string.hpp>
#include <mbgl/util/token.hpp>
#include <mbgl/util/url.hpp>
namespace {
// Decodes a glyph PBF blob and inserts every SDF glyph it contains into the
// given font stack. Field numbers follow the Mapbox glyph protobuf schema;
// unknown fields are skipped.
void parseGlyphPBF(mbgl::FontStack& stack, const std::string& data) {
mbgl::pbf glyphs_pbf(reinterpret_cast<const uint8_t *>(data.data()), data.size());
while (glyphs_pbf.next()) {
if (glyphs_pbf.tag == 1) { // stacks
mbgl::pbf fontstack_pbf = glyphs_pbf.message();
while (fontstack_pbf.next()) {
if (fontstack_pbf.tag == 3) { // glyphs
mbgl::pbf glyph_pbf = fontstack_pbf.message();
mbgl::SDFGlyph glyph;
while (glyph_pbf.next()) {
if (glyph_pbf.tag == 1) { // id
glyph.id = glyph_pbf.varint();
} else if (glyph_pbf.tag == 2) { // bitmap
glyph.bitmap = glyph_pbf.string();
} else if (glyph_pbf.tag == 3) { // width
glyph.metrics.width = glyph_pbf.varint();
} else if (glyph_pbf.tag == 4) { // height
glyph.metrics.height = glyph_pbf.varint();
} else if (glyph_pbf.tag == 5) { // left
glyph.metrics.left = glyph_pbf.svarint();
} else if (glyph_pbf.tag == 6) { // top
glyph.metrics.top = glyph_pbf.svarint();
} else if (glyph_pbf.tag == 7) { // advance
glyph.metrics.advance = glyph_pbf.varint();
} else {
glyph_pbf.skip();
}
}
stack.insert(glyph.id, glyph);
} else {
fontstack_pbf.skip();
}
}
} else {
glyphs_pbf.skip();
}
}
}
} // namespace
namespace mbgl {
// Requests the glyph-range PBF for `fontStack` and parses it on arrival.
// Transport and parse errors are forwarded to the observer; a not-modified
// response returns early and leaves the previously parsed state untouched;
// an empty (no-content) response still counts as successfully loaded.
GlyphPBF::GlyphPBF(GlyphStore* store,
const std::string& fontStack,
const GlyphRange& glyphRange,
GlyphStore::Observer* observer_,
FileSource& fileSource)
: parsed(false),
observer(observer_) {
req = fileSource.request(Resource::glyphs(store->getURL(), fontStack, glyphRange), [this, store, fontStack, glyphRange](Response res) {
if (res.error) {
observer->onGlyphsError(fontStack, glyphRange, std::make_exception_ptr(std::runtime_error(res.error->message)));
} else if (res.notModified) {
return;
} else if (res.noContent) {
parsed = true;
observer->onGlyphsLoaded(fontStack, glyphRange);
} else {
try {
parseGlyphPBF(**store->getFontStack(fontStack), *res.data);
} catch (...) {
observer->onGlyphsError(fontStack, glyphRange, std::current_exception());
return;
}
parsed = true;
observer->onGlyphsLoaded(fontStack, glyphRange);
}
});
}
GlyphPBF::~GlyphPBF() = default;
} // namespace mbgl
<file_sep>/src/mbgl/util/thread_context.cpp
#include <mbgl/util/thread_context.hpp>
#include <mbgl/util/thread_local.hpp>
#include <cassert>
namespace {
using namespace mbgl::util;
static ThreadLocal<ThreadContext>& current = *new ThreadLocal<ThreadContext>;
} // namespace
namespace mbgl {
namespace util {
// Per-thread metadata (name, type, priority), published through the
// file-local thread-local pointer `current`.
ThreadContext::ThreadContext(const std::string& name_, ThreadType type_, ThreadPriority priority_)
: name(name_),
type(type_),
priority(priority_) {
}
// Registers `context` as the calling thread's context (or clears it when
// called with nullptr). The caller retains ownership of the object.
void ThreadContext::Set(ThreadContext* context) {
current.set(context);
}
// Returns true when the calling thread's registered context matches `type`.
// Guards against threads that never registered a context — consistent with
// getName()/getPriority(), which treat an unset context as "unknown" — rather
// than dereferencing a null pointer.
bool ThreadContext::currentlyOn(ThreadType type) {
    return current.get() != nullptr && current.get()->type == type;
}
// Returns the calling thread's registered name, or "Unknown" when the thread
// never registered a context.
std::string ThreadContext::getName() {
if (current.get() != nullptr) {
return current.get()->name;
} else {
return "Unknown";
}
}
// Returns the calling thread's priority, defaulting to Regular for
// unregistered threads.
ThreadPriority ThreadContext::getPriority() {
if (current.get() != nullptr) {
return current.get()->priority;
} else {
return ThreadPriority::Regular;
}
}
// Registers a ThreadContext for the main thread for the lifetime of the
// process: constructed during static initialization, cleared at shutdown.
class MainThreadContextRegistrar {
public:
MainThreadContextRegistrar() : context("Main", ThreadType::Main, ThreadPriority::Regular) {
ThreadContext::Set(&context);
}
~MainThreadContextRegistrar() {
ThreadContext::Set(nullptr);
}
private:
ThreadContext context;
};
// Will auto register the main thread context
// at startup. Must be instantiated after the
// ThreadContext::current object.
MainThreadContextRegistrar registrar;
} // namespace util
} // namespace mbgl
<file_sep>/src/mbgl/gl/gl_object_store.hpp
#ifndef MBGL_MAP_UTIL_GL_OBJECT_STORE
#define MBGL_MAP_UTIL_GL_OBJECT_STORE
#include <mbgl/gl/gl.hpp>
#include <mbgl/util/noncopyable.hpp>
#include <array>
#include <algorithm>
#include <memory>
#include <vector>
namespace mbgl {
namespace gl {
// Collects GL object IDs whose owning holders were destroyed (possibly on
// other threads) so the underlying GL objects can be deleted later on the
// thread that owns the GL context.
class GLObjectStore : private util::noncopyable {
public:
~GLObjectStore();
// Actually remove the objects that the friend holder classes marked as
// abandoned. Only call this while the OpenGL context is exclusive to this
// thread.
void performCleanup();
private:
// The holder classes enqueue IDs into the lists below on reset/destruction.
friend class ProgramHolder;
friend class ShaderHolder;
friend class BufferHolder;
friend class TextureHolder;
friend class TexturePoolHolder;
friend class VAOHolder;
std::vector<GLuint> abandonedPrograms;
std::vector<GLuint> abandonedShaders;
std::vector<GLuint> abandonedBuffers;
std::vector<GLuint> abandonedTextures;
std::vector<GLuint> abandonedVAOs;
};
// Move-only RAII wrapper around a single GL object ID (0 = no object).
// NOTE(review): the move operations transfer `id` but not `objectStore` —
// presumably reset() is only meaningful on holders that called create();
// confirm against gl_object_store.cpp.
class GLHolder : private util::noncopyable {
public:
GLHolder() {}
GLHolder(GLHolder&& o) noexcept : id(o.id) { o.id = 0; }
GLHolder& operator=(GLHolder&& o) noexcept { id = o.id; o.id = 0; return *this; }
explicit operator bool() const { return id; }
GLuint getID() const { return id; }
protected:
GLuint id = 0;
GLObjectStore* objectStore = nullptr;
};
// RAII holder for a GL program object; abandons it to the object store on
// reset/destruction.
class ProgramHolder : public GLHolder {
public:
ProgramHolder() = default;
~ProgramHolder() { reset(); }
ProgramHolder(ProgramHolder&& o) noexcept : GLHolder(std::move(o)) {}
ProgramHolder& operator=(ProgramHolder&& o) noexcept { GLHolder::operator=(std::move(o)); return *this; }
void create(GLObjectStore&);
void reset();
};
// RAII holder for a GL shader object of the given type (e.g. GL_VERTEX_SHADER
// or GL_FRAGMENT_SHADER).
class ShaderHolder : public GLHolder {
public:
ShaderHolder(GLenum type_) : type(type_) {}
~ShaderHolder() { reset(); }
ShaderHolder(ShaderHolder&& o) noexcept : GLHolder(std::move(o)), type(o.type) {}
ShaderHolder& operator=(ShaderHolder&& o) noexcept { GLHolder::operator=(std::move(o)); type = o.type; return *this; }
void create(GLObjectStore&);
void reset();
private:
GLenum type = 0;
};
// RAII holder for a GL buffer object.
class BufferHolder : public GLHolder {
public:
BufferHolder() = default;
~BufferHolder() { reset(); }
BufferHolder(BufferHolder&& o) noexcept : GLHolder(std::move(o)) {}
BufferHolder& operator=(BufferHolder&& o) noexcept { GLHolder::operator=(std::move(o)); return *this; }
void create(GLObjectStore&);
void reset();
};
// RAII holder for a single GL texture object.
class TextureHolder : public GLHolder {
public:
TextureHolder() = default;
~TextureHolder() { reset(); }
TextureHolder(TextureHolder&& o) noexcept : GLHolder(std::move(o)) {}
TextureHolder& operator=(TextureHolder&& o) noexcept { GLHolder::operator=(std::move(o)); return *this; }
void create(GLObjectStore&);
void reset();
};
// Move-only holder for a fixed-size pool of GL texture object IDs (0 = empty
// slot). NOTE(review): the move operations copy `ids` without clearing the
// moved-from array and do not transfer `objectStore` — presumably reset()
// tolerates this, but confirm against gl_object_store.cpp.
class TexturePoolHolder : private util::noncopyable {
public:
    static const GLsizei TextureMax = 64;
    TexturePoolHolder() { ids.fill(0); }
    ~TexturePoolHolder() { reset(); }
    TexturePoolHolder(TexturePoolHolder&& o) noexcept : ids(std::move(o.ids)) {}
    TexturePoolHolder& operator=(TexturePoolHolder&& o) noexcept { ids = std::move(o.ids); return *this; }
    // True when every slot in the pool has been assigned a texture ID. The
    // lambda takes GLuint (not int) to avoid an implicit unsigned-to-signed
    // conversion of the element type.
    explicit operator bool() { return std::none_of(ids.begin(), ids.end(), [](GLuint id) { return id == 0; }); }
    const std::array<GLuint, TextureMax>& getIDs() const { return ids; }
    const GLuint& operator[](size_t pos) { return ids[pos]; }
    void create(GLObjectStore&);
    void reset();
private:
    std::array<GLuint, TextureMax> ids;
    GLObjectStore* objectStore = nullptr;
};
// RAII holder for a GL vertex array object.
class VAOHolder : public GLHolder {
public:
VAOHolder() = default;
~VAOHolder() { reset(); }
VAOHolder(VAOHolder&& o) noexcept : GLHolder(std::move(o)) {}
VAOHolder& operator=(VAOHolder&& o) noexcept { GLHolder::operator=(std::move(o)); return *this; }
void create(GLObjectStore&);
void reset();
};
} // namespace gl
} // namespace mbgl
#endif
<file_sep>/src/mbgl/layer/raster_layer.hpp
#ifndef MBGL_RASTER_LAYER
#define MBGL_RASTER_LAYER
#include <mbgl/style/style_layer.hpp>
#include <mbgl/style/paint_property.hpp>
namespace mbgl {
// Paint properties of raster layers, initialized with their defaults.
class RasterPaintProperties {
public:
PaintProperty<float> opacity { 1.0f };
PaintProperty<float> hueRotate { 0.0f };
PaintProperty<float> brightnessMin { 0.0f };
PaintProperty<float> brightnessMax { 1.0f };
PaintProperty<float> saturation { 0.0f };
PaintProperty<float> contrast { 0.0f };
PaintProperty<float> fadeDuration { 0.0f };
};
// Style layer that renders raster tiles.
class RasterLayer : public StyleLayer {
public:
RasterLayer() : StyleLayer(Type::Raster) {}
std::unique_ptr<StyleLayer> clone() const override;
// Raster layers have no layout properties, so parsing is a no-op.
void parseLayout(const JSValue&) override {};
void parsePaints(const JSValue&) override;
void cascade(const StyleCascadeParameters&) override;
bool recalculate(const StyleCalculationParameters&) override;
std::unique_ptr<Bucket> createBucket(StyleBucketParameters&) const override;
RasterPaintProperties paint;
};
// Enables StyleLayer::is<RasterLayer>() type checks.
template <>
inline bool StyleLayer::is<RasterLayer>() const {
return type == Type::Raster;
}
} // namespace mbgl
#endif
<file_sep>/platform/android/DISTRIBUTE.md
# Distributing Mapbox GL Native for Android
Use the `Makefile` target `make apackage` to build the JNI libraries for all supported ABIs so the complete package can be distributed.
<file_sep>/src/mbgl/util/default_styles.cpp
#include <mbgl/util/default_styles.hpp>
namespace mbgl {
namespace util {
namespace default_styles {
// Built-in Mapbox styles offered by the SDKs: style URL plus display name.
const DefaultStyle streets = { "mapbox://styles/mapbox/streets-v8", "Streets" };
const DefaultStyle emerald = { "mapbox://styles/mapbox/emerald-v8", "Emerald" };
const DefaultStyle light = { "mapbox://styles/mapbox/light-v8", "Light" };
const DefaultStyle dark = { "mapbox://styles/mapbox/dark-v8", "Dark" };
const DefaultStyle satellite = { "mapbox://styles/mapbox/satellite-v8", "Satellite" };
const DefaultStyle hybrid = { "mapbox://styles/mapbox/satellite-hybrid-v8", "Hybrid" };
} // namespace default_styles
} // namespace util
} // namespace mbgl
<file_sep>/src/mbgl/layer/custom_layer.cpp
#include <mbgl/layer/custom_layer.hpp>
#include <mbgl/renderer/bucket.hpp>
#include <mbgl/map/transform_state.hpp>
namespace mbgl {
// Wraps user-supplied C callbacks in a style layer. The opaque `context`
// pointer is passed back verbatim to every callback.
CustomLayer::CustomLayer(const std::string& id_,
CustomLayerInitializeFunction initializeFn_,
CustomLayerRenderFunction renderFn_,
CustomLayerDeinitializeFunction deinitializeFn_,
void* context_)
: StyleLayer(Type::Custom) {
id = id_;
initializeFn = initializeFn_;
renderFn = renderFn_;
deinitializeFn = deinitializeFn_;
context = context_;
}
// Copy constructor used by clone(): copies the identity only — the callbacks
// and context are deliberately not shared with the copy.
CustomLayer::CustomLayer(const CustomLayer& other)
: StyleLayer(other) {
id = other.id;
// Don't copy anything else.
}
// Gives the user a chance to tear down their context, if they provided a
// deinitialize callback.
CustomLayer::~CustomLayer() {
if (deinitializeFn) {
deinitializeFn(context);
}
}
// Invokes the user's initialize callback; must be provided.
void CustomLayer::initialize() {
assert(initializeFn);
initializeFn(context);
}
// Invokes the user's render callback with the current view/camera state.
void CustomLayer::render(const TransformState& state) const {
assert(renderFn);
CustomLayerRenderParameters parameters;
parameters.width = state.getWidth();
parameters.height = state.getHeight();
parameters.latitude = state.getLatLng().latitude;
parameters.longitude = state.getLatLng().longitude;
parameters.zoom = state.getZoom();
// Radians to degrees; sign is flipped relative to the transform angle —
// NOTE(review): presumably to match compass-bearing convention; confirm.
parameters.bearing = -state.getAngle() * util::RAD2DEG;
parameters.pitch = state.getPitch();
parameters.altitude = state.getAltitude();
renderFn(context, parameters);
}
// Custom layers always draw in the translucent pass and never require a
// re-layout (returns false).
bool CustomLayer::recalculate(const StyleCalculationParameters&) {
passes = RenderPass::Translucent;
return false;
}
std::unique_ptr<StyleLayer> CustomLayer::clone() const {
return std::make_unique<CustomLayer>(*this);
}
// Custom layers render themselves; they produce no bucket.
std::unique_ptr<Bucket> CustomLayer::createBucket(StyleBucketParameters&) const {
return nullptr;
}
} // namespace mbgl
<file_sep>/platform/ios/framework/framework-ios.gypi
{
'includes': [
'../../../gyp/common.gypi',
],
'targets': [
{
'target_name': 'iossdk',
'product_name': 'Mapbox',
'type': 'shared_library',
'mac_bundle': 1,
'dependencies': [
'mbgl.gyp:core',
'mbgl.gyp:platform-<(platform_lib)',
'mbgl.gyp:http-<(http_lib)',
'mbgl.gyp:asset-<(asset_lib)',
],
'xcode_settings': {
'CLANG_ENABLE_OBJC_ARC': 'YES',
'COMBINE_HIDPI_IMAGES': 'NO', # disable combining @2x, @3x images into .tiff files
'CURRENT_PROJECT_VERSION': '0',
'DEFINES_MODULE': 'YES',
'DYLIB_INSTALL_NAME_BASE': '@rpath',
'INFOPLIST_FILE': '../platform/ios/framework/Info.plist',
'IPHONEOS_DEPLOYMENT_TARGET': '8.0',
'LD_RUNPATH_SEARCH_PATHS': [
'$(inherited)',
'@executable_path/Frameworks',
'@loader_path/Frameworks',
],
'PRODUCT_BUNDLE_IDENTIFIER': 'com.mapbox.sdk.ios',
'OTHER_LDFLAGS': [ '-stdlib=libc++', '-lstdc++' ],
'SDKROOT': 'iphoneos',
'SKIP_INSTALL': 'YES',
'SUPPORTED_PLATFORMS': [
'iphonesimulator',
'iphoneos',
],
'VERSIONING_SYSTEM': 'apple-generic',
},
'mac_framework_headers': [
'Mapbox.h',
'<!@(find ../platform/{darwin,ios}/include -type f \! -name \'.*\' \! -name Mapbox.h)',
],
'sources': [
'Mapbox.m',
],
'configurations': {
'Debug': {
'xcode_settings': {
'CODE_SIGN_IDENTITY': '<PASSWORD>',
'DEAD_CODE_STRIPPING': 'YES',
'GCC_OPTIMIZATION_LEVEL': '0',
},
},
'Release': {
'xcode_settings': {
'ARCHS': [ "armv7", "armv7s", "arm64", "i386", "x86_64" ],
'CODE_SIGN_IDENTITY': '<PASSWORD>',
'DEAD_CODE_STRIPPING': 'YES',
'GCC_OPTIMIZATION_LEVEL': 's',
},
},
},
},
]
}
<file_sep>/src/mbgl/util/vec.cpp
#include <mbgl/util/vec.hpp>
namespace mbgl {
// Explicit instantiations of the vector templates used across the library.
template struct vec2<double>;
template struct vec2<float>;
template struct vec3<double>;
template struct vec4<double>;
} // namespace mbgl
<file_sep>/include/mbgl/util/chrono.hpp
#ifndef MBGL_UTIL_CHRONO
#define MBGL_UTIL_CHRONO
#include <chrono>
#include <string>
namespace mbgl {
// Monotonic clock: used for durations, timers, and animation timing.
using Clock = std::chrono::steady_clock;
// Wall clock: used for HTTP expiration and cache timestamps.
using SystemClock = std::chrono::system_clock;
using Seconds = std::chrono::seconds;
using Milliseconds = std::chrono::milliseconds;
using TimePoint = Clock::time_point;
using Duration = Clock::duration;
using SystemTimePoint = SystemClock::time_point;
using SystemDuration = SystemClock::duration;
namespace util {
// Returns the RFC1123 formatted date. E.g. "Tue, 04 Nov 2014 02:13:24 GMT"
std::string rfc1123(SystemTimePoint);
// YYYY-mm-dd HH:MM:SS e.g. "2015-11-26 16:11:23"
std::string iso8601(SystemTimePoint);
// Parses a timestamp string into a time point. NOTE(review): the accepted
// formats are defined by the implementation — confirm before relying on one.
SystemTimePoint parseTimePoint(const char *);
// C++17 polyfill
template <class Rep, class Period, class = std::enable_if_t<
std::chrono::duration<Rep, Period>::min() < std::chrono::duration<Rep, Period>::zero()>>
constexpr std::chrono::duration<Rep, Period> abs(std::chrono::duration<Rep, Period> d)
{
return d >= d.zero() ? d : -d;
}
} // namespace util
} // namespace mbgl
#endif
<file_sep>/src/mbgl/shader/dot_shader.cpp
#include <mbgl/shader/dot_shader.hpp>
#include <mbgl/shader/dot.vertex.hpp>
#include <mbgl/shader/dot.fragment.hpp>
#include <mbgl/gl/gl.hpp>
#include <cstdio>
using namespace mbgl;
// Compiles and links the "dot" shader program.
DotShader::DotShader(gl::GLObjectStore& glObjectStore)
: Shader("dot", shaders::dot::vertex, shaders::dot::fragment, glObjectStore) {
}
// Binds the position attribute: two GL_SHORT components per vertex with a
// stride of 8 bytes. NOTE(review): 2 shorts occupy only 4 bytes, so the
// stride implies 4 extra bytes per vertex — confirm the buffer layout.
void DotShader::bind(GLbyte* offset) {
MBGL_CHECK_ERROR(glEnableVertexAttribArray(a_pos));
MBGL_CHECK_ERROR(glVertexAttribPointer(a_pos, 2, GL_SHORT, false, 8, offset));
}
<file_sep>/src/mbgl/text/check_max_angle.hpp
#ifndef MBGL_TEXT_CHECK_MAX_ANGLE
#define MBGL_TEXT_CHECK_MAX_ANGLE
#include <mbgl/tile/geometry_tile.hpp>
namespace mbgl {
struct Anchor;
// Returns whether a label of `labelLength`, anchored at `anchor`, can follow
// `line` without bending more than `maxAngle` within any `windowSize` span
// along the label. NOTE(review): angle units (degrees vs radians) are defined
// by the implementation — confirm before calling.
bool checkMaxAngle(const GeometryCoordinates &line, Anchor &anchor, const float labelLength,
const float windowSize, const float maxAngle);
} // namespace mbgl
#endif
<file_sep>/test/test.gypi
{
'includes': [
'../gyp/common.gypi',
],
'targets': [
{ 'target_name': 'symlink_TEST_DATA',
'type': 'none',
'hard_dependency': 1,
'actions': [
{
'action_name': 'Symlink Fixture Directory',
'inputs': ['<!@(pwd)/../test'],
'outputs': ['<(PRODUCT_DIR)/TEST_DATA'], # symlinks the test dir into TEST_DATA
'action': ['ln', '-s', '-f', '-n', '<@(_inputs)', '<@(_outputs)' ],
}
],
},
{ 'target_name': 'test',
'type': 'executable',
'include_dirs': [ '../include', '../src', '../platform/default' ],
'dependencies': [
'symlink_TEST_DATA',
'mbgl.gyp:core',
'mbgl.gyp:platform-<(platform_lib)',
'mbgl.gyp:http-<(http_lib)',
'mbgl.gyp:asset-<(asset_lib)',
'mbgl.gyp:headless-<(headless_lib)',
],
'sources': [
'fixtures/main.cpp',
'fixtures/stub_file_source.cpp',
'fixtures/stub_file_source.hpp',
'fixtures/mock_view.hpp',
'fixtures/util.hpp',
'fixtures/util.cpp',
'fixtures/fixture_log_observer.hpp',
'fixtures/fixture_log_observer.cpp',
'util/assert.cpp',
'util/async_task.cpp',
'util/clip_ids.cpp',
'util/geo.cpp',
'util/image.cpp',
'util/mapbox.cpp',
'util/merge_lines.cpp',
'util/run_loop.cpp',
'util/text_conversions.cpp',
'util/thread.cpp',
'util/thread_local.cpp',
'util/tile_cover.cpp',
'util/timer.cpp',
'util/token.cpp',
'util/work_queue.cpp',
'api/annotations.cpp',
'api/api_misuse.cpp',
'api/repeated_render.cpp',
'api/render_missing.cpp',
'api/set_style.cpp',
'api/custom_layer.cpp',
'api/offline.cpp',
'geometry/binpack.cpp',
'map/map.cpp',
'map/map_context.cpp',
'map/tile.cpp',
'map/transform.cpp',
'storage/storage.hpp',
'storage/storage.cpp',
'storage/default_file_source.cpp',
'storage/offline.cpp',
'storage/offline_database.cpp',
'storage/offline_download.cpp',
'storage/asset_file_source.cpp',
'storage/headers.cpp',
'storage/http_cancel.cpp',
'storage/http_error.cpp',
'storage/http_expires.cpp',
'storage/http_header_parsing.cpp',
'storage/http_issue_1369.cpp',
'storage/http_load.cpp',
'storage/http_other_loop.cpp',
'storage/http_retry_network_status.cpp',
'storage/http_reading.cpp',
'storage/http_timeout.cpp',
'storage/resource.cpp',
'style/glyph_store.cpp',
'style/source.cpp',
'style/style.cpp',
'style/style_layer.cpp',
'style/comparisons.cpp',
'style/functions.cpp',
'style/style_parser.cpp',
'style/variant.cpp',
'sprite/sprite_atlas.cpp',
'sprite/sprite_image.cpp',
'sprite/sprite_parser.cpp',
'sprite/sprite_store.cpp',
],
'variables': {
'cflags_cc': [
'<@(gtest_cflags)',
'<@(opengl_cflags)',
'<@(boost_cflags)',
'<@(sqlite_cflags)',
'<@(geojsonvt_cflags)',
'<@(rapidjson_cflags)',
'<@(pixelmatch_cflags)',
'<@(variant_cflags)',
],
'ldflags': [
'<@(gtest_ldflags)',
'<@(sqlite_ldflags)',
],
'libraries': [
'<@(gtest_static_libs)',
'<@(sqlite_static_libs)',
'<@(geojsonvt_static_libs)',
],
},
'conditions': [
['OS == "mac"', {
'xcode_settings': {
'OTHER_CPLUSPLUSFLAGS': [ '<@(cflags_cc)' ],
},
}, {
'cflags_cc': [ '<@(cflags_cc)' ],
}],
],
'link_settings': {
'conditions': [
['OS == "mac"', {
'libraries': [ '<@(libraries)' ],
'xcode_settings': { 'OTHER_LDFLAGS': [ '<@(ldflags)' ] }
}, {
'libraries': [ '<@(libraries)', '<@(ldflags)' ],
}]
],
},
},
]
}
<file_sep>/src/mbgl/style/property_transition.hpp
#ifndef MBGL_STYLE_PROPERTY_TRANSITION
#define MBGL_STYLE_PROPERTY_TRANSITION
#include <mbgl/util/chrono.hpp>
#include <mbgl/util/optional.hpp>
#include <cstdint>
namespace mbgl {
// Timing parameters for animating a style property change. Both members are
// optional; an unset value means "use the default" (defined by the consumer).
class PropertyTransition {
public:
    optional<Duration> duration; // how long the transition runs
    optional<Duration> delay;    // wait before the transition starts
};
} // namespace mbgl
#endif
<file_sep>/src/mbgl/layer/fill_layer.hpp
#ifndef MBGL_FILL_LAYER
#define MBGL_FILL_LAYER
#include <mbgl/style/style_layer.hpp>
#include <mbgl/style/paint_property.hpp>
namespace mbgl {
// Paint properties for fill layers, initialized to their style-spec defaults.
class FillPaintProperties {
public:
    PaintProperty<bool> antialias { true };
    PaintProperty<float> opacity { 1.0f };
    PaintProperty<Color> color { {{ 0, 0, 0, 1 }} };
    // Alpha of -1 appears to be a sentinel for "no outline color set" —
    // confirm how the renderer interprets it.
    PaintProperty<Color> outlineColor { {{ 0, 0, 0, -1 }} };
    PaintProperty<std::array<float, 2>> translate { {{ 0, 0 }} };
    PaintProperty<TranslateAnchorType> translateAnchor { TranslateAnchorType::Map };
    // Faded<> carries two values so patterns can cross-fade during transitions.
    PaintProperty<std::string, Faded<std::string>> pattern { "" };
};
// Style layer that renders polygon fills (solid color or pattern, with an
// optional antialiased outline).
class FillLayer : public StyleLayer {
public:
    FillLayer() : StyleLayer(Type::Fill) {}
    std::unique_ptr<StyleLayer> clone() const override;
    // Fill layers have no layout properties, so layout parsing is a no-op.
    void parseLayout(const JSValue&) override {};
    void parsePaints(const JSValue&) override;
    void cascade(const StyleCascadeParameters&) override;
    bool recalculate(const StyleCalculationParameters&) override;
    std::unique_ptr<Bucket> createBucket(StyleBucketParameters&) const override;
    FillPaintProperties paint;
};
// RTTI-free downcast check: StyleLayer::is<FillLayer>() tests the type tag.
template <>
inline bool StyleLayer::is<FillLayer>() const {
    return type == Type::Fill;
}
} // namespace mbgl
#endif
<file_sep>/src/mbgl/tile/tile_data.cpp
#include <mbgl/tile/tile_data.hpp>
#include <mbgl/renderer/debug_bucket.hpp>
#include <mbgl/util/string.hpp>
namespace mbgl {
// A tile starts in State::initial until loading is kicked off elsewhere.
TileData::TileData(const TileID& id_)
    : id(id_),
      state(State::initial) {
}

TileData::~TileData() = default;
// Maps a tile lifecycle state to a human-readable name; used only for
// debug logging. Unrecognized values yield "<unknown>".
const char* TileData::StateToString(const State state) {
    if (state == TileData::State::initial)  return "initial";
    if (state == TileData::State::invalid)  return "invalid";
    if (state == TileData::State::loading)  return "loading";
    if (state == TileData::State::loaded)   return "loaded";
    if (state == TileData::State::obsolete) return "obsolete";
    if (state == TileData::State::parsed)   return "parsed";
    if (state == TileData::State::partial)  return "partial";
    return "<unknown>";
}
// Logs the tile's id and current lifecycle state (debugging aid only).
void TileData::dumpDebugLogs() const {
    Log::Info(Event::General, "TileData::id: %s", std::string(id).c_str());
    Log::Info(Event::General, "TileData::state: %s", TileData::StateToString(state));
}
} // namespace mbgl
<file_sep>/src/mbgl/layer/background_layer.hpp
#ifndef MBGL_BACKGROUND_LAYER
#define MBGL_BACKGROUND_LAYER
#include <mbgl/style/style_layer.hpp>
#include <mbgl/style/paint_property.hpp>
namespace mbgl {
// Paint properties for the background layer, initialized to style-spec defaults.
class BackgroundPaintProperties {
public:
    PaintProperty<float> opacity { 1.0f };
    PaintProperty<Color> color { {{ 0, 0, 0, 1 }} };
    // Faded<> carries two values so patterns can cross-fade during transitions.
    PaintProperty<std::string, Faded<std::string>> pattern { "" };
};
// Style layer that paints the map background beneath all other layers.
class BackgroundLayer : public StyleLayer {
public:
    BackgroundLayer() : StyleLayer(Type::Background) {}
    std::unique_ptr<StyleLayer> clone() const override;
    // Background layers have no layout properties, so layout parsing is a no-op.
    void parseLayout(const JSValue&) override {};
    void parsePaints(const JSValue&) override;
    void cascade(const StyleCascadeParameters&) override;
    bool recalculate(const StyleCalculationParameters&) override;
    std::unique_ptr<Bucket> createBucket(StyleBucketParameters&) const override;
    BackgroundPaintProperties paint;
};
// RTTI-free downcast check: StyleLayer::is<BackgroundLayer>() tests the type tag.
template <>
inline bool StyleLayer::is<BackgroundLayer>() const {
    return type == Type::Background;
}
} // namespace mbgl
#endif
<file_sep>/scripts/clang-tidy.sh
#!/usr/bin/env bash
# Runs clang-tidy over all first-party C++ sources (src, platform, test) in
# parallel, from the current build output directory.
#
# Environment:
#   CLANG_TIDY                      - optional path/name of the clang-tidy binary
#   HOST, HOST_VERSION, BUILDTYPE   - select the build directory
#   JOBS                            - parallelism for xargs

set -e
set -o pipefail

export PATH="`pwd`/.mason:${PATH}" MASON_DIR="`pwd`/.mason"

command -v ${CLANG_TIDY:-clang-tidy} >/dev/null 2>&1 || {
    echo "Can't find ${CLANG_TIDY:-clang-tidy} in PATH."
    # Quote the expansion: unquoted, an empty CLANG_TIDY only passed the -z
    # test by accident, and a value containing spaces would break it.
    if [ -z "${CLANG_TIDY}" ]; then
        echo "Alternatively, you can set CLANG_TIDY to point to clang-tidy."
    fi
    exit 1
}

cd "build/${HOST}-${HOST_VERSION}/${BUILDTYPE}"

# -header-filter limits diagnostics to mbgl headers; xargs -P fans the file
# list out over ${JOBS} parallel clang-tidy processes.
git ls-files '../../../src/mbgl/*.cpp' '../../../platform/*.cpp' '../../../test/*.cpp' | \
    xargs -I{} -P ${JOBS} ${CLANG_TIDY:-clang-tidy} -header-filter='\/mbgl\/' {}
<file_sep>/platform/osx/scripts/package.sh
#!/usr/bin/env bash
# Builds and packages the Mapbox OS X framework.
# Invoked as "make xpackage" (no args: keep debug symbols) or
# "make xpackage-strip" (any arg: omit debug symbols).

set -e
set -o pipefail
set -u

NAME=Mapbox
OUTPUT=build/osx/pkg
OSX_SDK_VERSION=`xcrun --sdk macosx --show-sdk-version`
LIBUV_VERSION=1.7.5  # NOTE(review): appears unused in this script — confirm before removing

if [[ ${#} -eq 0 ]]; then # e.g. "make xpackage"
    BUILDTYPE="Release"
    GCC_GENERATE_DEBUGGING_SYMBOLS="YES"
else # e.g. "make xpackage-strip"
    BUILDTYPE="Release"
    GCC_GENERATE_DEBUGGING_SYMBOLS="NO"
fi

# Colored status output on stderr. "$*" joins all arguments into a single
# word so messages are never re-split (the old unquoted $@ relied on echo
# re-joining the words).
function step { >&2 echo -e "\033[1m\033[36m* $*\033[0m"; }
function finish { >&2 echo -en "\033[0m"; }
trap finish EXIT

step "Creating build files..."
export MASON_PLATFORM=osx
export BUILDTYPE=${BUILDTYPE:-Release}
export HOST=osx
make Xcode/osx

# Build number comes from CI (Travis or Bitrise), defaulting to 0 locally.
VERSION=${TRAVIS_JOB_NUMBER:-${BITRISE_BUILD_NUMBER:-0}}

step "Building OS X framework (build ${VERSION})..."
xcodebuild -sdk macosx${OSX_SDK_VERSION} \
    ARCHS="x86_64" \
    ONLY_ACTIVE_ARCH=NO \
    GCC_GENERATE_DEBUGGING_SYMBOLS=${GCC_GENERATE_DEBUGGING_SYMBOLS} \
    CURRENT_PROJECT_VERSION=${VERSION} \
    -project ./build/osx-x86_64/gyp/osx.xcodeproj \
    -configuration ${BUILDTYPE} \
    -target osxsdk \
    -jobs ${JOBS}

TARGET_BUILD_DIR=gyp/build/${BUILDTYPE}
INFOPLIST_PATH=Mapbox.framework/Versions/Current/Resources/Info.plist

# Uncomment when we're ready to release an official version.
#VERSION=$( git tag | grep ^osx | sed 's/^osx-//' | sort -r | grep -v '\-rc.' | grep -v '\-pre.' | sed -n '1p' | sed 's/^v//' )
#if [ "$VERSION" ]; then
#    plutil \
#        -replace CFBundleShortVersionString -string ${VERSION} \
#        $TARGET_BUILD_DIR/$INFOPLIST_PATH
#    plutil \
#        -replace CFBundleVersion -string ${VERSION} \
#        $TARGET_BUILD_DIR/$INFOPLIST_PATH
#fi

# Print the framework path for the caller.
echo $TARGET_BUILD_DIR/Mapbox.framework
<file_sep>/platform/android/src/http_request_android.cpp
#include <mbgl/storage/http_context_base.hpp>
#include <mbgl/storage/http_request_base.hpp>
#include <mbgl/storage/resource.hpp>
#include <mbgl/storage/response.hpp>
#include <mbgl/platform/log.hpp>
#include <mbgl/util/async_task.hpp>
#include <mbgl/util/util.hpp>
#include <mbgl/util/string.hpp>
#include <jni/jni.hpp>
#include "attach_env.hpp"
namespace mbgl {
namespace android {
// Android implementation of the HTTP context. Holds the JNI environment
// attached for the thread that creates requests.
class HTTPContext : public HTTPContextBase {
public:
    HTTPRequestBase* createRequest(const Resource&, HTTPRequestBase::Callback) final;
    // JNI env for the owning thread; detached when the context is destroyed.
    UniqueEnv env { android::AttachEnv() };
};
// Bridges an mbgl HTTP request to its Java peer
// (com.mapbox.mapboxsdk.http.HTTPRequest). The Java side performs the actual
// network I/O and calls back into onResponse()/onFailure() through JNI.
class HTTPRequest : public HTTPRequestBase {
public:
    // Fully-qualified name of the Java peer class, used by jni.hpp lookups.
    static constexpr auto Name() { return "com/mapbox/mapboxsdk/http/HTTPRequest"; };
    HTTPRequest(jni::JNIEnv&, const Resource&, Callback);
    void cancel() final;
    // JNI entry points, invoked from Java on completion or error.
    void onFailure(jni::JNIEnv&, int type, jni::String message);
    void onResponse(jni::JNIEnv&, int code,
                    jni::String etag, jni::String modified,
                    jni::String cacheControl, jni::String expires,
                    jni::Array<jni::jbyte> body);
    static jni::Class<HTTPRequest> javaClass;
    jni::UniqueObject<HTTPRequest> javaRequest;
private:
    // Delivers the stored response to the callback, then self-deletes.
    void finish();
    std::unique_ptr<Response> response;
    // NOTE(review): appears unused in this file — confirm before removing.
    const std::shared_ptr<const Response> existingResponse;
    // Marshals completion from the JNI callback thread back to the owning run loop.
    util::AsyncTask async;
    // Error categories reported by the Java peer (must match the Java-side constants).
    static const int connectionError = 0;
    static const int temporaryError = 1;
    static const int permanentError = 2;
};
jni::Class<HTTPRequest> HTTPRequest::javaClass;

// Caches a global reference to the Java peer class and registers the native
// callback methods (nativeOnFailure / nativeOnResponse) on it. Must be called
// before any HTTPRequest is constructed.
void RegisterNativeHTTPRequest(jni::JNIEnv& env) {
    HTTPRequest::javaClass = *jni::Class<HTTPRequest>::Find(env).NewGlobalRef(env).release();
    #define METHOD(MethodPtr, name) jni::MakeNativePeerMethod<decltype(MethodPtr), (MethodPtr)>(name)
    jni::RegisterNativePeer<HTTPRequest>(env, HTTPRequest::javaClass, "mNativePtr",
        METHOD(&HTTPRequest::onFailure, "nativeOnFailure"),
        METHOD(&HTTPRequest::onResponse, "nativeOnResponse"));
}
// -------------------------------------------------------------------------------------------------
// Creates a new request. The request owns itself: it is deleted in
// cancel() or finish(), never by the caller.
HTTPRequestBase* HTTPContext::createRequest(const Resource& resource, HTTPRequestBase::Callback callback) {
    return new HTTPRequest(*env, resource, callback);
}
// Constructs the Java peer, which starts the request. A prior etag is
// preferred over a prior Last-Modified date; presumably the Java side turns
// these into conditional request headers — confirm in HTTPRequest.java.
HTTPRequest::HTTPRequest(jni::JNIEnv& env, const Resource& resource_, Callback callback_)
    : HTTPRequestBase(resource_, callback_),
      async([this] { finish(); }) {
    std::string etagStr;
    std::string modifiedStr;
    if (resource.priorEtag) {
        etagStr = *resource.priorEtag;
    } else if (resource.priorModified) {
        modifiedStr = util::rfc1123(*resource.priorModified);
    }
    // Bound the number of JNI local references created below.
    jni::UniqueLocalFrame frame = jni::PushLocalFrame(env, 10);
    static auto constructor =
        javaClass.GetConstructor<jni::jlong, jni::String, jni::String, jni::String, jni::String>(env);
    // `this` is passed as the native peer pointer so Java can call back into us.
    javaRequest = javaClass.New(env, constructor,
        reinterpret_cast<jlong>(this),
        jni::Make<jni::String>(env, resource.url),
        jni::Make<jni::String>(env, "MapboxGL/1.0"),
        jni::Make<jni::String>(env, etagStr),
        jni::Make<jni::String>(env, modifiedStr)).NewGlobalRef(env);
}
// Cancels the Java-side request, then ends this request's lifetime
// (the object is self-owned; see createRequest()).
void HTTPRequest::cancel() {
    UniqueEnv env = android::AttachEnv();
    static auto cancel = javaClass.GetMethod<void ()>(*env, "cancel");
    javaRequest->Call(*env, cancel);
    delete this;
}

// Runs on the owning thread (scheduled via `async`) after onResponse or
// onFailure has populated `response`; notifies the callback and self-deletes.
void HTTPRequest::finish() {
    assert(response);
    notify(*response);
    delete this;
}
// JNI callback from the Java peer with the raw HTTP result. Translates the
// status code and caching headers into an mbgl Response, then schedules
// finish() on the owning thread via `async`.
void HTTPRequest::onResponse(jni::JNIEnv& env, int code,
                             jni::String etag, jni::String modified, jni::String cacheControl,
                             jni::String expires, jni::Array<jni::jbyte> body) {
    response = std::make_unique<Response>();
    using Error = Response::Error;
    if (etag) {
        response->etag = jni::Make<std::string>(env, etag);
    }
    if (modified) {
        response->modified = util::parseTimePoint(jni::Make<std::string>(env, modified).c_str());
    }
    if (cacheControl) {
        response->expires = parseCacheControl(jni::Make<std::string>(env, cacheControl).c_str());
    }
    // An explicit Expires header overwrites any Cache-Control-derived value.
    if (expires) {
        response->expires = util::parseTimePoint(jni::Make<std::string>(env, expires).c_str());
    }
    if (code == 200) {
        if (body) {
            // Copy the Java byte[] into a std::string buffer.
            auto data = std::make_shared<std::string>(body.Length(env), char());
            jni::GetArrayRegion(env, *body, 0, data->size(), reinterpret_cast<jbyte*>(&(*data)[0]));
            response->data = data;
        } else {
            response->data = std::make_shared<std::string>();
        }
    } else if (code == 204 || (code == 404 && resource.kind == Resource::Kind::Tile)) {
        // 404 for a tile is treated as "no content" (empty tile), not an error.
        response->noContent = true;
    } else if (code == 304) {
        response->notModified = true;
    } else if (code == 404) {
        response->error = std::make_unique<Error>(Error::Reason::NotFound, "HTTP status code 404");
    } else if (code >= 500 && code < 600) {
        response->error = std::make_unique<Error>(Error::Reason::Server, std::string{ "HTTP status code " } + std::to_string(code));
    } else {
        response->error = std::make_unique<Error>(Error::Reason::Other, std::string{ "HTTP status code " } + std::to_string(code));
    }
    async.send();
}
// JNI callback for transport-level failures. Maps the Java-side error
// category to an mbgl error reason and schedules finish().
void HTTPRequest::onFailure(jni::JNIEnv& env, int type, jni::String message) {
    std::string messageStr = jni::Make<std::string>(env, message);
    response = std::make_unique<Response>();
    using Error = Response::Error;
    switch (type) {
        case connectionError:
            response->error = std::make_unique<Error>(Error::Reason::Connection, messageStr);
            break;
        case temporaryError:
            response->error = std::make_unique<Error>(Error::Reason::Server, messageStr);
            break;
        default:
            // Covers permanentError and any unknown category.
            response->error = std::make_unique<Error>(Error::Reason::Other, messageStr);
    }
    async.send();
}
} // namespace android
// Factory used by the portable storage layer to obtain the platform context.
std::unique_ptr<HTTPContextBase> HTTPContextBase::createContext() {
    return std::make_unique<android::HTTPContext>();
}

// Cap on simultaneous in-flight HTTP requests on Android.
uint32_t HTTPContextBase::maximumConcurrentRequests() {
    return 20;
}
} // namespace mbgl
<file_sep>/platform/osx/test/osxtest.gypi
{
'includes': [
'../../../gyp/common.gypi',
],
'targets': [
{
'target_name': 'osxtest',
'product_name': 'osxtest',
'type': 'loadable_module',
'mac_xctest_bundle': 1,
'dependencies': [
'osxsdk',
],
'variables': {
'cflags_cc': [
],
'ldflags': [
'-stdlib=libc++',
'-lstdc++',
],
},
'xcode_settings': {
'CLANG_ENABLE_MODULES': 'YES',
'CLANG_ENABLE_OBJC_ARC': 'YES',
'ENABLE_STRICT_OBJC_MSGSEND': 'YES',
'GCC_DYNAMIC_NO_PIC': 'NO',
'GCC_NO_COMMON_BLOCKS': 'YES',
'INFOPLIST_FILE': '../platform/osx/test/Info.plist',
'LD_RUNPATH_SEARCH_PATHS': [
'${inherited}',
'@executable_path/../Frameworks',
'@loader_path/../Frameworks',
],
'PRODUCT_BUNDLE_IDENTIFIER': 'com.mapbox.osxtest',
'OTHER_CPLUSPLUSFLAGS': [ '<@(cflags_cc)' ],
'OTHER_LDFLAGS': [ '<@(ldflags)' ],
'SDKROOT': 'macosx',
'SKIP_INSTALL': 'YES',
'SUPPORTED_PLATFORMS':'macosx',
},
'sources': [
'./MGLGeometryTests.mm',
'./MGLStyleTests.mm',
],
'include_dirs': [
'../../../platform/osx/include',
'../../../platform/darwin/include',
'../../../include',
'../../../src',
],
'configurations': {
'Debug': {
'xcode_settings': {
'COPY_PHASE_STRIP': 'NO',
'DEBUG_INFORMATION_FORMAT': 'dwarf',
'ENABLE_TESTABILITY': 'YES',
'GCC_OPTIMIZATION_LEVEL': '0',
'GCC_PREPROCESSOR_DEFINITIONS': [
'DEBUG=1',
'${inherited}',
],
'ONLY_ACTIVE_ARCH': 'YES',
},
},
'Release': {
'xcode_settings': {
'COPY_PHASE_STRIP': 'YES',
'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
'ENABLE_NS_ASSERTIONS': 'NO',
},
},
},
},
]
}
<file_sep>/gyp/shaders.gypi
{
'targets': [
{
'target_name': 'shaders',
'type': 'none',
'hard_dependency': 1,
'sources': [
'<!@(find ../src/mbgl/shader -name "*.glsl")'
],
'rules': [
{
'rule_name': 'Build Shaders',
'message': 'Building shader',
'extension': 'glsl',
'inputs': [ '../scripts/build-shaders.py' ],
'outputs': [ '<(SHARED_INTERMEDIATE_DIR)/include/mbgl/shader/<(RULE_INPUT_ROOT).hpp' ],
'action': [ '../scripts/build-shaders.py', '<(RULE_INPUT_PATH)', '<(SHARED_INTERMEDIATE_DIR)/include/mbgl/shader/<(RULE_INPUT_ROOT).hpp' ],
'process_outputs_as_sources': 1,
}
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/include',
]
}
},
]
}
<file_sep>/src/mbgl/style/style_parser.cpp
#include <mbgl/style/style_parser.hpp>
#include <mbgl/layer/fill_layer.hpp>
#include <mbgl/layer/line_layer.hpp>
#include <mbgl/layer/circle_layer.hpp>
#include <mbgl/layer/symbol_layer.hpp>
#include <mbgl/layer/raster_layer.hpp>
#include <mbgl/layer/background_layer.hpp>
#include <mbgl/platform/log.hpp>
#include <mapbox/geojsonvt.hpp>
#include <mapbox/geojsonvt/convert.hpp>
#include <mbgl/util/mapbox.hpp>
#include <rapidjson/document.h>
#include <rapidjson/error/en.h>
#include <algorithm>
#include <sstream>
#include <set>
namespace mbgl {
namespace {
// Appends the string-array member `name` of `value` to `target`. The member
// is ignored entirely (nothing appended) if it is absent, not an array, or
// contains any non-string element.
void parseTileJSONMember(const JSValue& value, std::vector<std::string>& target, const char* name) {
    if (!value.HasMember(name)) {
        return;
    }
    const JSValue& property = value[name];
    if (!property.IsArray()) {
        return;
    }
    // Validate every element before mutating `target`, so a partially-valid
    // array leaves no partial results behind.
    for (rapidjson::SizeType i = 0; i < property.Size(); i++) {
        if (!property[i].IsString()) {
            return;
        }
    }
    for (rapidjson::SizeType i = 0; i < property.Size(); i++) {
        target.emplace_back(std::string(property[i].GetString(), property[i].GetStringLength()));
    }
}
// Copies the string member `name` of `value` into `target`; `target` is left
// untouched when the member is absent or is not a string.
void parseTileJSONMember(const JSValue& value, std::string& target, const char* name) {
    if (value.HasMember(name)) {
        const JSValue& property = value[name];
        if (property.IsString()) {
            target = { property.GetString(), property.GetStringLength() };
        }
    }
}
// Copies the unsigned-integer member `name` of `value` into `target`, leaving
// `target` unchanged when the member is absent, not an unsigned int, or does
// not fit into a uint8_t.
void parseTileJSONMember(const JSValue& value, uint8_t& target, const char* name) {
    if (!value.HasMember(name)) {
        return;
    }
    const JSValue& property = value[name];
    if (!property.IsUint()) {
        return;
    }
    unsigned int uint = property.GetUint();
    if (uint > std::numeric_limits<uint8_t>::max()) {
        return;
    }
    target = uint;
}
// Copies up to four numeric elements of array member `name` into `target`.
// Ignored when absent, not an array, longer than four, or containing a
// non-numeric element. Note: arrays *shorter* than four are accepted and only
// overwrite the leading elements of `target`.
void parseTileJSONMember(const JSValue& value, std::array<double, 4>& target, const char* name) {
    if (!value.HasMember(name)) {
        return;
    }
    const JSValue& property = value[name];
    if (!property.IsArray() || property.Size() > 4) {
        return;
    }
    for (rapidjson::SizeType i = 0; i < property.Size(); i++) {
        if (!property[i].IsNumber()) {
            return;
        }
    }
    for (rapidjson::SizeType i = 0; i < property.Size(); i++) {
        target[i] = property[i].GetDouble();
    }
}
} // end namespace
// Out-of-line default destructor — likely required so unique_ptr members of
// types forward-declared in the header can be destroyed here; confirm against
// style_parser.hpp.
StyleParser::~StyleParser() = default;
void StyleParser::parse(const std::string& json) {
rapidjson::GenericDocument<rapidjson::UTF8<>, rapidjson::CrtAllocator> document;
document.Parse<0>(json.c_str());
if (document.HasParseError()) {
Log::Error(Event::ParseStyle, "Error parsing style JSON at %i: %s", document.GetErrorOffset(), rapidjson::GetParseError_En(document.GetParseError()));
return;
}
if (document.HasMember("version")) {
int version = document["version"].GetInt();
if (version != 8) {
Log::Warning(Event::ParseStyle, "current renderer implementation only supports style spec version 8; using an outdated style will cause rendering errors");
}
}
if (document.HasMember("sources")) {
parseSources(document["sources"]);
}
if (document.HasMember("layers")) {
parseLayers(document["layers"]);
}
if (document.HasMember("sprite")) {
const JSValue& sprite = document["sprite"];
if (sprite.IsString()) {
spriteURL = { sprite.GetString(), sprite.GetStringLength() };
}
}
if (document.HasMember("glyphs")) {
const JSValue& glyphs = document["glyphs"];
if (glyphs.IsString()) {
glyphURL = { glyphs.GetString(), glyphs.GetStringLength() };
}
}
}
// Parses the style's "sources" object. Each member becomes a Source; invalid
// entries are logged and skipped without aborting the rest.
void StyleParser::parseSources(const JSValue& value) {
    if (!value.IsObject()) {
        Log::Warning(Event::ParseStyle, "sources must be an object");
        return;
    }
    JSValue::ConstMemberIterator itr = value.MemberBegin();
    for (; itr != value.MemberEnd(); ++itr) {
        const JSValue& nameVal = itr->name;
        const JSValue& sourceVal = itr->value;
        if (!sourceVal.HasMember("type")) {
            Log::Warning(Event::ParseStyle, "source must have a type");
            continue;
        }
        const JSValue& typeVal = sourceVal["type"];
        if (!typeVal.IsString()) {
            Log::Warning(Event::ParseStyle, "source type must have one of the enum values");
            continue;
        }
        const auto type = SourceTypeClass({ typeVal.GetString(), typeVal.GetStringLength() });
        // Sources can have URLs, either because they reference an external TileJSON file, or
        // because reference a GeoJSON file. They don't have to have one though when all source
        // parameters are specified inline.
        std::string url;
        uint16_t tileSize = util::tileSize;
        std::unique_ptr<SourceInfo> info;
        std::unique_ptr<mapbox::geojsonvt::GeoJSONVT> geojsonvt;
        switch (type) {
        case SourceType::Raster:
            // Raster sources may override the default tile size (must fit uint16).
            if (sourceVal.HasMember("tileSize")) {
                const JSValue& tileSizeVal = sourceVal["tileSize"];
                if (tileSizeVal.IsNumber() && tileSizeVal.GetUint64() <= std::numeric_limits<uint16_t>::max()) {
                    tileSize = tileSizeVal.GetUint64();
                } else {
                    Log::Error(Event::ParseStyle, "invalid tileSize");
                    continue;
                }
            }
            // Fall through. Vector sources are forbidden from having a tileSize.
        case SourceType::Vector:
            // Either an external TileJSON URL, or inline TileJSON parameters.
            if (sourceVal.HasMember("url")) {
                const JSValue& urlVal = sourceVal["url"];
                if (urlVal.IsString()) {
                    url = { urlVal.GetString(), urlVal.GetStringLength() };
                } else {
                    Log::Error(Event::ParseStyle, "source url must be a string");
                    continue;
                }
            } else {
                info = parseTileJSON(sourceVal);
            }
            break;
        case SourceType::GeoJSON:
            // We should probably split this up to have URLs in the url property, and actual data
            // in the data property. Until then, we're going to detect the content based on the
            // object type.
            if (sourceVal.HasMember("data")) {
                const JSValue& dataVal = sourceVal["data"];
                if (dataVal.IsString()) {
                    // We need to load an external GeoJSON file
                    url = { dataVal.GetString(), dataVal.GetStringLength() };
                } else if (dataVal.IsObject()) {
                    // We need to parse dataVal as a GeoJSON object
                    // TODO: parse GeoJSON data
                    geojsonvt = parseGeoJSON(dataVal);
                } else {
                    Log::Error(Event::ParseStyle, "GeoJSON data must be a URL or an object");
                    continue;
                }
            } else {
                Log::Error(Event::ParseStyle, "GeoJSON source must have a data value");
                continue;
            }
            // We always assume the default configuration for GeoJSON sources.
            info = std::make_unique<SourceInfo>();
            break;
        default:
            Log::Error(Event::ParseStyle, "source type '%s' is not supported", typeVal.GetString());
            continue;
        }
        const std::string id { nameVal.GetString(), nameVal.GetStringLength() };
        std::unique_ptr<Source> source = std::make_unique<Source>(type, id, url, tileSize, std::move(info), std::move(geojsonvt));
        // Keep both an owning, ordered list and a non-owning lookup map by id.
        sourcesMap.emplace(id, source.get());
        sources.emplace_back(std::move(source));
    }
}
// Converts an inline GeoJSON object into a GeoJSONVT tile index. On malformed
// input this logs the error and returns an *empty* index rather than nullptr,
// so downstream tile requests can complete instead of waiting forever.
std::unique_ptr<mapbox::geojsonvt::GeoJSONVT> StyleParser::parseGeoJSON(const JSValue& value) {
    using namespace mapbox::geojsonvt;
    try {
        return std::make_unique<GeoJSONVT>(Convert::convert(value, 0));
    } catch (const std::exception& ex) {
        Log::Error(Event::ParseStyle, "Failed to parse GeoJSON data: %s", ex.what());
        // Create an empty GeoJSON VT object to make sure we're not infinitely waiting for
        // tiles to load.
        return std::make_unique<GeoJSONVT>(std::vector<ProjectedFeature>{});
    }
}
// Parses a raw TileJSON document that was fetched from `sourceURL`. Throws
// std::runtime_error ("offset - reason") on malformed JSON. Tile URLs from
// Mapbox-hosted sources are rewritten via util::mapbox::canonicalizeTileURL.
std::unique_ptr<SourceInfo> StyleParser::parseTileJSON(const std::string& json, const std::string& sourceURL, SourceType type, uint16_t tileSize) {
    rapidjson::GenericDocument<rapidjson::UTF8<>, rapidjson::CrtAllocator> document;
    document.Parse<0>(json.c_str());
    if (document.HasParseError()) {
        std::stringstream message;
        message << document.GetErrorOffset() << " - " << rapidjson::GetParseError_En(document.GetParseError());
        throw std::runtime_error(message.str());
    }
    std::unique_ptr<SourceInfo> result = StyleParser::parseTileJSON(document);
    // TODO: Remove this hack by delivering proper URLs in the TileJSON to begin with.
    if (util::mapbox::isMapboxURL(sourceURL)) {
        for (auto& url : result->tiles) {
            url = util::mapbox::canonicalizeTileURL(url, type, tileSize);
        }
    }
    return result;
}
// Builds a SourceInfo from an in-memory TileJSON object. Members that are
// absent or malformed keep their SourceInfo defaults; center/zoom/bounds
// default to zeros.
std::unique_ptr<SourceInfo> StyleParser::parseTileJSON(const JSValue& value) {
    auto info = std::make_unique<SourceInfo>();
    parseTileJSONMember(value, info->tiles, "tiles");
    parseTileJSONMember(value, info->minZoom, "minzoom");
    parseTileJSONMember(value, info->maxZoom, "maxzoom");
    parseTileJSONMember(value, info->attribution, "attribution");

    // Value-initialize the scratch array: parseTileJSONMember leaves it
    // untouched when the member is missing, and the previous code then read
    // indeterminate values into center/zoom/bounds.
    std::array<double, 4> array = {{ 0, 0, 0, 0 }};
    parseTileJSONMember(value, array, "center");
    info->center = { array[0], array[1] };
    info->zoom = array[2];

    // Reset before reuse so leftover center values can't leak into a missing
    // or short "bounds" member.
    array = {{ 0, 0, 0, 0 }};
    parseTileJSONMember(value, array, "bounds");
    info->bounds = LatLngBounds::hull({ array[0], array[1] }, { array[2], array[3] });

    return info;
}
// Parses the style's "layers" array in two passes: first collect the raw
// layer JSON by id (rejecting duplicates), then parse each — which lets
// "ref" layers resolve forward references — and finally emit the parsed
// layers in their original array order.
void StyleParser::parseLayers(const JSValue& value) {
    std::vector<std::string> ids;
    if (!value.IsArray()) {
        Log::Warning(Event::ParseStyle, "layers must be an array");
        return;
    }
    for (rapidjson::SizeType i = 0; i < value.Size(); ++i) {
        const JSValue& layerValue = value[i];
        if (!layerValue.IsObject()) {
            Log::Warning(Event::ParseStyle, "layer must be an object");
            continue;
        }
        if (!layerValue.HasMember("id")) {
            Log::Warning(Event::ParseStyle, "layer must have an id");
            continue;
        }
        const JSValue& id = layerValue["id"];
        if (!id.IsString()) {
            Log::Warning(Event::ParseStyle, "layer id must be a string");
            continue;
        }
        const std::string layerID = { id.GetString(), id.GetStringLength() };
        if (layersMap.find(layerID) != layersMap.end()) {
            Log::Warning(Event::ParseStyle, "duplicate layer id %s", layerID.c_str());
            continue;
        }
        // Stash the raw JSON; the StyleLayer itself is created in parseLayer().
        layersMap.emplace(layerID, std::pair<const JSValue&, std::unique_ptr<StyleLayer>> { layerValue, nullptr });
        ids.push_back(layerID);
    }
    for (const auto& id : ids) {
        auto it = layersMap.find(id);
        parseLayer(it->first,
                   it->second.first,
                   it->second.second);
    }
    // Layers that failed to parse remain null and are dropped here.
    for (const auto& id : ids) {
        auto it = layersMap.find(id);
        if (it->second.second) {
            layers.emplace_back(std::move(it->second.second));
        }
    }
}
// Creates the StyleLayer for `id` from its raw JSON `value`, writing the
// result into `layer` (left null on failure). "ref" layers recursively parse
// their referent and clone it; `stack` guards against circular references.
void StyleParser::parseLayer(const std::string& id, const JSValue& value, std::unique_ptr<StyleLayer>& layer) {
    if (layer) {
        // Skip parsing this again. We already have a valid layer definition.
        return;
    }
    // Make sure we have not previously attempted to parse this layer.
    if (std::find(stack.begin(), stack.end(), id) != stack.end()) {
        Log::Warning(Event::ParseStyle, "layer reference of '%s' is circular", id.c_str());
        return;
    }
    if (value.HasMember("ref")) {
        // This layer is referencing another layer. Recursively parse that layer.
        const JSValue& refVal = value["ref"];
        if (!refVal.IsString()) {
            Log::Warning(Event::ParseStyle, "layer ref of '%s' must be a string", id.c_str());
            return;
        }
        const std::string ref { refVal.GetString(), refVal.GetStringLength() };
        auto it = layersMap.find(ref);
        if (it == layersMap.end()) {
            Log::Warning(Event::ParseStyle, "layer '%s' references unknown layer %s", id.c_str(), ref.c_str());
            return;
        }
        // Recursively parse the referenced layer.
        stack.push_front(id);
        parseLayer(it->first,
                   it->second.first,
                   it->second.second);
        stack.pop_front();
        StyleLayer* reference = it->second.second.get();
        if (!reference) {
            return;
        }
        // The referencing layer inherits everything from the referent except id/ref.
        layer = reference->clone();
        layer->id = id;
        layer->ref = ref;
    } else {
        // Otherwise, parse the source/source-layer/filter/render keys to form the bucket.
        if (!value.HasMember("type")) {
            Log::Warning(Event::ParseStyle, "layer '%s' is missing a type", id.c_str());
            return;
        }
        const JSValue& typeVal = value["type"];
        if (!typeVal.IsString()) {
            Log::Warning(Event::ParseStyle, "layer '%s' has an invalid type", id.c_str());
            return;
        }
        std::string type { typeVal.GetString(), typeVal.GetStringLength() };
        // Instantiate the concrete layer subclass for the declared type.
        if (type == "fill") {
            layer = std::make_unique<FillLayer>();
        } else if (type == "line") {
            layer = std::make_unique<LineLayer>();
        } else if (type == "circle") {
            layer = std::make_unique<CircleLayer>();
        } else if (type == "symbol") {
            layer = std::make_unique<SymbolLayer>();
        } else if (type == "raster") {
            layer = std::make_unique<RasterLayer>();
        } else if (type == "background") {
            layer = std::make_unique<BackgroundLayer>();
        } else {
            Log::Warning(Event::ParseStyle, "unknown type '%s' for layer '%s'", type.c_str(), id.c_str());
            return;
        }
        layer->id = id;
        if (value.HasMember("source")) {
            const JSValue& value_source = value["source"];
            if (value_source.IsString()) {
                layer->source = { value_source.GetString(), value_source.GetStringLength() };
                // A missing source is only a warning; the layer is kept.
                auto source_it = sourcesMap.find(layer->source);
                if (source_it == sourcesMap.end()) {
                    Log::Warning(Event::ParseStyle, "can't find source '%s' required for layer '%s'", layer->source.c_str(), layer->id.c_str());
                }
            } else {
                Log::Warning(Event::ParseStyle, "source of layer '%s' must be a string", layer->id.c_str());
            }
        }
        if (value.HasMember("source-layer")) {
            const JSValue& value_source_layer = value["source-layer"];
            if (value_source_layer.IsString()) {
                layer->sourceLayer = { value_source_layer.GetString(), value_source_layer.GetStringLength() };
            } else {
                Log::Warning(Event::ParseStyle, "source-layer of layer '%s' must be a string", layer->id.c_str());
            }
        }
        if (value.HasMember("filter")) {
            layer->filter = parseFilterExpression(value["filter"]);
        }
        if (value.HasMember("minzoom")) {
            const JSValue& min_zoom = value["minzoom"];
            if (min_zoom.IsNumber()) {
                layer->minZoom = min_zoom.GetDouble();
            } else {
                Log::Warning(Event::ParseStyle, "minzoom of layer %s must be numeric", layer->id.c_str());
            }
        }
        if (value.HasMember("maxzoom")) {
            const JSValue& max_zoom = value["maxzoom"];
            if (max_zoom.IsNumber()) {
                layer->maxZoom = max_zoom.GetDouble();
            } else {
                Log::Warning(Event::ParseStyle, "maxzoom of layer %s must be numeric", layer->id.c_str());
            }
        }
        if (value.HasMember("layout")) {
            parseVisibility(*layer, value["layout"]);
            layer->parseLayout(value["layout"]);
        }
    }
    // Paint properties apply to both ref'd and fully-specified layers.
    layer->parsePaints(value);
}
// Applies the layout "visibility" member to `layer`. Absent: leave the
// current visibility untouched. Non-string: warn and force Visible.
void StyleParser::parseVisibility(StyleLayer& layer, const JSValue& value) {
    if (!value.HasMember("visibility")) {
        return;
    }
    const JSValue& visibility = value["visibility"];
    if (!visibility.IsString()) {
        Log::Warning(Event::ParseStyle, "value of 'visibility' must be a string");
        layer.visibility = VisibilityType::Visible;
        return;
    }
    layer.visibility = VisibilityTypeClass({ visibility.GetString(), visibility.GetStringLength() });
}
// Collects every font stack referenced by the style's symbol layers — either
// the single constant value or every stop of a zoom function. The result is
// de-duplicated and sorted (courtesy of std::set).
std::vector<std::string> StyleParser::fontStacks() const {
    std::set<std::string> result;
    for (const auto& layer : layers) {
        if (layer->is<SymbolLayer>()) {
            LayoutProperty<std::string> property = layer->as<SymbolLayer>()->layout.text.font;
            if (property.parsedValue) {
                for (const auto& stop : property.parsedValue->getStops()) {
                    result.insert(stop.second);
                }
            } else {
                result.insert(property.value);
            }
        }
    }
    return std::vector<std::string>(result.begin(), result.end());
}
} // namespace mbgl
<file_sep>/src/mbgl/annotation/shape_annotation_impl.cpp
#include <mapbox/geojsonvt/convert.hpp>
#include <mbgl/annotation/shape_annotation_impl.hpp>
#include <mbgl/annotation/annotation_manager.hpp>
#include <mbgl/annotation/annotation_tile.hpp>
#include <mbgl/util/constants.hpp>
#include <mbgl/util/string.hpp>
#include <mbgl/style/style.hpp>
#include <mbgl/layer/line_layer.hpp>
#include <mbgl/layer/fill_layer.hpp>
namespace mbgl {
namespace geojsonvt = mapbox::geojsonvt;
// Each shape annotation gets its own style layer, named with a stable prefix
// plus the annotation id so it can be located and removed later.
ShapeAnnotationImpl::ShapeAnnotationImpl(const AnnotationID id_,
                                         const ShapeAnnotation& shape_,
                                         const uint8_t maxZoom_)
: id(id_),
  layerID("com.mapbox.annotations.shape." + util::toString(id)),
  shape(shape_),
  maxZoom(maxZoom_) {
}
// Ensures `style` contains a layer for this annotation (no-op if already
// present). Line/fill annotation properties produce a dedicated styled
// layer; otherwise the properties name an existing style layer that is
// cloned and retargeted at the annotation source.
void ShapeAnnotationImpl::updateStyle(Style& style) {
    if (style.getLayer(layerID))
        return;
    if (shape.properties.is<LineAnnotationProperties>()) {
        type = geojsonvt::ProjectedFeatureType::LineString;
        std::unique_ptr<LineLayer> layer = std::make_unique<LineLayer>();
        layer->layout.join = JoinType::Round;
        const LineAnnotationProperties& properties = shape.properties.get<LineAnnotationProperties>();
        layer->paint.opacity = properties.opacity;
        layer->paint.width = properties.width;
        layer->paint.color = properties.color;
        layer->id = layerID;
        layer->source = AnnotationManager::SourceID;
        layer->sourceLayer = layer->id;
        // Positioned relative to the point annotation layer (exact placement
        // semantics defined by Style::addLayer).
        style.addLayer(std::move(layer), AnnotationManager::PointLayerID);
    } else if (shape.properties.is<FillAnnotationProperties>()) {
        type = geojsonvt::ProjectedFeatureType::Polygon;
        std::unique_ptr<FillLayer> layer = std::make_unique<FillLayer>();
        const FillAnnotationProperties& properties = shape.properties.get<FillAnnotationProperties>();
        layer->paint.opacity = properties.opacity;
        layer->paint.color = properties.color;
        layer->paint.outlineColor = properties.outlineColor;
        layer->id = layerID;
        layer->source = AnnotationManager::SourceID;
        layer->sourceLayer = layer->id;
        // Positioned relative to the point annotation layer (see above).
        style.addLayer(std::move(layer), AnnotationManager::PointLayerID);
    } else {
        // The properties hold the id of an existing style layer to mimic.
        const StyleLayer* sourceLayer = style.getLayer(shape.properties.get<std::string>());
        if (!sourceLayer) return;
        std::unique_ptr<StyleLayer> layer = sourceLayer->clone();
        // Line layers render as LineString features; everything else as Polygon.
        type = layer->is<LineLayer>()
            ? geojsonvt::ProjectedFeatureType::LineString
            : geojsonvt::ProjectedFeatureType::Polygon;
        layer->id = layerID;
        layer->ref = "";
        layer->source = AnnotationManager::SourceID;
        layer->sourceLayer = layer->id;
        layer->visibility = VisibilityType::Visible;
        style.addLayer(std::move(layer), sourceLayer->id);
    }
}
// Populates `tile` with this shape's geometry for the given tile ID, lazily
// building (and caching) a per-shape geojson-vt tiler on first use.
void ShapeAnnotationImpl::updateTile(const TileID& tileID, AnnotationTile& tile) {
static const double baseTolerance = 4;
if (!shapeTiler) {
// Scale the simplification tolerance so it stays constant in screen space
// at the maximum zoom level.
const uint64_t maxAmountOfTiles = 1 << maxZoom;
const double tolerance = baseTolerance / (maxAmountOfTiles * GeometryTileFeature::defaultExtent);
geojsonvt::ProjectedRings rings;
std::vector<geojsonvt::LonLat> points;
for (size_t i = 0; i < shape.segments[0].size(); ++i) { // first segment for now (no holes)
// Clamp latitude to the web-mercator projectable range.
const double constraintedLatitude = ::fmin(::fmax(shape.segments[0][i].latitude, -util::LATITUDE_MAX), util::LATITUDE_MAX);
points.push_back(geojsonvt::LonLat(shape.segments[0][i].longitude, constraintedLatitude));
}
// Polygons must form a closed ring; append the first point if needed.
if (type == geojsonvt::ProjectedFeatureType::Polygon &&
(points.front().lon != points.back().lon || points.front().lat != points.back().lat)) {
points.push_back(geojsonvt::LonLat(points.front().lon, points.front().lat));
}
auto ring = geojsonvt::Convert::projectRing(points, tolerance);
rings.push_back(ring);
std::vector<geojsonvt::ProjectedFeature> features;
features.push_back(geojsonvt::Convert::create(geojsonvt::Tags(), type, rings));
mapbox::geojsonvt::Options options;
options.maxZoom = maxZoom;
options.buffer = 128u;
options.tolerance = baseTolerance;
shapeTiler = std::make_unique<mapbox::geojsonvt::GeoJSONVT>(features, options);
}
const auto& shapeTile = shapeTiler->getTile(tileID.z, tileID.x, tileID.y);
// Nothing of this shape intersects the requested tile.
if (!shapeTile)
return;
// Find or create the output layer for this shape within the annotation tile.
AnnotationTileLayer& layer = *tile.layers.emplace(layerID,
std::make_unique<AnnotationTileLayer>()).first->second;
for (auto& shapeFeature : shapeTile.features) {
FeatureType featureType = FeatureType::Unknown;
if (shapeFeature.type == geojsonvt::TileFeatureType::LineString) {
featureType = FeatureType::LineString;
} else if (shapeFeature.type == geojsonvt::TileFeatureType::Polygon) {
featureType = FeatureType::Polygon;
}
assert(featureType != FeatureType::Unknown);
// Copy the clipped tile geometry into the GL representation.
GeometryCollection renderGeometry;
for (auto& shapeRing : shapeFeature.tileGeometry.get<geojsonvt::TileRings>()) {
GeometryCoordinates renderLine;
for (auto& shapePoint : shapeRing) {
renderLine.emplace_back(shapePoint.x, shapePoint.y);
}
renderGeometry.push_back(renderLine);
}
layer.features.emplace_back(
std::make_shared<AnnotationTileFeature>(featureType, renderGeometry));
}
}
} // namespace mbgl
<file_sep>/test/storage/http_other_loop.cpp
#include "storage.hpp"
#include <mbgl/storage/online_file_source.hpp>
#include <mbgl/util/chrono.hpp>
#include <mbgl/util/run_loop.hpp>
// Verifies OnlineFileSource delivers a response to a run loop on a thread other
// than the one performing the network processing.
TEST_F(Storage, HTTPOtherLoop) {
SCOPED_TEST(HTTPOtherLoop)
using namespace mbgl;
// This file source launches a separate thread to do the processing.
util::RunLoop loop;
OnlineFileSource fs;
std::unique_ptr<FileRequest> req = fs.request({ Resource::Unknown, "http://127.0.0.1:3000/test" },
[&](Response res) {
// NOTE(review): request is released inside its own callback — assumed
// supported by OnlineFileSource; confirm cancellation semantics.
req.reset();
EXPECT_EQ(nullptr, res.error);
ASSERT_TRUE(res.data.get());
EXPECT_EQ("Hello World!", *res.data);
EXPECT_FALSE(bool(res.expires));
EXPECT_FALSE(bool(res.modified));
EXPECT_FALSE(bool(res.etag));
loop.stop();
HTTPOtherLoop.finish();
});
loop.run();
}
<file_sep>/platform/android/MapboxGLAndroidSDKTestApp/src/test/java/com/mapbox/mapboxsdk/maps/UiSettingsTest.java
package com.mapbox.mapboxsdk.maps;
import android.view.Gravity;
import org.junit.Test;
import org.mockito.InjectMocks;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
/**
 * Unit tests for {@link UiSettings}, covering getter/setter round-trips for
 * compass, logo, attribution, and gesture configuration.
 *
 * <p>NOTE(review): {@code MapView} is a Mockito mock, so any value delegated to
 * the view returns its default (0/false) unless stubbed; these tests exercise
 * the Java-side state held by {@code UiSettings}.
 */
public class UiSettingsTest {

    @InjectMocks
    MapView mMapView = mock(MapView.class);

    @Test
    public void testSanity() {
        UiSettings uiSettings = new UiSettings(mMapView);
        assertNotNull("uiSettings should not be null", uiSettings);
    }

    @Test
    public void testMinZoom() {
        double zoom = 10;
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setMinZoom(zoom);
        assertEquals("MinZoom should match", zoom, uiSettings.getMinZoom(), 0);
    }

    @Test
    public void testMaxZoom() {
        double zoom = 10;
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setMaxZoom(zoom);
        assertEquals("MaxZoom should match", zoom, uiSettings.getMaxZoom(), 0);
    }

    @Test
    public void testInitialZoomLevels() {
        //we are mocking MapView we expect a value of 0 to be returned
        UiSettings uiSettings = new UiSettings(mMapView);
        assertEquals("MaxZoom should match", 0, uiSettings.getMaxZoom(), 0);
        assertEquals("MinZoom should match", 0, uiSettings.getMinZoom(), 0);
    }

    @Test
    public void testCompassEnabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setCompassEnabled(true);
        assertEquals("Compass should be enabled", true, uiSettings.isCompassEnabled());
    }

    @Test
    public void testCompassDisabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setCompassEnabled(false);
        assertEquals("Compass should be disabled", false, uiSettings.isCompassEnabled());
    }

    @Test
    public void testCompassGravity() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setCompassGravity(Gravity.LEFT);
        assertEquals("Compass gravity should be same", Gravity.LEFT, uiSettings.getCompassGravity());
    }

    @Test
    public void testCompassMargins() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setCompassMargins(1, 2, 3, 4);
        assertTrue("Compass margin left should be same", uiSettings.getCompassMarginLeft() == 1);
        assertTrue("Compass margin top should be same", uiSettings.getCompassMarginTop() == 2);
        assertTrue("Compass margin right should be same", uiSettings.getCompassMarginRight() == 3);
        assertTrue("Compass margin bottom should be same", uiSettings.getCompassMarginBottom() == 4);
    }

    @Test
    public void testLogoEnabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setLogoEnabled(true);
        assertEquals("Logo should be enabled", true, uiSettings.isLogoEnabled());
    }

    @Test
    public void testLogoDisabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setLogoEnabled(false);
        assertEquals("Logo should be disabled", false, uiSettings.isLogoEnabled());
    }

    @Test
    public void testLogoGravity() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setLogoGravity(Gravity.RIGHT);
        assertEquals("Logo gravity should be same", Gravity.RIGHT, uiSettings.getLogoGravity());
    }

    @Test
    public void testLogoMargins() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setLogoMargins(1, 2, 3, 4);
        // Fixed copy-paste: messages previously said "Compass margin".
        assertTrue("Logo margin left should be same", uiSettings.getLogoMarginLeft() == 1);
        assertTrue("Logo margin top should be same", uiSettings.getLogoMarginTop() == 2);
        assertTrue("Logo margin right should be same", uiSettings.getLogoMarginRight() == 3);
        assertTrue("Logo margin bottom should be same", uiSettings.getLogoMarginBottom() == 4);
    }

    @Test
    public void testAttributionEnabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setAttributionEnabled(true);
        assertEquals("Attribution should be enabled", true, uiSettings.isAttributionEnabled());
    }

    @Test
    public void testAttributionDisabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setAttributionEnabled(false);
        // Fixed copy-paste bug: previously asserted isLogoEnabled() here.
        assertEquals("Attribution should be disabled", false, uiSettings.isAttributionEnabled());
    }

    @Test
    public void testAttributionGravity() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setAttributionGravity(Gravity.RIGHT);
        assertEquals("Attribution gravity should be same", Gravity.RIGHT, uiSettings.getAttributionGravity());
    }

    @Test
    public void testAttributionMargins() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setAttributionMargins(1, 2, 3, 4);
        assertTrue("Attribution margin left should be same", uiSettings.getAttributionMarginLeft() == 1);
        assertTrue("Attribution margin top should be same", uiSettings.getAttributionMarginTop() == 2);
        assertTrue("Attribution margin right should be same", uiSettings.getAttributionMarginRight() == 3);
        assertTrue("Attribution margin bottom should be same", uiSettings.getAttributionMarginBottom() == 4);
    }

    @Test
    public void testRotateGesturesEnabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setRotateGesturesEnabled(true);
        assertEquals("Rotate gesture should be enabled", true, uiSettings.isRotateGesturesEnabled());
    }

    @Test
    public void testRotateGesturesDisabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setRotateGesturesEnabled(false);
        assertEquals("Rotate gesture should be disabled", false, uiSettings.isRotateGesturesEnabled());
    }

    @Test
    public void testTiltGesturesEnabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setTiltGesturesEnabled(true);
        assertEquals("Tilt gesture should be enabled", true, uiSettings.isTiltGesturesEnabled());
    }

    @Test
    public void testTiltGesturesDisabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setTiltGesturesEnabled(false);
        assertEquals("Tilt gesture should be disabled", false, uiSettings.isTiltGesturesEnabled());
    }

    @Test
    public void testZoomGesturesEnabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setZoomGesturesEnabled(true);
        assertEquals("Zoom gesture should be enabled", true, uiSettings.isZoomGesturesEnabled());
    }

    @Test
    public void testZoomGesturesDisabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setZoomGesturesEnabled(false);
        assertEquals("Zoom gesture should be disabled", false, uiSettings.isZoomGesturesEnabled());
    }

    @Test
    public void testZoomControlsEnabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setZoomControlsEnabled(true);
        assertEquals("Zoom controls should be enabled", true, uiSettings.isZoomControlsEnabled());
    }

    @Test
    public void testZoomControlsDisabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setZoomControlsEnabled(false);
        assertEquals("Zoom controls should be disabled", false, uiSettings.isZoomControlsEnabled());
    }

    @Test
    public void testScrollGesturesEnabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setScrollGesturesEnabled(true);
        assertEquals("Scroll gesture should be enabled", true, uiSettings.isScrollGesturesEnabled());
    }

    @Test
    public void testScrollGesturesDisabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setScrollGesturesEnabled(false);
        assertEquals("Scroll gesture should be disabled", false, uiSettings.isScrollGesturesEnabled());
    }

    @Test
    public void testAllGesturesEnabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setAllGesturesEnabled(true);
        assertEquals("Rotate gesture should be enabled", true, uiSettings.isRotateGesturesEnabled());
        assertEquals("Tilt gesture should be enabled", true, uiSettings.isTiltGesturesEnabled());
        assertEquals("Zoom gesture should be enabled", true, uiSettings.isZoomGesturesEnabled());
        assertEquals("Scroll gesture should be enabled", true, uiSettings.isScrollGesturesEnabled());
    }

    @Test
    public void testAllGesturesDisabled() {
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.setAllGesturesEnabled(false);
        assertEquals("Rotate gesture should be disabled", false, uiSettings.isRotateGesturesEnabled());
        assertEquals("Tilt gesture should be disabled", false, uiSettings.isTiltGesturesEnabled());
        assertEquals("Zoom gesture should be disabled", false, uiSettings.isZoomGesturesEnabled());
        assertEquals("Scroll gesture should be disabled", false, uiSettings.isScrollGesturesEnabled());
    }

    @Test
    public void testInvalidate() {
        // Smoke test: invalidate() must not throw against a mocked MapView.
        UiSettings uiSettings = new UiSettings(mMapView);
        uiSettings.invalidate();
    }
}<file_sep>/platform/node/src/node_mapbox_gl_native.hpp
#pragma once
#include <mbgl/util/run_loop.hpp>
namespace node_mbgl {
// Returns the run loop shared by the Node bindings (defined in the .cpp).
mbgl::util::RunLoop& NodeRunLoop();
}
<file_sep>/platform/ios/scripts/configure.sh
#!/usr/bin/env bash
# Pinned dependency versions for the iOS build.
# "system" means the platform-provided copy is used instead of a mason build.
BOOST_VERSION=1.59.0
SQLITE_VERSION=system
LIBUV_VERSION=1.7.5
ZLIB_VERSION=system
GEOJSONVT_VERSION=3.1.0
VARIANT_VERSION=1.0
RAPIDJSON_VERSION=1.0.2
<file_sep>/src/mbgl/layer/custom_layer.hpp
#ifndef MBGL_CUSTOM_LAYER
#define MBGL_CUSTOM_LAYER
#include <mbgl/style/style_layer.hpp>
namespace mbgl {
class TransformState;
// A style layer rendered by user-supplied OpenGL callbacks rather than by the
// built-in renderer. The three function pointers plus an opaque context are
// provided by the embedder.
class CustomLayer : public StyleLayer {
public:
CustomLayer(const std::string& id,
CustomLayerInitializeFunction,
CustomLayerRenderFunction,
CustomLayerDeinitializeFunction,
void* context);
CustomLayer(const CustomLayer&);
~CustomLayer();
// Invokes the embedder's initialize / render callbacks.
void initialize();
void render(const TransformState&) const;
private:
std::unique_ptr<StyleLayer> clone() const final;
// Custom layers carry no stylesheet-driven properties, so parsing/cascading
// are deliberate no-ops.
void parseLayout(const JSValue&) final {}
void parsePaints(const JSValue&) final {}
void cascade(const StyleCascadeParameters&) final {}
bool recalculate(const StyleCalculationParameters&) final;
std::unique_ptr<Bucket> createBucket(StyleBucketParameters&) const final;
// Embedder callbacks; any may legitimately be null.
CustomLayerInitializeFunction initializeFn = nullptr;
CustomLayerRenderFunction renderFn = nullptr;
CustomLayerDeinitializeFunction deinitializeFn = nullptr;
void* context = nullptr;
};
// RTTI-free type check used by StyleLayer::is<CustomLayer>().
template <>
inline bool StyleLayer::is<CustomLayer>() const {
return type == Type::Custom;
}
} // namespace mbgl
#endif
<file_sep>/src/mbgl/tile/geojson_tile.cpp
#include <mbgl/tile/geojson_tile.hpp>
#include <mbgl/storage/file_source.hpp>
#include <mapbox/geojsonvt.hpp>
namespace mbgl {
// Immutable feature holding its type, flattened geometry, and tag map.
GeoJSONTileFeature::GeoJSONTileFeature(FeatureType type_,
GeometryCollection&& geometries_,
GeoJSONTileFeature::Tags&& tags_)
: type(type_), geometries(std::move(geometries_)), tags(std::move(tags_)) {
}
FeatureType GeoJSONTileFeature::getType() const {
return type;
}
// Looks up a feature tag by key; an absent key yields an empty optional.
optional<Value> GeoJSONTileFeature::getValue(const std::string& key) const {
    const auto entry = tags.find(key);
    if (entry == tags.end()) {
        return optional<Value>();
    }
    return optional<Value>(entry->second);
}
// Returns a copy of the feature's flattened geometry.
GeometryCollection GeoJSONTileFeature::getGeometries() const {
return geometries;
}
// A single-layer container for converted GeoJSON features.
GeoJSONTileLayer::GeoJSONTileLayer(Features&& features_) : features(std::move(features_)) {
}
std::size_t GeoJSONTileLayer::featureCount() const {
return features.size();
}
util::ptr<const GeometryTileFeature> GeoJSONTileLayer::getFeature(std::size_t i) const {
return features[i];
}
GeoJSONTile::GeoJSONTile(std::shared_ptr<GeoJSONTileLayer> layer_) : layer(std::move(layer_)) {
}
util::ptr<GeometryTileLayer> GeoJSONTile::getLayer(const std::string&) const {
// We're ignoring the layer name because GeoJSON tiles only have one layer.
return layer;
}
// Converts a geojsonvt::Tile to a GeoJSONTile; the two have differing internal
// structures. An empty input tile yields a GeoJSONTile with a null layer.
std::unique_ptr<GeoJSONTile> convertTile(const mapbox::geojsonvt::Tile& tile) {
    std::shared_ptr<GeoJSONTileLayer> convertedLayer;
    if (tile) {
        std::vector<std::shared_ptr<const GeoJSONTileFeature>> convertedFeatures;
        for (auto& srcFeature : tile.features) {
            // Map the geojson-vt feature type onto the GL feature type,
            // skipping anything unrecognized.
            FeatureType kind = FeatureType::Unknown;
            if (srcFeature.type == mapbox::geojsonvt::TileFeatureType::Point) {
                kind = FeatureType::Point;
            } else if (srcFeature.type == mapbox::geojsonvt::TileFeatureType::LineString) {
                kind = FeatureType::LineString;
            } else if (srcFeature.type == mapbox::geojsonvt::TileFeatureType::Polygon) {
                kind = FeatureType::Polygon;
            }
            if (kind == FeatureType::Unknown) {
                continue;
            }
            // Flatten the geometry; GeoJSONVT distinguishes between a Points array
            // (GeoJSON Point, MultiPoint, LineString) and a Rings array
            // (MultiLineString, Polygon, MultiPolygon). Mapbox GL uses one
            // structure for both, with a single outer element for Points.
            GeometryCollection flattened;
            if (srcFeature.tileGeometry.is<mapbox::geojsonvt::TilePoints>()) {
                GeometryCoordinates path;
                for (auto& pt : srcFeature.tileGeometry.get<mapbox::geojsonvt::TilePoints>()) {
                    path.emplace_back(pt.x, pt.y);
                }
                flattened.emplace_back(std::move(path));
            } else if (srcFeature.tileGeometry.is<mapbox::geojsonvt::TileRings>()) {
                for (auto& ring : srcFeature.tileGeometry.get<mapbox::geojsonvt::TileRings>()) {
                    GeometryCoordinates path;
                    for (auto& pt : ring) {
                        path.emplace_back(pt.x, pt.y);
                    }
                    flattened.emplace_back(std::move(path));
                }
            }
            GeoJSONTileFeature::Tags featureTags{ srcFeature.tags.begin(), srcFeature.tags.end() };
            convertedFeatures.emplace_back(std::make_shared<GeoJSONTileFeature>(
                kind, std::move(flattened), std::move(featureTags)));
        }
        convertedLayer = std::make_unique<GeoJSONTileLayer>(std::move(convertedFeatures));
    }
    return std::make_unique<GeoJSONTile>(convertedLayer);
}
// Watches one tile of a (non-owned) GeoJSONVT index; the index pointer may be
// swapped later via setGeoJSONVT().
GeoJSONTileMonitor::GeoJSONTileMonitor(mapbox::geojsonvt::GeoJSONVT* geojsonvt_, const TileID& id)
: tileID(id), geojsonvt(geojsonvt_) {
}
GeoJSONTileMonitor::~GeoJSONTileMonitor() = default;
// A monitor can have its GeoJSONVT object swapped out (e.g. when loading a new GeoJSON file).
// In that case, we're sending new notifications to all observers.
void GeoJSONTileMonitor::setGeoJSONVT(mapbox::geojsonvt::GeoJSONVT* vt) {
// Don't duplicate notifications in case of nil changes.
if (geojsonvt != vt) {
geojsonvt = vt;
update();
}
}
// Re-converts this monitor's tile from the current index and notifies the
// registered callback. No-op while no index is set.
void GeoJSONTileMonitor::update() {
if (geojsonvt) {
auto tile = convertTile(geojsonvt->getTile(tileID.z, tileID.x, tileID.y));
callback(nullptr, std::move(tile), {}, {});
}
}
// Registers the callback and immediately pushes the current tile state.
// Returns nullptr: there is no outstanding file request to cancel.
std::unique_ptr<FileRequest>
GeoJSONTileMonitor::monitorTile(const GeometryTileMonitor::Callback& cb) {
callback = cb;
update();
return nullptr;
}
} // namespace mbgl
<file_sep>/src/mbgl/map/map_context.cpp
#include <mbgl/map/map_context.hpp>
#include <mbgl/map/map_data.hpp>
#include <mbgl/map/view.hpp>
#include <mbgl/platform/log.hpp>
#include <mbgl/renderer/painter.hpp>
#include <mbgl/storage/file_source.hpp>
#include <mbgl/storage/resource.hpp>
#include <mbgl/storage/response.hpp>
#include <mbgl/style/style.hpp>
#include <mbgl/style/style_layer.hpp>
#include <mbgl/sprite/sprite_atlas.hpp>
#include <mbgl/sprite/sprite_store.hpp>
#include <mbgl/gl/gl_object_store.hpp>
#include <mbgl/gl/texture_pool.hpp>
#include <mbgl/util/worker.hpp>
#include <mbgl/util/exception.hpp>
#include <mbgl/util/string.hpp>
#include <mbgl/util/mapbox.hpp>
#include <algorithm>
namespace mbgl {
// Runs on the map thread: builds MapData, wires the async update/invalidate
// tasks, and activates the view's GL context for this thread.
MapContext::MapContext(View& view_, FileSource& fileSource_, MapMode mode_, GLContextMode contextMode_, const float pixelRatio_)
: view(view_),
fileSource(fileSource_),
dataPtr(std::make_unique<MapData>(mode_, contextMode_, pixelRatio_)),
data(*dataPtr),
asyncUpdate([this] { update(); }),
asyncInvalidate([&view_] { view_.invalidate(); }),
texturePool(std::make_unique<gl::TexturePool>()) {
assert(util::ThreadContext::currentlyOn(util::ThreadType::Map));
view.activate();
}
MapContext::~MapContext() {
// Make sure we call cleanup() before deleting this object.
assert(!style);
}
// Tears down GL-owning members in dependency order, flushes abandoned GL
// handles, and releases the view's context. Must run before destruction.
void MapContext::cleanup() {
view.notify();
styleRequest = nullptr;
// Explicit resets currently necessary because these abandon resources that need to be
// cleaned up by glObjectStore.performCleanup();
style.reset();
painter.reset();
texturePool.reset();
dataPtr.reset();
glObjectStore.performCleanup();
view.deactivate();
}
// Parks the map thread: finishes pending GL work, releases the context, then
// blocks until another thread clears data.paused.
void MapContext::pause() {
MBGL_CHECK_ERROR(glFinish());
view.deactivate();
std::unique_lock<std::mutex> lockPause(data.mutexPause);
data.paused = true;
data.condPause.notify_all();
// Wait for the embedder to flip `paused` back to false before resuming.
data.condPause.wait(lockPause, [&]{ return !data.paused; });
view.activate();
asyncInvalidate.send();
}
// Records the latest transform, accumulates update flags, and schedules an
// asynchronous update() on the map thread.
void MapContext::triggerUpdate(const TransformState& state, const Update flags) {
transformState = state;
updateFlags |= flags;
asyncUpdate.send();
}
// Starts an asynchronous style load from `url`, replacing any style currently
// loading or loaded. No-op when the URL is unchanged.
void MapContext::setStyleURL(const std::string& url) {
if (styleURL == url) {
return;
}
// Cancel any in-flight style request before starting a new one.
styleRequest = nullptr;
styleURL = url;
styleJSON.clear();
style = std::make_unique<Style>(data, fileSource);
// Derive the base URL (everything up to and including the last '/') for
// resolving relative references inside the stylesheet.
const size_t pos = styleURL.rfind('/');
std::string base = "";
if (pos != std::string::npos) {
base = styleURL.substr(0, pos + 1);
}
styleRequest = fileSource.request(Resource::style(styleURL), [this, base](Response res) {
if (res.error) {
if (res.error->reason == Response::Error::Reason::NotFound &&
util::mapbox::isMapboxURL(styleURL)) {
Log::Error(Event::Setup, "style %s could not be found or is an incompatible legacy map or style", styleURL.c_str());
} else {
Log::Error(Event::Setup, "loading style failed: %s", res.error->message.c_str());
// NOTE(review): loading is only cleared on this generic error path,
// not on the not-found path above — confirm that is intentional.
data.loading = false;
}
} else if (res.notModified || res.noContent) {
return;
} else {
loadStyleJSON(*res.data, base);
}
});
}
// Loads a style directly from a JSON string; `base` is used to resolve
// relative URLs inside the stylesheet. No-op when the JSON is unchanged.
void MapContext::setStyleJSON(const std::string& json, const std::string& base) {
if (styleJSON == json) {
return;
}
styleURL.clear();
styleJSON.clear();
style = std::make_unique<Style>(data, fileSource);
loadStyleJSON(json, base);
}
// Parses the stylesheet into the freshly created Style object and kicks off a
// full update cycle. Must run on the map thread.
void MapContext::loadStyleJSON(const std::string& json, const std::string& base) {
assert(util::ThreadContext::currentlyOn(util::ThreadType::Map));
style->setJSON(json, base);
style->setObserver(this);
styleJSON = json;
// force style cascade, causing all pending transitions to complete.
style->cascade();
// set loading here so we don't get a false loaded event as soon as map is
// created but before a style is loaded
data.loading = true;
updateFlags |= Update::DefaultTransition | Update::Classes | Update::Zoom | Update::Annotations;
asyncUpdate.send();
}
// Applies accumulated update flags: annotation refresh, cascade, recalculation,
// then either schedules a repaint (continuous mode) or renders synchronously
// (still mode, once the style is fully loaded). Clears the flags afterwards.
void MapContext::update() {
assert(util::ThreadContext::currentlyOn(util::ThreadType::Map));
if (!style) {
updateFlags = Update::Nothing;
}
// In still mode there is nothing to do until a render callback is pending.
if (updateFlags == Update::Nothing || (data.mode == MapMode::Still && !callback)) {
return;
}
data.setAnimationTime(Clock::now());
if (style->loaded && updateFlags & Update::Annotations) {
data.getAnnotationManager()->updateStyle(*style);
// Annotation changes require a re-cascade as well.
updateFlags |= Update::Classes;
}
if (updateFlags & Update::Classes) {
style->cascade();
}
if (updateFlags & Update::Classes || updateFlags & Update::Zoom) {
style->recalculate(transformState.getZoom());
}
style->update(transformState, *texturePool);
if (data.mode == MapMode::Continuous) {
asyncInvalidate.send();
} else if (callback && style->isLoaded()) {
renderSync(transformState, frameData);
}
updateFlags = Update::Nothing;
}
// Requests a one-shot still-image render. The callback receives either the
// image or an exception; preconditions (still mode, no render in flight, style
// present and error-free) are reported through the callback, not thrown.
void MapContext::renderStill(const TransformState& state, const FrameData& frame, Map::StillImageCallback fn) {
if (!fn) {
Log::Error(Event::General, "StillImageCallback not set");
return;
}
if (data.mode != MapMode::Still) {
fn(std::make_exception_ptr(util::MisuseException("Map is not in still image render mode")), {});
return;
}
if (callback) {
fn(std::make_exception_ptr(util::MisuseException("Map is currently rendering an image")), {});
return;
}
if (!style) {
fn(std::make_exception_ptr(util::MisuseException("Map doesn't have a style")), {});
return;
}
if (style->getLastError()) {
fn(style->getLastError(), {});
return;
}
// Stash the callback and frame state; the actual render happens in update().
callback = fn;
transformState = state;
frameData = frame;
updateFlags |= Update::RenderStill;
asyncUpdate.send();
}
// Renders one frame synchronously on the map thread. In still mode this also
// delivers the pending image callback. Returns whether the map is fully loaded.
bool MapContext::renderSync(const TransformState& state, const FrameData& frame) {
assert(util::ThreadContext::currentlyOn(util::ThreadType::Map));
// Style was not loaded yet.
if (!style) {
return false;
}
view.beforeRender();
transformState = state;
// Lazily create the painter on first render.
if (!painter) painter = std::make_unique<Painter>(data, transformState, glObjectStore);
painter->render(*style, frame, data.getAnnotationManager()->getSpriteAtlas());
if (data.mode == MapMode::Still) {
callback(nullptr, view.readStillImage());
callback = nullptr;
}
// Cleanup OpenGL objects that we abandoned since the last render call.
glObjectStore.performCleanup();
view.afterRender();
// Keep the update loop alive while transitions or animations are pending.
if (style->hasTransitions()) {
updateFlags |= Update::Zoom;
asyncUpdate.send();
} else if (painter->needsAnimation()) {
updateFlags |= Update::Repaint;
asyncUpdate.send();
}
return isLoaded();
}
// NOTE(review): dereferences `style` unchecked — callers (renderSync) verify a
// style exists first; confirm no other call sites can reach this with no style.
bool MapContext::isLoaded() const {
return style->isLoaded();
}
// Registers a sprite image usable by point annotations. Map thread only.
void MapContext::addAnnotationIcon(const std::string& name, std::shared_ptr<const SpriteImage> sprite) {
assert(util::ThreadContext::currentlyOn(util::ThreadType::Map));
data.getAnnotationManager()->addIcon(name, sprite);
}
// Removes a previously registered annotation icon. Map thread only.
void MapContext::removeAnnotationIcon(const std::string& name) {
assert(util::ThreadContext::currentlyOn(util::ThreadType::Map));
data.getAnnotationManager()->removeIcon(name);
}
// Vertical offset (in pixels) from an icon's center to its top edge.
double MapContext::getTopOffsetPixelsForAnnotationIcon(const std::string& name) {
assert(util::ThreadContext::currentlyOn(util::ThreadType::Map));
return data.getAnnotationManager()->getTopOffsetPixelsForIcon(name);
}
// Adds a runtime style layer (optionally before `after`) and schedules a
// re-cascade.
void MapContext::addLayer(std::unique_ptr<StyleLayer> layer, optional<std::string> after) {
style->addLayer(std::move(layer), after);
updateFlags |= Update::Classes;
asyncUpdate.send();
}
// Removes a runtime style layer by ID and schedules a re-cascade.
void MapContext::removeLayer(const std::string& id) {
style->removeLayer(id);
updateFlags |= Update::Classes;
asyncUpdate.send();
}
// Resizes the per-source tile cache; the new size is remembered even when no
// style is loaded yet. Map thread only.
void MapContext::setSourceTileCacheSize(size_t size) {
assert(util::ThreadContext::currentlyOn(util::ThreadType::Map));
if (size != sourceCacheSize) {
sourceCacheSize = size;
if (!style) return;
style->setSourceTileCacheSize(size);
asyncInvalidate.send();
}
}
// Drops cached tiles in response to memory pressure. Map thread only.
void MapContext::onLowMemory() {
assert(util::ThreadContext::currentlyOn(util::ThreadType::Map));
if (!style) return;
style->onLowMemory();
asyncInvalidate.send();
}
// Style observer hook: a resource finished loading — schedule a repaint.
void MapContext::onResourceLoaded() {
updateFlags |= Update::Repaint;
asyncUpdate.send();
}
// Style observer hook: a resource failed. In still mode the pending image
// callback is failed with the error.
void MapContext::onResourceError(std::exception_ptr error) {
if (data.mode == MapMode::Still && callback) {
callback(error, {});
callback = nullptr;
}
}
// Logs diagnostic state for this context and its style (if any).
void MapContext::dumpDebugLogs() const {
Log::Info(Event::General, "--------------------------------------------------------------------------------");
Log::Info(Event::General, "MapContext::styleURL: %s", styleURL.c_str());
if (style) {
style->dumpDebugLogs();
} else {
Log::Info(Event::General, "no style loaded");
}
Log::Info(Event::General, "--------------------------------------------------------------------------------");
}
} // namespace mbgl
<file_sep>/src/mbgl/annotation/annotation_tile.cpp
#include <mbgl/annotation/annotation_tile.hpp>
#include <mbgl/util/constants.hpp>
#include <mbgl/map/map_data.hpp>
#include <mbgl/storage/file_source.hpp>
#include <utility>
namespace mbgl {
// Immutable annotation feature: type, geometry, and string key/value properties.
AnnotationTileFeature::AnnotationTileFeature(FeatureType type_, GeometryCollection geometries_,
std::unordered_map<std::string, std::string> properties_)
: type(type_),
properties(std::move(properties_)),
geometries(std::move(geometries_)) {}
// Looks up a feature property by key; an absent key yields an empty optional.
optional<Value> AnnotationTileFeature::getValue(const std::string& key) const {
    const auto entry = properties.find(key);
    if (entry == properties.end()) {
        return optional<Value>();
    }
    return optional<Value>(entry->second);
}
// Returns the named layer, or nullptr when this tile has no such layer.
util::ptr<GeometryTileLayer> AnnotationTile::getLayer(const std::string& name) const {
    const auto entry = layers.find(name);
    if (entry == layers.end()) {
        return nullptr;
    }
    return entry->second;
}
// Watches one tile's worth of annotations; registration with the annotation
// manager happens in monitorTile(), deregistration in the destructor.
AnnotationTileMonitor::AnnotationTileMonitor(const TileID& tileID_, MapData& data_)
: tileID(tileID_),
data(data_) {
}
AnnotationTileMonitor::~AnnotationTileMonitor() {
data.getAnnotationManager()->removeTileMonitor(*this);
}
// Registers the callback and this monitor with the annotation manager.
// Returns nullptr: there is no outstanding file request to cancel.
std::unique_ptr<FileRequest> AnnotationTileMonitor::monitorTile(const GeometryTileMonitor::Callback& callback_) {
callback = callback_;
data.getAnnotationManager()->addTileMonitor(*this);
return nullptr;
}
// Pushes a regenerated tile to the registered callback.
void AnnotationTileMonitor::update(std::unique_ptr<GeometryTile> tile) {
callback(nullptr, std::move(tile), {}, {});
}
} // namespace mbgl
<file_sep>/src/mbgl/util/get_geometries.cpp
#include <mbgl/util/get_geometries.hpp>
#include <mbgl/util/constants.hpp>
#include <cmath>
namespace mbgl {
// Copies a feature's geometry, rescaling every vertex from the feature's own
// extent to the GL tile extent (util::EXTENT), rounding to integers.
GeometryCollection getGeometries(const GeometryTileFeature& feature) {
    const float scale = float(util::EXTENT) / feature.getExtent();
    GeometryCollection scaled = feature.getGeometries();
    for (auto& ring : scaled) {
        for (auto& vertex : ring) {
            vertex.x = ::round(vertex.x * scale);
            vertex.y = ::round(vertex.y * scale);
        }
    }
    return scaled;
}
} // namespace mbgl
<file_sep>/platform/default/webp_reader.cpp
#include <mbgl/util/image.hpp>
#include <mbgl/util/premultiply.hpp>
#include <mbgl/platform/log.hpp>
extern "C"
{
#include <webp/decode.h>
}
namespace mbgl {
// Decodes a WebP buffer into a premultiplied RGBA image.
// Throws std::runtime_error on a malformed header or decode failure.
PremultipliedImage decodeWebP(const uint8_t* data, size_t size) {
int width = 0, height = 0;
// WebPGetInfo returns 0 on failure; on success it fills width/height.
if (WebPGetInfo(data, size, &width, &height) == 0) {
throw std::runtime_error("failed to retrieve WebP basic header information");
}
// WebPDecodeRGBA allocates the pixel buffer; ownership is taken here.
std::unique_ptr<uint8_t[]> webp(WebPDecodeRGBA(data, size, &width, &height));
if (!webp) {
throw std::runtime_error("failed to decode WebP data");
}
UnassociatedImage image { size_t(width), size_t(height), std::move(webp) };
return util::premultiply(std::move(image));
}
} // namespace mbgl
<file_sep>/test/util/timer.cpp
#include <mbgl/util/chrono.hpp>
#include <mbgl/util/timer.hpp>
#include <mbgl/util/run_loop.hpp>
#include <mbgl/util/chrono.hpp>
#include <memory>
#include "../fixtures/util.hpp"
using namespace mbgl::util;
TEST(Timer, Basic) {
RunLoop loop;
Timer timer;
auto callback = [&loop] { loop.stop(); };
auto interval = mbgl::Milliseconds(300);
auto expectedTotalTime = interval;
auto first = mbgl::Clock::now();
timer.start(interval, mbgl::Duration::zero(), callback);
loop.run();
auto totalTime = std::chrono::duration_cast<mbgl::Milliseconds>(mbgl::Clock::now() - first);
// These are not high precision timers. Especially libuv uses
// cached time from the beginning of of the main loop iteration
// and it is very prone to fire earlier, which is, odd.
EXPECT_GE(totalTime, expectedTotalTime * 0.8);
EXPECT_LE(totalTime, expectedTotalTime * 1.2);
}
TEST(Timer, Repeat) {
RunLoop loop;
Timer timer;
unsigned count = 10;
auto callback = [&] {
if (!--count) {
loop.stop();
}
};
auto interval = mbgl::Milliseconds(50);
auto expectedTotalTime = interval * count;
auto first = mbgl::Clock::now();
timer.start(interval, interval, callback);
loop.run();
auto totalTime = std::chrono::duration_cast<mbgl::Milliseconds>(mbgl::Clock::now() - first);
EXPECT_GE(totalTime, expectedTotalTime * 0.8);
EXPECT_LE(totalTime, expectedTotalTime * 1.2);
}
TEST(Timer, Stop) {
RunLoop loop;
Timer timer1;
Timer timer2;
auto interval1 = mbgl::Milliseconds(50);
auto interval2 = mbgl::Milliseconds(250);
auto expectedTotalTime = interval2;
int count = 0;
auto callback1 = [&] {
++count;
timer1.stop();
};
auto callback2 = [&] {
++count;
loop.stop();
};
auto first = mbgl::Clock::now();
timer1.start(interval1, interval1, callback1);
timer2.start(interval2, mbgl::Duration::zero(), callback2);
loop.run();
auto totalTime = std::chrono::duration_cast<mbgl::Milliseconds>(mbgl::Clock::now() - first);
EXPECT_EQ(count, 2);
EXPECT_GE(totalTime, expectedTotalTime * 0.8);
EXPECT_LE(totalTime, expectedTotalTime * 1.2);
}
TEST(Timer, DestroyShouldStop) {
RunLoop loop;
auto timer1 = std::make_unique<Timer>();
Timer timer2;
auto interval1 = mbgl::Milliseconds(50);
auto interval2 = mbgl::Milliseconds(250);
auto expectedTotalTime = interval2;
int count = 0;
auto callback1 = [&] {
++count;
timer1.reset();
};
auto callback2 = [&] {
++count;
loop.stop();
};
auto first = mbgl::Clock::now();
timer1->start(interval1, interval1, callback1);
timer2.start(interval2, mbgl::Duration::zero(), callback2);
loop.run();
auto totalTime = std::chrono::duration_cast<mbgl::Milliseconds>(mbgl::Clock::now() - first);
EXPECT_EQ(count, 2);
EXPECT_GE(totalTime, expectedTotalTime * 0.8);
EXPECT_LE(totalTime, expectedTotalTime * 1.2);
}
// Verify that calling start() on an already-scheduled timer replaces the
// previous schedule: callback1 re-arms the same timer with callback2, so the
// total elapsed time is interval1 followed by interval2.
TEST(Timer, StartOverrides) {
    RunLoop loop;

    Timer timer;

    auto interval1 = mbgl::Milliseconds(50);
    auto interval2 = mbgl::Milliseconds(250);

    // Two sequential one-shot schedules on the same timer.
    auto expectedTotalTime = interval1 + interval2;

    int count = 0; // callback invocations

    // Second schedule: ends the test.
    auto callback2 = [&] {
        ++count;
        loop.stop();
    };

    // First schedule: re-arms the same timer with a new interval/callback.
    auto callback1 = [&] {
        ++count;
        timer.start(interval2, mbgl::Duration::zero(), callback2);
    };

    auto first = mbgl::Clock::now();

    timer.start(interval1, mbgl::Duration::zero(), callback1);

    loop.run();

    auto totalTime = std::chrono::duration_cast<mbgl::Milliseconds>(mbgl::Clock::now() - first);

    EXPECT_EQ(count, 2);
    // 20% tolerance for scheduling jitter.
    EXPECT_GE(totalTime, expectedTotalTime * 0.8);
    EXPECT_LE(totalTime, expectedTotalTime * 1.2);
}
// Stopping a timer that was never started must be a harmless no-op.
TEST(Timer, CanStopNonStartedTimer) {
    RunLoop loop;

    Timer timer;
    timer.stop(); // must not crash or hang
}
<file_sep>/test/util/run_loop.cpp
#include <mbgl/util/run_loop.hpp>
#include <mbgl/util/timer.hpp>
#include "../fixtures/util.hpp"
using namespace mbgl::util;
// A zero-delay timer callback can stop the loop; run() must then return.
TEST(RunLoop, Stop) {
    RunLoop loop(RunLoop::Type::New);

    Timer timer;
    timer.start(mbgl::Duration::zero(), mbgl::Duration::zero(), [&] {
        loop.stop();
    });

    loop.run();
}
// Calling stop() repeatedly on an already-stopping loop must be safe.
TEST(RunLoop, MultipleStop) {
    RunLoop loop(RunLoop::Type::New);

    Timer timer;
    timer.start(mbgl::Duration::zero(), mbgl::Duration::zero(), [&] {
        loop.stop();
        loop.stop();
        loop.stop();
        loop.stop();
    });

    loop.run();
}
// A loop that was stopped can be run() again; work scheduled between the two
// runs (the restarted timer) must execute during the second run.
TEST(RunLoop, MultipleRun) {
    RunLoop loop(RunLoop::Type::New);

    Timer timer;
    timer.start(mbgl::Duration::zero(), mbgl::Duration::zero(), [&] {
        loop.stop();
    });

    loop.run(); // first run: fires the first timeout and stops

    bool secondTimeout = false;
    timer.start(mbgl::Duration::zero(), mbgl::Duration::zero(), [&] {
        secondTimeout = true;
        loop.stop();
    });

    loop.run(); // second run: must process the re-armed timer

    EXPECT_TRUE(secondTimeout);
}
<file_sep>/docker/linux/Dockerfile
FROM mapbox/gl-native:travis

# Install compiler toolchain and debugging tools.
# NOTE(review): no `apt-get update` before install — this relies on the base
# image's package lists being current; confirm that is intentional.
RUN apt-get -y install gdb g++-4.9 gcc-4.9 libllvm3.4

# Run everything below as an unprivileged user.
RUN useradd -ms /bin/bash mapbox
USER mapbox
ENV HOME /home/mapbox
WORKDIR /home/mapbox

# Node: install nvm and Node 0.10 for the Node bindings/tests.
RUN git clone https://github.com/creationix/nvm.git ~/.nvm && \
    . ~/.nvm/nvm.sh && \
    NVM_DIR=~/.nvm nvm install 0.10
<file_sep>/docker/linux/test.sh
#!/usr/bin/env bash

# Linux CI test driver: configures the environment, starts a virtual X server,
# then delegates to the platform install and run scripts.

# NOTE(review): fail-fast options are commented out, so individual command
# failures will NOT abort this script — confirm this is intended.
# set -e
# set -o pipefail

export FLAVOR=linux
export CXX=g++-4.9
export BUILDTYPE=Release

# Node: select the version installed in the Docker image.
. ~/.nvm/nvm.sh
nvm use 0.10

# Xvfb: headless X display (:99) with GLX so GL tests can create contexts.
start-stop-daemon --start --pidfile ~/xvfb.pid --make-pidfile --background --exec /usr/bin/Xvfb -- :99 -screen 0 1024x768x24 -ac +extension GLX +render -noreset

cd build

# before_install
source ./scripts/travis_helper.sh

# install
./platform/${FLAVOR}/scripts/install.sh

# script
./platform/${FLAVOR}/scripts/run.sh
<file_sep>/test/storage/storage.hpp
#ifndef MBGL_TEST_STORAGE_STORAGE
#define MBGL_TEST_STORAGE_STORAGE
#include "../fixtures/util.hpp"
#include <mbgl/storage/response.hpp>
#include <memory>
// Test fixture that shares a single HTTP test server across the whole storage
// test suite (started/stopped once per suite, see storage.cpp).
class Storage : public testing::Test {
public:
    static void SetUpTestCase();    // spawns the test server once per suite
    static void TearDownTestCase(); // shuts the server down

protected:
    static std::unique_ptr<mbgl::test::Server> server; // shared by all tests
};
#endif
<file_sep>/include/mbgl/storage/file_source.hpp
#ifndef MBGL_STORAGE_FILE_SOURCE
#define MBGL_STORAGE_FILE_SOURCE
#include <mbgl/storage/response.hpp>
#include <mbgl/storage/resource.hpp>
#include <mbgl/util/noncopyable.hpp>
#include <functional>
#include <memory>
namespace mbgl {
// Handle for an in-flight request; destroying it cancels the request.
class FileRequest : private util::noncopyable {
public:
    virtual ~FileRequest() = default;
};

// Abstract interface for asynchronously loading resources.
class FileSource : private util::noncopyable {
public:
    virtual ~FileSource() = default;

    using Callback = std::function<void (Response)>;

    // Request a resource. The callback will be called asynchronously, in the same
    // thread as the request was made. This thread must have an active RunLoop. The
    // request may be cancelled before completion by releasing the returned FileRequest.
    // If the request is cancelled before the callback is executed, the callback will
    // not be executed.
    virtual std::unique_ptr<FileRequest> request(const Resource&, Callback) = 0;
};
} // namespace mbgl
#endif
<file_sep>/src/mbgl/storage/http_request_base.cpp
#include <mbgl/storage/http_request_base.hpp>
#include <mbgl/util/http_header.hpp>
#include <mbgl/util/chrono.hpp>
namespace mbgl {
// Derive an expiration timestamp from a Cache-Control header value.
// Returns an empty optional when the header is absent or carries no max-age.
optional<SystemTimePoint> HTTPRequestBase::parseCacheControl(const char *value) {
    if (value) {
        const auto parsed = http::CacheControl::parse(value);
        if (parsed.maxAge) {
            // Round trip through time_t to truncate fractional seconds.
            const auto expiration = SystemClock::now() + std::chrono::seconds(*parsed.maxAge);
            return SystemClock::from_time_t(SystemClock::to_time_t(expiration));
        }
    }
    return {};
}
} // namespace mbgl
<file_sep>/src/mbgl/layer/raster_layer.cpp
#include <mbgl/layer/raster_layer.hpp>
#include <mbgl/renderer/bucket.hpp>
namespace mbgl {

// Deep-copies this layer, e.g. for taking a style snapshot.
std::unique_ptr<StyleLayer> RasterLayer::clone() const {
    return std::make_unique<RasterLayer>(*this);
}

// Reads the raster-* paint properties from the layer's style JSON.
void RasterLayer::parsePaints(const JSValue& layer) {
    paint.opacity.parse("raster-opacity", layer);
    paint.hueRotate.parse("raster-hue-rotate", layer);
    paint.brightnessMin.parse("raster-brightness-min", layer);
    paint.brightnessMax.parse("raster-brightness-max", layer);
    paint.saturation.parse("raster-saturation", layer);
    paint.contrast.parse("raster-contrast", layer);
    paint.fadeDuration.parse("raster-fade-duration", layer);
}

// Applies class-based cascading to every paint property.
void RasterLayer::cascade(const StyleCascadeParameters& parameters) {
    paint.opacity.cascade(parameters);
    paint.hueRotate.cascade(parameters);
    paint.brightnessMin.cascade(parameters);
    paint.brightnessMax.cascade(parameters);
    paint.saturation.cascade(parameters);
    paint.contrast.cascade(parameters);
    paint.fadeDuration.cascade(parameters);
}

// Recalculates all paint properties for the current zoom/time.
// Returns true while any property is still mid-transition (needs repaint).
bool RasterLayer::recalculate(const StyleCalculationParameters& parameters) {
    bool hasTransitions = false;

    hasTransitions |= paint.opacity.calculate(parameters);
    hasTransitions |= paint.hueRotate.calculate(parameters);
    hasTransitions |= paint.brightnessMin.calculate(parameters);
    hasTransitions |= paint.brightnessMax.calculate(parameters);
    hasTransitions |= paint.saturation.calculate(parameters);
    hasTransitions |= paint.contrast.calculate(parameters);
    hasTransitions |= paint.fadeDuration.calculate(parameters);

    // Fully transparent raster layers skip rendering entirely.
    passes = paint.opacity > 0 ? RenderPass::Translucent : RenderPass::None;

    return hasTransitions;
}

// Raster layers do not build buckets from vector tile data.
std::unique_ptr<Bucket> RasterLayer::createBucket(StyleBucketParameters&) const {
    return nullptr;
}

} // namespace mbgl
<file_sep>/src/mbgl/style/property_parsing.cpp
#include <mbgl/style/property_parsing.hpp>
#include <mbgl/style/property_transition.hpp>
#include <mbgl/style/function.hpp>
#include <mbgl/platform/log.hpp>
#include <csscolorparser/csscolorparser.hpp>
#include <vector>
namespace mbgl {
// Parse a style value that must be a plain JSON boolean.
template <>
optional<bool> parseProperty(const char* name, const JSValue& value) {
    if (value.IsBool()) {
        return value.GetBool();
    }
    Log::Warning(Event::ParseStyle, "value of '%s' must be a boolean", name);
    return {};
}
// Parse a style value that must be a JSON number (stored as float).
template <>
optional<float> parseProperty(const char* name, const JSValue& value) {
    if (value.IsNumber()) {
        return value.GetDouble();
    }
    Log::Warning(Event::ParseStyle, "value of '%s' must be a number, or a number function", name);
    return {};
}
// Parse a style value that must be a JSON string.
// "text-font" is special-cased: an array of font names is joined into a
// single comma-separated string.
template <>
optional<std::string> parseProperty(const char* name, const JSValue& value) {
    if (std::string { "text-font" } == name) {
        if (!value.IsArray()) {
            Log::Warning(Event::ParseStyle, "value of '%s' must be an array of strings", name);
            return {};
        }
        std::string joined = "";
        for (rapidjson::SizeType i = 0; i < value.Size(); ++i) {
            const JSValue& entry = value[i];
            if (!entry.IsString()) {
                Log::Warning(Event::ParseStyle, "text-font members must be strings");
                return {};
            }
            // Separate entries with commas (no trailing separator).
            if (i > 0) {
                joined += ",";
            }
            joined += entry.GetString();
        }
        return joined;
    }

    if (!value.IsString()) {
        Log::Warning(Event::ParseStyle, "value of '%s' must be a string", name);
        return {};
    }
    return std::string { value.GetString(), value.GetStringLength() };
}
// Parse a CSS color string into a premultiplied-alpha Color.
template <>
optional<Color> parseProperty(const char* name, const JSValue& value) {
    if (!value.IsString()) {
        Log::Warning(Event::ParseStyle, "value of '%s' must be a string", name);
        return {};
    }
    CSSColorParser::Color css_color = CSSColorParser::parse({ value.GetString(), value.GetStringLength() });

    // Premultiply the color.
    // `factor` folds two steps into one multiplier: normalizing the RGB
    // channels from 0-255 to 0-1 and scaling by alpha ((r/255)*a == r*(a/255)).
    // NOTE(review): assumes css_color.a is already in 0-1 while r/g/b are
    // 0-255 — confirm against the csscolorparser definition.
    const float factor = css_color.a / 255;
    return Color{{(float)css_color.r * factor,
                  (float)css_color.g * factor,
                  (float)css_color.b * factor,
                  css_color.a}};
}
// Enum-valued properties: each specialization accepts a JSON string and
// converts it through the corresponding *TypeClass string-to-enum mapper.
// All follow the identical shape; only the target type differs.
template <>
optional<TranslateAnchorType> parseProperty(const char* name, const JSValue& value) {
    if (!value.IsString()) {
        Log::Warning(Event::ParseStyle, "value of '%s' must be a string", name);
        return {};
    }
    return { TranslateAnchorTypeClass({ value.GetString(), value.GetStringLength() }) };
}

template <>
optional<RotateAnchorType> parseProperty<RotateAnchorType>(const char* name, const JSValue& value) {
    if (!value.IsString()) {
        Log::Warning(Event::ParseStyle, "value of '%s' must be a string", name);
        return {};
    }
    return { RotateAnchorTypeClass({ value.GetString(), value.GetStringLength() }) };
}

template <>
optional<CapType> parseProperty<CapType>(const char* name, const JSValue& value) {
    if (!value.IsString()) {
        Log::Warning(Event::ParseStyle, "value of '%s' must be a string", name);
        return {};
    }
    return { CapTypeClass({ value.GetString(), value.GetStringLength() }) };
}

template <>
optional<JoinType> parseProperty<JoinType>(const char* name, const JSValue& value) {
    if (!value.IsString()) {
        Log::Warning(Event::ParseStyle, "value of '%s' must be a string", name);
        return {};
    }
    return { JoinTypeClass({ value.GetString(), value.GetStringLength() }) };
}

template <>
optional<PlacementType> parseProperty<PlacementType>(const char* name, const JSValue& value) {
    if (!value.IsString()) {
        Log::Warning(Event::ParseStyle, "value of '%s' must be a string", name);
        return {};
    }
    return { PlacementTypeClass({ value.GetString(), value.GetStringLength() }) };
}

template <>
optional<TextAnchorType> parseProperty<TextAnchorType>(const char* name, const JSValue& value) {
    if (!value.IsString()) {
        Log::Warning(Event::ParseStyle, "value of '%s' must be a string", name);
        return {};
    }
    return { TextAnchorTypeClass({ value.GetString(), value.GetStringLength() }) };
}

template <>
optional<TextJustifyType> parseProperty<TextJustifyType>(const char* name, const JSValue& value) {
    if (!value.IsString()) {
        Log::Warning(Event::ParseStyle, "value of '%s' must be a string", name);
        return {};
    }
    return { TextJustifyTypeClass({ value.GetString(), value.GetStringLength() }) };
}

template <>
optional<TextTransformType> parseProperty<TextTransformType>(const char* name, const JSValue& value) {
    if (!value.IsString()) {
        Log::Warning(Event::ParseStyle, "value of '%s' must be a string", name);
        return {};
    }
    return { TextTransformTypeClass({ value.GetString(), value.GetStringLength() }) };
}

template <>
optional<RotationAlignmentType> parseProperty<RotationAlignmentType>(const char* name, const JSValue& value) {
    if (!value.IsString()) {
        Log::Warning(Event::ParseStyle, "value of '%s' must be a string", name);
        return {};
    }
    return { RotationAlignmentTypeClass({ value.GetString(), value.GetStringLength() }) };
}
// Parse a two-element numeric JSON array (e.g. an offset or translation).
template <>
optional<std::array<float, 2>> parseProperty(const char* name, const JSValue& value) {
    const bool wellFormed = value.IsArray() && value.Size() == 2 &&
                            value[rapidjson::SizeType(0)].IsNumber() &&
                            value[rapidjson::SizeType(1)].IsNumber();
    if (!wellFormed) {
        Log::Warning(Event::ParseStyle, "value of '%s' must be an array of two numbers", name);
        return {};
    }
    const float first = value[rapidjson::SizeType(0)].GetDouble();
    const float second = value[rapidjson::SizeType(1)].GetDouble();
    return { {{ first, second }} };
}
// Parse a JSON array of numbers into a float vector.
template <>
optional<std::vector<float>> parseProperty(const char* name, const JSValue& value) {
    if (!value.IsArray()) {
        Log::Warning(Event::ParseStyle, "value of '%s' must be an array of numbers", name);
        return {};
    }
    std::vector<float> numbers;
    numbers.reserve(value.Size());
    for (rapidjson::SizeType i = 0; i < value.Size(); ++i) {
        const JSValue& element = value[i];
        if (!element.IsNumber()) {
            Log::Warning(Event::ParseStyle, "value of '%s' must be an array of numbers", name);
            return {};
        }
        numbers.push_back(element.GetDouble());
    }
    return numbers;
}
// Parse a transition specification: { "duration": ms, "delay": ms }.
template <>
optional<PropertyTransition> parseProperty(const char *, const JSValue& value) {
    PropertyTransition transition;
    if (value.IsObject()) {
        bool parsed = false;
        if (value.HasMember("duration") && value["duration"].IsNumber()) {
            transition.duration.emplace(Milliseconds(value["duration"].GetUint()));
            parsed = true;
        }
        if (value.HasMember("delay") && value["delay"].IsNumber()) {
            transition.delay.emplace(Milliseconds(value["delay"].GetUint()));
            parsed = true;
        }
        // An object carrying neither a numeric "duration" nor "delay" counts
        // as a parse failure.
        if (!parsed) {
            return {};
        }
    }
    // Note: non-object values fall through and yield a default-constructed
    // transition (no duration, no delay) rather than a failure.
    return transition;
}
// --- Function ---
// Parse the "stops" array of a zoom function: a list of [zoom, value] pairs.
// Logs a warning and returns an empty optional on any malformed stop.
template <typename T>
optional<std::vector<std::pair<float, T>>> parseStops(const char* name, const JSValue& value) {
    if (!value.IsArray()) {
        Log::Warning(Event::ParseStyle, "stops function must specify a stops array");
        return {};
    }

    std::vector<std::pair<float, T>> stops;

    for (rapidjson::SizeType i = 0; i < value.Size(); ++i) {
        const JSValue& stop = value[i];

        // Each stop must itself be a [zoom, value] array. (The previous
        // warning text, "function argument must be a numeric value", did not
        // describe this check.)
        if (!stop.IsArray()) {
            Log::Warning(Event::ParseStyle, "function stop must be an array");
            return {};
        }

        if (stop.Size() != 2) {
            Log::Warning(Event::ParseStyle, "stop must have zoom level and value specification");
            return {};
        }

        const JSValue& z = stop[rapidjson::SizeType(0)];
        if (!z.IsNumber()) {
            Log::Warning(Event::ParseStyle, "zoom level in stop must be a number");
            return {};
        }

        // Delegate value parsing to the typed parseProperty specialization.
        optional<T> v = parseProperty<T>(name, stop[rapidjson::SizeType(1)]);
        if (!v) {
            return {};
        }

        stops.emplace_back(z.GetDouble(), *v);
    }

    return stops;
}
// Parse either a constant value or a zoom function ({ "base": b, "stops": [...] }).
template <typename T>
optional<Function<T>> parseFunction(const char* name, const JSValue& value) {
    // Non-object values are treated as constants wrapped in a Function.
    if (!value.IsObject()) {
        auto constant = parseProperty<T>(name, value);
        if (!constant) {
            return {};
        }
        return { Function<T>(*constant) };
    }

    // An object without "stops" is not a valid function specification.
    if (!value.HasMember("stops")) {
        Log::Warning(Event::ParseStyle, "function must specify a function type");
        return {};
    }

    // "base" controls the interpolation exponent; defaults to linear (1.0).
    float base = 1.0f;

    if (value.HasMember("base")) {
        const JSValue& value_base = value["base"];

        if (!value_base.IsNumber()) {
            Log::Warning(Event::ParseStyle, "base must be numeric");
            return {};
        }

        base = value_base.GetDouble();
    }

    auto stops = parseStops<T>(name, value["stops"]);

    if (!stops) {
        return {};
    }

    return { Function<T>(*stops, base) };
}
// Function-valued specializations: each simply forwards to parseFunction<T>,
// which handles both the constant and the stops-function representations.
template <> optional<Function<std::array<float, 2>>> parseProperty(const char* name, const JSValue& value) {
    return parseFunction<std::array<float, 2>>(name, value);
}

template <> optional<Function<std::string>> parseProperty(const char* name, const JSValue& value) {
    return parseFunction<std::string>(name, value);
}

template <> optional<Function<TranslateAnchorType>> parseProperty(const char* name, const JSValue& value) {
    return parseFunction<TranslateAnchorType>(name, value);
}

template <> optional<Function<RotateAnchorType>> parseProperty(const char* name, const JSValue& value) {
    return parseFunction<RotateAnchorType>(name, value);
}

template <> optional<Function<CapType>> parseProperty(const char* name, const JSValue& value) {
    return parseFunction<CapType>(name, value);
}

template <> optional<Function<JoinType>> parseProperty(const char* name, const JSValue& value) {
    return parseFunction<JoinType>(name, value);
}

template <> optional<Function<PlacementType>> parseProperty(const char* name, const JSValue& value) {
    return parseFunction<PlacementType>(name, value);
}

template <> optional<Function<TextAnchorType>> parseProperty(const char* name, const JSValue& value) {
    return parseFunction<TextAnchorType>(name, value);
}

template <> optional<Function<TextJustifyType>> parseProperty(const char* name, const JSValue& value) {
    return parseFunction<TextJustifyType>(name, value);
}

template <> optional<Function<TextTransformType>> parseProperty(const char* name, const JSValue& value) {
    return parseFunction<TextTransformType>(name, value);
}

template <> optional<Function<RotationAlignmentType>> parseProperty(const char* name, const JSValue& value) {
    return parseFunction<RotationAlignmentType>(name, value);
}

template <> optional<Function<bool>> parseProperty(const char* name, const JSValue& value) {
    return parseFunction<bool>(name, value);
}

template<> optional<Function<float>> parseProperty(const char* name, const JSValue& value) {
    return parseFunction<float>(name, value);
}

template<> optional<Function<Color>> parseProperty(const char* name, const JSValue& value) {
    return parseFunction<Color>(name, value);
}
// Parse a stops function for cross-faded properties (no "base" supported).
template <typename T>
optional<Function<Faded<T>>> parseFadedFunction(const JSValue& value) {
    if (!value.HasMember("stops")) {
        Log::Warning(Event::ParseStyle, "function must specify a function type");
        return {};
    }
    // Name is irrelevant for the nested value parse here.
    auto stops = parseStops<T>("", value["stops"]);
    if (!stops) {
        return {};
    }
    return Function<Faded<T>>(*stops);
}
// Cross-faded dash arrays: objects are stops functions, anything else is
// parsed as a constant dash array.
template <>
optional<Function<Faded<std::vector<float>>>> parseProperty(const char* name, const JSValue& value) {
    if (value.IsObject()) {
        return parseFadedFunction<std::vector<float>>(value);
    }

    auto constant = parseProperty<std::vector<float>>(name, value);
    if (!constant) {
        return {};
    }
    return Function<Faded<std::vector<float>>>(*constant);
}
// Cross-faded string properties (e.g. pattern names): objects are stops
// functions, anything else is parsed as a constant string.
template <>
optional<Function<Faded<std::string>>> parseProperty(const char* name, const JSValue& value) {
    if (value.IsObject()) {
        return parseFadedFunction<std::string>(value);
    }

    auto constant = parseProperty<std::string>(name, value);
    if (!constant) {
        return {};
    }
    return Function<Faded<std::string>>(*constant);
}
} // namespace mbgl
<file_sep>/test/storage/storage.cpp
#include "storage.hpp"
#include <mbgl/platform/platform.hpp>
// Definition of the server shared by every test in the Storage fixture.
std::unique_ptr<mbgl::test::Server> Storage::server;

// Start the JavaScript-based test server once for the whole suite.
void Storage::SetUpTestCase() {
    const auto program = mbgl::platform::applicationRoot() + "/TEST_DATA/storage/server.js";
    server = std::make_unique<mbgl::test::Server>(program.c_str());
}

// Shut the server down after the last test has run.
void Storage::TearDownTestCase() {
    server.reset();
}
<file_sep>/CONTRIBUTING.md
# Contributing
If you have a usage question for a product built on Mapbox GL (such as Mapbox Mobile toolkits like our SDKs), please visit https://www.mapbox.com/help/.
If you want to contribute code:
1. Please familiarize yourself with the [install process](INSTALL.md).
1. Ensure that the [existing issues](https://github.com/mapbox/mapbox-gl-native/issues?utf8=✓&q=) don't already cover your question or contribution.
1. Pull requests are gladly accepted. If there are any changes that developers using one of the GL SDKs should be aware of, please update the “master” section of the relevant changelog: [iOS](CHANGELOG.md) and [Node.js](platform/node/CHANGELOG.md).
1. Prefix your commit messages with the platform(s) your changes affect: `[core]`, `[ios]`, `[android]`, `[node]`, and so on.
# Code of conduct
Everyone is invited to participate in Mapbox’s open source projects and public discussions: we want to create a welcoming and friendly environment. Harassment of participants or other unethical and unprofessional behavior will not be tolerated in our spaces. The [Contributor Covenant](http://contributor-covenant.org) applies to all projects under the Mapbox organization and we ask that you please read [the full text](http://contributor-covenant.org/version/1/2/0/).
You can learn more about our open source philosophy on [mapbox.com](https://www.mapbox.com/about/open/).
<file_sep>/platform/ios/scripts/install.sh
#!/usr/bin/env bash

# iOS CI install step: fetches build prerequisites on the CI OS X image.

set -e
set -o pipefail

# Tags are needed so the build can derive version information.
git fetch --tags

mapbox_time "checkout_mason" \
git submodule update --init .mason

mapbox_time "install_recent_git" \
brew install git

mapbox_time "install_awscli" \
brew install awscli

# jazzy generates the API documentation.
mapbox_time "install_jazzy" \
gem install jazzy

# Directory where KIF UI-test screenshots are collected.
mkdir -p ${KIF_SCREENSHOTS}
<file_sep>/platform/osx/scripts/test.sh
#!/usr/bin/env bash

# OS X CI test step: builds and runs the osxsdk scheme via xcodebuild.

set -e
set -o pipefail
set -u

OSX_SDK_VERSION=`xcrun --sdk macosx --show-sdk-version`
OSX_PROJ_PATH=./build/osx-x86_64/gyp/osx.xcodeproj

export BUILDTYPE=${BUILDTYPE:-Release}

# xcodebuild can only run shared schemes, and the generated project has none.
# Workaround: open the project in Xcode so it auto-creates user schemes, wait,
# then promote the user scheme to a shared one.
if [[ ! -e "${OSX_PROJ_PATH}/xcshareddata/xcschemes/osxsdk.xcscheme" ]]; then
    # Generate schemes
    open -g "${OSX_PROJ_PATH}"
    # NOTE(review): fixed sleep assumes Xcode finishes scheme generation
    # within 20 seconds — potentially flaky on slow CI machines.
    sleep 20

    # Share osxsdk scheme
    mkdir -pv "${OSX_PROJ_PATH}/xcshareddata/xcschemes/"
    mv -v \
        "${OSX_PROJ_PATH}/xcuserdata/${USER}.xcuserdatad/xcschemes/osxsdk.xcscheme" \
        "${OSX_PROJ_PATH}/xcshareddata/xcschemes/"
fi

xcodebuild -verbose \
    -sdk macosx${OSX_SDK_VERSION} \
    -project "${OSX_PROJ_PATH}" \
    -scheme osxsdk \
    test
<file_sep>/src/mbgl/tile/vector_tile_data.hpp
#ifndef MBGL_MAP_VECTOR_TILE_DATA
#define MBGL_MAP_VECTOR_TILE_DATA
#include <mbgl/tile/tile_data.hpp>
#include <mbgl/tile/tile_worker.hpp>
#include <mbgl/text/placement_config.hpp>
#include <atomic>
#include <memory>
#include <unordered_map>
namespace mbgl {
class Style;
class WorkRequest;
class FileRequest;
class GeometryTileMonitor;
// TileData implementation backed by a vector tile. Loading is delegated to a
// GeometryTileMonitor; heavy parsing work is dispatched to a background
// Worker through a TileWorker.
class VectorTileData : public TileData {
public:
    VectorTileData(const TileID&,
                   std::unique_ptr<GeometryTileMonitor> monitor,
                   std::string sourceID,
                   Style&,
                   const MapMode,
                   const std::function<void(std::exception_ptr)>& callback);
    ~VectorTileData();

    // Returns the bucket built for the given style layer, if any.
    Bucket* getBucket(const StyleLayer&) override;

    // Continues parsing work that could not finish earlier; the callback
    // receives any parse error.
    bool parsePending(std::function<void(std::exception_ptr)> callback) override;

    // Schedules placement to be redone; the overload without a config reuses
    // the current target configuration (see implementation).
    void redoPlacement(PlacementConfig config, const std::function<void()>&) override;
    void redoPlacement(const std::function<void()>&) override;

    void cancel() override;

private:
    Style& style;
    Worker& worker;
    TileWorker tileWorker;
    std::unique_ptr<GeometryTileMonitor> monitor;
    std::unique_ptr<FileRequest> tileRequest;
    std::unique_ptr<WorkRequest> workRequest;

    // Contains all the Bucket objects for the tile. Buckets are render
    // objects and they get added by tile parsing operations.
    std::unordered_map<std::string, std::unique_ptr<Bucket>> buckets;

    // Stores the placement configuration of the text that is currently placed on the screen.
    PlacementConfig placedConfig;

    // Stores the placement configuration of how the text should be placed. This isn't necessarily
    // the one that is being displayed.
    PlacementConfig targetConfig;
};
} // namespace mbgl
#endif
<file_sep>/src/mbgl/tile/raster_tile_data.cpp
#include <mbgl/tile/raster_tile_data.hpp>
#include <mbgl/source/source.hpp>
#include <mbgl/storage/resource.hpp>
#include <mbgl/storage/response.hpp>
#include <mbgl/storage/file_source.hpp>
#include <mbgl/util/worker.hpp>
#include <mbgl/util/work_request.hpp>
using namespace mbgl;
// Constructs a raster tile and immediately starts fetching its image.
// The response handler runs for every response (including revalidations),
// so it must distinguish error / not-modified / no-content / data cases.
RasterTileData::RasterTileData(const TileID& id_,
                               float pixelRatio,
                               const std::string& urlTemplate,
                               gl::TexturePool &texturePool_,
                               Worker& worker_,
                               FileSource& fileSource,
                               const std::function<void(std::exception_ptr)>& callback)
    : TileData(id_),
      texturePool(texturePool_),
      worker(worker_) {
    state = State::loading;

    const Resource resource = Resource::tile(urlTemplate, pixelRatio, id.x, id.y, id.sourceZ);
    req = fileSource.request(resource, [callback, this](Response res) {
        if (res.error) {
            // Surface network/server errors to the caller as exceptions.
            callback(std::make_exception_ptr(std::runtime_error(res.error->message)));
        } else if (res.notModified) {
            // Existing contents are still valid; only refresh freshness metadata.
            modified = res.modified;
            expires = res.expires;
        } else if (res.noContent) {
            // The tile exists but is empty: drop any previous bucket/work.
            state = State::parsed;
            modified = res.modified;
            expires = res.expires;
            workRequest.reset();
            bucket.reset();
            callback(nullptr);
        } else {
            modified = res.modified;
            expires = res.expires;

            // Only overwrite the state when we didn't have a previous tile.
            if (state == State::loading) {
                state = State::loaded;
            }

            // Decode the raster data off-thread; cancel any in-flight parse first.
            workRequest.reset();
            workRequest = worker.parseRasterTile(std::make_unique<RasterBucket>(texturePool), res.data, [this, callback] (RasterTileParseResult result) {
                workRequest.reset();
                // Bail out if the tile was cancelled/obsoleted while parsing.
                if (state != State::loaded) {
                    return;
                }

                std::exception_ptr error;
                if (result.is<std::unique_ptr<Bucket>>()) {
                    state = State::parsed;
                    bucket = std::move(result.get<std::unique_ptr<Bucket>>());
                } else {
                    // Parse failure: mark obsolete and report the error.
                    error = result.get<std::exception_ptr>();
                    state = State::obsolete;
                    bucket.reset();
                }

                callback(error);
            });
        }
    });
}
// Cancel outstanding network/parse work before members are destroyed.
RasterTileData::~RasterTileData() {
    cancel();
}

// Raster tiles hold a single bucket; the layer argument is ignored.
Bucket* RasterTileData::getBucket(StyleLayer const&) {
    return bucket.get();
}
// Abort this tile: mark it obsolete, drop the pending network request, and
// abandon any in-flight parse work. Safe to call multiple times.
void RasterTileData::cancel() {
    state = State::obsolete;
    req = nullptr;
    workRequest.reset();
}
<file_sep>/src/mbgl/layer/symbol_layer.hpp
#ifndef MBGL_SYMBOL_LAYER
#define MBGL_SYMBOL_LAYER
#include <mbgl/style/style_layer.hpp>
#include <mbgl/style/layout_property.hpp>
#include <mbgl/style/paint_property.hpp>
namespace mbgl {
class SpriteAtlas;
// Layout properties for symbol layers, mirroring the "layout" section of the
// style specification. Initializers are the defaults applied before parsing.
class SymbolLayoutProperties {
public:
    LayoutProperty<PlacementType> placement { PlacementType::Point };
    LayoutProperty<float> spacing { 250.0f };
    LayoutProperty<bool> avoidEdges { false };

    // icon-* layout properties.
    class IconProperties {
    public:
        LayoutProperty<bool> allowOverlap { false };
        LayoutProperty<bool> ignorePlacement { false };
        LayoutProperty<bool> optional { false };
        LayoutProperty<RotationAlignmentType> rotationAlignment { RotationAlignmentType::Viewport };
        LayoutProperty<float> size { 1.0f };
        LayoutProperty<std::string> image { "" };
        LayoutProperty<float> rotate { 0.0f };
        LayoutProperty<float> padding { 2.0f };
        LayoutProperty<bool> keepUpright { false };
        LayoutProperty<std::array<float, 2>> offset { {{ 0, 0 }} };
    } icon;

    // text-* layout properties.
    class TextProperties {
    public:
        LayoutProperty<RotationAlignmentType> rotationAlignment { RotationAlignmentType::Viewport };
        LayoutProperty<std::string> field { "" };
        LayoutProperty<std::string> font { "Open Sans Regular, Arial Unicode MS Regular" };
        LayoutProperty<float> size { 16.0f };
        LayoutProperty<float> maxWidth { 15.0f /* em */ };
        LayoutProperty<float> lineHeight { 1.2f /* em */ };
        LayoutProperty<float> letterSpacing { 0.0f /* em */ };
        LayoutProperty<TextJustifyType> justify { TextJustifyType::Center };
        LayoutProperty<TextAnchorType> anchor { TextAnchorType::Center };
        LayoutProperty<float> maxAngle { 45.0f /* degrees */ };
        LayoutProperty<float> rotate { 0.0f };
        LayoutProperty<float> padding { 2.0f };
        LayoutProperty<bool> keepUpright { true };
        LayoutProperty<TextTransformType> transform { TextTransformType::None };
        LayoutProperty<std::array<float, 2>> offset { {{ 0, 0 }} };
        LayoutProperty<bool> allowOverlap { false };
        LayoutProperty<bool> ignorePlacement { false };
        LayoutProperty<bool> optional { false };
    } text;

    // Special case.
    // NOTE(review): these appear to track maximum icon/text sizes separately
    // from the LayoutProperty values — confirm where they are computed.
    float iconMaxSize = 1.0f;
    float textMaxSize = 16.0f;
};
// Paint properties for symbol layers. Icon and text share an identical set of
// properties, so one nested class is instantiated twice (differing default size).
class SymbolPaintProperties {
public:
    class PaintProperties {
    public:
        PaintProperties(float size_) : size(size_) {}

        PaintProperty<float> opacity { 1.0f };
        PaintProperty<Color> color { {{ 0, 0, 0, 1 }} };
        PaintProperty<Color> haloColor { {{ 0, 0, 0, 0 }} };
        PaintProperty<float> haloWidth { 0.0f };
        PaintProperty<float> haloBlur { 0.0f };
        PaintProperty<std::array<float, 2>> translate { {{ 0, 0 }} };
        PaintProperty<TranslateAnchorType> translateAnchor { TranslateAnchorType::Map };

        // Special case
        float size;

        // Something is drawable only with non-zero opacity, a non-transparent
        // fill or halo color (alpha channel), and a non-zero size.
        bool isVisible() const {
            return opacity > 0 && (color.value[3] > 0 || haloColor.value[3] > 0) && size > 0;
        }
    };

    PaintProperties icon { 1.0f };
    PaintProperties text { 16.0f };
};
// Style layer that renders symbols (icons and/or text labels).
class SymbolLayer : public StyleLayer {
public:
    SymbolLayer() : StyleLayer(Type::Symbol) {}

    std::unique_ptr<StyleLayer> clone() const override;

    void parseLayout(const JSValue&) override;
    void parsePaints(const JSValue&) override;

    void cascade(const StyleCascadeParameters&) override;
    bool recalculate(const StyleCalculationParameters&) override;

    std::unique_ptr<Bucket> createBucket(StyleBucketParameters&) const override;

    SymbolLayoutProperties layout;
    SymbolPaintProperties paint;

    // Atlas providing icon images; raw pointer set externally —
    // NOTE(review): presumably not owned by this layer, confirm lifetime.
    SpriteAtlas* spriteAtlas = nullptr;
};

// Enables StyleLayer::is<SymbolLayer>() runtime type checks.
template <>
inline bool StyleLayer::is<SymbolLayer>() const {
    return type == Type::Symbol;
}
} // namespace mbgl
#endif
<file_sep>/platform/android/MapboxGLAndroidSDKTestApp/src/test/java/com/mapbox/mapboxsdk/utils/MockParcel.java
package com.mapbox.mapboxsdk.utils;
import android.os.Parcel;
import com.mapbox.mapboxsdk.geometry.LatLng;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.ArrayList;
import java.util.List;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyDouble;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Mockito-backed stand-in for {@link Parcel}, which cannot be instantiated in
 * plain JVM unit tests. Writes append values to an in-memory list; reads
 * replay them in write order through a movable {@code position} cursor,
 * emulating the real Parcel's sequential behavior.
 */
public class MockParcel {

    /** Mirrors {@code Parcel.obtain()}: returns a freshly wired mock. */
    public static Parcel obtain() {
        return new MockParcel().getMockedParcel();
    }

    Parcel mockedParcel;
    int position;         // read cursor into `objects`
    List<Object> objects; // backing store for everything written to the parcel

    public Parcel getMockedParcel() {
        return mockedParcel;
    }

    public MockParcel() {
        mockedParcel = mock(Parcel.class);
        objects = new ArrayList<>();
        setupMock();
    }

    /** Wires all stubbed behaviors onto the mocked Parcel. */
    private void setupMock() {
        setupWrites();
        setupReads();
        setupOthers();
    }

    /** Stubs write methods so values are appended to the backing list. */
    private void setupWrites() {
        Answer<Void> writeValueAnswer = new Answer<Void>() {
            @Override
            public Void answer(InvocationOnMock invocation) throws Throwable {
                Object parameter = invocation.getArguments()[0];
                objects.add(parameter);
                return null;
            }
        };
        // Arrays are stored as their length followed by each element, so the
        // read side knows how many entries to consume.
        Answer<Void> writeArrayAnswer = new Answer<Void>() {
            @Override
            public Void answer(InvocationOnMock invocation) throws Throwable {
                Object[] parameters = (Object[]) invocation.getArguments()[0];
                objects.add(parameters.length);
                for (Object o : parameters) {
                    objects.add(o);
                }
                return null;
            }
        };
        doAnswer(writeValueAnswer).when(mockedParcel).writeLong(anyLong());
        doAnswer(writeValueAnswer).when(mockedParcel).writeString(anyString());
        doAnswer(writeValueAnswer).when(mockedParcel).writeDouble(anyDouble());
        doAnswer(writeArrayAnswer).when(mockedParcel).writeParcelableArray(any(LatLng[].class), eq(0));
    }

    /** Stubs read methods to replay stored values, advancing the cursor. */
    private void setupReads() {
        when(mockedParcel.readLong()).thenAnswer(new Answer<Long>() {
            @Override
            public Long answer(InvocationOnMock invocation) throws Throwable {
                return (Long) objects.get(position++);
            }
        });
        when(mockedParcel.readString()).thenAnswer(new Answer<String>() {
            @Override
            public String answer(InvocationOnMock invocation) throws Throwable {
                return (String) objects.get(position++);
            }
        });
        when(mockedParcel.readDouble()).thenAnswer(new Answer<Double>() {
            @Override
            public Double answer(InvocationOnMock invocation) throws Throwable {
                return (Double) objects.get(position++);
            }
        });
        when(mockedParcel.readParcelableArray(LatLng.class.getClassLoader())).thenAnswer(new Answer<LatLng[]>() {
            @Override
            public LatLng[] answer(InvocationOnMock invocation) throws Throwable {
                // First stored entry is the element count written by the
                // array-write stub; the elements follow.
                int size = (Integer) objects.get(position++);
                LatLng[] latLngs = LatLng.CREATOR.newArray(size);
                for (int i = 0; i < size; i++) {
                    latLngs[i] = (LatLng) objects.get(position++);
                }
                return latLngs;
            }
        });
    }

    /** Stubs setDataPosition so tests can rewind before reading back. */
    private void setupOthers() {
        doAnswer(new Answer<Void>() {
            @Override
            public Void answer(InvocationOnMock invocation) throws Throwable {
                position = ((Integer) invocation.getArguments()[0]);
                return null;
            }
        }).when(mockedParcel).setDataPosition(anyInt());
    }
}
<file_sep>/gyp/ios.gyp
{
'includes': [
'../platform/ios/app/mapboxgl-app.gypi',
'../platform/ios/framework/framework-ios.gypi',
'../platform/ios/benchmark/benchmark-ios.gypi',
],
}
<file_sep>/src/mbgl/annotation/annotation_tile.hpp
#ifndef MBGL_ANNOTATION_TILE
#define MBGL_ANNOTATION_TILE
#include <mbgl/tile/geometry_tile.hpp>
#include <mbgl/map/tile_id.hpp>
#include <map>
#include <unordered_map>
namespace mbgl {
// In-memory GeometryTileFeature used for map annotations (no decoding needed).
class AnnotationTileFeature : public GeometryTileFeature {
public:
    AnnotationTileFeature(FeatureType, GeometryCollection,
                          std::unordered_map<std::string, std::string> properties = {{}});

    FeatureType getType() const override { return type; }
    optional<Value> getValue(const std::string&) const override;
    GeometryCollection getGeometries() const override { return geometries; }

    const FeatureType type;
    const std::unordered_map<std::string, std::string> properties;
    const GeometryCollection geometries;
};

// Simple vector-backed layer of annotation features.
class AnnotationTileLayer : public GeometryTileLayer {
public:
    std::size_t featureCount() const override { return features.size(); }
    util::ptr<const GeometryTileFeature> getFeature(std::size_t i) const override { return features[i]; }

    std::vector<util::ptr<const AnnotationTileFeature>> features;
};

// In-memory tile whose layers are looked up by name.
class AnnotationTile : public GeometryTile {
public:
    util::ptr<GeometryTileLayer> getLayer(const std::string&) const override;

    std::map<std::string, util::ptr<AnnotationTileLayer>> layers;
};

class MapData;

// Monitor that feeds annotation tiles to the tile pipeline whenever the
// annotations for this tile change.
class AnnotationTileMonitor : public GeometryTileMonitor {
public:
    // TODO: should just take AnnotationManager&, but we need to eliminate util::exclusive<AnnotationManager> from MapData first.
    AnnotationTileMonitor(const TileID&, MapData&);
    ~AnnotationTileMonitor();

    // Pushes a freshly generated tile to the registered callback.
    void update(std::unique_ptr<GeometryTile>);
    std::unique_ptr<FileRequest> monitorTile(const GeometryTileMonitor::Callback&) override;

    TileID tileID;

private:
    MapData& data;
    GeometryTileMonitor::Callback callback;
};
} // namespace mbgl
#endif
<file_sep>/src/mbgl/util/tile_cover.hpp
#ifndef MBGL_UTIL_TILE_COVER
#define MBGL_UTIL_TILE_COVER
#include <mbgl/map/tile_id.hpp>
#include <mbgl/style/types.hpp>
#include <vector>
namespace mbgl {
class TransformState;
class LatLngBounds;

// Selects the integer zoom level whose tiles (of the given size) should be
// used for a fractional zoom z and source type.
int32_t coveringZoomLevel(double z, SourceType type, uint16_t tileSize);

// Computes the set of tile IDs at zoom z needed to cover the current
// viewport / the given geographic bounds; actualZ is the data zoom level.
std::vector<TileID> tileCover(const TransformState&, int32_t z, int32_t actualZ);
std::vector<TileID> tileCover(const LatLngBounds&, int32_t z, int32_t actualZ);
} // namespace mbgl
#endif
<file_sep>/src/mbgl/util/get_geometries.hpp
#ifndef MBGL_UTIL_GET_GEOMETRIES
#define MBGL_UTIL_GET_GEOMETRIES

#include <mbgl/tile/geometry_tile.hpp>

namespace mbgl {

// Extracts the feature's geometry as a GeometryCollection (see
// geometry_tile.hpp for the coordinate conventions).
GeometryCollection getGeometries(const GeometryTileFeature& feature);

} // namespace mbgl

#endif
<file_sep>/gyp/platform-ios.gypi
{
'targets': [
{
'target_name': 'platform-ios',
'product_name': 'mbgl-platform-ios',
'type': 'static_library',
'standalone_static_library': 1,
'hard_dependency': 1,
'dependencies': [
'version',
],
'sources': [
'../platform/default/async_task.cpp',
'../platform/default/run_loop.cpp',
'../platform/default/timer.cpp',
'../platform/default/default_file_source.cpp',
'../platform/default/online_file_source.cpp',
'../platform/default/mbgl/storage/offline.hpp',
'../platform/default/mbgl/storage/offline.cpp',
'../platform/default/mbgl/storage/offline_database.hpp',
'../platform/default/mbgl/storage/offline_database.cpp',
'../platform/default/mbgl/storage/offline_download.hpp',
'../platform/default/mbgl/storage/offline_download.cpp',
'../platform/default/sqlite3.hpp',
'../platform/default/sqlite3.cpp',
'../platform/darwin/src/log_nslog.mm',
'../platform/darwin/src/string_nsstring.mm',
'../platform/darwin/src/application_root.mm',
'../platform/darwin/src/image.mm',
'../platform/darwin/src/nsthread.mm',
'../platform/darwin/src/reachability.m',
'../platform/darwin/src/NSException+MGLAdditions.h',
'../platform/darwin/src/NSString+MGLAdditions.h',
'../platform/darwin/src/NSString+MGLAdditions.m',
'../platform/darwin/src/MGLTypes.m',
'../platform/darwin/src/MGLStyle.mm',
'../platform/darwin/src/MGLGeometry_Private.h',
'../platform/darwin/src/MGLGeometry.mm',
'../platform/darwin/src/MGLShape.m',
'../platform/darwin/src/MGLMultiPoint_Private.h',
'../platform/darwin/src/MGLMultiPoint.mm',
'../platform/darwin/src/MGLPointAnnotation.m',
'../platform/darwin/src/MGLPolyline.mm',
'../platform/darwin/src/MGLPolygon.mm',
'../platform/darwin/src/MGLMapCamera.mm',
'../platform/darwin/src/MGLOfflinePack.mm',
'../platform/darwin/src/MGLOfflinePack_Private.h',
'../platform/darwin/src/MGLOfflineStorage.mm',
'../platform/darwin/src/MGLOfflineStorage_Private.h',
'../platform/darwin/src/MGLOfflineRegion_Private.h',
'../platform/darwin/src/MGLTilePyramidOfflineRegion.mm',
'../platform/darwin/src/MGLAccountManager_Private.h',
'../platform/darwin/src/MGLAccountManager.m',
'../platform/darwin/src/NSBundle+MGLAdditions.h',
'../platform/darwin/src/NSBundle+MGLAdditions.m',
'../platform/darwin/src/NSProcessInfo+MGLAdditions.h',
'../platform/darwin/src/NSProcessInfo+MGLAdditions.m',
'../platform/ios/src/MGLMapboxEvents.h',
'../platform/ios/src/MGLMapboxEvents.m',
'../platform/ios/src/MGLAPIClient.h',
'../platform/ios/src/MGLAPIClient.m',
'../platform/ios/src/MGLLocationManager.h',
'../platform/ios/src/MGLLocationManager.m',
'../platform/ios/src/MGLMapView.mm',
'../platform/ios/src/MGLUserLocation_Private.h',
'../platform/ios/src/MGLUserLocation.m',
'../platform/ios/src/MGLUserLocationAnnotationView.h',
'../platform/ios/src/MGLUserLocationAnnotationView.m',
'../platform/ios/src/MGLAnnotationImage_Private.h',
'../platform/ios/src/MGLAnnotationImage.m',
'../platform/ios/src/MGLCompactCalloutView.h',
'../platform/ios/src/MGLCompactCalloutView.m',
'../platform/ios/vendor/SMCalloutView/SMCalloutView.h',
'../platform/ios/vendor/SMCalloutView/SMCalloutView.m',
'../platform/ios/vendor/Fabric/FABAttributes.h',
'../platform/ios/vendor/Fabric/FABKitProtocol.h',
'../platform/ios/vendor/Fabric/Fabric.h',
'../platform/ios/vendor/Fabric/Fabric+FABKits.h',
],
'variables': {
'cflags_cc': [
'<@(libuv_cflags)',
'<@(boost_cflags)',
'<@(sqlite_cflags)',
'<@(zlib_cflags)',
'<@(rapidjson_cflags)',
'<@(variant_cflags)',
],
'ldflags': [
'<@(sqlite_ldflags)',
'<@(zlib_ldflags)',
],
'libraries': [
'<@(libuv_static_libs)',
'<@(sqlite_static_libs)',
'<@(zlib_static_libs)',
'$(SDKROOT)/System/Library/Frameworks/CoreGraphics.framework',
'$(SDKROOT)/System/Library/Frameworks/CoreLocation.framework',
'$(SDKROOT)/System/Library/Frameworks/GLKit.framework',
'$(SDKROOT)/System/Library/Frameworks/ImageIO.framework',
'$(SDKROOT)/System/Library/Frameworks/MobileCoreServices.framework',
'$(SDKROOT)/System/Library/Frameworks/OpenGLES.framework',
'$(SDKROOT)/System/Library/Frameworks/QuartzCore.framework',
'$(SDKROOT)/System/Library/Frameworks/Security.framework',
'$(SDKROOT)/System/Library/Frameworks/SystemConfiguration.framework',
'$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
],
},
'include_dirs': [
'../platform/ios/include',
'../platform/darwin/include',
'../include',
'../src',
'../platform/default',
],
'xcode_settings': {
'OTHER_CPLUSPLUSFLAGS': [ '<@(cflags_cc)' ],
'CLANG_ENABLE_OBJC_ARC': 'YES',
'CLANG_ENABLE_MODULES': 'YES',
},
'link_settings': {
'libraries': [ '<@(libraries)' ],
'xcode_settings': {
'OTHER_LDFLAGS': [ '<@(ldflags)' ],
},
},
'direct_dependent_settings': {
'include_dirs': [
'../platform/ios/include',
'../platform/darwin/include',
'../include',
],
'mac_bundle_resources': [
'<!@(find ../platform/ios/resources -type f \! -name "README" \! -name \'.*\')',
'<!@(find ../platform/default/resources -type f \! -name "README" \! -name \'.der\')',
],
},
},
],
}
<file_sep>/src/mbgl/storage/http_context_base.cpp
#include <mbgl/storage/http_context_base.hpp>

namespace mbgl {
// Intentionally empty: this translation unit only anchors the declarations
// in http_context_base.hpp into the build.
} // namespace mbgl
<file_sep>/src/mbgl/style/layout_property.hpp
#ifndef MBGL_LAYOUT_PROPERTY
#define MBGL_LAYOUT_PROPERTY

#include <mbgl/style/property_parsing.hpp>
#include <mbgl/style/function.hpp>
#include <mbgl/util/rapidjson.hpp>

#include <utility>

namespace mbgl {

// Holds one layout property: the current concrete value plus the optional
// zoom function parsed from the style JSON. Implicitly convertible to T so
// call sites can read it like a plain value.
template <typename T>
class LayoutProperty {
public:
    explicit LayoutProperty(T v) : value(std::move(v)) {}

    // Parses the (optional) zoom function named `name` out of the style's
    // "layout" JSON object; leaves parsedValue unset when the key is absent.
    void parse(const char* name, const JSValue& layout) {
        if (layout.HasMember(name)) {
            parsedValue = parseProperty<Function<T>>(name, layout[name]);
        }
    }

    // Re-evaluates the parsed function for the given parameters (e.g. the
    // current zoom); a no-op when no function was parsed.
    void calculate(const StyleCalculationParameters& parameters) {
        if (parsedValue) {
            value = parsedValue->evaluate(parameters);
        }
    }

    // Returns *this (instead of the previous `void`) so assignment behaves
    // like conventional C++ assignment; existing callers are unaffected.
    LayoutProperty& operator=(const T& v) {
        value = v;
        return *this;
    }

    operator T() const { return value; }

    optional<Function<T>> parsedValue;
    T value;
};

} // namespace mbgl

#endif
<file_sep>/platform/android/MapboxGLAndroidSDKTestApp/src/androidTest/java/com/mapbox/mapboxsdk/testapp/espresso/TiltActivityTest.java
package com.mapbox.mapboxsdk.testapp.espresso;

import android.support.test.rule.ActivityTestRule;

import com.mapbox.mapboxsdk.testapp.R;
import com.mapbox.mapboxsdk.testapp.TiltActivity;

import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;

/**
 * Espresso smoke test for {@link TiltActivity}: launches the activity and
 * verifies that its map view is displayed.
 */
public class TiltActivityTest extends BaseTest {

    @Rule
    public ActivityTestRule<TiltActivity> mActivityRule = new ActivityTestRule<>(
            TiltActivity.class);

    // Captured in setActivity(); currently unused by the test body.
    private TiltActivity mActivity = null;

    @Before
    public void setActivity() {
        mActivity = mActivityRule.getActivity();
    }

    @Test
    public void testSanity() {
        // Fails the test if the tilt map view is not visible after launch.
        checkViewIsDisplayed(R.id.tiltMapView);
    }
}
<file_sep>/src/mbgl/annotation/point_annotation_impl.cpp
#include <mbgl/annotation/point_annotation_impl.hpp>
#include <mbgl/annotation/annotation_tile.hpp>

namespace mbgl {

PointAnnotationImpl::PointAnnotationImpl(const AnnotationID id_, const PointAnnotation& point_)
    : id(id_),
      point(point_) {
}

// Appends this point to `layer` as a single-point feature, converting the
// geographic position into tile-local integer coordinates for `tileID`.
void PointAnnotationImpl::updateLayer(const TileID& tileID, AnnotationTileLayer& layer) const {
    std::unordered_map<std::string, std::string> featureProperties;
    // "sprite" selects the marker image; fall back to the default marker
    // when the annotation has no icon name.
    featureProperties.emplace("sprite", point.icon.empty() ? std::string("default_marker") : point.icon);

    // point.position.project() presumably yields normalized world
    // coordinates — confirm against LatLng::project(). Scale by the number
    // of tiles at this zoom, keep only the within-tile fraction via fmod,
    // then scale to the tile's integer extent.
    mbgl::ScreenCoordinate projected = point.position.project();
    projected *= 1 << tileID.z;
    projected.x = std::fmod(projected.x, 1);
    projected.y = std::fmod(projected.y, 1);
    projected *= GeometryTileFeature::defaultExtent;

    layer.features.emplace_back(
        std::make_shared<const AnnotationTileFeature>(FeatureType::Point,
                                                      GeometryCollection {{ {{ GeometryCoordinate { projected } }} }},
                                                      featureProperties));
}

} // namespace mbgl
<file_sep>/src/mbgl/renderer/debug_bucket.hpp
#ifndef MBGL_RENDERER_DEBUGBUCKET
#define MBGL_RENDERER_DEBUGBUCKET

#include <mbgl/tile/tile_data.hpp>
#include <mbgl/map/mode.hpp>
#include <mbgl/geometry/debug_font_buffer.hpp>
#include <mbgl/geometry/vao.hpp>
#include <mbgl/util/chrono.hpp>

namespace mbgl {

class PlainShader;

// NOTE: this forward declaration previously lived in namespace `util`, but
// the member functions below take `gl::GLObjectStore&`; declare the class in
// the namespace it is actually referenced from.
namespace gl {
class GLObjectStore;
}

// Holds the vertex data used to draw per-tile debug overlays (tile borders
// and the tile-info text) for a single tile.
class DebugBucket : private util::noncopyable {
public:
    DebugBucket(TileID id, TileData::State,
                optional<SystemTimePoint> modified,
                optional<SystemTimePoint> expires,
                MapDebugOptions);

    void drawLines(PlainShader&, gl::GLObjectStore&);
    void drawPoints(PlainShader&, gl::GLObjectStore&);

    const TileData::State state;
    const optional<SystemTimePoint> modified;
    const optional<SystemTimePoint> expires;
    const MapDebugOptions debugMode;

private:
    DebugFontBuffer fontBuffer;
    VertexArrayObject array;
};

} // namespace mbgl

#endif
<file_sep>/platform/android/MapboxGLAndroidSDK/src/main/java/com/mapbox/mapboxsdk/utils/MathUtils.java
package com.mapbox.mapboxsdk.utils;
public class MathUtils {
/**
* Test a value in specified range, returning minimum if it's below, and maximum if it's above
* @param value Value to test
* @param min Minimum value of range
* @param max Maximum value of range
* @return value if it's between min and max, min if it's below, max if it's above
*/
public static double clamp(double value, double min, double max) {
return Math.max(min, Math.min(max, value));
}
/**
* Test a value in specified range, returning minimum if it's below, and maximum if it's above
* @param value Value to test
* @param min Minimum value of range
* @param max Maximum value of range
* @return value if it's between min and max, min if it's below, max if it's above
*/
public static float clamp(float value, float min, float max) {
return Math.max(min, Math.min(max, value));
}
}
<file_sep>/src/mbgl/style/style_calculation_parameters.hpp
// Include guard renamed to carry the MBGL_ prefix used by every other header
// in this codebase (it previously read STYLE_CALCULATION_PARAMETERS).
#ifndef MBGL_STYLE_CALCULATION_PARAMETERS
#define MBGL_STYLE_CALCULATION_PARAMETERS

#include <mbgl/style/zoom_history.hpp>
#include <mbgl/util/chrono.hpp>

namespace mbgl {

// Inputs used when recalculating zoom- and time-dependent style property
// values.
class StyleCalculationParameters {
public:
    // Zoom-only constructor for code paths (and tests) that don't need
    // transition timing; `now`, `zoomHistory`, and `defaultFadeDuration`
    // are left default-constructed.
    explicit StyleCalculationParameters(float z_)
        : z(z_) {}

    StyleCalculationParameters(float z_,
                               const TimePoint& now_,
                               const ZoomHistory& zoomHistory_,
                               const Duration& defaultFadeDuration_)
        : z(z_),
          now(now_),
          zoomHistory(zoomHistory_),
          defaultFadeDuration(defaultFadeDuration_) {}

    float z;
    TimePoint now;
    ZoomHistory zoomHistory;
    Duration defaultFadeDuration;
};

} // namespace mbgl

#endif
<file_sep>/src/mbgl/util/thread_local.hpp
#ifndef MBGL_UTIL_THREAD_LOCAL
#define MBGL_UTIL_THREAD_LOCAL

#include <mbgl/util/noncopyable.hpp>

#include <stdexcept>

#include <pthread.h>

namespace mbgl {
namespace util {

// RAII wrapper around a pthread thread-specific-storage key. The pointer a
// thread stores via set() is owned by the key's destructor function: when
// that thread exits, its stored value is deleted.
template <class T>
class ThreadLocal : public noncopyable {
public:
    // Bug fix: the previous body executed `ThreadLocal();` as a statement,
    // which constructs and destroys a *temporary* instead of initializing
    // this object, leaving `key` uninitialized. Delegate to the default
    // constructor instead (C++11 delegating constructor).
    inline ThreadLocal(T* val) : ThreadLocal() {
        set(val);
    }

    inline ThreadLocal() {
        int ret = pthread_key_create(&key, [](void* ptr) {
            delete reinterpret_cast<T*>(ptr);
        });
        if (ret) {
            throw std::runtime_error("Failed to init local storage key.");
        }
    }

    inline ~ThreadLocal() {
        // NOTE(review): throwing from a destructor terminates the program if
        // another exception is already in flight; kept for behavioral parity.
        if (pthread_key_delete(key)) {
            throw std::runtime_error("Failed to delete local storage key.");
        }
    }

    // Returns the calling thread's stored pointer, or nullptr when unset
    // (pthread_getspecific returns NULL for keys with no value).
    inline T* get() {
        return reinterpret_cast<T*>(pthread_getspecific(key));
    }

    // Stores `ptr` for the calling thread; ownership passes to the key's
    // destructor function.
    inline void set(T* ptr) {
        if (pthread_setspecific(key, ptr)) {
            throw std::runtime_error("Failed to set local storage.");
        }
    }

private:
    pthread_key_t key;
};

} // namespace util
} // namespace mbgl

#endif
<file_sep>/include/mbgl/annotation/annotation.hpp
#ifndef MBGL_ANNOTATION
#define MBGL_ANNOTATION

#include <cstdint>
#include <vector>

namespace mbgl {

// Opaque numeric handle identifying a single map annotation.
using AnnotationID = uint32_t;
using AnnotationIDs = std::vector<AnnotationID>;

} // namespace mbgl

#endif
<file_sep>/src/mbgl/layer/line_layer.hpp
#ifndef MBGL_LINE_LAYER
#define MBGL_LINE_LAYER

#include <mbgl/style/style_layer.hpp>
#include <mbgl/style/layout_property.hpp>
#include <mbgl/style/paint_property.hpp>

namespace mbgl {

// Layout properties of a line layer, with their default values.
class LineLayoutProperties {
public:
    LayoutProperty<CapType> cap { CapType::Butt };
    LayoutProperty<JoinType> join { JoinType::Miter };
    LayoutProperty<float> miterLimit { 2.0f };
    LayoutProperty<float> roundLimit { 1.0f };
};

// Paint properties of a line layer, with their default values.
class LinePaintProperties {
public:
    PaintProperty<float> opacity { 1.0f };
    PaintProperty<Color> color { {{ 0, 0, 0, 1 }} };
    PaintProperty<std::array<float, 2>> translate { {{ 0, 0 }} };
    PaintProperty<TranslateAnchorType> translateAnchor { TranslateAnchorType::Map };
    PaintProperty<float> width { 1 };
    PaintProperty<float> gapWidth { 0 };
    PaintProperty<float> blur { 0 };
    PaintProperty<float> offset { 0 };
    PaintProperty<std::vector<float>, Faded<std::vector<float>>> dasharray { {} };
    PaintProperty<std::string, Faded<std::string>> pattern { "" };

    // Special case: not a style property (no function/transition support).
    float dashLineWidth = 1;

    // A line can be skipped entirely when fully transparent (opacity or the
    // color's alpha component is 0) or when its width is 0.
    bool isVisible() const {
        return opacity > 0 && color.value[3] > 0 && width > 0;
    }
};

class LineLayer : public StyleLayer {
public:
    LineLayer() : StyleLayer(Type::Line) {}
    std::unique_ptr<StyleLayer> clone() const override;

    void parseLayout(const JSValue&) override;
    void parsePaints(const JSValue&) override;

    void cascade(const StyleCascadeParameters&) override;
    bool recalculate(const StyleCalculationParameters&) override;

    std::unique_ptr<Bucket> createBucket(StyleBucketParameters&) const override;

    LineLayoutProperties layout;
    LinePaintProperties paint;
};

// Cheap type check: a StyleLayer is a LineLayer iff its type tag says so.
template <>
inline bool StyleLayer::is<LineLayer>() const {
    return type == Type::Line;
}

} // namespace mbgl

#endif
<file_sep>/src/mbgl/util/constants.cpp
#include <mbgl/util/constants.hpp>

#include <limits>

namespace mbgl {
namespace util {

// Logical size of a map tile, in pixels.
const float tileSize = 512.0f;

/*
 * The maximum extent of a feature that can be safely stored in the buffer.
 * In practice, all features are converted to this extent before being added.
 *
 * Positions are stored as signed 16bit integers.
 * One bit is lost for signedness to support features extending past the left edge of the tile.
 * One bit is lost because the line vertex buffer packs 1 bit of other data into the int.
 * One bit is lost to support features extending past the extent on the right edge of the tile.
 * This leaves us with 2^13 = 8192
 */
const int32_t EXTENT = 8192;

// Angle conversions and common geographic limits.
const double DEG2RAD = M_PI / 180.0;
const double RAD2DEG = 180.0 / M_PI;
const double M2PI = 2 * M_PI;
const double EARTH_RADIUS_M = 6378137;          // WGS84 equatorial radius, meters
const double LATITUDE_MAX = 85.051128779806604; // Web-Mercator latitude cutoff
const double LONGITUDE_MAX = 180;
const double DEGREES_MAX = 360;
const double PITCH_MAX = M_PI / 3;              // 60 degrees
const double MIN_ZOOM = 0.0;
const double MAX_ZOOM = 25.5;

// Default tile cache budget: 50 MiB.
const uint64_t DEFAULT_MAX_CACHE_SIZE = 50 * 1024 * 1024;

// Retry delay after a clock-skew failure — confirm usage at the call site.
const SystemDuration CLOCK_SKEW_RETRY_TIMEOUT = Seconds(30);

} // namespace util

// Per-category warning switches; debug builds enable the last five, release
// builds silence everything.
namespace debug {

#if defined(DEBUG)
const bool tileParseWarnings = false;
const bool styleParseWarnings = false;
const bool spriteWarnings = false;
const bool renderWarnings = false;
const bool renderTree = false;
const bool labelTextMissingWarning = true;
const bool missingFontStackWarning = true;
const bool missingFontFaceWarning = true;
const bool glyphWarning = true;
const bool shapingWarning = true;
#else
const bool tileParseWarnings = false;
const bool styleParseWarnings = false;
const bool spriteWarnings = false;
const bool renderWarnings = false;
const bool renderTree = false;
const bool labelTextMissingWarning = false;
const bool missingFontStackWarning = false;
const bool missingFontFaceWarning = false;
const bool glyphWarning = false;
const bool shapingWarning = false;
#endif

} // namespace debug
} // namespace mbgl
<file_sep>/test/style/functions.cpp
#include <iostream>

#include "../fixtures/util.hpp"

#include <mbgl/style/function.hpp>
#include <mbgl/style/style_calculation_parameters.hpp>

using namespace mbgl;

// A constant function must evaluate to the same value at every zoom level.
TEST(Function, Constant) {
    EXPECT_EQ(2.0f, mbgl::Function<float>(2).evaluate(StyleCalculationParameters(0)));
    EXPECT_EQ(3.8f, mbgl::Function<float>(3.8).evaluate(StyleCalculationParameters(0)));
    EXPECT_EQ(22.0f, mbgl::Function<float>(22).evaluate(StyleCalculationParameters(0)));
    EXPECT_EQ(2.0f, mbgl::Function<float>(2).evaluate(StyleCalculationParameters(4)));
    EXPECT_EQ(3.8f, mbgl::Function<float>(3.8).evaluate(StyleCalculationParameters(4)));
    EXPECT_EQ(22.0f, mbgl::Function<float>(22).evaluate(StyleCalculationParameters(4)));
    EXPECT_EQ(2.0f, mbgl::Function<float>(2).evaluate(StyleCalculationParameters(22)));
    EXPECT_EQ(3.8f, mbgl::Function<float>(3.8).evaluate(StyleCalculationParameters(22)));
    EXPECT_EQ(22.0f, mbgl::Function<float>(22).evaluate(StyleCalculationParameters(22)));
}

// A stop function interpolates between (zoom, value) pairs; outside the
// first/last stop the function stays at that stop's value.
TEST(Function, Stops) {
    // Explicit constant slope in fringe regions.
    mbgl::Function<float> slope_1({ { 0, 1.5 }, { 6, 1.5 }, { 8, 3 }, { 22, 3 } }, 1.75);
    EXPECT_EQ(1.5, slope_1.evaluate(StyleCalculationParameters(0)));
    EXPECT_EQ(1.5, slope_1.evaluate(StyleCalculationParameters(4)));
    EXPECT_EQ(1.5, slope_1.evaluate(StyleCalculationParameters(6)));
    ASSERT_FLOAT_EQ(2.0454545454545454, slope_1.evaluate(StyleCalculationParameters(7)));
    EXPECT_EQ(3.0, slope_1.evaluate(StyleCalculationParameters(8)));
    EXPECT_EQ(3.0, slope_1.evaluate(StyleCalculationParameters(9)));
    EXPECT_EQ(3.0, slope_1.evaluate(StyleCalculationParameters(15)));
    EXPECT_EQ(3.0, slope_1.evaluate(StyleCalculationParameters(22)));

    // Test constant values in fringe regions.
    mbgl::Function<float> slope_2({ { 6, 1.5 }, { 8, 3 } }, 1.75);
    EXPECT_EQ(1.5, slope_2.evaluate(StyleCalculationParameters(0)));
    EXPECT_EQ(1.5, slope_2.evaluate(StyleCalculationParameters(4)));
    EXPECT_EQ(1.5, slope_2.evaluate(StyleCalculationParameters(6)));
    ASSERT_FLOAT_EQ(2.0454545454545454, slope_2.evaluate(StyleCalculationParameters(7)));
    EXPECT_EQ(3.0, slope_2.evaluate(StyleCalculationParameters(8)));
    EXPECT_EQ(3.0, slope_2.evaluate(StyleCalculationParameters(9)));
    EXPECT_EQ(3.0, slope_2.evaluate(StyleCalculationParameters(15)));
    EXPECT_EQ(3.0, slope_2.evaluate(StyleCalculationParameters(22)));

    // Test no values.
    // NOTE(review): with an empty stops list the expected value is 1 even
    // though the base is 1.75 — presumably Function falls back to a fixed
    // default; confirm against the Function implementation.
    mbgl::Function<float> slope_3({}, 1.75);
    EXPECT_EQ(1, slope_3.evaluate(StyleCalculationParameters(2)));
    EXPECT_EQ(1, slope_3.evaluate(StyleCalculationParameters(6)));
    EXPECT_EQ(1, slope_3.evaluate(StyleCalculationParameters(12)));

    // Explicit constant slope in fringe regions.
    mbgl::Function<float> slope_4({ { 0, 2 }, { 8, 10 } }, 1);
    EXPECT_EQ(2, slope_4.evaluate(StyleCalculationParameters(0)));
    EXPECT_EQ(3, slope_4.evaluate(StyleCalculationParameters(1)));
    EXPECT_EQ(4, slope_4.evaluate(StyleCalculationParameters(2)));
    EXPECT_EQ(4.75, slope_4.evaluate(StyleCalculationParameters(2.75)));
    EXPECT_EQ(10, slope_4.evaluate(StyleCalculationParameters(8)));
}
<file_sep>/platform/android/tests/docs/UNIT_TESTS.md
# Unit tests
Our Unit tests are based on JUnit and are located under `/src/test/java/`.
We are using plain JUnit to test classes that aren't calling the Android API,
or are using Android's JUnit extensions to stub/mock Android components.
## Running Unit tests locally
To run unit tests locally, switch to the Unit Tests build variant, then right-click the corresponding test class or method and select "Run ...".
You can also have a run configuration:
* Click on Run -> Edit Configurations...
* Click on "Junit Tests"
* Give a name to the configuration, e.g. `JUnit tests`
* As "Test Kind", choose "All in directory"
* As folder, choose the following folder: `mapbox-gl-native/platform/android/MapboxGLAndroidSDKTestApp/src/test/java`
* Click OK to save the new configuration
You can also run the tests from the command line with:
```
$ ./gradlew test --continue -p MapboxGLAndroidSDKTestApp
```
### Code Coverage
Showing code coverage directly in the IDE.
- Switch your Build Variant to the Unit Tests artifact
- Right click a unit test and select `Run test with coverage`
- Select `Add to active suites` (this will create a run configuration)
- Edit the run configuration to include/exclude packages in the `Code coverage`-tab.
## Running Unit tests on CI
The Unit tests are executed as part of the build process on our CI and are
automatically run for each new commit pushed to this repo. If a unit test
fails, the build fails and stops.
You can find this gradle command in our [buildscript](https://github.com/mapbox/mapbox-gl-native/blob/master/platform/android/bitrise.yml#L48):
```
$ ./gradlew testReleaseUnitTest --continue
```
<file_sep>/src/mbgl/annotation/point_annotation_impl.hpp
#ifndef MBGL_POINT_ANNOTATION_IMPL
#define MBGL_POINT_ANNOTATION_IMPL

#include <mbgl/annotation/annotation.hpp>
#include <mbgl/annotation/point_annotation.hpp>
#include <mbgl/util/geo.hpp>

#include <string>

// Silence the warnings emitted by the Boost.Geometry headers below.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-function"
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma GCC diagnostic ignored "-Wunused-variable"
#pragma GCC diagnostic ignored "-Wshadow"
#ifdef __clang__
#pragma GCC diagnostic ignored "-Wunknown-pragmas"
#endif
#pragma GCC diagnostic ignored "-Wpragmas"
#pragma GCC diagnostic ignored "-Wdeprecated-register"
#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
#pragma GCC diagnostic ignored "-Wunused-local-typedefs"
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
#include <boost/geometry.hpp>
#include <boost/geometry/geometries/point.hpp>
#include <boost/geometry/geometries/box.hpp>
#include <boost/geometry/geometries/register/point.hpp>
#include <boost/geometry/geometries/register/box.hpp>
#include <boost/geometry/index/rtree.hpp>
#pragma GCC diagnostic pop

// Make Boost Geometry aware of our LatLng type
BOOST_GEOMETRY_REGISTER_POINT_2D(mbgl::LatLng, double, boost::geometry::cs::cartesian, longitude, latitude)
BOOST_GEOMETRY_REGISTER_BOX(mbgl::LatLngBounds, mbgl::LatLng, southwest(), northeast())

namespace mbgl {

class AnnotationTileLayer;

// Internal representation of a point annotation, indexable in an R-tree for
// spatial queries (see the Tree alias below).
class PointAnnotationImpl {
public:
    using Map = std::map<AnnotationID, std::shared_ptr<PointAnnotationImpl>>;
    // R*-tree over shared pointers; node parameters (16, 4) are the
    // max/min entries per node.
    using Tree = boost::geometry::index::rtree<std::shared_ptr<const PointAnnotationImpl>, boost::geometry::index::rstar<16, 4>>;

    PointAnnotationImpl(const AnnotationID, const PointAnnotation&);

    // Appends this point as a feature to the given tile layer
    // (implemented in point_annotation_impl.cpp).
    void updateLayer(const TileID&, AnnotationTileLayer&) const;

    const AnnotationID id;
    const PointAnnotation point;
};

} // namespace mbgl

// Tell Boost Geometry how to access a std::shared_ptr<mbgl::PointAnnotation> object.
namespace boost {
namespace geometry {
namespace index {

template <>
struct indexable<std::shared_ptr<const mbgl::PointAnnotationImpl>> {
    using result_type = const mbgl::LatLng&;
    inline const mbgl::LatLng& operator()(const std::shared_ptr<const mbgl::PointAnnotationImpl>& v) const {
        return v->point.position;
    }
};

} // end namespace index
} // end namespace geometry
} // end namespace boost

#endif
<file_sep>/src/mbgl/tile/geometry_tile.hpp
#ifndef MBGL_MAP_GEOMETRY_TILE
#define MBGL_MAP_GEOMETRY_TILE

#include <mapbox/variant.hpp>
#include <mbgl/style/value.hpp>
#include <mbgl/util/chrono.hpp>
#include <mbgl/util/ptr.hpp>
#include <mbgl/util/vec.hpp>
#include <mbgl/util/noncopyable.hpp>
#include <mbgl/util/optional.hpp>

#include <cstdint>
#include <string>
#include <vector>
#include <functional>

namespace mbgl {

// Geometry kind of a tile feature; the numeric values match the Mapbox
// Vector Tile specification's GeomType.
enum class FeatureType : uint8_t {
    Unknown = 0,
    Point = 1,
    LineString = 2,
    Polygon = 3
};

// Normalized vector tile coordinates.
// Each geometry coordinate represents a point in a bidimensional space,
// varying from -V...0...+V, where V is the maximum extent applicable.
using GeometryCoordinate = vec2<int16_t>;
using GeometryCoordinates = std::vector<GeometryCoordinate>;
using GeometryCollection = std::vector<GeometryCoordinates>;

// Abstract interface for one feature of a (vector or synthetic) tile.
class GeometryTileFeature : private util::noncopyable {
public:
    static const uint32_t defaultExtent = 4096;
    virtual ~GeometryTileFeature() = default;
    virtual FeatureType getType() const = 0;
    virtual optional<Value> getValue(const std::string& key) const = 0;
    virtual GeometryCollection getGeometries() const = 0;
    virtual uint32_t getExtent() const { return defaultExtent; }
};

// Abstract interface for one layer of a tile: an indexed feature collection.
class GeometryTileLayer : private util::noncopyable {
public:
    virtual ~GeometryTileLayer() = default;
    virtual std::size_t featureCount() const = 0;
    virtual util::ptr<const GeometryTileFeature> getFeature(std::size_t) const = 0;
};

// Abstract interface for a whole tile: named layers.
class GeometryTile : private util::noncopyable {
public:
    virtual ~GeometryTile() = default;
    virtual util::ptr<GeometryTileLayer> getLayer(const std::string&) const = 0;
};

class FileRequest;

// Watches a single tile for load/update/error events.
class GeometryTileMonitor : private util::noncopyable {
public:
    virtual ~GeometryTileMonitor() = default;

    using Callback = std::function<void (std::exception_ptr,
                                         std::unique_ptr<GeometryTile>,
                                         optional<SystemTimePoint> modified,
                                         optional<SystemTimePoint> expires)>;
    /*
     * Monitor the tile held by this object for changes. When the tile is loaded for the first time,
     * or updates, the callback is executed. If an error occurs, the first parameter will be set.
     * Otherwise it will be null. If there is no data for the requested tile, the second parameter
     * will be null.
     *
     * To cease monitoring, release the returned Request.
     */
    virtual std::unique_ptr<FileRequest> monitorTile(const Callback&) = 0;
};

// Non-owning view used to pull property values out of a feature
// (e.g. for filter evaluation); `feature` must outlive the extractor.
class GeometryTileFeatureExtractor {
public:
    GeometryTileFeatureExtractor(const GeometryTileFeature& feature_)
        : feature(feature_) {}

    optional<Value> getValue(const std::string& key) const;

private:
    const GeometryTileFeature& feature;
};

} // namespace mbgl

#endif
<file_sep>/src/mbgl/text/placement_config.hpp
#ifndef MBGL_TEXT_PLACEMENT_CONFIG
#define MBGL_TEXT_PLACEMENT_CONFIG

namespace mbgl {

// Plain value type holding the angle, pitch, and debug flag a symbol
// placement pass was computed with; equality compares all three fields.
class PlacementConfig {
public:
    PlacementConfig(float angle_ = 0, float pitch_ = 0, bool debug_ = false)
        : angle(angle_), pitch(pitch_), debug(debug_) {
    }

    bool operator==(const PlacementConfig& rhs) const {
        return debug == rhs.debug && angle == rhs.angle && pitch == rhs.pitch;
    }

    bool operator!=(const PlacementConfig& rhs) const {
        return !(*this == rhs);
    }

public:
    float angle;
    float pitch;
    bool debug;
};

} // namespace mbgl

#endif
<file_sep>/platform/linux/scripts/coveralls.sh
#!/usr/bin/env bash

# Builds the coverage target and reports results to Coveralls.
# Abort on the first failing command, including failures inside pipelines.
set -e
set -o pipefail

# Shared Linux build environment (defines JOBS and the mapbox_time helper).
source ./platform/linux/scripts/setup.sh

################################################################################
# Coveralls
################################################################################

# mapbox_time runs the command under a named timing measurement.
mapbox_time "make_coveralls" \
make coveralls -j${JOBS}
<file_sep>/test/fixtures/util.hpp
#ifndef MBGL_TEST_UTIL
#define MBGL_TEST_UTIL

#include <mbgl/util/image.hpp>
#include <mbgl/util/chrono.hpp>

#include <cstdint>

#include <gtest/gtest.h>

// Declares a static guard object named `name` that must have finish() called
// exactly once before it is destroyed; a missed or repeated finish() fails
// the test.
#define SCOPED_TEST(name) \
    static class name { \
        bool completed = false; \
    public: \
        void finish() { EXPECT_FALSE(completed) << #name " was already completed."; completed = true; } \
        ~name() { if (!completed) ADD_FAILURE() << #name " didn't complete."; } \
    } name;

namespace mbgl {

class Map;

namespace test {

// Launches the helper process `executable` (presumably the test fixture
// server — confirm at call sites); it is torn down in the destructor.
class Server {
public:
    Server(const char* executable);
    ~Server();

private:
    int fd = -1;
};

// CRC-64 checksums used for cheap content comparisons in tests.
uint64_t crc64(const char*, size_t);
uint64_t crc64(const std::string&);
uint64_t crc64(const PremultipliedImage&);

// Renders the map and returns the resulting pixels.
PremultipliedImage render(Map&);

// Compares `actual` against the fixture image stored under `base`, allowing
// the given image-level and per-pixel tolerances (0 = exact match).
void checkImage(const std::string& base,
                const PremultipliedImage& actual,
                double imageThreshold = 0,
                double pixelThreshold = 0);

}
}

#endif
<file_sep>/platform/android/INSTALL_LINUX.md
# Developing for Android on Linux
Install the build dependencies:
apt-get install -y make git build-essential automake \
libtool make cmake pkg-config lib32stdc++6 lib32z1
Install [Oracle JDK 7 (requires license agreement)](http://www.oracle.com/technetwork/java/javase/downloads/jdk7-downloads-1880260.html)
export JAVA_HOME="/dir/to/jdk1.7.0_71"
Install the Android SDK. We recommend doing this by way of [Android Studio](https://developer.android.com/sdk/installing/studio.html).
export ANDROID_HOME="/dir/to/android-sdk-linux"
In the Android SDK Manager also select and install "Android Support Repository" and "Android Support Library" from "Extras":

## Setting Mapbox Access Token
_The demo applications use Mapbox vector tiles, which require a Mapbox account and API access token. Obtain an access token on the [Mapbox account page](https://www.mapbox.com/studio/account/tokens/)._
Gradle will take the value of the `MAPBOX_ACCESS_TOKEN` environment variable and save it to `MapboxGLAndroidSDKTestApp/src/main/res/values/developer-config.xml`, where the app will read it from. Otherwise, you can edit `developer-config.xml` and add the value manually as `mapbox_access_token`.
## Building
Run:
make android
You can then open `android` in Android Studio via "Import project (Eclipse ADT, Gradle, etc.)".
**Next: get your app [running on a hardware Android Device](docs/ANDROID_DEVICE.md) or [simulator](docs/ANDROID_SIMULATOR.md)**
<file_sep>/src/mbgl/annotation/shape_annotation_impl.hpp
#ifndef MBGL_SHAPE_ANNOTATION_IMPL
#define MBGL_SHAPE_ANNOTATION_IMPL

#include <mapbox/geojsonvt.hpp>

#include <mbgl/annotation/annotation.hpp>
#include <mbgl/annotation/shape_annotation.hpp>
#include <mbgl/util/geo.hpp>

#include <memory>
#include <string>
#include <map>

namespace mbgl {

class Style;
class AnnotationTile;

// Internal representation of a shape annotation. Geometry is tiled with
// geojson-vt (the shapeTiler member) and written into annotation tiles.
class ShapeAnnotationImpl {
public:
    using Map = std::map<AnnotationID, std::unique_ptr<ShapeAnnotationImpl>>;

    ShapeAnnotationImpl(const AnnotationID, const ShapeAnnotation&, const uint8_t maxZoom);

    // Presumably installs/refreshes the style layer drawing this shape —
    // confirm against the .cpp.
    void updateStyle(Style&);
    // Writes this shape's geometry for the given tile into the tile.
    void updateTile(const TileID&, AnnotationTile&);

    const AnnotationID id;
    const std::string layerID;
    const ShapeAnnotation shape;

private:
    const uint8_t maxZoom;
    mapbox::geojsonvt::ProjectedFeatureType type;
    std::unique_ptr<mapbox::geojsonvt::GeoJSONVT> shapeTiler;
};

} // namespace mbgl

#endif
<file_sep>/src/mbgl/style/style.hpp
#ifndef MBGL_STYLE_STYLE
#define MBGL_STYLE_STYLE

#include <mbgl/style/zoom_history.hpp>
#include <mbgl/source/source.hpp>
#include <mbgl/text/glyph_store.hpp>
#include <mbgl/sprite/sprite_store.hpp>
#include <mbgl/util/noncopyable.hpp>
#include <mbgl/util/chrono.hpp>
#include <mbgl/util/worker.hpp>
#include <mbgl/util/optional.hpp>

#include <cstdint>
#include <string>
#include <vector>

namespace mbgl {

class MapData;
class FileSource;
class GlyphAtlas;
class GlyphStore;
class SpriteStore;
class SpriteAtlas;
class LineAtlas;
class StyleLayer;
class TransformState;
class Tile;
class Bucket;

namespace gl { class TexturePool; }

// One renderable unit: a style layer plus (optionally) the tile/bucket it
// draws from. tile/bucket are raw pointers — presumably non-owning views
// into state owned elsewhere; see getRenderData().
struct RenderItem {
    inline RenderItem(const StyleLayer& layer_,
                      const Tile* tile_ = nullptr,
                      Bucket* bucket_ = nullptr)
        : tile(tile_), bucket(bucket_), layer(layer_) {
    }

    const Tile* const tile;
    Bucket* const bucket;
    const StyleLayer& layer;
};

// Everything the painter needs for one frame, with `order` in draw order.
struct RenderData {
    Color backgroundColor = {{ 0, 0, 0, 0 }};
    std::set<Source*> sources;
    std::vector<RenderItem> order;
};

// Owns the parsed map style — sources, layers, and the glyph/sprite/line
// stores — and observes its constituents, rolling their events up to a
// single Observer.
class Style : public GlyphStore::Observer,
              public SpriteStore::Observer,
              public Source::Observer,
              public util::noncopyable {
public:
    Style(MapData&, FileSource&);
    ~Style();

    class Observer : public GlyphStore::Observer,
                     public SpriteStore::Observer,
                     public Source::Observer {
    public:
        /**
         * In addition to the individual glyph, sprite, and source events, the
         * following "rollup" events are provided for convenience. They are
         * strictly additive; e.g. when a source is loaded, both `onSourceLoaded`
         * and `onResourceLoaded` will be called.
         */
        virtual void onResourceLoaded() {};
        virtual void onResourceError(std::exception_ptr) {};
    };

    // Parses the style JSON; `base` is presumably the base URL for resolving
    // relative references — confirm against the implementation.
    void setJSON(const std::string& data, const std::string& base);

    void setObserver(Observer*);

    bool isLoaded() const;

    // Fetch the tiles needed by the current viewport and emit a signal when
    // a tile is ready so observers can render the tile.
    void update(const TransformState&, gl::TexturePool&);

    void cascade();
    void recalculate(float z);

    bool hasTransitions() const;

    std::exception_ptr getLastError() const {
        return lastError;
    }

    Source* getSource(const std::string& id) const;
    void addSource(std::unique_ptr<Source>);

    std::vector<std::unique_ptr<StyleLayer>> getLayers() const;
    StyleLayer* getLayer(const std::string& id) const;
    // Appends the layer, or inserts it before `beforeLayerID` when given.
    void addLayer(std::unique_ptr<StyleLayer>,
                  optional<std::string> beforeLayerID = {});
    void removeLayer(const std::string& layerID);

    RenderData getRenderData() const;

    void setSourceTileCacheSize(size_t);
    void onLowMemory();

    void dumpDebugLogs() const;

    MapData& data;
    FileSource& fileSource;
    std::unique_ptr<GlyphStore> glyphStore;
    std::unique_ptr<GlyphAtlas> glyphAtlas;
    std::unique_ptr<SpriteStore> spriteStore;
    std::unique_ptr<SpriteAtlas> spriteAtlas;
    std::unique_ptr<LineAtlas> lineAtlas;

private:
    std::vector<std::unique_ptr<Source>> sources;
    std::vector<std::unique_ptr<StyleLayer>> layers;

    std::vector<std::unique_ptr<StyleLayer>>::const_iterator findLayer(const std::string& layerID) const;

    // GlyphStore::Observer implementation.
    void onGlyphsLoaded(const std::string& fontStack, const GlyphRange&) override;
    void onGlyphsError(const std::string& fontStack, const GlyphRange&, std::exception_ptr) override;

    // SpriteStore::Observer implementation.
    void onSpriteLoaded() override;
    void onSpriteError(std::exception_ptr) override;

    // Source::Observer implementation.
    void onSourceLoaded(Source&) override;
    void onSourceError(Source&, std::exception_ptr) override;
    void onTileLoaded(Source&, const TileID&, bool isNewTile) override;
    void onTileError(Source&, const TileID&, std::exception_ptr) override;
    void onPlacementRedone() override;

    bool shouldReparsePartialTiles = false;

    // Default no-op observer so `observer` is never null.
    Observer nullObserver;
    Observer* observer = &nullObserver;

    std::exception_ptr lastError;

    ZoomHistory zoomHistory;
    bool hasPendingTransitions = false;

public:
    bool loaded = false;
    Worker workers;
};

} // namespace mbgl

#endif
| 4243d487748a729bff33f621d06766171a02c02e | [
"Markdown",
"Makefile",
"Dockerfile",
"INI",
"Java",
"Python",
"C++",
"Shell"
] | 154 | C++ | robertg/mapbox-gl-native | eed429552c34197288a187581cb97bf6fe77f06f | a96df4aa12683957378253a58cabbe2a10bfc42b |
refs/heads/master | <repo_name>marichellebisuna/Hooks-featured-products-ProductConsumer<file_sep>/src/App.js
import React from 'react';
import 'bootstrap/dist/css/bootstrap.min.css';
import './App.css';
import FeaturedProduct from './HomePage/FeaturedProduct';
function App() {
return (
<div className="App">
<FeaturedProduct />
</div>
);
}
export default App;
<file_sep>/src/context/context.js
import React, { Component } from 'react';
import { items } from '../data/productData';
const ProductContext = React.createContext();
class ProductProvider extends Component {
state = {
featuredProducts: []
};
componentDidMount() {
this.setProducts(items);
}
//set products
setProducts = (products) => {
let storeProducts = products.map((item) => {
const { id } = item.sys;
// const image = item.fields.image.map((image) => {
// image.fields.file.url;
// });
const image = item.fields.image.fields.file.url;
const product = { id, ...item.fields, image };
return product;
});
//featured products
let featuredProducts = storeProducts.filter((item) => item.featured === true);
this.setState({
featuredProducts
});
};
addToCart = (id) => {
console.log(`add to cart ${id}`);
};
setSingleProduct = (id) => {
console.log(`set single product ${id}`);
};
render() {
return (
<ProductContext.Provider
value={{ ...this.state, addToCart: this.addToCart, setSingleProduct: this.setSingleProduct }}
>
{this.props.children}
</ProductContext.Provider>
);
}
}
// Expose the consumer alongside the provider so components can subscribe to
// product state via render props.
const ProductConsumer = ProductContext.Consumer;
export { ProductContext, ProductProvider, ProductConsumer };
<file_sep>/src/HomePage/FeaturedProduct.js
import React from 'react';
import Products from '../components/Products';
import { ProductConsumer } from '../context/context';
export default function FeaturedProduct() {
return (
<section className="py-5">
<div className="container">
<h1>Featured Products</h1>
<div className="row my-5">
<ProductConsumer>
{(value) => {
const { featuredProducts } = value;
return featuredProducts.map((product) => {
return <Products key={product.id} product={product} />;
});
}}
</ProductConsumer>
</div>
</div>
</section>
);
}
| 7f27dd4ced3a1be22ab788f9960662b497548cf9 | [
"JavaScript"
] | 3 | JavaScript | marichellebisuna/Hooks-featured-products-ProductConsumer | dfaf322031b7582d476fc39859e8e29fe654323b | 82765d17416fb4a36f0653369b5408005c8f29a1 |
refs/heads/master | <file_sep>import { Component, OnInit } from '@angular/core';
import { ProjectsService } from '../services/projects.service';
@Component({
  selector: 'app-projects',
  templateUrl: './projects.component.html',
  styleUrls: ['./projects.component.css']
})
export class ProjectsComponent implements OnInit {
  /** Raw projects payload rendered by the template. */
  projectsArray: any;

  // Renamed the injected field from the copy-pasted "membersService" so the
  // name matches its actual type, ProjectsService (private → safe rename).
  constructor(private projectsService: ProjectsService) { }

  ngOnInit() {
    this.getProjectsData();
  }

  /** Load all projects from the API and cache them on the component. */
  getProjectsData() {
    this.projectsService.getProjects().subscribe(res => {
      this.projectsArray = res;
    });
  }
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { PartnersService } from '../services/partners.service';
@Component({
  selector: 'app-partners',
  templateUrl: './partners.component.html',
  styleUrls: ['./partners.component.css']
})
export class PartnersComponent implements OnInit {
  /** Partner list rendered by the template. */
  PartnersArray: any;

  constructor(private partnersService: PartnersService) { }

  ngOnInit() {
    this.getPartnersData();
  }

  /** Fetch all partners once and cache them on the component. */
  getPartnersData() {
    this.partnersService
      .getPartners()
      .subscribe(data => {
        this.PartnersArray = data;
      });
  }
}
<file_sep>import { Injectable } from '@angular/core';
import { HttpClient } from '@angular/common/http';
@Injectable({
  providedIn: 'root'
})
export class PartnersService {
  /** Endpoint serving the list of partners. */
  private readonly partnersUrl = 'http://admin.enactusisetch.tn/api/partners';

  constructor(private httpclient: HttpClient) { }

  /** GET all partners from the backend API. */
  getPartners() {
    return this.httpclient.get(this.partnersUrl);
  }
}
<file_sep>import { Component} from '@angular/core';
import { MembersService } from '../services/members.service';
@Component({
  selector: 'app-about',
  templateUrl: './about.component.html',
  styleUrls: ['./about.component.css']
})
export class AboutComponent {
  /** Raw members payload rendered by the template. */
  membersArray: any;

  constructor(private membersService: MembersService) {
  }

  ngOnInit() {
    this.getMembersData();
  }

  /** Load all members from the API and cache them on the component. */
  getMembersData() {
    // Removed the redundant wrapping parentheses around the observable.
    this.membersService.getMembers().subscribe(res => {
      this.membersArray = res;
    });
  }

  /** True when the given role string is exactly "member" (strict equality). */
  verif(ch: string) {
    return ch === "member";
  }
}
| 1ef86493fc13459d41650aed2438ab61de2b3cdb | [
"TypeScript"
] | 4 | TypeScript | Arfaoui-ghaith/EnactusWebsiteFrontEnd | 1e5612d0e859769f62456e0a24d8c96e3443c7f5 | 8ae6e90ead44753db50db7915b7a7e0c63ac4fb9 |
refs/heads/main | <file_sep>def main(:
print('Hello my name is <NAME>')
main() | 786d1404f3b40779766a41f7ded8d735a2bb9583 | [
"Python"
] | 1 | Python | Cmcneil90/fintech | c873e7f11256c436779ce240a516c0e7d66d29b3 | 776d3672a2678f9af9d3e94c0e2429a2e079ed55 |
refs/heads/master | <file_sep>package com.mycompany;
/**
 * Thrown when an account lacks sufficient funds to complete an operation.
 */
public class FundsNotAvailableException extends Exception {

	private static final long serialVersionUID = 1L;

	/**
	 * @param message human-readable description of the failed operation
	 */
	public FundsNotAvailableException(String message) {
		super(message);
	}

	/**
	 * Added cause-preserving constructor so callers wrapping a lower-level
	 * failure do not lose the original stack trace (backward compatible).
	 *
	 * @param message human-readable description of the failed operation
	 * @param cause   underlying exception, preserved for diagnostics
	 */
	public FundsNotAvailableException(String message, Throwable cause) {
		super(message, cause);
	}
}
<file_sep>DavisbaseJavaExercises
======================
https://www.codeship.io/projects/ed2f6cf0-9659-0131-c1cd-0af10008a5cf/status
<file_sep>import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import org.junit.Ignore;
import org.junit.Test;
public class QueueTest {
@Test
public void testEnqueue() {
MyQueue myQueue = new MyQueue();
String originalItem ="Kevin";
myQueue.enqueue(originalItem);
String returnedItem = myQueue.peek();
assertEquals(originalItem,returnedItem);
}
@Test
@Ignore
public void testEnqueueNull() {
//not implemented
fail("test not implemented");
}
}
//Enqueue
//Dequeue
//IsEmpty
//Peek
<file_sep>package com.davisbase.genepets;
public class PetStore {
}
<file_sep><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.davisbase</groupId>
<artifactId>agileEngineeringCucumber</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>Agile Engineering Cucumber</name>
<build>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.0.2</version>
<configuration>
<source>1.6</source>
<target>1.6</target>
<encoding>UTF-8</encoding>
</configuration>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>info.cukes</groupId>
<artifactId>cucumber-core</artifactId>
<version>1.1.8</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>info.cukes</groupId>
<artifactId>cucumber-java</artifactId>
<version>1.1.8</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>info.cukes</groupId>
<artifactId>cucumber-spring</artifactId>
<version>1.1.8</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>3.1.0.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>3.2.4.RELEASE</version>
</dependency>
<dependency>
<groupId>info.cukes</groupId>
<artifactId>cucumber-html</artifactId>
<version>0.2.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>info.cukes</groupId>
<artifactId>cucumber-jvm-deps</artifactId>
<version>1.0.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>info.cukes</groupId>
<artifactId>cucumber-junit</artifactId>
<version>1.1.8</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>
<file_sep>package com.davisbase.genepets;
import java.math.BigDecimal;
import junit.framework.Assert;
import cucumber.api.java.Before;
import cucumber.api.java.en.Given;
import cucumber.api.java.en.Then;
import cucumber.api.java.en.When;
/**
 * Cucumber step definitions for the pet-pricing feature.
 *
 * NOTE(review): the PetStore calls below are still commented out, so
 * {@code actualPrice} is never assigned and the Then step compares against
 * null — these are skeleton steps awaiting a PetStore implementation.
 */
public class PricingStepDefinitions {
private PetStore petStore;
private BigDecimal actualPrice;
// Fresh store per scenario so state cannot leak between scenarios.
@Before
public void setUpPetStoreConnection() {
petStore = new PetStore();
}
@Given("^a (.*) costs (.*)$")
public void a_pet_costs_(String petName, BigDecimal price) throws Throwable {
// petStore.addPet(petName, price);
}
@When("^I enter (.*) and search for price$")
public void I_enter_petname_and_search_for_price(String petName) throws Throwable {
// actualPrice = petStore.search(petName);
}
@Then("^the result should be (.*)$")
public void the_result_should_be_(BigDecimal price) throws Throwable {
Assert.assertEquals(price, actualPrice);
}
}
<file_sep>package com.mycompany;
import java.math.BigDecimal;
public class OrderResult {
private int productId;
private int accountId;
private BigDecimal amount;
public int getProductId() {
return productId;
}
public void setProductId(int productId) {
this.productId = productId;
}
public int getAccountId() {
return accountId;
}
public void setAccountId(int accountId) {
this.accountId = accountId;
}
public BigDecimal getAmount() {
return amount;
}
public void setAmount(BigDecimal amount) {
this.amount = amount;
}
}
<file_sep>package primes;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.Arrays;
import org.junit.*;
public class PrimesUnitTest {
private int[] knownPrimes = new int[] { 2, 3, 5, 7, 11, 13, 17, 19, 23, 29 };
@Test
public void Zero() {
int[] primes = Primes.generateArray(0);
assertEquals(0, primes.length);
}
@Test
public void ListZero() {
ArrayList<Integer> primes = Primes.generate(0);
assertEquals(0, primes.size());
}
@Test
public void Single() {
int[] primes = Primes.generateArray(2);
assertEquals(1, primes.length);
assertEquals(2, primes[0]);
}
@Test
public void ListSingle() {
ArrayList<Integer> primes = Primes.generate(2);
assertEquals(1, primes.size());
assertTrue(primes.contains(2));
}
@Test
public void Prime() {
int[] centArray = Primes.generateArray(100);
assertEquals(25, centArray.length);
assertEquals(97, centArray[24]);
}
@Test
public void ListPrime() {
ArrayList<Integer> centList = Primes.generate(100);
assertEquals(25, centList.size());
assertEquals(Integer.valueOf(97), centList.get(24));
}
@Test
public void Basic() {
int[] primes = Primes
.generateArray(knownPrimes[knownPrimes.length - 1]);
assertEquals(knownPrimes.length, primes.length);
int i = 0;
for (int prime : primes) {
assertEquals(knownPrimes[i++], prime);
}
}
@Test
public void ListBasic() {
ArrayList<Integer> primes = Primes
.generate(knownPrimes[knownPrimes.length - 1]);
assertEquals(knownPrimes.length, primes.size());
int i = 0;
for (Integer prime : primes) {
assertEquals(Integer.valueOf(knownPrimes[i++]), prime); // explicit
// boxing
// resolves
// ambiguity
}
}
@Test
public void Lots() {
int bound = 10101;
int[] primes = Primes.generateArray(bound);
for (int i = 0; i < primes.length; i++) {
int prime = primes[i];
assertTrue("is prime", isPrime(prime));
}
for (int i = 0; i < primes.length; i++) {
int prime = primes[i];
if (isPrime(prime))
assertTrue("contains primes", contains(prime, primes));
else
assertFalse("doesn' t contain composites",
contains(prime, primes));
}
}
@Test
public void ListLots() {
int bound = 10101;
ArrayList<Integer> primes = Primes.generate(bound);
for (Integer prime : primes) {
assertTrue("is prime", isPrime(prime));
}
for (Integer prime : primes) {
if (isPrime(prime))
assertTrue("contains primes", primes.contains(prime));
else
assertFalse("doesn' t contain composites",
primes.contains(prime));
}
}
private static boolean isPrime(int n) {
if (n < 2)
return false;
boolean result = true;
double x = Math.sqrt(n);
int i = 2;
while (result && i <= x) {
result = (0 != n % i);
i += 1;
}
return result;
}
private static boolean contains(int value, int[] primes) {
return Arrays.binarySearch(primes, value) != -1;
}
}
| 2b3d029c3c1213bd5aeee5d47b42cbfe78d4051c | [
"Markdown",
"Java",
"Maven POM"
] | 8 | Java | justinelliss/DavisbaseJavaExercises | 1a0b9c23bb12cf03fda25fca5508ab753c027471 | 5ae9fc0e43d2abcd4a3500d2fe215adde0876f65 |
refs/heads/master | <file_sep>//create an empty array on startup
// History of previously-viewed anime; starts empty on page load.
let animeHistory = []

// Jikan REST API endpoints.
const API_BASE = "https://api.jikan.me/"
const API_ANIME = API_BASE + "anime/"
/**
* generate anime tag from a Javascript Object that containt the anime information
*/
function buildAnimeMarkup(anime) {
return `<div class="anime_item"><img class='anime_image' src=${anime.image_url} />
<h2 class='anime_name'>${anime.title}</h2>
<p class='anime_description'>${anime.premiered}</p></div>`
}<file_sep><?php
$host="localhost";
$user="root";
$password="";
$db = "distress_call";
$con = mysqli_connect($host,$user,$password);
mysqli_select_db($con,$db);
if(isset($_POST['username'])){
$uname=$_POST['username'];
$password=$_POST['password'];
$sql = "Select * from loginForm where user='".$uname."' AND pass='".$password."'
limit 1 ";
$result=mysqli_query($con,$sql);
if(mysqli_num_rows($result)==1){
echo"You have successfully logged in";
exit();
}
else{
echo"You have Entered Incorrect Password";
exit();
}
}
?>
<!DOCTYPE html>
<html lang="en" dir="ltr">
<head>
<meta charset="utf-8">
<title>Form in Design</title>
</head>
<body>
<div class="container">
<form class="" action="#" method="POST">
<div class="form_input">
<input type="text" name="username" placeholder="Enter your username" >
</div>
<div class="form-input">
<input type="<PASSWORD>" name="<PASSWORD>" placeholder="<PASSWORD>" >
</div>
<input type="submit" name="submit" value="LOGIN" class="btn-login">
</form>
</div>
</body>
</html>
<file_sep><?php
include_once('connection.php');

// Newest distress reports first; page auto-refreshes (meta + JS below).
$query = "Select *from distress order by id desc ";
$result = mysqli_query($con, $query);
?>
<!DOCTYPE html>
<html lang="en" dir="ltr">
<head>
<meta charset="utf-8">
<meta http-equiv="refresh" content="10; url=admin.php">
<title>Fetch Data from Database</title>
<!-- Compiled and minified CSS -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/css/materialize.min.css">
<!-- Compiled and minified JavaScript -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/js/materialize.min.js"></script>
</head>
<body>
<div class="container">
<table class="centered responsive-table">
<thead>
<tr><th colspan="4">Student Record</th></tr>
</thead>
<tr>
<thead>
<th>ID</th>
<th>Latitude</th>
<th>Longitude</th>
<th>Map_URL</th>
</thead>
</tr>
<?php
// Escape every DB-sourced value before echoing to prevent stored XSS.
while ($rows = mysqli_fetch_assoc($result)) {
?>
<tr>
<td><?php echo htmlspecialchars($rows['ID']); ?></td>
<td><?php echo htmlspecialchars($rows['lat']); ?></td>
<td><?php echo htmlspecialchars($rows['lon']); ?></td>
<?php $url = htmlspecialchars($rows['map_url'], ENT_QUOTES);
echo "<td> <a href='" . $url . "'>" . $url . "</a> </td>"; ?>
</tr><!-- fixed: each row now closes its <tr> -->
<?php
}
?>
</table>
</div>
<script>
// Fixed: "setTImeout" is undefined — JavaScript identifiers are case-sensitive.
setTimeout(function(){location.reload();},500)
</script>
</body>
</html>
<file_sep><?php
// Shared MySQL connection for the distress_call application; defines $con.
// NOTE(review): connection/select errors are silently ignored here — callers
// assume $con is valid. Consider checking mysqli_connect_error().
$host='localhost';
$user='root';
$password='';
$db = 'distress_call';
$con = mysqli_connect($host,$user,$password);
mysqli_select_db($con,$db);
?>
<file_sep><?php
$host = 'localhost';
$user = 'root';
$password = '';
$db = 'distress_call';

$con = mysqli_connect($host, $user, $password);
mysqli_select_db($con, $db);

if (isset($_POST['submit'])) {
    // Hard-coded sample values inserted on submit (original behavior kept).
    $lat = 2332.343;
    $lon = 234.4874;
    $map_url = 'http://hope.com';

    // Parameterized insert; also keeps the query safe if these values ever
    // start coming from user input.
    $stmt = mysqli_prepare($con, "INSERT INTO distress(lat, lon, map_url) VALUES (?, ?, ?)");
    mysqli_stmt_bind_param($stmt, 'dds', $lat, $lon, $map_url);
    $ok = mysqli_stmt_execute($stmt);

    // Fixed: INSERT yields a boolean, so the old mysqli_num_rows() call on it
    // was always invalid; use affected rows to detect a successful insert.
    if ($ok && mysqli_stmt_affected_rows($stmt) == 1) {
        echo"Data inserted Successfully";
        exit();
    } else {
        echo"Data not inserted";
        exit();
    }
}
?>
<!DOCTYPE html>
<html lang="en" dir="ltr">
<head>
<meta charset="utf-8">
<title></title>
</head>
<body>
<form class="" action="#" method="POST">
<input type="submit" name="submit" value="submit">
</form>
</body>
</html>
| c7685264f40b7ef680878bd25a5463d7f92c61e8 | [
"JavaScript",
"PHP"
] | 5 | JavaScript | sirnobys/Distress-Call | 98c0e4abb93bc47c17967dc6812b4ca21fad5881 | 000b04711302e3075b40113a03ff619c0cd43716 |
refs/heads/master | <repo_name>davidsteinsland/cpp-concurrency<file_sep>/thread.h
#ifndef THREAD_H
#define THREAD_H
#include <cstring>
namespace concurrency
{
// Minimal portable thread wrapper; start()/join()/~thread() are defined in
// thread.cpp for the Win32 and pthread backends.
class thread
{
protected:
void* thread_id;                    // native handle/id storage, written by start()
bool is_active;                     // true between a successful start() and run() returning
void (*thread_handler) (thread*);   // user callback executed on the new thread
void* args;                         // opaque user argument returned by get_args()
public:
// Fixed: both constructors previously left thread_id and is_active (and,
// for the single-argument form, args) uninitialized, so get_args() and
// active() returned indeterminate values before start().
thread(void (*handle) (thread *))
{
thread_handler = handle;
args = NULL;
thread_id = NULL;
is_active = false;
}
thread(void (*handle) (thread *), void* arg)
{
thread_handler = handle;
args = arg;
thread_id = NULL;
is_active = false;
}
~thread();
// Opaque argument supplied at construction time (may be NULL).
void* get_args()
{
return args;
}
// Invoked on the worker thread: run the user callback, then mark inactive.
void run()
{
if (thread_handler != NULL)
{
thread_handler(this);
}
is_active = false;
}
// Trampoline with the signature the native thread APIs expect.
static void* run_helper(void*a)
{
((thread*)a)->run();
return 0;
}
int start();   // returns 0 on success, nonzero/-1 on failure (see thread.cpp)
void join();
bool active()
{
return is_active;
}
};
};
#endif<file_sep>/mutex.h
#ifndef MUTEX_H
#define MUTEX_H
#ifdef _WIN32
#include <windows.h>
typedef HANDLE mutex_type;
#else
#include <pthread.h>
typedef pthread_mutex_t mutex_type;
#endif
namespace concurrency
{
// Thin wrapper over the native mutex type (HANDLE on Win32, pthread_mutex_t
// elsewhere). Non-recursive; see mutex.cpp for the implementations.
class mutex
{
private:
mutex_type mutex_var;
public:
mutex();
~mutex();
void lock();
void unlock();
// Exposes the underlying native mutex (needed by condition_variable::wait).
mutex_type& get_mutex();
};
// RAII guard: locks the given mutex on construction, unlocks on destruction.
class scoped_lock
{
private:
mutex& mutex_var;
public:
scoped_lock(mutex&);
~scoped_lock();
};
};
#endif<file_sep>/condition_variable.cpp
#include "condition_variable.h"
// Initialize the native condition primitive: two Win32 events (auto-reset
// for notify_one, manual-reset for broadcast) or a pthread condvar.
concurrency::condition_variable::condition_variable()
{
#ifdef _WIN32
	cond_var.signal = CreateEvent(NULL, false, false, NULL);
	cond_var.broadcast = CreateEvent(NULL, true, false, NULL);
#else
	pthread_cond_init (&cond_var,NULL);
#endif
}
concurrency::condition_variable::~condition_variable()
{
#ifdef _WIN32
	// NOTE(review): the Win32 branch is empty, so the event handles created in
	// the constructor are never closed (handle leak) — CloseHandle is missing.
#else
	pthread_cond_destroy(&cond_var);
#endif
}
// Block until notified; the caller must hold m, which is released while
// waiting and re-acquired before returning.
void concurrency::condition_variable::wait(mutex&m)
{
#ifdef _WIN32
	// NOTE(review): unlock-then-wait is not atomic here, so a notification
	// arriving between unlock() and the wait can be missed (lost wakeup).
	m.unlock();
	HANDLE handles[] = {cond_var.signal, cond_var.broadcast};
	WaitForMultipleObjects(2, handles, false, INFINITE);
	m.lock();
#else
	// Fixed: the original copied the pthread_mutex_t by value
	// (`pthread_mutex_t mtx = m.get_mutex();`), so pthread_cond_wait
	// operated on a local copy of the locked mutex — undefined behavior.
	// Bind a reference so the condvar uses the caller's actual mutex.
	pthread_mutex_t& mtx = m.get_mutex();
	pthread_cond_wait (&cond_var, &mtx);
#endif
}
// Wake a single waiter (auto-reset event on Win32; pthread_cond_signal else).
void concurrency::condition_variable::notify_one()
{
#ifdef _WIN32
	SetEvent(cond_var.signal);
#else
	pthread_cond_signal (&cond_var);
#endif
}
// Wake all waiters.
void concurrency::condition_variable::notify_all()
{
#ifdef _WIN32
	// NOTE(review): unimplemented on Win32 — the broadcast event created in
	// the constructor is never signaled, so notify_all is a no-op there.
#else
	pthread_cond_broadcast (&cond_var);
#endif
}
all:
g++ -g -Wall -ansi *.cpp -mthreads -o concurrent.exe
linux:
g++ -g -Wall -ansi *.cpp -pthread -o concurrent<file_sep>/condition_variable.h
#ifndef CONDITION_VARIABLE_H
#define CONDITION_VARIABLE_H
#ifdef _WIN32
#include <windows.h>
typedef struct {HANDLE signal, broadcast;} cond_type;
#else
#include <pthread.h>
typedef pthread_cond_t cond_type;
#endif
#include "mutex.h"
namespace concurrency
{
class condition_variable
{
private:
cond_type cond_var;
public:
condition_variable();
~condition_variable();
void wait(mutex&);
void notify_one();
void notify_all();
};
};
#endif<file_sep>/test.cpp
#include "mutex.h"
#include "condition_variable.h"
#include "thread.h"
#ifdef _WIN32
#include <windows.h>
#define sleep(x) Sleep(x*1000);
#else
#include <unistd.h>
#endif
#include <iostream>
#include <queue>
// Shared state for the producer/consumer demo; `produced_nums` is guarded by
// mutex `m`, with `cond_var` signaling availability.
std::queue<int> produced_nums;
concurrency::mutex m;
concurrency::condition_variable cond_var;
// Producer thread body: pushes 0..4 under the lock, notifying after each push.
void _producer (concurrency::thread*t)
{
	std::cout << "Producer thread active" << std::endl;
	
	for (int i = 0; i < 5; ++i) {
		concurrency::scoped_lock lock(m);
		std::cout << "producing " << i << '\n';
		produced_nums.push(i);
		cond_var.notify_one();
	}
	// One extra notification after the loop; NOTE(review): consumers loop
	// forever (see _consumer), so there is no shutdown signal here.
	concurrency::scoped_lock lock(m);
	cond_var.notify_one();
}
// Consumer thread body: pops one item per second, waiting on the condvar
// while the queue is empty. Runs forever (while(1)), so join() on a consumer
// never returns.
void _consumer (concurrency::thread*t)
{
	// NOTE(review): casting void* to int truncates on 64-bit platforms; the
	// ids passed in main are small ((void*)1, (void*)2) so it works here.
	int consumer_id = (int) t->get_args();
	
	std::cout << "Consumer thread active" << std::endl;
	
	while (1)
	{
		{
			concurrency::scoped_lock lock(m);
			while (produced_nums.empty()) { // loop to avoid spurious wakeups
				cond_var.wait(m);
			}
			std::cout << "#" << consumer_id << " consuming " << produced_nums.front() << '\n';
			produced_nums.pop();
		}
		sleep(1);
	}
}
// Demo driver: one producer and two consumers sharing the queue.
// NOTE(review): consumers never terminate, so the joins below on the
// consumers (and the active() polling loop) block/spin indefinitely by design
// of this demo.
int main()
{
	std::cout << "Test started ..." << std::endl;
	
	concurrency::thread producer(_producer);
	if (producer.start() == -1)
		std::cerr << "Error whilst starting producer thread" << std::endl;
	
	sleep(1);
	
	concurrency::thread consumer(_consumer, (void*)1);
	if ( consumer.start() == -1)
		std::cerr << "Error whilst starting consumer thread" << std::endl;
	
	concurrency::thread consumer2(_consumer, (void*)2);
	if ( consumer2.start() == -1)
		std::cerr << "Error whilst starting consumer2 thread" << std::endl;
	
	producer.join();
	consumer.join();
	consumer2.join();
	
	while (producer.active() || consumer.active() || consumer2.active())
	{
		sleep(1);
	}
}
#include "thread.h"
#ifdef _WIN32
#include <windows.h>
#include <process.h>
#else
#include <pthread.h>
#endif
concurrency::thread::~thread()
{
	// NOTE(review): the thread is neither joined nor detached here; callers
	// must join() before destruction or the native thread leaks.
}

// Launch the thread; returns 0 on success, -1 (Win32) or the pthread_create
// error code otherwise.
int concurrency::thread::start()
{
	int retval = 0;
#ifdef _WIN32
	// Fixed: store the HANDLE returned by _beginthreadex (the original
	// discarded it and stored the numeric thread *id* instead), so join()
	// has a real handle to wait on.
	thread_id = (void*)_beginthreadex(NULL, 0, (unsigned int (__stdcall *)(void *))&thread::run_helper, this, 0, NULL);
	retval = (thread_id == 0) ? -1 : 0;
#else
	retval = pthread_create ((pthread_t*)&thread_id, NULL, &thread::run_helper, this);
#endif
	// Fixed: only mark active when creation actually succeeded.
	is_active = (retval == 0);
	return retval;
}

void concurrency::thread::join()
{
#ifdef _WIN32
	// Fixed: the original passed (HANDLE*)&thread_id — the *address* of the
	// member — where WaitForSingleObject expects the HANDLE value itself.
	WaitForSingleObject((HANDLE)thread_id, INFINITE);
#else
	pthread_join ( *(pthread_t*)&thread_id, NULL );
#endif
}
#include "mutex.h"
// Create the native mutex (unowned Win32 mutex / default pthread mutex).
// NOTE(review): return codes of the native calls are not checked anywhere
// in this file.
concurrency::mutex::mutex()
{
#ifdef _WIN32
	mutex_var = CreateMutex(NULL, false, NULL);
#else
	pthread_mutex_init (&mutex_var, NULL);
#endif
}

concurrency::mutex::~mutex()
{
#ifdef _WIN32
	CloseHandle(mutex_var);
#else
	pthread_mutex_destroy(&mutex_var);
#endif
}

// Block until the mutex is acquired.
void concurrency::mutex::lock()
{
#ifdef _WIN32
	WaitForSingleObject(mutex_var, INFINITE);
#else
	pthread_mutex_lock (&mutex_var);
#endif
}

void concurrency::mutex::unlock()
{
#ifdef _WIN32
	ReleaseMutex(mutex_var);
#else
	pthread_mutex_unlock(&mutex_var);
#endif
}

// Expose the native handle (used by condition_variable::wait).
mutex_type& concurrency::mutex::get_mutex()
{
	return mutex_var;
}

// RAII guard: lock on construction, unlock on destruction.
concurrency::scoped_lock::scoped_lock (mutex& m) : mutex_var(m)
{
	mutex_var.lock();
}

concurrency::scoped_lock::~scoped_lock()
{
	mutex_var.unlock();
}
"Makefile",
"C++"
] | 8 | C++ | davidsteinsland/cpp-concurrency | 8aaac5453e10baf58b1576a4425c69c61cfeaa2e | 6b0e2d41dbfe7a19d5f706b5a7387d4f016b1278 |
refs/heads/master | <repo_name>katzebue/click2home<file_sep>/views/admin.php
<!DOCTYPE html>
<html>
<head>
<meta content="text/html; charset=utf-8" http-equiv="Content-Type" />
<title><?=$page->title?></title>
<link rel="icon" href="/favicon.png" type="image/x-icon"/>
<link rel="shortcut icon" href="/favicon.png" type="image/x-icon"/>
<link href="/bootstrap/css/bootstrap.css" rel="stylesheet">
<link href="/<?=ADMIN_DIR?>/css/style.css" rel="stylesheet">
<link href="/<?=ADMIN_DIR?>/js/jquery-treeview/jquery.treeview.css" rel="stylesheet" type="text/css" />
<link href="/<?=ADMIN_DIR?>/js/jquery-ui/css/smoothness/jquery-ui.min.css" rel="stylesheet" type="text/css" />
<link href="/js/fancybox/jquery.fancybox.css" rel="stylesheet" type="text/css" />
<script type="text/javascript" src="/js/jquery-1.9.1.min.js"></script>
<script type="text/javascript" src="/bootstrap/js/bootstrap.js"></script>
<script type="text/javascript" src="/<?=ADMIN_DIR?>/js/jquery-ui/jquery-ui.min.js"></script>
<script type="text/javascript" src="/<?=ADMIN_DIR?>/js/jquery.cookie.js"></script>
<script type="text/javascript" src="/<?=ADMIN_DIR?>/js/jquery-treeview/jquery.treeview.js"></script>
<script type="text/javascript" src="/<?=ADMIN_DIR?>/ckeditor/ckeditor.js"></script>
<script type="text/javascript" src="/js/fancybox/jquery.fancybox.pack.js"></script>
<script type="text/javascript" src="<?=FileManager::getAssetsPath();?>/plupload.full.js"></script>
<script type="text/javascript" src="<?=FileManager::getAssetsPath();?>/pluploadInitializer.js"></script>
<script type="text/javascript" src="/<?=ADMIN_DIR?>/js/adminCore.js"></script>
<script type="text/javascript" src="/<?=ADMIN_DIR?>/js/script.js"></script>
</head>
<body>
<div class="container">
<div class="navbar">
<div class="navbar-inner">
<div class="container">
<a class="brand" href="/<?=$URL[1];?>"><?=SITE_NAME?></a>
<div class="nav-collapse pull-right">
<ul class="nav">
<li><a href="/<?=$URL[1];?>/logout"><i class="icon-off"></i> Выйти</a></li>
</ul>
</div>
</div>
</div>
</div>
<div class="row">
<div class="span3" style="width: 180px;">
<div class="well sidebar-nav" style="padding: 8px 0">
<ul class="nav nav-list">
<? if($_SESSION['status'] == 'superadmin') :?>
<li class="nav-header">СуперАдминистратор</li>
<? include $page->pathPart('admin.block_admin') ?>
<? endif; ?>
<li class="nav-header">Основные блоки</li>
<? include $page->pathPart('admin.block_main') ?>
</ul>
</div>
</div><!--/span-->
<div class="span9" style="width: 740px;">
<?=$page->content?>
</div><!--/span-->
</div><!--/row-->
<div id="alertsContainer"></div>
<div id="adminModal" data-backdrop="static" class="modal hide" tabindex="-1" role="dialog" aria-labelledby="adminModalLabel" aria-hidden="true">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
<h3 id="myModalLabel"></h3>
</div>
<div class="modal-body"></div>
<div class="modal-footer"></div>
</div>
</div>
</body><file_sep>/admin/module/text/index.php
<?
// Admin module: listing and routing for editable text blocks (pm_text).
$page->header = 'Управление текстовыми блоками';
$page->title .= $page->header;
?>
<h1><?=$page->header?></h1>
<?
// Route to the add/delete/edit sub-views based on query parameters.
// NOTE(review): $_GET keys are read without isset(), which raises PHP
// notices when the parameters are absent.
if ($_GET['add']) :
include($URL[2].'/'.$URL[3].'/add.php');
elseif ($_GET['delete']) :
include($URL[2].'/'.$URL[3].'/del.php');
elseif ($_GET['edit']) :
include($URL[2].'/'.$URL[3].'/edit.php');
else :
// Only superadmins may create new blocks.
if($_SESSION['status'] == 'superadmin') :?>
<p><i class="icon-plus"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?add=1">Добавить блок</a></p>
<? endif; ?>
<h3>Текстовые блоки</h3>
<table class="table">
<tr>
<th>Позиция</th>
<th>Часть текста</th>
<th></th>
</tr>
<?// list all text blocks
$sql = "SELECT * FROM `pm_text` ORDER BY `id`";
$data = Helper::fetchAssocInArray($sql);
foreach($data as $el) :?>
<tr>
<? // NOTE(review): position/text are echoed unescaped below — potential
   // stored XSS / broken markup if a block contains HTML. ?>
<td><?=$el['position'];?></td>
<td><?=mb_substr($el['text'],0,200,'UTF-8');?></td>
<td width="36">
<a class="icon-pencil" rel="tooltip" title="Редактировать" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?edit=<?=$el['id'];?>"></a>
<? if($_SESSION['status'] == 'superadmin') :?>
<a class="icon-remove" rel="tooltip" title="Удалить" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?delete=<?=$el['id'];?>" onclick="if(confirm('Удалить? Восстановление будет невозможно!')) return true; else return false;"></a>
<? endif; ?>
</td>
</tr>
<? endforeach; ?>
</table>
<? endif; ?>
<?
// Backend endpoint for file upload/delete, with product-gallery bookkeeping:
// the `item` table stores a comma-separated `files` id list and a cached
// `main_photo` path that must stay consistent with the repository contents.
// NOTE(review): FileManager/Helper/FileController semantics are defined
// elsewhere; comments below describe only what this script visibly does.
$response = new ResponseHelper();
$action = $URL[2];
switch ($action) {
case 'upload' :
$repository = $URL[3];
$fileController = new FileController();
// Store the uploaded file and echo its identifiers back to the client.
$file = $fileController->uploadFile($repository, $_REQUEST);
$response->fileId = $file['fileId'];
$response->path = $file['path'];
$response->repository = $file['repository'];
$response->file = $file;
if (!empty($file['path'])) {
$response->status = 'ok';
}
$response->request = $_REQUEST;
// Product uploads: append the new file id to the item's `files` list and
// set `main_photo` if the item had none yet.
if ($response->repository =='product') {
$productId = $_REQUEST['product_id'];
if (!empty($productId)) {
$sql = "SELECT `files`, `main_photo` FROM `item` WHERE `id` = :id";
$itemInfo = Helper::fetchAssoc($sql, array(':id' => $productId));
$files = $itemInfo['files'];
$files = empty($files) ? array() : explode(',', $files);
$files[] = $response->fileId;
$sql = Helper::executeStatement(
"UPDATE `item` SET `files` = :files WHERE `id` = :id",
array(':id' => $productId, ':files' => implode(',', $files))
);
if (empty($itemInfo['main_photo'])) {
$sql = Helper::executeStatement(
"UPDATE `item` SET `main_photo` = :main_photo WHERE `id` = :id",
array(':id' => $productId, ':main_photo' => $file['mainPath'])
);
}
}
}
break;
case 'deleteFile' :
$response->request = $_POST;
$repository = $_POST['repository'];
$fileId = $_POST['id'];
$fm = FileManager::getInstance();
$fm->setRepository($repository);
$response->deletedCounter = $fm->deleteFiles($fileId);
if ( !empty($response->deletedCounter) && is_numeric($response->deletedCounter)) {
$response->status = 'ok';
$response->repository = $repository;
$response->fileId = $fileId;
}
// Product deletions: drop the id from `files`; if the deleted file was the
// main photo, promote the first remaining file (or clear when none left).
if ($repository =='product') {
$productId = $_REQUEST['product_id'];
if (!empty($productId)) {
$sql = "SELECT `files`, `main_photo` FROM `item` WHERE `id` = :id";
$itemInfo = Helper::fetchAssoc($sql, array(':id' => $productId));
$files = $itemInfo['files'];
$files = empty($files) ? array() : explode(',', $files);
$files = array_diff($files, array($fileId));
$sql = Helper::executeStatement(
"UPDATE `item` SET `files` = :files WHERE `id` = :id",
array(':id' => $productId, ':files' => implode(',', $files))
);
$pathInfo = pathinfo($itemInfo['main_photo']);
if (empty($files)) {
$sql = Helper::executeStatement(
"UPDATE `item` SET `main_photo` = '' WHERE `id` = :id",
array(':id' => $productId)
);
} elseif ($pathInfo['filename'] == $fileId) {
$mainPhotoId = reset($files);
$sql = Helper::executeStatement(
"UPDATE `item` SET `main_photo` = :mainPhoto WHERE `id` = :id",
array(
':id' => $productId,
':mainPhoto' => FileManager::getLink($mainPhotoId)
)
);
}
}
}
break;
}
// Serialize the response object to the client (presumably JSON — confirm
// against ResponseHelper).
$response->encode();
<?
// Persist the edited text when the form was submitted, then redirect back.
if (!empty($_POST['savestatic'])) :
$sql = "UPDATE `pm_text` SET
`text` = :text
WHERE `id`= :id";
$statement = $pdo->prepare($sql);
$statement->execute(array(':id' => $_GET['edit'], ':text' => $_POST['text']));
header('Location: /'.$URL[1].'/'.$URL[2].'/'.$URL[3]);
exit;
endif;

// Load the block being edited.
$sql = "SELECT * FROM `pm_text` WHERE `id` = :id";
$r = Helper::fetchAssocInArray($sql, array(':id' => $_GET['edit']));
$r = reset($r);?>
<p><i class="icon-arrow-left"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Вернуться к списку блоков</a></p>
<h3>Редактирование блока</h3>
<form class="form-horizontal" action="" method="post">
<div class="control-group">
<label class="control-label">Позиция</label>
<div class="controls"><span class="input-xxlarge uneditable-input"><?=htmlspecialchars($r['position']);?></span></div>
</div>
<div class="control-group">
<label class="control-label">Текст</label>
<?php // Fixed: the raw value was echoed unescaped — text containing
      // "</textarea>" or markup broke out of the field (stored XSS). ?>
<div class="controls"><textarea name="text" rows="5" class="input-xxlarge"><?=htmlspecialchars(stripslashes($r['text']));?></textarea></div>
</div>
<div class="form-actions">
<button class="btn btn-primary" type="submit" name="savestatic" value="save">Сохранить изменения</button>
<a class="btn" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Отмена</a>
</div>
</form>
<?php
//SETUP
// Admin module for the hierarchical catalog (table `catalog`, max depth 4):
// routes to add/edit/items sub-views, handles deletion and sibling reordering,
// and prints the category tree.
Admin::mysqlQuerySQLFile(dirname(__FILE__) . DIRECTORY_SEPARATOR . 'sql.php');
include_once 'functions.php';
$page->header = "Каталог";
// table and gallery
$db_table = 'catalog';
$max_level = 4;
?><h2><?=$page->header?></h2><?
if (isset($_GET['add']) || isset($_GET['edit']))
include ('addedit.php');
elseif (isset($_GET['items']))
include ('items.php');
elseif (isset($_GET['del'])) {
delete_from_db ($_GET['del']);
header ('Location: ' . Admin::thisModule());
} else {
$page->title .= $page->header;
// if a request arrived to move an entry up or down among its siblings
if (isset ($_GET['up']) || isset ($_GET['down'])) {
// fetch the entry's current sort value and parent
// NOTE(review): list() destructuring of Helper::fetchAssoc() output only
// works if that helper returns numerically-indexed rows — confirm; with a
// purely associative row these would all be null.
$sql = 'SELECT `id`, `sort`, `pid` FROM `' . $db_table . '` WHERE `id` = :id';
list ($id, $sort, $pid) = Helper::fetchAssoc($sql, array(':id' => isset($_GET['up']) ? $_GET['up'] : $_GET['down']));
// determine the sort bounds among siblings
$sql = 'SELECT MIN(sort), MAX(sort) FROM `' . $db_table . '` WHERE `pid` = :pid';
list ($min_sort, $max_sort) = Helper::fetchAssoc($sql, array(':pid' => (int)$pid));
// compute the new sort value
if (isset($_GET['up']))
$new_sort = $sort - 1;
else
$new_sort = $sort + 1;
// if it stays in bounds, swap sort values with the neighboring sibling
if ($min_sort <= $new_sort && $new_sort <= $max_sort) {
$sql = 'UPDATE `' . $db_table . '` ' . Helper::createSetString(array (
array ('sort', $sort, 'int')
)) . ' WHERE `sort` = :sort AND `pid` = :pid';
Helper::executeStatement($sql, array(':sort' => $new_sort, ':pid' => (int)$pid));
$sql = 'UPDATE `' . $db_table . '` ' . Helper::createSetString(array (
array ('sort', $new_sort, 'int')
)) . ' WHERE `id` = :id';
Helper::executeStatement($sql, array(':id' => $id));
header ('Location: ' . Admin::thisModule());
}
}
?>
<script type="text/javascript">
$(document).ready(function() {
	$('.catselect').treeview({
		collapsed: 'true',
		animated: 'fast',
		persist: 'cookie'
	});
});
</script>
<?
// Render the category tree from the root (see functions.php).
$tree = Admin::readTree();
print_tree (0, 0);
?>
<!-- Example of the markup print_tree produces:
<span class="folder">name1</span>
<ul class="catselect">
	<li>
		<span class="folder">name2</span>
		<ul class="catselect">
			<li>
				<span class="folder">name3</span>
				<ul class="catselect">
					<li>
						<span class="folder">name4</span>
						<ul class="catselect">
							<li>
								<span class="folder">name5</span>
							</li>
						</ul>
					</li>
				</ul>
			</li>
		</ul>
	</li>
</ul>
-->
<?php
}
?>
<?php
// Catalog add/edit form handler.
// GET add/edit selects the mode; the shared form is rendered by
// Admin::createPMTableAddEdit(). POST cancel returns to the list;
// POST addedit performs the INSERT or UPDATE and redirects to the items page.
$back_url = Admin::thisModule();
if (isset($_GET['add']))
$page->header = 'Добавить каталог';
else
$page->header = 'Редактировать каталог';
$page->title .= $page->header;
if (isset($_GET['add']) || isset($_GET['edit'])) {
if (isset($_GET['edit']))
$item = Helper::selectAllForId($db_table, $_GET['edit']);
else
$item = array ();
// editable fields: label, POST name, current value, widget type
$array_data = array (
array ('URL', 'url', $item['url'], 'text'),
array ('Название', 'name', $item['name'], 'text'),
array ('Фильтр', 'filter', $item['filter'],'text'),
array ('Текст', 'text', $item['text'], 'ckeditor'),
array ('Metakey', 'metakey', $item['metakey'],'text'),
array ('Metadesc', 'metadesc', $item['metadesc'],'text'),
);
Admin::createPMTableAddEdit($array_data, $item['id']);
}
// "Cancel" button
if (isset($_POST['cancel'])) {
header ('Location: ' . $back_url);
}
// "Add"/"Save" button
if (isset($_POST['addedit'])) {
// new items are appended after the last sibling (MAX(sort) + 1)
$sql = 'SELECT MAX(sort) FROM `' . $db_table . '` WHERE `pid` = :pid';
list ($sort) = Helper::fetchAssoc($sql, array(':pid' => (int)$_GET['add']));
++ $sort;
if ($_POST['id'])
$sql = 'UPDATE';
else
$sql = 'INSERT INTO';
$sql .= ' `' . $db_table . '` ';
$arr = array (
array ('url', Admin::safetyURLToCatalog($_POST['url']), 'text'),
array ('name', 'text'),
array ('filter', 'text'),
array ('text', 'text'),
array ('metakey', 'text'),
array ('metadesc', 'text'),
);
if (! $_POST['id']) {
$arr[] = array ('pid', $_GET['add'], 'int');
$arr[] = array ('sort', $sort, 'int');
}
$sql .= Helper::createSetString($arr);
if ($_POST['id'])
// FIX: cast to int — the id was previously concatenated raw into the
// query (SQL injection vector)
$sql .= ' WHERE `id`=' . (int)$_POST['id'];
Helper::executeStatement($sql);
if ($_POST['id'])
$id = $_POST['id'];
else
$id = $pdo->lastInsertId();
$back_url = '/admin/module/catalog/?items&catalog_id='.$id;
header ('Location: ' . $back_url);
}
?>
<h3>Фильтры</h3>
<p>Правила написания фильтров:</p>
<ul>
<li>Каждое правильно в квадратных скобках</li>
<li>Между скобками может стоять произольное количество символов(комментариев)</li>
<li>При обращении по ID, PRICE, NAME, ARTICLE, RATE, TEMPLATE_ID префикс запроса @=</li>
<li>При обращении по параметрам из шаблона префикс не требуется</li>
<li>Для запроса по параметрам шаблона: переменные в опострофах, значения в одинарных кавычках</li>
</ul>
<p>Примеры использования</p>
<ul>
<li>[@=`id` IN (49,50,51)]</li>
<li>[@=`price` > '1000'][@=`rate`>='4']</li>
<li>[`sex` = 'М'][`age` = 'от 0 до 3х лет']</li>
<li>[@=`price` < 1000]цена меньше 1000 и [`sex` = 'Ж'] пол женский</li>
</ul><file_sep>/admin/module/param/index.php
<?php
/**
 * OTO v2.1 (One Table, Once) — all data lives in a single database table
 * and is described in the module exactly once (in sql.php).
 *
 * Module for the "Mandarin" CMS v1.1. Requires files from /algo/.
 * Dispatches add/edit/delete requests, otherwise renders the listing table.
 */
include ('functions.php');
// table definition / setup
include ('sql.php');
?><h2>Управление параметрами</h2><br /><?php
if (isset($_GET['add']) || isset($_GET['edit'])) {
include ('addedit.php');
} elseif (isset($_GET['del'])) {
delete_from_db ($_GET['del']);
header ('Location: ' . Admin::thisModule());
} else {
$page->title .= 'Управление параметрами';
// render the parameter listing table
Admin::createPMTableEditDeleteForSQL(
'SELECT * FROM `' . $db_table . '` ORDER BY `desc`',
'?add', 'Добавить параметр',
array ('', '', '', '40'),
array ('Ключ', 'Способ редактирования', 'Описание', ''),
array ('name', 'fieldtype', 'desc', 'id'),
// FIX: pass the cell-renderer callback as a string — the bare word
// raised an undefined-constant notice on PHP 7 and is fatal on PHP 8
'print_cell_table'
);
}
?><file_sep>/classes/admin.php
<?
// Admin: static helpers shared across the admin panel — action icon links,
// generic list and add/edit form rendering, SQL-file execution, catalog
// tree reading and URL slug transliteration.
class Admin {
// Print an icon link for a standard admin action; $type selects the icon,
// tooltip and behaviour ($title is currently unused, kept for callers).
public static function printImageMenu ($href, $type, $title='') {
switch ($type) {
case 'pass':
?><a rel="tooltip" href="<?=$href?>" title="Сменить пароль"><?
self::printImage ('16', '', '/admin/img/ico_pass.png', 'Сменить пароль');
?></a><?
break;
case 'photos':
?><a rel="tooltip" href="<?=$href?>" title="Фотогалерея"><?
self::printImage ('', '', '/admin/img/ico_photos.png', 'Фотогалерея');
?></a><?
break;
case 'back':
?><a rel="tooltip" href="<?=$href?>" title="Вернуться"><?
self::printImage ('', '', '/admin/img/ico_back.png', 'Вернуться');
?></a><?
break;
case 'minus':
?><a rel="tooltip" href="<?=$href?>" title="Назад"><?
self::printImage ('', '', '/admin/img/ico_arrow_left.png', 'Назад');
?></a><?
break;
case 'plus':
?><a rel="tooltip" href="<?=$href?>" title="Вперёд"><?
self::printImage ('', '', '/admin/img/ico_arrow_right.png', 'Вперёд');
?></a><?
break;
case 'up':
?><a href="<?=$href?>" title="Выше" rel="tooltip" class="icon icon-arrow-up"></a><?
break;
case 'down':
?><a href="<?=$href?>" title="Ниже" class="icon icon-arrow-down" rel="tooltip"></a><?
break;
case 'show':
?><a rel="tooltip" href="<?=$href?>" title="Показать"><?
self::printImage ('', '', '/admin/img/ico_mapping.png', 'Показать');
?></a><?
break;
case 'add':
?><a href="<?=$href?>" class="icon icon-plus" rel="tooltip" title="Добавить"></a><?
break;
case 'edit':
?><a href="<?=$href?>" rel="tooltip" title="Редактировать" class="icon icon-pencil"></a><?
break;
case 'del':
?><a href="<?=$href?>" class="icon icon-trash" rel="tooltip" title="Удалить"
onclick="if(confirm('Удалить? Восстановление будет невозможно!')) return true; else return false;" ></a><?
break;
case 'clone':
?><a href="<?=$href?>" class="icon icon-plus" title="Клонировать" rel="tooltip"></a><?php
break;
default:
break;
}
}
// Print an <img> tag; width/height attributes are omitted when empty.
public static function printImage ($width, $height, $src, $alt_title) { ?>
<img
src="<?=$src?>"
<?=($width ? 'width="' . $width . '"' : '')?>
<?=($height ? 'height="' . $height . '"' : '')?>
alt="<?=$alt_title?>"
/><?
}
// Return the URL of the current admin module (relative to /).
public static function thisModule () {
global $URL;
return '/' . $URL[1] . '/' . $URL[2] . '/' . $URL[3] . '/';
}
// Replace numeric field indexes with string ones in $simple_item_base,
// using $new_index as the old-index => new-name map.
public static function reindexSimpleItemBase ($new_index) {
global $simple_item_base;
$new_simple_item_base = array ();
if ($simple_item_base)
foreach ($simple_item_base as $i => $ib)
if ($ib)
foreach ($ib as $param => $value)
$new_simple_item_base[$i][$new_index[$param]] = $value;
$simple_item_base = $new_simple_item_base;
}
// Render the result of $sql as an admin listing table. Cells are drawn by
// $func_print_cell($colIndex, $colName, $value); an optional search form
// injects a WHERE clause into the query.
public static function createPMTableEditDeleteForSQL (
$sql,
$href_add, $text_add,
$list_width, $list_header, $list_colname,
$func_print_cell,
$search = false
) {
if ($href_add) {
// search form requested
if ($search) {
// inject a WHERE clause into the query
if ($_GET['param'] && isset($_GET['value'])) {
// FIX: the column name and value were previously embedded verbatim
// (SQL injection). Whitelist the column to identifier characters
// and escape quotes/backslashes in the value before inlining.
$param = preg_replace('/[^A-Za-z0-9_]/', '', $_GET['param']);
$value = addslashes($_GET['value']);
if ($param == 'price')
$where = " WHERE `".$param."` LIKE '".$value."' ";
else
$where = " WHERE `".$param."` LIKE '%".$value."%' ";
$sql = str_replace('ORDER',$where.' ORDER', $sql);
echo $sql; // NOTE(review): debug leftover — prints the SQL onto the page
} ?>
<form action="" method="get">
Поиск
<select name="param">
<? for($i=0; $i<count($list_header)-1; $i++) { ?>
<option value="<?=$list_colname[$i];?>" <?if($_GET['param']==$list_colname[$i]) {?>selected<?}?>><?=$list_header[$i];?></option>
<? } ?>
</select>
<input type="text" name="value" value="<?=htmlspecialchars($_GET['value']);?>" />
<input type="submit" class="btn btn-primary" />
</form><br/>
<? } ?>
<a href="<?=$href_add?>">
<span class="icon icon-plus"></span>
<?=$text_add?>
</a>
<? } ?>
<br />
<? $res = Helper::fetchAssocInArray($sql); ?>
<table class="table table-hover table-condensed table-bordered">
<tbody>
<tr>
<?
$m = count ($list_width);
for ($i = 0; $i < $m; ++ $i) {
?><th <?=($list_width[$i] ? 'width="' . ((int)$list_width[$i]) . '"' : '')?>><?=$list_header[$i]?></th><?php
}
?>
</tr>
<?
$num_rows = 0;
if (count($res)) {
foreach ($res as $row) { ?>
<tr>
<? for ($i = 0; $i < $m; ++ $i) { ?>
<td><? $func_print_cell ($i, $list_colname[$i], $row[$list_colname[$i]]); ?></td>
<? } ?>
</tr>
<? }
} ?>
</tbody>
</table><?
}
// Generic add/edit form. Each $array_data row is
// (label, POST name, current value, widget type[, extra]).
public static function createPMTableAddEdit($array_data, $id, $photo = false) {
global $URL; ?>
<style>
input.datepicker{
text-align: center;
}
</style>
<form action="" method="post" enctype="multipart/form-data">
<table>
<? if ($array_data)
foreach ($array_data as $data) { ?>
<tr>
<td width="150px">
<? if ($data[3] == 'image' && file_exists ($_SERVER['DOCUMENT_ROOT'] . $data[2])) { ?>
<div>
<img width="150" src="<?=$data[2];?>" />
<input type="hidden" name="img_name" value="<?=$data[1];?>" />
<button style="cursor: pointer;" name="rm_img"><img src="/admin/img/ico_delete.png" /> Удалить</button>
</div>
<? } else {
echo $data[0];
} ?>
</td>
<td>
<? switch ($data[3]) {
case 'func':
// custom renderer: $data[4] is a callable, $data[2] its argument
$data[4] ($data[2]);
break;
case 'date':
$value = $data[2];
if (! Helper::isDate ($value))
$value = '';//date ('Y-m-d');
else
$value = Helper::toNormDate ($value);
?>
<input class="datepicker" size="10" name="<?=$data[1]?>" value="<?=$value;?>" />
<?php
break;
case 'check':
?>
<input type="checkbox" name="<?=$data[1]?>" <?=($data[2] ? ' checked ' : '')?> />
<?php
break;
case 'select':
?>
<select name="<?=$data[1];?>" class="input-xxlarge">
<?php
if ($data[4])
foreach ($data[4] as $value => $name)
self::printHTMLOption($value, $name, $value == $data[2]);
?>
</select>
<?php
break;
case 'image': ?>
<input type="file" class="long" name="<?=$data[1];?>" value="<?=$data[2];?>" />
<?php
break;
case 'file': ?>
<input type="file" class="long" name="<?=$data[1];?>" />
<?php
break;
case 'textarea': ?>
<textarea name="<?=$data[1]?>"><?=$data[2]?></textarea>
<?php
break;
case 'ckeditor':
?>
<textarea name="<?=$data[1]?>" class="ckeditor"><?=stripslashes($data[2])?></textarea>
<?
break;
// case 'text':
default: ?>
<input type="text" name="<?=$data[1]?>" class="input-xxlarge" value="<?=htmlspecialchars ($data[2])?>" />
<?php
break;
}
?>
</td>
</tr>
<? } ?>
<? //IMGS
/*
if ($photo) {
for($i=0;$i<4;$i++) { ?>
<tr>
<td>Изображение <?=($i+1);?></td>
<td><input type="file" name="img[<?=$i;?>]" />
<?
$file = '/upload/'.($_GET['edit']%10).'/'.($_GET['edit']).'/'.$_GET['edit'].'_'.$i.'.jpg';
if(file_exists($_SERVER['DOCUMENT_ROOT'].$file)) {
?><img style="width: 100px;" src="<?=$file;?>"/><?
} ?>
</td>
</tr>
<? }
} */?>
<tr>
<td>
<input type="hidden" name="id" value="<?=(int)$id?>" />
<input type="submit" class="btn btn-primary" name="addedit" value="<?=(isset($_GET['add']) ? 'Добавить' : 'Сохранить')?>" />
</td>
<td>
<input type="submit" class="btn" name="cancel" value="Отмена" />
</td>
</tr>
</table>
</form><?
}
// Print an <option> element.
// FIX: declared public static — it is invoked as self::printHTMLOption()
// above, and calling a non-static method statically is fatal on PHP 8.
public static function printHTMLOption ($value, $string, $selected=false, $disabled=false) { ?>
<option value="<?=htmlspecialchars($value)?>"
<?=($selected ? 'selected' : '')?>
<?=($disabled ? 'disabled' : '')?>
><?=htmlspecialchars($string)?></option><?
}
// Execute the ';'-separated SQL statements contained in a php file.
public static function mysqlQuerySQLFile ($file) {
if ( !file_exists($file) )
return;
$array_sql = explode (';', trim (file_get_contents ($file), '<?>'));
if (count ($array_sql)) {
foreach ($array_sql as $sql) {
Helper::executeStatement($sql);
}}
}
// Read the catalog tree from the database, keyed by id with child lists.
public static function readTree () {
$sql = 'SELECT `id`, `name`, `pid`, `url` FROM `catalog` ORDER BY `sort`';
$catalogs = Helper::fetchAssocInArray($sql);
// FIX: initialise so an empty catalog returns an array instead of null
$tree = array ();
if ($catalogs) {
foreach ($catalogs as $catalog) {
$tree [ $catalog['pid'] ] ['child'][] = $catalog['id'];
$tree [ $catalog['id'] ]['id'] = $catalog['id'];
$tree [ $catalog['id'] ]['name'] = $catalog['name'];
$tree [ $catalog['id'] ]['pid'] = $catalog['pid'];
$tree [ $catalog['id'] ]['url'] = $catalog['url'];
}
}
return $tree;
}
// Build a safe URL slug from user input using the transliteration tables
// declared below this class.
public static function safetyURLToCatalog ($url) {
global $db_table;
global $arr_for_translit_ru;
global $arr_for_translit_lat;
$url = str_replace ($arr_for_translit_ru, $arr_for_translit_lat, $url);
if (empty ($url))
$url = 'url-path-to-part-of-site';
/* legacy uniqueness check (old mysql_* API) kept for reference:
$sql = 'SELECT * FROM `' . $db_table . '` WHERE `url`="' . $url . '" AND `id` <> ' . (int)$_POST['id'] . ' LIMIT 1';
$res = mysql_query ($sql);
if (mysql_num_rows ($res))
{
for ($i = 1; $i <= 1000000; ++ $i)
{
$sql = 'SELECT * FROM `' . $db_table . '` WHERE `url`="' . $url . '-' . $i . '" LIMIT 1';
$res = mysql_query ($sql);
if (mysql_num_rows ($res))
continue;
else
return $url . '-' . $i;
}
}
else*/
return $url;
}
}
//
// Transliteration tables for URL slugs (used by Admin::safetyURLToCatalog):
// Cyrillic letters map to Latin, punctuation maps to '' or '-'.
//
$arr_for_translit_ru = array ( 'а', 'б', 'в', 'г', 'д', 'е', 'ё', 'ж', 'з', 'и', 'й', 'к', 'л', 'м', 'н', 'о', 'п', 'р', 'с', 'т', 'у', 'ф', 'х', 'ц', 'ч', 'ш', 'щ', 'ъ', 'ы', 'ь', 'э', 'ю', 'я',
'А', 'Б', 'В', 'Г', 'Д', 'Е', 'Ё', 'Ж', 'З', 'И', 'Й', 'К', 'Л', 'М', 'Н', 'О', 'П', 'Р', 'С', 'Т', 'У', 'Ф', 'Х', 'Ц', 'Ч', 'Ш', 'Щ', 'Ъ', 'Ы', 'Ь', 'Э', 'Ю', 'Я',
',', '.', '_', '`', '~', '!', '\\', '|', '/', '@', '#', '$', '%', '^', '&', '*', '(', ')', '=', '+', '"', '№', ';', ':', '?', ' ',
);
$arr_for_translit_lat = array ('a', 'b', 'v', 'g', 'd', 'ye', 'ye', 'zh', 'z', 'i', 'i', 'k', 'l', 'm', 'n', 'o', 'p', 'r', 's', 't', 'u', 'f', 'kh', 'ts', 'ch', 'sh', 'shch', '', 'y', '', 'e', 'yu', 'ya',
'A', 'B', 'V', 'G', 'D', 'Ye', 'Ye', 'Zh', 'Z', 'I', 'I', 'K', 'L', 'M', 'N', 'O', 'P', 'R', 'S', 'T', 'U', 'F', 'Kh', 'Ts', 'Ch', 'Sh', 'Shch', '', 'Y', '', 'E', 'Yu', 'Ya',
'', '', '-', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '-',
);<file_sep>/classes/product.php
<? class Product extends Entity {
// DB table backing this entity.
public $table = 'item';
// Attribute map: entity property => DB column ('db'), admin title, format.
protected $attributes = array(
'id' => array(
'title' => 'ID',
'isPrimaryKey' => true,
),
'templateId' => array(
'db' => 'template_id'
),
'name' => array(
'title' => 'Название',
),
'price' => array(
'title' => 'Цена',
),
'article' => array(
'title' => 'Артикул',
),
'alias' => array(
'db' => 'alias',
'title' => 'URL',
),
'description' => array(
'db' => 'description',
),
'metakey' => array(
'db' => 'metakey',
),
'metadesc' => array(
'db' => 'metadesc',
),
'mainPhoto' => array(
'db' => 'main_photo',
),
'files' => array(
'db' => 'files',
'format' => 'delimiter',
'delimiter' => ','
),
'shopPrice' => array(
'db' => 'shop_price',
'title' => 'Цена оригинальная',
),
'shopSellerName' => array(
'db' => 'shop_seller_name',
'title' => 'Поставщик',
),
'created' => array(
'db' => 'created',
'title' => 'Создан',
),
);
// Loads the product row via Entity, then eagerly fetches its parameter
// values, image records (when `files` is non-empty) and reviews.
public function __construct($primaryKey = '', $fields = array()) {
parent::__construct($primaryKey, $fields);
if(!$this->isNew) {
// per-product parameter values joined with their param definitions
$sql = 'SELECT p.`id` , p.`name` , p.`fieldtype` , p.`values` , p.`desc` , ip.`value`
FROM `item_param` AS ip
LEFT JOIN `param` AS p ON ( p.`id` = ip.`param_id` )
WHERE ip.`item_id` = :itemId';
$this->params = Helper::fetchAssocInArray($sql, array(':itemId' => $this->id), 'name');
if (!empty($this->files)) {
// NOTE(review): $this->files comes from the comma-delimited `files`
// column and is inlined as an id list — assumed numeric; verify upstream
$sql = "SELECT * FROM `" . FileManager::FILES_TABLE . "` WHERE `id` IN (".implode(',', $this->files).")";
$this->images = Helper::fetchAssocInArray($sql);
}
// newest reviews first
$this->reviews = Helper::fetchAssocInArray("SELECT * FROM `pm_review` WHERE `product_id` = :productId ORDER BY `created` DESC", array(':productId' => $this->id));
}
}
}<file_sep>/parts/menu/catalog.php
<section class="menuleft">
<? /* left-side menu block: renders the catalog tree */ ?>
<? Catalog::showTreeInMenu(); ?>
</section><file_sep>/admin/module/reviews/index.php
<?
// Review moderation list: unmoderated reviews are listed first and
// highlighted; edit/delete actions dispatch to edit.php / del.php.
$page->header = 'Управление отзывами';
$page->title .= $page->header;
?>
<h2><?=$page->header?></h2>
<? if ($_GET['delete']) :
include($URL[2].'/'.$URL[3].'/del.php');
elseif ($_GET['edit']) :
include($URL[2].'/'.$URL[3].'/edit.php');
else : ?>
<table class="table">
<tr>
<th>Id</th>
<th>Создан</th>
<th>Пользователь</th>
<th>Текст</th>
<th></th>
</tr>
<? // fetch all reviews: unmoderated first, newest first within each group
$sql = "SELECT * FROM `pm_review` ORDER BY `is_moderated` ASC, `created` DESC";
$data = Helper::fetchAssocInArray($sql);
foreach($data as $el) :?>
<tr <? if (empty($el['is_moderated'])) {?>class="error"<?}?>>
<td><?=$el['id']?></td>
<td><?=date('d.m.Y', $el['created'])?></td>
<td><?=$el['user_name']?></td>
<td><?=nl2br($el['text'])?></td>
<td width="36">
<a class="icon-pencil" rel="tooltip" title="Редактировать" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?edit=<?=$el['id'];?>"></a>
<a class="icon-remove" rel="tooltip" title="Удалить"
href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?delete=<?=$el['id'];?>"
onclick="if(confirm('Удалить? Восстановление будет невозможно!')) return true; else return false;"></a>
</td>
</tr>
<? endforeach; ?>
</table>
<? endif; ?><file_sep>/classes/parser.aliexpress.php
<? class AliexpressParser {
// Scraper for AliExpress product pages: extracts name, image, seller,
// lot size and price variants by regex over the raw HTML.
// NOTE(review): the patterns are tied to the page markup at the time of
// writing — presumably fragile; verify against the live site before reuse.
// Known option labels used to classify a variant key as size or color.
public static $sizes = array('XS', 'S', 'M', 'L', 'XL', 'XXL');
public static $colors = array('red', 'blue', 'green', 'yellow', 'orange', 'black', 'white');
// Return the variant type of the first recognisable key in a price array,
// or false when $price is not an array / no key matches.
public static function getPriceTypeFromArray($price) {
if (is_array($price)) {
foreach ($price as $key => $el) {
$type = self::getPriceType($key);
if ($type) {
return $type;
}
}
}
return false;
}
// Classify one option label: returns array('type' =>, 'title' =>) or false.
public static function getPriceType($el) {
$type = false;
if (in_array(mb_strtoupper($el), self::$sizes)) {
$type = array(
'type' => 'size',
'title'=> 'Размер'
);
} elseif (in_array(mb_strtolower($el), self::$colors)) {
$type = array(
'type' => 'color',
'title'=> 'Цвет'
);
}
return $type;
}
// Fetch and parse one product page. Returns an array with keys: link,
// name, image, seller_id, seller_name, pieces-at-lot, price, price_type
// ('onlyprice' scalar | '1param' label=>price | '2param' nested map).
public static function getProductInfo($id) {
$url = 'http://www.aliexpress.com/item/-/' . $id . '.html';
$content = file_get_contents($url);
$INFO = array();
$INFO['link'] = $url;
//Name (first <h1> text)
preg_match('#h1 .*?>(.*?)<#isu',$content,$result);
$INFO['name'] = trim($result[1]);
//Image (Open Graph preview)
preg_match('#property="og:image" content="(.*?)"#isu',$content,$result);
$INFO['image'] = trim($result[1]);
//Seller (store id + display name)
preg_match('#company-name.*?>.*?<a.*?store/(\d*?)">(.*?)<#isu',$content,$result);
$INFO['seller_id'] = trim($result[1]);
$INFO['seller_name'] = trim($result[2]);
//Pieces per lot (defaults to 1 when not stated on the page)
preg_match('#class="unit-disc">(.{1,8}?)pieces#isu',$content,$result);
if(trim($result[1])) $INFO['pieces-at-lot'] = trim($result[1]); else $INFO['pieces-at-lot'] = 1;
//Prices
//If only price (no variant attributes on the page)
if (strpos($content,'var skuAttrIds="";')) {
preg_match('#"skuPrice"\:"(.*?)"#isu', $content, $result);
$INFO['price'] = $result[1];
$INFO['price_type'] = 'onlyprice';
}
//If only size || color (one variant dimension): price per option label
preg_match('#var skuAttrIds=\[\[([\d,]*?)\]\];#isu', $content, $result);
if ($result[1]) {
$prices = array();
$params = explode(',',$result[1]);
foreach ($params as $el) {
preg_match('#"skuAttr":"\d*?:'.$el.'".*?"skuPrice":"(.*?)"#isu', $content, $result1);
preg_match('#class="sku-value.*?id=".*?-'.$el.'".*?span>(.*?)<#isu', $content, $result2);
$prices[$result2[1]] = $result1[1];
}
$INFO['price'] = $prices;
$INFO['price_type'] = '1param';
}
//If 2 params (two variant dimensions): label => label => price
preg_match('#var skuAttrIds=\[\[([\d,]*?)\],\[([\d,]*?)\]\];#isu', $content, $result);
if ($result[1]) {
$prices = array();
$param1 = explode(',',$result[1]);
$param2 = explode(',',$result[2]);
foreach ($param1 as $el) {
preg_match('#class="sku-value.*?id=".*?-'.$el.'".*?span>(.*?)<#isu', $content, $result0);
foreach ($param2 as $el2) {
preg_match('#"skuAttr":"\d*?:'.$el.';\d*?:'.$el2.'".*?"skuPrice":"(.*?)"#isu', $content, $result1);
preg_match('#class="sku-value.*?id=".*?-'.$el2.'".*?span>(.*?)<#isu', $content, $result2);
$prices[$result0[1]][$result2[1]] = $result1[1];
}
}
$INFO['price'] = $prices;
$INFO['price_type'] = '2param';
}
return $INFO;
}
}<file_sep>/js/main.js
// Add one unit of product `id` to the cart via the backend endpoint,
// then refresh the cart counter shown in the page header.
function addToCart(id) {
    var now = new Date;
    var params = {
        'id': id,
        'count': 1,
        'time': now.getTime()
    };
    $.get('/backend/cart', params, function(data) {
        if (data.status == 'ok') {
            // recount items across the whole returned cart
            var count = 0;
            for (var key in data.cart) {
                count += data.cart[key].count;
            }
            // TODO price: also show the total price ("X purchases - YYY rub")
            var message = 'Покупок: ' + count;
            $('header .cart-block .value').html(message);
            alertInfo('Товар добавлен в корзину');
        }
    }, 'JSON');
}
// Read a cookie by name. Returns its unescape()d value, or null when absent.
function getCookie(name) {
    // leading space so the very first cookie matches " name=" too
    var haystack = " " + document.cookie;
    var needle = " " + name + "=";
    var result = null;
    var from = haystack.indexOf(needle);
    if (from != -1) {
        from += needle.length;
        var to = haystack.indexOf(";", from);
        if (to == -1) {
            to = haystack.length;
        }
        result = unescape(haystack.substring(from, to));
    }
    return result;
}
// Write a cookie; each optional attribute is appended only when truthy.
function setCookie (name, value, expires, path, domain, secure) {
    var crumb = name + "=" + escape(value);
    if (expires) { crumb += "; expires=" + expires; }
    if (path)    { crumb += "; path=" + path; }
    if (domain)  { crumb += "; domain=" + domain; }
    if (secure)  { crumb += "; secure"; }
    document.cookie = crumb;
}
// Walk up from a menu link and open every collapsible ancestor category.
function openCategory(el) {
    var category = el.parent().parent().parent();
    if (!category.is('.openable')) {
        return;
    }
    category.find('> ul').toggleClass('opened closed');
    category.find('> .close').addClass('minus');
    // recurse into the next ancestor level
    openCategory(category.find('> a'));
}
// Global page initialisation: menu state, cart buttons, search, slider,
// product grid layout, product tabs, lightbox, rating widget, review form.
$(document).ready(function() {
/* Placeholder section */
Placeholder.init({
normal : '#6D6F76',
placeholder : '#6d6f76'
});
// highlight the current page in the left menu and open its ancestors
$('.menuleft a[href="'+window.location.pathname+'"]').each(function(){
$(this).addClass('active');
openCategory($(this));
});
// toggle a collapsible menu category; persist its state in a cookie
$('.menuleft .openable > a, .menuleft .openable > span').click(function(e) {
var el = $(this);
if (e.target.localName == 'a') el = $(this).next();
el.next('ul').toggleClass('opened closed');
el.toggleClass('minus');
var name = $(this).parent().attr('id');
var state = $(this).parent().hasClass('opened') ? 'opened' : 'closed';
setCookie(name, state, 0, '/');
return false;
});
// "add to cart" buttons
$('a.add2cart').click(function(){
var id = $(this).attr('data-id');
addToCart(id);
return false;
});
// search form: redirect to the pretty /search/<query> URL
$('[action="/search"]').submit(function(){
var search = $(this).find('[type=text]').val().trim();
window.location = '/search/'+search;
return false;
});
$('.easyslider').easySlider({
auto: true,
continuous: true,
speed: 250,
pause: 5000
});
// product grid layout: tag last-column / last-row items
$('.products li:nth-child(3n)').addClass('last_column');
$('.products li:gt(' + ($(".products li").length - 4) + ')').addClass('last_row');
// move prices up when a product title wraps onto several lines
$('.products li .title').each(function(indx, el){
if ($(el).height() > 18) {
var parentEl = $(el).closest('.info');
parentEl.prev().append(parentEl.find('.price'));
if (parentEl.find('.price_old')) {
parentEl.prev().find('.price').prepend(parentEl.find('.price_old'));
}
if ($(el).height() > 36) {
$(el).addClass('condensed');
}
}
});
// product page tabs
$('.tabs_header li').click(function(){
$(this).siblings('.active').removeClass('active');
$(this).addClass('active');
$(this).parent().next().find('> li.active').removeClass('active');
$(this).parent().next().find('> li:nth-child(' + ($(this).index() + 1) + ')').addClass('active');
return false;
});
$('.tabs_header li:first-child').trigger('click');
$('.fancy').fancybox();
// star rating widget: hover highlight and click-to-set / click-to-clear
$('.stars li')
.mouseenter(function(){
$('.stars li:lt(' + ($(this).index() + 1) + ')').addClass('hover');
})
.mouseout(function(){
$('.stars li.hover').removeClass('hover');
})
.click(function(){
var count = $(this).index() + 1;
if ($(this).is('.active') && $(this).next().not('.active')) {
count = '';
$('.stars li.active').removeClass('active');
} else {
$('.stars li:lt(' + count + ')').addClass('active');
$('.stars li:gt(' + (count - 1) + ')').removeClass('active');
}
$('[name="rating"]').val(count);
return false;
});
// render read-only ratings as a fixed-width overlay (18px per star)
$('.rating[data-value]').each(function(indx, el){
$(el).append('<span class="hover" style="width: ' + parseInt($(el).data('value')) * 18+ 'px"></span>')
});
// review form: client-side required-field check, then ajax submit
$('[name=review_add]').submit(function(){
var form = $(this);
var isValid = true;
form.find('[required]').each(function(indx, el){
var val = $.trim($(el).val());
if (!val.length || val == $(el).attr('placeholder')) {
isValid = false;
}
});
if (isValid) {
$.post(
'/backend/review',
form.serializeArray(),
function(data){
console.log(data.status);
if (data.status == 'ok') {
window.location.reload();
}
}
);
} else {
alertError('Пожалуйста, заполните все поля');
}
return false;
});
});<file_sep>/backend/cart.php
<?
// Cart backend: adds $_GET['count'] units of product $_GET['id'] to the
// serialized cart cookie and returns the updated cart via $response.
// FIX: the original condition was `isset($_GET['count']) > 0`, which
// compared the boolean isset() result with 0 and therefore accepted
// missing-check semantics only — zero and negative counts slipped through.
if (isset($_GET['id']) && isset($_GET['count']) && (int)$_GET['count'] > 0) {
// NOTE(review): unserialize() of a client-supplied cookie allows PHP
// object injection; consider switching to json_encode/json_decode.
$cart = (isset($_COOKIE['cart'])) ? unserialize($_COOKIE['cart']) : array();
if (!is_array($cart)) $cart = array();
$id = $_GET['id'];
// cart key is currently just the product id (size/color variants disabled)
$key = $id;
if (!isset($cart[$key])) {
$cart[$key]['count'] = 0;
}
$cart[$key]['id'] = $id;
$cart[$key]['count'] += (int)$_GET['count'];
$sCart = serialize($cart);
// session cookie, valid site-wide
if (setcookie('cart', $sCart, 0, '/')) {
$response->status = 'ok';
$response->cart = $cart;
}
}
$response->encode();<file_sep>/classes/helper.mail.php
<? class MailHelper {
// Send a UTF-8 e-mail as a base64-encoded multipart/mixed message.
// The From header uses SITE_NAME and no-reply@<current host>;
// returns whatever PHP's mail() returns.
public static function mail($to = '', $title = '', $message = '') {
$from_name = SITE_NAME;
$from = "=?UTF-8?B?".base64_encode($from_name)."?= <no-reply@".$_SERVER['HTTP_HOST'].">";
$EOL = "\n"; // line terminator; some mail servers require \n — pick empirically
$boundary = "--".md5(uniqid(time())); // any string that never occurs in the data below
$head = "MIME-Version: 1.0;$EOL";
$head .= "Content-Type: multipart/mixed; boundary=\"$boundary\"$EOL";
$head .= "Content-Transfer-Encoding: base64$EOL";
$head .= "From: $from";
$multipart = "--$boundary$EOL";
$multipart .= "X-Mailer: PHP/" . phpversion() . "$EOL";
$multipart .= "Content-Type: text/plain; charset=utf-8$EOL";
$multipart .= "Content-Transfer-Encoding: base64$EOL";
$multipart .= $EOL; // separator between the part headers and the body
// subject is prefixed with the site name and RFC 2047 base64-encoded
$title = "=?UTF-8?B?".base64_encode($from_name . ' :: ' . $title)."?=";
$multipart_final = $multipart . chunk_split(base64_encode($message)) . "$EOL--$boundary--$EOL";
return mail($to, $title, $multipart_final, $head);
}
}
<file_sep>/parts/html/footer.php
<!-- FOOTER HERE -->
<footer role="contentinfo">
<? /* Footer link columns — disabled until real content exists.<div class="container">
<ul class="footer_columns">
<? //TODO: build a meaningful footer ?>
for ($i = 0; $i < 5; $i++) {
?><li class="inlineblock">
<h3>Заголовок</h3>
<ul>
<li><a href="">Ссылка 1</a></li>
<li><a href="">Ссылка 2</a></li>
<li><a href="">Ссылка 3</a></li>
<li><a href="">Ссылка 4</a></li>
</ul>
</li><?
}
</ul>
</div>*/ ?>
<div class="signature">
<div class="container">
Copyright © 2012-<?=date('Y');?> «<?=$TEXT['site_name']?>» — Все права защищены.
</div>
</div>
<? /* analytics/counter HTML snippet stored in site settings */ ?>
<?=htmlspecialchars_decode($TEXT['counter'])?>
</footer>
<style type="text/css">
.wrapper .empty {
height: 60px;
}
footer {
margin-top: -51px;
padding-top: 0px;
height: 50px;
}
</style><file_sep>/templates/admin/main.php
<?
// Admin dashboard landing page: static welcome text only.
// (FIX: the <i> tag in the signature line was never closed.)
$page->title .= 'Добро пожаловать';
?>
<h1>Добро пожаловать</h1>
<p>В данный момент вы находитесь на главной странице системы управления.<br/>
В левом меню вы можете выбрать интересующий вас раздел управления сайтом и внести требуемые изменения.<br/>
В случае возникновения вопросов - пожалуйста, обратитесь к вашим разработчикам.</p>
<p><b>Напоминаем, что в случае, если вы захотите самостоятельно вмешаться в код CMS, вы снимаетесь с гарантийного обслуживания</b><br/>
<i>С уважением, коллектив разработчиков.</i></p>
<file_sep>/admin/module/static/editpage.php
<?
// Static page add/edit: a numeric ?edit=ID means editing an existing row,
// anything else creates a new one. On save: INSERT or UPDATE, then redirect.
$isNew = empty($_GET['edit']) || !is_numeric($_GET['edit']) ? true : false;
if($_POST['save']) {
$id = $_GET['edit'];
$fields = array();
$fields[] = "`url` = :url";
$fields[] = "`title` = :title";
$fields[] = "`header` = :header";
$fields[] = "`text` = :text";
$fields[] = "`metadesc` = :metadesc";
$fields[] = "`metakey` = :metakey";
$fields[] = "`lastedit` = :time";
$sqlParams = array(
':url' => $_POST['url'],
':title' => $_POST['title'],
':header' => $_POST['header'],
':text' => $_POST['text'],
':metadesc' => $_POST['metadesc'],
':metakey' => $_POST['metakey'],
':time' => time()
);
if ($isNew) {
$sql = "INSERT INTO `pm_static` SET ".implode(', ', $fields);
} else {
$sql = "UPDATE `pm_static` SET ".implode(', ', $fields)." WHERE `id` = :id";
$sqlParams[':id'] = $id;
}
Helper::executeStatement($sql, $sqlParams);
// back to the static pages list
header('Location: /'.$URL[1].'/'.$URL[2].'/'.$URL[3]);
exit;
}
// load the current values (empty row for a new page)
// NOTE(review): the values are re-echoed into the inputs below without
// htmlspecialchars() — stored quotes can break the markup; consider escaping.
$sql = "SELECT * FROM `pm_static` WHERE `id` = :id";
$r = Helper::fetchAssoc($sql, array(':id' => $_GET['edit'])); ?>
<h3><?=$isNew ? 'Создание' : 'Редактирование'?></h3>
<form class="form-horizontal" action="" method="post">
<div class="control-group">
<label class="control-label">URL страницы</label>
<div class="controls"><input class="input-xlarge" type="text" name="url" value="<?=$r['url']?>"></div>
</div>
<div class="control-group">
<label class="control-label">Название (title)</label>
<div class="controls"><input class="input-xlarge" type="text" name="title" value="<?=$r['title'];?>"></div>
</div>
<div class="control-group">
<label class="control-label">Заголовок (h1)</label>
<div class="controls"><input class="input-xlarge" type="text" name="header" value="<?=$r['header'];?>"></div>
</div>
<div class="control-group">
<textarea class="input-xlarge ckeditor" name="text"><?=stripslashes($r['text']);?></textarea>
</div>
<div class="control-group">
<label class="control-label">MetaKey</label>
<div class="controls"><input class="input-xlarge" type="text" name="metakey" value="<?=$r['metakey'];?>"></div>
</div>
<div class="control-group">
<label class="control-label">MetaDesc</label>
<div class="controls"><input class="input-xlarge" type="text" name="metadesc" value="<?=$r['metadesc'];?>"></div>
</div>
<div class="form-actions">
<button class="btn btn-primary" type="submit" name="save" value="save">Сохранить изменения</button>
<a class="btn" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Отмена</a>
</div>
</form><file_sep>/classes/controller.file.php
<?php
class FileController {
    /**
     * Handle an AJAX file upload (Plupload posts files here).
     *
     * The repository name comes from the URL (rule: /upload/{repository_name},
     * generated in urlManager); extra options arrive as multipart params:
     *   - section:    numeric section id to file the upload under
     *   - resize_key: when set, the response 'path' points at the resized copy
     *   - used:       "1" to flag the uploaded file as "used" from the start
     *
     * @param string $repository repository name
     * @param array  $params     upload options (see above)
     * @return array uploader response (fileId, path, mainPath, repository) or array('error' => ...)
     */
    public function uploadFile($repository, $params = array()) {
        // Multipart params may be absent — guard every index read.
        $section = isset($params['section']) ? $params['section'] : null;
        $resizeKey = !empty($params['resize_key']) ? $params['resize_key'] : false;
        // BUGFIX: 'used' arrives as a *string* from multipart_params, so the
        // original strict comparison ($params['used'] === 1) was always false.
        $used = isset($params['used']) && (int) $params['used'] === 1;
        $fm = FileManager::getInstance();
        $fm->setRepository($repository);
        if (is_numeric($section) && $section > 0) {
            $fm->setSection($section);
        }
        $fileInfo = $fm->uploadFile($params);
        if (!$fm->errorsOccurred()) {
            /** @var $file FMFile*/
            $file = $fm->addFile($fileInfo['path'], $used);
            $result = array(
                'fileId' => $file->id,
                // when a resize key is requested, answer with the resized copy
                'path' => ($resizeKey)
                    ? FileManager::getResizedFileName($file->path, $resizeKey)
                    : $file->path,
                'mainPath' => $file->path,
                'repository' => $repository,
            );
            return $result;
        } else {
            $errors = $fm->getErrors();
            return array('error' => $errors);
        }
    }
    /**
     * Stream a file from the repository (wrap for FileManager download method).
     * Download-protection logic for secured files belongs here; for public
     * files prefer a direct link instead of this action.
     *
     * ! url for this action must be set as /get/{file_id} (urlManager rule)
     */
    public function getFile() {
        // guard against a missing GET param (direct hits on the action URL)
        $fileId = isset($_GET['file_id']) ? $_GET['file_id'] : 0;
        FileManager::getInstance()->getFile($fileId);
    }
    /**
     * Bootstrap helper: create every repository described by repoList().
     * Prints a line per repository actually created (idempotent otherwise).
     */
    public function repositoryCreate() {
        $fm = FileManager::getInstance();
        $fm->initializeManager();
        $repoList = $this->repoList();
        if (!empty($repoList)) {
            foreach($repoList as $repoName => $repoParams) {
                if ($fm->createRepository($repoName, $repoParams)) {
                    echo "Repository {$repoName} created" . PHP_EOL;
                }
            }
        }
    }
    /**
     * Static repository configuration: allowed extensions, storage nesting
     * depth, size limit and the resize presets for each repository.
     * @return array
     */
    private function repoList() {
        return array(
            'product' => array(
                'allowed_ext' => 'jpg,jpeg,png,gif',
                'type' => 'image',
                'nesting' => 4,
                'max_size' => '10M',
                'resize' => array(
                    'main' => array(
                        'type' => 'fill',
                        'w' => 1200,
                        'h' => 800
                    ),
                    'tmb600' => array(
                        'type' => 'fill',
                        'w' => 600,
                        'h' => 600,
                    ),
                    'tmb300' => array(
                        'type' => 'fill',
                        'w' => 300,
                        'h' => 300,
                    ),
                    'preview' => array(
                        'type' => 'fill',
                        'w' => 238,
                        'h' => 233
                    ),
                    'tmb90' => array(
                        'type' => 'fill',
                        'w' => 90,
                        'h' => 90
                    ),
                    'tmb50' => array(
                        'type' => 'fill',
                        'w' => 50,
                        'h' => 50
                    )
                )
            ),
        );
    }
}<file_sep>/classes/helper.image.php
<?
class ImageHelper {
    /*
     * Resize an image file in place.
     *   $img - full path to the image
     *   $W   - required width; pass 0 for "no constraint"
     *   $H   - required height; pass 0 for "no constraint"
     *   $Key - when both W and H are given, selects the crop alignment:
     *          0 - top-left corner, 1 - center (default), 2 - bottom-right corner
     */
    public static function setImageSize($img,$W=0,$H=0,$Key=1){
        $rasshr = substr(strrchr($img,'.'),1);
        // choose the GD loader from the file extension (GIF / JPEG / PNG)
        switch($rasshr){
            default: case "gif": $srcImage = @ImageCreateFromGIF( $img ); break;
            case "jpg": $srcImage = @ImageCreateFromJPEG( $img ); break;
            case "png": $srcImage = @ImageCreateFromPNG( $img ); break;
        }
        // original dimensions
        $srcWidth = @ImageSX( $srcImage );
        $srcHeight = @ImageSY( $srcImage );
        // keep $res defined even when no branch below matches (W == H == 0)
        $res = false;
        // resize to the given width
        if ($W != 0 && $H == 0) {
            $res = self::resNoDel($srcWidth, $srcHeight,$W,0);
        }
        // resize to the given height
        if ($W == 0 && $H != 0) {
            $res = self::resNoDel($srcWidth, $srcHeight,0,$H);
        }
        // resize with cropping
        if ($W != 0 && $H != 0) {
            $res = self::resDel($srcWidth, $srcHeight, $W, $H, $Key);
        }
        // render the result and overwrite the source file
        if ($res) {
            $endImage = @ImageCreateTrueColor($res[2], $res[3]);
            ImageCopyResampled($endImage, $srcImage, 0, 0, $res[0], $res[1], $res[2], $res[3], $res[4], $res[5]);
            unlink($img);
            switch($rasshr){
                case "gif": ImageGif( $endImage, $img ); break;
                default: case "jpg": imagejpeg( $endImage, $img ); break;
                case "png": imagepng( $endImage, $img ); break;
            }
            ImageDestroy($endImage);
        }
    }
    // Proportional resize (no cropping): exactly one of $W / $H is non-zero.
    // Returns [srcX, srcY, dstW, dstH, srcW, srcH] for ImageCopyResampled.
    protected static function resNoDel($srcWidth, $srcHeight,$W,$H){
        // fixed width, derived height
        if (($W != 0)&&($H == 0)) {
            $endHeight = ($W*$srcHeight)/$srcWidth;
            $endWidth = $W;
        }
        // fixed height, derived width
        if (($W == 0)&&($H != 0)) {
            $endHeight = $H;
            $endWidth = ($H*$srcWidth)/$srcHeight;
        }
        return array (0, 0, $endWidth, $endHeight, $srcWidth, $srcHeight);
    }
    // Resize with cropping; $Key selects the crop alignment (see setImageSize).
    protected static function resDel($srcWidth, $srcHeight, $W,$H,$Key){
        // defaults: previously an unexpected $Key left $srcX/$srcY undefined
        $srcX = 0;
        $srcY = 0;
        if (($W != 0)&&($H != 0)) {
            // source is proportionally taller -> crop vertically
            if (($W/$H)>=($srcWidth/$srcHeight)){
                // crop the bottom
                if ($Key==0) {
                    $srcX=0;
                    $srcY=0;
                    $srcHeight=($H/$W)*$srcWidth;
                }
                // crop top and bottom evenly
                if ($Key==1) {
                    $srcX=0;
                    $srcY=($srcHeight-$H/$W*$srcWidth)/2;
                    $srcHeight=($H/$W)*$srcWidth;
                }
                // crop the top
                if ($Key==2) {
                    $srcX=0;
                    $srcY=$srcHeight-$H/$W*$srcWidth;
                    $srcHeight=($H/$W)*$srcWidth;
                }
            }
            // source is proportionally wider -> crop horizontally
            if (($W/$H)<($srcWidth/$srcHeight)){
                // crop the right side
                if ($Key==0) {
                    $srcX=0;
                    $srcY=0;
                    $srcWidth=($W/$H)*$srcHeight;
                }
                // crop both sides evenly
                if ($Key==1) {
                    $srcX=($srcWidth-$W/$H*$srcHeight)/2;
                    $srcY=0;
                    $srcWidth=($W/$H)*$srcHeight;
                }
                // crop the left side
                if ($Key==2) {
                    $srcX=$srcWidth-$W/$H*$srcHeight;
                    $srcY=0;
                    $srcWidth=($W/$H)*$srcHeight;
                }
            }
        }
        return array($srcX, $srcY, $W, $H, $srcWidth, $srcHeight);
    }
    /**
     * Content-addressed name for an uploaded file: md5 of its byte size plus
     * the original extension. Returns false when the file does not exist.
     */
    public static function getMD5Filename($filename, $dir) {
        $newFilename = false;
        $extension = substr(strrchr($filename,'.'),1);
        if (file_exists($dir . $filename)) {
            $newFilename = md5(filesize($dir . $filename)).'.'.$extension;
        }
        return $newFilename;
    }
    /** Detect a file's MIME type via the fileinfo extension. */
    public static function getMimeType($file_path) {
        $finfo = finfo_open(FILEINFO_MIME_TYPE);
        $mime_type = @finfo_file($finfo, $file_path);
        return $mime_type;
    }
    /**
     * Render the admin image-gallery widget plus its Plupload wiring.
     * BUGFIX: $productId now has a default value — a required parameter after
     * optional ones is invalid in PHP 8 and forced callers to pass everything.
     */
    public static function showGallery($repositoryName, $fileIds = array(), $resize = 'main', $productId = 0) {
        $fm = FileManager::getInstance();
        $fm->initializeManager();
        //$fc = new FileController();
        //$fc->repositoryCreate();
        $repository = Helper::fetchAssoc("SELECT * FROM `file_manager` WHERE `name` = :name", array(':name' => $repositoryName));
        // force-int the ids: they are inlined into the IN (...) clause below
        $items = empty($fileIds) ? array() : Helper::fetchAssocInArray(
            "SELECT * FROM `file_list` WHERE `id` IN (" . implode(',', array_map('intval', $fileIds)) . ")",
            array(), '', '', false);
        ?>
        <section id="gallery_<?=$repository['name'];?>" data-section="<?=$repository['section'];?>" class="gallery">
        <div class="well">
        <ul class="img_list">
        <? if (!empty($items)) :
        foreach($items as $item):
        $repository['resize'] = @unserialize($repository['resize']);
        ?>
        <li class="img_block">
        <img class="img-polaroid" file_id="<?=$item['id'];?>"
        src="<?=FileManager::getResizedFileName($item['path'], $resize);?>"/>
        </li>
        <? endforeach;
        endif; ?>
        </ul>
        <button id="browse_<?=$repository['name'];?>" class="btn btn-large btn-block add_button" type="button">Добавить изображение (<?=$repository['name']?>)</button>
        </div>
        <input type="hidden" name="controllerUrl" value="/file/"/>
        </section>
        <script type="text/javascript">
        var pluploadSet = {
        runtimes : 'html5,flash,html4',
        flash_swf_url : '<?=FileManager::getAssetsPath();?>/plupload.flash.swf',
        browse_button : 'browse_<?=$repository['name'];?>',
        url : '/file/upload/<?=$repository['name'];?>?resize_key=<?=$resize?>&product_id=<?=$productId?>',
        dragdrop : 1,
        drop_element : 'browse_<?=$repository['name'];?>',
        multi_selection: true,
        multipart_params: {
        section: '<?=$repository['section'];?>',
        resize_key: '<?=$resize?>',
        used: '<?=intval($repository['setUsed'])?>'
        },
        callbackList: ['addGalleryImage']
        };
        initPlupload(pluploadSet);
        /* gallery core functions */
        function addGalleryImage(response) {
        //find gallery
        var gallery = $('#gallery_' + response.repository);
        if (gallery.length) {
        var imageUl = gallery.find('ul.img_list');
        var newBlock = $('<li class="img_block">' +
        '<img class="img-polaroid" src="'+response.path+'" file_id="'+response.fileId+'"/>' +
        '</li>');
        imageUl.append(newBlock);
        }
        }
        function deleteGalleryImage(url, file) {
        url += 'deleteFile/?product_id=<?=$productId?>';
        deleteFile(url, file, 'deleteImageBlock');
        }
        function deleteImageBlock(response){
        var gallery = $('#gallery_' + response.repository);
        var block = gallery.find('img[file_id="'+response.fileId+'"]').closest('li');
        block.remove();
        }
        $(document).ready(function(){
        $('.gallery').on('click', '.img_block', function(){
        var url, repository, section, imageId;
        var gallery = $(this).closest('.gallery');
        url = gallery.find('input[name="controllerUrl"]').val();
        repository = gallery.attr('id').split('gallery_')[1];
        section = gallery.data('section');
        imageId = parseInt($(this).find('img').attr('file_id'));
        if (!isNaN(imageId)) {
        showModal(
        'Удаление изображения',
        'Вы уверены, что хотите удалить выбранное изображение?',
        [{
        butClass: 'btm-primary',
        text: 'Да',
        func: 'deleteGalleryImage',
        params: [url, {id:imageId, repository:repository, section:section}]
        },'close']
        );
        }
        });
        });
        </script>
        <?
    }
}<file_sep>/admin/module/menu/add.php
<?
// Admin: add a menu item (pm_menu). Inserts on submit, then redirects to the list.
if (!empty($_POST['addpage'])) :
    $sql = "INSERT INTO `pm_menu` SET
    `url` = :url,
    `title` = :title,
    `sort` = :sort";
    $paramsSql = array(
        ':url' => $_POST['url'],
        ':title' => $_POST['title'],
        ':sort' => $_POST['sort']
    );
    Helper::executeStatement($sql, $paramsSql);
    header('Location: /'.$URL[1].'/'.$URL[2].'/'.$URL[3]);
    exit;
endif;
// BUGFIX: $r was referenced in the form below but never defined on this
// "add" page — provide blank defaults so value="" does not raise notices.
$r = array('url' => '', 'title' => '', 'sort' => '');
?>
<p><i class="icon-arrow-left"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Вернуться к списку</a></p>
<h3>Добавление пункта меню</h3>
<form class="form-horizontal" action="" method="post">
<div class="control-group">
<label class="control-label">URL</label>
<div class="controls"><input class="input-xlarge" type="text" name="url" value="<?=$r['url'];?>"></div>
</div>
<div class="control-group">
<label class="control-label">Название</label>
<div class="controls"><input class="input-xlarge" type="text" name="title" value="<?=$r['title'];?>"></div>
</div>
<div class="control-group">
<label class="control-label">Сортировка</label>
<div class="controls"><input class="input-xlarge" type="text" name="sort" value="<?=$r['sort'];?>"></div>
</div>
<div class="form-actions">
<button class="btn btn-primary" type="submit" name="addpage" value="add">Добавить</button>
<a class="btn" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Отмена</a>
</div>
</form><file_sep>/templates/search.php
<?
// Search results page; the query is the (urlencoded) third URL segment.
$page->header = $page->title = 'Поиск';
$q = trim(urldecode($URL[2]));
$count = 0;       // read below even when the search is aborted
$items = array(); // guard for the foreach in the markup
$stopSearch = false;
if (mb_strlen($q, 'UTF-8') < 3) {
    $stopSearch = "Задан слишком короткий поисковый запрос.";
} else {
    // SECURITY: $q comes straight from the URL and is interpolated into a
    // LIKE pattern — escape quotes/backslashes to block SQL injection.
    $qSql = addslashes($q);
    $catalog['filter'] = "[@=`name` LIKE '%$qSql%']";
    $sql = Catalog::filterToSQL($catalog['filter'], array('id', 'name','alias','price','article', 'main_photo'));
    if (empty($_GET['order'])) $_GET['order'] = 'price';
    if (empty($_GET['page'])) $_GET['page'] = 1;
    if ($sql) {
        if($_GET['order'] === 'price') $sql .= ' ORDER BY `price` ';
        if($_GET['order'] === 'name') $sql .= ' ORDER BY `name` ';
        if($_GET['order'] === 'rate') $sql .= ' ORDER BY `rate` ';
        $sql .= ' LIMIT '.(($_GET['page']-1)*ON_PAGE).', '.ON_PAGE;
        $items = Helper::fetchAssocInArray($sql);
    }
    // total number of matches for the pager / messages
    $sql = Catalog::filterToSQL($catalog['filter'], array('id'));
    if ($sql) {
        $sql = str_replace('`item`.`id`',' COUNT(`id`) as `count` ',$sql);
        $count = Helper::fetchAssocInArray($sql);
        $count = $count[0]['count'];
    }
    $pages = ceil($count / ON_PAGE);
}
?>
<section class="featured">
<h1><?=$page->header?></h1>
<div class="search_info">
<? if ($stopSearch) { ?>
<p><?=$stopSearch?></p>
<? } elseif ($count == 0) { ?>
<p>По вашему запросу <span>"<?=htmlspecialchars($q)?>"</span> товаров не найдено.</p>
<p>Попробуйте изменить поисковый запрос.</p>
<? } else { ?>
<p>По запросу <span>"<?=htmlspecialchars($q)?>"</span> <?=Helper::pluralForm($count, $plural->find)?> <span><?=$count?></span> <?=Helper::pluralForm($count, $plural->product)?>.</p>
<? } ?>
</div>
<? if ($count && !$stopSearch) {
$sql = Catalog::filterToSQL($catalog['filter'], array('id'));
PaginationHelper::showSort($sql); ?>
<ul class="products">
<? foreach ($items as $item) {
Catalog::showProductInCatalog($item);
} ?>
</ul><?
$sql = Catalog::filterToSQL($catalog['filter'], array('id'));
PaginationHelper::showSort($sql);
} ?>
</section><file_sep>/classes/review.php
<? class Review extends Entity {
// Product review entity mapped onto the `pm_review` table.
protected $table = 'pm_review';
// Attribute map consumed by Entity: array key = PHP property name,
// 'db' overrides the column name when it differs, 'type' is the cast
// Entity applies, 'isPrimaryKey' marks the key column.
protected $attributes = array(
'id' => array(
'isPrimaryKey' => true,
),
'productId' => array(
'db' => 'product_id',
'type' => 'int'
),
'userId' => array(
'db' => 'user_id',
'type' => 'int'
),
'userName' => array(
'db' => 'user_name'
),
'userEmail' => array(
'db' => 'user_email'
),
'text' => array(),
'rating' => array(
'type' => 'int'
),
// created/edited are unix timestamps stored as ints
'created' => array(
'type' => 'int'
),
'edited' => array(
'type' => 'int'
),
// moderation flag: reviews are shown only after approval
'isModerated' => array(
'db' => 'is_moderated',
'type' => 'int'
),
);
}<file_sep>/classes/helper.pagination.php
<? class PaginationHelper {
    /**
     * Render the pager for the item query in $sql.
     * The incoming query selects `item`.`id`; it is rewritten into a COUNT
     * query to compute the page total (ON_PAGE items per page).
     */
    public static function showPagination($sql) {
        global $URL;
        if ($sql) {
            // normalise GET params: direct hits may arrive without them
            if (empty($_GET['page']) || (int)$_GET['page'] < 1) $_GET['page'] = 1;
            if (empty($_GET['order'])) $_GET['order'] = 'price';
            $sql = str_replace('`item`.`id`',' COUNT(`id`) as `count` ',$sql);
            $count = Helper::fetchAssocInArray($sql);
            $count = $count[0]['count'];
            $pages = ceil($count / ON_PAGE);
            $currentURL = implode('/',$URL);
            if ($pages > 1) { ?>
<div class="pagination">
<div class="title inlineblock">Страницы:</div>
<? if ($pages > 5 && $_GET['page'] > 5) { ?><a href="<?=$currentURL?>?order=<?=$_GET['order'];?>&page=1" class="gofirst inlineblock">|<</a> <? } ?>
<? if ($_GET['page'] > 1) { ?> <a href="<?=$currentURL?>?order=<?=$_GET['order'];?>&page=<?=($_GET['page']-1);?>" class="goprev inlineblock"><</a> <? } ?>
<ul class="list inlineblock">
<?
// show a window of up to 4 page links on each side of the current page
$from = $_GET['page'] - 4; if ($from <= 0) $from = 1;
$to = $_GET['page'] + 4; if ($to > $pages) $to = $pages;
for($i = $from; $i <= $to; $i++) { ?>
<li class="inlineblock"><a href="<?=$currentURL?>?order=<?=$_GET['order'];?>&page=<?=$i;?>" class="<? if($_GET['page'] == $i) { ?> active<?}?> inlineblock"><?=$i;?></a></li>
<? } ?>
</ul>
<? if ($_GET['page'] <= ($pages - 1) ) { ?> <a href="<?=$currentURL?>?order=<?=$_GET['order'];?>&page=<?=($_GET['page']+1);?>" class="gonext inlineblock">></a> <? } ?>
<? // BUGFIX: the "last page" link compared against ON_PAGE (items per page)
   // instead of the page count, so its visibility was unrelated to paging.
if ($pages > 5 && $_GET['page'] <= ($pages - 5) ) { ?><a href="<?=$currentURL?>?order=<?=$_GET['order'];?>&page=<?=$pages;?>" class="golast inlineblock">>|</a> <? } ?>
</div>
<? }
        }
    }
    /**
     * Render the "sort by" bar and, when $sql is non-empty, the pager below it.
     */
    public static function showSort($sql) {
        global $URL;
        if (empty($_GET['order'])) $_GET['order'] = 'price';
        ?>
<div class="catalog_params">
<div class="sort">
<div class="title inlineblock">Сортировать по:</div>
<ul class="inlineblock">
<li class="inlineblock"><a href="<?=implode('/',$URL)?>?order=price" <?if($_GET['order']=='price') {?> class="active" <?}?> >цене</a></li>
<li class="inlineblock"><a href="<?=implode('/',$URL)?>?order=name" <?if($_GET['order']=='name') {?> class="active" <?}?> >названию</a></li>
<!--li class="inlineblock"><a href="<?=implode('/',$URL)?>?order=rate" <?if($_GET['order']=='rate') {?> class="active" <?}?> >рейтингу</a></li-->
</ul>
</div>
<? if ( !empty($sql)) { self::showPagination($sql); } ?>
</div>
<?
    }
}<file_sep>/define.php
<?
// DATABASE
// PDO connection settings and the shared table prefix (consumed by Helper).
define('DB_DRIVER','mysql');
define('DB_HOST', 'click2dogs.ru');
define('DB_NAME', 'click2home');
define('DB_USER', 'katz');
define('DB_PASSWORD', '<PASSWORD>');
define('DB_PREFIX', 'pm_');
// catalog items rendered per page (used by pagination and LIMIT clauses)
define('ON_PAGE', 12);
define('SITE_NAME', 'click2home');
// absolute URL of the site and filesystem roots used by templates/classes
define('PATH_BASE', 'http://' . $_SERVER['HTTP_HOST']);
define('PATH_ROOT', __DIR__ . DIRECTORY_SEPARATOR);
define('PATH_CLASSES', PATH_ROOT . 'classes' . DIRECTORY_SEPARATOR);
define('PATH_VIEWS', PATH_ROOT . 'views' . DIRECTORY_SEPARATOR);
define('PATH_PARTS', PATH_ROOT . 'parts' . DIRECTORY_SEPARATOR);
define('PATH_TEMPLATES',PATH_ROOT . 'templates' . DIRECTORY_SEPARATOR);
// upload directory: web-relative and absolute variants
define('PATH_FILES', 'files/');
define('PATH_FILES_ABS',PATH_ROOT . 'files' . DIRECTORY_SEPARATOR);
// ADMIN
define('ADMIN_DIR', 'admin');
define('PATH_ADMIN', PATH_ROOT . ADMIN_DIR . DIRECTORY_SEPARATOR);
<file_sep>/admin/index.php
<?
// Admin front controller: auth check, per-module rights check, module dispatch.
require_once '../init.php';
$CONTENT['title'] = 'Adminzone :: ';
//EXTRACT ADMIN DATA
$sql = "SELECT * FROM `pm_base`";
$admin_data = Helper::fetchAssocInArray($sql, array(), 'param', 'value');
$page = new Page('admin');
// buffer all template output so it can be injected into the view below
ob_start();
//CHECK LOGIN
if(!$_SESSION['isadmin']) {
$page->title .= 'Привет! Пожалуйста, авторизуйтесь.';
$page->setView('auth');
} else {
if ($URL[2] == 'logout') $page->setTemplate('admin.logout');
elseif (empty($URL[2])) $page->setTemplate('admin.main');
elseif ($URL[2]!='' AND $URL[3]!='') {
if (file_exists($URL[2].'/'.$URL[3].'/index.php')) {
// CHECK RIGHTS
if ($URL[2] == 'module_admin') {
// admin-management modules: superadmin only
if($_SESSION['status'] != 'superadmin') {
die('NOT ACCESS');
}
} elseif ($URL[2] == 'module') {
// superadmins skip the per-module rights check entirely
if($_SESSION['status'] != 'superadmin') {
// build a lookup of the module ids granted to this admin
$RIGHT = array();
$t = explode(',',$_SESSION['rights']); foreach($t as $el) $RIGHT[$el] = "1";
// read the module's id from its "about" file (first line)
$t = file($_SERVER['DOCUMENT_ROOT'].'/'.$URL[1].'/'.$URL[2].'/'.$URL[3].'/about');
if ($RIGHT[trim($t[0])]!=1) die('NOT ACCESS');
}
}
include($URL[2].'/'.$URL[3].'/index.php');
} else {
$page->setTemplate('admin.main');
}
}
}
// showTemplate
if (!empty($page->template)) {
include $page->template;
}
$page->content = ob_get_contents();
ob_clean();
// showView
if (!empty($page->view)) {
include $page->view;
} elseif (!empty($page->content)) {
echo $page->content;
} else {
die ('view and page content are empty');
}<file_sep>/admin/js/script.js
$(document).ready(function(){
// Global Bootstrap helpers: delegated tooltips/popovers, plus a submit guard
// that blocks forms marked .need-check until checkForm() passes.
$('body')
.tooltip({
selector: '[rel=tooltip]',
placement : 'bottom',
html : true
})
.popover({
selector: '[rel=popover]',
trigger: 'hover',
html : true
})
.on('submit', 'form.need-check', function(){
if (!checkForm($(this))) {
return false;
}
});
// Russian localisation for the jQuery UI datepicker, applied as the default.
$.datepicker.regional['ru'] = {
closeText: 'Закрыть',
prevText: '<Пред',
nextText: 'След>',
currentText: 'Сегодня',
monthNames: ['Январь','Февраль','Март','Апрель','Май','Июнь', 'Июль','Август','Сентябрь','Октябрь','Ноябрь','Декабрь'],
monthNamesShort: ['Янв','Фев','Мар','Апр','Май','Июн', 'Июл','Авг','Сен','Окт','Ноя','Дек'],
dayNames: ['воскресенье','понедельник','вторник','среда','четверг','пятница','суббота'],
dayNamesShort: ['вск','пнд','втр','срд','чтв','птн','сбт'],
dayNamesMin: ['Вс','Пн','Вт','Ср','Чт','Пт','Сб'],
weekHeader: 'Не',
dateFormat: 'dd.mm.yy',
firstDay: 1,
isRTL: false,
showMonthAfterYear: false,
yearSuffix: ''};
$.datepicker.setDefaults($.datepicker.regional['ru']);
$('.datepicker').datepicker();
});
// Shared UI strings: sendRequest is a "please wait" spinner snippet,
// fillAllFields is the validation error shown by checkForm().
var message = {
sendRequest: '<p><i class="icon-time"></i> Пожалуйста, подождите...</p>',
fillAllFields: 'Заполните все обязятельные поля'
};
function checkForm(form) {
    // Validate the form's [required] inputs. Empty ones get the Bootstrap
    // "error" class on their .control-group; an alert pops when anything is
    // missing. Returns true when the form may be submitted.
    var valid = true;
    form.find('.control-group.error').removeClass('error');
    form.find('input').each(function(i, input) {
        var $input = $(input);
        var value = (typeof $input.val() !== 'undefined') ? $input.val() : '';
        $input.val(value);
        if ($input.is('[required]') && !value.length) {
            $input.closest('.control-group').addClass('error');
            valid = false;
        }
    });
    if (!valid) {
        alertError(message.fillAllFields);
    }
    return valid;
}
/**
 * Show a dismissable Bootstrap alert inside #alertsContainer.
 * @param {string}  content    HTML message body
 * @param {string}  [type]     bootstrap flavour: 'error'|'success'|'info'|'warning'
 * @param {boolean} [autohide=true] fade the alert out automatically
 * @param {number}  [timeout=4000]  autohide delay in milliseconds
 */
function alertBox(content, type, autohide, timeout){
    if(typeof autohide === 'undefined'){
        autohide = true;
    }
    var hide_timeout = 4000;
    // explicit radix: older engines parsed strings like "08" as octal
    if (!isNaN(parseInt(timeout, 10))) hide_timeout = parseInt(timeout, 10);
    autohide = !!autohide;
    type = typeof type !== 'undefined' ? type : '';
    if(type != ''){
        type = 'alert-'+type;
    }
    var time = new Date().getTime();
    var itemID = 'notification-'+time;
    var alertHTML = '<div class="alert alert-block '+type+' fade in" id="'+itemID+'">'
        +'<button class="close" data-dismiss="alert">×</button><strong>'
        + content
        +'</strong></div>';
    $('#alertsContainer').prepend(alertHTML);
    autohide && setTimeout(function(){
        $('#'+itemID).fadeOut(900, function(){
            $(this).remove()
        })
    }, hide_timeout);
    // BUGFIX: bind the close handler to the new alert only — re-binding
    // '.alert .close' on every call stacked duplicate handlers on alerts
    // that were already on the page.
    $('#'+itemID).find('.close').click(function(){
        $(this).parent().remove();
    });
}
// Convenience wrappers around alertBox, one per Bootstrap alert flavour.
function alertError(message, autohide) {
    alertBox(message, 'error', autohide);
}
function alertOk(message, autohide) {
    alertBox(message, 'success', autohide);
}
function alertInfo(message, autohide) {
    alertBox(message, 'info', autohide);
}
function alertWarning(message, autohide) {
    alertBox(message, 'warning', autohide);
}
<? class ResponseHelper {
    /**
     * Lightweight AJAX response wrapper.
     * $status starts as 'error' so handlers must explicitly flip it;
     * $type selects the output encoding ('json' by default, or 'html').
     */
    public $status, $data, $type = 'json';

    /**
     * @param bool $trimPOST trim every incoming POST value before handling
     */
    public function __construct($trimPOST = true) {
        if ($trimPOST) {
            $_POST = Helper::trimArray($_POST);
        }
        $this->status = 'error';
    }

    /**
     * Send no-cache headers and emit the response body.
     * BUGFIX: the original switched the body on the *parameter* while sending
     * headers for $this->type, so encode('html') printed HTML under JSON
     * headers. A non-empty $type now overrides the stored type and both the
     * headers and the body switch use the same value.
     */
    public function encode($type = '') {
        if (!empty($type)) {
            $this->type = $type;
        }
        $this->setHeaders($this->type);
        switch ($this->type) {
            case 'html' :
                echo $this->data;
                break;
            default:
                echo json_encode($this);
        }
    }

    /**
     * Emit anti-cache headers plus the Content-Type matching $type.
     */
    public function setHeaders($type = '') {
        header('Expires: 0');
        header('Last-Modified: '.gmdate("D, d M Y H:i:s T"));
        header('Cache-Control: no-store, no-cache, must-revalidate');
        header('Cache-Control: post-check=0, pre-check=0', false);
        header('Pragma: no-cache');
        switch ($type) {
            case 'html' :
                header('Content-Type: text/html; charset=utf-8');
                break;
            default:
                header('Content-Type: application/json; charset=utf-8');
        }
    }
}<file_sep>/admin/module/articles/edit_group.php
<?
// Admin: create or edit an article group (pm_article_groups).
$isNew = empty($_GET['edit_group']) || !is_numeric($_GET['edit_group']) ? true : false;
if (!empty($_POST['save'])) {
    $fields = array();
    $fields[] = "`title` = :title";
    $fields[] = "`url` = :url";
    $fields[] = "`sort` = :sort";
    $fieldsArray = array();
    $fieldsArray[':title'] = $_POST['title'];
    $fieldsArray[':url'] = $_POST['url'];
    $fieldsArray[':sort'] = $_POST['sort'];
    if ($isNew) {
        // note the space after SET: the original relied on the backtick to
        // terminate the keyword ("SET`title`")
        $sql = "INSERT INTO `pm_article_groups` SET ".implode(', ', $fields);
    } else {
        $fieldsArray[':id'] = $_GET['edit_group'];
        $sql = "UPDATE `pm_article_groups` SET ".implode(', ', $fields)." WHERE `id` = :id";
    }
    Helper::executeStatement($sql, $fieldsArray);
    header('Location: /'.$URL[1].'/'.$URL[2].'/'.$URL[3]);
    exit;
}
// load the existing record only when editing; blank defaults back the
// "create" form (previously the SELECT ran with an undefined :id)
$r = array('url' => '', 'title' => '', 'sort' => '');
if (!$isNew) {
    $sql = "SELECT * FROM `pm_article_groups` WHERE `id` = :id";
    $r = Helper::fetchAssoc($sql, array(':id' => $_GET['edit_group']));
}?>
<p><i class="icon-arrow-left"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Вернуться к списку</a></p>
<h3><?=$isNew ? 'Создание' : 'Редактирование'?></h3>
<form class="form-horizontal" action="" method="post">
<div class="control-group">
<label class="control-label">URL</label>
<div class="controls"><input type="text" name="url" class="input-xxlarge" value="<?=$r['url']?>" /></div>
</div>
<div class="control-group">
<label class="control-label">Название</label>
<div class="controls"><input type="text" name="title" class="input-xxlarge" value="<?=$r['title']?>" /></div>
</div>
<div class="control-group">
<label class="control-label">Порядок сортировки</label>
<div class="controls"><input type="text" name="sort" class="input-xxlarge numeric_only" value="<?=$r['sort']?>" /></div>
</div>
<div class="form-actions">
<button class="btn btn-primary" type="submit" name="save" value="save">Сохранить</button>
<a class="btn" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Отмена</a>
</div>
</form><file_sep>/admin/module/item/functions.php
<?
/*
 * Delete a record from the module's database table by id.
 */
function delete_from_db ($id) {
    // Remove the row with the given id from the module's table ($db_table global).
    global $db_table;
    $query = 'DELETE FROM `' . $db_table . '` WHERE `id` = :id';
    Helper::executeStatement($query, array(':id' => (int) $id));
}
// render the contents of a single admin-table cell
function print_cell_table ($index_col, $col_name, $col_value) {
    // Render one cell of the admin listing table.
    // Columns 0-2: the sanitized value itself; column 3: the value resolved
    // through the global $tt lookup; any later column: edit/delete action
    // icons, where $col_value is the row id.
    if ($index_col == 0 || $index_col == 1 || $index_col == 2) {
        print strip_tags (stripslashes ($col_value));
    } elseif ($index_col == 3) {
        global $tt;
        print strip_tags (stripslashes ($tt[$col_value]));
    } else {
        Admin::printImageMenu('?edit=' . ((int)$col_value), 'edit');
        Admin::printImageMenu('?del=' . ((int)$col_value), 'del');
    }
}<file_sep>/admin/module/param/addedit.php
<?
// Admin add/edit page for the "param" module: renders the edit table and
// handles the Add/Save and Cancel buttons.
// back-link target for returning from the add/edit page
$back_url = Admin::thisModule();
// page title depends on whether we add or edit
if (isset($_GET['add']))
$page->title .= 'Добавить товар';
else
$page->title.= 'Редактировать товар';
?>
<p><a href="<?=$back_url;?>"><span class="icon icon-arrow-left"></span> Вернуться к списку параметров</a></p><?php
// render the add/edit table
if (isset($_GET['add']) || isset($_GET['edit'])) {
if (isset($_GET['edit']))
$item = Helper::selectAllForId($db_table, $_GET['edit']);
else
$item = array ();
// one row per field described in the module's $simple_item_base config
if ($simple_item_base)
foreach ($simple_item_base as $ib) {
$arr_elem = array (
$ib['desc'],
$ib['db_name'],
$item[$ib['db_name']],
$ib['input'],
$ib['array_select'],
);
$array_data[] = $arr_elem;
}
// output the add/edit table
Admin::createPMTableAddEdit($array_data, $item['id']);
}
// Cancel button
if (isset($_POST['cancel'])) {
header ('Location: ' . $back_url);
exit;
}
// Add/Save button
if (isset($_POST['addedit'])) {
// flags showing whether we are adding or editing the object
$flag_is_add = false;
$flag_is_edit = false;
if ($_POST['id'])
$flag_is_edit = true;
else
$flag_is_add = true;
// start composing the INSERT/UPDATE statement
if ($flag_is_add)
$sql = 'INSERT INTO';
else
$sql = 'UPDATE';
$sql .= ' `' . $db_table . '` ';
// append the field list to the statement
$arr_for_sql = array ();
if ($simple_item_base)
foreach ($simple_item_base as $ib)
$arr_for_sql[] = array (
$ib['db_name'],
$ib['db_type'],
);
// SET clause built from all added/edited values
$sql .= Helper::createSetString($arr_for_sql);
// when editing, restrict the statement to the object's id
if ($flag_is_edit)
$sql .= ' WHERE `id`=' . $_POST['id'];
// run the statement
$r = Helper::executeStatement($sql);
// resolve the object id
if ($flag_is_edit)
$id = $_POST['id'];
else
$id = $pdo->lastInsertId();
// go back to the list
header ('Location: ' . $back_url);
exit;
}
?><file_sep>/classes/user.php
<?
/**
 * Site user: authentication, registration and password recovery.
 * NOTE: still built on the legacy mysql_* extension; the rest of the code
 * base uses the PDO-based Helper — porting this class is recommended.
 */
class User {
    public $id, $email, $name, $surname, $birthday, $foreignPassportNumber, $password, $created;
    // Whitelist of `pm_users` columns — the only properties persisted by
    // registration()/save().
    public $DBFields = array('id', 'email', 'name', 'surname', 'birthday', 'foreignPassportNumber', 'password', 'created');
    private static $salt = '<PASSWORD>';
    /**
     * @param int $id load the user with this id; 0 creates a blank user
     */
    public function __construct($id = 0) {
        if (is_numeric($id) && $id > 0) {
            $this->id = $id;
            // intval: is_numeric also accepts floats/exponents
            $sql = "SELECT * FROM `pm_users` WHERE `id` = ".intval($id);
            $r = mysql_query($sql);
            Helper::fetchRequestInObject($this, mysql_fetch_object($r));
        } else {
            # new object
            $this->created = time();
        }
    }
    /**
     * Authenticate with the email/password already set on the object.
     * On success stores the uid in the session and returns the user id;
     * otherwise returns a human-readable error string.
     */
    public function auth() {
        // BUGFIX: the length check belongs to the password — the original
        // measured the *email* while reporting a password-length error.
        if (mb_strlen($this->password) < 6) {
            return 'Минимальная длина пароля - 6 символов';
        } elseif (!filter_var($this->email, FILTER_VALIDATE_EMAIL)) {
            return 'Проверьте правильность ввода email';
        }
        $this->password = self::encodePassword($this->password);
        $sql = "SELECT *
        FROM `pm_users`
        WHERE `email` = '".mysql_real_escape_string($this->email)."'
        AND `password` = '".mysql_real_escape_string($this->password)."'
        LIMIT 1";
        $r = mysql_query($sql);
        if (mysql_num_rows($r) == 1) {
            Helper::fetchRequestInObject($this, mysql_fetch_object($r));
            $_SESSION['uid'] = $this->id;
            return $this->id;
        } else {
            return 'Проверьте правильность ввода email и пароля';
        }
    }
    /** Destroy the session/cookies and blank out every property. */
    public function logOut() {
        @session_destroy();
        setcookie('uid');
        setcookie('pass');
        foreach ($this as $key => $el) {
            $this->$key = NULL;
        }
        return true;
    }
    /**
     * Create a new account from the properties set on the object.
     * Returns the new user id, or an error string.
     */
    public function registration() {
        if (!filter_var($this->email, FILTER_VALIDATE_EMAIL)) {
            return 'Проверьте правильность ввода email';
        }
        // make sure the email is not taken yet
        $sql = "SELECT count(*)
        FROM `pm_users`
        WHERE `email` = '".mysql_real_escape_string($this->email)."'";
        $count = @mysql_result(mysql_query($sql), 0);
        if ($count) {
            return 'Пользователь с таким email уже зарегистрирован';
        }
        $this->password = self::encodePassword($this->password);
        // BUGFIX: build the INSERT from the column whitelist — iterating the
        // object itself (as before) also picked up the $DBFields array
        // property and produced a broken query.
        $params = array();
        foreach ($this->DBFields as $key) {
            if ($key == 'id') continue; // auto-increment
            $params[] = "`".$key."` = '".mysql_real_escape_string($this->$key)."'";
        }
        $params = implode(', ', $params);
        $sql = "INSERT INTO `pm_users` SET $params";
        mysql_query($sql) or die ($sql. "<br/>". mysql_error());
        $this->id = mysql_insert_id();
        $_SESSION['uid'] = $this->id;
        return $this->id;
    }
    /**
     * Generate a new password for the user with $this->email and mail it.
     * Returns the new password on success, or an error string.
     */
    public function forgotPassword() {
        global $TEXT;
        if (!filter_var($this->email, FILTER_VALIDATE_EMAIL)) {
            return 'Проверьте правильность ввода email';
        }
        $sql = "SELECT *
        FROM `pm_users`
        WHERE `email` = '".mysql_real_escape_string($this->email)."'
        LIMIT 1";
        $r = mysql_query($sql);
        if (mysql_num_rows($r) == 1) {
            Helper::fetchRequestInObject($this, mysql_fetch_object($r));
            $newPassword = rand(100000, 999999);
            $this->password = self::encodePassword($newPassword);
            // BUGFIX: the UPDATE had no WHERE clause and reset the password
            // of EVERY user in the table.
            $sql = "UPDATE `pm_users` SET `password` = '".$this->password."' WHERE `id` = ".intval($this->id);
            if (mysql_query($sql)) {
                if (MailHelper::mail($this->email, 'Новый пароль', "Добрый день!\n\nВы запросили восстановление пароля на сайте http://" . $TEXT['site_name'] . ".\nВаш новый пароль: $newPassword")) {
                    return $newPassword;
                } else {
                    return 'При отправке нового пароля произошла ошибка. Попробуйте позже';
                }
            } else {
                return 'При генерации нового пароля произошла ошибка. Попробуйте позже';
            }
        } else {
            return 'Пользователь с таким email не зарегистрирован';
        }
    }
    /** Salted double-md5 password hash (legacy scheme kept for existing rows). */
    public static function encodePassword($password) {
        return md5(self::$salt.md5(self::$salt.$password));
    }
    /** @return int|false the logged-in user id from the session, or false */
    public static function isLoggedIn() {
        if (!empty($_SESSION['uid'])) {
            return $_SESSION['uid'];
        }
        return false;
    }
    /**
     * Persist the current property values for an existing user.
     * Returns true on success, or an error string.
     */
    function save() {
        if (empty($this->id)) {
            // creating a new user is handled by registration()
        } else {
            // updating the current user
        }
        // BUGFIX: iterate the column whitelist, not the object (see registration)
        $params = array();
        foreach ($this->DBFields as $key) {
            if ($key == 'id') continue;
            $params[] = "`".$key."` = '".mysql_real_escape_string($this->$key)."'";
        }
        $params = implode(', ', $params);
        $sql = "UPDATE `pm_users` SET $params WHERE `id` = ".intval($this->id);
        if (mysql_query($sql)) {
            return true;
        } else {
            return $sql. "<br/>". mysql_error();
        }
    }
}
?><file_sep>/parts/html/nav.php
<nav class="menu">
<div class="title">Каталог</div>
<? include $page->pathPart('menu.catalog') ?>
</nav>
<?
// secondary sidebar menu: article groups with their articles (both sorted by `sort`)
$articleGroups = Helper::fetchAssocInArray("SELECT * FROM `pm_article_groups` ORDER BY `sort`");
if (!empty($articleGroups)) { ?>
<nav class="menu">
<div class="title light">Полезные статьи</div>
<section class="menuleft">
<ul>
<?
// all articles at once, keyed by groupId ('groupId[]' => list of rows per group)
$articles = Helper::fetchAssocInArray("SELECT `url`, `header`, `groupId` FROM `pm_article_items` ORDER BY `sort`", array(), 'groupId[]');
foreach ($articleGroups as $el) {
// hide groups that have no articles
if (empty($articles[$el['id']])) continue; ?>
<li class="openable">
<a href="/articles/<?=$el['url']?>"><?=$el['title']?></a>
<span class="close"></span>
<ul class="closed">
<? foreach ($articles[$el['id']] as $article) { ?>
<li><a href="/articles/<?=$el['url']?>/<?=$article['url']?>"><?=$article['header']?></a></li>
<? } ?>
</ul>
</li>
<? } ?>
</ul>
</section>
</nav>
<? } ?>
<?/*
<section class="banners">
<a href="#">
<img src="http://livedemo00.template-help.com/opencart_43001/image/cache/data/banner-1-205x201.png" alt="" title="">
</a>
<a href="#">
<img src="http://livedemo00.template-help.com/opencart_43001/image/cache/data/banner-1-205x201.png" alt="" title="">
</a>
</section>
*/
?><file_sep>/templates/catalog.php
<?
// Catalog category page: the last URL segment selects the category record.
//$url = Catalog::cleanURL($URL);
$url = array(end($URL));
$sql = Catalog::getSQLFromURL($url);
$result = $pdo->query($sql);
$catalog = $result->fetch(PDO::FETCH_ASSOC);
// !empty also covers fetch() returning false for an unknown category
if (!empty($catalog['id'])) {
    $page->title = 'Каталог товаров';
    $page->metakey = $catalog['metakey'];
    $page->metadesc = $catalog['metadesc'];
?>
<section class="featured">
<h1 class="block-title"><?=$catalog['name'];?></h1>
<div class="text">
<?=$catalog['text']?>
</div>
<!--div class="block-content"-->
<?
    // fetch the current page of items matching this category's filter
    $sql = Catalog::filterToSQL($catalog['filter'], array('id', 'name','alias','price','article', 'main_photo'));
    if (empty($_GET['order'])) $_GET['order'] = 'price';
    if (empty($_GET['page'])) $_GET['page'] = 1;
    // guard: the foreach below must not fail when no SQL could be built
    $items = array();
    if ($sql) {
        if($_GET['order'] === 'price') $sql .= ' ORDER BY `price` ';
        if($_GET['order'] === 'name') $sql .= ' ORDER BY `name` ';
        if($_GET['order'] === 'rate') $sql .= ' ORDER BY `rate` ';
        $sql .= ' LIMIT '.(($_GET['page']-1)*ON_PAGE).', '.ON_PAGE;
        $items = Helper::fetchAssocInArray($sql);
    }
    $sql = Catalog::filterToSQL($catalog['filter'], array('id'));
    PaginationHelper::showSort($sql); ?>
<ul class="products">
<? foreach ($items as $item) {
    Catalog::showProductInCatalog($item);
} ?>
</ul><?
    $sql = Catalog::filterToSQL($catalog['filter'], array('id'));
    PaginationHelper::showSort($sql); ?>
<!--/div-->
</section>
<? } else {
    // unknown category — fall back to the static-page template
    $page->setTemplate('static');
    include $page->template;
}<file_sep>/templates/404.php
<?
// 404 template: emit the protocol-correct status line, then fill every
// page meta field with the same "not found" caption.
$notFound = 'Ошибка 404. Страница не найдена';
header($_SERVER["SERVER_PROTOCOL"]." 404 Not Found");
$page->title = $notFound;
$page->header = $notFound;
$page->metakey = $notFound;
$page->metadesc = $notFound;
?>
<h1><?=$page->header?></h1>
<p>Страница, которую Вы ищите, не существует. Проверьте правильность запроса.</p>
<file_sep>/templates/order.php
<?php
// Order confirmation page: builds an HTML summary of the cart, mails it
// to both the customer and the shop, clears the cart cookie and sends an
// SMS notification.
$page->title = 'Заказ успешно оформлен';
// NOTE(review): unserialize() on a client cookie allows object injection —
// storing the cart as JSON would be safer; confirm before changing format.
$cart = isset($_COOKIE['cart']) ? unserialize($_COOKIE['cart']) : array();
if (!is_array($cart)) $cart = array(); // FIX: guard foreach against a bad cookie
// FIX: escape customer-supplied fields once — they are embedded into HTML
$orderName = htmlspecialchars($_POST['name']);
$orderPhone = htmlspecialchars($_POST['phone']);
$orderAddress = htmlspecialchars($_POST['address']);
$total = 0; // FIX: initialize the running total accumulated in the loop
ob_start();
?>
<p>Спасибо за ваш заказ в интернет-магазине <?=$TEXT['site_name']?></p>
<p>Ваши данные:<br/>
Имя: <?=$orderName;?><br/>
Телефон: <?=$orderPhone;?><br/>
Адрес доставки: <?=$orderAddress;?><br/>
</p>
<p>Ваш заказ:</p>
<table class="cart_table">
<? foreach( $cart as $key => $item ) {
$sql = "SELECT * FROM `item` WHERE `id` = :id";
$mainData = Helper::fetchAssoc($sql, array(':id' => $item['id']));
if(!is_numeric($mainData['id'])) { die ('ID товара не существует'); }
if (empty($mainData['name'])) $mainData['name'] = $mainData['shop_name'];
if (empty($mainData['alias'])) $mainData['alias'] = $item['id'];
$sql = "SELECT * FROM `item_param` WHERE `item_id` = :itemId";
$paramData = Helper::fetchAssocInArray($sql, array(':itemId' => $mainData['id']), 'param_id', 'value');
?>
<tr>
<td>
<? $file = '/upload/'.($mainData['id']%10).'/'.($mainData['id']).'/'.$mainData['id'].'_0.jpg'; ?>
<img alt="" style="max-width: 76px" src="http://<?=$TEXT['site_name']?>/<?=$file;?>">
</td>
<td>
<a href="http://<?=$TEXT['site_name']?>/item/<?=$mainData['alias'];?>" target="_blank"><?=$mainData['name'];?></a>
</td>
<td class="cart-table-count">
<?=$item['count'];?>шт
</td>
<td class="cart-table-price">
<? echo $mainData['price'] * $item['count']; $total += $mainData['price'] * $item['count'];?> руб.
</td>
</tr>
<? }?>
</table>
<p>Если вы хотите внести корректировку в ваш заказ, <a href="http://<?=$TEXT['site_name']?>/contacts">свяжитесь с нами</a>. </p>
<?
$content = ob_get_clean();
$subject = 'Новый заказ с сайта '.$TEXT['site_name'];
$headers = "Content-type: text/html; charset=utf-8 \r\n";
// FIX: the address was missing its opening "<", producing a malformed From header
$headers .= "From: ".$TEXT['site_name']." <".$TEXT['email'].">\r\n";
// FIX: expire the cookie client-side as well — unset() only clears the
// local copy and the cart would reappear on the next request
setcookie('cart', '', time() - 3600, '/');
unset($_COOKIE['cart']);
mail($_POST['email'], $subject, $content, $headers);
mail($BASE['email'], $subject, $content, $headers);
$sms = new Smspilot();
$sms->send( '79516723132', 'c2d: новый заказ ' . $_POST['email']);
?>
<h1>Заказ успешно оформлен</h1>
<p><strong><?=$orderName;?>, спасибо за ваш заказ!</strong></p>
<p>Ваш заказ сформирован. С вами свяжутся в ближайшее время. Копия вашего заказа отправлена вам на e-mail. </p>
<p>Если вы хотите внести корректировку в ваш заказ, <a href="/contacts">свяжитесь с нами</a>. </p>
<file_sep>/index.php
<?
// Front controller: boots the app, maps the first URL segment to a
// template, renders it into $page->content, then wraps it in the view.
require_once 'init.php';
// empty() keeps the original behavior: missing/empty segment -> main page
$route = empty($URL[1]) ? '' : $URL[1];
// raw file requests bypass the page/view pipeline entirely
if ($route == 'file') {
include PATH_ROOT . 'backend' . DIRECTORY_SEPARATOR . 'file.php';
exit;
}
$page = new Page();
ob_start();
// explicit route => template map; anything unknown is treated as a catalog URL
$routes = array(
'' => 'main',
'cart' => 'cart',
'order' => 'order',
'search' => 'search',
'articles' => 'article',
'item' => 'item',
'404' => '404',
);
$page->setTemplate(isset($routes[$route]) ? $routes[$route] : 'catalog');
// showTemplate: the template renders into the output buffer
if (!empty($page->template)) {
include $page->template;
}
$page->content = ob_get_contents();
ob_clean();
// showView: the view echoes $page->content inside the layout
if (!empty($page->view)) {
include $page->view;
} elseif (!empty($page->content)) {
echo $page->content;
} else {
die ('view and page content are empty');
}<file_sep>/classes/page.php
<?
class Page {
// view = outer layout file, template = inner content file, part = last
// resolved partial path; the rest are page meta fields.
public $view, $template, $content, $part, $title, $header, $metakey, $metadesc;
/**
 * @param string $view name of the view to load immediately ('default' by default)
 */
public function __construct($view = 'default') {
$this->setView($view);
}
/**
 * Resolves $view to PATH_VIEWS/<view>.php and stores it in $this->view.
 * Dies when the file does not exist; clears the view when $view is empty.
 */
public function setView($view) {
if (!empty($view)) {
$this->view = PATH_VIEWS . $view . '.php';
if (!file_exists($this->view)) {
die('no such file: '.$this->view);
}
} else {
$this->view = null;
}
}
/**
 * Resolves a dot-separated template name (e.g. "admin.list") to a file
 * under PATH_TEMPLATES and stores it in $this->template.
 * Dies when the file does not exist; clears the template when empty.
 */
public function setTemplate($template) {
if (!empty($template)) {
$this->template = self::dottedPath(PATH_TEMPLATES, $template);
if (!file_exists($this->template)) {
die('no such file: '.$this->template);
}
} else {
$this->template = null;
}
}
/**
 * Resolves a dot-separated partial name under PATH_PARTS, stores it in
 * $this->part and returns the full path. Dies when the file is missing.
 */
public function pathPart($part) {
$this->part = self::dottedPath(PATH_PARTS, $part);
if (file_exists($this->part)) {
return $this->part;
}
die('no such file: '.$this->part);
}
/**
 * Shared resolver (was duplicated in setTemplate/pathPart): turns
 * "a.b.name" into "<base>a/b/name.php" — the last dot segment is the
 * file name, the earlier segments become directories.
 */
private static function dottedPath($base, $dotted) {
$segments = explode('.', $dotted);
$fileName = array_pop($segments);
$path = $base;
foreach ($segments as $el) {
$path .= $el . DIRECTORY_SEPARATOR;
}
return $path . $fileName . '.php';
}
}
<file_sep>/templates/item.php
<? // Product detail page: the last URL segment is either a numeric product
// id or an alias; the matching Product is rendered with photos, params,
// reviews and a review-submission form.
// Markup note: closing tags split like "</section\n>" are intentional —
// they remove the inter-element whitespace between inline-block sections.
$productURL = end($URL);
// numeric segment = lookup by primary key, otherwise lookup by alias
$product = is_numeric($productURL) ? new Product($productURL) : new Product('', array(':alias' => $productURL));
if (!$product->isNew) {
if (empty($product->name)) $product->name = $product->shopName;
$page->title = $product->name;
$page->header = $product->name;
$page->metakey = $product->metakey;
$page->metadesc = $product->metadesc;
// the "Характеристики" tab is shown only if at least one param has a value
$showParams = false;
foreach ($product->params as $param) {
if (!empty($param['value'])) {
$showParams = true;
break;
}
} ?>
<section class="product">
<section class="slider inlineblock">
<a href="<?=FileManager::getResizedFileName($product->mainPhoto, 'main')?>" class="fancy">
<img src="<?=FileManager::getResizedFileName($product->mainPhoto, 'tmb300')?>" alt="">
</a>
<? if (count($product->images) > 1) {
foreach ($product->images as $image) { ?>
<a href="<?=FileManager::getResizedFileName($image['path'], 'main')?>" rel="product" class="fancy">
<img src="<?=FileManager::getResizedFileName($image['path'], 'tmb90')?>" alt="">
</a>
<? } ?>
<? } ?>
</section
><section class="description inlineblock">
<h1><?=$page->header?></h1>
<? if (!empty($product->article)) : ?>
<dl>
<dt>Артикул:</dt>
<dd><?=$product->article?></dd>
</dl>
<? endif ?>
<div class="availability">
<span class="text inlineblock">Наличие:</span>
<span>На складе</span>
</div>
<div class="price_block">
<span class="text">Цена:</span>
<span class="price"><?=$product->price?> руб</span>
<!--span class="price_old">1800 руб</span-->
</div>
<a href="#" data-id="<?=$product->id?>" class="button add2cart inlineblock">в корзину</a>
</section>
<section class="extra">
<ul class="tabs_header">
<li class="inlineblock">Описание</li>
<? if ($showParams) : ?>
<li class="inlineblock">Характеристики</li>
<? endif ?>
<li class="inlineblock">Отзывы (<?=count($product->reviews)?>)</li>
</ul>
<ul class="tabs_content">
<li><?=$product->description?></li>
<? if ($showParams) : ?>
<li>
<? foreach ($product->params as $param) {
if (empty($param['value'])) continue; ?>
<span class="param_title inlineblock"><?=$param['desc']?>:</span>
<span class="param_value inlineblock">
<? // render the value according to the parameter's widget type
switch ($param['fieldtype']) {
case 'text' :
echo $param['value'];
break;
case 'checkbox' :
echo 'Да';
break;
} ?>
</span>
<? } ?>
</li>
<? endif ?>
<li>
<? if (count($product->reviews)) { ?>
<ul class="reviews">
<? foreach ($product->reviews as $review) { ?>
<li>
<span class="inlineblock text">
<span class="rating" data-value="<?=$review['rating']?>"></span>
<?=nl2br($review['text'])?>
</span
><span class="inlineblock user">
<span class="name"><?=$review['user_name']?></span>
<span class="date"><?=date('d.m.Y', $review['created'])?></span>
</span>
</li>
<? } ?>
</ul>
<? } ?>
<div class="title">Написать отзыв</div>
<form name="review_add">
<input type="hidden" name="action" value="add">
<input type="hidden" name="productId" value="<?=$product->id?>">
<input type="hidden" name="rating" required value="">
<div>
<label class="inlineblock field_name">Ваша оценка</label>
<ul class="stars inlineblock">
<li class="inlineblock"></li><li class="inlineblock"></li><li class="inlineblock"></li><li class="inlineblock"></li><li class="inlineblock"></li>
</ul>
</div>
<div><textarea name="text" required rows="3" cols="80" placeholder="Текст отзыва" class="field"></textarea></div>
<div><input type="text" required value="" name="userName" placeholder="<NAME>" class="field"></div>
<div><input type="email" required value="" name="userEmail" placeholder="Ваш email" class="field"></div>
<input type="submit" class="button" value="Опубликовать">
</form>
</li>
</ul>
</section>
</section>
<? } else {
// no such product: bounce to the 404 page
// NOTE(review): header() after markup relies on output buffering being
// active in the front controller — confirm (index.php calls ob_start()).
header('Location: /404');
die ('ID товара не существует');
}<file_sep>/admin/module_admin/settings/index.php
<?
// Admin "site settings" page (superadmin only): change the notification
// e-mail and the admin login/password pair stored in `pm_base`.
// NOTE(review): the access check and the POST redirects below run after
// the <h1> has been echoed — this relies on output buffering being active
// in the admin front controller; confirm.
// NOTE(review): the admin password is stored and redisplayed in plain
// text ($admin_data['admin_pwd']); hashing it would require a matching
// change in the login check — TODO.
$page->header = 'Настройки сайта';
$page->title .= $page->header;
?>
<h1><?=$page->header?></h1>
<?// admin e-mail update
if($_SESSION['status'] != 'superadmin') :
die('NOT ACCESS');
endif;
if($_POST['email']) :
$sql = "UPDATE `pm_base` SET `value` = :email WHERE `param` = 'email'";
Helper::executeStatement($sql, array(':email' => $_POST['email']));
// redirect-after-post; the timestamp busts any cache of the GET page
header('Location: /'.$URL[1].'/'.$URL[2].'/'.$URL[3].'/?changemail=1&'.time());
exit;
endif;
if($_POST['login'] AND $_POST['pwd']) :
$sql = "UPDATE `pm_base` SET `value` = :login WHERE `param` = 'admin_login'";
Helper::executeStatement($sql, array(':login' => $_POST['login']));
$sql = "UPDATE `pm_base` SET `value` = :pwd WHERE `param` = 'admin_pwd'";
Helper::executeStatement($sql, array(':pwd' => $_POST['pwd']));
header('Location: /'.$URL[1].'/'.$URL[2].'/'.$URL[3].'/?changepwd=1&'.time());
exit;
endif; ?>
<h3>Смена e-mail</h3>
<form class="form-inline" method="post">
<label>E-mail <input name="email" value="<?=$admin_data['email'];?>"> </label>
<button class="btn" type="submit" name="changeemail"><?if($_GET['changemail']) :?><i class="icon-ok"></i> <? endif; ?>Сменить e-mail</button>
</form>
<br>
<h3>Доступ администратора</h3>
<form action="" method="post" class="form-horizontal">
<fieldset>
<div class="control-group">
<label class="control-label">Логин</label>
<div class="controls">
<input name="login" value="<?=$admin_data['admin_login'];?>">
</div>
</div>
<div class="control-group">
<label class="control-label">Пароль</label>
<div class="controls"><input name="pwd" value="<?=$admin_data['admin_pwd'];?>"></div>
</div>
<div class="form-actions">
<button class="btn btn-primary" type="submit" name="changepwd">
<?if($_GET['changepwd']) :?><i class="icon-ok icon-white"></i><? endif; ?>
Сменить доступы
</button>
</div>
</fieldset>
</form>
<file_sep>/admin/module_admin/admins/index.php
<?
// Admin-account management page (superadmin only): lists accounts and
// dispatches to add / edit / delete sub-pages via GET flags.
$page->header = 'Администраторы системы';
$page->title .= $page->header;
?>
<h1><?=$page->header?></h1>
<? if($_SESSION['status'] != 'superadmin') :
die('NOT ACCESS');
endif;
// sub-actions are included from the module directory; $URL segments are
// route parts set by the admin front controller
if ($_GET['addadmin']) :
include($URL[2].'/'.$URL[3].'/addadmin.php');
elseif ($_GET['delete']) :
include($URL[2].'/'.$URL[3].'/deladmin.php');
elseif ($_GET['edit']) :
include($URL[2].'/'.$URL[3].'/editadmin.php');
else : ?>
<p><i class="icon-plus"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?addadmin=1">Добавить запись</a></p>
<?
$sql = "SELECT * FROM `pm_admins` ORDER BY `id`";
$data = Helper::fetchAssocInArray($sql);
if (count($data)): ?>
<table class="table">
<tr>
<th width="100">E-mail</th>
<th width="100">Last Login <i class="icon-info-sign" rel="tooltip" title="Время последнего логина в системе"></i></th>
<th>Last IP </th>
<th>Права доступа</th>
<th width="36"></th>
</tr>
<?// one table row per admin account
foreach($data as $el) :?>
<tr>
<td><?=$el['email'];?></td>
<td><nobr><?=@date('d.m.Y H:i',$el['login_now_time']);?></nobr></td>
<td><?=stripslashes($el['login_now_ip']);?></td>
<td><?=$el['rights'];?></td>
<td width="18">
<a rel="tooltip" title="Редактировать запись" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?edit=<?=$el['id'];?>"><i class="icon-edit"></i></a>
<a rel="tooltip" title="Удалить запись" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?delete=<?=$el['id'];?>" onclick="if(confirm('Удалить? Восстановление будет невозможно!')) return true; else return false;"><i class="icon-remove"></i></a>
</td>
</tr>
<? endforeach; ?>
</table><?
else:
?><p>Учетных записей не найдено</p><?
endif;
endif;?><file_sep>/classes/manager.file.resizer.php
<?php
/**
* class for resizing images
*
* !! can be used only in conjunction with FileManager class
* all error handling is in FileManager class
*/
class ResizerFileManager {
/**
 * @var FMFile
 */
private $file;
/**
 * @var FileManager
 */
public $fm;
/**
 * @var integer image width
 */
private $_width;
/**
 * @var integer image height
 */
private $_height;
/**
 * @var float image proportion
 * set as $width / $height
 */
private $_proportion;
/**
 * @var string image type (used in resize function for working on transparency)
 */
private $_type;
/**
 * @param FileManager $fm - "parent" FileManager instance
 * @param FMFile $file
 */
function __construct(&$fm, $file) {
$this->fm = $fm;
$this->file = $file;
}
/**
 * @return bool
 * makes copies of selected file and generate resizes
 * using the resize list from repository settings
 */
public function initiateResize() {
$filePath = $this->fm->getRootPath() . $this->file->path;
if (!file_exists($filePath)) {
$this->fm->setError(FileManager::ER_RESIZER_FNF);
return false;
}
// get list of needed resizes
$resizeList = $this->fm->getResizeList();
// set type
$this->setType($this->file->mime);
// if file needs resize
if ($resizeList){
// getting original size and setting the proportion
$info = getimagesize($filePath);
list($this->_width, $this->_height) = $info;
$this->_proportion = $this->_width/$this->_height;
foreach ($resizeList as $item => $set) {
// make copy of the image and resize it
$newFileName = FileManager::getResizedFileName($filePath, $item);
if ($this->makeImageCopy($filePath, $newFileName)) {
$defaultSet = array(
'type' => 'cut',
'w' => 0,
'h' => 0
);
$set = array_merge($defaultSet, $set);
// check if resize is needed (source already fits target box)
if (
($set['w'] > 0 && $set['h'] == 0 && $this->_width <= $set['w'])
|| ($set['h'] > 0 && $set['w'] == 0 && $this->_height <= $set['h'])
|| ($set['w'] > 0 && $set['h'] > 0 && $this->_width <= $set['w'] && $this->_height <= $set['h'])
) {
continue;
}
$color = (isset($set['color'])) ? $set['color'] : array(255,255,255);
$this->resize($newFileName, $set['w'], $set['h'], $set['type'], $color);
}
}
}
return true;
}
/**
 * @param string $file - path to file to be resized
 * @param int $w - new width
 * @param int $h - new height
 * @param string $type - resize type
 * @param array $color - fill color for background (for type "fill");
 *                       RGB or RGBA, alpha defaults to 0 (opaque)
 * @return bool
 *
 * function counts new image proportion
 * makes copy of chosen file and resize it
 *
 * if width is not defined resize proportion will be counted only by height
 * (the same goes for width when height is not defined)
 *
 * resize types:
 * "cut" - proportion is counted the way that image will be cropped to fit the new size
 * "fill" - image is middled with out proportion change and empty fields are filled with $color
 */
private function resize($file, $w = 0, $h = 0, $type = 'cut', $color = array(255,255,255)) {
if (!file_exists($file)) {
$this->fm->setError(FileManager::ER_RESIZER_FNF);
return false;
}
if ($w == 0 && $h == 0) {
$this->fm->setError(FileManager::ER_RESIZER_INVALID_PARAMS);
return false;
}
if ($w == 0) {
$w = round($h * $this->_proportion);
} elseif ($h == 0) {
$h = round($w / $this->_proportion);
}
$newProportion = $w/$h;
switch ($type) {
case 'cut':
if ($this->_proportion > $newProportion) {
$srcW = round($this->_height * $newProportion);
$srcH = $this->_height;
$srcX = round(($this->_width - $srcW)/2);
$srcY = 0;
} else {
$srcW = $this->_width;
$srcH = round($this->_width / $newProportion);
$srcX = 0;
$srcY = round(($this->_height - $srcH)/2);
}
if ($sourceImage = $this->openImage($file)) {
$newImage = imagecreatetruecolor($w, $h);
// saving transparency for png images
if ($this->_type == 'png') {
imagealphablending ($newImage, false);
imagesavealpha($newImage, true);
}
imagecopyresampled($newImage, $sourceImage, 0, 0, $srcX, $srcY, $w, $h, $srcW, $srcH);
return $this->saveImage($newImage, $file);
}
break;
case 'fill':
if ($this->_proportion > $newProportion) {
// source is wider than target: fit by width, letterbox top/bottom
// FIX: scaled height must be derived from the target WIDTH
// (was $h / proportion, which distorted non-square targets)
$filledWidth = $w;
$filledHeight = round($w / $this->_proportion);
$dstX = 0;
$dstY = round(($h - $filledHeight)/2);
} else {
// source is taller than target: fit by height, pillarbox left/right
// FIX: scaled width must be derived from the target HEIGHT
// (was $w * proportion, which distorted non-square targets)
$filledWidth = round($h * $this->_proportion);
$filledHeight = $h;
$dstX = round(($w - $filledWidth)/2);
$dstY = 0;
}
if ($sourceImage = $this->openImage($file)) {
$newImage = imagecreatetruecolor($filledWidth, $filledHeight);
// saving transparency for png images
if ($this->_type == 'png') {
imagealphablending ($newImage, false);
imagesavealpha($newImage, true);
}
imagecopyresampled($newImage, $sourceImage, 0, 0, 0, 0,
$filledWidth, $filledHeight, $this->_width, $this->_height);
$emptyImage = imagecreatetruecolor($w, $h);
if ($this->_type == 'png') {
imagealphablending ($emptyImage, false);
imagesavealpha($emptyImage, true);
}
// FIX: the default $color has only 3 components — $color[3] was an
// undefined index; fall back to fully opaque (alpha 0)
$alpha = isset($color[3]) ? $color[3] : 0;
$color = imagecolorallocatealpha($emptyImage, $color[0], $color[1], $color[2], $alpha);
imagefill($emptyImage, 0, 0, $color);
imagecopy($emptyImage, $newImage, $dstX, $dstY, 0, 0, $filledWidth, $filledHeight);
return $this->saveImage($emptyImage, $file);
}
break;
}
return false;
}
/**
 * @param $mime
 * @return bool
 * reduce mime to image type (for local type checks)
 */
private function setType($mime) {
switch($mime) {
case 'image/jpeg':
$this->_type = "jpg";
return true;
case 'image/png':
$this->_type = "png";
return true;
case 'image/gif':
$this->_type = "gif";
return true;
default:
return false;
}
}
/**
 * @param $source - source file path
 * @param $destination - new file path
 * @return bool - true if success, false otherwise
 * makes copy of file
 */
private function makeImageCopy($source, $destination) {
if (!copy($source, $destination)) {
$this->fm->setError(FileManager::ER_RESIZER_FILE_COPYING);
return false;
}
return true;
}
/**
 * @param $file
 * @return bool|resource
 * creates image resource according to its type
 */
private function openImage($file) {
switch($this->_type) {
case 'jpg':
return imagecreatefromjpeg($file);
case 'png':
return imagecreatefrompng($file);
case 'gif':
return imagecreatefromgif($file);
default:
$this->fm->setError(FileManager::ER_RESIZER_NOT_IMAGE);
return false;
}
}
/**
 * @param resource $image - resource of image to be saved
 * @param string $path - path for saving file
 * @param bool $rewrite - true if rewrite is allowed, false otherwise
 * @param int $quality - saving quality (used for jpeg files only)
 * @return bool - true on success, false otherwise
 * saves image resource to file on given path
 */
private function saveImage($image, $path, $rewrite = true, $quality = 100) {
if (empty($path) || $image === false) {
$this->fm->setError(FileManager::ER_RESIZER_FILE_SAVING);
return false;
}
if(!$rewrite && file_exists($path)) {
$this->fm->setError(FileManager::ER_RESIZER_FILE_REWRITE);
return false;
}
switch($this->_type) {
case 'jpg':
if(!is_numeric($quality) || $quality < 0 || $quality > 100)
$quality = 100;
return imagejpeg($image, $path, $quality);
case 'png':
return imagepng($image, $path);
case 'gif':
return imagegif($image, $path);
default:
return false;
}
}
}<file_sep>/admin/module/param/sql.php
<?php
// Config/bootstrap for the "param" admin module: describes the `param`
// table fields for the generic table editor, then initialises it.
// variable initialisation
$db_table = 'param';
// available edit-widget types for a parameter
$edits = array (
'text' => 'text field',
'textarea' => 'textarea',
'select' => 'select',
'checkbox' => 'checkbox',
);
//
// simple_item_base - describes only data of types text, int, date
//
$simple_item_base = array (
// 0 1 2 3 4
// desc db_name input db_type array_select
array ('Ключ', 'name', 'text', 'text'),
array ('Поле редактирования', 'fieldtype', 'select', 'text', $edits),
array ('Значения (через " | ")<br />(только для select)', 'values', 'text', 'text'),
array ('Описание', 'desc', 'text', 'text'),
);
// replace the numeric indexes above with named string keys
Admin::reindexSimpleItemBase(array ('desc', 'db_name', 'input', 'db_type', 'array_select'));
init_table ();
?><file_sep>/backend/review.php
<?
// AJAX endpoint for adding a product review. On success only status=ok is
// returned; on failure the review object (carrying its validation state)
// is sent back so the client can show the errors.
if ($_POST['action'] == 'add') {
    $review = new Review();
    $review->setAttributesFromArray($_POST);
    $saved = $review->save();
    if ($saved) {
        $response->status = 'ok';
    } else {
        $response->review = $review;
    }
}
$response->encode();<file_sep>/classes/manager.file.file.php
<?
/**
* class for working with single file instance
*/
class FileFileManager extends Entity{
// NOTE(review): the Entity base class (declared elsewhere) is assumed to
// provide the ActiveRecord-style save()/delete()/model() used below — confirm.
public $isImage = NULL; // cached result of isImage(); NULL = not yet computed
public $tmpName = ''; // temp path of the uploaded file before it is moved
public $resizeKeys = false;
public $id;
public $repository_id;
public $section;
public $path;
public $ext;
public $mime;
public $original_name;
public $size;
public $date;
public $used;
protected $table;
// attribute metadata consumed by the Entity base class
protected $attributes = array(
'id' => array(
'isPrimaryKey' => true,
),
'repository_id' => array('type' => 'int'),
'section' => array('type' => 'int'),
'path' => array(),
'ext' => array(),
'mime' => array(),
'original_name' => array(),
'size' => array('type' => 'int'),
'date' => array('type' => 'int'),
'used' => array('type' => 'int')
);
public function __construct($primaryKey = '') {
$this->table = $this->tableName();
parent::__construct($primaryKey);
}
public function tableName() {
return FileManager::FILES_TABLE;
}
public static function model($className=__CLASS__) {
return parent::model($className);
}
/**
 * @param string $runtime
 * @param CUploadedFile|string $uploadedFile
 * fills file metadata depending on the upload mechanism in use
 */
public function fillData($runtime, $uploadedFile) {
switch($runtime) {
case 'yii':
// using CUploadedFile instance for saving file
$this->tmpName = $uploadedFile->getTempName();
$this->size = $uploadedFile->getSize();
$this->ext = $uploadedFile->getExtensionName();
$this->original_name = $uploadedFile->getName();
$this->mime = FileHelper::getMimeType($this->tmpName);
break;
case 'plupload':
// using standard functions (cause only file path is given)
$this->size = filesize($uploadedFile);
$this->original_name = pathinfo($uploadedFile, PATHINFO_FILENAME);
$this->ext = trim(strtolower(pathinfo($uploadedFile, PATHINFO_EXTENSION)));
$this->mime = FileHelper::getMimeType($uploadedFile);
break;
}
}
/**
 * @param $repoId
 * @return bool|mixed
 * add table record for new (uploaded) file into files table
 */
public function addFileToRepo($repoId, $section = NULL) {
$this->date = time();
$this->repository_id = $repoId;
if (is_numeric($section)) {
$this->section = $section;
}
// path is completed later in completeFileRecord(), once the id is known
if (empty($this->path)) $this->path = '';
if (!$this->save()) {
return false;
}
return $this->id;
}
/**
 * @param $rootPath - FileManager root path -> used to generate full file path from webroot
 * @return bool
 * complete file record
 * ! separated action because file id (after insert) is used for generating unique file path
 */
public function completeFileRecord($rootPath) {
$this->date = time();
$this->path = $rootPath . $this->path;
if (!$this->save()) {
// roll back the half-created row so the table is not left inconsistent
$this->delIncompleteRecord();
return false;
}
return true;
}
/**
 * @return bool
 * delete incomplete file record
 * used when some error occurred between addFileToRepo() and completeFileRecord() calls
 */
public function delIncompleteRecord() {
return $this->delete();
}
/**
 * @return bool
 * true if file is an image, false otherwise
 * ! file is checked by its mime type (gif/png/jpeg); result is cached
 */
public function isImage() {
$image = array('image/gif', 'image/png', 'image/jpeg');
return (is_null($this->isImage))
? ($this->isImage = in_array($this->mime, $image))
: $this->isImage;
}
}<file_sep>/admin/module/template/addedit.php
<?
// Add/edit form for a product-parameter template: saves the template name
// plus the set of checked parameter ids (stored comma-separated in `params`).
// return path after save/cancel
$back_url = Admin::thisModule();
if (isset($_POST['save'])) {
if (isset($_GET['add']))
$sql = 'INSERT INTO ';
else
$sql = 'UPDATE ';
$sql .= TEMPLATE_TABLE;
$params = array();
// only checked checkboxes arrive in $_POST['param']; the keys are param ids
foreach ((array)$_POST['param'] as $param_id => $checked)
$params[] = $param_id;
$sql .= '
SET
`name` = :name,
`params` = :params
';
$sqlParams = array(
':name' => $_POST['name'],
':params' => implode(',', $params)
);
if (isset($_GET['edit'])) {
$sql .= ' WHERE `id` = :id';
$sqlParams[':id'] = (int)$_GET['edit'];
}
Helper::executeStatement($sql, $sqlParams);
header ('Location: '.$back_url);
exit;
}
if (isset($_POST['cancel'])) {
header ('Location: '.$back_url);
exit;
}
$r = array();
if (isset($_GET['edit'])) {
$sql = 'SELECT * FROM '.TEMPLATE_TABLE.' WHERE `id` = :id';
$r = Helper::fetchAssoc($sql, array(':id' => $_GET['edit']));
}
$params = Helper::fetchAssocInArray('SELECT * FROM '.PARAM_TABLE);
// FIX: the callback must be the string 'intval' — a bare constant only
// "worked" through PHP's legacy undefined-constant fallback (fatal in PHP 8).
// Also guard $r['params'] for the "add" case where $r is empty.
$r_params = array_map ('intval', (array)explode (',', (isset($r['params']) ? (string)$r['params'] : '')));
?>
<h2>Редактировать шаблон</h2>
<br />
<a href="<?=$back_url;?>"><span class="icon icon-arrow-left"></span> Вернуться к списку шаблонов</a><br /><br />
<form method="post">
<label>Название <input size="50" type="text" name="name" value="<?=htmlspecialchars(isset($r['name']) ? $r['name'] : '');?>" /></label>
<br />
<? foreach ($params as $param) : ?>
<input id="param[<?=$param['id'];?>]" type="checkbox" name="param[<?=$param['id'];?>]" <?=(array_search($param['id'], $r_params) !== false ? 'checked' : '' );?> />
<label style="display: inline-block;" for="param[<?=$param['id'];?>]"><?=$param['name'];?> (<?=$param['desc'];?>)</label>
<br />
<? endforeach; ?>
<br />
<div>
<button name="save" class="btn btn-primary">Сохранить</button>
<button name="cancel" class="btn btn-default">Отмена</button>
</div>
</form><file_sep>/admin/module/item_param/index.php
<?
// Admin editor for per-item parameter values: shows every parameter of the
// item's template with the proper widget and saves the values into the
// `item_param` pivot table.
define ('ITEM_TABLE', '`item`');
define ('PARAM_TABLE', '`param`');
define ('ITEM_PARAM_TABLE', '`item_param`');
define ('TEMPLATE_TABLE', '`template`');
$item_id = (isset ($_GET['item_id']) ? (int)$_GET['item_id'] : 0);
$page->header = 'Параметры товаров';
$page->title .= $page->header;
?>
<h2><?=$page->header?></h2>
<? if (! $item_id) : ?>
<p>Необходимо выбрать товар на странице <a href="/<?=$URL[1];?>/<?=$URL[2];?>/item">Товары</a></p>
<? else :
$item = Helper::fetchAssoc('SELECT * FROM '.ITEM_TABLE.' WHERE `id` = :id', array(':id' => (int)$item_id));
$template = Helper::fetchAssoc('SELECT * FROM '.TEMPLATE_TABLE.' WHERE `id` = :id', array(':id' => (int)$item['template_id']));
// FIX: the callback must be the string 'intval' — a bare constant only
// "worked" through PHP's legacy undefined-constant fallback (fatal in PHP 8)
$_params = array_map ('intval', (array)explode(',', $template['params']));
$params = $template['params'] ? Helper::fetchAssocInArray('SELECT * FROM '.PARAM_TABLE.' WHERE `id` IN ('.$template['params'].')') : array();
if (isset($_POST['save'])) {
// upsert each submitted value
foreach ((array)$_POST['param'] as $param_id => $value) {
list ($val_exists) = Helper::fetchAssoc('SELECT COUNT(*) FROM '.ITEM_PARAM_TABLE.' WHERE `item_id`='.(int)$item_id.' AND `param_id`='.(int)$param_id);
if ($val_exists)
$sql = 'UPDATE ';
else
$sql = 'INSERT INTO ';
$sql .= ITEM_PARAM_TABLE;
$sql .= "SET
`item_id` = " . $pdo->quote((int)$item_id) . ",
`param_id` = " . $pdo->quote((int)$param_id) . ",
`value` = " . $pdo->quote($value);
if ($val_exists)
$sql .= ' WHERE `item_id` = '.(int)$item_id.' AND `param_id` = '.(int)$param_id;
Helper::executeStatement($sql);
}
// unchecked checkboxes are absent from POST: delete their stored rows
foreach ($_params as $_param_id) {
if ( !isset($_POST['param'][$_param_id]) ) {
Helper::executeStatement(
'DELETE FROM '.ITEM_PARAM_TABLE.' WHERE `item_id` = :itemId AND `param_id` = :paramId',
array(
':itemId' => (int)$item_id,
':paramId' => $_param_id
)
);
}
}
header ('Location: '. Admin::thisModule() .'?item_id='.(int)$item_id);
exit;
}
if (isset($_POST['cancel'])) {
header ('Location: '. Admin::thisModule() .'?item_id='.(int)$item_id);
exit;
}
$_values = Helper::fetchAssocInArray('SELECT * FROM '.ITEM_PARAM_TABLE.' WHERE `item_id`='.(int)$item['id']);
$values = array ();
foreach ($_values as $val)
$values[$val['param_id']] = $val['value'];
?>
<p>Товар <a href="/<?=$URL[1];?>/<?=$URL[2];?>/item?edit=<?=$item['id'];?>"><?=$item['name'];?></a></p>
<p>Шаблон <a href="/<?=$URL[1];?>/<?=$URL[2];?>/template?edit=<?=$template['id'];?>"><?=$template['name'];?></a></p>
<form method="post">
<table class="table table-bordered">
<? foreach ($params as $param) : ?>
<tr>
<td>
<?=$param['name'].($param['desc'] ? ' ('.$param['desc'].')' : '');?>
</td>
<td>
<?
$name = 'param['.$param['id'].']';
// FIX: avoid an undefined-index notice for params with no stored value
$value = isset($values[$param['id']]) ? $values[$param['id']] : '';
if ($param['fieldtype'] == 'textarea') :
?>
<textarea class="input-xlarge" rows="5" name="<?=$name;?>"><?=$value;?></textarea>
<?
elseif ($param['fieldtype'] == 'checkbox') :
?>
<input type="checkbox" name="<?=$name;?>" <?=($value ? 'checked' : '');?> />
<?
elseif ($param['fieldtype'] == 'select') :
// FIX: quote the 'trim' callback as well (same PHP 8 issue as above)
$options = array_map ('trim', explode ('|', $param['values']));
?>
<select class="input-xlarge" name="<?=$name;?>">
<? foreach ($options as $opt) : ?>
<option value="<?=trim($opt);?>" <?if(trim($opt) == trim($value)) {?>selected<?}?>><?=$opt;?></option>
<? endforeach; ?>
</select>
<?
else :
?>
<input type="text" class="input-xlarge" name="<?=$name;?>" value="<?=$value;?>" />
<?
endif;
?>
</td>
</tr>
<? endforeach; ?>
</table>
<div>
<button name="save" class="btn btn-primary">Сохранить</button>
<button name="cancel" class="btn btn-default">Отмена</button>
</div>
</form>
<?
// show the product's image gallery below the form
$fileIds = empty($item['files']) ? array() : explode(',', $item['files']);
ImageHelper::showGallery('product', $fileIds, 'tmb90', $item_id); ?>
<? endif; ?><file_sep>/admin/module/catalog/items.php
<?php
// Admin debug view: shows which items a catalog's filter matches, along
// with the raw filter expression and the SQL it compiles to.
$catalog_id = (int)$_GET['catalog_id'];
$catalog = Helper::fetchAssoc('SELECT * FROM `catalog` WHERE `id` = :id', array(':id' => (int)$catalog_id));
// filterToSQL returns a falsy value when the stored filter is malformed
$sql = Catalog::filterToSQL($catalog['filter'], array('id', 'name'));
if ($sql) {
$items = Helper::fetchAssocInArray($sql);
}
$page->title .= 'Товары каталога';
?>
<p>
Товары каталога <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?edit=<?=$catalog['id'];?>"><?=$catalog['name'];?></a>
</p>
<? if ( !$sql ) : ?>
<p>Ошибка в фильтре.</p>
<? else : ?>
<b>Filter-запрос</b><br />
<code style="display: block;"><?=$catalog['filter'];?></code>
<br />
<b>SQL-запрос</b><br />
<code style="display: block;"><?=$sql;?></code>
<br />
<b>Всего найдено</b><br />
<code style="display: block;"><?=count($items);?></code>
<br />
<table class="table table-bordered table-hovered">
<tr>
<th width="50">ID</th>
<th>Название товара</th>
</tr>
<? foreach ($items as $item) : ?>
<tr>
<td><?=$item['id'];?></td>
<td><a href="/admin/module/item/?edit=<?=$item['id'];?>"><?=$item['name'];?></a></td>
</tr>
<? endforeach; ?>
</table>
<? endif; ?>
<file_sep>/admin/module/static/index.php
<?
// Admin static-pages module: list view with add / edit / delete actions
// dispatched through GET flags to the module's sub-scripts.
$page->header = 'Управление страницами'; ?>
<h1><?=$page->header?></h1>
<?
$page->title .= $page->header;
if ($_GET['delete']) :
include($URL[2].'/'.$URL[3].'/delpage.php');
elseif ($_GET['edit'] || $_GET['add']) :
include($URL[2].'/'.$URL[3].'/editpage.php');
else :
?>
<p><i class="icon-plus"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?add=1">Добавить страницу</a></p>
<table class="table">
<tr>
<th>URL</th>
<th width="100">Дата <i class="icon-info-sign" rel="tooltip" title="Время последнего редактирования страницы"></i></th>
<th>Название страницы</th>
<th></th>
</tr>
<?// one table row per static page, ordered by URL
$sql = "SELECT * FROM `pm_static` ORDER BY `url`";
$data = Helper::fetchAssocInArray($sql);
foreach($data as $el) :?>
<tr>
<td><?=$el['url']?></td>
<td><nobr><?=@date('d.m.Y H:i',$el['lastedit']);?></nobr></td>
<td><?=stripslashes($el['title']);?></td>
<td width="36">
<a class="icon-pencil" rel="tooltip" title="Редактировать страницу" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?edit=<?=$el['id'];?>"></a>
<a class="icon-remove" rel="tooltip" title="Удалить страницу" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?delete=<?=$el['id'];?>" onclick="if(confirm('Удалить? Восстановление будет невозможно!')) return true; else return false;"></a>
</td>
</tr>
<? endforeach; ?>
</table>
<? endif;?><file_sep>/parts/menu/top.php
<ul class="inlineblock">
<?
// Top navigation: one <li> per pm_menu row (ordered by `sort`); the entry
// whose url matches the current first URL segment gets the "active" class.
$menuTop = Helper::fetchAssocInArray("SELECT * FROM `pm_menu` ORDER BY `sort`");
foreach ($menuTop as $el) {
    $cssClass = 'inlineblock';
    if ('/'.$URL[1] == $el['url']) {
        $cssClass .= ' active';
    }
    echo '<li class="'.$cssClass.'"><a href="'.$el['url'].'">'.$el['title'].'</a></li>';
} ?>
</ul>
<file_sep>/admin/module_admin/sql_backup/index.php
<?
// Admin DB-backup page (superadmin only): "do" action writes a naive SQL
// dump (one INSERT per table) into /backup/<timestamp>.sql; the page then
// lists all existing dump files for download.
$page->header = 'Архив';
$page->title .= $page->header;
?>
<h1><?=$page->header?></h1>
<? if($_SESSION['status'] != 'superadmin') :
die('NOT ACCESS');
endif;
if ($URL[4]=='do') :
// prepare the dump file, named by the current unix timestamp
$file = fopen($_SERVER['DOCUMENT_ROOT'].'/'.$URL[1].'/backup/'.time().'.sql','w');
// list every table in the database
$sql = "SHOW TABLES";
$tables = Helper::fetchAssocInArray($sql);
// first line of the dump: a comment listing all table names
foreach($tables as $el) $TABLES[] = $el['Tables_in_'.DB_NAME];
fwrite($file,'--'.implode(', ',$TABLES)."\r\n");
$DAMP = '';
foreach($tables as $el) :
// column list for the INSERT header
$sql = "SHOW COLUMNS FROM `".$el['Tables_in_'.DB_NAME]."`";
$colls = Helper::fetchAssocInArray($sql);
$tmp = array();
foreach($colls as $el2) :
$tmp[]='`'.$el2['Field'].'`';
endforeach;
// dump header
$DAMP = "INSERT INTO `".$el['Tables_in_'.DB_NAME]."` (".implode(', ',$tmp).") VALUES ";
// dump body
$sql = "SELECT * FROM `".$el['Tables_in_'.DB_NAME]."`";
$data = Helper::fetchAssocInArray($sql);
if (count($data)) :
$tmp = array();
foreach($data as $el3) :
$t = array();
// FIX: quote/backslash characters in values used to corrupt the dump;
// addslashes() keeps each value inside its SQL string literal.
// NOTE(review): NULL values are still written as '' — confirm acceptable.
foreach($el3 as $t1) {$t[]= "'".addslashes((string)$t1)."'";}
$tmp[] = "(".implode(', ',$t).")";
endforeach;
$DAMP .= implode(",\r\n",$tmp).';';
fwrite($file,$DAMP."\r\n------\r\n");
endif;
endforeach;
fclose($file);
?>
<h3>Создание образа завершено</h3>
<p>BackUp done. All right :-)</p>
<br>
<?
endif; ?>
<p><i class="icon-plus"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>/do">Создать образ сейчас</a></p>
<h3>Доступные образы БД</h3>
<table class="table">
<tr>
<th width="120">Дата <a rel="tooltip" title="Время создания образа"><i class="icon-info-sign"></i></a></th>
<th>Таблицы</th>
<th></th>
</tr>
<? $dir = scandir(PATH_ADMIN . 'backup');
// scandir() indexes 0 and 1 are "." and ".." — start at 2 to skip them
for ($i=2;$i<count($dir);$i++) :?>
<tr>
<td><?=date('d.m.Y H:i',substr($dir[$i],0,10));?></td>
<td>
<?
// the first line of each dump is the comment listing its tables
$file = fopen(PATH_ADMIN . 'backup' . DIRECTORY_SEPARATOR . $dir[$i], 'r');
echo trim(fgets($file),'-');
fclose($file);
?>
</td>
<td><a rel="tooltip" title="Скачать" href="/<?=$URL[1];?>/backup/<?=$dir[$i];?>"><i class="icon-download-alt"></i></a></td>
</tr>
<? endfor; ?>
</table>
<file_sep>/admin/module/reviews/edit.php
<?
// Edit a single product review (moderation form).
$el = new Review($_GET['edit']);
if ($el->isNew) die('No such element');
if($_POST['save']) :
$el->setAttributesFromArray($_POST);
$el->created = strtotime($el->created);
// BUGFIX: an unchecked checkbox is absent from POST, so the old
// "== 'on' -> 1" logic could never reset the flag back to 0
$el->isModerated = (isset($_POST['isModerated']) && $_POST['isModerated'] == 'on') ? 1 : 0;
$el->save();
header('Location: /'.$URL[1].'/'.$URL[2].'/'.$URL[3]);
exit;
endif;
?>
<p><i class="icon-arrow-left"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Вернуться к списку</a></p>
<h3>Редактирование</h3>
<form class="form-horizontal" action="" method="post">
<p>
<? $product = Helper::fetchAssoc("SELECT `name` FROM `item` WHERE `id` = :productId", array(':productId' => $el->productId), 'name'); ?>
<a href="/<?=$URL[1]?>/<?=$URL[2]?>/<?=$URL[3]?>/?edit=<?=$el->productId?>" target="_blank"><?=$product?></a>
</p>
<div class="control-group">
<label class="control-label">Пользователь</label>
<div class="controls"><input class="input-xxlarge" type="text" name="userName" value="<?=$el->userName?>"></div>
</div>
<div class="control-group">
<label class="control-label">Email</label>
<div class="controls"><input class="input-xxlarge" type="text" name="userEmail" value="<?=$el->userEmail?>"></div>
</div>
<div class="control-group">
<label class="control-label">Рейтинг</label>
<div class="controls"><input class="input-xxlarge" type="text" name="rating" value="<?=$el->rating?>"></div>
</div>
<div class="control-group">
<label class="control-label">Создан</label>
<div class="controls"><input class="input-xxlarge datepicker" type="text" name="created" value="<?=date('d.m.Y', $el->created)?>"></div>
</div>
<div class="control-group">
<label class="control-label">Текст</label>
<div class="controls"><textarea class="input-xxlarge" name="text"><?=$el->text?></textarea></div>
</div>
<div class="control-group">
<label class="control-label" for="isModerated">Проверен</label>
<div class="controls"><input type="checkbox" class="input-xxlarge" name="isModerated" <? if ($el->isModerated) {?>checked<?}?> id="isModerated"></div>
</div>
<div class="form-actions">
<button class="btn btn-primary" type="submit" name="save" value="save">Сохранить изменения</button>
<a class="btn" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Отмена</a>
</div>
</form><file_sep>/admin/module/articles/delete_group.php
<?
// Delete an article group together with all items that belong to it.
$groupId = $_GET['delete_group'];
if ($groupId) {
    // the group row itself
    Helper::executeStatement(
        "DELETE FROM `pm_article_groups` WHERE `id` = :id",
        array(':id' => $groupId)
    );
    // every article linked to the group
    Helper::executeStatement(
        "DELETE FROM `pm_article_items` WHERE `groupId` = :groupId",
        array(':groupId' => $groupId)
    );
    header('Location: /'.$URL[1].'/'.$URL[2].'/'.$URL[3]);
    exit;
}
<file_sep>/admin/module/catalog/sql.php
<?
/*
 * Schema reference for the `catalog` table — NOT executable PHP.
 * BUGFIX: the raw SQL used to sit directly between PHP tags, which is a
 * fatal parse error if this file is ever included; it is now a comment.
 *
CREATE TABLE IF NOT EXISTS `catalog` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `pid` int(11) NOT NULL,
  `name` text NOT NULL,
  `url` text NOT NULL,
  `filter` text NOT NULL,
  `sort` int NOT NULL,
  PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 AUTO_INCREMENT=1
*/
?><file_sep>/templates/main.php
<?/*<section class="easyslider">
<ul>
<li>
<img src="http://livedemo00.template-help.com/opencart_43001/image/cache/data/slide-2-806x353.png" alt="" />
</li>
<li>
<img src="http://livedemo00.template-help.com/opencart_43001/image/cache/data/slide-3-806x353.png" alt="" />
</li>
<li>
<img src="http://livedemo00.template-help.com/opencart_43001/image/cache/data/slide-1-806x353.png" alt="" />
</li>
</ul>
</section>*/?>
<section class="featured">
<h2 class="block-title">Спецпредложения</h2>
<div class="block-content">
<?
// Front-page "specials" block: 6 random items flagged `special` != 0.
$sql = Catalog::filterToSQL('[`special` != 0]', array('id', 'name','alias','price','article', 'main_photo'));
$sql .= ' ORDER BY RAND() LIMIT 6';
$items = Helper::fetchAssocInArray($sql);
?>
<ul class="products">
<? foreach ($items as $item) {
Catalog::showProductInCatalog($item);
} ?>
</ul>
</div>
</section>
<file_sep>/admin/module/param/functions.php
<?
// Render a single admin-table cell for the "param" module.
// $index_col: zero-based column position; $col_name: column label (unused);
// $col_value: raw cell value, or the row id for the action column.
function print_cell_table ($index_col, $col_name, $col_value) {
switch ($index_col) {
case 0: case 2:
print strip_tags (stripslashes ($col_value));
break;
case 1:
// column 1 holds a key into the global $edits lookup table
global $edits;
print strip_tags (stripslashes ($edits[$col_value]));
break;
default:
// any other column: edit/delete action icons; $col_value is the row id
Admin::printImageMenu('?edit=' . ((int)$col_value), 'edit');
Admin::printImageMenu('?del=' . ((int)$col_value), 'del');
break;
}
}
/*
 * Delete a single row (by id) from the module's table ($db_table global).
 */
function delete_from_db ($id) {
    global $db_table;
    Helper::executeStatement(
        'DELETE FROM `' . $db_table . '` WHERE `id` = :id',
        array(':id' => (int)$id)
    );
}
// Create the module table (if missing) from the global $simple_item_base
// field description. The CREATE TABLE text is assembled via output
// buffering because the file mixes PHP and raw text freely.
function init_table () {
global $db_table;
ob_start ();
?>
CREATE TABLE IF NOT EXISTS `<?=$db_table;?>` (
`id` int(11) NOT NULL auto_increment,
<?
global $simple_item_base;
foreach ($simple_item_base as $ib) {
$name = $ib['db_name'];
$type = $ib['db_type'];
if ($type == 'int')
$type = 'int (11)';
echo '`' . $name . '` ' . $type . ',';
}
?>
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
<?
// the buffered text is the finished SQL statement
$sql = ob_get_contents ();
ob_clean ();
Helper::executeStatement($sql);
}
<?php
// Field/table description for the "item" admin module.
$db_table = 'item';
// id => name map of available templates (for the select input below)
$tt = Helper::fetchAssocInArray('SELECT * FROM `template`', array(), 'id', 'name');
//
// simple_item_base - describes only text, int and date typed fields
//
$simple_item_base = array (
// 0 1 2 3 4
// desc db_name input db_type array_select
array ('URL', 'alias', 'text', 'text'),
array ('Название', 'name', 'text', 'text'),
array ('Артикул', 'article', 'text', 'text'),
array ('Цена', 'price', 'text', 'text'),
array ('Шаблон', 'template_id', 'select', 'int', $tt),
array ('Поставщик', 'shop_seller_name','text', 'text'),
array ('Цена поставщика','shop_price', 'text', 'text'),
array ('Описание', 'description', 'ckeditor', 'text'),
array ('Metakey', 'metakey', 'text', 'text'),
array ('Metadesc', 'metadesc', 'text', 'text'),
);
// replace numeric indexes with the named ones listed here
Admin::reindexSimpleItemBase(array ('desc', 'db_name', 'input', 'db_type', 'array_select'));
init_table ();
// Create the `item` table (if missing) from $simple_item_base,
// assembling the CREATE TABLE statement via output buffering.
function init_table () {
global $db_table;
ob_start ();
?>
CREATE TABLE IF NOT EXISTS `<?=$db_table;?>` (
`id` int(11) NOT NULL auto_increment,
<?php
global $simple_item_base;
foreach ($simple_item_base as $ib) {
$name = $ib['db_name'];
$type = $ib['db_type'];
if ($type == 'int')
$type = 'int (11)';
echo '`' . $name . '` ' . $type . ',';
}
?>
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
<?php
$sql = ob_get_contents ();
ob_clean ();
Helper::executeStatement($sql);
}
?><file_sep>/admin/module_admin/admins/editadmin.php
<?
// Edit an admin account: optionally change the password and set
// per-module access rights (stored as a comma-separated id list).
// FIX: rewritten from the legacy mysql_* API (removed in PHP 7) to the
// PDO-based Helper wrapper used by every other module in this codebase.
if($_POST['savestatic']) :
$RIGHTS = array();
foreach ((array)$_POST['right'] as $k=>$el) :
if ($el=='on') :
$RIGHTS[] = $k;
endif;
endforeach;
$params = array(
':rights' => implode(',',$RIGHTS),
':id' => $_GET['edit'],
);
// update the password only when a new one was actually entered
if (strlen($_POST['pwd'])>1) :
$sql = "UPDATE `pm_admins` SET `pass` = :pass, `rights` = :rights WHERE `id` = :id";
$params[':pass'] = $_POST['pwd'];
else :
$sql = "UPDATE `pm_admins` SET `rights` = :rights WHERE `id` = :id";
endif;
Helper::executeStatement($sql, $params);
header('Location: /'.$URL[1].'/'.$URL[2].'/'.$URL[3]);
exit;
endif;
$r = Helper::fetchAssoc("SELECT * FROM `pm_admins` WHERE `id` = :id", array(':id' => $_GET['edit']));
$RIGHTS = array();
$t = explode(',',$r['rights']);
foreach($t as $el) $RIGHTS[$el] = $el;
?>
<p><i class="icon-arrow-left"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Вернуться к списку записей</a></p>
<h3>Редактирование учетной записи</h3>
<p><b>Изменение пароля для учетной записи <u><?=$r['email'];?></u></b></p>
<form action="" method="post" class="form-horizontal">
<fieldset>
<div class="control-group">
<label class="control-label">Новый пароль</label>
<?// NOTE(review): the UPDATE writes `pass` but this reads `pwd` — verify the actual column name ?>
<div class="controls"><input name="pwd" class="long" value="<?=$r['pwd'];?>"></div>
</div>
<div class="control-group">
<label class="control-label">Права доступа</label>
<div class="controls">
<?
// one checkbox per module dir whose "about" file starts with a numeric id
$dir = scandir($_SERVER['DOCUMENT_ROOT'].'/'.$URL[1].'/module');
for($i=2;$i<count($dir);$i++) :
$t = @file($_SERVER['DOCUMENT_ROOT'].'/'.$URL[1].'/module/'.$dir[$i].'/about');
$t[0]=trim($t[0]);
if (is_numeric($t[0])) :
?><label class="checkbox"><input type="checkbox" name="right[<?=$t[0];?>]" <?if($RIGHTS[$t[0]]>0){?>checked<?}?>> <?=$t[1];?></label><?
endif;
endfor;
?>
</div>
</div>
<div class="form-actions">
<input class="btn btn-primary" type="submit" name="savestatic" value="Сохранить запись">
<a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>" class="btn">Отмена</a>
</div>
</fieldset>
</form><file_sep>/admin/module/text/add.php
<?
// Add a new text block (position + HTML text), superadmin only.
// FIX: use the shared Helper wrapper instead of the raw $pdo handle,
// for consistency with every other admin module.
if($_POST['addpage'] AND $_SESSION['status'] == 'superadmin') :
$sql = "INSERT INTO `pm_text` SET
`position`= :position,
`text` = :text";
Helper::executeStatement($sql, array(':position' => $_POST['position'], ':text' => $_POST['text']));
header('Location: /'.$URL[1].'/'.$URL[2].'/'.$URL[3]);
exit;
endif;
?>
<p><i class="icon-arrow-left"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Вернуться к списку блоков</a></p>
<h3>Добавление текстового блока</h3>
<form class="form-horizontal" action="" method="post">
<div class="control-group">
<label class="control-label">Позиция</label>
<?// FIX: this is an add form — $r was never set, which raised undefined-variable notices ?>
<div class="controls"><input class="input-xxlarge" type="text" name="position" value=""></div>
</div>
<div class="control-group">
<label class="control-label">Текст</label>
<div class="controls"><textarea name="text" rows="5" class="input-xxlarge"></textarea></div>
</div>
<div class="form-actions">
<button class="btn btn-primary" type="submit" name="addpage" value="add">Добавить</button>
<a class="btn" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Отмена</a>
</div>
</form><file_sep>/admin/module/menu/edit.php
<?
// Edit one top-menu item (URL, title, sort order).
if($_POST['savestatic']) :
$sql = "UPDATE `pm_menu` SET
`url` = :url,
`title` = :title,
`sort` = :sort
WHERE `id`= :id";
$paramsSql = array(
':url' => $_POST['url'],
':title' => $_POST['title'],
':sort' => $_POST['sort'],
':id' => $_GET['edit']
);
Helper::executeStatement($sql, $paramsSql);
header('Location: /'.$URL[1].'/'.$URL[2].'/'.$URL[3]);
exit;
endif;
// load the current values to prefill the form
$sql = "SELECT * FROM `pm_menu` WHERE `id` = :id";
$r = Helper::fetchAssoc($sql, array(':id' => $_GET['edit'])); ?>
<p><i class="icon-arrow-left"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Вернуться к списку</a></p>
<h3>Редактирование пункта меню</h3>
<form class="form-horizontal" action="" method="post">
<div class="control-group">
<label class="control-label">URL</label>
<div class="controls">
<?// "protected" rows keep a fixed URL: shown read-only, submitted via hidden input ?>
<? if (!empty($r['protected'])) { ?>
<span class="input-xlarge uneditable-input"><?=$r['url'];?></span>
<input type="hidden" name="url" value="<?=$r['url'];?>">
<? } else { ?>
<input class="input-xlarge" type="text" name="url" value="<?=$r['url'];?>">
<? } ?>
</div>
</div>
<div class="control-group">
<label class="control-label">Название</label>
<div class="controls"><input class="input-xlarge" type="text" name="title" value="<?=$r['title'];?>"></div>
</div>
<div class="control-group">
<label class="control-label">Сортировка</label>
<div class="controls"><input class="input-xlarge" type="text" name="sort" value="<?=$r['sort'];?>"></div>
</div>
<div class="form-actions">
<button class="btn btn-primary" type="submit" name="savestatic" value="save">Сохранить изменения</button>
<a class="btn" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Отмена</a>
</div>
</form><file_sep>/classes/manager.file.php
<?php
/**
* FileManger v1.0
* class for managing files (uploading (form/ajax), downloading, allocation and resizing)
*
* require classes:
* - FMFile (file managing)
* - FMResizer (image resizing)
* - FileController (enter point for uploading and downloading files + repository creation)
*
* uses to DB tables:
* - repository table -> saving repositories and its settings
* - files table -> saving uploaded files info (all files are linked to repository)
*
* "Plupload" can be used natively for ajax file uploading
*
* @example: file adding to repository
* FileManager::getInstance()
* ->setRepository('example_repo')
* ->addFile($uploadedFileInstance);
*
* @author maximum
*/
class FileManager {
/**
* @var FileManager singleton instance
*/
public static $_instance;
/**
* @var bool whether plupload js file was already registered on page
*/
private static $_pluploadRegistered = false;
private static $_assetsPath = '';
/*
* path alias to files folder from webroot
*/
const FILES_ROOT_DIRECTORY = 'files';
/*
* file manager table names
*/
const REPOSITORY_TABLE = 'file_manager';
const FILES_TABLE = 'file_list';
const PLUPLOAD_JS_FILE = 'pluploadInitializer.js';
const REPOSITORY_TYPE_IMAGE = 1;
const DEF_NESTING = 2;
const HASH_SPLIT_LEN = 2;
/**
* @var integer active repository id
*/
private $_activeId = false;
/**
* @var string repository name
*/
private $_activeRepo = false;
/**
* @var integer
* custom identifier in repository group
* only integer for now (probably id of corresponding object used here)
*/
private $_activeSection = false;
/**
* @var array repository settings
*/
private $_settings = array();
/**
* @var string root files directory (applies to FILES_ROOT_DIRECTORY)
*/
private $_rootPath = '';
private $_tmpPath = '';
/**
* @var bool list of errors occurred
*/
public $_errors = false;
// errors (working with repository)
const ER_REPOSITORY_ALREADY_EXISTS = 1;
const ER_REPOSITORY_CREATION = 2;
const ER_REPOSITORY_NOT_EXIST = 3;
const ER_REPOSITORY_NOT_SPECIFIED = 4;
const ER_REPOSITORY_DIR_CREATION_FAILED = 5;
const ER_REPOSITORY_SETTINGS_ERROR = 6;
const ER_WRONG_SECTION_ID = 7;
const ER_SECTION_ID_NOT_SPECIFIED = 8;
// errors (working with files)
const ER_PATH_GENERATION_FAILED = 11;
const ER_PATH_CREATION_FAILED = 12;
const ER_FILE_COPYING = 13;
const ER_FILE_WRONG_INPUT = 14;
const ER_FILE_MOVING = 15;
const ER_FILE_MOVING_UPLOADED = 16;
// file check errors
const ER_CHECK_MIME = 21;
const ER_CHECK_SIZE_MIN = 22;
const ER_CHECK_SIZE_MAX = 23;
const ER_CHECK_EXT = 24;
// resizer errors
const ER_RESIZER_FNF = 31;
const ER_RESIZER_INVALID_PARAMS = 32;
const ER_RESIZER_FILE_COPYING = 33;
const ER_RESIZER_FILE_SAVING = 34;
const ER_RESIZER_NOT_IMAGE = 35;
const ER_RESIZER_FILE_REWRITE = 36;
/**
 * @return FileManager shared singleton instance (lazily created)
 */
public static function getInstance() {
    if (!isset(self::$_instance)) {
        self::$_instance = new FileManager();
    }
    return self::$_instance;
}
// Resolve the physical root for stored files and its "tmp" upload subfolder.
// (The commented Yii import is kept for history.)
function __construct() {
/*Yii::app()->setImport(array(
'application.components.FMResizer',
));*/
$this->_rootPath = PATH_ROOT . self::FILES_ROOT_DIRECTORY;
$this->_tmpPath = $this->_rootPath . DIRECTORY_SEPARATOR . 'tmp';
}
/*------------ GETTERS ---------------*/
// active repository id (false until setRepository() succeeds)
public function getId() {
return $this->_activeId;
}
// active section id within the repository (false when unset)
public function getSection() {
return $this->_activeSection;
}
// absolute ($abs=true) or web-root-relative path to the files directory
public function getRootPath($abs = true) {
if ($abs === false) {
return DIRECTORY_SEPARATOR . self::FILES_ROOT_DIRECTORY;
}
return $this->_rootPath;
}
// directory nesting depth for hashed file paths (repository setting or default)
public function getNesting() {
return (is_numeric($this->_settings['nesting']) && $this->_settings['nesting'] > 0)
? $this->_settings['nesting']
: self::DEF_NESTING;
}
// active repository name; false (+error) when none is selected
public function getRepositoryName() {
if (!$this->_activeRepo) {
$this->setError(self::ER_REPOSITORY_NOT_SPECIFIED);
return false;
}
return $this->_activeRepo;
}
// repository type constant (0 = generic, REPOSITORY_TYPE_IMAGE = images only)
public function getRepositoryType() {
return (isset($this->_settings['type'])) ? $this->_settings['type'] : 0;
}
// array(min,max) byte limits, or false when both are zero
// NOTE(review): settings loaded from the DB may be strings, so the strict
// === 0 comparison could miss '0' values — confirm against Helper's fetch
public function getSizeLimits() {
$limits = array(
isset($this->_settings['min_size']) ? $this->_settings['min_size'] : 0,
isset($this->_settings['max_size']) ? $this->_settings['max_size'] : 0,
);
return ($limits[0] === 0 && $limits[1] === 0) ? false : $limits;
}
/**
 * @return array|bool list of allowed extensions, or false when unrestricted.
 * BUGFIX: rewritten as explicit ifs — the original unparenthesized nested
 * ternary is left-associative in PHP (deprecated 7.4, fatal 8.0) and for an
 * empty setting returned array('') instead of false, rejecting every file.
 */
public function getAllowedExt() {
    if (empty($this->_settings['allowed_ext'])) {
        return false;
    }
    if (is_array($this->_settings['allowed_ext'])) {
        return $this->_settings['allowed_ext'];
    }
    return explode(',', $this->_settings['allowed_ext']);
}
/**
 * @return array|bool resize configuration for the repository, or false.
 * BUGFIX: rewritten as explicit ifs — the original unparenthesized nested
 * ternary is left-associative in PHP (deprecated 7.4, fatal 8.0) and for an
 * empty setting called unserialize('') instead of returning false.
 */
public function getResizeList() {
    if (empty($this->_settings['resize'])) {
        return false;
    }
    if (is_array($this->_settings['resize'])) {
        return $this->_settings['resize'];
    }
    return unserialize($this->_settings['resize']);
}
/**
 * register plupload and file manager scripts and add them to page
 * (idempotent — guarded by the static $_pluploadRegistered flag)
 */
public static function registerScripts() {
// TODO make runtimes selectable
// TODO initialization still missing
if (!self::$_pluploadRegistered) {
self::$_assetsPath = '/extra/plupload';
self::$_pluploadRegistered = true;
}
}
// web path to the plupload assets (registers the scripts on first call)
public static function getAssetsPath() {
self::registerScripts();
return self::$_assetsPath;
}
/* --------------- MANAGER INITIALIZER ------------------ */
/**
 * One-time setup: create the repository/files tables (if missing)
 * and the tmp upload directory.
 */
public function initializeManager() {
    $tableList = Helper::fetchAssocInArray('SHOW TABLES', array(), '', 'Tables_in_'.DB_NAME);
    // create repository table (if not exists)
    if (!in_array(self::REPOSITORY_TABLE, $tableList)) {
        Helper::createTable(self::REPOSITORY_TABLE, $this->repositoryTable());
    }
    // create files table (if not exists)
    if (!in_array(self::FILES_TABLE, $tableList)) {
        Helper::createTable(self::FILES_TABLE, $this->filesTable());
    }
    // TODO create indexes
    // create root files directory
    if (!is_dir($this->_tmpPath)) {
        umask(0000);
        // BUGFIX: mkdir mode must be octal 0777 — decimal 777 produced
        // an unintended permission set
        mkdir($this->_tmpPath, 0777, true);
    }
}
/**
 * Create a new repository record (and its folder on disk).
 * @param string $name unique repository name
 * @param array $arParams settings: type ('image'), secured (bool),
 *        min_size/max_size (human-readable sizes), resize (array)
 * @return bool|mixed false if the name is taken, otherwise the insert result
 */
public function createRepository($name, $arParams) {
$repository = Helper::fetchAssoc("SELECT id FROM ".self::REPOSITORY_TABLE." WHERE name = :name", array(':name' => $name), 'id');
if ($repository) {
$this->setError(self::ER_REPOSITORY_ALREADY_EXISTS);
return false;
}
// set name
$arParams['name'] = $name;
// resolve repository type (string alias -> numeric constant)
if (isset($arParams['type'])) {
switch ($arParams['type']) {
case 'image':
$arParams['type'] = self::REPOSITORY_TYPE_IMAGE;
break;
default: $arParams['type'] = 0;
}
}
// other settings (sizes normalised to bytes via getNumericSize)
$arParams['secured'] = (isset($arParams['secured']) && $arParams['secured'] === true) ? 1 : 0;
$arParams['min_size'] = (isset($arParams['min_size'])) ? $this->getNumericSize($arParams['min_size']) : 0;
$arParams['max_size'] = (isset($arParams['max_size'])) ? $this->getNumericSize($arParams['max_size']) : 0;
// wrap resize settings (stored serialized in a text column)
$arParams['resize'] = serialize((isset($arParams['resize'])) ? $arParams['resize'] : array());
// create folder for repository files
$this->createRepositoryFolder($name);
return Helper::insert(self::REPOSITORY_TABLE, $arParams);
}
/**
 * Ensure the on-disk folder for a repository exists.
 * @param string $name repository name (used as the directory name)
 * @return bool false (+error) when the directory could not be created
 */
private function createRepositoryFolder($name) {
    $newDirPath = $this->getRootPath() . DIRECTORY_SEPARATOR . $name;
    if (!is_dir($newDirPath)) {
        // make it recursive just in case
        umask(0000);
        // BUGFIX: mkdir mode must be octal 0777 — decimal 777 produced
        // an unintended permission set
        if (!mkdir($newDirPath, 0777, true)) {
            $this->setError(self::ER_REPOSITORY_DIR_CREATION_FAILED);
            return false;
        }
        // TODO create .htaccess rules if repository is secured
    }
    return true;
}
/**
 * @return array DB table structure for repository table
 * (column name => column definition, consumed by Helper::createTable)
 */
private function repositoryTable() {
return array(
'id' => 'INT NOT NULL AUTO_INCREMENT PRIMARY KEY',
'name' => 'varchar(32) NOT NULL',
'type' => 'tinyint(1) DEFAULT 0',
'secured' => 'tinyint(1) NOT NULL DEFAULT 0',
'allowed_ext' => 'text',
'nesting' => 'tinyint(2)',
'min_size' => 'int DEFAULT 0',
'max_size' => 'int DEFAULT 0',
'resize' => 'text',
);
}
/**
 * @return array DB table structure for files table
 * (column name => column definition, consumed by Helper::createTable)
 */
private function filesTable() {
return array(
'id' => 'INT NOT NULL AUTO_INCREMENT PRIMARY KEY',
'repository_id' => 'int(11) NOT NULL',
//'repository_name' => 'varchar(32) NOT NULL',
'section' => 'int(11)', // field for custom grouping attribute (some id probably)
'path' => 'varchar(64) NOT NULL',
'ext' => 'varchar(4)',
'mime' => 'varchar(32)',
'original_name' => 'varchar(256)',
'size' => 'int(11) NOT NULL',
'date' => 'int(10)',
'used' => 'tinyint(1) DEFAULT 0',
//'secured' => 'tinyint(1) DEFAULT 0', // commented for now
);
}
/**
 * @param $repository - repository name or numeric id
 * @return FileManager
 * set active repository and load its settings
 */
public function setRepository($repository){
    // check whether repository exists and get its settings
    if (is_numeric($repository)) {
        // BUGFIX: `name` was missing from the column list, so the
        // lookup-by-id branch always left the repository name empty
        $settings = Helper::fetchAssoc(
            'SELECT id,name,type,secured,allowed_ext,nesting,min_size,max_size,resize FROM `'.self::REPOSITORY_TABLE.'` WHERE id = :id' ,
            array(':id' => $repository)
        );
        $repository = $settings['name'];
        unset($settings['name']);
    } else {
        $settings = Helper::fetchAssoc(
            'SELECT id,type,secured,allowed_ext,nesting,min_size,max_size,resize FROM `'.self::REPOSITORY_TABLE.'` WHERE name = :name' ,
            array(':name' => $repository)
        );
    }
    if (!$settings) {
        $this->setError(self::ER_REPOSITORY_NOT_EXIST);
        return $this;
    }
    // create repository folder (if needed)
    $this->createRepositoryFolder($repository);
    // set active repository
    $this->_activeRepo = $repository;
    $this->_activeId = $settings['id'];
    unset($settings['id']);
    // section is dropped automatically when a repository is (re)chosen
    $this->unsetSection();
    $settings['resize'] = unserialize($settings['resize']);
    $this->_settings = $settings;
    return $this;
}
/**
 * @param $sectionId
 * @return FileManager
 * set active section for chosen repository (numeric ids only)
 */
public function setSection($sectionId) {
    if (!is_numeric($sectionId)) {
        $this->setError(self::ER_WRONG_SECTION_ID);
        return $this;
    }
    $this->_activeSection = $sectionId;
    return $this;
}
/**
 * unset section setting for chosen repository
 * (called automatically by setRepository)
 */
public function unsetSection() {
$this->_activeSection = false;
}
/* ------------- WORKING WITH FILES -------------- */
/**
 * @param CUploadedFile|string $uploadedFile path to the uploaded file
 *        (string, "plupload" runtime) — the Yii CUploadedFile path is
 *        disabled (see the commented branch below)
 * @param bool $used mark the file record as "used" immediately
 * @param bool $delSource move (true) or copy (false) a non-uploaded source
 * @return bool|FileFileManager false on any failure, the file object on success
 * check uploaded file and move from tmp folder
 * create resizes for images (if needed)
 * make table record for file
 */
public function addFile($uploadedFile, $used = true, $delSource = true){
$file = new FileFileManager();
/*// if uploaded using Yii form
if ($uploadedFile instanceof CUploadedFile) {
$usedRuntime = 'yii';
} else*/
if (is_string($uploadedFile) && !empty($uploadedFile)) {
$usedRuntime = 'plupload';
} else {
$this->setError(self::ER_FILE_WRONG_INPUT);
return false;
}
$file->fillData($usedRuntime, $uploadedFile);
$file->resizeKeys = $this->getResizeKeyList();
$file->used = $used;
// proceed file check due to repository settings
if (!$this->checkFile($file)) {
return false;
}
// register new file in files table (id is needed to build the path)
$fileID = $file->addFileToRepo($this->getId(), $this->getSection());
// generate file path
if (!($file->path = $this->generateFilePath($file))) {
// NOTE(review): other failure branches pass $fileID to
// delIncompleteRecord — confirm whether the no-arg calls are intended
$file->delIncompleteRecord();
return false;
}
// moving file to repository
if ($usedRuntime == 'yii') {
// NOTE(review): dead branch ($usedRuntime is only ever 'plupload' here);
// also the condition looks inverted — saveAs() returning success would
// be treated as failure. Fix before re-enabling the Yii runtime.
if ($uploadedFile->saveAs($this->getRootPath() . $file->path, $delSource)) {
$this->setError(self::ER_FILE_MOVING);
$file->delIncompleteRecord();
return false;
}
} else {
if (is_uploaded_file($uploadedFile)) {
if (!move_uploaded_file($uploadedFile, $this->getRootPath() . $file->path)) {
$file->delIncompleteRecord($fileID);
$this->setError(self::ER_FILE_MOVING_UPLOADED);
return false;
}
} else {
// plain filesystem source: move or copy depending on $delSource
if ($delSource) {
if (!rename($uploadedFile, $this->getRootPath() . $file->path)) {
$file->delIncompleteRecord($fileID);
$this->setError(self::ER_FILE_MOVING);
return false;
}
} else {
if (!copy($uploadedFile, $this->getRootPath() . $file->path)) {
$file->delIncompleteRecord($fileID);
$this->setError(self::ER_FILE_COPYING);
return false;
}
}
}
}
// make resize (if needed)
if ($file->isImage() && $resize = $this->getResizeList()) {
$resizer = new ResizerFileManager($this, $file);
$resizer->initiateResize();
}
// complete file record
if (!$file->completeFileRecord($this->getRootPath(false))) {
return false;
}
return $file;
}
/**
 * @param integer $oldId - record id of old file to be deleted
 * @param CUploadedFile|string $uploadedFile
 * @param bool $used
 * @param bool $delSource
 * @return bool|FileFileManager
 * deletes old file by id and adds new (just addFile/deleteFiles combo)
 */
public function replaceFile($oldId, $uploadedFile, $used = true, $delSource = true){
    $file = $this->addFile($uploadedFile, $used, $delSource);
    // BUGFIX: addFile() returns false on failure — guard before
    // dereferencing ->id on a non-object
    if ($file === false || !$file->id) {
        return false;
    }
    $this->deleteFiles($oldId);
    return $file;
}
/**
 * @param $fileIdList - single id or list of ids in array or string (comma divided)
 * @param $checkRelation - if true all files will be checked
 * whether they belong to active repository and deleted only in this case
 * @return bool|int false or number of deleted files
 * delete file, all file resizes and record in files table
 */
public function deleteFiles($fileIdList, $checkRelation = false){
$fileIdList = $this->checkFileIds($fileIdList);
if (empty($fileIdList))
return false;
$resizeKeys = $this->getResizeKeyList();
$deletedCounter = 0;
foreach($fileIdList as $fileID) {
$file = new FileFileManager($fileID);
if ($file) {
if ($checkRelation) {
// NOTE(review): $file['...'] uses array access on an object —
// assumes FileFileManager implements ArrayAccess; confirm
if (is_numeric($this->getId()) && $file['repository_id'] != $this->getId()) {
continue;
}
if (is_numeric($this->getSection()) && $file['section'] != $this->getSection()) {
continue;
}
// TODO add error throwing here
}
// NOTE(review): realpath(dirname("init.php")) resolves to the current
// working directory, not necessarily the files root — compare with the
// commented getRootPath() and verify
$filePath = realpath(dirname("init.php")) . $file->path; // $this->getRootPath()
// delete main file
if (file_exists($filePath)) {
unlink($filePath);
}
// delete resizes (if exists)
if ($resizeKeys !== false) {
foreach($resizeKeys as $key) {
if (file_exists($path = self::getResizedFileName($filePath, $key))) {
unlink($path);
}
}
}
// delete table record
if ($file->delete()) {
$deletedCounter++;
}
}
}
return $deletedCounter;
}
/**
 * @param FileFileManager $file candidate file (size/ext/mime filled)
 * @return bool true when the file satisfies every repository restriction,
 * false (+error code) on the first failed check
 */
public function checkFile($file){
    // an image-only repository must receive an image file
    if ($this->getRepositoryType() === self::REPOSITORY_TYPE_IMAGE
            && !$file->isImage()) {
        $this->setError(self::ER_CHECK_MIME);
        return false;
    }
    // TODO check restricted mime
    // size limits (0 on either side means "no limit")
    $limits = $this->getSizeLimits();
    if ($limits) {
        list($minSize, $maxSize) = $limits;
        if ($minSize > 0 && $file->size < $minSize) {
            $this->setError(self::ER_CHECK_SIZE_MIN);
            return false;
        }
        if ($maxSize > 0 && $file->size > $maxSize) {
            $this->setError(self::ER_CHECK_SIZE_MAX);
            return false;
        }
    }
    // extension whitelist
    $allowedExt = $this->getAllowedExt();
    if ($allowedExt && !in_array($file->ext, $allowedExt)) {
        $this->setError(self::ER_CHECK_EXT);
        return false;
    }
    return true;
}
/**
 * @param $idList single id, array of ids, or comma-separated id string
 * @return bool|mixed false on empty input, otherwise the statement result
 * set chosen files as "used" in files table
 * FIX: rewritten from a leftover Yii query-builder call (Yii is not part
 * of this codebase — its imports are commented out) to the shared Helper.
 */
public function setUsedFiles($idList){
    // NOTE(review): checkFileIds() is defined outside this chunk;
    // assumed to return a clean array of numeric ids
    $idList = $this->checkFileIds($idList);
    if (empty($idList)) {
        return false;
    }
    // cast again defensively before inlining into the IN () list
    $in = implode(',', array_map('intval', $idList));
    return Helper::executeStatement(
        'UPDATE `'.self::FILES_TABLE.'` SET `used` = 1 WHERE `id` IN ('.$in.')'
    );
}
/**
 * @param FileFileManager $file (id and ext must already be set)
 * @param bool|string $repository override the active repository name
 * @return bool|string repository-relative file path, or false (+error)
 * generates path for file according to its id in files table
 * and repository settings (name and nesting level)
 */
private function generateFilePath($file, $repository = false){
    $hash = md5($file->id);
    $split = $this->splitStr($hash, $this->getNesting());
    if ($split === false) {
        $this->setError(self::ER_PATH_GENERATION_FAILED);
        return false;
    }
    if ($repository === false) {
        $repository = $this->getRepositoryName();
        if (!$repository) {
            return false;
        }
    }
    $path = DIRECTORY_SEPARATOR . $repository . DIRECTORY_SEPARATOR
        . implode(DIRECTORY_SEPARATOR, $split);
    umask(0000);
    if (!is_dir($this->getRootPath() . $path)) {
        if (!mkdir($this->getRootPath() . $path, 0777, true)) {
            // FIX: removed a leftover debug echo of the failed path —
            // it corrupted the page output on error
            $this->setError(self::ER_PATH_CREATION_FAILED);
            return false;
        }
    }
    return $path . DIRECTORY_SEPARATOR
        . $file->id . '.' . $file->ext;
}
public function getFileInfo($fileID) {} // TODO: not implemented — stub kept for the public API
/**
 * @param bool $sectionId - id of the section to get files from
 * @param bool $limit
 * @param bool $offset - limit+offset - kind of paging files
 * @param array $fields - fields to be returned from files table
 * @param bool $usedOnly (not used temporary -> TODO)
 * @return array|bool
 * get file list from files table using section id
 * NOTE(review): the query below uses the Yii query builder, but Yii is not
 * available in this codebase (its imports are commented out) — this method
 * will fatal if called; port to the Helper wrapper like the rest of the class.
 */
public function getFileList($sectionId = false, $limit = false, $offset = false, $fields = array('path'), $usedOnly = false) {
if (!$this->_activeRepo) {
$this->setError(self::ER_REPOSITORY_NOT_SPECIFIED);
return false;
}
if (is_numeric($sectionId)) {
$this->setSection($sectionId);
} elseif($sectionId === false && !$this->_activeSection) {
$this->setError(self::ER_SECTION_ID_NOT_SPECIFIED);
return false;
}
// indexes can be forced here manually if needed (rep_id or rep_id + section)
$command = Yii::app()->db->createCommand()
->select($fields)
->from(self::FILES_TABLE)
->where('repository_id=:repo_id AND section=:section',
array(':repo_id'=>$this->getId(),':section'=>$this->getSection()));
if (is_numeric($limit)) {
$command->limit($limit, (is_numeric($offset)) ? $offset : 0);
}
return $command->queryAll();
}
/* ----------- FILE NAMING AND LINK GENERATION ------------- */
/**
 * @return array|bool - list of resize-variant names configured for the
 * active repository (for right name formation), or false when none
 */
private function getResizeKeyList() {
    $resize = $this->getResizeList();
    if (!$resize) {
        return false;
    }
    // the variant names are simply the keys of the resize settings
    return array_keys($resize);
}
// Return the stored web-root-relative path for a file id.
// ($repository is accepted for signature symmetry but currently unused.)
public static function getLink($fileId, $repository = false){
$sql = "SELECT `path` FROM `". self::FILES_TABLE ."` WHERE `id` = :id";
return Helper::fetchAssoc($sql, array(':id' => $fileId), 'path');
}
// Stub: not implemented — kept for the public API surface.
public function getDownloadLink($fileId, $repository = false){
// TODO need this?
}
// Stub: not implemented — kept for the public API surface.
public function getSecureDownloadLink($fileId, $repository = false){
// TODO
}
/**
 * @param $fileName - raw file name or name of any resize (any modifier is possible)
 * @param $resize - new modifier to be included into file path
 * @return bool|string
 * path for chosen file resize (swaps or appends the trailing "_modifier"
 * on the base name, keeping directory and extension intact)
 */
public static function getResizedFileName($fileName, $resize) {
    if (empty($fileName)) {
        return false;
    }
    $parts = pathinfo($fileName);
    // replace an existing "_modifier" suffix (or append one) on the base name
    $baseName = preg_replace("/^(.*?)(_[^_]*)?$/", "$1_".$resize, $parts['filename']);
    return $parts['dirname'] . DIRECTORY_SEPARATOR . $baseName . '.' . $parts['extension'];
}
/* --------- WORKING WITH RAW FILES (UPLOAD/DOWNLOAD) ---------- */
/**
 * @param integer $id file id
 * tries finding chosen file and send it to user
 * (honours the start offset of a simple HTTP Range request)
 */
public static function getFile($id) {
    // NOTE(review): FMFile::model() is Yii-style ActiveRecord — confirm
    // this class is actually available here (Yii imports are commented out)
    if (!($file = FMFile::model()->findByPk($id))) {
        header ("HTTP/1.0 404 Not Found");
        exit;
    }
    $absPath = $_SERVER['DOCUMENT_ROOT'] . $file->path;
    if (!file_exists($absPath)) {
        header ("HTTP/1.0 404 Not Found");
        exit;
    }
    $name = $file->original_name . "." . $file->ext;
    $fileSize = filesize($absPath);
    $fileTime = date("D, d M Y H:i:s T", $file->date);
    // check if file can be read
    $fd = fopen($absPath, "rb");
    if (!$fd){
        header ("HTTP/1.0 403 Forbidden");
        exit;
    }
    // check whether file can be sent as partial content (using HTTP_RANGE)
    // BUGFIX: the old code stripped every "-", so "bytes=0-499" became the
    // bogus offset 0499; parse only the start offset of the first range
    $range = 0;
    if (!empty($_SERVER["HTTP_RANGE"])
            && preg_match('/bytes=(\d+)/', $_SERVER["HTTP_RANGE"], $m)) {
        $range = (int)$m[1];
        if ($range > 0) {
            fseek($fd, $range);
        }
    }
    $content = fread($fd, $fileSize);
    fclose($fd);
    if ($range > 0) {
        header("HTTP/1.1 206 Partial Content");
    } else {
        header("HTTP/1.1 200 OK");
    }
    // send appropriate headers and file body itself
    header("Content-Disposition: attachment; filename=".$name);
    header("Last-Modified: $fileTime");
    header("Accept-Ranges: bytes");
    header("Content-Length: ".($fileSize-$range));
    header("Content-Range: bytes $range - ".($fileSize -1) . "/" . $fileSize);
    header("Content-type: application/octet-stream");
    print $content;
    exit;
}
/**
 * Plupload upload endpoint: receives one file (optionally in chunks),
 * appends each chunk to "<name>.part" in the temporary directory and,
 * once the last chunk arrives, renames it to the final name.
 * Also garbage-collects stale *.part files in the temp directory.
 *
 * @param array $params request parameters: "chunk" (0-based index),
 *                      "chunks" (total count), "name" (client file name)
 * @return array - path to file and it's original name
 */
public function uploadFile($params = array()) {
// HTTP headers for no cache etc
header("Expires: Mon, 26 Jul 1997 05:00:00 GMT");
header("Last-Modified: " . gmdate("D, d M Y H:i:s") . " GMT");
header("Cache-Control: no-store, no-cache, must-revalidate");
header("Cache-Control: post-check=0, pre-check=0", false);
header("Pragma: no-cache");
// Settings
$targetDir = $this->_tmpPath;
$cleanupTargetDir = true; // Remove old files
// NOTE(review): 5 * 3600 is 5 hours, not 5 minutes — confirm intent.
$maxFileAge = 5 * 3600; // Temp file age in seconds
// 5 minutes execution time
@set_time_limit(5 * 60);
// Uncomment this one to fake upload time
// usleep(5000);
// Get parameters
$chunk = isset($params["chunk"]) ? intval($params["chunk"]) : 0;
$chunks = isset($params["chunks"]) ? intval($params["chunks"]) : 0;
$fileName = isset($params["name"]) ? $params["name"] : '';
// Clean the fileName for security reasons
$originalName = $fileName;
//$fileName = iconv('UTF-8', 'windows-1251', $fileName);
$fileName = preg_replace('/[^\w\._]+/', '_', $fileName);
// Make sure the fileName is unique but only if chunking is disabled
if ($chunks < 2 && file_exists($targetDir . DIRECTORY_SEPARATOR . $fileName)) {
$ext = strrpos($fileName, '.');
$fileName_a = substr($fileName, 0, $ext);
$fileName_b = substr($fileName, $ext);
$count = 1;
while (file_exists($targetDir . DIRECTORY_SEPARATOR . $fileName_a . '_' . $count . $fileName_b))
$count++;
$fileName = $fileName_a . '_' . $count . $fileName_b;
}
$filePath = $targetDir . DIRECTORY_SEPARATOR . $fileName;
// Create target dir
umask(0000);
if (!is_dir($targetDir))
@mkdir($targetDir);
// Remove old temp files
if ($cleanupTargetDir && is_dir($targetDir) && ($dir = opendir($targetDir))) {
while (($file = readdir($dir)) !== false) {
$tmpFilePath = $targetDir . DIRECTORY_SEPARATOR . $file;
// Remove temp file if it is older than the max age and is not the current file
if (preg_match('/\.part$/', $file) && (filemtime($tmpFilePath) < time() - $maxFileAge) && ($tmpFilePath != "{$filePath}.part")) {
@unlink($tmpFilePath);
}
}
closedir($dir);
} else {
$this->setError("Plupload :: Не удалось записать файл во временную директорию");
}
$contentType = false;
// Look for the content type header
if (isset($_SERVER["HTTP_CONTENT_TYPE"]))
$contentType = $_SERVER["HTTP_CONTENT_TYPE"];
if (isset($_SERVER["CONTENT_TYPE"]))
$contentType = $_SERVER["CONTENT_TYPE"];
// Handle non multipart uploads older WebKit versions didn't support multipart in HTML5
if (strpos($contentType, "multipart") !== false) {
if (isset($_FILES['file']['tmp_name']) && is_uploaded_file($_FILES['file']['tmp_name'])) {
//$originalName = $_FILES['file']['name'];
// Open temp file ("wb" for the first chunk, append for the rest)
$out = fopen("{$filePath}.part", $chunk == 0 ? "wb" : "ab");
if ($out) {
// Read binary input stream and append it to temp file
$in = fopen($_FILES['file']['tmp_name'], "rb");
if ($in) {
while ($buff = fread($in, 4096))
fwrite($out, $buff);
} else
$this->setError("Plupload :: Failed to open input stream.");
// NOTE(review): fclose($in) also runs when the fopen above failed,
// producing a warning on a non-resource — confirm acceptable.
fclose($in);
fclose($out);
@unlink($_FILES['file']['tmp_name']);
} else
$this->setError("Plupload :: Failed to open output stream.");
} else
$this->setError("Plupload :: Не удалось переместить закачанный файл");
} else {
// Open temp file
$out = fopen("{$filePath}.part", $chunk == 0 ? "wb" : "ab");
if ($out) {
// Read binary input stream and append it to temp file
$in = fopen("php://input", "rb");
//$originalName = $fileName;
if ($in) {
while ($buff = fread($in, 4096))
fwrite($out, $buff);
} else
$this->setError("Plupload :: Failed to open input stream.");
fclose($in);
fclose($out);
} else
$this->setError("Plupload :: Failed to open output stream.");
}
// Check if file has been uploaded (single-shot upload or last chunk)
if (!$chunks || $chunk == $chunks - 1) {
// Strip the temp .part suffix off
//$name = preg_replace('#(.*?)\/[^\/]*$#', '$1/'.$originalName, $fileName);
rename("{$filePath}.part", $filePath);
}
return array('path' => $filePath, 'original_name' => $originalName);
}
/*----------- HELPER FUNCTIONS ------------*/
/**
 * Splits the beginning of a hash into fixed-size chunks that are used
 * as nested directory names for file storage.
 *
 * @param string $hash    file id hash
 * @param int    $nesting number of parts to split off
 * @param int    $block   length of each part
 * @return array|bool parts of the path, or false when the hash is too short
 * @example fileId = 1 -> HASH: c4ca3248a0b923820dcc509a6f75849b,
 *          nesting level 4 -> array('c4', 'ca', '32', '48')
 */
private function splitStr($hash, $nesting, $block = self::HASH_SPLIT_LEN) {
    // The hash must be long enough to supply every requested part.
    if (strlen($hash) < $nesting * $block) {
        return false;
    }
    $parts = array();
    for ($i = 0; $i < $nesting; $i++) {
        $parts[] = substr($hash, $i * $block, $block);
    }
    return $parts;
}
/**
 * Filters a list of file ids down to valid ones (positive numbers).
 *
 * @param array|string $idList array of ids or a comma-separated string
 * @return array list of ids that passed validation (values kept as given)
 */
public function checkFileIds($idList){
    // Accept both an array and a comma-separated string.
    $ids = is_array($idList) ? $idList : explode(',', $idList);
    $valid = array();
    foreach ($ids as $candidate) {
        if (is_numeric($candidate) && $candidate > 0) {
            $valid[] = $candidate;
        }
    }
    return $valid;
}
/**
 * Converts a textual size into its numeric byte form.
 *
 * @param mixed $size plain number or "<digits><unit>" where unit is K/M/G
 * @return bool|int numeric size, or false (with an error set) when the
 *                  value cannot be parsed
 * @example 2K -> 2048 (2 * 1024)
 */
private function getNumericSize($size) {
    // Plain non-negative numbers are returned unchanged.
    if (is_numeric($size) && $size >= 0) {
        return $size;
    }
    // Symbolic form: digits followed by exactly one unit character.
    if (preg_match("#^([\d]+)([\w]{1})$#", $size, $matches)) {
        $units = array('K' => 1024, 'M' => 1048576, 'G' => 1073741824);
        $unit = strtoupper($matches[2]);
        if (isset($units[$unit])) {
            return $matches[1] * $units[$unit];
        }
    }
    // Unknown unit or unparsable value: report and fail.
    $this->setError('Настройки репозитория неверны');
    return false;
}
/*------------- ERROR HANDLING -------------*/
/**
 * Registers an error by its code; codes without a known text (or plain
 * message strings passed directly) are stored verbatim.
 *
 * @param mixed $erCode error code constant or a ready error message
 */
public function setError($erCode) {
    // TODO fill all error texts
    // Loose comparison (==) intentionally mirrors switch() semantics.
    if ($erCode == self::ER_REPOSITORY_ALREADY_EXISTS) {
        $this->_errors[] = 'Репозиторий с таким именем уже существует';
    } elseif ($erCode == self::ER_REPOSITORY_NOT_EXIST) {
        $this->_errors[] = 'Репозитория с указанным именем не существует';
    } else {
        $this->_errors[] = $erCode;
    }
}
/**
 * @return bool true when at least one error has been registered
 */
public function errorsOccurred() {
    return !empty($this->_errors);
}
/**
 * @return array all error messages collected so far
 */
public function getErrors() {
return $this->_errors;
}
}
<file_sep>/extra/plupload/pluploadInitializer.js
/**
 * Creates and initializes a plupload.Uploader from the given settings,
 * wiring optional preloader show/hide, start-button binding, auto-start
 * on file selection, per-file multipart params, upload-progress and
 * file-uploaded callbacks (callback names resolved on window).
 *
 * @param {Object} settings - plupload options plus extensions:
 *   preloader, start (element id), files_added (function name),
 *   callbackList (array of function names), progress (function name),
 *   multipart_params / multipart.
 * @returns {plupload.Uploader} the initialized uploader
 */
function initPlupload(settings) {
// NOTE(review): "multi_part" is not a standard plupload option name
// ("multipart" is) — confirm it is intentional.
var defaults = {
multi_selection: false,
multi_part: true
};
$.extend(defaults, settings);
var uploader = new plupload.Uploader(defaults);
//console.log(defaults);
// Optional preloader element shown while an upload is in flight.
if(typeof defaults.preloader !== 'undefined'){
uploader.bind('BeforeUpload', function(up, file){
$(defaults.preloader).fadeIn(300);
});
uploader.bind('UploadComplete', function(up, files){
$(defaults.preloader).fadeOut(300);
});
}
uploader.bind('UploadFile', function(up) {
$.extend(up.settings.multipart_params, defaults.multipart_params);
// NOTE(review): this assignment overwrites the params merged on the
// previous line with settings.multipart, which looks undefined for
// callers that pass "multipart_params" — verify before relying on it.
up.settings.multipart_params = settings.multipart;
//console.log(up.settings);
});
uploader.bind('Init', function(up, params) {
//console.log('Initialized! ' + params.runtime);
var _this = this;
// When a start-element id is configured, uploads begin on its click.
if (typeof(defaults.start) !== 'undefined') {
$('#' +defaults.start).bind('click', function() {
_this.start();
return false;
});
}
});
uploader.init();
uploader.bind('FilesAdded', function(up, files) {
// console.log('file added!', files);
// getting file list
var list = [];
$.each(files, function(i, file) {
list.push(file.name);
});
//console.log(typeof settings.files_added);
// NOTE(review): eval of a configured function name — consider calling
// window[defaults.files_added](list) instead, as done below.
if (typeof(defaults.files_added) !== 'undefined') {
// console.log(typeof defaults.files_added);
var func_name = defaults.files_added + '(list)';
//console.log(func_name);
eval(func_name);
}
// if the is no element for starting upload
// do it after files are added
if (typeof(settings.start) === 'undefined') {
uploader.start();
}
});
// After each file: parse the JSON response and invoke every configured
// global callback with it.
uploader.bind('FileUploaded', function(up, file, data) {
//console.log(data);
if (typeof(data.response) !== 'undefined') {
var response = jQuery.parseJSON(data.response);
//console.log(response);
//console.log(defaults);
for (var i in defaults.callbackList){
var callback = defaults.callbackList[i];
if (typeof window[callback] === 'function'){
var fn = window[callback];
fn.call(this, response);
}
}
}
});
// Progress reporting: callback receives [percent, fileId].
uploader.bind('UploadProgress', function(up, file) {
if (typeof defaults.progress !== 'undefined') {
if (typeof window[defaults.progress] === 'function'){
var fn = window[defaults.progress];
fn.call(this, [file.percent, file.id]);
}
}
});
return uploader;
}
function deleteFile(url, file, callback) {
var id = parseInt(file.id);
if (isNaN(id)) return false;
// check repo and section if needed
return requestWrap(url, file, callback);
}<file_sep>/admin/module/test/index.php
<h1>FileManager</h1>
<?
// Test page for the FileManager: bootstraps the manager tables, creates
// the repository, then renders the 'product' gallery with
// plupload-based uploading and click-to-delete behaviour.
// create FileManager tables
$fm = FileManager::getInstance();
$fm->initializeManager();
$fc = new FileController();
$fc->repositoryCreate();
$repository = Helper::fetchAssoc("SELECT * FROM `file_manager` WHERE `name` = 'product'");
$items = Helper::fetchAssocInArray("SELECT * FROM `file_list`", array(), '', '', false);
?>
<?/** @var $list array */
/** @var $resize string */?>
<section id="gallery_<?=$repository['name'];?>" data-section="<?=$repository['section'];?>" class="gallery">
<div class="well">
<legend class="gallery_title"><?=$repository['name']?></legend>
<ul class="img_list">
<?foreach($items as $item):
// NOTE(review): unserialize runs on every iteration; from the second
// item on it receives an already-unserialized array and (silently,
// due to @) yields false — presumably this belongs before the loop.
$repository['resize'] = @unserialize($repository['resize']);
?>
<li class="img_block">
<img class="img-polaroid" file_id="<?=$item['id'];?>"
src="<?=FileManager::getResizedFileName($item['path'], 'tmb90');?>"/>
</li>
<?endforeach;?>
</ul>
<button id="browse_<?=$repository['name'];?>" class="btn btn-large btn-block add_button" type="button">Добавить изображение</button>
</div>
<input type="hidden" name="controllerUrl" value="/file/"/>
</section>
<script type="text/javascript">
// Plupload configuration for this gallery's repository.
var pluploadSet = {
runtimes : 'html5,flash,html4',
flash_swf_url : '<?=FileManager::getAssetsPath();?>/plupload.flash.swf',
browse_button : 'browse_<?=$repository['name'];?>',
url : '/file/upload/<?=$repository['name'];?>',
dragdrop : 1,
drop_element : 'browse_<?=$repository['name'];?>',
multi_selection: true,
multipart_params: {
section: '<?=$repository['section'];?>',
resize_key: 'tmb90',
used: '<?=intval($repository['setUsed'])?>'
},
callbackList: ['addGalleryImage']
};
initPlupload(pluploadSet);
/* gallery core functions */
// Appends a freshly uploaded image to the matching gallery list.
function addGalleryImage(response) {
//find gallery
var gallery = $('#gallery_' + response.repository);
if (gallery.length) {
var imageUl = gallery.find('ul.img_list');
var newBlock = $('<li class="img_block">' +
'<img class="img-polaroid" src="'+response.path+'" file_id="'+response.fileId+'"/>' +
'</li>');
imageUl.append(newBlock);
}
}
// Server-side delete; on success deleteImageBlock removes the DOM node.
function deleteGalleryImage(url, file) {
url += 'deleteFile';
deleteFile(url, file, 'deleteImageBlock');
}
function deleteImageBlock(response){
var gallery = $('#gallery_' + response.repository);
var block = gallery.find('img[file_id="'+response.fileId+'"]').closest('li');
block.remove();
}
$(document).ready(function(){
// Clicking an image opens a delete-confirmation modal.
$('.gallery').on('click', '.img_block', function(){
var url, repository, section, imageId;
var gallery = $(this).closest('.gallery');
url = gallery.find('input[name="controllerUrl"]').val();
repository = gallery.attr('id').split('gallery_')[1];
section = gallery.data('section');
imageId = parseInt($(this).find('img').attr('file_id'));
if (!isNaN(imageId)) {
showModal(
'Удаление изображения',
'Вы уверены, что хотите удалить выбранное изображение?',
[{
butClass: 'btm-primary',
text: 'Да',
func: 'deleteGalleryImage',
params: [url, {id:imageId, repository:repository, section:section}]
},'close']
);
}
});
});
</script><file_sep>/classes/entity.php
<? class Entity {
// Base active-record style entity. Subclasses describe their columns in
// $attributes (keys: db, title, format, type, isPrimaryKey, delimiter)
// and set $table; column values live as dynamic public properties.
protected $attributes = array(), $table, $primaryKey;
public $isNew;
// Loads the record by primary key when one is given, otherwise by a
// field=>value map; a record that cannot be found stays "new".
public function __construct($primaryKey = '', $fields = array()) {
$this->isNew = empty($primaryKey) ? true : false;
$this->prepareAttributes();
if (!$this->isNew) {
$this->getAttributesByPrimaryKey($primaryKey);
} elseif (!empty($fields)) {
$this->getAttributesByFields($fields);
}
}
// NOTE(review): looks unfinished — $field is never used and the loop
// only rewrites the 'db' mapping for non-empty properties; confirm the
// intended behaviour before relying on this method.
public function findMe() {
$field = array();
foreach ($this->attributes as $attributeName => $attribute) {
if (!empty($this->$attributeName)) {
$this->attributes[$attributeName]['db'] = $attributeName;
}
}
}
// Convenience wrapper: load by primary key only.
public function getAttributesByPrimaryKey($primaryKey) {
$this->getAttributes(array(), $primaryKey);
}
// Convenience wrapper: load by arbitrary field values.
public function getAttributesByFields($fields) {
$this->getAttributes(array(), '', $fields);
}
// Fills attribute metadata defaults (db column and title fall back to
// the attribute name), collects primary-key attributes and initializes
// every attribute property to null. A single-column primary key is
// collapsed from an array to a scalar name.
protected function prepareAttributes() {
$this->primaryKey = array();
foreach ($this->attributes as $attributeName => $attribute) {
if (empty($attribute['db'])) {
$this->attributes[$attributeName]['db'] = $attributeName;
}
if (empty($attribute['title'])) {
$this->attributes[$attributeName]['title'] = $attributeName;
}
if (!empty($attribute['isPrimaryKey'])) {
$this->primaryKey[] = $attributeName;
}
$this->$attributeName = null;
}
if (count($this->primaryKey) == 1) {
$this->primaryKey = reset($this->primaryKey);
}
}
// Builds and runs a SELECT filtered by the primary key (scalar or
// composite array) and/or extra field values, then populates the
// object's properties. Note: only runs when $attributes is empty.
public function getAttributes($attributes = array(), $primaryKey = '', $fields = array()) {
$sqlParams = array();
$sqlFields = '*';
$hasAND = false;
if (empty($attributes)) {
$sql = "SELECT $sqlFields FROM `".$this->table."` WHERE ";
if (is_array($primaryKey)) {
// Composite key: seed with "1" so AND-chaining is uniform.
$sql .= "1";
foreach ($primaryKey as $field => $value) {
$sql .= " AND `" . $this->attributes[$field]['db'] . "` = :" . $field;
$sqlParams[":$field"] = $value;
$hasAND = true;
}
} elseif (!empty($primaryKey)) {
$sql .= "`" . $this->attributes[$this->primaryKey]['db'] . "` = :" . $this->primaryKey;
$sqlParams[':'.$this->primaryKey] = $primaryKey;
}
if (!empty($fields)) {
if (!$hasAND) {
$sql .= "1";
}
// NOTE(review): self:: invokes a non-static method; it works here
// because $this is preserved in instance context, but it is fragile.
$fields = self::formatAttributesToDB($fields);
foreach ($fields as $field => $value) {
$sql .= " AND `" . $this->attributes[$field]['db'] . "` = :" . $field;
$sqlParams[":$field"] = $value;
$hasAND = true;
}
}
$result = Helper::fetchAssoc($sql, $sqlParams);
if (empty($result)) {
$this->isNew = true;
} else {
foreach ($this->attributes as $attributeName => $attribute) {
$this->$attributeName = $result[$attribute['db']];
}
self::formatAttributesFromDB();
$this->isNew = false;
}
}
}
// Mass-assignment: trims incoming values and copies only keys that are
// declared attributes.
public function setAttributesFromArray($newAttributes) {
$newAttributes = Helper::trimArray($newAttributes);
foreach ($newAttributes as $key => $value) {
if (isset($this->attributes[$key])) {
$this->$key = $value;
}
}
}
// Serializes attribute values for storage according to their declared
// 'format' (json / delimiter-joined string / PHP-serialized array).
// With no argument it mutates $this; with $fields it returns the
// converted copy.
protected function formatAttributesToDB($fields = array()) {
if (empty($fields)) {
foreach ($this->attributes as $attributeName => $attribute) {
if (!empty($attribute['format'])) {
switch ($attribute['format']) {
case 'json' :
$this->$attributeName = json_encode($this->$attributeName);
break;
case 'delimiter' :
if (is_array($this->$attributeName)) {
$this->$attributeName = implode($attribute['delimiter'], $this->$attributeName);
}
break;
case 'array' :
if (is_array($this->$attributeName)) {
$this->$attributeName = serialize($this->$attributeName);
}
break;
}
}
}
} else {
foreach ($fields as $attributeName => $value) {
if (!empty($this->attributes[$attributeName]['format'])) {
switch ($this->attributes[$attributeName]['format']) {
case 'json' :
$fields[$attributeName] = json_encode($value);
break;
case 'delimiter' :
if (is_array($value)) {
$fields[$attributeName] = implode($this->attributes[$attributeName]['delimiter'], $value);
}
break;
case 'array' :
if (is_array($value)) {
$fields[$attributeName] = serialize($value);
}
break;
}
}
}
return $fields;
}
}
// Inverse of formatAttributesToDB: decodes formatted values read from
// the database back into PHP structures.
protected function formatAttributesFromDB() {
foreach ($this->attributes as $attributeName => $attribute) {
if (!empty($attribute['format'])) {
switch ($attribute['format']) {
case 'json' :
$this->$attributeName = json_decode($this->$attributeName);
break;
case 'delimiter' :
if (!empty($this->$attributeName)) {
$this->$attributeName = explode($attribute['delimiter'], $this->$attributeName);
}
break;
case 'array' :
if (!empty($this->$attributeName)) {
$this->$attributeName = @unserialize($this->$attributeName);
}
break;
}
}
}
}
// NOTE(review): md5(time().rand()) is not cryptographically secure; do
// not use for security tokens.
public static function generateMD5() {
return md5(time().rand());
}
// Normalizes a checkbox attribute to strictly 0 or 1.
public function setCheckbox($attributeName) {
if ($this->attributes[$attributeName]['type'] == 'checkbox') {
$this->$attributeName = empty($this->$attributeName) ? 0 : 1;
}
}
// Persists the entity: INSERT for new records (also stamping 'created'
// when declared, and capturing lastInsertId into a scalar PK), UPDATE
// otherwise. 'edited' is stamped on every save when declared.
// NOTE: "INSERT INTO ... SET" is MySQL-specific syntax.
public function save() {
global $pdo;
$sqlParams = array();
$sqlString = array();
foreach ($this->attributes as $attributeName => $attribute) {
// Coerce values according to the declared attribute type.
if ($attribute['type'] == 'int') {
$this->$attributeName = (int) $this->$attributeName;
} elseif ($attribute['type'] == 'float') {
$this->$attributeName = (float) $this->$attributeName;
} elseif ($attribute['type'] == 'checkbox') {
$this->$attributeName = empty($this->$attributeName) ? 0 : 1;
}
}
if (!empty($this->attributes['edited'])) {
$this->edited = time();
}
self::formatAttributesToDB();
if ($this->isNew) {
if (!empty($this->attributes['created'])) {
$this->created = time();
}
foreach ($this->attributes as $attributeName => $attribute) {
$sqlString[] = "`" . $attribute['db'] . "` = :" . $attributeName;
$sqlParams[":$attributeName"] = $this->$attributeName;
}
$sql = "INSERT INTO `" . $this->table . "` SET " . implode(', ', $sqlString);
$result = Helper::executeStatement($sql, $sqlParams);
if (!is_array($this->primaryKey)) {
$this->{$this->primaryKey} = $pdo->lastInsertId();
}
$this->isNew = false;
} else {
foreach ($this->attributes as $attributeName => $attribute) {
$sqlString[] = "`" . $attribute['db'] . "` = :" . $attributeName;
$sqlParams[":$attributeName"] = $this->$attributeName;
}
$sql = "UPDATE `" . $this->table . "` SET " . implode(', ', $sqlString) . " WHERE ";
if (is_array($this->primaryKey)) {
$sql .= "1";
foreach ($this->primaryKey as $field) {
$sql .= " AND `" . $this->attributes[$field]['db'] . "` = :" . $field;
$sqlParams[':'.$field] = $this->$field;
}
} else {
$sql .= "`" . $this->primaryKey . "` = :" . $this->primaryKey;
$sqlParams[':'.$this->primaryKey] = $this->{$this->primaryKey};
}
$result = Helper::executeStatement($sql, $sqlParams);
}
// Restore in-memory representations after serializing for storage.
self::formatAttributesFromDB();
return $result;
}
// Deletes the record by its (scalar or composite) primary key; refuses
// to delete records that were never persisted.
// NOTE(review): $sqlParams is created implicitly inside the branches.
public function delete() {
if (!$this->isNew) {
$sql = "DELETE FROM `" . $this->table . "` WHERE ";
if (is_array($this->primaryKey)) {
$sql .= "1";
foreach ($this->primaryKey as $field) {
$sql .= " AND `" . $this->attributes[$field]['db'] . "` = :" . $field;
$sqlParams[':'.$field] = $this->$field;
}
} else {
$sql .= "`" . $this->primaryKey . "` = :" . $this->primaryKey;
$sqlParams[':'.$this->primaryKey] = $this->{$this->primaryKey};
}
return Helper::executeStatement($sql, $sqlParams);
} else {
die('The record cannot be deleted because it is new.');
}
}
// Debug helper: prints (optionally) and returns a copy of the object
// stripped of its metadata members.
public function show($showPre = true) {
$object = clone $this;
unset($object->table);
unset($object->attributes);
unset($object->primaryKey);
if ($showPre) {
?><pre><?print_r($object)?></pre><?
}
return $object;
}
}<file_sep>/classes/catalog.php
<? class Catalog {
// Catalog helpers: URL -> SQL resolution, filter parsing, product and
// category-tree rendering.
// Drops the first two URL segments and returns the rest with keys in
// reverse order (deepest category first).
public static function cleanURL($URL) {
krsort($URL);
unset($URL[0]);
unset($URL[1]);
return $URL;
}
// Recursively builds a nested sub-select resolving the URL path to a
// catalog row: innermost query matches the topmost segment, each outer
// level constrains `pid` to the inner result; the outermost level
// selects * instead of `id`.
// SECURITY NOTE(review): $url is interpolated directly into SQL —
// injection risk unless the URL segments are sanitized upstream; verify.
public static function getSQLFromURL($URL, $sql = '') {
$url = array_pop($URL);
if($url) {
if(!$sql) {
if(!count($URL)) $what = ' * '; else $what = ' `id` ';
$sql = "( SELECT ".$what." FROM `catalog` WHERE `url` = '".$url."' )";
$url = '';
} else {
if(!count($URL)) $what = ' * '; else $what = ' `id` ';
$sql = "( SELECT ".$what." FROM `catalog` WHERE `url` = '".$url."' AND `pid` IN ".$sql." )";
}
return self::getSQLFromURL($URL, $sql);
} else {
return $sql;
}
}
/**
 * @param string $filterQuery filter expression: "[...]" groups, either
 *        "@=<raw condition>" on the item table or "`param` <op> 'value'"
 *        resolved through the item_param/param tables
 * @param array $select list of item-table fields to fetch
 * @return bool|string false when the query cannot be built, otherwise SQL
 * SECURITY NOTE(review): param/value fragments are interpolated into the
 * SQL string — injection risk if $filterQuery comes from user input.
 */
public static function filterToSQL ($filterQuery, $select = array ('id')) {
preg_match_all('#\[(.*?)\]#i', $filterQuery, $matches)
;
$WHERE = array('`price` > 0');
foreach($matches[1] as $query) {
// Condition addressed directly at the main item table
if(substr($query,0,2)=='@=') {
$WHERE[] = substr($query,2);
}
// Otherwise build the condition through the parameter tables
else {
$query = str_replace('"','\'',$query);
preg_match("#`(.*?)`(.*?)'(.*?)'#i",$query,$res);
if($res[1]) {
$param = $res[1];
$wtf = $res[2];
$value = $res[3];
$WHERE[] = ' `id` IN (SELECT `item_param`.`item_id` FROM `item_param`, `param`
WHERE
`param`.`name` = "'.$param.'" AND
`param`.`id` = `item_param`.`param_id` AND
`item_param`.`value` '.$wtf.' "'.$value.'"
)';
}
}
}
// Assemble the final SQL
$sql = 'SELECT ';
$arr = array ();
foreach ((array)$select as $field)
$arr[] = '`item`.`'.$field.'`';
$sql .= implode (',', $arr);
$sql .= ' FROM `item` AS `item`';
if(count($WHERE)) {
foreach ($WHERE as $whereKey => $whereEl) {
$WHERE[$whereKey] = "($whereEl)";
}
$sql .= ' WHERE ' .implode(' AND ',$WHERE);
}
return $sql;
}
// Renders a single product card (image, title, price, add-to-cart) for
// catalog listing pages; falls back to the id when no alias is set.
public static function showProductInCatalog($item) {
global $URL;
if (empty($item['alias'])) $item['alias'] = $item['id'];
?><li class="inlineblock">
<div class="top">
<a href="/item/<?=$item['alias'];?>?from=<?=implode('/',$URL);?>" class="image">
<img src="<?=FileManager::getResizedFileName($item['main_photo'], 'preview')?>" alt="" >
</a>
</div>
<div class="info">
<div class="inlineblock">
<a href="/item/<?=$item['alias'];?>?from=<?=implode('/',$URL);?>" class="title">
<?=Helper::cutText($item['name'],300);?>
</a>
<span class="price"><?=$item['price'];?> руб</span>
<!--span class="price_old">184 руб</span-->
</div
><a class="inlineblock cart add2cart" data-id="<?=$item['id'];?>">
</a>
</div>
</li><?
}
// Recursively converts a pid-grouped category array into a nested tree
// ('children' sub-arrays). NOTE(review): count($data[$parentId]) on a
// missing key raises a notice — presumably callers always pass pid 0.
public static function buildTree($data, $parentId = 0) {
if (is_array($data) && count($data[$parentId]) > 0) {
$tree = array();
foreach ($data[$parentId] as $cat) {
$children = self::buildTree($data,$cat['id']);
if (is_array($children)) {
$cat['children'] = $children;
}
$tree[] = $cat;
}
} else {
return null;
}
return $tree;
}
// Loads all categories grouped by parent id and prints the nested menu.
public static function showTreeInMenu() {
$sql = "SELECT `id`, `pid`, `name`, `url` FROM `catalog` ORDER BY `sort` ";
$data = Helper::fetchAssocInArray($sql, array(), 'pid[]');
$tree = self::buildTree($data);
echo self::showTree($tree);
}
// Renders the category tree as nested <ul> markup; sub-lists start
// closed, openable nodes get a toggle handle.
private static function showTree($data, $url = '/', $display = true) {
$tree = "<ul" . ($display ? '' : ' class="closed"') . "/>";
if (is_array($data)) {
foreach ($data as $category) {
$openable = empty($category['children']) ? false : true;
$tree .= "<li" . ($openable ? ' class="openable"' : '') . "><a href=\"$url{$category['url']}\">{$category['name']}</a>";
if ($openable) {
$tree .= "<span class=\"close\"></span>";
$tree .= self::showTree($category['children'], $url . $category['url'] . '/', false);
}
$tree .= "</li>";
}
} else {
return null;
}
$tree .= "</ul>";
return $tree;
}
}<file_sep>/init.php
<?
// Application bootstrap: constants, error reporting, session, database
// connection, global text/config arrays, URL parsing and the class
// autoloader.
// get constants
require_once "define.php";
ini_set('display_errors',1);
error_reporting(E_ALL ^ E_NOTICE);
session_start();
// Connect to the database (PDO, forced UTF-8); abort with the driver
// message when the connection fails.
try {
    $pdo = new PDO(
        DB_DRIVER.':host='.DB_HOST.';dbname='.DB_NAME,
        DB_USER,
        DB_PASSWORD,
        array(PDO::MYSQL_ATTR_INIT_COMMAND => 'SET NAMES \'UTF8\'')
    );
} catch(PDOException $e) {
    die("Error: ".$e->getMessage());
}
// Text blocks (position => text)
$TEXT = Helper::fetchAssocInArray("SELECT * FROM `pm_text`", array(), 'position', 'text');
// Site configuration (param => value)
$BASE = Helper::fetchAssocInArray("SELECT * FROM `pm_base`", array(), 'param', 'value');
$URL = Helper::explodeURI($_SERVER['REQUEST_URI']);
// Russian plural word forms used when printing result counters.
$plural = new stdClass();
$plural->find = array('найден', 'найдено', 'найдено');
$plural->product = array('товар', 'товара', 'товаров');
// loading class files automatically
// Maps CamelCase class names to dotted, reversed, lowercased file names,
// e.g. FileManager -> manager.file.php (inside the "manager" dir when
// such a directory exists under PATH_CLASSES).
function __autoload($className) {
    $class = strtolower(preg_replace('#([^A-Z])([A-Z])#', '$1.$2', $className));
    $class = array_reverse(explode('.', $class));
    $dir = $class[0];
    $fileName = strtolower(implode('.', $class)).'.php';
    if(is_dir(PATH_CLASSES.$dir)){
        $fileName = $dir.'/'.$fileName;
    }
    $includeFile = PATH_CLASSES . $fileName;
    //echo "<p>AUTOLOAD $className FILE $includeFile</p>";
    if (file_exists($includeFile)){
        include $includeFile;
    } else {
        // BUGFIX: the echo that followed this throw was unreachable dead
        // code; the missing file path now goes into the exception message.
        throw new Exception('no file to include <strong>'.$includeFile.'</strong>', 1);
    }
}
// Explicit registration: the magic __autoload() is deprecated since
// PHP 7.2 (removed in 8.0) and registering keeps it working alongside
// any other spl autoloaders.
spl_autoload_register('__autoload');<file_sep>/admin/module/articles/index.php
<?
// Admin module for article management: routes add/edit/delete actions
// for groups and single articles to their sub-scripts; with no action
// it lists all article groups.
$page->header = 'Полезные статьи';
$page->title .= $page->header;
?>
<h1><?=$page->title?></h1>
<?
if ($_GET['add'] || $_GET['edit']) {
include($URL[2].'/'.$URL[3].'/edit.php');
} elseif ($_GET['delete']) {
include($URL[2].'/'.$URL[3].'/delete.php');
} elseif ($_GET['add_group'] || $_GET['edit_group']) {
include($URL[2].'/'.$URL[3].'/edit_group.php');
} elseif ($_GET['view_group']) {
include($URL[2].'/'.$URL[3].'/view_group.php');
} elseif ($_GET['delete_group']) {
include($URL[2].'/'.$URL[3].'/delete_group.php');
} else {?>
<p><i class="icon-plus"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?add_group=1">Добавить группу</a></p>
<h3>Группа</h3>
<table class="table">
<tr>
<th>ID</th>
<th>URL</th>
<th>Название группы</th>
<th>Сортировка</th>
<th></th>
</tr>
<?
// One row per article group with view/edit/delete action icons.
$groups = Helper::fetchAssocInArray("SELECT * FROM `pm_article_groups` ORDER BY `sort`");
foreach($groups as $el) { ?>
<tr>
<td><?=$el['id']?></td>
<td><?=$el['url']?></td>
<td><?=$el['title']?></td>
<td><?=$el['sort']?></td>
<td width="50">
<a class="icon-th-list" rel="tooltip" title="Элементы" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?view_group=<?=$el['id'];?>"></a>
<a class="icon-pencil" rel="tooltip" title="Редактировать" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?edit_group=<?=$el['id'];?>"></a>
<a class="icon-remove" rel="tooltip" title="Удалить" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?delete_group=<?=$el['id'];?>" onclick="if(confirm('Удалить? Восстановление будет невозможно!')) return true; else return false;"></a>
</td>
</tr>
<? } ?>
</table>
<? } ?><file_sep>/parts/html/head.php
<head>
<?/* Shared <head> template: base href, charset, title and meta built
from the $page object, stylesheets, favicon, IE<9 html5 shim, jQuery
(Google CDN with local fallback) and the site scripts. */?>
<base href="<?=PATH_BASE?>" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title><?=SITE_NAME . (!empty($page->title) ? ' - ' . $page->title : '')?></title>
<meta name="description" content="<?=$page->metadesc?>" />
<meta name="keywords" content="<?=$page->metakey?>" />
<link rel="stylesheet" type="text/css" href="/js/fancybox/jquery.fancybox.css" media="screen, projection, print">
<link rel="stylesheet" type="text/css" href="/css/easyslider.css" media="screen, projection, print">
<link rel="stylesheet" type="text/css" href="/css/alertbox.css" media="screen, projection, print">
<link rel="stylesheet" type="text/css" href="/css/style.css" media="screen, projection, print">
<link rel="icon" href="/favicon.png" type="image/x-icon" />
<link rel="shortcut icon" href="/favicon.png" type="image/x-icon" />
<!--[if lt IE 9]>
<script src="/js/html5.js" type="text/javascript"></script>
<![endif]-->
<script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script>
<script>
!window.jQuery && document.write('<script src="/js/jquery-1.9.1.min.js"><\/script>');
</script>
<script type="text/javascript" src="/js/easyslider1.7.js"></script>
<script type="text/javascript" src="/js/placeholder.min.js"></script>
<script type="text/javascript" src="/js/fancybox/jquery.fancybox.pack.js"></script>
<script type="text/javascript" src="/js/alertbox.js"></script>
<script type="text/javascript" src="/js/main.js"></script>
</head><file_sep>/parts/admin/block_admin.php
<?$dir = scandir($_SERVER['DOCUMENT_ROOT'].'/'.$URL[1].'/module_admin');
// Admin sidebar: one menu item per directory in /module_admin whose
// "about" file contains a non-empty module name; the currently opened
// module gets class="active".
// NOTE(review): starting at index 2 assumes scandir's default ordering
// puts "." and ".." first; also the <li> is never closed — confirm the
// surrounding markup relies on browser auto-closing.
for($i=2;$i<count($dir);$i++) :
$name = @file_get_contents($_SERVER['DOCUMENT_ROOT'].'/'.$URL[1].'/module_admin/'.$dir[$i].'/about');
if(strlen($name)>2) :?>
<li<? if ($URL[3] == $dir[$i] && $URL[2] == 'module_admin') :?> class="active"<? endif; ?>><a href="/<?=$URL[1];?>/module_admin/<?=$dir[$i];?>"><?=$name;?></a>
<? endif;
endfor;
?>
<file_sep>/views/default.php
<!DOCTYPE html>
<html>
<?/* Default site layout: shared head, header, two-column body
(navigation + content) and footer. $page is the controller-built page
object; the "main" class is added on the home page (empty $URL[1]). */?>
<? include $page->pathPart('html.head') ?>
<body>
<div class="wrapper">
<div class="maincontent">
<? include $page->pathPart('html.header') ?>
<div class="container">
<div class="left inlineblock">
<? include $page->pathPart('html.nav') ?>
</div
><div class="right inlineblock<? if (empty($URL[1])) { ?> main<? } ?>">
<? include $page->pathPart('html.breadcrumbs') ?>
<?=$page->content?>
</div>
</div>
</div>
<div class="empty"></div>
</div>
<? include $page->pathPart('html.footer') ?>
</body>
</html><file_sep>/admin/module/item/index.php
<?php
/**
 * OTO v2.1
 * (One Table, Once) - all data lives in one table and is described in
 * the module exactly once (in sql.php)
 *
 * Module for the "Mandarin" system v1.1
 * Works with a single database table; most fields are described once in sql.php
 * The module requires files from /algo/
 * Author: <NAME>
 * Created: 23.10.2011
 * Questions: <EMAIL>
 */
include ('functions.php');
// table definition
include ('sql.php');
$page->header = "Управление товарами";
$page->title .= $page->header;
?><h2><?=$page->header?></h2><?
if (isset ($_GET['add']) || isset ($_GET['edit'])) {
include ('addedit.php');
} elseif (isset($_GET['del'])) {
delete_from_db ($_GET['del']);
header ('Location: ' . Admin::thisModule());
} else {
// build the item listing table
Admin::createPMTableEditDeleteForSQL(
'SELECT `id`,`article`,`name`,`price`,`template_id` FROM `' . $db_table . '` ORDER BY `article` ',
'?add', 'Добавить товар',
array ('50', '400', '60', '', '56'),
array ('Артикул', 'Название', 'Цена', 'Шаблон', ''),
array ('article', 'name', 'price', 'template_id', 'id'),
// NOTE(review): bare constant `print_cell_table` — in PHP < 8 an
// undefined constant degrades (with a notice) to the string
// 'print_cell_table'; PHP 8+ raises an Error. Presumably a quoted
// callback name was intended — confirm.
print_cell_table,
true
);
}
?><file_sep>/parts/html/breadcrumbs.php
<?/* Breadcrumb navigation — currently disabled: the markup below is
fully commented out and kept as a template for when breadcrumbs are
re-enabled. */?>
<? /*if (!empty($URL[1])) { ?>
<ul class="breadcrumbs">
<li class="inlineblock"><a href="">Главная</a></li>
<li class="inlineblock"><a href="">Вложенная</a></li>
<li class="inlineblock">Текущая</li>
</ul>
<? } */?><file_sep>/admin/module/articles/view_group.php
<?
// Admin view: lists all article items belonging to the chosen group
// (?view_group=<id>) with per-row edit/delete links and an "add item"
// shortcut. The id is bound as a prepared-statement parameter.
$sql = "SELECT * FROM `pm_article_groups` WHERE `id` = :id";
$group = Helper::fetchAssoc($sql, array(':id' => $_GET['view_group']));
?>
<h3>Группа: <?=$group['title']?></h3>
<p><i class="icon-arrow-left"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Вернуться к списку</a></p>
<p><i class="icon-plus"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?add=1&view_group=<?=$_GET['view_group']?>">Добавить элемент</a></p>
<table class="table">
<tr>
<th>ID</th>
<th>URL</th>
<th>Название</th>
<th>Сортировка</th>
<th></th>
</tr>
<?
// One row per item of the group, ordered by the manual sort field.
$groupItems = Helper::fetchAssocInArray("SELECT * FROM `pm_article_items` WHERE `groupId` = :groupId ORDER BY `sort`", array(':groupId' => $group['id']));
foreach($groupItems as $el) { ?>
<tr>
<td><?=$el['id']?></td>
<td><?=$el['url']?></td>
<td><?=$el['title']?></td>
<td><?=$el['sort']?></td>
<td width="32">
<a class="icon-pencil" rel="tooltip" title="Редактировать" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?edit=<?=$el['id'];?>&view_group=<?=$_GET['view_group']?>"></a>
<a class="icon-remove" rel="tooltip" title="Удалить" href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>?delete=<?=$el['id'];?>&view_group=<?=$_GET['view_group']?>" onclick="if(confirm('Удалить? Восстановление будет невозможно!')) return true; else return false;"></a>
</td>
</tr>
<? } ?>
</table><file_sep>/admin/module/template/index.php
<?php
// Admin module for item templates: add/edit via addedit.php; delete
// first detaches the template from items (template_id = 0), then
// removes the row; otherwise lists all templates with their parameter
// names.
// database table initialization
include ('sql.php');
define ('TEMPLATE_TABLE', '`template`');
define ('PARAM_TABLE', '`param`');
define ('ITEM_TABLE', '`item`');
$this_module = '/'.$URL[1].'/'.$URL[2].'/'.$URL[3].'/';
$page->title .= 'Шаблоны';
if (isset ($_GET['add']) || isset ($_GET['edit'])) {
include ('addedit.php');
} elseif (isset ($_GET['del'])) {
$id = (int)$_GET['del'];
// Detach the template from any items referencing it before deleting.
Helper::executeStatement(
'UPDATE '.ITEM_TABLE.' SET `template_id`=0 WHERE `template_id` = :templateId',
array(':templateId' => (int)$id)
);
Helper::executeStatement('
DELETE FROM '.TEMPLATE_TABLE.' WHERE `id` = :id',
array(':id' => (int)$id)
);
header ('Location: '. $this_module);
} else {
?>
<h2>Шаблоны</h2>
<p><a href="<?=$this_module;?>?add"><span class="icon icon-plus"></span> Добавить шаблон</a></p>
<? $templates = Helper::fetchAssocInArray('SELECT * FROM '.TEMPLATE_TABLE); ?>
<? if (empty ($templates)) : ?>
<p>Вы пока не добавили ни один шаблон</p>
<? else : ?>
<table class="table table-hovered table-bordered">
<tr>
<th>Название</th>
<th>Параметры шаблона</th>
<th></th>
</tr>
<? foreach ($templates as $t) : ?>
<tr>
<td><?=$t['name'];?></td>
<?
// `params` is a comma-separated id list; resolve it to parameter names.
$params = $t['params'] ? Helper::fetchAssocInArray('SELECT * FROM '.PARAM_TABLE.' WHERE `id` IN ('.$t['params'].')') : array();
$param_names = array_map(function($r){return $r['name'];}, $params)
?>
<td><?=implode(', ', (array)$param_names);?></td>
<td>
<? Admin::printImageMenu('?edit='.(int)$t['id'], 'edit') ?>
<? Admin::printImageMenu('?del='.(int)$t['id'], 'del') ?>
</td>
</tr>
<? endforeach; ?>
</table>
<? endif; ?>
<? } ?><file_sep>/views/auth.php
<?
// Admin login flow:
//   1) cookie-based auto-login,
//   2) per-IP brute-force throttling,
//   3) superadmin check against config values,
//   4) regular admin check against `pm_admins`.
// NOTE(review): passwords are stored/compared in plain text and the
// remember-me cookie is a static md5 — flagged, left unchanged to stay
// compatible with the rest of the project (see addadmin.php).
// NOTE(review): this branch reads $_COOKIE['isadmin'] but the code below
// sets cookies named 'isSuperAdmin' / 'isAdmin' (cookie names are
// case-sensitive), so the auto-login may never trigger — confirm intent.
if($_COOKIE['isadmin']) :
    if($_COOKIE['isadmin'] === md5($admin_data['admin_pwd'].'<PASSWORD>')) :
        $_SESSION['loggin'] = 1;
    endif;
    header('Location: /'.$URL[1]);
    exit;
endif;
if($_POST['email'] && $_POST['password'] ) :
    // Record this attempt, then purge attempts older than login_time seconds.
    $sql = "INSERT INTO `pm_login` SET `ip` = '".$_SERVER['REMOTE_ADDR']."', `time` = ".time();
    Helper::executeStatement($sql);
    $sql = "DELETE FROM `pm_login` WHERE `time` < ".(time()-$admin_data['login_time']);
    Helper::executeStatement($sql);
    // Throttle: too many recent attempts from this IP -> bounce with ?timeout.
    $sql = "SELECT COUNT(*) FROM `pm_login` WHERE `ip` = '".$_SERVER['REMOTE_ADDR']."'";
    // Fixed: the row was fetched with FETCH_ASSOC, so $r[0] was always unset
    // and the rate limit never fired; fetch numerically instead.
    $r = $pdo->query($sql)->fetch(PDO::FETCH_NUM);
    if ($r[0]>=$admin_data['login_count']) :
        header('Location: /'.$URL[1]."?timeout=1");
        exit;
    endif;
    // Superadmin: credentials come from the site config, not the DB.
    if ($admin_data['admin_login'] == $_POST['email'] AND $admin_data['admin_pwd'] == $_POST['password']) :
        $_SESSION['isadmin'] = 1;
        $_SESSION['status'] = 'superadmin';
        // Append to the plain-text login journal.
        $f = fopen('db/login.php','a+');
        fwrite($f,date('d.m.Y H:i')." --- ".$_SERVER['REMOTE_ADDR']." - as SUPERADMIN\r\n");
        fclose($f);
        // Remember-me cookie, 24h lifetime.
        setcookie('isSuperAdmin',md5($_POST['password'].'<PASSWORD>'),time()+3600*24,'/');
        header('Location: /'.$URL[1]);
        exit;
    endif;
    // Regular admins stored in `pm_admins` (parameterised lookup).
    $sql = "SELECT * FROM `pm_admins` WHERE `email` = :email AND `pass` = :pass";
    $r = Helper::fetchAssocInArray($sql, array(':email' => $_POST['email'], ':pass' => $_POST['password']));
    $r = reset($r);
    if (!empty($r)) :
        $_SESSION['isadmin'] = $r['id'];
        $_SESSION['status'] = 'admin';
        $_SESSION['rights'] = $r['rights'];
        $_SESSION['admin_data'] = $r;
        // Append to the plain-text login journal.
        $f = fopen('db/login.php','a+');
        fwrite($f,date('d.m.Y H:i')." --- ".$_SERVER['REMOTE_ADDR']." - as ".$r['email']."\r\n");
        fclose($f);
        // Remember-me cookie, 24h lifetime.
        setcookie('isAdmin',md5($_POST['password'].'<PASSWORD>'),time()+3600*24,'/');
        // Roll the previous login time/IP into the "last" columns.
        $sql = "UPDATE `pm_admins` SET
            `login_last_time` = `login_now_time`,
            `login_last_ip` = `login_now_ip`,
            `login_now_time` = '".time()."',
            `login_now_ip` = '".$_SERVER['REMOTE_ADDR']."'
            WHERE `id` = ".$_SESSION['isadmin'];
        $pdo->exec($sql) or die('DB ERROR: Enter data error');
        header('Location: /'.$URL[1]);
        exit;
    endif;
    // Neither matched: show the error banner in the form below.
    $ERROR = 1;
endif;
if (!empty($_POST) && (empty($_POST['email']) || empty($_POST['password']))) { $ERROR = 1; }
?>
?>
<!DOCTYPE HTML>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Авторизация в системе</title>
<link href="/bootstrap/css/bootstrap.css" rel="stylesheet">
<link rel="shortcut icon" href="/favicon.png">
<!--[if lt IE 9]>
<script type="text/javascript" src="/js/html5.js"></script>
<![endif]-->
<script type="text/javascript" src="/js/jquery-1.9.1.min.js"></script>
<script type="text/javascript" src="/bootstrap/js/bootstrap.min.js"></script>
</head>
<body>
<div class="well auth_form">
<form method="post" name="authform">
<h1><?=SITE_NAME?></h1>
<h2>Панель администратора</h2>
<?if($ERROR) {?>
<div class="alert alert-error fade in">
<button type="button" class="close" data-dismiss="alert">×</button>
Неправильный логин или пароль.<br />Попробуйте ещё раз.
</div>
<?}?>
<?
// Show the rate-limit warning when redirected here with ?timeout=1.
// Fixed: this block still used the removed mysql_* API while the rest of
// the file uses PDO; fetch numerically so $r[0] matches the COUNT(*) column.
if($_GET['timeout']) {
    $sql = "SELECT COUNT(*) FROM `pm_login` WHERE `ip` = '".$_SERVER['REMOTE_ADDR']."'";
    $r = $pdo->query($sql)->fetch(PDO::FETCH_NUM);
    if ($r[0]==$admin_data['login_count']) {
?>
<div class="alert alert-error fade in">
<button type="button" class="close" data-dismiss="alert">×</button>
Слишком много попыток входа. Подождите <b><?=$admin_data['login_time'];?> сек</b>.
</div><?
    }
}?>
<div class="control-group<?if($ERROR){?> error<?}?>">
<p><input type="text" name="email" placeholder="e-mail" autofocus value="" class="error"></p>
<p><input type="<PASSWORD>" name="password" placeholder="<PASSWORD>" value=""></p>
</div>
<button type="submit" class="btn btn-large btn-primary save" name="go">Войти</button>
</form>
</div>
<style type="text/css">
.auth_form {
margin: 160px auto;
width: 300px;
background-color: #F6F5F5;
padding: 20px;
overflow: hidden;
}
.auth_form h1 {
text-align: center;
color: #009DDB;
font-size: 42px;
}
.auth_form h2 {
font-size: 16px;
text-align: center;
margin: 0 auto 10px;
}
.auth_form input[type=text], .auth_form input[type=password]{
font-size: 24px;
line-height: 27px;
height: auto;
width: 290px;
}
.auth_form button[type=submit]{
float: right;
padding: 9px 24px;
font-weight: bold;
}
</style>
</body>
</html><file_sep>/classes/helper.php
<?php
class Helper {
    // Russian month names (genitive case), keyed by 3-letter abbreviation.
    public static $month = array('jan'=>'Января','feb'=>'Февраля','mar'=>'Марта','apr'=>'Апреля','may'=>'Мая','jun'=>'Июня','jul'=>'Июля','aug'=>'Августа','sep'=>'Сентября','oct'=>'Октября','nov'=>'Ноября','dec'=>'Декабря');
    // English month names, same keys.
    public static $monthEng = array('jan'=>'January','feb'=>'February','mar'=>'March','apr'=>'April','may'=>'May','jun'=>'June','jul'=>'July','aug'=>'August','sep'=>'September','oct'=>'October','nov'=>'November','dec'=>'December');
    // Day names indexed like date('w'): 0 = Sunday.
    public static $days = array('Воскресенье','Понедельник','Вторник','Среда','Четверг','Пятница','Суббота');

    // Copy every property of $req that also exists on $obj (strips slashes).
    public static function fetchRequestInObject(&$obj, $req) {
        $ReqVars = get_object_vars($req);
        foreach ($ReqVars as $ReqName => $ReqValue) {
            if (property_exists($obj, $ReqName)) {
                $obj->$ReqName = stripslashes($ReqValue);
            }
        }
    }

    // Run a prepared query and return the first row as an assoc array.
    // When $column is given, return only that column of the first row
    // (null when the query matched nothing).
    public static function fetchAssoc($sql, $params = array(), $column = null) {
        global $pdo;
        $statement = $pdo->prepare($sql);
        $statement->execute($params);
        if ( (int)$statement->errorCode() ) {
            die('<br/><b>SQL:</b> ' . $statement->queryString . '<br/><b>ERROR:</b> ' . $statement->errorInfo()[2]);
        }
        if (!empty($column)) {
            $result = $statement->fetch(PDO::FETCH_ASSOC);
            // Fixed: guard against indexing `false` when no row matched.
            return $result ? $result[$column] : null;
        }
        return $statement->fetch(PDO::FETCH_ASSOC);
    }

    // Fetch every row of a query into an array.
    //   $key:       column used as the array key; suffix '[]' groups rows
    //               into a list per key value instead of overwriting.
    //   $value:     when set, keep only this column instead of the whole row.
    //   $needClear: htmlspecialchars+stripslashes every cell (output escaping).
    public static function fetchAssocInArray($sql, $params = array(), $key = '', $value = '', $needClear = true) {
        global $pdo;
        $statement = $pdo->prepare($sql);
        $statement->execute($params);
        if ( (int)$statement->errorCode() ) {
            die('<br/><b>SQL:</b> ' . $statement->queryString . '<br/><b>ERROR:</b> ' . $statement->errorInfo()[2]);
        }
        $isSecondLevel = strpos($key, '[]') ? true : false;
        $key = str_replace('[]', '', $key);
        for ($data = array(), $i = 0; $row = $statement->fetch(PDO::FETCH_ASSOC); $i++) {
            if ($needClear) {
                foreach ($row as $rKey => $rEl) {
                    $row[$rKey] = htmlspecialchars(stripslashes($rEl));
                }
            }
            $key_param = $i;
            if (!empty($key)) {
                if ($isSecondLevel) {
                    $key_param = $row[$key];
                    $data[$key_param][] = empty($value) ? $row : $row[$value];
                } else {
                    $key_param = $row[$key];
                    $data[$key_param] = empty($value) ? $row : $row[$value];
                }
            } else {
                $data[$key_param] = empty($value) ? $row : $row[$value];
            }
        }
        return $data;
    }

    // Prepare + execute; dies with diagnostics on SQL error.
    public static function executeStatement($sql, $params = array()) {
        global $pdo;
        $statement = $pdo->prepare($sql);
        $statement->execute($params);
        if ( (int)$statement->errorCode() ) {
            die('<br/><b>SQL:</b> ' . $statement->queryString . '<br/><b>ERROR:</b> ' .print_r($statement->errorInfo()));
        }
        return $statement;
    }

    // CREATE TABLE from a column-name => definition map.
    // Default storage options: MyISAM, utf8.
    public static function createTable($tableName, $columns, $options = null) {
        $params = array();
        foreach ($columns as $param => $value) {
            $params[] = "$param $value";
        }
        $sql = "CREATE TABLE `" . $tableName . "` (" . implode(', ', $params) . ") ";
        $sql = $options===null ? $sql . ' ENGINE=MyISAM DEFAULT CHARSET=utf8' : $sql.' '.$options;
        return self::executeStatement($sql);
    }

    // Parameterised INSERT built from a column => value map.
    public static function insert($table, $columns) {
        $params=array();
        $names=array();
        $placeholders=array();
        foreach($columns as $name=>$value) {
            $names[] = "`$name`";
            $placeholders[] = ':' . $name;
            $params[':' . $name] = $value;
        }
        $sql='INSERT INTO `' . $table . '` (' . implode(', ',$names) . ') VALUES (' . implode(', ', $placeholders) . ')';
        return self::executeStatement($sql, $params);
    }

    // Recursively trim() every string leaf of a (possibly nested) array.
    public static function trimArray($array) {
        if (is_array($array)) {
            foreach ($array as $key => $el) {
                if (is_array($el)) {
                    $array[$key] = self::trimArray($el);
                } else {
                    $array[$key] = trim($el);
                }
            }
        }
        return $array;
    }

    // Split a request URI into path segments, dropping the query string and
    // a trailing empty segment ('/a/b/?x' -> ['', 'a', 'b']).
    public static function explodeURI($URI) {
        $URI = urldecode($URI);
        if (strpos($URI,'?')) {
            $finish = strpos($URI,'?');
        } else {
            $finish = strlen($URI);
        }
        $URI = substr($URI, 0, $finish);
        $result = explode ('/', $URI);
        if (end($result) == '' ) {
            unset($result[count($result)-1]);
        }
        return $result;
    }

    // Cut $text down to at most $maxsize bytes, preferring to break on a
    // space or punctuation; '...' is appended once a cut has happened.
    public static function cutText ($text, $maxsize, $flagNeedAddPoints = false) {
        // Robustness: a maxsize that cannot even hold '...' would recurse
        // forever in the hard-cut branch below.
        if ($maxsize <= 3)
            return substr ($text, 0, max(0, $maxsize));
        $points = ($flagNeedAddPoints ? '...' : '');
        if (strlen ($text . $points) <= $maxsize)
            return $text . $points;
        $i = strrpos ($text, ' ');
        if (! $i)
            $i = strrpos ($text, '.');
        if (! $i)
            $i = strrpos ($text, ',');
        if (! $i)
            $i = strrpos ($text, '!');
        if (! $i)
            $i = strrpos ($text, '?');
        if (! $i)
            $i = strrpos ($text, ':');
        if ($i)
            return self::cutText(substr ($text, 0, $i), $maxsize, true);
        // Fixed: the recursive call dropped $maxsize and passed the boolean
        // flag in its place, breaking the hard-cut branch entirely.
        return self::cutText(substr ($text, 0, $maxsize - 3), $maxsize, true);
    }

    // Russian plural form selector: $forms = [singular, few, many].
    public static function pluralForm($n, $forms) { return $n%10==1&&$n%100!=11?$forms[0]:($n%10>=2&&$n%10<=4&&($n%100<10||$n%100>=20)?$forms[1]:$forms[2]); }

    // Fetch the row with the given id from $table.
    public static function selectAllForId ($table, $id) {
        $sql = 'SELECT * FROM '.$table.' WHERE `id` = :id';
        return Helper::fetchAssoc($sql, array(
            ':id' => (int) $id
        ));
    }

    // Does the string look like a date: three numeric parts joined by
    // '-' or '.'.
    // Fixed: the original declared a *named* inner function, which fatally
    // redeclared itself on the second call, and its is_numeric((int)$x)
    // checks were vacuously true for any input.
    public static function isDate ($str) {
        $isArrDate = function ($arr) {
            return isset($arr[2])
                && is_numeric ($arr[0])
                && is_numeric ($arr[1])
                && is_numeric ($arr[2]);
        };
        return $isArrDate (explode ('-', $str)) || $isArrDate (explode ('.', $str));
    }

    // Date conversion: yyyy-mm-dd -> dd.mm.yyyy
    public static function toNormDate($str) {
        $arr = explode ('-', $str);
        return $arr[2] . '.' . $arr[1] . '.' . $arr[0];
    }

    // Date conversion: dd.mm.yyyy -> yyyy-mm-dd
    public static function toDBDate($str) {
        $arr = explode ('.', $str);
        return $arr[2] . '-' . $arr[1] . '-' . $arr[0];
    }

    // Build a "SET `a`=..., `b`=..." fragment from a list of triples
    // [column, value, type] or pairs [column, type] (value taken from $_POST).
    // Types: 'int' (cast), 'check' ('on'/''), 'date' (dd.mm.yyyy -> DB),
    // default: quoted string via PDO::quote.
    public static function createSetString($arr_data) {
        global $pdo;
        if ( !count($arr_data) ) return '';
        $string = 'SET ';
        for ($i = 0, $n = count ($arr_data); $i < $n; ++ $i) {
            $string .= '`' . ($arr_data[$i][0]) . '`=';
            if (count ($arr_data[$i]) >= 3) {
                $type = $arr_data[$i][2];
                $value = $arr_data[$i][1];
            } else {
                $type = $arr_data[$i][1];
                $value = $_POST[$arr_data[$i][0]];
            }
            $value = trim($value);
            switch ($type) {
                case 'int':
                    $string .= (int)$value;
                    break;
                case 'check':
                    $string .= "'" . ($value ? 'on' : '') . "'";
                    break;
                case 'date':
                    $string .= "'" . Helper::toDBDate($value) . "'";
                    break;
                default:
                    $string .= $pdo->quote($value);
                    break;
            }
            if ($i + 1 < $n)
                $string .= ', ';
        }
        return $string;
    }
}<file_sep>/admin/js/adminCore.js
/** Core Admin js **/
/**
 * Fill and open the shared #adminModal Bootstrap dialog.
 * header/body: HTML for the title and body areas.
 * buttons: optional array; the string 'close' adds a cancel button, an
 * object {text, butClass, func, params} adds an action button whose click
 * runs the comma-separated function list `func` via callFunctionList and
 * then hides the modal. With no buttons, a single Close button is shown.
 */
function showModal(header, body, buttons) {
    var id = 0; // counter giving each generated action button a unique DOM id
    var modal = $('#adminModal');
    modal.find('#myModalLabel').html(header);
    modal.find('.modal-body').html(body);
    var buttonBlock = modal.find('.modal-footer');
    buttonBlock.empty(); // drop buttons left over from a previous dialog
    if (typeof buttons !== 'undefined') {
        for (var i in buttons) {
            var button = buttons[i];
            if (button === 'close') {
                buttonBlock.append('<button class="btn" data-dismiss="modal" aria-hidden="true">Отмена</button>');
            } else {
                if (typeof button.func !== 'undefined') {
                    var butClass, text, func, params;
                    // fall back to '' / '{text}' when the caller omits them
                    (!!button.butClass) ? butClass = button.butClass : butClass = '';
                    (!!button.text) ? text = button.text : text = '{text}';
                    var buttonHtml = $('<button class="btn '+butClass+'">'+text+'</button>');
                    var buttonId = 'action_' + (id++);
                    buttonHtml.attr('id', buttonId);
                    buttonBlock.append(buttonHtml);
                    // bind custom event call on this button click; the button
                    // descriptor travels as jQuery event data (params.data)
                    buttonBlock.find('#'+buttonId)
                        .on('click', button, function(params){
                            //console.log(params.data);
                            callFunctionList(params.data.func, params.data.params);
                            $('#adminModal').modal('hide');
                        });
                }
            }
        }
    } else {
        buttonBlock.append('<button class="btn" data-dismiss="modal" aria-hidden="true">Close</button>');
    }
    modal.modal({});
}
// Invoke every comma-separated *global* function name in `list`, passing
// `params` as the argument array. Names that do not resolve to a function
// on window are silently skipped.
function callFunctionList(list, params) {
    var names = list.split(',');
    for (var idx = 0; idx < names.length; idx++) {
        var handler = window[names[idx]];
        if (typeof handler === 'function') {
            handler.apply(this, params);
        }
    }
}
// POST `data` to `url`; on success the raw response text is forwarded to
// the comma-separated `callback` function list via callFunctionList.
// Always returns true (the request itself is asynchronous).
function requestWrap(url, data, callback) {
    $.ajax(
        url,
        {
            data: data,
            type: 'POST',
            success: function(response){
                //var param = $.parseJSON(response);
                // response is passed through un-parsed; callbacks decode it
                callFunctionList(callback, [response]);
            }
        }
    );
    return true;
}<file_sep>/templates/cart.php
<?
// Cart page. The cart lives client-side in the 'cart' cookie as a
// serialized array of {id, count} entries keyed by item id.
// NOTE(review): unserialize() on a client-controlled cookie permits PHP
// object injection; consider json_encode/json_decode — left unchanged here
// because parts/html/header.php reads the same cookie format.
$page->title = $page->header = 'Корзина';
if (isset($_POST['cart-refresh']) && $_POST['cart-refresh'] == 1) {
    // "Refresh" submit: apply per-row deletions and quantity edits, write
    // the cookie back (7-day lifetime) and redirect to avoid re-POSTing.
    $items = unserialize($_COOKIE['cart']);
    foreach ($items as $id => $item) {
        if (isset($_POST['del_'.$id])){
            unset($items[$id]);
            continue;
        }
        if (isset($_POST['count_'.$id])){
            $items[$id]['count'] = intval($_POST['count_'.$id]);
        }
    }
    $cart = $items;
    setcookie('cart', serialize($items), time()+60*60*24*7, '/');
    header('Location: ' . implode('/', $URL));
    exit;
} else {
    $cart = unserialize($_COOKIE['cart']);
}
$total = 0; // running order total, accumulated while rendering rows below
?>
<h1><?=$page->header?></h1>
<? if (empty($cart)) {
?><h2>Ваша корзина пока пуста.</h2><?
} else { ?>
<form method="post">
<table class="cart_table">
<tr>
<th>Изображение</th>
<th>Товар</th>
<th>Артикул</th>
<th>Количество</th>
<th>Цена</th>
<th><a title="Удалить" class="inlineblock icon_remove"></a></th>
</tr>
<? foreach( $cart as $key => $el ) {
$id = $el['id'];
$sql = "SELECT * FROM `item` WHERE `id` = :id";
$product = Helper::fetchAssoc($sql, array(':id' => $id));
if (empty($product['alias'])) $product['alias'] = $id;
if (!is_numeric($product['id'])) { die ('ID товара не существует'); }
$sql = "SELECT * FROM `item_param` WHERE `item_id` = :itemId";
$paramData = Helper::fetchAssocInArray($sql, array(':itemId' => $product['id']), 'param_id', 'value');
?>
<tr>
<td class="cart-table-img">
<div class="imgwindowgreen">
<div class="vertcont1">
<div class="vertcont2"><img alt="" src="<?=FileManager::getResizedFileName($product['main_photo'], 'tmb50');?>"></div>
</div>
</div>
</td>
<td class="cart-table-name">
<h2><a href="/item/<?=$product['alias'];?>" target="_blank"><?=$product['name'];?></a></h2>
</td>
<td><?=$product['article']?></td>
<td class="cart-table-count">
<input type="text" size="2" value="<?=$el['count'];?>" name="count_<?=$key;?>">
</td>
<td class="cart-table-price">
<div class="costblock">
<div class="costblockin">
<?echo $product['price'] * $el['count']; $total += $product['price'] * $el['count'];?> <span>руб.</span>
</div>
</div>
</td>
<td class="cart-table-price">
<input type="checkbox" name="del_<?=$key;?>">
</td>
</tr>
<? }?>
<tr style="padding-top: 10px">
<td colspan="4" align="right"><h2>Цена всего:</h2></td>
<td class="price_total"><?=$total;?> руб.</td>
<td>
<div id="cart-refresh" >
<input type="hidden" value="1" name="cart-refresh">
<input type="submit" class="icon_update" value="Обновить">
</div>
</td>
</tr>
</table>
</form>
<h1>Контактная информация</h1>
<div class="order_block">
<form action="/order" method="post">
<div class="field_name">Имя</div>
<input type="text" required value="" class="field" name="name">
<div class="field_name">E-mail</div>
<input type="email" required value="" class="field" name="email">
<div class="field_name">Контактный телефон</div>
<input type="text" required value="" class="field" name="phone">
<div class="field_name">Адрес доставки</div>
<textarea required class="field" name="address" rows="3"></textarea>
<div>
<input type="submit" value="Оформить заказ" class="button" name="submit">
</div>
</form>
</div>
<? } ?>
<file_sep>/parts/html/header.php
<header role="banner">
<div class="container">
<div class="logo-block inlineblock">
<a href="/" class="logo"><?=$TEXT['site_name']?></a>
<div class="slogan"><?=htmlspecialchars_decode($TEXT['slogan'])?></div>
<div class="phone"><?=htmlspecialchars_decode($TEXT['phone'])?></div>
</div
><ul class="feedback inlineblock">
<li class="skype"><?=$TEXT['skype']?></li>
<li class="mail"><a href="mailto:<?=$TEXT['email']?>"><?=$TEXT['email']?></a></li>
<li class="vk"><a href="<?=$TEXT['vk_group']?>" target="_blank"><?=$TEXT['vk_group']?></a></li>
</ul
><div class="right inlineblock">
<div class="worktime"><?=htmlspecialchars_decode($TEXT['worktime'])?></div>
<!--div class="user-info inlineblock">
<div class="hello">
Добро пожаловать
<a href="">войдите</a>
или
<a href="">зарегистрируйтесь</a>
</div>
<ul class="user-actions">
<li class="inlineblock"><a href="#">Избранное</a></li>
<li class="inlineblock"><a href="#">Личный кабинет</a></li>
<li class="inlineblock"><a href="#">Корзина</a></li>
</ul>
</div
--><div class="cart-block inlineblock">
<a href="/cart">
<?
// Sum item quantities from the 'cart' cookie for the header badge.
// @ suppresses the warning when the cookie is missing or corrupt.
$cart = @unserialize($_COOKIE['cart']);
$cartCount = 0;
if (!empty($cart)) {
    foreach ($cart as $product) {
        $cartCount += $product['count'];
    }
}
?>
<span class="title">ваша корзина</span>
<span class="value">Покупок: <?=$cartCount?></span>
</a>
</div>
</div>
<nav class="menu">
<? include $page->pathPart('menu.top') ?>
<form class="search" action="/search">
<input type="text" placeholder="поиск" value="" id="search" name="q" />
<input type="submit" value="" />
</form>
</nav>
</div>
</header><file_sep>/backend/index.php
<?
// Front controller for AJAX backend calls: maps the request URL onto a PHP
// file under PATH_ROOT ("/foo/bar" -> foo/bar.php or foo/bar/index.php)
// and includes it; unknown paths get a JSON error response.
require_once '..' . DIRECTORY_SEPARATOR . 'init.php';
$response = new ResponseHelper();
// Security: URL segments become a filesystem path, so refuse anything that
// could climb out of PATH_ROOT ('..' segments, NUL bytes).
foreach ($URL as $segment) {
    if (strpos($segment, '..') !== false || strpos($segment, "\0") !== false) {
        $response->status = 'error';
        $response->error = 'No such file';
        $response->encode();
        return;
    }
}
$path = rtrim(PATH_ROOT, DIRECTORY_SEPARATOR) . implode(DIRECTORY_SEPARATOR, $URL);
$pathInclude = null;
if (is_file($path . '.php')) {
    $pathInclude = $path . '.php';
} elseif (is_file($path . DIRECTORY_SEPARATOR . 'index.php')) {
    $pathInclude = $path . DIRECTORY_SEPARATOR . 'index.php';
}
// Never include this dispatcher itself.
if (!empty($pathInclude) && $pathInclude != __FILE__) {
    include $pathInclude;
} else {
    $response->status = 'error';
    $response->error = 'No such file';
    $response->encode();
}
<file_sep>/parts/admin/block_main.php
<?
// Admin sidebar: list the modules this admin is allowed to see.
// Rights are a comma-separated id list in the session.
$t = explode(',',$_SESSION['rights']);
foreach($t as $el) :
    $RIGHT[$el] = "1";
endforeach;
// Each module directory contains an `about` file: line 0 = required right
// id, line 1 = display name.
$dir = scandir($_SERVER['DOCUMENT_ROOT'].'/'.$URL[1].'/module');
for($i=2;$i<count($dir);$i++) :
    $t = @file($_SERVER['DOCUMENT_ROOT'].'/'.$URL[1].'/module/'.$dir[$i].'/about');
    $name = $t[1];
    // Visible when the admin owns the right, or is superadmin.
    if(((strlen($name)>2 AND $RIGHT[trim($t[0])]==1) OR $_SESSION['status'] == 'superadmin') AND count($t)>1) :?>
<li<?
    // Highlight the entry for the module currently open.
    if ($URL[3] == $dir[$i] && $URL[2] == 'module') :
        ?> class="active"<?
    endif;
?>><a href="/<?=$URL[1];?>/module/<?=$dir[$i];?>"><?=$name;?></a>
<? endif;
endfor;
?>
<file_sep>/templates/static.php
<?
// Static-page template: look the page up by its URL slug and render it;
// unknown slugs redirect to /404.
if (empty($URL[1])) $URL[1] = 'main'; // the site root serves the 'main' page
$sql = 'SELECT * FROM `pm_static` WHERE `url` = :url LIMIT 1';
$statement = $pdo->prepare($sql);
$statement->execute(array(':url' => $URL[1]));
if ($statement->rowCount() == 1) {
    $r = $statement->fetch(PDO::FETCH_ASSOC);
    // Copy page metadata into the global page object for the layout.
    $page->title = $r['title'];
    $page->metadesc = $r['metadesc'];
    $page->metakey = $r['metakey'];
    $page->header = $r['header'];
    $page->content = $r['text'];
?><h1><?=stripslashes($page->header)?></h1>
<div class="text">
<?=stripslashes($page->content)?>
</div>
<?
} else {
    header('Location: /404');
    exit;
}
}<file_sep>/admin/module/catalog/functions.php
<?
/*
* Функция удаления элемента таблицы базы данных
*/
/*
 * Delete a catalog row: close the sort gap it leaves in its parent,
 * then cascade-delete the row and all of its descendants.
 */
function delete_from_db ($id) {
    global $db_table;
    global $gallery;
    // Fetch the parent id and sort position of the row being deleted.
    // Fixed: fetchAssoc() returns an associative array, so list() never
    // worked; read by key instead. Also query $db_table rather than a
    // hard-coded table name, consistent with every other query here.
    $sql = 'SELECT `pid`, `sort` FROM `' . $db_table . '` WHERE `id`=' . (int)$id;
    $row = Helper::fetchAssoc($sql);
    $pid = $row['pid'];
    $sort = $row['sort'];
    // Shift down the sort of every later sibling in the same parent.
    $sql = 'SELECT `id`, `sort` FROM `' . $db_table .
        '` WHERE `sort` > :sort AND `pid` = :pid';
    $items = Helper::fetchAssocInArray($sql, array(':sort' => $sort, ':pid' => (int)$pid));
    if ($items)
        foreach ($items as $item) {
            $sql = 'UPDATE `' . $db_table . '` ' . Helper::createSetString(array (
                array ('sort', $item['sort'] - 1, 'int')
            )) . ' WHERE `id` = :id';
            // Fixed: the parameter array used a comma instead of '=>', so
            // the :id placeholder was never actually bound.
            Helper::executeStatement($sql, array(':id' => $item['id']));
        }
    // Cascade-delete the subtree.
    del_catalog ($id);
}
// функции по работе с деревом каталогов
// вывод элементов меню справа от каталога
// Render the per-row action icons for one catalog node: move up/down,
// list its items, edit, delete.
// NOTE(review): print_tree() calls this with a second $level argument,
// which PHP silently ignores — confirm whether $level was meant to be used.
function print_images_menu ($id) {
    Admin::printImageMenu(Admin::thisModule() . '?up=' . (int)$id, 'up', 'Сортировка: выше');
    Admin::printImageMenu(Admin::thisModule() . '?down=' . (int)$id, 'down', 'Сортировка: ниже');
    ?><a href="?items&catalog_id=<?=$id;?>" title="Товары" rel="tooltip"><span class="icon icon-th-list"></span></a><?
    Admin::printImageMenu(Admin::thisModule() . '?edit=' . (int)$id, 'edit');
    Admin::printImageMenu(Admin::thisModule() . '?del=' . (int)$id, 'del');
}
// вывод элемента меню Добавить
// Render the "add section" link. $id is the literal string 'root' for a
// top-level section, or a numeric parent id for a sub-section at $level;
// anything else renders nothing.
function print_menu_add ($id, $level) {
    global $URL;
    if ($id == 'root') {
        $what = 'корневой раздел';
    } elseif ($id && is_numeric ($id)) {
        $what = 'подраздел на ' . $level . ' уровень';
    } else return;
    ?>
<span class="icon icon-plus"></span> <a title="Добавить <?=$what;?>" href="<?=Admin::thisModule()?>?add=<?=$id;?>">Добавить раздел</a><?
}
// вывод дерева каталогов
// Recursively render the catalog tree from the global $tree adjacency map
// ($tree[$id]['child'] = list of child ids), stopping at $max_level depth.
// The root call uses an empty/zero $id, which skips the node label.
function print_tree ($id, $level)
{
    global $tree;
    global $max_level;
    if ($level > $max_level)
        return;
    if ($id)
    {
        // Node label followed by its row of action icons.
?>
<span class="folder"><?=$tree[$id]['name'];?> <?php print_images_menu ($id, $level); ?></span>
<?php
    }
    if ($level + 1 < $max_level)
    {
        ?><ul class="catselect"><?php
        if ($tree[$id]['child'])
            foreach ($tree[$id]['child'] as $child_id)
            {
                ?><li><?php
                print_tree ($child_id, $level + 1);
                ?></li><?php
            }
        // Trailing "add section" entry for this parent.
        ?><li><?php
        print_menu_add ($id, $level);
        ?></li><?php
        ?></ul><?php
    }
}
// каскадное удаление каталогов
// Recursively delete a catalog node together with all of its descendants.
function del_catalog ($id)
{
    global $db_table;
    // Remove every child subtree first...
    $children = Helper::fetchAssocInArray(
        'SELECT `id` FROM `' . $db_table . '` WHERE `pid` = :pid',
        array(':pid' => (int)$id)
    );
    if ($children)
        foreach ($children as $child)
            del_catalog ($child['id']);
    // ...then the node itself.
    Helper::executeStatement(
        'DELETE FROM `' . $db_table . '` WHERE `id` = :id',
        array(':id' => (int)$id)
    );
}
<file_sep>/admin/module/template/sql.php
<?php
// Schema bootstrap for the Templates module: one row per item template;
// `params` holds a comma-separated list of ids from the `param` table.
Helper::executeStatement('
CREATE TABLE IF NOT EXISTS `template` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`name` varchar(100) NOT NULL,
`params` text,
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
');
?><file_sep>/admin/module/item_param/sql.php
<?php
// Schema bootstrap: per-item values of template parameters
// (composite primary key item_id + param_id).
Helper::executeStatement('
CREATE TABLE IF NOT EXISTS `item_param` (
`item_id` int(11) NOT NULL,
`param_id` int(11) NOT NULL,
`value` text,
PRIMARY KEY (`item_id`,`param_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
');
?><file_sep>/admin/module_admin/admins/addadmin.php
<?
// Create a new admin account from the submitted form, then return to the list.
// Fixed: this was still using the removed mysql_* API while the rest of the
// project uses PDO; values are now bound as named parameters.
// NOTE(review): the password is stored in plain text — this matches how
// views/auth.php checks `pm_admins`, so it is flagged but left unchanged.
if($_POST['addpage']) :
    $sql = "INSERT INTO `pm_admins` SET
        `email` = :email,
        `pass` = :pass,
        `rights` = '21,22,23,24,27,26,20,25,2,1',
        `login_now_time` = '0',
        `login_now_ip` = '127.0.0.1',
        `login_last_time`= '0',
        `login_last_ip` = '127.0.0.1'";
    Helper::executeStatement($sql, array(
        ':email' => $_POST['email'],
        ':pass' => $_POST['pwd'],
    ));
    header('Location: /'.$URL[1].'/'.$URL[2].'/'.$URL[3]);
    exit;
endif;
?>
<p><i class="icon-arrow-left"></i> <a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>">Вернуться к списку записей</a></p>
<h3>Добавление записи</h3>
<form action="" method="post" class="form-horizontal">
<fieldset>
<div class="control-group">
<label class="control-label">E-mail</label>
<div class="controls"><input name="email" class="long" value="<?=$r['email'];?>"></div>
</div>
<div class="control-group">
<label class="control-label">Пароль</label>
<div class="controls"><input name="pwd" class="long" value="<?=$r['pwd'];?>"></div>
</div>
<div class="form-actions">
<input class="btn btn-primary" type="submit" name="addpage" value="Добавить запись">
<a href="/<?=$URL[1];?>/<?=$URL[2];?>/<?=$URL[3];?>" class="btn">Отмена</a>
</div>
</fieldset>
</form><file_sep>/admin/module/articles/delete.php
<?
// Delete one article item by id (?delete=ID), then return to the group view.
if($_GET['delete']) {
    $sql = "DELETE FROM `pm_article_items` WHERE `id` = :id";
    // Cast both ids to int: they are numeric by contract, and the cast keeps
    // arbitrary input out of the query and the redirect Location header.
    Helper::executeStatement($sql, array(':id' => (int)$_GET['delete']));
    header('Location: /'.$URL[1].'/'.$URL[2].'/'.$URL[3].'?view_group='.(int)$_GET['view_group']);
    exit;
}
| 6b469c79adbd02aa09053268f7b1859f8371b773 | [
"JavaScript",
"PHP"
] | 87 | PHP | katzebue/click2home | 3c4bcfa579b0403e9b0743b5c831cba454d887a0 | b8ef7713bd26fa7696de589a2f6b5029aad5836b |
refs/heads/master | <repo_name>celio1/minesweeper<file_sep>/server.js
// Minesweeper registration/ranking backend (connect + mysql, port 8020).
var validator = require('validator');
var connect = require('connect');
var cors = require('cors');
var http = require('http');
var crypto = require('crypto');
var mysql= require('mysql');
var chance = require('chance');
var salt = crypto.createHash('md5');
var bodyParser = require('body-parser');
// Allow any origin: the game client is served from a different host/port.
var corsOpts = { origin: '*' };
var app = connect();
app.use(bodyParser.urlencoded({extended: false}));
app.use(bodyParser.json());
// Minimal router: only POST /register and POST /ranking are handled;
// anything else simply gets no response.
app.use(cors(corsOpts)).use(function (req, res, next) {
    //console.log(req.method);
    //console.log(req.url);
    if(req.url === '/register' && req.method === 'POST')
        register(req,res);
    else
    if(req.url === '/ranking' && req.method === 'POST')
        ranking(req, res);
}).listen(8020);
console.log("Server running on 8020");
// Single shared MySQL connection for the whole process.
var conn = mysql.createConnection({
    host : 'localhost',
    user : 'up201303171',
    password : '<PASSWORD>',
    database : 'up201303171'
});
conn.connect(function(err) {
    if (err) {
        console.log("not able to connect DB");
    } else {
        console.log('Connected!');
    }
});
// POST /register — create the user if new, otherwise verify credentials.
// Replies with {} on success or {error: message} on failure.
function register(req, res) {
    res.setHeader('Content-Type', 'application/json');
    var somedata = {}; // sent as-is when registration/login succeeds
    if (!verify(req)) {
        // Fixed: this used to reply with a bare JSON *string*, so the
        // client's `response.error` check never fired and an invalid login
        // looked successful. Reply with an error object, like ranking().
        somedata.error = "Utilizador ou password incorrectos";
        res.end(JSON.stringify(somedata));
    } else {
        registerDB(req, res, somedata);
    }
}
// POST /ranking — top-10 scores for a validated difficulty level.
// Invalid requests get {error: "Invalid parameter"}.
function ranking(req, res) {
    res.setHeader('Content-Type', 'application/json');
    var payload = {};
    if (!verify(req)) {
        payload.error = "Invalid parameter";
        res.end(JSON.stringify(payload));
        return;
    }
    rankingDB(req, res, payload);
}
// Query the 10 best scores for the requested level and reply with
// {ranking: [{name, score}, ...]}; ties break on the earlier timestamp.
function rankingDB(req, res, somedata) {
    conn.query(
        "SELECT * FROM Rankings WHERE level=? ORDER BY score DESC, timestamp ASC LIMIT 10",
        [req.body.level],
        function (err, result) {
            if (err) {
                console.log(err);
                // Fixed: on a DB error the request used to hang forever.
                somedata.error = "Database error";
                res.end(JSON.stringify(somedata));
                return;
            }
            var ranking = [];
            for (var i = 0; i < result.length; i++) {
                ranking.push(new ranks(result[i].name, result[i].score));
            }
            // Fixed: an empty result set also left the request hanging;
            // always answer, with an empty list when there are no scores yet.
            res.end(JSON.stringify({ "ranking": ranking }));
        }
    );
}
function ranks(name, score){
this.name = name;
this.score = score;
}
// Look the user up; if present, verify the salted-md5 password, otherwise
// create the account with a fresh 4-character salt.
// NOTE(review): unsalted-iteration md5 is a weak password hash — flagged,
// kept for compatibility with existing stored hashes.
function registerDB(req, res, somedata) {
    conn.query("SELECT * FROM Users WHERE name=?", [req.body.name], function (err, result) {
        if (err) {
            console.error(err);
            return;
        }
        if (result.length > 0) {
            // Existing user: recompute md5(pass + stored salt) and compare.
            var row = result[0];
            var hashed = crypto.createHash('md5').update(req.body.pass + row.salt).digest('hex');
            if (row.pass === hashed) {
                res.end(JSON.stringify(somedata));
            } else {
                somedata.error = "Utilizador ou password incorrectos";
                res.end(JSON.stringify(somedata));
            }
        } else {
            // New user. Fixed: the old code did `chance = new chance()`,
            // which replaced the required module with an instance and made
            // the *second* registration crash ("chance is not a
            // constructor"); keep the generator in a local variable.
            var rng = new chance();
            var sal = rng.string({ length: 4 });
            var hashedNew = crypto.createHash('md5').update(req.body.pass + sal).digest('hex');
            insertDB(req.body.name, hashedNew, somedata, res, sal);
        }
    });
}
// Persist a new user (name, salted-md5 password hash, salt) and reply with
// the success payload.
function insertDB(name, pass, somedata,res,sal){
    var q = conn.query('INSERT INTO Users SET name=?, pass=?, salt=?', [name,pass,sal], function(err, result){
        if(err)
            // NOTE(review): rollback() is called without an open transaction
            // (and on MyISAM tables, per the schema elsewhere in this repo,
            // it is a no-op) — confirm the intended error handling.
            conn.rollback(function() { throw err; });
        else{
            //console.error(result);
            res.end(JSON.stringify(somedata));
        }
    });
}
// Validate the JSON body per endpoint. Returns true when the request may
// proceed, false otherwise (never undefined).
function verify(req) {
    if (req.url === '/register') {
        var user = req.body.name;
        var pass = req.body.pass;
        // Fixed: missing fields used to reach validator/crypto and throw;
        // require both, keeping the alphanumeric-name rule.
        if (typeof user !== 'string' || user.length === 0) return false;
        if (typeof pass !== 'string' || pass.length === 0) return false;
        return validator.isAlphanumeric(user);
    }
    if (req.url === '/ranking') {
        var lvl = req.body.level;
        if (typeof lvl !== 'string') return false;
        return validator.isAlpha(lvl) &&
            (lvl === "beginner" || lvl === "intermediate" || lvl === "expert");
    }
    // Unknown endpoint.
    return false;
}
<file_sep>/1ªe2ªfase/README.md
# minesweeper
Single and multiplayer mode
<file_sep>/README.md

## About
O presente trabalho foi realizado pelos alunos <NAME> (up201303171) e <NAME>(up201303743) no âmbito da unidade curricular de Tecnologias Web na Faculdade de Ciências da Universidade do Porto.
A estrutura do jogo Minesweeper divide-se em 3 etapas, que foram submetidas e avaliadas ao longo do semestre:
* **1ª etapa:** jogo Minesweeper single player;
* **2ª etapa:** implementação multiplayer com recurso a comunicações HTTP entre o cliente e o servidor desenvolvido pelo docente, a correr na porta 8020 e com recurso a CORS;
* **3ª etapa:** implementação do servidor capaz de comunicar com o código desenvolvido na 2ª etapa, nomeadamente as funções register e ranking. Implementação em NodeJS.
## Screenshots
**Multiplayer:**

**Singleplayer:**

**Losing the game:**

**Winning and rankings:**

<file_sep>/1ªe2ªfase/script.js
// Global game state shared by single- and multiplayer modes.
var URL = "http://twserver.alunos.dcc.fc.up.pt:8000/"; // game server base URL
var user;
var group=20;
var pass;
var level; // temporary: selected difficulty name ("beginner"/"intermediate"/"expert")
var cols;
var rows;
var multiplayer=false;
var my_bombs=0;
var enemy_bombs=0;
var reqEnemy;
var game;
var key;
var mines_left; // used in single-player mode
var n_bombs; // used in single-player mode
var time=0; // used in single-player mode
var n_visited=0; // used in single-player mode
var gameOver=false; // used in single-player mode
var primeira_jogada=0; // used in single-player mode (first-move marker)
// Local leaderboards per difficulty, persisted in localStorage.
var easyscore = JSON.parse(localStorage.getItem("beginner")) || [];
var medscore = JSON.parse(localStorage.getItem("intermediate")) || [];
var hardyscore = JSON.parse(localStorage.getItem("expert")) || [];
function init_mulbeg(){
    // Beginner level selected: 9x9 board with 10 mines.
    document.getElementById("startgamebtn").disabled = false;
    level = "beginner";
    rows = 9;
    cols = 9;
    n_bombs = 10;    // single-player mode
    mines_left = 10; // single-player mode
}
function init_mulmed(){
    // Intermediate level selected: 16x16 board with 40 mines.
    document.getElementById("startgamebtn").disabled = false;
    level = "intermediate";
    rows = 16;
    cols = 16;
    n_bombs = 40;    // single-player mode
    mines_left = 40; // single-player mode
}
function init_mulhard(){
    // Expert level selected: 16-column by 30-row board with 99 mines.
    document.getElementById("startgamebtn").disabled = false;
    level = "expert";
    rows = 30;
    cols = 16;
    n_bombs = 99;    // single-player mode
    mines_left = 99; // single-player mode
}
function single_enable(){
    // Single-player chosen: lock multiplayer, unlock the difficulty buttons.
    ["begbtn", "medbtn", "hardbtn"].forEach(function (id) {
        document.getElementById(id).disabled = false;
    });
    document.getElementById("multiplayerbtn").disabled = true;
}
function group_display(){
    // Multiplayer chosen: lock the single-player button, unlock the
    // difficulty buttons and remember the mode for the session.
    ["begbtn", "medbtn", "hardbtn"].forEach(function (id) {
        document.getElementById(id).disabled = false;
    });
    document.getElementById("singlebtn").disabled = true;
    multiplayer = true;
}
// Validate the login form, then POST the credentials to the server's
// "register" endpoint.  On success the UI switches to the logged-in state
// (mode buttons enabled, form locked); on bad credentials the form is reset.
function login() {
    // Both fields are mandatory.
    if(document.querySelector("#inputUserName").value === "" || document.querySelector("#inputUserPassword").value === "") {
        alert("Necessário introduzir nome de utilizador e a tua password");
        return;
    }
    // User name: must start with letters and be at least 3 characters long.
    if(!(/^[a-zA-Z]+/.test(document.querySelector("#inputUserName").value)) || document.querySelector("#inputUserName").value.length <3) {
        alert("Nome de utilizador deve ser constituido por 2 caracteres no minino, sem caracteres especiais e conter letras");
        return;
    }
    /*
    if(document.querySelector("#inputUserPassword").value.length < 5 || !(/[a-z]+/.test(document.querySelector("#inputUserPassword").value)) || !(/[A-Z]+/.test(document.querySelector("#inputUserPassword").value)) || !(/\d+/.test(document.querySelector("#inputUserPassword").value))) {
        alert("Necessário introduzir password constituida por pelo menos uma letra maiúscula, uma minúscula e um dígito com mais de 4 caracteres");
        return;
    }
    */
    // Validation passed: keep the credentials for later requests (join/leave).
    user = document.querySelector("#inputUserName").value;
    pass = document.querySelector("#inputUserPassword").value;
    // JSON login payload.
    var loginInfo = JSON.stringify({'name': user, 'pass': pass});
    // New request against the "register" endpoint.
    var req = new XMLHttpRequest();
    req.open("POST", URL + "register", true);
    req.setRequestHeader('Content-Type', 'application/json');
    req.onreadystatechange = function() {
        if(req.readyState != 4) return;
        if(req.status != 200) {
            alert(req.status + ": " + req.statusText);
            return;
        }
        // Parse the JSON reply.
        var response = JSON.parse(req.responseText);
        if(response.error != undefined) {
            // Bad credentials: clear the form and refocus the name field.
            alert("O utilizador ou a palavra-chave estão erradas");
            document.querySelector("#login input[type='text']").value = "";
            document.querySelector("#login input[type='password']").value = "";
            document.querySelector("#login input[type='text']").focus();
            return;
        }
        document.getElementById("sign").style.color="green";
        // Registration confirmed: greet the user and lock the login form.
        document.getElementById("welcome").innerHTML = "Welcome, "+user;
        document.getElementById("inputUserName").value = "";
        document.getElementById("inputUserPassword").value = "";
        document.getElementById("inputUserName").disabled = true;
        document.getElementById("inputUserPassword").disabled = true;
        document.getElementById("singlebtn").disabled =false;
        document.getElementById("multiplayerbtn").disabled =false;
        // Disable the login button itself.
        document.getElementById("sign").disabled = true;
    };
    req.send(loginInfo);
    // Credentials are forwarded later via join(group, user, pass, level).
}
// Entry point of the "start game" button.  In single-player mode it just
// builds a local board; in multiplayer it queues the player on the server
// ("join") and opens an EventSource on "update" that drives the whole game:
// opponent found, moves from both sides, and the winner announcement.
// Relies on the globals user/pass/level/group set by login() and init_mul*().
function join() {
    document.getElementById("startgamebtn").style.display= "none";
    document.getElementById("loadingx").style.display= "block";
    if(!multiplayer){
        // Single player: no server involved.
        document.getElementById("loadingx").style.display="none";
        modosingle();
        return;
    }
    document.getElementById("load_opponent").innerHTML = "Searching an enemy for you";
    document.getElementById("mul_leave").style.display="block"; // show the "leave queue" button
    // BUGFIX: the password value had been replaced by a bare <PASSWORD>
    // placeholder (a scrubbing artifact), which is a syntax error.  The
    // global `pass` captured by login() is what the server expects.
    var joinInf = JSON.stringify({'name': user, 'pass': pass, 'level':level, 'group':group});
    var req = new XMLHttpRequest();
    req.open("POST", URL+"join", true);
    req.setRequestHeader('Content-Type', 'application/json');
    req.onreadystatechange = function() {
        if(req.readyState != 4)
            return;
        if(req.status != 200) {
            alert(req.status + ": " + req.statusText);
            return;
        }
        var response = JSON.parse(req.responseText);
        if(response.error){
            alert("erro de servidor: " + response.error);
            return;
        }
        game = response["game"]; // game id assigned by the server
        key = response["key"];   // access key for this game
        console.log(URL + "update?name=" + user + "&game=" + game + "&key=" + key);
        // Server-sent events: every game state change arrives here.
        reqEnemy = new EventSource(URL + "update?name=" + user + "&game=" + game + "&key=" + key);
        reqEnemy.onmessage = function(event) {
            var response = JSON.parse(event.data);
            if(response.turn==user) turn(1); // highlight "my turn" indicator
            if(response.winner !== undefined) {
                // Game over: the server names the winner.
                if(user === response.winner){
                    alert("Parabéns!! Ganhaste o Jogo contra o teu adversário!");
                    showPlayerScore(response.winner);
                    reqEnemy.close();
                }else{
                    alert("O teu adversário " + response.winner + " venceu o jogo!");
                    reqEnemy.close();
                }
            }
            if(response.move == undefined) {
                // First message of a game: an opponent was found.
                newGame(); // build the board table
                enemy = response.opponent;
                var cell;
                var row;
                if(response.turn == user){ // I move first
                    alert("O jogo começou, o teu adversário é " + enemy + ". Começas tu a jogar");
                    document.getElementById("loadingx").style.display="none";
                    document.getElementById("load_opponent").style.display="none";
                    document.getElementById("mul_leave").style.display="none";
                    document.getElementById("mul_player_progress").style.display="block";
                    document.getElementById("enemymul_bomb_progress").style.display="block";
                    document.getElementById("mymul_bomb_progress").style.display ="block";
                    console.log(response);
                    var board=document.getElementById("tabela");
                    // Forward clicks as moves (server uses 1-based coordinates).
                    board.onclick = function (e)
                    {
                        var cell=e.target.cellIndex;
                        var row= e.target.parentNode.rowIndex;
                        mymove(key,game,row+1,cell+1);
                    }
                }
                else{ // opponent moves first
                    alert("O jogo começou, o teu adversário é " + enemy + ". Começa o teu adversário a jogar");
                    document.getElementById("loadingx").style.display="none";
                    document.getElementById("load_opponent").style.display="none";
                    document.getElementById("mul_player_progress").style.display="block";
                    document.getElementById("enemymul_bomb_progress").style.display="block";
                    document.getElementById("mymul_bomb_progress").style.display ="block";
                    document.getElementById("mul_leave").style.display="none";
                    console.log(response);
                    var board=document.getElementById("tabela");
                    board.onclick = function (e)
                    {
                        var cell=e.target.cellIndex;
                        var row= e.target.parentNode.rowIndex;
                        mymove(key,game,row+1,cell+1);
                    }
                }
            }
            else
            { // response.move is defined: repaint the revealed cells
                var move = response.move; // the move the server is reporting
                var cells= move.cells;
                var r;
                var c;
                var nm;
                cells.forEach(function(cell) {
                    r = cell[0];
                    c = cell[1];
                    nm = cell[2]; // -1 = bomb, otherwise neighbour count
                    if(move.name === user)
                    {
                        if(nm === -1)
                        {
                            fillBoard(r-1,c-1,nm);
                            progress("me"); // I hit a mine
                            console.log("uma bomba");
                        }
                        else
                        {
                            // Server coordinates are 1-based, ours 0-based.
                            fillBoard(r-1,c-1,nm);
                            console.log("deves alterar r="+r+", "+c+"e "+nm );
                        }
                    }
                    else{
                        if(nm === -1)
                        {
                            progress("enemy"); // the opponent hit a mine
                            fillBoard(r-1,c-1,nm);
                            console.log("uma bomba");
                        }
                        else
                        {
                            fillBoard(r-1,c-1,nm);
                            console.log("deves alterar r="+r+", "+c+"e "+nm );
                        }
                    }
                });
            }
        }; //onmessage
    }; //onreadystatechange
    req.send(joinInf);
}
// Fetch and display the winner's score from the server's "score" endpoint.
// NOTE(review): the `vencedor` parameter is never used — the request is made
// for the logged-in `user`; confirm caller and callee agree.
function showPlayerScore(vencedor){
    var winnerScore = JSON.stringify({'name': user, 'level': level});
    // new request
    var req = new XMLHttpRequest();
    req.open("POST", URL + "score", true);
    req.setRequestHeader('Content-Type', 'application/json');
    req.onreadystatechange = function() {
        if(req.readyState != 4) return;
        if(req.status != 200) {
            alert(req.status + ": " + req.statusText);
            return;
        }
        var response = JSON.parse(req.responseText);
        if(response.error !== undefined)
            alert(response.error);
        // NOTE(review): no return after the error alert, so the score alert
        // below also fires on error — confirm this is intended.
        alert("O teu score: "+response.score);
    };
    req.send(winnerScore);
}
// Paint cell (r,c) according to its revealed value nm:
// 0 = empty, 1..8 = neighbour count, -1 = detonated mine.
// Values outside that set leave the cell untouched (as before).
function fillBoard(r,c,nm){
    var img;
    if (nm === -1) {
        img = "clicked_bomb";
    } else if (nm === 0) {
        img = "opened";
    } else if (nm >= 1 && nm <= 8) {
        img = nm; // numeric tiles are named 1.png .. 8.png
    } else {
        return;
    }
    document.getElementById("c."+(r)+"."+(c)).style.backgroundImage = "url('./img/" + img + ".png')";
}
// Increment the hit-mine counter for the given player ("me" or anything else
// meaning the enemy) and refresh the matching label.
function progress(playerprog){
    if (playerprog === "me") {
        my_bombs++;
        document.getElementById("mybombs1").innerHTML = my_bombs;
    } else {
        enemy_bombs++;
        document.getElementById("enemybombs1").innerHTML = enemy_bombs;
    }
}
// Resize the player icon to signal whose move it is: 1 = mine (big icon),
// 2 = opponent's (small icon).
function turn(turn){
    if (turn == 1)
    {
        console.log("entrei no turn do me");
        var icon = document.getElementById("icon_player");
        icon.style.width = '70px';
        icon.style.height = '70px';
    }
    else if (turn == 2)
    {
        console.log("mudar tamanho");
        var icon = document.getElementById("icon_player");
        icon.style.width = '50px';
        icon.style.height = '50px';
    }
}
// Show the rankings panel.  Single player uses the locally stored scores
// (honor()); multiplayer asks the server's "ranking" endpoint and renders the
// top 10 into a generated <table id="table_rank">.
function tableofHonor(){
    if(!multiplayer)
    {
        honor();
        return;
    }
    document.getElementById("honor_mul").style.display="block";
    var theWinnersOf = JSON.stringify({'level': level});
    var req = new XMLHttpRequest();
    req.open("POST", URL+"ranking", true);
    req.setRequestHeader('Content-Type','application/json');
    req.onreadystatechange = function() {
        if(req.readyState != 4) return;
        if(req.status != 200) {
            alert(req.status + ": " + req.statusText);
            return;
        }
        var response = JSON.parse(req.responseText);
        if(response.error !== undefined)
            alert(response.error);
        // Build the ranking table inside the callback, header row first.
        var container = document.getElementById("honor_mul"); // table parent
        var table = document.createElement('table');
        table.setAttribute("class","test");
        table.setAttribute("id","table_rank");
        var tableRow = table.insertRow(0);
        var tableCell = tableRow.insertCell(0);
        tableCell.appendChild(document.createTextNode("Rank"));
        container.appendChild(table);
        tableCell = tableRow.insertCell(1);
        tableCell.appendChild(document.createTextNode("Nome"));
        container.appendChild(table);
        tableCell = tableRow.insertCell(2);
        tableCell.appendChild(document.createTextNode("Score"));
        container.appendChild(table);
        // One row per ranking entry; the server list starts at index 1 here.
        for( var i=1; i<=10; i++){
            var entrada = response.ranking[i];
            if(response.ranking[i] != undefined){
                var tableRow = table.insertRow(i);
                var tableCell = tableRow.insertCell(0);
                tableCell.appendChild(document.createTextNode(i));
                container.appendChild(table);
                var tableCell = tableRow.insertCell(1);
                tableCell.appendChild(document.createTextNode(entrada.name));
                container.appendChild(table);
                var tableCell = tableRow.insertCell(2);
                tableCell.appendChild(document.createTextNode(entrada.score));
            }
        }
    };
    req.send(theWinnersOf);
}
// POST this player's move (1-based row/col) to the server's "notify"
// endpoint.  The resulting board changes come back asynchronously through the
// EventSource opened in join().
function mymove(key,game,x,y){
    var mycall = JSON.stringify({'name': user, 'game': game, 'key': key, 'row': x, 'col': y});
    var req = new XMLHttpRequest();
    req.open("POST", URL+"notify", true);
    req.setRequestHeader('Content-Type', 'application/json');
    req.onreadystatechange = function() {
        if(req.readyState != 4) return;
        if(req.status != 200) {
            alert(req.status + ": " + req.statusText);
            return;
        }
        var response = JSON.parse(req.responseText);
        console.log(response);
        if(response.error !== undefined)
            alert(response.error);
    };
    req.send(mycall);
    // NOTE(review): the turn indicator flips immediately after sending, not
    // after the server confirms the move — verify this is intended.
    turn(2);
}
// Leave the matchmaking queue: tell the server ("leave" endpoint), close the
// update stream and reload the page to reset all client state.
function leave() {
    var quitInfo = JSON.stringify({'name': user, 'game': game, 'key': key});
    var req = new XMLHttpRequest();
    req.open("POST", URL + "leave", true);
    req.setRequestHeader('Content-Type', 'application/json');
    req.onreadystatechange = function() {
        if(req.readyState != 4)
            return;
        if(req.status != 200) {
            alert(req.status + ": " + req.statusText);
            return;
        }
        alert("Saiste da fila de espera para aceder ao jogo");
        reqEnemy.close();
        location.reload();
    };
    req.send(quitInfo);
}
// Swap the home screen for the board view and build a rows x cols table of
// unopened cells with ids "c.<row>.<col>" (multiplayer board).
function newGame(){
    document.getElementById("homedisplay").style.display = 'none';
    document.getElementById("tab").style.display = 'block';
    var board = document.getElementById("tabela");
    for (var r = 0; r < rows; r++)
    {
        var tr = board.insertRow();
        for (var c = 0; c < cols; c++)
        {
            var td = tr.insertCell();
            td.id = ("c." + r + "." + c);
            td.style.backgroundImage = "url('./img/unopened.png')";
            td.style.backgroundSize = 'cover';
        }
    }
}
// Hide the rankings panel and drop the generated table so it can be rebuilt.
function getout(){
    var panel = document.getElementById("honor_mul");
    panel.style.display = "none";
    document.getElementById("table_rank").remove();
}
// modo single
// Build a fresh single-player game: reset the per-game counters, create the
// mine field `campo` (-1 = mine, otherwise neighbour count) and the `visited`
// matrix, then hand over to play() which renders the board.
// NOTE: `campo` and `visited` are intentionally created without `var` — they
// are globals shared with isBomb()/neighbours()/expandir().
function modosingle(){
    time=0;
    n_visited=0;
    campo = new Array();
    for(var i=0;i<cols;i++)
    {
        campo[i]=new Array();
        for(var j=0;j<rows;j++)
        {
            campo[i][j]=0;
        }
    }
    // Scatter the mines.
    // BUGFIX: the random index was scaled by (cols-1)/(rows-1), so a mine
    // could never land in the last column or row; scaling by cols/rows covers
    // the whole board.  (As before, two draws may hit the same cell, leaving
    // fewer than n_bombs mines on the board.)
    for(var i=0;i<n_bombs;i++)
    {
        var x=Math.floor(Math.random()*cols);
        var y=Math.floor(Math.random()*rows);
        campo[x][y]=-1;
    }
    // Precompute the neighbour count of every non-mine cell.
    for(var i=0;i<cols;i++)
    {
        for(var j=0;j<rows;j++)
        {
            if(!isBomb(i,j)) campo[i][j]=neighbours(i,j);
        }
    }
    // No cell has been uncovered yet.
    visited=new Array();
    for (var i=0;i<cols;i++)
    {
        visited[i]=new Array();
        for (var j=0;j<rows;j++)
        {
            visited[i][j]=false;
        }
    }
    play();
}
// Render the single-player board and wire up both mouse buttons:
// left click uncovers (with first-click mine relocation), right click cycles
// unopened -> flag -> question mark -> unopened.
function play(){
    document.getElementById("homedisplay").style.display ='none';
    document.getElementById("tab").style.display = 'block';
    var board=document.getElementById("tabela");
    // Build the grid; each cell carries its own flag/question-mark state.
    for(var i=0;i<cols;i++)
    {
        var row= board.insertRow();
        for(var j=0;j<rows;j++)
        {
            var cell = row.insertCell();
            cell.id=("c."+i+"."+j);
            cell.isFlagged=false;
            cell.isValid=true;          // false while flagged (blocks uncovering)
            cell.isQuestionMark=false;
            cell.style.backgroundImage="url('./img/unopened.png')";
            cell.style.backgroundSize='cover';
        }
    }
    playing();
    // Cycle the marker on cell (x,y): flag -> question mark -> clear.
    // Only allowed on cells that were not uncovered yet.
    function right_click(x,y)
    {
        var celula = document.getElementById("c."+x+"."+y);
        if(celula.isFlagged && !visited[x][y])
        {
            // Flag -> question mark (returns the flag to the counter).
            celula.isFlagged=false;
            mines_left++;
            celula.isQuestionMark=true;
            celula.isValid=true;
            document.getElementById("c."+(x)+"."+(y)).style.backgroundImage= "url('./img/question_mark.png')";
            document.getElementById("mybombs1").innerHTML=mines_left;
        }
        else if( !celula.isFlagged && !celula.isQuestionMark && !visited[x][y])
        {
            // Unmarked -> flag (consumes one from the counter).
            celula.isFlagged=true;
            celula.isValid=false;
            celula.isQuestionMark=false;
            document.getElementById("c."+(x)+"."+(y)).style.backgroundImage="url('./img/flag.png')";
            mines_left--;
            document.getElementById("mybombs1").innerHTML=mines_left;
        }
        else if(!celula.isFlagged && celula.isQuestionMark && !visited[x][y]){
            // Question mark -> back to plain unopened.
            document.getElementById("c."+(x)+"."+(y)).style.backgroundImage="url('./img/unopened.png')";
            celula.isQuestionMark=false;
            celula.isValid=true;
        }
        document.getElementById("bombas").innerHTML = mines_left;
        playing();
    }
    // (Re)attach the mouse handlers on the board.
    function playing()
    {
        board.oncontextmenu = function teste(e){
            var cellIndex = e.target.cellIndex;
            var rowIndexx = e.target.parentNode.rowIndex;
            right_click(rowIndexx, cellIndex);
        }
        board.onclick = function (e)
        {
            // NOTE(review): the names are swapped — `rowIndex` holds the cell
            // (column) index and `cellIndex` the row index; the lookups below
            // are consistent with that, but confirm against cell id layout.
            var rowIndex=e.target.cellIndex;
            var cellIndex= e.target.parentNode.rowIndex;
            var c=document.getElementById("c."+cellIndex+"."+rowIndex);
            console.log("coluna: "+cellIndex+" linha: "+rowIndex);
            if(primeira_jogada == 0 ){
                // First click of the game: start the clock and the counter.
                timer();
                document.getElementById("mymul_bomb_progress").style.display="block";
                document.getElementById("mybombs1").innerHTML=n_bombs;
            }
            // The first click must never detonate: relocate the mine.
            if(isBomb(cellIndex,rowIndex) && primeira_jogada==0)
            {
                moveBomb(cellIndex,rowIndex);
                refresh();
            }
            primeira_jogada++;
            refresh();
            if(c.isValid) expandir(cellIndex,rowIndex,true);
            if(win())
            {
                ganhou();
            }
        }
        // Recompute the neighbour counts of all non-mine cells.
        function refresh()
        {
            for(var i=0;i<cols;i++)
            {
                for(var j=0;j<rows;j++)
                {
                    if(!isBomb(i,j)) campo[i][j]=neighbours(i,j);
                }
            }
        }
    }
}//closes play
// First click must never hit a mine: relocate the bomb at (x,y) to the first
// empty cell (no bomb and neighbour count 0) found in scan order, then reset
// the clicked cell (the caller runs refresh() afterwards to fix all counts).
// BUGFIX: the inner loop condition was the bare expression "cols" (always
// truthy) instead of "j<cols", so the scan ran past the end of the row and
// crashed on campo[j] being undefined.
function moveBomb(x,y){
    var moved=false;
    for(var i=0;i<rows;i++)
    {
        for(var j=0;j<cols;j++)
        {
            if(!isBomb(j,i) && campo[j][i]==0)
            {
                campo[j][i]=-1;
                moved=true;
                break;
            }
        }
        if(moved==true)
        {
            campo[x][y]=neighbours(x,y);
            break;
        }
    }
}
// Render the locally stored (single-player) high-score table for the current
// level into the "honor_mul" panel, top 10 entries.
function honor()
{
    document.getElementById("honor_mul").style.display="block";
    var container = document.getElementById("honor_mul"); // table parent
    var table = document.createElement('table');
    table.setAttribute("class","test");
    table.setAttribute("id","table_rank");
    // Header row: Rank | Nome | Score.
    var tableRow = table.insertRow(0);
    var tableCell = tableRow.insertCell(0);
    tableCell.appendChild(document.createTextNode("Rank"));
    container.appendChild(table);
    tableCell = tableRow.insertCell(1);
    tableCell.appendChild(document.createTextNode("Nome"));
    container.appendChild(table);
    tableCell = tableRow.insertCell(2);
    tableCell.appendChild(document.createTextNode("Score"));
    container.appendChild(table);
    for( var i=0; i<10; i++){
    { // NOTE(review): this inner block is redundant (leftover braces).
        // Scores are persisted under the level name ("beginner" etc.).
        var d = JSON.parse(localStorage.getItem(""+level));
        if(i<d.length)
        {
            // NOTE(review): `teste` is never used and always reads "beginner".
            var teste = JSON.parse(localStorage.getItem("beginner"));
            var tableRow = table.insertRow();
            var tableCell = tableRow.insertCell(0);
            tableCell.appendChild(document.createTextNode(i+1));
            container.appendChild(table);
            var tableCell = tableRow.insertCell(1);
            tableCell.appendChild(document.createTextNode(d[i].name));
            container.appendChild(table);
            var tableCell = tableRow.insertCell(2);
            tableCell.appendChild(document.createTextNode(d[i].score));
        }
    }
    }
}
// Count the mines in the up-to-8 cells surrounding (x,y).
// A mine cell itself always reports 0 (its field value stays -1).
function neighbours(x,y){
    if (isBomb(x,y)) {
        return 0;
    }
    var count = 0;
    for (var a = x - 1; a <= x + 1; a++)
    {
        for (var b = y - 1; b <= y + 1; b++)
        {
            // Skip coordinates outside the board; (x,y) itself is never a
            // bomb here, so including it in the scan is harmless.
            if (a >= 0 && b >= 0 && a < cols && b < rows && isBomb(a,b)) {
                count++;
            }
        }
    }
    return count;
}
// True when field cell (x,y) holds a mine (mines are stored as -1).
function isBomb(x,y){
    return campo[x][y] == -1;
}
// Recursively uncover cells starting at (x,y) (classic flood fill).
// `clicked` is true only for the cell the user actually clicked: only that
// cell can detonate a mine.  Recursion only spreads from cells with 0
// neighbouring mines, so recursive calls never reach a bomb.
function expandir(x,y,clicked){
    var minas_around;
    if( valida(x,y)==false || visited[x][y]==true ) // off-board or already open
    {
        return; // recursion stop condition
    }
    if (clicked && isBomb(x,y))
    {
        // Direct hit: show the detonation, reveal the field, end the game.
        document.getElementById("c."+(x)+"."+(y)).style.backgroundImage="url('./img/clicked_bomb.png')";
        showAllBombs();
        lose();
        return;
    }
    visited[x][y]=true;
    n_visited++;
    minas_around = campo[x][y];
    if (minas_around > 0)
    {
        // Bordering mines: just show the number, do not expand further.
        document.getElementById("c."+(x)+"."+(y)).style.backgroundImage="url('./img/"+minas_around+".png')";
    }
    else
    {
        document.getElementById("c."+(x)+"."+(y)).style.backgroundImage="url('./img/opened.png')";
        // Zero neighbours: recurse into all 8 surrounding cells.
        for (var i = x-1; i <= x+1; ++i)
        {
            for (var j = y-1; j <= y+1; ++j)
            {
                // Skip the current position itself.
                if (i != x || j != y)
                {
                    expandir(i, j, false); // recursive call
                }
            }
        }
    }
}
// Bounds check: is (x,y) a coordinate inside the cols x rows field?
function valida(x,y)
{
    return x >= 0 && y >= 0 && x < cols && y < rows;
}
// Game lost: reveal every mine, freeze the board (detach both mouse
// handlers) and stop the clock.
function showAllBombs(){
    for (var i = 0; i < cols; i++)
    {
        for (var j = 0; j < rows; j++)
        {
            if (!isBomb(i,j)) continue;
            document.getElementById("c."+(i)+"."+(j)).style.backgroundImage = "url('./img/clicked_bomb.png')";
        }
    }
    var board = document.getElementById("tabela");
    board.onclick = null;
    board.oncontextmenu = null;
    console.log("Ups, perdeste tenta de novo para a próxima!");
    stop_timer();
}
// The game is won once every non-mine cell has been uncovered.
function win(){
    var target = (rows * cols) - n_bombs;
    var uncovered = 0;
    for (var i = 0; i < cols; i++)
    {
        for (var j = 0; j < rows; j++)
        {
            if (visited[i][j] == true && !isBomb(i,j))
            {
                uncovered++;
            }
        }
    }
    return uncovered == target;
}
// Remove every row from the board table, leaving it empty for the next game.
function clearTable(){
    var table = document.getElementById("tabela");
    while (table.rows.length > 0)
    {
        table.deleteRow(0);
    }
}
// Player cleared the board: stop the clock, freeze the table, record the
// elapsed time in the level's persisted high-score list (sorted ascending —
// lower time is better), show the rankings and clear the board.
// Refactor: the three identical per-level branches were collapsed into a
// single lookup table keyed by `level`; behaviour is unchanged.
function ganhou()
{
    stop_timer();
    document.getElementById("tabela").onclick=null;
    document.getElementById("tabela").oncontextmenu=null;
    // One score array per difficulty; localStorage keys match the level name.
    var scoresByLevel = {
        "beginner": easyscore,
        "intermediate": medscore,
        "expert": hardyscore
    };
    var scores = scoresByLevel[level];
    if (scores !== undefined)
    {
        scores.push({name:user, score:time});
        scores.sort(function(a, b)
        {
            return parseFloat(a.score) - parseFloat(b.score);
        });
        localStorage.setItem(level, JSON.stringify(scores));
        honor();
    }
    clearTable();
}
// Losing handler: just informs the player; the board itself is frozen by
// showAllBombs(), which calls this.
function lose(){
    alert("Ups perdeste o jogo, tenta para a proxima");
    //location.reload();
}
// Hide the multiplayer clock, show the canvas clock and start it (draw()).
function timer(){
    document.getElementById("mul_clock_progress").style.display="none";
    document.getElementById("mycanvas").style.display="block";
    draw();
}
var trigger=0; // interval handle for the clock, cleared by stop_timer()
// Animated clock on #mycanvas: every second it redraws the elapsed time as
// text plus an arc that grows with `time`, wrapping back to 0 at 100s.
// Side effect: increments the global `time` once per tick.
function draw() {
    var c=document.getElementById("mycanvas");
    var ctx=c.getContext("2d");
    time = 0;
    var start = 4.72;              // arc start angle (~12 o'clock, radians)
    var cw = ctx.canvas.width;
    var ch = ctx.canvas.height;
    var diff;
    function justdoit(){
        diff = ((time / 100) * Math.PI*2*10);
        ctx.clearRect(0, 0, cw, ch);
        ctx.lineWidth = 10;
        ctx.fillStyle = '#09F';
        ctx.strokeStyle = "#09F";
        ctx.textAlign = 'center';
        ctx.font="30px Arial";
        ctx.lineWidth = 15;
        ctx.fillText( time+'s', cw*.25, ch*.5, cw);
        ctx.beginPath();
        ctx.arc(75, 75, 60, start, diff/10+start, false);
        ctx.stroke();
        if(time >= 100){
            // Full circle reached: wipe the canvas and restart the count.
            ctx.clearRect(0, 0, cw, ch);
            time = 0;
        }
        time++;
    }
    trigger = setInterval(justdoit, 1000);
}
// Halt the 1-second clock interval started by draw().
function stop_timer(){
    console.log("vou parar");
    clearInterval(trigger);
}
"JavaScript",
"Markdown"
] | 4 | JavaScript | celio1/minesweeper | 481c184fb69f12c78f688b5b8fbf5a9789906cc8 | 9f2bff5c5cd69b98741167848c95c46ed4784236 |
refs/heads/master | <file_sep>package demorepo;
/**
 * Minimal demo entry point: prints a greeting to stdout.
 * NOTE(review): the lowercase class name violates Java's UpperCamelCase
 * convention, but renaming it would change the public interface (and the
 * required file name), so it is left as-is.
 */
public class demorepo
{
    public static void main(String[] args)
    {
        // Greeting text is observable behaviour; keep it as-is.
        System.out.println("welcome");
    }
}
| f32eaaeb70a5d7a8bb9ef8e347accc60c9f3cc72 | [
"Java"
] | 1 | Java | 1PRAVIN/Demorepo | edf4dbebb75b83be3107a9859e2505d7b7c30da8 | ff28c6000026f6d8ee6a65f2de5d80753f5b5e6f |
refs/heads/master | <file_sep>puts 'Cleaning database...'
# Idempotent seeding: wipe every restaurant, then recreate a fixed set.
Restaurant.destroy_all
puts 'Creating restaurants...'
# NOTE(review): several names below are '<NAME>' — an anonymisation
# placeholder left by a scrubbing tool; restore the real restaurant names.
restaurants_attributes = [
  {
    name: 'Fulgurances',
    address: '10 rue Alexandre Dumas, 75011 Paris',
    phone_number: '01 43 48 14 59',
    category: 'french'
  },
  {
    name: 'Septime',
    address: '80 rue de Charonne, 75011 Paris',
    phone_number: '01 43 67 38 29',
    category: 'french'
  },
  {
    name: '<NAME>',
    address: '107 Boulevard Richard Lenoir, 75011 Paris',
    phone_number: '01 58 30 62 78',
    category: 'italian'
  },
  {
    name: '<NAME>',
    address: '15 rue Léon Frot, 75011 Paris',
    phone_number: '01 43 48 14 59',
    category: 'chinese'
  },
  {
    name: '<NAME>',
    address: '20 rue de la paix, Ixelles',
    phone_number: '+32 2 511 11 21',
    category: 'belgian'
  },
  {
    name: 'Nakagawa',
    address: '120 rue Oberkampf, 75011 Paris',
    phone_number: '01 43 48 14 59',
    category: 'japanese'
  }
]
# create! raises on validation failure, so bad seed data aborts loudly.
Restaurant.create!(restaurants_attributes)
puts 'Finished!'
"Ruby"
] | 1 | Ruby | AlexisPipieri/rails-yelp-mvp | 792784842b8d0ebdc17f9844350295385c22c92e | 6192bce1f0c9979e04b46551daef6770c170ea3d |
refs/heads/master | <file_sep>$(document).ready(function(){
$(".nav-abt").click(function(){
$('html,body').animate({
scrollTop: $(".about").offset().top
},1000);
});
$(".nav-event").click(function(){
$('html,body').animate({
scrollTop: $(".events").offset().top
},1000);
});
$(".nav-team").click(function(){
$('html,body').animate({
scrollTop: $(".board").offset().top
},1000);
});
$(".nav-contact").click(function(){
$('html,body').animate({
scrollTop: $(".contact").offset().top
},1000);
});
$(window).scroll(function(){
var d= $(this).scrollTop();
if(d>580)
{
$(".navbar").css({'opacity':'1','transition':'0.3s ease','background-color':'#333'});
}
else{
$(".navbar").css({'transition':'0.3s ease','background-color':'transparent'});
}
});
var countDownDate = new Date("Apr 21, 2017 15:37:25").getTime();
var x = setInterval(function() {
var now = new Date().getTime();
var distance = countDownDate - now;
var days = Math.floor(distance / (1000 * 60 * 60 * 24));
var hours = Math.floor((distance % (1000 * 60 * 60 * 24)) / (1000 * 60 * 60));
var minutes = Math.floor((distance % (1000 * 60 * 60)) / (1000 * 60));
var seconds = Math.floor((distance % (1000 * 60)) / 1000);
document.getElementById("demo").innerHTML = days + "d " + hours + "h "
+ minutes + "m " + seconds + "s ";
// If the count down is finished, write some text
if (distance < 0) {
clearInterval(x);
document.getElementById("demo").innerHTML = "EXPIRED";
}
}, 1000);
}); | f6f6a661a89e99cc3d483cfaa1418c153606fe26 | [
"JavaScript"
] | 1 | JavaScript | aakashvarma/sheild | 76252402defc8687f74c9688e96a9bf10cd2b73a | f25e7c4d74fb7a48ddd8da6fbc3493f1e2571d84 |
refs/heads/master | <file_sep>//
// main.c
// test
//
// Created by Masakiyo on 2017/08/04.
// Copyright © 2017年 Masakiyo. All rights reserved.
//
#include <stdio.h>
#include <stdlib.h>
#include<math.h>
#include<time.h>
#define XMIN 5 //x定義域
#define XMAX 15
#define A 0.01 //微小区間
#define K 1e-3 //定数K
#define E 1e-9 //eps
double func(double x){
double y;
y = 20 - pow(x - 10, 2);
return y;
}
/* Gradient ascent on func(): start from a random integer x in (XMIN, XMAX],
 * climb with step K until the update falls below E, logging every 50th
 * iterate to data1.txt for plotting.
 * BUGFIXES: fopen() was unchecked, the stream was never closed, and main()
 * had no return value. */
int main(int argc, const char * argv[]) {
    unsigned int i = 0;
    double dx, before_x, x;
    FILE *fp;
    fp = fopen("data1.txt","w");
    if (fp == NULL) {                 /* fail loudly instead of crashing in fprintf */
        perror("data1.txt");
        return 1;
    }
    srand((unsigned int)time(NULL));
    x = (rand() % (XMAX-XMIN)) + XMIN + 1;   /* random start in (XMIN, XMAX] */
    printf("初期x:\t%f\n", x);
    while(1){
        if(i%50 == 0) fprintf(fp, "%lf %lf\n", x, func(x));
        /* central-difference estimate of f'(x) over +/-A */
        dx = (func(x + A) - func(x - A)) / (2 * A);
        before_x = x;
        x = x + K * dx;               /* ascend along the gradient */
        if(fabs(x - before_x) <= E)
            break;
        else
            i++;
    }
    fclose(fp);                       /* flush the log before reporting */
    printf("x:\t%f\n", before_x);
    printf("y:\t%f\n", func(before_x));
    printf("探索回数:\t%i\n", i);
    return 0;
}
<file_sep>//
// main.c
// test
//
// Created by Masakiyo on 2017/08/04.
// Copyright © 2017年 Masakiyo. All rights reserved.
//
#include <stdio.h>
#include <stdlib.h>
#include<math.h>
#include<time.h>
#define XMIN -10 //x定義域
#define XMAX 10
#define YMIN -10
#define YMAX 10
#define A 1e-2 //微小区間
#define K 1e-4 //定数
#define E 1e-8 //eps
double func(double x, double y){
double z;
z = exp(-pow(x-3, 2)/30) * exp(-pow(y-4, 2)/30);
return z;
}
/* 2-D gradient ascent on func(): random integer start in the X/Y domain,
 * climb with step K until the step length falls below E, logging every
 * 10000th iterate for plotting.
 * BUGFIXES: fopen() was unchecked and the stream was never closed.
 * NOTE(review): the output path is a hardcoded absolute user path — consider
 * making it relative or configurable. */
int main(int argc, const char * argv[]) {
    unsigned int i = 0;
    double dx, before_x, x;
    double dy, before_y, y;
    FILE *fp;
    fp = fopen("/Users/shimoda/Desktop/test/test/data3.txt", "w");
    if (fp == NULL) {                 /* fail loudly instead of crashing in fprintf */
        perror("data3.txt");
        return 1;
    }
    srand((unsigned int)time(NULL));
    x = (rand() % abs(XMAX-XMIN)) + XMIN + 1;  /* random start position */
    y = (rand() % abs(YMAX-YMIN)) + YMIN + 1;
    printf("初期x:\t%f\n", x);
    printf("初期y:\t%f\n", y);
    while(1){
        if(i%10000==0)fprintf(fp, "%f\t%f\t%f\n", x, y, func(x, y));
        /* central-difference estimates of the partial derivatives */
        dx = (func(x + A, y) - func(x - A, y)) / (2 * A);
        dy = (func(x, y + A) - func(x, y - A)) / (2 * A);
        before_x = x;
        before_y = y;
        x = x + K * dx;
        y = y + K * dy;
        /* stop once the Euclidean step length is below the tolerance */
        if(sqrt(pow(x - before_x, 2) + pow(y - before_y, 2)) <= E){
            break;
        }
        else{
            i++;
        }
    }
    fclose(fp);                       /* flush the log before reporting */
    printf("最適解x:\t%f\n", before_x);
    printf("最適解y:\t%f\n", before_y);
    printf("最適解f(x,y):\t%f\n", func(before_x, before_y));
    printf("探索回数:\t%d\n", i);
    return 0;
}
| baafbdbc33343f6b509feff29d6b203780d6df3c | [
"C"
] | 2 | C | MasakiyoShimoda/Test | 8c657da3c5f5b6bba52fc2e394edd03c51eee5c4 | 4194bf620fded6fddcff7c5916ed9f273673a24a |
refs/heads/master | <repo_name>linuxmuster/linuxmuster-migration<file_sep>/sbin/linuxmuster-migration-backup
#!/bin/bash
#
# <EMAIL>
# 25.01.2014
# GPL v3
#
# Collects the linuxmuster migration data set into a target directory.
# All console output is mirrored into the log file $MIGBACKLOG via tee.
#
# read paedml specific environment
. /usr/share/linuxmuster/config/dist.conf || exit 1
. $HELPERFUNCTIONS || exit 1
. $DATADIR/migration/defaults.conf || exit 1

# print command line help and abort
usage(){
    echo
    echo "Usage: `basename $0` <options>"
    echo
    echo "Options:"
    echo
    echo " -c <config dir> Path to config directory (optional)."
    echo " Default is /etc/linuxmuster/migration."
    echo " -d <target dir> Path to target directory (must exist, mandatory)."
    echo " -f Forced start, skipping free space check."
    echo " -h Show this help."
    exit 1
}

# parse commandline arguments
while getopts ":c:d:fh" opt; do
    case $opt in
        c) ALTCONFDIR=$OPTARG ;;
        d) TARGETDIR=$OPTARG ;;
        f) FORCE=yes ;;
        h) usage ;;
        \?) echo "Invalid option: -$OPTARG" >&2
            usage ;;
        :) echo "Option -$OPTARG requires an argument." >&2
            usage ;;
    esac
done

# check args: target dir is mandatory and must exist
[ -z "$TARGETDIR" ] && usage
[ -d "$TARGETDIR" ] || usage
# an alternative config dir overrides the default exclude/include lists
if [ -n "$ALTCONFDIR" ]; then
    [ -d "$ALTCONFDIR" ] || usage
    MIGCONFDIR="$ALTCONFDIR"
    MIGEXUSER="$MIGCONFDIR/exclude.conf"
    MIGINUSER="$MIGCONFDIR/include.conf"
fi
cd "$TARGETDIR" || usage
[ -z "$FORCE" ] && FORCE="no"

# delete old log file
rm -f "$MIGBACKLOG"

# print script header info
echo
echo "####" | tee -a "$MIGBACKLOG"
echo "#### Starting backup of migration data" | tee -a "$MIGBACKLOG"
echo "#### Targetdir : $TARGETDIR" | tee -a "$MIGBACKLOG"
echo "#### Configdir : $MIGCONFDIR" | tee -a "$MIGBACKLOG"
echo "#### Skip check: $FORCE" | tee -a "$MIGBACKLOG"
echo "#### `date`" | tee -a "$MIGBACKLOG"
echo "####" | tee -a "$MIGBACKLOG"

# concenate system and user config files
concenate_configs || { error " Failed!" | tee -a "$MIGBACKLOG" && exit 1 ; }

# source backup script (does the actual work), logging everything
. "$MIGDATADIR/linuxmuster-migration-backup.sh" 2>&1 | tee -a "$MIGBACKLOG"
cp -f "$MIGBACKLOG" "$TARGETDIR"

# clean up temporary filter files
rm -f "$EXCONFTMP"
rm -f "$INCONFTMP"
rm -f "$INCONFILTERED"
<file_sep>/share/linuxmuster-migration-restore.sh
#
# <EMAIL>
# 20161023
# GPL v3
#
################################################################################
# are all necessary files present?
echo
echo "####"
echo "#### Checking for essential restore files"
echo "####"
for i in "$BACKUPFOLDER" "$BASEDATAFILE" "$LDIF" "$FWTYPE" "$FWARCHIVE" "$ISSUE" "$PGSQLMETA" "$MYSQLMETA" "$SELECTIONS" "$QUOTAPARTS"; do
if [ -e "$i" ]; then
echo " * `basename $i` ... OK!"
else
if [ "$i" = "$FWARCHIVE" -a "$FWTYPE" = "custom" ]; then
echo " * `basename $i` ... skipped!"
else
error " * `basename $i` does not exist!"
fi
fi
done
# get firewall from source system
SOURCEFW="$(cat $FWTYPE)"
################################################################################
# if custom.conf is used, check for modified setup values
if [ -s "$MIGCONFDIR/custom.conf" ]; then
echo
echo "####"
echo "#### Verifying config values defined in custom.conf"
echo "####"
echo -n " * INTERNSUBRANGE: "
if [ -n "$INTERNSUBRANGE" ]; then
if stringinstring "$INTERNSUBRANGE" "$SUBRANGES"; then
echo "$INTERNSUBRANGE"
touch "$CUSTOMFLAG"
else
error "$INTERNSUBRANGE is not a valid value!"
fi
else
echo "not set!"
fi
echo -n " * SCHOOLNAME: "
if [ -n "$SCHOOLNAME" ]; then
if check_string "$SCHOOLNAME"; then
echo "$SCHOOLNAME"
touch "$CUSTOMFLAG"
else
error "$SCHOOLNAME contains illegal characters!"
fi
else
echo "not set!"
fi
echo -n " * LOCATION: "
if [ -n "$LOCATION" ]; then
if check_string "$LOCATION"; then
echo "$LOCATION"
touch "$CUSTOMFLAG"
else
error "$LOCATION contains illegal characters!"
fi
else
echo "not set!"
fi
echo -n " * STATE: "
if [ -n "$STATE" ]; then
if check_string "$STATE"; then
echo "$STATE"
touch "$CUSTOMFLAG"
else
error "$STATE contains illegal characters!"
fi
else
echo "not set!"
fi
echo -n " * COUNTRY: "
if [ -n "$COUNTRY" ]; then
if (expr match "$COUNTRY" '\([ABCDEFGHIJKLMNOPQRSTUVWXYZ][ABCDEFGHIJKLMNOPQRSTUVWXYZ]\)'); then
echo "$COUNTRY"
touch "$CUSTOMFLAG"
else
error "$COUNTRY contains illegal characters!"
fi
else
echo "not set!"
fi
echo -n " * WORKGROUP: "
if [ -n "$WORKGROUP" ]; then
if check_string "$WORKGROUP"; then
echo "$WORKGROUP"
touch "$CUSTOMFLAG"
else
error "$WORKGROUP contains illegal characters!"
fi
else
echo "not set!"
fi
echo -n " * SERVERNAME: "
if [ -n "$SERVERNAME" ]; then
if validhostname "$SERVERNAME"; then
echo "$SERVERNAME"
touch "$CUSTOMFLAG"
else
error "$SERVERNAME is no valid hostname!"
fi
else
echo "not set!"
fi
echo -n " * DOMAINNAME: "
if [ -n "$DOMAINNAME" ]; then
if validdomain "$DOMAINNAME"; then
echo "$DOMAINNAME"
touch "$CUSTOMFLAG"
else
error "$DOMAINNAME is no valid domainname!"
fi
else
echo "not set!"
fi
echo -n " * FWCONFIG: "
if [ -n "$FWCONFIG" ]; then
case "$FWCONFIG" in
ipcop|ipfire|custom)
echo "$FWCONFIG"
touch "$CUSTOMFLAG"
;;
*) "$FWCONFIG is no valid firewall!" ;;
esac
else
echo "not set!"
fi
fi
# get target firewall type
if [ -n "$FWCONFIG" ]; then
TARGETFW="$FWCONFIG"
else
TARGETFW="$CURRENTFW"
fi
################################################################################
# version check
echo
echo "####"
echo "#### Checking for supported versions"
echo "####"
# get source version
if grep -q linuxmuster.net "$ISSUE"; then
OLDVERSION="$(awk '{ print $2 }' "$ISSUE")"
else
OLDVERSION="$(awk '{ print $3 }' "$ISSUE")"
fi
echo " * Target version: $DISTFULLVERSION"
echo " * Source version: $OLDVERSION"
# test if target version is supported
match=false
for i in $RESTOREVERSIONS; do
if stringinstring "$i" "$DISTFULLVERSION"; then
match=true
break
fi
done
[ "$match" = "true" ] || error "Sorry, target version $DISTFULLVERSION is not supported."
# test if source version is supported
match=false
for i in $BACKUPVERSIONS; do
if stringinstring "$i" "$OLDVERSION"; then
match=true
break
fi
done
[ "$match" = "true" ] || error "Sorry, source version $OLDVERSION is not supported."
# test if target version is newer
[ "$OLDVERSION" \< "$DISTFULLVERSION" -o "$OLDVERSION" = "$DISTFULLVERSION" ] || error "Sorry, source version is newer than target."
# get postgresql version from target system
if [ "$MAINVERSION" = "6" ]; then
PGOLD="8.3"
PGNEW="9.1"
else
PGOLD="8.1"
PGNEW="8.3"
fi
################################################################################
# firewall and opsi passwords
echo
echo "####"
echo "#### Passwords"
echo "####"
if [ -n "$ipcoppw" ]; then
echo "Firewall password was already set on commandline."
elif [ "$TARGETFW" = "ipfire" ]; then
while true; do
stty -echo
read -p "Please enter $TARGETFW's root password: " ipcoppw; echo
stty echo
stty -echo
read -p "Please re-enter $TARGETFW's root password: " ipcoppwre; echo
stty echo
[ "$ipcoppw" = "$ipcoppwre" ] && break
echo "Passwords do not match!"
sleep 2
done
fi
if [ "$TARGETFW" = "ipfire" ]; then
echo -n " * saving firewall password ..."
# saves firewall password in debconf database
if RET=`echo set linuxmuster-base/ipcoppw "$ipcoppw" | debconf-communicate`; then
echo " OK!"
else
error " Failed!"
fi
fi
if [ -n "$opsiip" ]; then
if [ -n "$opsipw" ]; then
echo "opsi password was already set on commandline."
else
while true; do
stty -echo
read -p "Please enter opsi's root password: " opsipw; echo
stty echo
stty -echo
read -p "Please re-enter opsi's root password: " opsipwre; echo
stty echo
[ "$opsipw" = "$opsipwre" ] && break
echo "Passwords do not match!"
sleep 2
done
fi
echo -n " * saving opsi password ..."
# saves firewall password in debconf database
if RET=`echo set linuxmuster-base/opsipw "$opsipw" | debconf-communicate`; then
echo " OK!"
else
error " Failed!"
fi
fi
# saves dummy password
echo -n " * saving dummy password for admins ..."
RC=0
for i in adminpw pgmadminpw wwwadminpw; do
RET=`echo set linuxmuster-base/$i "muster" | debconf-communicate` || RC=1
done
if [ "$RC" = "0" ]; then
echo " OK!"
else
error " Failed!"
fi
################################################################################
# install optional linuxmuster packages
echo
echo "####"
echo "#### Installing optional linuxmuster packages"
echo "####"
# put apt into unattended mode: suppress all debconf prompts and warnings so
# the migration can run without operator interaction
export DEBIAN_FRONTEND=noninteractive
export DEBIAN_PRIORITY=critical
export DEBCONF_TERSE=yes
export DEBCONF_NOWARNINGS=yes
# tweak apt to be noninteractive
# Write an apt configuration snippet ($APTCONFTWEAK) that forces fully
# unattended package operations: dpkg keeps/merges old conffiles without
# prompting, unauthenticated packages are allowed, and recommends/suggests
# are not pulled in. Overwrites any previous snippet.
write_aptconftweak(){
 cat > "$APTCONFTWEAK" <<'EOF'
DPkg::Options {"--force-configure-any";"--force-confmiss";"--force-confold";"--force-confdef";"--force-bad-verify";"--force-overwrite";};
APT::Get::AllowUnauthenticated "true";
APT::Install-Recommends "0";
APT::Install-Suggests "0";
EOF
}
write_aptconftweak
# first do an upgrade
apt-get update || exit 1
# if the dist-upgrade fails, retry with -f to fix broken dependencies
apt-get -y dist-upgrade || apt-get -fy dist-upgrade
# get packages for reinstalling later (linbo is reinstalled near the end of
# the migration because the file restore may overwrite its files)
for i in linuxmuster-linbo linuxmuster-linbo-common tftpd-hpa; do
 ( cd /var/cache/apt/archives && apt-get download "$i" )
done
# remove deinstalled packages from list
SELTMP="/tmp/selections.$$"
grep -v deinstall "$SELECTIONS" > "$SELTMP"
# add kde and other obsolete packages to filter variable for linuxmuster.net 6
if [ "$MAINVERSION" = "6" ]; then
 grep -q ^kde "$SELECTIONS" && PKGFILTER="k sysv ttf x $PKGFILTER"
fi
# remove obsolete packages from selections
# NOTE: each filter entry is a name *prefix* (e.g. "k" drops every package
# starting with k), so the filter list must be chosen carefully
for i in $PKGFILTER; do
 sed "/^$i/d" -i "$SELTMP"
done
# rename various packages, whose names have changed
sed -e 's|^linuxmuster-pykota|linuxmuster-pk|
s|nagios2|nagios3|g' -i "$SELTMP"
# remove obsolete firewall packages from list
sed '/ipcop/d' -i "$SELTMP"
[ "$CURRENTFW" = "custom" ] && sed '/ipfire/d' -i "$SELTMP"
# purge nagios stuff if migrating from prior versions to versions >= 6
if [ "$MAINVERSION" -ge "6" -a "$OLDVERSION" \< "6" ]; then
 apt-get -y purge `dpkg -l | grep nagios | awk '{ print $2 }'`
 rm -rf /etc/nagios*
fi
# now install optional linuxmuster packages
apt-get -y install `grep ^linuxmuster $SELTMP | awk '{ print $1 }'`
# remove linuxmuster pkgs from selections
sed "/^linuxmuster/d" -i "$SELTMP"
################################################################################
# restore setup data
# Re-seeds the target's debconf database with the base setup values saved
# from the source system ($BASEDATAFILE), but only for keys that already
# exist on the target (checked against a fresh debconf-show dump).
echo
echo "####"
echo "#### Restoring setup data"
echo "####"
# restore setup values
touch "$SOURCEDIR/debconf.cur" || exit 1
debconf-show linuxmuster-base > "$SOURCEDIR/debconf.cur"
for i in $BASEDATA; do
 # NOTE(review): existence is checked in the *target* dump (debconf.cur)
 # while the value comes from the *source* dump ($BASEDATAFILE) — looks
 # intentional (skip keys unknown to the target), verify on change
 if grep -q "linuxmuster-base/$i" "$SOURCEDIR/debconf.cur"; then
  # strip debconf-show's leading "*" marker before taking the value column
  v="$(grep "linuxmuster-base/$i" "$BASEDATAFILE" | sed -e 's|\*||' | awk '{ print $2 }')"
  echo -n " * $i = $v ... "
  echo set linuxmuster-base/$i "$v" | debconf-communicate
 else
  echo -n " * $i = not on target system ... "
 fi
done
# firewall: override the restored fwconfig with the target firewall type
if [ "$TARGETFW" != "$fwconfig" ]; then
 echo -n " * fwconfig = $TARGETFW ... "
 echo set linuxmuster-base/fwconfig "$TARGETFW" | debconf-communicate
 sed -e "s|^fwconfig.*|fwconfig=\"$TARGETFW\"|" -i $NETWORKSETTINGS
 fwconfig="$TARGETFW"
fi
################################################################################
# modify firewall ip if net address has changed
internsubrange="$(echo get linuxmuster-base/internsubrange | debconf-communicate | awk '{ print $2 }')"
# only for ipfire|ipcop
if [ "$CURRENTFW" = "ipfire" -a "$internsubrange_old" != "$internsubrange" ]; then
 echo
 echo "####"
 echo "#### Changing $CURRENTFW ip"
 echo "####"
 # subrange is of the form "<sub>-<sub>"; the first field selects the
 # second octet of the firewall's 10.x.1.254 address
 internsub=`echo $internsubrange | cut -f1 -d"-"`
 internsub_old=`echo $internsubrange_old | cut -f1 -d"-"`
 echo -n " * changing from 10.$internsub_old.1.254 to 10.$internsub.1.254 ..."
 if exec_ipcop /var/linuxmuster/patch-ips.sh $internsub_old $internsub; then
  echo " OK!"
 else
  error " Failed!"
 fi
 # the host key changes with the ip, so drop the old authorized_keys and
 # reboot the firewall to apply the new addressing
 echo -n " * removing old authorized_keys file and reboot ..."
 if exec_ipcop "/bin/rm -f /root/.ssh/authorized_keys && /sbin/reboot"; then
  echo " OK!"
 else
  error " Failed!"
 fi
 # fixed grace period for the firewall reboot; no active polling here
 echo -n " * waiting 120 seconds ... "
 sleep 120
 echo "OK!"
fi
################################################################################
# restore samba sid
# Rewrites the domain/local SID saved from the source system so existing
# workstation/domain memberships stay valid after migration.
echo
echo "####"
echo "#### Restoring Samba SID"
echo "####"
rm -f /var/lib/samba/secrets.tdb
sambasid="$(echo get linuxmuster-base/sambasid | debconf-communicate | awk '{ print $2 }')"
net setlocalsid "$sambasid"
net setdomainsid "$sambasid"
smbpasswd -w `cat /etc/ldap.secret`
################################################################################
# linuxmuster-setup
echo
echo "####"
echo "#### Linuxmuster-Setup (first)"
echo "####"
$SCRIPTSDIR/linuxmuster-patch --first
# refresh environment (helperfunctions may have been updated by the patch run)
. $HELPERFUNCTIONS
################################################################################
# restore postgresql databases
# Restores roles/metadata first, then each *.pgsql dump from the backup's
# working directory (cwd is assumed to contain the dump files).
echo
echo "####"
echo "#### Restoring postgresql databases"
echo "####"
# first restore metadata
echo -n " * metadata ..."
if psql -U postgres template1 < "$PGSQLMETA" &> "$PGSQLMETA.log"; then
 echo " OK!"
else
 error " Failed! See $PGSQLMETA.log for details!"
fi
# sets servername in ldap db: rewrites the host part of UNC paths
# (\\oldserver\...) inside the dump to the new server name
cp ldap.pgsql ldap.pgsql.bak
sed -e 's|\\\\\\\\.*\\\\|\\\\\\\\'"$servername"'\\\\|g' -i ldap.pgsql
# iterate over pgsql files
for dbfile in *.pgsql; do
 dbname="$(echo $dbfile | sed -e 's|\.pgsql||')"
 dblog="$dbname.log"
 echo -n " * $dbname ..."
 # drop an existent database (the \q connect probe tests for existence)
 psql -U postgres -c "\q" $dbname &> /dev/null && dropdb -U postgres $dbname
 # define db user
 case $dbname in
  pykota)
   dbuser=pykotaadmin
   ;;
  *)
   # if a user with same name as db is defined use db name as user name
   if grep -q "ALTER ROLE $dbname " "$PGSQLMETA"; then
    dbuser=$dbname
   else
    # in the other case use postgres as dbuser
    dbuser=postgres
   fi
   ;;
 esac
 echo -n " with user $dbuser ..."
 # create empty db
 createdb -U postgres -O $dbuser $dbname &> $dblog || error " Failed! See $dblog for details!"
 # dump database back
 if psql -U postgres $dbname < $dbfile 2>> $dblog 1>> $dblog; then
  echo " OK!"
 else
  error " Failed! See $dblog for details!"
 fi
done
################################################################################
# restore mysql databases
# function for upgrading horde databases, called if source system was 4.0.6
# Applies the packaged 3.1->3.2 (and app 2.1->2.2) SQL upgrade scripts to the
# restored "horde" database; missing script files are silently skipped.
upgrade40_horde() {
 echo "Upgrading horde3 database ..."
 HORDEUPGRADE=/usr/share/doc/horde3/examples/scripts/upgrades/3.1_to_3.2.mysql.sql
 KRONOUPGRADE=/usr/share/doc/kronolith2/examples/scripts/upgrades/2.1_to_2.2.sql
 MNEMOUPGRADE=/usr/share/doc/mnemo2/examples/scripts/upgrades/2.1_to_2.2.sql
 NAGUPGRADE=/usr/share/doc/nag2/examples/scripts/upgrades/2.1_to_2.2.sql
 TURBAUPGRADE=/usr/share/doc/turba2/examples/scripts/upgrades/2.1_to_2.2_add_sql_share_tables.sql
 for i in $HORDEUPGRADE $KRONOUPGRADE $MNEMOUPGRADE $NAGUPGRADE $TURBAUPGRADE; do
  # 5th path component = application directory name, used for progress output
  t="$(echo $i | awk -F\/ '{ print $5 }')"
  if [ -s "$i" ]; then
   echo " * $t ..."
   mysql horde < $i
  fi
 done
 # create missing columns (#477); errors (column already exists) are ignored
 echo 'ALTER TABLE nag_tasks ADD task_creator VARCHAR(255)' | mysql -D horde &> /dev/null
 echo 'ALTER TABLE nag_tasks ADD task_assignee VARCHAR(255)' | mysql -D horde &> /dev/null
 echo 'ALTER TABLE kronolith_events ADD COLUMN event_recurcount INT' | mysql -D horde &> /dev/null
 echo 'ALTER TABLE kronolith_events ADD COLUMN event_private INT DEFAULT 0 NOT NULL' | mysql -D horde &> /dev/null
 echo
}
# function for upgrading horde databases, called if source system was 5.x.x
# Same pattern as upgrade40_horde, for the 2.2->2.3 application schemas.
upgrade5_horde() {
 echo "Upgrading horde3 database ..."
 KRONOUPGRADE=/usr/share/doc/kronolith2/examples/scripts/upgrades/2.2_to_2.3.sql
 MNEMOUPGRADE1=/usr/share/doc/mnemo2/examples/scripts/upgrades/2.2_to_2.2.1.sql
 MNEMOUPGRADE2=/usr/share/doc/mnemo2/examples/scripts/upgrades/2.2.1_to_2.2.2.sql
 NAGUPGRADE=/usr/share/doc/nag2/examples/scripts/upgrades/2.2_to_2.3.sql
 TURBAUPGRADE=/usr/share/doc/turba2/examples/scripts/upgrades/2.2.1_to_2.3.sql
 for i in $KRONOUPGRADE $MNEMOUPGRADE1 $MNEMOUPGRADE2 $NAGUPGRADE $TURBAUPGRADE; do
  t="$(echo $i | awk -F\/ '{ print $5 }')"
  if [ -s "$i" ]; then
   echo " * $t ..."
   mysql horde < $i
  fi
 done
}
echo
echo "####"
echo "#### Restoring mysql databases"
echo "####"
# restore grants/users first; on failure retry with the source system's
# credentials (restored .my.cnf) before giving up
echo -n " * metadata ..."
if mysql mysql < "$MYSQLMETA" &> "$MYSQLMETA.log"; then
 echo " OK!"
else # try again with old credentials
 cp -a filesystem/root/.my.cnf /root
 if mysql mysql < "$MYSQLMETA" &> "$MYSQLMETA.log"; then
  echo " OK!"
 else
  error " Error! See $MYSQLMETA.log for details!"
 fi
fi
# restore each *.mysql dump from the backup's working directory
for dbfile in *.mysql; do
 dbname="$(echo $dbfile | sed -e 's|\.mysql||')"
 dblog="$dbname.log"
 echo -n " * $dbname ..."
 # drop an existing database
 mysqlshow | grep -q " $dbname " && mysqladmin -f drop $dbname &> "$dblog"
 # create an empty one
 mysqladmin create $dbname 2>> "$dblog" 1>> "$dblog" || error " Failed! See $dblog for details!"
 # dump db back
 if mysql $dbname < $dbfile 2>> "$dblog" 1>> "$dblog"; then
  echo " OK!"
 else
  error " Failed! See $dblog for details!"
 fi
done
# 4.0 upgrade: horde db update (chained with the 5.x upgrade when the
# target is version 6)
if [ "${OLDVERSION:0:3}" = "4.0" ]; then
 upgrade40_horde
 [ "$MAINVERSION" = "6" ] && upgrade5_horde
fi
# 5.0 upgrade: horde db update
[ "${OLDVERSION:0:1}" = "5" -a "$MAINVERSION" = "6" ] && upgrade5_horde
################################################################################
# install additional packages which were installed on source system
# and essential pkgs from tasks
echo
echo "####"
echo "#### Installing additional and mandatory packages"
echo "####"
# be sure all essential packages are installed
linuxmuster-task --unattended --install=common
linuxmuster-task --unattended --install=server
# imaging task is in 6.x.x obsolete
if [ "$MAINVERSION" != "6" ]; then
 imaging="$(echo get linuxmuster-base/imaging | debconf-communicate | awk '{ print $2 }')"
 linuxmuster-task --unattended --install=imaging-$imaging
fi
# write it again because it was deleted by linuxmuster-setup
write_aptconftweak
# install additional packages from list
apt-get -y install `awk '{ print $1 }' $SELTMP`
rm "$SELTMP"
################################################################################
# restore filesystem
# upgrade configuration
# Regenerates configuration files from the new version's templates while
# carrying over values (passwords, SIDs, names) read from the restored
# configs. Every touched file is preserved as *.migration first.
upgrade_configs() {
 echo
 echo "####"
 echo "#### Upgrading $OLDVERSION configuration"
 echo "####"
 ### common stuff - begin ###
 # slapd: rebuild slapd.conf from the dynamic template, re-using the rootpw
 # extracted from the restored config
 echo " * slapd ..."
 CONF=/etc/ldap/slapd.conf
 LDAPDYNTPLDIR=$DYNTPLDIR/15_ldap
 cp $CONF $CONF.migration
 ldapadminpw=`grep ^rootpw $CONF | awk '{ print $2 }'`
 sed -e "s/@@message1@@/${message1}/
        s/@@message2@@/${message2}/
        s/@@message3@@/${message3}/
        s/@@basedn@@/${basedn}/g
        s/@@ipcopip@@/${ipcopip}/g
        s/@@serverip@@/${serverip}/g
        s/@@ldappassword@@/${ldapadminpw}/" $LDAPDYNTPLDIR/`basename $CONF` > $CONF
 chown root:openldap /etc/ldap/slapd.conf*
 chmod 640 /etc/ldap/slapd.conf*
 chown openldap:openldap /var/lib/ldap -R
 chmod 700 /var/lib/ldap
 chmod 600 /var/lib/ldap/*
 # smbldap-tools: regenerate both configs from templates
 echo " * smbldap-tools ..."
 CONF=/etc/smbldap-tools/smbldap.conf
 cp $CONF $CONF.migration
 sed -e "s/@@sambasid@@/${sambasid}/
        s/@@workgroup@@/${workgroup}/
        s/@@basedn@@/${basedn}/" $LDAPDYNTPLDIR/`basename $CONF` > $CONF
 CONF=/etc/smbldap-tools/smbldap_bind.conf
 cp $CONF $CONF.migration
 sed -e "s/@@message1@@/${message1}/
        s/@@message2@@/${message2}/
        s/@@message3@@/${message3}/
        s/@@basedn@@/${basedn}/g
        s/@@ldappassword@@/${ldapadminpw}/g" $LDAPDYNTPLDIR/`basename $CONF` > $CONF
 chmod 600 ${CONF}*
 # horde 3
 echo " * horde3 ..."
 servername="$(hostname | awk -F\. '{ print $1 }')"
 domainname="$(dnsdomainname)"
 HORDDYNTPLDIR=$DYNTPLDIR/21_horde3
 # horde (static)
 CONF=/etc/horde/horde3/registry.php
 cp $CONF $CONF.migration
 cp $STATICTPLDIR/$CONF $CONF
 CONF=/etc/horde/horde3/conf.php
 cp $CONF $CONF.migration
 # extract the current sql password (6th '-separated field of the line)
 hordepw="$(grep "^\$conf\['sql'\]\['password'\]" $CONF | awk -F\' '{ print $6 }')"
 # NOTE(review): "$<PASSWORD>" below looks like a redaction placeholder in
 # this copy of the script — original presumably inserts $hordepw; verify
 # against upstream before running
 sed -e "s/\$conf\['auth'\]\['admins'\] =.*/\$conf\['auth'\]\['admins'\] = array\('$WWWADMIN'\);/
        s/\$conf\['problems'\]\['email'\] =.*/\$conf\['problems'\]\['email'\] = '$WWWADMIN@$domainname';/
        s/\$conf\['mailer'\]\['params'\]\['localhost'\] =.*/\$conf\['mailer'\]\['params'\]\['localhost'\] = '$servername.$domainname';/
        s/\$conf\['problems'\]\['maildomain'\] =.*/\$conf\['problems'\]\['maildomain'\] = '$domainname';/
        s/\$conf\['sql'\]\['password'\] =.*/\$conf\['sql'\]\['password'\] = '$<PASSWORD>';/" $STATICTPLDIR/$CONF > $CONF
 # kronolith (static)
 CONF=/etc/horde/kronolith2/conf.php
 cp $CONF $CONF.migration
 sed -e "s/\$conf\['storage'\]\['default_domain'\] =.*/\$conf\['storage'\]\['default_domain'\] = '$domainname';/
        s/\$conf\['reminder'\]\['server_name'\] =.*/\$conf\['reminder'\]\['server_name'\] = '$servername.$domainname';/
        s/\$conf\['reminder'\]\['from_addr'\] =.*/\$conf\['reminder'\]\['from_addr'\] = '$WWWADMIN@$domainname';/" $STATICTPLDIR/$CONF > $CONF
 # imp, turba (dynamic templates); the *.target file names the config the
 # template is rendered into
 for i in imp4.servers.php turba2.sources.php; do
  TPL="$HORDDYNTPLDIR/$i"
  if [ -e "$TPL" ]; then
   CONF="$(cat $TPL.target)"
   cp "$CONF" "$CONF.migration"
   sed -e "s/'@@servername@@.@@domainname@@'/'$servername.$domainname'/g
          s/'@@domainname@@'/'$domainname'/g
          s/'@@schoolname@@'/'$schoolname'/g
          s/'@@basedn@@'/'$basedn'/g
          s/'@@cyradmpw@@'/'$cyradmpw'/" "$TPL" > "$CONF"
  fi
 done
 # ingo, mnemo, nag, turba, gollem (static templates)
 for i in ingo1/conf.php mnemo2/conf.php nag2/conf.php turba2/conf.php gollem/prefs.php; do
  CONF="/etc/horde/$i"
  TPL="${STATICTPLDIR}${CONF}"
  if [ -e "$TPL" ]; then
   cp "$CONF" "$CONF.migration"
   cp "$TPL" "$CONF"
  fi
 done
 # fixing backup.conf: rename services/paths that changed between versions
 echo " * backup ..."
 CONF=/etc/linuxmuster/backup.conf
 cp $CONF $CONF.migration
 sed -e "s|postgresql-$PGOLD|postgresql-$PGNEW|g
        s|cupsys|cups|g
        s|nagios2|nagios3|g" -i $CONF
 ### common stuff - end ###
 ### versions before 5 stuff - begin ###
 if [ $OLDVERSION \< 5 ]; then
  # freeradius
  CONF=/etc/freeradius/clients.conf
  FREEDYNTPLDIR=$DYNTPLDIR/55_freeradius
  if [ -s "$CONF" -a -d "$FREEDYNTPLDIR" ]; then
   echo " * freeradius ..."
   # fetch radiussecret: scan the firewall's client block for its secret line
   found=false
   while read line; do
    if [ "$line" = "client $ipcopip {" ]; then
     found=true
     continue
    fi
    if [ "$found" = "true" -a "${line:0:6}" = "secret" ]; then
     radiussecret="$(echo "$line" | awk -F\= '{ print $2 }' | awk '{ print $1 }')"
    fi
    [ -n "$radiussecret" ] && break
   done <$CONF
   # patch configuration
   for i in $FREEDYNTPLDIR/*.target; do
    targetcfg=`cat $i`
    sourcetpl=`basename $targetcfg`
    [ -e "$targetcfg" ] && cp $targetcfg $targetcfg.migration
    sed -e "s|@@package@@|linuxmuster-freeradius|
           s|@@date@@|$NOW|
           s|@@radiussecret@@|$radiussecret|
           s|@@ipcopip@@|$ipcopip|
           s|@@ldappassword@@|$ldapadminpw|
           s|@@basedn@@|$basedn|" $FREEDYNTPLDIR/$sourcetpl > $targetcfg
    chmod 640 $targetcfg
    chown root:freerad $targetcfg
   done # targets
  fi
 fi
 ### versions before 5 stuff - end ###
 ### version 6 stuff - begin ###
 if [ "$MAINVERSION" = "6" ]; then
  # schulkonsole css img directory
  CONF="/etc/linuxmuster/schulkonsole/apache2.conf"
  cp "$CONF" "$CONF.migration"
  sed -e "s|Alias /schulkonsole/img/ .*|Alias /schulkonsole/img/ /usr/share/schulkonsole/css/img/|g
         s|Alias /favicon.ico .*|Alias /favicon.ico /usr/share/schulkonsole/css/img/favicon.ico|g" -i "$CONF"
 fi
 ### version 6 stuff - end ###
} # upgrade_configs
echo
echo "####"
echo "#### Restoring files and folders"
echo "####"
RC=0
servername="$(hostname -s)"
domainname="$(dnsdomainname)"
cyradmpw="$(cat /etc/imap.secret)"
# filter out non existing files from include.conf so rsync does not fail on
# entries that are missing in the backup
BACKUP="$(grep ^/ "$INCONFTMP")"
for i in $BACKUP; do
 [ -e "${BACKUPFOLDER}${i}" ] && echo "$i" >> "$INCONFILTERED"
done
# filter out /etc/mysql on 6.x.x systems
[ "$MAINVERSION" = "6" ] && sed "/^\/etc\/mysql/d" -i "$INCONFILTERED"
# save quota.txt (needed later by the quota adjustment step as *.migration.old)
CONF=/etc/sophomorix/user/quota.txt
cp "$CONF" "$CONF.migration.old"
# stop services while their data files are being replaced
start_stop_services stop
# purge nagios stuff if migrating from prior versions to versions >= 6
[ "$MAINVERSION" -ge "6" -a "$OLDVERSION" \< "6" ] && rm -f /etc/nagios3/conf.d/*
# sync back the backed-up files onto the root filesystem
rsync -a -r -v "$INPARAM" "$EXPARAM" "$BACKUPFOLDER/" / || RC=1
if [ "$RC" = "0" ]; then
 echo "Restore successfully completed!"
else
 echo "Restore finished with error!"
fi
# upgrade configuration files
upgrade_configs
# repair permissions (rsync restored files with the source system's ids)
chown cyrus:mail /var/spool/cyrus -R
chown cyrus:mail /var/lib/cyrus -R
chown cyrus:mail /var/spool/sieve/ -R
chgrp ssl-cert /etc/ssl/private -R
chown root:www-data /etc/horde -R
chown www-data:www-data /var/log/horde -R
find /etc/horde -type f -exec chmod 440 '{}' \;
[ -d /etc/pykota ] && chown pykota:www-data /etc/pykota -R
# start services again
start_stop_services start
# upstart vs. sysv init, depending on the target release
if [ -e "/etc/init/ssh.conf" ]; then
 restart ssh
else
 /etc/init.d/ssh restart
fi
[ "$RC" = "0" ] || error
################################################################################
# restore opsi stuff after sync
if [ -n "$opsiip" -a -n "$opsipw" -a -s "$CACHEDIR/workstations.opsi" ]; then
 echo
 echo "####"
 echo "#### opsi restore"
 echo "####"
 # opsi workstation data after sync: re-append the saved opsi workstation
 # entries unless the restored import file already contains the opsi host
 grep -v ^# "$WIMPORTDATA" | grep -qw "$opsiip" || cat "$CACHEDIR/workstations.opsi" >> "$WIMPORTDATA"
 # opsi ssh
 # NOTE(review): "$<PASSWORD>" looks like a redaction placeholder in this
 # copy — original presumably passes $opsipw; verify against upstream
 linuxmuster-opsi --setup --password="$<PASSWORD>" --quiet
fi
################################################################################
# only for ipfire: repeat ssh connection stuff part 2
# meanwhile root's ssh key has changed
if [ "$TARGETFW" = "ipfire" ]; then
 echo
 echo "####"
 echo "#### $TARGETFW setup"
 echo "####"
 # NOTE(review): "<PASSWORD>" looks like a redaction placeholder — original
 # presumably passes $ipcoppw; verify against upstream
 linuxmuster-ipfire --setup --first --password="<PASSWORD>"
 wait_for_fw
fi
################################################################################
# restore ldap
# Wipes the target's ldap tree and re-imports the saved LDIF, rewriting the
# server name in sambaHomePath and the base dn throughout.
echo
echo "####"
echo "#### Restoring ldap tree"
echo "####"
# stop service (upstart vs. sysv init)
if [ -e "/etc/init/slapd.conf" ]; then
 stop slapd
else
 /etc/init.d/slapd stop
fi
# delete old ldap tree; DB_CONFIG is protected with the immutable flag so
# the wildcard rm keeps it in place
rm -rf /etc/ldap/slapd.d
mkdir -p /etc/ldap/slapd.d
chattr +i /var/lib/ldap/DB_CONFIG
rm /var/lib/ldap/* &> /dev/null
chattr -i /var/lib/ldap/DB_CONFIG
# sets servername and basedn in sambaHomePath
# old basedn is reconstructed from the source system's domainname entry
basedn_old="dc=$(grep /domainname "$BASEDATAFILE" | awk -F\: '{ print $2 }' | awk '{ print $1 }' | sed -e 's|\.|,dc=|g')"
cp "$LDIF" "$LDIF.bak"
sed -e 's|^sambaHomePath: \\\\.*\\|sambaHomePath: \\\\'"$servername"'\\|g
        s|'"$basedn_old"'|'"$basedn"'|g' -i "$LDIF"
# restore from ldif file
echo -n " * adding $LDIF ..."
if slapadd < "$LDIF"; then
 echo " OK!"
 RC=0
else
 echo " Failed!"
 RC=1
fi
# repair permissions (slapadd ran as root)
chown openldap:openldap /var/lib/ldap -R
# test
if [ "$RC" = "0" ]; then
 echo -n " * testing configuration ..."
 if slaptest -f /etc/ldap/slapd.conf -F /etc/ldap/slapd.d 2>> "$MIGRESTLOG" 1>> "$MIGRESTLOG"; then
  echo " OK!"
 else
  echo " Failed!"
  RC=1
 fi
fi
# repair permissions
chown -R openldap:openldap /etc/ldap
# start service again
/etc/init.d/slapd start
# exit with error
[ "$RC" = "0" ] || error
################################################################################
# reinstall linbo, perhaps it was overwritten (by the rsync file restore)
echo
echo "####"
echo "#### Reinstalling LINBO"
echo "####"
apt-get -y --reinstall install linuxmuster-linbo
################################################################################
# activate torrent
# Enables the bittorrent tracker and client daemons and switches all LINBO
# start.conf files to torrent downloads when $TORRENT is set.
if [ "$TORRENT" = "1" ]; then
 echo
 echo "####"
 echo "#### Activating LINBO's torrent"
 echo "####"
 # "changed" controls printing the header message only once
 changed=""
 msg="Working on configfiles:"
 CONF=/etc/default/bittorrent
 . $CONF
 if [ "$START_BTTRACK" != "1" ]; then
  echo "$msg"
  changed=yes
  echo -n " * `basename $CONF` ..."
  cp $CONF $CONF.migration
  if sed -e 's|^START_BTTRACK=.*|START_BTTRACK=1|' -i $CONF; then
   echo " OK!"
  else
   echo " Failed!"
  fi
 fi
 CONF=/etc/default/linbo-bittorrent
 . $CONF
 if [ "$START_BITTORRENT" != "1" ]; then
  if [ -z "$changed" ]; then
   echo "$msg"
   changed=yes
  fi
  echo -n " * `basename $CONF` ..."
  cp $CONF $CONF.migration
  if sed -e 's|^START_BITTORRENT=.*|START_BITTORRENT=1|' -i $CONF; then
   echo " OK!"
  else
   echo " Failed!"
  fi
 fi
trange="6881:6969"
if ! grep -q ^tcp $ALLOWEDPORTS | grep "$trange"; then
if [ -z "$changed" ]; then
echo "$msg"
changed=yes
fi
echo -n " * `basename $ALLOWEDPORTS` ..."
newports="$(grep ^tcp $ALLOWEDPORTS | awk '{ print $2 }'),$trange"
cp $ALLOWEDPORTS $ALLOWEDPORTS.migration
if sed -e "s|^tcp .*|tcp $newports|" -i $ALLOWEDPORTS; then
echo " OK!"
else
echo " Failed!"
fi
fi
mkdir -p $LINBODIR/backup
for i in $LINBODIR/start.conf.*; do
dltype="$(grep -i ^downloadtype $i | awk -F\= '{ print $2 }' | awk '{ print $1 }' | tr A-Z a-z)"
if [ "$dltype" != "torrent" ]; then
if [ -z "$changed" ]; then
echo "$msg"
changed=yes
fi
echo -n " `basename $i` ..."
cp "$i" "$LINBODIR/backup"
if sed -e "s|^\[[Dd][Oo][Ww][Nn][Ll][Oo][Aa][Dd][Tt][Yy][Pp][Ee]\].*|DownloadType = torrent |g" -i $i; then
echo " OK!"
else
echo " Failed!"
fi
fi
done
fi
################################################################################
# change config values defined in custom.conf
# When the custom flag file exists, compares each *uppercase* override from
# custom.conf against the current debconf value and, on any difference,
# updates debconf and reruns linuxmuster-patch in modify mode.
if [ -e "$CUSTOMFLAG" ]; then
 echo
 echo "####"
 echo "#### Custom setup"
 echo "####"
 rm -f "$CUSTOMFLAG"
 echo -n "Reading current configuration ..."
 # snapshot current values as <key>_old variables into $OLDVALUES, then
 # source it so they are available for comparison below
 for i in country state location workgroup servername domainname schoolname dsluser dslpasswd smtprelay \
  internsubrange fwconfig externtype externip externmask gatewayip dnsforwarders imaging; do
  RET="$(echo get linuxmuster-base/$i | debconf-communicate 2> /dev/null | awk '{ print $2 }')"
  oldvalue="${i}_old"
  echo "$oldvalue=\"$RET\"" >> $OLDVALUES
  unset RET
 done
 chmod 600 $OLDVALUES
 . $OLDVALUES
 echo " Done!"
 echo "Looking for modifications ..."
 changed=""
 if [ -n "$COUNTRY" -a "$COUNTRY" != "$country_old" ]; then
  echo " * COUNTRY: $country_old --> $COUNTRY"
  echo set linuxmuster-base/country "$COUNTRY" | debconf-communicate &> /dev/null
  changed=yes
 fi
 if [ -n "$STATE" -a "$STATE" != "$state_old" ]; then
  echo " * STATE: $state_old --> $STATE"
  echo set linuxmuster-base/state "$STATE" | debconf-communicate &> /dev/null
  changed=yes
 fi
 if [ -n "$LOCATION" -a "$LOCATION" != "$location_old" ]; then
  echo " * LOCATION: $location_old --> $LOCATION"
  echo set linuxmuster-base/location "$LOCATION" | debconf-communicate &> /dev/null
  changed=yes
 fi
 if [ -n "$SCHOOLNAME" -a "$SCHOOLNAME" != "$schoolname_old" ]; then
  echo " * SCHOOLNAME: $schoolname_old --> $SCHOOLNAME"
  echo set linuxmuster-base/schoolname "$SCHOOLNAME" | debconf-communicate &> /dev/null
  changed=yes
 fi
 if [ -n "$WORKGROUP" -a "$WORKGROUP" != "$workgroup_old" ]; then
  echo " * WORKGROUP: $workgroup_old --> $WORKGROUP"
  echo set linuxmuster-base/workgroup "$WORKGROUP" | debconf-communicate &> /dev/null
  changed=yes
 fi
 if [ -n "$SERVERNAME" -a "$SERVERNAME" != "$servername_old" ]; then
  echo " * SERVERNAME: $servername_old --> $SERVERNAME"
  echo set linuxmuster-base/servername "$SERVERNAME" | debconf-communicate &> /dev/null
  changed=yes
 fi
 # NOTE(review): "DOMAINAME" (single N) — presumably matches the variable
 # name used in custom.conf; confirm the spelling there before "fixing" it
 if [ -n "$DOMAINAME" -a "$DOMAINAME" != "$domainname_old" ]; then
  echo " * DOMAINAME: $domainname_old --> $DOMAINAME"
  echo set linuxmuster-base/domainname "$DOMAINAME" | debconf-communicate &> /dev/null
  changed=yes
 fi
 if [ -n "$SMTPRELAY" -a "$SMTPRELAY" != "$smtprelay_old" ]; then
  echo " * SMTPRELAY: $smtprelay_old --> $SMTPRELAY"
  echo set linuxmuster-base/smtprelay "$SMTPRELAY" | debconf-communicate &> /dev/null
  changed=yes
 fi
 if [ -n "$INTERNSUBRANGE" -a "$INTERNSUBRANGE" != "$internsubrange_old" ]; then
  echo " * INTERNSUBRANGE: $internsubrange_old --> $INTERNSUBRANGE"
  echo set linuxmuster-base/internsubrange "$INTERNSUBRANGE" | debconf-communicate &> /dev/null
  changed=yes
 fi
 if [ -n "$FWCONFIG" -a "$FWCONFIG" != "$fwconfig_old" ]; then
  echo " * FWCONFIG: $fwconfig_old --> $FWCONFIG"
  echo set linuxmuster-base/fwconfig "$FWCONFIG" | debconf-communicate &> /dev/null
  changed=yes
 fi
 if [ -n "$changed" ]; then
  echo "Applying setup modifications, starting linuxmuster-setup (modify) ..."
  # re-seed the passwords, which the setup run needs in debconf
  RET=`echo set linuxmuster-base/ipcoppw "$ipcoppw" | debconf-communicate`
  [ -n "$opsipw" ] && RET=`echo set linuxmuster-base/opsipw "$opsipw" | debconf-communicate`
  $SCRIPTSDIR/linuxmuster-patch --modify
 fi
fi # custom
################################################################################
# renew server certificate (if incompatible, version 6 or greater) (#107)
# RENEWCERT is empty when the restored certificate does not use the required
# encryption method ($REQENCRMETHOD), which triggers regeneration.
RENEWCERT="$(openssl x509 -noout -text -in $SERVERCERT | grep $REQENCRMETHOD)"
if [ $MAINVERSION -ge 6 -a -z "$RENEWCERT" ]; then
 echo
 echo "####"
 echo "#### Renewing server certificate"
 echo "####"
 $SCRIPTSDIR/create-ssl-cert.sh
 echo
 echo "IMPORTANT: Browser and E-Mail-Clients have to reimport the new certificate!"
fi
################################################################################
# quota
# Adapts the restored quota configuration (quota.txt, lehrer.txt) when the
# number of quoted partitions differs between source and target:
#  - source 0 partitions: install the default quota.txt template
#  - target 1 partition: collapse "a+b" quota pairs to their sum
#  - target 2 partitions: expand single values to "a+<default>" pairs
echo
echo "####"
echo "#### Checking quota"
echo "####"
# determine number of quoted partitions on target (capped at 2)
quotaparts="$(mount | grep -c "usrquota,grpquota")"
[ $quotaparts -gt 2 ] && quotaparts=2
if [ $quotaparts -gt 0 ]; then
 # get number of quoted partitions from source
 quotaparts_old="$(cat "$QUOTAPARTS")"
 if [ $quotaparts -ne $quotaparts_old ]; then
  echo "Your quota configuration is different from source."
  echo "Quota partition(s) on source: $quotaparts_old."
  echo "Quota partition(s) on target: $quotaparts."
  echo "We try to adjust it accordingly."
  echo "Please check your quota settings after migration has finished."
  sleep 3
  CONF=/etc/sophomorix/user/quota.txt
  TCONF=/etc/sophomorix/user/lehrer.txt
  cp "$CONF" "$CONF.migration"
  cp "$TCONF" "$TCONF.migration"
  if [ $quotaparts_old -eq 0 ]; then
   # copy default quota.txt if no quota were set on source
   echo -n " * using defaults for $quotaparts partition(s) ..."
   if cp "${STATICTPLDIR}${CONF}.$quotaparts" "$CONF"; then
    echo " OK!"
   else
    error " Failed!"
   fi
  elif [ $quotaparts -eq 1 ]; then
   # reduce quota to one partition
   # work on quota.txt
   echo -n "Checking `basename "$CONF"` ..."
   changed=""
   # NOTE(review): "changed" is set inside a "grep | while" pipeline, i.e.
   # in a subshell — the post-loop check below never sees it and always
   # prints " nothing to do."; restructure with input redirection to fix
   grep ^[a-zA-Z] "$CONF.migration" | while read line; do
    user="$(echo "$line" | awk -F \: '{ print $1 }')"
    quota_old="$(echo "$line" | awk -F \: '{ print $2 }' | awk '{ print $1 }')"
    quota1="$(echo "$quota_old" | awk -F \+ '{ print $1 }')"
    quota2="$(echo "$quota_old" | awk -F \+ '{ print $2 }')"
    # lines without a "+" pair are already single-partition values
    [ -z "$quota2" ] && continue
    if ! quota_new=$(( $quota1 + $quota2 )); then
     error " Failed!"
    fi
    if [ -z "$changed" ]; then
     echo
     changed=yes
    fi
    echo -n " * $user: $quota_old --> $quota_new ..."
    if sed -e "s|^${user}:.*|${user}: $quota_new|" -i "$CONF"; then
     echo " OK!"
    else
     error " Failed!"
    fi
   done
   [ -z "$changed" ] && echo " nothing to do."
   # work on lehrer.txt
   echo -n "Checking `basename "$TCONF"` ..."
   changed=""
   # NOTE(review): same subshell caveat as above
   grep ^[a-zA-Z] "$TCONF.migration" | while read line; do
    user="$(echo "$line" | awk -F \; '{ print $5 }' | awk '{ print $1 }')"
    quota_old="$(echo "$line" | awk -F \; '{ print $8 }' | awk '{ print $1 }')"
    quota1="$(echo "$quota_old" | awk -F \+ '{ print $1 }')"
    quota2="$(echo "$quota_old" | awk -F \+ '{ print $2 }')"
    [ -z "$quota2" ] && continue
    if ! quota_new=$(( $quota1 + $quota2 )); then
     error " Failed!"
    fi
    line_new="$(echo "$line" | sed -e "s|\;${quota_old}|\;${quota_new}|")"
    if [ -z "$changed" ]; then
     echo
     changed=yes
    fi
    echo -n " * $user: $quota_old --> $quota_new ..."
    if sed -e "s|$line|$line_new|" -i "$TCONF"; then
     echo " OK!"
    else
     error " Failed!"
    fi
   done
   [ -z "$changed" ] && echo " nothing to do."
  else # expand quota for second partition
   # work on quota.txt
   echo -n "Checking `basename "$CONF"` ..."
   # get teachers default quota for second partition from previously backed up config
   TDEFAULT="$(grep ^standard-lehrer "$CONF.migration.old" | awk -F \: '{ print $2 }' | awk -F\+ '{ print $2 }')"
   [ -z "$TDEFAULT" ] && TDEFAULT=100
   changed=""
   # NOTE(review): same "grep | while" subshell caveat as in the branch above
   grep ^[a-zA-Z] "$CONF.migration" | while read line; do
    user="$(echo "$line" | awk -F \: '{ print $1 }')"
    quota_old="$(echo "$line" | awk -F \: '{ print $2 }' | awk '{ print $1 }')"
    # already in "a+b" form -> nothing to expand
    stringinstring "+" "$quota_old" && continue
    case "$user" in
     standard-lehrer) quota_new="${quota_old}+$TDEFAULT" ;;
     www-data) quota_new="0+${quota_old}" ;;
     *) quota_new="${quota_old}+0" ;;
    esac
    if [ -z "$changed" ]; then
     echo
     changed=yes
    fi
    echo -n " * $user: $quota_old --> $quota_new ..."
    if sed -e "s|^${user}:.*|${user}: $quota_new|" -i "$CONF"; then
     echo " OK!"
    else
     error " Failed!"
    fi
   done
   [ "$changed" = "yes" ] || echo " nothing to do."
   # work on lehrer.txt
   echo -n "Checking `basename "$TCONF"` ..."
   changed=""
   grep ^[a-zA-Z] "$TCONF.migration" | while read line; do
    user="$(echo "$line" | awk -F \; '{ print $5 }' | awk '{ print $1 }')"
    quota_old="$(echo "$line" | awk -F \; '{ print $8 }' | awk '{ print $1 }')"
    stringinstring "+" "$quota_old" && continue
    isinteger "$quota_old" || continue
    quota_new="${quota_old}+$TDEFAULT"
    line_new="$(echo "$line" | sed -e "s|\;${quota_old}|\;${quota_new}|")"
    if [ -z "$changed" ]; then
     echo
     changed=yes
    fi
    echo -n " * $user: $quota_old --> $quota_new ..."
    if sed -e "s|$line|$line_new|" -i "$TCONF"; then
     echo " OK!"
    else
     error " Failed!"
    fi
   done
   [ "$changed" = "yes" ] || echo " nothing to do."
  fi
 fi
fi
# quota update
sophomorix-quota --set
################################################################################
# final tasks
echo
echo "####"
echo "#### Final tasks"
echo "####"
apt-get clean
apt-get -y autoremove
# remove the unattended-apt snippet written by write_aptconftweak
echo -n "removing apt's unattended config ..."
rm -f "$APTCONFTWEAK"
echo " OK!"
# be sure samba runs (upstart vs. sysv init)
if [ -e /etc/init/smbd.conf ]; then
 restart smbd
else
 /etc/init.d/samba restart
fi
# repair cyrus db (#107): wipe the runtime databases and let ctl_cyrusdb
# rebuild them from the restored mail spool
/etc/init.d/cyrus-imapd stop
rm -f /var/lib/cyrus/db/*
rm -f /var/lib/cyrus/deliver.db
su -c '/usr/sbin/ctl_cyrusdb -r' cyrus
/etc/init.d/cyrus-imapd start
# reconfigure linuxmuster-pkgs finally
pkgs="base linbo schulkonsole nagios-base"
[ "$TARGETFW" != "custom" ] && pkgs="$pkgs $TARGETFW"
for i in $pkgs; do
 dpkg-reconfigure linuxmuster-$i
done
# finally be sure workstations are up to date
rm -f "$ROOM_SHARE_ACLS"
#touch /tmp/.migration
import_workstations
#rm -f /tmp/.migration
# recreate aliases db
newaliases
################################################################################
# end
echo
echo "####"
echo "#### `date`"
echo -n "#### Finished."
if [ "$REBOOT" = "1" ]; then
echo " Rebooting as requested!"
echo "####"
/sbin/reboot
else
echo " Please reboot the server so the changes take effect!"
echo "####"
fi
<file_sep>/share/linuxmuster-migration-backup.sh
#
# linuxmuster-migration-backup
# <EMAIL>
# 28.01.2014
#
################################################################################
# check if current version is supported
# abort unless $DISTFULLVERSION contains one of the versions listed in $BACKUPVERSIONS
echo
echo "####"
echo "#### Checking version"
echo "####"
match=false
for i in $BACKUPVERSIONS; do
if stringinstring "$i" "$DISTFULLVERSION"; then
match=true
break
fi
done
if [ "$match" = "true" ]; then
echo "Source version: $DISTFULLVERSION."
else
error "Version $DISTFULLVERSION is not supported."
fi
################################################################################
# check if there is still some bind-mounts left over
echo
echo "####"
echo "#### Checking and removing left over bind mounts"
echo "####"
sophomorix-bind --cron ; RC="$?"
if [ "$RC" = "0" ]; then
# sophomorix-bind -i lists mounts; a "home" line means bind mounts are still active
sophomorix-bind -i | grep home > /dev/null ; bind_mounts=$?
if [ "$bind_mounts" = "0" ]; then
echo "Warning: There are bind mounts in /home!"
else
echo " OK!"
fi
else
error " Failed!"
fi
################################################################################
# computing needed backup space
# BACKUP holds every absolute path listed in the merged include config
BACKUP="$(grep ^/ "$INCONFTMP")"
touch "$INCONFILTERED"
# skip check
if [ "$FORCE" = "yes" ]; then
echo
echo "####"
echo "#### Skipping backup space check"
echo "####"
# filter out non existing files
for i in $BACKUP; do
[ -e "$i" ] && echo "$i" >> "$INCONFILTERED"
done
else # do check
echo
echo "####"
echo "#### Computing backup space"
echo "####"
# add all file sizes to SUM
# ssum = total source size, tsum = size already present on the target (all in kb)
ssum=0 ; tsum=0 ; s=0 ; t=0
for i in $BACKUP; do
# source space
if [ -e "$i" ]; then
# on this occasion write only the really existent files to INCONFILTERED for use with rsync
echo "$i" >> "$INCONFILTERED"
s="$(du --exclude-from="$EXCONFTMP" -sk "$i" | awk '{ print $1 }')"
ssum=$(( $s + $ssum ))
fi
# target space
if [ -e "${BACKUPFOLDER}${i}" ]; then
t="$(du -sk "${BACKUPFOLDER}${i}" | awk '{ print $1 }')"
tsum=$(( $t + $tsum ))
fi
done
# add 200 mb to backup size to be sure it fits
ssum=$(( $ssum + 200000 ))
echo " * total backup size : $ssum kb"
echo " * already on target : $tsum kb"
# free space on TARGETDIR
freespace="$(df -P $TARGETDIR | tail -1 | awk '{ print $4 }')"
echo " * free space on target : $freespace kb"
# really needed space
needed=$(( $ssum - $tsum ))
echo " * needed space on target : $needed kb"
# decide whether it fits
if [ $freespace -lt $needed ]; then
error "Sorry, does not fit!"
else
echo "Great, that fits. :-)"
fi
fi
################################################################################
# check for supported file system type
# stat -f -c %T prints the filesystem type of the target directory
echo
echo "####"
echo "#### Checking filesystem type on target medium"
echo "####"
FSTYPE="$(stat -f -c %T $TARGETDIR)"
echo -n " * $FSTYPE ..."
if stringinstring "$FSTYPE" "$SUPPORTEDFS"; then
echo " OK!"
else
echo " NOT supported!"
error "I'm sorry, supported filesystems are: $SUPPORTEDFS."
fi
################################################################################
# backup paedml base data
echo
echo "####"
echo "#### Backing up base data"
echo "####"
# dumps debconf values to file
echo -n " * debconf values ..."
debconf-show linuxmuster-base > "$BASEDATAFILE" ; RC="$?"
if [ "$RC" = "0" ]; then
echo " OK!"
else
error " Failed!"
fi
# save issue file because of version info
echo -n " * version info ..."
cp -f /etc/issue "$ISSUE" ; RC="$?"
if [ "$RC" = "0" ]; then
echo " OK!"
else
error " Failed!"
fi
# save number of quoted partitions
# count mount entries carrying usrquota,grpquota options
echo -n " * quota info ..."
mount | grep -c "usrquota,grpquota" > "$QUOTAPARTS"
echo " $(cat "$QUOTAPARTS") partition(s) are quoted."
################################################################################
# firewall settings, certificates etc.
# get firewall type from system
# defaults to ipcop unless $fwconfig overrides it
FIREWALL=ipcop
[ -n "$fwconfig" ] && FIREWALL="$fwconfig"
# backup firewall settings if not custom
if [ "$FIREWALL" != "custom" ]; then
echo
echo "####"
echo "#### Backing up $FIREWALL settings"
echo "####"
echo -n " * creating and downloading $FWARCHIVE ..."
RC=0
if [ "$FIREWALL" = "ipcop" ]; then
# tar up the firewall config remotely, then fetch the archive
exec_ipcop /bin/tar czf /var/linuxmuster/backup.tar.gz --exclude=/var/$FIREWALL/ethernet/settings /etc /root/.ssh /var/$FIREWALL || RC=1
get_ipcop /var/linuxmuster/backup.tar.gz "$FWARCHIVE" || RC=1
else # ipfire
for cmd in makedirs exclude; do
exec_ipcop /usr/local/bin/backupctrl $cmd >/dev/null 2>&1 || RC=1
done
# fetch the most recently created .ipf backup file
latest_ipf="$(ssh -p 222 root@${ipcopip} ls -1rt /var/ipfire/backup/*.ipf | tail -1)"
if [ -n "$latest_ipf" ]; then
get_ipcop $latest_ipf "$FWARCHIVE" || RC=1
else
RC=1
fi
fi
if [ "$RC" = "0" ]; then
echo " OK!"
else
error " Failed!"
fi
fi
# record the firewall type for the restore side
echo "$FIREWALL" > "$FWTYPE"
################################################################################
# package selections
echo
echo "####"
echo "#### Backing up package selections"
echo "####"
echo -n " * get selections ..."
dpkg --get-selections > "$SELECTIONS" ; RC="$?"
if [ "$RC" = "0" ]; then
echo " OK!"
else
error " Failed!"
fi
################################################################################
# save password hash of remoteadmin, if account is present
if [ -n "$REMOTEADMIN" ]; then
if id $REMOTEADMIN &> /dev/null; then
echo
echo "####"
echo "#### Backing up $REMOTEADMIN"
echo "####"
echo -n " * saving password hash ..."
# Anchor the pattern to the login field ("^name:") so an account whose
# name merely contains $REMOTEADMIN cannot match, and quote the output
# path in case the name contains shell-special characters.
if grep "^${REMOTEADMIN}:" /etc/shadow | awk -F\: '{ print $2 }' > "${REMOTEADMIN}.hash"; then
echo " OK!"
else
echo " Failed!"
fi
fi
fi
################################################################################
# filesystem backup with rsync using include.conf and exclude.conf
echo
echo "####"
echo "#### Backing up filesystem"
echo "#### `date`"
echo "####"
RC=0
# make sure folder exists
mkdir -p "$BACKUPFOLDER"
# first sync with running services
# low-priority warm-up pass; errors here are tolerated (|| true)
nice -n 19 rsync -a -r -v --delete --delete-excluded "$INPARAM" "$EXPARAM" / "$BACKUPFOLDER/" || true
# stop services
start_stop_services stop
echo "#### `date`"
# second sync with stopped services
# high-priority authoritative pass; failures are recorded in RC
nice -n -20 rsync -a -r -v --delete --delete-excluded "$INPARAM" "$EXPARAM" / "$BACKUPFOLDER/" || RC=1
# start services again
start_stop_services start
if [ "$RC" = "0" ]; then
echo "Backup successfully completed!"
else
error "An error ocurred during backup!"
fi
echo "#### `date`"
################################################################################
# dumping postgresql databases and users
echo
echo "####"
echo "#### Backing up postgresql databases"
echo "####"
# dumping all databases except postgres and templates
for i in `psql -t -l -U postgres | awk '{ print $1 }' | grep ^[a-zA-Z0-9]`; do
case $i in
postgres|template0|template1) continue ;;
esac
echo -n " * $i ..."
if pg_dump --encoding=UTF8 -U postgres $i > $i.pgsql; then
echo " OK!"
else
error " Failed!"
fi
done
# metadata
# roles/tablespaces etc. go to a separate globals-only dump
echo -n " * metadata ..."
if pg_dumpall -U postgres --globals-only > "$PGSQLMETA"; then
echo " OK!"
else
error " Failed!"
fi
################################################################################
# dumping mysql databases and users
echo
echo "####"
echo "#### Backing up mysql databases"
echo "####"
# skip the system "mysql" database here; its user table is dumped below
for i in `LANG=C mysqlshow | grep ^"| "[0-9a-zA-Z] | grep -v ^"| mysql" | awk '{ print $2 }'`; do
case $i in
information_schema|performance_schema|test) continue ;;
esac
echo -n " * $i ..."
if mysqldump --databases $i > $i.mysql; then
echo " OK!"
else
error " Failed!"
fi
done
echo -n " * metadata ..."
if mysqldump mysql user > "$MYSQLMETA"; then
echo " OK!"
else
error " Failed!"
fi
################################################################################
# dumping ldap tree
# slapd must be stopped while slapcat reads the database
echo
echo "####"
echo "#### Backing up ldap tree"
echo "####"
/etc/init.d/slapd stop
echo -n " * dumping ..."
slapcat > "$LDIF" ; RC="$?"
if [ "$RC" = "0" ]; then
echo " OK!"
else
echo " Failed!"
fi
# restart slapd before aborting on a failed dump
/etc/init.d/slapd start
[ "$RC" = "0" ] || error
################################################################################
# copying migration config files to TARGETDIR to have them in place for restore
# skipped when config dir and target dir are the same (trailing slashes stripped)
if [ "${MIGCONFDIR%\/}" != "${TARGETDIR%\/}" ]; then
echo
echo "####"
echo "#### Saving config files in $TARGETDIR"
echo "####"
for i in $MIGCONFDIR/*.conf; do
echo -n " * `basename $i` ..."
cp "$i" "$TARGETDIR"
echo " Done!"
done
fi
################################################################################
# end
echo
echo "####"
echo "#### `date`"
echo "#### Backup of migration data finished! :-)"
echo "####"
<file_sep>/sbin/linuxmuster-migration-restore
#!/bin/bash
#
# <EMAIL>
# 20160711
# GPL v3
#
# read paedml specific environment
# each of these must be readable, otherwise the script aborts immediately
. /usr/share/linuxmuster/config/dist.conf || exit 1
. $HELPERFUNCTIONS || exit 1
. $DATADIR/migration/defaults.conf || exit 1
# print help
# usage: print the command line help text and exit with status 1
usage(){
echo
echo "Usage: `basename $0` <options>"
echo
echo "Options:"
echo
echo " -c <config dir> Path to directory with config files (optional)."
echo " Per default we look in source dir for them."
echo " -d <source dir> Path to source directory (mandatory,"
echo " where the restore files live)."
echo " -i <password> Firewall root password (optional). If not given you"
echo " will be asked for it."
echo " -o <password> Opsi root password (optional). If not given you"
echo " will be asked for it if Opsi is configured."
echo " -t <temp dir> Path to directory where the restore files are"
echo " temporarily stored in case the source dir is on a"
echo " nfs share (optional, must exist)."
echo " -h Show this help."
exit 1
}
# parse commandline arguments
# leading ":" in the optstring enables silent error handling (\? and :)
while getopts ":c:d:hi:o:t:" opt; do
case $opt in
c) ALTCONFDIR="$OPTARG" ;;
d) SOURCEDIR="$OPTARG" ;;
i) ipcoppw="$OPTARG" ;;
o) opsipw="$OPTARG" ;;
t) NFSTMPDIR="$OPTARG" ;;
h) usage ;;
\?) echo "Invalid option: -$OPTARG" >&2
usage ;;
:) echo "Option -$OPTARG requires an argument." >&2
usage ;;
esac
done
# check args
# -d is mandatory and must be an existing directory; -c and -t must exist when given
[ -z "$SOURCEDIR" ] && usage
[ -d "$SOURCEDIR" ] || usage
if [ -n "$ALTCONFDIR" ]; then
[ -d "$ALTCONFDIR" ] || usage
fi
if [ -n "$NFSTMPDIR" ]; then
[ -d "$NFSTMPDIR" ] || usage
fi
# delete old logfile
rm -f "$MIGRESTLOG"
# print script header info
echo
echo "####" | tee -a "$MIGRESTLOG"
echo "#### Starting restore of migration data" | tee -a "$MIGRESTLOG"
echo "#### Sourcedir: $SOURCEDIR" | tee -a "$MIGRESTLOG"
echo "#### Configdir: $MIGCONFDIR" | tee -a "$MIGRESTLOG"
[ -n "$NFSTMPDIR" ] && echo "#### NFS-Tempdir: $NFSTMPDIR" | tee -a "$MIGRESTLOG"
echo "#### `date`" | tee -a "$MIGRESTLOG"
echo "####" | tee -a "$MIGRESTLOG"
# nfs check
# nfs_check prints "share mountpoint fstype" when the source dir is on nfs
RET="$(nfs_check)"
if [ -n "$RET" ]; then
NFSSHARE="$(echo "$RET" | awk '{ print $1 }')"
NFSMNTDIR="$(echo "$RET" | awk '{ print $2 }')"
NFSTYPE="$(echo "$RET" | awk '{ print $3 }')"
if [ -n "$NFSSHARE" -a -n "$NFSMNTDIR" -a -n "$NFSTYPE" ]; then
echo "$NFSTYPE filesystem $NFSSHARE is mounted on $NFSMNTDIR." | tee -a "$MIGRESTLOG"
else
echo "NFSSHARE: $NFSSHARE" | tee -a "$MIGRESTLOG"
echo "NFSMNTDIR: $NFSMNTDIR" | tee -a "$MIGRESTLOG"
echo "NFSTYPE: $NFSTYPE" | tee -a "$MIGRESTLOG"
error "NFS error!" | tee -a "$MIGRESTLOG"
fi
fi
# if source is on nfs copy files to a local filesystem
if [ -n "$NFSTYPE" ]; then
# compute needed space on local filesystem
ssum="$(du -sk "$SOURCEDIR" | awk '{ print $1 }')"
# add 200 mb to size to be sure it fits
ssum=$(( $ssum + 200000 ))
# if a tmp dir was given on cmdline use this or iterate over NFSTEMPDIRS defined in defaults.conf
[ -n "$NFSTMPDIR" ] && NFSTEMPDIRS="$NFSTMPDIR"
# check for enough free space on the local filesystem
for i in $NFSTEMPDIRS; do
freespace="$(df -P $i | tail -1 | awk '{ print $4 }')"
if [ $freespace -gt $ssum ]; then
# default: <dir>/migration.tmp; a cmdline tempdir is used verbatim
LOCALSRCDIR="$i/migration.tmp"
[ "$i" = "/" ] && LOCALSRCDIR="/migration.tmp"
[ -n "$NFSTMPDIR" ] && LOCALSRCDIR="$NFSTMPDIR"
break
fi
done
if [ -z "$LOCALSRCDIR" ]; then
error "There is not enough free space on the local filesystem! About $(( $ssum / 1024 )) MB are needed." | tee -a "$MIGRESTLOG"
fi
mkdir -p "$LOCALSRCDIR"
echo -n "Copying about $(( $ssum / 1024 )) MB data to $LOCALSRCDIR ... be patient ..." | tee -a "$MIGRESTLOG"
RC=0
rsync -a -v --delete "$SOURCEDIR/" "$LOCALSRCDIR/" || RC=1
if [ "$RC" = "0" ]; then
echo " OK!" | tee -a "$MIGRESTLOG"
else
# remove the partial copy before aborting
rm -rf "$LOCALSRCDIR"
error " Failed!" | tee -a "$MIGRESTLOG"
fi
# from now on work on the local copy and release the nfs mount
SOURCEDIR="$LOCALSRCDIR"
umount_nfs
fi
# update paths
# config dir defaults to the source dir unless -c was given
if [ -n "$ALTCONFDIR" ]; then
MIGCONFDIR="$ALTCONFDIR"
else
MIGCONFDIR="$SOURCEDIR"
fi
cd "$SOURCEDIR"
# concenate config files
MIGEXUSER="$MIGCONFDIR/exclude.conf"
MIGINUSER="$MIGCONFDIR/include.conf"
concenate_configs || error " Failed!"
# never let the restore rsync touch the local temp copy itself
[ -n "$NFSTYPE" ] && echo "$SOURCEDIR" >> "$EXCONFTMP" | tee -a "$MIGRESTLOG"
# check for custom.conf and read it
if [ -s "$MIGCONFDIR/custom.conf" ]; then
echo -n "Reading $MIGCONFDIR/custom.conf ..." | tee -a "$MIGRESTLOG"
if . "$MIGCONFDIR/custom.conf"; then
echo " OK!" | tee -a "$MIGRESTLOG"
else
error " Failed!" | tee -a "$MIGRESTLOG"
fi
fi
# keep old internsubrange, we need it later if it has to be changed during migration
internsubrange_old="$(echo get linuxmuster-base/internsubrange | debconf-communicate | awk '{ print $2 }')"
# NOTE(review): if error() returns non-zero the chained "&& exit 1" is skipped — verify error() terminates
[ -z "$internsubrange_old" ] && { error "Cannot get internsubrange value from debconf!" && exit 1 ; }
# source restore script
# all output of the sourced restore logic is appended to the logfile as well
. "$MIGDATADIR/linuxmuster-migration-restore.sh" 2>&1 | tee -a "$MIGRESTLOG"
cp "$MIGRESTLOG" "$SOURCEDIR"
# clean up
rm -f "$EXCONFTMP"
rm -f "$INCONFTMP"
rm -f "$INCONFILTERED"
cd ~
# remove nfs tempdir if no tempdir was set on cmdline
[ -n "$NFSTYPE" -a -z "$NFSTMPDIR" ] && rm -rf "$SOURCEDIR"
| f94ddc3edd7cfd0015b2e19aa05add343a0e7440 | [
"Shell"
] | 4 | Shell | linuxmuster/linuxmuster-migration | f3e3bd93f002e6153929cf8a561dcf95f451872a | d30a82065a27996a808d0ee1126e0909d09b6fcf |
refs/heads/master | <repo_name>Jeepston/workplace<file_sep>/m.js
var mymodule = require("./h.js");
mymodule.filteredLS(process.argv[2], process.argv[3], function(err, list) {
if (err) {
return;
}
list.forEach(function(e) {
console.log(e);
})
}) | 95c5f10dac96c912c71c9e61fd6c84afc4d458d6 | [
"JavaScript"
] | 1 | JavaScript | Jeepston/workplace | ade143ba008de2d497533b41ce286c8a1c2a5d2d | b14b75af33e3464426468b748f049c5016ca3fa6 |
refs/heads/master | <repo_name>vpmouler/ng-cast<file_sep>/src/components/videoListEntry.js
angular.module('video-player')
  .component('videoListEntry', {
    // One row of the video list. The parent supplies the video object,
    // a click handler and the row index via one-way ('<') bindings.
    bindings: {
      video: '<',
      videoOnClick: '<',
      index: '<',
    },
    controller: function () {
      // Debug helper callable from the template: logs the clicked video
      // and the originating DOM event. (Fixed the "vide" typo in the label.)
      this.logIndex = (video, event) => {
        console.log('video', video);
        console.log('event', event);
      };
      // NOTE: removed the commented-out stringConcat experiment that built
      // a YouTube embed URL; it was dead code.
    },
    templateUrl: 'src/templates/videoListEntry.html'
  });
<file_sep>/src/components/search.js
angular.module('video-player')
  .component('search', {
    // IMPORTANT: the controller must be a regular function, not an arrow
    // function. AngularJS instantiates it with the controller object bound
    // to `this`; an arrow function would capture the module-level `this`
    // instead, so `searchText` would never be set on the controller.
    controller: function () {
      // Model for the search input field.
      this.searchText = '';
    },
    bindings: {
      // Handler provided by the parent, invoked with the search text.
      getSearchText: '<'
    },
    templateUrl: 'src/templates/search.html'
  });
<file_sep>/src/services/youTube.js
angular.module('video-player')
  .service('youTube', function ($http) {
    // Query the YouTube Data API v3 search endpoint and pass the raw
    // $http response object to the supplied callback `cb`.
    this.search = function (text, cb) {
      $http({
        // $http expects `method`; `type` is jQuery.ajax vocabulary and
        // was silently ignored here.
        method: 'GET',
        url: 'https://www.googleapis.com/youtube/v3/search',
        params: {
          q: text || 'jordan body curtain advance', // fallback demo query
          max: 5,
          key: window.YOUTUBE_API_KEY,
          part: 'snippet'
        }
      }).then(function (response) {
        // Stray `console.log(cb)` debug call removed.
        cb(response);
      }, function (error) {
        // Include the rejection reason instead of a bare marker.
        console.log('FAILED', error);
      });
    };
  });
| fe7837ef3251b43fa70d15de9baa6215019a8a61 | [
"JavaScript"
] | 3 | JavaScript | vpmouler/ng-cast | 6a31886d5673cb50d63a30c2347bf690b3a8882b | 82d856b3ff6bdbce99d458297e409296369e9a48 |
refs/heads/master | <file_sep> class StripCodes
# pass in the extensions - more versatile
def self.get_orphan_codes(input_file, output_filename)
  # Compare the MP4 entries against the SRT entries from the listing
  # file and save the de-duplicated codes of MP4s lacking a subtitle.
  entries = File.read(input_file).split
  mp4_codes = get_codes(build_array(entries, "mp4"))
  srt_codes = get_codes(build_array(entries, "srt"))
  save_to_file((mp4_codes - srt_codes).uniq, output_filename)
end
def self.update_code_list(codes_file, new_files_list, output_filename)
  # Drop every code that appears in the newly acquired file list, then
  # save the remaining (still-missing) codes.
  wanted = File.read(codes_file).split
  acquired = build_codes_array(File.read(new_files_list).split)
  save_to_file(wanted - acquired, output_filename)
end
def self.list_mismatched_file_names(input_file, output_filename)
  # Report SRT/MP4 pairs that share a code but carry different base
  # filenames. The report is still printed (as before) and — fulfilling
  # the old "rather than puts, save output" TODO — is now also written
  # via save_to_file to "#{output_filename}.txt" (the parameter was
  # previously accepted but never used). Returns nil.
  files_strings = File.read(input_file).split
  mp4s_hash = build_array(files_strings, "mp4").map { |f| [f.split("_")[-2], f] }.to_h
  srts_hash = build_array(files_strings, "srt").map { |f| [f.split("_")[-2], f] }.to_h
  report = []
  srts_hash.each_pair do |code, srt_name|
    # skip exact matches and codes without an MP4 counterpart
    next if mp4s_hash[code] == srt_name || mp4s_hash[code].nil?
    report << "#{code}:\nSRT filename is #{srt_name}\nMP4 filename is #{mp4s_hash[code]}"
  end
  report.each { |entry| puts entry }
  save_to_file(report, output_filename)
  nil
end
def self.check_for_duplicates(input_file, extension)
  # true when every extracted code is unique for the given extension.
  names = build_array(File.read(input_file).split, extension)
  uniq?(get_codes(names))
end
def self.get_duplicate_codes(input_file, extension, output_filename)
  # Save every code that occurs more than once among files of the
  # given extension (each duplicate reported once, in first-seen order).
  names = build_array(File.read(input_file).split, extension)
  codes = get_codes(names)
  duplicates = codes.uniq.select { |code| codes.count(code) > 1 }
  save_to_file(duplicates, output_filename)
end
# input_file, output_filename, codes_file, new_files_list, extension = ARGV
# NOTE(review): `private` does NOT affect methods defined with `def self.` —
# the helper class methods below remain public. Use `private_class_method`
# if they should actually be hidden.
private
def self.build_array(strings_data, extension)
  # Return the entries whose final dot-separated segment equals
  # +extension+, each with that ".extension" suffix chomped off.
  suffix = ".#{extension}"
  strings_data.each_with_object([]) do |entry, matches|
    matches << entry.chomp(suffix) if entry.split(".").last == extension
  end
end
def self.get_codes(filenames)
  # Extract the code from each name: the second-to-last
  # underscore-separated field.
  filenames.map { |filename| filename.split("_")[-2] }
end
def self.save_to_file(codes, output_filename)
  # Write one code per line so the file round-trips through
  # File.read(...).split — the previous Array#to_s dump (via IO#<<)
  # produced '["a", "b"]', which update_code_list could not parse back.
  # The block form of File.open closes the handle even if writing raises.
  File.open("#{output_filename}.txt", 'w') do |output|
    codes.each { |code| output.puts(code) }
  end
end
def self.build_codes_array(strings_data)
  # Same code extraction as get_codes, applied to raw
  # (extension-bearing) filenames.
  strings_data.map { |file| file.split("_")[-2] }
end
def self.uniq?(array)
  # true when the array contains no duplicate elements.
  array.uniq.size == array.size
end
# rename file:
# File.rename("../rename_me_1.txb", "../rename_me_01.txb")
end
<file_sep># This is a little thought experiment
## Background:
I have a list of video files, and most of them have companion subtitle files, but some are missing and I wanted a list of the missing ones, then I wanted to extract just a portion of the filename of the files shown not to have a match... then I thought 'hrm, would be useful to output to a text file...' and now I'm thinking, I'm at a decent stopping point.
`get_orphan_codes` compares the list of MP4 files to the list of subtitle files and creates a list of MP4s that are missing subtitles.
`update_code_list` compares a list of codes from newly acquired subtitle files to the previously created orphan list, removing entries that are no longer missing.
## Notes for me to remember how to use this down the road.
1. Have a file with a list of the files in it, they should each be on their own line, no additional punctuation needed.
2. Can run it from a folder different from the location of the input file and save the output in another file - just include the file locations in the filenames passed in i.e. "../input.txt"
3. It's currently hard coded to compare .mp4 files to .srt files with the assumption that the subtitle files are in the minority, and that there are potentially mp4 files that don't have an associated subtitle file.
4. Position refers to where in the array created when the string is split on underscores the string you want is located.
5. If you pass in an existing filename as your output file, the output file will be overwritten with the new data.
To run methods:
`ruby -r "./strip_codes" -e "StripCodes.get_orphan_codes '../230820_azul.txt', '../230820_azul_orphans'"`
## Pro Tips
- Clearing out the folder structure in a `file_list.txt` file in one sweep: search with the regex `^.*\\`, which matches any number of nested folder layers (e.g. `\folder 1\folder 2\`), and replace the matches with nothing. The script seems to throw out list items from the files that aren't relevant file names (I'm writing this ages after writing the original script - hence "seems").
## Next up/Ideas for changes and improvements:
- improve formatting on the file that's created
- Some files end up duplicated because the filenames are ever so slightly different
- second script to re-name the video files to match the subtitle files.
- should have y/n before changing the filename
- need to do this properly with classes and make this a proper app I guess.
- Could also make the position input an array - allowing a user to grab a series of items from the array that results from splitting the string on underscores.
- Currently the filenames split on an underscore, it might be interesting/wise/useful to make the split character(s) a variable.
| 08da1c516eae65de5c2f659b19c3caccf20da0bf | [
"Markdown",
"Ruby"
] | 2 | Ruby | DeuterGraves/code_splitter | 69c48e8c533f3af44709c9f39e09a0a82922d705 | 7c4ca66ea26ba1e07d38c9b663e4555ae1c28ea0 |
HEAD | <repo_name>cha63506/lib32<file_sep>/lib32-gettext/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# maintainer (x86_64): <NAME> <<EMAIL>>
# maintainer (x86_64): <NAME> <abveritas[at]chakra-project[dot]org>
#
# NOTE(review): this file contained unresolved git merge conflict markers
# (<<<<<<< / ======= / >>>>>>>), which make it unparsable by makepkg.
# Resolved in favour of the newer 0.19.4 side; the 0.18.1.1-specific
# expat sed hack and --enable-csharp were only present on the stale side.
_pkgbasename=gettext
pkgname=lib32-$_pkgbasename
pkgver=0.19.4
pkgrel=1
pkgdesc="GNU internationalization library (32-bit)"
arch=('x86_64')
url="http://www.gnu.org/software/gettext/"
license=('GPL')
depends=('lib32-acl' lib32-gcc-libs $_pkgbasename=$pkgver)
makedepends=(gcc-multilib)
options=(!libtool !docs)
source=(ftp://ftp.gnu.org/pub/gnu/gettext/${_pkgbasename}-${pkgver}.tar.gz{,.sig})
md5sums=('d3511af1e604a3478900d2c2b4a4a48e'
         'SKIP')
# NOTE(review): key fingerprint appears truncated/redacted in the repo —
# verify against the upstream gettext signing key before building.
validpgpkeys=(<KEY>05848ED7E69871)
build() {
  # force a 32-bit toolchain and 32-bit pkg-config search path
  export CC="gcc -m32"
  export CXX="g++ -m32"
  export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
  cd "${srcdir}/${_pkgbasename}-${pkgver}"
  ./configure --prefix=/usr --libdir=/usr/lib32
  make
}
check() {
  cd "${srcdir}/${_pkgbasename}-${pkgver}"
  make check
}
package() {
  cd "${srcdir}/${_pkgbasename}-${pkgver}"
  make DESTDIR="${pkgdir}" install
  # drop everything the 64-bit gettext package already ships
  rm -rf "${pkgdir}"/usr/{bin,include,share}
}
<file_sep>/lib32-libtxc-dxtn/PKGBUILD
#
# part of chakra-project.org
#
# Maintainer: <NAME> <franzmari[at]chakra-project[dot]it>
_pkgbasename=libtxc_dxtn
pkgname=lib32-$_pkgbasename
pkgver=1.0.1
pkgrel=1
arch=('x86_64')
pkgdesc="Texture compression library for Mesa (32-bit)"
url="http://dri.freedesktop.org/wiki/S3TC"
license=(custom:BSD)
depends=('lib32-glibc' $_pkgbasename=$pkgver)
makedepends=('gcc-multilib' 'lib32-mesa')
options=(!libtool)
source=(http://people.freedesktop.org/~cbrill/$_pkgbasename/$_pkgbasename-$pkgver.tar.bz2)
md5sums=('7105107f07ac49753f4b61ba9d0c79c5')
# build with a 32-bit compiler, installing libraries into /usr/lib32
build() {
cd "$_pkgbasename-$pkgver"
CC="gcc -m32" ./configure --prefix=/usr --libdir=/usr/lib32
make
}
package() {
cd "$_pkgbasename-$pkgver"
make DESTDIR="$pkgdir" install
# drop the development headers from the 32-bit package
rm -rf "$pkgdir/usr/include"
# License
# extract the license text from the header's comment block (lines 5-22)
mkdir -p "$pkgdir/usr/share/licenses/$pkgname"
sed -n '5,22{s|^ \* \?||;p}' txc_dxtn.h \
> "$pkgdir/usr/share/licenses/$pkgname/LICENSE"
}
<file_sep>/lib32-openal/PKGBUILD
# NOTE: it must be built without ffmpeg installed, at least until a lib32 package for ffmpeg is provided.
_pkgbasename=openal
pkgname=lib32-$_pkgbasename
pkgver=1.17.1
pkgrel=1
pkgdesc="A cross-platform 3D audio library (32-bit)"
arch=(x86_64)
url="http://www.openal.org/"
license=(LGPL)
depends=(lib32-glibc $_pkgbasename=$pkgver)
makedepends=(lib32-alsa-lib lib32-sdl lib32-libpulse lib32-portaudio pkgconfig cmake gcc-multilib)
source=(http://kcat.strangesoft.net/openal-releases/openal-soft-$pkgver.tar.bz2)
md5sums=('4e1cff46cdb3ac147745dea33ad92687')
build() {
# force a 32-bit toolchain and 32-bit pkg-config search path
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cd $_pkgbasename-soft-$pkgver/build
# LIB_SUFFIX=32 makes cmake install libraries into /usr/lib32
cmake -D CMAKE_INSTALL_PREFIX=/usr -D CMAKE_BUILD_TYPE=Release -D LIB_SUFFIX=32 ..
make
}
package() {
cd ${srcdir}/${_pkgbasename}-soft-${pkgver}/build
make DESTDIR=${pkgdir}/ install
# Remove files already provided by the 64-bit package.
rm -rf "${pkgdir}"/usr/{include,share,bin}
# Licenses.
mkdir -p "$pkgdir/usr/share/licenses"
ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/lib32-libvdpau/PKGBUILD
_pkgbasename=libvdpau
pkgname=lib32-$_pkgbasename
pkgver=1.1.1
pkgrel=1
pkgdesc='Nvidia VDPAU library'
arch=('x86_64')
url='http://cgit.freedesktop.org/~aplattner/libvdpau'
license=('custom')
depends=('lib32-gcc-libs' 'lib32-libxext' $_pkgbasename=$pkgver)
makedepends=('dri2proto')
source=("http://people.freedesktop.org/~aplattner/vdpau/${_pkgbasename}-${pkgver}.tar.bz2")
sha256sums=('857a01932609225b9a3a5bf222b85e39b55c08787d0ad427dbd9ec033d58d736')
build() {
cd ${srcdir}/${_pkgbasename}-${pkgver}
# force a 32-bit toolchain and 32-bit pkg-config search path
export CC='gcc -m32'
export CXX='g++ -m32'
export PKG_CONFIG_PATH='/usr/lib32/pkgconfig'
./configure --prefix='/usr' --libdir='/usr/lib32' --sysconfdir='/etc'
make
}
package() {
cd ${srcdir}/${_pkgbasename}-${pkgver}
make DESTDIR="${pkgdir}" install
# drop files the 64-bit package already ships; keep only the license link
rm -rf "${pkgdir}"/{etc,usr/include}
install -dm 755 "${pkgdir}"/usr/share/licenses
ln -s ${pkgname#*-} "${pkgdir}"/usr/share/licenses/${pkgname}
}
<file_sep>/winetricks/PKGBUILD
pkgname=winetricks
pkgver=20160329
pkgrel=1
pkgdesc='Script to install various redistributable runtime libraries in Wine.'
url='http://wiki.winehq.org/winetricks'
license=('LGPL')
arch=('any')
depends=('wine' 'cabextract' 'unzip' 'xorg-apps')
optdepends=('kde-baseapps-kdialog: GUI for KDE desktop')
source=(https://github.com/Winetricks/winetricks/archive/${pkgver}.tar.gz)
md5sums=('2e6eb491d2a36498aeefb62b01f9b7f9')
# no build() needed: winetricks is a plain shell script
package() {
cd winetricks-${pkgver}
make DESTDIR="${pkgdir}" install
}
<file_sep>/lib32-libglvnd/PKGBUILD
# Lib32 Packages for Chakra, part of chakraos.org
pkgname=lib32-libglvnd
pkgver=0.1.0.20160411
_commit=5a69af6
pkgrel=1
pkgdesc="The GL Vendor-Neutral Dispatch library"
arch=('x86_64')
url="https://github.com/NVIDIA/libglvnd"
license=('custom:BSD-like')
depends=('lib32-libxext' 'libglvnd')
makedepends=('xorg-server-devel' 'git' 'python2' 'lib32-gcc-libs' )
# build from a pinned git commit; checksum is skipped for VCS sources
source=("git+https://github.com/NVIDIA/libglvnd.git#commit=$_commit")
sha1sums=('SKIP')
build() {
export CC='gcc -m32'
export CXX='g++ -m32'
export PKG_CONFIG_PATH='/usr/lib32/pkgconfig'
cd libglvnd
./autogen.sh
./configure --prefix=/usr --libdir=/usr/lib32/ --build=i686-unknown-linux-gnu
make
}
package() {
cd libglvnd
make DESTDIR="${pkgdir}" install
rm -r "$pkgdir/usr/include"
# For compatibility with older nvidia drivers for bumblebee
# Symlinks to /usr/lib will be in nvidia-libgl
mkdir "$pkgdir/usr/lib32/nvidia"
for _lib in libGL.so libGLESv1_CM.so libGLESv2.so; do
mv "$pkgdir"/usr/lib32/$_lib* "$pkgdir"/usr/lib32/nvidia/
done
mkdir -p "$pkgdir/usr/share/licenses"
ln -s libglvnd "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/lib32-orc/PKGBUILD
_pkgbasename=orc
pkgname=lib32-${_pkgbasename}
# NOTE(review): "Oild" in pkgdesc below is presumably a typo for "Oil"
pkgver=0.4.24
pkgrel=1
pkgdesc="The Oild Runtime Compiler. Multilib"
arch=('x86_64')
license=('custom')
url='http://code.entropywave.com/projects/orc/'
depends=('lib32-glibc' $_pkgbasename=$pkgver)
makedepends=('valgrind')
source=("http://gstreamer.freedesktop.org/data/src/orc/orc-${pkgver}.tar.xz")
md5sums=('9e793ec34c0e20339659dd4bbbf62135')
build() {
cd orc-${pkgver}
# multilib
export CC='gcc -m32'
export CXX='g++ -m32'
export PKG_CONFIG_PATH='/usr/lib32/pkgconfig'
./configure --prefix=/usr \
--libdir=/usr/lib32 \
--disable-static
make
}
package() {
cd "$srcdir/$_pkgbasename-$pkgver"
make DESTDIR="$pkgdir/" install
# Removing unneeded files
rm -rf ${pkgdir}/usr/{bin,include,share}
# license
install -Dm644 COPYING \
"$pkgdir/usr/share/licenses/$pkgname/COPYING"
}
<file_sep>/lib32-sni-qt/PKGBUILD
#
# Contributor: <NAME> <<EMAIL>>
# Contributor: <NAME> <<EMAIL>>
pkgname=lib32-sni-qt
_pkgname=sni-qt
pkgver=0.2.6
pkgrel=3
pkgdesc='Qt4 plugin which turns all QSystemTrayIcon into StatusNotifierItems (appindicators)'
arch=('i686' 'x86_64')
url='https://launchpad.net/sni-qt'
license=('LGPL3')
depends=('lib32-libdbusmenu-qt' "${_pkgname}=${pkgver}")
makedepends=('cmake')
# ships a local qconfig.h alongside the upstream tarball (see prepare)
source=("http://launchpad.net/${_pkgname}/trunk/${pkgver}/+download/${_pkgname}-${pkgver}.tar.bz2" "qconfig.h")
md5sums=('e84c66e776f7c22c961817387f618c81'
'8bffe79f81780c300a9a55264badb111')
prepare() {
mkdir -p build
# Disable building tests
sed -i '/tests/ d' ${_pkgname}-${pkgver}/CMakeLists.txt
# drop the bundled qconfig.h into the source tree
mkdir -p ${_pkgname}-${pkgver}/src/QtCore
cp ../${source[1]} ${_pkgname}-${pkgver}/src/QtCore
}
build() {
cd build
export CC="gcc -m32"
# NOTE(review): trailing space after "-m32 " looks unintentional — verify
export CXX="g++ -m32 "
cmake ../${_pkgname}-${pkgver} \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_BUILD_TYPE=Release \
-DQT_PLUGINS_DIR=/usr/lib32/qt/plugins
make
}
package() {
cd build
make DESTDIR="${pkgdir}" install
}
<file_sep>/lib32-libgcrypt/PKGBUILD
_pkgbasename=libgcrypt
pkgname=lib32-$_pkgbasename
pkgver=1.6.5
pkgrel=1
pkgdesc="A general purpose crypto library based on the code used (32-bit)"
arch=(x86_64)
url="http://www.gnupg.org"
license=('LGPL')
depends=('lib32-libgpg-error>=1.10' $_pkgbasename=$pkgver)
makedepends=('lib32-gcc-libs' 'libtool-multilib')
source=("ftp://ftp.gnupg.org/gcrypt/${_pkgbasename}/${_pkgbasename}-${pkgver}.tar.bz2")
sha1sums=('c3a5a13e717f7b3e3895650afc1b6e0d3fe9c726')
build() {
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cd ${srcdir}/${_pkgbasename}-${pkgver}
# Use 32-bit assembler
# patch the assembler path list from amd64 to the 32-bit variants
sed 's:path="amd64":path="i586 i386":' -i mpi/config.links
# keep static library for , needed for cryptsetup
./configure --prefix=/usr \
--disable-padlock-support \
--libdir=/usr/lib32 \
--enable-shared
make
}
check() {
cd ${srcdir}/${_pkgbasename}-${pkgver}
make check
}
package() {
cd ${srcdir}/${_pkgbasename}-${pkgver}
make DESTDIR=${pkgdir} install
# drop everything the 64-bit package already ships
rm -rf "${pkgdir}"/usr/{include,share,bin,sbin}
}
<file_sep>/lib32-libgusb/PKGBUILD
_pkgbasename=libgusb
pkgname=lib32-$_pkgbasename
pkgver=0.2.6
pkgrel=1
pkgdesc="GLib wrapper around libusb1 (32 bit)"
arch=(x86_64)
url="https://gitorious.org/gusb/"
license=(LGPL2.1)
depends=('lib32-glib2' 'lib32-systemd' 'lib32-libusb' $_pkgbasename=$pkgver)
makedepends=('gcc-multilib' 'gobject-introspection')
source=(http://people.freedesktop.org/~hughsient/releases/$_pkgbasename-$pkgver.tar.xz)
sha256sums=('6f638bdbc4e77643c0a198a300e50b592ad326e56bddf359de15e04689209c06')
build() {
# force a 32-bit toolchain and 32-bit pkg-config search path
export CC='gcc -m32'
export CXX='g++ -m32'
export PKG_CONFIG_PATH='/usr/lib32/pkgconfig'
cd $_pkgbasename-$pkgver
./configure --prefix=/usr --disable-static --libdir=/usr/lib32
make
}
package() {
make -C $_pkgbasename-$pkgver DESTDIR="${pkgdir}" install
# headers and shared data come from the 64-bit package
rm -rf "${pkgdir}"/usr/{include,share}
}
<file_sep>/lib32-libxxf86vm/PKGBUILD
# Part of the X.org group
# maintainer (x86_64): <NAME> <<EMAIL>>
# contributor (x86_64): <NAME> <abveritas[at]chakra-project[dot]org>
#
# NOTE(review): this file contained unresolved git merge conflict markers
# (<<<<<<< / ======= / >>>>>>>) around pkgver/pkgrel and the checksum,
# which make it unparsable by makepkg. Resolved in favour of the newer
# 1.1.3 side, whose md5sums entry matches the 1.1.3 tarball.
_pkgbasename=libxxf86vm
pkgname=lib32-$_pkgbasename
pkgver=1.1.3
pkgrel=1
pkgdesc="X11 XFree86 video mode extension library (32-bit)"
arch=('x86_64')
license=('custom')
url="http://xorg.freedesktop.org/"
depends=('lib32-libxext' 'xf86vidmodeproto' $_pkgbasename=$pkgver)
makedepends=('xorg-util-macros' gcc-multilib)
options=('!libtool')
source=(${url}/releases/individual/lib/libXxf86vm-${pkgver}.tar.bz2)
md5sums=('e46f6ee4f4567349a3189044fe1bb712')
build() {
  # force a 32-bit toolchain and 32-bit pkg-config search path
  export CC="gcc -m32"
  export CXX="g++ -m32"
  export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
  cd "${srcdir}/libXxf86vm-${pkgver}"
  ./configure --prefix=/usr --disable-static \
    --libdir=/usr/lib32
  make
}
package() {
  cd "${srcdir}/libXxf86vm-${pkgver}"
  make DESTDIR="${pkgdir}" install
  # drop everything the 64-bit package already ships; keep a license link
  rm -rf "${pkgdir}"/usr/{include,share,bin}
  mkdir -p "$pkgdir/usr/share/licenses"
  ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/lib32-libdrm/PKGBUILD
# Maintainer (x86_64): <NAME> <<EMAIL>>
# Contributor (x86_64): AlmAck <<EMAIL>>
# PKGBUILD for lib32-libdrm: 32-bit userspace DRM interface library.
_pkgbasename=libdrm
pkgname=lib32-$_pkgbasename
pkgver=2.4.67
pkgrel=1
pkgdesc="Userspace interface to kernel DRM services (32-bit)"
arch=(x86_64)
license=('custom')
depends=('lib32-libpciaccess' 'lib32-systemd' $_pkgbasename=$pkgver)
makedepends=(gcc-multilib xorg-util-macros)
checkdepends=('lib32-cairo')
url="http://dri.freedesktop.org/"
# The .sig companion is fetched for PGP verification ('SKIP' checksum).
source=(http://dri.freedesktop.org/${_pkgbasename}/${_pkgbasename}-${pkgver}.tar.bz2{,.sig})
sha256sums=('ee5b71e1113be37544d0752681c12f040c01f782e2933df7d7bc21fd0d10cebe'
'SKIP')
validpgpkeys=('B97BD6A80CAC4981091AE547FE558C72A67013C3') # <NAME>ankhorst <<EMAIL>>
validpgpkeys+=('215DEE688925CCB965BE5DA97C03D7797B6E1AE2') # <NAME> <<EMAIL>>
validpgpkeys+=('<KEY>') # <NAME> <<EMAIL>>
validpgpkeys+=('8703B6700E7EE06D7A39B8D6EDAE37B02CEB490D') # <NAME> <<EMAIL>>
validpgpkeys+=('D6285B5E899299F3DA746184191C9B905522B045') # <NAME> <<EMAIL>>
# Drop the pthread-stubs dependency, regenerate autotools, build 32-bit.
build() {
cd "${srcdir}/${_pkgbasename}-${pkgver}"
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
# pthread is useless in Linux
sed -i "/pthread-stubs/d" configure.ac
autoreconf --force --install
./configure --prefix=/usr \
--libdir=/usr/lib32 \
--enable-udev
make
}
# -k: keep going so all test failures are reported in one run.
check() {
cd "${srcdir}/${_pkgbasename}-${pkgver}"
make -k check
}
# Install libs only; license symlinks to the native libdrm package.
package() {
cd "${srcdir}/${_pkgbasename}-${pkgver}"
make DESTDIR="${pkgdir}" install
rm -rf "${pkgdir}"/usr/{include,share,bin}
mkdir -p "$pkgdir/usr/share/licenses"
ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/lib32-systemd/PKGBUILD
# PKGBUILD for lib32-systemd: only the 32-bit systemd libraries are
# shipped; the daemon itself comes from the native package.
pkgname=lib32-systemd
_pkgbasename=systemd
pkgver=229
pkgrel=1
pkgdesc="system and service manager (32-bit)"
arch=('x86_64')
url="http://www.freedesktop.org/wiki/Software/systemd"
license=('GPL2' 'LGPL2.1' 'MIT')
depends=('lib32-libgcrypt' 'lib32-xz' 'lib32-libcap' 'lib32-acl' 'lib32-libidn' 'lib32-gcc-libs' "$_pkgbasename=$pkgver")
makedepends=('lib32-gcc-libs' 'gcc-multilib' 'lib32-libidn' 'lib32-glib2' 'intltool' 'gperf'
'lib32-curl' 'lib32-bzip2' 'git')
# Sources come straight from the upstream release tag.
source=("git+https://github.com/systemd/systemd.git#tag=v$pkgver")
md5sums=('SKIP')
prepare() {
cd "$_pkgbasename"
./autogen.sh
}
# Configure a heavily stripped-down 32-bit build: every optional
# daemon/feature is disabled because only libsystemd is packaged.
build() {
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cd "${_pkgbasename}"
local timeservers=({0..3}.chakra.pool.ntp.org)
./configure \
--libexecdir=/usr/lib32 \
--libdir=/usr/lib32 \
--localstatedir=/var \
--sysconfdir=/etc \
--disable-audit \
--disable-tests \
--disable-ima \
--disable-seccomp \
--disable-pam \
--disable-kmod \
--disable-networkd \
--disable-blkid \
--disable-libiptc \
--disable-lz4 \
--disable-manpages \
--without-python \
--disable-libcryptsetup \
--disable-microhttpd \
--disable-qrencode \
--disable-xkbcommon \
--with-sysvinit-path= \
--with-sysvrcnd-path= \
--with-ntp-servers="${timeservers[*]}"
make
}
# Remove everything except /usr/lib32; config, binaries and docs are
# owned by the native systemd package.
package() {
cd "${_pkgbasename}"
make DESTDIR="$pkgdir" install
rm -rf "${pkgdir}"/{etc,var}
rm -rf "${pkgdir}"/usr/{bin,include,lib,share}
install -m755 -d "${pkgdir}/usr/share/licenses"
ln -s systemd "$pkgdir/usr/share/licenses/lib32-systemd"
}
<file_sep>/lib32-gtk3/lib32-gtk3/PKGBUILD
# Ported from CCR: kote <<EMAIL>>
# PKGBUILD for lib32-gtk3: 32-bit GTK+3 toolkit libraries.
_pkgbasename=gtk3
pkgname=lib32-$_pkgbasename
pkgver=3.10.9
pkgrel=1
pkgdesc="GObject-based multi-platform GUI toolkit (v3) (32-bit)"
arch=('x86_64')
license=('LGPL')
url="http://www.gtk.org/"
depends=('lib32-atk' 'lib32-gdk-pixbuf2' 'lib32-libxcomposite'
'lib32-libcups' 'lib32-libxcursor' 'lib32-libxdamage'
'lib32-libxi' 'lib32-libxinerama' 'lib32-libxrandr'
'lib32-pango' 'lib32-cairo' 'lib32-keyutils'
'lib32-krb5' 'lib32-e2fsprogs' 'lib32-at-spi2-atk'
'lib32-colord' ${_pkgbasename}=${pkgver} 'lib32-libxkbcommon' 'lib32-wayland' 'lib32-glib2')
makedepends=('gcc-multilib')
options=('!docs')
install=gtk3.install
source=("http://ftp.gnome.org/pub/gnome/sources/gtk+/${pkgver%.*}/gtk+-$pkgver.tar.xz")
sha256sums=('bd05caf5b2eea1d63e721daa990f0c9d86ecbdc8de91480c53b3b7d16a6e43ba')
# CXX=/bin/false: the C++ compiler is deliberately disabled so configure
# does not pick up a (useless here) C++ toolchain.
build() {
export CC="gcc -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cd "gtk+-${pkgver}"
CXX=/bin/false ./configure --prefix=/usr \
--sysconfdir=/etc \
--localstatedir=/var \
--enable-gtk2-dependency \
--disable-schemas-compile \
--enable-introspection=no \
--libdir=/usr/lib32
#--enable-wayland-backend see,s broken with 3.10.9 enable later
#https://bugzilla.gnome.org/show_bug.cgi?id=655517
sed -i -e 's/ -shared / -Wl,-O1,--as-needed\0/g' libtool
make
}
# Keep only gtk-query-immodules (renamed with a -32 suffix to avoid
# clashing with the native binary) plus the lib32 libraries.
package() {
cd "gtk+-${pkgver}"
make DESTDIR="${pkgdir}" install
mv "${pkgdir}"/usr/bin/gtk-query-immodules-3.0{,-32}
rm "${pkgdir}"/usr/bin/gtk3-{demo-application,widget-factory}
rm -rf "${pkgdir}"/usr/{include,share,bin/{gtk3-demo,gtk-launch}} "${pkgdir}/etc"
}
<file_sep>/lib32-boost-libs/PKGBUILD
# Maintainer : <NAME> < <EMAIL> >
# Contributor : <NAME> <<EMAIL>>
# Contributor : andy123 < ajs AT online DOT de >
# PKGBUILD for lib32-boost-libs: 32-bit Boost runtime shared libraries,
# built via Boost's own b2 tool and staged before packaging.
_pkgbasename=boost-libs
pkgname=lib32-$_pkgbasename
pkgver=1.60.0
_boostver=${pkgver//./_}
pkgrel=1
url="http://www.boost.org"
arch=('x86_64')
pkgdesc="Free peer-reviewed portable C++ source libraries - Runtime (32 bit)"
license=('custom')
groups=('lib32')
depends=('lib32-bzip2' 'lib32-zlib' 'lib32-icu' 'lib32-gcc-libs' "$_pkgbasename=$pkgver")
makedepends=('lib32-icu>=56.1' 'lib32-bzip2' 'lib32-zlib' 'gcc-multilib' 'python3' 'python2')
source=(http://downloads.sourceforge.net/sourceforge/boost/boost_${_boostver}.tar.gz
# upstream patches
cuda_float128.patch::"https://github.com/boostorg/config/commit/a332112317450457c715675686386ec81214b863.patch")
sha1sums=('ac74db1324e6507a309c5b139aea41af624c8110'
'bdc6486e0d90368bbfd872fed8ee3d3c73483933')
prepare() {
cd "${srcdir}/boost_${_boostver}"
# fix https://svn.boost.org/trac/boost/ticket/11852
patch -p2 -i ../cuda_float128.patch
}
# Bootstrap b2, then build 32-bit (address-model=32, -m32 flags) shared
# release libraries into a staging dir consumed by package().
build() {
export CC="gcc"
export CFLAGS="-m32"
export CXX="g++"
export CXXFLAGS="-m32"
export LDFLAGS="-m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
export _stagedir="${srcdir}/stagedir"
# Extract any -jN flag from MAKEFLAGS so b2 parallelizes the same way.
local JOBS="$(sed -e 's/.*\(-j *[0-9]\+\).*/\1/' <<< ${MAKEFLAGS})"
cd "${srcdir}/boost_${_boostver}"
./bootstrap.sh --with-toolset=gcc --with-icu --with-python=
_bindir="bin.linuxx86_64"
install -d -m 755 "${_stagedir}"/bin
install "${srcdir}"/boost_${_boostver}/tools/build/src/engine/${_bindir}/b2 "${_stagedir}"/bin/b2
# boostbook is needed by quickbook
install -d -m 755 "${_stagedir}"/share/boostbook
cp -a tools/boostbook/{xsl,dtd} "${_stagedir}"/share/boostbook/
# default "minimal" install: "release link=shared,static
# runtime-link=shared threading=single,multi"
# --layout=tagged will add the "-mt" suffix for multithreaded libraries
# and installs includes in /usr/include/boost.
# --layout=system no longer adds the -mt suffix for multi-threaded libs.
# install to ${_stagedir} in preparation for split packaging
"${_stagedir}"/bin/b2 \
variant=release \
debug-symbols=off \
threading=multi \
runtime-link=shared \
link=shared \
toolset=gcc \
address-model=32 \
cflags="${CPPFLAGS} ${CFLAGS} -O3" linkflags="${LDFLAGS}" \
--without-python \
--without-mpi \
--layout=system \
--prefix="${_stagedir}" \
${JOBS} \
install
}
# Copy only the staged shared libraries plus the Boost license.
package() {
_stagedir="${srcdir}/stagedir"
install -d -m 755 "${pkgdir}/usr/lib32"
cp -a "${_stagedir}"/lib/*.so{,.*} "${pkgdir}/usr/lib32/"
install -D -m 644 "${srcdir}/boost_${_boostver}/LICENSE_1_0.txt" \
"${pkgdir}"/usr/share/licenses/lib32-boost-libs/LICENSE_1_0.txt
}
<file_sep>/lib32-nettle/PKGBUILD
# contributions from Arch: https://www.archlinux.org/packages/multilib/x86_64/lib32-nettle/
# PKGBUILD for lib32-nettle: 32-bit low-level crypto library.
_pkgbasename=nettle
pkgname=lib32-$_pkgbasename
pkgver=3.2
pkgrel=1
pkgdesc="A low-level cryptographic library (32-bit)"
arch=('x86_64')
url="http://www.lysator.liu.se/~nisse/nettle/"
license=('GPL2')
depends=('lib32-gmp' $_pkgbasename=$pkgver)
makedepends=('lib32-gcc-libs')
source=(ftp://ftp.gnu.org/gnu/nettle/$_pkgbasename-$pkgver.tar.gz)
md5sums=('afb15b4764ebf1b4e6d06c62bd4d29e4')
validpgpkeys=('343C2FF0FBEE5EC2EDBEF399F3599FF828C67298') # <NAME> <<EMAIL>>
# 32-bit build; the extra include path points at the lib32 GMP headers.
build() {
cd "$srcdir/$_pkgbasename-$pkgver"
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
./configure --prefix=/usr --libdir=/usr/lib32 \
--enable-shared --with-include-path=/usr/lib32/gmp
make
}
# -k: report all test failures rather than stopping at the first.
check() {
cd "$srcdir/$_pkgbasename-$pkgver"
make -k check
}
package() {
cd "$srcdir/$_pkgbasename-$pkgver"
make DESTDIR="$pkgdir/" install
rm -rf "${pkgdir}"/usr/{include,share,bin}
}
<file_sep>/lib32-smpeg2/PKGBUILD
# maintainer: <NAME> <<EMAIL>>
# maintainer: <NAME> <<EMAIL>>
# PKGBUILD for lib32-smpeg2: 32-bit SDL2 MPEG player library from SVN tag.
_pkgbasename=smpeg2
pkgname=lib32-$_pkgbasename
pkgver=2.0.0
pkgrel=1
pkgdesc="SDL2 MPEG Player Library (32-bit)"
arch=('i686' 'x86_64')
url="http://icculus.org/smpeg/"
license=('LGPL')
depends=('lib32-sdl2' $_pkgbasename=$pkgver)
makedepends=('lib32-mesa' 'lib32-glu' 'svn' 'gcc-multilib')
optdepends=('lib32-glu: to use glmovie')
options=('!libtool')
source=("${_pkgbasename}::svn://svn.icculus.org/smpeg/tags/release_${pkgver//./_}")
md5sums=('SKIP')
# Point the config script at sdl2-config, regenerate autotools, build
# 32-bit shared libraries only.
build() {
cd ${srcdir}/${_pkgbasename}
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
sed -i s/SDL_CONFIG/SDL2_CONFIG/g smpeg2-config.in
./autogen.sh
./configure --prefix=/usr \
--mandir=/usr/share/man \
--disable-static \
--libdir=/usr/lib32
make
}
package() {
cd ${srcdir}/${_pkgbasename}
make DESTDIR=${pkgdir} install
rm -rf "${pkgdir}"/usr/{include,share,bin}
}
<file_sep>/lib32-catalyst-legacy-utils/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
_pkgsourcename=catalyst-utils
pkgname=lib32-catalyst-legacy-utils
pkgver=13.1
pkgrel=2
pkgdesc="AMD/ATI catalyst driver utilities and libraries. (32-bit)"
url="http://www.ati.amd.com"
arch=(x86_64)
license=('custom')
depends=('lib32-libxext' 'lib32-libdrm' "catalyst-legacy-utils=${pkgver}")
conflicts=('lib32-libgl' 'lib32-nvidia-utils')
replaces=('lib32-ati-fglrx-utils' 'lib32-fglrx-utils')
provides=('lib32-libgl')
source=("http://www2.ati.com/drivers/legacy/amd-driver-installer-catalyst-${pkgver}-legacy-linux-x86.x86_64.zip"
"lib32-catalyst.sh")
md5sums=('c07fd1332abe4c742a9a0d0e0d0a90de'
'af7fb8ee4fc96fd54c5b483e33dc71c4')
build() {
# Unpack archive
/bin/sh ./amd-driver-installer-catalyst-${pkgver}-legacy-linux-x86.x86_64.run --extract archive_files
}
package() {
cd ${srcdir}
install -D -m755 lib32-catalyst.sh ${pkgdir}/etc/profile.d/lib32-catalyst.sh
# Install lib32 libraries
cd "${srcdir}/archive_files/arch/x86/usr"
install -dm755 "${pkgdir}/usr/lib32"
install -dm755 "${pkgdir}/usr/lib32/fglrx"
install -dm755 "${pkgdir}/usr/lib32/xorg/modules/dri"
install -m755 lib/*.so* "${pkgdir}/usr/lib32/"
install -m755 X11R6/lib/fglrx/fglrx-libGL.so.1.2 "${pkgdir}/usr/lib32/fglrx"
ln -sf /usr/lib32/fglrx/fglrx-libGL.so.1.2 "${pkgdir}/usr/lib32/fglrx/libGL.so.1.2" # since 11.4
ln -sf /usr/lib32/fglrx/fglrx-libGL.so.1.2 "${pkgdir}/usr/lib32/fglrx-libGL.so.1.2" # since 11.4
ln -sf /usr/lib32/fglrx/fglrx-libGL.so.1.2 "${pkgdir}/usr/lib32/libGL.so.1.2" # since 11.4
ln -sf /usr/lib32/fglrx/fglrx-libGL.so.1.2 "${pkgdir}/usr/lib32/libGL.so.1" # since 11.4
ln -sf /usr/lib32/fglrx/fglrx-libGL.so.1.2 "${pkgdir}/usr/lib32/libGL.so" # since 11.4
install -m755 X11R6/lib/libAMDXvBA.so.1.0 "${pkgdir}/usr/lib32/"
install -m755 X11R6/lib/libatiadlxx.so "${pkgdir}/usr/lib32/"
install -m755 X11R6/lib/libfglrx_dm.so.1.0 "${pkgdir}/usr/lib32/"
install -m755 X11R6/lib/libXvBAW.so.1.0 "${pkgdir}/usr/lib32/"
install -m755 X11R6/lib/modules/dri/*.so "${pkgdir}/usr/lib32/xorg/modules/dri/"
ln -snf /usr/lib32/xorg/modules/dri "${pkgdir}/usr/lib32/dri"
cd "$pkgdir/usr/lib32/"
ln -sf libfglrx_dm.so.1.0 libfglrx_dm.so.1
ln -sf libAMDXvBA.so.1.0 libAMDXvBA.so.1
ln -sf libXvBAW.so.1.0 libXvBAW.so.1
ln -sf libatiuki.so.1.0 libatiuki.so.1
ln -sf libatiuki.so.1.0 libatiuki.so
ln -sf libOpenCL.so.1 libOpenCL.so
# since 12.8
install -m755 -d "${pkgdir}/etc/OpenCL/vendors"
install -m644 "${srcdir}/archive_files/arch/x86/etc/OpenCL/vendors/amdocl32.icd" "${pkgdir}/etc/OpenCL/vendors/"
# License
install -m755 -d "${pkgdir}/usr/share/licenses/${pkgname}"
install -m644 "${srcdir}/archive_files/LICENSE.TXT" "${pkgdir}/usr/share/licenses/${pkgname}/"
}
<file_sep>/lib32-atk/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# PKGBUILD for lib32-atk: 32-bit accessibility toolkit library.
_pkgbasename=atk
pkgname=lib32-$_pkgbasename
pkgver=2.15.4
pkgrel=1
pkgdesc="A library providing a set of interfaces for accessibility (32-bit)"
arch=('x86_64')
license=('LGPL')
depends=('lib32-glib2' $_pkgbasename=$pkgver)
makedepends=('gcc-multilib')
source=(http://ftp.gnome.org/pub/gnome/sources/${_pkgbasename}/${pkgver%.*}/${_pkgbasename}-${pkgver}.tar.xz)
url='http://www.gtk.org/'
sha256sums=('0dddfa73a02178ca21a8de172c86d699aa887b4efeec736b4c8721eee4ac349c')
# Introspection is disabled: typelibs are provided by the native atk.
build() {
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cd "${srcdir}/${_pkgbasename}-${pkgver}"
./configure --prefix=/usr \
--libdir=/usr/lib32 \
--disable-introspection
make
}
package() {
cd "${srcdir}/${_pkgbasename}-${pkgver}"
make DESTDIR="${pkgdir}" install
rm -rf "${pkgdir}"/usr/{include,share}
}
<file_sep>/lib32-gtk3/lib32-polkit/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# Maintainer: AlmAck <<EMAIL>>
# Contributor: jtts
# Lib32 Packages for Chakra, part of chakra-project.org
#
# Maintainer: AlmAck <<EMAIL>>
# Contributor: jtts
# PKGBUILD for lib32-polkit: 32-bit polkit client libraries only.
_pkgbasename=polkit
pkgname=lib32-$_pkgbasename
pkgver=0.112
pkgrel=2
pkgdesc="Application development toolkit for controlling system-wide privileges (32-bit)"
arch=('x86_64')
license=(LGPL)
url="http://www.freedesktop.org/wiki/Software/polkit"
depends=('lib32-glib2' 'lib32-pam' 'lib32-expat' 'lib32-systemd' 'lib32-js' $_pkgbasename=$pkgver)
makedepends=('intltool')
# Not needed. This is a lib32-package.
#install=polkit.install
#conflicts=(js185 lib32-js185)
source=(http://www.freedesktop.org/software/polkit/releases/$_pkgbasename-$pkgver.tar.gz)
# Not needed. This is a lib32-package.
# polkit.pam)
sha256sums=('d695f43cba4748a822fbe864dd32c4887c5da1c71694a47693ace5e88fcf6af6')
#md5sums=('b0f2fa00a55f47c6a5d88e9b73f80127')
# '6564f95878297b954f0572bc1610dd15')
# add --with-mozjs=mozjs-17.0 \ when lib32-js is updated > 17.0.0
# 32-bit flags are passed as configure arguments rather than exported.
build() {
cd $_pkgbasename-$pkgver
./configure --prefix=/usr --sysconfdir=/etc --localstatedir=/var \
--libdir=/usr/lib32 --libexecdir=/usr/lib32/polkit-1 \
--with-systemdsystemunitdir=/usr/lib/systemd/system \
--disable-static --enable-introspection=no --enable-libsystemd-login=yes \
--enable-man-pages=no --disable-gtk-doc \
CC="gcc -m32" CXX="g++ -m32" PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
make
}
# Only /usr/lib32 survives; daemon/config files come from native polkit.
package() {
cd $_pkgbasename-$pkgver
make DESTDIR="$pkgdir" install
# Not needed. This is a lib32-package.
#chown 102 "$pkgdir/etc/polkit-1/rules.d"
#chown 102 "$pkgdir/usr/share/polkit-1/rules.d"
#
#install -m644 "$srcdir/polkit.pam" "$pkgdir/etc/pam.d/polkit-1"
# cleanup for lib32 package
rm -rf $pkgdir/{etc,usr/{bin,lib,include,share}}
}
<file_sep>/lib32-cracklib/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# Maintainer: AlmAck <<EMAIL>>
# Contributor: josephgbr <<EMAIL> at gmail dot com>
# Lib32 Packages for Chakra, part of chakra-project.org
#
# Maintainer: AlmAck <<EMAIL>>
# Contributor: josephgbr <<EMAIL> at gmail dot com>
# PKGBUILD for lib32-cracklib: 32-bit password-checking library.
_pkgbasename=cracklib
pkgname=lib32-$_pkgbasename
pkgver=2.9.6
pkgrel=1
pkgdesc="Password Checking Library (32 bit)"
arch=('x86_64')
url="http://sourceforge.net/projects/cracklib"
license=('LGPL')
depends=('lib32-glibc' 'lib32-zlib' $_pkgbasename=$pkgver)
makedepends=('gcc-multilib')
options=('!libtool')
source=(https://github.com/cracklib/cracklib/releases/download/${_pkgbasename}-${pkgver}/${_pkgbasename}-${pkgver}.tar.gz)
md5sums=('c52f463585d85924b28cdc1e373ae06d')
# Python bindings disabled; CC override makes the build 32-bit.
build() {
cd "$srcdir/$_pkgbasename-$pkgver"
./configure --prefix=/usr --libdir=/usr/lib32 --without-python CC='gcc -m32'
make
}
package() {
cd "$srcdir/$_pkgbasename-$pkgver"
make DESTDIR="$pkgdir" install
rm -rf "${pkgdir}/usr"/{include,sbin,share}
}
<file_sep>/skype/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
# Lib32 Packages for Chakra, part of chakra-project.org
# PKGBUILD for skype: repackages the prebuilt 32-bit Skype binary;
# there is no build() step, only installation of vendor files.
pkgname=skype
pkgver=4.3.0.37
pkgrel=2
arch=('x86_64')
pkgdesc="P2P software for high-quality voice communication"
url="http://www.skype.com/"
license=('custom')
# !strip: vendor binary must not be stripped.
options=('!strip')
install=${pkgname}.install
depends=('xdg-utils' 'hicolor-icon-theme' lib32-{qt,alsa-lib,libxss,libxv,libxcursor,libpulse})
optdepends=('lib32-libcanberra: XDG sound support'
'lib32-qtcurve-qt4: QtCurve integration')
source=("http://download.skype.com/linux/${pkgname}-${pkgver}.tar.bz2"
'PERMISSION')
sha256sums=('8c99dc3978a588fb13303df7c8134379fb55b8dd460efefbc79ae594269b892d'
'20b2755151bd24ae0f759a565744faea95942e6729f9d02e83bf2b633e9306fd')
package() {
cd ${pkgname}-${pkgver}
# Executable
install -D skype "${pkgdir}/usr/bin/skype"
# Data
mkdir -p "${pkgdir}"/usr/share/skype/{avatars,lang,sounds}
install -m 644 avatars/* "${pkgdir}/usr/share/skype/avatars"
install -m 644 lang/* "${pkgdir}/usr/share/skype/lang"
install -m 644 sounds/* "${pkgdir}/usr/share/skype/sounds"
# DBus Service
install -Dm 644 skype.conf \
"${pkgdir}/etc/dbus-1/system.d/skype.conf"
# Icons
for _i in 16 32 48; do
install -Dm 644 icons/SkypeBlue_${_i}x${_i}.png \
"${pkgdir}/usr/share/icons/hicolor/${_i}x${_i}/skype.png"
done
install -Dm 644 icons/SkypeBlue_48x48.png \
"${pkgdir}/usr/share/pixmaps/skype.png"
# Desktop file
install -Dm 644 skype.desktop \
"${pkgdir}/usr/share/applications/skype.desktop"
# License
install -Dm 644 LICENSE \
"${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
install -Dm 644 "$srcdir/PERMISSION" \
"${pkgdir}/usr/share/licenses/${pkgname}/PERMISSION"
}
<file_sep>/lib32-fluidsynth/PKGBUILD
# PKGBUILD for lib32-fluidsynth: 32-bit software synthesizer library,
# built with CMake (LIB_SUFFIX=32 targets /usr/lib32).
_pkgbasename=fluidsynth
pkgname=lib32-${_pkgbasename}
pkgver=1.1.6
pkgrel=4
pkgdesc="A real-time software synthesizer based on the SoundFont 2 specifications (32bit)"
arch=('x86_64')
url="http://www.fluidsynth.org/"
depends=("$_pkgbasename=$pkgver" 'lib32-glib2' 'lib32-jack' 'lib32-alsa-lib' 'lib32-libpulse')
makedepends=('cmake' 'gcc-multilib')
license=('LGPL')
source=("http://downloads.sourceforge.net/$_pkgbasename/$_pkgbasename-$pkgver.tar.gz")
md5sums=('ae5aca6de824b4173667cbd3a310b263')
build() {
cd "$srcdir/$_pkgbasename-$pkgver"
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cmake . -DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_BUILD_TYPE=Release \
-Denable-ladspa=ON \
-DLIB_SUFFIX=32
make
}
package() {
cd "$srcdir/$_pkgbasename-$pkgver"
make DESTDIR="$pkgdir/" install
rm -rf "${pkgdir}"/usr/{include,share,bin}
}
<file_sep>/lib32-portaudio/PKGBUILD
# PKGBUILD metadata for lib32-portaudio: 32-bit audio I/O library.
_pkgbasename=portaudio
pkgname=lib32-${_pkgbasename}
pkgver=19_20140130
pkgrel=2
pkgdesc="A free, cross platform, open-source, audio I/O library. (32 bit)"
arch=('x86_64')
url="http://www.portaudio.com"
# FIX: license must be an array in a PKGBUILD, not a plain string
# (was: license="custom").
license=('custom')
depends=('lib32-alsa-lib' 'lib32-jack' $_pkgbasename=$pkgver)
makedepends=('subversion' 'gcc-multilib')
conflicts=('lib32-portaudio-svn')
provides=('lib32-portaudio-svn')
replaces=('lib32-portaudio-svn')
options=('!libtool')
source=("http://www.portaudio.com/archives/pa_stable_v$pkgver.tgz")
md5sums=('7f220406902af9dca009668e198cbd23')
# 32-bit build with ALSA and JACK host APIs enabled.
build() {
cd "$srcdir/$_pkgbasename"
export CC='gcc -m32'
export CXX='g++ -m32'
export PKG_CONFIG_PATH='/usr/lib32/pkgconfig'
./configure --prefix=/usr --with-alsa --with-jack --libdir=/usr/lib32
make
}
# Headers come from native portaudio; the license is installed here.
package() {
cd "$srcdir/$_pkgbasename"
make DESTDIR="${pkgdir}" install
rm -rf "${pkgdir}/usr/include"
install -D -m644 LICENSE.txt "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE.txt"
}<file_sep>/lib32-fontconfig/PKGBUILD
# PKGBUILD for lib32-fontconfig: 32-bit fontconfig with Ubuntu's
# rendering patch set applied on top of the upstream release.
_pkgbasename=fontconfig
pkgname=lib32-$_pkgbasename
pkgver=2.11.1
pkgrel=2
_ubuntu_diff_version=${pkgver}-0ubuntu6
pkgdesc="A library for configuring and customizing font access (32-bit)"
arch=('x86_64')
url="http://www.fontconfig.org/release/"
license=('custom')
depends=('lib32-expat' 'lib32-freetype2' 'lib32-bzip2' $_pkgbasename=$pkgver)
makedepends=(gcc-multilib)
provides=("${pkgname}-ubuntu")
conflicts=("${pkgname}-ubuntu")
replaces=("${pkgname}-ubuntu")
source=("http://www.fontconfig.org/release/$_pkgbasename-$pkgver.tar.bz2"
"http://archive.ubuntu.com/ubuntu/pool/main/f/fontconfig/fontconfig_${_ubuntu_diff_version}.debian.tar.xz")
md5sums=('824d000eb737af6e16c826dd3b2d6c90'
'5d8e082f4d36d6c82853f6b6a5f6997a')
build() {
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cd "${srcdir}/${_pkgbasename}-${pkgver}"
# apply ubuntu patches
# (the series file lists patches in order; '#' lines are comments)
for _f in $(cat "$srcdir/debian/patches/series" | grep -v '#') ; do
patch -Np1 -i "$srcdir/debian/patches/$_f"
done
./configure --prefix=/usr \
--sysconfdir=/etc \
--with-templatedir=/etc/fonts/conf.avail \
--with-xmldir=/etc/fonts \
--localstatedir=/var \
--disable-static \
--with-default-fonts=/usr/share/fonts \
--with-add-fonts=/usr/share/fonts \
--libdir=/usr/lib32
make
}
# Keep only /usr/lib32; config and tools belong to native fontconfig.
package() {
cd "${srcdir}/${_pkgbasename}-${pkgver}"
make DESTDIR="${pkgdir}" install
rm -rf "${pkgdir}"/usr/{include,share,bin} "$pkgdir"/{etc,var}
mkdir -p "$pkgdir/usr/share/licenses"
ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/lib32-libmikmod/PKGBUILD
# PKGBUILD for lib32-libmikmod: 32-bit module-audio library.
_pkgbasename=libmikmod
pkgname=lib32-$_pkgbasename
pkgver=3.3.8
pkgrel=1
pkgdesc="A portable sound library"
license=('GPL' 'LGPL')
url="http://sourceforge.net/projects/mikmod/"
arch=('x86_64')
depends=('lib32-glibc' "${_pkgbasename}=${pkgver}")
makedepends=('gcc-multilib')
options=('!libtool')
source=(http://downloads.sourceforge.net/mikmod/$_pkgbasename-${pkgver}.tar.gz)
md5sums=('e100bbc4900953685d876fdd6487bc8a')
build() {
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cd $srcdir/$_pkgbasename-$pkgver
./configure --prefix=/usr --mandir=/usr/share/man --infodir=/usr/share/info --libdir=/usr/lib32
make
}
package() {
cd $srcdir/$_pkgbasename-$pkgver
make DESTDIR=$pkgdir install
rm -rf "${pkgdir}"/usr/{include,share,bin}
}
<file_sep>/lib32-libdbusmenu-qt/PKGBUILD
# Maintainer (i686): <NAME> <<EMAIL>]org>
# Maintainer (x86_64): <NAME> <<EMAIL>>
# PKGBUILD for lib32-libdbusmenu-qt: 32-bit Qt DBusMenu library (CMake
# out-of-source build; LIB_SUFFIX=32 targets /usr/lib32).
pkgname=lib32-libdbusmenu-qt
_pkgname=libdbusmenu-qt
pkgver=0.9.2
pkgrel=1
pkgdesc="A library that provides a Qt implementation of the DBusMenu spec"
arch=('i686' 'x86_64')
url="https://launchpad.net/libdbusmenu-qt"
license=('GPL')
depends=('lib32-qt' "${_pkgname}=${pkgver}")
makedepends=('cmake')
source=("http://launchpad.net/${_pkgname}/trunk/${pkgver}/+download/${_pkgname}-${pkgver}.tar.bz2")
md5sums=('9a49484927669cd2ec91b3bf9ba8b79e')
build() {
cd "${srcdir}"
mkdir -p build
cd build
export CC="gcc -m32"
export CXX="g++ -m32"
cmake ../${_pkgname}-${pkgver} \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_BUILD_TYPE=Release \
-DWITH_DOC=OFF \
-DLIB_SUFFIX=32
make
}
# Headers are removed; they ship with the native libdbusmenu-qt.
package() {
cd "${srcdir}/build"
make DESTDIR="${pkgdir}" install
rm -r "${pkgdir}"/usr/include
}
<file_sep>/lib32-lcms2/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# Maintainer: AlmAck <<EMAIL>>
# Contributor: <NAME> <<EMAIL>>
# Lib32 Packages for Chakra, part of chakra-project.org
#
# Maintainer: AlmAck <<EMAIL>>
# Contributor: <NAME> <<EMAIL>>
# PKGBUILD for lib32-lcms2: 32-bit little CMS color engine.
_pkgbasename=lcms2
pkgname=lib32-$_pkgbasename
pkgver=2.6
pkgrel=1
pkgdesc="Small-footprint color management engine, version 2 (32-bit)"
arch=('x86_64')
license=('MIT')
depends=('lib32-libtiff' $_pkgbasename=$pkgver)
makedepends=('gcc-multilib')
url="http://www.littlecms.com"
source=(http://downloads.sourceforge.net/sourceforge/lcms/${_pkgbasename}-${pkgver}.tar.gz)
sha1sums=('b0ecee5cb8391338e6c281d1c11dcae2bc22a5d2')
build() {
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cd ${_pkgbasename}-${pkgver}
./configure --prefix=/usr --libdir=/usr/lib32
make
}
check() {
cd ${_pkgbasename}-${pkgver}
make check
}
# MIT license is shared via symlink to the native lcms2 package.
package() {
cd ${_pkgbasename}-${pkgver}
make DESTDIR="${pkgdir}" install
rm -rf "${pkgdir}"/usr/{include,share,bin}
mkdir -p "${pkgdir}/usr/share/licenses"
ln -s ${_pkgbasename} "${pkgdir}/usr/share/licenses/${pkgname}"
}
<file_sep>/lib32-jack/PKGBUILD
# maintainer (x86_64): <NAME> <<EMAIL>>
# contributor (x86_64): <NAME> <abveritas[at]chakra-project[dot]org>
# maintainer (x86_64): <NAME> <<EMAIL>>
# contributor (x86_64): <NAME> <abveritas[at]chakra-project[dot]org>
# PKGBUILD for lib32-jack: only the 32-bit libjack client library is
# built and installed, not the full JACK server.
_pkgbasename=jack
pkgname=lib32-$_pkgbasename
_longname=jack-audio-connection-kit
pkgver=0.124.1
pkgrel=1
pkgdesc="A low-latency audio server (32-bit)"
arch=('x86_64')
license=('GPL' 'LGPL')
depends=("$_pkgbasename=$pkgver" 'lib32-db' 'lib32-libsamplerate' 'lib32-readline')
makedepends=('gcc-multilib')
url="http://jackaudio.org/"
source=("http://jackaudio.org/downloads/${_longname}-${pkgver}.tar.gz")
md5sums=('d64e90121be8a54860b870a726fb5b5d')
build() {
cd "${srcdir}/${_longname}-${pkgver}"
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
./configure --prefix=/usr --libdir=/usr/lib32
# Compile only the client library subdirectory.
make -C libjack
}
# Install the pkg-config file plus the libjack library.
package() {
cd ${srcdir}/${_longname}-${pkgver}
make DESTDIR="${pkgdir}" install-pkgconfigDATA
make -C libjack DESTDIR="${pkgdir}" install
}
<file_sep>/lib32-libgpg-error/PKGBUILD
# PKGBUILD for lib32-libgpg-error: 32-bit libgcrypt support library.
_pkgbasename=libgpg-error
pkgname=lib32-$_pkgbasename
pkgver=1.21
pkgrel=1
pkgdesc="Support library for libgcrypt (32-bit)"
arch=(x86_64)
url="http://www.gnupg.org"
license=('LGPL')
depends=('lib32-glibc' $_pkgbasename=$pkgver)
makedepends=('gcc' 'lib32-gcc-libs')
# The .sig companion is fetched for PGP verification ('SKIP' checksum).
source=(ftp://ftp.gnupg.org/gcrypt/libgpg-error/${_pkgbasename}-${pkgver}.tar.bz2{,.sig})
sha1sums=('ef1dfb2f8761f019091180596e9e638d8cc37513'
'SKIP')
validpgpkeys=('<KEY>')
build() {
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cd "${srcdir}"/${_pkgbasename}-${pkgver}
./configure --prefix=/usr --libdir=/usr/lib32
make
}
# Run the upstream test suite.
# FIX: this function previously ran "make clean", which deleted the
# build artifacts instead of testing them (and would break package(),
# which runs afterwards). makepkg's check() must run the tests.
check() {
cd "${srcdir}"/${_pkgbasename}-${pkgver}
make check
}
# Install, then drop everything the native package already provides.
package() {
cd "${srcdir}"/${_pkgbasename}-${pkgver}
make DESTDIR="${pkgdir}/" install
rm -rf "${pkgdir}"/usr/{include,share,bin}
}
<file_sep>/lib32-sdl2_gfx/PKGBUILD
# PKGBUILD for lib32-sdl2_gfx: 32-bit SDL2 graphics-primitives library.
_pkgbasename=sdl2_gfx
pkgname=lib32-$_pkgbasename
pkgver=1.0.1
pkgrel=1
pkgdesc="SDL Graphic Primitives (Version 2, 32-bit)"
arch=('i686' 'x86_64')
url="http://www.libsdl.org"
license=('zlib')
depends=('lib32-sdl2' $_pkgbasename=$pkgver)
makedepends=('gcc-multilib')
options=(!libtool)
source=("http://www.ferzkopp.net/Software/SDL2_gfx/SDL2_gfx-${pkgver}.tar.gz")
md5sums=('9c96816618e3e086d885d1d214e59d87')
# Regenerate autotools files, then do a shared-only 32-bit build.
build() {
cd "${srcdir}/SDL2_gfx-${pkgver}"
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
./autogen.sh
./configure --disable-static \
--prefix=/usr \
--libdir=/usr/lib32
make
}
package() {
cd "${srcdir}/SDL2_gfx-${pkgver}"
make DESTDIR="${pkgdir}/" install
rm -rf "${pkgdir}"/usr/{include,share,bin}
}
<file_sep>/lib32-libxtst/PKGBUILD
# Part of the X.org group
# maintainer (x86_64): <NAME> <<EMAIL>>
# Contributor (x86_64): <NAME> <abveritas[at]chakra-project[dot]org>
_pkgbasename=libxtst
pkgname=lib32-$_pkgbasename
# FIX: removed unresolved git merge-conflict markers (<<<<<<< / =======
# / >>>>>>>) that made this PKGBUILD unparseable; kept the newer
# upstream release and its matching checksum.
pkgver=1.2.2
pkgrel=1
pkgdesc="X11 Testing -- Resource extension library (32-bit)"
arch=(x86_64)
url="http://xorg.freedesktop.org/"
license=('custom')
depends=('lib32-libxext' 'lib32-libxi' 'recordproto' $_pkgbasename=$pkgver)
makedepends=('gcc-multilib' 'xorg-util-macros')
options=('!libtool')
source=(${url}/releases/individual/lib/libXtst-${pkgver}.tar.bz2)
md5sums=('25c6b366ac3dc7a12c5d79816ce96a59')

# Shared-only 32-bit build into /usr/lib32.
build() {
cd "${srcdir}/libXtst-${pkgver}"
export CC="gcc -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
./configure --prefix=/usr --disable-static \
--libdir=/usr/lib32
make
}

# Install the 32-bit libraries only; headers/docs come from the native
# package. The license directory is a symlink to the native package's.
package() {
cd "${srcdir}/libXtst-${pkgver}"
make DESTDIR="${pkgdir}" install
rm -rf "${pkgdir}"/usr/{include,share}
mkdir -p "$pkgdir/usr/share/licenses"
ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/lib32-audiofile/PKGBUILD
# PKGBUILD for lib32-audiofile: 32-bit SGI audio file library.
_pkgbasename=audiofile
pkgname=lib32-$_pkgbasename
pkgver=0.3.6
pkgrel=2
pkgdesc="Silicon Graphics Audio File Library (32-bit)"
arch=('x86_64')
url="http://www.68k.org/~michael/audiofile/"
license=('LGPL')
depends=('lib32-glibc' "$_pkgbasename=$pkgver" 'lib32-alsa-lib' 'lib32-flac')
makedepends=(gcc-multilib)
options=('!libtool')
source=("http://audiofile.68k.org/$_pkgbasename-$pkgver.tar.gz")
md5sums=('2731d79bec0acef3d30d2fc86b0b72fd')
build() {
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cd "${srcdir}/${_pkgbasename}-${pkgver}"
./configure --prefix=/usr --libdir=/usr/lib32
make
}
package() {
cd "${srcdir}/${_pkgbasename}-${pkgver}"
make DESTDIR="${pkgdir}" install
rm -rf "${pkgdir}"/usr/{include,share,bin}
}
<file_sep>/wine/PKGBUILD
# PKGBUILD metadata for wine: dual-arch (WoW64) build — 64-bit wine plus
# a 32-bit tree linked against it.
pkgname=wine
pkgver=1.9.10
pkgrel=1
# Upstream names rc tarballs with a dash ("1.9-rc1").
_pkgver=${pkgver/rc/-rc}
source=(https://dl.winehq.org/wine/source/1.9/$pkgname-$_pkgver.tar.bz2
30-win32-aliases.conf)
md5sums=('5e22fd3846fb3df3c7acfd61e544452d'
'1ff4e467f59409272088d92173a0f801')
pkgdesc="A compatibility layer for running Windows programs"
url="http://www.winehq.com"
categories=('system')
screenshot=('http://wstaw.org/m/2012/04/28/wine.png')
arch=('x86_64')
license=('LGPL')
install=wine.install
# Runtime deps: each native library is paired with its lib32 twin so
# both the 64-bit and 32-bit loaders work.
depends=(
fontconfig lib32-fontconfig
lcms2 lib32-lcms2
libxml2 lib32-libxml2
libxcursor lib32-libxcursor
libxrandr lib32-libxrandr
libxdamage lib32-libxdamage
libxi lib32-libxi
gettext lib32-gettext
freetype2 lib32-freetype2
glu lib32-glu
libsm lib32-libsm
gcc-libs lib32-gcc-libs
desktop-file-utils
)
makedepends=(autoconf ncurses bison perl fontforge flex
giflib lib32-giflib
libpng lib32-libpng
gnutls lib32-gnutls
libxinerama lib32-libxinerama
libxcomposite lib32-libxcomposite
libxmu lib32-libxmu
libxxf86vm lib32-libxxf86vm
libldap lib32-libldap
mpg123 lib32-mpg123
openal lib32-openal
v4l-utils lib32-v4l-utils
alsa-lib lib32-alsa-lib
libpulse lib32-libpulse
mesa lib32-mesa
mesa-libgl lib32-mesa-libgl
libcl lib32-libcl
libxslt lib32-libxslt
jack lib32-jack
samba
gst-plugins-base-libs lib32-gst-plugins-base-libs
opencl-headers
)
# Optional runtime features are loaded dynamically at run time.
optdepends=(
giflib lib32-giflib
libpng lib32-libpng
libldap lib32-libldap
gnutls lib32-gnutls
mpg123 lib32-mpg123
openal lib32-openal
v4l-utils lib32-v4l-utils
libpulse lib32-libpulse
alsa-plugins lib32-alsa-plugins
alsa-lib lib32-alsa-lib
libjpeg-turbo lib32-libjpeg-turbo
libxcomposite lib32-libxcomposite
libxinerama lib32-libxinerama
ncurses lib32-ncurses
libcl lib32-libcl
libxslt lib32-libxslt
cups
samba dosbox
gst-plugins-base-libs lib32-gst-plugins-base-libs
opencl-headers
)
# Clean stale build dirs and patch the source for this toolchain.
prepare() {
cd $srcdir
# remove once https://bugs.winehq.org/show_bug.cgi?id=38653 is resolved
export CFLAGS="${CFLAGS/-O2/} -O0"
export CXXFLAGS="${CXXFLAGS/-O2/} -O0"
# Get rid of old code dir
rm -rf $pkgname
# Get rid of old build dirs
rm -rf $pkgname-{32,64}-build
# Allow ccache to work
mv $pkgname-$_pkgver $pkgname
sed 's|OpenCL/opencl.h|CL/opencl.h|g' -i $pkgname/configure*
# ncurses fix
sed -i 's|libncurses|libncursesw|g' "$srcdir/$pkgname/configure"
sed -i 's|lncurses|lncursesw|g' "$srcdir/$pkgname/configure"
# These additional CPPFLAGS solve FS#27662 and FS#34195
export CPPFLAGS="${CPPFLAGS/-D_FORTIFY_SOURCE=2/} -D_FORTIFY_SOURCE=0"
}
# Build the 64-bit tree first, then the 32-bit tree against it (WoW64).
build() {
cd $srcdir
msg2 "Building Wine-64..."
mkdir -p $pkgname-64-build
cd "$srcdir/$pkgname-64-build"
../$pkgname/configure \
--prefix=/usr \
--libdir=/usr/lib \
--with-x \
--with-gstreamer \
--enable-win64
make
# The 32-bit build must reference the finished 64-bit tree.
_wine32opts=(
--libdir=/usr/lib32
--with-wine64="$srcdir/$pkgname-64-build"
)
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
msg2 "Building Wine-32..."
mkdir -p "$srcdir/$pkgname-32-build"
cd "$srcdir/$pkgname-32-build"
../$pkgname/configure \
--prefix=/usr \
--with-x \
--with-gstreamer \
"${_wine32opts[@]}"
# FIX: the 32-bit tree was configured but never compiled here, which
# silently deferred the whole 32-bit compile to package()'s
# "make install" running under fakeroot. Build it in build() like
# the 64-bit tree.
make
}
# Install the 32-bit tree first, then the 64-bit one on top, then the
# Win32 font-alias fontconfig snippet.
package() {
msg2 "Packaging Wine-32..."
cd "$srcdir/$pkgname-32-build"
make prefix="$pkgdir/usr" \
libdir="$pkgdir/usr/lib32" \
dlldir="$pkgdir/usr/lib32/wine" install
msg2 "Packaging Wine-64..."
cd "$srcdir/$pkgname-64-build"
make prefix="$pkgdir/usr" \
libdir="$pkgdir/usr/lib" \
dlldir="$pkgdir/usr/lib/wine" install
# Font aliasing settings for Win32 applications
install -d "$pkgdir"/etc/fonts/conf.{avail,d}
install -m644 "$srcdir/30-win32-aliases.conf" "$pkgdir/etc/fonts/conf.avail"
ln -s ../conf.avail/30-win32-aliases.conf "$pkgdir/etc/fonts/conf.d/30-win32-aliases.conf"
}
<file_sep>/lib32-gtk3/lib32-colord/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# Maintainer: AlmAck <<EMAIL>>
# Contributor: jtts
_pkgbasename=colord
pkgname=lib32-$_pkgbasename
pkgver=1.2.0
pkgrel=1
pkgdesc="System daemon for managing color devices (32-bit)"
arch=(x86_64)
url="http://www.freedesktop.org/software/colord"
license=(GPL2)
depends=('lib32-lcms2' 'lib32-libgusb' 'lib32-polkit' 'lib32-sqlite3' 'lib32-systemd' 'lib32-dbus-core' 'lib32-glib2' $_pkgbasename=$pkgver) #shared-color-profiles
makedepends=('gcc-multilib' 'intltool' 'gobject-introspection' 'vala') # docbook2x sane bash-completion
source=($url/releases/$_pkgbasename-$pkgver.tar.xz)
sha1sums=('a96619dcca24aea1527054f0117211cf7ac30dec')
build() {
export CC='gcc -m32'
export PKG_CONFIG_PATH='/usr/lib32/pkgconfig'
cd $_pkgbasename-$pkgver
# put udev files in /usr/lib
sed -i "/slashlibdir=/s#/lib#/usr/lib#" configure
./configure --prefix=/usr --sysconfdir=/etc --localstatedir=/var \
--libexecdir=/usr/lib32/$_pkgbasename --libdir=/usr/lib32 --disable-static \
--with-systemdsystemunitdir=/usr/lib/systemd/system \
--enable-vala --disable-volume-search \
--disable-nls --disable-bash-completion \
--with-daemon-user=colord
#--enable-sane
# Not recognized by configure
#--disable-manpages
make
}
# package(): install colord into the packaging root, then strip everything
# that the 64-bit colord package already provides.
package() {
cd $_pkgbasename-$pkgver
make DESTDIR="$pkgdir" install
# Ship only /usr/lib32; drop config, state and 64-bit/shared paths.
rm -rf "${pkgdir}"/{etc,lib,var,usr/{bin,include,lib,share}}
}
<file_sep>/lib32-sdl2_image/PKGBUILD
# maintainer: <NAME> <<EMAIL>>
_pkgbasename=sdl2_image
pkgname=lib32-$_pkgbasename
pkgver=2.0.0
pkgrel=2
pkgdesc="SDL2 image libraries (Version 2, 32-bit)"
arch=('i686' 'x86_64')
url="http://www.libsdl.org"
license=('MIT')
depends=('lib32-sdl2' 'lib32-libpng' 'lib32-libtiff' 'lib32-libjpeg' 'lib32-libwebp' $_pkgbasename=$pkgver)
makedepends=('cmake' 'gcc-multilib')
options=(!libtool)
source=("http://www.libsdl.org/projects/SDL_image/release/SDL2_image-${pkgver}.tar.gz")
sha256sums=('b29815c73b17633baca9f07113e8ac476ae66412dec0d29a5045825c27a47234')
# build(): configure and compile SDL2_image with a forced 32-bit toolchain.
build() {
    # 32-bit compilers and pkg-config search path.
    export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
    export CXX="g++ -m32"
    export CC="gcc -m32"

    cd "${srcdir}/SDL2_image-${pkgver}/"
    ./configure --disable-static --prefix=/usr --libdir=/usr/lib32
    make
}
# package(): install into $pkgdir, keeping only the 32-bit libraries.
package() {
cd "${srcdir}/SDL2_image-${pkgver}/"
make DESTDIR="${pkgdir}/" install
# Headers, docs and tools come from the 64-bit sdl2_image package.
rm -rf "${pkgdir}"/usr/{include,share,bin}
}
<file_sep>/lib32-sdl2_ttf/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# maintainer: <NAME> <<EMAIL>>
_pkgbasename=sdl2_ttf
pkgname=lib32-$_pkgbasename
pkgver=2.0.12
pkgrel=2
pkgdesc="A library that allows you to use TrueType fonts in your SDL applications (Version 2, 32-bit)"
arch=('x86_64')
url="http://www.libsdl.org"
license=('MIT')
depends=('lib32-sdl2' 'lib32-freetype2' $_pkgbasename=$pkgver)
makedepends=('cmake' 'gcc-multilib')
options=(!libtool)
source=("http://www.libsdl.org/projects/SDL_ttf/release/SDL2_ttf-${pkgver}.tar.gz")
sha256sums=('8728605443ea1cca5cad501dc34dc0cb15135d1e575551da6d151d213d356f6e')
build() {
cd "${srcdir}/SDL2_ttf-${pkgver}/"
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
./autogen.sh
./configure --disable-static --prefix=/usr --libdir=/usr/lib32
make
}
package() {
cd "${srcdir}/SDL2_ttf-${pkgver}/"
make DESTDIR="${pkgdir}/" install
rm -rf "${pkgdir}"/usr/{include,share,bin}
}
<file_sep>/lib32-speex/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# maintainer (x86_64): <NAME> <<EMAIL>>
# maintainer (x86_64): <NAME> <abveritas[at]chakra-project[dot]org>
_pkgbasename=speex
pkgname=lib32-$_pkgbasename
pkgver=1.2rc2
pkgrel=1
pkgdesc="A free codec for free speech (32-bit)"
arch=(x86_64)
url="http://www.speex.org/"
license=('BSD')
depends=('lib32-libogg' 'lib32-speexdsp' $_pkgbasename=$pkgver)
makedepends=(gcc-multilib)
source=(http://downloads.us.xiph.org/releases/$_pkgbasename/$_pkgbasename-$pkgver.tar.gz)
md5sums=('6ae7db3bab01e1d4b86bacfa8ca33e81')
build() {
cd $_pkgbasename-$pkgver
export CC="gcc -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
./configure --prefix=/usr --sysconfdir=/etc --localstatedir=/var \
--libdir=/usr/lib32 \
--enable-binaries # Must be given or configure won't use pkg-config correctly
make
}
check() {
cd $_pkgbasename-$pkgver
make -k check
}
package() {
cd $_pkgbasename-$pkgver
make DESTDIR="$pkgdir" install
rm -rf "${pkgdir}"/usr/{include,share,bin}
mkdir -p "$pkgdir/usr/share/licenses"
ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/lib32-libjpeg-turbo/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# maintainer (x86_64): <NAME> <<EMAIL>>
# maintainer (x86_64): <NAME> <abveritas[at]chakra-project[dot]org>
_pkgbasename=libjpeg-turbo
pkgname=lib32-$_pkgbasename
pkgver=1.3.0
pkgrel=1
pkgdesc='JPEG image codec with accelerated baseline compression and decompression (lib32)'
arch=('x86_64')
url='http://libjpeg-turbo.virtualgl.org/'
license=('GPL'
         'custom')
depends=('lib32-glibc' $_pkgbasename=$pkgver)
# BUG FIX: makedepends was assigned twice; the first, 'nasm'-only value was
# silently overwritten by the later assignment. Merged into one array.
makedepends=('nasm' gcc-multilib)
provides=('lib32-libjpeg=8.0.2')
conflicts=('lib32-libjpeg')
replaces=('lib32-libjpeg')
# BUG FIX: removed a duplicate options=('!libtool') assignment.
options=('!libtool')
source=("http://sourceforge.net/projects/$_pkgbasename/files/$pkgver/$_pkgbasename-$pkgver.tar.gz"
        'cve-2013-6629.patch')
sha512sums=('4d34c3c5f2cdd70b2a3d1b55eeb4ce59cb3d4b8d22bb6d43c2ec844b7eb5685b55a9b1b46ad2bc5f2756b5f5535ccad032791c3b932af9c1efc502aa5e701053'
            'c5f063f87305ab48cb40c243df9e7a307efedc02eef8eee65e9ca5006676d5257a6d1dc7a32ff77a2486c8be8792b4f5431d18e46759ad63c182b8332a736099')
prepare() {
cd "${srcdir}/${_pkgbasename}-${pkgver}"
patch -i ../cve-2013-6629.patch # FS#38094
}
# build(): two-pass build — first configured with --with-jpeg6 to produce the
# old libjpeg.so.6 ABI (saved aside), then reconfigured with --with-jpeg8 for
# the libjpeg.so.8 ABI that becomes the main build output.
build() {
cd "${srcdir}/${_pkgbasename}-${pkgver}"
# Force a 32-bit toolchain and 32-bit pkg-config search path.
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
# Have nasm emit 32-bit ELF objects instead of the 64-bit default.
# NOTE(review): both configure runs below pass --without-simd, which appears
# to make this NAFLAGS rewrite a no-op — confirm whether SIMD was meant
# to stay enabled here.
sed -i "s|NAFLAGS='-felf64 -DELF -D__x86_64__'|NAFLAGS='-felf32 -DELF -D__x86_64__'|" configure
# Create libjpeg6
./configure \
--prefix=/usr \
--with-jpeg6 \
--mandir=/usr/share/man \
--libdir=/usr/lib32 \
--without-simd
make
# Stash the v6-ABI shared library before the tree is reconfigured.
mkdir -p ${srcdir}/libjpeg6
cp -d .libs/libjpeg.so.6* ${srcdir}/libjpeg6
rm -R .libs
# Create libjpeg8
./configure \
--prefix=/usr \
--with-jpeg8 \
--mandir=/usr/share/man \
--libdir=/usr/lib32 \
--without-simd
make
}
package() {
cd "$srcdir/$_pkgbasename-$pkgver"
make DESTDIR="$pkgdir/" install
cp -vd $srcdir/libjpeg6/libjpeg.so.6* $pkgdir/usr/lib32
rm -rf "${pkgdir}"/usr/{include,share,bin,sbin}
mkdir -p "$pkgdir/usr/share/licenses"
ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/steam/PKGBUILD
# Steam bootstrap package metadata. Runtime depends are declared later,
# inside package(), rather than here.
pkgname=steam
pkgver=1.0.0.52
pkgrel=1
pkgdesc="Digital distribution client bootstrap package"
arch=('x86_64')
url="http://steampowered.com/"
license=('custom')
optdepends=('lib32-flashplugin: for flash video'
'freshplayerplugin: alternative flash video support'
'lib32-alsa-plugins: for pulseaudio on some games'
'lib32-mesa: for open source driver users'
'lib32-catalyst-utils: for AMD Catalyst users'
'lib32-nvidia-utils: for NVIDIA proprietary blob users'
'lib32-primus: for NVIDIA + Bumblebee users')
install=steam.install
source=("http://repo.steampowered.com/$pkgname/pool/$pkgname/s/$pkgname/${pkgname}_$pkgver.tar.gz"
'alsa_sdl_audiodriver.patch'
'steam.patch'
'steam.sh.patch'
'steam_info.sh')
# NOTE(review): the last checksum below ends in a "<KEY>" placeholder and is
# not a valid sha256 — makepkg's integrity check will fail. Regenerate the
# sums (e.g. with updpkgsums) before building.
sha256sums=('dd3b1a9f597bf0e088094d6fd1e495068434b604346139f277ea26c41e009304'
'174a110eda1c9d5b1c92a4490b266b31918559bbf8292a94905221c92da4bc0e'
'ae933bda073e11ad6ae61d0ede0b472ba598df251c30690592a61c11779c7ee4'
'7d33435937e553a6cb1e6918d1024d96c6081e8df560ea5bd1252146cfe670a8'
'9b54b38abd3b8b449a445069d21f042b542ca5c4edd<KEY>')
prepare() {
patch -d "$pkgname" -Np1 -i "$srcdir/alsa_sdl_audiodriver.patch"
install_agreement_window
}
# install_agreement_window(): patch Steam so the install agreement can be
# approved from a GUI instead of the terminal. The relevant steam.sh lives
# inside bootstraplinux_ubuntu12_32.tar.xz, so that archive is unpacked,
# patched, and repacked in place.
install_agreement_window() {
##patches for to be able to approve the 'install agreement' other than through the terminal
patch -d "$pkgname" -Np1 -i "$srcdir/steam.patch"
# patch steam.sh that is inside an archive
cd $srcdir/steam
tar -xJf bootstraplinux_ubuntu12_32.tar.xz
cd ../
# delete the zenity check
sed -i s!zenity!''! $srcdir/steam/steamdeps.txt
patch -d "$pkgname" -Np1 -i "$srcdir/steam.sh.patch"
# create the archive again
cd steam
rm bootstraplinux_ubuntu12_32.tar.xz
# Repack only the files the bootstrap expects; the original archive was
# deleted just above so it is not re-included.
tar -cJf bootstraplinux_ubuntu12_32.tar.xz \
linux32 ubuntu12_32 steam.sh steam_install_agreement.txt steamdeps.txt
}
package() {
depends=(
'bash' 'xterm' 'desktop-file-utils' 'hicolor-icon-theme' 'curl'
'dbus' 'freetype2' 'gdk-pixbuf2' 'ttf-liberation' 'kde-baseapps-kdialog'
'python3' 'lib32-sdl' 'lib32-libvorbis' 'lib32-alsa-lib' 'lib32-libgl'
'lib32-libgcrypt' 'lib32-nss' 'lib32-openal' 'lib32-gcc-libs' 'lib32-libx11' 'lib32-libxss' 'lib32-libxshmfence')
make -C "$pkgname" DESTDIR="$pkgdir" install
# Install license
install -Dm644 "$pkgdir/usr/share/doc/steam/steam_install_agreement.txt" "$pkgdir/usr/share/licenses/steam/LICENSE"
# blank steamdeps because apt-get
ln -sf /bin/true "$pkgdir/usr/bin/steamdeps"
install -Dm644 $srcdir/steam/lib/udev/rules.d/99-steam-controller-perms.rules $pkgdir/usr/lib/udev/rules.d/99-steam-controller-perms.rules
# window that points the user to the Steam's wiki page
install -D steam_info.sh $pkgdir/usr/bin/steam_info.sh
sed -i s!"Exec=/usr/bin/steam %U"!"Exec=/usr/bin/steam_info.sh"! \
$pkgdir/usr/share/applications/steam.desktop
}
<file_sep>/lib32-qtwebkit/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# maintainer (x86_64): <NAME> <<EMAIL>>
# maintainer (x86_64): <NAME> <abveritas[at]chakra-project[dot]org>
_pkgbasename=qtwebkit
pkgname="lib32-${_pkgbasename}"
pkgver=2.2.2
pkgrel=2
pkgdesc="Standalone QtWebKit version. (ELF32)"
arch=('x86_64')
url="http://trac.webkit.org/wiki/QtWebKit"
license=('GPL3' 'LGPL')
depends=('lib32-qt' 'lib32-gstreamer0.10' 'lib32-gstreamer0.10-base'
"${_pkgbasename}=${pkgver}")
makedepends=('gperf' 'phonon' 'perl' 'python'
'lib32-sqlite3' 'lib32-fontconfig' 'lib32-mesa' 'lib32-libglapi')
#source=("http://get.qt.nokia.com/qtwebkit/QtWebKit-${pkgver}.tar.gz")
source=("http://chakra-linux.org/sources/${_pkgbasename}/qtwebkit-${pkgver}-source.tar.gz"
"http://chakra-linux.org/sources/${_pkgbasename}/qwebview-4.8.0.tar.bz2"
'glib.patch')
md5sums=('86702e3a908e2968e87baa6600cbf31f'
'13a4ac75d98452c2bf7ef710353e91d8'
'dece50d952593e301007432962ba3000')
build() {
cd "${srcdir}/qtwebkit-${pkgver}-source"
patch -p1 -i "${srcdir}"/glib.patch
export QT4DIR=$srcdir/$_pkgfqn
export LD_LIBRARY_PATH=${QT4DIR}/lib:${LD_LIBRARY_PATH}
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
# some of those are likely unnecessary, but I'm too lazy to find and remove them
sed -i "/^QMAKE_LINK\s/s|g++|g++ -m32|g" mkspecs/common/g++-base.conf
sed -i "s|-O2|${CXXFLAGS} -m32|" mkspecs/common/g++-base.conf
sed -i "s|-O2|${CXXFLAGS} -m32|" mkspecs/common/gcc-base.conf
sed -i "/^QMAKE_LFLAGS_RPATH/s| -Wl,-rpath,||g" mkspecs/common/gcc-base-unix.conf
sed -i "/^QMAKE_LFLAGS\s/s|+=|+= ${LDFLAGS} -m32|g" mkspecs/common/gcc-base.conf
sed -i "s|-Wl,-O1|-m32 -Wl,-O1|" mkspecs/common/g++-unix.conf
sed -e "s|-O2|$CXXFLAGS -m32|" \
-e "/^QMAKE_RPATH/s| -Wl,-rpath,||g" \
-e "/^QMAKE_LINK\s/s|g++|g++ -m32|g" \
-e "/^QMAKE_LFLAGS\s/s|+=|+= $LDFLAGS|g" \
-i mkspecs/common/g++.conf
# move headers
mv include Source/
cd Source
qmake
cd ../
make -C Source
}
package() {
cd "${srcdir}/qtwebkit-${pkgver}-source"
make INSTALL_ROOT="${pkgdir}" -C Source install
rm -rf "${pkgdir}"/usr/{include,share,lib}
}
<file_sep>/lib32-libxft/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# maintainer (x86_64): <NAME> <<EMAIL>>
# maintainer (x86_64): <NAME> <abveritas[at]chakra-project[dot]org>
_pkgbasename=libxft
pkgname=lib32-$_pkgbasename
# BUG FIX: this file contained unresolved git merge-conflict markers
# (<<<<<<< HEAD / ======= / >>>>>>>), which break bash parsing. The conflict
# is resolved in favour of the newer 2.3.2 release: build() below applies
# xft_2.3.2-1.diff, which only exists on that side of the conflict.
pkgver=2.3.2
_ubuntu_ver=2.3.1
pkgrel=1
_ubuntu_diff_version=${_ubuntu_ver}-2
pkgdesc="FreeType-based font drawing library for X (32-bit)"
arch=('x86_64')
license=('custom')
url="http://xorg.freedesktop.org/"
depends=('lib32-fontconfig' 'lib32-libxrender' $_pkgbasename=$pkgver)
makedepends=('gcc-multilib')
options=('!libtool')
provides=("${pkgname}-ubuntu")
conflicts=("${pkgname}-ubuntu")
replaces=("${pkgname}-ubuntu")
source=(${url}/releases/individual/lib/libXft-${pkgver}.tar.bz2
        #"http://archive.ubuntu.com/ubuntu/pool/main/x/xft/xft_${_ubuntu_diff_version}.diff.gz"
        # from original, removed xftglyphs.c patch
        xft_2.3.2-1.diff)
sha1sums=('e025d790a7b6c4d283a78d8df06615cb10278e2d'
          '75bda08fd01faae18a85140fbc577beb0a89e823')

# build(): apply the local diff plus the Debian/Ubuntu patch series it adds,
# then configure and compile with a forced 32-bit toolchain.
build() {
    export CC="gcc -m32"
    export CXX="g++ -m32"
    export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"

    cd ${srcdir}/libXft-${pkgver}

    # apply our custom patch
    patch -Np1 -i ../xft_2.3.2-1.diff
    # apply ubuntu patches (series file is unpacked by the diff above)
    #patch -Np1 -i ../xft_${_ubuntu_diff_version}.diff
    for _f in $(cat "$srcdir/libXft-${pkgver}/debian/patches/series" | grep -v '#') ; do
        patch -Np1 -i "$srcdir/libXft-${pkgver}/debian/patches/$_f"
    done

    ./configure --prefix=/usr \
        --libdir=/usr/lib32 \
        --disable-static
    make
}

# package(): install, strip non-lib32 content, symlink the license from the
# 64-bit libxft package.
package() {
    cd "${srcdir}/libXft-${pkgver}"
    make DESTDIR="${pkgdir}" install
    rm -rf "${pkgdir}"/usr/{bin,include,share}
    mkdir -p "$pkgdir/usr/share/licenses"
    ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/lib32-primus/PKGBUILD
# Maintainer: <NAME> <<EMAIL>>
# Contributions from Arch: https://projects.archlinux.org/svntogit/community.git/tree/trunk?h=packages/lib32-primus
_pkgbasename=primus
pkgname=lib32-$_pkgbasename
pkgver=20151110
pkgrel=1
pkgdesc="Faster OpenGL offloading for Bumblebee (32-bit library, git sources)"
arch=('x86_64')
url="https://github.com/amonakov/primus"
license=('custom:ISC')
depends=($_pkgbasename=$pkgver 'bumblebee' 'lib32-mesa-libgl')
makedepends=('git' 'gcc-multilib')
source=("git://github.com/amonakov/primus.git#commit=d1afbf6"
'register_cleanup.patch')
md5sums=('SKIP'
'266f8b163c5189d594a3d34a6bcab54a')
prepare() {
cd "${srcdir}/${_pkgbasename}"
patch -Np1 < "$srcdir/register_cleanup.patch"
}
# build(): compile primus as a 32-bit library.
build() {
    cd "${srcdir}/${_pkgbasename}"
    # BUG FIX: CC was exported as "g++ -m32"; the C compiler must be gcc —
    # compiling C translation units with g++ changes language semantics.
    # CXX stays g++ for the C++ sources.
    export CC="gcc -m32"
    export CXX="g++ -m32"
    LIBDIR=lib32 make
}
package() {
cd "${srcdir}/${_pkgbasename}"
install -D "lib32/libGL.so.1" "$pkgdir/usr/lib32/primus/libGL.so.1"
install -D -m644 LICENSE.txt "$pkgdir/usr/share/licenses/$pkgname/LICENSE"
}
<file_sep>/lib32-sdl2/PKGBUILD
_pkgbasename=sdl2
pkgname=lib32-$_pkgbasename
pkgver=2.0.3
pkgrel=2
pkgdesc="A library for portable low-level access to a video framebuffer, audio output, mouse, and keyboard (Version 2.0, 32-bit)."
arch=('i686' 'x86_64')
url="http://www.libsdl.org"
license=('MIT')
depends=('lib32-glibc' 'lib32-libxext' 'lib32-libxrender' 'lib32-libx11' 'lib32-libgl' $_pkgbasename=$pkgver)
makedepends=('lib32-alsa-lib' 'lib32-mesa' 'lib32-libpulse' 'cmake' 'lib32-libxrandr' 'lib32-libxinerama' 'gcc-multilib')
optdepends=('lib32-alsa-lib: ALSA audio driver'
'lib32-libpulse: PulseAudio audio driver')
source=("http://www.libsdl.org/release/SDL2-${pkgver}.tar.gz")
md5sums=('fe6c61d2e9df9ef570e7e80c6e822537')
build() {
cd "${srcdir}/SDL2-${pkgver}"
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
mkdir build && cd build
cmake .. -DCMAKE_INSTALL_PREFIX=/usr \
-DSDL_STATIC=OFF \
-DRPATH=OFF \
-DLIB_SUFFIX=32
make
}
package() {
cd "${srcdir}/SDL2-${pkgver}/build"
make DESTDIR="${pkgdir}/" install
rm -rf "${pkgdir}"/usr/{include,share,bin}
}
<file_sep>/lib32-krb5/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# maintainer (x86_64): <NAME> <<EMAIL>>
# maintainer (x86_64): <NAME> <abveritas[at]chakra-project[dot]org>
_pkgbasename=krb5
pkgname=lib32-$_pkgbasename
pkgver=1.14.1
pkgrel=1
pkgdesc="The Kerberos network authentication system (32-bit)"
arch=('x86_64')
url="http://web.mit.edu/kerberos/"
license=('custom')
depends=('lib32-e2fsprogs' 'lib32-libldap' 'lib32-keyutils' $_pkgbasename=$pkgver)
makedepends=('perl' 'lib32-gcc-libs')
provides=('lib32-heimdal')
replaces=('lib32-heimdal')
conflicts=('lib32-heimdal')
source=("http://web.mit.edu/kerberos/dist/${_pkgbasename}/1.14/${_pkgbasename}-${pkgver}.tar.gz"
'krb5-config_LDFLAGS.patch')
sha1sums=('f12dc3b8630fd1fefb7058cd782754489377308b'
'09e478cddfb9d46d2981dd25ef96b8c3fd91e1aa')
options=('!emptydirs')
prepare() {
cd "${srcdir}/${_pkgbasename}-${pkgver}/src/build-tools"
# cf https://bugs.gentoo.org/show_bug.cgi?id=448778
patch -Np2 -i "${srcdir}"/krb5-config_LDFLAGS.patch
}
build() {
cd "${srcdir}/${_pkgbasename}-${pkgver}/src"
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
export CFLAGS+=" -fPIC -fno-strict-aliasing -fstack-protector-all"
export CPPFLAGS+=" -I/usr/include/et"
./configure --prefix=/usr \
--sysconfdir=/etc/krb5 \
--mandir=/usr/share/man \
--localstatedir=/var/lib \
--libdir=/usr/lib32 \
--enable-shared \
--with-system-et \
--with-system-ss \
--disable-rpath \
--without-tcl \
--enable-dns-for-realm \
--with-ldap \
--without-system-verto
make
}
#check() {
# We can't do this in the build directory.
#cd "${srcdir}/${_pkgbasename}-${pkgver}"
#make -C src check
#}
package() {
cd "${srcdir}/${_pkgbasename}-${pkgver}/src"
make DESTDIR="${pkgdir}" install
rm -rf "${pkgdir}"/usr/{include,share,bin,sbin}
mkdir -p "$pkgdir/usr/share/licenses"
ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/lib32-qtcurve-qt4/PKGBUILD
# Maintainer: <NAME> <framari [at] opmbx [dot] org>
# CCR Maintainer : ahjolinna <<EMAIL>>
pkgname=lib32-qtcurve-qt4
pkgver=1.8.18
pkgrel=3
pkgdesc='A configurable set of widget styles for KDE and Gtk. Qt4-only version. Multilib.'
arch=('x86_64')
url='https://github.com/QtCurve/qtcurve'
license=('LGPL')
groups=('qtcurve')
depends=('lib32-qt')
makedepends=('cmake' 'gcc-multilib' 'automoc4' 'lib32-qt')
source=("qtcurve-$pkgver.tar.gz::$url/archive/$pkgver.tar.gz")
md5sums=('422d1876d944bb278855f320eda19368')
prepare() {
cd qtcurve-$pkgver
[ -d b ] || mkdir b
# export multilib parameters
# and QT_BUILD_KEY workaround
export CC="gcc -m32 -I$srcdir"
export CXX="g++ -m32 -I$srcdir"
export PKG_CONFIG_PATH='/usr/lib32/pkgconfig'
}
build() {
cd qtcurve-$pkgver/b
cmake .. -DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/usr \
-DQT_PLUGINS_DIR=/usr/lib32/qt/plugins \
-DQT_LIBRARY_DIR=/usr/lib32 \
-DLIB_INSTALL_DIR=/usr/lib32 \
-DQTC_QT4_ENABLE_KDE=false \
-DENABLE_GTK2=false \
-DENABLE_QT5=false
make
}
package() {
cd qtcurve-$pkgver/b/qt4
make DESTDIR="$pkgdir" install
}
<file_sep>/lib32-glibc/PKGBUILD
# maintainer: <NAME> <<EMAIL>>
# toolchain build order:
# lib32-glibc > binutils-multilib > gcc-multilib > binutils-multilib > lib32-glibc
_pkgbasename=glibc
pkgname=lib32-$_pkgbasename
pkgver=2.22
pkgrel=1
pkgdesc="GNU C Library (32-bit)"
arch=('x86_64')
url="http://www.gnu.org/software/libc"
license=('GPL' 'LGPL')
#depends=($_pkgbasename=$pkgver)
makedepends=('gcc-multilib>=4.9')
source=(http://ftp.gnu.org/gnu/libc/${_pkgbasename}-${pkgver}.tar.xz{,.sig}
dtv_surplus.patch
lib32-glibc.conf
)
md5sums=('e51e02bf552a0a1fbbdc948fb2f5e83c'
'SKIP'
'6469cba12e2252e16e32db0353ed3f21'
'<KEY>')
validpgpkeys=('F37CDAB708E65EA183FD1AF625EF0A436C2A4AFF') # Carlos O'Donell
prepare() {
cd ${srcdir}/glibc-${pkgver}
# http://chakraos.org/bugtracker/index.php?do=details&task_id=1060&project=8
patch -p1 -i $srcdir/dtv_surplus.patch
mkdir ${srcdir}/glibc-build
}
build() {
cd ${srcdir}/glibc-build
echo "slibdir=/usr/lib32" >> configparms
echo "rtlddir=/usr/lib32" >> configparms
echo "sbindir=/usr/sbin" >> configparms
echo "rootsbindir=/usr/sbin" >> configparms
export CC="gcc -m32"
export CXX="g++ -m32"
# remove hardening options for building libraries
CFLAGS=${CFLAGS/-fstack-protector-strong/}
CPPFLAGS=${CPPFLAGS/-D_FORTIFY_SOURCE=2/}
${srcdir}/${_pkgbasename}-${pkgver}/configure --prefix=/usr \
--libdir=/usr/lib32 --libexecdir=/usr/lib32 \
--with-headers=/usr/include \
--with-bugurl=https://chakraos.org/bugtracker \
--enable-add-ons \
--enable-obsolete-rpc \
--enable-kernel=2.6.32 \
--enable-bind-now --disable-profile \
--enable-stackguard-randomization \
--enable-multi-arch \
--disable-werror \
--enable-multi-arch i686-unknown-linux-gnu
# --enable-lock-elision \
# build libraries with hardening disabled
echo "build-programs=no" >> configparms
make
# re-enable hardening for programs
sed -i "/build-programs=/s#no#yes#" configparms
echo "CC += -fstack-protector-strong -D_FORTIFY_SOURCE=2" >> configparms
echo "CXX += -fstack-protector-strong -D_FORTIFY_SOURCE=2" >> configparms
make
# remove harding in preparation to run test-suite
sed -i '5,7d' configparms
}
check() {
# the linker commands need to be reordered - fixed in 2.19
LDFLAGS=${LDFLAGS/--as-needed,/}
cd ${srcdir}/glibc-build
# tst-cleanupx4 failure on i686 is "expected"
make check || true
}
# package(): install glibc into the packaging root, prune everything the
# 64-bit glibc already provides, wire up the 32-bit dynamic linker, and
# selectively strip (a few libraries are left unstripped for gdb/valgrind).
package() {
cd ${srcdir}/glibc-build
make install_root=${pkgdir} install
rm -rf ${pkgdir}/{etc,sbin,usr/{bin,sbin,share},var}
# We need one 32 bit specific header file
find ${pkgdir}/usr/include -type f -not -name stubs-32.h -delete
# Dynamic linker
install -d -m755 ${pkgdir}/usr/lib
ln -s ../lib32/ld-linux.so.2 ${pkgdir}/usr/lib/
# Add lib32 paths to the default library search path
install -Dm644 "$srcdir/lib32-glibc.conf" "$pkgdir/etc/ld.so.conf.d/lib32-glibc.conf"
# Symlink /usr/lib32/locale to /usr/lib/locale
ln -s ../lib/locale "$pkgdir/usr/lib32/locale"
# remove the static libraries that have a shared counterpart
# libc, libdl, libm and libpthread are required for toolchain testsuites
# in addition libcrypt appears widely required
rm $pkgdir/usr/lib32/lib{anl,BrokenLocale,nsl,resolv,rt,util}.a
# Do not strip the following files for improved debugging support
# ("improved" as in not breaking gdb and valgrind...):
# ld-${pkgver}.so
# libc-${pkgver}.so
# libpthread-${pkgver}.so
# libthread_db-1.0.so
cd $pkgdir
# NOTE(review): the two bare '\' continuation lines below look like a list
# of binaries was deleted; as written the command reduces to
# `strip $STRIP_BINARIES usr/lib32/getconf/*` — confirm this is intended.
strip $STRIP_BINARIES \
\
\
usr/lib32/getconf/*
strip $STRIP_STATIC usr/lib32/*.a
strip $STRIP_SHARED usr/lib32/{libanl,libBrokenLocale,libcidn,libcrypt}-*.so \
usr/lib32/libnss_{compat,db,dns,files,hesiod,nis,nisplus}-*.so \
usr/lib32/{libdl,libm,libnsl,libresolv,librt,libutil}-*.so \
usr/lib32/{libmemusage,libpcprofile,libSegFault}.so \
usr/lib32/{audit,gconv}/*.so
}
<file_sep>/wine_mono/PKGBUILD
pkgname=wine_mono
pkgver=4.6.2
pkgrel=1
pkgdesc="Mono's built-in replacement for Microsoft's .NET Framework"
arch=('any')
url="http://wiki.winehq.org/Mono"
license=(MPL)
depends=('wine>=1.9.4')
categories=('system')
source=(https://dl.winehq.org/wine/wine-mono/$pkgver/${pkgname/_/-}-$pkgver.msi)
md5sums=('c0ef08cb63a745142d26d093ad4ac299')
# package(): ship the wine-mono MSI into wine's shared mono directory;
# wine picks it up from there on prefix creation.
package() {
    cd "$srcdir"
    local _msi="${pkgname/_/-}-$pkgver.msi"
    install -Dm644 "$_msi" "$pkgdir/usr/share/wine/mono/$_msi"
}
<file_sep>/lib32-json-c/PKGBUILD
_pkgbasename=json-c
pkgname=lib32-$_pkgbasename
pkgver=0.12
pkgrel=1
pkgdesc="A JSON implementation in C (32-bit)"
arch=('x86_64')
url="https://github.com/json-c/json-c/wiki"
depends=($_pkgbasename=$pkgver)
makedepends=('lib32-gcc-libs')
license=('GPL')
options=(!libtool !makeflags)
source=("https://s3.amazonaws.com/${_pkgbasename}_releases/releases/${_pkgbasename}-${pkgver}.tar.gz")
md5sums=('3ca4bbb881dfc4017e8021b5e0a8c491')
build() {
cd $_pkgbasename-$pkgver
export CC="gcc -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
CFLAGS+=" -Wno-error=unused-but-set-variable"
./configure --prefix=/usr --libdir=/usr/lib32 --disable-static
make
}
package() {
cd $_pkgbasename-$pkgver
make DESTDIR="$pkgdir" install
rm -r "$pkgdir/usr/include"
mkdir -p "$pkgdir/usr/share/licenses"
ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
# vim:set ts=2 sw=2 et:
<file_sep>/lib32-libwebp/PKGBUILD
_pkgbasename=libwebp
pkgname=lib32-$_pkgbasename
pkgver=0.5.0
pkgrel=1
pkgdesc="WebP library and conversion tools (32-bit)."
arch=('x86_64')
url="http://code.google.com/intl/en/speed/webp/"
license=('BSD')
depends=('lib32-libpng' 'lib32-libjpeg' $_pkgbasename=$pkgver)
makedepends=('gcc-multilib')
source=("http://downloads.webmproject.org/releases/webp/$_pkgbasename-$pkgver.tar.gz")
md5sums=('ba81eb9bf23e3c69a2f5cc8dcdb5938f')
build() {
cd "$srcdir/$_pkgbasename-$pkgver"
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
./configure --prefix=/usr --libdir=/usr/lib32
make
}
package() {
cd "$srcdir/$_pkgbasename-$pkgver"
make DESTDIR="$pkgdir/" install
rm -rf "${pkgdir}"/usr/{include,share,bin}
}
<file_sep>/lib32-gnutls/PKGBUILD
# Contributions from Arch: https://projects.archlinux.org/svntogit/community.git/tree/trunk?h=packages/lib32-gnutls
_pkgbasename=gnutls
pkgname=lib32-$_pkgbasename
pkgver=3.4.8
pkgrel=1
pkgdesc="A library which provides a secure layer over a reliable transport layer (32-bit)"
arch=('x86_64')
license=('GPL3' 'LGPL2.1')
url="http://gnutls.org/"
depends=('lib32-zlib' 'lib32-nettle' 'lib32-p11-kit' 'lib32-libtasn1' 'lib32-libidn' "$_pkgbasename=${pkgver}")
makedepends=('gcc-multilib')
source=(ftp://ftp.gnutls.org/gcrypt/gnutls/v3.4/${_pkgbasename}-${pkgver}.tar.xz)
md5sums=('a26e6dd8d5ad92016e3f068795b89624')
build() {
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cd ${srcdir}/${_pkgbasename}-${pkgver}
# build fails without --disable-hardware-acceleration because of assembler errors
./configure --prefix=/usr --libdir=/usr/lib32 \
--with-zlib \
--disable-static \
--disable-guile \
--disable-valgrind-tests --disable-hardware-acceleration
make
}
# check(): the upstream test suite is intentionally disabled (make check is
# commented out); the function is kept so makepkg's check stage is a no-op
# rather than absent.
check() {
cd ${srcdir}/${_pkgbasename}-${pkgver}
#make -k check
}
package() {
cd "${srcdir}/${_pkgbasename}-${pkgver}"
make DESTDIR="${pkgdir}" install
find $pkgdir
rm -rf "${pkgdir}"/usr/{bin,include,share}
}
<file_sep>/lib32-util-linux/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# maintainer (x86_64): <NAME> <<EMAIL>>
_pkgbasename=util-linux
pkgname=lib32-$_pkgbasename
pkgver=2.27
_pkgver=(${pkgver//./ })
_pkgver=${_pkgver[0]}.${_pkgver[1]}
pkgrel=1
pkgdesc="Miscellaneous system utilities for Linux (32-bit)"
url='http://www.kernel.org/pub/linux/utils/util-linux/'
arch=('x86_64')
depends=('lib32-glibc' $_pkgbasename=$pkgver)
provides=('libuuid.so' 'libblkid.so' 'libfdisk.so' 'libmount.so' 'libsmartcols.so')
makedepends=('gcc-multilib')
license=('GPL2')
options=('!libtool' '!emptydirs')
source=("ftp://ftp.kernel.org/pub/linux/utils/util-linux/v${_pkgver}/util-linux-$pkgver.tar.xz")
md5sums=('5b06bbda9309624ee7add15bc8d8ca22')
build() {
cd "$_pkgbasename-$pkgver"
./configure \
CC="${CC:-cc} -m32" \
PKG_CONFIG_PATH="/usr/lib32/pkgconfig" \
--libdir=/usr/lib32
make lib{uuid,blkid,fdisk,mount,smartcols}.la
}
package() {
make -C "$_pkgbasename-$pkgver" \
DESTDIR="$pkgdir" \
install-usrlib_execLTLIBRARIES \
install-pkgconfigDATA
}
<file_sep>/lib32-flex/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# Maintainer: AlmAck <<EMAIL>>
_pkgbasename=flex
pkgname=lib32-$_pkgbasename
pkgver=2.5.39
pkgrel=2
pkgdesc="A tool for generating text-scanning programs"
arch=('x86_64')
url="http://flex.sourceforge.net"
license=('custom')
groups=('base-devel')
depends=('lib32-glibc' 'm4' 'sh' $_pkgbasename=$pkgver)
options=('libtool' 'staticlibs')
source=(http://downloads.sourceforge.net/sourceforge/flex/flex-$pkgver.tar.bz2
flex-2.5.37-no-bison.patch
lex.sh)
sha256sums=('add2b55f3bc38cb512b48fad7d72f43b11ef244487ff25fc00aabec1e32b617f'
'5ee23f97533c991b82e2aadc06d4682d7d05d99ee2abaf1ef9a82225ba9d0858'
'9d03016a7c4ae1adb051f50f94407b3d7dee9d55924b5c1904261c9f0c1f86f6')
build() {
export CC="gcc -m32"
export CXX="g++ -m32"
export LD="ld -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cd $srcdir/$_pkgbasename-$pkgver
patch -Np1 -i $srcdir/flex-2.5.37-no-bison.patch
./configure --prefix=/usr --libdir=/usr/lib32 \
--mandir=/usr/share/man --infodir=/usr/share/info
make
}
check() {
cd $srcdir/$_pkgbasename-$pkgver
make check
}
package() {
cd $srcdir/$_pkgbasename-$pkgver
make prefix=$pkgdir/usr \
mandir=$pkgdir/usr/share/man \
infodir=$pkgdir/usr/share/info \
libdir=$pkgdir/usr/lib32 \
install
rm -rf "${pkgdir}"/usr/{include,share,bin}
mkdir -p $pkgdir/usr/share/licenses
ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/q4wine/PKGBUILD
pkgname=q4wine
pkgver=1.3
pkgrel=1
pkgdesc="Qt GUI for Wine"
arch=("x86_64")
url="http://sourceforge.net/projects/${pkgname}/"
license=("GPL3")
depends=("qt5-base" "wine" "sqlite3" "which" "icoutils" "fuseiso")
makedepends=("cmake" "qt5-tools")
optdepends=("winetricks")
options=('!emptydirs')
source=("http://downloads.sourceforge.net/${pkgname}/${pkgname}-${pkgver/_/-}.tar.bz2")
md5sums=('3290726d2423e28cef0354774ea3a17b')
build() {
cd ${srcdir}/${pkgname}-${pkgver/_/-}
cmake -DCMAKE_INSTALL_PREFIX=/usr \
-DQT5=ON \
-DLIBS_ENTRY_PATH=/usr/lib/$pkgname .
make
}
package() {
cd ${srcdir}/${pkgname}-${pkgver/_/-}
make DESTDIR=${pkgdir} install
}
<file_sep>/lib32-gtk3/lib32-at-spi2-core/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# Maintainer: AlmAck <<EMAIL>>
# Contributor: <NAME> <<EMAIL>>
_pkgbasename=at-spi2-core
pkgname=lib32-$_pkgbasename
pkgver=2.11.5
pkgrel=2
pkgdesc="Protocol definitions and daemon for D-Bus at-spi (32-bit)"
arch=(x86_64)
url="http://www.gnome.org"
license=(GPL2)
depends=('lib32-dbus-core' 'lib32-glib2' 'lib32-libxtst' $_pkgbasename=$pkgver)
makedepends=('gcc-multilib' 'intltool' 'libtool') # 'gobject-introspection'
source=(http://ftp.gnome.org/pub/GNOME/sources/$_pkgbasename/${pkgver%.*}/$_pkgbasename-$pkgver.tar.xz)
md5sums=('d61793f50af00cd5d9262c2d12d40ac6')
build() {
export CC='gcc -m32'
export CXX='g++ -m32'
export PKG_CONFIG_PATH='/usr/lib32/pkgconfig'
cd $_pkgbasename-$pkgver
sed -i -e '/AC_PATH_XTRA/d' configure.ac
autoreconf --force --install
./configure --prefix=/usr --sysconfdir=/etc \
--libdir=/usr/lib32 --libexecdir=/usr/lib32/at-spi2-core \
--enable-introspection=no \
--disable-xevie
make
}
package() {
cd $_pkgbasename-$pkgver
make DESTDIR="$pkgdir" install
# cleanup for lib32 package
rm -rf "${pkgdir}"/{etc,bin,sbin,usr/{bin,lib,include,share},var}
}
<file_sep>/lib32-gtk3/lib32-libxkbcommon/PKGBUILD
# Maintainer: <NAME> <franzmari [at] chakra-project [dot] it>
_pkgbasename=libxkbcommon
pkgname=lib32-$_pkgbasename
pkgver=0.5.0
pkgrel=1
pkgdesc="Keyboard handling library using XKB data"
arch=('x86_64')
url="http://xkbcommon.org/"
license=('custom')
depends=(xkeyboard-config 'lib32-glibc' $_pkgbasename=$pkgver )
makedepends=('lib32-libxcb' 'xorg-util-macros')
source=("http://xkbcommon.org/download/${pkgname#*-}-${pkgver}.tar.xz")
sha256sums=('90bd7824742b9a6f52a6cf80e2cadd6f5349cf600a358d08260772615b89d19c')
build() {
cd ${_pkgbasename}-${pkgver}
export CC='gcc -m32'
export CXX='g++ -m32'
export PKG_CONFIG_PATH='/usr/lib32/pkgconfig'
./configure --prefix='/usr' --libdir='/usr/lib32' --disable-{docs,static}
make
}
package() {
cd ${_pkgbasename}-${pkgver}
make DESTDIR="${pkgdir}" install
rm -rf "${pkgdir}"/usr/include
install -dm 755 "${pkgdir}"/usr/share/licenses
ln -s ${_pkgbasename} "${pkgdir}"/usr/share/licenses/${pkgname}
}
<file_sep>/lib32-libxv/PKGBUILD
_pkgbasename=libxv
pkgname=lib32-$_pkgbasename
pkgver=1.0.10
pkgrel=2
pkgdesc="X11 Video extension library (32-bit)"
arch=(x86_64)
license=('custom')
url="http://xorg.freedesktop.org/"
depends=('lib32-libxext' 'videoproto' $_pkgbasename=$pkgver)
makedepends=('pkgconfig' 'gcc-multilib')
options=('!libtool')
source=(${url}/releases/individual/lib/libXv-${pkgver}.tar.bz2)
sha1sums=('1e93df036bb2bb01e85b7c8886760affb33b8e88')
build() {
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cd ${srcdir}/libXv-${pkgver}
./configure --prefix=/usr --disable-static \
--libdir=/usr/lib32
make
}
package() {
cd ${srcdir}/libXv-${pkgver}
make DESTDIR=${pkgdir} install
rm -rf "${pkgdir}"/usr/{include,share,bin}
mkdir -p "$pkgdir/usr/share/licenses"
ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/lib32-libtiff/PKGBUILD
# lib32-libtiff: 32-bit build of libtiff, with a security patch applied.
# Headers are kept but relocated to /usr/include/libtiff32 so they do not
# collide with the 64-bit package's headers.
_pkgbasename=libtiff
pkgname=lib32-$_pkgbasename
pkgver=4.0.6
pkgrel=2
pkgdesc="Library for manipulation of TIFF images (32-bit)"
arch=('x86_64')
url="http://www.remotesensing.org/libtiff/"
license=('custom')
depends=('lib32-libjpeg-turbo' 'lib32-zlib' $_pkgbasename=$pkgver)
makedepends=('lib32-libgl' 'lib32-libxmu' 'lib32-libxi' gcc-multilib)
options=('!libtool')
source=(ftp://ftp.remotesensing.org/pub/libtiff/tiff-${pkgver}.tar.gz
tiff-4.0.6-buffer-overflow.patch)
md5sums=('d1d2e940dea0b5ad435f21f03d96dd72'
'cdf40bed7ca47252cc7104a17ac0b4da')
prepare() {
cd tiff-${pkgver}
# Local security fix (buffer overflow) carried alongside the tarball.
patch -Np1 -i ${srcdir}/tiff-4.0.6-buffer-overflow.patch
}
build() {
# 32-bit toolchain + 32-bit pkg-config search path.
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
cd "${srcdir}/tiff-${pkgver}"
./configure --prefix=/usr --sysconfdir=/etc --mandir=/usr/share/man --libdir=/usr/lib32 --includedir=/usr/include/libtiff32
make
}
package() {
cd "${srcdir}/tiff-${pkgver}"
make DESTDIR="${pkgdir}" install
# Docs and binaries come from the 64-bit package; note /usr/include/libtiff32
# is intentionally kept (see --includedir above).
rm -rf "${pkgdir}"/usr/{share,bin}
# Symlink the license dir to the 64-bit package's copy.
mkdir -p "$pkgdir/usr/share/licenses"
ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/lib32-libssh2/PKGBUILD
# lib32-libssh2: 32-bit build of libssh2. Ships only the library;
# headers/binaries are stripped and the license is symlinked to the
# 64-bit package's copy.
_pkgbasename=libssh2
pkgname=lib32-$_pkgbasename
pkgver=1.7.0
pkgrel=1
pkgdesc="A library implementing the SSH2 protocol as defined by Internet Drafts (32-bit)"
arch=('x86_64')
url="http://www.libssh2.org/"
license=('BSD')
depends=('lib32-openssl' $_pkgbasename=$pkgver)
makedepends=('lib32-gcc-libs')
options=('!libtool')
source=("http://www.libssh2.org/download/$_pkgbasename-$pkgver.tar.gz")
md5sums=('b01662a210e94cccf2f76094db7dac5c')

build() {
  # Force a 32-bit toolchain and the 32-bit pkg-config search path.
  export CC="gcc -m32"
  export CXX="g++ -m32"
  export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
  # Quote the path: ${srcdir} may contain spaces (fix: was unquoted).
  cd "${srcdir}/${_pkgbasename}-${pkgver}"
  ./configure \
    --prefix=/usr \
    --libdir=/usr/lib32
  make
}

package() {
  cd "${srcdir}/${_pkgbasename}-${pkgver}"
  # Quote ${pkgdir} for word-splitting safety (fix: was unquoted, unlike
  # the rm/mkdir lines below).
  make DESTDIR="${pkgdir}" install
  # Everything except the library is provided by the 64-bit package.
  rm -rf "${pkgdir}"/usr/{share,bin,sbin,include}
  mkdir -p "$pkgdir/usr/share/licenses"
  ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/steam/steam_info.sh
#!/usr/bin/env bash
#
# This script is used to display a warning to the user about:
# - chakra not being officially supported by Steam, so ask for help on the forum
# - a link to the wiki
#
# also a "don't show this message again" checkbox is present
# then start steam normally
#
# then a hack will be used to make steam work
# see here https://chakraos.org/wiki/index.php?title=Steam#Missing_Direct_Rendering
# kdialog's --dontagain stores the "don't show again" choice under the given
# config key, so the dialog appears only until the user dismisses it for good.
kdialog --dontagain steam_warnings_chakra:nofilemsg --msgbox \
"Chakra Linux is not officially supported by Valve.
For help visit the wiki page of Steam https://chakraos.org/wiki/index.php?title=Steam
or ask for help in the forum"
LD_PRELOAD="/usr/lib/libstdc++.so.6 /usr/lib32/libstdc++.so.6 /usr/lib/libgcc_s.so.1 /usr/lib32/libgcc_s.so.1" /usr/bin/steam %U<file_sep>/lib32-libxcb/PKGBUILD
# Part of X.org group
#
# maintainer (x86_64): <NAME> <<EMAIL>>
# maintainer (x86_64): <NAME> <abveritas[at]chakra-project[dot]org>
# lib32-libxcb: 32-bit build of the XCB client library. Uses a patch to
# drop the pthread-stubs requirement, then regenerates the build system.
_pkgbasename=libxcb
pkgname=lib32-$_pkgbasename
pkgver=1.11.1
pkgrel=1
pkgdesc="X11 client-side library (32-bit)"
arch=(x86_64)
url="http://xcb.freedesktop.org/"
depends=('lib32-libxdmcp' 'lib32-libxau' $_pkgbasename=$pkgver)
makedepends=('pkg-config' 'libxslt' 'python3' 'xorg-util-macros' 'gcc-multilib'
'autoconf')
# Older libx11 bundled xcb bits; force an upgrade-compatible pairing.
conflicts=('libx11<1.1.99.2')
license=('custom')
source=(${url}/dist/${_pkgbasename}-${pkgver}.tar.bz2
libxcb-1.1-no-pthread-stubs.patch)
sha256sums=('b720fd6c7d200e5371affdb3f049cc8f88cff9aed942ff1b824d95eedbf69d30'
'3923bcb1930b851012968435909597d8d5251c72153511cb2982636c97100cc3')
prepare() {
cd "${srcdir}/${_pkgbasename}-${pkgver}"
patch -Np1 -i "${srcdir}/libxcb-1.1-no-pthread-stubs.patch"
# The patch touches configure.ac, so the autotools files must be regenerated.
autoreconf -vfi
}
build() {
cd "${srcdir}/${_pkgbasename}-${pkgver}"
# 32-bit toolchain and pkg-config search path.
export CC="gcc -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
# autogen.sh re-runs autoreconf and then configure with the given flags.
PYTHON=/usr/bin/python3 ./autogen.sh \
--prefix=/usr \
--enable-xinput \
--enable-xkb \
--libdir=/usr/lib32 \
--disable-static
make
}
package() {
cd "${srcdir}/${_pkgbasename}-${pkgver}"
make DESTDIR="${pkgdir}" install
# Headers/docs are owned by the 64-bit package.
rm -rf "${pkgdir}"/usr/{include,share}
# License symlink to the 64-bit package's copy.
mkdir -p "$pkgdir/usr/share/licenses"
ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/lib32-libusb/PKGBUILD
# Maintainer: <NAME> <<EMAIL>>
# Contributor from AUR: 3V0LU710N <db_eee-at-hotmail-dot-com>
# lib32-libusb: 32-bit build of libusb (the libusbx fork, merged back
# upstream); provides/replaces the old lib32-libusbx package name.
_pkgbasename=libusb
pkgname=lib32-$_pkgbasename
pkgver=1.0.19
pkgrel=1
pkgdesc="Library to enable user space application programs to communicate with USB devices. (32-bit). libusb fork."
arch=('x86_64')
url="http://libusbx.sourceforge.net/"
license=('LGPL')
depends=('lib32-glibc' 'lib32-systemd' $_pkgbasename=$pkgver)
makedepends=('gcc-multilib')
provides=('lib32-libusbx')
conflicts=('lib32-libusbx')
replaces=('lib32-libusbx<=1.0.18')
source=(http://downloads.sourceforge.net/${_pkgbasename}/${_pkgbasename}-${pkgver}.tar.bz2)
options=(!libtool)
md5sums=('f9e2bb5879968467e5ca756cb4e1fa7e')

build() {
  # 32-bit toolchain and pkg-config search path.
  export CC="gcc -m32"
  export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
  cd "${_pkgbasename}-${pkgver}"
  ./configure --prefix=/usr --libdir=/usr/lib32
  make
}

package () {
  cd "${srcdir}/${_pkgbasename}-${pkgver}"
  make DESTDIR="${pkgdir}" install
  # Quote ${pkgdir}: was the only unquoted use in this function, a
  # word-splitting hazard if the build root contains spaces.
  rm -rf "${pkgdir}"/usr/include
}
<file_sep>/lib32-libcl/PKGBUILD
# Maintainer: almack <<EMAIL>>
# lib32-libcl: repackages the 32-bit OpenCL runtime library extracted
# from NVIDIA's self-extracting driver installer (no compilation).
pkgname=lib32-libcl
pkgver=1.1
_pkgver=337.25
pkgrel=1
pkgdesc="OpenCL library and ICD loader from NVIDIA (32-bit)"
arch=('x86_64')
url="http://www.nvidia.com/"
license=('custom')
# Pre-built proprietary blob: do not strip symbols.
options=('!strip')
_arch='x86'
_pkg="NVIDIA-Linux-${_arch}-${_pkgver}"
source=("ftp://download.nvidia.com/XFree86/Linux-${_arch}/${_pkgver}/${_pkg}.run")
md5sums=('2a217632ced8952e21000a51065f85b8')

build() {
  # Quote paths throughout: $srcdir/$pkgdir may contain spaces
  # (fix: all uses in this PKGBUILD were unquoted).
  cd "$srcdir"
  # Unpack the installer's payload without running the actual installer.
  sh ${_pkg}.run --extract-only
}

package() {
  cd "$srcdir/${_pkg}"
  install -D -m755 libOpenCL.so.1.0.0 "$pkgdir/usr/lib32/libOpenCL.so.1.0.0"
  # Standard soname symlink chain for the runtime linker.
  ln -s /usr/lib32/libOpenCL.so.1.0.0 "$pkgdir/usr/lib32/libOpenCL.so.1"
  ln -s /usr/lib32/libOpenCL.so.1 "$pkgdir/usr/lib32/libOpenCL.so"
  install -D -m644 LICENSE "$pkgdir/usr/share/licenses/$pkgname/LICENSE"
}
<file_sep>/lib32-sdl2_net/PKGBUILD
# maintainer: <NAME> <<EMAIL>>
# lib32-sdl2_net: 32-bit build of SDL2_net.
# NOTE(review): license is MIT but no license file is installed or
# symlinked here, unlike the other lib32 packages in this tree — confirm
# whether the 64-bit dependency covers it.
_pkgbasename=sdl2_net
pkgname=lib32-$_pkgbasename
pkgver=2.0.0
pkgrel=1
pkgdesc="A small sample cross-platform networking library (Version 2, 32-bit)"
arch=('i686' 'x86_64')
url="http://www.libsdl.org/projects/SDL_net"
license=('MIT')
depends=(lib32-sdl2 $_pkgbasename=$pkgver)
makedepends=('gcc-multilib')
options=(!libtool)
source=("$url/release/SDL2_net-${pkgver}.tar.gz")
md5sums=('83bcd0e67796b81b35b08a014c677200')
build() {
cd "${srcdir}/SDL2_net-${pkgver}/"
# 32-bit toolchain and pkg-config search path.
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
./configure --disable-static \
--prefix=/usr \
--libdir=/usr/lib32
make
}
package() {
cd "${srcdir}/SDL2_net-${pkgver}/"
make DESTDIR="${pkgdir}/" install
# Headers/docs/binaries come from the 64-bit package.
rm -rf "${pkgdir}"/usr/{include,share,bin}
}
<file_sep>/lib32-libtasn1/PKGBUILD
# contributions from Arch: https://www.archlinux.org/packages/multilib/x86_64/lib32-libtasn1/
# lib32-libtasn1: 32-bit build of the ASN.1 library used by GnuTLS.
# The tarball's PGP signature is verified via validpgpkeys (md5 'SKIP'
# for the .sig file itself).
_pkgbasename=libtasn1
pkgname=lib32-${_pkgbasename}
pkgver=4.8
pkgrel=1
pkgdesc="The ASN.1 library used in GNUTLS (32 bit)"
arch=('x86_64')
license=('GPL3' 'LGPL')
url="http://www.gnu.org/software/libtasn1/"
depends=('lib32-glibc' $_pkgbasename=$pkgver)
makedepends=('lib32-gcc-libs')
options=('!libtool')
source=(http://ftp.gnu.org/gnu/libtasn1/${_pkgbasename}-${pkgver}.tar.gz{,.sig})
md5sums=('9a6767705725544f2b86670dcfb34107'
'SKIP')
validpgpkeys=('<KEY>') #<NAME> <<EMAIL>>
build() {
# 32-bit toolchain and pkg-config search path.
export CC='gcc -m32'
export CXX='g++ -m32'
export PKG_CONFIG_PATH='/usr/lib32/pkgconfig'
cd ${_pkgbasename}-${pkgver}
./configure --prefix=/usr --libdir=/usr/lib32
make
}
package() {
cd ${_pkgbasename}-${pkgver}
make DESTDIR="${pkgdir}" install
# Binaries/headers/docs are owned by the 64-bit package.
rm -rf "${pkgdir}/usr/"{bin,include,share}
}
<file_sep>/lib32-sqlite3/PKGBUILD
# lib32-sqlite3: 32-bit build of SQLite from the autoconf amalgamation
# tarball, compiled with the same feature defines as the 64-bit package.
_pkgbasename=sqlite3
pkgname=lib32-sqlite3
# Upstream encodes the version into the tarball name as a 7-digit number.
_amalgamationver=3120100
pkgver=3.12.1
pkgrel=1
pkgdesc="A C library that implements an SQL database engine (32-bit)"
arch=('x86_64')
license=('custom')
url="http://www.sqlite.org/"
depends=(lib32-glibc $_pkgbasename=$pkgver 'lib32-gcc-libs')
makedepends=('tcl' 'gcc' 'lib32-readline')
source=(https://www.sqlite.org/2016/sqlite-autoconf-${_amalgamationver}.tar.gz)
sha1sums=('fa1f3dbf6e2e8c6d14125b2eceda5b2e16a19f1f')
# Parallel make is known to be unreliable for this build.
options=('!makeflags')
build() {
cd ${srcdir}/sqlite-autoconf-${_amalgamationver}
# 32-bit toolchain and pkg-config search path.
export CC="gcc -m32"
export CXX="g++ -m32"
export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
export LTLINK_EXTRAS="-ldl"
# Compile-time feature switches, matching the 64-bit sqlite3 package.
export CFLAGS="$CFLAGS -DSQLITE_ENABLE_COLUMN_METADATA=1 \
-DSQLITE_ENABLE_UNLOCK_NOTIFY \
-DSQLITE_SECURE_DELETE \
-DSQLITE_ENABLE_DBSTAT_VTAB \
-DSQLITE_ENABLE_FTS3_TOKENIZER=1 \
-DSQLITE_ENABLE_API_ARMOR"
./configure --prefix=/usr \
--libdir=/usr/lib32 \
--disable-static \
--disable-amalgamation \
--enable-fts3 \
--enable-fts4 \
--enable-fts5 \
--enable-rtree \
--enable-json1
make
}
package() {
cd ${srcdir}/sqlite-autoconf-${_amalgamationver}
make DESTDIR=${pkgdir} install
# The CLI, headers and docs come from the 64-bit package.
rm -rf "${pkgdir}"/usr/{include,share,bin}
# License symlink to the 64-bit package's copy.
mkdir -p "$pkgdir/usr/share/licenses"
ln -s $_pkgbasename "$pkgdir/usr/share/licenses/$pkgname"
}
<file_sep>/wine_gecko/PKGBUILD
# wine_gecko: pre-built Gecko MSI installers consumed by Wine at runtime.
# Both the 32-bit and 64-bit MSIs are shipped; Wine picks the right one.
pkgname=wine_gecko
pkgver=2.44
pkgrel=1
pkgdesc="Wine's built-in replacement for Microsoft's Internet Explorer"
arch=('x86_64')
url="http://wiki.winehq.org/Gecko"
license=('MPL')
depends=('wine>=1.9.3')
# Brace expansion fetches wine_gecko-${pkgver}-x86.msi and -x86_64.msi.
source=(http://dl.winehq.org/wine/wine-gecko/${pkgver}/wine_gecko-${pkgver}-x86{,_64}.msi)
md5sums=('f6ff2bb5dddc144726bd9a955fb16649'
'59e7a37dcdc87f5e4f030bfafbb1c0d6')
package() {
cd "$srcdir"
# No build step: the MSIs are installed verbatim where Wine looks for them.
install -d -m755 "${pkgdir}/usr/share/wine/gecko/"
install -D -m644 "${pkgname}-${pkgver}-x86.msi" \
"${pkgname}-${pkgver}-x86_64.msi" \
"${pkgdir}/usr/share/wine/gecko/"
}
<file_sep>/lib32-cairo/PKGBUILD
# Lib32 Packages for Chakra, part of chakra-project.org
#
# lib32-cairo: 32-bit build of cairo with Ubuntu's patch series applied
# (this is why the package provides/replaces lib32-cairo-ubuntu).
_pkgbasename=cairo
pkgname=lib32-$_pkgbasename
pkgver=1.14.4
pkgrel=1
_ubuntu_diff_version=${pkgver}-1
pkgdesc="Cairo vector graphics library (32-bit)"
arch=('x86_64')
url="http://cairographics.org/"
license=('LGPL' 'MPL')
makedepends=('gcc-multilib' 'libtool-multilib')
depends=('lib32-libpng' 'lib32-libxext' 'lib32-libxrender' 'lib32-fontconfig'
'lib32-pixman' 'lib32-glib2' 'lib32-mesa' 'lib32-libgl'
${_pkgbasename}=${pkgver})
provides=("${pkgname}-ubuntu")
conflicts=("${pkgname}-ubuntu")
replaces=("${pkgname}-ubuntu")
source=("http://cairographics.org/releases/${_pkgbasename}-${pkgver}.tar.xz"
"http://archive.ubuntu.com/ubuntu/pool/main/c/cairo/cairo_${_ubuntu_diff_version}.debian.tar.xz")
sha1sums=('5b44471e7c328f96de6830baf8ea65030de797f9'
'7602b72defd4f540bcc29a1860e30f00db86a808')

build() {
  # 32-bit toolchain and pkg-config search path.
  export CC="gcc -m32"
  export CXX="g++ -m32"
  export PKG_CONFIG_PATH="/usr/lib32/pkgconfig"
  cd "${srcdir}/${_pkgbasename}-${pkgver}"
  # Apply Ubuntu's patch series in order, skipping lines that contain '#'
  # (same filter as before; the `cat | grep` pipeline was a useless use of
  # cat, and the series path is now quoted).
  for _f in $(grep -v '#' "$srcdir/debian/patches/series") ; do
    patch -Np1 -i "$srcdir/debian/patches/$_f"
  done
  # The patches touch the build system, so regenerate it.
  autoreconf -vfi
  ./configure --prefix=/usr \
    --libdir=/usr/lib32 \
    --sysconfdir=/etc \
    --localstatedir=/var \
    --disable-static \
    --enable-tee \
    --enable-gl \
    --enable-egl
  # --disable-xlib-xcb
  make
}

package() {
  cd "${srcdir}/${_pkgbasename}-${pkgver}"
  make DESTDIR="${pkgdir}" install
  # Headers/docs/tools are owned by the 64-bit package.
  rm -rf "${pkgdir}"/usr/{include,share,bin}
}
<file_sep>/libtool-multilib/PKGBUILD
# maintainer: <NAME>
# NOTE: requires rebuilt with each new gcc version
# Split package: builds libtool twice (64-bit and 32-bit trees) and emits
# libtool-multilib (scripts/docs) plus lib32-libltdl (the 32-bit dlopen
# wrapper library). The gcc version is pinned because libtool hardcodes
# compiler paths at build time.
pkgbase=libtool-multilib
_pkgbase=libtool
pkgname=(libtool-multilib lib32-libltdl)
pkgver=2.4.2
_gccver=4.9.1
pkgrel=8
pkgdesc="A generic library support script for multilib"
arch=('x86_64')
url="http://www.gnu.org/software/libtool"
license=('GPL')
makedepends=("gcc-multilib=${_gccver}")
source=(ftp://ftp.gnu.org/pub/gnu/libtool/libtool-${pkgver}.tar.xz{,.sig})
md5sums=('2ec8997e0c07249eb4cbd072417d70fe'
'1e6ba57420c82c663c85e745d11c7eed')
prepare() {
# Duplicate the source tree so the two builds don't share object files.
mv libtool-$pkgver libtool-64
cp -a libtool-64 libtool-32
}
build() {
# Subshells keep each build's exported environment isolated.
( msg2 "Building libtool-64..."
cd libtool-64
./configure --prefix=/usr
make
)
( msg2 "Building libtool-32..."
export CC="gcc -m32"
export CXX="g++ -m32"
cd libtool-32
./configure --prefix=/usr --libdir=/usr/lib32
make
)
}
check() {
# -k: keep going so both suites run even if one test fails.
cd libtool-64
make -k check
cd ../libtool-32
make -k check
}
package_libtool-multilib() {
depends=('sh' "libltdl=$pkgver" 'tar' "gcc-multilib=${_gccver}" "lib32-libltdl=$pkgver")
groups=('multilib-devel')
install=libtool.install
provides=("libtool=$pkgver-$pkgrel")
conflicts=(libtool)
cd libtool-64
# Install only the scripts/man/info parts; the libltdl library is shipped
# by the plain libltdl package.
make DESTDIR=${pkgdir} install-binSCRIPTS install-man install-info \
install-data-local
rm -r ${pkgdir}/usr/share/libtool/libltdl/
}
package_lib32-libltdl() {
pkgdesc="A system independent dlopen wrapper for GNU libtool (32-bit)"
depends=(lib32-glibc libltdl)
replaces=(lib32-libtool)
provides=("lib32-libtool=$pkgver-$pkgrel")
conflicts=(lib32-libtool)
cd libtool-32
# Only the 32-bit libltdl library itself is packaged here.
make DESTDIR=${pkgdir} install-libLTLIBRARIES
}
<file_sep>/lib32-jack2/PKGBUILD
# Contributions from Arch: https://www.archlinux.org/packages/multilib/x86_64/lib32-jack2/
#
## Legacy jack2-multilib package description (now replaced by lib32-jack2)
#
# This one is in response to a need for an equivalent to lib32-jack for
# jack2. A lib32-jack2 would require much patching and invading the pure
# jack2 package, and what's more, the buildsystem provides a flag just to
# build a hybrid jack2 in full. As such, we have opted to provide multilib
# users with a replacement package instead of the usual lib32 add-on.
#
# See http://mailman.archlinux.org/pipermail/arch-multilib/2011-December/000251.html
## New lib32-jack2 package description (replaces jack2{-dbus,}-multilib
#
# The vanilla mixed mode build is broken since a long time, so we build
# and provide just the 32 bit jack client library using some dynamic
# patching that seems stable enough to last on future build system changes.
_pkgname=jack2
pkgname=lib32-$_pkgname
pkgver=1.9.10
pkgrel=1
pkgdesc="The next-generation JACK with SMP support (32 bit)"
arch=('x86_64')
url="http://jackaudio.org/"
license=('GPL')
depends=('lib32-gcc-libs' $_pkgname=$pkgver)
makedepends=('python2' 'gcc' 'eigen3')
provides=('lib32-jack')
conflicts=('lib32-jack')
source=("https://github.com/jackaudio/jack2/archive/v${pkgver}.tar.gz")
md5sums=('2db990e7c09e4cc163f40add01f6867d')
prepare() {
cd "$srcdir/$_pkgname-$pkgver"
# ugly dynamic patch to build just the 32 bit jack client lib
# (compatible with previous releases and hoping with the next)
# The sed edits common/wscript in place so the waf build only produces
# the 32-bit client library target; fragile by design, see header above.
sed -e '/serverlib/,/clientlib/{/clientlib/!d}' \
-e '/clientlib32bit/i\\tclientlib.posted = True' \
-e '1,/clientlib32bit/!d' \
-i common/wscript
# celt and opus are not required to build
sed -i 's/, "CELT", "OPUS"//' common/wscript
}
build() {
cd "$srcdir/$_pkgname-$pkgver"
# jack2 uses waf (driven by python2) instead of autotools; --mixed
# enables the hybrid 32/64-bit configuration.
python2 waf configure --prefix=/usr \
--libdir=/usr/lib32 \
--mixed
# Build only the common/ subtree (the client library).
cd common
python2 ../waf build $MAKEFLAGS
}
package() {
cd "$srcdir/$_pkgname-$pkgver/build/common"
# jack client 32 bit lib
# Install under a fixed soname and create the usual symlink chain.
install -Dm755 libjack.so "$pkgdir/usr/lib32/libjack.so.0.1.0"
ln -s libjack.so.0.1.0 "$pkgdir/usr/lib32/libjack.so.0"
ln -s libjack.so.0.1.0 "$pkgdir/usr/lib32/libjack.so"
}
<file_sep>/lib32-libmodplug/PKGBUILD
# lib32-libmodplug: 32-bit build of the MOD-playing library.
_pkgbasename=libmodplug
pkgname=lib32-${_pkgbasename}
pkgver=0.8.8.5
pkgrel=1
pkgdesc="A MOD playing library (32 bit)"
arch=('x86_64')
url="http://modplug-xmms.sourceforge.net/"
license=('custom')
depends=('lib32-gcc-libs' $_pkgbasename=$pkgver)
makedepends=('gcc-multilib')
options=('!libtool')
source=("http://downloads.sourceforge.net/modplug-xmms/${_pkgbasename}-${pkgver}.tar.gz")
md5sums=('5f30241db109d647781b784e62ddfaa1')
build() {
# 32-bit toolchain and pkg-config search path.
export CC='gcc -m32'
export CXX='g++ -m32'
export PKG_CONFIG_PATH='/usr/lib32/pkgconfig'
cd "${srcdir}/${_pkgbasename}-${pkgver}"
./configure --prefix=/usr --libdir=/usr/lib32
make
}
package() {
cd "${srcdir}/${_pkgbasename}-${pkgver}"
make DESTDIR="${pkgdir}" install
# Headers/docs/binaries are owned by the 64-bit package.
rm -rf "${pkgdir}"/usr/{include,share,bin}
}
<file_sep>/lib32-wayland/PKGBUILD
# Maintainer: <NAME> <franzmari [at] chakra-project [dot] it>
# lib32-wayland: 32-bit build of the Wayland client/server libraries.
_pkgbasename=wayland
pkgname=lib32-$_pkgbasename
pkgver=1.7.0
pkgrel=1
pkgdesc='A computer display server protocol'
arch=('x86_64')
url='http://wayland.freedesktop.org'
license=('MIT')
depends=('lib32-libffi' 'lib32-expat' $_pkgbasename=$pkgver )
source=("http://wayland.freedesktop.org/releases/wayland-${pkgver}.tar.xz")
sha256sums=('bdea47a2db96f7e53f1ce0351559c0af5b7f7aae7e95f0a884a78af9f1057c9c')
build() {
cd ${_pkgbasename}-${pkgver}
# 32-bit toolchain and pkg-config search path.
export CC='gcc -m32'
export CXX='g++ -m32'
export PKG_CONFIG_PATH='/usr/lib32/pkgconfig'
./configure \
--prefix='/usr' \
--libdir='/usr/lib32' \
--disable-documentation \
--disable-static
make
}
package() {
cd ${_pkgbasename}-${pkgver}
make DESTDIR="${pkgdir}" install
# Binaries/headers/docs come from the 64-bit package.
rm -rf "${pkgdir}"/usr/{bin,include,share}
# License symlink to the 64-bit package's copy.
install -dm 755 "${pkgdir}"/usr/share/licenses
ln -s ${_pkgbasename} "${pkgdir}"/usr/share/licenses/${pkgname}
}
| e6006af76f9b314a717ab0202b13bcb98afec182 | [
"Shell"
] | 72 | Shell | cha63506/lib32 | 2beee5a6c58439ec99ede60ce78742460f4edc26 | 20fc55d5da8ddd9bf3e9a2e3028ca44379c32e2b |
refs/heads/master | <file_sep>package alfredo.gfx;
import alfredo.geom.Vector;
import java.awt.image.BufferedImage;
/**
 * A Graphic represents anything that can be rendered on a Canvas.
 * Implementations supply a BufferedImage snapshot of their current state
 * plus a pivot point used by the Canvas when positioning/rotating the image.
 * @author TheMonsterOfTheDeep
 */
public abstract class Graphic {
// NOTE(review): this enum is not referenced anywhere in this class;
// presumably intended for pivot presets — confirm callers before removing.
public static enum Pivot {
TopLeft,
TopRight,
BottomLeft,
BottomRight
}
/**
 * Returns a BufferedImage that represents the current state of this
 * Graphic. A return value of "null" means that the image is blank.
 * @return Rendered data the Canvas can draw.
 */
public abstract BufferedImage getRender();
/** Returns the pivot offset applied when this graphic is drawn. */
public abstract Vector getPivot();
/** Sets the pivot offset applied when this graphic is drawn. */
public abstract void setPivot(Vector v);
/** Wraps this graphic in a Renderer for use as an entity component. */
public final Renderer toRenderer() {
return new Renderer(this);
}
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package dungeon20;
import alfredo.Canvas;
import alfredo.Component;
import alfredo.Entity;
import alfredo.geom.Vector;
import alfredo.gfx.Graphic;
import alfredo.gfx.ImageGraphic;
/**
 * Particle-burst component: {@link #populate} spawns 20 short-lived entities
 * that fly outward from a position, each drawing one of the explosion sprites
 * with shrinking opacity, and destroying themselves after 5 ticks.
 * @author TheMonsterOfTheDeep
 */
public class Explosion extends Component {
    public static final int EXPLOSION_FIRE = 0;
    public static final int EXPLOSION_BOLT = 1;
    public static final int EXPLOSION_NOVA = 2;
    public static final int EXPLOSION_LIGHTNING = 3;
    public static final int EXPLOSION_ICE = 4;
    public static final int EXPLOSION_ARROW = 5;
    public static final int EXPLOSION_SLIME = 6;
    public static final int EXPLOSION_BONE = 7;
    public static final int EXPLOSION_FLAME = 8;

    /** Sprite file names under /resrc/img/explosion/, indexed by the EXPLOSION_* constants. */
    private static final String[] EXPLOSION_NAMES = {
        "fire", "bolt", "nova", "lightning", "ice", "arrow", "slime", "bone", "flamearrow"
    };

    // Lazily loaded sprite cache, shared by all explosions.
    static Graphic[] explosions = null;

    /**
     * Spawns 20 explosion particles of the given type at a position.
     * Sprites are loaded on first use (was nine copy-pasted load calls;
     * now a data-driven loop producing the identical resource paths).
     * @param position world position of the burst
     * @param number   one of the EXPLOSION_* constants selecting the sprite
     */
    public static void populate(Vector position, int number) {
        if(explosions == null) {
            explosions = new Graphic[EXPLOSION_NAMES.length];
            for(int i = 0; i < explosions.length; ++i) {
                explosions[i] = ImageGraphic.load("/resrc/img/explosion/" + EXPLOSION_NAMES[i] + ".png");
            }
        }
        for(int i = 0; i < 20; ++i) {
            Entity.create(position).chain(new Explosion(number));
        }
    }

    int lifetime = 0;           // ticks since spawn; particle dies after 5
    final int number;           // sprite index (EXPLOSION_* constant)
    final Vector delta;         // per-tick velocity, random speed 2..7 and random direction
    double angle;               // NOTE(review): incremented each tick but never read — confirm before removing

    public Explosion(int number) {
        delta = Vector.fromDirection(2 + (float)(Math.random() * 5), (float)(Math.random() * 360));
        this.number = number;
    }

    @Override
    public void draw(Canvas c) {
        // Opacity fades as 1/(4t+1); sprite is rotated to face its travel direction.
        c.draw(explosions[number], parent.position.x, parent.position.y, delta.getDirection(), 1f / (4 * lifetime + 1));
    }

    @Override
    public void tick() {
        parent.position.add(delta);
        ++lifetime;
        ++angle;
        if(lifetime > 5) { parent.destroy(); }
    }
}
<file_sep>package alfredo;
import alfredo.geom.Vector;
import alfredo.gfx.Graphic;
import java.awt.AlphaComposite;
import java.awt.Color;
import java.awt.Composite;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.Shape;
import java.awt.TexturePaint;
import java.awt.geom.AffineTransform;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
/**
 * Drawing surface backed by an ARGB BufferedImage. All world-space draw and
 * fill calls are mapped to screen space through the attached Camera
 * (screenX/screenY for position, getScale for size).
 * @author TheMonsterOfTheDeep
 */
public class Canvas {
private BufferedImage buffer;
private Graphics2D graphics;
public Camera camera;
// (Re)allocates the backing image, preserving the old clip region and
// copying the old contents into the new buffer.
private void createBuffer(int width, int height) {
BufferedImage old = buffer;
buffer = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
Shape oldClip = null;
if(graphics != null) {
oldClip = graphics.getClip();
graphics.dispose();
}
graphics = buffer.createGraphics();
graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
//graphics.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC);
if(oldClip != null) {
graphics.setClip(oldClip);
}
if(old != null) {
graphics.drawImage(old, 0, 0, null);
old.flush();
}
}
public Canvas(int width, int height, Camera c) {
createBuffer(width, height);
this.camera = c;
}
/** Convenience constructor using a static (non-following) camera. */
public Canvas(int width, int height) {
this(width, height, new Camera.Static());
}
/** Fills the whole buffer with black (respects the current clip). */
public void clear() {
graphics.setColor(Color.BLACK);
graphics.fillRect(0, 0, buffer.getWidth(), buffer.getHeight());
}
// Resizes buffer and camera, re-derives the camera clip, then blanks the
// full surface (clip temporarily lifted so the whole area is cleared).
public void resize(int width, int height) {
createBuffer(width, height);
camera.resize(width, height);
camera.clip(this);
Shape clip = graphics.getClip();
graphics.setClip(null);
graphics.setColor(Color.BLACK);
graphics.fillRect(0, 0, width, height);
graphics.setClip(clip);
}
/**
 * Draws a graphic at world position (x,y), rotated by angle degrees about
 * that position, scaled by the camera, at the given opacity (0..1).
 * Transform order (applied to the image from last to first): shift so the
 * image center plus the graphic's pivot lands on the origin, scale,
 * rotate, then translate to the screen position.
 */
public void draw(Graphic g, float x, float y, double angle, float opacity) {
BufferedImage image = g.getRender();
if(image == null) { return; }
float scale = camera.getScale();
float w = image.getWidth() / 2f;
float h = image.getHeight() / 2f;
Vector pivot = g.getPivot();
x = camera.screenX(x);
y = camera.screenY(y);
AffineTransform transform = new AffineTransform();
transform.translate(x, y);
transform.rotate(Math.toRadians(angle), 0, 0);
transform.scale(scale, scale);
transform.translate(-w - pivot.x, -h - pivot.y);
// Opacity is applied via an alpha composite, restored afterwards.
Composite old = graphics.getComposite();
graphics.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, opacity));
graphics.drawImage(image, transform, null);
graphics.setComposite(old);
}
public void draw(Graphic g, float x, float y, double angle) {
draw(g, x, y, angle, 1);
}
public void draw(Graphic g, Vector position, double angle) {
draw(g, position.x, position.y, angle);
}
/** Fills a camera-scaled rectangle with an opaque RGB color. */
public void fill(int color, float x, float y, float width, float height) {
AffineTransform old = graphics.getTransform();
x = camera.screenX(x);
y = camera.screenY(y);
float scale = camera.getScale();
// A unit rect is drawn under a scale transform instead of scaling coords.
graphics.translate(x, y);
graphics.scale(width * scale, height * scale);
graphics.setColor(new Color(color));
graphics.fillRect(0, 0, 1, 1);
graphics.setTransform(old);
}
/** Like fill(), but the color int is interpreted as ARGB (alpha honored). */
public void fillRaw(int color, float x, float y, float width, float height) {
AffineTransform old = graphics.getTransform();
x = camera.screenX(x);
y = camera.screenY(y);
float scale = camera.getScale();
graphics.translate(x, y);
graphics.scale(width * scale, height * scale);
graphics.setColor(new Color(color, true));
graphics.fillRect(0, 0, 1, 1);
graphics.setTransform(old);
}
public void fill(int color, float x, float y, Vector size) {
fill(color, x, y, size.x, size.y);
}
public void fill(int color, Vector position, float width, float height) {
fill(color, position.x, position.y, width, height);
}
public void fill(int color, Vector position, Vector size) {
fill(color, position.x, position.y, size.x, size.y);
}
public void draw(Graphic g, float x, float y) {
draw(g, x, y, 0);
}
/** Exposes the backing image for blitting to the window. */
public BufferedImage getRender() {
return buffer;
}
/** Restricts subsequent drawing to the given screen-space rectangle. */
public void clip(int x, int y, int width, int height) {
graphics.setClip(x, y, width, height);
}
public int getWidth() { return buffer.getWidth(); }
public int getHeight() { return buffer.getHeight(); }
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package dungeon20;
import alfredo.geom.Vector;
/**
 * Base class for an enemy wave. Subclasses set the per-enemy spawn rates
 * and probabilities in init() and advance wave logic in update(); this
 * class owns the spawn countdown timers and the shared tick counter.
 * A rate of 0 disables that enemy type entirely.
 * @author TheMonsterOfTheDeep
 */
public abstract class Wave {
// Base ticks between spawns of each enemy type (0 = never spawn).
int slimeRate;
int skelRate;
int snailRate;
// Countdown timers until the next spawn of each type.
private int nextSlime;
private int nextSkel;
private int nextSnail;
// Probability (0..1) that a given spawn is the boss/elite variant.
float slimeProb = 0;
float skelProb = 0;
float snailProb = 0;
// Ticks elapsed since begin().
int tick;
// Set by subclasses when the wave is finished.
public boolean over = false;
public Wave() {
}
/** Configure rates/probabilities; called once per wave start by begin(). */
public abstract void init();
/** Per-tick wave logic; called by tick() after spawning. */
public abstract void update();
public final void begin() {
init();
tick = 0;
over = false;
// Prime the countdowns from the freshly configured rates.
nextSlime = slimeRate;
nextSkel = skelRate;
nextSnail = snailRate;
// Announce the new wave on screen.
TextBubble.newBubble(new Vector(30, 40), TextBubble.TEXT_WAVE, true);
}
public final void tick() {
--nextSlime;
--nextSkel;
--nextSnail;
// Each timer that expires spawns one enemy and resets with 0-5 ticks of jitter.
if(nextSlime <= 0 && slimeRate != 0) {
Slime.newSlime(Math.random() < slimeProb); //Boss spawning is up to the individual wave (sorta)
nextSlime = (int) (slimeRate + Math.random() * 5);
}
if(nextSkel <= 0 && skelRate != 0) {
Skeleton.newSkeleton(Math.random() < skelProb);
nextSkel = (int) (skelRate + Math.random() * 5);
}
if(nextSnail <= 0 && snailRate != 0) {
Firesnail.newFiresnail(Math.random() < snailProb);
nextSnail = (int) (snailRate + Math.random() * 5);
}
update();
++tick;
}
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package dungeon20;
import alfredo.Component;
import alfredo.Sound;
/**
 * Holds the audio cues for an enemy: one clip played when it takes damage
 * and one played when it dies. The death cue fires automatically through
 * the component destroy() hook when the owning entity is removed.
 * @author TheMonsterOfTheDeep
 */
public class EnemySound extends Component {
    final Sound hurt;
    final Sound kill;

    /**
     * @param hurtSound clip to play on damage
     * @param killSound clip to play on death
     */
    public EnemySound(Sound hurtSound, Sound killSound) {
        this.hurt = hurtSound;
        this.kill = killSound;
    }

    /** Plays the damage cue. */
    public void hurt() {
        this.hurt.play();
    }

    /** Component hook: plays the death cue when the entity is destroyed. */
    @Override
    public void destroy() {
        this.kill.play();
    }
}
<file_sep>package alfredo.inpt;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.util.ArrayList;
/**
 * Static keyboard state table. One Key object exists per AWT key code
 * 0..255; the shared KeyListener forwards AWT press/release events to the
 * matching Key, suppressing OS key-repeat (only edge transitions fire).
 * @author TheMonsterOfTheDeep
 */
public class Keys {
// One slot per key code; index equals Key.id (see static initializer).
private static final Key[] keys = new Key[256];
public static final KeyListener listener = new KeyListener() {
@Override
public void keyTyped(KeyEvent e) {
}
@Override
public void keyPressed(KeyEvent e) {
// Linear scan also guards against key codes outside 0..255.
for(Key k : keys) {
if(k.id == e.getKeyCode()) {
// Ignore auto-repeat: only fire on the not-pressed -> pressed edge.
if(!k.pressed) {
k.performDown();
}
}
}
}
@Override
public void keyReleased(KeyEvent e) {
for(Key k : keys) {
if(k.id == e.getKeyCode()) {
if(k.pressed) {
k.performUp();
}
}
}
}
};
static {
for(int i = 0; i < 256; ++i) {
keys[i] = new Key(i);
}
}
// Named aliases into the table for the commonly used keys.
public static final Key SHIFT = keys[KeyEvent.VK_SHIFT];
public static final Key A = keys[KeyEvent.VK_A];
public static final Key B = keys[KeyEvent.VK_B];
public static final Key C = keys[KeyEvent.VK_C];
public static final Key D = keys[KeyEvent.VK_D];
public static final Key E = keys[KeyEvent.VK_E];
public static final Key F = keys[KeyEvent.VK_F];
public static final Key G = keys[KeyEvent.VK_G];
public static final Key H = keys[KeyEvent.VK_H];
public static final Key I = keys[KeyEvent.VK_I];
public static final Key J = keys[KeyEvent.VK_J];
public static final Key K = keys[KeyEvent.VK_K];
public static final Key L = keys[KeyEvent.VK_L];
public static final Key M = keys[KeyEvent.VK_M];
public static final Key N = keys[KeyEvent.VK_N];
public static final Key O = keys[KeyEvent.VK_O];
public static final Key P = keys[KeyEvent.VK_P];
public static final Key Q = keys[KeyEvent.VK_Q];
public static final Key R = keys[KeyEvent.VK_R];
public static final Key S = keys[KeyEvent.VK_S];
public static final Key T = keys[KeyEvent.VK_T];
public static final Key U = keys[KeyEvent.VK_U];
public static final Key V = keys[KeyEvent.VK_V];
public static final Key W = keys[KeyEvent.VK_W];
public static final Key X = keys[KeyEvent.VK_X];
public static final Key Y = keys[KeyEvent.VK_Y];
public static final Key Z = keys[KeyEvent.VK_Z];
public static final Key SPACE = keys[KeyEvent.VK_SPACE];
// Number-row digits.
public static final Key ROW_0 = keys[KeyEvent.VK_0];
public static final Key ROW_1 = keys[KeyEvent.VK_1];
public static final Key ROW_2 = keys[KeyEvent.VK_2];
public static final Key ROW_3 = keys[KeyEvent.VK_3];
public static final Key ROW_4 = keys[KeyEvent.VK_4];
public static final Key ROW_5 = keys[KeyEvent.VK_5];
public static final Key ROW_6 = keys[KeyEvent.VK_6];
public static final Key ROW_7 = keys[KeyEvent.VK_7];
public static final Key ROW_8 = keys[KeyEvent.VK_8];
public static final Key ROW_9 = keys[KeyEvent.VK_9];
// Numeric-keypad digits.
public static final Key PAD_0 = keys[KeyEvent.VK_NUMPAD0];
public static final Key PAD_1 = keys[KeyEvent.VK_NUMPAD1];
public static final Key PAD_2 = keys[KeyEvent.VK_NUMPAD2];
public static final Key PAD_3 = keys[KeyEvent.VK_NUMPAD3];
public static final Key PAD_4 = keys[KeyEvent.VK_NUMPAD4];
public static final Key PAD_5 = keys[KeyEvent.VK_NUMPAD5];
public static final Key PAD_6 = keys[KeyEvent.VK_NUMPAD6];
public static final Key PAD_7 = keys[KeyEvent.VK_NUMPAD7];
public static final Key PAD_8 = keys[KeyEvent.VK_NUMPAD8];
public static final Key PAD_9 = keys[KeyEvent.VK_NUMPAD9];
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package dungeon20;
import alfredo.Component;
/**
 * Marker component tagging an entity as an enemy and recording its level.
 * Carries no behavior of its own; other systems presumably read the level
 * for damage/reward scaling — confirm against callers.
 * @author TheMonsterOfTheDeep
 */
public class Enemy extends Component {
// Enemy level, fixed at construction.
public final int level;
public Enemy(int level) {
this.level = level;
}
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package dungeon20;
import alfredo.Component;
/**
 * Regenerates the parent entity's Healthbar once a post-damage cooldown has
 * elapsed: while the cooldown is positive it only counts down; otherwise a
 * fifth of the missing health is restored each tick, snapping to full when
 * nearly there.
 * @author TheMonsterOfTheDeep
 */
public class Healer extends Component {
    // Ticks remaining before regeneration resumes (set externally on damage).
    public int damageCooldown;

    @Override
    public void tick() {
        // Recently hit: just burn down the cooldown, no healing yet.
        if(damageCooldown > 0) {
            --damageCooldown;
            return;
        }
        Healthbar h = parent.getComponent(Healthbar.class);
        if(h == null || h.current >= h.capacity) {
            return; // No healthbar attached, or already full.
        }
        // Restore a fifth of the missing health this tick.
        h.changeBy((h.capacity - h.current) / 5);
        // Snap to full once close (mostly so it will stop showing +s).
        if(h.capacity - h.current < 0.5) {
            h.current = h.capacity;
        }
    }
}
<file_sep>package alfredo.gfx;
import alfredo.geom.Vector;
import java.awt.image.BufferedImage;
import java.io.IOException;
import javax.imageio.ImageIO;
/**
*
* @author TheMonsterOfTheDeep
*/
public class Animation extends Graphic {
private final BufferedImage[] frames;
private int start = 0;
private int end = 0;
private int length = 0;
private int direction = 1;
private float current = 0;
private float step = 0.25f;
public final Vector pivot;
public static Animation load(String path, int frameCount, Vector pivot) {
try {
BufferedImage image = ImageIO.read(ImageGraphic.class.getResourceAsStream(path));
return new Animation(image, frameCount, pivot);
} catch (IOException ex) {
System.err.println("Error loading animation: " + ex.getLocalizedMessage());
return new Animation(pivot);
}
}
public static Animation load(String path, int frameCount) {
return load(path, frameCount, new Vector());
}
private Animation(BufferedImage[] frames, Vector pivot, float step) {
this.frames = frames;
length = frames.length;
end = length - 1;
this.pivot = pivot;
this.step = step;
}
public Animation(Vector pivot) {
this.frames = new BufferedImage[] { null };
this.pivot = pivot;
}
public Animation(BufferedImage base, int frameCount, Vector pivot) {
if(base.getWidth() % frameCount != 0) {
throw new IllegalArgumentException("Image must have an integer number of frames.");
}
frames = new BufferedImage[frameCount];
int width = base.getWidth() / frameCount;
for(int i = 0; i < frames.length; ++i) {
frames[i] = base.getSubimage(i * width, 0, width, base.getHeight());
}
this.pivot = pivot;
length = frames.length;
end = frames.length - 1;
}
@Override
public BufferedImage getRender() {
return frames[(int)current];
}
@Override
public Vector getPivot() {
return pivot;
}
@Override
public void setPivot(Vector v) {
pivot.set(v);
}
public void tick() {
current += step * direction;
if(current - start >= length) {
current -= length;
}
}
public void next() {
++current;
if(current - start >= length) {
current -= length;
}
}
public void setFrame(int frame) {
if(frame < 0 || frame >= frames.length) {
throw new IllegalArgumentException("Frame out of frame range");
}
current = frame;
}
public void setRange(int start, int end) {
if(start < 0 || start >= frames.length) {
throw new IllegalArgumentException("Start out of frame range");
}
if(end < 0 || end >= frames.length) {
throw new IllegalArgumentException("End out of frame range");
}
if(end < start) {
throw new IllegalArgumentException("Alfredo does not support reverse animations yet.");
}
if(current < start || current > end) {
current = start;
}
this.start = start;
this.end = end;
length = end - start + 1;
direction = 1;
}
    /** @return first frame of the active playback range */
    public int getStart() { return start; }
    /** @return last frame (inclusive) of the active playback range */
    public int getEnd() { return end; }
    /** Moves the start of the active range, keeping the current end. */
    public void setStart(int start) {
        setRange(start, this.end);
    }
    /** Moves the end of the active range, keeping the current start. */
    public void setEnd(int end) {
        setRange(this.start, end);
    }
public void setLength(int length) {
setRange(this.start, this.start + length);
}
    /**
     * Sets how many frames playback advances per tick (may be fractional).
     *
     * @throws IllegalArgumentException if step is negative
     */
    public void setStep(float step) {
        if(step < 0) {
            throw new IllegalArgumentException("Alfredo does not support reverse animations yet.");
        }
        this.step = step;
    }
    /**
     * @return a new Animation that shares this one's frame images and pivot
     *         but has its own independent playback state
     */
    public Animation getInstance() {
        return new Animation(frames, pivot, step);
    }
}
<file_sep>package alfredo;
import alfredo.gfx.Graphic;
import alfredo.inpt.Keys;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.ComponentListener;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.imageio.ImageIO;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.Timer;
/**
*
* @author TheMonsterOfTheDeep
*/
/**
 * Static facade over the engine's Swing shell: owns the window, the render
 * panel, the off-screen {@link Canvas} and the game-loop {@link Timer}.
 * All public methods lazily initialize the window via init() on first use.
 */
public class Game {
    public static final int DEFAULT_WIDTH = 600;
    public static final int DEFAULT_HEIGHT = 400;
    private static JFrame frame = null;
    private static JPanel panel = null;
    private static Timer timer = null;
    private static Canvas canvas = null;
    // Number of logic ticks executed so far (used to tag screenshots).
    private static long tick = 0;
    /**
     * One-time setup of frame, panel, canvas and timer.
     *
     * @return true if this call performed the initialization, false if the
     *         window already existed (lets setSize() know whether to resize)
     */
    private static boolean init(int width, int height) {
        if(frame != null) { return false; }
        frame = new JFrame();
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        canvas = new Canvas(width, height);
        panel = new JPanel() {
            @Override
            public void paintComponent(Graphics g) {
                super.paintComponent(g);
                // `start` only feeds the commented-out timing printout below.
                long start = System.currentTimeMillis();
                g.setColor(Color.BLACK);
                g.fillRect(0, 0, getWidth(), getHeight());
                Scene.getCurrent().render(canvas);
                g.drawImage(canvas.getRender(), 0, 0, null);
                //System.out.println("Time to paint: " + (System.currentTimeMillis() - start));
                // NOTE(review): calling repaint() from inside paintComponent()
                // makes the panel repaint continuously as fast as the EDT
                // allows — presumably intentional as the render loop, but it
                // burns CPU; consider driving repaints from the Timer instead.
                repaint();
            }
        };
        frame.addKeyListener(Keys.listener);
        // Keep the off-screen canvas in sync with the panel's size.
        panel.addComponentListener(new ComponentAdapter() {
            @Override
            public void componentResized(ComponentEvent e) {
                canvas.resize(panel.getWidth(), panel.getHeight());
            }
        });
        panel.setPreferredSize(new Dimension(width, height));
        frame.add(panel);
        frame.pack();
        // ~30 ticks/second game-logic timer; rendering happens via repaint().
        timer = new Timer(33, (ActionEvent e) -> {
            long start = System.currentTimeMillis();
            Scene.getCurrent().loop();
            ++tick;
            //System.out.println("Time to tick: " + (System.currentTimeMillis() - start));
        });
        return true;
    }
    /** Initializes with the default 600x400 window. */
    private static boolean init() {
        return init(DEFAULT_WIDTH, DEFAULT_HEIGHT);
    }
    /** Sets the window title (creating the window if needed). */
    public static void setTitle(String title) {
        init();
        frame.setTitle(title);
    }
    /** Sets (or re-sets) the render area size in pixels. */
    public static void setSize(int width, int height) {
        if(!init(width, height)) {
            // Window already existed: resize it and the backing canvas.
            panel.setPreferredSize(new Dimension(width, height));
            frame.pack();
            canvas.resize(width, height);
        }
    }
    /** Shows the window and starts the game-logic timer. */
    public static void play() {
        init();
        frame.setVisible(true);
        timer.start();
    }
    /** Sets the delay between logic ticks, in milliseconds. */
    public static void setDelay(int ms) {
        init();
        timer.setDelay(ms);
    }
    /** @return the current delay between logic ticks, in milliseconds */
    public static int getDelay() {
        init();
        return timer.getDelay();
    }
    /**
     * Saves the current canvas render as "&lt;path&gt;&lt;tick&gt;.png".
     * Failures are reported on stderr and otherwise ignored.
     */
    public static void saveScreenshot(String path) {
        try {
            ImageIO.write(canvas.getRender(), "PNG", new File(path + tick + ".png"));
        } catch (IOException ex) {
            System.err.println("Could not save screenshot: " + ex.getLocalizedMessage());
        }
    }
    /** Sets the window icon from a Graphic, ignoring graphics with no render. */
    public static void setIcon(Graphic icon) {
        init();
        BufferedImage b = icon.getRender();
        if(b != null) {
            frame.setIconImage(b);
        }
    }
    /** @return number of logic ticks executed since startup */
    public static long getTick() {
        return tick;
    }
    /** Points the canvas at the main camera and re-clips it. */
    static void updateCamera() {
        init();
        canvas.camera = Camera.getMain();
        Camera.getMain().clip(canvas);
    }
}
| 72d81ded9d0168f8c4800b971e630369ab995f40 | [
"Java"
] | 10 | Java | TheMonsterFromTheDeep/dungeon2.0 | 133c4e555c80c85e92c86724d84595bf61cb95a4 | 6e32c08434cd2f2565c4dc06906158b9a57b5dbd |
refs/heads/master | <file_sep>""" Enables several functional tools for programming in an infix style.
TODO: Contact author (maybe)
pull the thing and edit it
push this to my own repository
PEP proposal for pipes?
Better examples for dup?
>>> 'mys' |dup(len)| (lambda x: x.idy * x.len) |pipe| it[0:4]
'mysm'
"""
import operator
from collections import namedtuple
from functools import wraps
import types
# from macropy.core.macros import *
# from macropy.tracing import macros, log, trace
#
# macros = Macros()
#
# @macros.expr
# def my_macro():
# pass
def getoperators(underscore=True):
    # Yields (plain_name, dunder_name, function) for every public callable in
    # the operator module, e.g. ('add', '__add__', operator.add).
    # NOTE(review): the `underscore` parameter is never used — the result is
    # identical for True and False; confirm intent before removing it.
    # NOTE(review): iteritems() is Python 2 only (items() on Python 3).
    return ((name, '__'+name+'__', op) for name, op in operator.__dict__.iteritems()\
            if name[0]!='_')
class ItError(Exception): pass
class It(object):
    """ Shortcut for writing lambdas.
    >>> it = It()
    >>> (lambda x: x[0])('lol') == it[0]('lol')
    True
    >>> (lambda s: s.upper())('lol') == it.upper('lol')()
    True
    >>> (it*it)(5) == 25
    True
    """
    # Attribute access builds an attribute-getter closure:
    # it.upper -> lambda y: y.upper (note: returns the bound method, so the
    # doctest above calls it with a trailing ()).
    def __getattr__(self, v):
        return lambda y: getattr(y, v)
    # __getitem__ (and the rest of the operator protocol, which makes it[0]
    # and it*it work) is injected onto the class by the loop just below.
    # def __getitem__(self, v):
    #     return lambda y: y[v]
# Inject the full operator protocol (__add__, __mul__, __getitem__, ...) into
# It: each dunder returns a one-argument closure that applies the operator to
# the incoming value, substituting `it` itself with that value so expressions
# like (it*it)(5) evaluate to 5*5.
for opname, opattr, op in getoperators():
    # opfact freezes `op` per iteration (avoids late binding in the lambda).
    def opfact(op):
        return lambda self, *args: \
            lambda y: op(y, *(i if i is not self else y for i in args))
    setattr(It, opattr, opfact(op))
# Ready-to-use module-level instance.
it = It()
# Who ever came up with this is awesome
class Infix(object):
'Allows you to build custom infix functions'
def __init__(self, function):
self.function = function
def __ror__(self, other):
return Infix(lambda x: self.function(other, x))
def __or__(self, other):
return self.function(other)
class Zipw(object):
    """ zipWith/Matrix style operations
    >>> zipw = Zipw()
    >>> [1, 2, 3] |zipw.add| [1,1,1]
    [2, 3, 4]
    """
    def __init__(self):
        # Install one Infix per operator on the *class* (zipw.add, zipw.mul,
        # ...), each applying the operator element-wise over the two zipped
        # sequences. op=op freezes the loop variable in the lambda's default.
        for opname, opattr, op in getoperators(underscore=False):
            setattr(Zipw, opname, Infix(lambda La, Lb, op=op: \
                [op(a, b) for a, b in zip(La, Lb)]))
# Module-level instance; constructing it triggers the operator installation.
zipw = Zipw()
# Used for partial application when using |pipe|
# Used for partial application when using |pipe|
class PartialArg(object):
    'Holds data'
    # Placeholder used in |pipe| "S-expression" tuples: __[n] and __.name
    # produce a marker dict {'__gn__': key} that pipe() later resolves
    # against the value flowing through the pipe.
    # NOTE(review): __getindex__ is not a Python protocol method, so it is
    # never invoked implicitly — it looks like dead code (typo for
    # __getitem__, defined below); confirm before deleting.
    def __getindex__(self, v):
        return {'__gn__': v}
    def __getitem__(self, k):
        return {'__gn__': k}
    def __getattr__(self, k):
        return {'__gn__': k}
# Module-level placeholder instance used inside pipe expressions.
__ = PartialArg()
# Error type reserved for pipe failures (currently never raised).
class PipeError(Exception): pass
from StringIO import StringIO  # Python 2 module (io.StringIO on Python 3)
class Log:
    # In-memory trace of every |pipe| step, written by PipeLogger.
    log = StringIO()
    enableLogging = True  # evaluated at decoration time; see PipeLogger
    previous = None       # last pipe result, used to detect new chains
def PipeLogger(f):
    """Decorator: wrap a two-argument pipe stage so every call appends
    "input|output" to Log.log, with a newline between independent chains.

    Note: Log.enableLogging is consulted once, at decoration (import) time —
    toggling it later has no effect on already-decorated functions.
    """
    if Log.enableLogging:
        @wraps(f)
        def func(x, arg):
            res = f(x, arg)
            # A new line starts whenever x is not the previous step's result,
            # i.e. when a fresh pipe chain begins.
            nl = '\n' if Log.previous is not x else ''
            Log.log.write(nl+str(x)+'|'+str(res))
            Log.previous = res
            return res
        return func
    else:
        return f
@Infix
@PipeLogger
def pipe(x, arg):
    """ f(g(x)) <=> x |pipe| f |pipe| g
    For multi-argument functions, use "S-Expression" form where each variable gets
    __ i.e: ('map', it+1, __)
    >>> 'lol' |pipe| len |pipe| range
    [0, 1, 2]
    >>> 'lol' |pipe| enumerate |pipe| (map, lambda a: (a[1], a[0])) |pipe| it[0]
    ('l', 0)
    >>> 5 |pipe| (list, __, 2)
    [5, 2]"""
    global __
    try:
        # Common case: arg is a plain callable applied to the piped value.
        return arg(x)
    except TypeError:
        # S-expression case: arg is (func, arg, arg, ...) with __-placeholders.
        f, args = arg[0], arg[1:]
        # Bugfix: the old test was "'__len__' in x", which asks whether the
        # *string* '__len__' is an element of x (membership test), not whether
        # x has a length — so even sized values were re-materialized.
        xtup = x if hasattr(x, '__len__') else tuple(x)
        # Bugfix: guard with isinstance(i, dict) — "'__gn__' in i" raised
        # TypeError for non-iterable args such as plain ints.
        args = [xtup[i['__gn__']] if isinstance(i, dict) and '__gn__' in i
                else i for i in args]
        return f(*args)
def idy(x): return x
# TODO: maybe flattening
# TODO: maybe flattening
def dup(*funcs, **kwargs):
    """
    Allows you to split a pipe, use it[n] or it.<funcname> to get a certain
    value from the pipe.
    >>> 'what a world'.split() |dup(len)| list
    [['what', 'a', 'world'], 3]
    >>> ('what' |dup(len)| it.len) == ('what' |dup(lol=len)| it.lol)
    True
    >>> 'war love war'.split() |dup(idy, set)| (lambda x: [x.idy, x.set])
    [['war', 'love', 'war'], set(['love', 'war'])]
    """
    # With fewer than two stages, prepend the identity so the original value
    # is always available as field 0 / .idy.
    if len(funcs) + len(kwargs) < 2:
        funcs = (idy,) + funcs
    allfuncs = funcs + tuple(kwargs.values())
    # Field names come from each function's __name__ (angle brackets stripped
    # for lambdas) plus any keyword names.
    toident = lambda x: x.__name__.strip().strip('<>')
    idents = tuple(toident(i) for i in funcs) + tuple(kwargs.keys())
    tup = namedtuple('dup', idents)
    # The infix stage applies every function to x and hands the namedtuple of
    # results to the right-hand side of the pipe.
    def pipe(x, f):
        return f(tup(*(g(x) for g in allfuncs)))
    return Infix(PipeLogger(pipe))
if __name__ == '__main__':
    # Run the module's doctests, then dump the pipe trace they produced.
    import doctest
    doctest.testmod()
    print Log.log.getvalue()  # Python 2 print statement: module targets Py2
"Python"
] | 1 | Python | aoeu256/pipes-functional | 2cce7a3e4eba4fff85900b8f7e8b39dc6eb60baa | bcc321b8269f177ff03e5a8c5693253459deee2c |
refs/heads/master | <repo_name>MidnightLightning/seven-languages<file_sep>/ruby/guess_number.rb
# Number-guessing game: picks a random integer in 1..10 and prompts the
# user repeatedly until they guess it, hinting "too low"/"too high".
puts "Guess my number (between 1 and 10)!"
my_number = rand(10)+1
correct = false
until correct
  print "Your guess: "
  gets # Get a line of input from command line (also sets $_)
  your_guess = $_.to_i # Make sure it's an integer (non-numeric input becomes 0)
  if your_guess == my_number
    puts "That's my number!"
    correct = true
  elsif your_guess < my_number
    puts "Too low.."
  else
    puts "Too high..."
  end
end<file_sep>/README.md
These are scripts I created while working through the *Seven Languages in Seven Weeks* book from The Pragmatic Bookshelf.
# Links
* [*Seven Languages in Seven Weeks*](http://pragprog.com/book/btlang/seven-languages-in-seven-weeks)
* [Source code](http://pragprog.com/titles/btlang/source_code)
## [Ruby](http://www.ruby-lang.org/en/)
* **Source:** [main](http://www.ruby-lang.org/en/downloads/)
* **Guides:** [Ruby Doc](http://www.ruby-doc.org/core-1.9.3/)
* StackOverflow tag [ruby](http://stackoverflow.com/questions/tagged/ruby)
## [Io](http://iolanguage.com/)
* **Source:** [on github](http://github.com/stevedekorte/io), [Mac binary](http://iobin.suspended-chord.info/mac/iobin-mac-current.zip) (or [macports](https://trac.macports.org/browser/trunk/dports/lang/Io/Portfile)), [Win binary](http://iobin.suspended-chord.info/win32/iobin-win32-current.zip)
* **Guides:** [main](http://www.iolanguage.com/scm/io/docs/IoGuide.html), [Io sample code](http://iolanguage.com/about/samplecode/)
* StackOverflow tag [iolanguage](http://stackoverflow.com/questions/tagged/iolanguage) | dc9fb07393052e105b0569923b691bd071fe6f58 | [
"Markdown",
"Ruby"
] | 2 | Ruby | MidnightLightning/seven-languages | bd334438d58bd98be3582830bfdadb04674c993f | 90d993e453f1a2920f2f308b698b841a5d47170d |
refs/heads/master | <repo_name>wscrlhs/memcached<file_sep>/README.MD
## memcached分布式解决方案
- 普通Hash分布
- 一致性Hash分布
## Memcached应用场景
### 缓解数据库压力,提高交互速度。
一、经常被读取并且实时性要求不强可以等到自动过期的数据。例如网站首页最新文章列表、某某排行等数据。也就是虽然新数据产生了,但对用户体验不会产生任何影响的场景。
这类数据就使用典型的缓存策略,设置一过合理的过期时间,当数据过期以后再从数据库中读取。当然你得制定一个缓存清除策略,便于编辑或者其它人员能马上看到效果。
二、经常被读取并且实时性要求强的数据。比如用户的好友列表,用户文章列表,用户阅读记录等。
这类数据首先被载入到memcached中,当发生更改(添加、修改、删除)时就清除缓存。在缓存的时候,我将查询的SQL语句md5()得到它的 hash值作为key,结果数组作为值写入memcached,并且将该SQL涉及的table_name以及hash值配对存入memcached中。 当更改了这个表时,我就将与此表相配对的key的缓存全部删除。
三、统计类缓存,比如文章浏览数、网站PV等
此类缓存是将在数据库的中来累加的数据放在memcached来累加。获取也通过memcached来获取。但这样就产生了一个问题,如果memcached服务器down 掉的话这些数据就有可能丢失,所以一般使用memcached的永固性存储,这方面新浪使用memcachedb。
四、活跃用户的基本信息或者某篇热门文章。
此类数据的一个特点就是数据都是一行,也就是一个一维数组,当数据被update时(比如修改昵称、文章的评论数),在更改数据库数据的同时,使用Memcache::replace替换掉缓存里的数据。这样就有效了避免了再次查询数据库。
五、session数据
使用memcached来存储session的效率是最高的。memcached本身也是非常稳定的,不太用担心它会突然down掉引起session数据的丢失,即使丢失就重新登录了,也没啥。
六、冷热数据交互
在做高访问量的sns应用,比如贴吧和论坛,由于其数据量大,往往采用了分表分库的策略,但真正的热数据仅仅是前两三页的100条数据,这时,我们就可以把这100条数据,在写进数据库之后,同时作为memcache的缓存热数据来使用。
通过以上的策略数据库的压力将会被大大减轻。检验你使用memcached是否得当的方法是查看memcached的命中率。有些策略好的网站的命中率可以到达到90%以上。后续本专题也会讨论一下memcache的分布式算法,提高其命中率;
### 秒杀功能
其实,本场景严格的说应该也属于场景一,单独拎出来说是由于其广泛的应用性。
一个人下单,要牵涉数据库读取,写入订单,更改库存,及事务要求, 对于传统型数据库来说,压力是巨大的。
可以利用 memcached 的 incr/decr 功能, 在内存存储 count 库存量, 秒杀 1000 台每人抢单主要在内存操作,速度非常快,抢到 `count < =1000` 的号人,得一个订单号,这时再去另一个页面慢慢支付。
### 中继 MySQL 主从延迟数据
MySQL 在做 replication 时,主从复制之间必然要经历一个复制过程,即主从延迟的时间.
尤其是主从服务器处于异地机房时,这种情况更加明显.
比如facebook 官方的一篇技术文章,其加州的主数据中心到弗吉尼亚州的主从同步延期达到70ms;
考虑如下场景:
①: 用户 U 购买电子书 B, insert into Master (U,B);
②: 用户 U 观看电子书 B, select 购买记录[user=’A’,book=’B’] from Slave.
③: 由于主从延迟,第②步中无记录,用户无权观看该书.
这时,可以利用 memached 在 master 与 slave 之间做过渡(如下图):

①: 用户 U 购买电子书 B, memcached->add(‘U:B’,true)
②: 主数据库 insert into Master (U,B);
③: 用户 U 观看电子书 B, select 购买记录[user=’U’,book=’B’] from Slave.
如果没查询到,则 memcached->get(‘U:B’),查到则说明已购买但 Slave 延迟.
④: 由于主从延迟,第②步中无记录,用户无权观看该书.
## 不适用memcached的业务场景
1. 缓存对象的大小大于1MB
2. Memcached本身就不是为了处理庞大的多媒体(large media)和巨大的二进制块(streaming huge blobs)而设计的。
key的长度大于250字符(所以我们把一些key先md5再存储)。
3. 应用运行在不安全的环境中Memcached为提供任何安全策略,仅仅通过telnet就可以访问到memcached。如果应用运行在共享的系统上,需要着重考虑安全问题。
4. 业务本身需要的是持久化数据。
来源
[https://blog.csdn.net/qq_35461287/article/details/79675094](https://blog.csdn.net/qq_35461287/article/details/79675094)
<file_sep>/FlexiHash.php
<?php
require 'normalHash.php';
class FlexiHash
{
    private $serverList = array(); // map: hash position => server address
    private $isSorted = false;     // whether $serverList is sorted (descending) by position

    /**
     * @return array the current position => server map
     */
    public function getServerList()
    {
        return $this->serverList;
    }

    /**
     * Replaces the whole server map.
     *
     * @param array $serverList map of hash position => server
     */
    public function setServerList($serverList)
    {
        $this->serverList = $serverList;
        // Bugfix: the new list must be re-sorted before the next lookup;
        // previously a stale $isSorted could leave it unsorted.
        $this->isSorted = false;
    }

    /**
     * Adds a server to the ring at the position given by its hash.
     *
     * @param string $server server address
     * @return bool always true
     */
    public function addServer($server)
    {
        $hash = normalHash($server);
        if (!isset($this->serverList[$hash])) {
            $this->serverList[$hash] = $server;
        }
        $this->isSorted = false;
        return true;
    }

    /**
     * Removes a server from the ring.
     *
     * @param string $server server address
     * @return bool always true
     */
    public function remove($server)
    {
        $hash = normalHash($server);
        if (isset($this->serverList[$hash])) {
            unset($this->serverList[$hash]);
        }
        $this->isSorted = false;
        return true;
    }

    /**
     * Finds the server responsible for storing the given key.
     *
     * @param mixed $key cache key
     * @return string the chosen server address
     */
    public function lookup($key)
    {
        $hash = normalHash($key);
        if (!$this->isSorted) {
            krsort($this->serverList, SORT_NUMERIC);
            $this->isSorted = true;
        }
        // Positions are in descending order: the first position <= hash owns the key.
        foreach ($this->serverList as $pos => $server) {
            if ($hash >= $pos) return $server;
        }
        // Wrap-around: the hash is below every position, so the key belongs to
        // the server at the highest position (first element after krsort).
        // Bugfix: the old code did $this->serverList[count(...) - 1], indexing
        // a hash-keyed array with a sequential index that does not exist.
        return reset($this->serverList);
    }
}
// Demo: populate the ring with five servers and show where two keys land.
$hserver = new FlexiHash();
$hserver->addServer("192.168.1.1");
$hserver->addServer("192.168.1.2");
$hserver->addServer("192.168.1.3");
$hserver->addServer("192.168.1.4");
$hserver->addServer("192.168.1.5");
var_dump($hserver->getServerList());
echo "save key1 in server:" . $hserver->lookup('key1');
echo PHP_EOL;
echo "save key2 in server:" . $hserver->lookup('key2');
echo PHP_EOL;
echo "============================";
echo PHP_EOL;
// Remove a server: only keys that mapped to it should move.
$hserver->remove("192.168.1.4");
var_dump($hserver->getServerList());
echo "save key1 in server:" . $hserver->lookup('key1');
echo PHP_EOL;
echo "save key2 in server:" . $hserver->lookup('key2');
echo PHP_EOL;
echo "============================";
echo PHP_EOL;
// Add a new server: again only a fraction of keys should be remapped.
$hserver->addServer("192.168.1.6");
var_dump($hserver->getServerList());
echo "save key1 in server:" . $hserver->lookup('key1');
echo PHP_EOL;
echo "save key2 in server:" . $hserver->lookup('key2');
echo PHP_EOL;
<file_sep>/normalHash.php
<?php
//普通Hash分布
/**
 * Hashes an arbitrary string key to a non-negative 31-bit integer, used to
 * place both servers and keys on the hash ring.
 *
 * @param string $key value to hash
 * @return int hash in the range [0, 0x7FFFFFFF]
 */
function normalHash($key)
{
    $md5 = substr(md5($key), 0, 8); // first 8 hex chars of the MD5 digest
    $seed = 31; // classic multiplicative string-hash seed
    $hash = 0;
    for ($i = 0; $i < 8; $i++) {
        $hash = $hash * $seed + ord($md5[$i]);
        // NOTE(review): $i is incremented twice per iteration (here and in
        // the for-header), so only chars 0, 2, 4, 6 are mixed in. This looks
        // like a bug, but "fixing" it would move every existing key/server to
        // a different position — confirm intent before changing.
        $i++;
    }
    return $hash & 0x7FFFFFFF; // clear the sign bit
}
$servers = array(
array("host" => '192.168.1.1', "port" => 6379),
array("host" => '192.168.1.2', "port" => 6379),
array("host" => '192.168.1.3', "port" => 6379),
array("host" => '192.168.1.4', "port" => 6379),
);
//$key = "TheKey";
//$value = "TheValue";
//$sc = $servers[normalHash($key) % 4];
//$mem = new Memcached($sc);
//$mem->set($key, $value);
| c162032aedcbd729909ef7097d7b4f8258e31b0c | [
"Markdown",
"PHP"
] | 3 | Markdown | wscrlhs/memcached | e45ee8c6f705674a051a4627dcd98da7fec27a05 | efbe5f39648a65b7653ad19fcb00949287dafe75 |
refs/heads/main | <file_sep>let movies = [
{
name: "falcon and the winter soldier",
des: "Lorem ipsum dolor sit amet consectetur adipisicing elit. Velit porro et veniam excepturi, eaque voluptatem impedit nulla laboriosam facilis ut laboriosam libero!",
image: "https://i.ibb.co/ZXFxBXy/slider-2.png",
},
{
name: "Burrow",
des: "Lorem ipsum dolor sit amet consectetur adipisicing elit. Velit porro et veniam excepturi, eaque voluptatem impedit nulla laboriosam facilis ut laboriosam libero!",
image: "https://i.ibb.co/FWZp9Jb/Screenshot-2021-10-22-at-11-34-02-AM.png",
},
{
name: "loki",
des: "Lorem ipsum dolor sit amet consectetur adipisicing elit. Velit porro et veniam excepturi, eaque voluptatem impedit nulla laboriosam facilis ut laboriosam libero!",
image: "https://i.ibb.co/Wx9qMys/slider-1.png",
},
{
name: "<NAME>",
des: "Lorem ipsum dolor sit amet consectetur adipisicing elit. Velit porro et veniam excepturi, eaque voluptatem impedit nulla laboriosam facilis ut laboriosam libero!",
image: "https://i.ibb.co/KD0tVVc/slider-3.png",
},
{
name: "<NAME>",
des: "Lorem ipsum dolor sit amet consectetur adipisicing elit. Velit porro et veniam excepturi, eaque voluptatem impedit nulla laboriosam facilis ut laboriosam libero!",
image: "https://i.ibb.co/kg8yG7y/slider-4.png",
},
{
name: "luca",
des: "Lorem ipsum dolor sit amet consectetur adipisicing elit. Velit porro et veniam excepturi, eaque voluptatem impedit nulla laboriosam facilis ut laboriosam libero!",
image: "https://i.ibb.co/M8RgzxN/slider-5.png",
},
];
| 6ff5b9c8df7f77b6240c7ebc8713fbff3c90ef73 | [
"JavaScript"
] | 1 | JavaScript | PrerakMathur20/disneyClone | 33f93c5a748601fdfcaa5b676e32602a6287e72c | 67db2290e649b5235ea9bf16c9bfc14433e766de |
refs/heads/master | <repo_name>Epiesteban/gestioIVentaProductesInformatics<file_sep>/src/controladors/mainClients.java
package controladors;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Scanner;
import controladors.mainBotiga;
import models.*;
public class mainClients {
private static LlistaProductes llista_productes = new LlistaProductes();
private static LlistaComandes llista_comandes = new LlistaComandes();
private static LlistaClients llista_clients= new LlistaClients();
	/**
	 * Entry point: restores clients, products and orders from their files.
	 * The interactive program launch is currently commented out.
	 */
	public static void main(String[] args) {
		// Restore persisted data from the files on disk.
		llegirFitxerClients();
		llegirFitxerProductes();
		llegirDataSerialitzable();
		// Launch the main program (disabled for now).
		// new ProgramaPrincipal (ll_productes, ll_comandes, ll_clients);
	}
/*
* FUNCIONS PER LLEGIR I ESCRIURE FITXERS
*/
/**
* Llegir fitxer clients
*
*/
private static void llegirFitxerClients() {
String result="";
try {
Scanner f=new Scanner(new File("clients.txt"));
while (f.hasNextLine()) {
result= f.nextLine();
String[] separador = result.split("\\*");
llista_clients.afegirClient(new Client(separador[0], separador[1], separador[2]));
}
f.close();
}catch (FileNotFoundException e) {
System.out.println("No existeix el fitxer.");
}
catch(Exception e) {
System.out.println("Hi ha hagut algun error en la lectura de l'arxiu o al afegir els elements a la llista.\n");
}
}
/**
* Funcio per guardar Fitxer Clients
* @throws IOException
*/
public static void guardarFitxerClients() throws IOException {
BufferedWriter cl=new BufferedWriter(new FileWriter("clients.txt"));
try {
String frase = "";
Client aux;
for (int i = 0; i < llista_clients.getnClient();i++) {
aux = llista_clients.getLlista()[i];
frase =aux.getDni()+"*"+aux.getCorreu()+"*"+aux.getAdresa()+"\n";
cl.write(frase);
}
cl.close();
} catch (Exception e) {
System.out.println("Hi ha hagut un problema a l'escriure al fitxer!");
}
}
/**
* Mčtode per a llegir les dades d'un fitxer de text i guardar les dades dins la llista_productes
* @throws FileNotFoundException
*/
private static void llegirFitxerProductes() throws FileNotFoundException {
String result="";
try {
Scanner f=new Scanner(new File("productes.txt"));
while (f.hasNextLine()) {
result=f.nextLine();
String[] separador = result.split("\\*");
if(separador[0].equalsIgnoreCase("S")) {
String nom=separador[1];
float preu=Float.parseFloat(separador[2]);
int estoc=Integer.parseInt(separador[3]);
String sist=separador[4];
Software aux_s=new Software(nom, preu, estoc, sist);
llista_productes.afegirProducte(aux_s);
}else if (separador[0].equalsIgnoreCase("H")) {
String nom=separador[1];
float preu=Float.parseFloat(separador[2]);
int estoc=Integer.parseInt(separador[3]);
String tipus=separador[4];
Hardware aux_h=new Hardware(nom, preu, estoc, tipus);
llista_productes.afegirProducte(aux_h);
}else {
String nom=separador[1];
float preu=0;// El precio se calcula de los productos de la configuracion
int estoc=Integer.parseInt(separador[2]);
Hardware[] llista_h= new Hardware[100];
int cont_h =0, cont_s = 0, cont_aux_h = 0, cont_aux_s=0; //contador llistes arrays
Software[] llista_s=new Software[100];
Integer[] llista_auxIntegers_h = new Integer[100];
Integer[] llista_auxIntegers_s = new Integer[100];
int posicio = 3;
if(separador[posicio].equalsIgnoreCase("H")) {
posicio++;
String aux_r= separador[posicio];
while (!aux_r.equals("S")) {
llista_auxIntegers_h[cont_aux_h] = Integer.parseInt(aux_r);
cont_aux_h++;
posicio++;
aux_r= separador[posicio];
}
posicio++;
while (posicio<separador.length) {
aux_r= separador[posicio];
llista_auxIntegers_s[cont_aux_s] = Integer.parseInt(aux_r);
cont_aux_s++;
posicio++;
}
int cont_aux_ultra2 = 0;
boolean trobat = false;
while (!trobat) {
if (llista_auxIntegers_s[cont_aux_ultra2] == null) {
trobat =true;
}
else {
cont_aux_ultra2++;
}
}
for (int i = 0; i < cont_aux_ultra2; i++) {
llista_s[cont_s] = (Software)llista_productes.buscarProducte(llista_auxIntegers_s[i]);
preu+=llista_s[cont_s].getPreu();
cont_s++;
}
trobat=false;
cont_aux_ultra2=0;
while (!trobat) {
if (llista_auxIntegers_h[cont_aux_ultra2] == null) {
trobat =true;
}
else {
cont_aux_ultra2++;
}
}
for (int i = 0; i < cont_aux_ultra2; i++) {
llista_h[cont_h] = (Hardware)llista_productes.buscarProducte(llista_auxIntegers_h[i]);
preu+=llista_h[cont_h].getPreu();
cont_h++;
}
Configuracio aux_c=new Configuracio(nom, preu, estoc, llista_s, llista_h);
llista_productes.afegirProducte(aux_c);
}
}
}
f.close();
}catch (FileNotFoundException e) {
System.out.println("No existeix el fitxer.");
}
catch(Exception e) {
System.out.println("Hi ha hagut algun error en la lectura de l'arxiu o al afegir els elements a la llista.\n");
e.printStackTrace();
}
}
/**
* Funcio per guardar fitxer productes
*/
public static void guardarFitxerProductes() {
try {
BufferedWriter bw= new BufferedWriter(new FileWriter("productes.txt"));
int i=0;
String res="";
for (i=0;i<llista_productes.getnElem();i++) {
if (llista_productes.getLlista()[i] instanceof Hardware) {
res+="H*";
res+=llista_productes.getLlista()[i].getNom()+"*";
res+=String.valueOf(llista_productes.getLlista()[i].getPreu())+"*";
res+=String.valueOf(llista_productes.getLlista()[i].getEstoc())+"*";
res+=String.valueOf(((Hardware)llista_productes.getLlista()[i]).getTipusHardwareString())+"\n";
}
}
for (i=0;i<llista_productes.getnElem();i++) {
if (llista_productes.getLlista()[i] instanceof Software) {
res+="S*";
res+=llista_productes.getLlista()[i].getNom()+"*";
res+=String.valueOf(llista_productes.getLlista()[i].getPreu())+"*";
res+=String.valueOf(llista_productes.getLlista()[i].getEstoc())+"*";
res+=String.valueOf(((Software)llista_productes.getLlista()[i]).getSOString())+"\n";
}
}
for (i = 0; i < llista_productes.getnElem(); i++) {
if (llista_productes.getLlista()[i] instanceof Configuracio) {
res+="C*";
res+=String.valueOf(llista_productes.getLlista()[i].getNom())+"*";
res+=String.valueOf(llista_productes.getLlista()[i].getEstoc())+"*";
res+="H";
int numElem = ((Configuracio)llista_productes.getLlista()[i]).numElementsHardware();
int j=0;
for (j=0;j<numElem;j++) {
res+="*"+((Configuracio)llista_productes.getLlista()[i]).getHardwares()[j].getId();
}
res+="*S";
numElem = ((Configuracio)llista_productes.getLlista()[i]).numElementsSoftware();
for (j=0;j<numElem;j++) {
res+="*"+((Configuracio)llista_productes.getLlista()[i]).getSoftwares()[j].getId();
}
res+="\n";
}
}
bw.write(res);
bw.close();
} catch (Exception e) {
System.out.println("Hi ha hagut un problema a l'escriure al fitxer!");
}
}
/**
* Funció per a escriure en una llista en format serialitzable
*/
public static void guardarDataSerialitzable () {
ObjectOutputStream gfitxer;
try {
gfitxer = new ObjectOutputStream (new FileOutputStream("comandes.ser"));
gfitxer.writeObject(llista_comandes);
gfitxer.close();
} catch (IOException e){
System.out.println("Error a l'hora d'escriure al fitxer");
}
}
/**
* Funció per a llegir una llista que esta guardada en format serialitzable
*/
private static void llegirDataSerialitzable () {
ObjectInputStream lfitxer;
try {
lfitxer = new ObjectInputStream (new FileInputStream("comandes.ser"));
llista_comandes=(LlistaComandes)lfitxer.readObject();
lfitxer.close();
} catch (IOException e) {
System.out.println ("Error a l'hora de llegir l'arxiu");
} catch (ClassNotFoundException e) {
System.out.println ("Error a l'hora de buscar la llista de Comandes");
} catch (ClassCastException e) {
System.out.println ("Error, el format de l'arxiu no és correcte per poder-lo obrir i llegir-lo");
}
}
}<file_sep>/src/ExceptionsBotiga/ClientInexistent.java
package ExceptionsBotiga;
/**
 * Checked exception thrown when an operation refers to a client that is not
 * registered in the shop's client list.
 */
public class ClientInexistent extends Exception {

    private static final long serialVersionUID = 1L;

    /**
     * @param missatgeError description of the failed client lookup
     */
    public ClientInexistent(String missatgeError) {
        super(missatgeError);
    }
}
<file_sep>/src/models/Software.java
package models;
public class Software extends Producte {
//filla
public enum SO {WINDOWS, MACOS, LINUX};
private SO sistema_operatiu;
public Software (String nom, float preu, int estoc, String sistema_operatiu) {
super(nom, preu, estoc);
this.sistema_operatiu = SO.valueOf(sistema_operatiu) ;
}
/**Setters i getters
*
*/
public SO getSistema_operatiu() {
return sistema_operatiu;
}
public String getSOString() {
String aux;
if (sistema_operatiu==SO.WINDOWS) {
aux="WINDOWS";
}else if (sistema_operatiu==SO.LINUX) {
aux="LINUX";
}else {
aux="MACOS";
}
return aux;
}
public void setSistema_operatiu(SO sistema_operatiu) {
this.sistema_operatiu = sistema_operatiu;
}
/**
* To string
*/
public String toString() {
return ( "Software=\t[Nom: "+super.getNom()+", Preu: "+super.getPreu()+ ", Estoc: "+super.getEstoc()+ ", Sistema operatiu: "+sistema_operatiu+ ", Identificador: "+super.getId()+"]");
}
}
<file_sep>/src/controladors/mainBotiga.java
package controladors;
import java.io.*;
import models.*;
import java.util.InputMismatchException;
import java.util.Scanner;
import ExceptionsBotiga.*;
import ExceptionsBotiga.EstocNegatiu;
public class mainBotiga {
static Scanner teclat=new Scanner(System.in);
static LlistaClients llista_clients = new LlistaClients();
static LlistaProductes llista_productes = new LlistaProductes();
static LlistaComandes llista_comandes = new LlistaComandes();
	/**
	 * Entry point: restores persisted data, then runs the interactive menu
	 * loop until option 12 (exit) is chosen, finally offering to save
	 * everything back to the files before terminating.
	 *
	 * @throws IOException if saving the client file fails on exit
	 */
	public static void main(String[] args) throws IOException {
		llegirFitxerClients();
		llegirFitxerProductes();
		llegirDataSerialitzable();
		// Sample instances for checking that the methods work correctly.
//		llista_productes.afegirProducte(new Software("hola", 65, 26, "WINDOWS"));
//		llista_productes.afegirProducte(new Software("adios", 65, 26, "LINUX"));
//		llista_productes.afegirProducte(new Software("met", 65, 26, "MACOS"));
//
//		llista_productes.afegirProducte(new Hardware("hola2", 25, 26, "PERIFERIC"));
//		llista_productes.afegirProducte(new Hardware("adios2", 86, 26, "CPU"));
//		llista_productes.afegirProducte(new Hardware("met2", 15, 26, "RAM"));
//		llista_productes.afegirProducte(new Hardware("hola3", 25, 26, "HDD"));
//		llista_productes.afegirProducte(new Hardware("adios3", 85, 26, "GPU"));
//		llista_productes.afegirProducte(new Hardware("met3", 16, 26, "MB"));
//		llista_productes.afegirProducte(new Hardware("orde", 25, 26, "PERIFERIC"));
//		llista_productes.afegirProducte(new Hardware("ades2", 8, 26, "CPU"));
//		llista_productes.afegirProducte(new Hardware("joel", 18, 26, "RAM"));
//		llista_productes.afegirProducte(new Hardware("roser3", 5, 26, "HDD"));
//		llista_productes.afegirProducte(new Hardware("xenia3", 95, 26, "GPU"));
//		llista_productes.afegirProducte(new Hardware("johnny", 15, 26, "MB"));
//		llista_comandes.afegirComanda(new Comanda(llista_clients.getLlista()[0]));
//		llista_comandes.getLlista()[0].afegirProducteComanda(llista_productes.getLlista()[1]);
//		llista_comandes.getLlista()[0].afegirProducteComanda(llista_productes.getLlista()[0]);
//		llista_comandes.getLlista()[0].afegirProducteComanda(llista_productes.getLlista()[1]);
//		llista_comandes.getLlista()[0].afegirProducteComanda(llista_productes.getLlista()[1]);
//
//		llista_comandes.afegirComanda(new Comanda(llista_clients.getLlista()[1]));
//		llista_comandes.getLlista()[1].afegirProducteComanda(llista_productes.getLlista()[2]);
//		llista_comandes.getLlista()[1].afegirProducteComanda(llista_productes.getLlista()[2]);
//		llista_comandes.getLlista()[1].afegirProducteComanda(llista_productes.getLlista()[2]);
//		llista_comandes.getLlista()[1].afegirProducteComanda(llista_productes.getLlista()[0]);
		int op=0;
		// Menu loop: repeats until the user picks option 12 (exit).
		do {
			menu();
			try {
				op = teclat.nextInt();
			} catch (InputMismatchException e) {
				op = -1; // non-numeric input: force an invalid option value
				teclat.nextLine(); // clear the scanner buffer
			}
			switch (op) {
			case 1:{
				System.out.println("\nHas escollit: afegir un producte de software ");
				afegirSoftware();
			}break;
			case 2:{
				System.out.println("\nHas escollit: afegir un producte de hardware");
				afegirHardware();
			}break;
			case 3:{
				System.out.println("\nHas escollit: afegir una configuració completa");
				afegirConfiguracio();
			}break;
			case 4:{
				System.out.println("\nHas escollit: donar d'alta un client");
				altaClient();
			}break;
			case 5:{
				System.out.println("\nHas escollit: donar de baixa a un client");
				baixaClient();
			}break;
			case 6:{
				System.out.println("\nHas escollit: treure un llistat de tots els productes que tenen alguna comanda (amb les dades del client) ");
				prodComanda();
			}break;
			case 7:{
				System.out.println("\nHas escollit: modificar l'estoc de qualsevol dels productes que s'han donat d'alta a partir del seu identificador");
				modificarEstoc();
			}break;
			case 8:{
				System.out.println("\nHas escollit: treure un llistat de tots els productes que tenen estic >= 1, indicant el seu estoc");
				if (!productesEstoc().equals("")) System.out.println(productesEstoc());
				else System.out.println("No hi ha cap producte amb estoc >= 1.");
			}break;
			case 9:{
				System.out.println("\nHas escollit: treure un llistat de tots els productes que formen part d'alguna configuracio");
				if (!productesConfiguracio().equals("")) {
					System.out.println(productesConfiguracio());
				}
				else {
					System.out.println("No hi ha cap producte que formi part d'una configuració");
				}
			}break;
			case 10:{
				System.out.println("\nHas escollit: mostrar el producte del qual s'han fet més comandes i indicar el numero d'aquestes");
				mesComandes();
			}break;
			case 11:{
				System.out.println("\nHas escollit: consultar tots els elements de qualsevol llista que tingueu definida");
				consultarLlistes();
			}break;
			case 12:{
				System.out.println("\nHas decidit sortir del programa.");
			}break;
			default: System.out.println("Escriu una opcio valida.");
			}
		} while (op != 12);
		// Ask (insistently) whether to persist everything before quitting.
		do {
			System.out.println("\nVols guardar tota la informacio als fitxers?(0 = NO 1 = SI)");
			try {
				op = teclat.nextInt();
			} catch (InputMismatchException e) {
				op = -1;
				teclat.nextLine(); // clear the scanner buffer
			}
		}while(op != 0 && op != 1);
		if (op == 1) {
			guardarFitxerClients();
			guardarFitxerProductes();
			guardarDataSerialitzable();
		}
		System.out.println("\nAdeu, fins aviat!");
		teclat.close();
		System.exit(0);
	}
/*
* FUNCIONS PER LLEGIR I ESCRIURE FITXERS
*/
/**
* Llegir fitxer clients
*
*/
private static void llegirFitxerClients() {
String result="";
try {
Scanner f=new Scanner(new File("clients.txt"));
while (f.hasNextLine()) {
result= f.nextLine();
String[] separador = result.split("\\*");
llista_clients.afegirClient(new Client(separador[0], separador[1], separador[2]));
}
f.close();
}catch (FileNotFoundException e) {
System.out.println("No existeix el fitxer.");
}
catch(Exception e) {
System.out.println("Hi ha hagut algun error en la lectura de l'arxiu o al afegir els elements a la llista.\n");
}
}
/**
* Funcio per guardar Fitxer Clients
* @throws IOException
*/
private static void guardarFitxerClients() throws IOException {
BufferedWriter cl=new BufferedWriter(new FileWriter("clients.txt"));
try {
String frase = "";
Client aux;
for (int i = 0; i < llista_clients.getnClient();i++) {
aux = llista_clients.getLlista()[i];
frase =aux.getDni()+"*"+aux.getCorreu()+"*"+aux.getAdresa()+"\n";
cl.write(frase);
}
cl.close();
} catch (Exception e) {
System.out.println("Hi ha hagut un problema a l'escriure al fitxer!");
}
}
/**
 * Reads "productes.txt" and rebuilds the product list. Each line is a
 * '*'-separated record whose first field selects the product kind:
 *   S*nom*preu*estoc*sistemaOperatiu  -> Software
 *   H*nom*preu*estoc*tipus            -> Hardware
 *   C*nom*estoc*H*id...*S*id...       -> Configuracio (price recomputed
 *                                        from the referenced component ids)
 * Configuration lines must come after the components they reference,
 * since ids are resolved against llista_productes while reading.
 * @throws FileNotFoundException declared for callers; in practice the
 *         error is handled locally and only a message is printed
 */
private static void llegirFitxerProductes() throws FileNotFoundException {
String result="";
try {
Scanner f=new Scanner(new File("productes.txt"));
while (f.hasNextLine()) {
result=f.nextLine();
// '*' is a regex metacharacter, so it must be escaped for split()
String[] separador = result.split("\\*");
if(separador[0].equalsIgnoreCase("S")) {
// Software record: name, price, stock, operating system
String nom=separador[1];
float preu=Float.parseFloat(separador[2]);
int estoc=Integer.parseInt(separador[3]);
String sist=separador[4];
Software aux_s=new Software(nom, preu, estoc, sist);
llista_productes.afegirProducte(aux_s);
}else if (separador[0].equalsIgnoreCase("H")) {
// Hardware record: name, price, stock, hardware type
String nom=separador[1];
float preu=Float.parseFloat(separador[2]);
int estoc=Integer.parseInt(separador[3]);
String tipus=separador[4];
Hardware aux_h=new Hardware(nom, preu, estoc, tipus);
llista_productes.afegirProducte(aux_h);
}else {
// Any other prefix is treated as a configuration record
String nom=separador[1];
float preu=0;// the price is computed from the products of the configuration
int estoc=Integer.parseInt(separador[2]);
Hardware[] llista_h= new Hardware[100];
int cont_h =0, cont_s = 0, cont_aux_h = 0, cont_aux_s=0; //counters for the arrays below
Software[] llista_s=new Software[100];
// Raw component ids as read from the record, resolved to products below
Integer[] llista_auxIntegers_h = new Integer[100];
Integer[] llista_auxIntegers_s = new Integer[100];
int posicio = 3;
// NOTE(review): if field 3 is not "H" the whole line is silently ignored
// and no Configuracio is created — confirm this is intended.
if(separador[posicio].equalsIgnoreCase("H")) {
posicio++;
String aux_r= separador[posicio];
// Hardware ids run until the "S" marker field
while (!aux_r.equals("S")) {
llista_auxIntegers_h[cont_aux_h] = Integer.parseInt(aux_r);
cont_aux_h++;
posicio++;
aux_r= separador[posicio];
}
posicio++;
// All remaining fields are software ids
while (posicio<separador.length) {
aux_r= separador[posicio];
llista_auxIntegers_s[cont_aux_s] = Integer.parseInt(aux_r);
cont_aux_s++;
posicio++;
}
// Count the software ids: the first null marks the end of the filled prefix
int cont_aux_ultra2 = 0;
boolean trobat = false;
while (!trobat) {
if (llista_auxIntegers_s[cont_aux_ultra2] == null) {
trobat =true;
}
else {
cont_aux_ultra2++;
}
}
// Resolve each software id to its product and accumulate its price
for (int i = 0; i < cont_aux_ultra2; i++) {
llista_s[cont_s] = (Software)llista_productes.buscarProducte(llista_auxIntegers_s[i]);
preu+=llista_s[cont_s].getPreu();
cont_s++;
}
trobat=false;
cont_aux_ultra2=0;
// Same two steps (count, then resolve) for the hardware ids
while (!trobat) {
if (llista_auxIntegers_h[cont_aux_ultra2] == null) {
trobat =true;
}
else {
cont_aux_ultra2++;
}
}
for (int i = 0; i < cont_aux_ultra2; i++) {
llista_h[cont_h] = (Hardware)llista_productes.buscarProducte(llista_auxIntegers_h[i]);
preu+=llista_h[cont_h].getPreu();
cont_h++;
}
Configuracio aux_c=new Configuracio(nom, preu, estoc, llista_s, llista_h);
llista_productes.afegirProducte(aux_c);
}
}
}
f.close();
}catch (FileNotFoundException e) {
System.out.println("No existeix el fitxer.");
}
catch(Exception e) {
System.out.println("Hi ha hagut algun error en la lectura de l'arxiu o al afegir els elements a la llista.\n");
e.printStackTrace();
}
}
/**
 * Saves every product to "productes.txt" in the '*'-separated format that
 * llegirFitxerProductes reads back, in three passes: first Hardware
 * (H*nom*preu*estoc*tipus), then Software (S*nom*preu*estoc*so) and finally
 * Configuracions (C*nom*estoc*H*id...*S*id...), which must come last so the
 * components they reference already exist when the file is re-read.
 * Improvements over the original: StringBuilder instead of O(n^2) String
 * concatenation inside the loops, and try-with-resources so the writer is
 * always flushed and closed.
 */
private static void guardarFitxerProductes() {
    try (BufferedWriter bw = new BufferedWriter(new FileWriter("productes.txt"))) {
        StringBuilder res = new StringBuilder();
        // 1st pass: Hardware -> H*nom*preu*estoc*tipus
        for (int i = 0; i < llista_productes.getnElem(); i++) {
            if (llista_productes.getLlista()[i] instanceof Hardware) {
                res.append("H*");
                res.append(llista_productes.getLlista()[i].getNom()).append("*");
                res.append(llista_productes.getLlista()[i].getPreu()).append("*");
                res.append(llista_productes.getLlista()[i].getEstoc()).append("*");
                res.append(((Hardware) llista_productes.getLlista()[i]).getTipusHardwareString()).append("\n");
            }
        }
        // 2nd pass: Software -> S*nom*preu*estoc*sistemaOperatiu
        for (int i = 0; i < llista_productes.getnElem(); i++) {
            if (llista_productes.getLlista()[i] instanceof Software) {
                res.append("S*");
                res.append(llista_productes.getLlista()[i].getNom()).append("*");
                res.append(llista_productes.getLlista()[i].getPreu()).append("*");
                res.append(llista_productes.getLlista()[i].getEstoc()).append("*");
                res.append(((Software) llista_productes.getLlista()[i]).getSOString()).append("\n");
            }
        }
        // 3rd pass: Configuracions -> C*nom*estoc*H*id...*S*id...
        // (the price is not stored: it is recomputed from the components on load)
        for (int i = 0; i < llista_productes.getnElem(); i++) {
            if (llista_productes.getLlista()[i] instanceof Configuracio) {
                res.append("C*");
                res.append(llista_productes.getLlista()[i].getNom()).append("*");
                res.append(llista_productes.getLlista()[i].getEstoc()).append("*");
                res.append("H");
                int numElem = ((Configuracio) llista_productes.getLlista()[i]).numElementsHardware();
                for (int j = 0; j < numElem; j++) {
                    res.append("*").append(((Configuracio) llista_productes.getLlista()[i]).getHardwares()[j].getId());
                }
                res.append("*S");
                numElem = ((Configuracio) llista_productes.getLlista()[i]).numElementsSoftware();
                for (int j = 0; j < numElem; j++) {
                    res.append("*").append(((Configuracio) llista_productes.getLlista()[i]).getSoftwares()[j].getId());
                }
                res.append("\n");
            }
        }
        bw.write(res.toString());
    } catch (Exception e) {
        System.out.println("Hi ha hagut un problema a l'escriure al fitxer!");
    }
}
/**
 * Serializes the whole order list (llista_comandes) to the binary file
 * "comandes.ser" using Java serialization.
 * Improvement over the original: try-with-resources closes the stream even
 * when writeObject throws (the original only closed it on the happy path).
 */
private static void guardarDataSerialitzable () {
    try (ObjectOutputStream gfitxer = new ObjectOutputStream(new FileOutputStream("comandes.ser"))) {
        gfitxer.writeObject(llista_comandes);
    } catch (IOException e) {
        System.out.println("Error a l'hora d'escriure al fitxer");
    }
}
/**
 * Deserializes the order list from "comandes.ser" into llista_comandes.
 * Improvement over the original: try-with-resources closes the stream even
 * when reading or casting fails. Each failure mode keeps its own message:
 * I/O error, missing class, or a file whose content is not a LlistaComandes.
 */
private static void llegirDataSerialitzable () {
    try (ObjectInputStream lfitxer = new ObjectInputStream(new FileInputStream("comandes.ser"))) {
        llista_comandes = (LlistaComandes) lfitxer.readObject();
    } catch (IOException e) {
        System.out.println ("Error a l'hora de llegir l'arxiu");
    } catch (ClassNotFoundException e) {
        System.out.println ("Error a l'hora de buscar la llista de Comandes");
    } catch (ClassCastException e) {
        System.out.println ("Error, el format de l'arxiu no és correcte per poder-lo obrir i llegir-lo");
    }
}
/**
 * Prints the main shop menu. Output is identical to the original version;
 * the options are kept in an array so the listing reads as data.
 * (Only prints text; the chosen option is read by the main loop.)
 */
private static void menu () {
    System.out.println("\nBENVINGUT A LA BOTIGA! QUE VOLS FER?\n");
    String[] opcions = {
        "1. Afegir un producte de software",
        "2. Afegir un producte de hardware",
        "3. Afegir una configuració completa",
        "4. Donar d'alta un client",
        "5. Donar de baixa un client",
        "6. Visualitzar els productes que tenen alguna comanda",
        "7. Modificar l'estoc",
        "8. Visualitzar els productes que estan en estoc",
        "9. Visualitzar els productes que formen part d'una configuració",
        "10. Mostrar el producte amb més comandes",
        "11. Consultar tots els elements d'una llista",
        "12. Sortir"
    };
    for (String opcio : opcions) {
        System.out.println(opcio);
    }
}
/**
* FUNCIONS DEL MENU
*/
/**
 * CASE 1: Adds a new Software product. Asks the user for name, price and
 * stock, then loops until a valid operating-system option (1-3) is chosen,
 * and finally adds the product to llista_productes.
 * On malformed numeric input the operation is aborted with a message and
 * the pending input line is discarded so the main menu does not loop.
 */
private static void afegirSoftware () {
String nom;
float preu;
int estoc, op=0;
String sist = "";
try {
System.out.println("Introdueix el nom:");
nom=teclat.next();
System.out.println("Introdueix el preu:");
preu=teclat.nextFloat();
System.out.println("Introdueix l'estoc:");
estoc=teclat.nextInt();
op = 0;
// Keep asking until a valid operating-system option is selected
while (op < 1 || op > 3) {
System.out.println("Selecciona el sistema operatiu:");
System.out.println("1- WINDOWS, 2-MACOS, 3-LINUX");
op=teclat.nextInt();
switch (op) {
case 1:
sist= "WINDOWS";
break;
case 2:
sist= "MACOS";
break;
case 3:
sist= "LINUX";
break;
default:
System.out.println("Has introduit un nombre erroni! Torna a provar");
}
}
llista_productes.afegirProducte(new Software(nom, preu, estoc, sist));
}catch(InputMismatchException e){
System.out.println("Nom/preu/estoc incorrecte");
teclat.nextLine(); }
}
/**
 * CASE 2: Adds a new Hardware product. Asks the user for name, price and
 * stock, then loops until a valid hardware-type option (1-6) is chosen,
 * and finally adds the product to llista_productes.
 * On malformed numeric input the operation is aborted with a message and
 * the pending input line is discarded so the main menu does not loop.
 */
private static void afegirHardware () {
String nom;
float preu;
int estoc, op=2;
String tipus= "";
try {
System.out.println("Introdueix el nom:");
nom=teclat.next();
System.out.println("Introdueix el preu:");
preu=teclat.nextFloat();
System.out.println("Introdueix l'estoc:");
estoc=teclat.nextInt();
// Keep asking until a valid hardware type is selected
do {
System.out.println("Selecciona el tipus de hardware:");
System.out.println("1- CPU, 2-MB, 3-HDD, 4-RAM, 5-GPU, 6-PERIFERIC");
op=teclat.nextInt();
switch (op) {
case 1:
tipus = "CPU";
break;
case 2:
tipus = "MB";
break;
case 3:
tipus = "HDD";
break;
case 4:
tipus = "RAM";
break;
case 5:
tipus = "GPU";
break;
case 6:
tipus = "PERIFERIC";
break;
default:
System.out.println("Has introduit un nombre erroni! Torna a provar");
}
}while(op<1 || op>6);
llista_productes.afegirProducte(new Hardware(nom, preu, estoc, tipus));
} catch(InputMismatchException e) {
System.out.println("Dades introduides erronies, torni a escollir una opcio");
teclat.nextLine();
}
}
/**
 * CASE 3: Interactively builds a new Configuracio. The user picks at least
 * one hardware component of each type (HDD, Periferic, RAM, MB, CPU, GPU)
 * and at least one operating system; the price of the configuration is the
 * sum of the prices of every selected component.
 * Improvement over the original: the six copy-pasted selection loops were
 * factored into the escollirHardware/escollirSoftware helpers below, with
 * identical prompts and behaviour.
 */
private static void afegirConfiguracio() {
    Hardware[] llista_h = new Hardware[50];
    Software[] llista_s = new Software[50];
    System.out.println("Introdueix el nom de la configuracio:");
    String nom = teclat.next();
    float preu = 0; // computed below from the selected components
    System.out.println("Introdueix l'estoc del producte:");
    int estoc = teclat.nextInt();
    System.out.println("A continuacio es mostraran els components de Hardware,");
    System.out.println("Escull un minim de 1 de cada tipus:");
    System.out.println("Escull un component HDD:");
    int j = 0;
    j = escollirHardware("HDD", llista_h, j);
    System.out.println("Escull un component periferic:");
    j = escollirHardware("Periferic", llista_h, j);
    System.out.println("Escull un component RAM:");
    j = escollirHardware("RAM", llista_h, j);
    System.out.println("Escull un component MB:");
    j = escollirHardware("MB", llista_h, j);
    System.out.println("Escull un component CPU:");
    j = escollirHardware("CPU", llista_h, j);
    System.out.println("Escull un component GPU:");
    j = escollirHardware("GPU", llista_h, j);
    // Price of all the hardware picked so far
    for (int i = 0; i < j; i++) {
        preu += llista_h[i].getPreu();
    }
    int nSoftware = escollirSoftware(llista_s);
    for (int i = 0; i < nSoftware; i++) {
        preu += llista_s[i].getPreu();
    }
    llista_productes.afegirProducte(new Configuracio(nom, preu, estoc, llista_s, llista_h));
}
/**
 * Helper for afegirConfiguracio: repeatedly lists every Hardware product of
 * the given type and lets the user pick components by list number until
 * they answer "2" (No). Selected items are stored in llista_h from index j.
 * @return the next free index in llista_h
 */
private static int escollirHardware(String tipus, Hardware[] llista_h, int j) {
    int op = 0;
    while (op != 2) {
        for (int i = 0; i < llista_productes.getnElem(); i++) {
            if ((llista_productes.getLlista()[i]) instanceof Hardware) {
                if ((((Hardware) llista_productes.getLlista()[i]).getTipusHardwareString()).equalsIgnoreCase(tipus)) {
                    System.out.println(i + 1 + "-" + ((Hardware) llista_productes.getLlista()[i]).toString());
                }
            }
        }
        System.out.println("Intordueix el id del " + tipus + " que vols:");
        int pos = teclat.nextInt();
        llista_h[j] = ((Hardware) llista_productes.getLlista()[pos - 1]);
        j++;
        System.out.println("Vols afegir un altre compontent " + tipus + "?");
        System.out.println("1- Si 2- No");
        op = teclat.nextInt();
    }
    return j;
}
/**
 * Helper for afegirConfiguracio: same selection loop for Software
 * (operating systems); fills llista_s from index 0.
 * @return how many software items were selected
 */
private static int escollirSoftware(Software[] llista_s) {
    int op = 0, j = 0;
    while (op != 2) {
        for (int i = 0; i < llista_productes.getnElem(); i++) {
            if ((llista_productes.getLlista()[i]) instanceof Software) {
                System.out.println(i + 1 + "-" + ((Software) llista_productes.getLlista()[i]).toString());
            }
        }
        System.out.println("Introdueix el id del SO que vols:");
        int pos = teclat.nextInt();
        llista_s[j] = ((Software) llista_productes.getLlista()[pos - 1]);
        j++;
        System.out.println("Vols afegir un altre SO?");
        System.out.println("1- Si 2- No");
        op = teclat.nextInt();
    }
    return j;
}
/**
 * CASE 4: Registers a new client. Reads DNI, e-mail and address from the
 * keyboard and appends the resulting Client to llista_clients.
 */
private static void altaClient () {
    System.out.println("Introdueix el dni del client:");
    teclat.nextLine(); // discard the newline left over from a previous nextInt()
    final String dniClient = teclat.nextLine();
    System.out.println("Introdueix el correu electronic del client:");
    final String correuClient = teclat.nextLine();
    System.out.println("\nIntrodueix la direccio del client:");
    final String adrecaClient = teclat.nextLine();
    Client nouClient = new Client(dniClient, correuClient, adrecaClient);
    llista_clients.afegirClient(nouClient);
}
/**
 * CASE 5: Removes a client identified by DNI, together with all of that
 * client's orders. If the DNI does not exist, the user is informed.
 */
private static void baixaClient () {
    System.out.println("\nIntrodueix el dni del client que es vol donar de baixa:");
    teclat.nextLine(); // discard the newline left over in the input buffer
    final String dniBaixa = teclat.nextLine();
    try {
        llista_clients.eliminarClient(dniBaixa);
        llista_comandes.eliminarComandes(dniBaixa);
    } catch (ClientInexistent e) {
        System.out.println("No s'ha trobat el client.");
    }
}
/**
 * CASE 6: Prints every product that appears in at least one order, each
 * followed by the clients of all the orders containing it. Products are
 * first deduplicated by id into the temporary list llista_aux.
 */
private static void prodComanda () {
LlistaProductes llista_aux = new LlistaProductes();
boolean trobat = false;
if (llista_comandes.getnComanda()!= 0) {
// 1st pass: collect each ordered product exactly once (compare by id)
for (int i = 0; i < llista_comandes.getnComanda(); i++) {
for (int j = 0; j < llista_comandes.getLlista()[i].getLlistaProductes().getnElem(); j++) {
for (int k = 0; k < llista_aux.getnElem() && !trobat; k++) {
if ( llista_comandes.getLlista()[i].getLlistaProductes().getLlista()[j].getId() == llista_aux.getLlista()[k].getId()) {
trobat=true;
}
}
if(!trobat) {
llista_aux.afegirProducte(llista_comandes.getLlista()[i].getLlistaProductes().getLlista()[j]);
}
trobat = false;
}
}
// 2nd pass: for each distinct product, print the clients that ordered it
for (int i = 0; i < llista_aux.getnElem(); i++) {
System.out.println(llista_aux.getLlista()[i]);
for (int j = 0; j < llista_comandes.getnComanda(); j++) {
if(llista_comandes.getLlista()[j].existeixProducte(llista_aux.getLlista()[i])) {
System.out.println(llista_comandes.getLlista()[j].getClient());
}
}
System.out.println("-----------------------------------------------");
}
}
else {
System.out.println("No hi ha cap comanda feta");
}
}
/**
 * CASE 7: Changes the stock of a product selected by its list number.
 * Fixes over the original: the "current stock" confirmation is only printed
 * when the update actually succeeded (it used to be printed even after an
 * EstocNegatiu rejection), an out-of-range product number is reported
 * instead of crashing, and the pending token is discarded on malformed
 * input — matching the other menu handlers — so the main menu cannot spin.
 */
private static void modificarEstoc () {
    int i, nouEstoc;
    System.out.println(llista_productes.toString());
    System.out.println("Introdueix el numero del producte del qual vols modificar l'estoc:");
    try {
        i = teclat.nextInt();
        System.out.println("Quin es el nou estoc d'aquest producte?");
        nouEstoc = teclat.nextInt();
        try {
            llista_productes.getLlista()[i - 1].setEstoc(nouEstoc);
            System.out.println("L'estoc actual es: " + nouEstoc);
        } catch (EstocNegatiu e) {
            System.out.println("No s'ha modificat l'estoc.");
        } catch (ArrayIndexOutOfBoundsException | NullPointerException e) {
            // Product number out of range: report instead of crashing
            System.out.println("No s'ha modificat l'estoc.");
        }
    } catch (InputMismatchException e) {
        System.out.println("Has introduit un valor incorrecte.");
        teclat.nextLine(); // discard the invalid token so the menu loop does not spin
    }
}
/**
 * CASE 8: Builds a listing of every product whose stock is at least 1.
 * @return one line per in-stock product (the product's own toString),
 *         or an empty string when nothing is in stock
 */
private static String productesEstoc() {
    StringBuilder llistat = new StringBuilder();
    for (int pos = 0; pos < llista_productes.getnElem(); pos++) {
        if (llista_productes.getLlista()[pos].getEstoc() >= 1) {
            llistat.append(llista_productes.getLlista()[pos].toString()).append("\n");
        }
    }
    return llistat.toString();
}
/**
 * CASE 9: Builds a listing of every product that is a Configuracio.
 * NOTE(review): the menu text talks about products that are *part of* a
 * configuration, but the original code lists the configurations themselves;
 * that behaviour is preserved here — confirm which one is intended.
 * @return one line per Configuracio product, or an empty string if none
 */
private static String productesConfiguracio() {
    StringBuilder llistat = new StringBuilder();
    for (int pos = 0; pos < llista_productes.getnElem(); pos++) {
        if (llista_productes.getLlista()[pos] instanceof Configuracio) {
            llistat.append(llista_productes.getLlista()[pos].toString()).append("\n");
        }
    }
    return llistat.toString();
}
/**
 * CASE 10: Finds the product with the most orders and prints it together
 * with its order count. A deduplicated product list (llista_aux) is built
 * while the parallel array aux counts occurrences per product.
 */
private static void mesComandes () {
LlistaProductes llista_aux = new LlistaProductes();
// aux[n] counts appearances of the product stored at position n of
// llista_aux; the total number of products bounds the distinct ones.
int aux[] = new int [llista_productes.getnElem()];
boolean trobat = false;
int indexGran = 0, k = 0;
for (int i = 0; i < llista_comandes.getnComanda(); i++) {
for (int j = 0; j < llista_comandes.getLlista()[i].getLlistaProductes().getnElem(); j++) {
// Look for this product (by id) among the ones already collected
for (k = 0; k < llista_aux.getnElem() && !trobat; k++) {
if ( llista_comandes.getLlista()[i].getLlistaProductes().getLlista()[j].getId() == llista_aux.getLlista()[k].getId()) trobat = true;
}
if (!trobat) {
// First occurrence: count it and remember the product
aux[llista_aux.getnElem()]++;
llista_aux.afegirProducte(llista_comandes.getLlista()[i].getLlistaProductes().getLlista()[j]);
}
else {
aux[k-1]++; // k was incremented once more after the match, so the matching product sits at k-1
}
trobat = false;
}
}
// Locate the index with the maximum count
for (int i = 1; i < aux.length; i++) {
if(aux[i]>aux[indexGran]) indexGran = i;
}
if(llista_aux.getLlista()[indexGran] != null) {
System.out.println(llista_aux.getLlista()[indexGran]);
System.out.println("El numero de comandes que s'han fet es: "+aux[indexGran]);
}
else {
System.out.println("No hi ha cap producte amb comandes.");
}
}
/**
 * CASE 11: Sub-menu that prints one of the three lists (products, clients
 * or orders). Repeats until a valid option (1-3) has been chosen; invalid
 * numeric input is mapped to -1 and the pending line is discarded.
 */
private static void consultarLlistes () {
int op;
do {
//System.out.println("\n0. Sortir");
System.out.println("\n1. Lista de productes");
System.out.println("\n2. Lista de clients");
System.out.println("\n3. Lista de comandes");
System.out.println("\nQuina opcio vols escollir?:");
try {
op = teclat.nextInt();
}catch (InputMismatchException e) {
op= -1; // force an invalid value so the switch falls through to default
teclat.nextLine();
}
switch (op) {
case 1:{
System.out.println("\nLLISTA DE PRODUCTES:");
System.out.println("\n"+llista_productes.toString());
}break;
case 2:{
System.out.println("\nLLISTA DE CLIENTS:");
System.out.println("\n"+llista_clients.toString());
}break;
case 3:{
System.out.println("\nLLISTA DE COMANDES:");
System.out.println("\n"+llista_comandes.toString());
}break;
default:
System.out.println("\nOpcio no valida, introdueixi un enter");
break;
}
}while (op < 1 || op > 3);
}
}
<file_sep>/src/models/Comanda.java
package models;
import java.io.Serializable;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import ExceptionsBotiga.EstocNegatiu;
import models.Client;
/**
 * An order (comanda) placed by a client: a dated list of products with an
 * accumulated price and a unique identifier. Serializable so the order
 * list can be persisted to "comandes.ser".
 */
public class Comanda implements Serializable {
    private static final long serialVersionUID = 1L;
    private Date data;                // creation date of the order
    private LlistaProductes llista_p; // products contained in the order
    private String identificador;     // unique id: client DNI + correlative number
    private Client client;            // client who placed the order
    private float preuComanda;        // accumulated price of the added products
    private static int numCorrelatiu = 1; // counter shared by all orders to build unique ids
    /**
     * Creates an empty order for the given client, stamped with the current
     * date and a fresh identifier (client DNI + correlative number).
     * @param client client that places the order
     */
    public Comanda (Client client){
        this.client = client;
        this.preuComanda = 0;
        this.llista_p= new LlistaProductes();
        this.data = Calendar.getInstance().getTime();
        this.identificador = client.getDni() + numCorrelatiu;
        numCorrelatiu++;
    }
    /**
     * Private copy constructor: duplicates every field of the original order
     * without consuming a new correlative number or changing the identifier.
     */
    private Comanda (Comanda original) {
        this.client = original.client;
        this.preuComanda = original.preuComanda;
        this.identificador = original.identificador;
        this.data = new Date(original.data.getTime()); // defensive copy: Date is mutable
        this.llista_p = new LlistaProductes();
        for (int i = 0; i < original.llista_p.getnElem(); i++) {
            this.llista_p.afegirProducte(original.llista_p.getLlista()[i]);
        }
    }
    /**
     * Returns a copy of this order with the same date, products, identifier
     * and price. (The previous implementation returned a brand-new empty
     * order with a different identifier, which was not a real copy — the
     * original author had flagged it as wrong.)
     */
    public Comanda copia () {
        return new Comanda(this);
    }
    /** @return the date the order was created */
    public Date getData () {
        return data;
    }
    /** @return the client that placed the order */
    public Client getClient() {
        return client;
    }
    /** @return the list of products contained in this order */
    public LlistaProductes getLlistaProductes() {
        return llista_p;
    }
    /** @return the unique identifier (client DNI + correlative number) */
    public String getIdentificador() {
        return identificador;
    }
    /** @return the next correlative number shared by all orders */
    public static int getNumCorrelatiu () {
        return numCorrelatiu;
    }
    /** @return the accumulated price of the order */
    public float getPreuComanda() {
        return preuComanda;
    }
    /** @param data new creation date */
    public void setData (Date data) {
        this.data = data;
    }
    /** @param llista_p new product list for the order */
    public void setLlistaProductes(LlistaProductes llista_p) {
        this.llista_p = llista_p;
    }
    /** @param identificador new identifier */
    public void setIdentificador(String identificador) {
        this.identificador = identificador;
    }
    /** @param numCorrelatiu new value for the shared correlative counter */
    public static void setNumCorrelatiu (int numCorrelatiu) {
        Comanda.numCorrelatiu = numCorrelatiu;
    }
    /** @param preuComanda new accumulated price */
    public void setPreuComanda(float preuComanda) {
        this.preuComanda = preuComanda;
    }
    // METHODS
    /**
     * Adds a product to the order, decrements its stock by one and adds its
     * price to the order total. If the stock would become negative the stock
     * update is rejected (a message is printed) but the product and its
     * price are still added — this matches the original behaviour, which
     * callers may rely on.
     * @param producte product to add to the order
     */
    public void afegirProducteComanda (Producte producte) {
        llista_p.afegirProducte(producte);
        try {
            producte.setEstoc(producte.getEstoc()-1);
        } catch (EstocNegatiu e) {
            System.out.println("No es pot posar estoc negatiu.");
        }
        preuComanda += producte.getPreu();
    }
    /**
     * Checks whether a product (compared by id) is part of this order.
     * @param producte product to look for
     * @return true if a product with the same id exists in the order
     */
    public boolean existeixProducte(Producte producte) {
        for (int i = 0; i < llista_p.getnElem(); i++) {
            if(llista_p.getLlista()[i].getId()==producte.getId()) {
                return true;
            }
        }
        return false;
    }
    @Override
    public String toString() {
        DateFormat dateformat = new SimpleDateFormat("dd/MM/yyyy");
        return "\nData de la comanda: " +dateformat.format(data)+
        "\nProductes: \n" + llista_p.toString() +
        "\nIdentificador: " + identificador+
        "\nPreuComanda: "+preuComanda;
    }
}
<file_sep>/src/models/Client.java
package models;
import java.io.Serializable;
/**
 * Represents a shop customer, identified by DNI, with a contact e-mail and
 * a postal address. Serializable so objects referencing clients (orders)
 * can be persisted with Java serialization.
 */
public class Client implements Serializable {

    private static final long serialVersionUID = 1L;

    // ATTRIBUTES
    private String dni;
    private String correu;
    private String adresa;

    /**
     * Builds a client from its three fields.
     * @param dni    identity number (used elsewhere as the client key)
     * @param correu contact e-mail address
     * @param adresa postal address
     */
    public Client(String dni, String correu, String adresa) {
        this.dni = dni;
        this.correu = correu;
        this.adresa = adresa;
    }

    /** @return the client's DNI */
    public String getDni() {
        return this.dni;
    }

    /** @param dni new DNI for this client */
    public void setDni(String dni) {
        this.dni = dni;
    }

    /** @return the client's e-mail address */
    public String getCorreu() {
        return this.correu;
    }

    /** @param correu new e-mail address */
    public void setCorreu(String correu) {
        this.correu = correu;
    }

    /** @return the client's postal address */
    public String getAdresa() {
        return this.adresa;
    }

    /** @param adresa new postal address */
    public void setAdresa(String adresa) {
        this.adresa = adresa;
    }

    /**
     * @return an independent copy of this client with the same field values
     */
    public Client copia() {
        return new Client(this.dni, this.correu, this.adresa);
    }

    /** Same textual form as the original concatenation-based version. */
    @Override
    public String toString() {
        return String.format("Client [dni=%s, correu=%s, adresa=%s]", this.dni, this.correu, this.adresa);
    }
}
| 3cd47982082bfb2665c89ae297174592bb4eb68d | [
"Java"
] | 6 | Java | Epiesteban/gestioIVentaProductesInformatics | a09195d105fc4bb5abbe2d2b6483666369addd11 | f0a1eb85a3b2f300843ea495de8cc74ffa52617e |
refs/heads/master | <file_sep>var ballObject1,ballObject2,ballObject3,ballObject4,ballObject5,ground
// Shorthand aliases for the Matter.js modules used by this sketch.
const Engine = Matter.Engine;
const World = Matter.World;
const Bodies = Matter.Bodies;
const Body = Matter.Body;
const Constraint = Matter.Constraint;
// p5.js preload hook: intentionally empty — this sketch loads no assets.
function preload()
{
}
// p5.js setup hook: creates the canvas, the Matter.js engine/world, the
// ground, five bobs and one rope per bob anchored to the ground body at
// different horizontal offsets.
function setup() {
createCanvas(800, 700);
engine = Engine.create();
world = engine.world;
// Bar the ropes attach to (see the Ground class for its body options)
ground = new Ground(400,100,400,40);
ballObject1 = new Bob(300,600,25);
ballObject2 = new Bob(350,600,25);
ballObject3 = new Bob(400,600,25);
ballObject4 = new Bob(450,600,25);
ballObject5 = new Bob(500,600,25);
// Each rope links one bob to the ground body with a different x-offset
rope1 = new Rope(ballObject1.body,ground.body,-100,0)
rope2 = new Rope(ballObject2.body,ground.body,-50,0)
rope3 = new Rope(ballObject3.body,ground.body,0,0)
rope4 = new Rope(ballObject4.body,ground.body,50,0)
rope5 = new Rope(ballObject5.body,ground.body,100,0)
//Create the Bodies Here.
Engine.run(engine);
}
// p5.js draw hook: runs every frame; repaints the background and draws the
// ground, the ropes and the bobs, then lets drawSprites() render sprites.
function draw() {
rectMode(CENTER);
background(0);
ground.display();
rope1.display();
rope2.display();
rope3.display();
rope4.display();
rope5.display();
ballObject1.display();
ballObject2.display();
ballObject3.display();
ballObject4.display();
ballObject5.display();
drawSprites();
}
| 61ad5f26cebe0f7c00d7cdd5f9eadebf13febff2 | [
"JavaScript"
] | 1 | JavaScript | aaravSharma123/Make-you-own-game-1 | 87cd77d48c46f1f7c30593424ff6431a688f5ed5 | 9f699031cecb15b5d574ca86404cc43d2c920d0e |
refs/heads/master | <repo_name>sandersaares/ScrapeScteDocuments<file_sep>/ScrapeScteDocuments/InputModel/Category.cs
using System.Collections.Generic;
namespace ScrapeScteDocuments.InputModel
{
public sealed class Category
{
public List<Post> Posts { get; set; }
}
}
<file_sep>/ScrapeScteDocuments/InputModel/Post.cs
using System.Collections.Generic;
namespace ScrapeScteDocuments.InputModel
{
public sealed class Post
{
public string Title { get; set; }
public string Status { get; set; }
// Sometimes duplicate keys exist in here...
public List<Meta> Meta { get; set; }
}
}
<file_sep>/ScrapeScteDocuments/Program.cs
using Newtonsoft.Json;
using Polly;
using Polly.Retry;
using ScrapeScteDocuments.InputModel;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Text;
using System.Text.RegularExpressions;
using System.Xml;
namespace ScrapeScteDocuments
{
internal class Program
{
/// <summary>
/// All .json files will be placed in this subdirectory of the working directory.
/// The directory will be cleaned on start.
/// </summary>
public const string OutputDirectory = "SpecRef";
/// <summary>Name of the generated SpecRef database file inside OutputDirectory.</summary>
public const string OutFile = "scte.json";
/// <summary>SCTE catalog endpoint listing every category with its documents.</summary>
public static string ScrapeUrl = "https://api.scte-website-cms.com/api/v1/standards/category/non-public";
// Retry if something goes wrong (it shouldn't but the web is the web).
private static RetryPolicy<string> ScrapePolicy = Policy<string>.Handle<Exception>().Retry(3);
/// <summary>One entry of the generated SpecRef database.</summary>
private sealed class Entry
{
public int SortIndex { get; set; }
public string Url { get; set; }
public string Title { get; set; }
public string RawDate { get; set; }
public string Status { get; set; }
public string[] Aliases { get; set; }
}
// Example document names:
// ANSI/SCTE 05 2014
// SCTE 06 2019
// ANSI/SCTE 24-02 2016
// ANSI/SCTE 82-2012
//
// The base ID is whatever is after "SCTE" and the year.
// Sometimes the final space is a dash instead...
// 1 is base ID, 2 is year
private static readonly Regex ExtractIdComponentsRegex = new Regex(@"SCTE ([0-9-]+?)(?: |-)(\d{4})", RegexOptions.Compiled);
/// <summary>
/// Splits a standard number such as "ANSI/SCTE 24-02 2016" into the SpecRef
/// id ("scte24-02"), the four-digit year, and zero-padding aliases of the id.
/// </summary>
public static (string id, string rawDate, string[] aliases) ParseStandardNumber(string standardNumber)
{
var match = ExtractIdComponentsRegex.Match(standardNumber);
if (!match.Success)
throw new Exception("Failed to parse standard number: " + standardNumber);
var baseName = match.Groups[1].Value;
var rawDate = match.Groups[2].Value;
// It appears that standard numbers of the format 24-02 were renamed with the SCTE website update.
// Now they are listed as 24-2 and cause broken references. Let's add aliases to keep the old 24-02 working.
// Aaaaand sometimes it's the opposite, with 135-3 becoming 135-03! Add this direction of alias, too.
var aliases = new List<string>();
var lastDashIndex = baseName.LastIndexOf('-');
if (lastDashIndex != -1)
{
var prefinalPart = baseName.Substring(0, lastDashIndex + 1);
var finalPart = baseName.Substring(lastDashIndex + 1);
// If it is -1, make an alias -01
if (finalPart.Length == 1)
aliases.Add("scte" + prefinalPart + "0" + finalPart);
// If it is -01, make an alias -1
if (finalPart.Length == 2 && finalPart[0] == '0')
aliases.Add("scte" + prefinalPart + finalPart[1]);
}
return ("scte" + baseName, rawDate, aliases.ToArray());
}
private static void Main(string[] args)
{
if (Directory.Exists(OutputDirectory))
Directory.Delete(OutputDirectory, true);
Directory.CreateDirectory(OutputDirectory);
Console.WriteLine("Output will be saved in " + Path.GetFullPath(OutputDirectory));
var client = new HttpClient();
var entries = new Dictionary<string, Entry>();
var documentIndex = 1;
Console.WriteLine($"Loading catalog page: {ScrapeUrl}");
var pageJson = ScrapePolicy.Execute(() => client.GetStringAsync(ScrapeUrl).Result);
var categories = JsonConvert.DeserializeObject<Category[]>(pageJson);
var documents = categories.SelectMany(c => c.Posts).ToArray();
Console.WriteLine($"Found {documents.Length} documents.");
var knownUrls = new List<string>();
foreach (var document in documents)
{
var standardNumber = document.Meta.First(m => m.Key == "standardNumber").Value;
(var id, var rawDate, var aliases) = ParseStandardNumber(standardNumber);
var absoluteUrl = document.Meta.First(m => m.Key == "PDF").Value;
var title = document.Title;
if (string.IsNullOrWhiteSpace(title))
throw new Exception("Empty title for " + standardNumber);
if (string.IsNullOrWhiteSpace(absoluteUrl))
throw new Exception("Empty URL for " + standardNumber);
if (knownUrls.Contains(absoluteUrl))
{
// SCTE catalog seems to have some errors... ??? Whatever, just skip for now.
// 231 and 232 are in conflict at time of writing (both use 231 URL).
Console.WriteLine($"Skipping {id} because it reuses a URL already used for another document: {absoluteUrl}");
continue;
}
knownUrls.Add(absoluteUrl);
Console.WriteLine($"{standardNumber} is titled \"{title}\", available at {absoluteUrl} and will get the ID {id}");
if (aliases.Length != 0)
Console.WriteLine($"It is also called {string.Join(", ", aliases)}");
var entry = new Entry
{
// We use the same sorting as on the website.
SortIndex = documentIndex++,
Url = absoluteUrl,
Title = $"{standardNumber}: {title}",
// Fix: RawDate was parsed above but never assigned here, so the
// serialized "rawDate" field was always omitted from the output
// (DefaultValueHandling.Ignore drops null members).
RawDate = rawDate,
Status = document.Status,
Aliases = aliases
};
entries[id] = entry;
}
if (entries.Count == 0)
throw new Exception("Loaded no entries."); // Sanity check.
// Ok, we got all our entries. Serialize.
var json = new Dictionary<string, object>(entries.Count);
foreach (var pair in entries.OrderBy(p => p.Value.SortIndex))
{
json[pair.Key] = new
{
href = pair.Value.Url,
title = pair.Value.Title,
publisher = "SCTE",
rawDate = pair.Value.RawDate,
status = pair.Value.Status
};
foreach (var alias in pair.Value.Aliases)
{
json[alias] = new
{
aliasOf = pair.Key
};
}
}
var outputFilePath = Path.Combine(OutputDirectory, OutFile);
File.WriteAllText(outputFilePath, JsonConvert.SerializeObject(json, JsonSettings), OutputEncoding);
}
/// <summary>Indented JSON; members left at their default (null) are omitted.</summary>
private static readonly JsonSerializerSettings JsonSettings = new JsonSerializerSettings
{
Formatting = Newtonsoft.Json.Formatting.Indented,
DefaultValueHandling = DefaultValueHandling.Ignore
};
/// <summary>UTF-8 without a byte-order mark.</summary>
private static readonly Encoding OutputEncoding = new UTF8Encoding(false);
}
}
<file_sep>/ScrapeScteDocuments/InputModel/Meta.cs
using Newtonsoft.Json;
namespace ScrapeScteDocuments.InputModel
{
/// <summary>
/// One key/value metadata pair of a post, serialized with the
/// "meta_key"/"meta_value" JSON field names.
/// </summary>
public sealed class Meta
{
/// <summary>Metadata key, e.g. "standardNumber" or "PDF" (see Program).</summary>
[JsonProperty("meta_key")]
public string Key { get; set; }
/// <summary>Raw string value stored under this key.</summary>
[JsonProperty("meta_value")]
public string Value { get; set; }
}
}
| 4c8e468861ea2faa84754598bfc0bab766f1eeaf | [
"C#"
] | 4 | C# | sandersaares/ScrapeScteDocuments | 3d17e64eb4c553ec7b77c85238be55baf7e33b76 | c17c5cb4142dde77bc5a0596a5d9bad16e8c358b |
refs/heads/master | <file_sep>
create table avaliador(
id serial primary key,
nome varchar(100) not null
);
create table bar(
id serial primary key,
nome varchar(100) not null,
endereco varchar(100),
preco float,
ambiente float,
atendimento float,
gelada float,
drinks float
);
create table japones(
id serial primary key,
nome varchar(100) not null,
endereco varchar(100),
preco float,
ambiente float,
atendimento float,
variedades float,
peixeFresco float
);
create table hamburgueria(
id serial primary key,
nome varchar(100) not null,
endereco varchar(100),
preco float,
ambiente float,
atendimento float,
hamburger float,
acompanhamento float
);
create table restaurantes(
id_avaliador int references avaliador(id),
id_bar int references bar(id),
id_hamburgueria int references hamburgueria(id),
id_japones int references japones(id)
); | 6298841b1357570164b831825f9d3a00d333dbd8 | [
"SQL"
] | 1 | SQL | rfmoreira/ProjetoIntegrador | b96cac5b9bb9485bf65c21bb3bda93f666d7f5e7 | a179c56e09dd5dd16611b8583f352cca6e6aff28 |
refs/heads/master | <repo_name>rbarman-reputation/JsonComparator<file_sep>/src/main/java/Parsing/JsonDiffParser.java
package Parsing;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.flipkart.zjsonpatch.JsonDiff;
public class JsonDiffParser {
public void diff() throws JsonMappingException, JsonProcessingException
{
ObjectMapper mapper = new ObjectMapper();
String s1 = "{\n"
+ " \"firstName\": \"John\",\n"
+ " \"lastName\": \"Smith\",\n"
+ " \"age\": 25,\n"
+ " \"address\": {\n"
+ " \"streetAddress\": \"21 2nd Street\",\n"
+ " \"city\": \"New York\",\n"
+ " \"state\": \"NY\",\n"
+ " \"postalCode\": 10021\n"
+ " },\n"
+ " \"phoneNumbers\": [\n"
+ " {\n"
+ " \"type\": \"home\",\n"
+ " \"number\": \"212 555-1234\"\n"
+ " },\n"
+ " {\n"
+ " \"type\": \"fax\",\n"
+ " \"number\": \"646 555-4567\" \n"
+ " }\n"
+ " ] \n"
+ "}";
String s2 = "{\n"
+ " \"firstName\": \"John123\",\n"
+ " \"lastName\": \"Smith\",\n"
+ " \"age\": 25,\n"
+ " \"address\": {\n"
+ " \"streetAddress\": \"21 2nd Street\",\n"
+ " \"city\": \"New York\",\n"
+ " \"state\": \"NY\",\n"
+ " \"postalCode\": 10021\n"
+ " },\n"
+ " \"phoneNumbers\": [\n"
+ " {\n"
+ " \"type\": \"home\"\n"
+ " },\n"
+ " {\n"
+ " \"type\": \"fax123\",\n"
+ " \"number\": \"646-4567\" \n"
+ " }\n"
+ " ] \n"
+ "}";
JsonNode beforeNode = mapper.readTree(s1);
JsonNode afterNode = mapper.readTree(s2);
JsonNode patch = JsonDiff.asJson(beforeNode, afterNode);
String diffs = patch.toString().replace("[","").replace("]","").replaceAll("},", "}},");
//System.out.println(diffs);
String[] res = diffs.split("},");
for(String s : res)
System.out.println(s);
}
}
| e21178de12daa420e518bddaea9e78588d697b26 | [
"Java"
] | 1 | Java | rbarman-reputation/JsonComparator | 713c23e6a68186fb7c236698a41e6b4b976c0cf0 | b6531c3e3bd2f823b0ef354d095f3ac95dd53213 |
refs/heads/master | <repo_name>tiagom87/connective<file_sep>/app/models/user.rb
# Schema
# add_column :users, :name, :string
# add_column :users, :mini_bio, :string
# add_column :users, :nationality, :string
# add_column :users, :latest_position, :string
# add_column :users, :university, :string
# add_column :users, :experience, :text
class User < ActiveRecord::Base
# Include default devise modules. Others available are:
# :confirmable, :lockable, :timeoutable and :omniauthable
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :trackable, :validatable
validates_presence_of :name
after_create :set_guid
def set_guid
self.guid = SecureRandom.uuid()
self.save
end
end
<file_sep>/db/migrate/20150317215733_rename_nationality.rb
class RenameNationality < ActiveRecord::Migration
def change
rename_column :users, :nationality, :country
end
end
<file_sep>/app/controllers/pages_controller.rb
class PagesController < ApplicationController
def home
if current_user
redirect_to users_path
end
end
def login
end
def signup
end
def step2
end
def advisors
end
def students
end
def student
end
def advisor
end
end
<file_sep>/db/migrate/20150309232245_add_extra_to_users.rb
class AddExtraToUsers < ActiveRecord::Migration
def change
add_column :users, :mini_bio, :string
add_column :users, :nationality, :string
add_column :users, :latest_position, :string
add_column :users, :university, :string
add_column :users, :experience, :text
end
end
| 982f5ac5f9d74136a96ade93b509bf9eb037b3f8 | [
"Ruby"
] | 4 | Ruby | tiagom87/connective | 7ec7f5c5639fca92446813fc0b0b790e5dcd4287 | 788c95be12efe63992fe4b560bbe58ee175cd5dd |
refs/heads/master | <file_sep># Akubra
[![Version Widget]][Version] [![Build Status Widget]][Build Status] [![GoDoc Widget]][GoDoc]
[Version]: https://github.com/allegro/akubra/releases/latest
[Version Widget]: https://img.shields.io/github/release/allegro/akubra.svg
[Build Status]: https://travis-ci.org/allegro/akubra
[Build Status Widget]: https://travis-ci.org/allegro/akubra.svg?branch=master
[GoDoc]: https://godoc.org/github.com/allegro/akubra
[GoDoc Widget]: https://godoc.org/github.com/allegro/akubra?status.svg
## Goal
Akubra is a simple solution to keep an independent S3 storages in sync - almost
realtime, eventually consistent.
Keeping redundant storage clusters, which handle great volume of new objects
(about 300k obj/h), is the most efficient by feeding them with all incoming data
at once. That's what Akubra does, with a minimum memory and cpu footprint.
Synchronizing S3 storages offline is almost impossible with a high volume traffic.
It would require keeping track of new objects (or periodical bucket listing),
downloading and uploading them to other storage. It's slow, expensive and hard
to implement.
Akubra way is to put files in all storages at once by copying requests to multiple
backends. Sometimes one of clusters may reject request for various reason, but
that's not a big deal: we simply log that event, and sync that object in an
independent process.
## Build
### Prerequisites
You need go >= 1.7 compiler [see](https://golang.org/doc/install)
### Build
In main directory of this repository do:
```
make build
```
### Test
```
make test
```
## Usage of Akubra:
```
usage: akubra [<flags>]
Flags:
--help Show context-sensitive help (also try --help-long and --help-man).
-c, --conf=CONF Configuration file e.g.: "conf/dev.yaml"
```
### Example:
```
akubra -c devel.yaml
```
## How it works?
Once a request comes to our proxy we copy all its headers and create pipes for
body streaming to each endpoint. If any endpoint returns a positive response it's
immediately returned to a client. If all endpoints return an error, then the
first response is passed to the client
If some nodes respond incorrectly we log which cluster has a problem, is it
storing or reading and where the erroneous file may be found. In that case
we also return positive response as stated above.
We also handle slow endpoint scenario. If there are more connections than safe
limit defined in configuration, the backend with most of them is taken out of
the pool and error is logged.
## Configuration ##
Configuration is read from a YAML configuration file with the following fields:
```yaml
# Listen interface and port e.g. "0:8000", "localhost:9090", ":80"
Listen: ":8080"
# List of backend URI's e.g. "http://s3.mydaracenter.org"
Backends:
- "http://s3.dc1.internal"
- "http://s3.dc2.internal"
# Limit of outgoing connections. When limit is reached, Akubra will omit external backend
# with greatest number of stalled connections
ConnLimit: 100
# Additional not AWS S3 specific headers proxy will add to original request
AdditionalResponseHeaders:
'Access-Control-Allow-Origin': "*"
'Access-Control-Allow-Credentials': "true"
'Access-Control-Allow-Methods': "GET, POST, OPTIONS"
'Access-Control-Allow-Headers': "DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type"
# Additional headers added to backend response
AdditionalRequestHeaders:
'Cache-Control': "public, s-maxage=600, max-age=600"
'X-Akubra-Version': '0.9.26'
# Read timeout on outgoing connections
ConnectionTimeout: "3s"
# Dial timeout on outgoing connections
ConnectionDialTimeout: "1s"
# Backend in maintenance mode. Akubra will skip this endpoint
# MaintainedBackend: "http://s3.dc2.internal"
# List request methods to be logged in synclog in case of backend failure
SyncLogMethods:
- PUT
- DELETE
```
## Limitations
* User's credentials have to be identical on every backend
* We do not support S3 partial uploads
<file_sep>package config
import (
"testing"
"github.com/go-yaml/yaml"
"github.com/stretchr/testify/assert"
)
type TestYaml struct {
Field YAMLURL
}
func TestYAMLURLParsingSuccessful(t *testing.T) {
correct := []byte(`field: http://golang.org:80/pkg/net`)
testyaml := TestYaml{}
err := yaml.Unmarshal(correct, &testyaml)
assert.NoError(t, err, "Should be correct")
}
func TestYAMLURLParsingFailure(t *testing.T) {
incorrect := []byte(`field: golang.org:80/pkg/net`)
testyaml := TestYaml{}
err := yaml.Unmarshal(incorrect, &testyaml)
assert.Error(t, err, "Missing protocol should return error")
}
func TestYAMLURLParsingEmpty(t *testing.T) {
incorrect := []byte(`field:`)
testyaml := TestYaml{}
err := yaml.Unmarshal(incorrect, &testyaml)
assert.NoError(t, err, "Should not even try to parse")
assert.Nil(t, testyaml.Field.URL, "Should be nil")
}
<file_sep>package httphandler
import (
"io"
"log"
"net/http"
"net/url"
"time"
"github.com/allegro/akubra/config"
"github.com/allegro/akubra/dial"
"github.com/allegro/akubra/transport"
)
// Handler implements http.Handler interface
type Handler struct {
config config.Config
roundTripper http.RoundTripper
mainLog *log.Logger
accessLog *log.Logger
}
func (h *Handler) closeBadRequest(w http.ResponseWriter) {
hj, ok := w.(http.Hijacker)
if !ok {
http.Error(w, "webserver doesn't support hijacking", http.StatusInternalServerError)
return
}
conn, _, err := hj.Hijack()
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
closeErr := conn.Close()
if closeErr != nil {
h.mainLog.Println(closeErr.Error())
return
}
}
func (h *Handler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
resp, err := h.roundTripper.RoundTrip(req)
if err != nil {
h.closeBadRequest(w)
w.WriteHeader(http.StatusBadRequest)
return
}
wh := w.Header()
for k, v := range resp.Header {
wh[k] = v
}
w.WriteHeader(resp.StatusCode)
_, copyErr := io.Copy(w, resp.Body)
defer func() {
if copyErr != nil {
h.mainLog.Printf("Cannot send response body reason: %q",
copyErr.Error())
}
}()
defer func() {
closeErr := resp.Body.Close()
if closeErr != nil {
h.mainLog.Printf("Cannot send response body reason: %q",
closeErr.Error())
}
}()
}
// NewHandler will create Handler
func NewHandler(conf config.Config) http.Handler {
mainlog := conf.Mainlog
rh := &responseMerger{
conf.Synclog,
mainlog,
conf.SyncLogMethodsSet}
connDuration, _ := time.ParseDuration(conf.ConnectionTimeout)
dialDuration, _ := time.ParseDuration(conf.ConnectionTimeout)
var dialer *dial.LimitDialer
dialer = dial.NewLimitDialer(conf.ConnLimit, connDuration, dialDuration)
if len(conf.MaintainedBackend) > 0 {
dialer.DropEndpoint(conf.MaintainedBackend)
}
httpTransport := &http.Transport{
Dial: dialer.Dial,
DisableKeepAlives: conf.KeepAlive,
MaxIdleConnsPerHost: int(conf.ConnLimit)}
backends := make([]*url.URL, len(conf.Backends))
for i, backend := range conf.Backends {
backends[i] = backend.URL
}
multiTransport := transport.NewMultiTransport(
httpTransport,
backends,
rh.handleResponses)
roundTripper := Decorate(
multiTransport,
HeadersSuplier(conf.AdditionalRequestHeaders, conf.AdditionalResponseHeaders),
AccessLogging(conf.Accesslog),
OptionsHandler,
)
return &Handler{
config: conf,
mainLog: mainlog,
accessLog: conf.Accesslog,
roundTripper: roundTripper,
}
}
<file_sep>package dial
import (
"fmt"
"net"
"sync"
"time"
)
type watchConn struct {
net.Conn
closeCallback func(net.Conn, error)
}
func (wc *watchConn) Close() error {
err := wc.Conn.Close()
if wc.closeCallback != nil {
wc.closeCallback(wc.Conn, err)
}
return err
}
// LimitDialer limits open connections by read and dial timeout. Also provides hard
// limit on number of open connections
type LimitDialer struct {
activeCons map[string]int64
limit int64
dialTimeout time.Duration
readTimeout time.Duration
droppedEndpoint string
countersMx sync.Mutex
}
// ErrSlowOrMaintained is returned if LimitDialer exceeds connection limit
var ErrSlowOrMaintained = fmt.Errorf("Slow or maintained endpoint")
func (d *LimitDialer) incrementCount(addr string) (int64, error) {
d.countersMx.Lock()
defer d.countersMx.Unlock()
_, ok := d.activeCons[addr]
if !ok {
d.activeCons[addr] = 0
}
d.activeCons[addr]++
if d.limitReached(addr) {
d.activeCons[addr]--
return d.activeCons[addr], ErrSlowOrMaintained
}
return d.activeCons[addr], nil
}
func (d *LimitDialer) decrementCount(addr string) {
d.countersMx.Lock()
defer d.countersMx.Unlock()
d.activeCons[addr]--
}
// checks if limit is reached and if given endpoint is most occupied
func (d *LimitDialer) limitReached(endpoint string) bool {
numOfAllConns := int64(0)
maxNumOfEndpointConns := int64(0)
mostLoadedEndpoint := ""
if endpoint == d.droppedEndpoint {
return true
}
for key, count := range d.activeCons {
numOfAllConns += count
if count > maxNumOfEndpointConns {
maxNumOfEndpointConns = count
mostLoadedEndpoint = key
}
}
if numOfAllConns > d.limit {
return mostLoadedEndpoint == endpoint
}
return false
}
// Dial connects to endpoint as net.Dial does, but also keeps track
// on number of connections
func (d *LimitDialer) Dial(network, addr string) (c net.Conn, err error) {
_, incErr := d.incrementCount(addr)
if incErr != nil {
return nil, incErr
}
var netconn net.Conn
if d.dialTimeout > 0 {
netconn, err = net.DialTimeout(network, addr, d.dialTimeout)
} else {
netconn, err = net.Dial(network, addr)
}
if err != nil {
d.decrementCount(addr)
return nil, err
}
if d.readTimeout > 0 {
deadlineErr := netconn.SetDeadline(time.Now().Add(d.readTimeout))
if deadlineErr != nil {
d.decrementCount(addr)
closeErr := netconn.Close()
if closeErr != nil {
return nil, fmt.Errorf("%s error during: %s", closeErr, deadlineErr)
}
return nil, deadlineErr
}
}
c = &watchConn{netconn, func(c net.Conn, e error) {
d.decrementCount(addr)
}}
return c, err
}
// DropEndpoint marks backend as dropped i.e. maintenance x
func (d *LimitDialer) DropEndpoint(endpoint string) {
d.droppedEndpoint = endpoint
}
// NewLimitDialer returns new `LimitDialer`.
func NewLimitDialer(limit int64, readTimeout, dialTimeout time.Duration) *LimitDialer {
return &LimitDialer{
activeCons: make(map[string]int64),
limit: limit,
dialTimeout: dialTimeout,
readTimeout: readTimeout,
}
}
<file_sep>VERSION := `cat VERSION`
LDFLAGS := -X main.version=$(VERSION)
all: lint test build
lint: deps-lint
gometalinter ./... \
--disable=gotype \
--disable=dupl \
--deadline=120s \
--enable=goimports \
--vendor
deps:
glide install
deps-lint: deps
go get github.com/alecthomas/gometalinter
gometalinter --install
build: deps lint
go build -v -ldflags "$(LDFLAGS)" .
test: deps
go test -v -race -cover $$(go list ./... | grep -v /vendor/)
clean:
go clean .
<file_sep>package dial
import (
"net"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
func TestLimitDialer(t *testing.T) {
addr := "198.18.0.254:80"
timeout := 10 * time.Millisecond
dialer := NewLimitDialer(0, timeout, timeout)
conn, err := dialer.Dial("tcp", addr)
assert.NotNil(t, err, "")
if !assert.Nil(t, conn) {
defer func() {
err := conn.Close()
assert.Nil(t, err)
}()
}
}
func autoListener(t *testing.T) (net.Listener, string) {
listener, err := net.Listen("tcp", "127.0.0.1:0")
if err != nil {
t.Error(err.Error())
return nil, ""
}
return listener, listener.Addr().String()
}
func TestLimitDialerMostLoadedEndpoint(t *testing.T) {
timeout := time.Second
l1, addr1 := autoListener(t)
if l1 != nil {
defer func() {
err := l1.Close()
assert.Nil(t, err)
}()
}
l2, addr2 := autoListener(t)
if l2 != nil {
defer func() {
err := l2.Close()
assert.Nil(t, err)
}()
}
dialer := NewLimitDialer(2, timeout, timeout)
conn1, c1Err := dialer.Dial("tcp", addr1)
if assert.NotNil(t, conn1) {
defer func() {
err := conn1.Close()
assert.Nil(t, err)
}()
}
assert.Nil(t, c1Err)
conn2, c2Err := dialer.Dial("tcp", addr2)
if assert.NotNil(t, conn2) {
defer func() {
err := conn2.Close()
assert.Nil(t, err)
}()
}
assert.Nil(t, c2Err)
conn3, c3Err := dialer.Dial("tcp", addr2)
if !assert.Nil(t, conn3) {
defer func() {
err := conn3.Close()
assert.Nil(t, err)
}()
}
assert.NotNil(t, c3Err)
}
func TestLimitDialerConcurrency(t *testing.T) {
l, addr := autoListener(t)
if l != nil {
defer func() {
err := l.Close()
assert.Nil(t, err)
}()
}
timeout := time.Second
dialer := NewLimitDialer(4, timeout, timeout)
gotErr := make(chan bool)
for i := 0; i < 5; i++ {
go func() {
_, err := dialer.Dial("tcp", addr)
if err != nil {
gotErr <- true
}
}()
}
select {
case e := <-gotErr:
assert.True(t, e)
case <-time.After(timeout):
t.Error("At least one dial should return error")
}
}
<file_sep>package main
import (
"fmt"
"log"
"net"
"net/http"
"time"
"github.com/alecthomas/kingpin"
"gopkg.in/tylerb/graceful.v1"
"github.com/allegro/akubra/config"
"github.com/allegro/akubra/httphandler"
)
var (
// filled by linker
version = "development"
// CLI flags
configFile = kingpin.
Flag("config", "Configuration file e.g.: \"conf/dev.yaml\"").
Short('c').
Required().
ExistingFile()
)
func main() {
versionString := fmt.Sprintf("Akubra (%s version)", version)
kingpin.Version(versionString)
kingpin.Parse()
log.Println(versionString)
conf, err := config.Configure(*configFile)
if err != nil {
log.Fatalf("Improperly configured %s", err)
}
mainlog := conf.Mainlog
mainlog.Printf("starting on port %s", conf.Listen)
mainlog.Printf("connlimit %v", conf.ConnLimit)
mainlog.Printf("backends %s", conf.Backends)
srv := newService(conf)
startErr := srv.start()
if startErr != nil {
mainlog.Printf("Could not start service, reason: %q", startErr.Error())
}
}
type service struct {
config config.Config
}
func (s *service) start() error {
handler := httphandler.NewHandler(s.config)
srv := &graceful.Server{
Server: &http.Server{
Addr: s.config.Listen,
Handler: handler,
},
Timeout: 10 * time.Second,
}
srv.SetKeepAlivesEnabled(true)
listener, err := net.Listen("tcp", s.config.Listen)
if err != nil {
panic(err)
}
return srv.Serve(listener)
}
func newService(cfg config.Config) *service {
return &service{config: cfg}
}
<file_sep>package config
import (
"fmt"
"io"
"io/ioutil"
"log"
"log/syslog"
"net/url"
"os"
set "github.com/deckarep/golang-set"
"github.com/go-yaml/yaml"
)
// YamlConfig contains configuration fields of config file
type YamlConfig struct {
// Listen interface and port e.g. "0:8000", "localhost:9090", ":80"
Listen string `yaml:"Listen,omitempty"`
// List of backend uri's e.g. "http:// s3.mydaracenter.org"
Backends []YAMLURL `yaml:"Backends,omitempty,flow"`
// Limit of outgoing connections. When limit is reached, akubra will omit external backend
// with greatest number of stalled connections
ConnLimit int64 `yaml:"ConnLimit,omitempty"`
// Additional not amazon specific headers proxy will add to original request
AdditionalRequestHeaders map[string]string `yaml:"AdditionalRequestHeaders,omitempty"`
// Additional headers added to backend response
AdditionalResponseHeaders map[string]string `yaml:"AdditionalResponseHeaders,omitempty"`
// Read timeout on outgoing connections
ConnectionTimeout string `yaml:"ConnectionTimeout,omitempty"`
// Dial timeout on outgoing connections
ConnectionDialTimeout string `yaml:"ConnectionDialTimeout,omitempty"`
// Backend in maintenance mode. Akubra will not send data there
MaintainedBackend string `yaml:"MaintainedBackend,omitempty"`
// List request methods to be logged in synclog in case of backend failure
SyncLogMethods []string `yaml:"SyncLogMethods,omitempty"`
// Should we keep alive connections with backend servers
KeepAlive bool `yaml:"KeepAlive"`
}
// Config contains processed YamlConfig data
type Config struct {
YamlConfig
SyncLogMethodsSet set.Set
Synclog *log.Logger
Accesslog *log.Logger
Mainlog *log.Logger
}
// YAMLURL type fields in yaml configuration will parse urls
type YAMLURL struct {
*url.URL
}
// UnmarshalYAML parses strings to url.URL
func (j *YAMLURL) UnmarshalYAML(unmarshal func(interface{}) error) error {
var s string
if err := unmarshal(&s); err != nil {
return err
}
url, err := url.Parse(s)
if url.Host == "" {
return fmt.Errorf("url should match proto:// host[:port]/path scheme, got %q", s)
}
j.URL = url
return err
}
// Parse json config
func parseConf(file io.Reader) (YamlConfig, error) {
rc := YamlConfig{}
bs, err := ioutil.ReadAll(file)
if err != nil {
return rc, err
}
err = yaml.Unmarshal(bs, &rc)
return rc, err
}
func setupLoggers(conf *Config) error {
accesslog, slErr := syslog.NewLogger(syslog.LOG_LOCAL0, 0)
conf.Accesslog = accesslog
conf.Accesslog.SetPrefix("")
if slErr != nil {
return slErr
}
conf.Synclog, slErr = syslog.NewLogger(syslog.LOG_LOCAL1, 0)
conf.Synclog.SetPrefix("")
if slErr != nil {
return slErr
}
conf.Mainlog, slErr = syslog.NewLogger(syslog.LOG_LOCAL2, log.LstdFlags)
conf.Mainlog.SetPrefix("main")
return slErr
}
// Configure parse configuration file
func Configure(configFilePath string) (conf Config, err error) {
confFile, err := os.Open(configFilePath)
if err != nil {
return
}
yconf, err := parseConf(confFile)
if err != nil {
return
}
conf.YamlConfig = yconf
if len(conf.SyncLogMethods) > 0 {
conf.SyncLogMethodsSet = set.NewThreadUnsafeSet()
for _, v := range conf.SyncLogMethods {
conf.SyncLogMethodsSet.Add(v)
}
} else {
conf.SyncLogMethodsSet = set.NewThreadUnsafeSetFromSlice(
[]interface{}{"PUT", "GET", "HEAD", "DELETE", "OPTIONS"})
}
err = setupLoggers(&conf)
return
}
<file_sep>package httphandler
import (
"encoding/json"
"io"
"io/ioutil"
"log"
"github.com/allegro/akubra/transport"
set "github.com/deckarep/golang-set"
)
type responseMerger struct {
syncerrlog *log.Logger
runtimeLog *log.Logger
methodSetFilter set.Set
}
func (rd *responseMerger) synclog(r, successfulTup *transport.ReqResErrTuple) {
// don't log if request method was not included in configuration
if rd.methodSetFilter == nil || !rd.methodSetFilter.Contains(r.Req.Method) {
return
}
// do not log if backend response was successful
if !r.Failed {
return
}
// do not log if there was no successful response
if successfulTup == nil {
return
}
// log error entry
errorMsg := "No error"
if r.Err != nil {
errorMsg = r.Err.Error()
}
syncLogMsg := NewSyncLogMessageData(
r.Req.Method,
r.Req.Host,
successfulTup.Req.URL.Path,
successfulTup.Req.Host,
r.Req.Header.Get("User-Agent"),
errorMsg)
logMsg, err := json.Marshal(syncLogMsg)
if err != nil {
return
}
rd.syncerrlog.Println(string(logMsg))
}
func (rd *responseMerger) handleFailedResponces(
tups []*transport.ReqResErrTuple,
out chan<- *transport.ReqResErrTuple,
alreadysent bool,
successfulTup *transport.ReqResErrTuple,
logMethodSet set.Set) bool {
for _, r := range tups {
errorMsg := "No error"
if r.Err != nil {
errorMsg = r.Err.Error()
}
rd.runtimeLog.Printf("RGW resp %q, %q, %q, %t, %q",
r.Req.URL.Path,
r.Req.Method,
r.Req.Host,
r.Failed,
errorMsg)
rd.synclog(r, successfulTup)
if !alreadysent {
out <- r
alreadysent = true
continue // don't discard body
}
// discard body
if r.Res != nil && r.Res.Body != nil {
_, err := io.Copy(ioutil.Discard, r.Res.Body)
if err != nil {
rd.runtimeLog.Printf("Could not discard body %s", err)
}
}
}
return alreadysent
}
func (rd *responseMerger) _handle(in <-chan *transport.ReqResErrTuple, out chan<- *transport.ReqResErrTuple) {
var successfulTup *transport.ReqResErrTuple
errs := []*transport.ReqResErrTuple{}
nonErrs := []*transport.ReqResErrTuple{}
respPassed := false
for {
r, hasMore := <-in
if !hasMore {
break
}
// pass first successful answer to client
if !r.Failed && !respPassed {
// append additional headers
successfulTup = r
out <- r
respPassed = true
continue
}
if r.Err != nil {
errs = append(errs, r)
} else {
nonErrs = append(nonErrs, r)
}
}
respPassed = rd.handleFailedResponces(nonErrs, out, respPassed, successfulTup, rd.methodSetFilter)
rd.handleFailedResponces(errs, out, respPassed, successfulTup, rd.methodSetFilter)
}
func (rd *responseMerger) handleResponses(in <-chan *transport.ReqResErrTuple) *transport.ReqResErrTuple {
out := make(chan *transport.ReqResErrTuple, 1)
go func() {
rd._handle(in, out)
close(out)
}()
return <-out
}
| f88796a736022eeef1c7a6937c46647724436bd7 | [
"Markdown",
"Go",
"Makefile"
] | 9 | Markdown | bieli/akubra | 998cc214370ce6cd2882989bfe33e9a0100c6ec4 | 30eb5618cc10b3b9634894020fb28c4949f7f54a |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Query
{
class Query_Builder
{
}
}
<file_sep>## Vineyard Ground.NET ##
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Newtonsoft.Json;
namespace Query
{
public class Miner
{
public string run(string content)
{
var request = JsonConvert.DeserializeObject<Query_Request>(content);
var response = new Query_Response { objects = new List<object>() };
var item = new Dictionary<string, object>();
item["id"] = 10;
item["name"] = "cat";
response.objects.Add(item);
return JsonConvert.SerializeObject(response, Formatting.Indented);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Query
{
internal class Query_Request
{
public string trellis;
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using Query;
namespace ground
{
class Server
{
private TcpListener tcp_listener;
private Thread listen_thread;
ASCIIEncoding encoder = new ASCIIEncoding();
public void start(int port = 4000)
{
Console.WriteLine("Starting Ground");
tcp_listener = new TcpListener(IPAddress.Any, port);
listen_thread = new Thread(listen);
listen_thread.Start();
Console.WriteLine("Ground is listening on port " + port + ".");
}
private void listen()
{
tcp_listener.Start();
while (true)
{
TcpClient client = tcp_listener.AcceptTcpClient();
var thread = new Thread(handle_request);
thread.Start(client);
}
}
private void handle_request(object client)
{
TcpClient tcp_client = (TcpClient)client;
NetworkStream client_stream = tcp_client.GetStream();
byte[] message = new byte[4096];
while (true)
{
int bytes_read = 0;
try
{
//blocks until a client sends a message
bytes_read = client_stream.Read(message, 0, 4096);
}
catch
{
//a socket error has occured
break;
}
if (bytes_read == 0)
{
//the client has disconnected from the server
break;
}
//message has successfully been received
var raw_request = encoder.GetString(message, 0, bytes_read);
var raw_response = process_response(raw_request);
byte[] buffer = encoder.GetBytes(raw_response);
client_stream.Write(buffer, 0, buffer.Length);
client_stream.Flush();
tcp_client.Close();
}
}
string process_response(string raw_request)
{
Console.WriteLine(raw_request);
var content_regex = new Regex(@"(?<=\r\n\r\n).*", RegexOptions.Singleline);
var match = content_regex.Match(raw_request);
var content = match.Value;
var miner = new Miner();
var raw_response = miner.run(content);
Console.WriteLine(raw_response);
return "HTTP/1.1 200 OK\r\n"
+ "Content-Type: application/json\r\n"
+ "\r\n"
+ raw_response;
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Query
{
class Query_Renderer
{
}
}
| ea617e29468cba9fd41b63ba827a4ad8646df9d4 | [
"Markdown",
"C#"
] | 6 | C# | silentorb/ground_dotnet | 65f7745f4ccb5882a4b40d07d53d8152a002390d | 9d1d1fccc6b665490706824aa4cb828192ddf7cb |
refs/heads/master | <file_sep>// $(document).ready(function() {
// })
//-клик по гамбургеру
var menuToggle = document.querySelector('.cmn-toggle-switch');
var menuMobile = document.querySelector('#header-nav');
var menuLinks = menuMobile.children;
menuToggle.onclick = function(){
if( window.innerWidth < 992 ){
menuMobile.classList.toggle('active');
}
}
for (var i = 0; i < menuLinks.length; i++) {
menuLinks[i].onclick = function() {
if( window.innerWidth < 992 ){
menuMobile.classList.toggle('active');
menuToggle.classList.remove("active");
};
}
}
var toggleMenuClass = function(){
if( window.innerWidth >=992 ){
menuMobile.classList.remove('active');
}
}
toggleMenuClass();
window.onresize = function(event) {
menuMobile.classList.remove('active');
menuToggle.classList.remove("active")
toggleMenuClass();
} | 2976c06deb09e7869a2651cf78b366a5d19b9675 | [
"JavaScript"
] | 1 | JavaScript | Lareta064/MP-new | e95fb7075de5793d0e7f49060524804feb7be4b4 | a68c1c962175ff718931f5da16f67cc61f3ae08b |
refs/heads/master | <file_sep>Faça dois scripts para realizar o que segue. (1) O primeiro deve ler todos os arquivos de imagens contidos em uma pasta (diretório), guardar estes nomes em um arquivo teste e em seguida apagar as extensões do mesmo. (2) O segundo script deve ler o arquivo teste gerado e restaurar as extensões apagadas pelo script 1. Coloque o código no github e ponha o link no formulário a seguir.
<file_sep>for line in $(cat aux.txt)
do
filename=$(basename -- "$line")
filename="${filename%.*}"
mv "$filename" "$line"
done
rm aux.txt<file_sep>for f in *
do
filename=$(basename -- "$f")
if [ "$filename" != "retirarExt.sh" ] && [ "$filename" != "voltarExt.sh" ] && [ "$filename" != "aux.txt" ] && [ -f $filename ]
then
filename="${filename%.*}"
echo "$f" >> aux.txt
mv "$f" "$filename"
fi
done | 6413a797f71cb5a63effb9e9a21c686f09cc547d | [
"Markdown",
"Shell"
] | 3 | Markdown | brenaelric/ShellScriptTrabalho | 215536c4ef7ff8150962c6a037320d5d6984b4a8 | 5903c065cc7a8dfe8097d695f015ffc653e27f40 |
refs/heads/master | <repo_name>dtstbnow/honors_project<file_sep>/app/controllers/tags_controller.rb
class TagsController < ApplicationController
before_action :authenticate_user!
def rate
tag = Tag.find(params[:id])
tag.score += params[:score].to_i
tag.save
if tag.score == 0
tag.delete
end
redirect_to edit_image_url(tag.image_id)
end
end
<file_sep>/readme.txt
This is a Ruby On Rails project I developed for honors credit in my software engineering course.
The site is desinged to allow users to upload images, add descriptive tags, favorite images and reccomend to users images based on the tags of their favorited images.
The key files in the project are:
-app/controllers/rec_controller: the algorithm used to provide reccommendations
-app/controllers/images_controller.rb: The controller for posting, searching, and viewing images as well as editing their tags.
-db/migrate: This folder contains the information for all models used<file_sep>/app/models/image.rb
class Image < ActiveRecord::Base
mount_uploader :file, ImageUploader
has_many :tags
end
<file_sep>/app/controllers/users_controller.rb
require 'carrierwave/orm/activerecord'
class UsersController <ApplicationController
def index
@users = User.all
end
def new
@user = User.new
end
def create
upload = User.create(params.require(:user).permit(:avatar))
upload.save
end
end<file_sep>/app/controllers/images_controller.rb
class ImagesController < ApplicationController
  before_action :authenticate_user!, except: [:index]

  # Images shown per page in the index view.
  PAGE_SIZE = 5

  # Maps the submit-button label to the rating value it represents.
  RATING_FOR_COMMIT = { "Like" => 1, "Dislike" => -1 }.freeze

  # GET /images and GET /images/:offset -- paginated image list.
  def index
    @offset = params[:offset].to_i # nil.to_i == 0, so a missing param means page 0
    @images = Image.limit(PAGE_SIZE).offset(@offset * PAGE_SIZE)
  end

  # POST / -- search for images by image name or tag name (substring match).
  def search
    term = "%#{params[:search]}%"
    @images = Image.where("name like ?", term)
    Tag.where("name like ?", term).each do |tag|
      @images += Image.where(id: tag.image_id)
    end
    @images.uniq!
    render :index
  end

  # GET /images/new -- blank form-backing object for the upload form.
  def new
    @image = Image.new
  end

  # POST /images/new -- upload a new image.
  def create
    if params[:image].nil? || params[:image][:file].blank? || params[:image][:name].blank?
      flash[:alert] = "Missing fields"
      # The original redirected to images_new_url, a helper that does not
      # exist -- the new-image route is named 'image' in config/routes.rb.
      redirect_to image_url
    else
      # Image.create already persists the record; the original's extra
      # #save call was redundant.
      Image.create(params.require(:image).permit(:file, :name))
      redirect_to images_url
    end
  end

  # GET /images/:id/edit -- show an image with its tags and the current
  # user's rating (1 like, -1 dislike, 0 unrated).
  def edit
    @image = Image.find(params[:id])
    @tags = @image.tags
    @rating = rating_for(@image)
  end

  # POST /images/:id/edit -- toggle the user's like/dislike (params[:commit]
  # is the pressed button's label) and/or attach the comma-separated tag
  # names submitted in params[:tags], then re-render the edit view.
  def update
    image = Image.find(params[:id])
    toggle_favorite(image, params[:commit])
    add_tags(image, params[:tags])
    @image = image
    @tags = image.tags
    # Re-query the rating after the toggle; the original reused a relation
    # loaded before the change, so @rating showed 0 right after liking.
    @rating = rating_for(image)
    render :edit
  end

  private

  # The current user's Favorite row for +image+, or nil when unrated.
  def favorite_for(image)
    Favorite.where(user_id: current_user.id, image_id: image.id).first
  end

  # Rating value (1 / -1) the current user gave +image+, 0 when unrated.
  def rating_for(image)
    favorite = favorite_for(image)
    favorite ? favorite.rating : 0
  end

  # Apply a Like/Dislike button press: create a rating when none exists,
  # flip an opposite rating, and remove the rating entirely when the same
  # button is pressed twice. Any other commit value leaves state untouched.
  # (Replaces the original's two duplicated Like/Dislike branches.)
  def toggle_favorite(image, commit)
    desired = RATING_FOR_COMMIT[commit]
    return if desired.nil?

    favorite = favorite_for(image)
    if favorite.nil?
      current_user.favorites.append(Favorite.create(image: image, rating: desired))
    elsif favorite.rating != desired
      favorite.rating = desired
      favorite.save
    else
      favorite.destroy
    end
  end

  # Attach each comma-separated name in +raw_tags+ to +image+, skipping
  # names the image already has. The original used Tag.find with a hash,
  # which raises (find expects ids); its rescue branch then created a
  # duplicate tag on every submit. find_by performs the intended lookup.
  def add_tags(image, raw_tags)
    raw_tags.to_s.split(",").each do |name|
      next if Tag.find_by(image_id: image.id, name: name)
      image.tags.push(Tag.create!(name: name, score: 1))
    end
  end
end
<file_sep>/config/routes.rb
Rails.application.routes.draw do
  devise_for :users

  root to: 'images#index', as: 'home'
  post '/', to: 'images#search', as: 'search'

  get 'tags/:id/:score', to: 'tags#rate', as: 'rate_tag'

  get 'users', to: 'users#index', as: 'users'
  get 'users/new', to: 'users#new', as: 'user'
  post 'users/new', to: 'users#create'
  get 'users/rec', to: 'rec#index', as: 'rec'
  get 'users/rec/history', to: 'rec#history', as: 'rec_history'

  get 'images', to: 'images#index', as: 'images'
  # BUG FIX: the static 'images/new' routes must be declared BEFORE the
  # dynamic 'images/:offset' route. Rails matches routes top-down, so the
  # original order sent GET /images/new to images#index with offset == "new"
  # and the upload form was unreachable. The digit constraint additionally
  # keeps ':offset' from swallowing any other future static segment.
  get 'images/new', to: 'images#new', as: 'image'
  post 'images/new', to: 'images#create'
  get 'images/:offset', to: 'images#index', as: 'images_page', constraints: { offset: /\d+/ }
  get 'images/:id/edit', to: 'images#edit', as: 'edit_image'
  post '/images/:id/edit', to: 'images#update'
end
<file_sep>/app/controllers/rec_controller.rb
# Recommends images to the signed-in user based on the tags of images they
# have rated. All actions require authentication.
class RecController < ApplicationController
before_action :authenticate_user!
# GET /users/rec/history -- the user's liked favorites (rating == 1).
# NOTE(review): assigns Favorite records (not Images) to @images -- the
# history view presumably dereferences favorite.image; confirm against view.
def history
@images = current_user.favorites.where(rating: 1)
render :history
end
# GET /users/rec -- build tag-combination scores from the user's favorites,
# then collect up to ~6 recommended images from the best-scoring combinations.
def index
rankings = Hash.new
#Loops through the user's favorited images and creates a hash with the tags names and their score.
tags = []
for x in current_user.favorites
for y in x.image.tags
if not rankings[y.name]
rankings[y.name] = y.score
tags.append(y.name)
else
rankings[y.name] += y.score
end
end
end
window_size =[5, tags.size].min#The number of tags we are looking for at a time
res = []
score_map = {}
# Enumerate every tag combination of size window_size..1 and score it as the
# sum of its members' rankings. Combinatorial -- see the author's note below.
while window_size >0 #Score map is a combination of tags and their scores. This part could be improved by putting a stricter cap on the number of loops, as it can run quite often, combining it with the actual db query and getting it to run until it retuns x results
for combination in tags.combination(window_size)
combination.to_set
if not score_map[combination]
score = 0
for r in combination
score += rankings[r]
end
score_map[combination] = score
end
end
window_size -=1
end
# Sort combinations by descending score, then gather recommendations,
# de-duplicating by image name.
score_map = Hash[score_map.sort_by{|k, score| -score}]
seen = []
score_map.each{|element,score|
result = calculate_reccomendations(element)
for r in result
if not seen.include? r.name
res.append(r)
seen.append(r.name)
end
# NOTE(review): this break only exits the inner for loop; the outer each
# keeps iterating over remaining combinations -- confirm that is intended.
if res.length > 5
break
end
end
}
@images = res
end
# Returns the keys of +input+ (pairs) ordered by ascending score.
# NOTE(review): not referenced anywhere in this file -- possibly dead code.
def sorter(input)
output = []
input = input.sort_by{|k, score| score}
input.each{ |k, v| output.append(k)}
return output
end
# Intersection query: returns images that carry ALL of the given tag names,
# excluding images the user already liked. Works by walking Tag rows grouped
# (after sort) by tag name and intersecting the image-id sets.
def calculate_reccomendations(tags)
##Returns all images that contain all of the tags that are passed in
t = Tag.where(name: tags.flatten)
favorites = []
for el in current_user.favorites
if el.rating == 1 #1 means the user favorited the images as opposed to -1
favorites.append(el.image_id)
end
end
t = t.sort_by{|q| q.name}
final = false#final = array of image id's
last_seen = []#Array of image id's that contain the last tag
name = false#Name of the current working tag
t.delete_if{|el| favorites.include?(el.image_id)}#We don't want to reccomend an image the user alread favorited
for tag in t
if not final#First tag we are looping through
if not name#this is only used the first loop
name = tag.name
last_seen = [tag.image_id]
elsif tag.name != name#We are changing tags, assign final to all the id's we have found so far, we only remove from final, never add to it
final = last_seen
last_seen = [tag.image_id]
name = tag.name
else
last_seen.append(tag.image_id)
end
elsif tag.name != name#We have final, and are switching tags again
final.delete_if{|el| not last_seen.include?(el)}#Remove from final any id's that we didn't see in the last tag
name = tag.name
last_seen = [tag.image_id]
else
last_seen.append(tag.image_id)
end
end
if not final#edge case, if we are only passed in one tag
if not last_seen
return []
else
final = last_seen
end
else
final.delete_if{|el| not last_seen.include?(el)}
end
res = Image.where(id: final)
res = res.uniq#remove any duplicate images
return res
end
end<file_sep>/app/controllers/static_pages_controller.rb
# Serves static content pages; no authentication or model access.
class StaticPagesController < ApplicationController
def home
# Renders static_pages/home.html.erb by default.
end
end
<file_sep>/app/models/tag.rb
# A tag attached to a single image. The controllers also read/write a
# +name+ and a +score+ column (see ImagesController#update, RecController).
class Tag < ActiveRecord::Base
belongs_to :image
end
| bc2066e33716e562177877064e7d479c227779b7 | [
"Text",
"Ruby"
] | 9 | Ruby | dtstbnow/honors_project | 74b2d04b3d5e02f6b087827c605f5b5cffd8eeca | ba1e1dc99602065e6f15594f5924f4212b0d345a |
refs/heads/master | <repo_name>mkasiak/sfdx-kpn-orders-assignment<file_sep>/force-app/main/default/lwc/orderProducts/orderProducts.js
import {LightningElement, api, wire} from 'lwc';
import pubsub from 'c/pubsub';
import getOrderProducts from '@salesforce/apex/OrderProductsController.getOrderProducts';
import {refreshApex} from '@salesforce/apex';
import {ShowToastEvent} from 'lightning/platformShowToastEvent';
import {
createRecord,
updateRecord
} from 'lightning/uiRecordApi';
import ID_FIELD from '@salesforce/schema/OrderItem.Id';
import ORDER_ID_FIELD from '@salesforce/schema/OrderItem.OrderId';
import PRODUCT_FIELD from '@salesforce/schema/OrderItem.Product2Id';
import UNIT_PRICE_FIELD from '@salesforce/schema/OrderItem.UnitPrice';
import QUANTITY_FIELD from '@salesforce/schema/OrderItem.Quantity';
// Column definitions for the order-products datatable. Field names must
// match the fields returned by Apex getOrderProducts.
const PRODUCT_COLUMNS = [
{label: 'Name', fieldName: 'Product_Name__c', type: 'text'},
{label: 'Unit Price', fieldName: 'UnitPrice', type: 'currency'},
{label: 'Quantity', fieldName: 'Quantity', type: 'number'},
{label: 'Total Price', fieldName: 'TotalPrice', type: 'currency'}
];
// Lists the OrderItem records of the order identified by recordId and
// reacts to 'productAdded' pubsub events: an already-present product gets
// its quantity bumped by one, otherwise a new OrderItem is created.
export default class OrderProducts extends LightningElement {
// Order record id supplied by the record page.
@api recordId;
orderProductColumns = PRODUCT_COLUMNS;
// Reactive wire: re-queries when recordId changes; rows land in
// this.orderProducts.data.
@wire(getOrderProducts, {orderId: '$recordId'})
orderProducts;
connectedCallback() {
this.register();
}
// Subscribe to product-selection events from the sibling component.
// NOTE(review): never unregistered -- confirm pubsub cleans up on
// disconnect, otherwise this leaks a handler per mount.
register() {
pubsub.register('productAdded', this.handleProductAdded.bind(this));
}
// event.message is a JSON-serialized product record.
handleProductAdded(event) {
const addedProduct = JSON.parse(event.message);
this.processAddition(addedProduct);
}
// Bump quantity if the product is already on the order; otherwise add it.
// NOTE(review): compares addedProduct.Product2Id, but prepareFieldsForCreate
// reads addedProduct.Id -- confirm the published payload carries both.
processAddition(addedProduct) {
for (let i = 0; i < this.orderProducts.data.length; i++) {
let currentProduct = this.orderProducts.data[i];
if (currentProduct.Product2Id === addedProduct.Product2Id) {
this.increaseQuantityByOne(currentProduct);
return;
}
}
this.addOrderProduct(addedProduct);
}
// Persist Quantity+1 for an existing OrderItem, then refresh the wire cache.
increaseQuantityByOne(currentProduct) {
const recordInput = {
'fields': this.prepareFieldsForUpdate(currentProduct)
};
updateRecord(recordInput)
.then(() => {
this.displaySuccessToast();
return refreshApex(this.orderProducts).then(() => {
});
}).catch(error => {
this.displayErrorToast(error);
});
}
// Field map for updateRecord: row id plus incremented quantity.
prepareFieldsForUpdate(orderProduct) {
const fields = {};
fields[ID_FIELD.fieldApiName] = orderProduct.Id;
fields[QUANTITY_FIELD.fieldApiName] = orderProduct.Quantity + 1;
return fields;
}
// Create a brand-new OrderItem for a product not yet on the order.
addOrderProduct(addedProduct) {
const recordInput = {
'apiName': 'OrderItem',
'fields': this.prepareFieldsForCreate(addedProduct)
};
createRecord(recordInput)
.then(() => {
this.displaySuccessToast();
return refreshApex(this.orderProducts).then(() => {
});
}).catch(error => {
this.displayErrorToast(error);
});
}
// Field map for createRecord: order id, product id, unit price, quantity 1.
prepareFieldsForCreate(addedProduct) {
const fields = {};
fields[ORDER_ID_FIELD.fieldApiName] = this.recordId;
fields[PRODUCT_FIELD.fieldApiName] = addedProduct.Id;
fields[UNIT_PRICE_FIELD.fieldApiName] = addedProduct.UnitPrice;
fields[QUANTITY_FIELD.fieldApiName] = 1;
return fields;
}
displaySuccessToast() {
this.dispatchEvent(
new ShowToastEvent({
title: 'Success',
message: 'Order is updated',
variant: 'success'
})
);
}
// Surfaces a DML/refresh failure; assumes the framework error shape
// (error.body.message).
displayErrorToast(error) {
this.dispatchEvent(
new ShowToastEvent({
title: 'Error updating or reloading record',
message: error.body.message,
variant: 'error'
})
);
}
}
| 943dca3a784363701e9319fa5fbf577acc9cd5dc | [
"JavaScript"
] | 1 | JavaScript | mkasiak/sfdx-kpn-orders-assignment | 2672d62198df17cb6a35b4804e2c0b7ad115d9cc | 4cce5207248416c08b0e83870684e1c8038ce67e |
refs/heads/master | <repo_name>vioAron/MyRx<file_sep>/RestrictingOperatorsApp/Program.cs
using System;
using System.Reactive.Disposables;
using System.Reactive.Linq;
using System.Reactive.Subjects;
namespace RestrictingOperatorsApp
{
// Console demos of Rx restricting operators (Distinct, DistinctUntilChanged,
// Skip/TakeUntil). Main runs whichever demos are uncommented.
class Program
{
static void Main()
{
//Distinct();
//Distinct2();
//DistinctUntilChanged();
//TakeUntilApp.SkipUntil1();
TakeUntilApp.SkipUntil1();
TakeUntilApp.TakeUntil1();
Console.ReadKey();
}
// DistinctUntilChanged suppresses only CONSECUTIVE duplicates:
// feed 1,1,2,1,1,2 prints 1,2,1,2.
private static void DistinctUntilChanged()
{
var subject = new Subject<int>();
var distinct = subject.DistinctUntilChanged().Subscribe(Console.WriteLine);
subject.OnNext(1);
subject.OnNext(1);
subject.OnNext(2);
subject.OnNext(1);
subject.OnNext(1);
subject.OnNext(2);
distinct.Dispose();
}
// Distinct suppresses ALL duplicates: feed 1,1,2,3 prints 1,2,3.
private static void Distinct2()
{
var subject = new Subject<int>();
subject.Distinct().Subscribe(Console.WriteLine);
subject.OnNext(1);
subject.OnNext(1);
subject.OnNext(2);
subject.OnNext(3);
subject.OnCompleted();
}
// Distinct over a synchronous Observable.Create source:
// prints "first", "second", "third" once each.
private static void Distinct()
{
var observable = Observable.Create<string>(observer =>
{
observer.OnNext("first");
observer.OnNext("first");
observer.OnNext("first");
observer.OnNext("second");
observer.OnNext("third");
return Disposable.Empty;
});
observable.Distinct().Subscribe(Console.WriteLine);
}
}
}
<file_sep>/ConsoleApplication2/MyRepository.cs
using System;
using System.Reactive.Subjects;
namespace ConsoleApplication2
{
public class MyRepository
{
    // Lazily-created process-wide instance; Lazy<T> makes creation thread-safe.
    private static readonly Lazy<MyRepository> SingletonHolder = new Lazy<MyRepository>();

    // ReplaySubject re-emits every value ever published to each new subscriber.
    private readonly ReplaySubject<string> _strings = new ReplaySubject<string>();

    /// <summary>Seeds the stream with the strings "0" through "9".</summary>
    public MyRepository()
    {
        var i = 0;
        while (i < 10)
        {
            _strings.OnNext(i.ToString());
            i++;
        }
    }

    /// <summary>Singleton accessor (note: the constructor is still public).</summary>
    public static MyRepository Instance
    {
        get { return SingletonHolder.Value; }
    }

    /// <summary>The replayed stream of every string added so far.</summary>
    public IObservable<string> StringsObservable
    {
        get { return _strings; }
    }

    /// <summary>Publishes a new string to current and future subscribers.</summary>
    public void Add(string s)
    {
        _strings.OnNext(s);
    }
}
}
<file_sep>/ConsoleApplication2/MyObserver.cs
using System;
namespace ConsoleApplication2
{
// Trivial console observer: logs each value, an error's message, or a
// completion marker.
public class MyObserver : IObserver<string>
{
// Called once per emitted value.
public void OnNext(string value)
{
Console.WriteLine(value);
}
// Terminal: only the exception's message is printed (stack trace dropped).
public void OnError(Exception error)
{
Console.WriteLine(error.Message);
}
// Terminal: prints a fixed completion marker.
public void OnCompleted()
{
Console.WriteLine("OnCompleted");
}
}
}<file_sep>/CombiningSeqApp/Program.cs
using System;
using System.Reactive.Linq;
using System.Reactive.Subjects;
namespace CombiningSeqApp
{
// Console demos of Rx sequence-combining operators
// (Concat, Repeat, StartWith, Amb, Merge).
class Program
{
static void Main()
{
//Concat();
//Repeat();
//StartWith();
//Amb();
Merge();
Console.ReadKey();
}
// Merge interleaves two timed sequences as values arrive; s2's values are
// offset by +100 so the two sources are distinguishable in the output.
private static void Merge()
{
var s1 = Observable.Interval(TimeSpan.FromMilliseconds(250)).Take(3);
var s2 = Observable.Interval(TimeSpan.FromMilliseconds(150))
.Take(5)
.Select(i => i + 100);
s1.Merge(s2).Subscribe(Console.WriteLine,
() => Console.WriteLine("Completed"));
}
// Amb propagates only the sequence that produces a value first -- here s1
// wins (OnNext(1) fires first), so only s1's values and completion print.
private static void Amb()
{
var s1 = new Subject<int>();
var s2 = new Subject<int>();
var s3 = new Subject<int>();
var result = Observable.Amb(s1, s2, s3);
result.Subscribe(Console.WriteLine, () => Console.WriteLine("Completed"));
s1.OnNext(1);
s2.OnNext(2);
s3.OnNext(3);
s1.OnNext(1);
s2.OnNext(2);
s3.OnNext(3);
s1.OnCompleted();
s2.OnCompleted();
s3.OnCompleted();
}
// StartWith prepends -3,-2,-1 before the range 0..2.
private static void StartWith()
{
var s = Observable.Range(0, 3);
s.StartWith(-3, -2, -1).Subscribe(Console.WriteLine, () => Console.WriteLine("Completed"));
}
// Repeat replays the range three times: 0,1,2,0,1,2,0,1,2.
private static void Repeat()
{
var s = Observable.Range(0, 3);
s.Repeat(3).Subscribe(Console.WriteLine, () => Console.WriteLine("Completed"));
}
// Concat emits all of s1 (0..2), then all of s2 (5..9).
private static void Concat()
{
var s1 = Observable.Range(0, 3);
var s2 = Observable.Range(5, 5);
s1.Concat(s2).Subscribe(Console.WriteLine);
}
}
}
<file_sep>/TestingApp/ViewModel/MyViewModel.cs
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Reactive.Concurrency;
using System.Reactive.Linq;
using System.Text;
using System.Threading.Tasks;
using TestingApp.Core;
namespace TestingApp.ViewModel
{
// View model that streams prices for a symbol into an observable collection
// bound to the UI, with a 10-second inactivity timeout.
// NOTE(review): depends on the concrete TestSchedulers rather than the
// ISchedulerProvider abstraction -- consider widening the constructor type.
public class MyViewModel
{
private readonly IMyModel _myModel;
private readonly TestSchedulers _schedulers;
private readonly ObservableCollection<decimal> _prices;
public MyViewModel(IMyModel myModel, TestSchedulers schedulers)
{
_myModel = myModel;
_schedulers = schedulers;
_prices = new ObservableCollection<decimal>();
}
// Prices received so far, in arrival order (UI-bound).
public ObservableCollection<decimal> Prices
{
get { return _prices; }
}
// Subscribes on the thread pool, marshals results to the dispatcher, and
// flips IsConnected to false if no price arrives within 10 seconds.
public void Show(string symbol)
{
//TODO: resource mgt, exception handling etc...
_myModel.PriceStream(symbol)
.SubscribeOn(_schedulers.ThreadPool)
.ObserveOn(_schedulers.Dispatcher)
.Timeout(TimeSpan.FromSeconds(10), _schedulers.ThreadPool)
.Subscribe(price =>
Prices.Add(price),
ex =>
{
// Only a timeout is handled; other errors are silently swallowed
// (see the author's TODO above).
if (ex is TimeoutException)
{
IsConnected = false;
}
});
IsConnected = true;
}
// True after Show() is called; reset only by a timeout.
public bool IsConnected { get; set; }
}
}
<file_sep>/TimeShiftedSeqApp/Program.cs
using System;
using System.Reactive.Linq;
namespace TimeShiftedSeqApp
{
// Console demos of time-shifting Rx operators (Buffer, Delay, Sample, Timeout).
class Program
{
static void Main()
{
//Buffer();
//Delay();
//Sample();
Timeout();
Console.ReadKey();
}
// A fast 10-value burst followed by a slow 2s interval: the 1s Timeout
// lets the fast part through, then errors when the slow part stalls.
private static void Timeout()
{
var source = Observable.Interval(TimeSpan.FromMilliseconds(100)).Take(10)
.Concat(Observable.Interval(TimeSpan.FromSeconds(2)));
var timeout = source.Timeout(TimeSpan.FromSeconds(1));
timeout.Subscribe(Console.WriteLine, Console.WriteLine, () => Console.WriteLine("Completed"));
}
// Sample emits the most recent value of a 150ms ticker once per second.
private static void Sample()
{
var source = Observable.Interval(TimeSpan.FromMilliseconds(150));
var onePerSecond = source.Sample(TimeSpan.FromSeconds(1));
Console.WriteLine("~ started at {0}", DateTime.Now.TimeOfDay);
onePerSecond.Subscribe(l => Console.WriteLine("value = {0} at {1}", l, DateTime.Now.TimeOfDay));
}
// Delay time-shifts every notification of a 1s ticker by 10 seconds.
private static void Delay()
{
var source = Observable.Interval(TimeSpan.FromSeconds(1));
var delayed = source.Delay(TimeSpan.FromSeconds(10));
Console.WriteLine("~ started at {0}", DateTime.Now.TimeOfDay);
delayed.Subscribe(l => Console.WriteLine(DateTime.Now.TimeOfDay));
}
// Buffer demos: the first groups by "10 seconds OR 5 items, whichever
// first"; the second uses count 3 with skip 5 (overlap/gap windows).
private static void Buffer()
{
var seq1 = Observable.Interval(TimeSpan.FromSeconds(1)).Take(10);
var seq2 = Observable.Interval(TimeSpan.FromMilliseconds(200)).Take(30);
seq1.Concat(seq2).Buffer(TimeSpan.FromSeconds(10), 5).Subscribe(buffer =>
{
Console.WriteLine("Buffered:");
foreach (var l in buffer)
{
Console.WriteLine(l);
}
});
seq1.Buffer(3, 5).Subscribe(buffer =>
{
Console.WriteLine("Buffered:");
foreach (var l in buffer)
{
Console.WriteLine(l);
}
}, () => Console.WriteLine("Completed"));
}
}
}
<file_sep>/RestrictingOperatorsApp/TakeUntilApp.cs
using System;
using System.Reactive;
using System.Reactive.Linq;
using System.Reactive.Subjects;
namespace RestrictingOperatorsApp
{
class TakeUntilApp
{
    /// <summary>
    /// SkipUntil demo: source values are ignored until the trigger fires, so
    /// the output is 4..8 followed by "Completed".
    /// </summary>
    public static void SkipUntil1()
    {
        Console.WriteLine("SkipUntil...");
        var source = new Subject<int>();
        var trigger = new Subject<Unit>();
        source.SkipUntil(trigger).Subscribe(Console.WriteLine, () => Console.WriteLine("Completed"));
        PushDemoValues(source, trigger);
    }

    /// <summary>
    /// TakeUntil demo: source values flow until the trigger fires, so the
    /// output is 1..3 followed by "Completed".
    /// </summary>
    public static void TakeUntil1()
    {
        Console.WriteLine("TakeUntil...");
        var source = new Subject<int>();
        var trigger = new Subject<Unit>();
        source.TakeUntil(trigger).Subscribe(Console.WriteLine, () => Console.WriteLine("Completed"));
        PushDemoValues(source, trigger);
    }

    // Shared feed: 1..3, then the trigger fires, then 4..8, then completion.
    private static void PushDemoValues(Subject<int> source, Subject<Unit> trigger)
    {
        for (var i = 1; i <= 3; i++)
        {
            source.OnNext(i);
        }
        trigger.OnNext(Unit.Default);
        for (var i = 4; i <= 8; i++)
        {
            source.OnNext(i);
        }
        source.OnCompleted();
    }
}
}<file_sep>/TestingApp/Core/ISchedulerProvider.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reactive.Concurrency;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Reactive.Testing;
namespace TestingApp.Core
{
// Abstraction over the Rx schedulers so production code can be driven by
// TestScheduler instances in unit tests (see TestSchedulers below).
public interface ISchedulerProvider
{
IScheduler CurrentThread { get; }
IScheduler Dispatcher { get; }
IScheduler Immediate { get; }
IScheduler NewThread { get; }
IScheduler ThreadPool { get; }
//IScheduler TaskPool { get; }
}
// Production implementation: each property forwards to the corresponding
// real Rx scheduler.
public sealed class SchedulerProvider : ISchedulerProvider
{
public IScheduler CurrentThread
{
get { return Scheduler.CurrentThread; }
}
public IScheduler Dispatcher
{
get { return DispatcherScheduler.Instance; }
}
public IScheduler Immediate
{
get { return Scheduler.Immediate; }
}
public IScheduler NewThread
{
get { return Scheduler.NewThread; }
}
public IScheduler ThreadPool
{
get { return Scheduler.ThreadPool; }
}
//public IScheduler TaskPool { get { return Scheduler.TaskPool; } }
}
// Test implementation: every scheduler is an independent TestScheduler.
// The interface is implemented explicitly (returning IScheduler) while the
// public properties expose the concrete TestScheduler so tests can advance
// virtual time on each one individually.
public sealed class TestSchedulers : ISchedulerProvider
{
private readonly TestScheduler _currentThread = new TestScheduler();
private readonly TestScheduler _dispatcher = new TestScheduler();
private readonly TestScheduler _immediate = new TestScheduler();
private readonly TestScheduler _newThread = new TestScheduler();
private readonly TestScheduler _threadPool = new TestScheduler();
#region Explicit implementation of ISchedulerService
IScheduler ISchedulerProvider.CurrentThread { get { return _currentThread; } }
IScheduler ISchedulerProvider.Dispatcher { get { return _dispatcher; } }
IScheduler ISchedulerProvider.Immediate { get { return _immediate; } }
IScheduler ISchedulerProvider.NewThread { get { return _newThread; } }
IScheduler ISchedulerProvider.ThreadPool { get { return _threadPool; } }
#endregion
public TestScheduler CurrentThread { get { return _currentThread; } }
public TestScheduler Dispatcher { get { return _dispatcher; } }
public TestScheduler Immediate { get { return _immediate; } }
public TestScheduler NewThread { get { return _newThread; } }
public TestScheduler ThreadPool { get { return _threadPool; } }
}
}
<file_sep>/ConsoleApplication2/MyObservable.cs
using System;
using System.Globalization;
using System.Reactive.Disposables;
namespace ConsoleApplication2
{
public class MyObservable : IObservable<string>
{
    /// <summary>
    /// Demo observable: synchronously pushes the strings "0".."9" to the
    /// subscriber and then terminates the sequence with OnError.
    /// </summary>
    /// <param name="observer">Receives ten values followed by an error.</param>
    /// <returns>An empty disposable; all work happens synchronously in Subscribe.</returns>
    public IDisposable Subscribe(IObserver<string> observer)
    {
        for (var i = 0; i < 10; i++)
        {
            observer.OnNext(i.ToString(CultureInfo.InvariantCulture));
        }
        // FIX: replaced the placeholder message "aaaaaaaaaaaaaa" with a
        // meaningful one so subscribers' OnError handlers log something useful.
        observer.OnError(new Exception("MyObservable: demo error raised after emitting 10 values."));
        return Disposable.Empty;
    }
}
}
<file_sep>/TestingApp/Program.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reactive.Concurrency;
using System.Reactive.Linq;
using Microsoft.Reactive.Testing;
using NUnit.Framework;
namespace TestingApp
{
// Demos and NUnit tests for Rx's TestScheduler: scheduling work in virtual
// time and advancing the clock manually.
class Program
{
static void Main()
{
//PrintISchedulerParamMethods();
//First();
//ScheduleAndAdvance();
//ScheduleAndStart();
//ScheduleAndStartStop();
//SchedulerCollisions();
Console.ReadKey();
}
// Interval(5s).Take(5) on a TestScheduler yields 0..4 once the virtual
// clock is advanced to 25 seconds -- no real waiting involved.
[Test]
public static void Generate5Values_Test()
{
var scheduler = new TestScheduler();
var observable = Observable.Interval(TimeSpan.FromSeconds(5), scheduler).Take(5);
var values = new List<long>();
observable.Subscribe(values.Add);
scheduler.AdvanceTo(TimeSpan.FromSeconds(25).Ticks);
CollectionAssert.AreEqual(new[] { 0, 1, 2, 3, 4 }, values);
}
// Three actions scheduled at the SAME virtual tick run in scheduling order
// when the clock reaches that tick.
private static void SchedulerCollisions()
{
var scheduler = new TestScheduler();
scheduler.Schedule(TimeSpan.FromTicks(10), () => Console.WriteLine("A"));
scheduler.Schedule(TimeSpan.FromTicks(10), () => Console.WriteLine("B"));
scheduler.Schedule(TimeSpan.FromTicks(10), () => Console.WriteLine("C"));
scheduler.AdvanceTo(10);
}
// AdvanceTo runs every action due at or before the target tick; advancing
// past a due time with nothing pending prints nothing.
private static void ScheduleAndAdvance()
{
var scheduler = new TestScheduler();
scheduler.Schedule(() => Console.WriteLine("A"));
scheduler.Schedule(TimeSpan.FromTicks(10), () => Console.WriteLine("B"));
scheduler.Schedule(TimeSpan.FromTicks(20), () => Console.WriteLine("C"));
scheduler.AdvanceTo(1);
scheduler.AdvanceTo(10);
Console.WriteLine("To 15 ->");
scheduler.AdvanceTo(15);
Console.WriteLine("To 20 ->");
scheduler.AdvanceTo(20);
}
// Start() drains everything scheduled so far; work scheduled AFTER Start()
// does not run until the clock is moved again.
private static void ScheduleAndStart()
{
var scheduler = new TestScheduler();
scheduler.Schedule(() => Console.WriteLine("A"));
scheduler.Schedule(TimeSpan.FromTicks(10), () => Console.WriteLine("B"));
scheduler.Schedule(TimeSpan.FromTicks(20), () => Console.WriteLine("C"));
Console.WriteLine("Start");
scheduler.Start();
scheduler.Schedule(() => Console.WriteLine("D"));
Console.WriteLine("scheduler.Clock:{0}", scheduler.Clock);
}
// A scheduled Stop at tick 15 halts the drain, so "C" (due at 20) never runs.
private static void ScheduleAndStartStop()
{
var scheduler = new TestScheduler();
scheduler.Schedule(() => Console.WriteLine("A"));
scheduler.Schedule(TimeSpan.FromTicks(10), () => Console.WriteLine("B"));
scheduler.Schedule(TimeSpan.FromTicks(15), scheduler.Stop);
scheduler.Schedule(TimeSpan.FromTicks(20), () => Console.WriteLine("C"));
Console.WriteLine("Start");
scheduler.Start();
scheduler.Schedule(() => Console.WriteLine("D"));
Console.WriteLine("scheduler.Clock:{0}", scheduler.Clock);
}
// Scheduled work does not run until the virtual clock moves at all.
[Test]
public static void First()
{
var scheduler = new TestScheduler();
var wasExecuted = false;
scheduler.Schedule(() => wasExecuted = true);
Assert.False(wasExecuted);
scheduler.AdvanceBy(1);
Assert.IsTrue(wasExecuted);
}
// Reflection helper: lists every Observable method that accepts an
// IScheduler parameter (i.e. every concurrency-introducing overload).
private static void PrintISchedulerParamMethods()
{
var methods = from m in typeof(Observable).GetMethods()
from p in m.GetParameters()
where typeof(IScheduler).IsAssignableFrom(p.ParameterType)
group m by m.Name into method
orderby method.Key
select method.Key;
foreach (var method in methods)
{
Console.WriteLine(method);
}
}
}
}
<file_sep>/DoApp/Program.cs
using System;
using System.Reactive.Disposables;
using System.Reactive.Linq;
using System.Threading.Tasks;
namespace DoApp
{
class Program
{
    // Demonstrates the Do side-effect operator: each value is logged by Do
    // before reaching the final subscriber; the unsubscribe callback fires
    // when the synchronous sequence completes.
    static void Main()
    {
        var source = Observable.Create<int>(obs =>
        {
            foreach (var value in new[] { 1, 2, 3 })
            {
                obs.OnNext(value);
            }
            obs.OnCompleted();
            return Disposable.Create(() => Console.WriteLine("observer has been unsubscribed!"));
        });

        source
            .Do(item => Console.WriteLine("inside do -> {0}", item))
            .Subscribe(Console.WriteLine);

        // Keep the console process alive for ten seconds before exiting.
        Task.Delay(TimeSpan.FromSeconds(10)).Wait();
    }
}
}
<file_sep>/FromEventPatternApp/Form1.cs
using System;
using System.Reactive.Linq;
using System.Windows.Forms;
namespace FromEventPatternApp
{
// WinForms demo: bridges the combo box's MouseWheel event into an Rx stream
// and prints each wheel delta.
// NOTE(review): the subscription is never disposed; it lives as long as the
// form, which is presumably intended for this demo.
public partial class Form1 : Form
{
public Form1()
{
InitializeComponent();
Observable.FromEventPattern<MouseEventHandler, MouseEventArgs>(ev => comboBox1.MouseWheel += ev, ev => comboBox1.MouseWheel -= ev).Subscribe(args => Console.WriteLine(args.EventArgs.Delta));
}
}
}
<file_sep>/ObserveOnSubscribeOnApp/Form1.cs
using System;
using System.Reactive.Concurrency;
using System.Reactive.Disposables;
using System.Reactive.Linq;
using System.Windows.Forms;
namespace ObserveOnSubscribeOnApp
{
// WinForms demo of ObserveOn/SubscribeOn: the sequence is produced on a
// default-scheduler thread (SubscribeOn) while the list-view mutations are
// marshalled back to the UI thread (ObserveOn + WinForms sync context).
public partial class Form1 : Form
{
public Form1()
{
InitializeComponent();
}
// Each click creates a fresh 4-item synchronous sequence that completes,
// so the per-click subscription does not need explicit disposal.
private void button1_Click(object sender, System.EventArgs e)
{
var observable = Observable.Create<string>(observer =>
{
observer.OnNext("item1");
observer.OnNext("item2");
observer.OnNext("item3");
observer.OnNext("item4");
observer.OnCompleted();
return Disposable.Empty;
});
observable.ObserveOn(new SynchronizationContextScheduler(WindowsFormsSynchronizationContext.Current)).SubscribeOn(Scheduler.Default).Subscribe(i => listView1.Items.Add(i));
}
}
}
<file_sep>/MDApp/Form1.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Reactive;
using System.Reactive.Concurrency;
using System.Reactive.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace MDApp
{
// WinForms demo: button clicks become an Rx stream; ObserveOn(Scheduler.Default)
// moves handling off the UI thread so the 5-second sleep inside Do does not
// freeze the form, and clicks queue up behind each other.
public partial class Form1 : Form
{
// Click events as an observable (kept in a field for the form's lifetime).
private IObservable<EventPattern<EventArgs>> _clickObservable;
public Form1()
{
InitializeComponent();
_clickObservable = Observable.FromEventPattern<EventHandler, EventArgs>(ev => button1.Click += ev, ev => button1.Click -= ev);
_clickObservable.ObserveOn(Scheduler.Default).Do(e =>
{
// Simulates 5 seconds of slow side-effect work per click.
Console.WriteLine("Do started @ {0}", DateTime.Now.TimeOfDay);
Thread.Sleep(5000);
Console.WriteLine("Do ended @ {0}", DateTime.Now.TimeOfDay);
}).Subscribe(e =>
{
Console.WriteLine("Subscribe @ {0}", DateTime.Now.TimeOfDay);
});
}
}
}
<file_sep>/AggregationApp/MainWindow.xaml.cs
using System;
using System.Collections.Generic;
using System.Reactive.Concurrency;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using System.Threading;
using System.Windows;
namespace AggregationApp
{
/// <summary>
/// Interaction logic for MainWindow.xaml
/// </summary>
public partial class MainWindow
{
// Marshals Rx notifications back to the WPF UI thread.
private readonly IScheduler _scheduler;
public MainWindow()
{
InitializeComponent();
_scheduler = new SynchronizationContextScheduler(SynchronizationContext.Current);
Loaded += MainWindow_Loaded;
}
private IObservable<long> _intervalObservable;
// Subscription to LastAsync on the endless interval; disposed by the Stop
// button (LastAsync only emits once the source completes or is cut off).
private IDisposable _last;
// Runs the aggregation demos: Count/Min/Max/Average/First/Last only emit
// after a finite sequence completes; on the endless interval FirstAsync
// fires immediately while LastAsync never does until disposed.
void MainWindow_Loaded(object sender, RoutedEventArgs e)
{
var rangeObservable = Observable.Range(1, 10);
_intervalObservable = Observable.Interval(TimeSpan.FromSeconds(1));
listView.Items.Add("Only after completed");
rangeObservable.Count().Subscribe(count => listView.Items.Add(count.ToString()));
rangeObservable.Min().Subscribe(count => listView.Items.Add(count.ToString()));
rangeObservable.Max().Subscribe(count => listView.Items.Add(count.ToString()));
rangeObservable.Average().Subscribe(count => listView.Items.Add(count.ToString()));
rangeObservable.FirstAsync().Subscribe(count => listView.Items.Add(count.ToString()));
rangeObservable.LastAsync().Subscribe(count => listView.Items.Add(count.ToString()));
listView.Items.Add("Endless observable");
_intervalObservable.FirstAsync().ObserveOn(_scheduler).Subscribe(count => listView.Items.Add(count.ToString()));
_last = _intervalObservable.LastAsync().ObserveOn(_scheduler).Subscribe(count => listView.Items.Add(count.ToString()));
Aggregate();
InfiniteScan();
}
// Aggregate folds the whole (finite) sequence and emits one final sum.
private void Aggregate()
{
listView.Items.Add("Custom Aggregate");
var numbers = Observable.Range(1, 10);
numbers.Aggregate((acc, currentValue) => acc + currentValue).
Subscribe(sum => listView.Items.Add(sum.ToString()));
}
// Scan emits a running sum after EVERY value, so it also works on
// never-completing sources; RunningMax tracks the maximum seen so far.
private void InfiniteScan()
{
listView.Items.Add("Infinite Obs with Scan");
var numbers = new Subject<int>();
numbers.Scan(0, (acc, current) => acc + current)
.Subscribe(sum => listView.Items.Add(sum.ToString()));
numbers.RunningMax().Subscribe(max => listView.Items.Add("max -> " + max.ToString()));
numbers.OnNext(1);
numbers.OnNext(2);
numbers.OnNext(3);
numbers.OnNext(4);
numbers.OnNext(2);
numbers.OnNext(2);
}
// Disposing the LastAsync subscription ends the only pending endless wait.
private void StopButton_Click(object sender, RoutedEventArgs e)
{
_last.Dispose();
}
}
}
<file_sep>/ConcurrencyApp/Program.cs
using System;
using System.Collections.Generic;
using System.Reactive.Concurrency;
using System.Reactive.Disposables;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using System.Threading;
using System.Threading.Tasks;
namespace ConcurrencyApp
{
// Console demos of Rx concurrency: scheduler behavior (Immediate vs
// CurrentThread), scheduling with state, cancellation, recursion, and
// ObserveOn/SubscribeOn thread hopping.
class Program
{
static void Main()
{
//OnNextDiffThreads();
//SubscribeOn();
//Deadlock();
//PassTheState(Scheduler.NewThread);
//Cancellation(Scheduler.Immediate);
//UseCancellationToken();
//Recursion(Scheduler.NewThread);
//UseImmediateScheduler();
Console.WriteLine("CurrentThread");
Console.WriteLine();
UseCurrentThreadScheduler(Scheduler.CurrentThread);
Console.WriteLine();
Console.WriteLine("Immediate");
Console.WriteLine();
UseCurrentThreadScheduler(Scheduler.Immediate);
Console.ReadKey();
}
// Nested scheduling probe: CurrentThread queues the inner action so
// "innerActionEnd" prints before "leafAction"; Immediate runs it inline.
private static void UseCurrentThreadScheduler(IScheduler scheduler)
{
var leafAction = new Action(() => Console.WriteLine("leafAction"));
var innerAction = new Action(() =>
{
Console.WriteLine("innerActionBegin");
scheduler.Schedule(leafAction);
Console.WriteLine("innerActionEnd");
});
var outerAction = new Action(() =>
{
Console.WriteLine("outerActionBegin");
scheduler.Schedule(innerAction);
Console.WriteLine("outerActionEnd");
});
scheduler.Schedule(outerAction);
}
// Immediate + a due time blocks the CALLING thread for 5 seconds.
private static void UseImmediateScheduler()
{
Console.WriteLine(DateTime.Now.ToLongTimeString());
Scheduler.Immediate.Schedule(TimeSpan.FromSeconds(5), () => Console.WriteLine("Run!" + DateTime.Now.ToLongTimeString()));
}
// Recursive scheduling: calling self() re-schedules the action forever
// until the returned token is disposed (here, on a key press).
private static void Recursion(IScheduler scheduler)
{
Action<Action> work = self =>
{
Console.WriteLine("Running");
self();
};
var token = scheduler.Schedule(work);
Console.ReadKey();
Console.WriteLine("Cancelling");
token.Dispose();
}
// Kicks off Work() and cancels it (disposes the returned token) after ~2s.
private static void UseCancellationToken()
{
var ints = new List<int>();
var cancelToken = Work(Scheduler.Default, ints);
Task.Delay(2000).ContinueWith((_, token) => ((IDisposable)token).Dispose(), cancelToken);
}
// Spins through up to 1000 iterations, appending to 'list', checking a
// CancellationToken each pass; the returned IDisposable requests the cancel.
// NOTE(review): the 'scheduler' parameter is unused -- the work runs on a
// raw Task instead.
public static IDisposable Work(IScheduler scheduler, List<int> list)
{
var tokenSource = new CancellationTokenSource();
var cancelToken = tokenSource.Token;
var task = new Task(() =>
{
Console.WriteLine();
for (var i = 0; i < 1000; i++)
{
var sw = new SpinWait();
for (var j = 0; j < 3000; j++) sw.SpinOnce();
Console.Write(".");
list.Add(i);
if (cancelToken.IsCancellationRequested)
{
Console.WriteLine("Cancelation requested");
//cancelToken.ThrowIfCancellationRequested();
return;
}
}
});
task.Start();
return Disposable.Create(tokenSource.Cancel);
}
// Disposing the schedule token immediately: whether the action already ran
// depends on the scheduler (Immediate runs it inline before Dispose).
private static void Cancellation(IScheduler scheduler)
{
Console.WriteLine("started at {0}", DateTime.Now.ToLongTimeString());
var cancellation = scheduler.Schedule(() => Console.WriteLine(DateTime.Now.ToLongTimeString()));
cancellation.Dispose();
}
// Schedule overloads that carry state into the scheduled action. The list
// is mutated after scheduling, so the printed Count depends on when the
// scheduler actually runs the action (a deliberate shared-state hazard demo).
private static void PassTheState(IScheduler scheduler)
{
const string name = "Lee";
scheduler.Schedule(name, (_, state) =>
{
Console.WriteLine(state);
return Disposable.Empty;
});
var list = new List<int>();
scheduler.Schedule(list, (innerScheduler, state) =>
{
Console.WriteLine(state.Count);
return Disposable.Empty;
});
list.Add(1);
list.Add(2);
list.Add(3);
list.Add(3);
list.Add(3);
list.Add(3);
}
// Deadlock demo: First() blocks the current thread waiting for a value
// that can only be pushed by the SAME thread on the next line.
private static void Deadlock()
{
var seq = new Subject<int>();
var value = seq.First();
seq.OnNext(1);
}
// SubscribeOn moves the subscription (production) to a new thread;
// ObserveOn(sync context) marshals the notifications elsewhere. Thread ids
// are printed at each stage to show the hops.
private static void SubscribeOn()
{
var observable = Observable.Create<int>(observer =>
{
Console.WriteLine("OnNext() on threadId:{0}", Thread.CurrentThread.ManagedThreadId);
observer.OnNext(1);
observer.OnNext(2);
observer.OnNext(3);
observer.OnCompleted();
Console.WriteLine("Finished on threadId:{0}", Thread.CurrentThread.ManagedThreadId);
return Disposable.Empty;
});
//todo
SynchronizationContext.SetSynchronizationContext(new SynchronizationContext());
var sync = SynchronizationContext.Current;
observable.SubscribeOn(NewThreadScheduler.Default).ObserveOn(sync).Subscribe(
i => Console.WriteLine("Receiving({0}) on threadId: {1}", i, Thread.CurrentThread.ManagedThreadId),
() => Console.WriteLine("OnCompleted on threadId:{0}", Thread.CurrentThread.ManagedThreadId));
Console.WriteLine("-------> Subscribed on threadId:{0}", Thread.CurrentThread.ManagedThreadId);
}
// A plain Subject delivers OnNext on whatever thread CALLS it: the same
// subscriber is invoked from the main thread and from two worker threads.
private static void OnNextDiffThreads()
{
Console.WriteLine("Starting on threadId: {0}", Thread.CurrentThread.ManagedThreadId);
var subject = new Subject<int>();
subject.Subscribe(i => Console.WriteLine("Receiving({0}) on threadId: {1}", i, Thread.CurrentThread.ManagedThreadId),
() => Console.WriteLine("Completed"));
ParameterizedThreadStart notify = p =>
{
Console.WriteLine("OnNext({1}) on threadId:{0}", Thread.CurrentThread.ManagedThreadId, p);
subject.OnNext((int)p);
};
notify(1);
var t1 = new Thread(notify);
var t2 = new Thread(notify);
t1.Start(2);
t2.Start(3);
}
}
}
<file_sep>/MyRxTestsApp/NumberGenerator.cs
using System;
using System.Reactive.Concurrency;
using System.Reactive.Linq;
namespace MyRxTestsApp
{
public class NumberGenerator
{
    /// <summary>
    /// Emits the values 0..4, one per second, on the default scheduler.
    /// </summary>
    public IObservable<long> GetGeneratedNumbers()
    {
        return GetGeneratedNumbers(Scheduler.Default);
    }

    /// <summary>
    /// Emits the values 0..4, one per second, timed by the supplied
    /// scheduler (inject a TestScheduler to drive virtual time in tests).
    /// </summary>
    public IObservable<long> GetGeneratedNumbers(IScheduler scheduler)
    {
        var oneSecond = TimeSpan.FromSeconds(1);
        return Observable
            .Interval(oneSecond, scheduler)
            .Take(5);
    }
}
}
<file_sep>/HotColdApp/Program.cs
using System;
using System.Reactive.Disposables;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using System.Threading;
namespace HotColdApp
{
class Program
{
// Entry point for the hot/cold observable demos; runs whichever demo is
// uncommented, then waits for a key press.
static void Main()
{
//Cold();
//SimpleCold();
//ShareACold();
//RefCount();
//PublishLast();
//Replay();
Multicast();
Console.ReadKey();
}
// Manual multicast: a Subject subscribed to a cold interval acts as the
// shared relay, so the second subscriber (added ~3s later) misses the
// values emitted before it subscribed.
private static void Multicast()
{
var period = TimeSpan.FromSeconds(1);
//var observable = Observable.Interval(period).Publish();
var observable = Observable.Interval(period);
var shared = new Subject<long>();
shared.Subscribe(i => Console.WriteLine("first subscription : {0}", i));
observable.Subscribe(shared); //'Connect' the observable.
Thread.Sleep(period);
Thread.Sleep(period);
Thread.Sleep(period);
shared.Subscribe(i => Console.WriteLine("second subscription : {0}", i));
}
// Replay over a hot source: the third subscriber, added after ~6 seconds,
// still receives every value published since the replay connected.
private static void Replay()
{
var hot = Observable.Interval(TimeSpan.FromSeconds(1)).Take(5).Do(l => Console.WriteLine("publishing {0}", l)).Publish();
hot.Connect();
var replay = hot.Replay();
replay.Connect();
var first = replay.Subscribe(i => Console.WriteLine("first subscription : {0}", i));
var second = replay.Subscribe(i => Console.WriteLine("second subscription : {0}", i));
Thread.Sleep(TimeSpan.FromSeconds(6));
var third = replay.Subscribe(i => Console.WriteLine("third subscription : {0}", i));
}
private static void PublishLast()
{
var observable = Observable.Interval(TimeSpan.FromSeconds(1)).Take(5).Do(l => Console.WriteLine("publishing {0}", l)).PublishLast();
observable.Connect();
var first = observable.Subscribe(i => Console.WriteLine("first subscription : {0}", i));
Thread.Sleep(TimeSpan.FromSeconds(5));
Console.WriteLine("dispose first!");
first.Dispose();
}
private static void RefCount()
{
var observable = Observable.Interval(TimeSpan.FromSeconds(1)).Do(l => Console.WriteLine("publishing {0}", l)).Publish().RefCount();
var first = observable.Subscribe(i => Console.WriteLine("first subscription : {0}", i));
Thread.Sleep(TimeSpan.FromSeconds(5));
Console.WriteLine("dispose first!");
first.Dispose();
}
private static void ShareACold()
{
var observable = Observable.Interval(TimeSpan.FromSeconds(1)).Do(l => Console.WriteLine("publishing {0}", l)).Publish();
var first = observable.Subscribe(i => Console.WriteLine("first subscription : {0}", i));
var connection = observable.Connect();
Thread.Sleep(TimeSpan.FromSeconds(5));
//var second = observable.Subscribe(i => Console.WriteLine("second subscription : {0}", i));
Thread.Sleep(TimeSpan.FromSeconds(3));
Console.WriteLine("dispose first!");
first.Dispose();
//second.Dispose();
//connection.Dispose();
}
private static void SimpleCold()
{
var observable = Observable.Interval(TimeSpan.FromSeconds(1));
observable.Subscribe(i => Console.WriteLine("first subscription : {0}", i));
Thread.Sleep(TimeSpan.FromSeconds(10));
observable.Subscribe(i => Console.WriteLine("second subscription : {0}", i));
}
private static void Cold()
{
var threeProducts = GetProducts().Take(3);
threeProducts.Subscribe(p => Console.WriteLine("consumed {0}", p));
}
private static IObservable<string> GetProducts()
{
return Observable.Create<string>(obs =>
{
for (var i = 0; i < 10; i++)
{
Console.WriteLine("published on next {0}", i);
obs.OnNext(i.ToString());
}
obs.OnCompleted();
return Disposable.Empty;
});
}
}
}
<file_sep>/TestingApp/ViewModel/IMyModel.cs
using System;
namespace TestingApp.ViewModel
{
public interface IMyModel
{
    /// <summary>
    /// Returns a push-based stream of price updates for the given ticker symbol.
    /// </summary>
    IObservable<decimal> PriceStream(string symbol);
}
}<file_sep>/SideEffectsApp/Program.cs
using System;
using System.Reactive.Linq;
namespace SideEffectsApp
{
// Demonstrates why side effects inside an observable pipeline are dangerous
// (SideEffect), how to carry per-subscription state safely instead
// (CorrectedSideEffect), and the Do operator for deliberate side effects.
class Program
{
    static void Main()
    {
        SideEffect();
        CorrectedSideEffect();
        Do();
        Console.ReadKey();
    }

    // Logging helpers used by Do(), one overload per notification kind.
    private static void Log(object onNextValue)
    {
        Console.WriteLine("Logging OnNext{0} at {1}", onNextValue, DateTime.Now);
    }
    private static void Log(Exception onErrorValue)
    {
        Console.WriteLine("Logging OnError{0} at {1}", onErrorValue, DateTime.Now);
    }
    private static void Log()
    {
        Console.WriteLine("Logging OnCompleted at {0}", DateTime.Now);
    }

    // Do() is the sanctioned place for side effects such as logging; the
    // pipeline's values flow through unchanged.
    private static void Do()
    {
        var source = Observable.Interval(TimeSpan.FromSeconds(1)).Take(3);
        var result = source.Do(i => Log(i), ex => Log(ex), () => Log()).
        Subscribe(Console.WriteLine, () => Console.WriteLine("Completed"));
    }

    // Uses Select's indexed overload so every subscription gets its own,
    // correct element index — no shared mutable state.
    private static void CorrectedSideEffect()
    {
        var source = Observable.Range(0, 3);
        // BUGFIX: the indexed Select overload supplies (element, index) in that
        // order; the original lambda named them (idx, value), silently swapping
        // the two. It only "worked" because Range(0, 3) makes value == index.
        var result = source.Select((value, idx) => new
        {
            Index = idx,
            Letter = (char)(value + 65)
        });
        result.Subscribe(c => Console.WriteLine("Received {0} at index {1}", c.Letter, c.Index),
        () => Console.WriteLine("Completed"));
        result.Subscribe(c => Console.WriteLine("Also Received {0} at index {1}", c.Letter, c.Index),
        () => Console.WriteLine("2nd Completed"));
    }

    // Anti-pattern: one mutable index shared across subscriptions — the second
    // subscriber observes indexes already advanced by the first.
    private static void SideEffect()
    {
        var letters = Observable.Range(0, 3).Select(i => (char)(i + 65));
        var index = -1;
        var result = letters.Select(c =>
        {
            index++;
            return c;
        });
        result.Subscribe(c => Console.WriteLine("Received {0} at index {1}", c, index),
        () => Console.WriteLine("Completed"));
        result.Subscribe(c => Console.WriteLine("Also Received {0} at index {1}", c, index),
        () => Console.WriteLine("2nd Completed"));
    }
}
}
<file_sep>/AggregationApp/ObservableExtensions.cs
using System;
using System.Collections.Generic;
using System.Reactive.Linq;
namespace AggregationApp
{
public static class ObservableExtensions
{
    /// <summary>
    /// Streams the running maximum of the source, emitting a value only
    /// when a new maximum appears.
    /// </summary>
    public static IObservable<T> RunningMax<T>(this IObservable<T> source)
    {
        return source
            .Scan(MaxOf)
            .Distinct();
    }

    // Larger of the two values per the default comparer for T.
    private static T MaxOf<T>(T x, T y)
    {
        return Comparer<T>.Default.Compare(x, y) < 0 ? y : x;
    }
}
}
<file_sep>/SearchApp/MainWindow.xaml.cs
using System;
using System.Collections.Generic;
using System.Reactive.Concurrency;
using System.Reactive.Disposables;
using System.Reactive.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Controls;
namespace SearchApp
{
/// <summary>
/// Interaction logic for MainWindow.xaml
/// </summary>
/// <summary>
/// Interaction logic for MainWindow.xaml.
/// Implements a debounced search box: text changes are throttled, duplicate
/// queries suppressed, stale in-flight results cancelled, and results
/// marshalled back to the UI thread.
/// </summary>
public partial class MainWindow
{
    // Scheduler bound to the UI SynchronizationContext, used by ObserveOn.
    private readonly IScheduler _uiScheduler;
    // Fake data source searched by GetSearchResult.
    private static readonly List<string> _strings = new List<string> { "aaaaaa", "bbbbbbbbb", "cccccccc", "aaaaabbbb", "aaaaacccccc" };

    // Emits each matching string after a staggered delay (5s, 10s, ...),
    // simulating a slow asynchronous search service.
    private static IObservable<string> GetSearchResult(string searchText)
    {
        return Observable.Create<string>(observer =>
        {
            var index = 0;
            _strings.ForEach(s =>
            {
                if (!s.Contains(searchText)) return;
                index += 5;
                Task.Delay(TimeSpan.FromSeconds(index)).ContinueWith(t => observer.OnNext(s));
            });
            return Disposable.Empty;
        });
    }

    public MainWindow()
    {
        InitializeComponent();
        _uiScheduler = new SynchronizationContextScheduler(SynchronizationContext.Current);
        // TextChanged as an observable of event patterns.
        var textChangedObservable = Observable.FromEventPattern<TextChangedEventHandler, TextChangedEventArgs>(
        ev => SearchTextBox.TextChanged += ev, ev => SearchTextBox.TextChanged -= ev);
        // Throttle = wait for a 5s pause in typing; DistinctUntilChanged skips
        // repeats of the same query text.
        textChangedObservable.Select(ep => ((TextBox)ep.Sender).Text).Throttle(TimeSpan.FromSeconds(5)).DistinctUntilChanged().Subscribe(
        args =>
        {
            var resultObservable = GetSearchResult(args);
            // TakeUntil drops results once the user types again; ObserveOn
            // hops back to the UI thread before touching the TextBlock.
            resultObservable.TakeUntil(textChangedObservable).ObserveOn(_uiScheduler).Subscribe(result =>
            {
                SubscribeTextBlock.Text += Environment.NewLine +
                string.Format("{0} - {1}", DateTime.Now.ToLongTimeString(),
                result);
            });
        });
    }
}
}
<file_sep>/TestingApp/MyViewModelTests.cs
using System;
using System.Reactive.Disposables;
using System.Reactive.Subjects;
using Moq;
using NUnit.Framework;
using TestingApp.Core;
using TestingApp.ViewModel;
namespace TestingApp
{
// Unit tests for MyViewModel, driving Rx time deterministically through
// TestSchedulers (virtual ThreadPool and Dispatcher schedulers) and a mocked
// IMyModel whose PriceStream is backed by a controllable Subject.
public class MyViewModelTests
{
    [Test]
    public void Show_NoPriceUpdates_EmptyPrices()
    {
        // Arrange: mock model, virtual-time schedulers, subject as the price feed.
        var modelMock = new Mock<IMyModel>();
        var schedulers = new TestSchedulers();
        var viewModel = new MyViewModel(modelMock.Object, schedulers);
        var pricesSubject = new Subject<decimal>();
        modelMock.Setup(m => m.PriceStream(It.Is<string>(symbol => symbol == "AAPL"))).Returns(pricesSubject);
        // Act: show the symbol, tick both schedulers once — but push no prices.
        viewModel.Show("AAPL");
        schedulers.ThreadPool.AdvanceTo(1);
        schedulers.Dispatcher.AdvanceTo(1);
        Assert.AreEqual(0, viewModel.Prices.Count);
    }
    [Test]
    public void Show_OnePriceUpdate_OnePrice()
    {
        var modelMock = new Mock<IMyModel>();
        var schedulers = new TestSchedulers();
        var viewModel = new MyViewModel(modelMock.Object, schedulers);
        var pricesSubject = new Subject<decimal>();
        modelMock.Setup(m => m.PriceStream(It.Is<string>(symbol => symbol == "AAPL"))).Returns(pricesSubject);
        viewModel.Show("AAPL");
        // Queue the price push on the virtual ThreadPool so it runs when time advances.
        schedulers.ThreadPool.Schedule<object>(null, (_, a) =>
        {
            pricesSubject.OnNext(10);
            return Disposable.Empty;
        });
        schedulers.ThreadPool.AdvanceTo(1);
        schedulers.Dispatcher.AdvanceTo(1);
        Assert.AreEqual(1, viewModel.Prices.Count);
    }
    [Test]
    public void Show_NoPriceUpdatesIn11Seconds_Disconnected()
    {
        var modelMock = new Mock<IMyModel>();
        var schedulers = new TestSchedulers();
        var viewModel = new MyViewModel(modelMock.Object, schedulers);
        var pricesSubject = new Subject<decimal>();
        modelMock.Setup(m => m.PriceStream(It.Is<string>(symbol => symbol == "AAPL"))).Returns(pricesSubject);
        viewModel.Show("AAPL");
        // Advance virtual time past the (presumed 10s) silence timeout —
        // the view model should report itself disconnected.
        schedulers.ThreadPool.AdvanceTo(TimeSpan.FromSeconds(11).Ticks);
        schedulers.Dispatcher.AdvanceTo(1);
        Assert.IsFalse(viewModel.IsConnected);
    }
}
}
<file_sep>/ConcurrencyWpfApp/MainWindow.xaml.cs
using System;
using System.ComponentModel;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using System.Runtime.CompilerServices;
using System.Windows;
using ConcurrencyWpfApp.Annotations;
namespace ConcurrencyWpfApp
{
/// <summary>
/// Interaction logic for MainWindow.xaml
/// </summary>
/// <summary>
/// Interaction logic for MainWindow.xaml.
/// Demonstrates blocking vs non-blocking ways of taking a single value from
/// a subject on the UI thread.
/// </summary>
public partial class MainWindow : Window, INotifyPropertyChanged
{
    private string _value;
    private readonly Subject<string> _subject = new Subject<string>();
    public MainWindow()
    {
        InitializeComponent();
        MyButton.Click += MyButton_Click;
        DataContext = this;
        Value = "default value";
        // NOTE(review): First() blocks the calling thread until the subject
        // produces a value — but the value is only produced by the button-click
        // handler on this same UI thread, which cannot run while we block here.
        // This line appears to deadlock the window; confirm it is kept
        // deliberately as the "wrong way" half of the demo. The non-blocking
        // Take(1).Subscribe below is the safe equivalent.
        Value = _subject.First();
        _subject.Take(1).Subscribe(value => Value = value);
    }
    void MyButton_Click(object sender, RoutedEventArgs e)
    {
        _subject.OnNext("New value");
    }
    // Bound property; raises PropertyChanged on every set.
    public string Value
    {
        get { return _value; }
        set
        {
            _value = value;
            var handler = PropertyChanged;
            if (handler != null) handler(this, new PropertyChangedEventArgs("Value"));
        }
    }
    public event PropertyChangedEventHandler PropertyChanged;
    [NotifyPropertyChangedInvocator]
    protected virtual void OnPropertyChanged([CallerMemberName] string propertyName = null)
    {
        PropertyChangedEventHandler handler = PropertyChanged;
        if (handler != null) handler(this, new PropertyChangedEventArgs(propertyName));
    }
}
}
<file_sep>/ObservableTimerApp/Program.cs
using System;
using System.Reactive.Disposables;
using System.Reactive.Linq;
using System.Threading;
using System.Windows.Forms;
namespace ObservableTimerApp
{
// Console entry point: runs the MyProcessor timer demo and waits for a key
// so the periodic subscription has time to fire.
class Program
{
    static void Main()
    {
        //Form1 form1 = new Form1();
        ////form1.Show();
        //Application.Run(form1);
        //return;
        var myProcessor = new MyProcessor();
        myProcessor.Process();
        //Thread.Sleep(400);
        //myProcessor.Dispose();
        //Thread.Sleep(TimeSpan.FromMinutes(2));
        Console.ReadKey();
    }
}
// Runs a two-step job every 100ms via Observable.Timer; Dispose tears the
// subscription down through a SingleAssignmentDisposable.
public class MyProcessor : IDisposable
{
    // NOTE(review): _isDisposed is set in Dispose but never read anywhere in
    // this class — confirm whether the timer callback was meant to check it,
    // or whether disposing the subscription alone is sufficient.
    private volatile bool _isDisposed;
    private readonly SingleAssignmentDisposable _singleAssignmentDisposable = new SingleAssignmentDisposable();
    public void Process()
    {
        int index = 0;
        // Fires first after 100ms, then every 100ms thereafter.
        _singleAssignmentDisposable.Disposable = Observable.Timer(TimeSpan.FromMilliseconds(100), TimeSpan.FromMilliseconds(100)).Subscribe(n =>
        {
            Console.WriteLine("step1 -> {0}", DateTime.Now.ToString("mm:ss:ffff"));
            //step1
            //Thread.Sleep(TimeSpan.FromSeconds(10));
            Console.WriteLine("step2");
        }, ex => Console.WriteLine(ex.Message));
    }
    public void Dispose()
    {
        _isDisposed = true;
        _singleAssignmentDisposable.Dispose();
    }
}
}
<file_sep>/SequencesOfCoincidence/Program.cs
using System;
using System.Reactive.Linq;
namespace SequencesOfCoincidence
{
// Demonstrates Window(): splitting a 10-element interval sequence into
// consecutive 3-element windows and tagging each value with its window number.
class Program
{
    public static void Main()
    {
        Window();
        Console.ReadKey();
    }

    private static void Window()
    {
        var ten = Observable.Interval(TimeSpan.FromSeconds(1)).Take(10);
        var windowCount = -1;
        ten.Window(3).Subscribe(window =>
        {
            // FIX: capture this window's index in a local instead of closing
            // over the shared mutable counter. The inner callbacks run later,
            // asynchronously — by then the shared counter may already have been
            // advanced by a newer window, mislabelling the printed values.
            var currentWindow = ++windowCount;
            window.Subscribe(value => Console.WriteLine("window{0}, value{1}", currentWindow, value));
        });
    }
}
}
<file_sep>/ObservableTimerApp/Form1.cs
using System;
using System.Diagnostics;
using System.Reactive.Concurrency;
using System.Reactive.Linq;
using System.Threading;
using System.Windows.Forms;
namespace ObservableTimerApp
{
// WinForms benchmark comparing two ways of getting one million 3-second
// timer callbacks onto the UI thread: scheduling each timer directly on the
// UI scheduler (button1) vs running timers on the default scheduler and
// marshalling with ObserveOn (button2). Each label shows total elapsed ms.
public partial class Form1 : Form
{
    private IScheduler _scheduler;
    public Form1()
    {
        InitializeComponent();
    }
    protected override void OnLoad(EventArgs e)
    {
        base.OnLoad(e);
        // Wrap the UI thread's SynchronizationContext as an Rx scheduler.
        _scheduler = new SynchronizationContextScheduler(SynchronizationContext.Current);
    }
    // Variant A: every timer runs on the UI scheduler itself.
    private void button1_Click(object sender, EventArgs e)
    {
        var index = 0;
        var s = new Stopwatch();
        s.Start();
        for (var i = 1; i <= 1000000; i++)
        {
            Observable.Timer(TimeSpan.FromSeconds(3), _scheduler)
            .Subscribe(n =>
            {
                // Callbacks all land on the UI thread, so the counter is safe here.
                index++;
                if (index == 1000000)
                {
                    s.Stop();
                    label1.Text = s.ElapsedMilliseconds.ToString();
                }
            });
        }
    }
    // Variant B: timers use the default scheduler; only the notification is
    // marshalled to the UI thread via ObserveOn.
    private void button2_Click(object sender, EventArgs e)
    {
        var index = 0;
        var s = new Stopwatch();
        s.Start();
        for (var i = 1; i <= 1000000; i++)
        {
            Observable.Timer(TimeSpan.FromSeconds(3)).ObserveOn(_scheduler)
            .Subscribe(n =>
            {
                index++;
                if (index == 1000000)
                {
                    s.Stop();
                    label2.Text = s.ElapsedMilliseconds.ToString();
                }
            });
        }
    }
}
}
<file_sep>/MyRxTestsApp/Tests/NumberGeneratorTests.cs
using System;
using System.Collections.Generic;
using System.Threading;
using Microsoft.Reactive.Testing;
using Xunit;
namespace MyRxTestsApp.Tests
{
// xUnit tests for NumberGenerator, contrasting a slow wall-clock test with a
// fast virtual-time test driven by TestScheduler. (xUnit creates a fresh
// instance per test, so the constructor/Dispose pair runs around each test.)
public class NumberGeneratorTests : IDisposable
{
    public NumberGeneratorTests()
    {
        Console.WriteLine("We are currently in the constructor");
    }
    // Deliberately bad: waits 6 real seconds for the 5x1s sequence to finish.
    [Fact]
    public void GetGeneratedNumbers_WrongAndSlow()
    {
        var numberGenerator = new NumberGenerator();
        var numbersObservable = numberGenerator.GetGeneratedNumbers();
        var list = new List<long>();
        var subcription = numbersObservable.Subscribe(n =>
        {
            Console.WriteLine(n);
            list.Add(n);
        });
        Thread.Sleep(TimeSpan.FromSeconds(6));
        subcription.Dispose();
        Assert.Equal(new List<long> { 0, 1, 2, 3, 4 }, list);
        Assert.Equal(5, list.Count);
    }
    // Better: inject a TestScheduler and advance 6 virtual seconds instantly.
    [Fact]
    public void GetGeneratedNumbers_Observable_Generate5Numbers()
    {
        var numberGenerator = new NumberGenerator();
        var testScheduler = new TestScheduler();
        var numbersObservable = numberGenerator.GetGeneratedNumbers(testScheduler);
        var list = new List<long>();
        var subcription = numbersObservable.Subscribe(n =>
        {
            Console.WriteLine(n);
            list.Add(n);
        }, ex => Console.WriteLine(ex.Message));
        testScheduler.AdvanceBy(TimeSpan.FromSeconds(6).Ticks);
        subcription.Dispose();
        Assert.Equal(new List<long> { 0, 1, 2, 3, 4 }, list);
        Assert.Equal(5, list.Count);
    }
    public void Dispose()
    {
        Console.WriteLine("We are currently in the dispose");
    }
}
}
<file_sep>/CombiningSeqWpfApp/MainWindow.xaml.cs
using System;
using System.Reactive.Linq;
using System.Windows;
using System.Windows.Input;
namespace CombiningSeqWpfApp
{
/// <summary>
/// Interaction logic for MainWindow.xaml
/// </summary>
/// <summary>
/// Interaction logic for MainWindow.xaml.
/// Zips the mouse-move stream with itself shifted by one to compute movement
/// deltas and display the direction of travel.
/// </summary>
public partial class MainWindow
{
    public MainWindow()
    {
        InitializeComponent();
        Loaded += MainWindow_Loaded;
    }
    private void MainWindow_Loaded(object sender, RoutedEventArgs e)
    {
        // Mouse positions as a stream of coordinates.
        var source = Observable.FromEventPattern<MouseEventHandler, MouseEventArgs>
        (ev => MouseMove += ev, ev => MouseMove -= ev).Select(ep =>
        new Coord
        {
            X = ep.EventArgs.GetPosition(null).X,
            Y = ep.EventArgs.GetPosition(null).Y
        });
        // Zip pairs position[n] with position[n+1]; the difference is the
        // delta of the older position minus the newer one.
        var delayedMouse = source.Skip(1);
        source.Zip(delayedMouse, (ep1, ep2) => new Coord
        {
            X = ep1.X - ep2.X,
            Y = ep1.Y - ep2.Y
        }).Subscribe(c =>
        {
            // Positive delta = newer position is smaller = moved left/up.
            // Deltas of 3px or less are ignored as jitter.
            if (Math.Abs(c.X) > 3)
            MovementXTextBlock.Text = c.X > 0 ? "Left" : "Right";
            if (Math.Abs(c.Y) > 3)
            MovementYTextBlock.Text = c.Y > 0 ? "Up" : "Down";
        });
    }
}
/// <summary>A mutable 2D coordinate, rendered as "X,Y".</summary>
public class Coord
{
    public double X { get; set; }
    public double Y { get; set; }

    public override string ToString()
    {
        return X + "," + Y;
    }
}
}
<file_sep>/ConsoleApplication2/Program.cs
using System;
using System.Reactive.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace ConsoleApplication2
{
// Scratchpad exercising MyRepository (presumably a singleton cache exposing
// StringsObservable — defined elsewhere; confirm) together with
// Publish/Replay multicasting of its stream.
public class Program
{
    static void Main()
    {
        //var observable = new MyObservable();
        //var observer = new MyObserver();
        //observable.Subscribe();
        UseMyCache();
        // Blocks the main thread for a minute while UseMyCache's timer runs.
        Task.Delay(TimeSpan.FromMinutes(1)).Wait();
        var index = 0;
        // Hot + Replay: the second subscriber (5s later) should also see the
        // values recorded by the replay since its Connect().
        var hot = MyRepository.Instance.StringsObservable.Publish();
        hot.Connect();
        var observable = hot.Replay();
        observable.Connect();
        observable.Subscribe(s => Console.WriteLine("First => {0}", s));
        Observable.Timer(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(1)).Subscribe(t =>
        {
            MyRepository.Instance.Add(index++.ToString());
        });
        Thread.Sleep(TimeSpan.FromSeconds(5));
        observable.Subscribe(s => Console.WriteLine("Second => {0}", s));
        Task.Delay(TimeSpan.FromMinutes(5)).Wait();
        Console.WriteLine("Press any key!");
        Console.ReadKey();
    }
    // Feeds the repository once per second and watches it through two
    // subscriptions for ~6 seconds before disposing both.
    public static void UseMyCache()
    {
        var index = 11;
        Observable.Timer(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(1)).Subscribe(n =>
        {
            MyRepository.Instance.Add(index++.ToString());
        });
        var connection1 = MyRepository.Instance.StringsObservable.Subscribe(s => Console.WriteLine("First con {0}", s));
        var connection2 = MyRepository.Instance.StringsObservable.Subscribe(s => Console.WriteLine("Second con {0}", s));
        Task.Delay(TimeSpan.FromSeconds(6)).Wait();
        connection1.Dispose();
        connection2.Dispose();
    }
}
}
<file_sep>/ErrorHandlingApp/Program.cs
using System;
using System.Reactive.Disposables;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using System.Threading.Tasks;
namespace ErrorHandlingApp
{
// Demos of Rx error-handling operators: Catch (swallow / typed handler),
// Finally, Using (resource lifetime tied to subscription), and Retry.
class Program
{
    static void Main()
    {
        // The demos are mutually exclusive; uncomment the one to run.
        //Swallow();
        //Catch();
        //Finally();
        //Using();
        Retry();
        Console.ReadKey();
    }
    // Catch with an Empty fallback: the error is replaced by completion.
    private static void Swallow()
    {
        var source = new Subject<int>();
        var result = source.Catch(Observable.Empty<int>());
        result.Subscribe(Console.WriteLine);
        source.OnNext(1);
        source.OnNext(2);
        source.OnError(new Exception("Fail!"));
    }
    // Typed Catch: only TimeoutException is handled, substituting -1.
    private static void Catch()
    {
        var source = new Subject<int>();
        var result = source.Catch<int, TimeoutException>(n => Observable.Return(-1));
        result.Subscribe(Console.WriteLine);
        source.OnNext(1);
        source.OnNext(2);
        source.OnError(new TimeoutException("Fail!"));
    }
    // Finally runs once when the sequence terminates (error here; the
    // OnCompleted after OnError is ignored by the subject).
    private static void Finally()
    {
        var source = new Subject<int>();
        var result = source.Finally(() => Console.WriteLine("Finally!!!!"));
        result.Subscribe(Console.WriteLine);
        source.OnNext(1);
        source.OnNext(2);
        source.OnError(new Exception("Fail!"));
        source.OnCompleted();
        //source.OnError(new TimeoutException("Fail!"));
    }
    // Using creates MyResource per subscription and disposes it when the
    // subscription ends (after Take(5) completes).
    private static void Using()
    {
        var source = Observable.Interval(TimeSpan.FromSeconds(1));
        var result = Observable.Using(() => new MyResource(), resource => source);
        result.Take(5).Subscribe(Console.WriteLine);
    }
    // Retry(4) resubscribes on error up to 4 attempts total, so 1,2 print
    // four times before the final error propagates.
    private static void Retry()
    {
        var observable = Observable.Create<int>(observer =>
        {
            observer.OnNext(1);
            observer.OnNext(2);
            observer.OnError(new Exception("aaaaaaaaaaa"));
            return Disposable.Empty;
        });
        observable.Retry(4).Subscribe(Console.WriteLine);
    }
    // Logs its own construction and disposal so Using()'s lifetime is visible.
    private class MyResource : IDisposable
    {
        public MyResource()
        {
            Console.WriteLine("MyResource created!");
        }
        public void Dispose()
        {
            Console.WriteLine("MyResource disposed!");
        }
    }
}
}
<file_sep>/TransformationApp/Program.cs
using System;
using System.Reactive.Linq;
namespace TransformationApp
{
// Demos of Rx transformation operators: Select (projection), Timestamp,
// TimeInterval, Materialize, and SelectMany (flattening).
class Program
{
    static void Main()
    {
        var source = Observable.Range(1, 5);
        // Simple projection, then projection to an anonymous type.
        source.Select(n => n + 3).Subscribe(Console.WriteLine);
        source.Select(n => new { Number = n, Description = string.Format("n={0}", n) })
        .Subscribe(Console.WriteLine);
        // Timestamp: each value paired with its wall-clock arrival time.
        Observable.Interval(TimeSpan.FromSeconds(1)).Take(3).Timestamp().Subscribe(t => Console.WriteLine(t));
        // TimeInterval: each value paired with the elapsed time since the previous one.
        Observable.Interval(TimeSpan.FromSeconds(1)).Take(3).TimeInterval().Subscribe(t => Console.WriteLine(t));
        // Materialize: notifications (OnNext/OnCompleted) reified as values.
        Observable.Interval(TimeSpan.FromSeconds(1)).Take(3).Materialize().Subscribe(Console.WriteLine);
        SelectMany();
        Console.ReadKey();
    }
    // SelectMany maps each value to an inner sequence and flattens the results.
    private static void SelectMany()
    {
        Console.WriteLine("SelectMany");
        Observable.Return(3).SelectMany(i => Observable.Range(1, i)).Subscribe(Console.WriteLine);
        Observable.Range(1, 3).SelectMany(i => Observable.Range(1, i)).Subscribe(Console.WriteLine);
    }
}
}
<file_sep>/LeavingTheMonadApp/Program.cs
using System;
using System.Reactive;
using System.Reactive.Linq;
using System.Reactive.Threading.Tasks;
namespace LeavingTheMonadApp
{
// Demos of "leaving the monad": converting observables into blocking calls,
// enumerables, arrays, tasks, and .NET events.
class Program
{
    // A fresh 5x1s sequence per subscription (cold).
    private static readonly IObservable<long> Source = Observable.Interval(TimeSpan.FromSeconds(1)).Take(5);
    static void Main()
    {
        // ForEach blocks the caller until the sequence completes.
        Source.ForEach(i => Console.WriteLine("received {0} at {1}", i, DateTime.Now));
        Console.WriteLine("completed at {0}", DateTime.Now);
        // ToEnumerable: pull-based consumption; iteration blocks per element.
        var result = Source.ToEnumerable();
        foreach (var l in result)
        {
            Console.WriteLine(l);
        }
        Console.WriteLine("done");
        // ToArray: still an observable — it emits one array when the source completes.
        var array = Source.ToArray();
        array.Subscribe(arr =>
        {
            Console.WriteLine("Received the array");
            foreach (var l in arr)
            {
                Console.WriteLine(l);
            }
        }, () => Console.WriteLine("Completed"));
        Console.WriteLine("Subscribed to array");
        ToTask();
        Console.ReadKey();
    }
    // ToTask: Task completes with the sequence's final value; a failing
    // sequence surfaces as an AggregateException on task.Result.
    private static void ToTask()
    {
        var task = Source.ToTask();
        Console.WriteLine("subscribed to task");
        Console.WriteLine("task result => {0}", task.Result);
        var sourceWithError = Observable.Throw<long>(new Exception("Fail!"));
        var task2 = sourceWithError.ToTask();
        try
        {
            Console.WriteLine(task2.Result);
        }
        catch (AggregateException e)
        {
            Console.WriteLine(e.InnerException.Message);
        }
        ToEvent();
    }
    // ToEvent / ToEventPattern: expose the sequence as .NET events.
    private static void ToEvent()
    {
        Console.WriteLine("ToEvent");
        IEventSource<long> @event = Source.ToEvent();
        @event.OnNext += val => Console.WriteLine("ToEvent {0}", val);
        Console.WriteLine("ToEventPattern");
        // ToEventPattern requires EventPattern<TEventArgs> elements, hence the Select.
        var source = Observable.Interval(TimeSpan.FromSeconds(1)).
        Select(i => new EventPattern<MyEventArgs>(null, new MyEventArgs(i)));
        var result = source.ToEventPattern();
        result.OnNext += (sender, eventArgs) => Console.WriteLine("ToEventPattern {0}", eventArgs.Value);
    }
}
/// <summary>Immutable event-args carrying a single long value.</summary>
public class MyEventArgs : EventArgs
{
    // Backing store for Value; assigned once at construction.
    private readonly long _payload;

    public MyEventArgs(long value)
    {
        _payload = value;
    }

    /// <summary>The sequence value carried by this event.</summary>
    public long Value
    {
        get { return _payload; }
    }
}
}
| 0623e4610df289664595efbc9ac37857e3aaa191 | [
"C#"
] | 33 | C# | vioAron/MyRx | ae40234ca226d6a1da5920ccecf8b4caaac749de | 9e9c2581f5fc26d5401a462a79285d02b5d2a46a |
refs/heads/master | <file_sep>#!/bin/bash
# If the tests succeed on the specified `DEPLOY_BRANCH`, then prepare git for deployment, and then run the `DEPLOY_COMMAND`.
#
#
# Installation:
#
# after_success:
# - eval "$(curl -s https://raw.githubusercontent.com/balupton/awesome-travis/master/scripts/deploy-custom.bash)"
#
#
# Configuration:
#
# The branch name that you want tested and deployed, set correctly:
# travis env set DEPLOY_BRANCH "master" --public
#
# The command that will do the compilation and git push:
# travis env set DEPLOY_COMMAND "npm run deploy" --public
# User Environment Variables:
# DEPLOY_BRANCH
# DEPLOY_COMMAND
# Deploy only for a plain push build of the deployment branch:
# not a tag build, and not a pull-request build.
if [ "$TRAVIS_BRANCH" == "$DEPLOY_BRANCH" ] \
	&& [ -z "$TRAVIS_TAG" ] \
	&& [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
	echo "deploying..."
	eval "$DEPLOY_COMMAND" || exit -1
	echo "...deployed"
else
	echo "skipped deploy"
fi
<file_sep>#!/bin/bash
# Use the `DESIRED_NODE_VERSION` (defaults to the latest LTS node version) to install dependencies using `SETUP_COMMAND` (defaults to `npm install`).
#
#
# Installation:
#
# install:
# - eval "$(curl -s https://raw.githubusercontent.com/balupton/awesome-travis/master/scripts/node-install.bash)"
#
#
# Configuration:
#
# To specify a specific node version (rather than the LTS version):
# travis env set DESIRED_NODE_VERSION "7" --public
#
# To setup the project with a custom command, do so with:
# travis env set SETUP_COMMAND "npm run our:setup" --public
# User Environment Variables:
# Resolve the desired node version: default to the latest LTS release,
# otherwise normalise the user-supplied alias into a concrete version.
export DESIRED_NODE_VERSION
if test -z "$DESIRED_NODE_VERSION"; then
	DESIRED_NODE_VERSION="$(nvm version-remote --lts)" || exit -1
else
	DESIRED_NODE_VERSION="$(nvm version-remote "$DESIRED_NODE_VERSION")" || exit -1
fi
# The command used to install project dependencies; defaults to `npm install`.
export SETUP_COMMAND
if test -z "$SETUP_COMMAND"; then
	SETUP_COMMAND="npm install"
fi
# Local Environment Variables:
# The node version this Travis job is currently running on.
export ORIGINAL_NODE_VERSION; ORIGINAL_NODE_VERSION="$(node --version)" || exit -1
# Run
# Install dependencies using the desired node version, then switch back to
# the job's original node version so the tests run against it.
if test "$ORIGINAL_NODE_VERSION" = "$DESIRED_NODE_VERSION"; then
	echo "running on node version $ORIGINAL_NODE_VERSION which IS the desired $DESIRED_NODE_VERSION"
	echo "completing setup with $ORIGINAL_NODE_VERSION..."
	eval "$SETUP_COMMAND" || exit -1
	echo "...setup complete with $ORIGINAL_NODE_VERSION"
else
	# BUGFIX: this branch previously echoed the unset $CURRENT_NODE_VERSION;
	# the variable defined above is ORIGINAL_NODE_VERSION.
	echo "running on node version $ORIGINAL_NODE_VERSION which IS NOT the desired $DESIRED_NODE_VERSION"
	echo "installing the desired version..."
	nvm install "$DESIRED_NODE_VERSION" || exit -1
	echo "...installed the desired $DESIRED_NODE_VERSION"
	echo "completing setup with $DESIRED_NODE_VERSION..."
	eval "$SETUP_COMMAND" || exit -1
	echo "...setup complete with $DESIRED_NODE_VERSION"
	echo "switching back to $ORIGINAL_NODE_VERSION"
	nvm use "$ORIGINAL_NODE_VERSION" || exit -1
	echo "...switched back to $ORIGINAL_NODE_VERSION"
fi
<file_sep>#!/bin/bash
# If the tests succeed on the specified `DEPLOY_BRANCH`, then prepare git for deployment, and then run the `DEPLOY_COMMAND`.
# The `DEPLOY_COMMAND` should be the command responsible for the compilation, git add, git commit, and git push.
#
#
# Installation:
#
# after_success:
# - eval "$(curl -s https://raw.githubusercontent.com/balupton/awesome-travis/master/scripts/deploy-git.bash)"
#
#
# Configuration:
#
# The branch name that you want tested and deployed, set correctly:
# travis env set DEPLOY_BRANCH "master" --public
#
# The command that will do the compilation and git push:
# travis env set DEPLOY_COMMAND "npm run deploy" --public
#
# Your git username:
# travis env set DEPLOY_USER "$GITHUB_USERNAME" --public
#
# Your git password, if using GitHub, this should probably be a new [GitHub Personal Access Token](https://help.github.com/articles/creating-an-access-token-for-command-line-use/) with the `repo` permission:
# travis env set DEPLOY_TOKEN "$GITHUB_TOKEN"
#
# The name hat is used for the deployment commit, set to whatever:
# travis env set DEPLOY_NAME "Travis CI Deployer" --public
#
# The email that is used for the deployment commit, set to whatever:
# travis env set DEPLOY_EMAIL "<EMAIL>" --public
# User Environment Variables:
# DEPLOY_EMAIL
# DEPLOY_NAME
# DEPLOY_USER
# DEPLOY_TOKEN
# DEPLOY_BRANCH
# DEPLOY_COMMAND
# Deploy only for a plain push build of the deployment branch
# (not a tag build and not a pull-request build).
if ([ "$TRAVIS_BRANCH" == "$DEPLOY_BRANCH" ] &&
[ -z "$TRAVIS_TAG" ] &&
[ "$TRAVIS_PULL_REQUEST" == "false" ]); then
echo "deploying..."
# Identity used for the deployment commit.
git config --global user.email "$DEPLOY_EMAIL" || exit -1
git config --global user.name "$DEPLOY_NAME" || exit -1
# Replace origin with an authenticated remote so the deploy can push.
# NOTE(review): the token is embedded in the remote URL — it can surface in
# git error output; confirm the build log is safe or use a credential helper.
git remote rm origin || exit -1
git remote add origin "https://$DEPLOY_USER:$DEPLOY_TOKEN@github.com/$TRAVIS_REPO_SLUG.git" || exit -1
# DEPLOY_COMMAND is expected to compile, commit, and push.
eval "$DEPLOY_COMMAND" || exit -1
echo "...deployed"
else
echo "skipped deploy"
fi
<file_sep>#!/bin/bash
# If the tests succeeded, then deploy our release to [Surge](https://surge.sh) URLs for our branch, tag, and commit.
# Useful for rendering documentation and compiling code then deploying the release,
# such that you don't need the rendered documentation and compiled code inside your source repository.
# This is beneficial because sometimes documentation will reference the current commit,
# causing a documentation recompile to always leave a dirty state - this solution avoids that,
# as documentation can be git ignored.
#
#
# Local Installation:
#
# You will need to make sure you have surge installed as a local dependency,
# using npm: npm install --save-dev surge
# using yarn: yarn add --dev surge
#
#
# Installation:
#
# after_success:
# - eval "$(curl -s https://raw.githubusercontent.com/balupton/awesome-travis/master/scripts/surge.bash)"
#
#
# Configuration:
#
# Set your `SURGE_LOGIN` which is your surge.sh username
# travis env set SURGE_LOGIN "$SURGE_LOGIN" --public
#
# Set your `SURGE_TOKEN` (which you can get via the `surge token` command)
# travis env set SURGE_TOKEN "$SURGE_TOKEN"
#
# Set the path that you want to deploy to surge
# travis env set SURGE_PROJECT "." --public
# Node Development Dependencies:
# surge
# External Environment Variables:
# TRAVIS_REPO_SLUG
# User Environment Variables:
# SURGE_LOGIN
# SURGE_TOKEN
# Path to deploy to surge; defaults to the repository root.
export SURGE_PROJECT
if test -z "$SURGE_PROJECT"; then
	SURGE_PROJECT="."
fi
# Resolve the desired node version (latest LTS unless overridden).
export DESIRED_NODE_VERSION
if test -z "$DESIRED_NODE_VERSION"; then
	DESIRED_NODE_VERSION="$(nvm version-remote --lts)" || exit -1
else
	DESIRED_NODE_VERSION="$(nvm version-remote "$DESIRED_NODE_VERSION")" || exit -1
fi
# Local Environment Variables:
export CURRENT_NODE_VERSION
CURRENT_NODE_VERSION="$(node --version)" || exit -1
# Run
# Only release from the job running the desired node version, so the deploy
# happens once per build rather than once per matrix entry.
if test "$CURRENT_NODE_VERSION" = "$DESIRED_NODE_VERSION"; then
	echo "running on node version $CURRENT_NODE_VERSION which IS the desired $DESIRED_NODE_VERSION"
	echo "performing release to surge..."
	echo "preparing release"
	npm run our:meta || exit -1
	echo "performing deploy"
	# Turn "owner/repo" into "repo.owner" for use in the surge domain.
	# FIX: quote expansions so paths/slugs with spaces or glob characters
	# are not word-split or expanded by the shell.
	SURGE_SLUG="$(echo "$TRAVIS_REPO_SLUG" | sed 's/^\(.*\)\/\(.*\)/\2.\1/')" || exit -1
	if test "$TRAVIS_BRANCH"; then
		echo "deploying branch..."
		surge --project "$SURGE_PROJECT" --domain "$TRAVIS_BRANCH.$SURGE_SLUG.surge.sh" || exit -1
	fi
	if test "$TRAVIS_TAG"; then
		echo "deploying tag..."
		surge --project "$SURGE_PROJECT" --domain "$TRAVIS_TAG.$SURGE_SLUG.surge.sh" || exit -1
	fi
	if test "$TRAVIS_COMMIT"; then
		echo "deploying commit..."
		surge --project "$SURGE_PROJECT" --domain "$TRAVIS_COMMIT.$SURGE_SLUG.surge.sh" || exit -1
	fi
	echo "...released to surge"
else
	echo "running on node version $CURRENT_NODE_VERSION which IS NOT the desired $DESIRED_NODE_VERSION"
	echo "skipping release to surge"
fi
<file_sep># awesome-travis
Crowd-sourced list of [Travis CI](https://travis-ci.org) hooks/scripts etc to level up your `.travis.yml` file
## Notifications
### Slack
``` bash
travis encrypt "$SLACK_SUBDOMAIN:$SLACK_TRAVIS_TOKEN#updates" --add notifications.slack
```
### Email
``` bash
travis encrypt "$TRAVIS_NOTIFICATION_EMAIL" --add notifications.email.recipients
```
## Node.js
### Complete Node.js Version Matrix
Complete configuration for the different [node.js versions](https://github.com/nodejs/LTS) one may need to support. With legacy versions allowed to fail.
``` yaml
# https://github.com/balupton/awesome-travis
sudo: false
language: node_js
node_js:
- "0.8" # end of life
- "0.10" # end of life
- "0.12" # maintenance
- "4" # lts
- "6" # lts
- "7" # stable
matrix:
fast_finish: true
allow_failures:
- node_js: "0.8"
- node_js: "0.10"
cache:
directories:
- $HOME/.npm # npm's cache
- $HOME/.yarn-cache # yarn's cache
```
## Scripts
The [`scripts` directory](https://github.com/balupton/awesome-travis/tree/master/scripts) contains scripts you can use.
### Tips
The scripts in this repository are their own files, which the latest are fetched. E.g.
``` yaml
install:
- eval "$(curl -s https://raw.githubusercontent.com/balupton/awesome-travis/master/scripts/node-install.bash)"
```
You probably want to change the `master` to the the current commit hash. For instance:
``` yaml
install:
- eval "$(curl -s https://raw.githubusercontent.com/balupton/awesome-travis/some-commit-hash-instead/scripts/node-install.bash)"
```
Or you could even download it into a `.travis` folder for local use instead:
``` bash
mkdir -p ./.travis
wget https://raw.githubusercontent.com/balupton/awesome-travis/master/scripts/node-install.bash -O ./.travis/node-install.bash
chmod +x ./.travis/node-install.bash
```
``` yaml
install:
- ./.travis/node-install.bash
```
## Generators
- [`bevry/based`](https://github.com/bevry/based) generates your project, including your `.travis.yml` file, using this awesome list
## Contribution
Send pull requests for your scripts and config nifties! Will be awesome!
Although, avoid changing header titles and file names, as people may reference them when they use parts.
## License
Public Domain via The Unlicense
<file_sep>#!/bin/bash
# Ensure that the npm version is the latest.
#
#
# Installation:
#
# install:
# - eval "$(curl -s https://raw.githubusercontent.com/balupton/awesome-travis/master/scripts/node-upgrade-npm.bash)"
# Local Environment Variables:
# Compare the installed npm version against the latest published version
# and upgrade globally when they differ.
export ORIGINAL_NPM_VERSION
export LATEST_NPM_VERSION
ORIGINAL_NPM_VERSION="$(npm --version)" || exit -1
LATEST_NPM_VERSION="$(npm view npm version)" || exit -1
# Ensure npm is the latest
if test "$ORIGINAL_NPM_VERSION" != "$LATEST_NPM_VERSION"; then
	echo "running an old npm version $ORIGINAL_NPM_VERSION"
	echo "upgrading npm to $LATEST_NPM_VERSION..."
	npm update npm --global --cache-min=Infinity || exit -1
	# BUGFIX: previously echoed the unset $CURRENT_NPM_VERSION;
	# the version just installed is $LATEST_NPM_VERSION.
	echo "...npm upgraded to $LATEST_NPM_VERSION"
fi
<file_sep>#!/bin/bash
# Use the `DESIRED_NODE_VERSION` (defaults to the latest LTS node version) to login with npm and run `npm publish`.
#
#
# Installation:
#
# after_success:
# - eval "$(curl -s https://raw.githubusercontent.com/balupton/awesome-travis/master/scripts/node-publish.bash)"
#
#
# Configuration:
#
# Specficy a specific node version (rather than the LTS version)
# travis env set DESIRED_NODE_VERSION "7" --public
#
# Specify your npm username:
# travis env set NPM_USERNAME "$NPM_USERNAME" --public
#
# Specify your npm password
# travis env set NPM_PASSWORD "$<PASSWORD>"
#
# Specify your npm email
# travis env set NPM_EMAIL "$NPM_EMAIL"
# User Environment Variables:
# Resolve the desired node version (latest LTS unless overridden).
export DESIRED_NODE_VERSION
if test -z "$DESIRED_NODE_VERSION"; then
DESIRED_NODE_VERSION="$(nvm version-remote --lts)" || exit -1
else
DESIRED_NODE_VERSION="$(nvm version-remote "$DESIRED_NODE_VERSION")" || exit -1
fi
# Local Environment Variables:
export CURRENT_NODE_VERSION
CURRENT_NODE_VERSION="$(node --version)" || exit -1
# Run
# Publish only from the matrix job running the desired node version, and only
# for tag builds, so each release is published exactly once.
if test "$CURRENT_NODE_VERSION" = "$DESIRED_NODE_VERSION"; then
echo "running on node version $CURRENT_NODE_VERSION which IS the desired $DESIRED_NODE_VERSION"
if test "$TRAVIS_TAG"; then
echo "releasing to npm..."
echo "logging in..."
# Feed the three interactive `npm login` prompts (user, password, email)
# via stdin; the credentials come from encrypted Travis env vars.
echo -e "$NPM_USERNAME\n$NPM_PASSWORD\n$NPM_EMAIL" | npm login || exit -1
echo "publishing..."
npm publish || exit -1
echo "...released to npm"
else
echo "non-tag, no need for release"
fi
else
echo "running on node version $CURRENT_NODE_VERSION which IS NOT the desired $DESIRED_NODE_VERSION"
echo "skipping release to npm"
fi
<file_sep>#!/bin/bash
# If our current node version is the `DESIRED_NODE_VERSION` (defaults to the latest LTS node version)
# then compile and lint our project with: `npm run our:compile && npm run our:verify`
# otherwise just compile our project with: `npm run our:compile`
#
#
# Installation:
#
#   before_script:
#     - eval "$(curl -s https://raw.githubusercontent.com/balupton/awesome-travis/master/scripts/node-verify.bash)"
#
#
# Configuration:
#
# To specify a specific node version (rather than the LTS version)
#   travis env set DESIRED_NODE_VERSION "7" --public

# User Environment Variables:
# Resolve DESIRED_NODE_VERSION to a concrete version string via nvm;
# `exit 1` replaces the non-portable `exit -1` (exit status must be 0-255).
export DESIRED_NODE_VERSION
if test -z "$DESIRED_NODE_VERSION"; then
	DESIRED_NODE_VERSION="$(nvm version-remote --lts)" || exit 1
else
	DESIRED_NODE_VERSION="$(nvm version-remote "$DESIRED_NODE_VERSION")" || exit 1
fi

# Local Environment Variables:
export CURRENT_NODE_VERSION
CURRENT_NODE_VERSION="$(node --version)" || exit 1

# Run: lint only on the desired node version; on other versions temporarily
# swap to the desired version just to compile, then swap back.
if test "$CURRENT_NODE_VERSION" = "$DESIRED_NODE_VERSION"; then
	echo "running on node version $CURRENT_NODE_VERSION which IS the desired $DESIRED_NODE_VERSION"
	echo "compiling and verifying with $CURRENT_NODE_VERSION..."
	npm run our:compile && npm run our:verify || exit 1
	echo "...compiled and verified with $CURRENT_NODE_VERSION"
else
	echo "running on node version $CURRENT_NODE_VERSION which IS NOT the desired $DESIRED_NODE_VERSION"
	echo "swapping to $DESIRED_NODE_VERSION..."
	nvm install "$DESIRED_NODE_VERSION" || exit 1
	echo "...swapped to $DESIRED_NODE_VERSION"
	echo "compiling with $DESIRED_NODE_VERSION..."
	npm run our:compile || exit 1
	echo "...compiled with $DESIRED_NODE_VERSION"
	echo "swapping back to $CURRENT_NODE_VERSION"
	nvm use "$CURRENT_NODE_VERSION" || exit 1
	echo "...swapped back to $CURRENT_NODE_VERSION"
fi
| 30c27df1981e9b955162ad01e1c18e80fe3c343a | [
"Markdown",
"Shell"
] | 8 | Shell | patuf/awesome-travis | cb86e0809528d2ab8cdfb830744bad2cb708fd9d | d525cfc969b537d624e9f14ef2b8fb34991e9e85 |
refs/heads/main | <file_sep>"""
Django settings for OmniConnx project.
Generated by 'django-admin startproject' using Django 2.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '<KEY>'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['localhost', 'omniconnx.herokuapp.com', '127.0.0.1']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.sites',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'crispy_forms',
'categorys',
'users',
'message',
'storages',
]
SITE_ID=1
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'OmniConnx.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(BASE_DIR, 'templates').replace('\\', '/'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
# 'direct.views.checkDirects',
],
},
},
]
WSGI_APPLICATION = 'OmniConnx.wsgi.application'
# the following is email authentication for comment threads
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
ACCOUNT_USERNAME_REQUIRED = False
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_USE_TLS = True
EMAIL_PORT = 587
EMAIL_HOST_USER = '<EMAIL>'
EMAIL_HOST_PASSWORD = '<PASSWORD>'
DEFAULT_FROM_EMAIL = EMAIL_HOST_USER
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Where to redirect during authentication
LOGIN_REDIRECT_URL = "/grid"
LOGOUT_REDIRECT_URL = "/"
DEFAULT_LOGOUT_URL = "/"
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
CRISPY_TEMPLATE_PACK = 'bootstrap4'
STATIC_URL = '/staticfiles/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
WIKI_PAGE_TITLE_MAX_LENGTH = 600
# --- Amazon S3 static-file hosting (django-storages + boto3) ---
# SECURITY NOTE(review): AWS credentials are hard-coded and committed to
# source control here; they should be read from environment variables
# (e.g. os.environ['AWS_ACCESS_KEY_ID']) and the exposed key pair rotated.
AWS_ACCESS_KEY_ID = 'AKIAT4BYMASPECGCPRSK'
AWS_SECRET_ACCESS_KEY = '<KEY>'
AWS_STORAGE_BUCKET_NAME = 'omniconnx-static'
# Public domain the bucket's objects are served from.
AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
# Cache served objects for one day (86400 seconds).
AWS_S3_OBJECT_PARAMETERS = {
'CacheControl': 'max-age=86400',
}
AWS_LOCATION = 'static'
# NOTE(review): this overrides the STATIC_URL = '/staticfiles/' assigned
# earlier in this module; the earlier value is dead configuration.
STATIC_URL = "https://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, AWS_LOCATION)
STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
if 'DATABASE_URL' in os.environ:
import dj_database_url
DATABASES = {'default': dj_database_url.config()}<file_sep>from django.apps import AppConfig
class Categorys(AppConfig):
name = 'categorys' | 0908d0cef778d7650e96f1b5668352a4e238f905 | [
"Python"
] | 2 | Python | GSCrawley/OmniConnxGid | 254a96d889238ec53f1c98684a80738a0434922a | 1f9555ec1317d735625ba725ea0bd633feb67ba7 |
refs/heads/main | <file_sep>const express = require("express");
const bodyParser = require("body-parser");
const mongoose = require("mongoose");
const session = require('express-session');
const passport = require("passport");
const passportLocalMongoose = require("passport-local-mongoose");
const _ = require("lodash");
const { application } = require("express");
const app = express();
app.use(function(req,res,next){
res.locals.message = "";
next();
});
app.set('view engine', 'ejs');
app.use(bodyParser.urlencoded({extended: true}));
app.use(express.static("public"));
app.use(function(req,res,next){
res.locals.currentUser = req.user;
next();
});
app.use(session({
secret: "Our little secret.",
resave: false,
saveUninitialized: false
}));
app.use(passport.initialize());
app.use(passport.session());
//const URL = mongodb+srv:harshman:<password>@cluster0.kpnay.mongodb.net/<dbname>?retryWrites=true&w=majority;
mongoose.connect("mongodb+srv://harshman:hog@<EMAIL>0.gwha9.mongodb.net/registration?retryWrites=true&w=majority",{ useUnifiedTopology: true, useNewUrlParser: true });
// mongoose.connect("mongodb://localhost:27017/RegistrationDB",{ useUnifiedTopology: true, useNewUrlParser: true });
mongoose.set("useCreateIndex",true);
const matchdetails = [];
const details = [];
const userSchema = new mongoose.Schema ({
email: String,
password: String,
});
userSchema.plugin(passportLocalMongoose);
const User = new mongoose.model("User", userSchema);
passport.use(User.createStrategy());
passport.serializeUser(function(user, done) {
done(null, user.id);
});
passport.deserializeUser(function(id, done) {
User.findById(id, function(err, user) {
done(err, user);
});
});
// Schema for a team's tournament application.  Consistency fix: declared as
// an explicit mongoose.Schema (like userSchema above) instead of a plain
// object literal that mongoose.model() would wrap implicitly.
const applySchema = new mongoose.Schema({
  teamname: String,
  ign: String,          // in-game names of the four squad members
  mobilenumber: Number,
  email: String,
  ign2: String,
  ign3: String,
  ign4: String,
  tourneyname: String   // name of the tournament the team applied to
});
const Application = mongoose.model("Application", applySchema);

// Schema for a hosted tournament.
const hostSchema = new mongoose.Schema({
  name: String,
  number: Number,       // contact number — TODO confirm against the /host form
  date: Date,
  description: String,
  prizepool: Number,
  ytlink: String,       // YouTube link (posted as req.body.youtubelink)
  walink: String        // WhatsApp link (posted as req.body.whatsapplink)
});
const Hosting = mongoose.model("Hosting", hostSchema);
// const application1 = new Application({
// teamname : "Blabla",
// ign : "Blabla",
// mobilenumber: 982,
// email : "Blabla",
// ign2 : "Blabla",
// ign3 :"Blabla",
// ign4: "Blabla"
// Route guard: pass the request through when the user is authenticated,
// otherwise send them to the login page.
function isLoggedIn(req, res, next){
  if (req.isAuthenticated()){
    return next();
  }
  // Bug fix: res.redirect() does not accept a locals object the way
  // res.render() does; the original `res.redirect("/login", {message: ...})`
  // is an invalid call.  Flash-style messaging belongs in the session.
  res.redirect("/login");
}
app.get("/", function(req,res){
console.log(req.user);
res.render('index',{currentUser : req.user});
});
app.get("/registration", function(req,res){
res.render('registration');
});
// Show the registration page for the tournament whose name matches the
// :name URL parameter (compared case-insensitively on the first 12
// characters — TODO confirm the 12-char truncation against link generation).
app.get("/tournament/:name", function(req, res){
  const requestedName = _.lowerCase(req.params.name);
  Hosting.find({}, function(err, tourneys){
    if (err) {
      console.log(err);
      return;
    }
    // Bug fix: the original called res.render() inside forEach, sending a
    // response per matching tournament (crashes with "headers already sent"
    // when more than one matches) and logged the null err object for every
    // non-matching tournament.  Render exactly once for the first match.
    const match = tourneys.find(function(tourney){
      return _.lowerCase(tourney.name).substring(0, 12) === requestedName;
    });
    if (match) {
      res.render("registration", {
        currentUser: req.user,
        matchdetail: match
      });
    }
    // NOTE(review): as in the original, no response is sent when nothing
    // matches; consider returning a 404 here.
  });
})
app.get("/logout", function(req, res){
req.logout();
res.redirect("/");
});
app.get("/list", function(req,res){
Application.find({}, function(err,foundApplications){
res.render('list',{
details : foundApplications
,currentUser : req.user
});
});
})
// app.get("/tournament/list/:tourneyNames", function(req,res){
// const tourneyName = req.body.hostname;
// console.log(tourneyName);
// Application.find({}, function(err,foundApplications){
// res.render('list',{
// details : foundApplications
// });
// });
// })
app.get("/tournament/list/:name",function(req,res){
const requestedName = req.params.name;
// console.log(requestedName);
Application.find({tourneyname : requestedName}, function(err,foundApplications){
res.render('list',{
details : foundApplications
,currentUser : req.user
});
});
});
app.post("/tournament/:name",function(req,res){
const requestedName = req.params.name;
// console.log(requestedName);
const detail = new Application({
teamname : req.body.teamname,
ign : req.body.ign,
mobilenumber: req.body.mobile_number,
email : req.body.email,
ign2 : req.body.ign2,
ign3 :req.body.ign3,
ign4: req.body.ign4,
tourneyname : req.body.hostname
});
// details.push(detail);
detail.save(function(err){
if (!err){
res.redirect("/tournament/list/"+requestedName);
}
});
});
app.get("/host", function(req,res){
if(req.isAuthenticated()){
res.render('host',{currentUser : req.user});
}else{
res.redirect("/login")
}
});
app.post("/host",function(req,res){
const matchdetail = new Hosting( {
name : req.body.name,
number : req.body.number,
date : req.body.date,
description : req.body.description,
prizepool : req.body.prizepool,
ytlink :req.body.youtubelink,
walink : req.body.whatsapplink
});
// console.log(matchdetail.name);
matchdetail.save(function(err){
if (!err){
res.redirect("/tournament");
}
});
});
app.get("/tournament", function(req,res){
Hosting.find({},function(err,foundHosts){
res.render('tournament',{
matchdetails : foundHosts
,currentUser : req.user
});
});
})
app.get("/login", function(req,res){
res.render('login',{currentUser : req.user});
});
app.post("/login", function(req, res){
const user = new User({
username: req.body.username,
password: <PASSWORD>
});
req.login(user, function(err){
if (err) {
console.log(err);
} else {
passport.authenticate("local")(req, res, function(){
res.redirect("/host");
});
}
});
});
app.get("/register", function(req,res){
res.render('register',{currentUser : req.user});
});
// app.post("/register",function(req,res){
// User.register({username: req.body.username}, req.body.password, function(err,user){
// if(err) {
// console.log(err);
// res.redirect("/register");
// } else{
// passport.authenticate("locat")(req,res,function(){
// res.redirect("/host");
// });
// }
// });
// });
// Create a new account; on success log the user in and send them to /host,
// on failure send them back to the registration form.
app.post("/register", function(req, res){
  User.register({username: req.body.username}, req.body.password, function(err, user){
    if (err) {
      console.log(err);
      // Bug fix: the original manipulated `document` and `bootstrap` here,
      // which are browser globals that do not exist in server-side Node
      // code — every failed registration crashed with a ReferenceError.
      // Dismissing the alert element belongs in client-side template code.
      res.redirect("/register");
    } else {
      passport.authenticate("local")(req, res, function(){
        res.redirect("/host");
      });
    }
  });
});
// Application.find({},function(err,names){
// if(!err){
// names.forEach(function(name){
// console.log(name.teamname);
// })
// }
// });
app.listen(process.env.PORT||3000, function(){
console.log("M416 goes brrrrrr");
}) | 41dabe5c2d48a188fde45000ac55078785fbb324 | [
"JavaScript"
] | 1 | JavaScript | harshman04dec/HighOnGaming | 620f76148f0cf19a18bacb3851741c4d9f9831d5 | 266cf7cf0c26f9d4ef9c64ea8e6e05ca2ec15295 |
refs/heads/master | <repo_name>alfahaama/r-corpus<file_sep>/R/text_split.R
# Copyright 2017 <NAME>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Count the number of units ("sentences" or "tokens") in each element of a
# text vector, dispatching to the matching C routine.
text_count <- function(x, units = "sentences",
                       filter = token_filter(),
                       crlf_break = FALSE,
                       suppress = abbreviations("english"))
{
    x <- as_text(x)
    units <- as_enum("units", units, choices = c("sentences", "tokens"))
    switch(units,
           sentences = {
               # Sentence mode validates only the sentence-related options.
               crlf_break <- as_option("crlf_break", crlf_break)
               suppress <- as_character_vector("suppress", suppress)
               .Call(C_text_count_sentences, x, crlf_break, suppress)
           },
           tokens = {
               filter <- as_token_filter(filter)
               .Call(C_text_count_tokens, x, filter)
           },
           # Fallback for an unexpected unit value.
           stop(paste0("unrecognized 'units' value: '", units, "'")))
}
# Split each element of a text vector into blocks of `size` units, where a
# unit is either a sentence or a token depending on `units`.
text_split <- function(x, units = "sentences", size = 1,
                       filter = token_filter(),
                       crlf_break = FALSE,
                       suppress = abbreviations("english"))
{
    # Coerce and validate the arguments before dispatching to C.
    x <- as_text(x)
    units <- as_enum("units", units, choices = c("sentences", "tokens"))
    size <- as_size(size)
    if (units == "sentences") {
        # Sentence mode: only the sentence-related options are used;
        # `filter` is ignored.
        crlf_break <- as_option("crlf_break", crlf_break)
        suppress <- as_character_vector("suppress", suppress)
        ans <- .Call(C_text_split_sentences, x, size, crlf_break, suppress)
    } else if (units == "tokens") {
        # Token mode: boundaries come from the token filter; `crlf_break`
        # and `suppress` are ignored.
        filter <- as_token_filter(filter)
        ans <- .Call(C_text_split_tokens, x, size, filter)
    } else {
        # Presumably unreachable: as_enum() restricts `units` to the
        # declared choices — TODO confirm.
        stop(paste0("unrecognized 'units' value: '", units, "'"))
    }
    ans
}
<file_sep>/R/token.R
# Copyright 2017 <NAME>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Construct a "corpus_token_filter" object holding tokenization options.
#
# Note: the empty classed list is created first and each option is then
# assigned via `ans$key <- value`, which dispatches the
# `$<-.corpus_token_filter` method defined below — so every option is
# validated (as_option / as_character_vector / as_stemmer) on the way in.
# Replacing this with a plain list() constructor would silently skip that
# validation.
#
# Also note the default `stem_except = drop`: R's lazy evaluation makes the
# stemming exception list default to whatever `drop` is at evaluation time.
token_filter <- function(map_case = TRUE, map_compat = TRUE, map_quote = TRUE,
                         remove_ignorable = TRUE,
                         stemmer = NA, stem_except = drop, combine = NULL,
                         drop_letter = FALSE, drop_mark = FALSE,
                         drop_number = FALSE, drop_punct = FALSE,
                         drop_symbol = FALSE, drop_other = FALSE,
                         drop = NULL, drop_except = NULL)
{
    ans <- structure(list(), class="corpus_token_filter")
    ans$map_case <- map_case
    ans$map_compat <- map_compat
    ans$map_quote <- map_quote
    ans$remove_ignorable <- remove_ignorable
    ans$stemmer <- stemmer
    ans$stem_except <- stem_except
    ans$combine <- combine
    ans$drop_letter <- drop_letter
    ans$drop_mark <- drop_mark
    ans$drop_number <- drop_number
    ans$drop_symbol <- drop_symbol
    ans$drop_punct <- drop_punct
    ans$drop_other <- drop_other
    ans$drop <- drop
    ans$drop_except <- drop_except
    ans
}
# Coerce `filter` to a "corpus_token_filter", keeping only the recognized
# properties (those produced by token_filter()); NULL passes through as-is.
as_token_filter <- function(filter)
{
    if (is.null(filter)) {
        return(NULL)
    }
    result <- structure(list(), class = "corpus_token_filter")
    # Assignment via [[<- dispatches the validating replacement method below.
    for (property in names(token_filter())) {
        result[[property]] <- filter[[property]]
    }
    result
}
# Vectorized replacement for token filters: `x[i] <- value` assigns one or
# more properties at once.  A scalar `value` is recycled across all of `i`;
# each individual assignment goes through `[[<-` and is therefore validated.
`[<-.corpus_token_filter` <- function(x, i, value)
{
    # Reject NA subscripts up front.
    if (anyNA(i)) {
        stop("NAs are not allowed in subscripted assignments")
    }
    # Numeric/logical subscripts are translated to property names; out-of-range
    # positions yield NA names, which are silently skipped in the loop below.
    if (!is.character(i)) {
        i <- names(x)[i]
    }
    # Recycle a scalar replacement, otherwise require matching lengths.
    if (length(value) == 1) {
        value <- rep(value, length(i))
    } else if (length(value) != length(i)) {
        stop("number of items to replace differs from the replacement length")
    }
    for (j in seq_along(i)) {
        key <- i[[j]]
        val <- value[[j]]
        if (!is.na(key)) {
            # Dispatches `[[<-.corpus_token_filter`, which validates the value.
            x[[key]] <- val
        }
    }
    x
}
# Validating `$<-` replacement method: every property assignment on a token
# filter is coerced/checked according to the property's kind, and unknown
# property names are rejected.
`$<-.corpus_token_filter` <- function(x, name, value)
{
    if (name %in% c("map_case", "map_compat", "map_quote",
                    "remove_ignorable", "drop_letter", "drop_mark",
                    "drop_number", "drop_symbol", "drop_punct",
                    "drop_other")) {
        # Boolean-valued options.
        value <- as_option(name, value)
    } else if (name %in% c("stem_except", "combine", "drop", "drop_except")) {
        # Character-vector-valued options (NULL allowed).
        value <- as_character_vector(name, value)
    } else if (name == "stemmer") {
        value <- as_stemmer(value)
    } else {
        stop(paste0("unrecognized token filter property: '", name, "'"))
    }
    # Work on the bare list so the assignments below do not re-dispatch this
    # method recursively.
    y <- unclass(x)
    if (is.null(value)) {
        # setting a list element to NULL is tricky; see
        # http://stackoverflow.com/a/7945259
        # First force the slot to exist, then overwrite it with NULL via a
        # single-element list so the slot is kept rather than deleted.
        y[[name]] <- NA
        y[match(name, names(y))] <- list(NULL)
    } else {
        y[[name]] <- value
    }
    class(y) <- class(x)
    y
}
# `[[<-` replacement method: resolves a single subscript (name or position)
# to a property name and delegates to the validating `$<-` method.
`[[<-.corpus_token_filter` <- function(x, i, value)
{
    # `[[` takes exactly one subscript.
    if (length(i) > 1) {
        stop("no such token filter property")
    }
    # Translate a positional subscript into the property name.
    if (!is.character(i)) {
        name <- names(x)[[i]]
    } else {
        name <- i
    }
    if (is.na(name)) {
        stop(paste0("no such token filter property (", i, ")"))
    }
    # Explicit call (rather than `x[[name]] <- value`) to reuse validation.
    `$<-.corpus_token_filter`(x, name, value)
}
# Pretty-print a token filter: one tab-indented "name: value" line per
# option.  Returns the filter invisibly, as print methods conventionally do.
print.corpus_token_filter <- function(x, ...)
{
    cat("Token filter with the following options:\n\n")
    for (name in names(x)) {
        value <- x[[name]]
        cat(paste0("\t", name, ": "))
        if (is.null(value)) {
            cat("NULL\n")
        } else if (length(value) == 1) {
            cat(paste0(value, "\n"))
        } else {
            # Multi-element options are summarized with str(); the width is
            # reduced by the tab (8), the option name, and the ": " separator.
            utils::str(value, width = getOption("width") - 8 - nchar(name) - 2,
                       give.attr = FALSE)
        }
    }
    invisible(x)
}
# Tokenize a text vector according to the given token filter.
tokens <- function(x, filter = token_filter())
{
    text <- as_text(x)
    spec <- as_token_filter(filter)
    .Call(C_tokens_text, text, spec)
}
<file_sep>/man/term_matrix.Rd
\name{term_matrix}
\alias{term_matrix}
\title{Term Frequency Matrix}
\description{
Tokenize a set of texts and compute a term frequency matrix, with
one column for each term.
}
\usage{
term_matrix(x, filter = token_filter(), weights = NULL,
ngrams = NULL, select = NULL, group = NULL)
}
\arguments{
\item{x}{a text vector to tokenize.}
\item{filter}{a token filter specifying the tokenization rules.}
\item{weights}{a numeric vector the same length of \code{x} assigning
weights to each text, or \code{NULL} for unit weights.}
\item{ngrams}{ an integer vector of n-gram lengths to include, or
\code{NULL} to use the \code{select} argument to determine the
n-gram lengths.}
\item{select}{a character vector of terms to count, or \code{NULL} to
count all terms that appear in \code{x}.}
\item{group}{if non-\code{NULL}, a factor, character string, or
integer vector the same length of \code{x} specifying the grouping
behavior.}
}
\details{
\code{term_matrix} tokenizes a set of texts and computes the occurrence
counts for each term. If \code{weights} is non-\code{NULL}, then each
token in text \code{i} increments the count for the corresponding terms
by \code{weights[i]}; otherwise, each appearance increments the count
by one.
If \code{ngrams} is non-\code{NULL}, then multi-type n-grams are
included in the output for all lengths appearing in the \code{ngrams}
argument. If \code{ngrams} is \code{NULL} but \code{select} is
non-\code{NULL}, then all n-grams appearing in the \code{select} set
are included. If both \code{ngrams} and \code{select} are \code{NULL},
then only unigrams (single type terms) are included.
If \code{group} is \code{NULL}, then the output has one row for each
input text. Otherwise, we convert \code{group} to a \code{factor} and
compute one row for each level. Texts with \code{NA} values for
\code{group} get skipped.
}
\value{
A sparse matrix in \code{"dgCMatrix"} format with one column for each term
and one row for each input text or (if \code{group} is non-\code{NULL}) one
row for each grouping level.
If \code{filter$select} is non-\code{NULL}, then the column names will
be equal to \code{filter$select}. Otherwise, the columns are assigned
in arbitrary order.
}
\seealso{
\code{\link{tokens}}, \code{\link{term_counts}}.
}
\examples{
text <- c("A rose is a rose is a rose.",
"A Rose is red, a violet is blue!",
"A rose by any other name would smell as sweet.")
term_matrix(text)
# select certain terms
term_matrix(text, select = c("rose", "red", "violet", "sweet"))
# specify a grouping factor
term_matrix(text, group = c("Good", "Bad", "Good"))
# weight the texts
term_matrix(text, weights = c(1, 2, 10),
group = c("Good", "Bad", "Good"))
# include higher-order n-grams
term_matrix(text, ngrams = 1:3)
# select certain multi-type terms
term_matrix(text, select = c("a rose", "a violet", "sweet", "smell"))
}
<file_sep>/man/text_split.Rd
\name{text_split}
\alias{sentences}
\alias{text_count}
\alias{text_split}
\title{Segmenting Text}
\description{
Segment text into smaller units.
}
\usage{
text_split(x, units = "sentences", size = 1,
filter = token_filter(), crlf_break = FALSE,
suppress = abbreviations("english"))
text_count(x, units = "sentences",
filter = token_filter(), crlf_break = FALSE,
suppress = abbreviations("english"))
}
\arguments{
\item{x}{a text or character vector.}
\item{units}{the block size units, either \code{"sentences"} or
\code{"tokens"}.}
\item{size}{the block size, a positive integer giving the number of
units per block.}
\item{filter}{when \code{units = "tokens"}, a token filter defining
the token boundaries in the text.}
\item{crlf_break}{when \code{units = "sentences"}, a logical value
indicating whether to break sentences on carriage returns or
line feeds.}
\item{suppress}{when \code{units = "sentences"}, a character vector
of sentence break suppressions.}
}
\details{
\code{text_split} splits text into blocks of the given size, measured
in the specified units.
\code{text_count} counts the number of units in each text element.
When \code{units = "sentences"}, units are sentences as defined in
the \sQuote{Sentences} section below. When \code{units = "tokens"},
units are non-\code{NA} tokens defined by the \code{filter}
argument.
}
\section{Sentences}{
Sentences are defined according to a tailored version of the
boundaries specified by
\href{http://unicode.org/reports/tr29/#Sentence_Boundaries}{Unicode
Standard Annex #29, Section 5}.
The UAX 29 sentence boundaries handle Unicode correctly and they give
reasonable behavior across a variety of languages, but they do not
handle abbreviations correctly and by default they treat carriage
returns and line feeds as paragraph separators, often leading to
incorrect breaks. To get around these shortcomings, tailor the
UAX 29 rules using the \code{crlf_break} and the \code{suppress}
arguments.
The UAX 29 rules break after full stops (periods) whenever they are
followed by uppercase letters. Under these rules, the text
\code{"I saw <NAME> today."} gets split into two sentences. To get
around this, we allow a \code{suppress} argument, a list of sentence
break suppressions which, when followed by uppercase characters, do
not signal the end of a sentence.
The UAX 29 rules also specify that a carriage return (CR) or line
feed (LF) indicates the end of a sentence, so that
\code{"A split\nsentence."} gets split into two sentences. This often
leads to incorrect breaks, so by default, with \code{crlf_break = FALSE},
we deviate from the UAX 29 rules and we treat CR and LF like spaces.
To break sentences on CRLF, CR, and LF, specify \code{crlf_break = TRUE}.
}
\value{
For \code{text_split}, the result is a data frame with three columns:
\code{parent}, \code{index}, and \code{text}, and one row for each
text block. The \code{parent} value is the integer index of the parent
text in \code{x}; the \code{index} value is the integer index of the
sentence in its parent; the \code{text} value is the text of the
block, a value of type \code{\link{text}}.
For \code{text_count}, the result is a numeric vector with the same
length and names as the text vector, with the elements given the number
of units in the corresponding text element.
}
\seealso{
\code{\link{abbreviations}}, \code{\link{tokens}}.
}
\examples{
text <- c("I saw <NAME> today.",
"Split across\na line.",
"What. Are. You. Doing????",
"She asked 'do you really mean that?' and I said 'yes.'")
# split text into sentences
text_split(text, units = "sentences")
# count the number of sentences or tokens
text_count(text, units = "sentences")
text_count(text, units = "tokens")
# disable the default sentence suppressions
text_split("I saw <NAME> today.", units = "sentences", suppress = NULL)
# break on CR and LF
text_split("Split across\na line.", units = "sentences", crlf_break = TRUE)
# 2-sentence blocks
text_split(c("What. Are. You. Doing????",
"She asked 'do you really mean that?' and I said 'yes.'"),
units = "sentences", size = 2)
# 4-token blocks
text_split(c("What. Are. You. Doing????",
"She asked 'do you really mean that?' and I said 'yes.'"),
units = "tokens", size = 4)
}
<file_sep>/src/util.c
/*
* Copyright 2017 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <string.h>
#include <Rdefines.h>
#include "rcorpus.h"
/* based on R-Exts Section 5.9.6 "handling lists" */
/*
 * Return the index of the element of `list` whose name equals `str`, or -1
 * when the list is R_NilValue, has no names attribute, or has no matching
 * element.  (Based on R-Exts Section 5.9.6, "handling lists".)
 */
int findListElement(SEXP list, const char *str)
{
	SEXP names;
	int index, count;

	if (list == R_NilValue)
		return -1;

	names = getAttrib(list, R_NamesSymbol);
	if (names == R_NilValue)
		return -1;

	count = LENGTH(list);
	for (index = 0; index < count; index++) {
		if (strcmp(CHAR(STRING_ELT(names, index)), str) == 0)
			return index;
	}
	return -1;
}
/* Look up a named element of `list`; yields R_NilValue when it is absent. */
SEXP getListElement(SEXP list, const char *str)
{
	int pos = findListElement(list, str);

	return (pos < 0) ? R_NilValue : VECTOR_ELT(list, pos);
}
| e8c032e334efeb0a6a70b78ef9a710313ca66a5d | [
"C",
"R"
] | 5 | R | alfahaama/r-corpus | 5f024c231f3dccbd145f4118013931d2f308deae | 1afb9e8ad25b0ea08d0f6da1083c579356ed6438 |
refs/heads/master | <repo_name>Nelavalli/CreateAccount<file_sep>/build.gradle
buildscript {
ext {
springBootVersion = '1.5.2.RELEASE'
camelVersion = '2.18.2'
undertowVersion = '1.2.11.Final'
}
repositories {
mavenCentral()
jcenter()
maven { url 'http://repo.spring.io/plugins-release' }
}
dependencies {
classpath("org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}")
}
}
repositories {
mavenCentral()
jcenter()
maven { url 'http://repo.spring.io/plugins-release' }
}
configurations.all {
resolutionStrategy.eachDependency { DependencyResolveDetails details ->
if (details.requested.group == 'io.undertow') {
details.useVersion undertowVersion
}
}
}
configurations {
compile.exclude module: "spring-boot-starter-tomcat"
}
dependencies {
compile 'org.codehaus.groovy:groovy-all:2.4.8'
compile 'org.springframework.boot:spring-boot-starter'
compile 'org.springframework.boot:spring-boot-starter-web'
compile 'org.springframework.boot:spring-boot-starter-undertow'
compile 'org.springframework.boot:spring-boot-starter-actuator'
compile 'org.springframework.boot:spring-boot-starter-web-services'
compile 'org.springframework.boot:spring-boot-devtools'
compile "org.apache.camel:camel-core:${camelVersion}"
compile "org.apache.camel:camel-restlet:${camelVersion}"
compile "org.restlet.jee:org.restlet.ext.spring:2.3.8"
compile "org.apache.camel:camel-groovy:$camelVersion"
compile "org.apache.camel:camel-undertow:$camelVersion"
compile "org.apache.camel:camel-spring-boot:$camelVersion"
compile "org.apache.camel:camel-jackson:$camelVersion"
compile "org.apache.camel:camel-swagger-java:$camelVersion"
compile "org.apache.camel:camel-spring-boot-starter:$camelVersion"
compile('com.zaxxer:HikariCP:2.5.1')
compile('org.apache.httpcomponents:httpclient:4.5.2')
compile('org.json:json:20160810')
//testCompile('org.springframework.boot:spring-boot-starter-test')
//testCompile('org.hsqldb:hsqldb:2.3.4')
//testCompile('org.mockito:mockito-core:1.10.19')
}
apply plugin: 'groovy'
apply plugin: 'java'
apply plugin: 'eclipse'
apply plugin: 'org.springframework.boot'
apply plugin: 'application'
apply plugin: 'maven'
apply from: 'buildreports.gradle'
jar {
baseName = 'CreateAccount'
version = '1.0.0'
}
archivesBaseName = 'CreateAccount'
sourceCompatibility = 1.8
targetCompatibility = 1.8
springBoot {
executable = true
}
bootRepackage {
mainClass = 'com.dbs.capi.CreateAccountController'
}
<file_sep>/src/main/java/com/dbs/capi/CreateAccountController.java
package com.dbs.capi;

import java.util.HashMap;
import java.util.Map;

import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.restlet.RestletComponent;
import org.apache.camel.spring.boot.FatJarRouter;
import org.restlet.Component;
import org.restlet.ext.spring.SpringServerServlet;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.ServletRegistrationBean;
import org.springframework.context.annotation.Bean;

/**
 * Spring Boot entry point that defines the application's Camel routes and
 * mounts a Restlet servlet under {@code /api/*} to serve them.
 */
@SpringBootApplication
public class CreateAccountController extends FatJarRouter {

    /**
     * Defines the Camel routes: a logging interceptor, a REST GET endpoint at
     * {@code /hello}, and a {@code direct:getPersons} endpoint — each replying
     * with a fixed greeting.  (A dead commented-out timer route was removed.)
     */
    @Override
    public void configure() throws Exception {
        intercept().to("log:intercepted hello");
        restConfiguration().component("restlet");
        rest("/hello").get().to("direct:hello");
        from("direct:hello").transform().simple("Hello World!");
        from("direct:getPersons")
                .transform()
                .simple("Hello World!")
                .to("log:bar");
    }

    /**
     * Registers the Restlet {@link SpringServerServlet} under {@code /api/*}
     * and points it at the {@code "restletComponent"} bean by name.
     */
    @Bean
    public ServletRegistrationBean servletRegistrationBean() {
        SpringServerServlet serverServlet = new SpringServerServlet();
        ServletRegistrationBean regBean = new ServletRegistrationBean(serverServlet, "/api/*");
        // Diamond operator instead of the redundant explicit type arguments.
        Map<String, String> params = new HashMap<>();
        params.put("org.restlet.component", "restletComponent");
        regBean.setInitParameters(params);
        return regBean;
    }

    /** Shared Restlet {@link Component} used by the servlet registration. */
    @Bean
    public Component restletComponent() {
        return new Component();
    }

    /** Camel Restlet component backed by the shared Restlet component bean. */
    @Bean
    public RestletComponent restletComponentService() {
        return new RestletComponent(restletComponent());
    }
}
| 660f14bc601847d5bb71f636c98e7f9353d15cf4 | [
"Java",
"Gradle"
] | 2 | Gradle | Nelavalli/CreateAccount | 93c21a9ba3d856b698f5bb0432fae8cdcd4d8fde | 9488c0ec28ab1750835f63532ec49cbeb93210f4 |
refs/heads/master | <repo_name>atillaordog/Report<file_sep>/Report.php
<?php
/**
* The goal of this class is to separate and make more logical the process of creating a report
*
* Every report has 3 parts connected by this class
* The idea is the following: we have a getetr class that gets the data from somewhere, like DB
* The processor's task is to convert, handle, calculate, generate, etc the data needed for the output
* The outputter simply finalizes the process by generating the needed output (the output can be html, array, json, etc)
*/
class Report
{
/** @var Report\Getterinterface Fetches the raw report data (e.g. from a DB). */
private $getter;
/** @var Report\Processorinterface Transforms the raw data into report values. */
private $processor;
/** @var Report\Outputterinterface Renders the processed data (html, array, json, ...). */
private $outputter;
/**
* Wires together the three collaborators of the report pipeline.
*/
public function __construct(Report\Getterinterface $getter, Report\Processorinterface $processor, Report\Outputterinterface $outputter)
{
$this->getter = $getter;
$this->processor = $processor;
$this->outputter = $outputter;
}
/**
* Controls the whole process: get -> process -> output, and returns the output
* @return mixed whatever the configured outputter produces
*/
public function generate()
{
$this->processor->pass_data($this->getter->get_data());
return $this->outputter->generate_output($this->processor->process());
}
}<file_sep>/Report/Processor.php
<?php
namespace Report;
use Report\Processorinterface as Processorinterface;
/**
 * Base class for report processors: receives the raw getter data, validates
 * it, and lets subclasses implement the actual processing.
 */
abstract class Processor implements Processorinterface
{
    /** @var array Options that influence processing. */
    protected $params;
    /** @var mixed Raw data handed over by the getter. */
    protected $data;

    public function set_params(Array $params = array())
    {
        $this->params = $params;
    }

    /**
     * Subclass hook: return true when $this->data has the expected shape.
     * @return bool
     */
    abstract protected function _validate_input();

    /**
     * Stores the getter's data and validates it immediately.
     * @param mixed $data
     * @throws \Exception when the data does not pass _validate_input()
     */
    public function pass_data($data)
    {
        $this->data = $data;
        if ( !$this->_validate_input() )
        {
            // Fully qualified: inside "namespace Report" a bare `Exception`
            // would resolve to the non-existent Report\Exception class and
            // itself cause a fatal error instead of the intended exception.
            throw new \Exception('The incoming data is not in required format.');
        }
    }

    abstract public function process();
}<file_sep>/Report/Outputter.php
<?php
namespace Report;
use Report\Outputterinterface as Outputterinterface;
/**
 * Base class for outputters; concrete subclasses decide the output format
 * (html, array, json, ...).
 */
abstract class Outputter implements Outputterinterface
{
abstract public function generate_output($processed_data);
}<file_sep>/Report/Outputterinterface.php
<?php
namespace Report;
interface Outputterinterface
{
/**
* Renders the processed report data; the return type is implementation-defined.
* @param mixed $processed_data result of Processorinterface::process()
* @return mixed
*/
function generate_output($processed_data);
}<file_sep>/Report/Processorinterface.php
<?php
namespace Report;
interface Processorinterface
{
/** @param array $params options that influence processing */
function set_params(Array $params);
/** @param mixed $data raw data produced by the getter */
function pass_data($data);
/** @return mixed the processed data handed to the outputter */
function process();
}<file_sep>/Report/Getter.php
<?php
namespace Report;
use Report\Getterinterface as Getterinterface;
/**
 * Base class for data getters; subclasses fetch the raw report data
 * (from a DB, an API, a file, ...).
 */
abstract class Getter implements Getterinterface
{
/** @var array Options that influence how/what data is fetched. */
protected $params;
public function set_params(Array $params = array())
{
$this->params = $params;
}
/** @return mixed the raw data to feed into the processor */
abstract function get_data();
}<file_sep>/Report/Getterinterface.php
<?php
namespace Report;
interface Getterinterface
{
/** @param array $params options that influence how/what data is fetched */
function set_params(Array $params);
/** @return mixed the raw data to feed into the processor */
function get_data();
}<file_sep>/README.md
Report
======
A simple report class
The main goal here is to separate how a report is generated.
The reason is that in many cases an entire report is generated inside a single function of a class, resulting in a huge block of code that is hard to understand and maintain.
In this case everything has its place: getting data in the getter, processing it in the processor and outputting it in the outputter.
How to use example:
```
include('Reports/autoload.php');
class TestGetter extends Report\Getter
{
public function get_data()
{
// Get the data from anywhere
return array('test' => 'a');
}
}
class TestProcessor extends Report\Processor
{
protected function _validate_input()
{
// Validate the input from getter
if ( array_key_exists('test', $this->data) )
{
return true;
}
return false;
}
public function process()
{
// Process the data
$this->data['test'] = 'b';
return $this->data;
}
}
class TestOutputter extends Report\Outputter
{
public function generate_output($processed_data)
{
// Generate the wanted output, let it be anything from array to html, json, anything
print_r($processed_data);
}
}
$report = new Report(
new TestGetter(),
new TestProcessor(),
new TestOutputter()
);
$report->generate();
```
<file_sep>/autoload.php
<?php
/**
 * Autoload file that needs to be loaded to use the Report library
*/
// Absolute path of the library root; class files are resolved relative to it.
if (!defined('REPORT_ROOT')) {
define('REPORT_ROOT', dirname(__FILE__) . DIRECTORY_SEPARATOR);
}
// Register the loader defined below (top-level function declarations are
// available at this point, so registering by name is safe).
spl_autoload_register('autoload');
/**
 * Autoloader for the Report library.
 *
 * @param string $class fully qualified class name (e.g. "Report\Processor")
 * @return bool FALSE when the class was already loaded or not found
 */
function autoload($class)
{
    if ( class_exists($class, FALSE) ) {
        // Already loaded
        return FALSE;
    }
    // Map namespace separators to the platform's directory separator so that
    // namespaced classes such as Report\Processor also resolve on systems
    // where DIRECTORY_SEPARATOR is '/' (the original concatenation only
    // worked on Windows, where '\' happens to be the separator).
    $path = REPORT_ROOT . str_replace('\\', DIRECTORY_SEPARATOR, $class) . '.php';
    if ( file_exists($path) )
    {
        require($path);
    }
    return false;
} | 2e73d3b1cb323cd8c9eef98c071ea7f8a3a764df | [
"Markdown",
"PHP"
] | 9 | PHP | atillaordog/Report | 016132462a7eab155dc0e6c19b96a5a0244b20b4 | ce325bff9ba4d1e9c1abbc5e2045fdb8a868e575 |
refs/heads/master | <repo_name>NITDroid1995/shifr<file_sep>/Form1.cs
using System;
using System.Linq;
using System.Windows.Forms;
namespace shifr
{
// WinForms front-end for a two-square (Wheatstone/Playfair-style) substitution
// cipher over a shuffled 144-character alphabet (12x12 squares).
public partial class Form1 : Form
{
// NOTE(review): textOutput, key1, key2 and the dtt* counters declared in the
// partial class appear unused here — presumably leftovers; confirm before removal.
public static String textInput = "";
public static String textOutput = "";
public static String key1 = "";
public static String key2 = "";
public static string AlphabetA = "ъ=#VКsэGу6ЮлнzSОqhWЫxCФN.HдM8ИdcFjtQ+ITхЦв;awrтDУп:Бж5ЁХЛLчvёгE2Аu70аГ/3Aошeе)бПТЩЭЕZ9mXРЯpygкыi?%юй4Вl,сДьЪзН(ф-PBШOСb_Ж!рoщUЬR*JямnKЧЗ1YМцfkЙи";// character set A
public static string AlphabetB = "ъhRbБHOЪЬp;6zяАзуЕDwцэ(_cGmЗж7PаФыоIчЫпП+К%*N4vЙrЦl5KСТШjёшZiьЯ9НUДJУМQ=б)Ч0W:.?щРЩ/XЭХdCГAkкn1SЖ!юqV,Lфт-Bs3н82EрЛYMд#ВйFОеuИЮлftaхЁyмeгTxogсви";// character set B
// The alphabets are pre-shuffled to strengthen the cipher.
private static int columns = 12;// columns of each square
private static int rows = 12;// rows of each square
private char[,] FirstSquare = new char[rows, columns];// first Wheatstone square
private char[,] SecondSquare = new char[rows, columns];// second Wheatstone square
private string firstKey = "";// first key
private string secondKey = "";// second key
private string GetKeyLess(string _Key, string Alphabet_)// returns the alphabet with the given key's characters removed
{
string ResidualAlphabet = Alphabet_;
for (int i = 0; i < _Key.Length; i++)// alphabet without the key word
{
ResidualAlphabet = ResidualAlphabet.Replace(_Key[i].ToString(), "");
}
return ResidualAlphabet;
}
// Fills the first square: key characters first, then the rest of alphabet A.
private void SetFirstSquare()
{
string KeyLess = GetKeyLess(firstKey, AlphabetA);
int index = 0, k = 0;
for (int i = 0; i < rows; i++)
{
for (int j = 0; j < columns; j++)
{
FirstSquare[i, j] = (index < firstKey.Length) ? firstKey[index++] : KeyLess[k++];
}
}
}
// Fills the second square: key characters first, then the rest of alphabet B.
private void SetSecondSquare()
{
string KeyLess = GetKeyLess(secondKey, AlphabetB);
int index = 0, k = 0;
for (int i = 0; i < rows; i++)
{
for (int j = 0; j < columns; j++)
{
SecondSquare[i, j] = (index < secondKey.Length) ? secondKey[index++] : KeyLess[k++];
}
}
}
// Builds both squares and normalises the static textInput field
// (spaces -> underscores, padded to even length).
// NOTE(review): this mutates the *static* field textInput, not the text
// actually being encoded — Encode() pads its own parameter separately with
// '~'. The textInput manipulation here looks like dead code; confirm.
private void CipherDerivation()
{
SetFirstSquare();
SetSecondSquare();
textInput = textInput.Replace(" ", "_");
if (textInput.Length % 2 == 1)
textInput += "_";
}
// Encryption: splits the input into character pairs and substitutes each
// pair using the two squares.
public string Encode(String inputText)
{
CipherDerivation();
this.firstKey = new string(firstKey.Distinct().ToArray()).Replace(" ", "");
// Remove duplicate characters from the second key.
this.secondKey = new string(secondKey.Distinct().ToArray()).Replace(" ", "");
string Pair = "";
string encrypt = "";
if(inputText.Length %2 != 0)
{
inputText += "~";
}
for (int i = 0; i < inputText.Length; i++)
{
Pair += inputText[i];
if (i % 2 == 1)
{
encrypt += EncodeElementPairCipher(Pair);
Pair = "";
}
}
return encrypt;
}
// Decryption: reverses the pair substitution (square lookup directions are
// swapped relative to Encode), then strips the '~' padding and restores spaces.
public string Decode(String inputShifr)
{
SetFirstSquare();
SetSecondSquare();
this.firstKey = new string(firstKey.Distinct().ToArray()).Replace(" ", "");
// Remove duplicate characters from the second key.
this.secondKey = new string(secondKey.Distinct().ToArray()).Replace(" ", "");
string decrypt = "";
string Pair = "";
for (int i = 0; i < inputShifr.Length; i++)
{
Pair += inputShifr[i];
if (i % 2 == 1)
{
decrypt += DecodeElementPairCipher(Pair);
Pair = "";
}
}
decrypt = decrypt.Replace("~", "");// drop the Encode() padding character
return decrypt.Replace("_", " ").Trim();
}
// Finds the position of a character in the given square; outputs -1/-1 when absent.
private void SearchIndexToArray(char[,] Square, char SearchChar, out int TableRows, out int TableColumns)
{
TableRows = -1;
TableColumns = -1;
int Rows = Square.GetUpperBound(0) + 1;// rows
int Columns = Square.GetUpperBound(1) + 1;// columns
for (int i = 0; i < Rows; i++)
{
for (int j = 0; j < Columns; j++)
{
if (Square[i, j] == SearchChar)
{
TableRows = i;
TableColumns = j;
}
}
}
}
// Replaces a pair of plaintext characters with its encrypted pair.
// Characters not found in the squares pass through unchanged.
private string EncodeElementPairCipher(string StrPair)
{
string Pair = "";
SearchIndexToArray(FirstSquare, StrPair[0], out int Rows_0, out int Columns_0);// position of 1st char in 1st square
SearchIndexToArray(SecondSquare, StrPair[1], out int Rows_1, out int Columns_1);// position of 2nd char in 2nd square
if (Rows_0 == Rows_1)// same row
{
Pair += (Rows_0 == -1 || Rows_1 == -1) ? StrPair : SecondSquare[Rows_0, Columns_0].ToString() + FirstSquare[Rows_1, Columns_1].ToString();
}
else// different rows
{
Pair += (Rows_0 == -1 || Rows_1 == -1) ? StrPair : SecondSquare[Rows_0, Columns_1].ToString() + FirstSquare[Rows_1, Columns_0].ToString();
}
return Pair;
}
// Replaces a pair of ciphertext characters with its decrypted pair
// (mirror of EncodeElementPairCipher: squares are looked up in reverse).
private string DecodeElementPairCipher(string StrPair)
{
string Pair = "";
SearchIndexToArray(SecondSquare, StrPair[0], out int Rows_0, out int Columns_0);// position of 1st char in 2nd square
SearchIndexToArray(FirstSquare, StrPair[1], out int Rows_1, out int Columns_1);// position of 2nd char in 1st square
if (Rows_0 == Rows_1)// same row
{
Pair += (Rows_0 == -1 || Rows_1 == -1) ? StrPair : FirstSquare[Rows_0, Columns_0].ToString() + SecondSquare[Rows_1, Columns_1].ToString();
}
else// different rows
{
Pair += (Rows_0 == -1 || Rows_1 == -1) ? StrPair : FirstSquare[Rows_0, Columns_1].ToString() + SecondSquare[Rows_1, Columns_0].ToString();
}
return Pair;
}
public Form1()
{
InitializeComponent();
}
private void button1_Click(object sender, EventArgs e)
{
// Encrypt button: validate the three inputs, then show the ciphertext.
if(textBox1.Text.Length < 1)
{
MessageBox.Show("Введите входящий текст");
return;
}
if (textBox4.Text.Length < 1)
{
MessageBox.Show("Введите ключ №1");
return;
}
if (textBox3.Text.Length < 1)
{
MessageBox.Show("Введите ключ #2");
return;
}
this.firstKey = textBox4.Text;
this.secondKey = textBox3.Text;
textBox2.Text = Encode(textBox1.Text);
}
private void textBox1_TextChanged(object sender, EventArgs e)
{
// Input text field (no-op handler).
}
private void button2_Click(object sender, EventArgs e)
{
// Decrypt button: same validation as button1_Click, then show the plaintext.
if (textBox1.Text.Length < 1)
{
MessageBox.Show("Введите входящий текст");
return;
}
if (textBox4.Text.Length < 1)
{
MessageBox.Show("Введите ключ №1");
return;
}
if (textBox3.Text.Length < 1)
{
MessageBox.Show("Введите ключ #2");
return;
}
this.firstKey = textBox4.Text;
this.secondKey = textBox3.Text;
textBox2.Text = Decode(textBox1.Text);
}
private void textBox4_TextChanged(object sender, EventArgs e)
{
// Key #1 field (no-op handler).
}
private void textBox3_TextChanged(object sender, EventArgs e)
{
// Key #2 field (no-op handler).
}
private void textBox2_TextChanged(object sender, EventArgs e)
{
// Output text field (no-op handler).
}
}
}
| 7d8e64a3dd0babe6046d57348a252015713b950d | [
"C#"
] | 1 | C# | NITDroid1995/shifr | 9788cd798a75fa77eda4d670319b475a38a93b44 | 93056c03b610183606a3387c1a5dacb02bb89555 |
refs/heads/master | <repo_name>jatinm11/leaderboards<file_sep>/Leaderboards/Leaderboards/LoginViewController.swift
//
// LoginViewController.swift
// Leaderboards
//
// Created by <NAME> on 9/19/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
// Sign-up screen: collects a username, verifies iCloud availability, and
// hands off to the photo-selection step of account creation.
class LoginViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
@IBOutlet weak var usernameTextField: UITextField!
let colorProvider = BackgroundColorProvider()
override func viewDidLoad() {
super.viewDidLoad()
usernameTextField.attributedPlaceholder = NSAttributedString(string: "Enter Username",
attributes: [NSAttributedString.Key.foregroundColor: UIColor.white])
let randomColor = colorProvider.randomColor()
view.backgroundColor = randomColor
navigationItem.setHidesBackButton(true, animated: false)
}
// Dismiss the keyboard when the user taps outside the text field.
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
usernameTextField.resignFirstResponder()
}
// Convenience wrapper for a single-button alert.
func presentSimpleAlert(title: String, message: String) {
let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)
let dismissAction = UIAlertAction(title: "Dismiss", style: .cancel, handler: nil)
alert.addAction(dismissAction)
present(alert, animated: true, completion: nil)
}
// Passes the validated username on to the image-selection screen.
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if segue.identifier == "toNewUserSelectImageVC" {
guard let username = usernameTextField.text, !username.isEmpty else {
self.presentSimpleAlert(title: "Unable to create an account", message: "Be sure you entered a valid username and try again."); return }
let newUserSelectImageVC = segue.destination as? NewUserSelectImageViewController
newUserSelectImageVC?.username = username
}
}
// NOTE(review): this class does not declare UITextFieldDelegate conformance
// and never assigns itself as the text field's delegate, so UIKit will not
// call this method — confirm whether the delegate wiring is missing.
func textFieldShouldReturn(_ textField: UITextField) -> Bool {
self.usernameTextField.resignFirstResponder()
return true
}
// ubiquityIdentityToken is non-nil only when the user is signed in to
// iCloud, which account creation (CloudKit) requires.
@IBAction func nextButtonTapped(_ sender: Any) {
if FileManager.default.ubiquityIdentityToken != nil {
guard let username = usernameTextField.text, !username.isEmpty else {
self.presentSimpleAlert(title: "Unable to create an account", message: "Be sure you entered a valid username and try again."); return }
self.navigationController?.pushViewController(NewUserSelectImageViewController.controller(username: username), animated: true)
}
else {
self.presentSimpleAlert(title: "Error.", message: "Seems like you're not logged in to your iCloud Account. Please login to continue.")
}
}
}
<file_sep>/Leaderboards/Leaderboards/PlayspaceMembersListViewController.swift
//
// PlayspaceMembersListViewController.swift
// Leaderboards
//
// Created by <NAME> on 10/5/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
// Lists the members of the currently selected playspace.
class PlayspaceMembersListViewController: UIViewController {
// Players fetched for the current playspace; nil until the fetch completes.
var members: [Player]?
let colorProvider = BackgroundColorProvider()
@IBOutlet weak var tableView: UITableView!
override func viewDidLoad() {
super.viewDidLoad()
let randomColor = colorProvider.randomColor()
view.backgroundColor = randomColor
tableView.backgroundColor = randomColor
tableView.delegate = self
tableView.dataSource = self
guard let currentPlayspace = PlayspaceController.shared.currentPlayspace else { return }
title = currentPlayspace.name
// NOTE(review): the `success` flag of this completion is ignored — on a
// failed fetch the table is reloaded with nil members. Confirm intent.
PlayerController.shared.fetchPlayersFor(currentPlayspace) { (members, success) in
DispatchQueue.main.async {
self.members = members
self.tableView.reloadData()
}
}
}
}
// Table wiring: row 0 is a static in-table title cell; member rows follow,
// so data-array indices are offset by -1 from indexPath.row.
extension PlayspaceMembersListViewController: UITableViewDelegate, UITableViewDataSource {
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
guard let members = members else { return 0 }
return members.count + 1
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
if indexPath.row == 0 {
let cell = tableView.dequeueReusableCell(withIdentifier: "memberTitleCell", for: indexPath)
return cell
}
guard let cell = tableView.dequeueReusableCell(withIdentifier: "memberCell", for: indexPath) as? PlayspaceMemberTableViewCell else { return PlayspaceMemberTableViewCell() }
cell.updateViewWith(member: members?[indexPath.row - 1])
return cell
}
// Title row is shorter than member rows.
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
if indexPath.row == 0 {
return 44
}
return 80
}
// While the in-table title cell is visible the nav bar shows the playspace
// name; once it scrolls away, the nav bar falls back to "Members".
func tableView(_ tableView: UITableView, willDisplay cell: UITableViewCell, forRowAt indexPath: IndexPath) {
if indexPath.row == 0 {
title = PlayspaceController.shared.currentPlayspace?.name
}
}
func tableView(_ tableView: UITableView, didEndDisplaying cell: UITableViewCell, forRowAt indexPath: IndexPath) {
if indexPath.row == 0 {
title = "Members"
}
}
}
<file_sep>/Leaderboards/Leaderboards/PlayspaceCreatedAlertViewController.swift
//
// PlayspaceCreatedAlertViewController.swift
// Leaderboards
//
// Created by <NAME> on 02/10/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Modal alert shown after a playspace is created: displays the generated
/// password and lets the user share it or dismiss the flow.
class PlayspaceCreatedAlertViewController: UIViewController {

    @IBOutlet var dismissButtonViewContainer: UIView!
    @IBOutlet var savePasswordButtonContainer: UIView!
    @IBOutlet var passwordMessage: UILabel!
    @IBOutlet var messageViewContainer: UIView!

    /// Generated playspace password, injected by the presenter.
    var password: String = ""
    /// Name of the freshly created playspace, injected by the presenter.
    var playspaceName: String = ""

    override func viewDidLoad() {
        super.viewDidLoad()
        // Round the corners of every container the alert is built from.
        for container in [messageViewContainer, savePasswordButtonContainer, dismissButtonViewContainer] {
            container?.layer.cornerRadius = 5
            container?.clipsToBounds = true
        }
        passwordMessage.text = "Your password is: \(password)"
    }

    /// Opens the system share sheet with the password text.
    @IBAction func savePasswordButtonTapped(_ sender: Any) {
        let textToShare = "My playspace \(playspaceName)'s password is: \(password)"
        let activityVC = UIActivityViewController(activityItems: [textToShare], applicationActivities: nil)
        present(activityVC, animated: true, completion: nil)
    }

    /// Dismisses this alert and then the add-playspace screen underneath it.
    @IBAction func dismissButtonTapped(_ sender: Any) {
        guard let addPlayspaceVC = presentingViewController else { return }
        dismiss(animated: true) {
            addPlayspaceVC.dismiss(animated: true, completion: nil)
        }
    }
}
<file_sep>/Leaderboards/Leaderboards/PendingMatchesViewController.swift
//
// PendingMatchesViewController.swift
// Leaderboards
//
// Created by <NAME> on 25/09/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import CloudKit
// Shows matches awaiting the current player's approval; swipe actions on the
// table (see the extension below) approve or decline each one.
class PendingMatchesViewController: UIViewController {
let colorProvider = BackgroundColorProvider()
@IBOutlet weak var tableView: UITableView!
@IBOutlet weak var navigationBar: UINavigationBar!
@IBOutlet weak var backButton: UIBarButtonItem!
// Parallel arrays aligned with MatchController.shared.pendingMatches:
// games[i] / opponents[i] belong to pendingMatches[i].
var games: [Game]?
var opponents: [Player]?
override func viewDidLoad() {
super.viewDidLoad()
tableView.dataSource = self
tableView.delegate = self
tableView.tableFooterView = UIView()
let randomColor = colorProvider.randomColor()
tableView.backgroundColor = randomColor
view.backgroundColor = randomColor
// Transparent navigation bar so the random background shows through.
navigationBar.setBackgroundImage(UIImage(), for: .default)
navigationBar.shadowImage = UIImage()
navigationBar.isTranslucent = true
}
override var preferredStatusBarStyle: UIStatusBarStyle {
return .lightContent
}
// Chained fetches: pending matches -> their games -> their opponents;
// the table is only reloaded once all three succeed.
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
guard let currentPlayer = PlayerController.shared.currentPlayer else { return }
MatchController.shared.fetchPendingMatchesForCurrentPlayer { (success) in
if success {
MatchController.shared.fetchGamesForMatches(MatchController.shared.pendingMatches, completion: { (games, success) in
self.games = games
if success {
MatchController.shared.fetchOpponentsForMatches(MatchController.shared.pendingMatches, player: currentPlayer, completion: { (opponents, success) in
self.opponents = opponents
if success {
DispatchQueue.main.async {
self.tableView.reloadData()
}
}
})
}
})
}
}
}
@IBAction func backButtonTapped(_ sender: Any) {
dismiss(animated: true, completion: nil)
}
// Sets the app icon badge to the given pending-match count.
// NOTE(review): CKModifyBadgeOperation is deprecated by Apple — confirm a
// replacement (e.g. local badge management) before targeting newer SDKs.
func updateBadge(number: Int) {
let operation = CKModifyBadgeOperation(badgeValue: number)
operation.modifyBadgeCompletionBlock = {(error) in
if let error = error{
print("\(error)")
return
}
DispatchQueue.main.async {
UIApplication.shared.applicationIconBadgeNumber = number
}
}
CKContainer.default().add(operation)
}
}
// Table wiring: row 0 is a static in-table title cell; pending-match rows
// follow, so data-array indices are offset by -1 from indexPath.row.
extension PendingMatchesViewController: UITableViewDelegate, UITableViewDataSource {
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return MatchController.shared.pendingMatches.count + 1
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
if indexPath.row == 0 {
let cell = tableView.dequeueReusableCell(withIdentifier: "pendingMatchesTitleCell", for: indexPath)
return cell
}
guard let cell = tableView.dequeueReusableCell(withIdentifier: "pendingMatchCell", for: indexPath) as? PendingMatchTableViewCell else { return PendingMatchTableViewCell() }
cell.updateViewsWith(MatchController.shared.pendingMatches[indexPath.row - 1], opponent: opponents?[indexPath.row - 1], game: games?[indexPath.row - 1])
return cell
}
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
if indexPath.row == 0 {
return 44
}
return 135
}
// The title row cannot be swiped.
func tableView(_ tableView: UITableView, canEditRowAt indexPath: IndexPath) -> Bool {
if indexPath.row == 0 {
return false
}
return true
}
// Swipe actions: "Approve" verifies and saves the match (then announces it
// on Slack); "Decline" deletes the pending match. Both remove the row from
// the three parallel collections and refresh the app badge.
func tableView(_ tableView: UITableView, editActionsForRowAt indexPath: IndexPath) -> [UITableViewRowAction]? {
if indexPath.row == 0 {
return nil
}
let confirmTableViewRowAction = UITableViewRowAction(style: .normal, title: "Approve") { (_, indexPath) in
let verifiedMatch = MatchController.shared.verifyMatch(MatchController.shared.pendingMatches[indexPath.row - 1])
MatchController.shared.updateMatch(verifiedMatch, completion: { (success) in
if success {
DispatchQueue.main.async {
MatchController.shared.sendApprovedMatchToSlack(MatchController.shared.pendingMatches[indexPath.row - 1], opponent: self.opponents?[indexPath.row - 1], game: self.games?[indexPath.row - 1])
MatchController.shared.clearPendingMatch(at: indexPath.row - 1)
self.games?.remove(at: indexPath.row - 1)
self.opponents?.remove(at: indexPath.row - 1)
tableView.deleteRows(at: [indexPath], with: .automatic)
self.updateBadge(number: MatchController.shared.pendingMatches.count)
}
}
})
}
let denyTableViewRowAction = UITableViewRowAction(style: .destructive, title: "Decline") { (_, indexPath) in
MatchController.shared.deletePendingMatch(at: indexPath.row - 1, completion: { (success) in
if success {
DispatchQueue.main.async {
MatchController.shared.clearPendingMatch(at: indexPath.row - 1)
self.games?.remove(at: indexPath.row - 1)
self.opponents?.remove(at: indexPath.row - 1)
tableView.deleteRows(at: [indexPath], with: .automatic)
self.updateBadge(number: MatchController.shared.pendingMatches.count)
}
}
})
}
confirmTableViewRowAction.backgroundColor = UIColor(red: 52.0/255.0, green: 216.0/255.0, blue: 132.0/255.0, alpha: 1.0)
denyTableViewRowAction.backgroundColor = .red
return [confirmTableViewRowAction, denyTableViewRowAction]
}
// While the in-table title cell is visible, the navigation bar title is
// blanked; once it scrolls away, the title moves into the navigation bar.
func tableView(_ tableView: UITableView, willDisplay cell: UITableViewCell, forRowAt indexPath: IndexPath) {
if indexPath.row == 0 {
navigationBar.topItem?.title = ""
}
}
func tableView(_ tableView: UITableView, didEndDisplaying cell: UITableViewCell, forRowAt indexPath: IndexPath) {
if indexPath.row == 0 {
navigationBar.topItem?.title = "Pending Matches"
}
}
}
<file_sep>/Leaderboards/Leaderboards/PlayspaceController.swift
//
// PlayspaceController.swift
// Leaderboards
//
// Created by <NAME> on 9/19/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import Foundation
import CloudKit
// Singleton managing playspace records in CloudKit: creation, joining by
// password, membership, and leaving.
class PlayspaceController {
static let shared = PlayspaceController()
fileprivate static let playspaceKey = "playspace"
var playspaces: [Playspace] = []
var currentPlayspace: Playspace?
// Creates a playspace with a random 4-character password and reports the
// password back on success.
// NOTE(review): addPlayer is called outside the saveRecord completion, so
// the player may be attached before the playspace save is confirmed (and
// even if it fails) — confirm this ordering is intended.
func createPlayspaceWith(name: String, completion: @escaping (_ password: String?, _ success: Bool) -> Void = { _,_ in }) {
let playspace = Playspace(recordID: CKRecord.ID(recordName: UUID().uuidString), name: name, password: randomString(length: 4))
CloudKitManager.shared.saveRecord(playspace.CKRepresentation) { (_, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, false)
return
}
completion(playspace.password, true)
}
if let currentPlayer = PlayerController.shared.currentPlayer {
addPlayer(currentPlayer, toPlayspaceRecord: playspace.CKRepresentation)
}
}
// Looks up a playspace by its password and adds the current player to it.
func joinPlayspaceWith(password: String, completion: @escaping (_ success: Bool) -> Void = { _ in }) {
let predicate = NSPredicate(format: "password == %@", password)
CloudKitManager.shared.fetchRecordsWithType(Playspace.recordType, predicate: predicate, recordFetchedBlock: nil) { (records, error) in
if let error = error {
print(error.localizedDescription)
completion(false)
return
}
// First matching record wins; passwords are assumed unique.
guard let currentPlayer = PlayerController.shared.currentPlayer, let records = records, records.count > 0 else { completion(false); return }
self.addPlayer(currentPlayer, toPlayspaceRecord: records[0], completion: { (success) in
if success {
completion(true)
}
})
}
}
// Appends the playspace reference to the player record, saves it, and
// refreshes the cached current player.
func addPlayer(_ player: Player, toPlayspaceRecord playspaceRecord: CKRecord, completion: @escaping (_ success: Bool) -> Void = { _ in }) {
var player = player
player.playspaces.append(CKRecord.Reference(record: playspaceRecord, action: .none))
PlayerController.shared.updatePlayer(player) { (success) in
if success {
PlayerController.shared.fetchCurrentPlayer(completion: { (success) in
if success {
completion(true)
}
})
}
}
}
// Generates a random alphanumeric string used as a playspace password.
// NOTE(review): `arc4random() % count` has slight modulo bias and is not a
// cryptographic guarantee — acceptable for invite codes, but confirm.
func randomString(length:Int) -> String {
let charSet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
let c = charSet.map { String($0) }
var s:String = ""
for _ in (1...length) {
s.append(c[Int(arc4random()) % c.count])
}
return s
}
// Removes the current player from a playspace: drops the playspace reference
// from the player record, deletes the locally cached player file, and strips
// the player from every game in that playspace, retrying once on a
// server-record-changed conflict.
func removeCurrentPlayerFrom(_ playspace: Playspace, completion: @escaping (_ success: Bool) -> Void = { _ in }) {
guard var currentPlayer = PlayerController.shared.currentPlayer,
let index = currentPlayer.playspaces.firstIndex(of: CKRecord.Reference(recordID: playspace.recordID, action: .none)) else { completion(false); return }
currentPlayer.playspaces.remove(at: index)
CloudKitManager.shared.updateRecords([currentPlayer.CKRepresentation], perRecordCompletion: nil) { (_, error) in
if let error = error {
print(error.localizedDescription)
completion(false)
return
}
// Remove the locally cached copy of the player record.
let tempURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(currentPlayer.recordID.recordName + ".dat")
try? FileManager.default.removeItem(at: tempURL)
GameController.shared.fetchGamesFor(playspace) { (games, success) in
if success {
guard let games = games else { completion(false); return }
// Strip the player reference out of every game that contains it.
var updatedGameRecords = [CKRecord]()
for game in games {
guard var players = game.object(forKey: Game.playersKey) as? [CKRecord.Reference],
let index = players.firstIndex(of: CKRecord.Reference(recordID: currentPlayer.recordID, action: .none)) else { continue }
players.remove(at: index)
game.setObject(players as CKRecordValue, forKey: Game.playersKey)
updatedGameRecords.append(game)
}
CloudKitManager.shared.updateRecordsIfServerRecordChanged(updatedGameRecords, perRecordCompletion: { (_, error) in
// Conflict path: re-apply the removal on the server's copy of
// the record and retry the save once.
// NOTE(review): only a single conflict round is handled here.
if let error = error as? CKError,
error.code == CKError.Code.serverRecordChanged,
let game = error.serverRecord {
guard var players = game.object(forKey: Game.playersKey) as? [CKRecord.Reference],
let index = players.firstIndex(of: CKRecord.Reference(recordID: currentPlayer.recordID, action: .none)) else { return }
players.remove(at: index)
game.setObject(players as CKRecordValue, forKey: Game.playersKey)
CloudKitManager.shared.updateRecordsIfServerRecordChanged([game], perRecordCompletion: { (_, error) in
if let error = error {
print(error.localizedDescription)
completion(false)
return
}
}, completion: { (_, error) in
if let error = error {
print(error.localizedDescription)
completion(false)
return
}
PlayerController.shared.fetchCurrentPlayer(completion: { (success) in
if success {
completion(true)
}
})
})
}
}, completion: { (_, error) in
if let error = error {
print(error.localizedDescription)
completion(false)
return
}
PlayerController.shared.fetchCurrentPlayer(completion: { (success) in
if success {
completion(true)
}
})
})
}
}
}
}
}
<file_sep>/Leaderboards/Leaderboards/LeaderboardTableViewCell.swift
//
// LeaderboardTableViewCell.swift
// Leaderboards
//
// Created by <NAME> on 19/09/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Table-view cell showing one player's leaderboard row:
/// avatar, name, matches played/won/lost and win percentage.
class LeaderboardTableViewCell: UITableViewCell {

    // MARK: - Outlets
    @IBOutlet weak var playerImage: UIImageView!
    @IBOutlet weak var matchesPlayedLabel: UILabel!
    @IBOutlet weak var matchesWonLabel: UILabel!
    @IBOutlet weak var matchesLossLabel: UILabel!
    @IBOutlet weak var winPercentageLabel: UILabel!
    @IBOutlet weak var playerName: UILabel!

    // MARK: - View updating

    /// Populates the cell from a stats dictionary built by the leaderboard screen.
    func updateViewsWith(playerDictionary: [String: Any]?) {
        let player = playerDictionary?["player"] as? Player
        playerImage.image = player?.photo
        playerName.text = player?.username

        matchesPlayedLabel.text = "\(playerDictionary?["played"] ?? 0)"
        matchesWonLabel.text = "\(playerDictionary?["wins"] ?? 0)"
        matchesLossLabel.text = "\(playerDictionary?["losses"] ?? 0)"
        if let winPercentage = playerDictionary?["winPercentage"] as? Double {
            winPercentageLabel.text = "\(Int(winPercentage * 100))"
        }

        // Circular avatar.
        playerImage.layer.cornerRadius = playerImage.frame.width / 2
        playerImage.clipsToBounds = true
    }
}
<file_sep>/Leaderboards/Leaderboards/LeaderboardsViewController.swift
//
// LeaderboardsViewController.swift
// Leaderboards
//
// Created by <NAME> on 10/3/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
class LeaderboardsViewController: UIViewController {
@IBOutlet weak var tableView: UITableView!
@IBOutlet weak var allTimeCurrentMonthButton: UIButton!
@IBOutlet weak var allTimeCurrentMonthButtonContainer: UIView!
@IBAction func allTimeCurrentMonthButtonTapped(_ sender: Any) {
showingAllTime = !showingAllTime
playerStatsArrayOfDictionaries = []
allTimeCurrentMonthButton.isEnabled = false
if showingAllTime {
GameController.shared.fetchAllPlayersForCurrentGame { (success) in
if success {
DispatchQueue.main.async {
NotificationCenter.default.post(name: LeaderboardsViewController.fetchAllPlayersComplete, object: nil)
self.createPlayerStatsDictionaries()
}
MatchController.shared.fetchMatchesForCurrentGame(completion: { (success) in
if success {
DispatchQueue.main.async {
self.updatePlayerStatsDictionaries()
self.allTimeCurrentMonthButton.setTitle("Current Month", for: .normal)
self.allTimeCurrentMonthButton.isEnabled = true
}
}
})
}
}
} else {
GameController.shared.fetchAllPlayersForCurrentGame { (success) in
if success {
DispatchQueue.main.async {
NotificationCenter.default.post(name: LeaderboardsViewController.fetchAllPlayersComplete, object: nil)
self.createPlayerStatsDictionaries()
}
MatchController.shared.fetchMatchesForCurrentGameAndCurrentMonth(completion: { (success) in
if success {
DispatchQueue.main.async {
self.updatePlayerStatsDictionaries()
self.allTimeCurrentMonthButton.setTitle("All Time", for: .normal)
self.allTimeCurrentMonthButton.isEnabled = true
}
}
})
}
}
}
}
var playerStatsArrayOfDictionaries = [[String: Any]]()
let colorProvider = BackgroundColorProvider()
var randomColor: UIColor?
var showingAllTime = false
static let fetchAllPlayersComplete = Notification.Name(rawValue:"fetchAllPlayersComplete")
override func viewDidLoad() {
super.viewDidLoad()
tableView.delegate = self
tableView.dataSource = self
randomColor = colorProvider.randomColor()
view.backgroundColor = randomColor
tableView.backgroundColor = randomColor
allTimeCurrentMonthButton.tintColor = randomColor
allTimeCurrentMonthButtonContainer.layer.cornerRadius = 5
allTimeCurrentMonthButtonContainer.clipsToBounds = true
let addMatchBarButtonItem = UIBarButtonItem(barButtonSystemItem: .add, target: self, action: #selector(addMatchBarButtonItemTapped))
navigationItem.rightBarButtonItem = addMatchBarButtonItem
GameController.shared.fetchAllPlayersForCurrentGame { (success) in
if success {
DispatchQueue.main.async {
NotificationCenter.default.post(name: LeaderboardsViewController.fetchAllPlayersComplete, object: nil)
self.createPlayerStatsDictionaries()
}
MatchController.shared.fetchMatchesForCurrentGameAndCurrentMonth(completion: { (success) in
if success {
DispatchQueue.main.async {
self.updatePlayerStatsDictionaries()
}
}
})
}
}
}
func createPlayerStatsDictionaries() {
for player in GameController.shared.playersBelongingToCurrentGame {
self.playerStatsArrayOfDictionaries.append(["player": player, player.recordID.recordName: player.recordID, "played": 0, "wins": 0, "losses": 0, "winPercentage": 0.0])
}
}
/// Recomputes every player's played/wins/losses/winPercentage from
/// `MatchController.shared.matchesInCurrentGame`, then re-sorts and
/// reloads the table.
///
/// Fix: all counters are reset to zero before tallying, making the method
/// idempotent. Previously, the All Time / Current Month toggle re-fetched
/// a different match set and called this again, accumulating the new
/// tallies on top of the old ones and double-counting every stat.
func updatePlayerStatsDictionaries() {
    // Zero out all stats so repeated calls tally only the current match set.
    for (index, playerStatsDictionary) in playerStatsArrayOfDictionaries.enumerated() {
        var resetDictionary = playerStatsDictionary
        resetDictionary["played"] = 0
        resetDictionary["wins"] = 0
        resetDictionary["losses"] = 0
        resetDictionary["winPercentage"] = 0.0
        playerStatsArrayOfDictionaries[index] = resetDictionary
    }
    let matchesInCurrentGame = MatchController.shared.matchesInCurrentGame
    for match in matchesInCurrentGame {
        for (index, playerStatsDictionary) in playerStatsArrayOfDictionaries.enumerated() {
            // Credit the winner with a game played and a win.
            if let winner = playerStatsDictionary["player"] as? Player,
                winner.recordID == match.winner.recordID,
                let playedForWinnerDictionary = playerStatsDictionary["played"] as? Int,
                let winsForWinnerDictionary = playerStatsDictionary["wins"] as? Int {
                var winnerDictionary = playerStatsDictionary
                winnerDictionary["played"] = playedForWinnerDictionary + 1
                winnerDictionary["wins"] = winsForWinnerDictionary + 1
                winnerDictionary["winPercentage"] = Double(winsForWinnerDictionary + 1) / Double(playedForWinnerDictionary + 1)
                playerStatsArrayOfDictionaries[index] = winnerDictionary
            }
            // Charge the loser with a game played and a loss.
            if let loser = playerStatsDictionary["player"] as? Player,
                loser.recordID == match.loser.recordID,
                let playedForLoserDictionary = playerStatsDictionary["played"] as? Int,
                let winsForLoserDictionary = playerStatsDictionary["wins"] as? Int,
                let lossesForLoserDictionary = playerStatsDictionary["losses"] as? Int {
                var loserDictionary = playerStatsDictionary
                loserDictionary["played"] = playedForLoserDictionary + 1
                loserDictionary["losses"] = lossesForLoserDictionary + 1
                loserDictionary["winPercentage"] = Double(winsForLoserDictionary) / Double(playedForLoserDictionary + 1)
                playerStatsArrayOfDictionaries[index] = loserDictionary
            }
        }
    }
    sortPlayersBy(.wins)
    tableView.reloadData()
}
/// Sorts the player stats in place by the requested column
/// (descending), breaking ties by games played (descending).
func sortPlayersBy(_ column: Column) {
    playerStatsArrayOfDictionaries.sort { lhs, rhs in
        guard let lhsWins = lhs["wins"] as? Int,
            let rhsWins = rhs["wins"] as? Int,
            let lhsWinPercentage = lhs["winPercentage"] as? Double,
            let rhsWinPercentage = rhs["winPercentage"] as? Double,
            let lhsPlayed = lhs["played"] as? Int,
            let rhsPlayed = rhs["played"] as? Int else { return false }
        if column == .wins {
            if lhsWins != rhsWins { return lhsWins > rhsWins }
            // Tie on wins: the player with more games played ranks first.
            return lhsPlayed > rhsPlayed
        }
        if lhsWinPercentage != rhsWinPercentage { return lhsWinPercentage > rhsWinPercentage }
        // Tie on percentage: the player with more games played ranks first.
        return lhsPlayed > rhsPlayed
    }
}
/// Presents the new-match entry screen from the Match storyboard.
@objc func addMatchBarButtonItemTapped() {
    let matchStoryboard = UIStoryboard(name: "Match", bundle: nil)
    let newMatchViewController = matchStoryboard.instantiateViewController(withIdentifier: "newMatchVC")
    present(newMatchViewController, animated: true, completion: nil)
}
/// Hands the tapped player off to the profile screen on "toPlayerDetail".
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    guard segue.identifier == "toPlayerDetail",
        let indexPath = tableView.indexPathForSelectedRow else { return }
    let playerProfileVC = segue.destination as? PlayerProfileViewController
    playerProfileVC?.player = playerStatsArrayOfDictionaries[indexPath.row]["player"] as? Player
}
}
// MARK: - UITableViewDataSource, UITableViewDelegate
extension LeaderboardsViewController: UITableViewDataSource, UITableViewDelegate {

    /// Section 0 holds a single title cell; section 1 holds the leaderboard rows.
    func numberOfSections(in tableView: UITableView) -> Int {
        return 2
    }

    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        if section == 0 {
            return 1
        }
        return playerStatsArrayOfDictionaries.count
    }

    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        if indexPath.section == 0 {
            let cell = tableView.dequeueReusableCell(withIdentifier: "leaderboardsTitleCell", for: indexPath)
            return cell
        }
        guard let cell = tableView.dequeueReusableCell(withIdentifier: "leaderboardsCell", for: indexPath) as? LeaderboardTableViewCell else { return LeaderboardTableViewCell() }
        cell.updateViewsWith(playerDictionary: playerStatsArrayOfDictionaries[indexPath.row])
        return cell
    }

    /// Builds one white header label in the shared header font.
    ///
    /// Fix: the "Player" label previously used a broken placeholder font
    /// name ("<NAME>"), which `UIFont(name:size:)` cannot resolve; all five
    /// header labels now consistently use "Avenir Next" like the others did.
    private func headerLabel(withText text: String) -> UILabel {
        let label = UILabel()
        label.text = text
        label.textColor = .white
        label.font = UIFont(name: "Avenir Next", size: 20.0)
        return label
    }

    /// Builds the column-header row ("Player / Played / Won / Loss / Win %")
    /// shown above the leaderboard section; section 0 gets an empty view.
    func tableView(_ tableView: UITableView, viewForHeaderInSection section: Int) -> UIView? {
        if section == 1 {
            let headerView = UIView(frame: CGRect(x: 0, y: 0, width: tableView.frame.width, height: tableView.sectionHeaderHeight))
            headerView.backgroundColor = randomColor
            let labels = ["Player", "Played", "Won", "Loss", "Win %"].map { headerLabel(withText: $0) }
            let labelStackView = UIStackView(arrangedSubviews: labels)
            labelStackView.axis = .horizontal
            labelStackView.alignment = .fill
            labelStackView.distribution = .equalSpacing
            labelStackView.spacing = 0
            labelStackView.contentMode = .scaleToFill
            labelStackView.autoresizesSubviews = true
            labelStackView.clearsContextBeforeDrawing = true
            headerView.addSubview(labelStackView)
            // Pin the stack view to the header: 8pt horizontal insets, full height.
            let views: [String: Any] = ["labelStackView": labelStackView, "headerView": headerView]
            let headerViewHorizontalConstraint = NSLayoutConstraint.constraints(withVisualFormat: "|-(8)-[labelStackView]-(8)-|", options: [], metrics: nil, views: views)
            let headerViewVerticalConstraint = NSLayoutConstraint.constraints(withVisualFormat: "V:|[labelStackView]|", options: [], metrics: nil, views: views)
            labelStackView.translatesAutoresizingMaskIntoConstraints = false
            headerView.addConstraints(headerViewHorizontalConstraint)
            headerView.addConstraints(headerViewVerticalConstraint)
            return headerView
        }
        return UIView()
    }

    func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
        if indexPath.section == 0 {
            return 44
        }
        return 87
    }

    func tableView(_ tableView: UITableView, heightForHeaderInSection section: Int) -> CGFloat {
        if section == 0 {
            return 0
        }
        return 28
    }

    /// While the title cell is on screen, the nav bar shows the game's name.
    func tableView(_ tableView: UITableView, willDisplay cell: UITableViewCell, forRowAt indexPath: IndexPath) {
        if indexPath.section == 0 && indexPath.row == 0 {
            navigationController?.topViewController?.title = GameController.shared.currentGame?.name
        }
    }

    /// Once the title cell scrolls off screen, fall back to the generic title.
    func tableView(_ tableView: UITableView, didEndDisplaying cell: UITableViewCell, forRowAt indexPath: IndexPath) {
        if indexPath.section == 0 && indexPath.row == 0 {
            navigationController?.topViewController?.title = "Leaderboards"
        }
    }
}
<file_sep>/Leaderboards/Leaderboards/CloudKitManager.swift
//
// CloudKitManager.swift
// Leaderboards
//
// Created by <NAME> on 9/19/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import Foundation
import CloudKit
/// Thin wrapper around the default container's public CloudKit database.
/// All operations are asynchronous; completions run on CloudKit's internal
/// callback queue (NOT the main queue) — callers must dispatch UI work.
class CloudKitManager {
static let shared = CloudKitManager()
let publicDB = CKContainer.default().publicCloudDatabase
/// Fetches a single record by ID from the public database.
func fetchRecord(withID recordID: CKRecord.ID, completion: ((_ record: CKRecord?, _ error: Error?) -> Void)?) {
publicDB.fetch(withRecordID: recordID) { (record, error) in
completion?(record, error)
}
}
/// Fetches multiple records at once; completion receives a [recordID: record] map.
func fetchRecords(withIDs recordIDs: [CKRecord.ID], completion: ((_ records: [CKRecord.ID: CKRecord]?, _ error: Error?) -> Void)?) {
let fetchRecordsOperation = CKFetchRecordsOperation(recordIDs: recordIDs)
fetchRecordsOperation.fetchRecordsCompletionBlock = completion
publicDB.add(fetchRecordsOperation)
}
/// Queries all records of `type` matching `predicate`, transparently
/// following CloudKit cursors so paginated result sets are fully drained.
/// `recordFetchedBlock` fires per record; `completion` fires once with the
/// accumulated array (or an error).
func fetchRecordsWithType(_ type: String, predicate: NSPredicate = NSPredicate(value: true), recordFetchedBlock: ((_ record: CKRecord) -> Void)?, completion: ((_ records: [CKRecord]?, _ error: Error?) -> Void)?) {
var fetchedRecords: [CKRecord] = []
let query = CKQuery(recordType: type, predicate: predicate)
let queryOperation = CKQueryOperation(query: query)
// Shared per-record handler: accumulate and forward to the caller.
let perRecordBlock = { (fetchedRecord: CKRecord) -> Void in
fetchedRecords.append(fetchedRecord)
recordFetchedBlock?(fetchedRecord)
}
queryOperation.recordFetchedBlock = perRecordBlock
// Declared with a placeholder first so the closure can reference itself
// recursively when a cursor indicates more pages remain.
var queryCompletionBlock: (CKQueryOperation.Cursor?, Error?) -> Void = { (_, _) in }
queryCompletionBlock = { (queryCursor: CKQueryOperation.Cursor?, error: Error?) -> Void in
if let queryCursor = queryCursor {
// there are more results
let continuedQueryOperation = CKQueryOperation(cursor: queryCursor)
continuedQueryOperation.recordFetchedBlock = perRecordBlock
continuedQueryOperation.queryCompletionBlock = queryCompletionBlock
self.publicDB.add(continuedQueryOperation)
} else {
completion?(fetchedRecords, error)
}
}
queryOperation.queryCompletionBlock = queryCompletionBlock
self.publicDB.add(queryOperation)
}
/// Saves a single record.
func saveRecord(_ record: CKRecord, completion: ((_ record: CKRecord?, _ error: Error?) -> Void)?) {
publicDB.save(record, completionHandler: { (record, error) in
completion?(record, error)
})
}
/// Saves modified records, overwriting only the changed keys
/// (last-writer-wins on each key; server conflicts are not surfaced).
func updateRecords(_ records: [CKRecord], perRecordCompletion: ((_ record: CKRecord?, _ error: Error?) -> Void)?, completion: ((_ records: [CKRecord]?, _ error: Error?) -> Void)?) {
let operation = CKModifyRecordsOperation(recordsToSave: records, recordIDsToDelete: nil)
operation.savePolicy = .changedKeys
operation.queuePriority = .high
operation.qualityOfService = .userInteractive
operation.perRecordCompletionBlock = perRecordCompletion
operation.modifyRecordsCompletionBlock = { (records, recordIDs, error) -> Void in
(completion?(records, error))
}
publicDB.add(operation)
}
/// Like `updateRecords`, but fails a record if the server copy changed
/// since it was fetched (optimistic-locking save policy).
func updateRecordsIfServerRecordChanged(_ records: [CKRecord], perRecordCompletion: ((_ record: CKRecord?, _ error: Error?) -> Void)?, completion: ((_ records: [CKRecord]?, _ error: Error?) -> Void)?) {
let operation = CKModifyRecordsOperation(recordsToSave: records, recordIDsToDelete: nil)
operation.savePolicy = .ifServerRecordUnchanged
operation.queuePriority = .high
operation.qualityOfService = .userInteractive
operation.perRecordCompletionBlock = perRecordCompletion
operation.modifyRecordsCompletionBlock = { (records, recordIDs, error) -> Void in
(completion?(records, error))
}
publicDB.add(operation)
}
/// Deletes a record by ID.
func deleteRecordWithID(_ recordID: CKRecord.ID, completion: ((_ recordID: CKRecord.ID?, _ error: Error?) -> Void)?) {
publicDB.delete(withRecordID: recordID) { (recordID, error) in
completion?(recordID, error)
}
}
}
<file_sep>/Leaderboards/Leaderboards/NewUserSelectImageViewController.swift
//
// NewUserSelectImageViewController.swift
// Leaderboards
//
// Created by <NAME> on 9/20/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Second step of registration: lets the new user pick a profile photo
/// (camera or library) and creates the Player record on submit.
class NewUserSelectImageViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
let colorProvider = BackgroundColorProvider()
// Username chosen on the previous screen; injected via `controller(username:)`.
var username: String?
@IBOutlet weak var playerImageView: UIImageView!
@IBOutlet weak var usernameLabel: UILabel!
@IBOutlet weak var registerButton: UIBarButtonItem!
/// Creates the player record; on success pushes the playspaces screen,
/// on failure shows an alert and re-enables the button.
/// NOTE(review): if `username` is nil the button stays disabled after the
/// early return — confirm that state is unreachable from the prior screen.
@IBAction func registerButtonTapped(_ sender: Any) {
registerButton.isEnabled = false
guard let username = username else { return }
PlayerController.shared.createPlayerWith(username: username, photo: playerImageView.image) { (success) in
DispatchQueue.main.async {
if !success {
self.presentSimpleAlert(title: "Unable to create an account", message: "Make sure you have a network connection, and please try again.")
self.registerButton.isEnabled = true
} else {
let playspacesViewController = UIStoryboard(name: "Login", bundle: nil).instantiateViewController(withIdentifier: "playspacesViewController")
self.navigationController?.pushViewController(playspacesViewController, animated: true)
}
}
}
}
/// Rounds/borders the avatar view and applies the random theme color.
override func viewDidLoad() {
super.viewDidLoad()
playerImageView.layer.cornerRadius = playerImageView.frame.width / 2
playerImageView.layer.borderWidth = 3.0
playerImageView.layer.borderColor = UIColor.white.cgColor
playerImageView.clipsToBounds = true
usernameLabel.text = username
let randomColor = colorProvider.randomColor()
view.backgroundColor = randomColor
}
override var preferredStatusBarStyle: UIStatusBarStyle {
return UIStatusBarStyle.lightContent
}
/// Shows a one-button (Dismiss) alert with the given title and message.
func presentSimpleAlert(title: String, message: String) {
let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)
let dismissAction = UIAlertAction(title: "Dismiss", style: .cancel, handler: nil)
alert.addAction(dismissAction)
present(alert, animated: true, completion: nil)
}
/// Offers photo-library and/or camera sources (whichever are available)
/// via an action sheet, then presents the image picker.
@IBAction func playerImageViewTapped(_ sender: Any) {
let imagePicker = UIImagePickerController()
imagePicker.delegate = self
let alert = UIAlertController(title: "Select Photo Location", message: nil, preferredStyle: .actionSheet)
if UIImagePickerController.isSourceTypeAvailable(.photoLibrary) {
alert.addAction(UIAlertAction(title: "Photo Library", style: .default, handler: { (_) -> Void in
imagePicker.sourceType = .photoLibrary
imagePicker.allowsEditing = true
imagePicker.navigationBar.tintColor = .black
imagePicker.navigationBar.titleTextAttributes = [NSAttributedString.Key.foregroundColor: UIColor.black]
self.present(imagePicker, animated: true, completion: nil)
}))
}
if UIImagePickerController.isSourceTypeAvailable(.camera) {
alert.addAction(UIAlertAction(title: "Camera", style: .default, handler: { (_) -> Void in
imagePicker.sourceType = .camera
imagePicker.allowsEditing = true
self.present(imagePicker, animated: true, completion: nil)
}))
}
alert.addAction(UIAlertAction(title: "Cancel", style: .cancel, handler: nil))
present(alert, animated: true, completion: nil)
}
// MARK: UIImagePickerControllerDelegate
/// Installs the user's edited selection into the avatar view.
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
picker.dismiss(animated: true, completion: nil)
if let image = info[UIImagePickerController.InfoKey.editedImage] as? UIImage {
playerImageView.image = image
}
}
}
extension UIImage {
    /// Scales `image` down to fit within 200x200 while preserving its
    /// aspect ratio (images already within bounds are redrawn at their
    /// original size), then round-trips through JPEG at full quality.
    ///
    /// Fixes:
    /// - Returns the source image on any encode/decode failure instead of
    ///   force-unwrapping (`img!` / `UIImage(data:)!`) and crashing.
    /// - `UIGraphicsEndImageContext` is now guaranteed via `defer`.
    /// - Removed the misleading "50 percent compression" comment — the
    ///   quality used is 1.0 (no lossy reduction beyond JPEG itself).
    ///
    /// NOTE(review): this is an instance method but operates solely on the
    /// `image` parameter, never on `self` — a static helper would be clearer.
    func resizeImage(image: UIImage) -> UIImage {
        var actualHeight: Float = Float(image.size.height)
        var actualWidth: Float = Float(image.size.width)
        let maxHeight: Float = 200.0
        let maxWidth: Float = 200.0
        var imgRatio: Float = actualWidth / actualHeight
        let maxRatio: Float = maxWidth / maxHeight
        let compressionQuality: Float = 1.0 // full JPEG quality
        if actualHeight > maxHeight || actualWidth > maxWidth {
            if imgRatio < maxRatio {
                // Taller than the target ratio: clamp height, scale width.
                imgRatio = maxHeight / actualHeight
                actualWidth = imgRatio * actualWidth
                actualHeight = maxHeight
            } else if imgRatio > maxRatio {
                // Wider than the target ratio: clamp width, scale height.
                imgRatio = maxWidth / actualWidth
                actualHeight = imgRatio * actualHeight
                actualWidth = maxWidth
            } else {
                // Exactly the target ratio: use the maximum dimensions.
                actualHeight = maxHeight
                actualWidth = maxWidth
            }
        }
        let rect = CGRect(x: 0.0, y: 0.0, width: CGFloat(actualWidth), height: CGFloat(actualHeight))
        UIGraphicsBeginImageContext(rect.size)
        defer { UIGraphicsEndImageContext() }
        image.draw(in: rect)
        guard let redrawn = UIGraphicsGetImageFromCurrentImageContext(),
            let imageData = redrawn.jpegData(compressionQuality: CGFloat(compressionQuality)),
            let result = UIImage(data: imageData) else { return image }
        return result
    }
}
// Helper function inserted by Swift 4.2 migrator.
// Converts a typed image-picker info dictionary to its raw-string-keyed form.
fileprivate func convertFromUIImagePickerControllerInfoKeyDictionary(_ input: [UIImagePickerController.InfoKey: Any]) -> [String: Any] {
    let rawKeyedPairs = input.map { entry in (entry.key.rawValue, entry.value) }
    return Dictionary(uniqueKeysWithValues: rawKeyedPairs)
}
// Helper function inserted by Swift 4.2 migrator.
// Unwraps an image-picker info key to its underlying raw string.
fileprivate func convertFromUIImagePickerControllerInfoKey(_ input: UIImagePickerController.InfoKey) -> String {
    let rawKey = input.rawValue
    return rawKey
}
extension NewUserSelectImageViewController {
    /// Instantiates the select-image screen from the Login storyboard,
    /// pre-populated with the username chosen on the previous screen.
    class func controller(username: String) -> UIViewController {
        let loginStoryboard = UIStoryboard(name: "Login", bundle: nil)
        let selectImageVC = loginStoryboard.instantiateViewController(withIdentifier: "selectImageVC") as! NewUserSelectImageViewController
        selectImageVC.username = username
        return selectImageVC
    }
}
<file_sep>/Leaderboards/Leaderboards/LeaderboardViewController.swift
//
// LeaderboardViewController.swift
// Leaderboards
//
// Created by <NAME> on 19/09/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import CloudKit
/// Modal leaderboard for the current game (all verified matches, not just
/// the current month).
/// NOTE(review): the stats-building and sorting logic here duplicates
/// LeaderboardsViewController almost line-for-line — consider extracting a
/// shared stats helper.
class LeaderboardViewController: UIViewController, UITableViewDelegate, UITableViewDataSource {
@IBOutlet weak var leaderboardTableView: UITableView!
@IBOutlet weak var navigationBar: UINavigationBar!
@IBOutlet weak var playersBarButton: UIBarButtonItem!
@IBOutlet weak var backBarButton: UIBarButtonItem!
let colorProvider = BackgroundColorProvider()
// One dictionary per player: "player", "played", "wins", "losses", "winPercentage".
var playerStatsArrayOfDictionaries = [[String: Any]]()
/// Themes the UI, then fetches players and all verified matches for the
/// current game to populate the table.
override func viewDidLoad() {
super.viewDidLoad()
leaderboardTableView.delegate = self
leaderboardTableView.dataSource = self
let randomColor = colorProvider.randomColor()
self.view.backgroundColor = randomColor
leaderboardTableView.backgroundColor = randomColor
playersBarButton.tintColor = randomColor
backBarButton.tintColor = randomColor
navigationBar.layer.cornerRadius = 5
navigationBar.clipsToBounds = true
GameController.shared.fetchAllPlayersForCurrentGame { (success) in
if success {
// Data-model and UI updates must happen on the main queue.
DispatchQueue.main.async {
self.createPlayerStatsDictionaries()
}
MatchController.shared.fetchMatchesForCurrentGame(completion: { (success) in
if success {
DispatchQueue.main.async {
self.updatePlayerStatsDictionaries()
}
}
})
}
}
}
@IBAction func backButtonTapped(_ sender: Any) {
self.dismiss(animated: true, completion: nil)
}
/// NOTE(review): performs the segue and then dismisses self — confirm the
/// intended navigation; dismissing immediately after a segue looks suspect.
@IBAction func playersButtonTapped(_ sender: Any) {
performSegue(withIdentifier: "toLeaderboardsVC", sender: nil)
self.dismiss(animated: true, completion: nil)
}
@IBAction func swipeGestureSwiped(_ sender: Any) {
navigationController?.popViewController(animated: true)
}
override var preferredStatusBarStyle: UIStatusBarStyle {
return UIStatusBarStyle.lightContent
}
/// Seeds `playerStatsArrayOfDictionaries` with a zeroed entry per player.
func createPlayerStatsDictionaries() {
for player in GameController.shared.playersBelongingToCurrentGame {
self.playerStatsArrayOfDictionaries.append(["player": player, player.recordID.recordName: player.recordID, "played": 0, "wins": 0, "losses": 0, "winPercentage": 0.0])
}
}
/// Tallies every fetched match into the stats dictionaries, then sorts by
/// wins and reloads.
/// NOTE(review): counters are not reset first, so calling this twice would
/// double-count — safe here only because it runs once per viewDidLoad.
func updatePlayerStatsDictionaries() {
let matchesInCurrentGame = MatchController.shared.matchesInCurrentGame
for match in matchesInCurrentGame {
for (index, playerStatsDictionary) in playerStatsArrayOfDictionaries.enumerated() {
// Credit the winner with a game played and a win.
if let winner = playerStatsDictionary["player"] as? Player,
winner.recordID == match.winner.recordID,
let playedForWinnerDictionary = playerStatsDictionary["played"] as? Int,
let winsForWinnerDictionary = playerStatsDictionary["wins"] as? Int {
var winnerDictionary = playerStatsDictionary
winnerDictionary["played"] = playedForWinnerDictionary + 1
winnerDictionary["wins"] = winsForWinnerDictionary + 1
winnerDictionary["winPercentage"] = Double((winsForWinnerDictionary + 1)) / Double((playedForWinnerDictionary + 1))
playerStatsArrayOfDictionaries[index] = winnerDictionary
}
// Charge the loser with a game played and a loss.
if let loser = playerStatsDictionary["player"] as? Player,
loser.recordID == match.loser.recordID,
let playedForLoserDictionary = playerStatsDictionary["played"] as? Int,
let winsForLoserDictionary = playerStatsDictionary["wins"] as? Int,
let lossesForLoserDictionary = playerStatsDictionary["losses"] as? Int {
var loserDictionary = playerStatsDictionary
loserDictionary["played"] = playedForLoserDictionary + 1
loserDictionary["losses"] = lossesForLoserDictionary + 1
loserDictionary["winPercentage"] = Double(winsForLoserDictionary) / Double((playedForLoserDictionary + 1))
playerStatsArrayOfDictionaries[index] = loserDictionary
}
}
}
sortPlayersBy(.wins)
leaderboardTableView.reloadData()
}
/// Sorts the stats by the requested column (descending), ties broken by
/// games played (descending).
func sortPlayersBy(_ column: Column) {
playerStatsArrayOfDictionaries.sort { (dictionary1, dictionary2) -> Bool in
if let dictionary1Wins = dictionary1["wins"] as? Int,
let dictionary2Wins = dictionary2["wins"] as? Int,
let dictionary1WinPercentage = dictionary1["winPercentage"] as? Double,
let dictionary2WinPercentage = dictionary2["winPercentage"] as? Double,
let dictionary1Played = dictionary1["played"] as? Int,
let dictionary2Played = dictionary2["played"] as? Int {
if column == .wins {
if dictionary1Wins > dictionary2Wins {
return true
} else if dictionary1Wins == dictionary2Wins {
return dictionary1Played > dictionary2Played
}
} else {
if dictionary1WinPercentage > dictionary2WinPercentage {
return true
} else if dictionary1WinPercentage == dictionary2WinPercentage {
return dictionary1Played > dictionary2Played
}
}
}
return false
}
}
// MARK:- Table View Data Source
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return playerStatsArrayOfDictionaries.count
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "leaderboardCell", for: indexPath) as! LeaderboardTableViewCell
cell.updateViewsWith(playerDictionary: playerStatsArrayOfDictionaries[indexPath.row])
return cell
}
/// Fades each row in as it comes on screen.
func tableView(_ tableView: UITableView, willDisplay cell: UITableViewCell, forRowAt indexPath: IndexPath) {
cell.alpha = 0
cell.backgroundColor = UIColor.clear
UIView.animate(withDuration: 1.0) {
cell.alpha = 1.0
}
}
/// Slides all visible rows up from below with a staggered spring animation.
/// NOTE(review): not referenced from the visible code — confirm it is
/// called elsewhere before relying on it.
func animateTable() {
leaderboardTableView.reloadData()
let cells = leaderboardTableView.visibleCells
let tableViewHeight = leaderboardTableView.bounds.size.height
for cell in cells {
cell.transform = CGAffineTransform(translationX: 0, y: tableViewHeight)
}
var delayCounter = 0
for cell in cells {
UIView.animate(withDuration: 1.0, delay: Double(delayCounter) * 0.05, usingSpringWithDamping: 0.8, initialSpringVelocity: 0, options: .curveEaseInOut, animations: {
cell.transform = CGAffineTransform.identity
}, completion: nil)
delayCounter += 1
}
}
// MARK: - Navigation
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
}
}
// MARK: - Column Enum
/// Leaderboard columns the player stats can be sorted by.
enum Column: String {
case wins
case winPercentage
}
<file_sep>/Leaderboards/Leaderboards/NewMatchViewController.swift
//
// NewMatchViewController.swift
// Leaderboards
//
// Created by <NAME> on 9/20/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import CloudKit
/// Match-entry screen: the current player picks an opponent, enters both
/// scores, and submits; the higher score determines the winner.
class NewMatchViewController: UIViewController {
let colorProvider = BackgroundColorProvider()
@IBOutlet weak var currentPlayerImageView: UIImageView!
@IBOutlet weak var tapOnimageLabel: UILabel!
@IBOutlet weak var currentPlayerNameLabel: UILabel!
@IBOutlet weak var opponentImageView: UIImageView!
@IBOutlet weak var opponentNameLabel: UILabel!
@IBOutlet weak var currentPlayerScoreTextField: UITextField!
@IBOutlet weak var opponentScoreTextField: UITextField!
@IBOutlet weak var currentPlayerTextFieldViewContainer: UIView!
@IBOutlet weak var opponentPlayerTextFieldViewContainer: UIView!
@IBOutlet weak var navigationBar: UINavigationBar!
@IBAction func cancelButtonTapped(_ sender: Any) {
self.dismiss(animated: true, completion: nil)
}
/// Validates both score fields, then creates the match with winner/loser
/// assigned by score. NOTE(review): equal scores are recorded as a win for
/// the opponent (the `else` branch) — confirm ties are intentional here.
@IBAction func submitButtonTapped(_ sender: Any) {
guard let currentPlayerScoreString = currentPlayerScoreTextField.text,
!currentPlayerScoreString.isEmpty,
let currentPlayerScore = Int(currentPlayerScoreString),
let opponentScoreString = opponentScoreTextField.text,
!opponentScoreString.isEmpty,
let opponentScore = Int(opponentScoreString),
let game = GameController.shared.currentGame,
let currentPlayer = PlayerController.shared.currentPlayer,
let opponent = opponent else { return }
if currentPlayerScore > opponentScore {
MatchController.shared.createMatch(game: game, winner: currentPlayer, winnerScore: currentPlayerScore, loser: opponent, loserScore: opponentScore, completion: { (success) in
if success {
self.dismiss(animated: true, completion: nil)
}
})
} else {
MatchController.shared.createMatch(game: game, winner: opponent, winnerScore: opponentScore, loser: currentPlayer, loserScore: currentPlayerScore, completion: { (success) in
if success {
self.dismiss(animated: true, completion: nil)
}
})
}
}
/// Opens the opponent-selection screen; that screen writes back into
/// `opponent` via the `newMatchVC` back-reference.
@IBAction func opponentImageOrLabelTapped(_ sender: Any) {
let selectOpponentVC = UIStoryboard(name: "Match", bundle: nil).instantiateViewController(withIdentifier: "selectOpponentVC") as? SelectOpponentViewController
selectOpponentVC?.newMatchVC = self
tapOnimageLabel.isHidden = true
present(selectOpponentVC!, animated: true, completion: nil)
}
/// Dismisses the keyboard when the user taps outside the text fields.
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
currentPlayerScoreTextField.resignFirstResponder()
opponentScoreTextField.resignFirstResponder()
}
// Opponent chosen on the select-opponent screen; nil until one is picked.
var opponent: Player?
override func viewDidLoad() {
super.viewDidLoad()
let randomColor = colorProvider.randomColor()
view.backgroundColor = randomColor
// Round both avatars; the current player also gets a white border now,
// the opponent gets its border in viewWillAppear once selected.
currentPlayerImageView.layer.cornerRadius = currentPlayerImageView.frame.width / 2
currentPlayerImageView.clipsToBounds = true
currentPlayerImageView.layer.borderColor = UIColor.white.cgColor
currentPlayerImageView.layer.borderWidth = 3.0
opponentImageView.layer.cornerRadius = opponentImageView.frame.width / 2
opponentImageView.clipsToBounds = true
currentPlayerNameLabel.text = "You"
currentPlayerImageView.image = PlayerController.shared.currentPlayer?.photo
currentPlayerTextFieldViewContainer.layer.cornerRadius = 5
opponentPlayerTextFieldViewContainer.layer.cornerRadius = 5
tapOnimageLabel.isHidden = false
// Make the navigation bar fully transparent.
navigationBar.setBackgroundImage(UIImage(), for: .default)
navigationBar.shadowImage = UIImage()
navigationBar.isTranslucent = true
}
/// Refreshes the opponent's name/photo after returning from selection.
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if let opponent = opponent {
opponentNameLabel.text = opponent.username
opponentImageView.image = opponent.photo
}
opponentImageView.layer.borderWidth = 3.0
opponentImageView.layer.borderColor = UIColor.white.cgColor
}
override var preferredStatusBarStyle: UIStatusBarStyle {
return .lightContent
}
}
<file_sep>/Leaderboards/Leaderboards/MatchController.swift
//
// LeaderboardsController.swift
// Leaderboards
//
// Created by <NAME> on 19/09/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import Foundation
import CloudKit
class MatchController {
static let shared = MatchController()
// Unverified matches awaiting the current player's confirmation.
var pendingMatches = [Match]()
// Verified matches of the currently selected game (all-time or month,
// depending on which fetch last ran).
var matchesInCurrentGame = [Match]()
/// Creates an unverified Match record in CloudKit for `game` with the
/// given winner/loser and scores; the current player is recorded as the
/// match's creator.
/// - Parameter completion: `true` once the record is saved, `false` when
///   there is no current player or the save fails.
func createMatch(game: Game, winner: Player, winnerScore: Int, loser: Player, loserScore: Int, completion: @escaping (_ success: Bool) -> Void) {
guard let creator = PlayerController.shared.currentPlayer else { completion(false); return }
// scoreString appears to be phrased from the NON-creator's perspective
// (the player who must verify the match): when the creator won, the other
// participant sees "loss", and vice versa — TODO confirm intent.
var scoreString = ""
if winner.recordID == creator.recordID {
scoreString = "\(loserScore) - \(winnerScore) loss"
} else {
scoreString = "\(winnerScore) - \(loserScore) win"
}
let match = Match(recordID: CKRecord.ID(recordName: UUID().uuidString), game: CKRecord.Reference(record: game.CKRepresentation, action: .none), winner: CKRecord.Reference(record: winner.CKRepresentation, action: .none), winnerScore: winnerScore, loser: CKRecord.Reference(record: loser.CKRepresentation, action: .none), loserScore: loserScore, verified: false, timestamp: Date(), creator: CKRecord.Reference(record: creator.CKRepresentation, action: .none), participants: [CKRecord.Reference(record: winner.CKRepresentation, action: .none), CKRecord.Reference(record: loser.CKRepresentation, action: .none)], creatorString: "\(creator.username.uppercased())", scoreString: scoreString.uppercased(), gameString: "\(game.name.uppercased())")
CloudKitManager.shared.saveRecord(match.CKRepresentation) { (_, error) in
if let error = error {
print(error.localizedDescription)
completion(false)
return
}
completion(true)
}
}
/// Fetches unverified matches in which the current player participated but
/// did not create — i.e. matches awaiting this player's verification —
/// newest first, and caches them in `pendingMatches`.
///
/// Fix: removed the large block of dead, commented-out code left from an
/// earlier `fetchRecordsWithType`-based implementation.
/// - Parameter completion: `true` on success; `false` when there is no
///   current player or the query fails.
func fetchPendingMatchesForCurrentPlayer(completion: @escaping (_ success: Bool) -> Void = { _ in }) {
    guard let currentPlayer = PlayerController.shared.currentPlayer else { completion(false); return }
    // Pending == current player is a participant, match is unverified,
    // and someone else created it.
    let currentPlayerIsParticipantPredicate = NSPredicate(format: "participants CONTAINS %@", currentPlayer.CKRepresentation)
    let matchIsNotVerifiedPredicate = NSPredicate(format: "verified == false")
    let currentPlayerIsNotCreatorPredicate = NSPredicate(format: "creator != %@", currentPlayer.CKRepresentation)
    let pendingMatchesForCurrentPlayerCompoundPredicate = NSCompoundPredicate(andPredicateWithSubpredicates: [currentPlayerIsParticipantPredicate, matchIsNotVerifiedPredicate, currentPlayerIsNotCreatorPredicate])
    let query = CKQuery(recordType: Match.recordType, predicate: pendingMatchesForCurrentPlayerCompoundPredicate)
    query.sortDescriptors = [NSSortDescriptor(key: "timestamp", ascending: false)]
    CloudKitManager.shared.publicDB.perform(query, inZoneWith: nil) { (records, error) in
        if let error = error {
            print(error.localizedDescription)
            completion(false)
            return
        }
        guard let pendingMatchRecords = records else { completion(false); return }
        self.pendingMatches = pendingMatchRecords.compactMap { Match(record: $0) }
        completion(true)
    }
}
/// Fetches all verified matches in which the current player participated,
/// newest first, and hands them to the completion (no caching).
///
/// Fix: removed the dead, commented-out `fetchRecordsWithType` variant.
/// - Parameter completion: `(matches, true)` on success; `(nil, false)`
///   when there is no current player or the query fails.
func fetchMatchesForCurrentPlayer(completion: @escaping (_ matches: [Match]?, _ success: Bool) -> Void = { _, _ in }) {
    guard let currentPlayer = PlayerController.shared.currentPlayer else { completion(nil, false); return }
    let currentPlayerIsParticipantPredicate = NSPredicate(format: "participants CONTAINS %@", currentPlayer.CKRepresentation)
    let matchIsVerifiedPredicate = NSPredicate(format: "verified == true")
    let matchesForCurrentPlayerCompoundPredicate = NSCompoundPredicate(andPredicateWithSubpredicates: [currentPlayerIsParticipantPredicate, matchIsVerifiedPredicate])
    let query = CKQuery(recordType: Match.recordType, predicate: matchesForCurrentPlayerCompoundPredicate)
    query.sortDescriptors = [NSSortDescriptor(key: "timestamp", ascending: false)]
    CloudKitManager.shared.publicDB.perform(query, inZoneWith: nil) { (records, error) in
        if let error = error {
            print(error.localizedDescription)
            completion(nil, false)
            return
        }
        guard let matchRecords = records else { completion(nil, false); return }
        let matches = matchRecords.compactMap { Match(record: $0) }
        completion(matches, true)
    }
}
/// Fetches both the game record and the opponent (the match's creator)
/// for a pending match in a single batched CloudKit request.
/// - Parameter completion: `(game, opponent, true)` on success, otherwise
///   `(nil, nil, false)`.
func fetchGameAndOpponentFor(_ match: Match, completion: @escaping (_ game: Game?, _ opponent: Player?, _ success: Bool) -> Void = { _,_,_ in }) {
let gameRecordID = match.game.recordID
// "Opponent" here is the match creator — this is used from the pending-
// match (verification) flow where the other participant created the match.
let opponentRecordID = match.creator.recordID
CloudKitManager.shared.fetchRecords(withIDs: [gameRecordID, opponentRecordID]) { (gameOpponentDictionary, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, nil, false)
return
}
guard let gameOpponentDictionary = gameOpponentDictionary,
let gameRecord = gameOpponentDictionary[gameRecordID],
let opponentRecord = gameOpponentDictionary[opponentRecordID],
let game = Game(record: gameRecord),
let opponent = Player(record: opponentRecord) else { completion(nil, nil, false); return }
completion(game, opponent, true)
}
}
/// Fetches the full player record of the match's creator (the opponent in
/// the verification flow), e.g. to display their photo.
func fetchOpponentImageFor(_ match: Match, completion: @escaping (_ opponent: Player?, _ success: Bool) -> Void = { _,_ in }) {
    let creatorRecordID = match.creator.recordID
    CloudKitManager.shared.fetchRecord(withID: creatorRecordID) { (record, error) in
        if let error = error {
            print(error.localizedDescription)
            completion(nil, false)
            return
        }
        guard let fetchedRecord = record else {
            completion(nil, false)
            return
        }
        completion(Player(record: fetchedRecord), true)
    }
}
/// Returns a copy of `match` flagged as verified; the input is untouched.
func verifyMatch(_ match: Match) -> Match {
    var verifiedMatch = match
    verifiedMatch.verified = true
    return verifiedMatch
}
/// Pushes the match's current state to CloudKit (changed-keys save).
/// - Parameter completion: `true` when the update succeeds.
func updateMatch(_ match: Match, completion: @escaping (_ success: Bool) -> Void = { _ in }) {
    CloudKitManager.shared.updateRecords([match.CKRepresentation], perRecordCompletion: nil) { (_, error) in
        guard error == nil else {
            print(error!.localizedDescription)
            completion(false)
            return
        }
        completion(true)
    }
}
/// Removes the pending match at `index` from the local cache only
/// (no CloudKit call).
///
/// Fix: guards the index so an out-of-range value (e.g. from a stale
/// table row after a concurrent refresh) is ignored instead of crashing.
func clearPendingMatch(at index: Int) {
    guard pendingMatches.indices.contains(index) else { return }
    pendingMatches.remove(at: index)
}
/// Deletes the CloudKit record of the pending match at `index`.
/// Note: only the remote record is deleted here; callers remove the local
/// entry separately (see `clearPendingMatch(at:)`).
///
/// Fix: guards the index so an out-of-range value reports failure via the
/// completion instead of crashing on the array subscript.
/// - Parameter completion: `true` when the remote deletion succeeds.
func deletePendingMatch(at index: Int, completion: @escaping (_ success: Bool) -> Void = { _ in }) {
    guard pendingMatches.indices.contains(index) else { completion(false); return }
    let deletedPendingMatch = pendingMatches[index]
    CloudKitManager.shared.deleteRecordWithID(deletedPendingMatch.recordID) { (_, error) in
        if let error = error {
            print(error.localizedDescription)
            completion(false)
            return
        }
        completion(true)
    }
}
/// Fetches all verified matches belonging to the current game and caches them
/// in `matchesInCurrentGame`. Fails (completion(false)) when no current game
/// is selected, the fetch errors, or no records come back.
func fetchMatchesForCurrentGame(completion: @escaping (_ success: Bool) -> Void = { _ in }) {
    guard let currentGame = GameController.shared.currentGame else {
        completion(false)
        return
    }
    let predicate = NSCompoundPredicate(andPredicateWithSubpredicates: [
        NSPredicate(format: "game == %@", currentGame.recordID),
        NSPredicate(format: "verified == true")
    ])
    CloudKitManager.shared.fetchRecordsWithType(Match.recordType, predicate: predicate, recordFetchedBlock: nil) { (records, error) in
        if let error = error {
            print(error.localizedDescription)
            completion(false)
            return
        }
        guard let matchRecords = records else {
            completion(false)
            return
        }
        self.matchesInCurrentGame = matchRecords.compactMap { Match(record: $0) }
        completion(true)
    }
}
/// Like `fetchMatchesForCurrentGame`, but restricted to verified matches whose
/// timestamp falls inside the current Gregorian calendar month. Results are
/// cached in `matchesInCurrentGame`.
func fetchMatchesForCurrentGameAndCurrentMonth(completion: @escaping (_ success: Bool) -> Void = { _ in }) {
    guard let currentGame = GameController.shared.currentGame else {
        completion(false)
        return
    }
    // Start of the current month bounds the timestamp filter below.
    let calendar = Calendar(identifier: .gregorian)
    guard let monthStartDate = calendar.dateInterval(of: .month, for: Date())?.start else {
        completion(false)
        return
    }
    let predicate = NSCompoundPredicate(andPredicateWithSubpredicates: [
        NSPredicate(format: "game == %@", currentGame.recordID),
        NSPredicate(format: "verified == true"),
        NSPredicate(format: "timestamp > %@", monthStartDate as NSDate)
    ])
    CloudKitManager.shared.fetchRecordsWithType(Match.recordType, predicate: predicate, recordFetchedBlock: nil) { (records, error) in
        if let error = error {
            print(error.localizedDescription)
            completion(false)
            return
        }
        guard let matchRecords = records else {
            completion(false)
            return
        }
        self.matchesInCurrentGame = matchRecords.compactMap { Match(record: $0) }
        completion(true)
    }
}
/// Fetches all verified matches for `game` in which `player` participated,
/// sorted newest first by `timestamp`. Completion receives the mapped
/// `Match` array, or (`nil`, `false`) on failure.
/// (The superseded, commented-out implementation that used
/// `fetchRecordsWithType` has been removed.)
func fetchMatchesForGame(_ game: Game, andPlayer player: Player, completion: @escaping (_ matches: [Match]?, _ success: Bool) -> Void = { _, _ in }) {
    let predicate = NSCompoundPredicate(andPredicateWithSubpredicates: [
        NSPredicate(format: "game == %@", game.recordID),
        NSPredicate(format: "verified == true"),
        NSPredicate(format: "participants CONTAINS %@", player.recordID)
    ])
    let query = CKQuery(recordType: Match.recordType, predicate: predicate)
    // Newest matches first.
    query.sortDescriptors = [NSSortDescriptor(key: "timestamp", ascending: false)]
    CloudKitManager.shared.publicDB.perform(query, inZoneWith: nil) { (records, error) in
        if let error = error {
            print(error.localizedDescription)
            completion(nil, false)
            return
        }
        guard let matchRecords = records else {
            completion(nil, false)
            return
        }
        let matches = matchRecords.compactMap { Match(record: $0) }
        completion(matches, true)
    }
}
/// Collects every participant in `matches` other than `player` and fetches
/// their Player records in one batch. Opponents are returned in the same
/// order their record IDs were gathered; IDs missing from the response or
/// failing `Player(record:)` are silently skipped.
func fetchOpponentsForMatches(_ matches: [Match], player: Player, completion: @escaping (_ opponents: [Player]?, _ success: Bool) -> Void = { _, _ in }) {
    let opponentRecordIDs: [CKRecord.ID] = matches.flatMap { match in
        match.participants
            .filter { $0.recordID != player.recordID }
            .map { $0.recordID }
    }
    CloudKitManager.shared.fetchRecords(withIDs: opponentRecordIDs) { (recordsByID, error) in
        if let error = error {
            print(error.localizedDescription)
            completion(nil, false)
            return
        }
        guard let recordsByID = recordsByID else {
            completion(nil, false)
            return
        }
        let opponents = opponentRecordIDs.compactMap { recordID -> Player? in
            guard let record = recordsByID[recordID] else { return nil }
            return Player(record: record)
        }
        completion(opponents, true)
    }
}
/// Fetches the Game for each match and returns one Game per match, preserving
/// the caller's match order (duplicates included).
/// Improvement: distinct games are fetched only once — the previous version
/// passed duplicate record IDs into a single batch fetch when many matches
/// shared the same game.
func fetchGamesForMatches(_ matches: [Match], completion: @escaping (_ games: [Game]?, _ success: Bool) -> Void = { _, _ in }) {
    let gameRecordIDs = matches.map { $0.game.recordID }
    let uniqueGameRecordIDs = Array(Set(gameRecordIDs))
    CloudKitManager.shared.fetchRecords(withIDs: uniqueGameRecordIDs) { (gameRecordsDict, error) in
        if let error = error {
            print(error.localizedDescription)
            completion(nil, false)
            return
        }
        guard let gameRecordsDict = gameRecordsDict else {
            completion(nil, false)
            return
        }
        // Rebuild the per-match list (with duplicates) from the deduplicated fetch.
        var games = [Game]()
        for gameRecordID in gameRecordIDs {
            if let gameRecord = gameRecordsDict[gameRecordID],
                let game = Game(record: gameRecord) {
                games.append(game)
            }
        }
        completion(games, true)
    }
}
/// Posts an approved match result to Slack via an incoming webhook.
/// Fires only when the game belongs to one specific hard-coded playspace.
func sendApprovedMatchToSlack(_ match: Match, opponent: Player?, game: Game?) {
    guard let opponent = opponent,
        let game = game,
        let currentPlayer = PlayerController.shared.currentPlayer else { return }
    // Only this specific playspace gets Slack notifications.
    let slackPlayspace = CKRecord.Reference(recordID: CKRecord.ID(recordName: "03E8257B-5BF0-4A43-98DD-B8B276B79F60"), action: .none)
    guard game.playspace == slackPlayspace else { return }
    // Compose the message from the current player's perspective.
    let matchString: String
    if match.winner.recordID == currentPlayer.recordID {
        matchString = "*\(currentPlayer.username)* `won` vs. *\(opponent.username)* `\(match.winnerScore) - \(match.loserScore)` in *\(game.name.uppercased())*"
    } else {
        matchString = "*\(currentPlayer.username)* `lost` vs. *\(opponent.username)* `\(match.loserScore) - \(match.winnerScore)` in *\(game.name.uppercased())*"
    }
    let json: [String: Any] = ["text": matchString]
    guard let jsonData = try? JSONSerialization.data(withJSONObject: json) else { return }
    // SECURITY: this webhook URL embeds a Slack secret in source control; it
    // should be moved to server-side configuration or an untracked config file.
    // The previous force unwrap is replaced with a guarded unwrap.
    guard let url = URL(string: "https://hooks.slack.com/services/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") else { return }
    var request = URLRequest(url: url)
    request.setValue("application/json", forHTTPHeaderField: "Content-type")
    request.httpMethod = "POST"
    request.httpBody = jsonData
    let task = URLSession.shared.dataTask(with: request) { data, response, error in
        guard let data = data, error == nil else {
            print(error?.localizedDescription ?? "No data")
            return
        }
        // Log Slack's response payload for debugging.
        if let responseJSON = (try? JSONSerialization.jsonObject(with: data, options: [])) as? [String: Any] {
            print(responseJSON)
        }
    }
    task.resume()
}
}
<file_sep>/Leaderboards/Leaderboards/PlayerProfileViewController2.swift
import UIKit
/// Profile screen for the current player, with a segmented control that
/// switches between the profile view and the pending-matches list.
class PlayerProfileViewController2: UIViewController {
// MARK: - Outlets
@IBOutlet weak var playerImageView: UIImageView!
@IBOutlet weak var usernameLabel: UILabel!
@IBOutlet weak var segmentedControl: UISegmentedControl!
@IBOutlet weak var profileView: UIView!
@IBOutlet weak var pendingMatchesView: UIView!
@IBOutlet weak var tableView: UITableView!
// Dismisses the modally presented profile screen.
@IBAction func backBarButtonItemTapped(_ sender: Any) {
dismiss(animated: true, completion: nil)
}
// Toggles between profile (segment 0) and pending matches (segment 1) by
// flipping each view's alpha; both views stay in the hierarchy.
@IBAction func segmentedControlSegmentChanged(_ sender: Any) {
switch segmentedControl.selectedSegmentIndex {
case 0:
profileView.alpha = 1
pendingMatchesView.alpha = 0
case 1:
profileView.alpha = 0
pendingMatchesView.alpha = 1
default:
break
}
}
override func viewDidLoad() {
super.viewDidLoad()
tableView.delegate = self
tableView.dataSource = self
// Start on the profile segment with pending matches hidden.
pendingMatchesView.alpha = 0
// Self-sizing rows for the pending-match cells.
tableView.rowHeight = UITableView.automaticDimension
tableView.estimatedRowHeight = 100
// Load the current player first, then their pending matches; both
// completion handlers hop back to the main queue before touching UI.
PlayerController.shared.fetchCurrentPlayer { (success) in
if success {
DispatchQueue.main.async {
// self.playerImageView.image = PlayerController.shared.currentPlayer?.photo
// self.usernameLabel.text = PlayerController.shared.currentPlayer?.username
MatchController.shared.fetchPendingMatchesForCurrentPlayer { (success) in
if success {
DispatchQueue.main.async {
self.tableView.reloadData()
}
}
}
}
}
}
}
}
// MARK: - UITableViewDataSource, UITableViewDelegate
// Drives the pending-matches table: one row per pending match, with swipe
// actions to confirm (verify + upload) or deny (delete from CloudKit).
extension PlayerProfileViewController2: UITableViewDataSource, UITableViewDelegate {
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return MatchController.shared.pendingMatches.count
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
guard let cell = tableView.dequeueReusableCell(withIdentifier: "pendingMatchCell", for: indexPath) as? PendingMatchTableViewCell else { return PendingMatchTableViewCell() }
//cell.updateViewsWith(MatchController.shared.pendingMatches[indexPath.row])
return cell
}
func tableView(_ tableView: UITableView, editActionsForRowAt indexPath: IndexPath) -> [UITableViewRowAction]? {
// Confirm: mark the match verified, push the update to CloudKit, then
// drop it from the local cache and the table on the main queue.
let confirmTableViewRowAction = UITableViewRowAction(style: .normal, title: "Confirm") { (_, indexPath) in
let verifiedMatch = MatchController.shared.verifyMatch(MatchController.shared.pendingMatches[indexPath.row])
MatchController.shared.updateMatch(verifiedMatch, completion: { (success) in
if success {
DispatchQueue.main.async {
MatchController.shared.clearPendingMatch(at: indexPath.row)
tableView.deleteRows(at: [indexPath], with: .automatic)
}
}
})
}
// Deny: delete the pending match record from CloudKit, then remove the
// local copy and the row.
let denyTableViewRowAction = UITableViewRowAction(style: .destructive, title: "Deny") { (_, indexPath) in
MatchController.shared.deletePendingMatch(at: indexPath.row, completion: { (success) in
if success {
DispatchQueue.main.async {
MatchController.shared.clearPendingMatch(at: indexPath.row)
tableView.deleteRows(at: [indexPath], with: .automatic)
}
}
})
}
// Green for confirm, red for deny.
confirmTableViewRowAction.backgroundColor = UIColor(red: 52.0/255.0, green: 216.0/255.0, blue: 132.0/255.0, alpha: 1.0)
denyTableViewRowAction.backgroundColor = .red
return [confirmTableViewRowAction, denyTableViewRowAction]
}
}
<file_sep>/Leaderboards/Leaderboards/PlayersListViewController.swift
//
// PlayersListViewController.swift
// Leaderboards
//
// Created by <NAME> on 10/3/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Lists the players belonging to the current game; tapping a player row
/// segues to that player's profile.
class PlayersListViewController: UIViewController {

    @IBOutlet weak var tableView: UITableView!

    override func viewDidLoad() {
        super.viewDidLoad()
        tableView.delegate = self
        tableView.dataSource = self
        // Refresh the list whenever the leaderboard finishes fetching players.
        NotificationCenter.default.addObserver(self, selector: #selector(reloadTableView), name: LeaderboardsViewController.fetchAllPlayersComplete, object: nil)
    }

    @objc func reloadTableView() {
        tableView.reloadData()
    }

    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        if segue.identifier == "toPlayerDetail" {
            // BUG FIX: row 0 is the static title cell; only rows >= 1 map into
            // playersBelongingToCurrentGame, so guard against indexing at -1.
            guard let indexPath = tableView.indexPathForSelectedRow,
                indexPath.row > 0 else { return }
            let playerProfileVC = segue.destination as? PlayerProfileViewController
            playerProfileVC?.player = GameController.shared.playersBelongingToCurrentGame[indexPath.row - 1]
        }
    }
}
// MARK: - UITableViewDataSource, UITableViewDelegate
// Row 0 is a static title cell; player rows start at index 1, so data-array
// lookups use `indexPath.row - 1` throughout.
extension PlayersListViewController: UITableViewDataSource, UITableViewDelegate {
func numberOfSections(in tableView: UITableView) -> Int {
return 1
}
// +1 accounts for the title cell at row 0.
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return GameController.shared.playersBelongingToCurrentGame.count + 1
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
if indexPath.row == 0 {
let cell = tableView.dequeueReusableCell(withIdentifier: "playersTitleCell", for: indexPath)
return cell
}
guard let cell = tableView.dequeueReusableCell(withIdentifier: "playerCell", for: indexPath) as? PlayerTableViewCell else { return PlayerTableViewCell() }
let player = GameController.shared.playersBelongingToCurrentGame[indexPath.row - 1]
cell.player = player
return cell
}
// Title cell is shorter (44pt) than player cells (87pt).
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
if indexPath.row == 0 {
return 44
}
return 87
}
// While the in-table title cell is on screen, show the game name in the
// navigation bar; restore the generic "Players" title once it scrolls away.
func tableView(_ tableView: UITableView, willDisplay cell: UITableViewCell, forRowAt indexPath: IndexPath) {
if indexPath.row == 0 {
navigationController?.topViewController?.title = GameController.shared.currentGame?.name
}
}
func tableView(_ tableView: UITableView, didEndDisplaying cell: UITableViewCell, forRowAt indexPath: IndexPath) {
if indexPath.row == 0 {
navigationController?.topViewController?.title = "Players"
}
}
}
<file_sep>/Leaderboards/Leaderboards/PlayspacesViewController.swift
import UIKit
import CloudKit
/// Root screen after login: lists the playspaces (groups) the current player
/// belongs to, shows an empty-state view when there are none, and surfaces a
/// pending-match badge in the navigation bar.
class PlayspacesViewController: UIViewController {
// Supplies a random background colour so each visit looks different.
let colorProvider = BackgroundColorProvider()
var player: Player?
@IBOutlet weak var playspaceButtonViewContainer: UIView!
@IBOutlet weak var tableView: UITableView!
@IBOutlet weak var addplayspaceButton: UIButton!
@IBOutlet var noPlayspaceView: UIView!
override func viewDidLoad() {
super.viewDidLoad()
tableView.delegate = self
tableView.dataSource = self
// Empty footer suppresses separators below the last row.
tableView.tableFooterView = UIView()
noPlayspaceView.isHidden = true
// Tint the whole screen with one random colour per visit.
let randomColor = colorProvider.randomColor()
tableView.backgroundColor = randomColor
view.backgroundColor = randomColor
addplayspaceButton.tintColor = randomColor
noPlayspaceView.backgroundColor = randomColor
playspaceButtonViewContainer.layer.cornerRadius = 5
playspaceButtonViewContainer.clipsToBounds = true
// This is the root screen after login; never show a back button.
navigationItem.setHidesBackButton(true, animated: false)
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if let currentPlayer = PlayerController.shared.currentPlayer {
// Circular bar button showing the player's photo; opens their profile.
let playerImageButton = UIButton(type: .custom)
playerImageButton.addTarget(self, action: #selector(playerImageButtonTapped), for: .touchUpInside)
playerImageButton.setImage(currentPlayer.photo, for: .normal)
playerImageButton.frame = CGRect(x: 0, y: 0, width: 32, height: 32)
playerImageButton.layer.cornerRadius = playerImageButton.frame.height / 2
playerImageButton.clipsToBounds = true
playerImageButton.layer.borderColor = UIColor.white.cgColor
playerImageButton.layer.borderWidth = 2.0
// Explicit width/height constraints keep the custom bar button 32x32.
playerImageButton.addConstraint(NSLayoutConstraint(item: playerImageButton, attribute: .width, relatedBy: .equal, toItem: nil, attribute: .notAnAttribute, multiplier: 1, constant: 32))
playerImageButton.addConstraint(NSLayoutConstraint(item: playerImageButton, attribute: .height, relatedBy: .equal, toItem: nil, attribute: .notAnAttribute, multiplier: 1, constant: 32))
// Round badge showing the pending-match count; opens the pending list.
let pendingMatchesNotificationBadgeButton = UIButton(type: .system)
pendingMatchesNotificationBadgeButton.addTarget(self, action: #selector(pendingMatchesNotificationBadgeButtonTapped), for: .touchUpInside)
pendingMatchesNotificationBadgeButton.frame = CGRect(x: 0, y: 0, width: 32, height: 32)
pendingMatchesNotificationBadgeButton.backgroundColor = .white
pendingMatchesNotificationBadgeButton.tintColor = view.backgroundColor
pendingMatchesNotificationBadgeButton.layer.cornerRadius = pendingMatchesNotificationBadgeButton.frame.height / 2
pendingMatchesNotificationBadgeButton.clipsToBounds = true
pendingMatchesNotificationBadgeButton.layer.borderColor = UIColor.white.cgColor
pendingMatchesNotificationBadgeButton.layer.borderWidth = 1.0
// Refresh playspaces, then pending matches, then the app icon badge —
// each completion hops to the main queue before touching UI.
PlayerController.shared.fetchPlayspacesFor(currentPlayer, completion: { (success) in
if success {
DispatchQueue.main.async {
// Show the empty-state view only when the player has no playspaces.
if PlayspaceController.shared.playspaces.count == 0 {
self.noPlayspaceView.isHidden = false
}
else {
self.noPlayspaceView.isHidden = true
}
self.tableView.reloadData()
MatchController.shared.fetchPendingMatchesForCurrentPlayer(completion: { (success) in
if success {
DispatchQueue.main.async {
// Sync the server-side badge count with the pending-match count.
// NOTE(review): CKModifyBadgeOperation is deprecated in newer
// iOS SDKs — confirm the deployment target still supports it.
let operation = CKModifyBadgeOperation(badgeValue: MatchController.shared.pendingMatches.count)
operation.modifyBadgeCompletionBlock = {(error) in
if let error = error{
print("\(error)")
return
}
DispatchQueue.main.async {
UIApplication.shared.applicationIconBadgeNumber = MatchController.shared.pendingMatches.count
}
}
CKContainer.default().add(operation)
// Only show the numeric badge button when matches are pending.
if MatchController.shared.pendingMatches.count > 0 {
pendingMatchesNotificationBadgeButton.setTitle("\(MatchController.shared.pendingMatches.count)", for: .normal)
self.navigationItem.rightBarButtonItems = [UIBarButtonItem(customView: playerImageButton), UIBarButtonItem(customView: pendingMatchesNotificationBadgeButton)]
} else {
self.navigationItem.rightBarButtonItems = [UIBarButtonItem(customView: playerImageButton)]
}
}
}
})
}
}
})
}
}
// Offers "Join" (existing playspace) or "New" (create playspace) flows.
@IBAction func addplayspaceButtonTapped(_ sender: Any) {
let alert = UIAlertController(title: "Add Playspace", message: nil, preferredStyle: .actionSheet)
alert.addAction(UIAlertAction(title: "Join Playspace", style: .default , handler: { (_) -> Void in
let joinPlayspaceVC = UIStoryboard(name: "JoinPlayspace", bundle: nil).instantiateViewController(withIdentifier: "joinPlayspaceVC")
self.present(joinPlayspaceVC, animated: true, completion: nil)
}))
alert.addAction(UIAlertAction(title: "New Playspace", style: .default, handler: { (_) -> Void in
let addPlayspaceVC = UIStoryboard(name: "Login", bundle: nil).instantiateViewController(withIdentifier: "addPlayspaceVC")
self.present(addPlayspaceVC, animated: true, completion: nil)
}))
alert.addAction(UIAlertAction(title: "Cancel", style: .cancel, handler: nil))
present(alert, animated: true, completion: nil)
}
// Opens the current player's profile (photo bar button).
@objc func playerImageButtonTapped() {
let currentPlayerProfileVC = UIStoryboard(name: "PlayerProfile", bundle: nil).instantiateViewController(withIdentifier: "currentPlayerProfileContainerVC")
present(currentPlayerProfileVC, animated: true, completion: nil)
}
// Opens the pending-matches list (badge bar button).
@objc func pendingMatchesNotificationBadgeButtonTapped() {
let pendingMatchesVC = UIStoryboard(name: "PlayerProfile", bundle: nil).instantiateViewController(withIdentifier: "pendingMatchesVC")
present(pendingMatchesVC, animated: true, completion: nil)
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if segue.identifier == "toGamesVC" {
// Row 0 is the title cell, so the selected playspace is at row - 1.
guard let indexPath = tableView.indexPathForSelectedRow,
let gamesVC = segue.destination as? GamesViewController else { return }
title = "Playspaces"
gamesVC.title = PlayspaceController.shared.playspaces[indexPath.row - 1].name
PlayspaceController.shared.currentPlayspace = PlayspaceController.shared.playspaces[indexPath.row - 1]
}
}
}
// MARK: - UITableViewDataSource, UITableViewDelegate
// Row 0 is a static title cell; playspace rows start at index 1, so data-array
// lookups use `indexPath.row - 1`.
extension PlayspacesViewController: UITableViewDataSource, UITableViewDelegate {
// +1 accounts for the title cell at row 0.
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return PlayspaceController.shared.playspaces.count + 1
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
if indexPath.row == 0 {
let cell = tableView.dequeueReusableCell(withIdentifier: "playspacesTitleCell", for: indexPath)
return cell
}
let cell = tableView.dequeueReusableCell(withIdentifier: "playspaceCell", for: indexPath)
cell.textLabel?.text = "\(PlayspaceController.shared.playspaces[indexPath.row - 1].name)"
cell.textLabel?.textColor = UIColor.white
cell.textLabel?.adjustsFontSizeToFitWidth = true
return cell
}
// Hide the navigation title while the in-table title cell is visible;
// restore it once the cell scrolls off screen.
func tableView(_ tableView: UITableView, willDisplay cell: UITableViewCell, forRowAt indexPath: IndexPath) {
if indexPath.row == 0 {
navigationItem.title = nil
}
}
func tableView(_ tableView: UITableView, didEndDisplaying cell: UITableViewCell, forRowAt indexPath: IndexPath) {
if indexPath.row == 0 {
navigationItem.title = "Playspaces"
}
}
// Swipe action: remove the current player from the playspace on the server,
// then drop the row locally.
func tableView(_ tableView: UITableView, editActionsForRowAt indexPath: IndexPath) -> [UITableViewRowAction]? {
let leaveTableViewRowAction = UITableViewRowAction(style: .normal, title: "Leave") { (_, indexPath) in
let playspace = PlayspaceController.shared.playspaces[indexPath.row - 1]
PlayspaceController.shared.removeCurrentPlayerFrom(playspace, completion: { (success) in
if success {
DispatchQueue.main.async {
PlayspaceController.shared.playspaces.remove(at: indexPath.row - 1)
tableView.deleteRows(at: [indexPath], with: .automatic)
self.tableView.reloadData()
}
}
})
}
leaveTableViewRowAction.backgroundColor = .red
return [leaveTableViewRowAction]
}
// The title cell (row 0) is not editable, which also keeps the Leave action
// from ever indexing the playspaces array at -1.
func tableView(_ tableView: UITableView, canEditRowAt indexPath: IndexPath) -> Bool {
if indexPath.row == 0 {
return false
}
return true
}
}
<file_sep>/README.md
# Leaderboards
Leaderboards is an application for creating your own leaderboards, challenging your friends at any game, and seeing who is the best of all — and who is crushing the charts.
• Create your own groups (we call it playspaces).<br>
• Create games in which you wish to challenge each other.<br>
• Invite friends to your playspaces.<br>
• Compete with each other.<br>
• See match history and scores.<br>
• Keep track of your stats.<br>
For support contact - <EMAIL>, <EMAIL>, <EMAIL>
<file_sep>/Leaderboards/Leaderboards/playspaceJoiningFailedViewController.swift
//
// playspaceJoiningFailedViewController.swift
// Leaderboards
//
// Created by <NAME> on 04/10/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Simple modal card shown when joining a playspace fails.
/// (NOTE(review): the lowercase class name violates Swift naming convention,
/// but renaming would break storyboard references, so it is kept as-is.)
class playspaceJoiningFailedViewController: UIViewController {

    @IBOutlet var viewContainer: UIView!
    @IBOutlet var buttonViewContainer: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Round the containers so the dialog reads as a card.
        buttonViewContainer.layer.cornerRadius = 5
        viewContainer.layer.cornerRadius = 5
        viewContainer.clipsToBounds = true
    }

    /// Closes the failure dialog.
    @IBAction func dismissButtonTapped(_ sender: Any) {
        dismiss(animated: true, completion: nil)
    }
}
<file_sep>/Leaderboards/Leaderboards/SplashViewController.swift
//
// SplashViewController.swift
// Leaderboards
//
// Created by <NAME> on 9/22/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Launch screen: spins an activity indicator while the current player is
/// fetched, then routes to the playspaces list (known player) or the login
/// flow (no player).
class SplashViewController: UIViewController {

    @IBOutlet weak var activityIndicator: UIActivityIndicatorView!

    override func viewDidLoad() {
        super.viewDidLoad()
        activityIndicator.startAnimating()
        // Transparent navigation bar while the splash screen is showing.
        navigationController?.navigationBar.setBackgroundImage(UIImage(), for: .default)
        navigationController?.navigationBar.shadowImage = UIImage()
        PlayerController.shared.fetchCurrentPlayer { success in
            DispatchQueue.main.async {
                let destination = success ? "toPlayspacesVC" : "toLoginVC"
                self.performSegue(withIdentifier: destination, sender: nil)
                self.activityIndicator.stopAnimating()
            }
        }
    }
}
<file_sep>/Leaderboards/Leaderboards/CurrentPlayerProfileViewController.swift
//
// CurrentPlayerProfileViewController.swift
// Leaderboards
//
// Created by <NAME> on 25/09/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import CloudKit
/// Shows the current player's per-playspace, per-game statistics, assembled by
/// fetching the player's games, their playspaces, and every match the player
/// appears in.
class CurrentPlayerProfileViewController: UIViewController {

    @IBOutlet weak var tableView: UITableView!

    var games = [Game]()
    // One playspace per game; may contain duplicates.
    var playspaces = [Playspace]()
    // Deduplicated playspaces; one table section each.
    var uniquePlayspaces = [Playspace]()
    // matches[i] holds the fetched matches for games[i].
    var matches = [[Match]]()
    // [section][row] -> stats dict with keys "game", "played", "wins",
    // "losses", "winPercentage", "pointsFor", "pointsAgainst".
    var playerStatsArrayOfDictionaries = [[[String: Any]]]()

    override func viewDidLoad() {
        super.viewDidLoad()
        tableView.tableFooterView = UIView()
        tableView.delegate = self
        tableView.dataSource = self
        guard let currentPlayer = PlayerController.shared.currentPlayer else { return }
        GameController.shared.fetchAllGamesForCurrentPlayer { (games, success) in
            guard success, let games = games else { return }
            // Pre-size `matches` so each game's results can be stored by index.
            for _ in 0..<games.count {
                self.matches.append([])
            }
            self.games = games
            GameController.shared.fetchPlayspacesForGames(games, completion: { (playspaces, success) in
                guard success, let playspaces = playspaces else { return }
                self.playspaces = playspaces
                self.processPlayspaces()
                self.createPlayerStatsDictionaries()
                // Fetch each game's matches concurrently; reload once all finish.
                let group = DispatchGroup()
                for (index, game) in self.games.enumerated() {
                    group.enter()
                    MatchController.shared.fetchMatchesForGame(game, andPlayer: currentPlayer, completion: { (matches, success) in
                        // BUG FIX: always leave the group, even when a fetch
                        // fails — previously a single failure meant notify()
                        // never fired and the table never reloaded.
                        defer { group.leave() }
                        if success, let matches = matches {
                            self.matches[index] = matches
                        }
                    })
                }
                group.notify(queue: DispatchQueue.main, execute: {
                    self.processMatches()
                    self.tableView.reloadData()
                })
            })
        }
    }

    override var preferredStatusBarStyle: UIStatusBarStyle {
        return UIStatusBarStyle.lightContent
    }

    /// Seeds an all-zero stats dictionary for every game, grouped under the
    /// section index of the playspace it belongs to.
    func createPlayerStatsDictionaries() {
        for (index, playspace) in uniquePlayspaces.enumerated() {
            playerStatsArrayOfDictionaries.append([])
            for game in games {
                if game.playspace.recordID == playspace.recordID {
                    playerStatsArrayOfDictionaries[index].append(["game": game, "played": 0, "wins": 0, "losses": 0, "winPercentage": 0.0, "pointsFor": 0, "pointsAgainst": 0])
                }
            }
        }
    }

    /// Accumulates played/wins/losses/points per game from the fetched matches,
    /// from the current player's perspective.
    func processMatches() {
        guard let currentPlayer = PlayerController.shared.currentPlayer else { return }
        for (index, playspace) in uniquePlayspaces.enumerated() {
            for (gameIndex, game) in games.enumerated() {
                if game.playspace.recordID == playspace.recordID {
                    for (gameStatsIndex, gameStats) in playerStatsArrayOfDictionaries[index].enumerated() {
                        guard let gameFromDict = gameStats["game"] as? Game else { return }
                        if game.recordID == gameFromDict.recordID {
                            for match in matches[gameIndex] {
                                guard let played = playerStatsArrayOfDictionaries[index][gameStatsIndex]["played"] as? Int,
                                    let wins = playerStatsArrayOfDictionaries[index][gameStatsIndex]["wins"] as? Int,
                                    let losses = playerStatsArrayOfDictionaries[index][gameStatsIndex]["losses"] as? Int,
                                    let pointsFor = playerStatsArrayOfDictionaries[index][gameStatsIndex]["pointsFor"] as? Int,
                                    let pointsAgainst = playerStatsArrayOfDictionaries[index][gameStatsIndex]["pointsAgainst"] as? Int else { return }
                                if match.winner.recordID == currentPlayer.recordID {
                                    playerStatsArrayOfDictionaries[index][gameStatsIndex]["played"] = played + 1
                                    playerStatsArrayOfDictionaries[index][gameStatsIndex]["wins"] = wins + 1
                                    playerStatsArrayOfDictionaries[index][gameStatsIndex]["winPercentage"] = Double(wins + 1) / Double(played + 1)
                                    playerStatsArrayOfDictionaries[index][gameStatsIndex]["pointsFor"] = pointsFor + match.winnerScore
                                    playerStatsArrayOfDictionaries[index][gameStatsIndex]["pointsAgainst"] = pointsAgainst + match.loserScore
                                } else {
                                    playerStatsArrayOfDictionaries[index][gameStatsIndex]["played"] = played + 1
                                    playerStatsArrayOfDictionaries[index][gameStatsIndex]["losses"] = losses + 1
                                    playerStatsArrayOfDictionaries[index][gameStatsIndex]["winPercentage"] = Double(wins) / Double(played + 1)
                                    playerStatsArrayOfDictionaries[index][gameStatsIndex]["pointsFor"] = pointsFor + match.loserScore
                                    playerStatsArrayOfDictionaries[index][gameStatsIndex]["pointsAgainst"] = pointsAgainst + match.winnerScore
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    /// Deduplicates `playspaces` (first occurrence wins) into `uniquePlayspaces`.
    func processPlayspaces() {
        var uniquePlayspaces = [Playspace]()
        for playspace in playspaces {
            if !uniquePlayspaces.contains(playspace) {
                uniquePlayspaces.append(playspace)
            }
        }
        self.uniquePlayspaces = uniquePlayspaces
    }
}
// MARK: - UITableViewDataSource, UITableViewDelegate
// One section per distinct playspace; within a section, row 0 is a static
// column-label cell and each later row shows one game's stats.
extension CurrentPlayerProfileViewController: UITableViewDataSource, UITableViewDelegate {

    func numberOfSections(in tableView: UITableView) -> Int {
        return uniquePlayspaces.count
    }

    func tableView(_ tableView: UITableView, titleForHeaderInSection section: Int) -> String? {
        return "\(uniquePlayspaces[section].name)"
    }

    // +1 accounts for the label row at the top of each section.
    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return playerStatsArrayOfDictionaries[section].count + 1
    }

    func tableView(_ tableView: UITableView, willDisplayHeaderView view: UIView, forSection section: Int) {
        // Soften the default section-header text colour.
        (view as? UITableViewHeaderFooterView)?.textLabel?.textColor = UIColor.lightGray
    }

    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        guard indexPath.row > 0 else {
            return tableView.dequeueReusableCell(withIdentifier: "labelCell", for: indexPath)
        }
        guard let statsCell = tableView.dequeueReusableCell(withIdentifier: "gameStatsCell", for: indexPath) as? GameStatsTableViewCell else {
            return GameStatsTableViewCell()
        }
        statsCell.updateViewsWith(playerStatsArrayOfDictionaries[indexPath.section][indexPath.row - 1])
        return statsCell
    }

    func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
        return indexPath.row == 0 ? 40 : 50
    }
}
<file_sep>/Leaderboards/Leaderboards/JoinGameViewController.swift
import UIKit
/// Modal list of games in the current playspace the player has not yet
/// joined; also lets them create a brand-new game.
class JoinGameViewController: UIViewController {
@IBOutlet weak var tableView: UITableView!
@IBOutlet weak var navigationBar: UINavigationBar!
@IBOutlet weak var noGamesView: UIView!
// Supplies a random background colour so each visit looks different.
let colorProvider = BackgroundColorProvider()
@IBAction func cancelButtonTapped(_ sender: Any) {
dismiss(animated: true, completion: nil)
}
@IBAction func cancelBarButtonItemTapped(_ sender: Any) {
dismiss(animated: true, completion: nil)
}
// Prompts for a name and creates a new game; dismisses on success.
@IBAction func addGameBarButtonItemTapped(_ sender: Any) {
let alert = UIAlertController(title: "Add New Game", message: nil, preferredStyle: .alert)
alert.addTextField { (textField) in
textField.placeholder = "Enter Name"
}
alert.addAction(UIAlertAction(title: "Cancel", style: .cancel, handler: nil))
alert.addAction(UIAlertAction(title: "Add", style: .default, handler: { (_) in
// Ignore the tap when the name field is empty.
guard let name = alert.textFields?.first?.text, !name.isEmpty else { return }
GameController.shared.createGameWith(name: name, completion: { (success) in
if success {
self.dismiss(animated: true, completion: nil)
}
})
}))
present(alert, animated: true, completion: nil)
}
override func viewDidLoad() {
super.viewDidLoad()
// One random tint for the whole screen, with a transparent nav bar.
let randomColor = colorProvider.randomColor()
view.backgroundColor = randomColor
tableView.backgroundColor = randomColor
noGamesView.backgroundColor = randomColor
navigationBar.setBackgroundImage(UIImage(), for: .default)
navigationBar.shadowImage = UIImage()
navigationBar.isTranslucent = true
tableView.delegate = self
tableView.dataSource = self
// Empty footer suppresses separators below the last row.
tableView.tableFooterView = UIView()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// Show the empty-state view only when there are no joinable games.
if GameController.shared.gamesNotBelongingToCurrentPlayer.count == 0 {
self.noGamesView.isHidden = false
}
else {
self.noGamesView.isHidden = true
}
}
override var preferredStatusBarStyle: UIStatusBarStyle {
return UIStatusBarStyle.lightContent
}
}
// Row 0 is a static title cell; joinable games start at row 1, hence the
// `indexPath.row - 1` lookups.
extension JoinGameViewController: UITableViewDataSource, UITableViewDelegate {

    // +1 accounts for the title cell at row 0.
    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return GameController.shared.gamesNotBelongingToCurrentPlayer.count + 1
    }

    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        if indexPath.row == 0 {
            let cell = tableView.dequeueReusableCell(withIdentifier: "gameTitleCell", for: indexPath)
            return cell
        }
        let cell = tableView.dequeueReusableCell(withIdentifier: "gameCell", for: indexPath)
        cell.textLabel?.text = ("\(GameController.shared.gamesNotBelongingToCurrentPlayer[indexPath.row - 1].name)")
        cell.textLabel?.textColor = UIColor.white
        return cell
    }

    /// Joins the tapped game, moves it between the local caches, and dismisses.
    func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
        // BUG FIX: row 0 is the title cell; selecting it previously indexed
        // the games array at -1 and crashed.
        guard indexPath.row > 0 else { return }
        let game = GameController.shared.gamesNotBelongingToCurrentPlayer[indexPath.row - 1]
        GameController.shared.addCurrentPlayerToGame2(game) { (game, success) in
            if success {
                DispatchQueue.main.async {
                    guard let game = game else { return }
                    // Keep the joined/not-joined caches in sync.
                    GameController.shared.gamesNotBelongingToCurrentPlayer.remove(at: indexPath.row - 1)
                    GameController.shared.gamesBelongingToCurrentPlayer.append(game)
                    self.dismiss(animated: true, completion: nil)
                }
            }
        }
    }

    // Hide the nav-bar title while the in-table title cell is visible;
    // restore it when the cell scrolls off screen.
    func tableView(_ tableView: UITableView, willDisplay cell: UITableViewCell, forRowAt indexPath: IndexPath) {
        if indexPath.row == 0 {
            navigationBar.topItem?.title = ""
        }
    }

    func tableView(_ tableView: UITableView, didEndDisplaying cell: UITableViewCell, forRowAt indexPath: IndexPath) {
        if indexPath.row == 0 {
            navigationBar.topItem?.title = "Join Game"
        }
    }
}
<file_sep>/Leaderboards/Leaderboards/CurrentPlayerProfileContainerViewController.swift
//
// CurrentPlayerProfileContainerViewController.swift
// Leaderboards
//
// Created by <NAME> on 10/4/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Profile screen for the signed-in player: shows avatar + username and flips
/// between the "stats" and "match history" child containers. Also lets the
/// player replace their photo via camera or photo library.
class CurrentPlayerProfileContainerViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
    @IBOutlet weak var navigationBar: UINavigationBar!
    @IBOutlet weak var playerImageView: UIImageView!
    @IBOutlet weak var usernameLabel: UILabel!
    @IBOutlet weak var statsMatchHistoryButtonContainer: UIView!
    @IBOutlet weak var statsMatchHistoryButton: UIButton!
    @IBOutlet weak var currentPlayerStatsContainer: UIView!
    @IBOutlet weak var currentPlayerMatchHistoryContainer: UIView!

    /// Toggles which child container is visible; the button title always names
    /// the *other* (currently hidden) view.
    @IBAction func statsMatchHistoryButtonTapped(_ sender: Any) {
        if currentPlayerStatsContainer.alpha == 0 {
            currentPlayerStatsContainer.alpha = 1
            currentPlayerMatchHistoryContainer.alpha = 0
            statsMatchHistoryButton.setTitle("Match History", for: .normal)
        } else {
            currentPlayerStatsContainer.alpha = 0
            currentPlayerMatchHistoryContainer.alpha = 1
            statsMatchHistoryButton.setTitle("Stats", for: .normal)
        }
    }

    @IBAction func backButtonTapped(_ sender: Any) {
        dismiss(animated: true, completion: nil)
    }

    /// Presents an action sheet offering photo-library and camera sources
    /// (each only when available on the device) for a new profile photo.
    @IBAction func playerImageViewTapped(_ sender: Any) {
        let imagePicker = UIImagePickerController()
        imagePicker.delegate = self
        let alert = UIAlertController(title: "Select Photo Location", message: nil, preferredStyle: .actionSheet)
        if UIImagePickerController.isSourceTypeAvailable(.photoLibrary) {
            alert.addAction(UIAlertAction(title: "Photo Library", style: .default, handler: { (_) -> Void in
                imagePicker.sourceType = .photoLibrary
                imagePicker.allowsEditing = true
                // Force readable (black) nav-bar chrome inside the picker.
                imagePicker.navigationBar.tintColor = .black
                imagePicker.navigationBar.titleTextAttributes = [NSAttributedString.Key.foregroundColor: UIColor.black]
                self.present(imagePicker, animated: true, completion: nil)
            }))
        }
        if UIImagePickerController.isSourceTypeAvailable(.camera) {
            alert.addAction(UIAlertAction(title: "Camera", style: .default, handler: { (_) -> Void in
                imagePicker.sourceType = .camera
                imagePicker.allowsEditing = true
                self.present(imagePicker, animated: true, completion: nil)
            }))
        }
        alert.addAction(UIAlertAction(title: "Cancel", style: .cancel, handler: nil))
        present(alert, animated: true, completion: nil)
    }

    let colorProvider = BackgroundColorProvider()

    override func viewDidLoad() {
        super.viewDidLoad()
        guard let currentPlayer = PlayerController.shared.currentPlayer else { return }
        playerImageView.image = currentPlayer.photo
        usernameLabel.text = currentPlayer.username
        // Circular avatar with a white border.
        playerImageView.layer.cornerRadius = playerImageView.frame.width / 2
        playerImageView.clipsToBounds = true
        playerImageView.layer.borderWidth = 3.0
        playerImageView.layer.borderColor = UIColor.white.cgColor
        // Transparent navigation bar (empty background + shadow images).
        navigationBar.setBackgroundImage(UIImage(), for: .default)
        navigationBar.shadowImage = UIImage()
        navigationBar.isTranslucent = true
        statsMatchHistoryButtonContainer.layer.cornerRadius = 5
        statsMatchHistoryButtonContainer.clipsToBounds = true
        // One random theme color shared by the background and the toggle button.
        let randomColor = colorProvider.randomColor()
        view.backgroundColor = randomColor
        statsMatchHistoryButton.tintColor = randomColor
    }

    override var preferredStatusBarStyle: UIStatusBarStyle {
        return .lightContent
    }

    // MARK: UIImagePickerControllerDelegate

    /// Saves the edited photo to the current player's CloudKit record, then
    /// dismisses the picker and refreshes the avatar on success.
    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
        if let image = info[UIImagePickerController.InfoKey.editedImage] as? UIImage {
            let image = image
            PlayerController.shared.currentPlayer?.photo = image
            guard let currentPlayer = PlayerController.shared.currentPlayer else { return }
            PlayerController.shared.updatePlayer(currentPlayer, completion: { (success) in
                if success {
                    DispatchQueue.main.async {
                        picker.dismiss(animated: true, completion: nil)
                        self.playerImageView.image = image
                    }
                }
                // On failure the picker stays up; no error is surfaced to the user.
            })
        }
    }
}
// Helper function inserted by Swift 4.2 migrator.
/// Re-keys an image-picker info dictionary by each `InfoKey`'s raw string value.
/// (Inserted by the Swift 4.2 migrator; kept for compatibility.)
fileprivate func convertFromUIImagePickerControllerInfoKeyDictionary(_ input: [UIImagePickerController.InfoKey: Any]) -> [String: Any] {
    let rawKeyedPairs = input.map { entry in (entry.key.rawValue, entry.value) }
    return Dictionary(uniqueKeysWithValues: rawKeyedPairs)
}
// Helper function inserted by Swift 4.2 migrator.
/// Unwraps an `InfoKey` to its underlying raw `String`.
/// (Inserted by the Swift 4.2 migrator; not referenced in this file.)
fileprivate func convertFromUIImagePickerControllerInfoKey(_ input: UIImagePickerController.InfoKey) -> String {
    return input.rawValue
}
}
<file_sep>/Leaderboards/Leaderboards/CurrentPlayerMatchHistoryViewController.swift
//
// CurrentPlayerMatchHistoryViewController.swift
// Leaderboards
//
// Created by <NAME> on 10/4/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Lists every match the current player has played, across all of their games.
class CurrentPlayerMatchHistoryViewController: UIViewController {
    @IBOutlet weak var tableView: UITableView!

    // Parallel arrays: opponents[i] and games[i] are intended to describe matches[i].
    var matches: [Match]?
    var opponents: [Player]?
    var games: [Game]?

    override func viewDidLoad() {
        super.viewDidLoad()
        tableView.delegate = self
        tableView.dataSource = self
        // Three chained fetches: matches -> opponents for those matches ->
        // games for those matches. The table reloads only after all three succeed.
        // NOTE(review): self.matches / self.opponents are assigned off the main
        // queue while the table may already be reading them — confirm this is safe.
        MatchController.shared.fetchMatchesForCurrentPlayer { (matches, success) in
            if success {
                self.matches = matches
                guard let matches = matches,
                    let currentPlayer = PlayerController.shared.currentPlayer else { return }
                MatchController.shared.fetchOpponentsForMatches(matches, player: currentPlayer, completion: { (opponents, success) in
                    if success {
                        self.opponents = opponents
                        MatchController.shared.fetchGamesForMatches(matches, completion: { (games, success) in
                            if success {
                                DispatchQueue.main.async {
                                    self.games = games
                                    self.tableView.reloadData()
                                }
                            }
                        })
                    }
                })
            }
        }
    }
}
// MARK: - UITableViewDelegate, UITableViewDataSource
extension CurrentPlayerMatchHistoryViewController: UITableViewDelegate, UITableViewDataSource {

    /// Safe lookup: `opponents` and `games` are filled by separate async fetches
    /// and can momentarily be shorter than `matches` (row count is driven by
    /// `matches` alone), so raw subscripting could crash with index-out-of-range.
    private func element<T>(of array: [T]?, at index: Int) -> T? {
        guard let array = array, array.indices.contains(index) else { return nil }
        return array[index]
    }

    /// One row per fetched match; zero while still loading.
    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return matches?.count ?? 0
    }

    /// Dequeues a match cell and fills it with the match plus its opponent/game
    /// when those parallel arrays already cover this row.
    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        guard let cell = tableView.dequeueReusableCell(withIdentifier: "matchCell", for: indexPath) as? CurrentPlayerMatchHistoryTableViewCell else { return CurrentPlayerMatchHistoryTableViewCell() }
        cell.updateViewsWith(opponent: element(of: opponents, at: indexPath.row),
                             match: element(of: matches, at: indexPath.row),
                             game: element(of: games, at: indexPath.row))
        return cell
    }

    /// Fixed-height match cells.
    func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
        return 150
    }
}
<file_sep>/Leaderboards/Leaderboards/CurrentPlayerMatchHistoryTableViewCell.swift
//
// CurrentPlayerMatchHistoryTableViewCell.swift
// Leaderboards
//
// Created by <NAME> on 10/4/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Cell showing one past match: both avatars, game name, score, and date.
/// The score view is green when the current player won and red when they lost.
class CurrentPlayerMatchHistoryTableViewCell: UITableViewCell {
    @IBOutlet var playerImage: UIImageView!
    @IBOutlet var playerName: UILabel!
    @IBOutlet weak var opponentImage: UIImageView!
    @IBOutlet weak var opponentUsername: UILabel!
    @IBOutlet weak var gameLabel: UILabel!
    @IBOutlet weak var scoreLabel: UILabel!
    @IBOutlet weak var timestampLabel: UILabel!
    @IBOutlet var scoreView: UIView!

    override func awakeFromNib() {
        super.awakeFromNib()
        makeCircularAvatar(playerImage)
        makeCircularAvatar(opponentImage)
        scoreView.layer.cornerRadius = 5
        scoreView.clipsToBounds = true
        // The left side always shows the signed-in player.
        guard let currentPlayer = PlayerController.shared.currentPlayer else { return }
        playerImage.image = currentPlayer.photo
        playerName.text = currentPlayer.username
    }

    /// Rounds an avatar image view and gives it a 2pt white border.
    private func makeCircularAvatar(_ avatarView: UIImageView) {
        avatarView.layer.cornerRadius = avatarView.frame.width / 2
        avatarView.clipsToBounds = true
        avatarView.layer.borderWidth = 2.0
        avatarView.layer.borderColor = UIColor.white.cgColor
    }

    /// Populates the cell; does nothing when `match` is nil. The winner's score
    /// is shown first when the current player won, otherwise the loser's.
    func updateViewsWith(opponent: Player?, match: Match?, game: Game?) {
        guard let match = match else { return }
        gameLabel.text = game?.name
        opponentImage.image = opponent?.photo
        opponentUsername.text = opponent?.username
        let formatter = DateFormatter()
        formatter.dateStyle = .full
        timestampLabel.text = formatter.string(from: match.timestamp)
        guard let currentPlayer = PlayerController.shared.currentPlayer else { return }
        if match.winner.recordID == currentPlayer.recordID {
            // Current player won: green backdrop, their (winning) score first.
            self.scoreView.backgroundColor = UIColor(red: 52.0/255.0, green: 216.0/255.0, blue: 132.0/255.0, alpha: 1.0)
            self.scoreLabel.text = "\(match.winnerScore) - \(match.loserScore)"
        } else {
            // Current player lost: red backdrop, their (losing) score first.
            self.scoreView.backgroundColor = UIColor.red
            self.scoreLabel.text = "\(match.loserScore) - \(match.winnerScore)"
        }
    }
}
<file_sep>/Leaderboards/Leaderboards/ApprovalViewController.swift
////
//// ApprovalViewController.swift
//// Leaderboards
////
//// Created by <NAME> on 25/09/17.
//// Copyright © 2017 <NAME>. All rights reserved.
////
//
//import UIKit
//
//class ApprovalViewController: UIViewController {
//
// var score: String = ""
// var game: String = ""
// var date: String = ""
// var opponent: String = ""
//
// var matchIndex: Int?
//
// @IBOutlet var playerImageView: UIImageView!
// @IBOutlet var detailViewContainer: UIView!
// @IBOutlet var aprooveButton: UIButton!
// @IBOutlet var declineButton: UIButton!
// @IBOutlet var opponentLabel: UILabel!
// @IBOutlet var scoreLabel: UILabel!
// @IBOutlet var dateLabel: UILabel!
// @IBOutlet var gameLabel: UILabel!
//
//
//
// override func viewDidLoad() {
// super.viewDidLoad()
//
// self.gameLabel.text = game
// self.scoreLabel.text = score
// self.dateLabel.text = date
// self.opponentLabel.text = opponent
//
// detailViewContainer.layer.cornerRadius = 5
// detailViewContainer.clipsToBounds = true
//
// }
//
// @IBAction func approveButtonTapped(_ sender: Any) {
// guard let matchIndex = matchIndex else { return }
// let verifiedMatch = MatchController.shared.verifyMatch(MatchController.shared.pendingMatches[matchIndex])
// MatchController.shared.updateMatch(verifiedMatch) { (success) in
// if success {
// DispatchQueue.main.async {
// //MatchController.shared.clearPendingMatch(at: matchIndex)
// self.dismiss(animated: true, completion: nil)
// }
// }
// }
// }
//
// @IBAction func declineButtonTapped(_ sender: Any) {
//
// guard let matchIndex = matchIndex else { return }
//
// MatchController.shared.deletePendingMatch(at: matchIndex) { (success) in
// if success {
// DispatchQueue.main.async {
// //MatchController.shared.clearPendingMatch(at: matchIndex)
// self.dismiss(animated: true, completion: nil)
// }
// }
// }
//
// }
//
//
//
// override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
// self.dismiss(animated: true, completion: nil)
//
// }
//
//}
<file_sep>/Leaderboards/Leaderboards/Game.swift
//
// Game.swift
// Leaderboards
//
// Created by <NAME> on 9/18/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import Foundation
import CloudKit
/// A playable game inside a playspace, mirrored from a CloudKit record.
struct Game {
    // CloudKit record identifier for this game.
    let recordID: CKRecord.ID
    let name: String
    // Reference to the owning Playspace record.
    let playspace: CKRecord.Reference
    // References to the Player records that have joined this game.
    var players: [CKRecord.Reference]
}
// MARK: - CloudKit
// MARK: - CloudKit
extension Game {
    // Record field names and type, shared by init and CKRepresentation.
    static let nameKey = "name"
    static let playspaceKey = "playspace"
    static let recordType = "Game"
    static let playersKey = "players"

    /// Builds a Game from a CloudKit record; fails when the required
    /// name/playspace fields are missing or mistyped.
    init?(record: CKRecord) {
        guard let name = record[Game.nameKey] as? String,
            let playspace = record[Game.playspaceKey] as? CKRecord.Reference else { return nil }
        self.recordID = record.recordID
        self.name = name
        self.playspace = playspace
        // A missing players field simply means nobody has joined yet.
        self.players = record[Game.playersKey] as? [CKRecord.Reference] ?? []
    }

    /// CloudKit record mirroring this value. An empty players array is stored
    /// as nil rather than an empty list.
    var CKRepresentation: CKRecord {
        let record = CKRecord(recordType: Game.recordType, recordID: recordID)
        record.setValue(name, forKey: Game.nameKey)
        record.setValue(playspace, forKey: Game.playspaceKey)
        record.setValue(players.count == 0 ? nil : players, forKey: Game.playersKey)
        return record
    }
}
//
<file_sep>/Leaderboards/Leaderboards/PlayerTableViewCell.swift
//
// PlayerTableViewCell.swift
// Leaderboards
//
// Created by <NAME> on 19/09/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Cell showing one player's circular avatar and username in white text.
class PlayerTableViewCell: UITableViewCell {
    // MARK:- Outlets
    @IBOutlet weak var playerImageView: UIImageView!
    @IBOutlet weak var playerNameLabel: UILabel!

    // MARK:- Properties
    // Assigning a player re-renders the cell immediately.
    var player: Player? {
        didSet {
            updateViews()
        }
    }

    // MARK :- Functions
    /// Renders the current `player`; no-op while it is nil.
    func updateViews() {
        guard let player = player else { return }
        playerNameLabel.text = player.username
        playerNameLabel.textColor = UIColor.white
        playerImageView.image = player.photo
        playerImageView.layer.cornerRadius = playerImageView.frame.size.width / 2
        playerImageView.clipsToBounds = true
    }
}
<file_sep>/Leaderboards/Leaderboards/LeaderboardsPlayersListContainerViewController.swift
//
// LeaderboardsPlayersListContainerViewController.swift
// Leaderboards
//
// Created by <NAME> on 10/3/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Hosts the leaderboards and players-list child views and flips between them.
/// The theme color is picked in `prepare(for:)` and shared with the child
/// LeaderboardsViewController.
class LeaderboardsPlayersListContainerViewController: UIViewController {
    @IBOutlet weak var leaderboardsContainerView: UIView!
    @IBOutlet weak var playersListContainerView: UIView!
    @IBOutlet weak var leaderboardsPlayersButton: UIButton!
    @IBOutlet weak var leaderboardsPlayersListView: UIView!
    @IBOutlet weak var leaderboardsPlayersButtonViewContainer: UIView!

    let colorProvider = BackgroundColorProvider()
    var randomColor: UIColor?

    override func viewDidLoad() {
        super.viewDidLoad()
        leaderboardsPlayersListView.backgroundColor = randomColor
        // "+" button in the nav bar starts the new-match flow.
        navigationItem.rightBarButtonItem = UIBarButtonItem(barButtonSystemItem: .add, target: self, action: #selector(addMatchBarButtonItemTapped))
        leaderboardsPlayersButtonViewContainer.layer.cornerRadius = 5
        leaderboardsPlayersButtonViewContainer.clipsToBounds = true
        leaderboardsPlayersButton.tintColor = randomColor
    }

    /// Flips which container is visible; the button title always names the
    /// currently hidden view.
    @IBAction func leaderboardsPlayersButtonTapped(_ sender: Any) {
        let revealPlayersList = playersListContainerView.alpha == 0
        playersListContainerView.alpha = revealPlayersList ? 1 : 0
        leaderboardsContainerView.alpha = revealPlayersList ? 0 : 1
        leaderboardsPlayersButton.setTitle(revealPlayersList ? "Leaderboards" : "Players", for: .normal)
    }

    @objc func addMatchBarButtonItemTapped() {
        let newMatchVC = UIStoryboard(name: "Match", bundle: nil).instantiateViewController(withIdentifier: "newMatchVC")
        present(newMatchVC, animated: true, completion: nil)
    }

    // MARK: - Navigation
    /// Picks the theme color just before embedding the leaderboards child and
    /// hands the same color down to it.
    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        guard segue.identifier == "toLeaderboardsVC" else { return }
        randomColor = colorProvider.randomColor()
        (segue.destination as? LeaderboardsViewController)?.randomColor = randomColor
    }
}
<file_sep>/Leaderboards/Leaderboards/PlayerController.swift
//
// PlayerController.swift
// Leaderboards
//
// Created by <NAME> on 9/19/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import CloudKit
/// Singleton gateway for Player CRUD against CloudKit.
class PlayerController {
    static let shared = PlayerController()

    // The signed-in player's custom Player record, cached after create/fetch.
    var currentPlayer: Player?
    // Cached opponents (populated elsewhere).
    var opponents = [Player]()

    /// Creates a Player record linked (via `appleUserRef`, action .deleteSelf)
    /// to the device's Apple user record, saves it, caches it as `currentPlayer`,
    /// and reports overall success. Completion may run on a background queue.
    func createPlayerWith(username: String, photo: UIImage?, completion: @escaping (_ success: Bool) -> Void) {
        CKContainer.default().fetchUserRecordID { (appleUsersRecordID, error) in
            guard let appleUsersRecordID = appleUsersRecordID else { completion(false); return }
            let appleUserRef = CKRecord.Reference(recordID: appleUsersRecordID, action: .deleteSelf)
            let player = Player(recordID: CKRecord.ID(recordName: UUID().uuidString), playspaces: [], username: username, photo: photo, appleUserRef: appleUserRef)
            let playerRecord = player.CKRepresentation
            CloudKitManager.shared.saveRecord(playerRecord) { (record, error) in
                if let error = error { print(error.localizedDescription); completion(false); return }
                guard let record = record,
                    let currentPlayer = Player(record: record) else { completion(false); return }
                // Remove the "<recordName>.dat" temp file — presumably the CKAsset
                // scratch file created for the photo upload; TODO confirm.
                let tempURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(record.recordID.recordName + ".dat")
                try? FileManager.default.removeItem(at: tempURL)
                self.currentPlayer = currentPlayer
                completion(true)
            }
        }
    }

    /// Looks up our custom Player record for the device's Apple user and caches
    /// it as `currentPlayer`. Succeeds only when exactly such a record exists.
    func fetchCurrentPlayer(completion: @escaping (_ success: Bool) -> Void = { _ in }) {
        // Fetch default Apple 'Users' recordID
        CKContainer.default().fetchUserRecordID { (appleUserRecordID, error) in
            if let error = error { print("Here 2 -> \(error.localizedDescription)") }
            guard let appleUserRecordID = appleUserRecordID else { completion(false); return }
            // Create a CKReference with the Apple 'Users' recordID so that we can fetch OUR cust user record
            let appleUserReference = CKRecord.Reference(recordID: appleUserRecordID, action: .deleteSelf)
            // Create a predicate with the reference that was just created.
            // This predicate will search through all the Users and filter them based on a matching reference
            let predicate = NSPredicate(format: "appleUserRef == %@", appleUserReference)
            // Perform the fetch on our 'User' record
            CloudKitManager.shared.fetchRecordsWithType(Player.recordType, predicate: predicate, recordFetchedBlock: nil, completion: { (records, error) in
                guard let currentPlayerRecord = records?.first else { completion(false); return }
                let currentPlayer = Player(record: currentPlayerRecord)
                self.currentPlayer = currentPlayer
                completion(true)
            })
        }
    }

    /// Pushes a modified Player record back to CloudKit and deletes the
    /// matching "<recordName>.dat" temp file on success.
    func updatePlayer(_ player: Player, completion: @escaping (_ success: Bool) -> Void = { _ in }) {
        let playerRecord = player.CKRepresentation
        CloudKitManager.shared.updateRecords([playerRecord], perRecordCompletion: nil) { (_, error) in
            if let error = error {
                print(error.localizedDescription)
                completion(false)
                return
            }
            let tempURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(player.recordID.recordName + ".dat")
            try? FileManager.default.removeItem(at: tempURL)
            completion(true)
        }
    }

    /// Fetches all Playspace records the player references and stores them in
    /// PlayspaceController.shared.playspaces (side effect, not returned).
    func fetchPlayspacesFor(_ player: Player, completion: @escaping (_ success: Bool) -> Void = { _ in }) {
        var playspaceRecordIDs = [CKRecord.ID]()
        for playspace in player.playspaces {
            playspaceRecordIDs.append(playspace.recordID)
        }
        CloudKitManager.shared.fetchRecords(withIDs: playspaceRecordIDs) { (playspacesDictionary, error) in
            if let error = error {
                print(error.localizedDescription)
                completion(false)
                return
            }
            var playspaceRecords = [CKRecord]()
            guard let playspacesDictionary = playspacesDictionary else { completion(false); return }
            for playspaceRecord in playspacesDictionary.values {
                playspaceRecords.append(playspaceRecord)
            }
            let playspaces = playspaceRecords.compactMap { Playspace(record: $0) }
            PlayspaceController.shared.playspaces = playspaces
            completion(true)
        }
    }

    /// Queries all players who belong to the given playspace, sorted by
    /// username ascending. Calls back with the players (or nil) and success.
    func fetchPlayersFor(_ playspace: Playspace, completion: @escaping (_ players: [Player]?, _ success: Bool) -> Void = { _, _ in }) {
        let playerIsInPlayspacePredicate = NSPredicate(format: "playspaces CONTAINS %@", playspace.recordID)
        let query = CKQuery(recordType: Player.recordType, predicate: playerIsInPlayspacePredicate)
        query.sortDescriptors = [NSSortDescriptor(key: "username", ascending: true)]
        CloudKitManager.shared.publicDB.perform(query, inZoneWith: nil) { (playerRecords, error) in
            if let error = error {
                print(error.localizedDescription)
                completion(nil, false)
                return
            }
            guard let playerRecords = playerRecords else { completion(nil, false); return }
            let players = playerRecords.compactMap { Player(record: $0) }
            completion(players, true)
        }
        // CloudKitManager.shared.fetchRecordsWithType(Player.recordType, predicate: playerIsInPlayspacePredicate, recordFetchedBlock: nil) { (playerRecords, error) in
        // if let error = error {
        // print(error.localizedDescription)
        // completion(nil, false)
        // return
        // }
        //
        // guard let playerRecords = playerRecords else { completion(nil, false); return }
        // let players = playerRecords.flatMap { Player(record: $0) }
        // completion(players, true)
        // }
    }

    /// Fetches a single Player by record ID; completion receives nil on any failure.
    func fetchPlayer(_ playerRecordID: CKRecord.ID, completion: @escaping (_ player: Player?, _ success: Bool) -> Void = { _,_ in }) {
        CloudKitManager.shared.fetchRecord(withID: playerRecordID) { (record, error) in
            if let error = error {
                print(error.localizedDescription)
                completion(nil, false)
                return
            }
            guard let record = record else { completion(nil, false); return }
            completion(Player(record: record), true)
        }
    }
}
<file_sep>/Leaderboards/Leaderboards/PlayspaceFailedViewController.swift
//
// PlayspaceFailedViewController.swift
// Leaderboards
//
// Created by <NAME> on 02/10/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Simple modal shown when joining/creating a playspace fails; the only
/// action is dismissing it.
class PlayspaceFailedViewController: UIViewController {
    @IBOutlet var viewContainer: UIView!
    @IBOutlet var dismissButtonViewContainer: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Round the corners of the dialog and of its dismiss button.
        for container in [viewContainer, dismissButtonViewContainer] {
            container?.layer.cornerRadius = 5
            container?.clipsToBounds = true
        }
    }

    @IBAction func dismissButtonTapped(_ sender: Any) {
        dismiss(animated: true, completion: nil)
    }
}
<file_sep>/Leaderboards/Leaderboards/PlayerProfileViewController.swift
//
// PlayerProfileViewController.swift
// Leaderboards
//
// Created by <NAME> on 10/4/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Another player's profile: their match history within the currently
/// selected game, shown in a themed table.
class PlayerProfileViewController: UIViewController {
    let colorProvider = BackgroundColorProvider()
    @IBOutlet weak var tableView: UITableView!

    // Parallel arrays: opponents[i] is intended to be the opponent in matches[i].
    var matches: [Match]?
    var opponents: [Player]?
    // The player whose profile is being shown; set by the presenter.
    var player: Player?

    override func viewDidLoad() {
        super.viewDidLoad()
        title = player?.username
        let randomColor = colorProvider.randomColor()
        self.view.backgroundColor = randomColor
        self.tableView.backgroundColor = randomColor
        tableView.delegate = self
        tableView.dataSource = self
        // Empty footer suppresses separator lines below the last row.
        tableView.tableFooterView = UIView()
        guard let currentGame = GameController.shared.currentGame,
            let player = player else { return }
        // Chained fetches: matches for this game+player, then the opponents
        // for those matches; the table reloads only after both succeed.
        MatchController.shared.fetchMatchesForGame(currentGame, andPlayer: player) { (matches, success) in
            if success {
                self.matches = matches
                guard let matches = matches else { return }
                MatchController.shared.fetchOpponentsForMatches(matches, player: player, completion: { (opponents, success) in
                    if success {
                        DispatchQueue.main.async {
                            self.opponents = opponents
                            self.tableView.reloadData()
                        }
                    }
                })
            }
        }
    }
}
// MARK: - UITableViewDelegate, UITableViewDataSource
extension PlayerProfileViewController: UITableViewDelegate, UITableViewDataSource {

    /// Safe lookup into `opponents`: it is fetched asynchronously after
    /// `matches` and can be shorter (or nil) while the table is already sized
    /// from `matches`; raw subscripting could crash with index-out-of-range.
    private func opponent(at index: Int) -> Player? {
        guard let opponents = opponents, opponents.indices.contains(index) else { return nil }
        return opponents[index]
    }

    /// Rows: title row + "last five" summary row + one row per match.
    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        guard let count = matches?.count else { return 0 }
        return count + 2
    }

    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        if indexPath.row == 0 {
            return tableView.dequeueReusableCell(withIdentifier: "matchHistoryTitleCell", for: indexPath)
        }
        if indexPath.row == 1 {
            guard let cell = tableView.dequeueReusableCell(withIdentifier: "lastFiveCell", for: indexPath) as? PlayerProfileLastFiveTableViewCell,
                let matches = matches,
                let player = player else { return PlayerProfileLastFiveTableViewCell() }
            // prefix(5) covers both the "fewer than five" and "first five" cases
            // that were previously two hard-coded branches.
            cell.updateViewsWith(matches: Array(matches.prefix(5)), player: player)
            return cell
        }
        guard let cell = tableView.dequeueReusableCell(withIdentifier: "matchCell", for: indexPath) as? PlayerProfileMatchHistoryTableViewCell else { return PlayerProfileMatchHistoryTableViewCell() }
        // Match rows start at index 2, hence the -2 offset.
        cell.updateViewsWith(opponent: opponent(at: indexPath.row - 2), match: matches?[indexPath.row - 2], player: player)
        return cell
    }

    /// Compact title row; all other rows are full match cells.
    func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
        if indexPath.row == 0 {
            return 44
        }
        return 140
    }

    /// Show the player's name in the nav bar while the in-table title row is visible...
    func tableView(_ tableView: UITableView, willDisplay cell: UITableViewCell, forRowAt indexPath: IndexPath) {
        if indexPath.row == 0 {
            title = player?.username
        }
    }

    /// ...and fall back to a generic title once it scrolls off screen.
    func tableView(_ tableView: UITableView, didEndDisplaying cell: UITableViewCell, forRowAt indexPath: IndexPath) {
        if indexPath.row == 0 {
            title = "Match History"
        }
    }
}
<file_sep>/Leaderboards/Leaderboards/GamesViewController.swift
//
// GamesViewController.swift
// Leaderboards
//
// Created by <NAME> on 9/20/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import CloudKit
/// Home screen for a playspace: lists the games the current player belongs to,
/// offers join/create actions, and surfaces pending-match notifications plus a
/// share-playspace-password action in the nav bar.
class GamesViewController: UIViewController {
    let colorProvider = BackgroundColorProvider()
    @IBOutlet var nogamesView: UIView!
    @IBOutlet weak var tableView: UITableView!
    @IBOutlet weak var addgameButtonViewContainer: UIView!
    @IBOutlet weak var addGameButton: UIButton!
    // True until the initial game fetch completes; gates the empty-state
    // toggle in viewWillAppear so it doesn't flash before data arrives.
    var isFirstTime = true

    override func viewDidLoad() {
        super.viewDidLoad()
        tableView.delegate = self
        tableView.dataSource = self
        // Empty footer suppresses separator lines below the last row.
        tableView.tableFooterView = UIView()
        // One random theme color shared across background, table and buttons.
        let randomColor = colorProvider.randomColor()
        tableView.backgroundColor = randomColor
        view.backgroundColor = randomColor
        addGameButton.tintColor = randomColor
        nogamesView.backgroundColor = randomColor
        addgameButtonViewContainer.layer.cornerRadius = 5
        addgameButtonViewContainer.clipsToBounds = true
        // Hide the empty-state view until the fetch tells us whether it applies.
        self.nogamesView.isHidden = true
        GameController.shared.fetchGamesForCurrentPlayspace { (success) in
            if success {
                DispatchQueue.main.async {
                    // Show the empty-state view only when the player has no games.
                    if GameController.shared.gamesBelongingToCurrentPlayer.count == 0 {
                        self.nogamesView.isHidden = false
                    }
                    else {
                        self.nogamesView.isHidden = true
                    }
                    self.isFirstTime = false
                    self.tableView.reloadData()
                }
            }
        }
    }

    /// Refreshes the table and rebuilds the right-hand nav-bar items
    /// (avatar button, optional pending-match badge, share button) each
    /// time the screen appears.
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        if !isFirstTime{
            if GameController.shared.gamesBelongingToCurrentPlayer.count == 0 {
                self.nogamesView.isHidden = false
            }
            else {
                self.nogamesView.isHidden = true
            }
        }
        tableView.reloadData()
        if let currentPlayer = PlayerController.shared.currentPlayer {
            // Circular avatar button that opens the current player's profile.
            let playerImageButton = UIButton(type: .custom)
            playerImageButton.addTarget(self, action: #selector(playerImageButtonTapped), for: .touchUpInside)
            playerImageButton.setImage(currentPlayer.photo, for: .normal)
            playerImageButton.frame = CGRect(x: 0, y: 0, width: 32, height: 32)
            playerImageButton.layer.cornerRadius = playerImageButton.frame.height / 2
            playerImageButton.clipsToBounds = true
            playerImageButton.layer.borderColor = UIColor.white.cgColor
            playerImageButton.layer.borderWidth = 2.0
            // Pin the button to 32x32 so the bar doesn't stretch the custom view.
            playerImageButton.addConstraint(NSLayoutConstraint(item: playerImageButton, attribute: .width, relatedBy: .equal, toItem: nil, attribute: .notAnAttribute, multiplier: 1, constant: 32))
            playerImageButton.addConstraint(NSLayoutConstraint(item: playerImageButton, attribute: .height, relatedBy: .equal, toItem: nil, attribute: .notAnAttribute, multiplier: 1, constant: 32))
            let shareShowPasswordButton = UIBarButtonItem(barButtonSystemItem: .action, target: self, action: #selector(shareShowPasswordButtonTapped))
            // Round badge showing the number of matches awaiting approval.
            let pendingMatchesNotificationBadgeButton = UIButton(type: .system)
            pendingMatchesNotificationBadgeButton.addTarget(self, action: #selector(pendingMatchesNotificationBadgeButtonTapped), for: .touchUpInside)
            pendingMatchesNotificationBadgeButton.frame = CGRect(x: 0, y: 0, width: 32, height: 32)
            pendingMatchesNotificationBadgeButton.backgroundColor = .white
            pendingMatchesNotificationBadgeButton.tintColor = view.backgroundColor
            pendingMatchesNotificationBadgeButton.layer.cornerRadius = pendingMatchesNotificationBadgeButton.frame.height / 2
            pendingMatchesNotificationBadgeButton.clipsToBounds = true
            pendingMatchesNotificationBadgeButton.layer.borderColor = UIColor.white.cgColor
            pendingMatchesNotificationBadgeButton.layer.borderWidth = 1.0
            MatchController.shared.fetchPendingMatchesForCurrentPlayer { (success) in
                if success {
                    DispatchQueue.main.async {
                        // Mirror the pending count onto the app icon badge.
                        // NOTE(review): CKModifyBadgeOperation is deprecated —
                        // consider replacing with a notification-based approach.
                        let operation = CKModifyBadgeOperation(badgeValue: MatchController.shared.pendingMatches.count)
                        operation.modifyBadgeCompletionBlock = {(error) in
                            if let error = error{
                                print("\(error)")
                                return
                            }
                            DispatchQueue.main.async {
                                UIApplication.shared.applicationIconBadgeNumber = MatchController.shared.pendingMatches.count
                            }
                        }
                        CKContainer.default().add(operation)
                        // Only show the badge button when something is pending.
                        if MatchController.shared.pendingMatches.count > 0 {
                            pendingMatchesNotificationBadgeButton.setTitle("\(MatchController.shared.pendingMatches.count)", for: .normal)
                            self.navigationItem.rightBarButtonItems = [UIBarButtonItem(customView: playerImageButton), UIBarButtonItem(customView: pendingMatchesNotificationBadgeButton), shareShowPasswordButton]
                        } else {
                            self.navigationItem.rightBarButtonItems = [UIBarButtonItem(customView: playerImageButton), shareShowPasswordButton]
                        }
                    }
                }
            }
        }
    }

    /// Action sheet for joining an existing game or creating a new one.
    @IBAction func addGameButtonTapped(_ sender: Any) {
        let alert = UIAlertController(title: "Add Game", message: nil, preferredStyle: .actionSheet)
        alert.addAction(UIAlertAction(title: "Join Game", style: .default , handler: { (_) -> Void in
            let joinGameVC = UIStoryboard(name: "Login", bundle: nil).instantiateViewController(withIdentifier: "joinGameVC")
            self.present(joinGameVC, animated: true, completion: nil)
        }))
        alert.addAction(UIAlertAction(title: "New Game", style: .default, handler: { (_) -> Void in
            let newGameVC = UIStoryboard(name: "NewGame", bundle: nil).instantiateViewController(withIdentifier: "newGameVC")
            self.present(newGameVC, animated: true, completion: nil)
        }))
        alert.addAction(UIAlertAction(title: "Cancel", style: .cancel, handler: nil))
        present(alert, animated: true, completion: nil)
    }

    @objc func playerImageButtonTapped() {
        let currentPlayerProfileVC = UIStoryboard(name: "PlayerProfile", bundle: nil).instantiateViewController(withIdentifier: "currentPlayerProfileContainerVC")
        present(currentPlayerProfileVC, animated: true, completion: nil)
    }

    @objc func pendingMatchesNotificationBadgeButtonTapped() {
        let pendingMatchesVC = UIStoryboard(name: "PlayerProfile", bundle: nil).instantiateViewController(withIdentifier: "pendingMatchesVC")
        present(pendingMatchesVC, animated: true, completion: nil)
    }

    /// Opens the system share sheet with an invite message containing the
    /// playspace name and password.
    @objc func shareShowPasswordButtonTapped() {
        guard let playspacePassword = PlayspaceController.shared.currentPlayspace?.password, let playspaceName = PlayspaceController.shared.currentPlayspace?.name else { return }
        let textToShare = "Hey come join me at my playspace \(playspaceName) the password to join is: \(playspacePassword)"
        let share = [textToShare]
        let activityVC = UIActivityViewController(activityItems: share, applicationActivities: nil)
        self.present(activityVC, animated: true, completion: nil)
    }

    /// Records the selected game (rows are offset by the title row) before
    /// the detail screen is shown.
    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        if segue.identifier == "toGameDetail" {
            guard let indexPath = tableView.indexPathForSelectedRow else { return }
            GameController.shared.currentGame = GameController.shared.gamesBelongingToCurrentPlayer[indexPath.row - 1]
        }
    }
}
// MARK: - UITableViewDataSource, UITableViewDelegate
extension GamesViewController: UITableViewDataSource, UITableViewDelegate {
    /// One title row plus one row per joined game.
    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return GameController.shared.gamesBelongingToCurrentPlayer.count + 1
    }

    /// Row 0 is a static title cell; later rows show joined games (-1 offset).
    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        if indexPath.row == 0 {
            let cell = tableView.dequeueReusableCell(withIdentifier: "gamesTitleCell", for: indexPath)
            return cell
        }
        let cell = tableView.dequeueReusableCell(withIdentifier: "gameCell", for: indexPath)
        cell.textLabel?.text = ("\(GameController.shared.gamesBelongingToCurrentPlayer[indexPath.row - 1].name)")
        cell.detailTextLabel?.textColor = UIColor.white
        cell.textLabel?.textColor = UIColor.white
        return cell
    }

    /// Show the playspace name in the nav bar while the title row is visible...
    func tableView(_ tableView: UITableView, willDisplay cell: UITableViewCell, forRowAt indexPath: IndexPath) {
        if indexPath.row == 0 {
            navigationItem.title = PlayspaceController.shared.currentPlayspace?.name
        }
    }

    /// ...and fall back to "Games" once it scrolls off screen.
    func tableView(_ tableView: UITableView, didEndDisplaying cell: UITableViewCell, forRowAt indexPath: IndexPath) {
        if indexPath.row == 0 {
            navigationItem.title = "Games"
        }
    }

    /// Swipe action that removes the current player from a game, moves it to
    /// the "not joined" cache, and animates the row away.
    func tableView(_ tableView: UITableView, editActionsForRowAt indexPath: IndexPath) -> [UITableViewRowAction]? {
        let leaveTableViewRowAction = UITableViewRowAction(style: .normal, title: "Leave") { (_, indexPath) in
            let game = GameController.shared.gamesBelongingToCurrentPlayer[indexPath.row - 1]
            GameController.shared.removeCurrentPlayerFrom2(game, completion: { (game, success) in
                if success {
                    DispatchQueue.main.async {
                        guard let game = game else { return }
                        // Model must change before deleteRows so counts stay consistent.
                        GameController.shared.gamesBelongingToCurrentPlayer.remove(at: indexPath.row - 1)
                        GameController.shared.gamesNotBelongingToCurrentPlayer.append(game)
                        tableView.deleteRows(at: [indexPath], with: .automatic)
                    }
                }
            })
        }
        leaveTableViewRowAction.backgroundColor = .red
        return [leaveTableViewRowAction]
    }

    /// The title row cannot be swiped/edited.
    func tableView(_ tableView: UITableView, canEditRowAt indexPath: IndexPath) -> Bool {
        if indexPath.row == 0 {
            return false
        }
        return true
    }
}
<file_sep>/Leaderboards/Leaderboards/PlayerViewController.swift
//
// PlayerViewController.swift
// Leaderboards
//
// Created by <NAME> on 19/09/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
class PlayerViewController: UIViewController, UITableViewDataSource, UITableViewDelegate {
// Two stacked tables: players list (tag 0) and leaderboard (tag 1).
@IBOutlet weak var playerTableView: UITableView!
@IBOutlet weak var leaderboardsButton: UIButton!
@IBOutlet weak var leaderboardTableView: UITableView!
// Container views toggled via alpha to flip between the two modes.
@IBOutlet weak var playersView: UIView!
@IBOutlet weak var leaderboardsView: UIView!
@IBOutlet weak var leaderboardsButtonViewContainer: UIView!
// One stats dictionary per player of the current game; rebuilt on each appearance.
var playerStatsArrayOfDictionaries = [[String: Any]]()
let colorProvider = BackgroundColorProvider()
override func viewDidLoad() {
super.viewDidLoad()
playerTableView.delegate = self
playerTableView.dataSource = self
playerTableView.tag = 0
playerTableView.alpha = 0
leaderboardTableView.delegate = self
leaderboardTableView.dataSource = self
leaderboardTableView.tag = 1
leaderboardsView.alpha = 1
leaderboardsButtonViewContainer.layer.cornerRadius = 5
leaderboardsButtonViewContainer.clipsToBounds = true
randomColor()
let addMatchBarButtonItem = UIBarButtonItem(barButtonSystemItem: .add, target: self, action: #selector(addMatchBarButtonItemTapped))
navigationItem.rightBarButtonItem = addMatchBarButtonItem
}
@IBAction func leaderboardsButtonTapped(_ sender: Any) {
if leaderboardsView.alpha == 0 {
leaderboardsView.alpha = 1
playersView.alpha = 0
leaderboardsButton.setTitle("Players", for: .normal)
leaderboardTableView.reloadData()
randomColor()
} else {
leaderboardsView.alpha = 0
playersView.alpha = 1
leaderboardsButton.setTitle("Leaderboards", for: .normal)
playerTableView.reloadData()
randomColor()
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
playerStatsArrayOfDictionaries.removeAll()
GameController.shared.fetchAllPlayersForCurrentGame { (success) in
if success {
DispatchQueue.main.async {
self.playerTableView.reloadData()
self.createPlayerStatsDictionaries()
}
MatchController.shared.fetchMatchesForCurrentGame(completion: { (success) in
if success {
DispatchQueue.main.async {
self.updatePlayerStatsDictionaries()
}
}
})
}
}
}
func randomColor() {
let randomColor = colorProvider.randomColor()
view.backgroundColor = randomColor
playersView.backgroundColor = randomColor
leaderboardsView.backgroundColor = randomColor
leaderboardsButton.tintColor = randomColor
playerTableView.backgroundColor = randomColor
leaderboardTableView.backgroundColor = randomColor
}
func createPlayerStatsDictionaries() {
for player in GameController.shared.playersBelongingToCurrentGame {
self.playerStatsArrayOfDictionaries.append(["player": player, player.recordID.recordName: player.recordID, "played": 0, "wins": 0, "losses": 0, "winPercentage": 0.0])
}
}
func updatePlayerStatsDictionaries() {
let matchesInCurrentGame = MatchController.shared.matchesInCurrentGame
for match in matchesInCurrentGame {
for (index, playerStatsDictionary) in playerStatsArrayOfDictionaries.enumerated() {
if let winner = playerStatsDictionary["player"] as? Player,
winner.recordID == match.winner.recordID,
let playedForWinnerDictionary = playerStatsDictionary["played"] as? Int,
let winsForWinnerDictionary = playerStatsDictionary["wins"] as? Int {
var winnerDictionary = playerStatsDictionary
winnerDictionary["played"] = playedForWinnerDictionary + 1
winnerDictionary["wins"] = winsForWinnerDictionary + 1
winnerDictionary["winPercentage"] = Double((winsForWinnerDictionary + 1)) / Double((playedForWinnerDictionary + 1))
playerStatsArrayOfDictionaries[index] = winnerDictionary
}
if let loser = playerStatsDictionary["player"] as? Player,
loser.recordID == match.loser.recordID,
let playedForLoserDictionary = playerStatsDictionary["played"] as? Int,
let winsForLoserDictionary = playerStatsDictionary["wins"] as? Int,
let lossesForLoserDictionary = playerStatsDictionary["losses"] as? Int {
var loserDictionary = playerStatsDictionary
loserDictionary["played"] = playedForLoserDictionary + 1
loserDictionary["losses"] = lossesForLoserDictionary + 1
loserDictionary["winPercentage"] = Double(winsForLoserDictionary) / Double((playedForLoserDictionary + 1))
playerStatsArrayOfDictionaries[index] = loserDictionary
}
}
}
sortPlayersBy(.wins)
leaderboardTableView.reloadData()
}
func sortPlayersBy(_ column: Column) {
playerStatsArrayOfDictionaries.sort { (dictionary1, dictionary2) -> Bool in
if let dictionary1Wins = dictionary1["wins"] as? Int,
let dictionary2Wins = dictionary2["wins"] as? Int,
let dictionary1WinPercentage = dictionary1["winPercentage"] as? Double,
let dictionary2WinPercentage = dictionary2["winPercentage"] as? Double,
let dictionary1Played = dictionary1["played"] as? Int,
let dictionary2Played = dictionary2["played"] as? Int {
if column == .wins {
if dictionary1Wins > dictionary2Wins {
return true
} else if dictionary1Wins == dictionary2Wins {
return dictionary1Played > dictionary2Played
}
} else {
if dictionary1WinPercentage > dictionary2WinPercentage {
return true
} else if dictionary1WinPercentage == dictionary2WinPercentage {
return dictionary1Played > dictionary2Played
}
}
}
return false
}
}
@objc func addMatchBarButtonItemTapped() {
let newMatchVC = UIStoryboard(name: "Match", bundle: nil).instantiateViewController(withIdentifier: "newMatchVC")
present(newMatchVC, animated: true, completion: nil)
}
// MARK:- TableView data source
func numberOfSections(in tableView: UITableView) -> Int {
if tableView.tag == 1 {
return 2
}
return 1
}
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
if tableView.tag == 0 {
return GameController.shared.playersBelongingToCurrentGame.count + 1
}
if section == 0 {
return 1
}
return playerStatsArrayOfDictionaries.count
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
if tableView.tag == 0 {
if indexPath.row == 0 {
let cell = tableView.dequeueReusableCell(withIdentifier: "playersTitleCell", for: indexPath)
return cell
}
guard let cell = tableView.dequeueReusableCell(withIdentifier: "playerCell", for: indexPath) as? PlayerTableViewCell else { return PlayerTableViewCell() }
let player = GameController.shared.playersBelongingToCurrentGame[indexPath.row - 1]
cell.player = player
return cell
}
if indexPath.section == 0 {
let cell = tableView.dequeueReusableCell(withIdentifier: "leaderboardsTitleCell", for: indexPath)
return cell
}
guard let cell = tableView.dequeueReusableCell(withIdentifier: "leaderboardsCell", for: indexPath) as? LeaderboardTableViewCell else { return LeaderboardTableViewCell() }
cell.updateViewsWith(playerDictionary: playerStatsArrayOfDictionaries[indexPath.row])
return cell
}
func tableView(_ tableView: UITableView, viewForHeaderInSection section: Int) -> UIView? {
if tableView.tag == 1 && section == 1 {
let headerView = UIView(frame: CGRect(x: 0, y: 0, width: tableView.frame.width, height: tableView.sectionHeaderHeight))
headerView.backgroundColor = view.backgroundColor
let playerLabel = UILabel()
playerLabel.text = "Player"
playerLabel.textColor = .white
playerLabel.font = UIFont(name: "Avenir Next", size: 20.0)
let playedLabel = UILabel()
playedLabel.text = "Played"
playedLabel.textColor = .white
playedLabel.font = UIFont(name: "Avenir Next", size: 20.0)
let wonLabel = UILabel()
wonLabel.text = "Won"
wonLabel.textColor = .white
wonLabel.font = UIFont(name: "Avenir Next", size: 20.0)
let lossLabel = UILabel()
lossLabel.text = "Loss"
lossLabel.textColor = .white
lossLabel.font = UIFont(name: "Avenir Next", size: 20.0)
let winPerLabel = UILabel()
winPerLabel.text = "Win %"
winPerLabel.textColor = .white
winPerLabel.font = UIFont(name: "Avenir Next", size: 20.0)
let labelStackView = UIStackView(arrangedSubviews: [playerLabel, playedLabel, wonLabel, lossLabel, winPerLabel])
labelStackView.axis = .horizontal
labelStackView.alignment = .fill
labelStackView.distribution = .equalSpacing
labelStackView.spacing = 0
labelStackView.contentMode = .scaleToFill
labelStackView.autoresizesSubviews = true
labelStackView.clearsContextBeforeDrawing = true
headerView.addSubview(labelStackView)
let views: [String: Any] = ["labelStackView": labelStackView, "headerView": headerView]
let headerViewHorizontalConstraint = NSLayoutConstraint.constraints(withVisualFormat: "|-(8)-[labelStackView]-(8)-|", options: [], metrics: nil, views: views)
let headerViewVerticalConstraint = NSLayoutConstraint.constraints(withVisualFormat: "V:|[labelStackView]|", options: [], metrics: nil, views: views)
labelStackView.translatesAutoresizingMaskIntoConstraints = false
headerView.addConstraints(headerViewHorizontalConstraint)
headerView.addConstraints(headerViewVerticalConstraint)
return headerView
}
return UIView()
}
func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
if tableView.tag == 0 {
if indexPath.row == 0 {
return 44
}
return 87
}
if indexPath.section == 0 {
return 44
}
return 87
}
func tableView(_ tableView: UITableView, heightForHeaderInSection section: Int) -> CGFloat {
if tableView.tag == 0 {
return 0
}
if section == 0 {
return 0
}
return 28
}
func tableView(_ tableView: UITableView, willDisplay cell: UITableViewCell, forRowAt indexPath: IndexPath) {
if tableView.tag == 0 && indexPath.row == 0 {
title = GameController.shared.currentGame?.name
}
if tableView.tag == 1 && indexPath.section == 0 && indexPath.row == 0 {
title = GameController.shared.currentGame?.name
}
}
func tableView(_ tableView: UITableView, didEndDisplaying cell: UITableViewCell, forRowAt indexPath: IndexPath) {
if tableView.tag == 0 && indexPath.row == 0 {
title = "Players"
}
if tableView.tag == 1 && indexPath.section == 0 && indexPath.row == 0 {
title = "Leaderboards"
}
}
}
<file_sep>/Leaderboards/Leaderboards/JoinPlayspaceViewController.swift
//
// JoinPlayspaceViewController.swift
// Leaderboards
//
// Created by <NAME> on 28/09/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Modal screen where the user joins an existing playspace by entering its password.
/// On failure (empty field or server rejection) a dedicated failure screen is shown.
class JoinPlayspaceViewController: UIViewController {
    @IBOutlet weak var passwordTextField: UITextField!
    @IBOutlet weak var navigationBar: UINavigationBar!

    let colorProvider = BackgroundColorProvider()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Random backdrop with a fully transparent navigation bar on top of it.
        view.backgroundColor = colorProvider.randomColor()
        navigationBar.setBackgroundImage(UIImage(), for: .default)
        navigationBar.shadowImage = UIImage()
        navigationBar.isTranslucent = true
    }

    /// Tapping anywhere outside the text field dismisses the keyboard.
    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        passwordTextField.resignFirstResponder()
    }

    @IBAction func cancelButtonTapped(_ sender: Any) {
        dismiss(animated: true, completion: nil)
    }

    /// Validates the password field, then asks the playspace controller to join.
    @IBAction func submitButtonTapped(_ sender: Any) {
        guard let password = passwordTextField.text, !password.isEmpty else {
            presentFailureScreen()
            return
        }
        PlayspaceController.shared.joinPlayspaceWith(password: password) { success in
            DispatchQueue.main.async {
                if success {
                    self.dismiss(animated: true, completion: nil)
                } else {
                    self.presentFailureScreen()
                }
            }
        }
    }

    /// Shows the shared "failed to join" storyboard screen.
    private func presentFailureScreen() {
        let failedstoryboard = UIStoryboard(name: "playspaceJoiningFailed", bundle: nil).instantiateViewController(withIdentifier: "failedToJoinPS")
        present(failedstoryboard, animated: true, completion: nil)
    }

    override var preferredStatusBarStyle: UIStatusBarStyle {
        return .lightContent
    }
}
<file_sep>/Leaderboards/Leaderboards/PlayspaceMemberTableViewCell.swift
//
// PlayspaceMemberTableViewCell.swift
// Leaderboards
//
// Created by <NAME> on 10/5/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Cell showing one playspace member: a circular avatar plus the username.
class PlayspaceMemberTableViewCell: UITableViewCell {
    @IBOutlet weak var playerImageView: UIImageView!
    @IBOutlet weak var playerNameLabel: UILabel!

    /// Fills the cell from an optional member; a nil member clears both views.
    func updateViewWith(member: Player?) {
        // Circular mask derived from the image view's current width.
        let radius = playerImageView.frame.size.width / 2
        playerImageView.layer.cornerRadius = radius
        playerImageView.clipsToBounds = true
        playerImageView.image = member?.photo
        playerNameLabel.text = member?.username
    }
}
<file_sep>/Leaderboards/Leaderboards/PendingMatchTableViewCell.swift
//
// PendingMatchTableViewCell.swift
// Leaderboards
//
// Created by <NAME> on 9/21/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Cell summarising one unverified ("pending") match from the current player's
/// perspective: opponent avatar (border colored by win/loss), game name, date,
/// and the score ordered so the current player's score reads first.
class PendingMatchTableViewCell: UITableViewCell {
    @IBOutlet weak var gameLabel: UILabel!
    @IBOutlet weak var opponentLabel: UILabel!
    @IBOutlet weak var dateLabel: UILabel!
    @IBOutlet weak var scoreLabel: UILabel!
    @IBOutlet weak var opponentImage: UIImageView!

    /// Accent used when the current player won (same green as elsewhere in the app).
    private static let winColor = UIColor(red: 52.0/255.0, green: 216.0/255.0, blue: 132.0/255.0, alpha: 1.0)

    /// DateFormatter is expensive to construct; share one instance across all cells.
    private static let mediumDateFormatter: DateFormatter = {
        let formatter = DateFormatter()
        formatter.dateStyle = .medium
        return formatter
    }()

    /// Configures the cell for `pendingMatch` against `opponent` in `game`.
    func updateViewsWith(_ pendingMatch: Match, opponent: Player?, game: Game?) {
        opponentImage.image = opponent?.photo
        opponentImage.layer.cornerRadius = opponentImage.frame.size.width / 2
        opponentImage.clipsToBounds = true
        gameLabel.text = game?.name
        opponentLabel.text = opponent?.username
        dateLabel.text = PendingMatchTableViewCell.mediumDateFormatter.string(from: pendingMatch.timestamp)
        guard let currentPlayer = PlayerController.shared.currentPlayer else { return }
        opponentImage.layer.borderWidth = 3.0
        if pendingMatch.winner.recordID == currentPlayer.recordID {
            // Current player won. Resetting the score color here fixes a cell-reuse
            // bug: the original only set textColor in the loss branch, so a reused
            // "loss" cell kept its red score when showing a win.
            opponentImage.layer.borderColor = PendingMatchTableViewCell.winColor.cgColor
            scoreLabel.textColor = PendingMatchTableViewCell.winColor
            scoreLabel.text = "\(pendingMatch.winnerScore) - \(pendingMatch.loserScore)"
        } else {
            // Current player lost: red accents, with their (losing) score first.
            opponentImage.layer.borderColor = UIColor.red.cgColor
            scoreLabel.textColor = UIColor.red
            scoreLabel.text = "\(pendingMatch.loserScore) - \(pendingMatch.winnerScore)"
        }
    }
}
<file_sep>/Leaderboards/Leaderboards/Player.swift
//
// Player.swift
// Leaderboards
//
// Created by <NAME> on 9/18/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import CloudKit
/// A registered player, backed by a CloudKit "Player" record.
/// Stored-property order is significant: it fixes the memberwise initializer.
struct Player {
    let recordID: CKRecord.ID
    var playspaces: [CKRecord.Reference]
    let username: String
    var photo: UIImage?
    let appleUserRef: CKRecord.Reference

    /// The profile photo staged through a temp file and wrapped as a `CKAsset`,
    /// or nil when there is no photo or the PNG data could not be written.
    var photoAsset: CKAsset? {
        guard let photo = photo else { return nil }
        // One temp file per record, keyed by the record name, so repeated
        // saves of the same player overwrite the same staging file.
        let stagingURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(recordID.recordName + ".dat")
        do {
            try photo.pngData()?.write(to: stagingURL)
            return CKAsset(fileURL: stagingURL)
        } catch {
            print("Error writing photo data", error)
        }
        return nil
    }
}
// MARK: CloudKit
extension Player {
    static let playspacesKey = "playspaces"
    static let recordType = "Player"
    static let usernameKey = "username"
    static let photoKey = "photo"
    static let appleUserRefKey = "appleUserRef"

    /// Failable initializer from a CloudKit record.
    /// Returns nil when the required `username` or `appleUserRef` fields are missing.
    init?(record: CKRecord) {
        guard let username = record[Player.usernameKey] as? String,
            let appleUserRef = record[Player.appleUserRefKey] as? CKRecord.Reference else { return nil }
        self.recordID = record.recordID
        // Playspace references may be absent on the server; default to empty.
        if let playspaces = record[Player.playspacesKey] as? [CKRecord.Reference] {
            self.playspaces = playspaces
        } else {
            self.playspaces = []
        }
        self.username = username
        // `CKAsset.fileURL` is Optional; the original force-unwrapped it
        // (`photoAsset.fileURL!`), which would crash on an asset with no local
        // file URL. Unwrap safely and treat that case as "no photo".
        if let photoAsset = record[Player.photoKey] as? CKAsset,
            let photoFileURL = photoAsset.fileURL,
            let photoData = try? Data(contentsOf: photoFileURL) {
            self.photo = UIImage(data: photoData)
        } else {
            self.photo = nil
        }
        self.appleUserRef = appleUserRef
    }

    /// A `CKRecord` snapshot of this player suitable for saving back to CloudKit.
    var CKRepresentation: CKRecord {
        let record = CKRecord(recordType: Player.recordType, recordID: recordID)
        // Store nil rather than an empty reference list (preserves the app's
        // existing server-side representation for players with no playspaces).
        if playspaces.count == 0 {
            record.setValue(nil, forKey: Player.playspacesKey)
        } else {
            record.setValue(playspaces, forKey: Player.playspacesKey)
        }
        record.setValue(username, forKey: Player.usernameKey)
        record.setValue(photoAsset, forKey: Player.photoKey)
        record.setValue(appleUserRef, forKey: Player.appleUserRefKey)
        return record
    }
}
<file_sep>/Leaderboards/Leaderboards/GameController.swift
//
// GameController.swift
// Leaderboards
//
// Created by <NAME> on 9/20/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import Foundation
import CloudKit
/// Singleton managing `Game` records in CloudKit and the in-memory caches the
/// UI reads: all games of the current playspace, split into games the current
/// player has/hasn't joined, plus the players of the current game.
class GameController {
static let shared = GameController()
// All games fetched for the current playspace.
var games = [Game]()
// The game whose players/matches the app is currently showing.
var currentGame: Game?
var gamesNotBelongingToCurrentPlayer = [Game]()
var gamesBelongingToCurrentPlayer = [Game]()
var playersBelongingToCurrentGame = [Player]()
/// Creates a new game in the current playspace with the current player as its
/// only member, appending it to the local cache on success.
func createGameWith(name: String, completion: @escaping (_ success: Bool) -> Void = { _ in }) {
guard let currentPlayerRecord = PlayerController.shared.currentPlayer?.CKRepresentation,
let currentPlayspaceRecord = PlayspaceController.shared.currentPlayspace?.CKRepresentation else { completion(false); return }
let game = Game(recordID: CKRecord.ID(recordName: UUID().uuidString), name: name, playspace: CKRecord.Reference(record: currentPlayspaceRecord, action: .none), players: [CKRecord.Reference(record: currentPlayerRecord, action: .none)])
CloudKitManager.shared.saveRecord(game.CKRepresentation) { (record, error) in
if let error = error {
print(error.localizedDescription)
completion(false)
return
}
self.gamesBelongingToCurrentPlayer.append(game)
completion(true)
}
}
/// Fetches every game of the current playspace, caches them in `games`, then
/// partitions them into the belonging/not-belonging lists.
func fetchGamesForCurrentPlayspace(completion: @escaping (_ success: Bool) -> Void = { _ in }) {
guard let currentPlayspace = PlayspaceController.shared.currentPlayspace else { completion(false); return }
let predicate = NSPredicate(format: "playspace == %@", currentPlayspace.recordID)
CloudKitManager.shared.fetchRecordsWithType(Game.recordType, predicate: predicate, recordFetchedBlock: nil) { (records, error) in
if let error = error {
print(error.localizedDescription)
completion(false)
return
}
guard let gamesRecords = records else { completion(false); return }
let games = gamesRecords.compactMap { Game(record: $0) }
self.games = games
self.sortGamesForCurrentPlayer(completion: { (success) in
if success {
completion(true)
} else {
completion(false)
}
})
}
}
/// Splits `games` by whether the current player appears in each game's
/// `players` references; purely local, always completes with true.
func sortGamesForCurrentPlayer(completion: @escaping (_ success: Bool) -> Void = { _ in }) {
var gamesNotBelongingToCurrentPlayer = [Game]()
let gamesBelongingToCurrentPlayer = games.filter { (game) -> Bool in
for player in game.players {
if player.recordID == PlayerController.shared.currentPlayer?.recordID {
return true
}
}
gamesNotBelongingToCurrentPlayer.append(game)
return false
}
self.gamesNotBelongingToCurrentPlayer = gamesNotBelongingToCurrentPlayer
self.gamesBelongingToCurrentPlayer = gamesBelongingToCurrentPlayer
completion(true)
}
/// Adds the current player to `game` by overwriting the record as-is.
/// NOTE(review): unlike `addCurrentPlayerToGame2`, this does not handle
/// server-side conflicts and can clobber concurrent edits — confirm which
/// variant callers should use.
func addCurrentPlayerToGame(_ game: Game, completion: @escaping (_ game: Game?, _ success: Bool) -> Void = { _, _ in }) {
var game = game
guard let currentPlayer = PlayerController.shared.currentPlayer else { completion(nil, false); return }
game.players.append(CKRecord.Reference(record: currentPlayer.CKRepresentation, action: .none))
CloudKitManager.shared.updateRecords([game.CKRepresentation], perRecordCompletion: nil) { (records, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, false)
return
}
guard let gameRecord = records?.first,
let game = Game(record: gameRecord) else { completion(nil, false); return }
completion(game, true)
}
}
/// Conflict-aware variant: fetches the latest game record, appends the current
/// player, and on a `serverRecordChanged` conflict retries once against the
/// server's copy of the record.
/// NOTE(review): on the conflict path both the retry's completion and the
/// outer `completion:` closure can fire, so `completion` may be invoked more
/// than once — confirm whether callers tolerate that.
func addCurrentPlayerToGame2(_ game: Game, completion: @escaping (_ game: Game?, _ success: Bool) -> Void = { _, _ in }) {
guard let currentPlayer = PlayerController.shared.currentPlayer else { completion(nil, false); return }
CloudKitManager.shared.fetchRecord(withID: game.recordID) { (game, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, false)
return
}
guard let game = game,
var players = game.value(forKey: Game.playersKey) as? [CKRecord.Reference] else { completion(nil, false); return }
players.append(CKRecord.Reference(recordID: currentPlayer.recordID, action: .none))
game.setValue(players, forKey: Game.playersKey)
CloudKitManager.shared.updateRecordsIfServerRecordChanged([game], perRecordCompletion: { (_, error) in
// Conflict: someone changed the record since our fetch. Re-apply the
// append on the server's record and push that instead.
if let error = error as? CKError,
error.code == CKError.Code.serverRecordChanged,
let gameServerRecord = error.serverRecord {
guard var players = gameServerRecord.value(forKey: Game.playersKey) as? [CKRecord.Reference] else { completion(nil, false); return }
players.append(CKRecord.Reference(recordID: currentPlayer.recordID, action: .none))
gameServerRecord.setValue(players as CKRecordValue, forKey: Game.playersKey)
CloudKitManager.shared.updateRecordsIfServerRecordChanged([gameServerRecord], perRecordCompletion: { (_, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, false)
return
}
}, completion: { (_, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, false)
return
}
completion(Game(record: gameServerRecord), true)
})
}
}, completion: { (_, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, false)
return
}
completion(Game(record: game), true)
})
}
}
/// Fetches every player of the current game except the current player and
/// stores them in `PlayerController.shared.opponents`.
func fetchOpponentsForCurrentGame(completion: @escaping (_ success: Bool) -> Void = { _ in }) {
guard let currentGame = currentGame else { completion(false); return }
let opponentsRecordIDs = currentGame.players.compactMap { $0.recordID }.filter {
if $0 == PlayerController.shared.currentPlayer?.recordID {
return false
}
return true
}
CloudKitManager.shared.fetchRecords(withIDs: opponentsRecordIDs) { (playerRecordsDictionary, error) in
if let error = error {
print(error.localizedDescription)
completion(false)
return
}
guard let playerRecordsDictionary = playerRecordsDictionary else { completion(false); return }
var opponentsRecords = [CKRecord]()
for playerRecord in playerRecordsDictionary.values {
opponentsRecords.append(playerRecord)
}
let opponents = opponentsRecords.compactMap { Player(record: $0) }
PlayerController.shared.opponents = opponents
completion(true)
}
}
/// Fetches every player of the current game (including the current player)
/// into `playersBelongingToCurrentGame`.
func fetchAllPlayersForCurrentGame(completion: @escaping (_ success: Bool) -> Void = { _ in }) {
guard let currentGame = currentGame else { completion(false); return }
let playerRecordIDs = currentGame.players.compactMap { $0.recordID }
CloudKitManager.shared.fetchRecords(withIDs: playerRecordIDs) { (playerRecordsDictionary, error) in
if let error = error {
print(error.localizedDescription)
completion(false)
return
}
guard let playerRecordsDictionary = playerRecordsDictionary else { completion(false); return }
var playersRecords = [CKRecord]()
for playerRecord in playerRecordsDictionary.values {
playersRecords.append(playerRecord)
}
let players = playersRecords.compactMap { Player(record: $0) }
self.playersBelongingToCurrentGame = players
completion(true)
}
}
/// Queries all games (across playspaces) whose `players` list contains the
/// current player; results are handed to the caller, not cached.
func fetchAllGamesForCurrentPlayer(completion: @escaping (_ games: [Game]?,_ success: Bool) -> Void = { _, _ in }) {
guard let currentPlayer = PlayerController.shared.currentPlayer else { completion(nil, false); return }
let currentPlayerIsInGamePredicate = NSPredicate(format: "players CONTAINS %@", currentPlayer.recordID)
CloudKitManager.shared.fetchRecordsWithType(Game.recordType, predicate: currentPlayerIsInGamePredicate, recordFetchedBlock: nil) { (records, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, false)
return
}
guard let gameRecords = records else { completion(nil, false); return }
let games = gameRecords.compactMap { Game(record: $0) }
completion(games, true)
}
}
/// Resolves the playspace record of each game, preserving the order of `games`
/// (duplicates included) when building the result array.
func fetchPlayspacesForGames(_ games: [Game], completion: @escaping (_ playspaces: [Playspace]?, _ success: Bool) -> Void = { _, _ in }) {
let playspaceRecordIDs = games.compactMap { $0.playspace.recordID }
CloudKitManager.shared.fetchRecords(withIDs: playspaceRecordIDs) { (playspacesDictionary, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, false)
return
}
guard let playspacesDictonary = playspacesDictionary else { completion(nil, false); return }
var playspaceRecords = [CKRecord]()
for playspaceRecordID in playspaceRecordIDs {
let playspaceRecord = playspacesDictonary[playspaceRecordID]
if let playspaceRecord = playspaceRecord {
playspaceRecords.append(playspaceRecord)
}
}
let playspaces = playspaceRecords.compactMap { Playspace(record: $0) }
completion(playspaces, true)
}
}
/// Removes the current player from `game` by overwriting the record as-is
/// (no conflict handling; see `removeCurrentPlayerFrom2`).
func removeCurrentPlayerFrom(_ game: Game, completion: @escaping (_ game: Game?, _ success: Bool) -> Void = { _, _ in }) {
var game = game
guard let currentPlayer = PlayerController.shared.currentPlayer,
let index = game.players.firstIndex(of: CKRecord.Reference(recordID: currentPlayer.recordID, action: .none)) else { completion(nil, false); return }
game.players.remove(at: index)
CloudKitManager.shared.updateRecords([game.CKRepresentation], perRecordCompletion: nil) { (records, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, false)
return
}
guard let gameRecord = records?.first,
let game = Game(record: gameRecord) else { completion(nil, false); return }
completion(game, true)
}
}
/// Conflict-aware removal: fetch-modify-save with one retry against the
/// server record on a `serverRecordChanged` conflict (mirrors
/// `addCurrentPlayerToGame2`, including the possible double-completion noted there).
func removeCurrentPlayerFrom2(_ game: Game, completion: @escaping (_ game: Game?, _ success: Bool) -> Void = { _, _ in }) {
guard let currentPlayer = PlayerController.shared.currentPlayer else { completion(nil, false); return }
CloudKitManager.shared.fetchRecord(withID: game.recordID) { (game, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, false)
return
}
guard let game = game,
var players = game.value(forKey: Game.playersKey) as? [CKRecord.Reference],
let index = players.firstIndex(of: CKRecord.Reference(recordID: currentPlayer.recordID, action: .none)) else { completion(nil, false); return }
players.remove(at: index)
// NOTE(review): uses setObject here vs. setValue in addCurrentPlayerToGame2 —
// presumably equivalent for CKRecord, but worth unifying.
game.setObject(players as CKRecordValue, forKey: Game.playersKey)
CloudKitManager.shared.updateRecordsIfServerRecordChanged([game], perRecordCompletion: { (_, error) in
if let error = error as? CKError,
error.code == CKError.Code.serverRecordChanged,
let gameServerRecord = error.serverRecord {
guard var players = gameServerRecord.value(forKey: Game.playersKey) as? [CKRecord.Reference],
let index = players.firstIndex(of: CKRecord.Reference(recordID: currentPlayer.recordID, action: .none)) else { completion(nil, false); return }
players.remove(at: index)
gameServerRecord.setValue(players as CKRecordValue, forKey: Game.playersKey)
CloudKitManager.shared.updateRecordsIfServerRecordChanged([gameServerRecord], perRecordCompletion: { (_, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, false)
return
}
}, completion: { (_, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, false)
return
}
completion(Game(record: gameServerRecord), true)
})
}
}, completion: { (_, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, false)
return
}
completion(Game(record: game), true)
})
}
}
/// Fetches the raw game records of an arbitrary playspace (records, not
/// decoded `Game` values, unlike `fetchGamesForCurrentPlayspace`).
func fetchGamesFor(_ playspace: Playspace, completion: @escaping (_ games: [CKRecord]?, _ success: Bool) -> Void = { _, _ in }) {
let predicate = NSPredicate(format: "playspace == %@", playspace.recordID)
CloudKitManager.shared.fetchRecordsWithType(Game.recordType, predicate: predicate, recordFetchedBlock: nil) { (records, error) in
if let error = error {
print(error.localizedDescription)
completion(nil, false)
return
}
guard let gamesRecords = records else { completion(nil, false); return }
completion(gamesRecords, true)
}
}
}
<file_sep>/Leaderboards/Leaderboards/Match.swift
//
// Match.swift
// Leaderboards
//
// Created by <NAME> on 9/18/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import Foundation
import CloudKit
/// One recorded match between two players of a game, stored in CloudKit.
/// Stored-property order is significant: it fixes the memberwise initializer.
struct Match {
let recordID: CKRecord.ID
// Game this match was played in.
let game: CKRecord.Reference
let winner: CKRecord.Reference
let winnerScore: Int
let loser: CKRecord.Reference
let loserScore: Int
// Mutable: flips to true once the opponent confirms the result.
var verified: Bool
let timestamp: Date
// Player who logged the match (may be either participant).
let creator: CKRecord.Reference
let participants: [CKRecord.Reference]
// Denormalized display strings — NOTE(review): presumably cached for
// notifications/lists so the referenced records need not be fetched; confirm.
let creatorString: String
let scoreString: String
let gameString: String
}
// MARK: CloudKit
extension Match {
// CloudKit field names for the "Match" record type.
static let timestampKey = "timestamp"
static let verifiedKey = "verified"
static let gameKey = "game"
static let winnerKey = "winner"
static let winnerScoreKey = "winnerScore"
static let loserKey = "loser"
static let loserScoreKey = "loserScore"
static let creatorKey = "creator"
static let participantsKey = "participants"
static let creatorStringKey = "creatorString"
static let scoreStringKey = "scoreString"
static let gameStringKey = "gameString"
static let recordType = "Match"
/// Failable initializer from a CloudKit record; every field is required,
/// so any missing/mistyped value makes the whole init return nil.
init?(record: CKRecord) {
guard let verified = record[Match.verifiedKey] as? Bool,
let timestamp = record[Match.timestampKey] as? Date,
let game = record[Match.gameKey] as? CKRecord.Reference,
let winner = record[Match.winnerKey] as? CKRecord.Reference,
let winnerScore = record[Match.winnerScoreKey] as? Int,
let loser = record[Match.loserKey] as? CKRecord.Reference,
let loserScore = record[Match.loserScoreKey] as? Int,
let creator = record[Match.creatorKey] as? CKRecord.Reference,
let participants = record[Match.participantsKey] as? [CKRecord.Reference],
let creatorString = record[Match.creatorStringKey] as? String,
let scoreString = record[Match.scoreStringKey] as? String,
let gameString = record[Match.gameStringKey] as? String else { return nil }
self.recordID = record.recordID
self.verified = verified
self.timestamp = timestamp
self.game = game
self.winner = winner
self.winnerScore = winnerScore
self.loser = loser
self.loserScore = loserScore
self.creator = creator
self.participants = participants
self.creatorString = creatorString
self.scoreString = scoreString
self.gameString = gameString
}
/// A `CKRecord` snapshot of this match suitable for saving to CloudKit.
var CKRepresentation: CKRecord {
let record = CKRecord(recordType: Match.recordType, recordID: recordID)
record.setValue(game, forKey: Match.gameKey)
record.setValue(winner, forKey: Match.winnerKey)
record.setValue(winnerScore, forKey: Match.winnerScoreKey)
record.setValue(loser, forKey: Match.loserKey)
record.setValue(loserScore, forKey: Match.loserScoreKey)
record.setValue(verified, forKey: Match.verifiedKey)
record.setValue(timestamp, forKey: Match.timestampKey)
record.setValue(creator, forKey: Match.creatorKey)
record.setValue(participants, forKey: Match.participantsKey)
record.setValue(creatorString, forKey: Match.creatorStringKey)
record.setValue(scoreString, forKey: Match.scoreStringKey)
record.setValue(gameString, forKey: Match.gameStringKey)
return record
}
}
<file_sep>/Leaderboards/Leaderboards/SelectOpponentViewController.swift
//
// SelectOpponentViewController.swift
// Leaderboards
//
// Created by <NAME> on 9/20/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Modal picker listing the current game's other players; the chosen opponent
/// is handed back to the new-match form via `newMatchVC`.
class SelectOpponentViewController: UIViewController {
    @IBOutlet weak var tableView: UITableView!

    @IBAction func viewButtonTapped(_ sender: Any) {
        dismiss(animated: true, completion: nil)
    }

    /// Weak back-reference to the presenting new-match form.
    weak var newMatchVC: NewMatchViewController?
    let colorProvider = BackgroundColorProvider()

    override func viewDidLoad() {
        super.viewDidLoad()
        tableView.delegate = self
        tableView.dataSource = self
        tableView.backgroundColor = colorProvider.randomColor()
        tableView.tableFooterView = UIView()  // suppress separators below the last row
        tableView.layer.cornerRadius = 5
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        // Refresh the opponent list every time the picker appears.
        GameController.shared.fetchOpponentsForCurrentGame { success in
            guard success else { return }
            DispatchQueue.main.async {
                self.tableView.reloadData()
            }
        }
    }

    override var preferredStatusBarStyle: UIStatusBarStyle {
        return .lightContent
    }
}
// MARK: - Table view data source & delegate
extension SelectOpponentViewController: UITableViewDataSource, UITableViewDelegate {
    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return PlayerController.shared.opponents.count
    }

    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        let cell = tableView.dequeueReusableCell(withIdentifier: "opponentCell", for: indexPath)
        let opponent = PlayerController.shared.opponents[indexPath.row]
        cell.textLabel?.text = opponent.username
        cell.textLabel?.textColor = .white
        cell.textLabel?.textAlignment = .center
        return cell
    }

    func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
        // Hand the chosen opponent to the new-match form, then close the picker.
        newMatchVC?.opponent = PlayerController.shared.opponents[indexPath.row]
        dismiss(animated: true, completion: nil)
    }

    func tableView(_ tableView: UITableView, willDisplay cell: UITableViewCell, forRowAt indexPath: IndexPath) {
        // Let the table's random background color show through each row.
        cell.backgroundColor = .clear
    }

    func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
        return 75
    }
}
<file_sep>/Leaderboards/Leaderboards/PlayerProfileLastFiveTableViewCell.swift
//
// PlayerProfileLastFiveTableViewCell.swift
// Leaderboards
//
// Created by <NAME> on 04/10/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Table-view cell that visualises a player's last five match results
/// as a row of circular "W"/"L" badges.
class PlayerProfileLastFiveTableViewCell: UITableViewCell {
    @IBOutlet var match1Label: UILabel!
    @IBOutlet var match2Label: UILabel!
    @IBOutlet var match3Label: UILabel!
    @IBOutlet var match4Label: UILabel!
    @IBOutlet var match5Label: UILabel!
    @IBOutlet var match1View: UIView!
    @IBOutlet var match2View: UIView!
    @IBOutlet var match3View: UIView!
    @IBOutlet var match4View: UIView!
    @IBOutlet var match5View: UIView!

    /// Outlets in display order, so results can be handled with a loop
    /// instead of the original five-way switch duplication.
    private var matchLabels: [UILabel] {
        return [match1Label, match2Label, match3Label, match4Label, match5Label]
    }
    private var matchViews: [UIView] {
        return [match1View, match2View, match3View, match4View, match5View]
    }

    override func awakeFromNib() {
        super.awakeFromNib()
        for badge in matchViews {
            // Fix: round each badge by its *own* width. The previous code used
            // match1View.frame.width for every badge, which only produced
            // circles when all five happened to be laid out the same size.
            badge.layer.cornerRadius = badge.frame.width / 2
            badge.clipsToBounds = true
        }
    }

    /// Shows up to the first five `matches`: a win for `player` renders "W"
    /// on the default badge colour, a loss renders "L" on a red badge.
    /// Badges without a corresponding match stay hidden (alpha 0).
    func updateViewsWith(matches: [Match], player: Player) {
        // Hide everything first; only badges backed by a match reappear.
        for (label, badge) in zip(matchLabels, matchViews) {
            label.alpha = 0
            badge.alpha = 0
        }
        for (index, match) in matches.enumerated() where index < matchViews.count {
            let label = matchLabels[index]
            let badge = matchViews[index]
            if match.winner.recordID == player.recordID {
                label.text = "W"
            } else {
                label.text = "L"
                badge.backgroundColor = UIColor.red
            }
            label.alpha = 1
            badge.alpha = 1
        }
    }
}
<file_sep>/Leaderboards/Leaderboards/Playspace.swift
//
// Playspace.swift
// Leaderboards
//
// Created by <NAME> on 9/18/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import Foundation
import CloudKit
/// A shared physical location (office, club, …) in which players record matches.
struct Playspace {
/// CloudKit record identifier backing this playspace.
let recordID: CKRecord.ID
/// Display name of the playspace.
let name: String
/// Join password. NOTE(review): stored and round-tripped as plain text in
/// the CloudKit record — confirm this is acceptable for the app's threat model.
let password: String
}
// MARK: - CloudKit
/// CloudKit (de)serialisation for `Playspace`.
extension Playspace {
    static let nameKey = "name"
    static let passwordKey = "password"
    static let recordType = "Playspace"

    /// Builds a `Playspace` from a fetched record; fails (returns nil)
    /// when either stored field is missing or not a String.
    init?(record: CKRecord) {
        guard let name = record[Playspace.nameKey] as? String,
            let password = record[Playspace.passwordKey] as? String else { return nil }
        self.recordID = record.recordID
        self.name = name
        // Fix: the previous line assigned the placeholder token `<PASSWORD>`,
        // which is not valid Swift; assign the value read from the record.
        self.password = password
    }

    /// The `CKRecord` to save when persisting this playspace.
    var CKRepresentation: CKRecord {
        let record = CKRecord(recordType: Playspace.recordType, recordID: recordID)
        record.setValue(name, forKey: Playspace.nameKey)
        record.setValue(password, forKey: Playspace.passwordKey)
        return record
    }
}
// MARK: - Equatable
extension Playspace: Equatable {
/// Identity-based equality: two values are equal when they are backed by the
/// same CloudKit record, regardless of name/password contents.
static func ==(lhs: Playspace, rhs: Playspace) -> Bool {
return lhs.recordID == rhs.recordID
}
}
<file_sep>/Leaderboards/Leaderboards/AppDelegate.swift
//
// AppDelegate.swift
// Leaderboards
//
// Created by <NAME> on 19/09/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
import UserNotifications
import CloudKit
/// Application entry point: configures global navigation-bar appearance,
/// requests notification permission, and installs a CloudKit subscription
/// that alerts the current player about new pending matches.
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
var window: UIWindow?
/// Sets white navigation-bar title/tint and asks for badge/sound/alert
/// permission; on success registers for remote notifications (main queue).
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
UINavigationBar.appearance().titleTextAttributes = [NSAttributedString.Key.foregroundColor: UIColor.white]
UINavigationBar.appearance().tintColor = .white
UNUserNotificationCenter.current().requestAuthorization(options: [.badge, .sound, .alert]) { (granted, error) in
if let error = error {
print(error.localizedDescription)
return
}
// registerForRemoteNotifications must be called on the main thread.
DispatchQueue.main.async {
UIApplication.shared.registerForRemoteNotifications()
}
}
return true
}
/// After APNs registration succeeds, fetches the current player and saves a
/// CKQuerySubscription that fires when someone ELSE creates an unverified
/// match involving that player.
/// NOTE(review): this runs on every launch and re-saves the same subscription;
/// CloudKit may answer with a duplicate-subscription error (only logged here)
/// — confirm that is the intended behaviour.
func application(_ application: UIApplication, didRegisterForRemoteNotificationsWithDeviceToken deviceToken: Data) {
PlayerController.shared.fetchCurrentPlayer { (success) in
DispatchQueue.main.async {
if success {
guard let currentPlayer = PlayerController.shared.currentPlayer else { return }
// Pending match = involves me, not yet verified, created by someone else.
let currentPlayerIsParticipantPredicate = NSPredicate(format: "participants CONTAINS %@", CKRecord.Reference(recordID: currentPlayer.recordID, action: .none))
let matchIsNotVerifiedPredicate = NSPredicate(format: "verified == false")
let currentPlayerIsNotCreatorPredicate = NSPredicate(format: "creator != %@", CKRecord.Reference(recordID: currentPlayer.recordID, action: .none))
let pendingMatchesForCurrentPlayerCompoundPredicate = NSCompoundPredicate(andPredicateWithSubpredicates: [currentPlayerIsParticipantPredicate, matchIsNotVerifiedPredicate, currentPlayerIsNotCreatorPredicate])
let subscription = CKQuerySubscription(recordType: Match.recordType, predicate: pendingMatchesForCurrentPlayerCompoundPredicate, options: .firesOnRecordCreation)
let notificationInfo = CKSubscription.NotificationInfo()
// The server substitutes these record fields into the localized alert below.
notificationInfo.desiredKeys = ["creatorString", "scoreString", "gameString"]
notificationInfo.alertLocalizationKey = "New Pending Match: %1$@ submitted a %2$@ in %3$@"
notificationInfo.alertLocalizationArgs = ["creatorString", "scoreString", "gameString"]
notificationInfo.soundName = "default"
notificationInfo.shouldBadge = true
subscription.notificationInfo = notificationInfo
CloudKitManager.shared.publicDB.save(subscription) { (_, error) in
if let error = error {
print(error.localizedDescription)
}
}
}
}
}
}
}
<file_sep>/Leaderboards/Leaderboards/GameStatsTableViewCell.swift
//
// GameStatsTableViewCell.swift
// Leaderboards
//
// Created by <NAME> on 10/1/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
import UIKit
/// Table-view cell displaying aggregate per-game statistics for one player.
class GameStatsTableViewCell: UITableViewCell {
    @IBOutlet weak var gameLabel: UILabel!
    @IBOutlet weak var playedLabel: UILabel!
    @IBOutlet weak var winsLabel: UILabel!
    @IBOutlet weak var lossesLabel: UILabel!
    @IBOutlet weak var winPercentageLabel: UILabel!
    @IBOutlet weak var pointsForLabel: UILabel!
    @IBOutlet weak var pointsAgainstLabel: UILabel!

    /// Populates the cell from a stats dictionary built elsewhere in the app.
    /// Leaves the cell untouched when any expected key is missing or mistyped.
    func updateViewsWith(_ gameDictionary: [String: Any]) {
        guard let game = gameDictionary["game"] as? Game else { return }
        guard let played = gameDictionary["played"] as? Int,
            let wins = gameDictionary["wins"] as? Int,
            let losses = gameDictionary["losses"] as? Int else { return }
        guard let winPercentage = gameDictionary["winPercentage"] as? Double,
            let pointsFor = gameDictionary["pointsFor"] as? Int,
            let pointsAgainst = gameDictionary["pointsAgainst"] as? Int else { return }

        gameLabel.text = "\(game.name)"
        playedLabel.text = "\(played)"
        winsLabel.text = "\(wins)"
        lossesLabel.text = "\(losses)"
        // winPercentage arrives as a 0…1 fraction; shown as a whole percent.
        winPercentageLabel.text = "\(Int(winPercentage * 100))%"
        pointsForLabel.text = "\(pointsFor)"
        pointsAgainstLabel.text = "\(pointsAgainst)"
    }
}
| 03b96541a4fec102dca4f17916a36eb770940893 | [
"Swift",
"Markdown"
] | 43 | Swift | jatinm11/leaderboards | bb5eb9b7e288e36719118f1e493d9d488208c731 | 15f3217a1d31d666c320a5cd6de79d9799baba1b |
refs/heads/master | <file_sep><?php
namespace App\Form;
use App\Entity\Visite;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\Form\Extension\Core\Type\DateType;
use Symfony\Component\Form\Extension\Core\Type\SubmitType;
use Symfony\Component\Form\Extension\Core\Type\TextType;
use Symfony\Component\Form\Extension\Core\Type\TextareaType;
use Symfony\Component\OptionsResolver\OptionsResolver;
/**
 * Symfony form type used to create or edit a Visite (patient visit).
 */
class VisiteType extends AbstractType
{
    /**
     * Builds the visit form: motif, date and free-text observations.
     */
    public function buildForm(FormBuilderInterface $builder, array $options)
    {
        $builder
            ->add('motif', TextType::class, array('label' => 'Motif : '))
            ->add('date', DateType::class, array(
                'label' => 'Date : ',
                // Single HTML5 date input instead of three select boxes.
                'widget' => 'single_text',
                'format' => 'yyyy-MM-dd',
                'required' => true,
            ))
            // Fix: use the imported class name exactly (was "TextAreaType",
            // which only worked because PHP resolves aliases case-insensitively).
            ->add('observations', TextareaType::class, array(
                'label' => 'Observations : ',
                'required' => false,
                'attr' => array('cols' => '40', 'rows' => '4'),
            ))
            ->add('Enregistrer', SubmitType::class); // removed stray empty ";" statement
    }

    /**
     * Binds the form to the Visite entity.
     */
    public function configureOptions(OptionsResolver $resolver)
    {
        $resolver->setDefaults([
            'data_class' => Visite::class,
        ]);
    }
}
<file_sep><?php
namespace App\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
 * Doctrine entity describing a supplier ("fournisseur") of the practice:
 * contact details plus free-text notes. Only the name is mandatory.
 *
 * @ORM\Entity
 * @ORM\Table(name="fournisseur")
 */
class Fournisseur
{
/**
 * Surrogate primary key, generated by the database.
 *
 * @ORM\Id()
 * @ORM\GeneratedValue()
 * @ORM\Column(type="integer")
 */
private $id;
/**
 * Supplier name (required, free-form string).
 *
 * @ORM\Column(type="string")
 */
private $nom;
/**
 * Phone number, stored as free-form text.
 *
 * @ORM\Column(type="string",nullable=true)
 */
private $numTelephone;
/**
 * Contact e-mail address.
 *
 * @ORM\Column(type="string",nullable=true)
 */
private $mail;
/**
 * Web-site URL.
 *
 * @ORM\Column(type="string",nullable=true)
 */
private $siteWeb;
/**
 * Postal address.
 *
 * @ORM\Column(type="string",nullable=true)
 */
private $adresse;
/**
 * Free-text notes about the supplier.
 *
 * @var text
 *
 * @ORM\Column(type="text",nullable=true)
 */
private $observations;
// --- Plain accessors below: no validation or side effects. ---
function getId() {
return $this->id;
}
function getNom() {
return $this->nom;
}
function getNumTelephone() {
return $this->numTelephone;
}
function getMail() {
return $this->mail;
}
function getSiteWeb() {
return $this->siteWeb;
}
function getAdresse() {
return $this->adresse;
}
function getObservations() {
return $this->observations;
}
// setId is unusual for a generated key; presumably only used by tests/imports
// — TODO confirm before removing.
function setId($id) {
$this->id = $id;
}
function setNom($nom) {
$this->nom = $nom;
}
function setNumTelephone($numTelephone) {
$this->numTelephone = $numTelephone;
}
function setMail($mail) {
$this->mail = $mail;
}
function setSiteWeb($siteWeb) {
$this->siteWeb = $siteWeb;
}
function setAdresse($adresse) {
$this->adresse = $adresse;
}
function setObservations( $observations) {
$this->observations = $observations;
}
}
<file_sep><?php
namespace App\Repository;
use Doctrine\ORM\EntityRepository;
/**
 * Custom Doctrine repository for the Patient entity.
 */
class PatientRepository extends EntityRepository
{
    /**
     * Returns a query builder listing all patients ordered alphabetically
     * by name; used to populate entity choice lists in forms.
     */
    public function getOrderQueryBuilder()
    {
        $qb = $this->createQueryBuilder('patient');
        return $qb->orderBy('patient.nom', 'ASC');
    }
}
<file_sep><?php
namespace App\Form;
use App\Entity\Medecin;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\Form\Extension\Core\Type\SubmitType;
use Symfony\Component\Form\Extension\Core\Type\TextType;
use Symfony\Component\Form\Extension\Core\Type\TextareaType;
use Symfony\Component\OptionsResolver\OptionsResolver;
/**
 * Symfony form type used to create or edit a Medecin (referring doctor).
 */
class MedecinType extends AbstractType
{
    /**
     * Builds the doctor form; only the name is mandatory.
     */
    public function buildForm(FormBuilderInterface $builder, array $options)
    {
        $builder
            ->add('nom', TextType::class, array('label' => 'Nom : '))
            ->add('specialite', TextType::class, array('label' => 'Specialité : ', 'required' => false))
            ->add('hopital', TextType::class, array('label' => 'Hopital : ', 'required' => false))
            ->add('adresse', TextType::class, array('label' => 'Adresse : ', 'required' => false))
            ->add('mail', TextType::class, array('label' => 'Mail : ', 'required' => false))
            // Fix: use the imported class name exactly (was "TextAreaType").
            ->add('observations', TextareaType::class, array(
                'label' => 'Observations : ',
                'required' => false,
                'attr' => array('cols' => '40', 'rows' => '4'),
            ))
            ->add('Enregistrer', SubmitType::class); // removed stray empty ";" statement
    }

    /**
     * Binds the form to the Medecin entity.
     */
    public function configureOptions(OptionsResolver $resolver)
    {
        $resolver->setDefaults([
            'data_class' => Medecin::class,
        ]);
    }
}
<file_sep><?php
namespace App\Entity;
use Doctrine\ORM\Mapping as ORM;
use Symfony\Component\HttpFoundation\File\UploadedFile;
use Symfony\Component\Validator\Constraints as Assert;
/**
 * Doctrine entity for a file/document attached to a patient's record.
 *
 * @ORM\Entity
 * @ORM\Table(name="document")
 */
class Document
{
/**
 * Surrogate primary key.
 *
 * @ORM\Id()
 * @ORM\GeneratedValue()
 * @ORM\Column(type="integer")
 */
private $id;
/**
 * Human-readable title of the document.
 *
 * @ORM\Column(type="string", nullable=true)
 */
private $intitule;
/**
 * Stored file path — presumably relative to the upload directory;
 * TODO confirm against the upload-handling code (not visible here).
 *
 * @ORM\Column(type="string", length=255, nullable=true)
 */
private $path;
/**
 * Absolute filesystem path of the stored file.
 * NOTE(review): persisting absolute paths ties data to one deployment — confirm intended.
 *
 * @ORM\Column(type="string", length=255, nullable=true)
 */
private $absolutePath;
/**
 * Date associated with the document.
 *
 * @ORM\Column(type="date",nullable=true)
 * @Assert\Date()
 */
private $date;
/**
 * Free-text notes about the document.
 *
 * @var text
 *
 * @ORM\Column(type="text",nullable=true)
 */
private $observations;
/**
 * Owning patient (required, owning side of Patient::$documents).
 *
 * @ORM\ManyToOne(targetEntity="App\Entity\Patient", inversedBy="documents")
 * @ORM\JoinColumn(nullable=false)
 */
private $patient;
function getId() {
return $this->id;
}
function setId($id) {
$this->id = $id;
}
function getIntitule() {
return $this->intitule;
}
function setIntitule($intitule) {
$this->intitule = $intitule;
}
// Transient upload holder: not mapped by Doctrine, so never persisted.
private $file;
function getFile() {
return $this->file;
}
function setFile(UploadedFile $file = null)
{
$this->file = $file;
}
function getAbsolutePath() {
return $this->absolutePath;
}
function setAbsolutePath($absolutePath) {
$this->absolutePath = $absolutePath;
}
function getDate() {
return $this->date;
}
function setDate($date) {
$this->date = $date;
}
function getPath() {
return $this->path;
}
function setPath($path) {
$this->path = $path;
}
function getPatient(): Patient{
return $this->patient;
}
function setPatient(Patient $patient) {
$this->patient = $patient;
}
function getObservations() {
return $this->observations;
}
function setObservations( $observations) {
$this->observations = $observations;
}
}
<file_sep><?php
namespace App\Entity;
use Doctrine\ORM\Mapping as ORM;
use Symfony\Component\Validator\Constraints as Assert;
use Doctrine\Common\Collections\ArrayCollection;
use Doctrine\Common\Collections\Collection;
use App\Repository\PatientRepository;
/**
 * Doctrine entity holding a patient's administrative and medical record,
 * together with its associations (visits, doctors, Qi Gong coupons, documents).
 *
 * Fixes: removed the duplicate bare "@ORM\Entity" annotation, and
 * addCouponsQiGong() now type-hints CouponQiGong and checks the correct
 * collection (it previously checked $this->visites).
 *
 * @ORM\Entity(repositoryClass="App\Repository\PatientRepository")
 * @ORM\Table(name="patient")
 */
class Patient {
    /**
     * @var int
     *
     * @ORM\Id
     * @ORM\GeneratedValue(strategy="AUTO")
     * @ORM\Column(type="integer")
     */
    private $id;
    /**
     * Last name (required).
     *
     * @var string
     *
     * @ORM\Column(type="string")
     */
    private $nom;
    /**
     * @var string
     *
     * @ORM\Column(type="string", nullable=true)
     */
    private $prenom;
    /**
     * Birth date.
     *
     * @ORM\Column(name="dateNaiss",type="date", nullable=true)
     * @Assert\Date()
     */
    private $dateNaiss ;
    /**
     * @var string
     *
     * @ORM\Column(type="string", nullable=true)
     */
    private $adresse;
    /**
     * Postal code.
     *
     * @var string
     *
     * @ORM\Column(type="string", nullable=true)
     */
    private $CP;
    /**
     * @var string
     *
     * @ORM\Column(type="string", nullable=true)
     */
    private $ville;
    /**
     * @var string
     *
     * @ORM\Column(type="string", nullable=true)
     */
    private $telephone;
    /**
     * @var string
     *
     * @ORM\Column(type="string", nullable=true)
     */
    private $mail;
    /**
     * Marital status ("situation familiale").
     *
     * @var string
     *
     * @ORM\Column(type="string", nullable=true)
     */
    private $sitFam;
    /**
     * @var string
     *
     * @ORM\Column(type="string", nullable=true)
     */
    private $profession;
    /**
     * @var integer
     *
     * @ORM\Column(type="integer", nullable=true)
     */
    private $nbEnfant;
    /**
     * @var string
     *
     * @ORM\Column(type="string", nullable=true)
     */
    private $accouchement;
    /**
     * @var string
     *
     * @ORM\Column(type="string", nullable=true)
     */
    private $typeHabitat;
    /**
     * @var text
     *
     * @ORM\Column(type="text", nullable=true)
     */
    private $allergies;
    /**
     * @var text
     *
     * @ORM\Column(type="text", nullable=true)
     */
    private $traitementEnCours;
    /**
     * Surgical history.
     *
     * @var string
     *
     * @ORM\Column(type="string", nullable=true)
     */
    private $atcdChirurgical;
    /**
     * Family medical history.
     *
     * @var string
     *
     * @ORM\Column(type="string", nullable=true)
     */
    private $atcdFamiliaux;
    /**
     * Personal medical history.
     *
     * @var string
     *
     * @ORM\Column(type="string", nullable=true)
     */
    private $atcdMedical;
    /**
     * @var string
     *
     * @ORM\Column(type="string", nullable=true)
     */
    private $contraception;
    /**
     * Date of the last visit.
     *
     * @Assert\Date()
     * @ORM\Column(type="date", nullable=true)
     */
    private $derniereVisite;
    /**
     * @var text
     *
     * @ORM\Column(type="text", nullable=true)
     */
    private $motifDerniereVisite;
    /**
     * @var text
     *
     * @ORM\Column(type="text", nullable=true)
     */
    private $observations;
    /**
     * Whether the patient accepts non-traditional medicine.
     *
     * @var boolean
     *
     * @ORM\Column(type="boolean", nullable=true)
     */
    private $accepteMedNonTradi;
    /**
     * Whether the patient accepts acupuncture.
     *
     * @var boolean
     *
     * @ORM\Column(type="boolean", nullable=true)
     */
    private $accepteAcup;
    /**
     * @ORM\OneToMany(targetEntity="App\Entity\Visite", mappedBy="patient")
     */
    private $visites;
    /**
     * @ORM\OneToMany(targetEntity="App\Entity\Medecin", mappedBy="patient")
     */
    private $medecins;
    /**
     * @ORM\OneToMany(targetEntity="App\Entity\CouponQiGong", mappedBy="patient")
     */
    private $couponsQiGong;
    /**
     * @ORM\OneToMany(targetEntity="App\Entity\Document", mappedBy="patient")
     */
    private $documents;

    public function __construct()
    {
        $this->visites = new ArrayCollection();
        $this->medecins = new ArrayCollection();
        $this->couponsQiGong = new ArrayCollection();
        $this->documents = new ArrayCollection();
    }

    /**
     * @return Collection|Visite[]
     */
    public function getVisites()
    {
        return $this->visites;
    }

    /**
     * Adds a visit (idempotent) and keeps the owning side in sync.
     */
    public function addVisite(Visite $visite)
    {
        if ($this->visites->contains($visite)) {
            return;
        }
        $this->visites[] = $visite;
        // set the *owning* side!
        $visite->setPatient($this);
    }

    /**
     * @return Collection|Medecin[]
     */
    public function getMedecins()
    {
        return $this->medecins;
    }

    /**
     * Adds a doctor (idempotent) and keeps the owning side in sync.
     */
    public function addMedecin(Medecin $medecin)
    {
        if ($this->medecins->contains($medecin)) {
            return;
        }
        $this->medecins[] = $medecin;
        // set the *owning* side!
        $medecin->setPatient($this);
    }

    /**
     * @return Collection|CouponQiGong[]
     */
    public function getCouponsQiGong()
    {
        return $this->couponsQiGong;
    }

    /**
     * Adds a Qi Gong coupon (idempotent) and keeps the owning side in sync.
     * Fix: this method previously type-hinted Visite and tested membership
     * against $this->visites, so duplicates were never detected.
     */
    public function addCouponsQiGong(CouponQiGong $couponQiGong)
    {
        if ($this->couponsQiGong->contains($couponQiGong)) {
            return;
        }
        $this->couponsQiGong[] = $couponQiGong;
        // set the *owning* side!
        $couponQiGong->setPatient($this);
    }

    /**
     * @return Collection|Document[]
     */
    public function getDocuments()
    {
        return $this->documents;
    }

    /**
     * Adds a document (idempotent) and keeps the owning side in sync.
     */
    public function addDocument(Document $document)
    {
        if ($this->documents->contains($document)) {
            return;
        }
        $this->documents[] = $document;
        // set the *owning* side!
        $document->setPatient($this);
    }

    // --- Plain accessors below: no validation or side effects. ---
    function setId($id) {
        $this->id = $id;
    }
    function getId() {
        return $this->id;
    }
    function getNom() {
        return $this->nom;
    }
    function getPrenom() {
        return $this->prenom;
    }
    function getDateNaiss() {
        return $this->dateNaiss;
    }
    function getAdresse() {
        return $this->adresse;
    }
    function getCP() {
        return $this->CP;
    }
    function getVille() {
        return $this->ville;
    }
    function getTelephone() {
        return $this->telephone;
    }
    function getMail() {
        return $this->mail;
    }
    function getSitFam() {
        return $this->sitFam;
    }
    function getProfession() {
        return $this->profession;
    }
    function getNbEnfant() {
        return $this->nbEnfant;
    }
    function getAccouchement() {
        return $this->accouchement;
    }
    function getTypeHabitat() {
        return $this->typeHabitat;
    }
    function getAllergies() {
        return $this->allergies;
    }
    function getTraitementEnCours(){
        return $this->traitementEnCours;
    }
    function getAtcdChirurgical() {
        return $this->atcdChirurgical;
    }
    function getAtcdFamiliaux() {
        return $this->atcdFamiliaux;
    }
    function getAtcdMedical() {
        return $this->atcdMedical;
    }
    function getContraception() {
        return $this->contraception;
    }
    function getObservations(){
        return $this->observations;
    }
    function getAccepteMedNonTradi() {
        return $this->accepteMedNonTradi;
    }
    function getAccepteAcup() {
        return $this->accepteAcup;
    }
    function setNom($nom) {
        $this->nom = $nom;
    }
    function setPrenom($prenom) {
        $this->prenom = $prenom;
    }
    function setDateNaiss($dateNaiss) {
        $this->dateNaiss = $dateNaiss;
    }
    function setAdresse($adresse) {
        $this->adresse = $adresse;
    }
    function setCP($CP) {
        $this->CP = $CP;
    }
    function setVille($ville) {
        $this->ville = $ville;
    }
    function setTelephone($telephone) {
        $this->telephone = $telephone;
    }
    function setMail($mail) {
        $this->mail = $mail;
    }
    function setSitFam($sitFam) {
        $this->sitFam = $sitFam;
    }
    function setProfession($profession) {
        $this->profession = $profession;
    }
    function setNbEnfant($nbEnfant) {
        $this->nbEnfant = $nbEnfant;
    }
    function setAccouchement($accouchement) {
        $this->accouchement = $accouchement;
    }
    function setTypeHabitat($typeHabitat) {
        $this->typeHabitat = $typeHabitat;
    }
    function setAllergies($allergies) {
        $this->allergies = $allergies;
    }
    function setTraitementEnCours( $traitementEnCours) {
        $this->traitementEnCours = $traitementEnCours;
    }
    function setAtcdChirurgical($atcdChirurgical) {
        $this->atcdChirurgical = $atcdChirurgical;
    }
    function setAtcdFamiliaux($atcdFamiliaux) {
        $this->atcdFamiliaux = $atcdFamiliaux;
    }
    function setAtcdMedical($atcdMedical) {
        $this->atcdMedical = $atcdMedical;
    }
    function setContraception($contraception) {
        $this->contraception = $contraception;
    }
    function setObservations( $observations) {
        $this->observations = $observations;
    }
    function setAccepteMedNonTradi($accepteMedNonTradi) {
        $this->accepteMedNonTradi = $accepteMedNonTradi;
    }
    function setAccepteAcup($accepteAcup) {
        $this->accepteAcup = $accepteAcup;
    }
}
<file_sep>var tr = document.getElementsByClassName('parent');
var saisieNom = document.getElementById('searchNom');
var saisiePrenom = document.getElementById('searchPrenom');
//recherche sur le nom
saisieNom.onkeyup = function() {
tds= document.getElementsByClassName('searchNom');
//Parcours de tous les tr avec class 'searchNom'
for (var i=0;i<tds.length;i++){
//Parcours du nom dans le TR
for (var x=0;x<=saisieNom.value.length;x++)
{
encours=tds[i].innerText.substring(0,x);
if (saisieNom.value === encours)
{
tr[i].style.display='';
}
else
{
tr[i].style.display='none';
}
}
}
};
//recherche sur le prenom
saisiePrenom.onkeyup = function() {
tds= document.getElementsByClassName('searchPrenom');
//Parcours de tous les tr avec class 'searchPrenom'
for (var i=0;i<tds.length;i++){
//Parcours du nom dans le TR
for (var x=0;x<=saisiePrenom.value.length;x++)
{
encours=tds[i].innerText.substring(0,x);
if (saisiePrenom.value === encours)
{
tr[i].style.display='';
}
else
{
tr[i].style.display='none';
}
}
}
};
<file_sep><?php
namespace App\DataFixtures;
use App\Entity\User;
use Doctrine\Bundle\FixturesBundle\Fixture;
use Doctrine\Common\Persistence\ObjectManager;
use Symfony\Component\Security\Core\Encoder\UserPasswordEncoderInterface;
/**
 * Doctrine fixture that seeds the database with two default users.
 */
class DataUserFixtures extends Fixture
{
    /** @var UserPasswordEncoderInterface */
    private $passwordEncoder;

    public function __construct(UserPasswordEncoderInterface $passwordEncoder)
    {
        $this->passwordEncoder = $passwordEncoder;
    }

    /**
     * Creates and persists the default users with encoded passwords.
     */
    public function load(ObjectManager $manager)
    {
        $user = new User();
        $user->setFullName('<NAME>');
        $user->setUsername('jane_admin');
        $user->setPassword($this->passwordEncoder->encodePassword($user, 'toto'));
        $user->setEmail('<EMAIL>');
        $user->setRoles(['ROLE_USER']);
        $manager->persist($user);

        $user2 = new User();
        $user2->setFullName('<NAME>');
        $user2->setUsername('clement');
        // Fix: the password was previously encoded with $user as the encoder
        // target; it must be encoded for $user2, the user it is assigned to.
        $user2->setPassword($this->passwordEncoder->encodePassword($user2, 'toto'));
        $user2->setEmail('<EMAIL>');
        $user2->setRoles(['ROLE_USER']);
        $manager->persist($user2);

        $manager->flush();
    }

    /**
     * Sample user data. NOTE(review): currently unused by load(); kept for
     * reference / future data-driven seeding.
     */
    private function getUserData(): array
    {
        return [
            // $userData = [$fullname, $username, $password, $email, $roles];
            ['<NAME>', 'jane_admin', 'kitten', '<EMAIL>', ['ROLE_ADMIN']],
            ['<NAME>', 'tom_admin', 'kitten', '<EMAIL>', ['ROLE_ADMIN']],
            ['<NAME>', 'john_user', 'kitten', '<EMAIL>', ['ROLE_USER']],
        ];
    }
}
<file_sep><?php
namespace App\Controller;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpKernel\Exception\NotFoundHttpException;
use App\Entity\Patient;
use App\Entity\CouponQiGong;
use App\Form\CouponQiGongType;
use App\Form\EditCouponQiGongType;
/**
 * CRUD controller for Qi Gong session coupons.
 */
class QiGongController extends Controller
{
    /**
     * Lists every Qi Gong coupon.
     */
    public function menuQiGong()
    {
        $repository = $this->getDoctrine()->getRepository(CouponQiGong::class);
        $listCouponsQiGong = $repository->findAll();
        return $this->render('QiGong/menuQiGong.html.twig',
            array('listCouponsQiGong' => $listCouponsQiGong)
        );
    }

    /**
     * Creates a new coupon from the submitted form, then returns to the list.
     */
    public function ajouterCouponQiGong(Request $request)
    {
        $couponQiGong = new CouponQiGong();
        $form = $this->createForm(CouponQiGongType::class, $couponQiGong);
        if ($request->isMethod('POST') && $form->handleRequest($request)->isValid()) {
            $em = $this->getDoctrine()->getManager();
            $em->persist($couponQiGong);
            $em->flush();
            // The list page reloads its own data; no parameters needed (the
            // previous code re-queried all coupons and passed entities as
            // route parameters, which only ended up in the query string).
            return $this->redirectToRoute('menu_QiGong');
        }
        return $this->render('QiGong/ajouterCouponQiGong.html.twig', array(
            'form' => $form->createView(),
        ));
    }

    /**
     * Edits an existing coupon.
     *
     * @throws NotFoundHttpException when no coupon matches $idCQG
     */
    public function editerCouponQiGong($idCQG, Request $request)
    {
        $em = $this->getDoctrine()->getManager();
        $couponQiGong = $em->getRepository(CouponQiGong::class)->find($idCQG);
        // Fix: check for null BEFORE dereferencing the entity — getPatient()
        // was previously called first, turning an unknown id into a fatal
        // error instead of a 404.
        if (null === $couponQiGong) {
            throw new NotFoundHttpException("Le coupon d'id ".$idCQG." n'existe pas.");
        }
        $patient = $couponQiGong->getPatient();
        $form = $this->get('form.factory')->create(EditCouponQiGongType::class, $couponQiGong);
        if ($request->isMethod('POST') && $form->handleRequest($request)->isValid()) {
            $em->flush();
            return $this->redirectToRoute('menu_QiGong',
                array('id' => $couponQiGong->getId()));
        }
        return $this->render('QiGong/editerCouponQiGong.html.twig', array(
            'couponQiGong' => $couponQiGong,
            'patient' => $patient,
            'form' => $form->createView(),
        ));
    }

    /**
     * Asks for confirmation (GET) then deletes the coupon (POST).
     *
     * @throws NotFoundHttpException when no coupon matches $idCQG
     */
    public function supprimerCouponQiGong($idCQG, Request $request)
    {
        $em = $this->getDoctrine()->getManager();
        $couponQiGong = $em->getRepository(CouponQiGong::class)->find($idCQG);
        // Fix: null check must precede getPatient() (see editerCouponQiGong).
        if (null === $couponQiGong) {
            throw new NotFoundHttpException("Le coupon d'id ".$idCQG." n'existe pas.");
        }
        $patientQG = $couponQiGong->getPatient();
        // Empty form whose only purpose is CSRF protection of the delete.
        $form = $this->get('form.factory')->create();
        if ($request->isMethod('POST') && $form->handleRequest($request)->isValid()) {
            $em->remove($couponQiGong);
            $em->flush();
            $request->getSession()->getFlashBag()->add('info', "Le coupon a bien été supprimé.");
            return $this->redirectToRoute('menu_QiGong');
        }
        return $this->render('QiGong/supprimerCouponQiGong.html.twig', array(
            'couponQiGong' => $couponQiGong,
            'patient' => $patientQG,
            'form' => $form->createView(),
        ));
    }
}
?><file_sep><?php
namespace App\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
 * Doctrine entity for a single patient visit (motif, date, notes).
 *
 * @ORM\Entity
 * @ORM\Table(name="visite")
 */
class Visite
{
/**
 * Surrogate primary key.
 *
 * @ORM\Id()
 * @ORM\GeneratedValue()
 * @ORM\Column(type="integer")
 */
private $id;
/**
 * Reason for the visit (required).
 *
 * @ORM\Column(type="string")
 */
private $motif;
/**
 * Date of the visit.
 *
 * @ORM\Column(type="date",nullable=true)
 */
private $date;
/**
 * Free-text notes taken during the visit.
 *
 * @var text
 *
 * @ORM\Column(type="text",nullable=true)
 */
private $observations;
/**
 * Owning patient (required, owning side of Patient::$visites).
 *
 * @ORM\ManyToOne(targetEntity="App\Entity\Patient", inversedBy="visites")
 * @ORM\JoinColumn(nullable=false)
 */
private $patient;
// --- Plain accessors below: no validation or side effects. ---
function getId() {
return $this->id;
}
function setId($id) {
$this->id = $id;
}
function getMotif() {
return $this->motif;
}
function setMotif($motif) {
$this->motif = $motif;
}
function getDate() {
return $this->date;
}
function setDate($date) {
$this->date = $date;
}
function getPatient(): Patient{
return $this->patient;
}
function setPatient(Patient $patient) {
$this->patient = $patient;
}
function getObservations() {
return $this->observations;
}
function setObservations( $observations) {
$this->observations = $observations;
}
}
<file_sep><?php
namespace App\Form;
use App\Entity\Patient;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\Form\Extension\Core\Type\SubmitType;
use Symfony\Component\Form\Extension\Core\Type\IntegerType;
use Symfony\Component\Form\Extension\Core\Type\TextType;
use Symfony\Component\Form\Extension\Core\Type\TextareaType;
use Symfony\Bridge\Doctrine\Form\Type\EntityType;
use Symfony\Component\OptionsResolver\OptionsResolver;
/**
 * Symfony form type for a Qi Gong coupon: patient selection plus the number
 * of sessions already used and free-text notes.
 */
class CouponQiGongType extends AbstractType
{
    /**
     * Builds the coupon form. The patient list is ordered alphabetically via
     * the custom repository query builder.
     */
    public function buildForm(FormBuilderInterface $builder, array $options)
    {
        $builder
            ->add('patient', EntityType::class, array(
                'class' => Patient::class,
                'choice_label' => 'nom',
                'query_builder' => function (\App\Repository\PatientRepository $repository) {
                    return $repository->getOrderQueryBuilder();
                },
            ))
            ->add('nbSeanceEffectuee', IntegerType::class, array('label' => 'Nombre de séances effectuées : '))
            // Fix: use the imported class name exactly (was "TextAreaType",
            // which only worked because PHP resolves aliases case-insensitively).
            ->add('observations', TextareaType::class, array(
                'label' => 'Observations : ',
                'required' => false,
                'attr' => array('cols' => '40', 'rows' => '4'),
            ))
            ->add('Enregistrer', SubmitType::class);
    }

    /**
     * Binds the form to the CouponQiGong entity.
     */
    public function configureOptions(OptionsResolver $resolver)
    {
        $resolver->setDefaults([
            'data_class' => \App\Entity\CouponQiGong::class,
        ]);
    }
}
<file_sep><?php
namespace App\Entity;
use Doctrine\ORM\Mapping as ORM;
use Symfony\Component\Security\Core\User\UserInterface;
/**
 * Application user used by the Symfony security layer.
 *
 * @ORM\Entity(repositoryClass="App\Repository\UserRepository")
 * @ORM\Table(name="user")
 */
class User implements UserInterface, \Serializable
{
    /**
     * @var int
     *
     * @ORM\Id
     * @ORM\GeneratedValue(strategy="AUTO")
     * @ORM\Column(type="integer")
     */
    private $id;

    /**
     * Display name. Fix: this property was previously undeclared, so
     * setFullName() created it dynamically (deprecated since PHP 8.2).
     * NOTE(review): not mapped with @ORM\Column, so it is not persisted —
     * confirm whether it should be.
     *
     * @var string
     */
    private $fullName;

    /**
     * @var string
     *
     * @ORM\Column(type="string", unique=true)
     */
    private $username;

    /**
     * @var string
     *
     * @ORM\Column(type="string", unique=true)
     */
    private $email;

    /**
     * Encoded password (never the plain text).
     *
     * @var string
     *
     * @ORM\Column(type="string")
     */
    private $password;

    /**
     * @var array
     *
     * @ORM\Column(type="json")
     */
    private $roles = [];

    public function getId(): int
    {
        return $this->id;
    }

    public function setFullName(string $fullName): void
    {
        $this->fullName = $fullName;
    }

    public function getFullName(): string
    {
        return $this->fullName;
    }

    public function getUsername(): string
    {
        return $this->username;
    }

    public function setUsername(string $username): void
    {
        $this->username = $username;
    }

    public function getEmail(): string
    {
        return $this->email;
    }

    public function setEmail(string $email): void
    {
        $this->email = $email;
    }

    public function getPassword()
    {
        return $this->password;
    }

    public function setPassword(string $password): void
    {
        $this->password = $password;
    }

    /**
     * Returns the user's security roles.
     *
     * Fix: the previous implementation returned a hard-coded
     * array('ROLE_USER') and silently ignored the roles stored in
     * $this->roles. Every user is still guaranteed at least ROLE_USER.
     */
    public function getRoles(): array
    {
        $roles = $this->roles;
        $roles[] = 'ROLE_USER';
        return array_unique($roles);
    }

    public function setRoles(array $roles): void
    {
        $this->roles = $roles;
    }

    /**
     * Returns the salt used to encode the password. Null because the
     * configured encoder manages its own salt.
     *
     * {@inheritdoc}
     */
    public function getSalt()
    {
        return null;
    }

    /**
     * Removes sensitive data from the user.
     *
     * No-op: no plainPassword field is kept on this entity, but the method
     * is required by UserInterface.
     *
     * {@inheritdoc}
     */
    public function eraseCredentials(): void
    {
    }

    /**
     * {@inheritdoc}
     */
    public function serialize() {
        return serialize(array(
            $this->id,
            $this->username,
            $this->password,
        ));
    }

    /**
     * {@inheritdoc}
     */
    public function unserialize($data) {
        list (
            $this->id,
            $this->username,
            $this->password,
        ) = unserialize($data);
    }
}<file_sep><?php
namespace App\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
 * Doctrine entity for a doctor ("médecin") associated with a patient.
 *
 * @ORM\Entity
 * @ORM\Table(name="medecin")
 */
class Medecin
{
/**
 * Surrogate primary key.
 *
 * @ORM\Id()
 * @ORM\GeneratedValue()
 * @ORM\Column(type="integer")
 */
private $id;
/**
 * Doctor's name (required).
 *
 * @ORM\Column(type="string")
 */
private $nom;
/**
 * Medical speciality.
 *
 * @ORM\Column(type="string",nullable=true)
 */
private $specialite;
/**
 * Contact e-mail address.
 *
 * @ORM\Column(type="string",nullable=true)
 */
private $mail;
/**
 * Hospital / practice the doctor works at.
 *
 * @ORM\Column(type="string",nullable=true)
 */
private $hopital;
/**
 * Postal address.
 *
 * @ORM\Column(type="string",nullable=true)
 */
private $adresse;
/**
 * Free-text notes about the doctor.
 *
 * @var text
 *
 * @ORM\Column(type="text",nullable=true)
 */
private $observations;
/**
 * Optional associated patient (owning side of Patient::$medecins).
 *
 * @ORM\ManyToOne(targetEntity="App\Entity\Patient", inversedBy="medecins")
 * @ORM\JoinColumn(nullable=true)
 */
private $patient;
// --- Plain accessors below: no validation or side effects. ---
function getId() {
return $this->id;
}
function setId($id) {
$this->id = $id;
}
function getNom() {
return $this->nom;
}
function getSpecialite() {
return $this->specialite;
}
function getMail() {
return $this->mail;
}
function getHopital() {
return $this->hopital;
}
function getAdresse() {
return $this->adresse;
}
// NOTE(review): the return type Patient conflicts with the nullable join
// column above — returning a missing patient would raise a TypeError; confirm.
function getPatient(): Patient{
return $this->patient;
}
function setPatient(Patient $patient) {
$this->patient = $patient;
}
function getObservations() {
return $this->observations;
}
function setObservations( $observations) {
$this->observations = $observations;
}
function setNom($nom) {
$this->nom = $nom;
}
function setSpecialite($specialite) {
$this->specialite = $specialite;
}
function setMail($mail) {
$this->mail = $mail;
}
function setHopital($hopital) {
$this->hopital = $hopital;
}
function setAdresse($adresse) {
$this->adresse = $adresse;
}
}
<file_sep><?php
namespace App\Entity;
use Doctrine\ORM\Mapping as ORM;
use App\Entity\Patient;
/**
* @ORM\Entity
* @ORM\Table(name="couponqigong")
*/
class CouponQiGong
{
/**
* @ORM\Id()
* @ORM\GeneratedValue()
* @ORM\Column(type="integer")
*/
private $id;
/**
* @ORM\Column(type="integer")
*/
private $nbSeanceEffectuee;
/**
* @ORM\Column(type="simple_array",nullable=true)
*/
private $datesSeancesEffectuee;
/**
* @var text
*
* @ORM\Column(type="text",nullable=true)
*/
private $observations;
/**
* @ORM\ManyToOne(targetEntity="App\Entity\Patient", inversedBy="couponsQiGong")
* @ORM\JoinColumn(nullable=false)
*/
private $patient;
function _construct(){
$this->couponQiGong= new couponQiGong;
$this->patient= new Patient();
}
function getId() {
return $this->id;
}
function setId($id) {
$this->id = $id;
}
function getNbSeanceEffectuee() {
return $this->nbSeanceEffectuee;
}
function getDatesSeancesEffectuee() {
return $this->datesSeancesEffectuee;
}
function setNbSeanceEffectuee($nbSeanceEffectuee) {
$this->nbSeanceEffectuee = $nbSeanceEffectuee;
}
function setDatesSeancesEffectuee($datesSeancesEffectuee) {
$this->datesSeancesEffectuee = $datesSeancesEffectuee;
}
public function getPatient(){
return $this->patient;
}
public function setPatient(Patient $patient) {
$this->patient = $patient;
}
function getObservations() {
return $this->observations;
}
function setObservations( $observations) {
$this->observations = $observations;
}
}
<file_sep><?php
namespace App\Form;
use App\Entity\Patient;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\Form\Extension\Core\Type\DateType;
use Symfony\Component\Form\Extension\Core\Type\SubmitType;
use Symfony\Component\Form\Extension\Core\Type\CheckboxType;
use Symfony\Component\Form\Extension\Core\Type\TextType;
use Symfony\Component\Form\Extension\Core\Type\TextareaType;
use Symfony\Component\Form\Extension\Core\Type\IntegerType;
use Symfony\Component\Form\Extension\Core\Type\FileType;
use Symfony\Component\OptionsResolver\OptionsResolver;
class PatientType extends AbstractType
{
public function buildForm(FormBuilderInterface $builder, array $options)
{
$builder
->add('Nom', TextType::class, array('label' => 'Nom : '))
->add('Prenom', TextType::class, array('label' => 'Prénom : ','required' => false))
->add('dateNaiss', DateType::class,
array('label' => 'Date de naissance : ',
'widget' => 'single_text',
'format' => 'yyyy-MM-dd'
,'required' => false))
->add('Adresse', TextType::class, array('label' => 'Adresse : ','required' => false))
->add('CP', TextType::class, array('label' => 'Code postal : ','required' => false))
->add('Ville', TextType::class, array('label' => 'Ville : ','required' => false))
->add('Telephone', TextType::class, array('label' => 'Téléphone : ','required' => false))
->add('Mail', TextType::class, array('label' => 'Mail : ','required' => false))
->add('sitFam', TextType::class, array('label' => 'Situation familiale : ','required' => false))
->add('Profession', TextType::class, array('label' => 'Profession : ','required' => false))
->add('nbEnfant', IntegerType::class, array('label' => 'Nombre d\'enfant : ','required' => false))
->add('accouchement', TextType::class, array('label' => 'Accouchement : ','required' => false))
->add('typeHabitat', TextType::class, array('label' => 'Type d\'habitat : ','required' => false))
//A rajouter : profSanteType
->add('allergies', TextType::class, array('label' => 'Allergie(s) : ','required' => false))
->add('traitementEnCours', TextAreaType::class, array('label' => 'Traitement en cours : ','required' => false, 'attr' => array('cols' => '40','rows' => '4')))
->add('atcdChirurgical', TextType::class, array('label' => 'ATCD chirurgicaux : ','required' => false))
->add('atcdFamiliaux', TextType::class, array('label' => 'ATCD familiaux : ','required' => false))
->add('atcdMedical', TextType::class, array('label' => 'ATCD médicaux : ','required' => false))
->add('contraception', TextType::class, array('label' => 'Contraception : ','required' => false))
->add('observations', TextAreaType::class, array('label' => 'Observations : ','required' => false, 'attr' => array('cols' => '40','rows' => '4')))
->add('accepteMedNonTradi', CheckboxType::class, array('label' => 'Patient accepte la pratique d\'une médecine non traditionnelle : ', 'attr' => array('style' => 'zoom:2.5;'), 'required' => false))
->add('accepteAcup', CheckboxType::class, array('label' => 'Le patient accepte le traitement par acupuncture : ', 'attr' => array('style' => 'zoom:2.5;'),'required' => false))
->add('Enregistrer', SubmitType::class);
}
public function configureOptions(OptionsResolver $resolver)
{
$resolver->setDefaults([
// uncomment if you want to bind to a class
'data_class' => Patient::class,
]);
}
}
<file_sep><?php
namespace App\Controller;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpKernel\Exception\NotFoundHttpException;
use App\Entity\Patient;
use App\Form\PatientType;
use App\Form\EditPatientType;
use App\Entity\Visite;
use App\Form\VisiteType;
use App\Form\EditVisiteType;
use App\Entity\Document;
use App\Form\DocumentType;
use App\Entity\Fournisseur;
use App\Form\FournisseurType;
use App\Form\EditFournisseurType;
use App\Entity\Medecin;
use App\Form\MedecinType;
use App\Form\EditMedecinType;
class MTCDPController extends Controller{
public function index()
{
return $this->render('login.html.twig');
}
public function mainMenu()
{
return $this->render('mainMenu.html.twig');
}
public function menuPatients()
{
$repository = $this->getDoctrine()->getRepository(Patient::class);
$listPatients = $repository->findAll();
return $this->render('Patients/menuPatients.html.twig', array('listPatients'=>$listPatients));
}
public function ajouterPatient(Request $request)
{
$patient = new Patient();
$form = $this->createForm(PatientType::class, $patient);
if ($request->isMethod('POST') && $form->handleRequest($request)->isValid()) {
$em = $this->getDoctrine()->getManager();
$em->persist($patient);
$em->flush();
$repository = $this->getDoctrine()->getRepository(Patient::class);
return $this->redirectToRoute('menu_patients');
}
return $this->render('Patients/ajouterPatient.html.twig', array(
'form' => $form->createView(),
));
}
public function editerPatient($id, Request $request){
$em = $this->getDoctrine()->getManager();
$patient = $em->getRepository(Patient::class)->find($id);
if (null === $patient) {
throw new NotFoundHttpException("Le patient d'id ".$id." n'existe pas.");
}
$form = $this->get('form.factory')->create(EditPatientType::class, $patient);
if ($request->isMethod('POST') && $form->handleRequest($request)->isValid())
{
$em->flush();
return $this->redirectToRoute('fiche_patient', array('id' => $patient->getId()));
}
return $this->render('Patients/editerPatient.html.twig', array(
'patient' => $patient,
'form' => $form->createView(),
));
}
public function supprimerPatient($id, Request $request ){
$em = $this->getDoctrine()->getManager();
$patient = $em->getRepository(Patient::class)->find($id);
if (null === $patient) {
throw new NotFoundHttpException("Le patient d'id ".$id." n'existe pas.");
}
// On crée un formulaire vide, qui ne contiendra que le champ CSRF
$form = $this->get('form.factory')->create();
if ($request->isMethod('POST') && $form->handleRequest($request)->isValid()) {
$em->remove($patient);
$em->flush();
$request->getSession()->getFlashBag()->add('info', "Le patient a bien été supprimé.");
return $this->redirectToRoute('menu_patients');
}
return $this->render('Patients/supprimerPatient.html.twig', array(
'patient' => $patient,
'form' => $form->createView(),
));
}
public function fichePatient($id)
{
$em = $this->getDoctrine()->getManager();
$patient = $em->getRepository(Patient::class)->find($id);
if (null === $patient) {
throw new NotFoundHttpException("Le patient d'id ".$id." n'existe pas.");
}
return $this->render('Patients/fichePatient.html.twig', array(
'patient' => $patient));
}
public function visitePatient($idPatient,Request $request)
{
$em = $this->getDoctrine()->getManager();
$patient = $em->getRepository(Patient::class)->find($idPatient);
if (null === $patient) {
throw new NotFoundHttpException("Le patient d'id ".$idPatient." n'existe pas.");
}
$visite = new Visite();
$form = $this->createForm(VisiteType::class, $visite);
if ($request->isMethod('POST') && $form->handleRequest($request)->isValid()) {
$em = $this->getDoctrine()->getManager();
$patient->addVisite($visite);
$visite->setPatient($patient);
$em->persist($visite);
$em->flush();
return $this->redirectToRoute('menu_patients');
}
return $this->render('Patients/visitePatient.html.twig', array(
'form' => $form->createView(),
'patient' =>$patient,
));
}
//Affichage d'une fiche visite (historique)
public function ficheVisite($idVisite){
$em = $this->getDoctrine()->getManager();
$visite = $em->getRepository(Visite::class)->find($idVisite);
$patient=$visite->getPatient();
return $this->render('Patients/ficheVisite.html.twig', array(
'patient' => $patient,
'visite'=>$visite
));
}
public function historiqueVisites(){
$listVisites = $this->getDoctrine()
->getRepository(Visite::class)->findAll();
return $this->render('Patients/historiqueVisites.html.twig',array(
'listVisites'=>$listVisites,
));
}
public function historiqueVisitePatient($idPatient){
//Recherche du patient dont on veut afficher l'historique
$patient = $this->getDoctrine()
->getRepository(Patient::class)->find($idPatient);
//Recherche des visites du patient dont on veut afficher l'historique
$listVisites=$patient->getVisites();
return $this->render('Patients/historiqueVisitePatient.html.twig',array(
'patient'=>$patient,
'listVisites'=>$listVisites
));
}
public function editerVisite($idVisite,Request $request){
$em = $this->getDoctrine()->getManager();
$visite = $em->getRepository(Visite::class)->find($idVisite);
$patient=$visite->getPatient();
//die(var_dump($visite->getDate()));
if (null === $visite) {
throw new NotFoundHttpException("La visite d'id ".$idVisite." n'existe pas.");
}
$form = $this->get('form.factory')->create(EditVisiteType::class, $visite);
if ($request->isMethod('POST') && $form->handleRequest($request)->isValid())
{
$em->flush();
return $this->redirectToRoute('historique_visite_patient',
array('idPatient' => $patient->getId(),
'visite'=> $visite,
));
}
return $this->render('Patients/editerVisite.html.twig', array(
'visite'=> $visite,
'patient' => $patient,
'form' => $form->createView(),
));
}
public function supprimerVisite($idVisite,Request $request){
$em = $this->getDoctrine()->getManager();
$visite = $em->getRepository(Visite::class)->find($idVisite);
$patient=$visite->getPatient();
if (null === $visite) {
throw new NotFoundHttpException("La visite d'id ".$idVisite." n'existe pas.");
}
// On crée un formulaire vide, qui ne contiendra que le champ CSRF
$form = $this->get('form.factory')->create();
if ($request->isMethod('POST') && $form->handleRequest($request)->isValid()) {
$em->remove($visite);
$em->flush();
$request->getSession()->getFlashBag()->add('info', "La visite a bien été supprimée.");
return $this->redirectToRoute('historique_visite_patient',
array('idPatient' => $patient->getId()
));
}
return $this->render('Patients/supprimerVisite.html.twig', array(
'patient' => $patient,
'visite'=> $visite,
'form' => $form->createView(),
));
}
public function documentsPatient($idPatient){
$patient = $this->getDoctrine()
->getRepository(Patient::class)->find($idPatient);
$listDocuments= $patient->getDocuments();
//var_dump($patient->getDocuments());
//var_dump($patient->getId());
return $this->render('Patients/documentsPatient.html.twig',array(
'patient'=>$patient,
'idPatient'=>$patient->getId(),
'listDocuments'=>$listDocuments
));
}
public function ajouterDocument(Request $request, $idPatient){
$em = $this->getDoctrine()->getManager();
$patient = $em->getRepository(Patient::class)->find($idPatient);
$listDocuments= $patient->getDocuments();
if (null === $patient) {
throw new NotFoundHttpException("Le patient d'id ".$idPatient." n'existe pas.");
}
$document = new Document();
$form = $this->createForm(DocumentType::class, $document);
if ($request->isMethod('POST') && $form->handleRequest($request)->isValid()) {
$file= $document->getFile();
if (null === $file) {
return;
}
// move takes the target directory and then the target filename to move to
$fileName = $patient->getNom().'-'.$file->getClientOriginalName();
$fileName = str_replace(' ', '-', $fileName);
$fileName = htmlentities( $fileName, ENT_NOQUOTES, 'utf-8' );
$fileName = preg_replace( '#&([A-za-z])(?:acute|cedil|caron|circ|grave|orn|ring|slash|th|tilde|uml);#', '\1', $fileName );
$fileName = preg_replace( '#&([A-za-z]{2})(?:lig);#', '\1', $fileName );
$fileName = preg_replace( '#&[^;]+;#', '', $fileName );
$absolutePath=$this->getParameter('documents_directory').'/'.$fileName;
//die(var_dump($absolutePath.'/'.$fileName));
$file->move(
$this->getParameter('documents_directory'),
$fileName
);
$em = $this->getDoctrine()->getManager();
$patient->addDocument($document);
$document->setPatient($patient);
// path est le nom du fichier car dans twig on indique le dossier uploads/documents
$document->setPath($fileName);
$document->setAbsolutePath($absolutePath);
$em->persist($document);
$em->flush();
return $this->redirectToRoute('documents_patient',array(
'idPatient'=>$idPatient,
'patient'=>$patient,
'listDocuments'=>$listDocuments
));
}
return $this->render('Patients/ajouterDocument.html.twig', array(
'form' => $form->createView(),
'patient'=>$patient,
));
}
public function supprimerDocument(Request $request, $idDocument){
$document = $this->getDoctrine()
->getRepository(Document::class)->find($idDocument);
$patient= $document->getPatient();
$listDocuments= $patient->getDocuments();
$pathFileToRemove=$document->getAbsolutePath();
if ( file_exists($pathFileToRemove)) {
if ($patient->getDocuments()->contains($document)) {
$em = $this->getDoctrine()->getManager();
$document = $em->getRepository(Document::class)->find($idDocument);
if (null === $document) {
throw new NotFoundHttpException("Le document d'id ".$idDocument." n'existe pas.");
}
// On crée un formulaire vide, qui ne contiendra que le champ CSRF
$form = $this->get('form.factory')->create();
if ($request->isMethod('POST') && $form->handleRequest($request)->isValid()) {
$em->remove($document);
$em->flush();
unlink($pathFileToRemove);
$request->getSession()->getFlashBag()->add('info', "Le document a bien été supprimé.");
return $this->redirectToRoute('documents_patient',
array('idPatient' => $patient->getId(),
'listDocuments'=>$listDocuments
));
}
return $this->render('Patients/supprimerDocument.html.twig', array(
'patient' => $patient,
'document'=> $document,
'form' => $form->createView(),
));
}
}
return $this->redirectToRoute('documents_patient',array(
'idPatient'=>$patient->getId(),
'patient'=>$patient,
'listDocuments'=>$listDocuments
));
}
public function menuMateriel()
{
return $this->render('Materiel/menuMateriel.html.twig');
}
public function menuQiGong()
{
return $this->render('QiGong/menuQiGong.html.twig');
}
public function menuComptabilite()
{
return $this->render('Comptabilite/menuComptabilite.html.twig');
}
public function menuParametres()
{
return $this->render('Parametres/menuParametres.html.twig');
}
public function menuFournisseurs()
{
$em = $this->getDoctrine()->getManager();
$listFournisseurs = $em->getRepository(Fournisseur::class)->findAll();
return $this->render('Parametres/menuFournisseurs.html.twig'
, array('listFournisseurs'=> $listFournisseurs ));
}
public function ficheFournisseur($idFournisseur)
{
$em = $this->getDoctrine()->getManager();
$fournisseur = $em->getRepository(Fournisseur::class)->find($idFournisseur);
return $this->render('Parametres/ficheFournisseur.html.twig'
, array('fournisseur'=> $fournisseur ));
}
public function ajouterFournisseur(Request $request){
$fournisseur = new Fournisseur();
$form = $this->createForm(FournisseurType::class, $fournisseur);
if ($request->isMethod('POST') && $form->handleRequest($request)->isValid()) {
$em = $this->getDoctrine()->getManager();
$em->persist($fournisseur);
$em->flush();
$repository = $this->getDoctrine()->getRepository(Fournisseur::class);
return $this->redirectToRoute('menu_fournisseurs');
}
return $this->render('Parametres/ajouterFournisseur.html.twig', array(
'form' => $form->createView(),
));
}
public function editerFournisseur($idFournisseur, Request $request)
{
$em = $this->getDoctrine()->getManager();
$fournisseur = $em->getRepository(Fournisseur::class)->find($idFournisseur);
if (null === $fournisseur) {
throw new NotFoundHttpException("Le fournisseur d'id ".$idFournisseur." n'existe pas.");
}
$form = $this->get('form.factory')->create(EditFournisseurType::class, $fournisseur);
if ($request->isMethod('POST') && $form->handleRequest($request)->isValid())
{
$em->flush();
return $this->redirectToRoute('menu_fournisseurs');
}
return $this->render('Parametres/editerFournisseur.html.twig', array(
'fournisseur' => $fournisseur,
'form' => $form->createView(),
));
}
public function supprimerFournisseur($idFournisseur, Request $request){
$em = $this->getDoctrine()->getManager();
$fournisseur = $em->getRepository(Fournisseur::class)->find($idFournisseur);
if (null === $fournisseur) {
throw new NotFoundHttpException("Le médecin d'id ".$idFournisseur." n'existe pas.");
}
// On crée un formulaire vide, qui ne contiendra que le champ CSRF
$form = $this->get('form.factory')->create();
if ($request->isMethod('POST') && $form->handleRequest($request)->isValid()) {
$em->remove($fournisseur);
$em->flush();
$request->getSession()->getFlashBag()->add('info', "Le fournisseur a bien été supprimé.");
return $this->redirectToRoute('menu_fournisseurs');
}
return $this->render('Parametres/supprimerFournisseur.html.twig', array(
'fournisseur'=> $fournisseur,
'form' => $form->createView(),
));
}
public function menuMedecins()
{
$em = $this->getDoctrine()->getManager();
$listMedecins = $em->getRepository(Medecin::class)->findAll();
return $this->render('Parametres/menuMedecins.html.twig'
, array('listMedecins'=> $listMedecins ));
}
public function ficheMedecin($idMedecin)
{
$em = $this->getDoctrine()->getManager();
$medecin = $em->getRepository(Medecin::class)->find($idMedecin);
return $this->render('Parametres/ficheMedecin.html.twig'
, array('medecin'=> $medecin ));
}
public function ajouterMedecin(Request $request){
$medecin = new Medecin();
$form = $this->createForm(MedecinType::class, $medecin);
if ($request->isMethod('POST') && $form->handleRequest($request)->isValid()) {
$em = $this->getDoctrine()->getManager();
$em->persist($medecin);
$em->flush();
$repository = $this->getDoctrine()->getRepository(Medecin::class);
return $this->redirectToRoute('menu_medecins');
}
return $this->render('Parametres/ajouterMedecin.html.twig', array(
'form' => $form->createView(),
));
}
public function editerMedecin($idMedecin, Request $request)
{
$em = $this->getDoctrine()->getManager();
$medecin = $em->getRepository(Medecin::class)->find($idMedecin);
if (null === $medecin) {
throw new NotFoundHttpException("Le médecin d'id ".$idMedecin." n'existe pas.");
}
$form = $this->get('form.factory')->create(EditMedecinType::class, $medecin);
if ($request->isMethod('POST') && $form->handleRequest($request)->isValid())
{
$em->flush();
return $this->redirectToRoute('menu_medecins'
);
}
return $this->render('Parametres/editerMedecin.html.twig', array(
'medecin' => $medecin,
'form' => $form->createView(),
));
}
public function supprimerMedecin($idMedecin, Request $request)
{
}
}
<file_sep><?php
// src/OC/PlatformBundle/Form/ImageType.php
namespace App\Form;
use App\Entity\Document;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\Extension\Core\Type\FileType;
use Symfony\Component\Form\Extension\Core\Type\DateType;
use Symfony\Component\Form\Extension\Core\Type\TextType;
use Symfony\Component\Form\Extension\Core\Type\TextareaType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\OptionsResolver\OptionsResolver;
use Symfony\Component\Form\Extension\Core\Type\SubmitType;
class DocumentType extends AbstractType
{
public function buildForm(FormBuilderInterface $builder, array $options)
{
$builder
->add('intitule', TextType::class, array('label' => 'Intitulé : '))
->add('date', DateType::class,array('label' => 'Date : ',
'widget' => 'single_text',
'format' => 'yyyy-MM-dd'
,'required' => false))
->add('observations', TextAreaType::class, array('label' => 'Observations : ','required' => false))
->add('file', FileType::class, array('label' => 'Document : '))
->add('Enregistrer', SubmitType::class);
}
public function configureOptions(OptionsResolver $resolver)
{
$resolver->setDefaults(array(
'data_class' => Document::class,
));
}
} | 01353e7f62c8086e3a748ea98401faa2b1ff0f97 | [
"JavaScript",
"PHP"
] | 17 | PHP | ClementThuet/MCTDP | 561c0b441b0a0b8d6eabfca8bdbfe1487463c88c | 68f5a3a23e04bc9540432445d6674ad1e1cc570b |
refs/heads/master | <repo_name>cochranjd/charty<file_sep>/app/routes/index.js
import Ember from 'ember';
export default Ember.Route.extend({
model: function() {
var prepareData = function(item) {
item.date = new Date(item.date);
item.volume = item.volume/1000000;
return item;
};
return [
Ember.Object.create({
symbol: 'IBM',
color: '#8888FF',
data: IBM.map(prepareData)
}),
Ember.Object.create({
symbol: 'GM',
color: '#FF8888',
data: GM.map(prepareData)
}),
Ember.Object.create({
symbol: 'APPL',
color: '#888888',
data: APPL.map(prepareData)
})
];
}
});
var IBM = [
{date: 'February 2, 2015', volume: 4427700, close: 160.4},
{date: 'January 2, 2015', volume: 6198300, close: 152.24},
{date: 'December 1, 2014', volume: 4799700, close: 159.32},
{date: 'November 3, 2014', volume: 4337700, close: 161.04},
{date: 'October 1, 2014', volume: 6611800, close: 162.15},
{date: 'September 2, 2014', volume: 3195800, close: 187.23},
{date: 'August 1, 2014', volume: 2684000, close: 189.66},
{date: 'July 1, 2014', volume: 4627500, close: 187.93},
{date: 'June 2, 2014', volume: 3939300, close: 177.73},
{date: 'May 1, 2014', volume: 3554200, close: 180.76},
{date: 'April 1, 2014', volume: 5840600, close: 191.52},
{date: 'March 3, 2014', volume: 6072400, close: 187.64},
{date: 'February 3, 2014', volume: 4827500, close: 180.51},
{date: 'January 2, 2014', volume: 6127700, close: 171.29},
{date: 'December 2, 2013', volume: 4838700, close: 181.85},
{date: 'November 1, 2013', volume: 5192700, close: 174.2},
{date: 'October 1, 2013', volume: 5881900, close: 172.82},
{date: 'September 3, 2013', volume: 3773300, close: 178.57},
{date: 'August 1, 2013', volume: 3445800, close: 175.77},
{date: 'July 1, 2013', volume: 4176700, close: 187.14},
{date: 'June 3, 2013', volume: 4513500, close: 183.37},
{date: 'May 1, 2013', volume: 4346900, close: 199.6},
{date: 'April 1, 2013', volume: 5282100, close: 193.43},
{date: 'March 1, 2013', volume: 3988600, close: 203.71},
{date: 'February 1, 2013', volume: 3622800, close: 191.8},
{date: 'January 2, 2013', volume: 4320300, close: 193.13}];
var GM = [
{date: 'February 2, 2015', volume: 24571800, close: 37.62},
{date: 'January 2, 2015', volume: 17098600, close: 32.62},
{date: 'December 1, 2014', volume: 14062100, close: 34.91},
{date: 'November 3, 2014', volume: 10667700, close: 33.13},
{date: 'October 1, 2014', volume: 20150300, close: 31.12},
{date: 'September 2, 2014', volume: 13680000, close: 31.66},
{date: 'August 1, 2014', volume: 9945000, close: 34.19},
{date: 'July 1, 2014', volume: 14495800, close: 33.23},
{date: 'June 2, 2014', volume: 16510900, close: 35.67},
{date: 'May 1, 2014', volume: 13250300, close: 33.7},
{date: 'April 1, 2014', volume: 24202900, close: 33.6},
{date: 'March 3, 2014', volume: 25479300, close: 33.54},
{date: 'February 3, 2014', volume: 32867900, close: 34.96},
{date: 'January 2, 2014', volume: 27042000, close: 34.85},
{date: 'December 2, 2013', volume: 26619300, close: 39.47},
{date: 'November 1, 2013', volume: 22058900, close: 37.41},
{date: 'October 1, 2013', volume: 19282900, close: 35.69},
{date: 'September 3, 2013', volume: 18450200, close: 34.74},
{date: 'August 1, 2013', volume: 11750000, close: 32.92},
{date: 'July 1, 2013', volume: 12857200, close: 34.65},
{date: 'June 3, 2013', volume: 20533200, close: 32.17},
{date: 'May 1, 2013', volume: 12847500, close: 32.73},
{date: 'April 1, 2013', volume: 12143500, close: 29.79},
{date: 'March 1, 2013', volume: 11177300, close: 26.87},
{date: 'February 1, 2013', volume: 10630700, close: 26.22},
{date: 'January 2, 2013', volume: 12355000, close: 27.13}];
var APPL = [
{date: 'February 2, 2015', volume: 62677200, close: 127.08},
{date: 'January 2, 2015', volume: 69450400, close: 116.7},
{date: 'December 1, 2014', volume: 50669200, close: 109.95},
{date: 'November 3, 2014', volume: 44485400, close: 118.46},
{date: 'October 1, 2014', volume: 61078100, close: 107.11},
{date: 'September 2, 2014', volume: 75388300, close: 99.92},
{date: 'August 1, 2014', volume: 46746200, close: 101.66},
{date: 'July 1, 2014', volume: 49637900, close: 94.34},
{date: 'June 2, 2014', volume: 59839500, close: 91.71},
{date: 'May 1, 2014', volume: 74996300, close: 89.24},
{date: 'April 1, 2014', volume: 82044000, close: 82.73},
{date: 'March 3, 2014', volume: 61552000, close: 75.25},
{date: 'February 3, 2014', volume: 82267500, close: 73.78},
{date: 'January 2, 2014', volume: 109889900, close: 69.76},
{date: 'December 2, 2013', volume: 86672400, close: 78.19},
{date: 'November 1, 2013', volume: 69291000, close: 77.5},
{date: 'October 1, 2013', volume: 88189400, close: 72.42},
{date: 'September 3, 2013', volume: 111138700, close: 66.06},
{date: 'August 1, 2013', volume: 94666300, close: 67.51},
{date: 'July 1, 2013', volume: 77966700, close: 62.29},
{date: 'June 3, 2013', volume: 94963100, close: 54.58},
{date: 'May 1, 2013', volume: 111725300, close: 61.9},
{date: 'April 1, 2013', volume: 132443500, close: 60.55},
{date: 'March 1, 2013', volume: 120246400, close: 60.53},
{date: 'February 1, 2013', volume: 127191400, close: 60.36},
{date: 'January 2, 2013', volume: 160032800, close: 61.92}];<file_sep>/app/views/index.js
import Ember from 'ember';
export default Ember.View.extend({
chartWidth: function() {
return this.get('svgWidth') - this.get('leftMargin') - this.get('rightMargin');
}.property('leftMargin', 'rightMargin'),
chartHeight: function() {
return this.get('svgHeight') - this.get('topMargin') - this.get('bottomMargin');
}.property('topMargin', 'bottomMargin'),
svgWidth: 960,
svgHeight: 500,
leftMargin: 50,
rightMargin: 20,
topMargin: 20,
bottomMargin: 30,
domainFunc: null,
valueFunc: null,
viewBox: function() {
return '0 0 ' + this.get('svgWidth') + ' ' + this.get('svgHeight');
}.property('svgWidth', 'svgHeight'),
prepareChart: function() {
var x = d3.time.scale()
.range([0, this.get('chartWidth')]);
var y = d3.scale.linear()
.range([this.get('chartHeight'), 0]);
console.log('Domain Min: ' + this.get('controller.domainMin'));
console.log('Domain Max: ' + this.get('controller.domainMax'));
console.log('Data Max: ' + this.get('controller.dataMax'));
x.domain([this.get('controller.domainMin'), this.get('controller.domainMax')]);
y.domain([0, this.get('controller.dataMax')]);
this.setProperties({
domainFunc: x,
valueFunc: y
});
var xAxis = d3.svg.axis()
.scale(x)
.orient("bottom");
var yAxis = d3.svg.axis()
.scale(y)
.orient("left");
var chart = d3.select("svg g.my-chart");
chart.select('.x.axis').call(xAxis).attr('transform', this.get('xTranslate'));
var y_axis = chart.select('.y.axis').call(yAxis);
y_axis.select('.y-axis-label').remove();
y_axis.append("text")
.attr("class", "y-axis-label")
.attr("transform", "rotate(-90)")
.attr("y", 6)
.attr("dy", ".71em")
.style("text-anchor", "end")
.text(this.get('controller.selectedType') === 'Close' ? 'Price ($)':'Volume (In Millions)');
}.on('didInsertElement').observes('controller.{dataMax,domainMax,domainMin}'),
initFoundation: function() {
this.$().foundation();
}.on('didInsertElement'),
chartTranslate: function() {
return 'translate(' + this.get('leftMargin') + ',' + this.get('topMargin') + ')';
}.property('leftMargin', 'topMargin'),
xTranslate: function() {
return 'translate(0,' + this.get('chartHeight') + ')';
}.property('chartHeight')
});
<file_sep>/app/controllers/index.js
import Ember from 'ember';
export default Ember.ArrayController.extend({
selectedType: 'Volume',
selectedTypes: ['Close', 'Volume'],
typeChange: function() {
console.log('New Type: ' + this.get('selectedType'));
}.observes('selectedType'),
selectableData: function() {
return this.get('model').map(function(item) {
return item.set('selected', true);
});
}.property('model'),
selectedData: function() {
return this.get('selectableData').filterBy('selected', true);
}.property('selectableData', 'selectableData.@each.selected'),
dataMax: function() {
var _type = this.get('selectedType').toLowerCase();
return this.get('selectedData').map(function(item) {
return item.get('data').map(function(dataItem) {
return dataItem[_type];
}).reduce(function(previousMax, nextValue) {
if (item.symbol === 'IBM') {
console.log('Value: ' + nextValue);
}
return Math.max(previousMax, nextValue);
}, 0);
}).reduce(function(previousMax, nextValue) {
return Math.max(previousMax, nextValue);
}, 0);
}.property('selectedData', 'selectedData.@each', 'selectedType'),
domainMin: function() {
var reduceFunction = function(previousMin, nextValue ) {
if (previousMin < nextValue) {
return previousMin;
}
return nextValue;
};
return this.reduceDomain(reduceFunction);
}.property('selectedData', 'selectedData.@each'),
domainMax: function() {
var reduceFunction = function(previousMax, nextValue) {
if (previousMax > nextValue) {
return previousMax;
}
return nextValue;
};
return this.reduceDomain(reduceFunction);
}.property('selectedData', 'selectedData.@each'),
reduceDomain: function(func) {
var _type = this.get('selectedType').toLowerCase();
var data = this.get('selectedData').map(function(item) {
var mappedData = item.get('data').map(function(dataItem) {
return dataItem.date;
});
return mappedData.reduce(func, new Date());
});
return data.reduce(func, new Date());
}
});
<file_sep>/app/components/line-plot.js
import Ember from 'ember';
export default Ember.Component.extend({
tagName: 'path',
classNames: ['data-plot'],
attributeBindings: ['d', 'color:stroke'],
selectedType: null,
color: null,
d: function() {
if (Ember.isBlank(this.get('domainFunc')) || Ember.isBlank(this.get('valueFunc'))) {
return '';
}
return this.get('dataFunc')(this.get('data'));
}.property('data', 'data.@each', 'dataFunc', 'domainFunc', 'valueFunc'),
dataFunc: function() {
var _selectedType = this.get('selectedType').toLowerCase();
return d3.svg.line().x(function(d) {
return this.get('domainFunc')(d.date);
}.bind(this)).y(function(d) {
return this.get('valueFunc')(d[_selectedType]);
}.bind(this));
}.property('selectedType')
});
| d13762607881cf88639c32314022b5b5ddb69a36 | [
"JavaScript"
] | 4 | JavaScript | cochranjd/charty | d1401d75b9a9bd60d0a37e536efb323f4a3e0985 | 9dfce362a72eef28084431280bc241af9026267c |
refs/heads/main | <repo_name>Jara-nicolas05/my-new-repositorio<file_sep>/README.md
# my-new-repositorio
este repositorio corresponde a los ejemplos de clases Big Data
<file_sep>/condicionales.R
# Age range that defines millennials.
edadinicial <- 25
edadfinal <- 35

# Age of the person being checked.
edadpersona <- 15

# Asking the question as a logical expression:
( edadinicial <= edadpersona ) && ( edadfinal >= edadpersona )
#### If the answer above is FALSE, the person is not a millennial;
## otherwise the person is a millennial.

######################################################################
############################ CONDITIONALS ############################
######################################################################

# 1st conditional: IF -- the body runs only when the condition is TRUE.
#####

## Example 1: 15 is below the range, so nothing is printed.
## (Fixed: the second comparison was `edadfinal <= edadpersona`, which
## inverted the upper bound; examples 2 and 3 use `>=`.)
edadpersona <- 15
if ( ( edadinicial <= edadpersona ) && ( edadfinal >= edadpersona ) ) {
  print("la persona es millenias")
}

## Example 2: 30 falls inside [25, 35], so the message is printed.
edadpersona <- 30
if ( ( edadinicial <= edadpersona ) && ( edadfinal >= edadpersona ) ) {
  print("la persona es millenias")
}

## Example 3: 40 is above the range, so nothing is printed.
edadpersona <- 40
if ( ( edadinicial <= edadpersona ) && ( edadfinal >= edadpersona ) ) {
  print("la persona es millenias")
}

####### print() is a function that shows a message on the console.
"Markdown",
"R"
] | 2 | Markdown | Jara-nicolas05/my-new-repositorio | 492ed7547a66c6f809e91fe7a58859cea10e0166 | e0d906cf80097aede932956f2f4392ff972c1ac7 |
refs/heads/master | <repo_name>E-David/ScheduleSimply<file_sep>/src/scripts/utils.js
import STORE from "./store"
const UTILS = {
DAYS: ["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],
addMinutes: function(date, minutes) {
return new Date(date.getTime() + minutes * 60000)
},
arraysEqual: function(arr1, arr2) {
var biggerArray, smallerArray
[biggerArray,smallerArray] = (arr1.length > arr2.length) ? [arr1,arr2] : [arr2,arr1]
for (var i = 0; i < biggerArray.length; i ++) {
if (biggerArray[i] !== smallerArray[i]) return false
}
return true
},
// change DateTime object to DayOfWeek: Month/Day
formatDate: function(date) {
return `${this.DAYS[date.getDay()]}: ${date.getMonth() % 12 + 1}/${date.getDate()}`
},
// change DateTime object to Hour:Minutes
formatTime: function(date) {
var rawHours = new Date(date).getHours()
var minutes = new Date(date).getMinutes()
var amOrPm = rawHours < 12 ? "AM" : "PM"
if(minutes === 0) minutes = minutes + "0"
var hours = rawHours % 12 === 0 ? 12 : rawHours % 12
return `${hours}:${minutes} ${amOrPm}`
},
getCurrentUser: function() {
return localStorage.getItem('userName')
},
getNextWeek: function() {
var weekArr = []
for(var i = 0; i < 8; i ++){
//pushes copy of date Object, since the copy is not changed when setDate is used
var date = new Date()
date.setDate(date.getDate() + i)
weekArr.push(date)
}
return weekArr
},
getElNextWeek: function() {
var weekArr = []
for(var i = 8; i < 16; i ++){
//pushes copy of date Object, since the copy is not changed when setDate is used
var date = new Date()
date.setDate(date.getDate() + i)
weekArr.push(date)
}
return weekArr
},
getThirtyMinIncrements: function(start,end) {
var timeBlocksArr = []
while(start.getHours() <= new Date(end).getHours()) {
timeBlocksArr.push(start)
start = this.addMinutes(start,30)
}
return timeBlocksArr
}
}
export default UTILS
<file_sep>/src/scripts/app.js
import React from 'react'
import ReactDOM from 'react-dom'
import Backbone from 'backbone'
import init from './init'
import ScheduleApp from "./views/scheduleApp"
import LoginView from "./views/loginView"
import UTILS from "./utils"
import $ from "jquery"
const app = function() {
const Router = Backbone.Router.extend({
routes: {
"home": "handleHome",
'googleAccess/:token': 'setToken',
'login': 'handleLogin',
"*default": "redirect"
},
handleHome: function() {
ReactDOM.render(<ScheduleApp />, document.querySelector('.container'))
},
handleLogin: function() {
ReactDOM.render(<LoginView />, document.querySelector('.container'))
},
setToken: function(token) {
localStorage.setItem('calendar_token',token)
$.getJSON(`/google/calendar/events?token=${token}`)
.then((resp)=>{
var userName = resp.summary
localStorage.setItem('userName',userName)
location.hash = "home"
}
)
},
redirect: function(){
location.hash = "home"
},
initialize: function(){
Backbone.history.start()
if(!UTILS.getCurrentUser()){
location.hash = "login"
}
}
})
new Router()
}
// x..x..x..x..x..x..x..x..x..x..x..x..x..x..x..x..
// NECESSARY FOR USER FUNCTIONALITY. DO NOT CHANGE.
export const app_name = init()
app()
// x..x..x..x..x..x..x..x..x..x..x..x..x..x..x..x..<file_sep>/src/scripts/views/materialSelect.js
import React from 'react'
import ACTIONS from '../actions'
import UTILS from "../utils"
import STORE from "../store"
const MaterialSelect = React.createClass({
componentDidMount: function() {
this._$select()
},
componentDidUpdate: function(prevProps) {
if (!UTILS.arraysEqual(this.props.optionValues,prevProps.optionValues)) {
this._$select()
}
},
_$select: function() {
$('select').material_select()
$('select').change((e) => {
if(this.props.detailProp === "day"){
ACTIONS.setDetail(this.props.detailProp,e.target.value)
ACTIONS.fetchAvailability(e.target.value)
} else if (this.props.detailProp === "time"){
ACTIONS.setDetail(this.props.detailProp,e.target.value)
ACTIONS.showDetails(e.target.value)
}
})
},
render: function(){
return (
<div style={{display: this.props.showing ? "block" : "none"}}>
<select>
<option defaultValue="">No {this.props.detailProp} selected</option>
{this.props.displayValues.map((rawVal,i) => <option
value={this.props.optionValues[i]}
key={i}
>{rawVal}</option>
)}
</select>
</div>
)
}
})
export default MaterialSelect<file_sep>/src/scripts/actions.js
import STORE from "./store"
import {TaskCollection, TaskModel, ScheduledEventsCollection} from "./models/dataModels"
import User from "./models/userModel"
import $ from 'jquery'
import UTILS from "./utils"
import toastr from "toastr"
const ACTIONS = {
//add task to STORE and save to database. Added model triggers "update" for STORE re-render
addTask: function(userInputObj) {
//adds unique user id to task in order to show only user specific tasks
userInputObj["userName"] = UTILS.getCurrentUser()
var taskModel = new TaskModel(userInputObj)
taskModel.save()
.done((resp) => {
STORE._get("taskCollection").add(taskModel)
ACTIONS.countTasksToBeScheduled()
})
.fail((err) => {
toastr.error("Error when adding task")
})
},
//Once event is scheduled, save task status and when it was scheduled, then reset app:
// 1. set pop up state to false 2. reset tasksToBeScheduled 3. remove collection from tasks
// scheduled
changeTasksToScheduled: function(date) {
var toBeScheduledArr = STORE._get("tasksToBeScheduled")
for(var i = 0; i < toBeScheduledArr.length; i++){
var taskModel = toBeScheduledArr[i]
taskModel.set({
taskStatus: "scheduled",
scheduledDate: date
})
taskModel.save()
.fail((err)=>{
toastr.error("Error when updating task")
})
}
STORE._set({
tasksToBeScheduled: [],
showConfirm: false,
showTime: false,
showPopUp: false,
schedulingDetails: {}
})
ACTIONS.fetchTasks()
},
countTasksLength: function(){
var coll = STORE._get("taskCollection")
var tasksLength = coll.reduce((accumulator,taskModel) => {
return accumulator + taskModel.get("taskLength")
},0)
return tasksLength
},
// with limiter and task Collection, check if the newest task will exceed limiter
// If not, add task to be scheduled. If not but equals limiter, go ahead and have user
// schedule the task (show pop up). If yes, don't add newest task, have pop up show up
countTasksToBeScheduled: function() {
var coll = STORE._get("taskCollection"),
limiter = STORE._get("scheduleLimiter"),
lengthIfTaskAdded = 0,
toBeScheduledArr = [],
propsToUpdate = {}
for(var i = 0; i < coll.models.length; i++){
lengthIfTaskAdded += coll.models[i].get("taskLength")
if(lengthIfTaskAdded > limiter) {
propsToUpdate["showPopUp"] = true
} else if (lengthIfTaskAdded === limiter) {
toBeScheduledArr.push(coll.models[i])
propsToUpdate["showPopUp"] = true
} else {
toBeScheduledArr.push(coll.models[i])
}
}
propsToUpdate["tasksToBeScheduled"] = toBeScheduledArr
STORE._set(propsToUpdate)
},
createEvent: function() {
var whatEvent = this.getTasksToBeScheduledString(),
whenEvent = STORE._get("schedulingDetails"),
startTime = new Date(whenEvent["time"]),
endTime = UTILS.addMinutes(startTime,STORE._get("scheduleLimiter"))
$.getJSON(`/google/calendar/create?what=${whatEvent}&start=${startTime.toISOString()}&end=${endTime.toISOString()}&token=${localStorage.getItem('calendar_token')}`)
.then(
function() {
toastr.success(`Tasks scheduled ${startTime.getMonth() % 12 + 1}/${startTime.getDate()} at ${UTILS.formatTime(startTime)}`)
ACTIONS.changeTasksToScheduled()
},
function(err) {
toastr.error("Error scheduling event")
}
)
},
fetchAvailability: function(date) {
var startOfDayRaw = new Date(date).setHours(0,0,0,0),
startOfDay = new Date(startOfDayRaw).toISOString(),
endOfDayRaw = new Date(date).setHours(23,59,59,999),
endOfDay = new Date(endOfDayRaw).toISOString()
var schEvColl = new ScheduledEventsCollection()
schEvColl.fetch({
data: {
start: startOfDay,
end: endOfDay,
token: localStorage.getItem('calendar_token')
}
}).done((resp)=> {
// pass date and response of occupied times and filter for available time blocks
var openTimes = ACTIONS.getOpenTimeBlocks(date,resp)
STORE._set({
showTime: true,
availableTimes: openTimes
})
})
.fail((err)=> {
toastr.error("Error retrieving tasks")
})
},
fetchTasks: function() {
STORE._get('taskCollection').fetch({
data: {
userName: UTILS.getCurrentUser(),
taskStatus: "unscheduled"
}
}) .done(()=> ACTIONS.countTasksToBeScheduled())
.fail(()=> toastr.error("Error fetching tasks"))
},
//TODO: change this so you have the option to schedule up to start/end time.
//Ex: you have something at 7. You should be able to schedule something from 6:30-7
filterAvailableBlocks: function(dateToSchedule,scheduledTimes) {
var potentialTimes = ACTIONS.getPotentialTimes(dateToSchedule)
var filteredTimes = potentialTimes.filter((time)=> {
if (time < new Date()){
return false
} else {
for(var i = 0; i < scheduledTimes.length; i++){
if(time >= scheduledTimes[i].start && time <= scheduledTimes[i].end){
return false
}
}
}
return true
})
return filteredTimes
},
getOpenTimeBlocks: function(dateToSchedule,occupiedTimes) {
var scheduledTimes = [],
occupiedTimesArr = occupiedTimes.items
for(var i = 0; i < occupiedTimesArr.length; i++) {
scheduledTimes.push({
start: new Date(occupiedTimesArr[i].start.dateTime),
end: new Date(occupiedTimesArr[i].end.dateTime)
})
}
return this.filterAvailableBlocks(dateToSchedule,scheduledTimes)
},
//Get thirty min increments from when you set startDate to when you set endDate
getPotentialTimes: function(date) {
//Need to change this based on user preference
var startDate = new Date(new Date(date).setHours(10,0,0,0))
var endDate = new Date(date).setHours(20,0,0,0)
return UTILS.getThirtyMinIncrements(startDate,endDate)
},
// string used to add to calendar
getTasksToBeScheduledString: function() {
var toBeScheduledArr = STORE._get("tasksToBeScheduled"),
taskArray = []
for(var i = 0; i < toBeScheduledArr.length; i++) {
var task = toBeScheduledArr[i].get("taskName")
taskArray.push(task)
}
return taskArray.join(", ")
},
logoutUser: function() {
localStorage.clear()
location.hash = "login"
},
removeTask: function(taskModel) {
taskModel.destroy()
.fail((err) => {
toastr.error("Error when removing task")
})
},
setDetail: function(prop,val) {
var schedulingDetails = STORE._get('schedulingDetails')
if(!schedulingDetails.hasOwnProperty(prop) && val !== schedulingDetails["day"])
schedulingDetails[prop] = val
},
showDetails: function(val){
if(val !== STORE._get("schedulingDetails")["day"]) {
STORE._set("showConfirm",true)
}
}
}
export default ACTIONS<file_sep>/README.md
#ScheduleSimply
##Getting started
- Schedule tasks using your Google Calendar. First sign in and give the app permissions to view your calendar. This allows the app to 1. filter out times you are busy and 2. schedule events directly onto your calendar.
- Make your to-do list of tasks. A name and length must be provided to successfully add a task.
- To prevent overscheduling or feeling overwhelmed from a huge task list, the app automatically detects how long your bundle of tasks will take. It will ask to schedule the tasks once this limit has been reached.
- Alternatively, you may press the schedule button at the top at any time to schedule any length of tasks immediately.
- Once scheduled, your tasks will be listed as the calendar event name on your Google calendar. Now, any Google notifications you have will apply, reminding you when you tasked yourself to complete these tasks. Good luck!
- Full-stack app allows users to access their tasks accross sessions and platforms. Tasks added will persist if user logs in on different devices
<file_sep>/src/scripts/views/loginView.js
import React from "react"
import ACTIONS from "../actions"
const LoginView = React.createClass({
authUrl: "https://accounts.google.com/o/oauth2/auth?scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcalendar&access_type=offline&response_type=code&client_id=587179870005-4t54t2sn7peb3nf6rcpa6q92ottds8kq.apps.googleusercontent.com&redirect_uri=https%3A%2F%2Fschedulesimply.herokuapp.com%2Fgoogle%2Fcalendar%2Fcode",
_googleAuth: function(event) {
event.preventDefault()
window.location.replace(this.authUrl)
},
render: function() {
return (
<div className="login-view valign-wrapper">
<div className="login-wrapper">
<h1 className="valign">ScheduleSimply</h1>
<h3 className="valign">Plan. Schedule. Do.</h3>
<button className="btn waves-effect waves-light valign" onClick={this._googleAuth}>Google Login</button>
</div>
</div>
)
}
})
export default LoginView<file_sep>/routes/googleRouter.js
let Router = require('express').Router;
const SECRETS = require('../client_secret_2.js')
var google = require('googleapis');
var calendar = google.calendar('v3')
const googleRouter = Router()
var OAuth2 = google.auth.OAuth2
var oauth2Client = new OAuth2(
SECRETS.client_id,
SECRETS.client_secret,
SECRETS.redirect_uris[1]
)
googleRouter
.get('/calendar/code', function(req,res) {
var code = req.query.code
console.log("CODE",code)
oauth2Client.getToken(code, function (err, tokens) {
// Now tokens contains an access_token and an optional refresh_token. Save them.
console.log("TOKENS",tokens)
if (!err) {
oauth2Client.setCredentials(tokens);
}
res.redirect(`/#googleAccess/${oauth2Client.credentials.access_token}`)
})
})
.get('/calendar/events', function(req,res) {
var url = req.query.url,
token = req.query.token,
start = req.query.start,
end = req.query.end
console.log("TOKEN",token)
oauth2Client.setCredentials({
access_token: token
})
calendar.events.list({
auth: oauth2Client,
calendarId: 'primary',
maxResults: 10,
//today's date supplied if none provided. Otherwise you get a bad request.
timeMin: start ? start : new Date().toISOString(),
timeMax: end ? end : new Date().toISOString(),
singleEvents: true,
orderBy: 'startTime'
}, function(err, response) {
if (err) {
console.log('The API returned an error: ' + err);
res.status(400).json(err);
}
else {
res.json(response)
}
})
})
.get('/calendar/create', function(req,res) {
oauth2Client.setCredentials({
access_token: req.query.token
})
console.log(req.query)
var event = {
'summary': req.query.what,
'start': {
'dateTime': req.query.start,
timeZone: 'America/Chicago'
},
end: {
dateTime: req.query.end,
timeZone: 'America/Chicago'
}
}
calendar.events.insert({
auth: oauth2Client,
calendarId: 'primary',
resource: event,
}, function(err, event) {
if (err) {
console.log('There was an error contacting the Calendar service: ' + err);
res.status(400).json(err);
}
else res.json(event)
});
})
module.exports = googleRouter<file_sep>/client_secret.js
var secrets = {
"installed":{
"client_id": "587179870005-4t54t2sn7peb3nf6rcpa6q92ottds8kq.apps.googleusercontent.com",
"project_id": "inspired-access-149420",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://accounts.google.com/o/oauth2/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_secret": "<KEY>",
"redirect_uris": ["http://localhost:3000/google/calendar/code"],
"javascript_origins": ["http://localhost:3000"]
}
}
module.exports = secrets.installed<file_sep>/src/scripts/models/dataModels.js
import Backbone from "backbone"
export const TaskModel = Backbone.Model.extend({
urlRoot: "/api/tasks",
idAttribute: "_id",
defaults: {
taskStatus: "unscheduled"
}
})
export const TaskCollection = Backbone.Collection.extend({
url: "/api/tasks",
model: TaskModel
})
export const ScheduledEventsCollection = Backbone.Collection.extend({
url: "/google/calendar/events"
})<file_sep>/getGoogleUrl.js
const SECRETS = require('./client_secret_2.js')
var google = require('googleapis');
var OAuth2 = google.auth.OAuth2;
var oauth2Client = new OAuth2(
SECRETS.client_id,
SECRETS.client_secret,
SECRETS.redirect_uris[1]
);
// generate a url that asks permissions for Google Calendar scopes
var url = oauth2Client.generateAuthUrl({
scope: 'https://www.googleapis.com/auth/calendar',
access_type: 'offline'
})
console.log(url)
| aa8c2060884f71f4fa942949cb6fa3a6cb6dabba | [
"JavaScript",
"Markdown"
] | 10 | JavaScript | E-David/ScheduleSimply | 0774e4f3f79dc80e783fb8a5833720b88ede4be7 | c59f25e6d1121388a70e8b97327637a0157b63d9 |
refs/heads/master | <file_sep>/*
* Progarm Name: main.c
* Created Time: 2016-12-15 20:57:24
* Last modified: 2016-12-15 21:08:21
* @author: minphone.linails <EMAIL>
*/
#include <stdio.h>
int main(int argc, char **argv)
{
int ret = 0;
printf(" --- object-oriented-C ---\n");
return ret;
}
<file_sep>#! /bin/sh
# Created Time: 2016-04-23 14:26:54
#
cscope -Rbkq
<file_sep>#! /bin/sh
# Created Time: 2016-04-23 14:34:33
#
cd ../build
echo "current exe path:"
pwd
bin/ooc
<file_sep>#! /bin/sh
# Created Time: 2016-06-27 13:25:31
#
cd ../build/
make
<file_sep># readme.txt
# Created Time: 2016-04-23 14:20:56
# Last modified: 2016-12-15 21:07:52
#
cmake_minimum_required(VERSION 2.8)
include_directories(
${PROJECT_SOURCE_DIR}/src
${PROJECT_SOURCE_DIR}/src/libs
${PROJECT_SOURCE_DIR}/src/srcs
)
#aux_source_directory(. SRC_LIST)
#aux_source_directory(./libs/ SRC_LIST)
#aux_source_directory(./srcs/ SRC_LIST)
set(SRC_LIST
main.c
# srcs/xx.c
#####################################################################
# srcs/xx.c
)
add_executable(ooc ${SRC_LIST})
if ("${SYSTEM_TYPE}" STREQUAL "i686")
set(libs_path ${PROJECT_SOURCE_DIR}/src/libs/i686)
endif()
if ("${SYSTEM_TYPE}" STREQUAL "x86_64")
set(libs_path ${PROJECT_SOURCE_DIR}/src/libs/x86_64)
endif()
set(link_lib
#${libs_path}/libsqlite3.a
pthread
dl
)
target_link_libraries(ooc ${link_lib})
<file_sep># readme.txt
# Created Time: 2016-04-23 14:15:51
# Last modified: 2016-12-15 21:07:27
#
cmake_minimum_required(VERSION 2.8)
project(program-train)
set(CMAKE_C_FLAGS "$ENV{CMAKE_C_FLAGS} -std=c99 -g -Wall")
add_subdirectory(src bin)
<file_sep># object-oriented-C
object-oriented C programing
> **author : <NAME>**
>
> **email : <EMAIL>**
<file_sep># readme.txt
# Created Time: 2016-12-15 20:58:53
# Last modified: 2016-12-15 20:58:56
#
<file_sep># readme.txt
# Created Time: 2016-12-15 20:58:32
# Last modified: 2016-12-15 20:58:45
#
Third-Party libs
| 0d2de14494d7fd5f237485d2f1bf648419bd7f15 | [
"CMake",
"Markdown",
"Text",
"C",
"Shell"
] | 9 | C | minphonelin/object-oriented-C | b93c61fd526c1b9ea432285e92ecc038d28965ea | c65b25d5c6551d7b468baa78b00b71590525e79d |
refs/heads/master | <file_sep><?php
function validateUsername($name){
//NO cumple longitud minima
if(strlen($name) < 4)
return false;
//SI longitud pero NO solo caracteres A-z
else if(!preg_match("/^[a-zA-Z]+$/", $name))
return false;
// SI longitud, SI caracteres A-z
else
return true;
}
function validatePassword1($password1){
//NO tiene minimo de 5 caracteres o mas de 12 caracteres
if(strlen($password1) < 5 || strlen($password1) > 12)
return false;
// SI longitud, NO VALIDO numeros y letras
else if(!preg_match("/^[0-9a-zA-Z]+$/", $password1))
return false;
// SI rellenado, SI email valido
else
return true;
}
function validatePassword2($password1, $password2){
//NO coinciden
if($password1 != $password2)
return false;
else
return true;
}
function validateEmail($email){
//NO hay nada escrito
if(strlen($email) == 0)
return false;
// SI escrito, NO VALIDO email
else if(!filter_var($_POST['email'], FILTER_SANITIZE_EMAIL))
return false;
// SI rellenado, SI email valido
else
return true;
}
//Comprobacion de datos
//variables valores por defecto
$username = "";
$usernameValue = "";
$password1 = "";
$password2 = "";
$email = "";
$emailValue = "";
$websiteValue = "";
//Validacion de datos enviados
if(isset($_POST['send'])){
if(!validateUsername($_POST['username']))
$username = "error";
if(!validatePassword1($_POST['password1']))
$password1 = "<PASSWORD>";
if(!validatePassword2($_POST['password1'], $_POST['password2']))
$password2 = "<PASSWORD>";
if(!validateEmail($_POST['email']))
$email = "error";
//Guardamos valores para que no tenga que reescribirlos
$usernameValue = $_POST['username'];
$emailValue = $_POST['email'];
$websiteValue = $_POST['website'];
//Comprobamos si todo ha ido bien
if($username != "error" && $password1 != "error" && $password2 != "<PASSWORD>" && $email != "error")
$status = 1;
}
?>
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<title>Cómo validar un formulario utilizando PHP y Javascript (jQuery)</title>
<link rel="stylesheet" href="main.css" type="text/css" media="screen" />
</head>
<body>
<div class="wrapper">
<div class="section">
<?php if(!isset($status)): ?>
<h1>Formulario de validación en Cliente + Servidor</h1>
<form id="form1" action="index.php" method="post">
<label for="username">Nombre de usuario (<span id="req-username" class="requisites <?php echo $username ?>">A-z, mínimo 4 caracteres</span>):</label>
<input tabindex="1" name="username" id="username" type="text" class="text <?php echo $username ?>" value="<?php echo $usernameValue ?>" />
<label for="password1">Contraseña (<span id="req-password1" class="requisites <?php echo $password1 ?>">Mínimo 5 caracteres, máximo 12 caracteres, letras y números</span>):</label>
<input tabindex="2" name="password1" id="password1" type="password" class="text <?php echo $password1 ?>" value="" />
<label for="password2">Repetir Contraseña (<span id="req-password2" class="requisites <?php echo $password2 ?>">Debe ser igual a la anterior</span>):</label>
<input tabindex="3" name="<PASSWORD>" id="password2" type="<PASSWORD>" class="text <?php echo $password2 ?>" value="" />
<label for="email">E-mail (<span id="req-email" class="requisites <?php echo $email ?>">Un e-mail válido por favor</span>):</label>
<input tabindex="4" name="email" id="email" type="text" class="text <?php echo $email ?>" value="<?php echo $emailValue ?>" />
<label for="website">Sitio Web (Opcional):</label>
<input tabindex="5" name="website" id="website" type="text" class="text" value="<?php echo $websiteValue ?>" />
<div>
<input tabindex="6" name="send" id="send" type="submit" class="submit" value="Enviar formulario" />
</div>
</form>
<?php else: ?>
<h1>¡Formulario enviado con éxito!</h1>
<?php endif; ?>
</div>
</div>
</body>
</html>
| a12b8f46f9970125f9f532384bfa08bbcdc88e8e | [
"PHP"
] | 1 | PHP | juanaves/Ejemplophp | 0b16e19dfcec5694c780c8e2378f893e1ad0ea31 | e861e68beea6253eafabae9199f4109216ff264c |
refs/heads/master | <file_sep>import React from 'react';
import '../App.css';
class Keypad extends React.Component {
render() {
return (
<div className="buttons">
<button className="screen">{this.props.result.length < 35 ? this.props.result : this.state.result.slice(0,-1)}</button>
<br />
<button className="keypad" value="(" onClick={this.props.displayButton}>(</button>
<button className="keypad" value="CE" onClick={this.props.displayButton}>CE</button>
<button className="keypad" value=")" onClick={this.props.displayButton}>)</button>
<button className="keypad" value="C" onClick={this.props.displayButton}>C</button>
<br />
<button className="keypad" value="1" onClick={this.props.displayButton}>1</button>
<button className="keypad" value="2" onClick={this.props.displayButton}>2</button>
<button className="keypad" value="3" onClick={this.props.displayButton}>3</button>
<button className="keypad" value="+" onClick={this.props.displayButton}>+</button>
<br />
<button className="keypad" value="4" onClick={this.props.displayButton}>4</button>
<button className="keypad" value="5" onClick={this.props.displayButton}>5</button>
<button className="keypad" value="6" onClick={this.props.displayButton}>6</button>
<button className="keypad" value="-" onClick={this.props.displayButton}>-</button>
<br />
<button className="keypad" value="7" onClick={this.props.displayButton}>7</button>
<button className="keypad" value="8" onClick={this.props.displayButton}>8</button>
<button className="keypad" value="9" onClick={this.props.displayButton}>9</button>
<button className="keypad" value="*" onClick={this.props.displayButton}>*</button>
<br />
<button className="keypad" value="." onClick={this.props.displayButton}>.</button>
<button className="keypad" value="0" onClick={this.props.displayButton}>0</button>
<button className="keypad" value="=" onClick={this.props.displayButton}>=</button>
<button className="keypad" value="/" onClick={this.props.displayButton}>÷</button>
</div>
);
};
};
export default Keypad;<file_sep># calculator
Simple calculator with four mathematical operations
| ed17dcdb6bd9187f139ddbdaa2c7ad02bb5b0400 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | evangelosc/calculator | 9ec1bb0991da3756b26cb88aedb8c227f6e6b044 | c887a91b672aa526e0bb93039aed28fb8d53e011 |
refs/heads/master | <file_sep># Calculate Area of a circle
pi = 3.14
def calcArea(r):
return pi * r * r
r=8.7
print("Area of circle with radius: ",r," is ", calcArea(r))
r=9.2
print("Area of circle with radius: ",r," is ", calcArea(r))
r=16.03
print("Area of circle with radius: ",r," is ", calcArea(r))
print("++++++++++++++++++++++++++++++++++")
# Write function to check if a value exists in a list
data = ["python", "r", "java", "pascal", "c", "javascript", "assembly","html", "c++"]
def checkVal(search_el):
for e in data:
if e == search_el:
return "Element is present in data list"
return "Element not found"
print (checkVal("python"))
print (checkVal("html"))
print (checkVal("c"))
print("++++++++++++++++++++++++++++++++++")
# Write a function to shift list item to right n times
data = ["python", "r", "java", "pascal", "c", "javascript", "assembly", "html", "c++"]
def shiftItems(listName, n):
shifted = listName[-n:] + listName[:-n]
return shifted
print(data)
print(shiftItems(data, 3))
print(shiftItems(data, 1))
print("++++++++++++++++++++++++++++++++++")
# Use while loop to get data
#pi = 3.14
#def calcArea(r):
# return pi * r * r
while True:
r = input("Enter radius of circle or quit:" )
if r =="quit":
break
else:
print("Area of circle with radius: ",str(r)," is ",str(calcArea(float(r))))
print("++++++++++++++++++++++++++++++++++")
def totalMarks(subj1,subj2,subj3,subj4):
return subj1+subj2+subj3+subj4
def getPercentage(marks):
return marks/4
def showDetails(**kwargs):
print("Hello ", kwargs["name"])
print("Here is your result for class of AI.")
print("Your score card is as follows.")
print("ClassMarks:")
print("\tmath:",kwargs['marks']['math'])
print("\tphysics:",kwargs['marks']['physics'])
print("\tbiology:", kwargs['marks']['biology'])
print("\tcomputer:", kwargs['marks']['computer'])
totalmarks = totalMarks(kwargs['marks']['math'],kwargs['marks']['physics'],kwargs['marks']['biology'],kwargs['marks']['computer'])
print("Total marks are: ",totalmarks)
print("Percentage is as follows:",str(getPercentage(totalmarks)),"%")
print("Maximum marks are in ","Biology")
print("Minimum marks are in ","Math")
print("You are promoted to next class .")
showDetails(name = "Ali",class_ = "AI",marks = {"math" : 50, "physics" : 80, "biology" : 90, "computer" : 67},date = "1 Feb 2020", nextClass = True)
| dcea4a2f3ac8855a7398940aaa04b2e8a3c5e6b5 | [
"Python"
] | 1 | Python | ihtshamtahir/assignments5_AIC | 6860e1009ea3a75481009440977804d6fa3cfb3e | c031a19eb7760be73a78a75f42405def5ece87cf |
refs/heads/master | <file_sep>import datetime
comments = []
users = [
{
"name": "kenn",
"password": "<PASSWORD>",
"role": "admin",
"lastLoginAt": ""
},
{
"name": "issa",
"password": "<PASSWORD>",
"role": "moderator",
"lastLoginAt": ""
},
{
"name": "eric",
"password": "<PASSWORD>",
"role": "normal",
"lastLoginAt": ""
},
{
"name": "steve",
"password": "<PASSWORD>",
"role": "normal"
}
]
def login():
username = input("please input username: ")
for user in users:
if user['name'] == username:
# return user['password']
password = input("please input password: ")
if user['password'] != password:
return 'Wrong password'
user["lastLoginAt"] = datetime.datetime.now()
if user['role'] == "normal":
userinput = input("1. create comment \n 2.Edit comment \n 3. logout ")
if userinput == str("1"):
comment = input("Enter your comment:")
data = {'comment_id': len(comments) +1,
'comment': comment,
'timestamp': datetime.datetime.now() ,
'created_by': username
}
comments.append(data)
return comments
elif userinput == str("2"):
comment_id = int(input('Enter comment id:'))
if not comment_id:
return "Enter comment id"
comment = next((comment for comment in comments if comment["comment_id"] == comment_id), False)
if comment == False:
return "No comment found"
edit = input("Enter your comment here:")
comment["comment"] = edit
return comments
else:
login()
if user['role'] == "moderator":
userinput = input("1. create comment \n 2. edit comment \n 3. delete comment \n 4. logout \n ")
if userinput == str("1"):
comment = input("Enter your comment:")
data = {'comment_id': len(comments) +1,
'comment': comment,
'timestamp': datetime.datetime.now() ,
'created_by': username
}
comments.append(data)
return comments
elif userinput == str("2"):
comment_id = int(input('Enter comment id:'))
if not comment_id:
return "Enter comment id: "
comment = next((comment for comment in comments if comment["comment_id"] == comment_id), False)
if comment == False:
return "No comment found"
edit = input("Enter your comment here:")
comment["comment"] = edit
return comments
elif userinput == str("3"):
comment_id = int(input('Enter comment id'))
if not comment_id:
return 'Enter comment id'
comment = next((comment for comment in comments if comment["comment_id"] == comment_id), False)
if comment == False:
return "No comment found"
comments.remove(comment)
return comments
else:
login()
print(login())<file_sep>aniso8601==4.0.1
Click==7.0
Flask==1.0.2
Flask-API==1.0
Flask-JWT-Extended==3.14.0
Flask-RESTful==0.3.6
itsdangerous==1.1.0
Jinja2==2.10
MarkupSafe==1.1.0
PyJWT==1.7.1
pytz==2018.7
six==1.12.0
Werkzeug==0.14.1
<file_sep># commandlineblog
A command line program using python 3 that implements the functionality mentioned above.
| 3041138130a9feda9ee28166ec8c4b6dff593fac | [
"Markdown",
"Python",
"Text"
] | 3 | Python | IssaIan/commandlineblog | 2cfe197ed85bec3a24b31f55d43e8d1c54d9f7e4 | 1b23f68bbdd6eafed627d00654def752f52488f8 |
refs/heads/main | <file_sep>import React from "react";
import { Map, Marker, Popup, TileLayer } from "react-leaflet";
import "./list-item.css";
export function ListItem(props) {
const position = [
props.parc.geometry.coordinates[1],
props.parc.geometry.coordinates[0]
];
console.log(position);
return (
<div className="card">
<div className="map">
<Map center={position} className="map-container" zoom="15">
<TileLayer
attribution='&copy <a href="http://osm.org/copyright">OpenStreetMap</a> contributors'
url="https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png"
/>
<Marker position={position}>
<Popup>{props.parc.fields["nom_complet"]}</Popup>
</Marker>
</Map>
</div>
<h2>{props.parc.fields["nom_complet"]}</h2>
<div className="infos">
<span className="address">{props.parc.fields["adresse"]}</span>
<span className="transport">
{props.parc.fields["acces_transport_commun"]}
</span>
</div>
</div>
);
}
<file_sep>import React from "react";
import "./styles.css";
import { List } from "./component/list/list";
import "leaflet/dist/leaflet.css";
export default function App() {
return (
<div className="App">
<h1>Pique-nique Time</h1>
<List />
</div>
);
}
<file_sep>import React, { useEffect, useState } from "react";
import { ListItem } from "../list-item/list-item";
import "./list.css";
export function List() {
const [parcs, setParcs] = useState({ records: [] });
useEffect(() => {
fetch(
"https://data.nantesmetropole.fr/api/records/1.0/search/?dataset=244400404_parcs-jardins-nantes&refine.table_pique_nique=OUI"
)
.then((response) => response.json())
.then((data) => setParcs(data));
}, []);
return (
<div className="card-container">
{parcs.records.map((element) => (
<ListItem parc={element} key={element.recordid} />
))}
</div>
);
}
<file_sep># react-park-list
Created with CodeSandbox
| a995f49870d8975942bd13e3382b0b718cafa681 | [
"JavaScript",
"Markdown"
] | 4 | JavaScript | mJehanno/react-park-list | 0e2fd0e1021bf3677b8914647e80fa04d1d3e814 | 06dd3db22011c989be733b86db01bac9404aecb6 |
refs/heads/master | <file_sep>// Quiz contructor
function Quiz(questions) {
this.score = 0;
this.questions = questions;
this.currentQuestionIndex = 0;
this.wrongAnswers = 0;
this.answer = false;
}
// Record the user's selected choice: bump the right counter, flag that an
// answer was given, and advance to the next question.
Quiz.prototype.userAnswer = function(userAnswer) {
  var correct = this.getCurrentQuestion().isCorrectAnswer(userAnswer);
  if (correct) {
    this.score += 1;
  } else {
    this.wrongAnswers += 1;
  }
  this.answer = true;
  this.currentQuestionIndex += 1;
};
// Accessor: the question the player is currently facing.
Quiz.prototype.getCurrentQuestion = function() {
  var i = this.currentQuestionIndex;
  return this.questions[i];
};
// The quiz is over once the index has walked past the final question.
Quiz.prototype.hasEnded = function() {
  return !(this.currentQuestionIndex < this.questions.length);
};
<file_sep>// var intervalId;
// var userChoice,
// question,
// answerA,
// answerB,
// answerC,
// answerD,
// choices,
// correct = 0,
// incorrect = 0,
// unanswered = 0;
// var quizQuestions = [
// [
// 'The average person does what thirteen times a day?',
// 'Laugh',
// 'Fart',
// 'Eat',
// 'Sleep',
// 'A'
// ],
// [
// "In Texas it's illegal to swear in front of a what??",
// 'Judge',
// 'Corpse',
// 'Cowboys',
// 'Pigs',
// 'B'
// ],
// [
// 'Coprastastaphobia is the fear of what?',
// 'Constipation',
// 'Flees',
// 'Cockroaches',
// 'Snakes',
// 'A'
// ],
// [
// "In South Dakota it's illegal to fall down and sleep where?",
// 'Dominos',
// 'Chipotle',
// 'Cheese Factory',
// 'Panda Express',
// 'C'
// ],
// [
// "It's illegal in Georgia to do what with a fork?",
// 'Eat Fish Tacos',
// 'Kill a Crab',
// 'Scratch your back',
// 'Eat Fried Chicken',
// 'D'
// ],
// [
// 'As of May 2017, which of these is NOT the name of a Pokemon Professor?',
// 'Oak',
// 'Juniper',
// 'Ficus',
// 'Sycamore',
// 'C'
// ],
// [
// 'The Average American does what 22 times a day?',
// 'Opens Fridge',
// 'Drinks Soda',
// 'Snapchats',
// 'Instagrams',
// 'A'
// ],
// [
// "n California you can't legally buy a mousetrap without having a what?",
// 'Mousetrap',
// 'Knife',
// 'Gun',
// 'Hunting license',
// 'D'
// ],
// [
// 'Which of these items is NOT a one-use held item?',
// 'Air Balloon',
// 'Energy Root',
// 'Sitrus Berry',
// 'Hyper Potion',
// 'D'
// ],
// [
// 'What is <NAME> afraid of?',
// 'His Ex Gf',
// 'Dogs',
// 'Clowns',
// 'Ghost/Dark',
// 'C'
// ]
// ];
// var quiz = {
// time: 500,
// start: function() {
// $('#quizStart').hide();
// //$('.jumbotron').css('background-color', '#79C9FA');
// $('#quizDiv').html(
// "<div id='timeDiv'><h2><center>Time Left: <span id='timeLeft'>500</span></center></h2></div>"
// );
// quiz.renderQuestions();
// $('#quizEnd').show();
// quiz.time = 500;
// correct = 0;
// incorrect = 0;
// unanswered = 0;
// clearInterval(intervalId);
// intervalId = setInterval(quiz.countdown, 1000);
// },
// countdown: function() {
// quiz.time--;
// $('#timeLeft').text(quiz.time);
// if (quiz.time === 0) {
// quiz.generateResultDiv();
// }
// },
// renderQuestions: function() {
// for (i = 0; i < quizQuestions.length; i++) {
// question = quizQuestions[i][0];
// answerA = quizQuestions[i][1];
// answerB = quizQuestions[i][2];
// answerC = quizQuestions[i][3];
// answerD = quizQuestions[i][4];
// $('#quizDiv').append(
// "<div id='question" + (i + 1) + "'><h3>" + question + '</h3></div>'
// );
// $('#question' + (i + 1)).append(
// "<input type='radio' name='answers" +
// (i + 1) +
// "' value='A'> " +
// answerA +
// '<br>'
// );
// $('#question' + (i + 1)).append(
// "<input type='radio' name='answers" +
// (i + 1) +
// "' value='B'> " +
// answerB +
// '<br>'
// );
// $('#question' + (i + 1)).append(
// "<input type='radio' name='answers" +
// (i + 1) +
// "' value='C'> " +
// answerC +
// '<br>'
// );
// $('#question' + (i + 1)).append(
// "<input type='radio' name='answers" +
// (i + 1) +
// "' value='D'> " +
// answerD +
// '<br>'
// );
// }
// },
// generateResultDiv: function() {
// for (j = 0; j < quizQuestions.length; j++) {
// userChoice = $('#question' + (j + 1))
// .find('input:checked')
// .val();
// if (userChoice === quizQuestions[j][5]) {
// correct++;
// } else if (
// userChoice !== quizQuestions[j][5] &&
// userChoice !== undefined
// ) {
// incorrect++;
// } else {
// unanswered++;
// }
// }
// $('#quizDiv').html("<div id='resultsDiv'></div");
// $('#resultsDiv').append(
// '<center><h2>Congratulations! Here is your score!</h2></center><br>'
// );
// $('#resultsDiv').append(
// '<center><h3>You got ' + correct + ' questions correct!</h3></center>'
// );
// $('#resultsDiv').append(
// '<center><h3>And you got ' + incorrect + ' questions wrong!</h3></center>'
// );
// $('#resultsDiv').append(
// "<center><h3>And you didn't answer " +
// unanswered +
// ' questions!</h3></center>'
// );
// $('#resultsDiv').append(
// '<center><h3>Wanna try again? Click the button at the top!</h3></center>'
// );
// $('#quizEnd').hide();
// $('#quizStart').show();
// }
// };
// $(document).ready(function() {
// $('#quizStart').click(quiz.start);
// $('#quizEnd').click(quiz.generateResultDiv);
// });
//---CREATE QUESTIONS
// Pool of true/false trivia questions shown to the player.
// Question(promptText, choiceLabels, correctChoiceLabel); the Question
// constructor is defined in a sibling script file.
var questions = [
    new Question(
        'Though it is difficult, you are able to start a fire by rapidly rubbing two cool ranch doritos together for a long period of time.',
        ['True, but it is difficult', 'False'],
        'True, but it is difficult'
    ),
    new Question(
        'Singing in the shower lowers your cholesterol, heart rate and risk of cancer and heart disease.',
        ['True, but I am just guessing', 'False'],
        'False'
    ),
    new Question(
        'In the weightlessness of space, if a frozen pea touches pepsi it will blow up.',
        ['True', 'False'],
        'True'
    ),
    new Question(
        'Monkeys are related to fish because if need be they can breathe underwater.',
        ['True', 'False'],
        'False'
    ),
    new Question(
        'Men are 4 times more likely to be struck by lightening than women',
        ['True', 'False'],
        'True'
    )
];
//---CREATE QUIZ
// Single shared quiz instance; the QuizUI methods read this global.
var quiz = new Quiz(questions);
//DISPLAY QUIZ
QuizUI.display();
// timer
// Tick once per second; myTriviaTimer updates the countdown display.
var triviaTimer = setInterval(function() {
    myTriviaTimer();
}, 1000);
// Seconds remaining, counted down by myTriviaTimer (matches the
// 100000 ms hard stop scheduled below).
var triviaTime = 100;
// Countdown tick: writes the remaining seconds into the #timer element.
// Once the clock reaches zero the interval is cleared instead of being
// left to fire a no-op every second for the rest of the session.
function myTriviaTimer() {
  if (triviaTime !== 0) {
    document.getElementById('timer').innerHTML = triviaTime--;
  } else {
    clearInterval(triviaTimer);
  }
}
// Trivia Over
// Hard stop after 100 seconds (100000 ms, matching the 100-tick
// countdown): show the final score screen.
var triviaOver = setTimeout(myTriviaOver, 100000);
function myTriviaOver() {
    QuizUI.displayScore();
}
<file_sep>var QuizUI = {
// init function to show Trivia UI
display: function() {
console.log('inside display');
if (quiz.hasEnded()) {
console.log('inside ended');
this.displayScore();
this.restart('restart');
} else {
this.displayQuestion();
this.displayChoices();
this.displayProgress();
}
},
//display the Trivia question
displayQuestion: function() {
var currentQuestionNumber = quiz.currentQuestionIndex + 1;
var questionHTML = '<h3>Question ' + currentQuestionNumber + '</h3>';
questionHTML += '<h4>' + quiz.getCurrentQuestion().question + '</h4>';
this.populateIdWithHTML('question', questionHTML);
},
// display answer choices
displayChoices: function() {
var choices = quiz.getCurrentQuestion().choices;
for (var i = 0; i < choices.length; i++) {
//this.populateIdWithHTML('choice' + i, choices[i]);
this.answerHandler('guess' + i, choices[i]);
}
},
//displayScore
displayScore: function() {
var triviaOverHTML = '<h1>Trivia Over</h1>';
triviaOverHTML += '<h2>Your score is: ' + quiz.score + '</h2>';
triviaOverHTML += '<h2>Wrong answers: ' + quiz.wrongAnswers + '</h2>';
triviaOverHTML +=
"<button id='restart' type='button' class='btn btn-lg'>Start Over</button>";
this.populateIdWithHTML('quiz', triviaOverHTML);
this.restart('restart');
},
populateIdWithHTML: function(id, question) {
var element = document.getElementById(id);
element.innerHTML = question;
},
// TODO: display result
displayAnswer: function(guess) {
var triviaResultHTML = '<h1>Trivia</h1>';
// var triviaResultHTML =
// '<h3>Time remaining(seconds):<span id="timer"></span></h3>';
triviaResultHTML += "<button id='restart'>Start Over</button>";
// if (quiz.getCurrentQuestion().isCorrectAnswer(guess)) {
// triviaResultHTML += '<h2>Correct answer</h2>';
// } else {
// triviaResultHTML += '<h2>Wrong answer</h2>';
// triviaResultHTML +=
// '<h2>Correct answer is:' + quiz.getCurrentQuestion().answer + '</h2>';
// }
//triviaResultHTML += '<button id="next">Next</button>';
//triviaResultHTML += "<button id='restart'>Start Over</button>";
this.populateIdWithHTML('quiz', triviaResultHTML);
this.next('restart');
},
//event handler for selecting answers
answerHandler: function(id, guess) {
var button = document.getElementById(id);
button.onclick = function() {
quiz.userAnswer(guess);
QuizUI.display();
button.style.outline = 0;
};
},
// display number of questions remaining
displayProgress: function() {
var currentQuestionNumber = quiz.currentQuestionIndex + 1;
this.populateIdWithHTML(
'progress',
'Question ' + currentQuestionNumber + ' of ' + quiz.questions.length
);
},
//event handler for restarting Trivia
restart: function(id) {
var restartButton = document.getElementById(id);
restartButton.onclick = function() {
location.reload();
};
},
next: function(id) {
var nextButton = document.getElementById(id);
nextButton.onclick = function() {
Quiz.answer = false;
QuizUI.display();
};
}
};
| acdeecb544dceecb78d57266ce8eb91ab11f4785 | [
"JavaScript"
] | 3 | JavaScript | cheetahM/triviaGame | 18287a8a01e9403b84ae6c6c0238946483afeef2 | 86f3233e6eb2594bfb6547206062a71ee0b3be04 |
refs/heads/main | <repo_name>MidAtlanticPortal/mp-drawing<file_sep>/drawing/migrations/0001_initial.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.contrib.gis.db.models.fields
from django.conf import settings
class Migration(migrations.Migration):
    """Initial schema for the ``drawing`` app.

    Creates the two user-ownable spatial feature models, ``AOI`` and
    ``WindEnergySite``, each storing an original and a post-manipulator
    polygon geometry in web mercator (SRID 3857), plus sharing groups
    and a generic content-type reference.

    Auto-generated by Django; avoid hand-editing applied migrations.
    """

    dependencies = [
        ('auth', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('contenttypes', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='AOI',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # NOTE(review): max_length is a string here; later migration 0004 corrects it to an int.
                ('name', models.CharField(max_length='255', verbose_name='Name')),
                ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date Created')),
                ('date_modified', models.DateTimeField(auto_now=True, verbose_name='Date Modified')),
                ('object_id', models.PositiveIntegerField(null=True, blank=True)),
                ('manipulators', models.TextField(help_text='csv list of manipulators to be applied', null=True, verbose_name='Manipulator List', blank=True)),
                ('geometry_orig', django.contrib.gis.db.models.fields.PolygonField(srid=3857, null=True, verbose_name='Original Polygon Geometry', blank=True)),
                ('geometry_final', django.contrib.gis.db.models.fields.PolygonField(srid=3857, null=True, verbose_name='Final Polygon Geometry', blank=True)),
                ('description', models.TextField(null=True, blank=True)),
                ('content_type', models.ForeignKey(related_name='drawing_aoi_related', blank=True, to='contenttypes.ContentType', null=True, on_delete=django.db.models.deletion.SET_NULL)),
                ('sharing_groups', models.ManyToManyField(related_name='drawing_aoi_related', editable=False, to='auth.Group', blank=True, null=True, verbose_name='Share with the following groups')),
                ('user', models.ForeignKey(related_name='drawing_aoi_related', to=settings.AUTH_USER_MODEL, on_delete=django.db.models.deletion.CASCADE)),
            ],
            options={
                'verbose_name': 'AOI',
                'verbose_name_plural': 'AOIs',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='WindEnergySite',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length='255', verbose_name='Name')),
                ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date Created')),
                ('date_modified', models.DateTimeField(auto_now=True, verbose_name='Date Modified')),
                ('object_id', models.PositiveIntegerField(null=True, blank=True)),
                ('manipulators', models.TextField(help_text='csv list of manipulators to be applied', null=True, verbose_name='Manipulator List', blank=True)),
                ('geometry_orig', django.contrib.gis.db.models.fields.PolygonField(srid=3857, null=True, verbose_name='Original Polygon Geometry', blank=True)),
                ('geometry_final', django.contrib.gis.db.models.fields.PolygonField(srid=3857, null=True, verbose_name='Final Polygon Geometry', blank=True)),
                ('description', models.TextField(null=True, blank=True)),
                ('content_type', models.ForeignKey(related_name='drawing_windenergysite_related', blank=True, to='contenttypes.ContentType', null=True, on_delete=django.db.models.deletion.SET_NULL)),
                ('sharing_groups', models.ManyToManyField(related_name='drawing_windenergysite_related', editable=False, to='auth.Group', blank=True, null=True, verbose_name='Share with the following groups')),
                ('user', models.ForeignKey(related_name='drawing_windenergysite_related', to=settings.AUTH_USER_MODEL, on_delete=django.db.models.deletion.CASCADE)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
    ]
<file_sep>/README.md
# mp-drawing
Drawing for marine planner refactor.
<file_sep>/drawing/migrations/0002_auto_20161220_1642.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.contrib.gis.db.models.fields
class Migration(migrations.Migration):
    """Relax the geometry columns on both drawing models.

    ``geometry_orig``/``geometry_final`` change from ``PolygonField`` to
    the generic ``GeometryField`` so non-polygon shapes can be stored;
    the SRID stays 3857.  Auto-generated by Django.
    """

    dependencies = [
        ('drawing', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='aoi',
            name='geometry_final',
            field=django.contrib.gis.db.models.fields.GeometryField(srid=3857, null=True, verbose_name='Final Polygon Geometry', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='aoi',
            name='geometry_orig',
            field=django.contrib.gis.db.models.fields.GeometryField(srid=3857, null=True, verbose_name='Original Polygon Geometry', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='windenergysite',
            name='geometry_final',
            field=django.contrib.gis.db.models.fields.GeometryField(srid=3857, null=True, verbose_name='Final Polygon Geometry', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='windenergysite',
            name='geometry_orig',
            field=django.contrib.gis.db.models.fields.GeometryField(srid=3857, null=True, verbose_name='Original Polygon Geometry', blank=True),
            preserve_default=True,
        ),
    ]
<file_sep>/drawing/migrations/0005_auto_20190710_2310.py
# Generated by Django 2.2.3 on 2019-07-10 23:10
from django.db import migrations, models
class Migration(migrations.Migration):
    """Drop ``null=True`` from both ``sharing_groups`` M2M fields.

    ``null`` has no effect on ``ManyToManyField`` (Django ignores it), so
    this is a no-op at the database level that silences the system-check
    warning.  Auto-generated by Django.
    """

    dependencies = [
        ('drawing', '0004_auto_20190710_0058'),
    ]

    operations = [
        migrations.AlterField(
            model_name='aoi',
            name='sharing_groups',
            field=models.ManyToManyField(blank=True, editable=False, related_name='drawing_aoi_related', to='auth.Group', verbose_name='Share with the following groups'),
        ),
        migrations.AlterField(
            model_name='windenergysite',
            name='sharing_groups',
            field=models.ManyToManyField(blank=True, editable=False, related_name='drawing_windenergysite_related', to='auth.Group', verbose_name='Share with the following groups'),
        ),
    ]
<file_sep>/drawing/admin.py
from django.contrib.gis import admin
from .models import AOI, WindEnergySite
# Expose both drawing feature models in the Django admin, using the
# OpenStreetMap-backed map widget for editing their geometry fields.
admin.site.register(AOI, admin.OSMGeoAdmin)
admin.site.register(WindEnergySite, admin.OSMGeoAdmin)
<file_sep>/drawing/rpc.py
from rpc4django import rpcmethod
@rpcmethod(login_required=True)
def delete_drawing(uid, **kwargs):
    """RPC endpoint: delete the drawing feature identified by ``uid``.

    The feature is deleted only when the requesting user is permitted to
    view it; otherwise the call is a silent no-op (unchanged from the
    original behaviour, so existing RPC clients are unaffected).

    :param uid: feature UID understood by ``features.registry``.
    :param kwargs: rpc4django passes the Django ``request`` here.
    """
    # Imported lazily, matching the original (avoids import cycles at load
    # time).  The redundant re-import of ``rpcmethod`` was removed.
    from features.registry import get_feature_by_uid

    request = kwargs.get('request')
    drawing_obj = get_feature_by_uid(uid)
    # is_viewable returns a (bool, HttpResponse) pair; only the flag matters here.
    viewable, _response = drawing_obj.is_viewable(request.user)
    if viewable:
        drawing_obj.delete()
<file_sep>/drawing/__init__.py
from __future__ import absolute_import
from drawing.rpc import *
<file_sep>/drawing/migrations/0004_auto_20190710_0058.py
# Generated by Django 2.2.3 on 2019-07-10 00:58
import django.contrib.gis.db.models.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Django 2.2 regeneration of field definitions on both models.

    Functionally this coerces ``name``'s ``max_length`` from the string
    ``'255'`` to the integer ``255`` and re-declares the other fields with
    reordered keyword arguments.  Auto-generated by Django.
    """

    dependencies = [
        ('drawing', '0003_auto_20190510_0001'),
    ]

    operations = [
        migrations.AlterField(
            model_name='aoi',
            name='date_created',
            field=models.DateTimeField(auto_now_add=True, verbose_name='Date Created'),
        ),
        migrations.AlterField(
            model_name='aoi',
            name='date_modified',
            field=models.DateTimeField(auto_now=True, verbose_name='Date Modified'),
        ),
        migrations.AlterField(
            model_name='aoi',
            name='geometry_final',
            field=django.contrib.gis.db.models.fields.GeometryField(blank=True, null=True, srid=3857, verbose_name='Final Polygon Geometry'),
        ),
        migrations.AlterField(
            model_name='aoi',
            name='geometry_orig',
            field=django.contrib.gis.db.models.fields.GeometryField(blank=True, null=True, srid=3857, verbose_name='Original Polygon Geometry'),
        ),
        migrations.AlterField(
            model_name='aoi',
            name='manipulators',
            field=models.TextField(blank=True, help_text='csv list of manipulators to be applied', null=True, verbose_name='Manipulator List'),
        ),
        migrations.AlterField(
            model_name='aoi',
            name='name',
            field=models.CharField(max_length=255, verbose_name='Name'),
        ),
        migrations.AlterField(
            model_name='aoi',
            name='sharing_groups',
            field=models.ManyToManyField(blank=True, editable=False, null=True, related_name='drawing_aoi_related', to='auth.Group', verbose_name='Share with the following groups'),
        ),
        migrations.AlterField(
            model_name='windenergysite',
            name='date_created',
            field=models.DateTimeField(auto_now_add=True, verbose_name='Date Created'),
        ),
        migrations.AlterField(
            model_name='windenergysite',
            name='date_modified',
            field=models.DateTimeField(auto_now=True, verbose_name='Date Modified'),
        ),
        migrations.AlterField(
            model_name='windenergysite',
            name='geometry_final',
            field=django.contrib.gis.db.models.fields.GeometryField(blank=True, null=True, srid=3857, verbose_name='Final Polygon Geometry'),
        ),
        migrations.AlterField(
            model_name='windenergysite',
            name='geometry_orig',
            field=django.contrib.gis.db.models.fields.GeometryField(blank=True, null=True, srid=3857, verbose_name='Original Polygon Geometry'),
        ),
        migrations.AlterField(
            model_name='windenergysite',
            name='manipulators',
            field=models.TextField(blank=True, help_text='csv list of manipulators to be applied', null=True, verbose_name='Manipulator List'),
        ),
        migrations.AlterField(
            model_name='windenergysite',
            name='name',
            field=models.CharField(max_length=255, verbose_name='Name'),
        ),
        migrations.AlterField(
            model_name='windenergysite',
            name='sharing_groups',
            field=models.ManyToManyField(blank=True, editable=False, null=True, related_name='drawing_windenergysite_related', to='auth.Group', verbose_name='Share with the following groups'),
        ),
    ]
<file_sep>/requirements.txt
-e <EMAIL>:point97/madrona-features.git@HEAD#egg=madrona_features-master
<file_sep>/drawing/urls.py
try:
from django.urls import re_path, include
except (ModuleNotFoundError, ImportError) as e:
from django.conf.urls import url as re_path, include
from .views import *
urlpatterns = [
    # Endpoint returning the requesting user's saved drawings (see views.get_drawings).
    re_path(r'^get_drawings$', get_drawings),
    # Feature report pages; the captured digits are the feature's primary key.
    re_path(r'^wind_report/(\d+)', wind_analysis, name='wind_analysis'), # user-requested wind energy site analysis
    re_path(r'^aoi_report/(\d+)', aoi_analysis, name='aoi_analysis'), # user-requested area of interest analysis
]
| c2ce4b9565fd611782eb8e0a55b7c4b0b3e92ef0 | [
"Markdown",
"Python",
"Text"
] | 10 | Python | MidAtlanticPortal/mp-drawing | c5ff3d918678bcfaa91274632ba4bc3462748bbb | d1c16807119d82fd285cbeb2563a494dc76210d5 |
refs/heads/master | <file_sep>using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Minimal physics-driven vehicle: applies a constant force along the
// transform's right axis every physics step.
public class VehicleController : MonoBehaviour
{
    // Magnitude of the driving force applied each FixedUpdate.
    public float m_speed = 5.0f;

    // Rigidbody moved by this controller; auto-populated in OnValidate.
    public Rigidbody rigid;

    private void Start()
    {
        // Log the rigidbody's centre of mass for tuning purposes.
        Debug.Log(rigid.centerOfMass);
    }

    // The empty Update() with a no-op mouse-button branch was removed:
    // Unity invokes even empty Update methods every frame via reflection.

    private void FixedUpdate()
    {
        // Drive the vehicle along its local right axis.
        rigid.AddForce(transform.right * m_speed);
    }

    private void OnValidate()
    {
        // Keep the Rigidbody reference populated when edited in the inspector.
        if (rigid == null)
        {
            rigid = GetComponent<Rigidbody>();
        }
    }
}
| af38212020fbd33de18cb004590ad91ef398d0ee | [
"C#"
] | 1 | C# | Hyoso/VehicleControls | c945cfd84ddb609bd65fbd8b1c93358a9c673699 | 59acb456301855c109eb5c84267dbd2cd2d82437 |
refs/heads/master | <file_sep>def reformat_languages(languages)
hash = {}
languages.each do |style, language|
language.each do |lang, type|
if !hash.has_key?(lang)
hash[lang] = {}
end
if !hash[lang].has_key?(:style)
hash[lang][:style] = []
end
hash[lang][:style] << style
hash[lang][:type] = type[:type]
end
end
puts hash.inspect
hash
end
| 26230ffc4580df8fca99ea3117c98d3a695fbdac | [
"Ruby"
] | 1 | Ruby | mray6288/programming_languages-prework | da394fb47585b3318ae72f4255cc38856a06134c | f0014c77c8cc1b0ea974a21f9dfcf83e21d641b0 |
refs/heads/master | <file_sep>input.onGesture(Gesture.Shake, function () {
basic.showNumber(number[randint(0, number.length - 1)])
basic.showString("" + (letter[randint(0, letter.length - 1)]))
basic.showString("" + (_symbol[randint(0, _symbol.length - 1)]))
basic.showNumber(number[randint(0, number.length - 1)])
basic.showString("" + (letter[randint(0, letter.length - 1)]))
basic.showString("" + (_symbol[randint(0, _symbol.length - 1)]))
})
let _symbol: string[] = []
let letter: string[] = []
let number: number[] = []
number = [
0,
1,
2,
3,
4,
5,
6,
7,
8,
9
]
letter = [
"a",
"b",
"c",
"d",
"e",
"z",
"y",
"x",
"w",
"v"
]
_symbol = [
"!",
"@",
"#",
"$",
"%"
]
| 09f1dc41a93faa44d111d36f16e026891e9f8093 | [
"TypeScript"
] | 1 | TypeScript | jrflorentine/password-generator | acd9f6814463c2baecdff9cc4dea325468ce9d52 | 359e92c3abf51cf6ebe5e68919bc0cba9c82f735 |
refs/heads/master | <file_sep>"use strict"
// Maps board-definition characters to cell descriptors.
// type: null = open space, 'wall' = wall segment, 'target' = robot goal.
// Targets carry the color of the robot that must reach them and the
// symbol drawn on the board; 'r' is the wildcard "cosmic" target any
// robot may complete.
var SYMBOL_TABLE = {
    ' ': {type: null, color: null, symbol: null},
    'C': {type: 'wall', color: null, symbol: null},
    '-': {type: 'wall', color: null, symbol: null},
    '+': {type: 'wall', color: null, symbol: null},
    '|': {type: 'wall', color: null, symbol: null},
    'a': {type: 'target', color: 'red', symbol: 'moon'},
    'b': {type: 'target', color: 'yellow', symbol: 'moon'},
    'c': {type: 'target', color: 'blue', symbol: 'moon'},
    'd': {type: 'target', color: 'green', symbol: 'moon'},
    'e': {type: 'target', color: 'red', symbol: 'gear'},
    'f': {type: 'target', color: 'yellow', symbol: 'gear'},
    'g': {type: 'target', color: 'blue', symbol: 'gear'},
    'h': {type: 'target', color: 'green', symbol: 'gear'},
    'i': {type: 'target', color: 'red', symbol: 'saturn'},
    'j': {type: 'target', color: 'yellow', symbol: 'saturn'},
    'k': {type: 'target', color: 'blue', symbol: 'saturn'},
    'l': {type: 'target', color: 'green', symbol: 'saturn'},
    'm': {type: 'target', color: 'red', symbol: 'star'},
    'n': {type: 'target', color: 'yellow', symbol: 'star'},
    'o': {type: 'target', color: 'blue', symbol: 'star'},
    'p': {type: 'target', color: 'green', symbol: 'star'},
    'r': {type: 'target', color: 'cosmic', symbol: 'cosmic'}
};
// Shallow-clones a 2d array: fresh outer and row arrays, with the cell
// values themselves shared between original and copy.
var clone_array = function(arr2d) {
    return arr2d.map(function(row) {
        return row.slice();
    });
}
// Represents one of the four quarter-board pieces.
// `string` is a newline-separated square grid of SYMBOL_TABLE characters,
// written in NW orientation (the board's centre at its bottom-right
// corner).  The constructor exposes the layout pre-rotated for all four
// corner positions (NW, NE, SE, SW) plus the grid size.
// Throws when the grid is not square.
var Tile = function(string) {
    var self = this;

    // Translate the string into a 2d array of SYMBOL_TABLE cell descriptors.
    var arr = string.split('\n'); // split string into an array of lines
    var layout = [];
    $.each(arr, function(n, row_string) {
        var row = [];
        for (var i = 0; i < row_string.length; i++) {
            var c = row_string.charAt(i);
            row.push(SYMBOL_TABLE[c]);
        }
        layout.push(row);
    });

    // Validate squareness: every row must be exactly as long as the grid
    // is tall.  (Error message typo "now square" fixed.)
    $.each(layout, function(n, row) {
        if (row.length != layout.length) {
            throw("Tile: Input string is not square");
        }
    });
    self.size = layout.length;

    // Returns the passed 2d array rotated 90 degrees clockwise.
    var rotate = function(orig) {
        // Start with one empty row per column of the original.
        var rotated = []; $.each(orig, function() { rotated.push([]); });
        $.each(orig, function(n, row) {
            $.each(row, function(j, cell) {
                rotated[j].unshift(cell);
            });
        });
        return rotated;
    }

    // The same tile as seen in each corner of the assembled board.
    self.NW = clone_array(layout);
    self.NE = rotate(self.NW);
    self.SE = rotate(self.NE);
    self.SW = rotate(self.SE);
    return self;
}
// A full playing board assembled from four quarter Tiles
// (tile1 -> NW orientation, tile2 -> NE, tile3 -> SW, tile4 -> SE).
// The shared centre row/column of adjacent quarters is merged so that a
// wall on either side of a seam becomes a wall on the combined board.
//
// Bug fixes versus the original:
//  * seam cells are SYMBOL_TABLE descriptors, so wall checks must compare
//    `.type` (comparing the object to the string 'wall' never matched);
//  * merged walls are written as cell objects, keeping the layout uniform
//    (State.valid reads `.type` on every cell);
//  * the vertical merge now iterates every COLUMN of the half layouts,
//    which are wider than they are tall (the old bound was the row count).
var Board = function(tile1, tile2, tile3, tile4) {
    // A wall cell in the same shape as SYMBOL_TABLE entries.
    var WALL_CELL = {type: 'wall', color: null, symbol: null};

    // Join two quarter layouts side by side, merging the shared centre column.
    var add_horizontal = function(left_layout, right_layout) {
        var layout = clone_array(left_layout);
        // For each row
        for (var i = 0; i < left_layout.length; i++) {
            var left_cell = left_layout[i][left_layout.length-1]; // last cell of left layout
            var right_cell = right_layout[i][0]; // first cell of right layout
            // If either edge cell is a wall the merged centre cell is a wall.
            if (left_cell.type == 'wall' || right_cell.type == 'wall') {
                layout[i][layout.length - 1] = WALL_CELL;
            }
            // Merge left and right rows, dropping the first cell of the right row
            layout[i] = layout[i].concat(right_layout[i].slice(1))
        }
        return layout;
    }

    // Stack two half layouts, merging the shared centre row.
    var add_vertical = function(top_layout, bottom_layout) {
        var layout = clone_array(top_layout);
        // For each column (half layouts are (2n-1) cells wide but n tall).
        for (var i = 0; i < top_layout[0].length; i++) {
            var bottom_cell = top_layout[top_layout.length - 1][i]; // bottom row of top layout
            var top_cell = bottom_layout[0][i]; // top row of bottom layout
            if (bottom_cell.type == 'wall' || top_cell.type == 'wall') {
                layout[layout.length - 1][i] = WALL_CELL;
            }
        }
        // Merge top and bottom, dropping the first row of the bottom layout
        layout = layout.concat(bottom_layout.slice(1));
        return layout;
    }

    //-- Setup Code
    var self = this;
    // Check that each tile is the same size.
    if (tile1.size != tile2.size || tile1.size != tile3.size || tile1.size != tile4.size) {
        throw "Board: All tiles must be the same size";
    }
    // Top half: NW quarter of tile1 beside NE quarter of tile2.
    var top_layout = add_horizontal(tile1.NW, tile2.NE);
    // Bottom half: SW quarter of tile3 beside SE quarter of tile4.
    var bottom_layout = add_horizontal(tile3.SW, tile4.SE);
    // Stack the two halves, merging the centre row.
    self.layout = add_vertical(top_layout, bottom_layout);
    // Sanity check: the combined board must be square.
    $.each(self.layout, function(n, row) {
        if (row.length != self.layout.length) {
            throw("Board: board is not square");
        }
    });
    self.size = self.layout.length;
    return self;
}
// One node of the move-search tree: just the robot positions.  The board
// is deliberately NOT stored on the State to save space when traversing
// the moves tree.
var State = function(robots) {
    var self = this;
    // Copy the set of robots so this state owns its own position objects.
    self.robots = {};
    $.each(robots, function(name, p) {
        self.robots[name] = {x: p.x, y: p.y};
    });
    // Hash string uniquely identifying this robot configuration
    // ("x|y|x|y|..."), suitable for visited-set lookups.
    var hash_arr = [];
    $.each(self.robots, function(n, r) {
        hash_arr.push(r.x, r.y);
    });
    self.hash = hash_arr.join('|');
    // Is a single step in the given direction a legal move?  The board
    // grid interleaves wall cells with spaces (robots sit on alternating
    // cells — see Game's draw, where odd rows/columns are spaces), so
    // this checks the in-between cell for a wall, then the landing cell
    // two steps away for bounds and robot occupancy.
    var valid = function(board, robot, direction) {
        var x = robot.x; var y = robot.y;
        // TODO: change this to a switch statement
        var inc = function() {
            if (direction == 'up') { y--; }
            else if (direction == 'down') { y++; }
            else if (direction == 'right') { x++; }
            else if (direction == 'left') { x--; }
            else { throw "direction must be one of up, down, left, right, got " + direction; }
        }
        inc();
        // Check there is no wall between here and the next space
        if ((board[y] == null) || // row doesn't exist
            (board[y][x] == null) || // column doesn't exist
            (board[y][x].type == 'wall')) // space is a wall
        { return false; }
        inc();
        // Check that the landing space exists on the board
        if ((board[y] == null) || // row doesn't exist
            (board[y][x] == null)) // column doesn't exist
        { return false; }
        var blocked = false;
        $.each(self.robots, function(name, o) { // space isn't occupied by another robot
            if (o.x == x && o.y == y) { blocked = true; }
        });
        if (blocked == true) { return false; }
        // If all the other tests failed the move is valid
        return true;
    }
    // Returns an array of States reachable from here: each robot slides
    // in each direction (two grid cells per space) until `valid` fails,
    // and every position that actually moved becomes a new State.
    self.moves = function(board) {
        var moves = []; // Array of states
        $.each(self.robots, function(name, robot) {
            $.each(['up', 'down', 'left', 'right'], function(n, direction) {
                var moved_robot = { x: robot.x, y: robot.y };
                while (valid(board, moved_robot, direction)) {
                    if (direction == 'up') { moved_robot.y -= 2 }
                    else if (direction == 'down') { moved_robot.y += 2 }
                    else if (direction == 'right') { moved_robot.x += 2 }
                    else if (direction == 'left') { moved_robot.x -= 2 }
                    else { throw "direction must be one of up, down, left, right, got " + direction; }
                }
                // If the robot moved at all, create a new state with that move
                if (moved_robot.x != robot.x || moved_robot.y != robot.y) {
                    // Copy robots
                    var move = {};
                    $.each(self.robots, function(n, p) {
                        move[n] = {x: p.x, y: p.y};
                    });
                    move[name] = {x: moved_robot.x, y: moved_robot.y};
                    moves.push(new State(move))
                }
            });
        });
        return moves;
    }
}
var Game = function(board, node) {
var self = this;
var unicode_symbols = {
star: '\u2605', // '★'
gear: '\u2699', // '⚙'
saturn: '\u229a', // '⊚'
moon: '\u263E', // '☾'
cosmic: '\uAA5C', // '꩜'
robot: '\u2603', // '☃'
}
self.board = board;
self.target = null;
// robots x, y positions
self.robots = {
red: null,
green: null,
blue: null,
yellow: null,
}
var draw_board = function() {
// Draw the board
var table = $('<div>').attr('class', 'table');
var board = $('<div>').attr('class', 'board');
table.append($('<div>').attr('class', 'sideboard'));
table.append(board);
$.each(self.board.layout, function(y, row) {
var table_row = $('<div>').attr('class', 'row')
// Each cell
$.each(row, function(x, elem) {
var cell = $('<div>') .attr('data-x-pos', x) .attr('data-y-pos', y);
// Each div is a cell,
var classes = ['cell'];
// Each cell has the class of it's elem's type
if (elem.type != null) { classes.push(elem.type) }
// Odd columned and rowed cells are spaces
if (x % 2 != 0 && y % 2 != 0) { classes.push('space'); }
cell.attr('class', classes.join(' '))
// Add the symbol to the cell
if (elem.symbol != null) {
cell.attr('data-symbol', elem.symbol);
cell.text(unicode_symbols[elem.symbol]);
}
// Add the color to the cell
if (elem.color != null) {
cell.attr('data-color', elem.color);
}
table_row.append(cell);
});
table_row.append($('<div>').attr('class', 'clear'));
board.append(table_row);
})
node.append(table);
}
var draw_target = function() {
if (self.target != null) {
var target = $('<span>')
.text(unicode_symbols[self.target.symbol])
.attr('class', 'target')
.attr('data-color', self.target.color)
node.find('.sideboard').append(target);
}
}
var draw_robots = function() {
$.each(self.robots, function(name, position) {
var robot = $('<span>')
.text(unicode_symbols['robot'])
.attr('class', 'robot ' + name)
.attr('data-color', name)
.attr('draggable', true);
if (position != null && position.x != null && position.y != null) {
node.find('.board [data-x-pos=' + position.x + '][data-y-pos=' + position.y + ']').append(robot);
} else {
node.find('.sideboard').append(robot);
}
});
}
var add_event_listners = function() {
node.find('.robot').bind('dragstart', function(e) {
e = e.originalEvent;
var color = $(this).attr('data-color');
e.dataTransfer.effectAllowed = 'move';
e.dataTransfer.dropEffect = 'move';
e.dataTransfer.setData('color', color);
});
// Allow drop event.
node.find('.space').bind('dragover', function(e) {
e.originalEvent.preventDefault();
});
node.find('.space').bind('drop', function(e) {
e = e.originalEvent;
var elem = $(this);
var color = e.dataTransfer.getData('color')
var position = {x: parseInt(elem.data('x-pos')), y: parseInt(elem.data('y-pos'))};
self.robots[color] = position;
self.update();
});
node.find('.space').bind('dragenter', function(e) {
this.classList.add('over');
});
node.find('.space').bind('dragleave', function(e) {
this.classList.remove('over');
});
node.find('.target').bind('click', function(e) {
var elem = $(this);
var position = {x: parseInt(elem.data('x-pos')), y: parseInt(elem.data('y-pos'))};
var color = elem.data('color');
var symbol = elem.data('symbol');
self.target = {position: position, color: color, symbol: symbol};
self.update();
});
}
self.draw = function() {
node.empty();
draw_board();
draw_robots();
draw_target();
add_event_listners();
}
var count_moves = function() {
console.log('counting moves');
// returns true if the correct robot is in the target position.
var complete;
if (self.target.color == 'cosmic') {
// Any robot can complete the cosmic targets.
complete = function(state) {
var comp = false;
$.each(state.robots, function(n, robot) {
if (robot.x == self.target.position.x && robot.y == self.target.position.y) {
var comp = true
}
});
return comp;
}
} else {
// Colored robots can complete colored targets.
complete = function(state) {
var robot = state.robots[self.target.color]
return (robot.x == self.target.position.x && robot.y == self.target.position.y)
}
}
var first = new State(self.robots);
var queue = [new State(self.robots)];
var next_queue = [];
var num_moves = 0;
var visited = {};
while (next_queue.length > 0 || queue.length > 0) {
// Calculate weight of a move by adding the x and y distances of each robot to the target.
var weight = function(state) {
var sum = 0;
_.each(state.robots, function(robot) {
sum += Math.abs(robot.x - self.target.position.x);
sum += Math.abs(robot.y - self.target.position.y);
});
return sum;
}
// sort queue by weight
queue = _.sortBy(queue, weight)
while (queue.length > 0) {
var next = queue.shift();
console.log(num_moves);
// Check if the puzzle is complete.
if (complete(next)) {
console.log('complete');
return num_moves;
}
// Mark this state as visited.
visited[next.hash] = true;
// Add unvisted moves to the queue.
var is_visited = function(state) {
return visited[state.hash] == true;
}
var unvisted_moves = _.reject(next.moves(self.board.layout), is_visited);
next_queue = next_queue.concat(unvisted_moves);
}
queue = next_queue;
next_queue = [];
num_moves += 1;
}
throw "Exited loop without completing puzzle";
}
self.update = function() {
self.draw();
// TODO: Change this to a 'any' method
var all_placed = true;
$.each(self.robots, function(n, robot) {
if (robot == null) { all_placed = false; }
});
// If all robots are placed and a target is selcted count
// the number of moves the puzzle can be completed in.
if (all_placed && self.target != null) {
var moves = count_moves();
console.log(moves);
}
}
self.update();
return self;
}
$(function (){
console.log('start')
var tiles = {
a1: new Tile(
"+----------------\n" +
"| | \n" +
"| +- \n" +
"| |a \n" +
"| -+ \n" +
"| h| \n" +
"| \n" +
"| n| \n" +
"| -+ \n" +
"| \n" +
"| \n" +
"| \n" +
"+- \n" +
"| |k \n" +
"| +- +-+\n" +
"| |C|\n" +
"| +-+"),
a2: new Tile(
"+----------------\n" +
"| | \n" +
"| \n" +
"| n| \n" +
"| +- -+ \n" +
"| |h \n" +
"| \n" +
"| \n" +
"| \n" +
"| \n" +
"| -+ \n" +
"| k| \n" +
"+- \n" +
"| |a \n" +
"| +- +-+\n" +
"| |C|\n" +
"| +-+"),
b1: new Tile(
"+----------------\n" +
"| | \n" +
"| \n" +
"| p| \n" +
"| -+ \n" +
"| |i \n" +
"| +- \n" +
"| \n" +
"+- +- \n" +
"| |b \n" +
"| \n" +
"| \n" +
"| -+ \n" +
"| g| \n" +
"| +-+\n" +
"| |C|\n" +
"| +-+"),
b2: new Tile(
"+----------------\n" +
"| | \n" +
"| +- \n" +
"| |b \n" +
"| \n" +
"| \n" +
"| \n" +
"| |g \n" +
"| +- \n" +
"| \n" +
"+- -+ \n" +
"| i| \n" +
"| \n" +
"| p| \n" +
"| -+ +-+\n" +
"| |C|\n" +
"| +-+"),
c1: new Tile(
"+----------------\n" +
"| | \n" +
"| \n" +
"| \n" +
"| \n" +
"| o| \n" +
"| -+ \n" +
"| \n" +
"| -+ \n" +
"| d| \n" +
"+- \n" +
"| |e \n" +
"| +- +- \n" +
"| |j \n" +
"| +-+\n" +
"| |C|\n" +
"| +-+"),
c2: new Tile(
"+----------------\n" +
"| | \n" +
"| \n" +
"| |e \n" +
"| +- -+ \n" +
"| d| \n" +
"| \n" +
"| \n" +
"| \n" +
"| o| \n" +
"| -+ +- \n" +
"| |j \n" +
"+- \n" +
"| \n" +
"| +-+\n" +
"| |C|\n" +
"| +-+"),
d1: new Tile(
"+----------------\n" +
"| | \n" +
"| \n" +
"| |c \n" +
"| +- \n" +
"| \n" +
"| -+ \n" +
"| f| \n" +
"| +- \n" +
"| |l \n" +
"| \n" +
"| m| r|\n" +
"| -+ -+\n" +
"| \n" +
"+- +-+\n" +
"| |C|\n" +
" +-+"),
d2: new Tile(
"+----------------\n" +
"| | \n" +
"| \n" +
"| m| \n" +
"| -+ \n" +
"| \n" +
"| \n" +
"| |l \n" +
"| +- +- \n" +
"| |f \n" +
"+- \n" +
"| \n" +
"| -+ \n" +
"| c| \n" +
"| +-+\n" +
"| r| |C|\n" +
" -+ +-+"
)
}
var board = new Board(tiles['a1'], tiles['b1'], tiles['c1'], tiles['d1']);
var game = new Game(board, $('body'));
});
<file_sep># Ricochet Robots
Uses bredth first search to find the least number of moves a ricochet robots
puzzle can be solved in.
Bredth first search will examine all one move solutions before any two move
solutions, and all two move solutions before any three move solutions.
The number of possible moves to examine increases exponentially as the
number of moves increases.
BFS will find with certianty the shortest solution, but it will take too
long.
I need to find an algorithm that finds probable solutions and rates them
on confidince.
# symbols
There are 377,993,952,000 unique board positions
96 boards, 252 spaces, 4 robots. (96 * 252 * 251 * 250 * 249)
93,742,500,096,000 with the silver robot. (above * 248)
- [x] Draw target symbols
- [ ] Select board segments and sides
- [x] Place Robots
- [x] Select target
- [x] Calculate moves
- [ ] Calculate the most moves it could take to solve any situation
- [ ] Draw the moves that can be used to complete the puzzle.
- [ ] Replace rotation code with lin alg
- [ ] Try likely n + 1 move solutions before trying all n move solutions
- [ ] Replace $.each with _.each
- [ ] Clean up code using lodash
| 1193f5a650f00851800a0eb3974eb0544fd2f441 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | everett1992/ricochet_robots | 67d76a99a6fda2aaee9f750a64cb49d440f9f8af | 3721759b6525ee1b1071499e020177eaec9507e2 |
refs/heads/master | <file_sep><project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>acto.actoapp.vl</groupId>
<artifactId>ActoApp</artifactId>
<version>0.0.1-SNAPSHOT</version>
<properties>
<project.java.version>1.8</project.java.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.9.10</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.seleniumhq.selenium/selenium-server -->
<dependency>
<groupId>org.seleniumhq.selenium</groupId>
<artifactId>selenium-server</artifactId>
<version>3.14.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.seleniumhq.selenium/selenium-java -->
<dependency>
<groupId>org.seleniumhq.selenium</groupId>
<artifactId>selenium-java</artifactId>
<version>3.14.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/io.appium/java-client -->
<dependency>
<groupId>io.appium</groupId>
<artifactId>java-client</artifactId>
<version>7.0.0</version>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.6</version>
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/com.googlecode.json-simple/json-simple -->
<dependency>
<groupId>com.googlecode.json-simple</groupId>
<artifactId>json-simple</artifactId>
<version>1.1</version>
</dependency>
<dependency>
<groupId>com.browserstack</groupId>
<artifactId>browserstack-local-java</artifactId>
<version>1.0.2</version>
</dependency>
<dependency>
<groupId>info.cukes</groupId>
<artifactId>cucumber-java</artifactId>
<version>1.2.5</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>info.cukes</groupId>
<artifactId>cucumber-jvm</artifactId>
<version>1.2.5</version>
<type>pom</type>
</dependency>
<dependency>
<groupId>info.cukes</groupId>
<artifactId>cucumber-junit</artifactId>
<version>1.2.5</version>
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/info.cukes/cucumber-testng -->
<dependency>
<groupId>info.cukes</groupId>
<artifactId>cucumber-testng</artifactId>
<version>1.2.5</version>
</dependency>
<dependency>
<groupId>net.sourceforge.cobertura</groupId>
<artifactId>cobertura</artifactId>
<version>2.1.1</version>
</dependency>
<dependency>
<groupId>info.cukes</groupId>
<artifactId>cucumber-jvm-deps</artifactId>
<version>1.0.5</version>
</dependency>
<dependency>
<groupId>net.masterthought</groupId>
<artifactId>cucumber-reporting</artifactId>
<version>4.3.0</version>
</dependency>
<dependency>
<groupId>info.cukes</groupId>
<artifactId>gherkin</artifactId>
<version>2.12.2</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>2.0.2-beta</version>
</dependency>
<!-- https://mvnrepository.com/artifact/io.cucumber/cucumber-java -->
<dependency>
<groupId>io.cucumber</groupId>
<artifactId>cucumber-java</artifactId>
<version>2.0.0</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>3.16</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>3.16</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml-schemas</artifactId>
<version>3.16</version>
</dependency>
<dependency>
<groupId>org.apache.pdfbox</groupId>
<artifactId>pdfbox-app</artifactId>
<version>2.0.7</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20170516</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.vimalselvam/cucumber-extentsreport -->
<dependency>
<groupId>com.vimalselvam</groupId>
<artifactId>cucumber-extentsreport</artifactId>
<version>3.0.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.relevantcodes/extentreports -->
<dependency>
<groupId>com.relevantcodes</groupId>
<artifactId>extentreports</artifactId>
<version>2.41.2</version>
</dependency>
<dependency>
<groupId>com.aventstack</groupId>
<artifactId>extentreports</artifactId>
<version>3.0.0</version>
</dependency>
<!-- <dependency> <groupId>com.aventstack</groupId> <artifactId>extentreports</artifactId>
<version>3.0.6</version> </dependency> -->
<dependency>
<groupId>com.github.temyers</groupId>
<artifactId>cucumber-jvm-parallel-plugin</artifactId>
<version>4.2.0</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version> <!-- or whatever current version -->
<configuration>
<source>${project.java.version}</source>
<target>${project.java.version}</target>
<encoding>${project.build.sourceEncoding}</encoding>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.20.1</version>
<configuration>
<testFailureIgnore>true</testFailureIgnore>
</configuration>
</plugin>
<plugin>
<groupId>net.masterthought</groupId>
<artifactId>maven-cucumber-reporting</artifactId>
<version>3.13.0</version>
<executions>
<execution>
<id>execution</id>
<phase>verify</phase>
<goals>
<goal>generate</goal>
</goals>
<configuration>
<projectName>ACTO</projectName>
<outputDirectory>${project.build.directory}/cucumber-report-html</outputDirectory>
<cucumberOutput>${project.build.directory}/cucumber.json</cucumberOutput>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project><file_sep>package actoapp.ios.pages;
import actoapp.util.helpers.BaseObject;
import io.appium.java_client.ios.IOSDriver;
import io.appium.java_client.ios.IOSElement;
public class UpdateAvailablePopup extends BaseObject{
@SuppressWarnings("rawtypes")
public UpdateAvailablePopup(IOSDriver driver)
{
super(driver);
}
/**
* This method creates IOSElement object based on property given.
* @return IOSElement for the property named <b>Update_Accessibility_ID</b>.
*/
public IOSElement updateBtn(){
return findWebElementFromPropertyName("Update_Accessibility_ID", timeout);
}
/**
* This method creates IOSElement object based on property given.
* @return IOSElement for the property named <b>NotNow_Accessibility_ID</b>.
*/
public IOSElement notNowBtn(){
return findWebElementFromPropertyName("NotNow_Accessibility_ID", timeout);
}
}
<file_sep>package actoapp.util.helpers;
import java.util.HashMap;
import org.openqa.selenium.By;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.Keys;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.RemoteWebElement;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
import io.appium.java_client.MobileBy;
import io.appium.java_client.ios.IOSDriver;
import io.appium.java_client.ios.IOSElement;
public class BaseObject {
public PropertyReader properties= new PropertyReader();
public long timeout = 25;
@SuppressWarnings("rawtypes")
public IOSDriver driver = null;
@SuppressWarnings("rawtypes")
public BaseObject(IOSDriver driver)
{
this.driver = driver;
}
public By by_From_PropertyFile(String propertyName)
{
System.out.println("\nAction: Find Element\t\tName: "+propertyName+"\t\tValue: "+String.format(properties.readProperty(propertyName)));
if(propertyName.contains("_ID"))
{
return MobileBy.AccessibilityId(properties.readProperty(propertyName));
}
else if(propertyName.contains("_XPATH"))
{
return MobileBy.xpath(properties.readProperty(propertyName));
}
else
{
System.err.println("Please provide valid property name");
}
return null;
}
public IOSElement findElement(String locator)
{
return (IOSElement) driver.findElement(by_From_PropertyFile(locator));
}
public void click(String locator)
{
findElement(locator).click();
}
public void clickAction(String locator , long timeout)
{
System.out.println("\nAction: Click\t\t\tIOSElement: Element Found in Previous Step");
findWebElementFromPropertyName(locator,timeout).click();
}
public String getAttribute(String locator, String attribute)
{
return findElement(locator).getAttribute(attribute);
}
public String getCssValue(String locator, String cssValue)
{
return findElement(locator).getCssValue(cssValue);
}
public void type(String locator, Keys keysss[])
{
findElement(locator).clear();
int i=0;
for (Keys key : keysss)
{
if(i > 0)
{
findElement(locator).sendKeys(",");
}
System.out.println("\nAction: Text Entry\t\tText: "+key);
findElement(locator).sendKeys(key);
i++;
}
}
public void type(String locator, long timeout, String text)
{
System.out.println("\nAction: Text Entry\t\tText: "+text);
findWebElementFromPropertyName(locator,timeout).clear();
findWebElementFromPropertyName(locator,timeout).sendKeys(text);
}
public String verifyElementClass(String locator)
{
return findWebElementFromPropertyName(locator,timeout).getAttribute("class");
}
public boolean verifyElementIsDisplayed(String locator)
{
return findWebElementFromPropertyName(locator,timeout).isDisplayed();
}
public boolean verifyElementIsEnabled(String locator)
{
return findWebElementFromPropertyName(locator,timeout).isEnabled();
}
public boolean verifyTextInElement(String locator, String text)
{
return findWebElementFromPropertyName(locator,timeout).getText().contains(text);
}
public IOSElement findWebElementFromPropertyName(String locator, long timeout){
return waitForElementToBePresent(by_From_PropertyFile(locator),timeout);
}
public void jsScrollIntoView(WebElement element){
((JavascriptExecutor) driver).executeScript("arguments[0].scrollIntoView(true);", element);
}
public IOSElement waitForElementToBePresent(By locatorType, long timeout)
{
return (IOSElement) new WebDriverWait(driver, timeout).until(
ExpectedConditions.elementToBeClickable(locatorType));
}
public void threadWait(int seconds){
try {
System.out.println("\nAction: Wait\t\t\tDuration: "+seconds+" seconds");
Thread.sleep(seconds*1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
public void enterText(String locator, long timeout, String textToEnter){
System.out.println("\nAction: Text Entry\t\tText: "+textToEnter);
IOSElement TextField = findWebElementFromPropertyName(locator,timeout);
try {
TextField.clear();
} catch (Exception e) {
System.out.println("Unable to Clear");
}
TextField.sendKeys(textToEnter);
}
public void scrollMobileScreenDown()
{
JavascriptExecutor js = (JavascriptExecutor) driver;
HashMap<String, String> scrollObject = new HashMap<String, String>();
scrollObject.put("direction", "down");
js.executeScript("mobile: scroll", scrollObject);
}
public void scrollUsingDirectionAndElement(WebElement element)
{
JavascriptExecutor js = (JavascriptExecutor) driver;
HashMap<String, String> scrollObject = new HashMap<String, String>();
scrollObject.put("direction", "down");
scrollObject.put("element", ((RemoteWebElement) element).getId());
js.executeScript("mobile: scroll", scrollObject);
}
}
<file_sep>TimeOut = 25
Notifications_Popup
*******************
Notifications_Popup_Accessibility_ID = Allow
Allow_Accessibility_ID = Allow
NotAllow_Accessibility_ID = Not Allow
Update
******
App_Update_Available_Popup_Accessibility_ID = Update
Update_Accessibility_ID = Update
NotNow_Accessibility_ID = Not Now
Acto_Splash
***********
Acto_Splash_Accessibility_ID = Log In
LogIn_Accessibility_ID = Log In
TakeATour_Accessibility_ID = Take a Tour
TryADemo_Accessibility_ID = Try a Demo
Lets_Get_Started
****************
Lets_Get_Started_Accessibility_ID = Continue
domainName_TextField_XPATH = //XCUIElementTypeApplication[@name=\"ACTO\"]/XCUIElementTypeWindow[1]/XCUIElementTypeOther/XCUIElementTypeOther/XCUIElementTypeOther/XCUIElementTypeOther/XCUIElementTypeOther/XCUIElementTypeOther[2]/XCUIElementTypeTextField
continueBtn_Accessibility_ID = Continue
SignIn_Page
***********
userName_TextField_XPATH = //XCUIElementTypeApplication[@name=\"ACTO\"]/XCUIElementTypeWindow[1]/XCUIElementTypeOther/XCUIElementTypeOther/XCUIElementTypeOther/XCUIElementTypeOther/XCUIElementTypeOther/XCUIElementTypeOther[2]/XCUIElementTypeTextField
SignIn_Page_Accessibility_ID = Sign In With Email
SignInWithEmail_Accessibility_ID = Sign In With Email
password_TextField_XPATH = //XCUIElementTypeApplication[@name=\"ACTO\"]/XCUIElementTypeWindow[1]/XCUIElementTypeOther/XCUIElementTypeOther/XCUIElementTypeOther/XCUIElementTypeOther/XCUIElementTypeOther/XCUIElementTypeOther[2]/XCUIElementTypeSecureTextField
SignInBtn_Accessibility_ID = Sign In
OOPS
****
OOPS_Page_Accessibility_ID = Oops
Oops_Ok_Accessibility_ID = OK
EmailWrong_Oops_Ok_Accessibility_ID = Ok
DomainWrong_Oops_Ok_Accessibility_ID = Ok<file_sep>package actoapp.ios.pages;
import actoapp.util.helpers.BaseObject;
import io.appium.java_client.ios.IOSDriver;
import io.appium.java_client.ios.IOSElement;
public class Acto_Splash_Screen extends BaseObject{
@SuppressWarnings("rawtypes")
public Acto_Splash_Screen(IOSDriver driver)
{
super(driver);
}
/**
* This method creates IOSElement object based on property given.
* @return IOSElement for the property named <b>LogIn_Accessibility_ID</b>.
*/
public IOSElement logInBtn(){
return findWebElementFromPropertyName("LogIn_Accessibility_ID", timeout);
}
/**
* This method creates IOSElement object based on property given.
* @return IOSElement for the property named <b>TakeATour_Accessibility_ID</b>.
*/
public IOSElement takeAtourBtn(){
return findWebElementFromPropertyName("TakeATour_Accessibility_ID", timeout);
}
/**
* This method creates IOSElement object based on property given.
* @return IOSElement for the property named <b>TryADemo_Accessibility_ID</b>.
*/
public IOSElement tryAdemoBtn(){
return findWebElementFromPropertyName("TryADemo_Accessibility_ID", timeout);
}
}
<file_sep>package actoapp.ios.pages;
import actoapp.util.helpers.BaseObject;
import io.appium.java_client.ios.IOSDriver;
import io.appium.java_client.ios.IOSElement;
public class Login_Page extends BaseObject{
@SuppressWarnings("rawtypes")
public Login_Page(IOSDriver driver)
{
super(driver);
}
/**
* This method creates IOSElement object based on property given.
* @return IOSElement for the property named <b>userName_TextField_XPATH</b>.
*/
public IOSElement usernameTextFld(){
return findWebElementFromPropertyName("userName_TextField_XPATH", timeout);
}
/**
* This method creates IOSElement object based on property given.
* @return IOSElement for the property named <b>pwd_TextField_XPATH</b>.
*/
public IOSElement passwordTextFld(){
return findWebElementFromPropertyName("pwd_TextField_XPATH", timeout);
}
/**
* This method creates IOSElement object based on property given.
* @return IOSElement for the property named <b>SignInBtn_Accessibility_ID</b>.
*/
public IOSElement signInBtn(){
return findWebElementFromPropertyName("SignInBtn_Accessibility_ID", timeout);
}
}
| 15ad99ffafc5f650401b2795ff9613013fa90b56 | [
"Java",
"Maven POM",
"INI"
] | 6 | Maven POM | chari6477/Acto-Parallel-Testing | bcde28d910f3d7ad9407a399a54039c5164a2a6b | 0dcd5ab02d9f7f879c2241b0874f2eb90557afec |
refs/heads/master | <file_sep><!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<meta name="generator" content="pandoc" />
<meta http-equiv="X-UA-Compatible" content="IE=EDGE" />
<title>TP53 reporter library scan - detailed analysis</title>
<script src="site_libs/header-attrs-2.13/header-attrs.js"></script>
<script src="site_libs/jquery-3.6.0/jquery-3.6.0.min.js"></script>
<meta name="viewport" content="width=device-width, initial-scale=1" />
<link href="site_libs/bootstrap-3.3.5/css/united.min.css" rel="stylesheet" />
<script src="site_libs/bootstrap-3.3.5/js/bootstrap.min.js"></script>
<script src="site_libs/bootstrap-3.3.5/shim/html5shiv.min.js"></script>
<script src="site_libs/bootstrap-3.3.5/shim/respond.min.js"></script>
<style>h1 {font-size: 34px;}
h1.title {font-size: 38px;}
h2 {font-size: 30px;}
h3 {font-size: 24px;}
h4 {font-size: 18px;}
h5 {font-size: 16px;}
h6 {font-size: 12px;}
code {color: inherit; background-color: rgba(0, 0, 0, 0.04);}
pre:not([class]) { background-color: white }</style>
<script src="site_libs/jqueryui-1.11.4/jquery-ui.min.js"></script>
<link href="site_libs/tocify-1.9.1/jquery.tocify.css" rel="stylesheet" />
<script src="site_libs/tocify-1.9.1/jquery.tocify.js"></script>
<script src="site_libs/navigation-1.1/tabsets.js"></script>
<script src="site_libs/navigation-1.1/codefolding.js"></script>
<style type="text/css">
code{white-space: pre-wrap;}
span.smallcaps{font-variant: small-caps;}
span.underline{text-decoration: underline;}
div.column{display: inline-block; vertical-align: top; width: 50%;}
div.hanging-indent{margin-left: 1.5em; text-indent: -1.5em;}
ul.task-list{list-style: none;}
</style>
<style type="text/css">
code {
white-space: pre;
}
.sourceCode {
overflow: visible;
}
</style>
<style type="text/css" data-origin="pandoc">
pre > code.sourceCode { white-space: pre; position: relative; }
pre > code.sourceCode > span { display: inline-block; line-height: 1.25; }
pre > code.sourceCode > span:empty { height: 1.2em; }
.sourceCode { overflow: visible; }
code.sourceCode > span { color: inherit; text-decoration: inherit; }
div.sourceCode { margin: 1em 0; }
pre.sourceCode { margin: 0; }
@media screen {
div.sourceCode { overflow: auto; }
}
@media print {
pre > code.sourceCode { white-space: pre-wrap; }
pre > code.sourceCode > span { text-indent: -5em; padding-left: 5em; }
}
pre.numberSource code
{ counter-reset: source-line 0; }
pre.numberSource code > span
{ position: relative; left: -4em; counter-increment: source-line; }
pre.numberSource code > span > a:first-child::before
{ content: counter(source-line);
position: relative; left: -1em; text-align: right; vertical-align: baseline;
border: none; display: inline-block;
-webkit-touch-callout: none; -webkit-user-select: none;
-khtml-user-select: none; -moz-user-select: none;
-ms-user-select: none; user-select: none;
padding: 0 4px; width: 4em;
color: #aaaaaa;
}
pre.numberSource { margin-left: 3em; border-left: 1px solid #aaaaaa; padding-left: 4px; }
div.sourceCode
{ }
@media screen {
pre > code.sourceCode > span > a:first-child::before { text-decoration: underline; }
}
code span.al { color: #ff0000; font-weight: bold; } /* Alert */
code span.an { color: #60a0b0; font-weight: bold; font-style: italic; } /* Annotation */
code span.at { color: #7d9029; } /* Attribute */
code span.bn { color: #40a070; } /* BaseN */
code span.bu { color: #008000; } /* BuiltIn */
code span.cf { color: #007020; font-weight: bold; } /* ControlFlow */
code span.ch { color: #4070a0; } /* Char */
code span.cn { color: #880000; } /* Constant */
code span.co { color: #60a0b0; font-style: italic; } /* Comment */
code span.cv { color: #60a0b0; font-weight: bold; font-style: italic; } /* CommentVar */
code span.do { color: #ba2121; font-style: italic; } /* Documentation */
code span.dt { color: #902000; } /* DataType */
code span.dv { color: #40a070; } /* DecVal */
code span.er { color: #ff0000; font-weight: bold; } /* Error */
code span.ex { } /* Extension */
code span.fl { color: #40a070; } /* Float */
code span.fu { color: #06287e; } /* Function */
code span.im { color: #008000; font-weight: bold; } /* Import */
code span.in { color: #60a0b0; font-weight: bold; font-style: italic; } /* Information */
code span.kw { color: #007020; font-weight: bold; } /* Keyword */
code span.op { color: #666666; } /* Operator */
code span.ot { color: #007020; } /* Other */
code span.pp { color: #bc7a00; } /* Preprocessor */
code span.sc { color: #4070a0; } /* SpecialChar */
code span.ss { color: #bb6688; } /* SpecialString */
code span.st { color: #4070a0; } /* String */
code span.va { color: #19177c; } /* Variable */
code span.vs { color: #4070a0; } /* VerbatimString */
code span.wa { color: #60a0b0; font-weight: bold; font-style: italic; } /* Warning */
.sourceCode .row {
width: 100%;
}
.sourceCode {
overflow-x: auto;
}
.code-folding-btn {
margin-right: -30px;
}
</style>
<script>
// apply pandoc div.sourceCode style to pre.sourceCode instead
// Pandoc emits highlighting CSS targeting div.sourceCode, but this page
// renders highlighted code inside pre.sourceCode elements. This IIFE scans
// every stylesheet pandoc generated (ownerNode tagged data-origin="pandoc"),
// finds div.sourceCode rules that set a color or background-color, and
// replaces each with an equivalent pre.sourceCode rule so the styling
// still takes effect.
(function() {
var sheets = document.styleSheets;
for (var i = 0; i < sheets.length; i++) {
// Only touch stylesheets pandoc marked as its own.
if (sheets[i].ownerNode.dataset["origin"] !== "pandoc") continue;
// cssRules throws (SecurityError) for cross-origin sheets — skip those.
try { var rules = sheets[i].cssRules; } catch (e) { continue; }
for (var j = 0; j < rules.length; j++) {
var rule = rules[j];
// check if there is a div.sourceCode rule
if (rule.type !== rule.STYLE_RULE || rule.selectorText !== "div.sourceCode") continue;
var style = rule.style.cssText;
// check if color or background-color is set
if (rule.style.color === '' && rule.style.backgroundColor === '') continue;
// replace div.sourceCode by a pre.sourceCode rule
// Delete-then-insert at the same index j keeps the rule list and the
// loop counter consistent (the new rule occupies the old slot).
sheets[i].deleteRule(j);
sheets[i].insertRule('pre.sourceCode{' + style + '}', j);
}
}
})();
</script>
<style type = "text/css">
.main-container {
max-width: 940px;
margin-left: auto;
margin-right: auto;
}
img {
max-width:100%;
}
.tabbed-pane {
padding-top: 12px;
}
.html-widget {
margin-bottom: 20px;
}
button.code-folding-btn:focus {
outline: none;
}
summary {
display: list-item;
}
details > summary > p:only-child {
display: inline;
}
pre code {
padding: 0;
}
</style>
<style type="text/css">
.dropdown-submenu {
position: relative;
}
.dropdown-submenu>.dropdown-menu {
top: 0;
left: 100%;
margin-top: -6px;
margin-left: -1px;
border-radius: 0 6px 6px 6px;
}
.dropdown-submenu:hover>.dropdown-menu {
display: block;
}
.dropdown-submenu>a:after {
display: block;
content: " ";
float: right;
width: 0;
height: 0;
border-color: transparent;
border-style: solid;
border-width: 5px 0 5px 5px;
border-left-color: #cccccc;
margin-top: 5px;
margin-right: -10px;
}
.dropdown-submenu:hover>a:after {
border-left-color: #adb5bd;
}
.dropdown-submenu.pull-left {
float: none;
}
.dropdown-submenu.pull-left>.dropdown-menu {
left: -100%;
margin-left: 10px;
border-radius: 6px 0 6px 6px;
}
</style>
<script type="text/javascript">
// manage active state of menu based on current page
// On DOM ready: (1) mark the navbar link matching the current page as
// active (including any parent dropdown), and (2) inject a <style> element
// that pads the body and offsets section headings so content is not hidden
// behind the fixed-top navbar.
$(document).ready(function () {
// active menu anchor
// Declared with var (was an implicit global leaking onto window).
var href = window.location.pathname;
href = href.substr(href.lastIndexOf('/') + 1);
if (href === "")
href = "index.html";
var menuAnchor = $('a[href="' + href + '"]');
// mark it active
menuAnchor.tab('show');
// if it's got a parent navbar menu mark it active as well
menuAnchor.closest('li.dropdown').addClass('active');
// Navbar adjustments
// 15px of breathing room below the measured navbar height.
var navHeight = $(".navbar").first().height() + 15;
var style = document.createElement('style');
var pt = "padding-top: " + navHeight + "px; ";
var mt = "margin-top: -" + navHeight + "px; ";
var css = "";
// offset scroll position for anchor links (for fixed navbar)
// The padding/negative-margin pair on h1..h6 shifts anchor targets down
// so in-page links land below the fixed navbar.
for (var i = 1; i <= 6; i++) {
css += ".section h" + i + "{ " + pt + mt + "}\n";
}
style.innerHTML = "body {" + pt + "padding-bottom: 40px; }\n" + css;
document.head.appendChild(style);
});
</script>
<!-- tabsets -->
<style type="text/css">
.tabset-dropdown > .nav-tabs {
display: inline-table;
max-height: 500px;
min-height: 44px;
overflow-y: auto;
border: 1px solid #ddd;
border-radius: 4px;
}
.tabset-dropdown > .nav-tabs > li.active:before {
content: "";
font-family: 'Glyphicons Halflings';
display: inline-block;
padding: 10px;
border-right: 1px solid #ddd;
}
.tabset-dropdown > .nav-tabs.nav-tabs-open > li.active:before {
content: "";
border: none;
}
.tabset-dropdown > .nav-tabs.nav-tabs-open:before {
content: "";
font-family: 'Glyphicons Halflings';
display: inline-block;
padding: 10px;
border-right: 1px solid #ddd;
}
.tabset-dropdown > .nav-tabs > li.active {
display: block;
}
.tabset-dropdown > .nav-tabs > li > a,
.tabset-dropdown > .nav-tabs > li > a:focus,
.tabset-dropdown > .nav-tabs > li > a:hover {
border: none;
display: inline-block;
border-radius: 4px;
background-color: transparent;
}
.tabset-dropdown > .nav-tabs.nav-tabs-open > li {
display: block;
float: none;
}
.tabset-dropdown > .nav-tabs > li {
display: none;
}
</style>
<!-- code folding -->
<style type="text/css">
.code-folding-btn { margin-bottom: 4px; }
</style>
<style type="text/css">
#TOC {
margin: 25px 0px 20px 0px;
}
@media (max-width: 768px) {
#TOC {
position: relative;
width: 100%;
}
}
@media print {
.toc-content {
/* see https://github.com/w3c/csswg-drafts/issues/4434 */
float: right;
}
}
.toc-content {
padding-left: 30px;
padding-right: 40px;
}
div.main-container {
max-width: 1200px;
}
div.tocify {
width: 20%;
max-width: 260px;
max-height: 85%;
}
@media (min-width: 768px) and (max-width: 991px) {
div.tocify {
width: 25%;
}
}
@media (max-width: 767px) {
div.tocify {
width: 100%;
max-width: none;
}
}
.tocify ul, .tocify li {
line-height: 20px;
}
.tocify-subheader .tocify-item {
font-size: 0.90em;
}
.tocify .list-group-item {
border-radius: 0px;
}
.tocify-subheader {
display: inline;
}
.tocify-subheader .tocify-item {
font-size: 0.95em;
}
</style>
</head>
<body>
<div class="container-fluid main-container">
<!-- setup 3col/9col grid for toc_float and main content -->
<div class="row">
<div class="col-xs-12 col-sm-4 col-md-3">
<div id="TOC" class="tocify">
</div>
</div>
<div class="toc-content col-xs-12 col-sm-8 col-md-9">
<div class="navbar navbar-default navbar-fixed-top" role="navigation">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-bs-toggle="collapse" data-target="#navbar" data-bs-target="#navbar">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="index.html">TP53 Reporter Scan</a>
</div>
<div id="navbar" class="navbar-collapse collapse">
<ul class="nav navbar-nav">
<li>
<a href="index.html">Info</a>
</li>
<li>
<a href="barcode-processing-all.html">Data Quality Checks</a>
</li>
<li>
<a href="cDNA-processing-all.html">Detailed Reporter Activity Analysis</a>
</li>
</ul>
<ul class="nav navbar-nav navbar-right">
</ul>
</div><!--/.nav-collapse -->
</div><!--/.container -->
</div><!--/.navbar -->
<div id="header">
<div class="btn-group pull-right float-right">
<button type="button" class="btn btn-default btn-xs btn-secondary btn-sm dropdown-toggle" data-toggle="dropdown" data-bs-toggle="dropdown" aria-haspopup="true" aria-expanded="false"><span>Code</span> <span class="caret"></span></button>
<ul class="dropdown-menu dropdown-menu-right" style="min-width: 50px;">
<li><a id="rmd-show-all-code" href="#">Show All Code</a></li>
<li><a id="rmd-hide-all-code" href="#">Hide All Code</a></li>
</ul>
</div>
<h1 class="title toc-ignore">TP53 reporter library scan - detailed
analysis</h1>
<h4 class="author"><NAME></h4>
<address class="author_afil">
Netherlands Cancer Institute - van Steensel
lab<br><a class="author_email" href="mailto:#"><a
href="mailto:<EMAIL>"
class="email"><EMAIL></a></a>
</address>
<h4 class="date">14/06/2023</h4>
</div>
<hr />
<div id="introduction" class="section level3">
<h3>Introduction</h3>
<p>~6,000 barcoded TP53 reporters were probed in MCF7 TP53WT/KO cells
and stimulated with Nutlin-3a. I previously processed the raw sequencing
data, quantified the pDNA data and normalized the cDNA data. In this
script, a detailed dissection of the reporter activities will be carried
out to understand how TP53 drives transcription and to identify the most
sensitive TP53 reporters.</p>
<hr />
</div>
<div id="setup" class="section level2 tabset">
<h2 class="tabset">Setup</h2>
<!-- little HTML script to do indentation of the table of contents -->
<script>
$(document).ready(function() {
$items = $('div#TOC li');
$items.each(function(idx) {
num_ul = $(this).parentsUntil('#TOC').length;
$(this).css({'text-indent': num_ul * 10, 'padding-left': 0});
});
});
</script>
<style type="text/css">
div.sourceCode {
overflow-x: hidden;
}
</style>
<div id="libraries" class="section level3">
<h3>Libraries</h3>
<hr />
</div>
<div id="functions" class="section level3">
<h3>Functions</h3>
<hr />
</div>
<div id="load-data" class="section level3">
<h3>Load data</h3>
<p><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-3-1.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-3-2.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-3-3.png" width="100%" style="display: block; margin: auto;" /></p>
<hr />
</div>
</div>
<div id="figure-1-characterize-p53-activities-per-condition"
class="section level2">
<h2>Figure 1: Characterize P53 activities per condition</h2>
<p>Aim: I want to characterize the reporter activity distributions in
the tested conditions. Does Nutlin boost P53 reporter activity and is
P53 inactive in the KO cells?
<img src="cDNA-processing-all_files/figure-html/unnamed-chunk-4-1.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-4-2.png" width="100%" style="display: block; margin: auto;" /></p>
<pre><code>## [1] 0.9685877</code></pre>
<pre><code>## [1] 0.9036356</code></pre>
<pre><code>## [1] 0.903232</code></pre>
<p><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-4-3.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-4-4.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-4-5.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-4-6.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-4-7.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-4-8.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-4-9.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-4-10.png" width="100%" style="display: block; margin: auto;" /></p>
<p>Conclusion: 1F: Replicates do correlate well. 1G: Negative controls
are inactive compared to P53 reporters. P53 reporters become more active
in WT cells and even more active upon Nutlin stimulation.</p>
<hr />
</div>
<div
id="figure-2-effect-of-affinity-and-binding-sites-binding-site-positioning"
class="section level2">
<h2>Figure 2: Effect of affinity and binding sites + binding site
positioning</h2>
<p>Aim: How does the binding site affinity, copy number, and their
respective positioning affect reporter activity?</p>
<p><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-5-1.png" width="100%" style="display: block; margin: auto;" /></p>
<pre><code>## [1] 0.006910845</code></pre>
<p><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-5-2.png" width="100%" style="display: block; margin: auto;" /></p>
<pre><code>## [1] 0.02978148</code></pre>
<p><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-5-3.png" width="100%" style="display: block; margin: auto;" /></p>
<pre><code>## [1] 0.0005569714</code></pre>
<p><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-5-4.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-5-5.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-5-6.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-5-7.png" width="100%" style="display: block; margin: auto;" /></p>
<p>Conclusion: BS006 is the most responsive to Nutlin-3a. Addition of
binding sites is super-additive. Positioning of binding sites matters -
putting them directly next to each other is inhibitory, and putting them
close to the TSS leads to higher activity.</p>
<hr />
<p>Figure 3: The effect of the spacer length.</p>
<p>Aim: Show how the spacer length between adjacent binding sites
affects reporter activity.</p>
<p><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-6-1.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-6-2.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-6-3.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-6-4.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-6-5.png" width="100%" style="display: block; margin: auto;" /></p>
<p>Conclusion: Spacer length influences activity periodically. Adjacent
binding sites need to be 180 degrees tilted with respect to each other
to achieve optimal activation.</p>
<hr />
</div>
<div
id="figure-4-the-effect-of-the-minimal-promoter-and-the-spacer-sequence."
class="section level2">
<h2>Figure 4: The effect of the minimal promoter and the spacer
sequence.</h2>
<p>Aim: Show how the P53 reporters interact with the two minimal
promoters and the three spacer sequences.</p>
<p><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-7-1.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-7-2.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-7-3.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-7-4.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-7-5.png" width="100%" style="display: block; margin: auto;" /></p>
<p>Conclusion: Promoter and spacer sequence influence activity
linearly.</p>
<hr />
</div>
<div id="figure-5-6-linear-model-selection-of-best-reporters"
class="section level2">
<h2>Figure 5 & 6: Linear model + Selection of best reporters</h2>
<p>Aim: Can we explain now every observation using a linear model?
<img src="cDNA-processing-all_files/figure-html/unnamed-chunk-8-1.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-8-2.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-8-3.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-8-4.png" width="100%" style="display: block; margin: auto;" /></p>
<pre><code>## [1] 0.08400584</code></pre>
<pre><code>## MODEL INFO:
## Observations: 263 (1 missing obs. deleted)
## Dependent Variable: log2(reporter_activity)
## Type: OLS linear regression
##
## MODEL FIT:
## F(9,253) = 145.09, p = 0.00
## R² = 0.84
## Adj. R² = 0.83
##
## Standard errors: OLS
## ---------------------------------------------------------------
## Est. S.E. t val. p
## -------------------------------- ------- ------ -------- ------
## (Intercept) 3.07 0.07 41.69 0.00
## promotermCMV 1.30 0.08 15.39 0.00
## background2 -0.89 0.08 -10.59 0.00
## background3 0.37 0.08 4.45 0.00
## spacing_degree_transf 0.50 0.03 14.65 0.00
## affinity_id3_med_only 0.35 0.07 5.12 0.00
## affinity_id5_low_only 1.06 0.07 15.49 0.00
## affinity_id7_very-low_only 0.48 0.07 7.03 0.00
## promotermCMV:background2 0.38 0.12 3.19 0.00
## promotermCMV:background3 -0.82 0.12 -6.95 0.00
## ---------------------------------------------------------------</code></pre>
<p><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-8-5.png" width="100%" style="display: block; margin: auto;" /></p>
<pre><code>## MODEL INFO:
## Observations: 259 (5 missing obs. deleted)
## Dependent Variable: log2(reporter_activity)
## Type: OLS linear regression
##
## MODEL FIT:
## F(9,249) = 158.00, p = 0.00
## R² = 0.85
## Adj. R² = 0.85
##
## Standard errors: OLS
## ---------------------------------------------------------------
## Est. S.E. t val. p
## -------------------------------- ------- ------ -------- ------
## (Intercept) 2.09 0.09 24.50 0.00
## promotermCMV 1.60 0.10 16.73 0.00
## background2 -0.88 0.10 -9.15 0.00
## background3 0.53 0.10 5.49 0.00
## spacing_degree_transf 0.19 0.04 4.84 0.00
## affinity_id3_med_only -0.04 0.08 -0.52 0.60
## affinity_id5_low_only 1.41 0.08 17.97 0.00
## affinity_id7_very-low_only -0.26 0.08 -3.32 0.00
## promotermCMV:background2 0.19 0.14 1.42 0.16
## promotermCMV:background3 -1.15 0.13 -8.53 0.00
## ---------------------------------------------------------------</code></pre>
<p><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-8-6.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-8-7.png" width="100%" style="display: block; margin: auto;" /><img src="cDNA-processing-all_files/figure-html/unnamed-chunk-8-8.png" width="100%" style="display: block; margin: auto;" /></p>
<p>Conclusion: Top reporters are better than commercial reporters. Linear
model gives insights into which features are important to drive high
expression.</p>
</div>
<div id="session-info" class="section level1">
<h1>Session Info</h1>
<div class="sourceCode" id="cb10"><pre class="sourceCode r"><code class="sourceCode r"><span id="cb10-1"><a href="#cb10-1" aria-hidden="true" tabindex="-1"></a><span class="fu">paste</span>(<span class="st">"Run time: "</span>,<span class="fu">format</span>(<span class="fu">Sys.time</span>()<span class="sc">-</span>StartTime))</span></code></pre></div>
<pre><code>## [1] "Run time: 35.29388 secs"</code></pre>
<div class="sourceCode" id="cb12"><pre class="sourceCode r"><code class="sourceCode r"><span id="cb12-1"><a href="#cb12-1" aria-hidden="true" tabindex="-1"></a><span class="fu">getwd</span>()</span></code></pre></div>
<pre><code>## [1] "/DATA/usr/m.trauernicht/projects/P53_reporter_scan/docs"</code></pre>
<div class="sourceCode" id="cb14"><pre class="sourceCode r"><code class="sourceCode r"><span id="cb14-1"><a href="#cb14-1" aria-hidden="true" tabindex="-1"></a><span class="fu">date</span>()</span></code></pre></div>
<pre><code>## [1] "Wed Jun 14 09:42:36 2023"</code></pre>
<div class="sourceCode" id="cb16"><pre class="sourceCode r"><code class="sourceCode r"><span id="cb16-1"><a href="#cb16-1" aria-hidden="true" tabindex="-1"></a><span class="fu">sessionInfo</span>()</span></code></pre></div>
<pre><code>## R version 4.0.5 (2021-03-31)
## Platform: x86_64-pc-linux-gnu (64-bit)
## Running under: Ubuntu 20.04.6 LTS
##
## Matrix products: default
## BLAS: /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so.3
## LAPACK: /usr/lib/x86_64-linux-gnu/openblas-pthread/liblapack.so.3
##
## locale:
## [1] LC_CTYPE=en_US.UTF-8 LC_NUMERIC=C LC_TIME=en_US.UTF-8 LC_COLLATE=en_US.UTF-8 LC_MONETARY=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 LC_PAPER=en_US.UTF-8
## [8] LC_NAME=C LC_ADDRESS=C LC_TELEPHONE=C LC_MEASUREMENT=en_US.UTF-8 LC_IDENTIFICATION=C
##
## attached base packages:
## [1] stats4 grid parallel stats graphics grDevices utils datasets methods base
##
## other attached packages:
## [1] ggrastr_1.0.1 jtools_2.1.4 glmnetUtils_1.1.8 glmnet_4.1-4 Matrix_1.5-1 randomForest_4.6-14
## [7] ROCR_1.0-11 cowplot_1.1.1 ggforce_0.3.3 maditr_0.8.3 PCAtools_2.2.0 ggrepel_0.9.1
## [13] DESeq2_1.30.1 SummarizedExperiment_1.20.0 Biobase_2.50.0 MatrixGenerics_1.2.1 matrixStats_0.62.0 GenomicRanges_1.42.0
## [19] GenomeInfoDb_1.26.7 IRanges_2.24.1 S4Vectors_0.28.1 BiocGenerics_0.36.1 tidyr_1.2.0 viridis_0.6.2
## [25] viridisLite_0.4.0 ggpointdensity_0.1.0 ggbiplot_0.55 scales_1.2.0 factoextra_1.0.7.999 shiny_1.7.1
## [31] pheatmap_1.0.12 gridExtra_2.3 RColorBrewer_1.1-3 readr_2.1.2 haven_2.5.0 ggbeeswarm_0.6.0
## [37] plotly_4.10.0 tibble_3.1.6 dplyr_1.0.8 vwr_0.3.0 latticeExtra_0.6-29 lattice_0.20-41
## [43] stringdist_0.9.8 GGally_2.1.2 ggpubr_0.4.0 ggplot2_3.4.0 stringr_1.4.0 plyr_1.8.7
## [49] data.table_1.14.2
##
## loaded via a namespace (and not attached):
## [1] backports_1.4.1 lazyeval_0.2.2 splines_4.0.5 crosstalk_1.2.0 BiocParallel_1.24.1 digest_0.6.29 foreach_1.5.2
## [8] htmltools_0.5.2 fansi_1.0.3 magrittr_2.0.3 memoise_2.0.1 tzdb_0.3.0 annotate_1.68.0 vroom_1.5.7
## [15] prettyunits_1.1.1 jpeg_0.1-9 colorspace_2.0-3 blob_1.2.3 gitcreds_0.1.1 xfun_0.30 crayon_1.5.1
## [22] RCurl_1.98-1.6 jsonlite_1.8.0 genefilter_1.72.1 iterators_1.0.14 survival_3.2-10 glue_1.6.2 polyclip_1.10-0
## [29] gtable_0.3.0 zlibbioc_1.36.0 XVector_0.30.0 DelayedArray_0.16.3 car_3.0-12 BiocSingular_1.6.0 shape_1.4.6
## [36] abind_1.4-5 DBI_1.1.2 rstatix_0.7.0 Rcpp_1.0.8.3 progress_1.2.2 xtable_1.8-4 dqrng_0.3.0
## [43] bit_4.0.4 rsvd_1.0.5 htmlwidgets_1.5.4 httr_1.4.2 ellipsis_0.3.2 farver_2.1.0 pkgconfig_2.0.3
## [50] reshape_0.8.9 XML_3.99-0.9 sass_0.4.1 locfit_1.5-9.4 utf8_1.2.2 labeling_0.4.2 tidyselect_1.1.2
## [57] rlang_1.0.6 reshape2_1.4.4 later_1.3.0 AnnotationDbi_1.52.0 munsell_0.5.0 tools_4.0.5 cachem_1.0.6
## [64] cli_3.4.1 generics_0.1.2 RSQLite_2.2.12 broom_0.8.0 evaluate_0.15 fastmap_1.1.0 yaml_2.3.5
## [71] knitr_1.38 bit64_4.0.5 pander_0.6.5 purrr_0.3.4 nlme_3.1-152 sparseMatrixStats_1.2.1 mime_0.12
## [78] compiler_4.0.5 rstudioapi_0.13 beeswarm_0.4.0 png_0.1-7 ggsignif_0.6.3 tweenr_1.0.2 geneplotter_1.68.0
## [85] bslib_0.3.1 stringi_1.7.6 highr_0.9 forcats_0.5.1 vctrs_0.5.1 pillar_1.7.0 lifecycle_1.0.3
## [92] jquerylib_0.1.4 bitops_1.0-7 irlba_2.3.5 httpuv_1.6.5 R6_2.5.1 promises_1.2.0.1 vipor_0.4.5
## [99] codetools_0.2-18 MASS_7.3-53.1 assertthat_0.2.1 withr_2.5.0 GenomeInfoDbData_1.2.4 mgcv_1.8-34 hms_1.1.1
## [106] beachmat_2.6.4 rmarkdown_2.13 DelayedMatrixStats_1.12.3 carData_3.0-5 Cairo_1.5-15</code></pre>
</div>
</div>
</div>
</div>
<script>
// add bootstrap table styles to pandoc tables
// Pandoc emits plain <table> elements; locate them via their striped rows
// ('tr.odd') and add Bootstrap's table classes so they pick up the theme.
function bootstrapStylePandocTables() {
$('tr.odd').parent('tbody').parent('table').addClass('table table-condensed');
}
// Run once the DOM is ready so all pandoc-generated tables exist.
$(document).ready(function () {
bootstrapStylePandocTables();
});
</script>
<!-- tabsets -->
<script>
$(document).ready(function () {
window.buildTabsets("TOC");
});
$(document).ready(function () {
$('.tabset-dropdown > .nav-tabs > li').click(function () {
$(this).parent().toggleClass('nav-tabs-open');
});
});
</script>
<!-- code folding -->
<script>
$(document).ready(function () {
window.initializeCodeFolding("hide" === "show");
});
</script>
<script>
// Initialize the floating table of contents (jquery.tocify) once the DOM is ready.
$(document).ready(function () {
// temporarily add toc-ignore selector to headers for the consistency with Pandoc
$('.unlisted.unnumbered').addClass('toc-ignore')
// move toc-ignore selectors from section div to header
$('div.section.toc-ignore')
.removeClass('toc-ignore')
.children('h1,h2,h3,h4,h5').addClass('toc-ignore');
// establish options
var options = {
// headers included in the TOC
selectors: "h1,h2,h3,h4",
theme: "bootstrap3",
context: '.toc-content',
// strip characters that are unsafe in URL fragments; spaces become "_"
hashGenerator: function (text) {
return text.replace(/[.\\/?&!#<>]/g, '').replace(/\s/g, '_');
},
ignoreSelector: ".toc-ignore",
scrollTo: 0
};
options.showAndHide = false;
options.smoothScroll = true;
// tocify
var toc = $("#TOC").tocify(options).data("toc-tocify");
});
</script>
<!-- dynamically load mathjax for compatibility with self-contained -->
<script>
// Dynamically inject the MathJax <script> tag into <head>. Loading it at
// runtime (rather than via a static tag) keeps self-contained HTML output
// working when MathJax cannot be embedded.
(function () {
var script = document.createElement("script");
script.type = "text/javascript";
script.src  = "https://mathjax.rstudio.com/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML";
document.getElementsByTagName("head")[0].appendChild(script);
})();
</script>
</body>
</html>
<file_sep>---
title: "pDNA insert matching"
author: "<NAME>"
date: "`r format(Sys.time(), '%Y-%m-%d')`"
output:
prettydoc::html_pretty:
theme: leonids
highlight: github
# toc: true
# toc_float: true
# code_folding: show
# editor_options:
# chunk_output_type: console
---
*knitr document van Steensel lab*
# Introduction
I sequenced the complete insert of the pDNA library of pMT06. I already extracted all sequences in front of the 3' adapter from the sequencing data and aggregated counts of identical sequences with starcode. I now want to give an overview of how many insert sequences in the pDNA still match the designed inserts.
```{r setup, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
knitr::opts_chunk$set(echo = TRUE)
StartTime <-Sys.time()
# 8-digit Date tag:
Date <- substr(gsub("-","",Sys.time()),1,8)
# libraries:
library(seqinr)
library(ShortRead)
library(plyr)
library(maditr)
library(phylotools)
library(tidyr)
library(readr)
library(dplyr)
library(ggplot2)
library(ggbeeswarm)
library(vwr)
library(d3r)
library(sunburstR)
library(LncFinder)
library(plotly)
library(tibble)
library(shiny)
```
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
SetFileName <- function(filename, initials) {
# Set filename with extension and initials to make filename with date integrated.
# Builds "<initials><Date><filename>". substitute() captures the unevaluated
# argument expressions, so callers may pass unquoted names as well as strings;
# paste0() then coerces the captured symbols to character.
# NOTE(review): depends on the global `Date` tag defined in the setup chunk.
filename <- substitute(filename)
initials <- substitute(initials)
filename <- paste0(initials, Date, filename)
filename
}
ReadFasta <- function(file) {
  # Parse a (possibly multi-line) FASTA file into a data frame with one row per
  # record: 'name' (header text without the leading ">") and 'sequence' (all
  # sequence lines of the record concatenated).
  #
  # Args:
  #   file: path to a FASTA file.
  # Returns: data.frame with columns 'name' and 'sequence'.
  fasta <- readLines(file)
  # Identify header lines. Anchored "^>" so a ">" occurring elsewhere in a
  # line cannot be mistaken for the start of a new record (the previous
  # unanchored grep(">", ...) had that defect).
  ind <- grep("^>", fasta)
  if (length(ind) == 0) {
    stop("No FASTA headers ('>') found in: ", file)
  }
  # For each header, the sequence spans the following lines up to (but not
  # including) the next header; the last record runs to end of file.
  s <- data.frame(ind = ind, from = ind + 1, to = c((ind - 1)[-1], length(fasta)))
  # Concatenate the (possibly wrapped) sequence lines of each record
  seqs <- rep(NA_character_, length(ind))
  for (i in seq_along(ind)) {
    seqs[i] <- paste(fasta[s$from[i]:s$to[i]], collapse = "")
  }
  # Strip only the leading ">" from each header (sub, not gsub, so any
  # further ">" characters inside the header text are preserved).
  DF <- data.frame(name = sub("^>", "", fasta[ind]), sequence = seqs)
  # Return the data frame as a result object from the function
  return(DF)
}
# Function to load PWM matrix
# Build a sequence-by-TF feature matrix of motif-hit significances from FIMO output.
#
# Args:
#   motif_meta_fn: path to the motif metadata file linking PWM IDs to TF symbols
#                  (csv; format depends on `db`).
#   fimo_fn:       path to the FIMO results file (table for db=1, tsv for db=2).
#   db:            metadata format version; 1 (legacy, backward compatibility
#                  only) or 2 (default).
#
# Returns: a tibble with one row per sequence ('id' column first) and one column
#   per TF holding -log10(p) of the best motif hit (0 where no hit was found).
#
# NOTE(review): uses stringr functions (str_replace_all / str_remove /
# str_replace) although stringr is not attached in this document's library
# chunk — confirm stringr is loaded in the session.
get_pwm_feature_matrix <- function(motif_meta_fn, fimo_fn, db = 2) {
  # validate args
  valid_dbs <- 1:2
  if (!db %in% valid_dbs)
    stop('Invalid db (database version). Please use db=1 (maintained for backward compatibility only) or db=2')
  # db=1 is maintained for backward compatibility only
  if (db == 1) {
    # read in motif metadata
    motif_meta <- read.csv(motif_meta_fn)
    # check whether motif metadata contain essential annotations
    if (!all(c('PWM.ID', 'Cognate.TF') %in% colnames(motif_meta))) {
      message('The motif metadata file does not contain the essential columns PWM.ID and Cognate.TF')
    }
    motif_minimal <- motif_meta[, c('PWM.ID', 'Cognate.TF')]
    # load fimo output --> extract motif id, sequence id and p-value
    df <- read.table(fimo_fn)
    df <- df[, c(1, 2, 7)]
    colnames(df) <- c('PWM.ID', 'seqid', 'pval')
    # add TF id
    df <- merge(df, motif_minimal, by = 'PWM.ID')
    # group motif hits by sequence id
    l <- split(df, df[['seqid']])
    # multiple PWM and multiple hits possible. Reduce hits to one per TF, keeping best p-val only
    l <- lapply(l, function(x) {
      x_by_tf <- split(x, x[['Cognate.TF']], drop = TRUE)
      x_by_tf <- lapply(x_by_tf, function(y) y[which.min(y$pval), ])
      do.call('rbind', x_by_tf)
    })
    # initialize feature matrix; default p-value 1 becomes 0 after -log10 (no hit)
    n_tf <- motif_minimal[['Cognate.TF']] %>%
      unique %>%
      length
    n_seq <- length(l)
    pwm <- matrix(1, nrow = n_seq, ncol = n_tf)
    colnames(pwm) <- (motif_minimal[['Cognate.TF']] %>% unique)
    # replace :: from names of composite motifs
    colnames(pwm) <- str_replace_all(colnames(pwm), '::', '_')
    # fill in feature matrix
    for (i in 1:n_seq) {
      pwm[i, l[[i]][['Cognate.TF']]] <- l[[i]]$pval
    }
    # -log10 transform
    pwm <- -1 * log10(pwm)
    # coerce to tib and return
    # BUGFIX: the final select() was a dangling statement (not part of the
    # pipe), which errored at runtime and never put 'id' first; pipe it.
    tib_fimo <- as_data_frame(pwm) %>%
      mutate(id = names(l)) %>%
      dplyr::select(id, everything())
  }
  # db = 2 (default)
  else {
    # load metadata
    tib_meta <- read_csv(motif_meta_fn) %>%
      # extract tf symbol from motif id (Cognate_TF unsafe, it can be empty) and replace :: occurrences
      mutate(tf_symbol = str_remove(ID, '_[0-9]*'),
             tf_symbol = str_replace(tf_symbol, '::', '_')) %>%
      dplyr::select(motif_id = `PWM ID`, tf_symbol)
    # load fimo results
    tib_fimo <- read_tsv(fimo_fn) %>%
      # extract motif id, sequence id and p-value
      dplyr::select(motif_id, sequence_name, pval = `p-value`)
    # add tf symbol to fimo results
    tib_fimo <- tib_fimo %>%
      left_join(tib_meta, by = 'motif_id') %>%
      # remove hits with missing motif id (composite pwms)
      filter(!is.na(tf_symbol))
    # select best hit for each motif and sequence
    tib_fimo <- tib_fimo %>%
      group_by(sequence_name, tf_symbol) %>%
      dplyr::slice(which.min(pval)) %>%
      ungroup()
    # spread into feature matrix
    tib_fimo <- tib_fimo %>%
      mutate(pval = -1 * log10(pval)) %>%
      dplyr::select(-motif_id) %>%
      spread(key = tf_symbol, value = pval, fill = 0, drop = TRUE) %>%
      # perform cosmetics on the id
      mutate(id = sequence_name) %>%
      dplyr::select(-c(sequence_name)) %>%
      dplyr::select(id, everything())
  }
  return(tib_fimo)
}
```
### Data import
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Import reference sequences
ref_seq <- ReadFasta("/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/library_design/output/mt20200619_oligo_pool_deep.fasta")
# Remove adapters from reference sequence (cause these are not in the sequencing data)
ref_seq$sequence <- gsub("CGGAGCGAACCGAGTTAG", "", ref_seq$sequence)
ref_seq$sequence <- gsub("CATCGTCGCATCCAAGAG", "", ref_seq$sequence)
# Split up in insert and barcode part
## In my case, the barcode should be the last 12 bases of the sequence
ref_seq$barcode <- gsub(".*([A-Z]{12})$", "\\1", ref_seq$sequence)
ref_seq$insert <- gsub("(.*)[A-Z]{12}$", "\\1", ref_seq$sequence)
# Import sequencing files
pDNA_seq <- read_tsv("/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/pDNA_insert_seq/processed/6185_1_pMT06_insert_counts.tsv", col_names = c("sequence", "number"))
# Split up in insert and barcode part
## In my case, the barcode should be the last 12 bases of the sequence
pDNA_seq$barcode <- gsub(".*([A-Z]{12})$", "\\1", pDNA_seq$sequence)
pDNA_seq$insert <- gsub("(.*)[A-Z]{12}$", "\\1", pDNA_seq$sequence)
# Calculate reads per million
pDNA_seq$rpm <- ave(pDNA_seq$number, FUN = function(x) ((x+1)/sum(x)) *1e6)
```
## Analysis
### What is the barcode distribution of mapped vs. unmapped for both TFs?
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Match barcodes with original data
pDNA_seq_bc <- pDNA_seq %>% dplyr::select(barcode, rpm) %>% unique() %>% mutate(id = "pDNA")
# Only keep highest barcode values - a bit of cheating here
pDNA_seq_bc <- pDNA_seq_bc[order(pDNA_seq_bc$barcode, -abs(pDNA_seq_bc$rpm) ), ]
pDNA_seq_bc <- pDNA_seq_bc[ !duplicated(pDNA_seq_bc$barcode), ]
ref_seq_bc <- ref_seq %>% dplyr::select(name, barcode)
match_bc <- merge(ref_seq_bc, pDNA_seq_bc, by = "barcode", all = T)
match_bc <- match_bc[!is.na(match_bc$rpm),]
match_bc$TF <- gsub("_.*", "\\1", match_bc$name)
match_bc$match <- "true"
match_bc$match[is.na(match_bc$name)] <- "false"
match_bc$conf <- "high"
match_bc$conf[match_bc$rpm < 10] <- "low"
# Visualize
ggplot(match_bc, aes(x = TF, y = rpm)) +
geom_quasirandom() +
theme_bw() +
xlab("reporter matches to:") +
ylab("reads per million") +
theme(text = element_text(size = 14)) +
ylim(0,300)
```
### Correlate to GC content
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Load reference file
ref_seq_2 <- seqinr::read.fasta("/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr//data/library_design/output/mt20200619_oligo_pool_deep.fasta")
# Compute gc contents
gc <- compute_GC(ref_seq_2)
# Plot gc distribution
density <- density(gc$GC.content)
plot_ly(x = ~density$x, y = ~density$y, type = 'scatter', mode = 'lines', fill = 'tozeroy') %>%
layout(xaxis = list(title = 'GC content'),
yaxis = list(title = 'Density'))
match_seq <- match_bc[!is.na(match_bc$name),]
gc <- gc %>% rownames_to_column(var = "name")
gc <- merge(gc, match_seq)
gc$count_range <- round_any(gc$rpm, 1)
gc$gc_range <- round_any(gc$GC.content, 0.01)
gc$coord <- paste(gc$count_range, gc$gc_range, sep = "_")
gc$count <- as.numeric(ave(gc$coord, gc$coord, FUN = function(x) length(x)))
plot_ly(data = gc, x = ~GC.content, y = ~rpm, color = ~count, colors = "Oranges") %>%
layout(xaxis = list(title = 'GC content'),
yaxis = list(title = 'reads per million',
range=c(0,230)),
shapes=list(type='line', x0= 0.35, x1= 0.55, y0=10, y1=10, line=list(dash='dot', width=2)))
plot_ly(data = gc, x = ~GC.content, y = ~rpm) %>%
layout(xaxis = list(title = 'GC content'),
yaxis = list(title = 'reads per million',
range=c(0,230)),
shapes=list(type='line', x0= 0.35, x1= 0.55, y0=10, y1=10, line=list(dash='dot', width=2))) %>%
add_markers(alpha = 0.2)
```
### Plot how many barcodes are found in pDNA data
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Match barcodes with original data
ref_seq_bc <- ref_seq %>% dplyr::select(name, barcode)
ref_seq_bc$TF <- gsub("_.*", "\\1", ref_seq_bc$name)
pDNA_seq_bc <- pDNA_seq %>% dplyr::select(barcode, rpm) %>% unique()
## Identify the unmapped fraction
bc_fraction <- data.frame("bc_fraction" = 0)
for (i in 0:15) {
pDNA <- pDNA_seq_bc$barcode[pDNA_seq_bc$rpm >= i]
bc_fraction[i+1,] <- nrow(ref_seq_bc[ref_seq_bc$barcode %in% pDNA,])/
nrow(ref_seq_bc)
}
bc_rep_df <- data.frame("rpm" = 0:15,
"bc_fraction" = bc_fraction*100)
## How many reads match to designed barcodes?
bc_reads <- data.frame("bc_reads" = 0)
for (i in 0:15) {
pDNA <- pDNA_seq_bc[pDNA_seq_bc$rpm >= i,]
bc_reads[i+1,] <- sum(pDNA$rpm[pDNA$barcode %in% ref_seq_bc$barcode], na.rm = T)/
sum(pDNA$rpm, na.rm = T)
}
bc_rep_df <- data.frame(bc_rep_df,
"bc_reads" = bc_reads*100)
# ## How many mis-alignments are there?
# # Select only matched barcodes
# pDNA_seq_insert <- pDNA_seq[pDNA_seq$barcode %in% ref_seq$barcode,]
#
# # Add bc-id to the barcodes found in pDNA
# ref_seq_insert <- ref_seq %>% dplyr::select(barcode, name) %>% setnames("name", "bc-match")
# pDNA_seq_insert <- merge(pDNA_seq_insert, ref_seq_insert)
#
# # Add insert-id to the inserts found in pDNA
# ref_seq_insert <- ref_seq %>% dplyr::select(insert, name) %>% setnames("name", "insert-match")
# ref_seq_insert$`insert-match` <- gsub("(.*)_bc_[0-9]$", "\\1", ref_seq_insert$`insert-match`)
# pDNA_seq_insert <- merge(pDNA_seq_insert, ref_seq_insert)
#
# # Count occurences where bc matches insert
# pDNA_seq_insert$`bc-match` <- gsub("(.*)_bc_[0-9]$", "\\1", pDNA_seq_insert$`bc-match`)
# pDNA_seq_insert <- pDNA_seq_insert %>% unique()
# for (i in 1:nrow(pDNA_seq_insert)) {
# pDNA_seq_insert$match[i] <- identical(pDNA_seq_insert$`bc-match`[i], pDNA_seq_insert$`insert-match`[i])
#
# }
#
# seq_align <- data.frame("seq_align" = 0)
# for (i in 0:15) {
# seq_align[i+1,] <- sum(pDNA_seq_insert$rpm[pDNA_seq_insert$rpm >= i & pDNA_seq_insert$match == T], na.rm = T)/
# sum(pDNA_seq_insert$rpm[pDNA_seq_insert$rpm >= i], na.rm = T)
# }
# bc_rep_df <- data.frame(bc_rep_df,
# "seq_align" = seq_align*100)
#c("#1B998B", "#2D3047", "#FF9B71", "#ECDD7B")
# The line parameter inside add_trace contains a shape parameter. Set this to spline for smoothed connectors
plot_ly(bc_rep_df, x = ~rpm) %>%
add_trace(y = ~bc_fraction, name = 'bc present', type = 'scatter', mode = 'lines+markers',
line = list(shape = 'spline', color = '#FF9B71', width= 3, dash = 'dash'),
marker = list(symbol = "circle-dot", color = '#2D3047',size = 8),
connectgaps = TRUE) %>%
add_trace(y = ~bc_reads, name = 'matched bc reads', type = 'scatter', mode = 'lines+markers',
line = list(shape = 'spline', color = '#1B998B', width= 3, dash = 'dash'),
marker = list(symbol = "circle-dot", color = '#2D3047',size = 8),
connectgaps = TRUE) %>%
# add_trace(y = ~seq_align, name = 'matched bc reads that match insert', type = 'scatter', mode = 'lines+markers',
# line = list(shape = 'spline', color = '#ECDD7B', width= 3, dash = 'dash'),
# marker = list(symbol = "circle-dot", color = '#2D3047',size = 8),
# connectgaps = TRUE) %>%
layout(xaxis = list(title = 'rpm cutoff'), yaxis = list(title = 'barcode number and reads detected (%)', range = c(60, 103)),
legend = list(x =0.75, y = 0.75)) %>%
config(displayModeBar = TRUE)
```
## Match complete sequences using vMatch
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
## Adapted from Noud: Use vmatchPattern to match all sequenced sequences to designed sequences
# Keep only sequences with reasonable coverage (> 3 reads per million)
pDNA_seq_filt <- pDNA_seq[pDNA_seq$rpm > 3,]
count <- 0
### Iterate for every unique sequence
for (i in unique(pDNA_seq_filt$sequence)) {
### Match each sequence against the designed sequence, allowing for 2 mismatches
match <-
vmatchPattern(pattern = pDNA_seq_filt$sequence[pDNA_seq_filt$sequence == i], subject = ref_seq$sequence,
max.mismatch = 2, with.indels = FALSE)
### Save the matched sequence row
# elementNROWS(match) == 1 marks reference sequences hit exactly once;
# a unique hit (length 1) identifies the designed insert this read matches
match_length <- which(elementNROWS(match) == 1)
### Copy the row to the dataframe
# Assigning into the not-yet-existing 'match' column creates it (NA-filled)
# on first assignment; unmatched sequences therefore stay NA
if (length(match_length) == 1){
pDNA_seq_filt$match[pDNA_seq_filt$sequence == i] <- match_length
}
### Keep track of the progress
# length(i) is always 1 here (i is a single string), so this counts iterations
count <- count + length(i)
percentage <- round(100*(count/length(unique(pDNA_seq_filt$sequence))),3)
# Print only at whole-percent milestones to limit console output
if (percentage %% 1 == 0) {
print(paste(percentage, "% progress", sep = ""))
}
}
# Flag rows by whether a unique design match was found (NA in 'match' = no)
pDNA_seq_filt$match_id <- "Yes"
pDNA_seq_filt$match_id[is.na(pDNA_seq_filt$match)] <- "No"
# Plot the distribution of sequences that match vs. don't match
ggplot(pDNA_seq_filt, aes(y = rpm, x = match_id, color = match_id)) +
geom_quasirandom() +
geom_boxplot(alpha = 0.4) +
scale_color_brewer(palette = "Dark2") +
theme_bw()
```
## How many raw complete sequences match with the design?
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Match barcodes with original data
ref_seq_seq <- ref_seq %>% dplyr::select(name, sequence)
pDNA_seq_seq <- pDNA_seq %>% dplyr::select(sequence,rpm) %>% unique()
## Identify the unmapped fraction
seq_fraction <- data.frame("seq_fraction" = 0)
for (i in 0:15) {
pDNA <- pDNA_seq_seq$sequence[pDNA_seq_seq$rpm >= i]
seq_fraction[i+1,] <- nrow(ref_seq_seq[ref_seq_seq$sequence %in% pDNA,])/
nrow(ref_seq_seq)
}
seq_rep_df <- data.frame("rpm" = 0:15,
"seq_fraction" = seq_fraction*100)
## How many reads match to designed inserts?
seq_reads <- data.frame("seq_reads" = 0)
for (i in 0:15) {
pDNA <- pDNA_seq_seq[pDNA_seq_seq$rpm >= i,]
seq_reads[i+1,] <- sum(pDNA$rpm[pDNA$sequence %in% ref_seq_seq$sequence], na.rm = T)/
sum(pDNA$rpm, na.rm = T)
}
seq_rep_df <- data.frame(seq_rep_df,
"seq_reads" = seq_reads*100)
#c("#1B998B", "#2D3047", "#FF9B71", "#ECDD7B")
# The `line` parameter inside add_trace contains a shape parameter; set it to
# 'spline' for smoothed connectors.
# Interactive plot: % of designed inserts detected and % of matching reads as
# a function of the rpm cutoff (both traces read from seq_rep_df).
plot_ly(seq_rep_df, x = ~rpm) %>%
add_trace(y = ~seq_fraction, name = 'insert present', type = 'scatter', mode = 'lines+markers',
line = list(shape = 'spline', color = '#FF9B71', width= 3, dash = 'dash'),
marker = list(symbol = "circle-dot", color = '#2D3047',size = 8),
connectgaps = TRUE) %>%
add_trace(y = ~seq_reads, name = 'matched insert reads', type = 'scatter', mode = 'lines+markers',
line = list(shape = 'spline', color = '#1B998B', width= 3, dash = 'dash'),
marker = list(symbol = "circle-dot", color = '#2D3047',size = 8),
connectgaps = TRUE) %>%
layout(xaxis = list(title = 'rpm cutoff'), yaxis = list(title = 'insert number and reads detected (%)', range = c(60, 103)),
legend = list(x =0.75, y = 0.75)) %>%
config(displayModeBar = TRUE)
```
# Identify those barcodes that are attached to a wrong insert
Barcodes that are clearly wrongly assigned can be re-assigned to the correct insert.
Barcodes that are attached to a mixed population of inserts should be excluded from any analysis in which this plasmid library was used.
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# mismatch_df <- pDNA_seq_seq[pDNA_seq_seq$match == F,] %>% dplyr::select(barcode, number) %>% unique()
# mismatch_df <- ddply(mismatch_df,~barcode, summarise, number = sum(number))
#
# match_df <- pDNA_seq_seq[pDNA_seq_seq$match == T,] %>% dplyr::select(barcode, number) %>% unique()
# match_df <- ddply(match_df,~barcode, summarise, number = sum(number))
# setnames(match_df, "number", "match")
# setnames(mismatch_df, "number", "mismatch")
# matching_df <- merge(match_df, mismatch_df, all = T)
# matching_df$match[is.na(matching_df$match)] <- 0.1
# matching_df$mismatch[is.na(matching_df$mismatch)] <- 0.1
#
# # Calculate match/mismatch ratio
# matching_df$ratio <- matching_df$match / matching_df$mismatch
# matching_df$type <- "good matching"
# matching_df$type[log2(matching_df$ratio) < 5 & log2(matching_df$ratio) >= -5] <- "unclear matching"
# matching_df$type[log2(matching_df$ratio) < -5] <- "incorrect matching"
#
# # Plot match/mismatch distribution
# ggplot(matching_df,
# aes(x = match, y = mismatch, colour = type), alpha = 0.5) +
# geom_point(size = 1) +
# xlim(0,1000) + ylim(0,1000) +
# xlab("number of reads: barcodes match with insert") +
# ylab("number of reads: barcodes do not match insert") +
# scale_color_manual(values = c("#1B998B", "#2D3047", "#FF9B71")) +
# theme_bw() + theme(legend.position = c(.8, .8))
#
#
# ggplot(matching_df, aes(x = "match/mismatch ratio", y = log2(ratio), colour = type), alpha = 0.5) +
# geom_quasirandom() + theme_bw() + ylim(-10,15) +
# scale_color_manual(values = c("#1B998B", "#2D3047", "#FF9B71")) +
# theme(legend.position = c(.8, .8))
#
# # Fraction of barcodes with match/mismatch
# n_match <- nrow(matching_df[matching_df$mismatch == 0.1,])
# n_nomatch <- nrow(matching_df[matching_df$mismatch > 0.1,])
#
# # Create donut chart
# data <- data.frame(
# lbls=c("matched:", "unmatched:"),
# count=c(n_match, n_nomatch)
# )
#
# # Compute percentages
# data$fraction <- data$count / sum(data$count)
# data$percentage <- data$fraction * 100
#
#
# # Compute the cumulative percentages (top of each rectangle)
# data$ymax <- cumsum(data$fraction)
#
#
# # Compute the cumulative percentages (top of each rectangle)
# data$ymax <- cumsum(data$fraction)
#
# # Compute the bottom of each rectangle
# data$ymin <- c(0, head(data$ymax, n=-1))
#
# # Compute label position
# data$labelPosition <- (data$ymax + data$ymin) / 2
#
# # Compute a good label
# data$label <- paste0(data$lbls, "\n", round(data$percentage), "%")
#
# # Make the plot
# ggplot(data, aes(ymax=ymax, ymin=ymin, xmax=4, xmin=3, fill=lbls)) +
# geom_rect() +
# geom_text(x=2, aes(y=labelPosition, label=label, color=lbls), size=5) + # x here controls label position (inner / outer)
# scale_fill_manual(values = c("#1B998B", "#2D3047")) +
# scale_color_manual(values = c("#1B998B", "#2D3047")) +
# labs(title = "percentage at least 1 bc wrongly attached to its insert") +
# coord_polar(theta="y") +
# xlim(c(0, 4)) +
# theme_void() +
# theme(legend.position = "none")
```
# Barcode re-evaluation
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# # Barcodes with a mixed match of correct and incorrect insert need to be excluded from the analysis
# matching_df_exclude <- matching_df[log2(matching_df$ratio) < 5 & log2(matching_df$ratio) >= -5,]
#
# # Barcodes with incorrect matching can be assigned to new insert if only 1 insert has all barcodes
# matching_df_incorrect <- matching_df[log2(matching_df$ratio) < -5,]
#
#
#
# # Overview of type of barcode attachment distributio
# # Fraction of barcodes with match/mismatch
# n_total <- nrow(ref_seq)
# n_match <- nrow(matching_df)
# n_nomatch <- n_total - n_match
# n_correct <- nrow(matching_df[log2(matching_df$ratio) >= 5,])
# n_exclude <- nrow(matching_df[log2(matching_df$ratio) < 5 & log2(matching_df$ratio) >= -5,])
# n_incorrect <- nrow(matching_df[log2(matching_df$ratio) < -5,])
#
#
# dat <- data.frame(
# x = rep("x", each = 4),
# condition = c("4. not enough data","3. correct match", "2. ambiguous match", "1. incorrect match"),
# size = c(n_nomatch, n_correct, n_exclude, n_incorrect),
# stringsAsFactors = T
# )
#
# dat$percentage <- 100*(round(dat$size/n_total,2))
#
# dat$pos <- c(16.5, 65, 98, 98.5)
#
# ggplot(dat, aes(fill=condition, y=percentage, x = x)) +
# geom_bar(position="stack", stat = "identity") +
# theme_bw()+scale_fill_grey()+
# geom_text(data=dat,
# aes(x = x, y = pos,
# label = paste0(percentage,"%")), size=4) +
# theme(axis.title.x=element_blank(),
# axis.text.x=element_blank(),
# axis.ticks.x=element_blank())
#
```
# Investigate the mutational load of the barcodes with a good match
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# ## Only select barcodes from design in pDNA data
# matching_df_correct <- matching_df[matching_df$ratio > 2,]
# pDNA_seq_insert <- pDNA_seq[pDNA_seq$barcode %in% matching_df$barcode,] %>%
# dplyr::select(barcode, insert, number) %>%
# setnames("insert", "pDNA")
# ref_seq_insert <- ref_seq %>% dplyr::select(barcode, insert) %>% setnames("insert", "ref")
# match_insert <- merge(pDNA_seq_insert, ref_seq_insert, by = "barcode")
# match_insert$lv <- levenshtein.distance(match_insert$pDNA, match_insert$ref)
#
# ## Read distribution vs. TF reporter length
# ggplot(data = match_insert, aes(x = lv)) +
# geom_histogram(color = "#2D3047") + xlab("Levenshtein distance") +
# labs(title = "Sum pDNA counts vs. at levenshtein distances") +
# theme_classic() +
# theme(axis.text.x = element_text(angle = 45, hjust = 1, vjust = 1, size = 12),
# axis.text.y = element_text(size = 12)) +
# ylab("read count")
# ```
#
#
# # Investigate mutational load of only Trp53 constructs (as they are especially complex to PCR up)
# ```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# ## Only select barcodes from design in pDNA data
# matching_df_correct <- matching_df[matching_df$ratio > 2,]
# pDNA_seq_insert <- pDNA_seq[pDNA_seq$barcode %in% matching_df$barcode,] %>%
# dplyr::select(barcode, insert, number) %>%
# setnames("insert", "pDNA")
# p53_bc <- ref_seq$barcode[grep("Trp53", ref_seq$name)]
# pDNA_seq_insert <- pDNA_seq_insert[pDNA_seq_insert$barcode %in% p53_bc,]
# ref_seq_insert <- ref_seq %>% dplyr::select(barcode, insert) %>% setnames("insert", "ref")
# match_insert <- merge(pDNA_seq_insert, ref_seq_insert, by = "barcode")
# match_insert$lv <- levenshtein.distance(match_insert$pDNA, match_insert$ref)
#
# ## Read distribution vs. TF reporter length
# ggplot(data = match_insert, aes(x = lv)) +
# geom_histogram(color = "#2D3047") + xlab("Levenshtein distance of designed insert vs sequenced insert") +
# labs(title = "Trp53 reporters: is the sequence correct in the pDNA?") +
# theme_classic() +
# theme(axis.text.x = element_text(angle = 45, hjust = 1, vjust = 1, size = 12),
# axis.text.y = element_text(size = 12)) +
# ylab("reporter counts")
```
## Exporting data
```{r}
# # Export barcodes that are attached to multiple inserts
# bc_exclude <- matching_df_exclude$barcode %>% unique()
# write.csv(bc_exclude, "/DATA/usr/m.trauernicht/projects/tf_activity_reporter/data/SuRE_TF_1/pDNA_seq/bc_exclude.csv")
#
# # Export barcodes that are attached to the wrong insert
# bc_replace <- pDNA_seq_incorrect %>% dplyr::select(barcode, `bc-match`, `insert-match`) %>% unique()
# write.csv(bc_replace, "/DATA/usr/m.trauernicht/projects/tf_activity_reporter/data/SuRE_TF_1/pDNA_seq/bc_replace.csv")
```
# Session Info
```{r}
# Report total run time, working directory, date and loaded package versions
# for reproducibility of this rendered report.
paste("Run time: ",format(Sys.time()-StartTime))
getwd()
date()
sessionInfo()
```
<file_sep>## Script to generate random barcodes
## Generate 12-nt DNA barcodes with pairwise SeqLev distance >= 3, filtering
## homotriplets, extreme GC content and self-complementary sequences.
# library() fails loudly if the package is missing, unlike require(), which
# only warns and would let create.dnabarcodes() fail with a confusing error.
library(DNABarcodes)
# NOTE(review): create.dnabarcodes() is stochastic; add set.seed() if a
# reproducible barcode set is required.
barc <- create.dnabarcodes(n = 12, dist = 3, filter.triplets = T, metric = "seqlev", filter.gc = T, filter.self_complementary = T, cores = 24)
# Persist the barcode list for downstream library design.
write.csv(barc, "barc.csv")
<file_sep>---
title: "genomic P53 response element scoring"
author:
- name: "<NAME>"
email: "<EMAIL>"
affiliation: "Netherlands Cancer Institute - van Steensel lab"
date: '`r format(Sys.time(), "%d/%m/%Y")`'
output:
html_document:
theme: united
highlight: pygments
fig_caption: yes
code_folding: hide
df_print: kable
toc: true
toc_depth: 4
toc_float:
collapsed: false
---
---
### Aim
I want to score the affinity of genomic P53 response elements using the same algorithm that was used to create the P53 reporters. I then want to test whether the affinity or the number of P53 binding sites in the genome affects transcriptional activity (measured by MPRA or target gene expression).
---
## Setup {.tabset}
<!-- little HTML script to do indentation of the table of contents -->
<script>
$(document).ready(function() {
$items = $('div#TOC li');
$items.each(function(idx) {
num_ul = $(this).parentsUntil('#TOC').length;
$(this).css({'text-indent': num_ul * 10, 'padding-left': 0});
});
});
</script>
```{css, echo = FALSE}
div.sourceCode {
overflow-x: hidden;
}
```
### Libraries
```{r setup, out.width= "80%", fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Chunk defaults: show code in the rendered report.
knitr::opts_chunk$set(echo = TRUE)
# Record start time so total run time can be reported at the end.
StartTime <-Sys.time()
# 8-digit Date tag (YYYYMMDD), e.g. "20210412", used in output file names.
# format() with an explicit pattern replaces the original
# substr(gsub("-","",Sys.time()),1,8) round-trip through the default
# character representation; both yield the same 8-digit string.
Date <- format(Sys.time(), "%Y%m%d")
# libraries:
library(GenomicRanges)
library(BSgenome.Hsapiens.UCSC.hg38)
library(GenomicFeatures)
library(BSgenome)
library(spgs)
library(dplyr)
library(ggplot2)
library(reshape)
library(ggbeeswarm)
library(biomaRt)
library(readr)
library(stringr)
library(maditr)
library(phylotools)
library(plyr)
library(tidyr)
library(ggpubr)
```
### Custom functions
```{r, out.width= "80%", fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
SetFileName <- function(filename, initials) {
  # Build a date-tagged file name: <initials><Date><filename>.
  # Relies on the global `Date` (8-digit tag) set in the setup chunk.
  #
  # Args:
  #   filename: file name with extension, e.g. "counts.csv".
  #   initials: author/prefix tag, e.g. "mt".
  # Returns: a single character string.
  #
  # Bug fix: the original called substitute() on both arguments, which
  # returns the unevaluated symbol when a *variable* (rather than a string
  # literal) is passed, so the variable's name — not its value — ended up in
  # the file name. Evaluating the arguments normally fixes that.
  paste0(initials, Date, filename)
}
ReadFasta <- function(file) {
  # Read a (possibly multi-line-per-record) FASTA file into a data frame
  # with columns `name` (header without the leading ">") and `sequence`.
  #
  # Args:
  #   file: path to a FASTA file.
  # Returns: a data.frame with character columns (no factors), one row per
  #   FASTA record; empty (0-row) data frame if the file has no headers.
  # Read the file line by line
  fasta <- readLines(file)
  # Identify header lines. Anchored "^>" avoids false hits on ">" appearing
  # elsewhere in a line (the original matched ">" anywhere).
  ind <- grep("^>", fasta)
  if (length(ind) == 0) {
    # No records: return an empty, correctly-typed data frame instead of
    # failing inside the indexing below.
    return(data.frame(name = character(0), sequence = character(0),
                      stringsAsFactors = FALSE))
  }
  # For each header, the sequence spans from the next line up to the line
  # before the next header (or the end of the file).
  from <- ind + 1
  to <- c(ind[-1] - 1, length(fasta))
  # Concatenate multi-line sequences into one string per record.
  seqs <- vapply(seq_along(ind),
                 function(i) paste(fasta[from[i]:to[i]], collapse = ""),
                 character(1))
  # stringsAsFactors = FALSE makes behaviour identical on R < 4.0, where the
  # default would silently create factor columns.
  data.frame(name = sub("^>", "", fasta[ind]),
             sequence = seqs,
             stringsAsFactors = FALSE)
}
# Project-wide ggplot theme: pubr look with bordered panels, top legend,
# thin grey major grid lines and light-grey facet strips.
theme_classic_lines <- function() {
  base <- theme_pubr(border = T, legend = "top")
  tweaks <- theme(
    panel.grid.major = element_line(colour = "#adb5bd", size = 0.1),
    strip.background = element_rect(fill = "#ced4da")
  )
  base + tweaks
}
# Register it as the default theme for every plot in this document.
theme_set(theme_classic_lines())
```
### Load P53-reporter affinities
Aim: Import affinity scores from my P53 reporter library.
```{r, fig.width=4, fig.height=4, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Import data
# Per-position P53 affinity scores of the reporter library; the commented
# code below references the loaded object `per.pos`.
load("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/p53_affinity_scoring/p53_per_position.Rda")
# Put the scores of all TSSs into one large data frame - only keep the forward matching
# reporter_scores <- data.frame(matrix(ncol = 215, nrow = 1))
# names(reporter_scores) <- 1:215
#
# for (i in unique(names(per.pos$Scores))) {
#   x <- data.frame(data.frame(per.pos$Scores[i]))[1,]
#   names(x) <- 1:ncol(x)
#   rownames(x) <- i
#   reporter_scores <- rbind.fill(reporter_scores, x)
# }
# reporter_scores <- reporter_scores[-1,]
# rownames(reporter_scores) <- names(per.pos$Scores)
# reporter_scores <- reporter_scores %>%
#   rownames_to_column(var = "reporter_id")
# Pre-computed result of the (commented-out) aggregation above.
load("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/p53_affinity_scoring/reporter_affinity_scores.RData")
```
### Extract P53 RE sequences
Aim: Import sequences that were previously used in P53 MPRAs. Those I can then scan using Chaitanya's model.
```{r ChIP_extraction, fig.width=4, fig.height=4, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Curated list of P53 targets (Nguyen et al. 2018, NAS, Supplementary Table ST8)
# Centre each annotated RE and expand to a fixed 1 kb window around the centre.
nguyen_p53_REs <- read.csv2("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/p53_affinity_scoring/ST8-p53 cistrome targets.csv") %>%
mutate(p53RE.center = round((p53RE.start + p53RE.end) / 2),
p53RE.start = p53RE.center - 500,
p53RE.end = p53RE.center + 500,
width = p53RE.end - p53RE.start) %>%
dplyr::select('chr' = Chr, 'start' = p53RE.start, 'end' = p53RE.end, 'gene' = Current.p53.cistrome.Gene.Symbol, width)
# Fetch the hg19 genomic sequence for each 1 kb window.
gr <- makeGRangesFromDataFrame(nguyen_p53_REs)
nguyen_p53_REs$seq <- getSeq(BSgenome.Hsapiens.UCSC.hg19::Hsapiens, names = gr, as.character = T)
# Keep a "nguyen_<gene>" identifier plus the sequence.
nguyen_p53_REs <- nguyen_p53_REs %>%
mutate(name = paste("nguyen", gene, sep = "_")) %>%
dplyr::select(name, seq)
# Curated list of P53 targets (Haran et al. 2018): parse the RE coordinates,
# take a 1 kb window centred on each RE, fetch the hg19 sequence and keep
# name + sequence. dplyr::mutate() evaluates its expressions sequentially,
# so the chained reassignments below behave exactly like the original
# one-mutate-per-step version.
haran_p53_REs <- read.csv2("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/p53_affinity_scoring/haran_p53RE_curated.csv") %>%
  dplyr::select("gene" = Gene.site.name, RE.chromosomal.location) %>%
  mutate(
    # Clean up typos and stray spaces in the "chrN:start-end" strings.
    RE.chromosomal.location = gsub("crr", "chr", RE.chromosomal.location),
    RE.chromosomal.location = gsub(" -", "-", RE.chromosomal.location),
    RE.chromosomal.location = gsub(" ", "-", RE.chromosomal.location),
    # Parse chromosome, start and end out of the cleaned location string.
    chr = gsub("(chr[A-z0-9]{1,2}).*", "\\1", RE.chromosomal.location),
    start = as.numeric(gsub(".*:(.*)-.*", "\\1", RE.chromosomal.location)),
    end = as.numeric(gsub(".*-(.*)", "\\1", RE.chromosomal.location)),
    # Re-centre each RE and expand to a fixed 1 kb window.
    center = round((start + end) / 2),
    start = center - 500,
    end = center + 500,
    width = end - start
  ) %>%
  dplyr::select(gene, chr, start, end, width) %>%
  unique()
# NOTE(review): hard-coded removal of row 251 — presumably a malformed entry;
# confirm it still targets the intended record if the input file changes.
haran_p53_REs <- haran_p53_REs[-251,]
# Fetch the hg19 genomic sequence for each window.
gr <- makeGRangesFromDataFrame(haran_p53_REs)
haran_p53_REs$seq <- getSeq(BSgenome.Hsapiens.UCSC.hg19::Hsapiens, names = gr, as.character = T)
# Harmonise to the nguyen table layout: "haran_<gene>" plus sequence.
haran_p53_REs <- haran_p53_REs %>%
  mutate(name = paste("haran", gene, sep = "_")) %>%
  dplyr::select(name, seq)
# Combine both curated sets and drop duplicated sequences.
p53_REs <- rbind(nguyen_p53_REs, haran_p53_REs)
p53_REs <- p53_REs[!duplicated(p53_REs$seq),]
# Save to score sequences
# Assemble the full export set of sequences to be scored, starting from the
# curated genomic REs and appending reporter/library sequences below.
p53_REs_export <- p53_REs
# Add other sequences: pMT02 reporters
pMT02_P53 <- ReadFasta("/DATA/usr/m.trauernicht/projects/SuRE-TF/data/library_design/output/mt20210111_oligo_pool.fasta") %>%
filter(str_detect(name, "Trp53")) %>%
setnames("sequence", "seq") %>%
mutate(name = paste("pMT02", name, sep = "_"))
# Add other sequences: published reporter sequences
pMT09_P53 <- ReadFasta("/DATA/usr/m.trauernicht/projects/SuRE-TF/data/library_design/gen-2/mt20210114_oligo_pool_gen2.fasta") %>%
filter(str_detect(name, "P53")) %>%
setnames("sequence", "seq") %>%
mutate(name = paste("pMT09", name, sep = "_"))
# rbind.fill (plyr) pads columns missing on either side with NA.
p53_REs_export <- rbind.fill(p53_REs_export, pMT02_P53)
p53_REs_export <- rbind.fill(p53_REs_export, pMT09_P53)
# Add other sequences: regions from Younger et al.
younger_p53_REs <- read.csv2("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/p53_affinity_scoring/younger_et_al_genomic_p53_REs.csv") %>%
setnames("Sequence", "seq") %>%
mutate(name = paste("younger", Chrom, Start, sep = "_")) %>%
dplyr::select(name, seq)
p53_REs_export <- rbind.fill(p53_REs_export, younger_p53_REs)
# Add other sequences: regions from Aerts paper
# Two variants are read from the same file: the designed regions
# ("aerts_design", columns chr/start/stop) ...
aerts_p53_REs <- read.csv2("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/p53_affinity_scoring/aerts_P53_REs.csv") %>%
dplyr::select("name" = corresponding.peak.name, chr, start, stop) %>%
filter(str_detect(chr, "chr"))
gr <- makeGRangesFromDataFrame(aerts_p53_REs)
aerts_p53_REs$seq <- getSeq(BSgenome.Hsapiens.UCSC.hg19::Hsapiens, names = gr, as.character = T)
aerts_p53_REs <- aerts_p53_REs %>%
mutate(name = paste("aerts_design", name, sep = "_"))
# ... and the full peak regions ("aerts_peak", columns Chr/start.1/stop.1).
aerts_p53_REs_peak <- read.csv2("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/p53_affinity_scoring/aerts_P53_REs.csv") %>%
dplyr::select("name" = corresponding.peak.name, "chr" = Chr, "start" = start.1, "stop" = stop.1) %>%
filter(str_detect(chr, "chr"))
gr <- makeGRangesFromDataFrame(aerts_p53_REs_peak)
aerts_p53_REs_peak$seq <- getSeq(BSgenome.Hsapiens.UCSC.hg19::Hsapiens, names = gr, as.character = T)
aerts_p53_REs_peak <- aerts_p53_REs_peak %>%
mutate(name = paste("aerts_peak", name, sep = "_"))
aerts_p53_REs <- rbind.fill(aerts_p53_REs_peak, aerts_p53_REs)
aerts_p53_REs <- aerts_p53_REs %>%
dplyr::select(name, seq)
aerts_p53_REs <- aerts_p53_REs[!duplicated(aerts_p53_REs$seq),]
p53_REs_export <- rbind.fill(p53_REs_export, aerts_p53_REs)
# Normalise case so downstream sequence matching is case-insensitive.
p53_REs_export$seq <- toupper(p53_REs_export$seq)
# # Save file
# save(p53_REs_export, file = "/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/p53_affinity_scoring/mt20210412_p53_REs.RData")
```
### Import P53-RE scores
Aim: Import scored P53 response elements.
```{r, fig.width=10, fig.height=7, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Import data
# Per-position affinity scores for the scored response elements (`re.per.pos`).
load("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/p53_affinity_scoring/p53_REs_per_position.Rda")
# # Put the scores of all REs into one large data frame - only keep the forward matching
# RE_scores <- data.frame(matrix(ncol = 1502, nrow = 1))
# names(RE_scores) <- 1:1502
#
# count <- 0
# for (i in unique(names(re.per.pos$Scores))) {
#   x <- data.frame(data.frame(re.per.pos$Scores[i]))[1,]
#   names(x) <- 1:ncol(x)
#   RE_scores <- rbind.fill(RE_scores, x)
#   count <- count + length(i)
#   print(count)
#   RE_scores$RE[count+1] <- i
# }
#
# RE_scores <- RE_scores[-1,]
#
# save(RE_scores, file = "/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/p53_affinity_scoring/mt20200504_RE_scores_df.RData")
# Pre-computed result of the (commented-out) aggregation above: one row per
# RE, 1502 per-position affinity columns.
load("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/p53_affinity_scoring/mt20200504_RE_scores_df.RData")
# Per-RE maximum affinity across all scored positions (NAs ignored).
RE_scores[, "max"] <- apply(RE_scores[, 1:1502], 1, max, na.rm = T)
```
## Highlights: affinity scores of genomic P53-REs
```{r}
# Keep REs whose best position exceeds a minimal affinity, reshape to long
# format (one row per RE x position) and drop missing scores.
RE_scores_filt <- RE_scores %>%
filter(max > 0.000025) %>%
pivot_longer(cols = c(-RE, -max), names_to = "position", values_to = "affinity") %>%
na.omit()
# p <- RE_scores_filt %>% filter(RE == "nguyen_CDKN1A")
#
# ggplot(data = p,
# aes(x = -((nrow(p)/2)-1):(nrow(p)/2),
# y = affinity)) +
# geom_line() +
# theme_bw() +
# xlab("distance to RE center (bp)")+
# scale_color_gradient(low = "#F9E9E4", high = "#DD6B48")+
# ylab("affinity relative to perfect match")+
# xlim(-50,50) +
# ggtitle("P53 affinity across CDKN1A RE")
#
#
# p <- RE_scores_filt %>% filter(RE == "younger_chr7_121151311")
#
# ggplot(data = p,
# aes(x = -((nrow(p)/2)-1):(nrow(p)/2),
# y = affinity)) +
# geom_line() +
# theme_bw() +
# xlab("distance to RE center (bp)")+
# scale_color_gradient(low = "#F9E9E4", high = "#DD6B48")+
# ylab("affinity relative to perfect match")+
# ggtitle("P53 affinity across younger_chr7_121151311")
#
# p <- RE_scores_filt %>% filter(RE == "nguyen_GADD45A")
#
# ggplot(data = p,
# aes(x = -((nrow(p)/2)-1):(nrow(p)/2),
# y = affinity)) +
# geom_line() +
# theme_bw() +
# xlab("distance to RE center (bp)")+
# scale_color_gradient(low = "#F9E9E4", high = "#DD6B48")+
# ylab("affinity relative to perfect match")+
# xlim(-50,50) +
# labs(title = "P53 affinity across GADD45A RE",
# subtitle = "promega reporter & only 1 mismatch from consensus")
#
#
# p <- RE_scores_filt %>% filter(RE == "haran_MDM2_RE1")
#
# ggplot(data = p,
# aes(x = -((nrow(p)/2)-1):(nrow(p)/2),
# y = affinity)) +
# geom_line() +
# theme_bw() +
# xlab("distance to RE center (bp)")+
# scale_color_gradient(low = "#F9E9E4", high = "#DD6B48")+
# ylab("affinity relative to perfect match")+
# xlim(-50,50) +
# labs(title = "P53 affinity across MDM2 RE",
# subtitle = "two adjacent binding sites")
#
# p <- RE_scores_filt %>% filter(RE == "nguyen_FOSL1")
#
# ggplot(data = p,
# aes(x = -((nrow(p)/2)-1):(nrow(p)/2),
# y = affinity)) +
# geom_line() +
# theme_bw() +
# xlab("distance to RE center (bp)")+
# scale_color_gradient(low = "#F9E9E4", high = "#DD6B48")+
# ylab("affinity relative to perfect match")+
# xlim(-50,50) +
# labs(title = "P53 affinity across FOSL1 RE",
# subtitle = "two adjacent binding sites")
#
#
# # Take the highest 20
# relevant <- c("aerts", "younger", "haran", "nguyen")
# p <- RE_scores_filt[grep(paste(relevant, collapse = "|"), RE_scores_filt$RE),] %>%
# arrange(desc(max)) %>%
# top_n(20)
#
# p <- RE_scores_filt[RE_scores_filt$RE %in% p$RE,]
#
# ggplot(data = p %>%
# arrange(desc(max)),
# aes(x = as.numeric(position),
# y = affinity)) +
# geom_line() +
# theme_bw() +
# xlab("position (bp)")+
# scale_color_gradient(low = "#F9E9E4", high = "#DD6B48")+
# ylab("affinity relative to perfect match")+
# labs(title = "P53 affinity - highest matches") +
# facet_wrap(~RE, scales = "free_x", ncol = 3)
#
# ggplot(data = p,
# aes(x = as.numeric(position),
# y = affinity,
# color = RE)) +
# geom_line() +
# theme_bw() +
# xlab("distance to RE center (bp)")+
# ylab("affinity relative to perfect match")+
# labs(title = "P53 affinity across FOSL1 RE",
# subtitle = "two adjacent binding sites")
```
### Define a set of REs to focus on
```{r}
## Choose only sequences from Aerts paper
# Long-format scores for all Aerts-derived REs (both "peak" and "design").
RE_scores_aerts <- RE_scores %>%
pivot_longer(cols = c(-max, -RE), names_to = "position", values_to = "affinity") %>%
#filter(affinity >= 0.000025) %>%
filter(str_detect(RE, "aerts"))
## Define set used for characterizing genomic TP53 BSs
RE_scores_aerts_peak <- RE_scores_aerts %>%
filter(str_detect(RE, "aerts_peak"))
## Define set with matched MPRA activities
RE_scores_aerts_design <- RE_scores_aerts %>%
filter(str_detect(RE, "aerts_design"))
```
---
## What is the affinity distribution of genomic TP53 BSs compared to my selected BSs?
```{r}
# One max-affinity value per RE.
RE_scores_aerts_peak_max <- RE_scores_aerts_peak %>%
distinct(RE, max)
# Affinity distribution of the top 250 REs (linear scale).
# NOTE(review): the hard-coded hline intercepts look like reference affinity
# levels (possibly from the reporter design) — confirm their origin.
ggplot(RE_scores_aerts_peak_max %>%
mutate(rank = rank(-max)) %>%
filter(rank < 250),
aes(x = "", y = max)) +
#geom_hline(yintercept = 1, lty = 3, color = "red") +
#geom_hline(yintercept = 0.3708171, lty = 3, color = "red") +
geom_hline(yintercept = 0.1382304, lty = 3, color = "red") +
geom_hline(yintercept = 0.05752735, lty = 3, color = "red") +
#geom_hline(yintercept = 0.004223629, lty = 3, color = "blue") +
geom_quasirandom(alpha = .4) +
theme_pubr(border = T) +
xlab("Top 250 TP53 REs") +
ylab("Relative affinity")
# Same distribution for all REs on a log10 scale, with additional reference
# lines.
ggplot(RE_scores_aerts_peak_max %>%
mutate(rank = rank(-max)),
aes(x = "x", y = log10(max))) +
geom_hline(yintercept = log10(1), lty = 3, color = "red") +
geom_hline(yintercept = log10(0.3708171), lty = 3, color = "red") +
geom_hline(yintercept = log10(0.1382304), lty = 3, color = "red") +
geom_hline(yintercept = log10(0.05752735), lty = 3, color = "red") +
geom_hline(yintercept = log10(6.162213e-05), lty = 3, color = "black") +
geom_hline(yintercept = log10(1.148827e-06), lty = 3, color = "black") +
geom_hline(yintercept = log10(0.0001), lty = 3, color = "green") +
geom_hline(yintercept = log10(0.004223629), lty = 3, color = "blue") +
geom_quasirandom() +
theme_pubr() +
xlab("All TP53 REs") +
ylab("Relative affinity (log10)")
```
---
## How often do adjacent TP53 BSs occur in the genome?
```{r}
# Call binding sites (positions with affinity above a cutoff) per RE, and
# record the span (`dif`) between the first and last called position.
RE_scores_aerts_peak_n_BS <- RE_scores_aerts_peak %>%
filter(affinity > 0.0001) %>%
mutate(position = as.numeric(position)) %>%
mutate(max_position = ave(position, RE, FUN = function(x) max(x, na.rm = T))) %>%
mutate(min_position = ave(position, RE, FUN = function(x) min(x, na.rm = T))) %>%
mutate(dif = max_position - min_position)
## Keep only the highest value for overlapping BSs
# Sites closer than 20 bp (one BS width) are treated as one overlapping BS;
# keep only the strongest position per RE.
RE_scores_aerts_peak_n_BS_overlap <- RE_scores_aerts_peak_n_BS %>%
filter(dif < 20) %>%
group_by(RE) %>%
slice_max(n = 1, order_by = affinity, with_ties = T) %>%
ungroup()
# Recombine non-overlapping REs with the collapsed overlapping ones and
# count called positions per RE (`n`).
RE_scores_aerts_peak_n_BS <- RE_scores_aerts_peak_n_BS %>%
filter(dif >= 20) %>%
rbind(RE_scores_aerts_peak_n_BS_overlap) %>%
#filter(dif <= 70) %>%
add_count(RE)
## All REs with 3 or 4 BSs have actually only 2 BSs with overlapping BSs - I will merge those here
RE_scores_aerts_peak_n_BS_overlap_2 <- RE_scores_aerts_peak_n_BS %>%
filter(n > 2) %>%
filter(n < 5) %>%
group_by(RE) %>%
slice_max(n = 2, order_by = affinity, with_ties = T) %>%
ungroup()
## There is one RE with 5 BSs that actually consists of 3 overlapping BSs - I will merge those here
RE_scores_aerts_peak_n_BS_overlap_3 <- RE_scores_aerts_peak_n_BS %>%
filter(n == 5) %>%
group_by(RE) %>%
slice_max(n = 3, order_by = affinity, with_ties = T) %>%
ungroup()
# Final table: corrected BS calls per RE; `nn` is the corrected BS count.
RE_scores_aerts_peak_n_BS <- RE_scores_aerts_peak_n_BS %>%
filter(n <= 2) %>%
rbind(RE_scores_aerts_peak_n_BS_overlap_2) %>%
rbind(RE_scores_aerts_peak_n_BS_overlap_3) %>%
add_count(RE)
# Distribution of the number of BSs per RE.
ggplot(RE_scores_aerts_peak_n_BS,
aes(x = as.factor(nn))) +
geom_bar(stat = "count") +
theme_pubr(border = T) +
xlab("Number of BSs")
# Same, restricted to REs whose BSs span less than 70 bp.
ggplot(RE_scores_aerts_peak_n_BS %>%
filter(dif < 70),
aes(x = nn)) +
geom_bar(stat = "count") +
theme_pubr() +
xlab("Number of BSs (<50 bp apart)")
```
---
## What is the spacer length distribution between adjacent genomic TP53 BSs?
```{r}
# Spacer length distribution between adjacent BSs (span minus one BS width
# of 20 bp), for REs with more than one BS.
ggplot(RE_scores_aerts_peak_n_BS %>%
filter(nn > 1) %>%
distinct(RE, dif),
aes(x = dif - 20)) +
geom_bar(stat = "count") +
theme_pubr(border = T) +
xlab("BS-BS spacer length")
# Same, restricted to REs whose BSs span less than 70 bp.
ggplot(RE_scores_aerts_peak_n_BS %>%
filter(dif < 70) %>%
filter(nn > 1) %>%
distinct(RE, dif),
aes(x = dif - 20)) +
geom_bar(stat = "count") +
theme_pubr() +
xlab("BS-BS spacer length")
```
---
## What is the relation between MPRA activity and TP53 BS affinity?
```{r}
# MPRA activities (Nutlin-3a induction) per designed RE from the Aerts data.
aerts_p53_REs_activity2 <- read.csv2("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/p53_affinity_scoring/aerts_p53_activities2.csv") %>%
dplyr::select("RE" = corresponding.peak.name, expression, padj)
# Join max affinity per designed RE with its measured MPRA activity.
RE_scores_aerts_design_activity <- RE_scores_aerts_design %>%
distinct(RE, max) %>%
filter(max > 0.0001) %>%
mutate(RE = gsub("aerts_design_", "", RE)) %>%
left_join(aerts_p53_REs_activity2)
# Activity vs. affinity (linear scale); red points are significant (padj<0.05).
ggplot(RE_scores_aerts_design_activity,
aes(x = max, y = expression)) +
#geom_vline(xintercept = 1, lty = 3, color = "red") +
#geom_vline(xintercept = 0.3708171, lty = 3, color = "red") +
geom_vline(xintercept = 0.1382304, lty = 3, color = "red") +
geom_vline(xintercept = 0.05752735, lty = 3, color = "red") +
geom_point(aes(color = ifelse(padj < 0.05, "yes", "no"))) +
geom_smooth(method = "lm", color = "black") +
scale_color_manual(values = c("yes" = "red", "no" = "black")) +
theme_pubr() +
xlab("Relative affinity") +
ylab("MPRA activity - Nutlin-3a induction")
# Same relationship on a log10 affinity scale.
ggplot(RE_scores_aerts_design_activity,
aes(x = log10(max), y = expression)) +
#geom_vline(xintercept = log10(1), lty = 3, color = "red") +
#geom_vline(xintercept = log10(0.3708171), lty = 3, color = "red") +
geom_vline(xintercept = log10(0.1382304), lty = 3, color = "red") +
geom_vline(xintercept = log10(0.05752735), lty = 3, color = "red") +
geom_point(aes(color = ifelse(padj < 0.05, "yes", "no"))) +
geom_smooth(method = "lm", color = "black") +
scale_color_manual(values = c("yes" = "red", "no" = "black")) +
theme_pubr() +
xlab("Relative affinity (log10)") +
ylab("MPRA activity - Nutlin-3a induction")
```
---
## How active are elements with 1 TP53 BS vs. elements with multiple TP53 BSs?
```{r}
# Per-RE BS summary: span (`dif`), corrected BS count (`nn`), raw count (`n`),
# and whether any called BSs overlapped.
BS_df <- RE_scores_aerts_peak_n_BS %>%
distinct(RE, dif, nn, n) %>%
mutate(RE = gsub("aerts_peak_", "", RE)) %>%
mutate(overlap = ifelse(n == nn, "no", "yes")) %>%
mutate(overlap = ifelse(dif < 20 & dif > 0, "yes", overlap))
# Attach BS annotation to MPRA activities; drop REs without both.
RE_scores_aerts_design_activity_nBS <- RE_scores_aerts_design_activity %>%
left_join(BS_df) %>%
na.omit()
# Activity by corrected number of BSs.
ggplot(RE_scores_aerts_design_activity_nBS,
aes(x = as.factor(nn), y = expression)) +
geom_quasirandom() +
geom_boxplot(alpha = .4, outlier.shape = NA) +
theme_pubr() +
xlab("Number of TP53 BSs") +
ylab("MPRA activity - Nutlin-3a induction")
# p-value for the 1-BS vs. 2-BS activity difference (Welch t-test).
t.test(RE_scores_aerts_design_activity_nBS$expression[RE_scores_aerts_design_activity_nBS$nn ==1], RE_scores_aerts_design_activity_nBS$expression[RE_scores_aerts_design_activity_nBS$nn ==2])$p.val
# Same, coloured by whether the RE contained overlapping BS calls.
ggplot(RE_scores_aerts_design_activity_nBS,
aes(x = as.factor(nn), y = expression)) +
geom_quasirandom(aes(color = overlap)) +
geom_boxplot(alpha = .4, outlier.shape = NA) +
theme_pubr() +
xlab("Number of TP53 BSs") +
ylab("MPRA activity - Nutlin-3a induction")
# Same, but grouped by the raw (uncorrected) BS count.
ggplot(RE_scores_aerts_design_activity_nBS,
aes(x = as.factor(n), y = expression)) +
geom_quasirandom(aes(color = overlap)) +
geom_boxplot(alpha = .4, outlier.shape = NA) +
theme_pubr() +
xlab("Number of TP53 BSs") +
ylab("MPRA activity - Nutlin-3a induction")
# Same, coloured by log10 max affinity.
ggplot(RE_scores_aerts_design_activity_nBS,
aes(x = as.factor(nn), y = expression)) +
geom_quasirandom(aes(color = log10(max))) +
geom_boxplot(alpha = .4, outlier.shape = NA) +
theme_pubr() +
scale_color_viridis_c(option = "D") +
xlab("Number of TP53 BSs") +
ylab("MPRA activity - Nutlin-3a induction")
# Activity vs. affinity (log10), faceted by BS count.
ggplot(RE_scores_aerts_design_activity_nBS,
aes(x = log10(max), y = expression)) +
#geom_vline(xintercept = log10(1), lty = 3, color = "red") +
#geom_vline(xintercept = log10(0.3708171), lty = 3, color = "red") +
geom_vline(xintercept = log10(0.1382304), lty = 3, color = "red") +
geom_vline(xintercept = log10(0.05752735), lty = 3, color = "red") +
geom_point(aes(color = ifelse(padj < 0.05, "yes", "no"))) +
geom_smooth(method = "lm", color = "black") +
scale_color_manual(values = c("yes" = "red", "no" = "black")) +
theme_pubr() +
xlab("Relative affinity (log10)") +
ylab("MPRA activity - Nutlin-3a induction") +
facet_wrap(~nn)
# Activity vs. affinity (linear), faceted by BS count.
ggplot(RE_scores_aerts_design_activity_nBS,
aes(x = max, y = expression)) +
#geom_vline(xintercept = log10(1), lty = 3, color = "red") +
#geom_vline(xintercept = log10(0.3708171), lty = 3, color = "red") +
geom_vline(xintercept = 0.1382304, lty = 3, color = "red") +
geom_vline(xintercept = 0.05752735, lty = 3, color = "red") +
geom_point(aes(color = ifelse(padj < 0.05, "yes", "no"))) +
geom_smooth(method = "lm", color = "black") +
scale_color_manual(values = c("yes" = "red", "no" = "black")) +
theme_pubr() +
xlab("Relative affinity (log10)") +
ylab("MPRA activity - Nutlin-3a induction") +
facet_wrap(~nn)
```
---
## Does the spacer length between adjacent TP53 BSs impact MPRA activity?
```{r}
# Spacer-length effect: restrict to reporters with exactly 2 BSs and a
# BS-BS distance below 70, then plot activity vs. spacer length with a
# loess trend.
# NOTE(review): 'dif - 20' presumably converts the distance 'dif' to the
# gap between sites by subtracting a 20-bp binding-site width - confirm.
ggplot(RE_scores_aerts_design_activity_nBS %>%
filter(nn == 2, dif < 70),
aes(x = dif - 20, y = expression)) +
geom_point() +
geom_smooth(method = "loess") +
theme_pubr() +
xlab("BS-BS spacer length") +
ylab("MPRA activity - Nutlin-3a induction")
```
---
### Session Info
```{r}
paste("Run time: ",format(Sys.time()-StartTime))
getwd()
date()
sessionInfo()
```
<file_sep>---
title: "TF-lib pDNA insert sequencing"
author: "<NAME>"
date: "`r format(Sys.time(), '%Y-%m-%d')`"
output:
prettydoc::html_pretty:
theme: leonids
highlight: github
# toc: true
# toc_float: true
# code_folding: show
# editor_options:
# chunk_output_type: console
---
*knitr document van Steensel lab*
# TF-lib pDNA insert sequencing
# Introduction
I sequenced the complete insert of the pDNA library of pMT09. I already extracted all sequences in front of the 3' adapter from the sequencing data and aggregated counts of identical sequences with starcode. I now want to make an overview of how many pDNA insert sequences in the pDNA library still match the designed inserts.
```{r setup, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
# Load options and libraries
knitr::opts_chunk$set(echo = TRUE)
StartTime <-Sys.time()
# 8-digit Date tag:
Date <- substr(gsub("-","",Sys.time()),1,8)
# libraries:
library(seqinr)
library(ShortRead)
library(plyr)
library(maditr)
library(phylotools)
library(tidyr)
library(readr)
library(dplyr)
library(ggplot2)
library(ggbeeswarm)
library(vwr)
library(d3r)
library(sunburstR)
library(LncFinder)
library(plotly)
library(tibble)
library(stringr)
```
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
### Custom functions
# Build a date-tagged file name: <initials><Date><filename>.
# Relies on the global 'Date' (8-digit date tag set in the setup chunk).
# NOTE(review): substitute() captures the unevaluated argument expression,
# so callers can pass bare symbols as well as strings - confirm that all
# call sites pass character literals, for which this behaves like identity.
SetFileName <- function(filename, initials) {
# Set filename with extension and initials to make filename with date integrated.
filename <- substitute(filename)
initials <- substitute(initials)
filename <- paste0(initials, Date, filename)
filename
}
# Read a FASTA file into a data frame with one row per record.
#
# Args:
#   file: path to a FASTA file.
# Returns: data.frame with columns 'name' (header without the leading ">")
#   and 'sequence' (all sequence lines of the record concatenated).
ReadFasta <- function(file) {
  # Read the file line by line.
  fasta <- readLines(file)
  # Identify header lines. Fix: anchor to the line start ("^>") so a ">"
  # occurring inside a description or sequence line cannot be mistaken for
  # the start of a new record.
  ind <- grep("^>", fasta)
  # For each record, the sequence spans from the line after its header up to
  # the line before the next header (or to the end of file for the last one).
  s <- data.frame(ind = ind, from = ind + 1, to = c((ind - 1)[-1], length(fasta)))
  # Concatenate (possibly wrapped) sequence lines per record.
  # seq_along() instead of 1:length() so an empty FASTA does not iterate.
  seqs <- rep(NA, length(ind))
  for (i in seq_along(ind)) {
    seqs[i] <- paste(fasta[s$from[i]:s$to[i]], collapse = "")
  }
  # Assemble and return the result.
  DF <- data.frame(name = gsub("^>", "", fasta[ind]), sequence = seqs)
  return(DF)
}
```
```{r knits setup, echo=FALSE, warning= FALSE, message=FALSE}
# Save all figures generated in this Rmd document
library(knitr)
filename <- SetFileName("_figures","mt")
dir.create(paste("results/", filename, sep = ""), showWarnings = FALSE)
opts_chunk$set(fig.width = 4, fig.height = 4,
dev=c('png', 'pdf'), fig.path = file.path(paste("results/", filename, "/", sep = "")))
pdf.options(useDingbats = FALSE)
```
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
## Data import

# Import the designed reference sequences (gen-2 oligo pool).
ref_seq <- ReadFasta("/DATA/usr/m.trauernicht/projects/SuRE-TF/data/library_design/gen-2/mt20210326_oligo_pool_gen2.fasta")

# Remove adapters from the reference sequences (these are not present in the
# sequencing data).
ref_seq$sequence <- gsub("CGGAGCGAACCGAGTTAG", "", ref_seq$sequence)
ref_seq$sequence <- gsub("CATCGTCGCATCCAAGAG", "", ref_seq$sequence)

# Split each reference into insert and barcode parts; the barcode is the
# last 13 bases of the sequence.
ref_seq$barcode <- gsub(".*([A-Z]{13})$", "\\1", ref_seq$sequence)
ref_seq$insert <- gsub("(.*)[A-Z]{13}$", "\\1", ref_seq$sequence)

# Add control-group classifier: reporters whose name matches any of these
# patterns are controls.
match <- c("romanov", "pMT02", "TF-seq", "ctrl", "promega")
ref_seq$ctrl <- "no"
ref_seq$ctrl[grep(paste(match, collapse = "|"), ref_seq$name)] <- "yes"

# Import the sequencing count files.
# Fix: the argument was misspelled 'patter' - it only worked through R's
# partial argument matching; spelled out as 'pattern' here.
pDNA_seq_files <- list.files('/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/pDNA_insert_seq_2/results/',
                             full.names = T, pattern = '.*TF.*_counts.tsv')
pDNA_seq <- lapply(pDNA_seq_files, fread, header = FALSE)
# Condition name = file name stripped of the index barcode and suffix.
names(pDNA_seq) <- gsub('.*//(.*?)_[CGAT]{8}.*_counts.tsv',
                        '\\1',
                        pDNA_seq_files)

# Generate wide df - each condition attached as a new column.
# seq_along() instead of 1:length() so an empty file list does not iterate.
for (i in seq_along(pDNA_seq)) {
  if (i == 1) {
    pDNA_seq_df <- data.frame(pDNA_seq[i])
    pDNA_seq_df[3] <- names(pDNA_seq[i])
    names(pDNA_seq_df) <- c("sequence", "count", "name")
    pDNA_seq_df <- reshape2::dcast(pDNA_seq_df, sequence ~ name, value.var = "count")
  }
  else {
    pDNA_seq_df_i <- data.frame(pDNA_seq[i])
    pDNA_seq_df_i[3] <- names(pDNA_seq[i])
    names(pDNA_seq_df_i) <- c("sequence", "count", "name")
    pDNA_seq_df_i <- reshape2::dcast(pDNA_seq_df_i, sequence ~ name, value.var = "count")
    pDNA_seq_df <- merge(pDNA_seq_df, pDNA_seq_df_i, all = T)
  }
}

# Convert to long df - one row per sequence x condition.
pDNA_seq <- melt(pDNA_seq_df, id.vars = "sequence",
                 variable.name = "condition", value.name = "counts")

# Split up into insert and barcode part (barcode = last 13 bases).
pDNA_seq$barcode <- gsub(".*([A-Z]{13})$", "\\1", pDNA_seq$sequence)
pDNA_seq$insert <- gsub("(.*)[A-Z]{13}$", "\\1", pDNA_seq$sequence)

# Calculate reads per million per condition, with a pseudocount of 1.
pDNA_seq$counts[is.na(pDNA_seq$counts)] <- 0
for (i in unique(pDNA_seq$condition)) {
  pDNA_seq$rpm[pDNA_seq$condition == i] <- (pDNA_seq$counts[pDNA_seq$condition == i] + 1) /
    sum(pDNA_seq$counts[pDNA_seq$condition == i]) * 1e6
}
```
# Analysis
## What is the barcode distribution of mapped vs. unmapped for both TFs?
```{r read_distribution_per_tf, fig.width=4, fig.height=4, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Match barcodes with original data
pDNA_seq_bc <- pDNA_seq %>%
dplyr::select(barcode, rpm, condition) %>%
unique() %>%
mutate(id = "pDNA")
# Only keep highest barcode values - a bit of cheating here
pDNA_seq_bc$cond_bc <- paste(pDNA_seq_bc$barcode, pDNA_seq_bc$condition, sep = "_")
pDNA_seq_bc <- pDNA_seq_bc[order(pDNA_seq_bc$cond_bc, -abs(pDNA_seq_bc$rpm) ), ]
pDNA_seq_bc <- pDNA_seq_bc[ !duplicated(pDNA_seq_bc$cond_bc), ]
ref_seq_bc <- ref_seq %>% dplyr::select(name, barcode, ctrl)
match_bc <- merge(ref_seq_bc, pDNA_seq_bc, by = "barcode", all = T)
match_bc <- match_bc[!is.na(match_bc$rpm),]
match_bc$TF <- gsub("_.*", "\\1", match_bc$name)
match_bc$match <- "true"
match_bc$match[is.na(match_bc$name)] <- "false"
match_bc$conf <- "high"
match_bc$conf[match_bc$rpm < 10] <- "low"
# Visualize barcode distribution per TF
ggplot(match_bc %>%
filter(ctrl == "no"), aes(x = TF, y = rpm)) +
geom_quasirandom() +
theme_bw() +
xlab("reporter matches to:") +
ylab("reads per million") +
theme(text = element_text(size = 14), axis.text.x = element_text(angle = 90, hjust = 1, vjust = 1, size = 6)) +
ylim(0,300) + facet_wrap(~condition)
# Plot per TF the amount of high vs. low confidence reporters
x <- match_bc %>%
filter(match == "true", ctrl == "no") %>%
mutate(conf_count = as.numeric(ave(TF, TF, conf, condition, FUN = function(x) length(x)))) %>%
dplyr::select(conf_count, TF, conf, condition) %>%
unique %>%
dcast(TF + condition ~ conf, value.var = "conf_count")
x[is.na(x)] <- 0
x <- x %>%
mutate(sum = high + low,
percent = (high/sum)*100)
ggplot(x,
aes(x = reorder(TF, -percent), y = percent)) +
geom_bar(stat = "identity", position = "dodge") +
theme_bw() +
theme(text = element_text(size = 14), axis.text.x = element_text(angle = 90, hjust = 1, vjust = 1, size = 6)) +
facet_wrap(~condition)
# Visualize read distribution for all matched reporters
ggplot(match_bc %>%
filter(match == "true", ctrl == "no"), aes(x = rpm)) +
geom_density() +
geom_vline(aes(xintercept = 10), linetype = "dashed")+
theme_bw() +
xlim(0, 200) +
facet_wrap(~condition)
ggplot(match_bc %>%
filter(match == "true", ctrl == "no"), aes(x = rpm, color = condition)) +
geom_density() +
geom_vline(aes(xintercept = 10), linetype = "dashed")+
theme_bw() +
scale_color_brewer(palette = "Dark2") +
xlim(0, 200)
ggplot(match_bc %>%
filter(match == "true", ctrl == "no", str_detect(condition, "plasmid")), aes(x = rpm, color = condition)) +
geom_density() +
geom_vline(aes(xintercept = 10), linetype = "dashed")+
theme_bw() +
scale_color_brewer(palette = "Dark2") +
xlim(0, 200)
```
## Correlate to GC content
```{r gc_content, fig.width=4, fig.height=4, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Load reference file
ref_seq_2 <- seqinr::read.fasta("/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr//data/library_design/output/mt20200619_oligo_pool_deep.fasta")
# Compute gc contents
gc <- compute_GC(ref_seq_2)
# Plot gc distribution
density <- density(gc$GC.content)
plot_ly(x = ~density$x, y = ~density$y, type = 'scatter', mode = 'lines', fill = 'tozeroy') %>%
layout(xaxis = list(title = 'GC content'),
yaxis = list(title = 'Density'))
match_seq <- match_bc[!is.na(match_bc$name),]
gc <- gc %>% rownames_to_column(var = "name")
gc <- merge(gc, match_seq)
plot_ly(data = gc, x = ~GC.content, y = ~rpm, color = ifelse(gc$rpm >= 10, "black", "red")) %>%
layout(xaxis = list(title = 'GC content'),
yaxis = list(title = 'reads per million',
range=c(0,230)))
```
## Plot how many barcodes are found in pDNA data
```{r barcodes_found, fig.width=4, fig.height=4, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Filter pDNA data - we should have at least 10 rpm - we want to be sure that we're looking at relevant things
pDNA_seq_filt <- pDNA_seq[pDNA_seq$rpm >= 10,]
# Match barcodes with original data
ref_seq_bc <- ref_seq %>% dplyr::select(name, barcode)
ref_seq_bc$TF <- gsub("_.*", "\\1", ref_seq_bc$name)
pDNA_seq_bc <- pDNA_seq_filt %>%
filter(str_detect(condition, "plasmid")) %>%
dplyr::select(barcode, rpm) %>%
unique() %>%
mutate(id = "pDNA")
match_bc <- merge(ref_seq_bc, pDNA_seq_bc, by = "barcode", all = T)
## Identify the unmapped fraction
match_df_ref <- match_bc[!is.na(match_bc$name),]
n_match <- nrow(match_df_ref[!is.na(match_df_ref$id),])
n_nomatch <- nrow(match_df_ref[is.na(match_df_ref$id),])
# Create donut chart from the matched/unmatched barcode counts.
data <- data.frame(
  lbls = c("matched:", "unmatched:"),
  count = c(n_match, n_nomatch)
)
# Fractions and percentages of the total.
data$fraction <- data$count / sum(data$count)
data$percentage <- data$fraction * 100
# Cumulative fraction = top of each donut segment.
# (Fix: this computation was accidentally duplicated in the original.)
data$ymax <- cumsum(data$fraction)
# Bottom of each segment.
data$ymin <- c(0, head(data$ymax, n = -1))
# Label position = segment midpoint.
data$labelPosition <- (data$ymax + data$ymin) / 2
# Segment label, e.g. "matched:\n85%".
data$label <- paste0(data$lbls, "\n", round(data$percentage), "%")
# Make the plot.
ggplot(data, aes(ymax = ymax, ymin = ymin, xmax = 4, xmin = 3, fill = lbls)) +
  geom_rect() +
  geom_text(x = 2, aes(y = labelPosition, label = label, color = lbls), size = 5) + # x here controls label position (inner / outer)
  scale_fill_manual(values = c("#1B998B", "#2D3047")) +
  scale_color_manual(values = c("#1B998B", "#2D3047")) +
  coord_polar(theta = "y") +
  xlim(c(0, 4)) +
  theme_void() +
  theme(legend.position = "none")
```
## Plot how many reads match to designed barcodes
```{r, barcode_reads, fig.width=4, fig.height=4, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Match barcodes with original data
pDNA_seq_bc <- pDNA_seq_filt %>%
filter(str_detect(condition, "plasmid")) %>%
dplyr::select(barcode, rpm) %>%
unique() %>%
mutate(id = "pDNA")
ref_seq_bc <- ref_seq %>% dplyr::select(name, barcode)
ref_seq_bc$TF <- gsub("_.*", "\\1", ref_seq_bc$name)
match_bc <- merge(ref_seq_bc, pDNA_seq_bc, by = "barcode", all = T)
## Identify the unmapped fraction
match_df_ref <- match_bc
match_df_ref$rpm[is.na(match_df_ref$rpm)] <- 0
n_match <- sum(match_df_ref$rpm[!is.na(match_df_ref$id)])
n_nomatch <- sum(match_df_ref$rpm[is.na(match_df_ref$id)])
# Create donut chart from the matched/unmatched read sums.
data <- data.frame(
  lbls = c("matched:", "unmatched:"),
  count = c(n_match, n_nomatch)
)
# Fractions and percentages of the total.
data$fraction <- data$count / sum(data$count)
data$percentage <- data$fraction * 100
# Cumulative fraction = top of each donut segment.
# (Fix: this computation was accidentally duplicated in the original.)
data$ymax <- cumsum(data$fraction)
# Bottom of each segment.
data$ymin <- c(0, head(data$ymax, n = -1))
# Label position = segment midpoint.
data$labelPosition <- (data$ymax + data$ymin) / 2
# Segment label, e.g. "matched:\n85%".
data$label <- paste0(data$lbls, "\n", round(data$percentage), "%")
# Make the plot.
ggplot(data, aes(ymax = ymax, ymin = ymin, xmax = 4, xmin = 3, fill = lbls)) +
  geom_rect() +
  geom_text(x = 2, aes(y = labelPosition, label = label, color = lbls), size = 5) + # x here controls label position (inner / outer)
  scale_fill_manual(values = c("#1B998B", "#2D3047")) +
  scale_color_manual(values = c("#1B998B", "#2D3047")) +
  coord_polar(theta = "y") +
  xlim(c(0, 4)) +
  theme_void() +
  theme(legend.position = "none")
```
## How many raw complete sequences match with the design?
```{r sequences_found, fig.width=4, fig.height=4, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Match barcodes with original data
ref_seq_seq <- ref_seq %>% dplyr::select(name, sequence)
pDNA_seq_seq <- pDNA_seq_filt %>%
filter(str_detect(condition, "plasmid")) %>%
dplyr::select(sequence, rpm) %>%
unique() %>%
mutate(id = "pDNA")
match_seq <- merge(ref_seq_seq, pDNA_seq_seq, by = "sequence", all = T)
match_seq$TF <- gsub("_.*", "\\1", match_seq$name)
## Identify the unmapped fraction
match_df_ref_seq <- match_seq[!is.na(match_seq$name),]
n_match <- nrow(match_df_ref_seq[!is.na(match_df_ref_seq$id),])
n_nomatch <- nrow(match_df_ref_seq[is.na(match_df_ref_seq$id),])
# Create donut chart
data <- data.frame(
lbls=c("matched:", "unmatched:"),
count=c(n_match, n_nomatch)
)
# Compute percentages
data$fraction <- data$count / sum(data$count)
data$percentage <- data$fraction * 100
# Compute the cumulative percentages (top of each rectangle)
data$ymax <- cumsum(data$fraction)
# Compute the cumulative percentages (top of each rectangle)
data$ymax <- cumsum(data$fraction)
# Compute the bottom of each rectangle
data$ymin <- c(0, head(data$ymax, n=-1))
# Compute label position
data$labelPosition <- (data$ymax + data$ymin) / 2
# Compute a good label
data$label <- paste0(data$lbls, "\n", round(data$percentage), "%")
# Make the plot
ggplot(data, aes(ymax=ymax, ymin=ymin, xmax=4, xmin=3, fill=lbls)) +
geom_rect() +
geom_text(x=2, aes(y=labelPosition, label=label, color=lbls), size=5) + # x here controls label position (inner / outer)
scale_fill_manual(values = c("#1B998B", "#2D3047")) +
scale_color_manual(values = c("#1B998B", "#2D3047")) +
coord_polar(theta="y") +
xlim(c(0, 4)) +
theme_void() +
theme(legend.position = "none")
```
## Now we want to know the read distribution of matched/unmatched sequences
```{r reads_matched_sequences, fig.width=4, fig.height=4, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
## Only select barcodes from design in pDNA data
# NOTE(review): this chunk selects and plots a column 'number', but in this
# document pDNA_seq carries its counts in 'counts' (melted per condition) -
# confirm 'number' exists here; this looks copied from the single-sample
# script later in this file, where the column is indeed 'number'.
pDNA_seq_insert <- pDNA_seq[pDNA_seq$barcode %in% ref_seq$barcode,] %>% dplyr::select(sequence, number)
ref_seq_insert <- ref_seq %>% dplyr::select(name, sequence)
# Full outer join on the raw sequence: rows with NA 'name' are pDNA
# sequences that do not match any designed insert.
match_insert <- merge(pDNA_seq_insert, ref_seq_insert, by = "sequence", all = T)
# TF label = prefix of the reporter name, set only for matched rows.
match_insert$TF[!is.na(match_insert$number)] <- gsub("_.*", "\\1", match_insert$name[!is.na(match_insert$number)])
# Read-count distribution per TF match.
ggplot(match_insert, aes(x = TF, y = number)) +
geom_quasirandom(alpha = 0.1) +
xlab("TF reporter match")+
theme_bw() +
ylim(0,1000)
## Identify the unmapped fraction
match_df_reads <- match_insert[!is.na(match_insert$number),]
match_df_reads$TF[is.na(match_df_reads$TF)] <- "no-match"
# NOTE(review): the Trp53 sum below is immediately overwritten by the Gr
# sum - the first assignment is dead code, so the donut chart reports Gr
# only. Confirm which TF (or both) should be counted as "matched".
n_match <- sum(match_df_reads$number[match_df_reads$TF == "Trp53"])
n_match <- sum(match_df_reads$number[match_df_reads$TF == "Gr"])
n_nomatch <- sum(match_df_reads$number[match_df_reads$TF == "no-match"])
# Create donut chart
data <- data.frame(
lbls=c("matched:", "unmatched:"),
count=c(n_match, n_nomatch)
)
# Compute percentages
data$fraction <- data$count / sum(data$count)
data$percentage <- data$fraction * 100
# Compute the cumulative percentages (top of each rectangle)
data$ymax <- cumsum(data$fraction)
# Compute the cumulative percentages (top of each rectangle)
data$ymax <- cumsum(data$fraction)
# Compute the bottom of each rectangle
data$ymin <- c(0, head(data$ymax, n=-1))
# Compute label position
data$labelPosition <- (data$ymax + data$ymin) / 2
# Compute a good label
data$label <- paste0(data$lbls, "\n", round(data$percentage), "%")
# Make the plot
ggplot(data, aes(ymax=ymax, ymin=ymin, xmax=4, xmin=3, fill=lbls)) +
geom_rect() +
geom_text(x=2, aes(y=labelPosition, label=label, color=lbls), size=5) + # x here controls label position (inner / outer)
scale_fill_manual(values = c("#1B998B", "#2D3047")) +
scale_color_manual(values = c("#1B998B", "#2D3047")) +
labs(title = "percentage of reads from unmatched inserts (only matched barcodes)") +
coord_polar(theta="y") +
xlim(c(0, 4)) +
theme_void() +
theme(legend.position = "none")
```
# Barcodes attached to wrong insert?
```{r, barcode_matching, fig.width=4, fig.height=4, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Select only matched barcodes
pDNA_seq_insert <- pDNA_seq[pDNA_seq$barcode %in% ref_seq$barcode,]
# Add bc-id to the barcodes found in pDNA
ref_seq_insert <- ref_seq %>% dplyr::select(barcode, name) %>% setnames("name", "bc-match")
pDNA_seq_insert <- merge(pDNA_seq_insert, ref_seq_insert)
# Add insert-id to the inserts found in pDNA
ref_seq_insert <- ref_seq %>% dplyr::select(insert, name) %>% setnames("name", "insert-match")
ref_seq_insert$`insert-match` <- gsub("(.*)_bc_[0-9]$", "\\1", ref_seq_insert$`insert-match`)
ref_seq_insert <- ref_seq_insert %>% unique()
pDNA_seq_insert <- merge(pDNA_seq_insert, ref_seq_insert)
# Count occurrences where the barcode-derived reporter id matches the
# insert-derived reporter id (both stripped of the "_bc_<n>" suffix).
pDNA_seq_insert$`bc-match` <- gsub("(.*)_bc_[0-9]$", "\\1", pDNA_seq_insert$`bc-match`)
# Element-wise comparison. Fix: replaces the original 1:nrow() row loop,
# which would mis-iterate (1:0) on an empty data frame; mapply(identical,...)
# keeps the original semantics (two NAs compare as TRUE).
pDNA_seq_insert$match <- mapply(identical,
                                pDNA_seq_insert$`bc-match`,
                                pDNA_seq_insert$`insert-match`,
                                USE.NAMES = FALSE)
# Reporter TF = prefix of the insert id; keep only the Gr reporters.
pDNA_seq_insert$TF <- gsub("_.*", "\\1", pDNA_seq_insert$`insert-match`)
pDNA_seq_insert <- pDNA_seq_insert[pDNA_seq_insert$TF == "Gr",]
## Identify the unmapped fraction
n_match <- sum(pDNA_seq_insert$number[pDNA_seq_insert$match == T])
n_nomatch <- sum(pDNA_seq_insert$number[pDNA_seq_insert$match == F])
# Create donut chart
data <- data.frame(
lbls=c("correct:", "incorrect:"),
count=c(n_match, n_nomatch)
)
# Compute percentages
data$fraction <- data$count / sum(data$count)
data$percentage <- data$fraction * 100
# Compute the cumulative percentages (top of each rectangle)
data$ymax <- cumsum(data$fraction)
# Compute the cumulative percentages (top of each rectangle)
data$ymax <- cumsum(data$fraction)
# Compute the bottom of each rectangle
data$ymin <- c(0, head(data$ymax, n=-1))
# Compute label position
data$labelPosition <- (data$ymax + data$ymin) / 2
# Compute a good label
data$label <- paste0(data$lbls, "\n", round(data$percentage), "%")
# Make the plot
ggplot(data, aes(ymax=ymax, ymin=ymin, xmax=4, xmin=3, fill=lbls)) +
geom_rect() +
geom_text(x=2, aes(y=labelPosition, label=label, color=lbls), size=5) + # x here controls label position (inner / outer)
scale_fill_manual(values = c("#1B998B", "#2D3047")) +
scale_color_manual(values = c("#1B998B", "#2D3047")) +
labs(title = "percentage of reads where bc attached to wrong insert ") +
coord_polar(theta="y") +
xlim(c(0, 4)) +
theme_void() +
theme(legend.position = "none")
ggplot(pDNA_seq_insert, aes(x=match, y=number)) +
geom_quasirandom() +
labs(title = "Read count distribution",
subtitle = "bc attached to correct insert vs. attached to wrong insert")+
ylab("counts") + xlab("match vs. non-match")+
ylim(1,1000)+
theme_bw()
```
# Session Info
```{r}
paste("Run time: ",format(Sys.time()-StartTime))
getwd()
date()
sessionInfo()
```
<file_sep>---
title: "pDNA insert matching"
author: "<NAME>"
date: "`r format(Sys.time(), '%Y-%m-%d')`"
output:
html_document:
theme: journal #cerulean
highlight: monochrome
toc: true
toc_float: true
code_folding: show
editor_options:
chunk_output_type: console
---
# knitr document van Steensel lab
# Introduction
I sequenced the complete insert of the pDNA library of pMT06. I already extracted all sequences in front of the 3' adapter from the sequencing data and aggregated counts of identical sequences with starcode. I now want to make an overview of how many pDNA insert sequences in the pDNA library still match the designed inserts.
## Description of Data
How to make a good rendering table:
```{r table1, echo=FALSE, message=FALSE, warnings=FALSE, results='asis'}
tabl <- "
| column1 | column2 | column3 |
|----|----|----|
|1 | 2 | 3 |
|a | b | c |
"
cat(tabl) # output the table in a format good for HTML/PDF/docx conversion
```
# Data processing
## Path, Libraries, Parameters and Useful Functions
```{r setup, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
knitr::opts_chunk$set(echo = TRUE)
StartTime <-Sys.time()
# 8-digit Date tag:
Date <- substr(gsub("-","",Sys.time()),1,8)
# libraries:
library(seqinr)
library(ShortRead)
library(plyr)
library(maditr)
library(phylotools)
library(tidyr)
library(readr)
library(dplyr)
library(ggplot2)
library(ggbeeswarm)
library(vwr)
library(d3r)
library(sunburstR)
```
### Custom functions
Functions used throughout this script.
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
SetFileName <- function(filename, initials) {
# Set filename with extension and initials to make filename with date integrated.
filename <- substitute(filename)
initials <- substitute(initials)
filename <- paste0(initials, Date, filename)
filename
}
# Read a FASTA file into a data frame with one row per record.
#
# Args:
#   file: path to a FASTA file.
# Returns: data.frame with columns 'name' (header without the leading ">")
#   and 'sequence' (all sequence lines of the record concatenated).
ReadFasta <- function(file) {
  # Read the file line by line.
  fasta <- readLines(file)
  # Identify header lines. Fix: anchor to the line start ("^>") so a ">"
  # occurring inside a description or sequence line cannot be mistaken for
  # the start of a new record.
  ind <- grep("^>", fasta)
  # For each record, the sequence spans from the line after its header up to
  # the line before the next header (or to the end of file for the last one).
  s <- data.frame(ind = ind, from = ind + 1, to = c((ind - 1)[-1], length(fasta)))
  # Concatenate (possibly wrapped) sequence lines per record.
  # seq_along() instead of 1:length() so an empty FASTA does not iterate.
  seqs <- rep(NA, length(ind))
  for (i in seq_along(ind)) {
    seqs[i] <- paste(fasta[s$from[i]:s$to[i]], collapse = "")
  }
  # Assemble and return the result.
  DF <- data.frame(name = gsub("^>", "", fasta[ind]), sequence = seqs)
  return(DF)
}
# Function to load PWM matrix
get_pwm_feature_matrix <- function(motif_meta_fn, fimo_fn, db = 2) {
# validate args
valid_dbs <- 1:2
if(!db %in% valid_dbs)
stop('Invalid db (database version). Please use db=1 (maintained for backward compatibility only) or db=2')
# db=1 is maintained for backward compatibility only
if(db == 1) {
# read in motif metadata
motif_meta <- read.csv(motif_meta_fn)
# check whether motif metadata contain essential annotations
if(!all(c('PWM.ID', 'Cognate.TF') %in% colnames(motif_meta))) {
message('The motif metadata file does not contain the essential columns PWM.ID and Cognate.TF')
}
motif_minimal <- motif_meta[, c('PWM.ID', 'Cognate.TF')]
# load fimo output --> extract motif id, sequence id and p-value
df <- read.table(fimo_fn)
df <- df[, c(1, 2, 7)]
colnames(df) <- c('PWM.ID', 'seqid', 'pval')
# add TF id
df <- merge(df, motif_minimal, by = 'PWM.ID')
# group motif hits by sequence id
l <- split(df, df[['seqid']])
# multiple PWM and multiple hits possible. Reduce hits to one per TF, keeping best p-val only
l <- lapply(l, function(x) {
x_by_tf <- split(x, x[['Cognate.TF']], drop = TRUE)
x_by_tf <- lapply(x_by_tf, function(y) y[which.min(y$pval), ])
do.call('rbind', x_by_tf)
})
# initialize feature matrix
n_tf <- motif_minimal[['Cognate.TF']] %>%
unique %>%
length
n_seq <- length(l)
pwm <- matrix(1, nrow = n_seq, ncol = n_tf)
colnames(pwm) <- (motif_minimal[['Cognate.TF']] %>% unique)
# replace :: from names of composite motifs
colnames(pwm) <- str_replace_all(colnames(pwm), '::', '_')
# fill in feature matrix
for(i in 1 : n_seq) {
pwm[i, l[[i]][['Cognate.TF']]] <- l[[i]]$pval
}
# -log10 transform
pwm <- -1 * log10(pwm)
# coerce to tib and return
tib_fimo <- as_data_frame(pwm) %>%
mutate(id = names(l))
dplyr::select(id, everything())
}
# db = 2 (default)
else {
# load metadata
tib_meta <- read_csv(motif_meta_fn) %>%
# extract tf symbol from motif id (Cognate_TF unsafe, it can be empty) and replace :: occurrences
mutate(tf_symbol = str_remove(ID, '_[0-9]*'),
tf_symbol = str_replace(tf_symbol, '::', '_')) %>%
dplyr::select(motif_id = `PWM ID`, tf_symbol)
# load fimo results
tib_fimo <- read_tsv(fimo_fn) %>%
# extract motif id, sequence id and p-value
dplyr::select(motif_id, sequence_name, pval = `p-value`)
# add tf symbol to fimo results
tib_fimo <- tib_fimo %>%
left_join(tib_meta, by = 'motif_id') %>%
# remove hits with missing motif id (composite pwms)
filter(!is.na(tf_symbol))
# select best hit for each motif and sequence
tib_fimo <- tib_fimo %>%
group_by(sequence_name, tf_symbol) %>%
dplyr::slice(which.min(pval)) %>%
ungroup()
# spread into feature matrix
tib_fimo <- tib_fimo %>%
mutate(pval = -1 * log10(pval)) %>%
dplyr::select(-motif_id) %>%
spread(key = tf_symbol, value = pval, fill = 0, drop = TRUE) %>%
# perform cosmetics on the id
mutate(id = sequence_name) %>%
dplyr::select(-c(sequence_name)) %>%
dplyr::select(id, everything())
}
return(tib_fimo)
}
```
## Data import
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# Import reference sequences
ref_seq <- ReadFasta("/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/library_design/output/mt20200619_oligo_pool_deep.fasta")
# Remove adapters from reference sequence (cause these are not in the sequencing data)
ref_seq$sequence <- gsub("CGGAGCGAACCGAGTTAG", "", ref_seq$sequence)
ref_seq$sequence <- gsub("CATCGTCGCATCCAAGAG", "", ref_seq$sequence)
# Split up in insert and barcode part
## In my case, the barcode should be the last 12 bases of the sequence
ref_seq$barcode <- gsub(".*([A-Z]{12})$", "\\1", ref_seq$sequence)
ref_seq$insert <- gsub("(.*)[A-Z]{12}$", "\\1", ref_seq$sequence)
# Import sequencing files
pDNA_seq <- read_tsv("/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/pDNA_insert_seq/processed/6185_1_pMT06_insert_counts.tsv", col_names = c("sequence", "number"))
# Split up in insert and barcode part
## In my case, the barcode should be the last 12 bases of the sequence
pDNA_seq$barcode <- gsub(".*([A-Z]{12})$", "\\1", pDNA_seq$sequence)
pDNA_seq$insert <- gsub("(.*)[A-Z]{12}$", "\\1", pDNA_seq$sequence)
# Calculate reads per million
pDNA_seq$rpm <- ave(pDNA_seq$number, FUN = function(x) x/sum(x) *1e6)
```
# Analysis
## What is the barcode distribution of mapped vs. unmapped for both TFs?
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# Match barcodes with original data
pDNA_seq_bc <- pDNA_seq %>% dplyr::select(barcode, rpm) %>% unique() %>% mutate(id = "pDNA")
# Only keep highest barcode values - a bit of cheating here
pDNA_seq_bc <- pDNA_seq_bc[order(pDNA_seq_bc$barcode, -abs(pDNA_seq_bc$rpm) ), ]
pDNA_seq_bc <- pDNA_seq_bc[ !duplicated(pDNA_seq_bc$barcode), ]
ref_seq_bc <- ref_seq %>% dplyr::select(name, barcode)
match_bc <- merge(ref_seq_bc, pDNA_seq_bc, by = "barcode", all = T)
match_bc <- match_bc[!is.na(match_bc$rpm),]
match_bc$TF <- gsub("_.*", "\\1", match_bc$name)
match_bc$match <- "true"
match_bc$match[is.na(match_bc$name)] <- "false"
match_bc$conf <- "high"
match_bc$conf[match_bc$rpm < 10] <- "low"
# Visualize
ggplot(match_bc, aes(x = TF, y = rpm)) +
geom_quasirandom() +
theme_bw() +
xlab("reporter matches to:") +
ylab("reads per million") +
theme(text = element_text(size = 14)) +
ylim(0,300)
```
## Correlate to GC content
```{r}
# Load reference file
ref_seq_2 <- seqinr::read.fasta("/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr//data/library_design/output/mt20200619_oligo_pool_deep.fasta")
# Compute gc contents
gc <- compute_GC(ref_seq_2)
# Plot gc distribution
density <- density(gc$GC.content)
plot_ly(x = ~density$x, y = ~density$y, type = 'scatter', mode = 'lines', fill = 'tozeroy') %>%
layout(xaxis = list(title = 'GC content'),
yaxis = list(title = 'Density'))
match_seq <- match_bc[!is.na(match_bc$name),]
gc <- gc %>% rownames_to_column(var = "name")
gc <- merge(gc, match_seq)
plot_ly(data = gc, x = ~GC.content, y = ~rpm, color = ifelse(gc$rpm >= 10, "black", "red")) %>%
layout(xaxis = list(title = 'GC content'),
yaxis = list(title = 'reads per million',
range=c(0,230)))
```
## Plot how many barcodes are found in pDNA data
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# Filter pDNA data - keep barcodes with at least 1 rpm.
# NOTE(review): the original comment claimed a threshold of 10 rpm, but the
# code filters at rpm >= 1 - confirm which threshold is intended (the
# density plots above draw their dashed cutoff line at 10).
pDNA_seq <- pDNA_seq[pDNA_seq$rpm >= 1,]
# Match barcodes with original data
ref_seq_bc <- ref_seq %>% dplyr::select(name, barcode)
ref_seq_bc$TF <- gsub("_.*", "\\1", ref_seq_bc$name)
ref_seq_bc <- ref_seq_bc[ref_seq_bc$TF == "Gr",]
pDNA_seq_bc <- pDNA_seq %>% dplyr::select(barcode) %>% unique() %>% mutate(id = "pDNA")
match_bc <- merge(ref_seq_bc, pDNA_seq_bc, by = "barcode", all = T)
## Identify the unmapped fraction
match_df_ref <- match_bc[!is.na(match_bc$name),]
n_match <- nrow(match_df_ref[!is.na(match_df_ref$id),])
n_nomatch <- nrow(match_df_ref[is.na(match_df_ref$id),])
# Create donut chart
data <- data.frame(
lbls=c("matched:", "unmatched:"),
count=c(n_match, n_nomatch)
)
# Compute percentages
data$fraction <- data$count / sum(data$count)
data$percentage <- data$fraction * 100
# Compute the cumulative percentages (top of each rectangle)
data$ymax <- cumsum(data$fraction)
# Compute the cumulative percentages (top of each rectangle)
data$ymax <- cumsum(data$fraction)
# Compute the bottom of each rectangle
data$ymin <- c(0, head(data$ymax, n=-1))
# Compute label position
data$labelPosition <- (data$ymax + data$ymin) / 2
# Compute a good label
data$label <- paste0(data$lbls, "\n", round(data$percentage), "%")
# Make the plot
ggplot(data, aes(ymax=ymax, ymin=ymin, xmax=4, xmin=3, fill=lbls)) +
geom_rect() +
geom_text(x=2, aes(y=labelPosition, label=label, color=lbls), size=5) + # x here controls label position (inner / outer)
scale_fill_manual(values = c("#1B998B", "#2D3047")) +
scale_color_manual(values = c("#1B998B", "#2D3047")) +
coord_polar(theta="y") +
xlim(c(0, 4)) +
theme_void() +
theme(legend.position = "none")
```
## Plot how many reads match to designed barcodes
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# Match barcodes with original data
pDNA_seq_bc <- pDNA_seq %>% dplyr::select(barcode, number) %>% unique() %>% mutate(id = "pDNA")
ref_seq_bc <- ref_seq %>% dplyr::select(name, barcode)
ref_seq_bc$TF <- gsub("_.*", "\\1", ref_seq_bc$name)
match_bc <- merge(ref_seq_bc, pDNA_seq_bc, by = "barcode", all = T)
## Identify the unmapped fraction
match_df_ref <- match_bc
match_df_ref$number[is.na(match_df_ref$number)] <- 0
match_df_ref$TF[is.na(match_df_ref$TF)] <- "no-match"
match_df_ref <- match_df_ref[match_df_ref$TF != "Trp53",]
n_match <- sum(match_df_ref$number[match_df_ref$TF == "Gr"])
n_nomatch <- sum(match_df_ref$number[match_df_ref$TF == "no-match"])
# Create donut chart
data <- data.frame(
lbls=c("matched:", "unmatched:"),
count=c(n_match, n_nomatch)
)
# Compute percentages
data$fraction <- data$count / sum(data$count)
data$percentage <- data$fraction * 100
# Compute the cumulative percentages (top of each rectangle)
data$ymax <- cumsum(data$fraction)
# Compute the cumulative percentages (top of each rectangle)
data$ymax <- cumsum(data$fraction)
# Compute the bottom of each rectangle
data$ymin <- c(0, head(data$ymax, n=-1))
# Compute label position
data$labelPosition <- (data$ymax + data$ymin) / 2
# Compute a good label
data$label <- paste0(data$lbls, "\n", round(data$percentage), "%")
# Make the plot
ggplot(data, aes(ymax=ymax, ymin=ymin, xmax=4, xmin=3, fill=lbls)) +
geom_rect() +
geom_text(x=2, aes(y=labelPosition, label=label, color=lbls), size=5) + # x here controls label position (inner / outer)
scale_fill_manual(values = c("#1B998B", "#2D3047")) +
scale_color_manual(values = c("#1B998B", "#2D3047")) +
coord_polar(theta="y") +
xlim(c(0, 4)) +
theme_void() +
theme(legend.position = "none")
```
## How many raw complete sequences match with the design?
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# Match barcodes with original data
ref_seq_seq <- ref_seq %>% dplyr::select(name, sequence)
pDNA_seq_seq <- pDNA_seq %>% dplyr::select(sequence) %>% unique() %>% mutate(id = "pDNA")
match_seq <- merge(ref_seq_seq, pDNA_seq_seq, by = "sequence", all = T)
match_seq$TF <- gsub("_.*", "\\1", match_seq$name)
match_seq$TF[is.na(match_seq$TF)] <- "no-match"
match_seq <- match_seq[match_seq$TF != "Trp53",]
## Identify the unmapped fraction
match_df_ref_seq <- match_seq[!is.na(match_seq$name),]
n_match <- nrow(match_df_ref_seq[match_df_ref_seq$TF == "Gr",])
n_nomatch <- nrow(match_df_ref_seq[match_df_ref_seq$TF == "no-match",])
# Create donut chart
data <- data.frame(
lbls=c("matched:", "unmatched:"),
count=c(n_match, n_nomatch)
)
# Compute percentages
data$fraction <- data$count / sum(data$count)
data$percentage <- data$fraction * 100
# Compute the cumulative percentages (top of each rectangle)
# (fixed: duplicated comment + statement removed; no behavior change)
data$ymax <- cumsum(data$fraction)
# Compute the bottom of each rectangle
data$ymin <- c(0, head(data$ymax, n=-1))
# Compute label position
data$labelPosition <- (data$ymax + data$ymin) / 2
# Compute a good label
data$label <- paste0(data$lbls, "\n", round(data$percentage), "%")
# Make the plot
ggplot(data, aes(ymax=ymax, ymin=ymin, xmax=4, xmin=3, fill=lbls)) +
geom_rect() +
geom_text(x=2, aes(y=labelPosition, label=label, color=lbls), size=5) + # x here controls label position (inner / outer)
scale_fill_manual(values = c("#1B998B", "#2D3047")) +
scale_color_manual(values = c("#1B998B", "#2D3047")) +
coord_polar(theta="y") +
xlim(c(0, 4)) +
theme_void() +
theme(legend.position = "none")
```
## Now we want to know the read distribution of matched/unmatched sequences
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
## Only select barcodes from design in pDNA data
pDNA_seq_insert <- pDNA_seq[pDNA_seq$barcode %in% ref_seq$barcode,] %>% dplyr::select(sequence, number)
ref_seq_insert <- ref_seq %>% dplyr::select(name, sequence)
match_insert <- merge(pDNA_seq_insert, ref_seq_insert, by = "sequence", all = T)
match_insert$TF[!is.na(match_insert$number)] <- gsub("_.*", "\\1", match_insert$name[!is.na(match_insert$number)])
ggplot(match_insert, aes(x = TF, y = number)) +
geom_quasirandom(alpha = 0.1) +
xlab("TF reporter match")+
theme_bw() +
ylim(0,1000)
## Identify the unmapped fraction
match_df_reads <- match_insert[!is.na(match_insert$number),]
match_df_reads$TF[is.na(match_df_reads$TF)] <- "no-match"
# Sum reads for matched vs. unmatched inserts.
# NOTE(review): the original first computed n_match for TF == "Trp53" and then
# immediately overwrote it with the "Gr" sum, so the Trp53 line was dead code
# and Trp53 reads were never counted as matched. The dead assignment is removed
# (behavior unchanged); confirm that excluding Trp53 here is intentional.
n_match <- sum(match_df_reads$number[match_df_reads$TF == "Gr"])
n_nomatch <- sum(match_df_reads$number[match_df_reads$TF == "no-match"])
# Create donut chart
data <- data.frame(
lbls=c("matched:", "unmatched:"),
count=c(n_match, n_nomatch)
)
# Compute percentages
data$fraction <- data$count / sum(data$count)
data$percentage <- data$fraction * 100
# Compute the cumulative percentages (top of each rectangle)
# (fixed: duplicated comment + statement removed; no behavior change)
data$ymax <- cumsum(data$fraction)
# Compute the bottom of each rectangle
data$ymin <- c(0, head(data$ymax, n=-1))
# Compute label position
data$labelPosition <- (data$ymax + data$ymin) / 2
# Compute a good label
data$label <- paste0(data$lbls, "\n", round(data$percentage), "%")
# Make the plot
ggplot(data, aes(ymax=ymax, ymin=ymin, xmax=4, xmin=3, fill=lbls)) +
geom_rect() +
geom_text(x=2, aes(y=labelPosition, label=label, color=lbls), size=5) + # x here controls label position (inner / outer)
scale_fill_manual(values = c("#1B998B", "#2D3047")) +
scale_color_manual(values = c("#1B998B", "#2D3047")) +
labs(title = "percentage of reads from unmatched inserts (only matched barcodes)") +
coord_polar(theta="y") +
xlim(c(0, 4)) +
theme_void() +
theme(legend.position = "none")
```
# Barcodes attached to wrong insert?
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# Select only matched barcodes
pDNA_seq_insert <- pDNA_seq[pDNA_seq$barcode %in% ref_seq$barcode,]
# Add bc-id to the barcodes found in pDNA
ref_seq_insert <- ref_seq %>% dplyr::select(barcode, name) %>% setnames("name", "bc-match")
pDNA_seq_insert <- merge(pDNA_seq_insert, ref_seq_insert)
# Add insert-id to the inserts found in pDNA
ref_seq_insert <- ref_seq %>% dplyr::select(insert, name) %>% setnames("name", "insert-match")
ref_seq_insert$`insert-match` <- gsub("(.*)_bc_[0-9]$", "\\1", ref_seq_insert$`insert-match`)
ref_seq_insert <- ref_seq_insert %>% unique()
pDNA_seq_insert <- merge(pDNA_seq_insert, ref_seq_insert)
# Count occurrences where the barcode matches its insert
pDNA_seq_insert$`bc-match` <- gsub("(.*)_bc_[0-9]$", "\\1", pDNA_seq_insert$`bc-match`)
for (i in 1:nrow(pDNA_seq_insert)) {
pDNA_seq_insert$match[i] <- identical(pDNA_seq_insert$`bc-match`[i], pDNA_seq_insert$`insert-match`[i])
}
pDNA_seq_insert$TF <- gsub("_.*", "\\1", pDNA_seq_insert$`insert-match`)
pDNA_seq_insert <- pDNA_seq_insert[pDNA_seq_insert$TF == "Gr",]
## Identify the unmapped fraction
n_match <- sum(pDNA_seq_insert$number[pDNA_seq_insert$match == T])
n_nomatch <- sum(pDNA_seq_insert$number[pDNA_seq_insert$match == F])
# Create donut chart
data <- data.frame(
lbls=c("correct:", "incorrect:"),
count=c(n_match, n_nomatch)
)
# Compute percentages
data$fraction <- data$count / sum(data$count)
data$percentage <- data$fraction * 100
# Compute the cumulative percentages (top of each rectangle)
# (fixed: duplicated comment + statement removed; no behavior change)
data$ymax <- cumsum(data$fraction)
# Compute the bottom of each rectangle
data$ymin <- c(0, head(data$ymax, n=-1))
# Compute label position
data$labelPosition <- (data$ymax + data$ymin) / 2
# Compute a good label
data$label <- paste0(data$lbls, "\n", round(data$percentage), "%")
# Make the plot
ggplot(data, aes(ymax=ymax, ymin=ymin, xmax=4, xmin=3, fill=lbls)) +
geom_rect() +
geom_text(x=2, aes(y=labelPosition, label=label, color=lbls), size=5) + # x here controls label position (inner / outer)
scale_fill_manual(values = c("#1B998B", "#2D3047")) +
scale_color_manual(values = c("#1B998B", "#2D3047")) +
labs(title = "percentage of reads where bc attached to wrong insert ") +
coord_polar(theta="y") +
xlim(c(0, 4)) +
theme_void() +
theme(legend.position = "none")
ggplot(pDNA_seq_insert, aes(x=match, y=number)) +
geom_quasirandom() +
labs(title = "Read count distribution",
subtitle = "bc attached to correct insert vs. attached to wrong insert")+
ylab("counts") + xlab("match vs. non-match")+
ylim(1,1000)+
theme_bw()
```
# Identify those barcodes that are attached to a wrong insert
Clearly mis-assigned barcodes can be re-assigned to their correct insert.
Barcodes that are attached to a mixed population of inserts should be excluded from any analysis in which this plasmid library was used.
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
mismatch_df <- pDNA_seq_insert[pDNA_seq_insert$match == F,] %>% dplyr::select(barcode, number) %>% unique()
mismatch_df <- ddply(mismatch_df,~barcode, summarise, number = sum(number))
match_df <- pDNA_seq_insert[pDNA_seq_insert$match == T,] %>% dplyr::select(barcode, number) %>% unique()
match_df <- ddply(match_df,~barcode, summarise, number = sum(number))
setnames(match_df, "number", "match")
setnames(mismatch_df, "number", "mismatch")
matching_df <- merge(match_df, mismatch_df, all = T)
matching_df$match[is.na(matching_df$match)] <- 0.1
matching_df$mismatch[is.na(matching_df$mismatch)] <- 0.1
# Calculate match/mismatch ratio
matching_df$ratio <- matching_df$match / matching_df$mismatch
matching_df$type <- "good matching"
matching_df$type[log2(matching_df$ratio) < 5 & log2(matching_df$ratio) >= -5] <- "unclear matching"
matching_df$type[log2(matching_df$ratio) < -5] <- "incorrect matching"
# Plot match/mismatch distribution
ggplot(matching_df,
aes(x = match, y = mismatch, colour = type), alpha = 0.5) +
geom_point(size = 1) +
xlim(0,1000) + ylim(0,1000) +
xlab("number of reads: barcodes match with insert") +
ylab("number of reads: barcodes do not match insert") +
scale_color_manual(values = c("#1B998B", "#2D3047", "#FF9B71")) +
theme_bw() + theme(legend.position = c(.8, .8))
ggplot(matching_df, aes(x = "match/mismatch ratio", y = log2(ratio), colour = type), alpha = 0.5) +
geom_quasirandom() + theme_bw() + ylim(-10,15) +
scale_color_manual(values = c("#1B998B", "#2D3047", "#FF9B71")) +
theme(legend.position = c(.8, .8))
# Fraction of barcodes with match/mismatch
n_match <- nrow(matching_df[matching_df$mismatch == 0.1,])
n_nomatch <- nrow(matching_df[matching_df$mismatch > 0.1,])
# Create donut chart
data <- data.frame(
lbls=c("matched:", "unmatched:"),
count=c(n_match, n_nomatch)
)
# Compute percentages
data$fraction <- data$count / sum(data$count)
data$percentage <- data$fraction * 100
# Compute the cumulative percentages (top of each rectangle)
# (fixed: duplicated comment + statement removed; no behavior change)
data$ymax <- cumsum(data$fraction)
# Compute the bottom of each rectangle
data$ymin <- c(0, head(data$ymax, n=-1))
# Compute label position
data$labelPosition <- (data$ymax + data$ymin) / 2
# Compute a good label
data$label <- paste0(data$lbls, "\n", round(data$percentage), "%")
# Make the plot
ggplot(data, aes(ymax=ymax, ymin=ymin, xmax=4, xmin=3, fill=lbls)) +
geom_rect() +
geom_text(x=2, aes(y=labelPosition, label=label, color=lbls), size=5) + # x here controls label position (inner / outer)
scale_fill_manual(values = c("#1B998B", "#2D3047")) +
scale_color_manual(values = c("#1B998B", "#2D3047")) +
labs(title = "percentage at least 1 bc wrongly attached to its insert") +
coord_polar(theta="y") +
xlim(c(0, 4)) +
theme_void() +
theme(legend.position = "none")
```
# Barcode re-evaluation
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# Barcodes with a mixed match of correct and incorrect insert need to be excluded from the analysis
matching_df_exclude <- matching_df[log2(matching_df$ratio) < 5 & log2(matching_df$ratio) >= -5,]
# Barcodes with incorrect matching can be assigned to new insert if only 1 insert has all barcodes
matching_df_incorrect <- matching_df[log2(matching_df$ratio) < -5,]
# Overview of the barcode attachment type distribution
# Fraction of barcodes with match/mismatch
n_total <- nrow(ref_seq)
n_match <- nrow(matching_df)
n_nomatch <- n_total - n_match
n_correct <- nrow(matching_df[log2(matching_df$ratio) >= 5,])
n_exclude <- nrow(matching_df[log2(matching_df$ratio) < 5 & log2(matching_df$ratio) >= -5,])
n_incorrect <- nrow(matching_df[log2(matching_df$ratio) < -5,])
dat <- data.frame(
x = rep("x", each = 4),
condition = c("4. not enough data","3. correct match", "2. ambiguous match", "1. incorrect match"),
size = c(n_nomatch, n_correct, n_exclude, n_incorrect),
stringsAsFactors = T
)
dat$percentage <- 100*(round(dat$size/n_total,2))
dat$pos <- c(16.5, 65, 98, 98.5)
ggplot(dat, aes(fill=condition, y=percentage, x = x)) +
geom_bar(position="stack", stat = "identity") +
theme_bw()+scale_fill_grey()+
geom_text(data=dat,
aes(x = x, y = pos,
label = paste0(percentage,"%")), size=4) +
theme(axis.title.x=element_blank(),
axis.text.x=element_blank(),
axis.ticks.x=element_blank())
```
# Investigate the mutational load of the barcodes with a good match
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
## Only select barcodes from design in pDNA data
matching_df_correct <- matching_df[matching_df$ratio > 2,]
# NOTE(review): matching_df_correct is computed above but the filter below
# uses matching_df (all barcodes) — confirm which set is intended.
pDNA_seq_insert <- pDNA_seq[pDNA_seq$barcode %in% matching_df$barcode,] %>%
  dplyr::select(barcode, insert, number) %>%  # fixed: was "dplyr::dplyr::select", a syntax error
  setnames("insert", "pDNA")
ref_seq_insert <- ref_seq %>% dplyr::select(barcode, insert) %>% setnames("insert", "ref")
match_insert <- merge(pDNA_seq_insert, ref_seq_insert, by = "barcode")
match_insert$lv <- levenshtein.distance(match_insert$pDNA, match_insert$ref)
## Read distribution vs. TF reporter length
ggplot(data = match_insert, aes(x = lv)) +
geom_histogram(color = "#2D3047") + xlab("Levenshtein distance") +
labs(title = "Sum pDNA counts vs. at levenshtein distances") +
theme_classic() +
theme(axis.text.x = element_text(angle = 45, hjust = 1, vjust = 1, size = 12),
axis.text.y = element_text(size = 12)) +
ylab("read count")
```
# Investigate mutational load of only Trp53 constructs (as they are especially complex to PCR up)
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
## Only select barcodes from design in pDNA data
matching_df_correct <- matching_df[matching_df$ratio > 2,]
pDNA_seq_insert <- pDNA_seq[pDNA_seq$barcode %in% matching_df$barcode,] %>%
dplyr::select(barcode, insert, number) %>%
setnames("insert", "pDNA")
p53_bc <- ref_seq$barcode[grep("Trp53", ref_seq$name)]
pDNA_seq_insert <- pDNA_seq_insert[pDNA_seq_insert$barcode %in% p53_bc,]
ref_seq_insert <- ref_seq %>% dplyr::selectbarcode, insert) %>% setnames("insert", "ref")
match_insert <- merge(pDNA_seq_insert, ref_seq_insert, by = "barcode")
match_insert$lv <- levenshtein.distance(match_insert$pDNA, match_insert$ref)
## Read distribution vs. TF reporter length
ggplot(data = match_insert, aes(x = lv)) +
geom_histogram(color = "#2D3047") + xlab("Levenshtein distance of designed insert vs sequenced insert") +
labs(title = "Trp53 reporters: is the sequence correct in the pDNA?") +
theme_classic() +
theme(axis.text.x = element_text(angle = 45, hjust = 1, vjust = 1, size = 12),
axis.text.y = element_text(size = 12)) +
ylab("reporter counts")
```
## Exporting data
```{r}
# Export barcodes that are attached to multiple inserts
bc_exclude <- matching_df_exclude$barcode %>% unique()
write.csv(bc_exclude, "/DATA/usr/m.trauernicht/projects/tf_activity_reporter/data/SuRE_TF_1/pDNA_seq/bc_exclude.csv")
# Export barcodes that are attached to the wrong insert
# NOTE(review): "pDNA_seq_incorrect" is not defined anywhere in this script,
# so this line will fail at runtime unless it is created elsewhere. It looks
# like pDNA_seq_insert filtered to match == FALSE (or matching_df_incorrect
# merged back to the read-level data) was intended — TODO confirm.
bc_replace <- pDNA_seq_incorrect %>% dplyr::select(barcode, `bc-match`, `insert-match`) %>% unique()
write.csv(bc_replace, "/DATA/usr/m.trauernicht/projects/tf_activity_reporter/data/SuRE_TF_1/pDNA_seq/bc_replace.csv")
```
# Session Info
```{r}
# Reproducibility footer: total runtime (StartTime is set in the setup chunk),
# working directory, run date, and the full package/version environment.
paste("Run time: ",format(Sys.time()-StartTime))
getwd()
date()
sessionInfo()
```
<file_sep>
# Optimisation of TP53 reporters by systematic dissection of synthetic TP53 response elements
[](https://zenodo.org/badge/latestdoi/298230427)
**Introduction:**
It is unclear how TP53 binding site architecture relates to TF activity. To test this systematically, a library was designed in collaboration with the Bussemaker lab. Design features of TP53 reporters like binding site copy number, spacer length, or core promoter choice are reviewed.

The first designed library contains:
- 6,000 TF reporters, each with up to 4 TF binding sites, followed by a minP or minCMV and a barcode in the transcription unit
- 5 different TP53 motifs with different predicted binding affinities
- Large range of combinatorial binding affinity
- Spacer length between binding sites varied from 4-14 bp in 1 bp steps
- two different core promoters
- three different synthetic inactive spacer sequences
- 5 barcodes per TF reporter
All TF reporters were designed using FIMO. This way, the spacings were designed to be inactive, while the TF binding sites were ensured to be intact.
**Experimental setup:**
- Nucleofection into TP53-proficient MCF7, A549, or U2OS cells, and TP53-KO MCF7 cells
- TP53 Stimulation with Nutlin-3a or vehicle control (DMSO)
- RNA isolation after 24h, followed by barcode-specific reverse transcription and sequencing library prep
- experiments performed in independent triplicates
**Activity quantification:**
- reporter activity = cDNA counts / pDNA counts
- take average across the 5 barcodes
- then take average across the 3 biological replicates
- calculate enrichment per condition tested over background reporter activity (core promoter-only reporters)
___
**Repository guide:**
- analysis folder: pre-processing of the barcode counts (barcode-preprocessing.Rmd) + analysis of barcode counts, linear modeling, figure generation (cDNA-processing.Rmd) + analysis of genomic TP53 response elements (mt20230623_genomic_motif_enrichment.Rmd)
- library_design folder: contains script that was used to generate the TP53 reporter sequences
- pDNA_insert_seq folder: contains scripts to analyze the full-length sequences of the plasmid pool that was used for transfections
- raw_data_analysis folder: contains the scripts that were used to extract and cluster the raw barcode counts
<file_sep>---
title: "Barcode count pre-processing"
author:
- name: "<NAME>"
email: "<EMAIL>"
affiliation: "Netherlands Cancer Institute - van Steensel lab"
date: '`r format(Sys.time(), "%d/%m/%Y")`'
output:
html_document:
theme: united
highlight: pygments
fig_caption: yes
code_folding: hide
df_print: kable
toc: true
toc_depth: 4
toc_float:
collapsed: false
---
---
### Aim
pMT06 (the pDNA library) was transfected into MCF7 cells that are either TP53 proficient or TP53-KO - 24 hours later mRNA was isolated and barcodes were quantified by sequencing together with pMT06 pDNA counts. This was done in three independent replicates. In this script the barcode counts will be analyzed and some quality checks will be done.
---
## Setup {.tabset}
<!-- little HTML script to do indentation of the table of contents -->
<script>
$(document).ready(function() {
$items = $('div#TOC li');
$items.each(function(idx) {
num_ul = $(this).parentsUntil('#TOC').length;
$(this).css({'text-indent': num_ul * 10, 'padding-left': 0});
});
});
</script>
```{css, echo = FALSE}
div.sourceCode {
overflow-x: hidden;
}
```
### Libraries
```{r setup, out.width= "80%", fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
knitr::opts_chunk$set(echo = TRUE)
StartTime <-Sys.time()
# 8-digit Date tag:
Date <- substr(gsub("-","",Sys.time()),1,8)
# libraries:
library(data.table)
library(plyr)
library(stringr)
library(ggpubr)
library(GGally)
library(vwr)
library(dplyr)
library(tibble)
library(plotly)
library(ggbeeswarm)
library(haven)
library(readr)
library(parallel)
library(RColorBrewer)
library(gridExtra)
library(pheatmap)
library(shiny)
library(factoextra)
library(ggbiplot)
library(ggpointdensity)
library(viridis)
library(tidyr)
library(DESeq2)
library(PCAtools)
```
### Functions
```{r out.width= "80%", fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
#Custom functions
SetFileName <- function(filename, initials) {
# Set filename with extension and initials to make filename with date integrated.
# Builds "<initials><Date><filename>", where Date is the 8-digit date tag
# defined as a global in the setup chunk.
# substitute() captures the unevaluated argument expressions, so bare symbols
# can be passed without quotes; paste0() coerces the symbols to character.
filename <- substitute(filename)
initials <- substitute(initials)
filename <- paste0(initials, Date, filename)
filename
}
# From Fede:
# ggpairs custom functions
# GGally::ggpairs upper-panel function: prints the pairwise Pearson correlation
# coefficient, with text size scaled by |r| and the panel background colored
# by a RdYlBu gradient according to where r falls relative to `boundaries`.
# NOTE(review): relies on a global `boundaries` vector (defined in the
# replicate-correlation chunk further down this document) — it must exist in
# the calling environment before ggpairs() is invoked.
corColor <- function(data, mapping, color = I("black"), sizeRange = c(1, 3), ...) {
x <- eval_data_col(data, mapping$x)
y <- eval_data_col(data, mapping$y)
# pairwise.complete.obs: NAs are dropped per pair, not listwise
r <- cor(x, y, "pairwise.complete.obs")
rt <- format(r, digits = 3)
tt <- as.character(rt)
cex <- max(sizeRange)
# helper function to calculate a useable size
percent_of_range <- function(percent, range) {
percent * diff(range) + min(range, na.rm = TRUE)
}
# plot correlation coefficient
p <- ggally_text(label = tt, mapping = aes(), xP = 0.5, yP = 0.5,
size = I(percent_of_range(cex * abs(r), sizeRange)), color = color, ...) +
theme(panel.grid.minor=element_blank(),
panel.grid.major=element_blank())
# 5-step diverging palette; panel background encodes the correlation bin
corColors <- RColorBrewer::brewer.pal(n = 7, name = "RdYlBu")[2:6]
if (r <= boundaries[1]) {
corCol <- corColors[1]
} else if (r <= boundaries[2]) {
corCol <- corColors[2]
} else if (r < boundaries[3]) {
corCol <- corColors[3]
} else if (r < boundaries[4]) {
corCol <- corColors[4]
} else {
corCol <- corColors[5]
}
p <- p +
theme(panel.background = element_rect(fill = corCol))
return(p)
}
```
### Loading data
```{r data import, fig.width=10, fig.height=7, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Load metadata file that contains all required information about the sequenced samples
metadata_df <- read_csv2("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/mt20230415_metadata.csv") %>%
dplyr::select("path" = count_path, "file" = count_file, "gcf" = gcf_number, replicate:library)
# Load in barcode counts
bc_files <- paste(metadata_df$path, metadata_df$file, sep = "")
bc_files <- lapply(bc_files, fread, header = FALSE)
names(bc_files) <- metadata_df$id
# Import barcode annotation
bc_annotation <- read.csv2("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/library_design/output/tf_df_complete.csv", header = T) %>%
dplyr::select(barcode, tf, oligo.barcode,
spacing, promoter,
position, distance, background, affinity_pos1,
affinity_pos2, affinity_pos3, affinity_pos4, seq.name) %>%
setnames("seq.name", "reporter_id")
```
### Creating count data frames
```{r cluster_compare, fig.width=10, fig.height=7, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Generate long dfs (I merge instead of rbind the data frames because I want to include barcodes with 0 counts)
#bc_df <- bind_rows(bc_list, .id = "column_label")
bc_df <- bind_rows(bc_files, .id = "sample_id") %>%
dplyr::select(sample_id, "barcode" = V1, "starcode_counts" = V2)
# Add barcode annotation to the data (also include barcodes that have 0 counts)
bc_annotation <- merge(bc_annotation, unique(bc_df$sample_id), all = T) %>%
setnames("y", "sample_id")
bc_df <- merge(bc_df, bc_annotation, all = T, by = c("barcode", "sample_id"))
# Remove non-matched barcodes
bc_df <- bc_df[!is.na(bc_df$tf),]
# Add experiment annotation to the data
metadata_selected <- metadata_df %>%
dplyr::select('sample_id' = id, gcf, replicate, condition, sample)
bc_df <- bc_df[!is.na(bc_df$sample_id),]
bc_df <- merge(bc_df, metadata_selected, all = T, by = "sample_id")
# First compute reads per million to estimate the relative counts in their respective sample
bc_df$starcode_counts[is.na(bc_df$starcode_counts)] <- 0
#bc_df <- bc_df[bc_df$starcode_counts > 0,]
for (i in unique(bc_df$sample_id)) {
bc_df$rpm[bc_df$sample_id == i] <- (bc_df$starcode_counts[bc_df$sample_id == i] + 1) / # Adds a pseudocount of 1
sum(bc_df$starcode_counts[bc_df$sample_id == i]) *1e6
}
```
---
## Read distribution
```{r read_distribution, fig.width=10, fig.height=7, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# I want to show the following:
## 1: Read distribution of matched barcodes vs. unmatched barcode
bc_df_reads <- bc_df[!is.na(bc_df$tf),]
bc_df_reads <- bc_df_reads %>%
dplyr::group_by(sample, gcf) %>%
mutate(seq_sum = sum(starcode_counts))
plot_ly(bc_df_reads %>% dplyr::select(sample_id, seq_sum) %>% unique(), x = ~sample_id, y = ~seq_sum, type = 'bar',
marker = list(color = '#D6D5C9',
line = list(color = 'rgb(8,48,107)', width = 1.5))) %>%
layout(title = "Number of matched barcode reads per sample",
yaxis = list(title = "Matched reads"),
xaxis = list(title = "sample"))
for (i in unique(bc_df$gcf)) {
p <- ggplot(bc_df[!is.na(bc_df$tf) & bc_df$gcf == i,], aes(x = tf, y = rpm)) +
geom_jitter(alpha = 0.1) +
theme_bw() +
ylim(0,2500) +
theme(axis.text.x = element_text(angle = 90, hjust = 1, vjust = 1, size = 6)) +
facet_wrap(~sample) + ggtitle(i)
print(p)
}
bc_df_2 <- bc_df[bc_df$rpm <= 250,]
bc_df_2 <- bc_df_2[bc_df_2$rpm >= 0.5,]
bc_df_2 <- bc_df_2[!is.na(bc_df_2$tf),]
for (i in unique(bc_df$gcf)) {
p <- ggplot(bc_df_2[bc_df_2$gcf == i,], aes(x = rpm)) +
geom_histogram(binwidth = 10) +
theme_bw() +
xlim(0,250)+
ylim(0,1000)+
facet_wrap(~sample)+
theme(strip.background =element_rect(fill="#D6D5C9")) +
ggtitle(i)
print(p)
}
for (i in unique(bc_df$gcf)) {
p <- ggplot(bc_df[bc_df$rpm >= 1000 & !is.na(bc_df$tf),] %>%
filter(gcf == i), aes(x = rpm)) +
geom_histogram(binwidth = 40) +
theme_bw() +
xlim(1000,2000)+
ylim(0,25)+
facet_wrap(~sample)+
theme(strip.background =element_rect(fill="#D6D5C9")) +
ggtitle(i)
print(p)
}
n_highly_expressed <- data.frame("sample_id" = unique(bc_df$sample_id),
"n_bc" = "", stringsAsFactors=FALSE)
for (i in unique(bc_df$sample_id)) {
n_highly_expressed$n_bc[n_highly_expressed$sample_id == i] <-
length(bc_df$barcode[bc_df$rpm > 1000 & bc_df$sample_id == i])
}
# Bar chart of the number of highly expressed barcodes per sample.
plot_ly(n_highly_expressed, x = ~sample_id, y = ~as.numeric(n_bc), type = 'bar',
        marker = list(color = '#D6D5C9',
                      line = list(color = 'rgb(8,48,107)', width = 1.5))) %>%
  layout(title = "Highly expressed barcodes",
         # fixed: label said "> 500 rpm" but the counting loop above uses
         # rpm > 1000 — the axis title now matches the computation
         yaxis = list(title = "Number of barcodes with > 1000 rpm"),
         xaxis = list(title = "sample"))
n_highly_expressed <- data.frame("sample_id" = unique(bc_df$sample_id),
"n_bc" = "", stringsAsFactors=FALSE)
bc_df_2 <- bc_df[grep("random", bc_df$tf),]
for (i in unique(bc_df$sample_id)) {
n_highly_expressed$n_bc[n_highly_expressed$sample_id == i] <-
length(bc_df_2$barcode[bc_df_2$rpm > 300 & bc_df_2$sample_id == i])
}
plot_ly(n_highly_expressed, x = ~sample_id, y = ~as.numeric(n_bc), type = 'bar',
marker = list(color = '#D6D5C9',
line = list(color = 'rgb(8,48,107)', width = 1.5))) %>%
layout(title = "Highly expressed barcodes from random motifs",
yaxis = list(title = "Number of barcodes with > 300 rpm"),
xaxis = list(title = "sample"))
```
*The read distribution plots show that the pDNA samples are uniformly distributed. They also show that the MCF7-TP53-WT cells have highly active TP53 reporters, while random reporters got a lot of reads in the MCF7-TP53-KO cells.*
---
## Read distribution per cutoff
```{r cutoff_read_distribution, fig.width=10, fig.height=7, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
## 2: How many barcodes can I find back at which cutoff? + What is the percentage of barcode reads that match the design at which cutoff?
## Identify the unmapped fraction
bc_fraction <- data.frame("sample_id" = unique(bc_df$sample_id),
"bcs_found" = "", stringsAsFactors=FALSE)
rpm_cutoff <- data.frame("cutoff" = seq(0.0001,10,1), stringsAsFactors=FALSE)
bc_fraction <- merge(bc_fraction, rpm_cutoff)
bc_df_filt <- bc_df[!is.na(bc_df$tf),]
for (i in unique(bc_fraction$cutoff)) {
for (j in unique(bc_df_filt$sample_id)) {
bc_fraction$bcs_found[bc_fraction$cutoff == i & bc_fraction$sample_id == j] <- nrow(bc_df_filt[bc_df_filt$rpm >= i & bc_df_filt$sample_id == j & bc_df_filt$tf == "Trp53",])/
length(unique(bc_annotation$reporter_id[bc_annotation$tf == "Trp53"])) *100
}
}
## How many reads match to designed barcodes?
bc_reads <- data.frame("sample_id" = unique(bc_df$sample_id),
"bc_reads" = "", stringsAsFactors=FALSE)
bc_reads <- merge(bc_reads, rpm_cutoff)
for (i in unique(bc_reads$cutoff)) {
for (j in unique(bc_df_filt$sample_id)) {
bc_reads$bc_reads[bc_reads$cutoff == i & bc_reads$sample_id == j] <- sum(bc_df_filt$rpm[bc_df_filt$rpm >= i & bc_df_filt$sample_id == j])/
sum(bc_df$rpm[bc_df$rpm >= i & bc_df$sample_id == j]) *100
}
}
bc_fraction <- merge(bc_fraction, bc_reads)
bc_fraction$bcs_found <- as.numeric(bc_fraction$bcs_found)
bc_fraction$bc_reads <- as.numeric(bc_fraction$bc_reads)
bc_fraction$gcf <- gsub(".*(gcf.*)", "\\1", bc_fraction$sample_id)
#c("#1B998B", "#2D3047", "#FF9B71", "#ECDD7B")
# Plot to evaluate data quality per cutoff
for (i in unique(bc_fraction$gcf)) {
p <- ggplot(bc_fraction[bc_fraction$gcf == i,]) +
geom_point(aes(x = cutoff, y = bcs_found), color = '#1B998B') +
geom_line(aes(x = cutoff, y = bcs_found), color = '#1B998B') +
geom_point(aes(x = cutoff, y = bc_reads), color = 'black') +
geom_line(aes(x = cutoff, y = bc_reads), color = 'black') +
theme_bw()+
xlab("rpm cutoff")+
ylab("total barcodes (green) and matched barcode reads (black) detected (%)")+
facet_wrap(~sample_id)+
theme(strip.background =element_rect(fill="#D6D5C9"))
print(p)
}
```
*Conclusion: All reads are matched to barcodes that come from the reporter library. Very good. All barcodes can be found back. At a cutoff of ~5 rpm, already ~10% of the barcodes are lost. I guess this is to be expected.*
---
## pDNA-cDNA correlation
```{r pDNA_cDNA_correlation, out.width= "80%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
## 3: What is the correlation of the 24 cDNA bc counts with the pDNA bc counts?
pDNA <- data.frame("pDNA_lib3" = bc_df$rpm[bc_df$sample_id == "pMT06_r2_gcf6502"],
"barcode"= bc_df$barcode[bc_df$sample_id == "pMT06_r2_gcf6502"], stringsAsFactors=FALSE)
bc_df_2 <- merge(pDNA, bc_df_2, all = T)
ggplot(bc_df_2 %>%
filter(str_detect(sample_id, "gcf6502")), aes(x = pDNA_lib3, y = rpm)) +
geom_bin2d(bins = 100)+
xlim(0,1000) +
ylim(0,3000)+
theme_bw()+
facet_wrap(~sample)
ggplot(bc_df_2 %>%
filter(str_detect(sample_id, "gcf6881")), aes(x = pDNA_lib3, y = rpm)) +
geom_bin2d(bins = 100)+
xlim(0,1000) +
ylim(0,3000)+
theme_bw()+
facet_wrap(~sample)
ggplot(bc_df_2 %>%
filter(str_detect(sample_id, "gcf7264")), aes(x = pDNA_lib3, y = rpm)) +
geom_bin2d(bins = 100)+
xlim(0,500) +
ylim(0,500)+
theme_bw()+
facet_wrap(~sample)
# How do the highly represented pDNA_barcodes deviate from the barcode reads in the cDNA data?
## I do this because the correlation of lowly represented barcodes is very noisy
pDNA_bias <- data.frame("sample_id" = unique(bc_df_2$sample_id[grep("gcf6502|gcf6881|gcf7264", bc_df_2$sample_id)]),
"cor" = "", stringsAsFactors = F)
for (i in unique(pDNA_bias$sample_id)) {
pDNA_bias$cor[pDNA_bias$sample_id == i] <- cor(bc_df_2$rpm[bc_df_2$pDNA_lib3 >= 250 & bc_df_2$sample_id == i],
bc_df_2$pDNA_lib3[bc_df_2$pDNA_lib3 >= 250 & bc_df_2$sample_id == i],
use = "pairwise.complete.obs")
}
plot_ly(pDNA_bias, x = ~sample_id, y = ~as.numeric(cor), type = 'bar',
marker = list(color = '#D6D5C9',
line = list(color = 'rgb(8,48,107)', width = 1.5))) %>%
layout(title = "pDNA-cDNA correlation of highly represented barcodes",
yaxis = list(title = "Correlation of pDNA vs. cDNA read counts (only barcodes with >250 pDNA counts)"),
xaxis = list(title = "sample"))
pDNA_bias <- pDNA_bias %>%
filter(as.numeric(cor) <= 0.25 | str_detect(sample_id, "pMT06"))
bc_df <- bc_df %>%
filter(sample_id %in% c(pDNA_bias$sample))
```
*Conclusion: None of the samples correlate with the pDNA input, which means that we are actually measuring the abundance of transcribed barcodes in the cDNA.*
---
## Replicate correlation
```{r replicate_correlation_rpm, out.width= "80%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
## 4: Correlation plots of the replicates
## Combine replicates of normalized data in 3 different columns
# Wide format: one rpm column per replicate/gcf combination, so replicate-vs-
# replicate scatter panels can be drawn per condition.
bc_df_rep <- bc_df[!is.na(bc_df$tf),] %>%
  dplyr::select(replicate, rpm, barcode, gcf, condition) %>%
  mutate(sample_id = paste(replicate, gcf, sep = "_")) %>%
  distinct(condition, barcode, rpm, sample_id) %>%
  spread(sample_id, rpm)
# Correlation matrix plot
# Random subset of 5000 barcodes keeps the scatter panels fast to render.
n <- sample(1:nrow(bc_df_rep), 5000)
# Global color thresholds consumed by the corColor() panel helper.
boundaries <- seq(from = 0.8, by = 0.05, length.out = 4)
# bc_df_rep <- bc_df_rep %>% filter(str_detect(condition, "pDNA", negate = T))
# not_all_na <- function(x) any(!is.na(x))
# One ggpairs matrix per condition: correlation coefficients in the upper
# triangle, subsampled scatter plots (with y = x guide) in the lower triangle,
# densities on the diagonal.
for (i in unique(bc_df_rep$condition)){
  plt <- ggpairs(bc_df_rep[bc_df_rep$condition == i,] %>%
                   dplyr::select(-barcode, -condition),
                 upper = list(continuous = corColor),
                 lower = list(continuous = function(data, mapping, ...) {
                   ggally_points(data = data[n, ], mapping = mapping, alpha = 0.1, size = 0.5) +
                     geom_abline(slope = 1, lty = "dashed", col = "red") +
                     theme_bw()}),
                 diag = list(continuous = function(data, mapping, ...) {
                   ggally_densityDiag(data = data, mapping = mapping, alpha = 0.3, fill = "red") +
                     theme_bw()})) +
    ggtitle(paste("Correlation Between Replicates, Condition:", i)) +
    theme(text = element_text(size = 10)) +
    xlab("rpm") +
    ylab("rpm")
  #theme_light()
  print(plt)
}
```
*The read counts highly correlate!*
---
### Annotation of the reporters
```{r out.width= "80%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
# Annotate the mutated motif of each TF
# Negative controls: reporters whose TF motif is a scrambled ("random") motif.
bc_df$neg_ctrls <- "No"
bc_df$neg_ctrls[grep("random", bc_df$tf)] <- "Yes"
# Annotate random promoter control
bc_df$rand_promoter <- "No"
bc_df$rand_promoter[grep("random", bc_df$promoter)] <- "Yes"
## Mark O'Connell controls
# NOTE(review): positions 60/70 are assumed to mark the positive-control
# reporters — confirm against the library design.
bc_df$positive_ctrl <- "No"
bc_df$positive_ctrl[bc_df$position == 60 | bc_df$position == 70] <- "Yes"
# Annotate affinity ids
## Mixed pool
# motif_id encodes the affinity class of each of the 4 binding-site positions
# (underscore-joined), used to match against the designed pools below.
bc_df$motif_id <- paste(bc_df$affinity_pos1, bc_df$affinity_pos2, bc_df$affinity_pos3, bc_df$affinity_pos4, sep = "_")
bc_df$affinity_id <- "other"
# Each CSV lists the motif combinations of one designed affinity pool; the
# numeric prefixes of the labels fix the sort order in later plots.
high_med <- read.csv("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/library_design/output/high_sequences.csv") %>%
  mutate(motif_id = paste(affinity_pos1, affinity_pos2, affinity_pos3, affinity_pos4, sep = "_"))
bc_df$affinity_id[bc_df$motif_id %in% high_med$motif_id] <- "2_high_med"
med_low <- read.csv("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/library_design/output/med_sequences.csv") %>%
  mutate(motif_id = paste(affinity_pos1, affinity_pos2, affinity_pos3, affinity_pos4, sep = "_"))
bc_df$affinity_id[bc_df$motif_id %in% med_low$motif_id] <- "4_med_low"
low_verylow <- read.csv("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/library_design/output/low_sequences.csv") %>%
  mutate(motif_id = paste(affinity_pos1, affinity_pos2, affinity_pos3, affinity_pos4, sep = "_"))
bc_df$affinity_id[bc_df$motif_id %in% low_verylow$motif_id] <- "6_low_very-low"
verylow_zero <- read.csv("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/library_design/output/verylow_sequences.csv") %>%
  mutate(motif_id = paste(affinity_pos1, affinity_pos2, affinity_pos3, affinity_pos4, sep = "_"))
bc_df$affinity_id[bc_df$motif_id %in% verylow_zero$motif_id] <- "8_very-low_null"
## 4x identical copy reporters
# These overwrite the mixed-pool labels assigned above (order matters).
bc_df$affinity_id[bc_df$affinity_pos1 == 0 & bc_df$affinity_pos2 == 0 & bc_df$affinity_pos3 == 0 & bc_df$affinity_pos4 == 0] <- "1_high_only"
bc_df$affinity_id[bc_df$affinity_pos1 == 1 & bc_df$affinity_pos2 == 1 & bc_df$affinity_pos3 == 1 & bc_df$affinity_pos4 == 1] <- "3_med_only"
bc_df$affinity_id[bc_df$affinity_pos1 == 2 & bc_df$affinity_pos2 == 2 & bc_df$affinity_pos3 == 2 & bc_df$affinity_pos4 == 2] <- "5_low_only"
bc_df$affinity_id[bc_df$affinity_pos1 == 3 & bc_df$affinity_pos2 == 3 & bc_df$affinity_pos3 == 3 & bc_df$affinity_pos4 == 3] <- "7_very-low_only"
bc_df$affinity_id[bc_df$affinity_pos1 == 4 & bc_df$affinity_pos2 == 4 & bc_df$affinity_pos3 == 4 & bc_df$affinity_pos4 == 4] <- "9_null_only"
## Other mixed pools
# Position of the two high-affinity (class 0) sites: start, middle, or end.
bc_df$affinity_id[bc_df$affinity_pos1 == 0 & bc_df$affinity_pos2 == 0 & bc_df$affinity_pos3 != 0 & bc_df$affinity_pos3 != 1 &
                    bc_df$affinity_pos4 != 0 & bc_df$affinity_pos4 != 1] <- "10_high_start"
bc_df$affinity_id[bc_df$affinity_pos1 == 0 & bc_df$affinity_pos4 == 0 & bc_df$affinity_pos2 != 0 & bc_df$affinity_pos2 != 1 &
                    bc_df$affinity_pos3 != 0 & bc_df$affinity_pos3 != 1] <- "11_high_mid"
bc_df$affinity_id[bc_df$affinity_pos1 != 0 & bc_df$affinity_pos1 != 1 & bc_df$affinity_pos2 != 0 & bc_df$affinity_pos2 != 1 &
                    bc_df$affinity_pos3 == 0 & bc_df$affinity_pos4 == 0] <- "12_high_end"
### Add affinity values for each reporter
# Predicted summed P53 affinity per reporter, from two score tables
# (one keyed on the shortened id, one on the complete id).
reporter_affinity_p53 <- read_tsv("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/parameter_files/p53scores.tsv") %>%
  dplyr::select('reporter_id_2' = Name, SumAffinity)
reporter_affinity_p53_complete <-
  read_tsv("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/parameter_files/p53scores_complete.tsv") %>%
  dplyr::select('reporter_id' = Name, 'reporter_affinity' = SumAffinity)
# reporter_id_2: reporter id with the middle field and the "_bc_<digit>"
# barcode suffix removed, to match the ids used in the first score table.
bc_df <- bc_df %>%
  mutate(reporter_id_2 = gsub("(^.*_).*?_(p_.*)","\\1\\2", reporter_id),
         reporter_id_2 = gsub("_bc_[0-9]{1}$", "", reporter_id_2))
bc_df <- merge(bc_df, reporter_affinity_p53, all = T, by = "reporter_id_2")
bc_df <- merge(bc_df, reporter_affinity_p53_complete, all = T, by = "reporter_id")
# Add affinity parameters
## Activity vs. cumulative predicted affinity
affinity_df <- read.csv2("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/library_design/mt20201123_affinity.csv", header = T, stringsAsFactors = F)
# id 4 encodes the "null" binding site -> zero affinity by definition.
affinity_df$affinity[affinity_df$id == 4] <- 0
# Harmonize TF naming between the affinity table and bc_df.
affinity_df$TF[affinity_df$TF == "Trp53"] <- "P53"
affinity_df$TF[affinity_df$TF == "Gr"] <- "GR"
bc_df$tf <- gsub("Trp53", "P53", bc_df$tf)
bc_df$reporter_id <- gsub("Trp53", "P53", bc_df$reporter_id)
bc_df$tf <- gsub("Gr", "GR", bc_df$tf)
bc_df$reporter_id <- gsub("Gr", "GR", bc_df$reporter_id)
### Complicated way of adding cum_affinity information to the df
# cum_affinity = sum of the per-position affinities looked up in affinity_df,
# computed per TF. NOTE(review): this nested-loop subsetting is O(#classes^4)
# over the full data frame; a join on (pos1..pos4, tf) would be equivalent and
# far faster — left as-is to preserve exact behavior.
for (i in unique(bc_df$affinity_pos1)) {
  for (j in unique(bc_df$affinity_pos2)) {
    for (k in unique(bc_df$affinity_pos3)) {
      for (l in unique(bc_df$affinity_pos4)) {
        for (m in unique(affinity_df$TF)) {
          bc_df$cum_affinity[bc_df$affinity_pos1 == i & bc_df$affinity_pos2 == j & bc_df$affinity_pos3 == k &
                               bc_df$affinity_pos4 == l & bc_df$tf == m] <-
            affinity_df$affinity[affinity_df$id == i & affinity_df$TF == m] +
            affinity_df$affinity[affinity_df$id == j & affinity_df$TF == m] +
            affinity_df$affinity[affinity_df$id == k & affinity_df$TF == m] +
            affinity_df$affinity[affinity_df$id == l & affinity_df$TF == m]
        }
      }
    }
  }
}
### Also add the ddG and max_aff information
# ddG: sum of the position class codes; max_aff: weakest class present;
# cum_affinity rescaled to a percentage of the 4-site maximum.
bc_df <- bc_df %>%
  group_by(reporter_id) %>%
  mutate(ddG = affinity_pos1 + affinity_pos2 + affinity_pos3 + affinity_pos4,
         max_aff = max(affinity_pos1,affinity_pos2,affinity_pos3,affinity_pos4),
         cum_affinity = cum_affinity/4 *100,
         cum_affinity = round(cum_affinity, 2))
# Add number of non-zero affinity binding sites
# (class 4 = null site; subtract one per null position)
bc_df <- bc_df[!is.na(bc_df$sample),]
bc_df$n_sites <- 4
bc_df$n_sites[bc_df$affinity_pos1 == 4] <- bc_df$n_sites[bc_df$affinity_pos1 == 4] - 1
bc_df$n_sites[bc_df$affinity_pos2 == 4] <- bc_df$n_sites[bc_df$affinity_pos2 == 4] - 1
bc_df$n_sites[bc_df$affinity_pos3 == 4] <- bc_df$n_sites[bc_df$affinity_pos3 == 4] - 1
bc_df$n_sites[bc_df$affinity_pos4 == 4] <- bc_df$n_sites[bc_df$affinity_pos4 == 4] - 1
```
---
### Normalization of barcode counts:
Divide cDNA barcode counts by pDNA barcode counts
```{r normalization, out.width= "80%", fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Normalize data using pDNA data
## Remove all non-matching reads now
bc_df <- bc_df[!is.na(bc_df$tf),]
## Add pDNA data as separate column
### First correlate all pDNA data to ensure that all samples have a good quality
# pDNA reference: rpm of each barcode in the plasmid library (pMT06 samples).
pDNA_lib <- bc_df[grep("pMT06", bc_df$sample),] %>%
  dplyr::distinct(barcode, rpm) %>%
  setnames("rpm", "pDNA_counts_rpm", skip_absent = T)
# NOTE(review): the merge below joins by reporter_id AND barcode, but after
# distinct(barcode, rpm) pDNA_lib only has barcode + pDNA_counts_rpm —
# base::merge would error on a missing `by` column. Confirm pDNA_lib actually
# retains reporter_id when this chunk is run.
bc_df <- merge(pDNA_lib, bc_df, by = c("reporter_id", "barcode"), all = T)
## Compute activity by dividing cDNA bc counts through pDNA bc counts (since I have two totally different pDNA libraries, I need to do it for the two libraries separately)
# activity = cDNA rpm / pDNA rpm per barcode.
bc_df$activity <- 0
bc_df$activity <- bc_df$rpm / bc_df$pDNA_counts_rpm
```
---
## Characterize reporter activities
```{r tf_activity, out.width= "80%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
# Histogram - show only high activities per condition
ggplot(bc_df[bc_df$activity >= 5,], aes(x = activity)) +
  geom_histogram(binwidth = 1) +
  theme_bw() +
  xlim(5,15)+
  facet_wrap(~sample)+
  theme(strip.background =element_rect(fill="#D6D5C9"))
# Barplot - show how many active reporters there are per condition
# "Active" = cDNA/pDNA ratio of at least 4.
bc_df_2 <- bc_df %>%
  filter(activity >= 4) %>%
  group_by(sample) %>%
  mutate(active_reporters = length(unique(barcode)))
plot_ly(bc_df_2 %>%
          dplyr::select(sample, active_reporters) %>%
          unique(),
        x = ~sample, y = ~active_reporters, type = 'bar',
        marker = list(color = '#D6D5C9',
                      line = list(color = 'rgb(8,48,107)', width = 1.5))) %>%
  layout(title = "Number of highly active barcodes",
         yaxis = list(title = "Barcodes with cDNA/pDNA > 4"),
         xaxis = list(title = "Condition"))
# Barplot counting high activity barcodes from random motifs
# Sanity check: scrambled ("random") motifs should yield few active barcodes.
bc_df_2 <- bc_df %>%
  filter(activity >= 4,
         str_detect(tf, "random")) %>%
  group_by(sample_id) %>%
  mutate(active_reporters = length(unique(barcode))) %>%
  dplyr::select(sample_id, active_reporters) %>%
  unique()
plot_ly(bc_df_2, x = ~sample_id, y = ~as.numeric(active_reporters), type = 'bar',
        marker = list(color = '#D6D5C9',
                      line = list(color = 'rgb(8,48,107)', width = 1.5))) %>%
  layout(title = "Highly active barcodes from random motifs",
         yaxis = list(title = "Number of barcodes with activity > 4"),
         xaxis = list(title = "sample_id"))
#bc_df <- bc_df[-grep("MCF7_KO_DMSO_r1_gcf6412", bc_df$sample),]
# Correlation activities vs. pDNA data
# Activity should be independent of pDNA representation if normalization works.
ggplot(bc_df, aes(x = pDNA_counts_rpm, y = activity)) +
  geom_bin2d(bins = 100)+
  ylim(0,50)+
  theme_bw()+
  facet_wrap(~sample_id)
# Plot number of barcodes with activity > 3 and high pDNA count
bc_df_2 <- bc_df %>%
  filter(activity >= 3) %>%
  filter(pDNA_counts_rpm >= 200) %>%
  group_by(sample_id) %>%
  mutate(active_reporters = length(unique(barcode)))
plot_ly(bc_df_2 %>%
          dplyr::select(sample_id, active_reporters) %>%
          unique(),
        x = ~sample_id, y = ~active_reporters, type = 'bar',
        marker = list(color = '#D6D5C9',
                      line = list(color = 'rgb(8,48,107)', width = 1.5))) %>%
  layout(title = "Number of highly active barcodes",
         yaxis = list(title = "Barcodes with cDNA/pDNA > 4"),
         xaxis = list(title = "sample"))
```
---
## Filtering data
```{r out.width= "80%", fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
## Only keep data based on pDNA counts > 3
bc_df_2 <- bc_df %>%
  filter(pDNA_counts_rpm > 3)
## Remove pDNA samples (as I am not interested in their activities)
bc_df_cDNA <- bc_df_2[-grep("pMT06", bc_df_2$sample),]
## Calculate reporter activities
# Strip the 5-character barcode suffix so all barcodes of a reporter share one id.
bc_df_cDNA$reporter_id <- gsub(".{5}$", "", bc_df_cDNA$reporter_id)
# Reporter activity = median (0.5 quantile) activity across its barcodes, per sample.
bc_df_cDNA$reporter_activity <- ave(bc_df_cDNA$activity, bc_df_cDNA$reporter_id,
                                    bc_df_cDNA$sample_id, FUN =
                                      function(x) quantile(x, 0.5))
## Remove data points that are 3xSD away from 50% quantile
bc_df_cDNA$deviation <- bc_df_cDNA$activity / bc_df_cDNA$reporter_activity
# Remove reporters where I only have 2 or less reporters
# FIX: wrap ave() in as.numeric(). ave() applied to a character vector returns
# character, so the comparisons below ("<= 2", ">= 2") were lexicographic
# (e.g. "10" <= "2" is TRUE) — now they are numeric as intended.
bc_df_cDNA$n_reporters <- as.numeric(ave(bc_df_cDNA$reporter_id, bc_df_cDNA$reporter_id,
                                         bc_df_cDNA$sample_id, FUN =
                                           function(x) as.numeric(length(x))))
## Choose arbitrary cutoff to get rid of most extreme outliers
bc_df_cDNA_remove <- bc_df_cDNA[(bc_df_cDNA$deviation < 0.25 | bc_df_cDNA$deviation > 4 | bc_df_cDNA$n_reporters <= 2) & (bc_df_cDNA$reporter_activity > 2 | bc_df_cDNA$activity > 2),] %>%
  distinct(reporter_id, sample_id)
## Remove data from reporters that have 0 counts because it is likely that the plasmid didn't make it into the cells
# FIX: `paste = "_"` was passed instead of `sep = "_"`, so the ids were glued
# with spaces plus a trailing " _". Both construction sites now use sep = "_",
# so the ids still match each other.
bc_df_cDNA_remove2 <- bc_df_cDNA[bc_df_cDNA$starcode_counts == 0,] %>%
  mutate(reporter_id3 = paste(reporter_id, sample_id, sep = "_"))
bc_df_cDNA_remove2$n_reporters_0 <- as.numeric(ave(bc_df_cDNA_remove2$reporter_id,
                                                   bc_df_cDNA_remove2$reporter_id,
                                                   bc_df_cDNA_remove2$sample_id, FUN =
                                                     function(x) as.numeric(length(x))))
## Remove data from reporters where all bcs are 0 - probably those didn't make it into the cell
# FIX: keep reporter_id3 in distinct() — previously distinct(reporter_id,
# sample_id) dropped it, so the `!reporter_id3 %in% ...` filter below compared
# against NULL and was a silent no-op.
bc_df_cDNA_remove3 <- bc_df_cDNA_remove2 %>%
  filter(n_reporters_0 == n_reporters) %>%
  distinct(reporter_id3, reporter_id, sample_id)
# Reporters with a mix of zero and very high (>25) barcode counts: the zero
# barcodes are treated as dropouts and flagged for removal.
bc_df_cDNA_remove4 <- bc_df_cDNA %>%
  mutate(reporter_id3 = paste(reporter_id, sample_id, sep = "_")) %>%
  filter(reporter_id3 %in% bc_df_cDNA_remove2$reporter_id3) %>%
  filter(!reporter_id3 %in% bc_df_cDNA_remove3$reporter_id3) %>%
  mutate(max_count = ave(starcode_counts, reporter_id, sample_id, FUN = max)) %>%
  filter(max_count > 25) %>%
  filter(starcode_counts == 0) %>%
  distinct(reporter_id, sample_id)
bc_df_cDNA_remove <- rbind(bc_df_cDNA_remove, bc_df_cDNA_remove4) %>%
  distinct()
bc_df_cDNA_filt <- bc_df_cDNA %>%
  anti_join(bc_df_cDNA_remove)
# Recount surviving barcodes per reporter/sample (numeric, see FIX above).
bc_df_cDNA_filt$n_reporters <- as.numeric(ave(bc_df_cDNA_filt$reporter_id, bc_df_cDNA_filt$reporter_id,
                                              bc_df_cDNA_filt$sample_id, FUN =
                                                function(x) as.numeric(length(x))))
# Require at least 2 surviving barcodes per reporter.
bc_df_cDNA_filt <- bc_df_cDNA_filt %>%
  filter(n_reporters >= 2)
```
---
## Technical replicate correlations
```{r technical_replicate_correlations, out.width= "80%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
#bc_df_cDNA_filt <- read_tsv("../data/mt20230217_bc_df_cDNA_filt.tsv")
## Combine replicates in 5 different columns
# Wide format: one activity column per technical replicate (oligo.barcode 1-5).
bc_df_rep <- bc_df_cDNA_filt %>%
  filter(rand_promoter == "No") %>%
  dplyr::select(oligo.barcode, activity, tf, sample_id, reporter_id, pDNA_counts_rpm) %>%
  mutate(pDNA_counts_rpm = ave(pDNA_counts_rpm, reporter_id, FUN = function(x) min(x))) %>%
  unique() %>%
  spread(oligo.barcode, activity)
## Compute the correlation between two technical replicates for each pDNA cutoff, separately for each TF
cor_df <- data.frame("rpm_cutoff" = seq(0,100,1), "cor" = "", stringsAsFactors=FALSE)
cor_df <- merge(unique(bc_df_rep$tf), cor_df)
for (i in unique(cor_df$rpm_cutoff)) {
  for (j in unique(cor_df$x)) {
    x <- bc_df_rep[bc_df_rep$pDNA_counts_rpm > i & bc_df_rep$tf == j,]
    if (nrow(x) == 0) {
      cor_df$cor[cor_df$rpm_cutoff == i & cor_df$x == j] <- NA
    } else {
      cor_df$cor[cor_df$rpm_cutoff == i & cor_df$x == j] <- cor(x$`2`, x$`4`, use = "pairwise.complete.obs", method = "spearman")
    }
  }
}
ggplot(cor_df) +
  geom_point(aes(x = rpm_cutoff, y = as.numeric(cor))) +
  theme_bw() +
  ylim(0,1) +
  facet_wrap(~x)
## Compute the correlation between two technical replicates for each pDNA cutoff, for all reporters together
cor_df <- data.frame("rpm_cutoff" = seq(0,100,1), "cor" = "", stringsAsFactors=FALSE)
for (i in unique(cor_df$rpm_cutoff)) {
  x <- bc_df_rep[bc_df_rep$pDNA_counts_rpm > i,]
  if (nrow(x) == 0) {
    cor_df$cor[cor_df$rpm_cutoff == i] <- NA
  } else {
    # FIX(style): named the `use` argument — it was passed positionally, which
    # works but is inconsistent with the equivalent call in the loop above.
    cor_df$cor[cor_df$rpm_cutoff == i] <- cor(x$`2`, x$`4`, use = "pairwise.complete.obs", method = "spearman")
  }
}
ggplot(cor_df) +
  geom_point(aes(x = rpm_cutoff, y = as.numeric(cor))) +
  theme_bw() +
  ylim(0,1)
## Set a pDNA cutoff based on above plots
bc_df_cDNA_filt$noisy <- "No"
bc_df_cDNA_filt$noisy[bc_df_cDNA_filt$pDNA_counts_rpm <= 25] <- "Yes"
bc_df_rep <- bc_df_rep[bc_df_rep$pDNA_counts_rpm >= 25,]
# Correlation matrix plot
# Random subset of 5000 rows keeps the scatter panels fast to render.
n <- sample(1:nrow(bc_df_rep), 5000)
boundaries <- seq(from = 0.8, by = 0.05, length.out = 4)
plt <- ggpairs(bc_df_rep %>% dplyr::select("1", "2", "3", "4", "5"),
               upper = list(continuous = corColor),
               lower = list(continuous = function(data, mapping, ...) {
                 ggally_points(data = data[n, ], mapping = mapping, alpha = 0.1, size = 0.5) +
                   geom_abline(slope = 1, lty = "dashed", col = "red") +
                   theme_bw()}),
               diag = list(continuous = function(data, mapping, ...) {
                 ggally_densityDiag(data = data, mapping = mapping, alpha = 0.3, fill = "red") +
                   theme_bw()})) +
  # FIX: corrected typo in the displayed title ("Technial" -> "Technical").
  ggtitle("Correlation Between Technical Replicates") +
  theme(text = element_text(size = 20)) +
  xlab("Reporter activity") +
  ylab("Reporter activity")
print(plt)
```
*Data correlates very well.*
### Export data
```{r data export, out.width= "80%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
# Polish export dataframe
# Add log2-transformed activities for downstream analysis.
bc_df_cDNA_filt <- bc_df_cDNA_filt %>%
  mutate(log_activity = log2(activity),
         log_reporter_activity = log2(reporter_activity))
# Export bc_df for cDNA analysis
filename <- SetFileName("_reporter_activity_filt", "mt")
# NOTE(review): setwd() changes the working directory for the rest of the
# session (getwd() in Session Info will reflect it); consider passing the full
# path to write.csv instead.
setwd("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/gcf7264/results/")
write.csv(bc_df_cDNA_filt, file = paste(filename,".csv", sep = ""), row.names = F)
```
---
### Session Info
```{r}
# Report runtime, working directory, date and package versions for reproducibility.
paste("Run time: ",format(Sys.time()-StartTime))
getwd()
date()
sessionInfo()
```
<file_sep>---
title: "TP53 reporter library scan - detailed analysis"
author:
- name: "<NAME>"
email: "<EMAIL>"
affiliation: "Netherlands Cancer Institute - van Steensel lab"
date: '`r format(Sys.time(), "%d/%m/%Y")`'
output:
html_document:
theme: united
highlight: pygments
fig_caption: yes
code_folding: hide
df_print: kable
toc: true
toc_depth: 4
toc_float:
collapsed: false
---
---
### Introduction
~6,000 barcoded TP53 reporters were probed in MCF7 TP53WT/KO cells and stimulated with Nutlin-3a. I previously processed the raw sequencing data, quantified the pDNA data and normalized the cDNA data. In this script, a detailed dissection of the reporter activities will be carried out to understand how TP53 drives transcription and to identify the most sensitive TP53 reporters.
---
## Setup {.tabset}
<!-- little HTML script to do indentation of the table of contents -->
<script>
$(document).ready(function() {
$items = $('div#TOC li');
$items.each(function(idx) {
num_ul = $(this).parentsUntil('#TOC').length;
$(this).css({'text-indent': num_ul * 10, 'padding-left': 0});
});
});
</script>
```{css, echo = FALSE}
div.sourceCode {
  /* Hide horizontal scrollbars on code blocks in the rendered HTML. */
  overflow-x: hidden;
}
```
### Libraries
```{r setup, out.width= "80%", fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
knitr::opts_chunk$set(echo = TRUE)
# Wall-clock start, reported in the Session Info section.
StartTime <-Sys.time()
# 8-digit Date tag:
Date <- substr(gsub("-","",Sys.time()),1,8)
# libraries:
# NOTE(review): plyr is attached *after* dplyr, so plyr masks dplyr verbs such
# as mutate()/summarise(). The pipelines in this document appear to use only
# ungrouped mutate() calls, where the two behave alike — confirm no grouped
# mutate relies on dplyr semantics.
library(RColorBrewer)
library(ggplot2)
library(dplyr)
library(maditr)
library(tibble)
library(pheatmap)
library(ggpubr)
library(ggbeeswarm)
library(ggforce)
library(viridis)
library(plyr)
library(cowplot)
library(gridExtra)
library(GGally)
library(readr)
library(stringr)
library(tidyr)
library(ROCR)
library(plotly)
library(randomForest)
library(glmnet)
library(glmnetUtils)
library(jtools)
library(ggrastr)
library(scales)
```
---
### Functions
```{r out.width= "80%", fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
SetFileName <- function(filename, initials) {
  # Build a date-tagged file name: <initials><Date><filename>,
  # e.g. SetFileName("_reporter_activity", "mt") -> "mt20230414_reporter_activity".
  # Relies on the global 8-digit `Date` tag defined in the setup chunk.
  # FIX: dropped the substitute() calls — they were no-ops for the string
  # literals this function is called with in this document, but would silently
  # use a variable's *name* instead of its value if a variable were ever passed.
  paste0(initials, Date, filename)
}
# Column-bind an arbitrary number of vectors/matrices with unequal row
# counts, padding the shorter inputs with NA rows at the bottom.
cbind.fill <- function(...){
  mats <- lapply(list(...), as.matrix)
  max_rows <- max(vapply(mats, nrow, integer(1)))
  padded <- lapply(mats, function(m) {
    rbind(m, matrix(NA, max_rows - nrow(m), ncol(m)))
  })
  do.call(cbind, padded)
}
# Return the last `n` characters of each element of `x` (vectorized).
substrRight <- function(x, n){
  len <- nchar(x)
  substr(x, len - n + 1, len)
}
# Function to load PWM matrix
# Build a sequence-by-TF feature matrix of -log10 FIMO p-values (best hit per
# TF per sequence). `db` selects the metadata format: 1 = legacy CSV layout
# (kept for backward compatibility), 2 = current TSV layout (default).
# Returns a tibble with an `id` column followed by one column per TF.
get_pwm_feature_matrix <- function(motif_meta_fn, fimo_fn, db = 2) {
  # validate args
  valid_dbs <- 1:2
  if(!db %in% valid_dbs)
    stop('Invalid db (database version). Please use db=1 (maintained for backward compatibility only) or db=2')
  # db=1 is maintained for backward compatibility only
  if(db == 1) {
    # read in motif metadata
    motif_meta <- read.csv(motif_meta_fn)
    # check whether motif metadata contain essential annotations
    if(!all(c('PWM.ID', 'Cognate.TF') %in% colnames(motif_meta))) {
      message('The motif metadata file does not contain the essential columns PWM.ID and Cognate.TF')
    }
    motif_minimal <- motif_meta[, c('PWM.ID', 'Cognate.TF')]
    # load fimo output --> extract motif id, sequence id and p-value
    df <- read.table(fimo_fn)
    df <- df[, c(1, 2, 7)]
    colnames(df) <- c('PWM.ID', 'seqid', 'pval')
    # add TF id
    df <- merge(df, motif_minimal, by = 'PWM.ID')
    # group motif hits by sequence id
    l <- split(df, df[['seqid']])
    # multiple PWM and multiple hits possible. Reduce hits to one per TF, keeping best p-val only
    l <- lapply(l, function(x) {
      x_by_tf <- split(x, x[['Cognate.TF']], drop = TRUE)
      x_by_tf <- lapply(x_by_tf, function(y) y[which.min(y$pval), ])
      do.call('rbind', x_by_tf)
    })
    # initialize feature matrix
    # p-value 1 (-> 0 after -log10) for TFs without a hit
    n_tf <- motif_minimal[['Cognate.TF']] %>%
      unique %>%
      length
    n_seq <- length(l)
    pwm <- matrix(1, nrow = n_seq, ncol = n_tf)
    colnames(pwm) <- (motif_minimal[['Cognate.TF']] %>% unique)
    # replace :: from names of composite motifs
    colnames(pwm) <- str_replace_all(colnames(pwm), '::', '_')
    # fill in feature matrix
    for(i in 1 : n_seq) {
      pwm[i, l[[i]][['Cognate.TF']]] <- l[[i]]$pval
    }
    # -log10 transform
    pwm <- -1 * log10(pwm)
    # coerce to tib and return
    # FIX: the pipe between mutate() and dplyr::select() was missing, so
    # select(id, everything()) ran as a standalone call and errored (`id` not
    # found) in the db=1 path. Also replaced the deprecated as_data_frame()
    # with its documented successor as_tibble() (identical behavior).
    tib_fimo <- as_tibble(pwm) %>%
      mutate(id = names(l)) %>%
      dplyr::select(id, everything())
  }
  # db = 2 (default)
  else {
    # load metadata
    tib_meta <- read_csv(motif_meta_fn) %>%
      # extract tf symbol from motif id (Cognate_TF unsafe, it can be empty) and replace :: occurrences
      mutate(tf_symbol = str_remove(ID, '_[0-9]*'),
             tf_symbol = str_replace(tf_symbol, '::', '_')) %>%
      dplyr::select(motif_id = `PWM ID`, tf_symbol)
    # load fimo results
    tib_fimo <- read_tsv(fimo_fn) %>%
      # extract motif id, sequence id and p-value
      dplyr::select(motif_id, sequence_name, pval = `p-value`)
    # add tf symbol to fimo results
    tib_fimo <- tib_fimo %>%
      left_join(tib_meta, by = 'motif_id') %>%
      # remove hits with missing motif id (composite pwms)
      filter(!is.na(tf_symbol))
    # select best hit for each motif and sequence
    tib_fimo <- tib_fimo %>%
      dplyr::group_by(sequence_name, tf_symbol) %>%
      dplyr::slice(which.min(pval)) %>%
      ungroup()
    # spread into feature matrix
    tib_fimo <- tib_fimo %>%
      mutate(pval = -1 * log10(pval)) %>%
      dplyr::select(-motif_id) %>%
      spread(key = tf_symbol, value = pval, fill = 0, drop = TRUE) %>%
      # perform cosmetics on the id
      mutate(id = sequence_name) %>%
      dplyr::select(-c(sequence_name)) %>%
      dplyr::select(id, everything())
  }
  return(tib_fimo)
}
# From Fede:
# ggpairs custom functions
# Upper-panel helper for GGally::ggpairs: prints the pairwise-complete Pearson
# correlation of the panel, with text size scaled by |r| and a background
# color picked from the global `boundaries` thresholds (must be defined in the
# calling chunk before ggpairs is invoked).
corColor <- function(data, mapping, color = I("black"), sizeRange = c(1, 3), ...) {
  x <- eval_data_col(data, mapping$x)
  y <- eval_data_col(data, mapping$y)
  # third positional argument of cor() is `use`
  r <- cor(x, y, "pairwise.complete.obs")
  rt <- format(r, digits = 3)
  tt <- as.character(rt)
  cex <- max(sizeRange)
  # helper function to calculate a useable size
  percent_of_range <- function(percent, range) {
    percent * diff(range) + min(range, na.rm = TRUE)
  }
  # plot correlation coefficient
  p <- ggally_text(label = tt, mapping = aes(), xP = 0.5, yP = 0.5,
                   size = I(percent_of_range(cex * abs(r), sizeRange)), color = color, ...) +
    theme(panel.grid.minor=element_blank(),
          panel.grid.major=element_blank())
  corColors <- RColorBrewer::brewer.pal(n = 7, name = "RdYlBu")[2:6]
  # Map r into one of five background colors via the `boundaries` thresholds.
  # NOTE(review): the first two comparisons use <= and the last two use < —
  # only matters if r hits a boundary exactly; confirm this is intended.
  if (r <= boundaries[1]) {
    corCol <- corColors[1]
  } else if (r <= boundaries[2]) {
    corCol <- corColors[2]
  } else if (r < boundaries[3]) {
    corCol <- corColors[3]
  } else if (r < boundaries[4]) {
    corCol <- corColors[4]
  } else {
    corCol <- corColors[5]
  }
  p <- p +
    theme(panel.background = element_rect(fill = corCol))
  return(p)
}
# Custom ggplot2 themes
# NOTE(review): element_line(size = ...) is deprecated in ggplot2 >= 3.4 in
# favor of `linewidth` (still works with a warning) — confirm installed version.
theme_classic_lines <- function() {
  theme_pubr(border = T, legend = "top") +
    theme(panel.grid.major = element_line(colour = "#adb5bd", size = 0.1),
          strip.background = element_rect(fill = "#ced4da"))
}
# Same theme with 45-degree rotated x-axis labels.
theme_classic_lines_45 <- function() {
  theme_pubr(border = T, legend = "top", x.text.angle = 45) +
    theme(panel.grid.major = element_line(colour = "#adb5bd", size = 0.1),
          strip.background = element_rect(fill = "#ced4da"))
}
# Same theme with 90-degree rotated x-axis labels.
theme_classic_lines_90 <- function() {
  theme_pubr(border = T, legend = "top", x.text.angle = 90) +
    theme(panel.grid.major = element_line(colour = "#adb5bd", size = 0.1),
          strip.background = element_rect(fill = "#ced4da"))
}
theme_set(theme_classic_lines())
# ggplot wrapper with the house color/fill scales applied.
# Note: colors_diverse is defined *below*; this works because R resolves the
# name lazily, when ggplot_cust is first called.
ggplot_cust <- function(...) ggplot2::ggplot(...) +
  scale_color_manual(values = colors_diverse) +
  scale_fill_manual(values = colors_diverse)
## save favorite colors
colors_diverse <- c("#264653", "#2a9d8f", "#e9c46a", "#f4a261", "#e76f51")
colors_diverse_2 <- c("#CB997E", "#DDBEA9", "#FFE8D6", "#B7B7A4", "#A5A58D", "#6B705C")
colors_continous <- c("MCF7_KO" = "#DFE4DC", "MCF7_WT_DMSO" = "#CAD2C5", "MCF7_WT_Nutlin" = "#84A98C", "#52796F", "#354F52", "#2F3E46")
colors_continous_2 <- c("MCF7_KO" = "#DFE4DC", "MCF7_WT_DMSO" = "#CAD2C5", "MCF7_WT_Nutlin" = "#84A98C", "A549" = "#CAD2C5", "A549_Nutlin" = "#84A98C", "U2OS" = "#CAD2C5", "U2OS_Nutlin" = "#84A98C")
reds <- c("#DD6B48", "#E38569", "#E7A08A", "#EDBBAB", "#EFC9BC")
colors_promoter <- c("#F6D289", "#ECD9B1", "#FBEDD0")
```
---
### Load data
```{r out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
# Import processed bc counts from the preprocessing step
cDNA_df <- read.csv("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/gcf7264/results/mt20230414_reporter_activity_filt.csv", header = T)
# Barcodes without cDNA signal count as zero activity.
cDNA_df$activity[is.na(cDNA_df$activity)] <- 0
# Recalculate mean activities
cDNA_df <- cDNA_df %>%
  mutate(reporter_activity = ave(reporter_activity, reporter_id, sample_id, FUN = function(x) mean(x)),
         log_reporter_activity = log2(reporter_activity))
# Change names
cDNA_df$tf <- as.character(cDNA_df$tf)
cDNA_df$condition[cDNA_df$condition == "MCF7"] <- "MCF7_WT_DMSO"
cDNA_df$condition[cDNA_df$condition == "MCF7_Nutlin"] <- "MCF7_WT_Nutlin"
# The activities are unnormalized at the moment - check if the conditions are scaled correctly
ggplot_cust(cDNA_df %>%
              filter(neg_ctrls == "Yes", str_detect(tf, "53")) %>%
              dplyr::select(tf, condition, reporter_activity, reporter_id, gcf, promoter) %>%
              unique(),
            aes(x = condition, y = reporter_activity, color = tf)) +
  geom_quasirandom(dodge.width = 0.75) +
  theme(axis.text.x = element_text(angle = 90, hjust = 1, vjust = 0.5)) +
  facet_grid(promoter~gcf)
# Now I want to compute the enrichment over the inactive ones to center the data around 1 (1 = inactive)
# Background = mean activity of the P53 "random motif" negative controls,
# averaged per sample and promoter.
cDNA_df_neg <- cDNA_df %>%
  dplyr::select(tf, sample_id, reporter_activity, reporter_id, neg_ctrls, promoter) %>%
  mutate(tf = gsub("_.*", "", tf)) %>%
  filter(neg_ctrls == "Yes") %>%
  mutate(tf = gsub("(.*random[1-3]{1})_.*", "\\1", reporter_id)) %>%
  filter(tf %in% c("P53_random1", "P53_random2")) %>%
  unique() %>%
  mutate(reporter_activity = ave(reporter_activity, sample_id, promoter, FUN = mean)) %>%
  dplyr::select(-reporter_id) %>%
  unique() %>%
  dplyr::select("background_activity" = reporter_activity, sample_id, promoter) %>%
  unique()
cDNA_df <- cDNA_df %>%
  mutate(tf = gsub("_.*", "", tf))
# NOTE(review): merge() without `by` joins on all shared columns
# (sample_id and promoter here) — confirm that is the intent.
cDNA_df <- merge(cDNA_df, cDNA_df_neg)
# Center activities so that 1 = background (inactive) level.
cDNA_df <- cDNA_df %>%
  mutate(reporter_activity_norm = reporter_activity / background_activity,
         activity_norm = activity / background_activity)
# Compute means per sample and condition
cDNA_df$reporter_activity_condition <- ave(cDNA_df$reporter_activity_norm, cDNA_df$condition, cDNA_df$reporter_id, FUN = function(x) mean(x))
cDNA_df$reporter_activity_sample <- ave(cDNA_df$reporter_activity_norm, cDNA_df$sample_id, cDNA_df$reporter_id, FUN = function(x) mean(x))
# Additional normalization - this is necessary to correct for the different levels of TP53 in the MCF7-P53-WT and P53-KO cells
# Scaling factor = mean activity of the medium-activity GR control reporters,
# per condition.
cDNA_df_neg2 <- cDNA_df %>%
  dplyr::select(tf, condition, reporter_activity_condition, reporter_id, neg_ctrls) %>%
  mutate(tf = gsub("_.*", "", tf)) %>%
  filter(neg_ctrls == "Yes") %>%
  mutate(tf = gsub("(.*random[1-3]{1})_.*", "\\1", reporter_id)) %>%
  filter(tf %in% c("GR_random1", "GR_random2")) %>% ## These are control reporters with medium activity - we use them to normalize the activities
  unique() %>%
  mutate(reporter_activity_condition = ave(reporter_activity_condition, condition, FUN = mean)) %>%
  dplyr::select(-reporter_id) %>%
  unique() %>%
  dplyr::select("condition_activity" = reporter_activity_condition, condition) %>%
  unique()
cDNA_df <- merge(cDNA_df, cDNA_df_neg2, by = "condition", all = T)
cDNA_df <- cDNA_df %>%
  mutate(reporter_activity_norm = reporter_activity_norm / condition_activity,
         activity_norm = activity_norm / condition_activity)
# Compute means per sample
# (recomputed after the second normalization step above)
cDNA_df$reporter_activity_condition <- ave(cDNA_df$reporter_activity_norm, cDNA_df$condition, cDNA_df$reporter_id, FUN = function(x) mean(x))
cDNA_df$reporter_activity_sample <- ave(cDNA_df$reporter_activity_norm, cDNA_df$sample_id, cDNA_df$reporter_id, FUN = function(x) mean(x))
# Check again if the conditions are scaled correctly
ggplot_cust(cDNA_df %>%
              filter(tf == "GR", neg_ctrls == "Yes") %>%
              dplyr::select(tf, sample_id, reporter_activity_sample, reporter_id, gcf, promoter) %>%
              unique(),
            aes(x = sample_id, y = reporter_activity_sample, color = tf)) +
  geom_quasirandom(dodge.width = 0.75) +
  theme(axis.text.x = element_text(angle = 90, hjust = 1, vjust = 0.5)) +
  facet_wrap(~promoter)
ggplot_cust(cDNA_df %>%
              filter(neg_ctrls == "Yes", str_detect(tf, "53")) %>%
              dplyr::select(tf, condition, reporter_activity_sample, reporter_id, gcf, promoter) %>%
              unique(),
            aes(x = condition, y = reporter_activity_sample, color = tf)) +
  geom_quasirandom(dodge.width = 0.75) +
  theme(axis.text.x = element_text(angle = 90, hjust = 1, vjust = 0.5)) +
  facet_grid(promoter~gcf)
## Looks better
```
---
## Figure 1: Characterize P53 activities per condition
Aim: I want to characterize the reporter activity distributions in the tested conditions. Does Nutlin boost P53 reporter activity and is P53 inactive in the KO cells?
```{r out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
## Figure 1C: Binding affinity validation
# Fluorescence polarization titration; one column per binding site (BS*),
# reshaped to long format.
fp_data <- read_delim("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/mt20230425_FP_P53_r1.txt") %>%
  pivot_longer(contains("BS"), names_to = "BS", values_to = "Polarization")
# Subtract the zero-concentration baseline of each binding site.
null_data <- fp_data %>%
  filter(`Conc [μM]` == 0) %>%
  dplyr::select("null_point" = Polarization, BS)
fp_data <- merge(fp_data, null_data, all = T) %>%
  mutate(Polarization = Polarization - null_point)
# NOTE(review): max(Polarization) is taken over *all* binding sites and
# returns NA if any value is NA — confirm that is acceptable here.
ggplot(fp_data %>%
         mutate(rel_pol = Polarization / max(Polarization)) %>%
         mutate(rel_pol = ifelse(rel_pol < 0, 0, rel_pol)),
       aes(x = `Conc [μM]`, y = rel_pol, color = BS)) +
  geom_point() +
  geom_smooth(method = "glm", method.args = list(family = "binomial"), se = FALSE) +
  scale_color_manual(values = c("BS100" = "#DC6641", "BS37" = "#E99D86", "BS14" = "#EDBBAB", "BS6" = "#F7E4DE", "BS0" = "grey90")) +
  scale_x_log10()
## Figure 1G: Correlate the three replicates:
# Wide table: one column per biological replicate (gcf_replicate id); GR
# controls are flagged so they can be highlighted in the scatter plots.
compare_df <- cDNA_df %>%
  mutate(id = paste(gcf, replicate, sep = "_")) %>%
  filter((tf == "P53" & neg_ctrls == "No") | (tf == "GR" & neg_ctrls == "Yes"), promoter != "random", motif_id != "4_4_4_4") %>%
  mutate(ctrl_reporter = ifelse(tf == "GR", "Yes", "No")) %>%
  dplyr::select(reporter_id, reporter_activity_sample, condition, id, neg_ctrls, ctrl_reporter) %>%
  unique() %>%
  mutate(reporter_id = paste(reporter_id, condition, sep = "_")) %>%
  pivot_wider(names_from = id, values_from = reporter_activity_sample) %>%
  column_to_rownames("reporter_id") %>%
  mutate(condition = factor(condition, levels = c("MCF7_WT_Nutlin", "MCF7_WT_DMSO", "MCF7_KO")))
# Pairwise replicate scatter plots (rasterized for small file size); control
# reporters drawn last/in red.
p1 <- ggplot(compare_df %>%
               mutate(ctrl_reporter = factor(ctrl_reporter, levels = c("No", "Yes"))) %>%
               arrange(ctrl_reporter),
             aes(x = gcf6502_r1, y = gcf6502_r2)) +
  geom_abline(lty = 2)+
  geom_point_rast(alpha = 0.2, stroke = 0, raster.dpi=600, aes(color = ctrl_reporter)) +
  xlim(0,120) +
  ylim(0,120) +
  scale_color_manual(values = c("No" = "black", "Yes" = "red"))
p2 <- ggplot(compare_df %>%
               mutate(ctrl_reporter = factor(ctrl_reporter, levels = c("No", "Yes"))) %>%
               arrange(ctrl_reporter),
             aes(x = gcf6502_r1, y = gcf6881_r3)) +
  geom_abline(lty = 2)+
  geom_point_rast(alpha = 0.2, stroke = 0, raster.dpi=600, aes(color = ctrl_reporter)) +
  xlim(0,120) +
  ylim(0,120) +
  scale_color_manual(values = c("No" = "black", "Yes" = "red"))
p3 <-ggplot(compare_df %>%
              mutate(ctrl_reporter = factor(ctrl_reporter, levels = c("No", "Yes"))) %>%
              arrange(ctrl_reporter),
            aes(x = gcf6502_r2, y = gcf6881_r3)) +
  geom_abline(lty = 2)+
  geom_point_rast(alpha = 0.2, stroke = 0, raster.dpi=600, aes(color = ctrl_reporter)) +
  xlim(0,120) +
  ylim(0,120) +
  scale_color_manual(values = c("No" = "black", "Yes" = "red"))
plot_grid(p1, p2, p3, nrow = 1)
# Pairwise Pearson correlations of the three replicates.
cor(compare_df$gcf6502_r1, compare_df$gcf6502_r2, method = "pearson", use = "pairwise.complete.obs")
cor(compare_df$gcf6502_r1, compare_df$gcf6881_r3, method = "pearson", use = "pairwise.complete.obs")
cor(compare_df$gcf6881_r3, compare_df$gcf6502_r2, method = "pearson", use = "pairwise.complete.obs")
## Figure 1H: Reporter activity per condition, compared to negative and positive controls
# Note: `condition` and `positive_ctrl` are listed twice in the select() below;
# dplyr de-duplicates selected columns, so this is harmless.
data <- cDNA_df %>%
  filter((tf == "P53" & neg_ctrls == "No") , promoter != "random", motif_id != "4_4_4_4") %>%
  mutate(ctrl_reporter = ifelse(tf == "GR", "Yes", "No")) %>%
  dplyr::select(reporter_activity_condition, positive_ctrl, promoter, condition, neg_ctrls, affinity_id,
                tf, condition, reporter_id, positive_ctrl, ctrl_reporter, spacing) %>%
  ungroup() %>%
  unique() %>%
  mutate(condition = factor(condition, levels = c("MCF7_KO", "MCF7_WT_DMSO", "MCF7_WT_Nutlin", "A549", "A549_Nutlin", "U2OS", "U2OS_Nutlin")))
# Beeswarm of per-condition reporter activities with the median as a crossbar.
ggplot(data %>%
         filter(neg_ctrls == "No"),
       aes(y = reporter_activity_condition, x = condition, group = neg_ctrls)) +
  geom_quasirandom_rast(aes(color = condition), dodge.width = .75, stroke = 0, alpha = 1, size = 1, raster.dpi = 600, bandwidth = 0.4, width = 0.35, method = "smiley") +
  stat_summary(fun = median, fun.min = median, fun.max = median, geom = "crossbar", color = "black", width = 0.25, lwd = 0.4) +
  xlab("condition") +
  scale_color_manual(values = colors_continous_2) +
  ylab("reporter activity")
## Other plots: compare cell types and conditions
# Wide table: one column of reporter activity per condition, restricted to
# the four single-affinity reporter classes.
data_compare <- data %>%
  spread(condition, reporter_activity_condition) %>%
  filter(affinity_id %in% c("1_high_only", "3_med_only", "5_low_only", "7_very-low_only"))
# A549 (DMSO) vs MCF7 WT (DMSO), colored by minimal promoter.
ggplot(data_compare,
       aes(x = A549, y = MCF7_WT_DMSO, color = promoter)) +
  geom_abline(lty = 2) +
  geom_point() +
  scale_color_manual(values = c("minP" = "grey80", "mCMV" = colors_promoter[1]))
# Same comparison under Nutlin stimulation.
ggplot(data_compare,
       aes(x = A549_Nutlin, y = MCF7_WT_Nutlin, color = promoter)) +
  geom_abline(lty = 2) +
  geom_point() +
  scale_color_manual(values = c("minP" = "grey80", "mCMV" = colors_promoter[1]))
# Cross-cell-line comparison (U2OS vs A549), one panel per affinity class.
ggplot(data_compare,
       aes(x = U2OS, y = A549, color = promoter)) +
  geom_abline(lty = 2) +
  geom_point() +
  scale_color_manual(values = c("minP" = "grey80", "mCMV" = colors_promoter[1])) +
  facet_wrap(~affinity_id)
# Within-cell-line stimulation effect: DMSO vs Nutlin, per affinity class.
ggplot(data_compare,
       aes(x = A549, y = A549_Nutlin, color = promoter)) +
  geom_abline(lty = 2) +
  geom_point() +
  scale_color_manual(values = c("minP" = "grey80", "mCMV" = colors_promoter[1]))+
  facet_wrap(~affinity_id)
ggplot(data_compare,
       aes(x = U2OS, y = U2OS_Nutlin, color = promoter)) +
  geom_abline(lty = 2) +
  geom_point() +
  scale_color_manual(values = c("minP" = "grey80", "mCMV" = colors_promoter[1]))+
  facet_wrap(~affinity_id)
ggplot(data_compare,
       aes(x = MCF7_WT_DMSO, y = MCF7_WT_Nutlin, color = promoter)) +
  geom_abline(lty = 2) +
  geom_point() +
  scale_color_manual(values = c("minP" = "grey80", "mCMV" = colors_promoter[1]))+
  facet_wrap(~affinity_id)
# Cross-cell-line AND cross-stimulation comparison.
ggplot(data_compare,
       aes(x = MCF7_WT_DMSO, y = U2OS_Nutlin, color = promoter)) +
  geom_abline(lty = 2) +
  geom_point() +
  scale_color_manual(values = c("minP" = "grey80", "mCMV" = colors_promoter[1]))+
  facet_wrap(~affinity_id)
```
Conclusion: 1F: Replicates do correlate well. 1G: Negative controls are inactive compared to P53 reporters. P53 reporters become more active in WT cells and even more active upon Nutlin stimulation.
---
## Figure 2: Effect of affinity and binding sites + binding site positioning
Aim: How does the binding site affinity, copy number, and their respective positioning affect reporter activity?
```{r out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
## Figure 2A: activity per affinity
# Palette for the five single-affinity classes plus the continuous base colors.
colors_new <- c("1_high_only" = "#C55330", "3_med_only" = "#DD6B48", "5_low_only" = "#E7A08A", "7_very-low_only" = "#F1C8BB", "9_null_only" = "#F7E4DE", colors_continous[c(1,2,3)])
# Grouped bar chart: mean reporter activity per affinity class and condition
# (unstimulated conditions + KO), error bars = SD across background sequences.
# The two nested ave() calls first average within background, then across
# backgrounds, so the SD reflects background-to-background variation.
# NOTE(review): the regex class "[1,3,5,7,9]_" also matches a literal comma;
# harmless as long as affinity_id values never contain commas.
ggplot(cDNA_df %>%
         filter(neg_ctrls == "No", positive_ctrl == "No", spacing == 7, tf == "P53", promoter %in% c("mCMV", "minP"),
                condition %in% c("MCF7_KO", "U2OS", "A549", "MCF7_WT_DMSO"),
                str_detect(affinity_id, "[1,3,5,7,9]_"), str_detect(affinity_id, "11_high_mid", negate = T)) %>%
         mutate(condition = factor(condition, levels = c("U2OS", "A549", "MCF7_WT_DMSO", "MCF7_KO"))) %>%
         dplyr::select(tf, reporter_activity_condition, background, reporter_id, condition, affinity_id, promoter) %>%
         mutate(affinity_id = factor(affinity_id, levels = c("9_null_only", "7_very-low_only", "5_low_only", "3_med_only", "1_high_only"))) %>%
         mutate(reporter_id = gsub("_p_[0-9]{1,2}", "", reporter_id)) %>%
         unique() %>%
         mutate(reporter_activity_condition = ave(reporter_activity_condition, affinity_id, condition, background, FUN = function(x) mean(x))) %>%
         mutate(reporter_activity_condition_mean = ave(reporter_activity_condition, affinity_id, condition, FUN = function(x) mean(x))) %>%
         mutate(reporter_activity_condition_sd = ave(reporter_activity_condition, affinity_id, condition, FUN = function(x) sd(x))) %>%
         dplyr::select(-reporter_activity_condition, -reporter_id, -background) %>%
         distinct(),
       aes(x = condition, y = reporter_activity_condition_mean, fill = affinity_id, group = affinity_id)) +
  geom_errorbar(aes(ymin=reporter_activity_condition_mean-reporter_activity_condition_sd, ymax=reporter_activity_condition_mean+reporter_activity_condition_sd), width=.2, position=position_dodge(.75)) +
  geom_bar(stat = "identity", position = position_dodge(0.75), width = 0.75, color = "white")+
  xlab("") +
  ylim(0,30) +
  ylab("Reporter activity") +
  scale_fill_manual(values = colors_new) +
  scale_color_manual(values = colors_new)
# Paired t-test on per-background means: very-low vs low affinity in A549.
# Pairing is by background sequence (same order after the identical pipeline).
t_test_df <- cDNA_df %>%
  filter(neg_ctrls == "No", positive_ctrl == "No", spacing == 7, tf == "P53", promoter %in% c("mCMV", "minP"),
         condition %in% c("MCF7_KO", "U2OS", "A549", "MCF7_WT_DMSO"),
         str_detect(affinity_id, "[1,3,5,7,9]_"), str_detect(affinity_id, "11_high_mid", negate = T)) %>%
  mutate(condition = factor(condition, levels = c("U2OS", "A549", "MCF7_WT_DMSO", "MCF7_KO"))) %>%
  dplyr::select(tf, reporter_activity_condition, background, reporter_id, condition, affinity_id, promoter) %>%
  mutate(affinity_id = factor(affinity_id, levels = c("9_null_only", "7_very-low_only", "5_low_only", "3_med_only", "1_high_only"))) %>%
  mutate(reporter_id = gsub("_p_[0-9]{1,2}", "", reporter_id)) %>%
  unique() %>%
  mutate(reporter_activity_condition = ave(reporter_activity_condition, affinity_id, condition, background, FUN = function(x) mean(x))) %>%
  distinct()
t.test(t_test_df$reporter_activity_condition[t_test_df$condition == "A549" & t_test_df$affinity_id == "7_very-low_only"],
       t_test_df$reporter_activity_condition[t_test_df$condition == "A549" & t_test_df$affinity_id == "5_low_only"], paired = T)$p.value
# Same bar chart as above but for the Nutlin-stimulated conditions (+ KO).
# NOTE(review): unlike the DMSO version this pipeline does not strip the
# "_p_N" position suffix from reporter_id before unique(); the final result
# is unchanged because reporter_id is dropped before the last distinct(),
# but the two blocks are inconsistent -- consider aligning them.
ggplot(cDNA_df %>%
         filter(neg_ctrls == "No", positive_ctrl == "No", spacing == 7, tf == "P53", promoter %in% c("mCMV", "minP"),
                condition %in% c("U2OS_Nutlin", "A549_Nutlin", "MCF7_WT_Nutlin", "MCF7_KO"),
                str_detect(affinity_id, "[1,3,5,7,9]_"), str_detect(affinity_id, "11_high_mid", negate = T)) %>%
         mutate(condition = factor(condition, levels = c("U2OS_Nutlin", "A549_Nutlin", "MCF7_WT_Nutlin", "MCF7_KO"))) %>%
         dplyr::select(tf, reporter_activity_condition, background, reporter_id, condition, affinity_id, promoter) %>%
         mutate(affinity_id = factor(affinity_id, levels = c("9_null_only", "7_very-low_only", "5_low_only", "3_med_only", "1_high_only"))) %>%
         unique() %>%
         mutate(reporter_activity_condition = ave(reporter_activity_condition, affinity_id, condition, background, FUN = function(x) mean(x))) %>%
         mutate(reporter_activity_condition_mean = ave(reporter_activity_condition, affinity_id, condition, FUN = function(x) mean(x))) %>%
         mutate(reporter_activity_condition_sd = ave(reporter_activity_condition, affinity_id, condition, FUN = function(x) sd(x))) %>%
         dplyr::select(-reporter_activity_condition, -reporter_id, -background) %>%
         distinct(),
       aes(x = condition, y = reporter_activity_condition_mean, fill = affinity_id, group = affinity_id)) +
  geom_errorbar(aes(ymin=reporter_activity_condition_mean-reporter_activity_condition_sd, ymax=reporter_activity_condition_mean+reporter_activity_condition_sd), width=.2, position=position_dodge(.75)) +
  geom_bar(stat = "identity", position = position_dodge(0.75), width = 0.75, color = "white")+
  xlab("") +
  ylim(0,30) +
  ylab("Reporter activity") +
  scale_fill_manual(values = colors_new) +
  scale_color_manual(values = colors_new)
# Paired t-test (by background): very-low vs low affinity in A549 + Nutlin.
t_test_df <- cDNA_df %>%
  filter(neg_ctrls == "No", positive_ctrl == "No", spacing == 7, tf == "P53", promoter %in% c("mCMV", "minP"),
         condition %in% c("U2OS_Nutlin", "A549_Nutlin", "MCF7_WT_Nutlin", "MCF7_KO"),
         str_detect(affinity_id, "[1,3,5,7,9]_"), str_detect(affinity_id, "11_high_mid", negate = T)) %>%
  mutate(condition = factor(condition, levels = c("U2OS_Nutlin", "A549_Nutlin", "MCF7_WT_Nutlin", "MCF7_KO"))) %>%
  dplyr::select(tf, reporter_activity_condition, background, reporter_id, condition, affinity_id, promoter) %>%
  mutate(affinity_id = factor(affinity_id, levels = c("9_null_only", "7_very-low_only", "5_low_only", "3_med_only", "1_high_only"))) %>%
  mutate(reporter_id = gsub("_p_[0-9]{1,2}", "", reporter_id)) %>%
  unique() %>%
  mutate(reporter_activity_condition = ave(reporter_activity_condition, affinity_id, condition, background, FUN = function(x) mean(x))) %>%
  distinct()
t.test(t_test_df$reporter_activity_condition[t_test_df$condition == "A549_Nutlin" & t_test_df$affinity_id == "7_very-low_only"],
       t_test_df$reporter_activity_condition[t_test_df$condition == "A549_Nutlin" & t_test_df$affinity_id == "5_low_only"], paired = T)$p.value
## Figure 2B: Effect of adding binding sites
# Mean activity (bars) vs number of low-affinity binding sites in MCF7 WT,
# DMSO vs Nutlin side by side. The motif_id filter keeps the stepwise
# 4_4_4_4 -> 3_3_3_3 series (null sites progressively replaced by low sites);
# error bars = SD across background sequences.
ggplot(cDNA_df %>%
         filter(neg_ctrls == "No", positive_ctrl == "No", promoter %in% c("mCMV", "minP"),
                str_detect(condition, "MCF7_WT"), str_detect(motif_id, "4_4_4_4|4_4_4_3|4_4_3_3|4_3_3_3|3_3_3_3"),
                tf == "P53", spacing == 7) %>%
         distinct(reporter_activity_condition, n_sites, condition, background, promoter) %>%
         mutate(reporter_activity_condition = ave(reporter_activity_condition, n_sites, condition, background, FUN = function(x) mean(x))) %>%
         mutate(reporter_activity_condition_mean = ave(reporter_activity_condition, n_sites, condition, FUN = function(x) mean(x))) %>%
         mutate(reporter_activity_condition_sd = ave(reporter_activity_condition, n_sites, condition, FUN = function(x) sd(x))) %>%
         dplyr::select(-reporter_activity_condition) %>%
         distinct(),
       aes(x = n_sites, y = reporter_activity_condition_mean, fill = condition)) +
  geom_errorbar(aes(ymin=reporter_activity_condition_mean-reporter_activity_condition_sd, ymax=reporter_activity_condition_mean+reporter_activity_condition_sd), width=.2, position=position_dodge(.9)) +
  geom_bar(stat = "identity", position = position_dodge(0.8), width = 0.9, color = "white")+
  ggtitle("reporter activity vs number of binding sites")+
  xlab("number of low-affinity binding sites")+
  ylab("reporter activity (a.u.)")+
  scale_fill_manual(values = colors_continous)
# Paired t-test (by background): 1 vs 2 binding sites in MCF7 WT + Nutlin.
t_test_df <- cDNA_df %>%
  filter(neg_ctrls == "No", positive_ctrl == "No", promoter %in% c("mCMV", "minP"),
         str_detect(condition, "MCF7_WT"), str_detect(motif_id, "4_4_4_4|4_4_4_3|4_4_3_3|4_3_3_3|3_3_3_3"),
         tf == "P53", spacing == 7) %>%
  distinct(reporter_activity_condition, n_sites, condition, background, promoter) %>%
  mutate(reporter_activity_condition = ave(reporter_activity_condition, n_sites, condition, background, FUN = function(x) mean(x))) %>%
  distinct()
t.test(t_test_df$reporter_activity_condition[t_test_df$condition == "MCF7_WT_Nutlin" & t_test_df$n_sites == 1],
       t_test_df$reporter_activity_condition[t_test_df$condition == "MCF7_WT_Nutlin" & t_test_df$n_sites == 2], paired = T)$p.value
## Figure 2C: activity for different number of binding sites - the positioning effect
# Per-motif mean activity vs number of binding sites; labels mark the three
# most active motif arrangements at 1-3 sites, showing that WHERE the sites
# sit (motif_id encodes the per-position affinity) changes activity.
data <- cDNA_df %>%
  filter(neg_ctrls == "No", positive_ctrl == "No", promoter %in% c("mCMV", "minP"),
         str_detect(condition, "MCF7_WT"), str_detect(condition, "PFT", negate = T), str_detect(affinity_id, "[7-9]_"),
         tf == "P53", spacing == 7) %>%
  dplyr::select(tf, reporter_activity_condition, n_sites, motif_id, condition, affinity_id, gcf) %>%
  unique() %>%
  mutate(reporter_activity_condition = ave(reporter_activity_condition, motif_id, condition, FUN = function(x) mean(x))) %>%
  unique()
ggplot(data,
       aes(x = n_sites, y = reporter_activity_condition, color = condition)) +
  geom_point(position = position_dodge(0.75), size = 3, stroke = 0)+
  # Label only the top-3 motifs per site count; 0 and 4 sites have a single
  # possible arrangement, so they are excluded from labeling.
  geom_label(data = data %>%
               filter(n_sites != 0, n_sites != 4) %>%
               dplyr::select(reporter_activity_condition, motif_id, condition, n_sites) %>%
               arrange(desc(reporter_activity_condition)) %>%
               dplyr::group_by(n_sites) %>%
               slice_head(n=3),
             aes(label = motif_id),
             nudge_x = -.3) +
  ggtitle("reporter activity vs number of binding sites")+
  xlab("number of low-affinity binding sites")+
  ylab("reporter activity (a.u.)")+
  scale_color_manual(values = colors_continous)
## Correlation of 2A/B data across cell types
# Wide table of per-affinity mean activity per condition (mCMV only),
# used for cross-cell-type scatterplots and the heatmap below.
compare_affinities <- cDNA_df %>%
  filter(neg_ctrls == "No", promoter == "mCMV", positive_ctrl == "No", spacing == 7, tf == "P53",
         str_detect(affinity_id, "[1,3,5,7,9]_"), str_detect(affinity_id, "11_high_mid", negate = T)) %>%
  dplyr::select(tf, reporter_activity_condition, background, reporter_id, condition, affinity_id) %>%
  mutate(affinity_id = factor(affinity_id, levels = c("9_null_only", "7_very-low_only", "5_low_only", "3_med_only", "1_high_only"))) %>%
  unique() %>%
  mutate(reporter_activity_condition = ave(reporter_activity_condition, affinity_id, condition, background, FUN = function(x) mean(x))) %>%
  mutate(reporter_activity_condition_mean = ave(reporter_activity_condition, affinity_id, condition, FUN = function(x) mean(x))) %>%
  dplyr::select(-reporter_activity_condition, -reporter_id, -background) %>%
  distinct() %>%
  spread(condition, reporter_activity_condition_mean)
# Pairwise cross-cell-type scatterplots, one point per affinity class.
p1 <- ggplot(compare_affinities,
             aes(x = A549, y = MCF7_WT_DMSO, color = affinity_id)) +
  geom_abline(lty = 2) +
  geom_point(size = 3) +
  xlim(0,30) +
  ylim(0,30) +
  scale_color_manual(values = rev(reds))
p2 <- ggplot(compare_affinities,
             aes(x = U2OS, y = MCF7_WT_DMSO, color = affinity_id)) +
  geom_abline(lty = 2) +
  geom_point(size = 3) +
  xlim(0,30) +
  ylim(0,30) +
  scale_color_manual(values = rev(reds))
p3 <- ggplot(compare_affinities,
             aes(x = A549_Nutlin, y = MCF7_WT_Nutlin, color = affinity_id)) +
  geom_abline(lty = 2) +
  geom_point(size = 3) +
  xlim(0,30) +
  ylim(0,30) +
  scale_color_manual(values = rev(reds))
p4 <- ggplot(compare_affinities,
             aes(x = U2OS_Nutlin, y = MCF7_WT_Nutlin, color = affinity_id)) +
  geom_abline(lty = 2) +
  geom_point(size = 3) +
  xlim(0,30) +
  ylim(0,30) +
  scale_color_manual(values = rev(reds))
plot_grid(p1, p2, p3, p4, nrow = 2)
# Greyscale heatmap of the same matrix (conditions x affinity classes);
# MCF7_KO is dropped, rows/cols kept in data order (no clustering).
colors <- brewer.pal(3, "Greys")
pal <- colorRampPalette(colors)
pheatmap(compare_affinities %>%
           dplyr::select(-tf, - MCF7_KO) %>%
           column_to_rownames("affinity_id") %>%
           t(),
         color = pal(100),
         border_color = NA,
         cellwidth = 10,
         cluster_cols = F,
         cluster_rows = F,
         cellheight = 10)
## Figure 2D: Correlation of 2C data across cell types
# Per-motif mean activity in MCF7 WT DMSO vs A549; point size/shade encodes
# the number of binding sites. A linear fit shows the cross-cell-type agreement.
positioning_compare <- cDNA_df %>%
  filter(neg_ctrls == "No", positive_ctrl == "No", promoter == "mCMV", n_sites != 0,
         condition %in% c("MCF7_WT_DMSO", "A549"), str_detect(condition, "PFT", negate = T), str_detect(affinity_id, "[7-9]_"),
         tf == "P53", spacing == 7) %>%
  dplyr::select(tf, reporter_activity_condition, n_sites, motif_id, condition, affinity_id) %>%
  unique() %>%
  mutate(reporter_activity_condition = ave(reporter_activity_condition, motif_id, condition, FUN = function(x) mean(x))) %>%
  unique()
# One column per condition for the scatterplot.
positioning_compare_long <- positioning_compare%>%
  spread(condition, reporter_activity_condition)
ggplot(positioning_compare_long,
       aes(x = A549, y = MCF7_WT_DMSO)) +
  geom_abline(lty = 2) +
  geom_smooth(method = "lm", color = "black", alpha = .2) +
  geom_point(aes(size = n_sites, color = as.factor(n_sites))) +
  scale_size_continuous(range = c(2,5)) +
  scale_color_manual(values = c(`1` = "grey80", `2` = "grey60", `3` = "grey40", `4` = "black"))
```
Conclusion: BS006 is the most responsive to Nutlin-3a. Addition of binding sites is super-additive. Positioning of binding sites matters - putting them directly next to each other is inhibitory, and putting them close to the TSS leads to higher activity.
---
## Figure 3: The effect of the spacer length.
Aim: Show how the spacer length between adjacent binding sites affects reporter activity.
```{r out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
## Figure 3A: activity per spacing at different affinities
# Mean activity per spacer length (0-10 bp) for each affinity class in
# MCF7 WT, DMSO vs Nutlin; smoothed curve highlights the periodic pattern.
# position == 0 restricts to reporters at the default TSS-proximal position.
ggplot(cDNA_df %>%
         filter(neg_ctrls == "No", promoter == "mCMV", positive_ctrl == "No", str_detect(condition, "MCF7_WT"),
                str_detect(affinity_id, "[1,3,5,7]{1}_"), str_detect(affinity_id, "11_high_mid", negate = T),
                position == 0, tf == "P53") %>%
         distinct(tf, reporter_activity_condition, reporter_id, spacing, condition, affinity_id) %>%
         mutate(reporter_activity_spacing = ave(reporter_activity_condition, affinity_id, condition, spacing, FUN = mean)) %>%
         unique(),
       aes(x = spacing, y = reporter_activity_spacing, color = condition, fill = condition)) +
  geom_point() +
  geom_smooth()+
  #theme_pubr(border = T) +
  ggtitle("reporter activity per spacing")+
  xlab("spacing length between motifs (bp)")+
  ylab("reporter activity (a.u.)")+
  scale_color_manual(values = colors_continous)+
  scale_fill_manual(values = colors_continous)+
  facet_wrap(~affinity_id, nrow = 2) +
  scale_x_continuous(breaks = seq(0,10,1))
## Figure 3B: activity as a function of spacer length per minimal promoter and spacer sequence
# Low-affinity (class 5) reporters only. U2OS was measured with mCMV only,
# so a synthetic "U2OS_mCMV" promoter level is created to give U2OS its own
# facet column next to the MCF7 minP/mCMV columns; rows facet by background.
ggplot(cDNA_df %>%
         filter(neg_ctrls == "No", promoter != "random", positive_ctrl == "No",
                str_detect(condition, "MCF7|U2OS"),
                str_detect(affinity_id, "[5]{1}_"), str_detect(affinity_id, "11_high_mid", negate = T),
                position == 0, tf == "P53") %>%
         dplyr::select(tf, reporter_activity_condition, reporter_id, spacing, condition, affinity_id, promoter, background) %>%
         unique() %>%
         mutate(reporter_activity_spacing = ave(reporter_activity_condition, affinity_id, promoter, background, condition, spacing, FUN = function(x) mean(x))) %>%
         mutate(promoter2 = ifelse(condition %in% c("U2OS", "U2OS_Nutlin"), "U2OS_mCMV", promoter)) %>%
         filter((promoter2 == "U2OS_mCMV" & promoter == "mCMV") | promoter2 != "U2OS_mCMV") %>%
         mutate(promoter = promoter2) %>%
         mutate(promoter = factor(promoter, levels = c("minP", "mCMV", "U2OS_mCMV"))) %>%
         unique(),
       aes(x = spacing, y = reporter_activity_spacing, color = condition, fill = condition)) +
  geom_point() +
  geom_smooth()+
  #theme_pubr(border = T) +
  ggtitle("reporter activity per spacing")+
  xlab("spacing length between motifs (bp)")+
  ylab("reporter activity (a.u.)")+
  scale_color_manual(values = c("U2OS_Nutlin" = "#84A98C", "U2OS" = "#CAD2C5", "MCF7_WT_Nutlin" = "#84A98C", "MCF7_WT_DMSO" = "#CAD2C5", "MCF7_KO" = "#DFE4DC"))+
  scale_fill_manual(values = c("U2OS_Nutlin" = "#84A98C", "U2OS" = "#CAD2C5", "MCF7_WT_Nutlin" = "#84A98C", "MCF7_WT_DMSO" = "#CAD2C5", "MCF7_KO" = "#DFE4DC"))+
  facet_grid(background~promoter) +
  scale_x_continuous(breaks = seq(0,10,1))
# Same spacing curves for A549 and U2OS, one facet row per cell line,
# averaged over promoters/backgrounds (note: coarser grouping than above).
ggplot(cDNA_df %>%
         filter(neg_ctrls == "No", promoter != "random", positive_ctrl == "No",
                str_detect(condition, "A549|U2OS"),
                str_detect(affinity_id, "[5]{1}_"), str_detect(affinity_id, "11_high_mid", negate = T),
                position == 0, tf == "P53") %>%
         dplyr::select(tf, reporter_activity_condition, spacing, condition, affinity_id) %>%
         unique() %>%
         mutate(reporter_activity_spacing = ave(reporter_activity_condition, affinity_id, condition, spacing, FUN = function(x) mean(x))) %>%
         dplyr::select(-reporter_activity_condition) %>%
         unique() %>%
         mutate(cell = gsub("_.*", "", condition)),
       aes(x = spacing, y = reporter_activity_spacing, color = condition, fill = condition)) +
  geom_point() +
  geom_smooth()+
  #theme_pubr(border = T) +
  ggtitle("reporter activity per spacing")+
  xlab("spacing length between motifs (bp)")+
  ylab("reporter activity (a.u.)")+
  scale_color_manual(values = c("U2OS_Nutlin" = "#84A98C", "U2OS" = "#CAD2C5", "A549_Nutlin" = "#84A98C", "A549" = "#CAD2C5"))+
  scale_fill_manual(values = c("U2OS_Nutlin" = "#84A98C", "U2OS" = "#CAD2C5", "A549_Nutlin" = "#84A98C", "A549" = "#CAD2C5"))+
  facet_wrap(~cell, ncol = 1) +
  scale_x_continuous(breaks = seq(0,10,1))
## Correlation of 3B data across cell types
# A549 spacing curves per promoter and background (class-5 affinity only).
ggplot(cDNA_df %>%
         filter(neg_ctrls == "No", promoter != "random", positive_ctrl == "No",
                str_detect(condition, "A549"),
                str_detect(affinity_id, "[5]{1}_"), str_detect(affinity_id, "11_high_mid", negate = T),
                position == 0, tf == "P53") %>%
         dplyr::select(tf, reporter_activity_condition, reporter_id, spacing, condition, affinity_id, promoter, background) %>%
         unique() %>%
         mutate(reporter_activity_spacing = ave(reporter_activity_condition, affinity_id, promoter, background, condition, spacing, FUN = function(x) mean(x))) %>%
         mutate(promoter = factor(promoter, levels = c("minP", "mCMV"))) %>%
         unique(),
       aes(x = spacing, y = reporter_activity_spacing, color = condition, fill = condition)) +
  geom_point() +
  geom_smooth()+
  #theme_pubr(border = T) +
  ggtitle("reporter activity per spacing")+
  xlab("spacing length between motifs (bp)")+
  ylab("reporter activity (a.u.)")+
  scale_color_manual(values = colors_continous)+
  scale_fill_manual(values = colors_continous)+
  facet_grid(background~promoter) +
  scale_x_continuous(breaks = seq(0,10,1))
# A549 vs MCF7_WT per-spacing means (mCMV, background 1), split into DMSO
# and Nutlin panels. Condition is decomposed into cell_type + stimulation;
# "A549" has no "_" suffix, so its stimulation is remapped to "DMSO".
# spacing_2 tags the extreme spacings (1 bp = "high", 7 bp = "low") for color.
affinity_compare_condition <- cDNA_df %>%
  filter(neg_ctrls == "No", promoter == "mCMV", positive_ctrl == "No", condition %in% c("MCF7_WT_Nutlin", "MCF7_WT_DMSO", "A549_Nutlin", "A549"),
         str_detect(affinity_id, "[5]{1}_"), str_detect(affinity_id, "11_high_mid", negate = T),
         position == 0, tf == "P53", background == 1) %>%
  mutate(cell_type = gsub("(.*)_.*", "\\1", condition)) %>%
  mutate(stimulation = gsub(".*_(.*)", "\\1", condition)) %>%
  mutate(stimulation = ifelse(stimulation == "A549", "DMSO", stimulation)) %>%
  distinct(tf, reporter_activity_condition, reporter_id, spacing, condition, affinity_id, promoter, background, cell_type, stimulation) %>%
  mutate(reporter_activity_spacing = ave(reporter_activity_condition, affinity_id, promoter, background, condition, spacing, FUN = function(x) mean(x))) %>%
  dplyr::select(-reporter_activity_condition, -condition) %>%
  mutate(promoter = factor(promoter, levels = c("minP", "mCMV"))) %>%
  unique() %>%
  spread("cell_type", "reporter_activity_spacing") %>%
  mutate(spacing_2 = ifelse(spacing == 1, "high", "normal")) %>%
  mutate(spacing_2 = ifelse(spacing == 7, "low", spacing_2))
ggplot(affinity_compare_condition,
       aes(x = A549, y = MCF7_WT)) +
  geom_abline(lty = 2) +
  geom_smooth( method = "lm", color = "black", alpha = .2) +
  geom_point(aes(color = spacing_2), size = 3) +
  scale_color_manual(values = c("high" = "#84A98C", "normal" = "black", "low" = "#CAD2C5")) +
  facet_wrap(~stimulation, ncol =1)
```
Conclusion: Spacer length influences activity periodically. Adjacent binding sites need to be 180 degrees tilted with respect to each other to achieve optimal activation.
---
## Figure 4: The effect of the minimal promoter and the spacer sequence.
Aim: Show how the P53 reporters interact with the two minimal promoters and the three spacer sequences.
```{r out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
## Figure 4A: minimal promoters background activity
# Activity of the negative-control (no-binding-site) reporters per promoter,
# i.e. the baseline each minimal promoter produces on its own. Per-reporter
# means are averaged over barcodes within each condition.
data <- cDNA_df %>%
  filter(neg_ctrls == "Yes", positive_ctrl == "No",tf == "P53") %>%
  dplyr::select(tf, reporter_activity, n_sites, motif_id, condition, reporter_id, background, promoter) %>%
  mutate(reporter_activity_condition = ave(reporter_activity, reporter_id, condition, FUN = mean)) %>%
  dplyr::select(-reporter_activity) %>%
  unique() %>%
  na.omit()
ggplot(data, aes(x = promoter, y = reporter_activity_condition)) +
  geom_quasirandom_rast(stroke = 0, alpha = 1, size = 1, raster.dpi = 600) +
  geom_boxplot(aes(fill = promoter), alpha = .8, outlier.shape = NA) +
  scale_fill_manual(values = c("#F2BE54", "#FBEDD0", "white"))+
  # Wilcoxon rank-sum test annotation comparing the promoter groups.
  stat_compare_means(method = "wilcox.test")
## Figure 4B: reporter activity distribution - compare promoters
# Activity distribution per promoter, faceted by cell type and split by
# stimulation. Condition strings are decomposed into cell_type/stimulation;
# bare "A549"/"U2OS" conditions (no "_" suffix) are remapped to "DMSO".
ggplot(cDNA_df %>%
         mutate(cell_type = gsub("(.*)_.*", "\\1", condition)) %>%
         mutate(stimulation = gsub(".*_(.*)", "\\1", condition)) %>%
         mutate(stimulation = ifelse(stimulation == "A549", "DMSO", stimulation)) %>%
         mutate(stimulation = ifelse(stimulation == "U2OS", "DMSO", stimulation)) %>%
         filter(condition != "MCF7_KO") %>%
         filter(neg_ctrls == "No",
                motif_id != "4_4_4_4", tf == "P53") %>%
         dplyr::select(condition, reporter_activity_condition, reporter_id, positive_ctrl, promoter, tf, cell_type, stimulation) %>%
         unique(),
       aes(x = stimulation, y = reporter_activity_condition, fill = promoter)) +
  geom_quasirandom_rast(dodge.width = .75, stroke = 0, alpha = 1, size = 1, raster.dpi = 600, bandwidth = 0.4, width = 0.1, method = "smiley") +
  geom_boxplot(position = position_dodge(0.75), alpha = 0.8, outlier.shape = NA)+
  xlab("") +
  ylab("reporter activity (a.u.)") +
  labs(title = "reporter activity distribution per minimal promoter") +
  scale_fill_manual(values = colors_promoter)+
  stat_compare_means(method = "wilcox.test") +
  facet_wrap(~cell_type)
## Figure 4C: impact of background sequence
# Activity per promoter split by spacer/background sequence (1-3, grey shades),
# faceted cell type x stimulation; null-affinity (class 9) reporters excluded.
ggplot(cDNA_df %>%
         mutate(cell_type = gsub("(.*)_.*", "\\1", condition)) %>%
         mutate(stimulation = gsub(".*_(.*)", "\\1", condition)) %>%
         mutate(stimulation = ifelse(stimulation == "A549", "DMSO", stimulation)) %>%
         mutate(stimulation = ifelse(stimulation == "U2OS", "DMSO", stimulation)) %>%
         filter(condition != "MCF7_KO") %>%
         filter(neg_ctrls == "No", positive_ctrl == "No", promoter != "random",
                str_detect(affinity_id, "[9]_", negate = T),
                tf == "P53") %>%
         mutate(cell_type = factor(cell_type, levels = c("MCF7_WT", "A549", "U2OS"))) %>%
         dplyr::select(tf, reporter_activity_condition, background, reporter_id, condition, affinity_id, promoter, cell_type, stimulation) %>%
         unique(), aes(x = promoter, y = reporter_activity_condition, color = factor(background))) +
  geom_quasirandom_rast(dodge.width = .75, stroke = 0, alpha = 1, size = 1, raster.dpi = 600, bandwidth = 0.4, width = 0.1) +
  geom_boxplot(position = position_dodge(.75), alpha = .7, outlier.shape = NA)+
  scale_color_manual(values = c("#000000","#666666", "#999999"))+
  labs(title = "reporter activity per background")+
  xlab("condition")+
  ylab("reporter activity")+
  facet_grid(cell_type~stimulation)+
  stat_compare_means(method = "wilcox.test")
## Figure 4D: correlate minP and mCMV reporter activities
# Same reporter measured with minP vs mCMV (spread over promoter via
# reporter_id_2, presumably the promoter-agnostic reporter key -- confirm).
# Solid line = identity; dashed line = global mean mCMV/minP ratio;
# per-background linear fits show the promoter effect is a scaling factor.
data <- cDNA_df %>%
  filter(neg_ctrls == "No", positive_ctrl == "No", promoter != "random", motif_id != "4_4_4_4", tf == "P53") %>%
  dplyr::select(tf, reporter_activity_condition, n_sites, motif_id, condition, reporter_id_2, background, promoter) %>%
  unique() %>%
  spread(promoter, reporter_activity_condition) %>%
  na.omit() %>%
  filter(condition %in% c("MCF7_WT_DMSO","MCF7_WT_Nutlin"))
ggplot(data, aes(x = minP, y = mCMV)) +
  geom_abline(lty = 1) +
  geom_abline(slope = mean(data$mCMV)/mean(data$minP), linetype = 2) +
  geom_smooth(method = "lm", aes(color = factor(background), fill = factor(background)), alpha = 0.2, lty = 2) +
  geom_point(size = 2, aes(color = factor(background)), stroke = 0, alpha = .3) +
  scale_color_manual(values = c("#000000","#666666", "#999999")) +
  scale_fill_manual(values = c("#000000","#666666", "#999999")) +
  facet_wrap(~background) +
  ylim(0,120) +
  xlim(0,120)
## Figure 4G: same minP-vs-mCMV correlation, restricted to 0 bp spacing
data <- cDNA_df %>%
  filter(neg_ctrls == "No", positive_ctrl == "No", promoter != "random", motif_id != "4_4_4_4", spacing == 0, tf == "P53") %>%
  dplyr::select(tf, reporter_activity_condition, n_sites, motif_id, condition, reporter_id_2, background, promoter) %>%
  unique()%>%
  spread(promoter, reporter_activity_condition) %>%
  na.omit() %>%
  filter(condition %in% c("MCF7_WT_DMSO","MCF7_WT_Nutlin"))
ggplot(data,
       aes(x = minP, y = mCMV)) +
  geom_abline(lty = 1) +
  geom_abline(slope = mean(data$mCMV)/mean(data$minP), linetype = 2) +
  geom_smooth(method = "lm", aes(color = factor(background), fill = factor(background)), alpha = 0.2, lty = 2) +
  geom_point(size = 2.5, aes(color = factor(background)), stroke = 0) +
  scale_color_manual(values = c("#000000","#666666", "#999999")) +
  scale_fill_manual(values = c("#000000","#666666", "#999999")) +
  facet_wrap(~background) +
  ylim(0,80) +
  xlim(0,80)
```
Conclusion: Promoter and spacer sequence influence activity linearly.
---
## Figure 5 & 6: Linear model + Selection of best reporters
Aim: Can we explain now every observation using a linear model?
```{r out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
## Figure 6A: Show that some reporters are superior to commercial reporters
# Per-reporter fold-changes (Nutlin/DMSO, WT/KO) and cross-cell-line means.
# "sensitivity" = mean Nutlin activity + mean DMSO activity; used below to
# rank reporters. na.omit() drops reporters not measured in every condition.
compare_df <- cDNA_df %>%
  filter(neg_ctrls == "No",tf == "P53",
         position %in% c(0, 60, 70), promoter != "random",
         !affinity_id %in% c("9_null_only")) %>%
  dplyr::select(reporter_id, reporter_activity_condition, condition, neg_ctrls, tf, promoter, positive_ctrl, motif_id, SumAffinity, spacing) %>%
  unique() %>%
  pivot_wider(names_from = condition, values_from = reporter_activity_condition) %>%
  mutate(dif_MCF_Nutlin = MCF7_WT_Nutlin / MCF7_WT_DMSO) %>%
  mutate(dif_KO_Nutlin = MCF7_WT_Nutlin / MCF7_KO) %>%
  mutate(dif_KO_DMSO = MCF7_WT_DMSO / MCF7_KO) %>%
  mutate(dif_A549_Nutlin = A549_Nutlin / A549) %>%
  mutate(dif_U2OS_Nutlin = U2OS_Nutlin / U2OS) %>%
  mutate(dif_Nutlin_mean = (dif_MCF_Nutlin + dif_A549_Nutlin + dif_U2OS_Nutlin) / 3) %>%
  mutate(Nutlin_mean = (MCF7_WT_Nutlin + A549_Nutlin + U2OS_Nutlin) / 3) %>%
  mutate(DMSO_mean = (MCF7_WT_DMSO + A549 + U2OS) / 3) %>%
  mutate(sensitivity = Nutlin_mean + DMSO_mean) %>%
  na.omit()
# Sanity check: Nutlin response in MCF7 vs A549 per reporter.
ggplot(compare_df,
       aes(x = dif_MCF_Nutlin, y = dif_A549_Nutlin)) +
  geom_point()
# Top 6 reporters ranked by sensitivity (mean Nutlin + mean DMSO activity),
# excluding negative controls and motif ids containing "2".
# BUG FIX: the original ended in top_n(6) with no `wt`; top_n then ranks by
# the LAST column of the tibble (reporter_id, a character) rather than by
# sensitivity, despite the preceding arrange(desc(sensitivity)).
# slice_max() makes the ranking variable explicit (ties kept, like top_n).
top_reporters <- compare_df %>%
  filter(neg_ctrls == "No", str_detect(motif_id,"2", negate = T)) %>%
  distinct(sensitivity, reporter_id) %>%
  arrange(desc(sensitivity)) %>%
  slice_max(sensitivity, n = 6)
# DMSO vs Nutlin mean activity per reporter: library reporters colored by
# mean Nutlin fold-change (capped at 5 via oob = squish), commercial
# positive controls in red. No default data -- each geom_point supplies its own.
# FIX: `ggplot(,aes(...))` relied on a missing positional argument; the
# explicit `mapping =` form is equivalent and unambiguous.
ggplot(mapping = aes(x = DMSO_mean, y = Nutlin_mean)) +
  geom_abline(lty = 2) +
  geom_point(data = compare_df %>% filter(positive_ctrl == "No"), aes(color = dif_Nutlin_mean), size = 1.5) +
  geom_point(data = compare_df %>% filter(positive_ctrl == "Yes"), color = "red", size = 1.5) +
  ylab("Reporter activity (Nutlin-3a)") +
  xlab("Reporter activity (DMSO)") +
  scale_color_gradient2(low = "#DBE6DD", high = colors_continous[3], limits = c(0,5), oob = squish)
## Figure 6B: Line-plot comparing sensitivities of my reporters to positive controls
# Hand-picked top reporters (group B = affinity-2 designs, group A =
# affinity-3 designs) compared against the commercial positive controls.
top_reporters_selected <- compare_df %>%
  filter(reporter_id %in% c("P53_mCMV_p_0_s_1_d_10_bg_1_a1_2_a2_2_a3_2_a4_2", "P53_mCMV_p_0_s_10_d_10_bg_1_a1_2_a2_2_a3_2_a4_2", "P53_mCMV_p_0_s_2_d_10_bg_1_a1_2_a2_2_a3_2_a4_2",
                            "P53_minP_p_0_s_1_d_10_bg_3_a1_3_a2_3_a3_3_a4_3", "P53_mCMV_p_0_s_10_d_10_bg_1_a1_3_a2_3_a3_3_a4_3", "P53_mCMV_p_0_s_7_d_10_bg_1_a1_3_a2_3_a3_4_a4_3"))
compare_df2 <- cDNA_df %>%
  filter(tf == "P53", motif_id != "4_4_4_4", promoter != "random") %>%
  filter(reporter_id %in% top_reporters_selected$reporter_id | positive_ctrl == "Yes") %>%
  dplyr::select(reporter_id, reporter_activity_condition, condition, neg_ctrls, tf, promoter, positive_ctrl, motif_id, SumAffinity, spacing) %>%
  unique()
# Line plot: one line per reporter across all conditions (KO -> Nutlin).
ggplot(compare_df2 %>%
         mutate(reporter_group = ifelse(reporter_id %in% c("P53_mCMV_p_0_s_1_d_10_bg_1_a1_2_a2_2_a3_2_a4_2",
                                                           "P53_mCMV_p_0_s_10_d_10_bg_1_a1_2_a2_2_a3_2_a4_2",
                                                           "P53_mCMV_p_0_s_2_d_10_bg_1_a1_2_a2_2_a3_2_a4_2"),
                                        "B", "Positive Control")) %>%
         mutate(reporter_group = ifelse(reporter_id %in% c("P53_minP_p_0_s_1_d_10_bg_3_a1_3_a2_3_a3_3_a4_3",
                                                           "P53_mCMV_p_0_s_10_d_10_bg_1_a1_3_a2_3_a3_3_a4_3",
                                                           "P53_mCMV_p_0_s_7_d_10_bg_1_a1_3_a2_3_a3_4_a4_3"),
                                        "A", reporter_group)) %>%
         mutate(condition = factor(condition, levels = c("MCF7_KO", "U2OS", "A549", "MCF7_WT_DMSO", "U2OS_Nutlin", "A549_Nutlin", "MCF7_WT_Nutlin"))),
       aes(x = condition, y = reporter_activity_condition, color = reporter_group, group = reporter_id)) +
  geom_point() +
  geom_line() +
  ylab("Reporter activity")+
  scale_color_manual(values = c("Positive Control" = "red", "A" = "#CAD2C5" , "B" = "#84A98C"))
# Bar chart of the same data: group means per condition (KO dropped),
# error bars = SD across the reporters within each group.
ggplot(compare_df2 %>%
         mutate(reporter_group = ifelse(reporter_id %in% c("P53_mCMV_p_0_s_1_d_10_bg_1_a1_2_a2_2_a3_2_a4_2",
                                                           "P53_mCMV_p_0_s_10_d_10_bg_1_a1_2_a2_2_a3_2_a4_2",
                                                           "P53_mCMV_p_0_s_2_d_10_bg_1_a1_2_a2_2_a3_2_a4_2"),
                                        "B", "Positive Control")) %>%
         mutate(reporter_group = ifelse(reporter_id %in% c("P53_minP_p_0_s_1_d_10_bg_3_a1_3_a2_3_a3_3_a4_3",
                                                           "P53_mCMV_p_0_s_10_d_10_bg_1_a1_3_a2_3_a3_3_a4_3",
                                                           "P53_mCMV_p_0_s_7_d_10_bg_1_a1_3_a2_3_a3_4_a4_3"),
                                        "A", reporter_group)) %>%
         filter(condition != "MCF7_KO") %>%
         mutate(condition = factor(condition, levels = c("U2OS", "U2OS_Nutlin", "A549", "A549_Nutlin", "MCF7_WT_DMSO", "MCF7_WT_Nutlin"))) %>%
         mutate(reporter_activity_condition_mean = ave(reporter_activity_condition, condition, reporter_group, FUN = mean)) %>%
         mutate(reporter_activity_condition_sd = ave(reporter_activity_condition, condition, reporter_group, FUN = sd)) %>%
         dplyr::select(-reporter_activity_condition) %>%
         mutate(reporter_group = factor(reporter_group, levels = c("Positive Control", "A", "B"))) %>%
         distinct(),
       aes(x = condition, y = reporter_activity_condition_mean, fill = reporter_group, group = reporter_group)) +
  geom_errorbar(aes(ymin=reporter_activity_condition_mean-reporter_activity_condition_sd, ymax=reporter_activity_condition_mean+reporter_activity_condition_sd),
                width=.2, position=position_dodge(.75)) +
  geom_bar(stat = "identity", position = position_dodge(0.75), width = 0.75, color = "white")+
  ylab("Reporter activity")+
  scale_fill_manual(values = c("Positive Control" = "red", "A" = "#84A98C" , "B" = "#CAD2C5"))
# Unpaired t-test: group A reporters vs positive controls in A549 (DMSO).
t_test_df <- compare_df2 %>%
  mutate(reporter_group = ifelse(reporter_id %in% c("P53_mCMV_p_0_s_1_d_10_bg_1_a1_2_a2_2_a3_2_a4_2",
                                                     "P53_mCMV_p_0_s_10_d_10_bg_1_a1_2_a2_2_a3_2_a4_2",
                                                     "P53_mCMV_p_0_s_2_d_10_bg_1_a1_2_a2_2_a3_2_a4_2"),
                                 "B", "Positive Control")) %>%
  mutate(reporter_group = ifelse(reporter_id %in% c("P53_minP_p_0_s_1_d_10_bg_3_a1_3_a2_3_a3_3_a4_3",
                                                     "P53_mCMV_p_0_s_10_d_10_bg_1_a1_3_a2_3_a3_3_a4_3",
                                                     "P53_mCMV_p_0_s_7_d_10_bg_1_a1_3_a2_3_a3_4_a4_3"),
                                 "A", reporter_group)) %>%
  filter(condition != "MCF7_KO") %>%
  mutate(condition = factor(condition, levels = c("U2OS", "U2OS_Nutlin", "A549", "A549_Nutlin", "MCF7_WT_DMSO", "MCF7_WT_Nutlin"))) %>%
  distinct()
t.test(t_test_df$reporter_activity_condition[t_test_df$condition == "A549" & t_test_df$reporter_group == "A"],
       t_test_df$reporter_activity_condition[t_test_df$condition == "A549" & t_test_df$reporter_group == "Positive Control"], paired = F)$p.value
## 5C: Linear model to fit most active reporters
## Subselect reporters: P53 reporters only, single-affinity designs, the six
## measured conditions; activities are averaged per reporter design, then
## collapsed into a Nutlin mean and a DMSO mean across the three cell lines.
cDNA_df_p53 <- cDNA_df %>%
filter(neg_ctrls == "No", positive_ctrl == "No",tf == "P53",
position == 0, promoter != "random",
affinity_id %in% c( "1_high_only",
"3_med_only",
"5_low_only",
"7_very-low_only"),
condition %in% c("A549", "A549_Nutlin", "U2OS", "U2OS_Nutlin", "MCF7_WT_DMSO", "MCF7_WT_Nutlin")) %>%
dplyr::select(condition, reporter_activity_condition, promoter, background,
affinity_pos1, spacing, n_sites, motif_id, affinity_id, reporter_id) %>%
mutate(background = as.factor(background),
reporter_activity = ave(reporter_activity_condition, motif_id, condition, n_sites, spacing, promoter, background, FUN = function(x) mean(x))) %>%
dplyr::select(-reporter_activity_condition) %>%
unique() %>%
pivot_wider(names_from = condition, values_from = reporter_activity) %>%
mutate(Nutlin_mean = (MCF7_WT_Nutlin + A549_Nutlin + U2OS_Nutlin) / 3) %>%
mutate(DMSO_mean = (MCF7_WT_DMSO + A549 + U2OS) / 3) %>%
mutate(n_sites = as.character(n_sites))%>%
# NOTE(review): the filter above keeps only the four "*_only" affinity_ids,
# so this "8_very-low_null" branch can never fire here -- confirm it is
# leftover from an earlier version.
mutate(affinity_pos1 = ifelse(affinity_id == "8_very-low_null", "3", affinity_pos1)) %>%
mutate(promoter = as.factor(promoter)) %>%
distinct() %>%
pivot_longer(cols = c("Nutlin_mean", "DMSO_mean"), names_to = "stimulation", values_to = "reporter_activity") %>%
dplyr::select(-A549, -A549_Nutlin, -U2OS, -U2OS_Nutlin, -MCF7_WT_DMSO, -MCF7_WT_Nutlin) %>%
distinct()
## Mutate spacing to helical turn: express the site spacing as an angular
## position around the DNA helix (10.5 bp per turn) and take its cosine so
## that spacings one full turn apart get the same value.
cDNA_df_p53 <- cDNA_df_p53 %>%
mutate(spacing = spacing - 1) %>%
mutate(spacing_rotation = (spacing) / 10.5) %>%
mutate(spacing_degree = 2* pi * spacing_rotation) %>%
mutate(spacing_degree = ifelse(is.infinite(spacing_degree), 0, spacing_degree)) %>%
mutate(spacing_degree_transf = cos(spacing_degree)) %>%
mutate(close = ifelse(spacing == 0, "yes", "no")) ## Punish reporters that are directly next to each other
# Relabel affinity_pos1 levels 0..4 with the corresponding binding-score names
replace <- data.frame("old" = c(0:4), "new" = c("BS100", "BS037", "BS014", "BS006", "BS000"), stringsAsFactors=FALSE)
for (i in unique(replace$old)) {
cDNA_df_p53$affinity_pos1[cDNA_df_p53$affinity_pos1 == i] <- replace$new[replace$old == i]
}
# minP becomes the reference promoter level for the model coefficients below
cDNA_df_p53$promoter <- relevel(cDNA_df_p53$promoter, ref = "minP")
cDNA_df_p53_nutlin <- cDNA_df_p53 %>% filter(stimulation == "Nutlin_mean")
cDNA_df_p53_nutlin$row <- rownames(cDNA_df_p53_nutlin)
## Fitting the model: log2 activity explained by promoter x background
## interaction, the helical-phase term, and affinity class (Nutlin data).
x <- lm(log2(reporter_activity) ~ promoter*background + spacing_degree_transf + affinity_id, cDNA_df_p53_nutlin)
summ(x)
# Diagnostic plots (residuals, QQ, leverage) on a 2x2 panel
par(mfrow=c(2,2))
plot(x)
# Get predicted activities per reporter; fitted values are matched back to
# the input rows via their row names ("row" column).
y <- data.frame(x$fitted.values, stringsAsFactors=FALSE) %>% rownames_to_column()
names(y) <- c("row", "reporter_activity_predicted")
y$id <- 1:nrow(y)
prediction <- data.frame("id" = 1:(nrow(cDNA_df_p53_nutlin)), stringsAsFactors=FALSE)
prediction <- merge(prediction,y, all = T) %>%
dplyr::select(-id)
cDNA_df_p53_nutlin <- merge(cDNA_df_p53_nutlin, prediction)
# What are the individual weights? Extract model coefficients and add the
# reference levels explicitly with weight 0 so they appear in the plot.
weight <- data.frame(x$coefficients, stringsAsFactors=FALSE) %>% rownames_to_column() %>% na.omit()
names(weight) <- c("feature", "weight")
weight_intercepts <- data.frame("feature" = c("1_high_only", "promoterminP", "background1"), "weight" = c(0, 0, 0))
weight <- rbind(weight, weight_intercepts)
categorie <- c("promoter_background", "affinity_id")
weight$features <- gsub(paste(categorie, collapse="|"), "",weight$feature)
weight$cond <- gsub(paste(weight$features, collapse = "|"), "", weight$feature)
weight <- weight %>% filter(feature != "(Intercept)") %>% mutate(stimulation = "Nutlin_mean")
# Same model on the DMSO data.
cDNA_df_p53_dmso <- cDNA_df_p53 %>% filter(stimulation == "DMSO_mean")
cDNA_df_p53_dmso$row <- rownames(cDNA_df_p53_dmso)
## Fitting the model
## NOTE(review): 'y' is reused here for the DMSO lm object after serving as
## the Nutlin prediction data frame above -- consider a distinct name.
y <- lm(log2(reporter_activity) ~ promoter*background + spacing_degree_transf + affinity_id, cDNA_df_p53_dmso)
summ(y)
par(mfrow=c(2,2))
plot(y)
# Get predicted activities per reporter (DMSO)
y_dmso <- data.frame(y$fitted.values, stringsAsFactors=FALSE) %>% rownames_to_column()
names(y_dmso) <- c("row", "reporter_activity_predicted")
y_dmso$id <- 1:nrow(y_dmso)
prediction <- data.frame("id" = 1:(nrow(cDNA_df_p53_dmso)), stringsAsFactors=FALSE)
prediction <- merge(prediction,y_dmso, all = T) %>%
dplyr::select(-id)
cDNA_df_p53_dmso <- merge(cDNA_df_p53_dmso, prediction)
# Combined table (DMSO + Nutlin) used for the correlation figure below
cDNA_df_p53_all <- rbind(cDNA_df_p53_dmso, cDNA_df_p53_nutlin)
# Extract the DMSO model coefficients, mirroring the Nutlin weights above
weight_dmso <- data.frame(y$coefficients, stringsAsFactors=FALSE) %>% rownames_to_column() %>% na.omit()
names(weight_dmso) <- c("feature", "weight")
weight_dmso <- rbind(weight_dmso, weight_intercepts)
categorie <- c("promoter_background", "affinity_id")
weight_dmso$features <- gsub(paste(categorie, collapse="|"), "",weight_dmso$feature)
weight_dmso$cond <- gsub(paste(weight_dmso$features, collapse = "|"), "", weight_dmso$feature)
weight_dmso <- weight_dmso %>% filter(feature != "(Intercept)") %>% mutate(stimulation = "DMSO_mean")
weight <- rbind(weight_dmso, weight)
## Figure 5C: Correlation between predicted and measured activity
## One panel per stimulation; identity line (dashed) plus an lm fit; points
## colored by the affinity of the first binding site.
ggplot(cDNA_df_p53_all %>%
mutate(stimulation = factor(stimulation, levels = c("DMSO_mean", "Nutlin_mean"))),
aes(x = log2(reporter_activity), y = reporter_activity_predicted, color = affinity_pos1
#,
#color = top_6
)) +
geom_abline(lty = 2) +
geom_smooth(color = "black", method = "lm") +
geom_point(size = 2) +
scale_color_manual(values = c("BS100" = "#DC6641", "BS037" = "#E99D86", "BS014" = "#EDBBAB", "BS006" = "#F7E4DE")) +
xlab("Measured reporter activity") +
ylab("Predicted reporter activity") +
theme(legend.position = "none") +
facet_wrap(~stimulation, nrow = 2)
# Fixed x-axis order for the coefficient plot (feature names as produced by lm)
level_order <- c("spacing_degree_transf", "1_high_only", "3_med_only", "5_low_only", "7_very-low_only",
"promoterminP", "promotermCMV", "background1", "background2", "background3", "promotermCMV:background2", "promotermCMV:background3")
## Figure 5D: Weights of the fitted model (one panel per stimulation)
ggplot(weight %>%
mutate(stimulation = factor(stimulation, levels = c("DMSO_mean", "Nutlin_mean"))),
aes(x = factor(features, level = level_order), y = weight)) +
geom_bar(stat = "identity", position = "dodge", width = 0.75, color = "black") +
ylab("weight - reporter activity") +
xlab("") +
#scale_fill_manual(values = colors_continous_2[c(1,3,5,6)]) +
guides(fill = F)+
theme_classic_lines_45() +
facet_wrap(~stimulation, nrow = 2)
```
Conclusion: The top reporters outperform the commercial reporters. The linear model gives insight into which features are important for driving high expression.
# Session Info
```{r}
# Reproducibility record: runtime, working directory, date and the versions
# of all loaded packages.
paste("Run time: ",format(Sys.time()-StartTime))
getwd()
date()
sessionInfo()
```
<file_sep>from os.path import join
# Globals ---------------------------------------------------------------------
# Full path to working directory
W_DIR = '/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/gcf6301/'
# Expression and copy number (ECN)---------------------------------------------
# Full path to cDNA and pDNA raw data folder (fastq)
ECN_DIR = W_DIR + 'raw/'
# Extract cDNA and pDNA sample names from the fastq file names.
# glob_wildcards is provided by Snakemake; the '{ecn,[^/]+}' constraint keeps
# the wildcard from matching across directory separators. The trailing comma
# unpacks the single-field Wildcards namedtuple into a plain list.
ECN, = glob_wildcards(join(ECN_DIR, '{ecn,[^/]+}_R1_001.fastq.gz'))
print(ECN)
# Pattern for SE read (single-end R1 fastq per sample)
S1 = '{ecn}_R1_001.fastq.gz'
<file_sep>---
title: "pMT06-GR pDNA insert sequencing"
author: "<NAME>"
date: "`r format(Sys.time(), '%Y-%m-%d')`"
output:
prettydoc::html_pretty:
theme: leonids
highlight: github
# toc: true
# toc_float: true
# code_folding: show
# editor_options:
# chunk_output_type: console
---
*knitr document van Steensel lab*
# pMT06-GR pDNA insert sequencing
# Introduction
I sequenced the complete insert of the pDNA library of pMT06. I already extracted all sequences in front of the 3' adapter from the sequencing data and added up counts of identical sequences using starcode. I now want to get an overview of how many pDNA insert sequences in the pDNA pool still match the designed inserts.
```{r setup, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE, message = FALSE}
# Load all options and libraries.
# Fix: library(maditr) was attached twice (once here and again at the end of
# the chunk); the duplicate call was removed. Attaching an already-attached
# package is a no-op, so behavior is unchanged.
knitr::opts_chunk$set(echo = TRUE)
StartTime <-Sys.time()
# 8-digit Date tag:
Date <- substr(gsub("-","",Sys.time()),1,8)
# libraries:
library(seqinr)
library(ShortRead)
library(plyr)
library(maditr)
library(phylotools)
library(tidyr)
library(readr)
library(dplyr)
library(ggplot2)
library(ggbeeswarm)
library(vwr)
library(d3r)
library(sunburstR)
library(LncFinder)
library(plotly)
library(tibble)
library(GGally)
library(ggpointdensity)
library(viridis)
library(gridExtra)
```
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# Load all custom functions
SetFileName <- function(filename, initials) {
# Build a dated file name of the form "<initials><YYYYMMDD><filename>".
# Both arguments are captured unevaluated via substitute(), so the function
# also accepts bare (unquoted) names; paste0() coerces the resulting symbols
# back to character. 'Date' is taken from the global environment (set in the
# setup chunk above).
filename <- substitute(filename)
initials <- substitute(initials)
filename <- paste0(initials, Date, filename)
filename
}
ReadFasta<-function(file) {
# Parse a (possibly multi-line) FASTA file into a data frame with one row
# per record: 'name' holds the header without the '>' prefix, 'sequence'
# holds the concatenated sequence lines of that record.
fasta <- readLines(file)
# Locate the header lines
header_at <- grep(">", fasta)
# Record i spans from the line after its header up to the line before the
# next header; the final record runs to the end of the file.
seq_from <- header_at + 1
seq_to <- c(header_at[-1] - 1, length(fasta))
seqs <- vapply(seq_along(header_at),
function(i) paste(fasta[seq_from[i]:seq_to[i]], collapse = ""),
character(1))
# Assemble and return the result data frame
data.frame(name = gsub(">", "", fasta[header_at]), sequence = seqs)
}
# ggpairs custom functions
# ggpairs upper-panel helper: prints the Pearson correlation of the panel's
# x/y columns, scales the label size with |r|, and shades the panel
# background by binning r against the global 'boundaries' vector, which must
# exist in the calling environment before ggpairs() is invoked.
corColor <- function(data, mapping, color = I("black"), sizeRange = c(1, 3), ...) {
x <- eval_data_col(data, mapping$x)
y <- eval_data_col(data, mapping$y)
# Pairwise-complete Pearson correlation
r <- cor(x, y, use = "na.or.complete")
rt <- format(r, digits = 3)
tt <- as.character(rt)
cex <- max(sizeRange)
# helper function to calculate a useable size
percent_of_range <- function(percent, range) {
percent * diff(range) + min(range, na.rm = TRUE)
}
# plot correlation coefficient
p <- ggally_text(label = tt, mapping = aes(), xP = 0.5, yP = 0.5,
size = I(percent_of_range(cex * abs(r), sizeRange)), color = color, ...) +
theme(panel.grid.minor=element_blank(),
panel.grid.major=element_blank())
# Five-step color ramp; bin chosen by comparing r against the boundaries.
# NOTE(review): the first two bins use '<=' but the last two use '<', so
# values exactly equal to boundaries[3]/[4] fall into the next bin up --
# confirm this asymmetry is intended.
corColors <- RColorBrewer::brewer.pal(n = 7, name = "YlOrRd")[2:6]
if (r <= boundaries[1]) {
corCol <- corColors[1]
} else if (r <= boundaries[2]) {
corCol <- corColors[2]
} else if (r < boundaries[3]) {
corCol <- corColors[3]
} else if (r < boundaries[4]) {
corCol <- corColors[4]
} else {
corCol <- corColors[5]
}
p <- p +
theme_bw() +
theme(panel.background = element_rect(fill = alpha(corCol, 0.4)),
panel.grid.major = element_blank(),
panel.grid.minor = element_blank())
return(p)
}
```
```{r knits setup, echo=FALSE, warning= FALSE, message=FALSE}
# Prepare output: create a dated results directory and route all figure
# output (png + pdf) produced by knitr chunks into it.
library(knitr)
filename <- SetFileName("_figures","mt")
dir.create(paste("results/", filename, sep = ""), showWarnings = FALSE)
opts_chunk$set(fig.width = 4, fig.height = 4,
dev=c('png', 'pdf'), fig.path = file.path(paste("results/", filename, "/", sep = "")))
# Avoid Dingbats font issues when pdf figures are edited in Illustrator
pdf.options(useDingbats = FALSE)
```
```{r, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
## Data import
# Import reference sequences (the designed oligo pool)
ref_seq <- ReadFasta("/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/library_design/output/mt20200619_oligo_pool_deep.fasta")
# Remove adapters from reference sequence (cause these are not in the sequencing data)
ref_seq$sequence <- gsub("CGGAGCGAACCGAGTTAG", "", ref_seq$sequence)
ref_seq$sequence <- gsub("CATCGTCGCATCCAAGAG", "", ref_seq$sequence)
# Split each design into insert and barcode parts:
# the barcode is the last 12 bases of the sequence, the insert is the rest.
ref_seq$barcode <- gsub(".*([A-Z]{12})$", "\\1", ref_seq$sequence)
ref_seq$insert <- gsub("(.*)[A-Z]{12}$", "\\1", ref_seq$sequence)
# Import sequencing files: the current run (pDNA_insert_seq_2) plus one file
# from the previous run (pDNA_insert_seq). Each file is a two-column
# <sequence, count> tsv produced by starcode.
# Fix: the first list.files() call used the misspelled argument 'patter='
# (it only worked through R's partial argument matching) -- now 'pattern='.
pDNA_seq_files = list.files('/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/pDNA_insert_seq_2/results/',
full.names=T, pattern='.*MT.*_counts.tsv')
pDNA_prev_file = list.files('/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/pDNA_insert_seq/processed/',
full.names=T, pattern='.*MT.*_counts.tsv')
pDNA_seq_files <- c(pDNA_seq_files, pDNA_prev_file)
pDNA_seq <- lapply(pDNA_seq_files, fread, header = FALSE)
# Strip the index/sample tags from the current-run file names before
# deriving the condition names.
pDNA_seq_files[1:4] <- gsub('_[CGAT]{8}-[CGAT]{8}_S[0-9]{1,2}', '', pDNA_seq_files)
# Extract the condition name from each file name.
# Fix: the quantifier was written '{1-2}', which is not a valid interval
# quantifier and is treated literally, so the pattern could never match;
# '{1,2}' (one or two digits) is what was intended.
names(pDNA_seq)<- gsub('.*//.*[0-9]{1,2}_MT_lib_(.*?)_counts.tsv',
'\\1',
pDNA_seq_files)
# The previous-run file does not follow this naming scheme; name it manually.
names(pDNA_seq)[5] <- "plasmid_kapa_ecloni"
# Generate wide df - each condition attached as new column. The first
# iteration initializes the data frame; subsequent conditions are merged in
# by sequence (outer merge keeps sequences absent from some conditions).
for (i in 1:length(pDNA_seq)) {
if (i == 1) {
pDNA_seq_df <- data.frame(pDNA_seq[i])
pDNA_seq_df[3] <- names(pDNA_seq[i])
names(pDNA_seq_df) <- c("sequence", "count", "name")
pDNA_seq_df <- reshape2::dcast(pDNA_seq_df, sequence ~ name, value.var = "count")
}
else {
pDNA_seq_df_i <- data.frame(pDNA_seq[i])
pDNA_seq_df_i[3] <- names(pDNA_seq[i])
names(pDNA_seq_df_i) <- c("sequence", "count", "name")
pDNA_seq_df_i <- reshape2::dcast(pDNA_seq_df_i, sequence ~ name, value.var = "count")
pDNA_seq_df <- merge(pDNA_seq_df, pDNA_seq_df_i, all = T)
}
}
# Split each observed sequence into insert and barcode parts
# (the barcode is the last 12 bases, mirroring the reference processing).
pDNA_seq_df$barcode <- gsub(".*([A-Z]{12})$", "\\1", pDNA_seq_df$sequence)
pDNA_seq_df$insert <- gsub("(.*)[A-Z]{12}$", "\\1", pDNA_seq_df$sequence)
```
# Analysis
## How do the different samples correlate?
```{r correlation_all_samples, fig.width=10, fig.height=10, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Convert to long df - write conditions under each other
pDNA_seq <- melt(pDNA_seq_df, id.vars = c("sequence", "barcode", "insert"),
variable.name = "condition", value.name = "counts")
# Calculate reads per million per condition; missing counts are treated as 0
pDNA_seq$counts[is.na(pDNA_seq$counts)] <- 0
for (i in unique(pDNA_seq$condition)) {
pDNA_seq$rpm[pDNA_seq$condition == i] <- (pDNA_seq$counts[pDNA_seq$condition == i] + 1) / # Adds a pseudocount of 1
sum(pDNA_seq$counts[pDNA_seq$condition == i]) *1e6
}
# Correlation matrix plot on sequences with at least 5 reads
pDNA_seq_df_2 <- pDNA_seq[pDNA_seq$counts >= 5,]
pDNA_seq_df_2 <- dcast(pDNA_seq_df_2, sequence ~ condition, value.var = "rpm")
# Random subsample of 10k sequences for the scatter panels.
# NOTE(review): sample() is unseeded, so the scatter panels differ between
# renders -- consider set.seed() for reproducibility.
n <- sample(1:nrow(pDNA_seq_df_2), 10000)
# 'boundaries' is consumed as a global by the corColor() helper above
boundaries <- seq(from = 0.1, by = 0.25, length.out = 4)
plt <- ggpairs(pDNA_seq_df_2 %>% dplyr::select(oligos, PCR_mytaq, PCR_kapa, plasmid_mytaq_megax, plasmid_kapa_ecloni),
upper = list(continuous = corColor),
lower = list(continuous = function(data, mapping, ...) {
ggally_points(data = data[n, ], mapping = mapping, alpha = 0.4, size = 1) +
xlim(0,400) + ylim(0,400)+
geom_abline(slope = 1, lty = "dashed", col = "red") +
theme_bw()}),
diag = list(continuous = function(data, mapping, ...) {
ggally_densityDiag(data = data, mapping = mapping, alpha = 0.4, fill = "red") +
xlim(0,400) +
theme_bw()})) +
ggtitle("Read counts: corelation between samples") +
theme(text = element_text(size = 14)) +
xlab("Reads per million") +
ylab("Reads per million")
print(plt)
```
## What is the barcode distribution of mapped vs. unmapped for both TFs?
```{r distribution_per_tf, fig.width=6, fig.height=6, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Match observed barcodes with the designed barcodes, per condition
pDNA_seq_bc <- pDNA_seq %>%
dplyr::select(barcode, rpm, condition) %>%
#filter(rpm > 5) %>%
unique() %>%
mutate(id = "seq")
# Only keep highest barcode values - a bit of cheating here: when a barcode
# occurs multiple times per condition, only its highest-rpm entry survives
# the duplicate filter below.
ref_seq_bc <- ref_seq %>% dplyr::select(name, barcode)
# Cross every designed barcode with every condition so unobserved barcodes
# are retained; the merged-in condition column arrives named "y".
ref_seq_bc_2 <- merge(ref_seq_bc, unique(pDNA_seq_bc$condition), all = T) %>%
setnames("y", "condition")
match_bc <- merge(ref_seq_bc_2, pDNA_seq_bc, all = T)
# Derive the TF label from the design name (text before the first "_").
# NOTE(review): the replacement "\\1" has no matching capture group in
# "_.*"; it appears to behave as deleting the matched suffix -- confirm, or
# simplify the replacement to "".
match_bc$TF <- gsub("_.*", "\\1", match_bc$name)
match_bc$match <- "true"
match_bc$match[is.na(match_bc$name)] <- "false"
match_bc$conf <- "high"
match_bc$conf[match_bc$rpm < 10] <- "low"
match_bc$TF[is.na(match_bc$TF)] <- "x_no_match"
# De-duplicate per barcode x condition, keeping the highest-rpm entry
match_bc$cond_bc <- paste(match_bc$barcode, match_bc$condition, sep = "_")
match_bc <- match_bc[order(match_bc$cond_bc, -abs(match_bc$rpm) ), ]
match_bc <- match_bc[ !duplicated(match_bc$cond_bc), ]
# Visualize the per-TF rpm distribution per condition
ggplot(match_bc[!is.na(match_bc$condition),], aes(x = condition, y = rpm, color = TF)) +
geom_quasirandom(dodge.width = 0.4) +
theme_bw() +
xlab("condition") +
scale_color_brewer(palette = "Dark2") +
ylab("reads per million") +
theme(text = element_text(size = 14), axis.text.x = element_text(angle = 90, hjust = 1, vjust = 1))
```
## Correlate to GC content
```{r gc_content, fig.width=6, fig.height=6, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Load reference file again via seqinr (the list form LncFinder expects)
ref_seq_2 <- seqinr::read.fasta("/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr//data/library_design/output/mt20200619_oligo_pool_deep.fasta")
# Compute GC content per designed oligo (LncFinder::compute_GC).
# NOTE(review): 'gc' shadows base R's gc() within this chunk.
gc <- compute_GC(ref_seq_2)
# Plot gc distribution across the designed library
ggplot(gc, aes(x = GC.content)) +
geom_density() + theme_bw()
# Attach the observed rpm values (matched barcodes only) to the GC table
match_seq <- match_bc[!is.na(match_bc$name),]
gc <- gc %>% rownames_to_column(var = "name")
gc <- merge(gc, match_seq)
# rpm vs. GC content, per condition, with point density coloring
ggplot(gc[!is.na(gc$condition),], aes(x = GC.content, y = rpm)) +
geom_pointdensity() +
scale_color_viridis() +
theme_bw() +
facet_wrap(~condition)
```
## Plot how many barcodes are found in pDNA data
```{r barcodes_found, fig.width=4, fig.height=4, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Match barcodes with original data (outer merge keeps unmatched barcodes)
ref_seq_bc <- ref_seq %>% dplyr::select(name, barcode)
ref_seq_bc$TF <- gsub("_.*", "\\1", ref_seq_bc$name)
pDNA_seq_bc <- pDNA_seq %>%
dplyr::select(barcode, rpm, condition) %>%
unique() %>%
mutate(id = "pDNA")
match_bc <- merge(ref_seq_bc, pDNA_seq_bc, by = "barcode", all = T)
# Keep only the highest-rpm entry per barcode x condition
match_bc$cond_bc <- paste(match_bc$barcode, match_bc$condition, sep = "_")
match_bc <- match_bc[order(match_bc$cond_bc, -abs(match_bc$rpm) ), ]
match_bc <- match_bc[ !duplicated(match_bc$cond_bc), ]
## Identify the unmapped fraction (barcodes with no designed counterpart)
match_bc$match <- "match"
match_bc$match[is.na(match_bc$TF)] <- "no_match"
match_bc$id <- 1:nrow(match_bc)
# One bar chart per rpm cutoff (1..5): fraction of designed barcodes
# recovered in each condition.
for (i in seq(1,5,1)) {
p<- ggplot(match_bc %>%
filter(rpm > i) %>%
mutate(match_count = ave(id, match, condition, FUN = length),
match_count = 100*match_count/nrow(ref_seq_bc)) %>%
dplyr::select(condition, match_count, match) %>%
unique(), aes(x = match_count, y = condition, fill = match)) +
geom_bar(stat = "identity", position = "dodge") +
scale_fill_manual(values = c("#1B998B", "#2D3047")) +
labs(title = paste("barcodes matching, rpm cutoff:", i, sep ="")) +
theme_bw() +
ylab("")+
xlab("designed barcodes in sequencing data (%)")+
theme(legend.position = "none") +
theme(axis.text.x = element_text(angle = 45, hjust = 1, vjust = 1))
print(p)
}
```
## Plot how many reads match to designed barcodes
```{r matched_barcode_reads, fig.width=4, fig.height=4, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Now plot the sum of reads from matched vs unmatched barcodes: for each rpm
# cutoff (1..5), the share of total reads per condition that comes from
# designed barcodes.
for (i in seq(1,5,1)) {
p<- ggplot(match_bc %>%
filter(rpm > i) %>%
mutate(match_count = ave(rpm, match, condition, FUN = function(x) sum(x)),
all_count = ave(rpm, condition, FUN = function(x) sum(x)),
match_count = 100*match_count/all_count) %>%
dplyr::select(condition, match_count, match) %>%
unique(), aes(x = match_count, y = condition, fill = match)) +
geom_bar(stat = "identity", position = "dodge") +
scale_fill_manual(values = c("#1B998B", "#2D3047")) +
labs(title = paste("reads from matching barcodes, rpm cutoff:", i, sep ="")) +
theme_bw() +
ylab("")+
xlab("reads from designed barcodes (%)")+
theme(legend.position = "none") +
theme(axis.text.x = element_text(angle = 45, hjust = 1, vjust = 1))
print(p)
}
```
## How many raw complete sequences match with the design?
```{r sequenced_found, fig.width=4, fig.height=4, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Match complete sequences (insert + barcode) with the design, analogous to
# the barcode-only matching above but on the full-length sequence.
ref_seq_bc <- ref_seq %>% dplyr::select(name, sequence)
ref_seq_bc$TF <- gsub("_.*", "\\1", ref_seq_bc$name)
pDNA_seq_bc <- pDNA_seq %>%
dplyr::select(sequence, rpm, condition) %>%
filter(rpm > 2) %>%
unique() %>%
mutate(id = "pDNA")
match_bc <- merge(ref_seq_bc, pDNA_seq_bc, by = "sequence", all = T)
# Keep only the highest-rpm entry per sequence x condition
match_bc$cond_bc <- paste(match_bc$sequence, match_bc$condition, sep = "_")
match_bc <- match_bc[order(match_bc$cond_bc, -abs(match_bc$rpm) ), ]
match_bc <- match_bc[ !duplicated(match_bc$cond_bc), ]
## Identify the unmapped fraction
match_bc$match <- "match"
match_bc$match[is.na(match_bc$TF)] <- "no_match"
# Make the plot: one chart per rpm cutoff (1..5)
for (i in seq(1,5,1)) {
p <- ggplot(match_bc %>%
filter(rpm > i) %>%
mutate(match_count = ave(rpm, match, condition, FUN = function(x) length(x)),
match_count = 100*match_count/nrow(ref_seq_bc)) %>%
dplyr::select(condition, match_count, match) %>%
unique(), aes(x = match_count, y = condition, fill = match)) +
geom_bar(stat = "identity", position = "dodge") +
scale_fill_manual(values = c("#1B998B", "#2D3047")) +
theme_bw() +
ylab("")+
ggtitle(paste("complete reporter matching, rpm cutoff:", i)) +
xlab("designed vs. non-matching reporters (%)")+
theme(legend.position = "none")+
theme(axis.text.x = element_text(angle = 45, hjust = 1, vjust = 1))
print(p)
}
```
## Now we want to know the read distribution of matched/unmatched sequences
```{r reads_matched_sequences, fig.width=4, fig.height=4, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Make the plot: read-weighted version of the previous chunk -- the share of
# total reads per condition coming from fully design-matching sequences, for
# each rpm cutoff (1..5).
for (i in seq(1,5,1)) {
p <- ggplot(match_bc %>%
filter(rpm > i) %>%
mutate(match_count = ave(rpm, match, condition, FUN = function(x) sum(x)),
all_count = ave(rpm, condition, FUN = function(x) sum(x)),
match_count = 100*match_count/all_count) %>%
dplyr::select(condition, match_count, match) %>%
unique(), aes(x = match_count, y = condition, fill = match)) +
geom_bar(stat = "identity", position = "dodge") +
scale_fill_manual(values = c("#1B998B", "#2D3047")) +
theme_bw() +
ylab("")+
ggtitle(paste("complete reporter matching, rpm cutoff:", i)) +
xlab("designed vs. non-matching reporters (%)")+
theme(legend.position = "none")+
theme(axis.text.x = element_text(angle = 45, hjust = 1, vjust = 1))
print(p)
}
```
I selected all sequences from the pDNA data that have a barcode that is in the design. I then matched the complete sequences with the designed complete sequences, and found that the matched sequences are well represented in the library. Sequences that are not found in the design do not have many reads, and are therefore very unlikely to influence the results of the reporter assays.
# Barcodes attached to wrong insert?
```{r barcode_reporter_matching, fig.width=4, fig.height=4, fig.align= "center", echo=FALSE, warning= FALSE, message=FALSE}
# Select only reads whose barcode exists in the design, then check whether
# each barcode is still attached to the insert it was designed with
# (barcode swapping / recombination check).
pDNA_seq_insert <- pDNA_seq[pDNA_seq$barcode %in% ref_seq$barcode,]
# Add bc-id to the barcodes found in pDNA
ref_seq_insert <- ref_seq %>% dplyr::select(barcode, name) %>% setnames("name", "bc-match")
pDNA_seq_insert <- merge(pDNA_seq_insert, ref_seq_insert, by = "barcode")
# Add insert-id to the inserts found in pDNA; the "_bc_<n>" suffix is
# stripped so designs differing only in barcode collapse to one insert id
ref_seq_insert <- ref_seq %>% dplyr::select(insert, name) %>% setnames("name", "insert-match")
ref_seq_insert$`insert-match` <- gsub("(.*)_bc_[0-9]$", "\\1", ref_seq_insert$`insert-match`)
ref_seq_insert <- ref_seq_insert %>% unique()
pDNA_seq_insert <- merge(pDNA_seq_insert, ref_seq_insert, by = "insert")
# Count occurrences where the barcode-derived id equals the insert-derived id
pDNA_seq_insert$`bc-match` <- gsub("(.*)_bc_[0-9]$", "\\1", pDNA_seq_insert$`bc-match`)
pDNA_seq_insert$match <- pDNA_seq_insert$`bc-match` == pDNA_seq_insert$`insert-match`
pDNA_seq_insert$TF <- gsub("_.*", "\\1", pDNA_seq_insert$`insert-match`)
# Make the plot: read share of correct barcode-insert pairs per condition,
# for each rpm cutoff (1..5)
for (i in 1:5) {
p <- ggplot(pDNA_seq_insert %>%
filter(rpm > i) %>%
mutate(match_count = ave(rpm, match, condition, FUN = function(x) sum(x)),
all_count = ave(rpm, condition, FUN = function(x) sum(x)),
match_count = 100*(match_count/all_count)) %>%
dplyr::select(condition, match_count, match) %>%
unique(), aes(x = match_count, y = condition, fill = match)) +
geom_bar(stat = "identity", position = "dodge") +
scale_fill_manual(values = c("#2D3047", "#1B998B")) +
labs(title = paste("reads from correct bc-reporter matching, rpm cutoff:", i, sep = "")) +
theme_bw() +
theme(legend.position = "none")
print(p)
}
```
# Session Info
```{r}
# Reproducibility record: runtime, working directory, date and the versions
# of all loaded packages.
paste("Run time: ",format(Sys.time()-StartTime))
getwd()
date()
sessionInfo()
```
<file_sep>#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~##
'''
Title: BC extraction
Date last modified: 2020/09/16
Python Version: 3.6.2
'''
##~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~##
# Pull in W_DIR/ECN_DIR/ECN/S1 globals from the shared config
include:
    '/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/stimulation_2_gcf6301/raw_data_analysis/config.py'

# Rules -----------------------------------------------------------------------

# Target rule: request the clustered barcode counts for every sample in ECN
rule all:
    input:
        expand('/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/gcf6301/results_b12_d0/{ecn}_barcode_counts.tsv', ecn = ECN)

# Extract barcodes from cDNA/pDNA SE fastq files (one barcode per line)
rule extract_barcodes_from_fq:
    input:
        fq = ECN_DIR + S1
    output:
        tsv = '/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/gcf6301/results_b12_d0/{ecn}_barcodes.tsv'
    log:
        '/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/gcf6301/logs_b12_d0/{ecn}_extract_barcodes_from_fq.log'
    conda:
        '/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/stimulation_2_gcf6301/raw_data_analysis/environment.yaml'
    script:
        '/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/stimulation_2_gcf6301/raw_data_analysis/mt20201117_bc-counts.py'

# Cluster cDNA/pDNA barcodes with Starcode.
# --dist 0 means no error tolerance: only identical barcodes are merged,
# so this effectively produces exact barcode counts, sorted by barcode.
rule cluster_ecn_barcodes:
    input:
        starcode_in = '/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/gcf6301/results_b12_d0/{ecn}_barcodes.tsv'
    output:
        ofn = '/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/gcf6301/results_b12_d0/{ecn}_barcode_counts.tsv',
    log:
        '/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/gcf6301/logs_b12_d0/{ecn}_cluster_ecn_barcodes.log'
    conda:
        '/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/stimulation_2_gcf6301/raw_data_analysis/environment.yaml'
    threads: 12
    shell:
        'starcode --threads {threads} --print-clusters -i {input.starcode_in} --dist 0 | \
sort -k1,1 > {output.ofn}'
<file_sep># This script will be used to extract the barcode counts from the fastq.gz files reveived from the sequencing facility
# For each fastq.gz file count barcodes
# 12-bp sequence in front of CATCGTCGCATCCAAGAG should be counted
##~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# FC@NKI
# EP-SuRE pipeline
# Extract barcodes from fastq files (cDNA and pDNA SE data)
##~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# load modules
import pysam
import regex

# Constant sequence immediately downstream of the barcode (first 30 nt of
# the adapter region). The fuzzy quantifier '{e<1}' permits fewer than one
# error, i.e. only exact matches are accepted.
# NOTE(review): an earlier comment claimed "allow for 3 mismatches", which
# does not match '{e<1}' -- confirm the intended error budget (3 mismatches
# would be '{e<4}').
downstream_seq = '(' + 'CATCGTCGCATCCAAGAGGCTAGCTAACTA' + '){e<1}'

# Output path is supplied by the enclosing Snakemake rule
tsv_out = open(snakemake.output["tsv"], "w")

# Stream over the (gzipped) fastq without loading it into memory
with pysam.FastxFile(snakemake.input["fq"]) as fq_in:
    # iterate over reads
    for read in fq_in:
        # extract read sequence
        seq = read.sequence
        # locate the constant region; BESTMATCH returns the best-scoring hit
        match = regex.search(downstream_seq, seq, regex.BESTMATCH)
        # reads without the constant region are skipped
        if match is None:
            continue
        # the barcode is everything upstream of the constant region
        end_bc = match.span()[0]
        barcode = seq[0:end_bc]
        # keep only intact barcodes (at least 12 nt, no ambiguous bases)
        if((len(barcode) >= 12) and ('N' not in barcode)):
            # write to output file, one barcode per line
            tsv_out.write(barcode + '\n')
tsv_out.close()
<file_sep>---
title: "Oligo Design Deep Scan"
author: "<NAME>"
date: "`r format(Sys.time(), '%Y-%m-%d')`"
output:
html_document:
theme: journal #cerulean
highlight: monochrome
toc: true
toc_float: true
code_folding: show
editor_options:
chunk_output_type: console
---
# knitr document van Steensel lab
# Oligo Design Deep Scan
# Introduction
In this script, oligos will be designed for a Trp53 and Gr activity reporter. TF binding sites with variable binding affinities will be placed upstream of a minimal promoter and a barcode in the transcriptional unit.
## Description of Data
How to make a good rendering table:
```{r table1, echo=FALSE, message=FALSE, warnings=FALSE, results='asis'}
# Markdown table source; printed verbatim (the chunk uses results='asis')
# so pandoc renders it as an HTML/PDF/docx table.
tabl <- "
| column1 | column2 | column3 |
|----|----|----|
|1 | 2 | 3 |
|a | b | c |
"
cat(tabl) # output the table in a format good for HTML/PDF/docx conversion
```
# Data processing
## Path, Libraries, Parameters and Useful Functions
```{r setup}
# Show code in the rendered report.
knitr::opts_chunk$set(echo = TRUE)
StartTime <-Sys.time()

# 8-digit Date tag (YYYYMMDD); consumed by SetFileName() below.
Date <- substr(gsub("-","",Sys.time()),1,8)

# libraries:
library(ggplot2)
library(seqinr)
library(seqLogo)
library(universalmotif)
library(Biostrings)
library(SimRAD)          # sim.DNAseq(): random DNA spacer generation
library(gtools)
library(DNABarcodes)     # create.dnabarcodes() (commented out below; barcodes are loaded from csv)
library(phylotools)      # dat2fasta(): fasta export for FIMO input
library(ape)
library(magrittr)
library(dplyr)
library(readr)
library(stringr)
library(tidyr)
library(heatmaply)
library(pheatmap)
library(tibble)
library(ggseqlogo)
library(RColorBrewer)
library(data.table)      # setnames(); loaded last, so its generics take precedence
```
### Custom functions
Functions used throughout this script.
```{r}
SetFileName <- function(filename, initials) {
  # Set filename with extension and initials to make filename with date integrated.
  # Returns paste0(initials, Date, filename); `Date` is the 8-digit tag from
  # the setup chunk (a global, not an argument).
  # NOTE(review): substitute() captures the *unevaluated* argument
  # expressions, so callers may pass bare symbols as well as strings; paste0()
  # then coerces simple symbols to their names. Confirm this non-standard
  # evaluation is intentional before simplifying.
  filename <- substitute(filename)
  initials <- substitute(initials)
  filename <- paste0(initials, Date, filename)
  filename
}
# Function to substring the right part of the motif
# Return the last n characters of each string in x.
substrRight <- function(x, n) {
  first <- nchar(x) - n + 1
  substring(x, first)
}
# Function to load PWM matrix
# Build a (sequences x TFs) feature matrix of -log10(p-value) FIMO motif-hit
# scores.
#
# Args:
#   motif_meta_fn: path to motif metadata csv, mapping PWM IDs to TF symbols.
#   fimo_fn:       path to FIMO output (whitespace table for db=1, tsv for db=2).
#   db:            metadata format version; 1 is kept for backward
#                  compatibility only, 2 is the default.
#
# Returns: a tibble with an `id` column (sequence id) followed by one numeric
#          column per TF; 0 (db=2) / -log10(1) (db=1) means "no hit".
get_pwm_feature_matrix <- function(motif_meta_fn, fimo_fn, db = 2) {

  # validate args
  valid_dbs <- 1:2
  if(!db %in% valid_dbs)
    stop('Invalid db (database version). Please use db=1 (maintained for backward compatibility only) or db=2')

  # db=1 is maintained for backward compatibility only
  if(db == 1) {

    # read in motif metadata
    motif_meta <- read.csv(motif_meta_fn)

    # check whether motif metadata contain essential annotations
    if(!all(c('PWM.ID', 'Cognate.TF') %in% colnames(motif_meta))) {
      message('The motif metadata file does not contain the essential columns PWM.ID and Cognate.TF')
    }
    motif_minimal <- motif_meta[, c('PWM.ID', 'Cognate.TF')]

    # load fimo output --> extract motif id, sequence id and p-value
    df <- read.table(fimo_fn)
    df <- df[, c(1, 2, 7)]
    colnames(df) <- c('PWM.ID', 'seqid', 'pval')

    # add TF id
    df <- merge(df, motif_minimal, by = 'PWM.ID')

    # group motif hits by sequence id
    l <- split(df, df[['seqid']])

    # multiple PWM and multiple hits possible. Reduce hits to one per TF, keeping best p-val only
    l <- lapply(l, function(x) {
      x_by_tf <- split(x, x[['Cognate.TF']], drop = TRUE)
      x_by_tf <- lapply(x_by_tf, function(y) y[which.min(y$pval), ])
      do.call('rbind', x_by_tf)
    })

    # initialize feature matrix (p = 1 everywhere, i.e. "no hit")
    n_tf <- motif_minimal[['Cognate.TF']] %>%
      unique %>%
      length
    n_seq <- length(l)
    pwm <- matrix(1, nrow = n_seq, ncol = n_tf)
    colnames(pwm) <- (motif_minimal[['Cognate.TF']] %>% unique)

    # replace :: from names of composite motifs
    colnames(pwm) <- str_replace_all(colnames(pwm), '::', '_')

    # fill in feature matrix
    for(i in 1 : n_seq) {
      pwm[i, l[[i]][['Cognate.TF']]] <- l[[i]]$pval
    }

    # -log10 transform
    pwm <- -1 * log10(pwm)

    # coerce to tib and return
    # BUG FIX: dplyr::select() was previously a stand-alone statement (the
    # trailing pipe was missing), so the column reordering was never applied
    # and the bare select() call errored at run time. It is now part of the
    # pipeline, matching the db=2 branch.
    tib_fimo <- as_data_frame(pwm) %>%
      mutate(id = names(l)) %>%
      dplyr::select(id, everything())
  }

  # db = 2 (default)
  else {

    # load metadata
    tib_meta <- read_csv(motif_meta_fn) %>%
      # extract tf symbol from motif id (Cognate_TF unsafe, it can be empty) and replace :: occurrences
      mutate(tf_symbol = str_remove(ID, '_[0-9]*'),
             tf_symbol = str_replace(tf_symbol, '::', '_')) %>%
      dplyr::select(motif_id = `PWM ID`, tf_symbol)

    # load fimo results
    tib_fimo <- read_tsv(fimo_fn) %>%
      # extract motif id, sequence id and p-value
      dplyr::select(motif_id, sequence_name, pval = `p-value`)

    # add tf symbol to fimo results
    tib_fimo <- tib_fimo %>%
      left_join(tib_meta, by = 'motif_id') %>%
      # remove hits with missing motif id (composite pwms)
      filter(!is.na(tf_symbol))

    # select best hit for each motif and sequence
    tib_fimo <- tib_fimo %>%
      group_by(sequence_name, tf_symbol) %>%
      dplyr::slice(which.min(pval)) %>%
      ungroup()

    # spread into feature matrix
    tib_fimo <- tib_fimo %>%
      mutate(pval = -1 * log10(pval)) %>%
      dplyr::select(-motif_id) %>%
      spread(key = tf_symbol, value = pval, fill = 0, drop = TRUE) %>%
      # perform cosmetics on the id
      mutate(id = sequence_name) %>%
      dplyr::select(-c(sequence_name)) %>%
      dplyr::select(id, everything())
  }

  return(tib_fimo)
}
```
## Data import
```{r}
# Import chosen TFs
# Columns used downstream when filling motif1-4: TF, Affinity, Motif
# (see the motif-assignment loop later in this document).
motifs <- read.csv2("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/parameter_files/TF_motifs.csv", header = T)
```
## Create DNA barcodes
```{r out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
## Create barcodes with length = 12
# barc <- create.dnabarcodes(n = 12, dist = 3, filter.triplets = T, metric = "seqlev",
#                            filter.gc = T, filter.self_complementary = T, cores = 24)
# Load the pre-computed barcode set instead of regenerating it.
barc <- read.csv("/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/library_design/output/barc.csv") %>%
  dplyr::select(x) %>% setnames("x","barcode")

# Filter out ATGs
# ROBUSTNESS FIX: negative indexing via x[-grep(...)] silently *empties* the
# result when there are zero matches; !grepl() is identical whenever matches
# exist and is a no-op when none do.
barc <- barc[!grepl("ATG", barc$barcode), ]

# Filter out EcoRI & NheI sites
ecori_nhei <- c("GAATTC","GCTAGC")
ecori_nhei_bc <- c("GAATT","GCTAG") # as primer2 seq starts with a `C`
barc <- barc[!grepl(paste(ecori_nhei_bc, collapse = "|"), barc)]

print(paste("total barcodes:",length(barc)))
```
# Oligo design
## Creating a surrogate DF - sequences will be added in later stages
```{r out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# Create all desired parameters here
## TF-motifs
tf.motifs <- data.frame(tf=c("Trp53","Gr"))
## 5 barcodes
oligo.barcodes <- data.frame(oligo.barcode=1:5)
## 3 backgrounds
backgrounds <- data.frame(background=1:3)
## 2 minimal promoters
promoters <- data.frame(promoter=c("minP", "mCMV"))
## standard spacing of 7bp/10bp - this should in theory make up a total distance of 31bp from start to start
spacings <- data.frame(tf=c("Trp53", "Gr"),
                       spacing = c(7,10))
## standard promoter distance of 10bp - this was most active in first screen
distances <- data.frame(distance = 10)
## starting bases in front of first motif
position <- data.frame(position = 0)
## Combinations of high-med affinities
## Affinity coding used throughout: 0 = highest ... 4 = lowest/null.
affinity_pos1 <- data.frame(affinity_pos1=c(0,1))
affinity_pos2 <- data.frame(affinity_pos2=c(0,1))
affinity_pos3 <- data.frame(affinity_pos3=c(0,1))
affinity_pos4 <- data.frame(affinity_pos4=c(0,1))

# Create whole df by merging all conditions
# merge(all=TRUE) on frames without shared columns is a cross join, so this
# enumerates every combination of the parameters above; `spacings` shares the
# `tf` column with `tf.motifs` and is therefore joined on it.
tf.df <- Reduce(function(x, y) merge(x, y, all=TRUE),
                list(tf.motifs, oligo.barcodes, spacings, position,
                     promoters,distances,backgrounds,affinity_pos1,
                     affinity_pos2, affinity_pos3,affinity_pos4))

## Combinations of med-low affinities
affinity_pos1 <- data.frame(affinity_pos1=c(1,2))
affinity_pos2 <- data.frame(affinity_pos2=c(1,2))
affinity_pos3 <- data.frame(affinity_pos3=c(1,2))
affinity_pos4 <- data.frame(affinity_pos4=c(1,2))
tf.df2 <- Reduce(function(x, y) merge(x, y, all=TRUE),
                 list(tf.motifs, oligo.barcodes, backgrounds,
                      spacings, distances, promoters, position,
                      affinity_pos1, affinity_pos2,
                      affinity_pos3, affinity_pos4))

## Combinations of low-verylow affinities
affinity_pos1 <- data.frame(affinity_pos1=c(2,3))
affinity_pos2 <- data.frame(affinity_pos2=c(2,3))
affinity_pos3 <- data.frame(affinity_pos3=c(2,3))
affinity_pos4 <- data.frame(affinity_pos4=c(2,3))
tf.df3 <- Reduce(function(x, y) merge(x, y, all=TRUE),
                 list(tf.motifs, oligo.barcodes, backgrounds,
                      spacings, distances, promoters, position,
                      affinity_pos1, affinity_pos2,
                      affinity_pos3, affinity_pos4))

## Combinations of verylow-null affinities
affinity_pos1 <- data.frame(affinity_pos1=c(3,4))
affinity_pos2 <- data.frame(affinity_pos2=c(3,4))
affinity_pos3 <- data.frame(affinity_pos3=c(3,4))
affinity_pos4 <- data.frame(affinity_pos4=c(3,4))
tf.df4 <- Reduce(function(x, y) merge(x, y, all=TRUE),
                 list(tf.motifs, oligo.barcodes, backgrounds,
                      spacings, distances, promoters, position,
                      affinity_pos1, affinity_pos2,
                      affinity_pos3, affinity_pos4))

# The four affinity batches overlap (e.g. all-1 appears in batch 1 and 2);
# unique() keeps each combination once.
tf.df <- unique(rbind(tf.df,tf.df2,tf.df3,tf.df4))
## Add sequences with only strong sites in front/middle/back to investigate promoter positioning effects
# Start from the all-strong (all affinities == 0) rows, then overwrite pairs
# of positions with weaker affinity levels (2/3/4).
# NOTE(review): the `.front` variants set affinity_pos1/2 (the *front*
# positions) to the weaker level, leaving the strong sites at the back, and
# vice versa for `.back` -- verify the naming matches the intended design.
tf.df.front <- tf.df[tf.df$affinity_pos1 == 0 & tf.df$affinity_pos2 == 0 & tf.df$affinity_pos3 == 0 & tf.df$affinity_pos4 == 0,]
tf.df.front.4 <- tf.df.front %>%
  mutate(affinity_pos1 = 4,
         affinity_pos2 = 4)
tf.df.front.3 <- tf.df.front %>%
  mutate(affinity_pos1 = 3,
         affinity_pos2 = 3)
tf.df.front.2 <- tf.df.front %>%
  mutate(affinity_pos1 = 2,
         affinity_pos2 = 2)
tf.df.front <- rbind(tf.df.front.4, tf.df.front.3, tf.df.front.2)

tf.df.middle <- tf.df[tf.df$affinity_pos1 == 0 & tf.df$affinity_pos2 == 0 & tf.df$affinity_pos3 == 0 & tf.df$affinity_pos4 == 0,]
tf.df.middle.4 <- tf.df.middle %>%
  mutate(affinity_pos2 = 4,
         affinity_pos3 = 4)
tf.df.middle.3 <- tf.df.middle %>%
  mutate(affinity_pos2 = 3,
         affinity_pos3 = 3)
tf.df.middle.2 <- tf.df.middle %>%
  mutate(affinity_pos2 = 2,
         affinity_pos3 = 2)
tf.df.middle <- rbind(tf.df.middle.4, tf.df.middle.3, tf.df.middle.2)

tf.df.back <- tf.df[tf.df$affinity_pos1 == 0 & tf.df$affinity_pos2 == 0 & tf.df$affinity_pos3 == 0 & tf.df$affinity_pos4 == 0,]
tf.df.back.4 <- tf.df.back %>%
  mutate(affinity_pos3 = 4,
         affinity_pos4 = 4)
tf.df.back.3 <- tf.df.back %>%
  mutate(affinity_pos3 = 3,
         affinity_pos4 = 3)
tf.df.back.2 <- tf.df.back %>%
  mutate(affinity_pos3 = 2,
         affinity_pos4 = 2)
tf.df.back <- rbind(tf.df.back.4, tf.df.back.3, tf.df.back.2)

# Append all positioning variants to the main design table.
tf.df.pos <- rbind(tf.df.front, tf.df.middle, tf.df.back)
tf.df<- rbind(tf.df, tf.df.pos)
## Add more spacings to test spacing preferences
## only for the 4 standard affinities
## spacing range from 0-10 bp
# The long filter keeps only rows where all four positions share the same
# affinity level (uniform 0/1/2/3/4 designs); `&` binds tighter than `|` in R,
# so each "pos1 == k & ... & pos4 == k" group is one alternative.
space <- tf.df[tf.df$affinity_pos1 == 0 & tf.df$affinity_pos2 == 0 & tf.df$affinity_pos3 == 0 & tf.df$affinity_pos4 == 0 | tf.df$affinity_pos1 == 1 & tf.df$affinity_pos2 == 1 & tf.df$affinity_pos3 == 1 & tf.df$affinity_pos4 == 1 | tf.df$affinity_pos1 == 2 & tf.df$affinity_pos2 == 2 & tf.df$affinity_pos3 == 2 & tf.df$affinity_pos4 == 2 | tf.df$affinity_pos1 == 3 & tf.df$affinity_pos2 == 3 & tf.df$affinity_pos3 == 3 & tf.df$affinity_pos4 == 3 | tf.df$affinity_pos1 == 4 & tf.df$affinity_pos2 == 4 & tf.df$affinity_pos3 == 4 & tf.df$affinity_pos4 == 4,] %>% dplyr::select(-spacing) %>% unique()
space_gr <- space[space$tf == "Gr",]
space_trp53 <- space[space$tf == "Trp53",]
# The default spacings (7 for Trp53, 10 for Gr) already exist in tf.df,
# so they are omitted from these ranges.
spacing_trp53 <- data.frame(spacing = c(0,1,2,3,4,5,6,8,9,10))
spacing_gr <- data.frame(spacing = c(0,1,2,3,4,5,6,7,8,9))
space_gr <- merge(space_gr, spacing_gr, all = T)
space_trp53 <- merge(space_trp53, spacing_trp53, all = T)
tf.df <- unique(rbind(tf.df,space_gr, space_trp53))

## position - move TF motifs in 1bp steps - nucleosome position preference?
## only for the 4 standard affinities
## add sequence in front of first motif
dist <- tf.df[tf.df$affinity_pos1 == 0 & tf.df$affinity_pos2 == 0 & tf.df$affinity_pos3 == 0 & tf.df$affinity_pos4 == 0 | tf.df$affinity_pos1 == 1 & tf.df$affinity_pos2 == 1 & tf.df$affinity_pos3 == 1 & tf.df$affinity_pos4 == 1 | tf.df$affinity_pos1 == 2 & tf.df$affinity_pos2 == 2 & tf.df$affinity_pos3 == 2 & tf.df$affinity_pos4 == 2 | tf.df$affinity_pos1 == 3 & tf.df$affinity_pos2 == 3 & tf.df$affinity_pos3 == 3 & tf.df$affinity_pos4 == 3 | tf.df$affinity_pos1 == 4 & tf.df$affinity_pos2 == 4 & tf.df$affinity_pos3 == 4 & tf.df$affinity_pos4 == 4,] %>% dplyr::select(-position) %>% unique()
# Only the standard spacings get the positional scan.
dist_trp53 <- dist[dist$spacing == 7 & dist$tf == "Trp53",]
dist_gr <- dist[dist$spacing == 10 & dist$tf == "Gr",]
position <- data.frame(position = c(1:10))
dist_gr <- merge(dist_gr, position, all = T)
dist_trp53 <- merge(dist_trp53, position, all = T)
tf.df <- unique(rbind(tf.df,dist_gr, dist_trp53))
# Adding the DNA sequence from 5' to 3'

## Constant 5' primer sequence
tf.df$primer1_seq <- "CGGAGCGAACCGAGTTAG"

### Motif 1-4
# Look up the motif sequence for every (affinity, TF) pair in the `motifs`
# table (columns TF / Affinity / Motif). The loop iterates over the affinity
# levels present in affinity_pos1 and reuses them for all four positions;
# assumes exactly one motif per (TF, affinity) pair -- otherwise the
# assignment would recycle or fail. TODO confirm against TF_motifs.csv.
for (i in unique(tf.df$affinity_pos1)) {
  for (j in unique(tf.df$tf)) {
    tf.df$motif1[tf.df$affinity_pos1 == i & tf.df$tf == j] <-
      motifs$Motif[motifs$TF == j & motifs$Affinity == i]
    tf.df$motif2[tf.df$affinity_pos2 == i & tf.df$tf == j] <-
      motifs$Motif[motifs$TF == j & motifs$Affinity == i]
    tf.df$motif3[tf.df$affinity_pos3 == i & tf.df$tf == j] <-
      motifs$Motif[motifs$TF == j & motifs$Affinity == i]
    tf.df$motif4[tf.df$affinity_pos4 == i & tf.df$tf == j] <-
      motifs$Motif[motifs$TF == j & motifs$Affinity == i]
  }
}

## Spacer sequence between TF motifs
# Placeholders; the actual spacer/distance sequences are selected in the
# FIMO-screening sections below.
tf.df$space1 <- ""
tf.df$space2 <- ""
tf.df$space3 <- ""
tf.df$distance.seq <- ""

## Minimal promoter
tf.df$promoter_sequence[tf.df$promoter == "minP"]<- "TAGAGGGTATATAATGGAAGCTCGACTTCCAG"
tf.df$promoter_sequence[tf.df$promoter == "mCMV"] <- "GGCGTTTACTATGGGAGGTCTATATAAGCAGAGCTCGTTTAGTGAACCGTCAGATC"

## S1 Illumina adapter
tf.df$s1_primer <- "CACGACGCTCTTCCGATCT"

## Barcode
# Placeholder; barcodes are assigned later.
tf.df$barcode <- ""

## 3' Primer sequence
tf.df$primer2_seq <- "CATCGTCGCATCCAAGAG"
```
## Add positive & negative controls
```{r}
# Random inactive promoters
promoter_inactive <- "GGTTAGCGATCCAATTCAGCTAGATTTTAAGC"

# Just select the first random sequence promoter for a single condition
# Select 10bp spacer, 0bp position, minP, background 1 - keep all replicates & all TFs
# Bind to tf.df
rd.promoter <- tf.df
# `&` binds tighter than `|`: (Gr & spacing 10) or (Trp53 & spacing 7),
# i.e. each TF at its standard spacing.
rd.promoter <- rd.promoter[rd.promoter$spacing == 10 & rd.promoter$tf == "Gr" |
                             rd.promoter$spacing == 7 & rd.promoter$tf == "Trp53",]
rd.promoter <- rd.promoter[rd.promoter$background == 1 & rd.promoter$position == 0 &
                             rd.promoter$promoter == "minP",]
rd.promoter$promoter <- "random"
rd.promoter$promoter_sequence <- promoter_inactive
tf.df <- rbind(tf.df, rd.promoter)

# Add shuffled negative controls
## Only for 10bp spacing & 0bp position
control.df <- subset(tf.df, select = c(-tf,-motif1,-motif2,-motif3,-motif4))
control.df <- unique(control.df[control.df$affinity_pos1 == "0" & control.df$affinity_pos2 == "0" &
                                  control.df$affinity_pos3 == "0" & control.df$affinity_pos4 == "0",])
control.df <- control.df[control.df$background == 1,]
# Keep only barcode replicates 1-4 (drops replicate 5).
control.df <- control.df[grep(paste(c(1,2,3,4), collapse = "|"), control.df$oligo.barcode),]
# Shuffled-motif control sequences; merge() is a cross join here unless the
# control csv shares columns with control.df -- TODO confirm the csv schema.
control.motifs <- read.csv2("/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/parameter_files/TF_motifs_ctrl.csv", header = T)
control.motifs <- merge(control.motifs, control.df)
tf.df <- rbind(tf.df, control.motifs)
```
## There is a lot of unwanted TF binding using the spacers from the previous approach
## Instead, I can try to design new spacers that have limited TF binding at all lengths (0-10bp)
### Step 1: Generate 10bp spacers(Gr) & 7bp spacers (Trp53) that don't bind any TFs
```{r}
# Subselect one spacing and neighboring TF motifs, make rows for random spacings that will be created
motif <- tf.df %>% dplyr::select(tf, motif1, motif2, affinity_pos1, affinity_pos2) %>% unique()
motif$tf <- paste(motif$tf, motif$affinity_pos1, motif$affinity_pos2, sep = "_")
motif$space <- ""
iteration <- 1:2000
# merge() with a bare vector coerces it to a data.frame whose column is
# named `y`; hence the motif$y references below.
motif <- merge(motif, iteration, all=T)

# Generate 2000 random spacings between TF motifs
# GC content ~50%
set.seed(948)
for (i in 1:2000) {
  # All motif combinations with the same iteration id share spacer i.
  motif$space[motif$y ==i] <- sim.DNAseq(10, GCfreq = 0.5)
}
# Trp53 designs use 7-bp spacers: truncate the 10-mers.
motif$space[grep("Trp53", motif$tf)] <- substr(motif$space[grep("Trp53", motif$tf)], 1, 7)
motif_space <- motif
motif_space$seq.name <- paste(motif_space$tf, motif_space$y, sep = "_")

# Assemble sequence to test: 4 rim bases of the TF motifs and the spacer sequence
motif_space$seq.text <- paste(substrRight(motif_space$motif1, 4),
                              motif_space$space,
                              substr(motif_space$motif2, 1, 4), sep = "")

# Write fasta file to run on FIMO script
motif_space_export <- motif_space
motif_space_export <- motif_space_export %>% dplyr::select("seq.name", "seq.text") %>% unique()
#dat2fasta(motif_space_export, outfile = "/DATA/usr/m.trauernicht/projects/tf_activity_reporter/data/trp53_model/fimo/spacing_new.fasta")
```
## Run FIMO script again
```{bash run fimo spacings, eval = FALSE}
# motfn=/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/20170320_pwms_selected.meme
# odir=/home/m.trauernicht/mydata/projects/tf_activity_reporter/data/trp53_model/output/spacing_new
# query="/home/m.trauernicht/mydata/projects/tf_activity_reporter/data/trp53_model/fimo/spacing_new.fasta"
# nice -n 19 fimo --no-qvalue --thresh 1e-4 --verbosity 1 --o $odir $motfn $query
```
## load fimo results
We built a TF motif matrix using -log10 transformed FIMO scores. We used this feature encoding throughout the rest of this analysis, unless otherwise stated.
```{r build tf motif matrices db2: tf_check, out.width= "100%", fig.align= "center", echo=FALSE}
# load motif Metadata --> PWM feature matrix
# (-log10(p-value) FIMO hit scores for each candidate spacer sequence)
tib_pwm_10bp_space_new <- get_pwm_feature_matrix(motif_meta_fn = '/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/fc181127_curated_metadata_no_composite_filt.csv',
                                                 fimo_fn = '/home/m.trauernicht/mydata/projects/SuRE_deep_scan_trp53_gr/data/library_design/output/spacing_new/fimo.tsv',
                                                 db = 2)
```
## visualize fimo results
```{r out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
## We make a selection of 10bp spacers with no predicted TF binding
# convert to binary (1 = any FIMO hit for that TF, 0 = none)
tib_pwm_10bp_space_new_binary <- tib_pwm_10bp_space_new %>% mutate_if(is.numeric, function(x) ifelse((x==0 | is.na(x)),0,1))
tib_pwm_10bp_space_new_binary_top <- tib_pwm_10bp_space_new_binary

# compute rowsums to get cumulative binding
tib_pwm_10bp_space_new_binary_top$binding <- rowSums(tib_pwm_10bp_space_new_binary_top[,2:ncol(tib_pwm_10bp_space_new_binary_top)])

# select only cumulative binding and id
tib_pwm_10bp_space_new_binary_top <- tib_pwm_10bp_space_new_binary_top %>%
  dplyr::select(id,binding)

# make third column with only the space_id - then sum up the binding scores per space_id
# (id format is <tf>_<aff1>_<aff2>_<iteration>; the regex keeps the last "_" field)
tib_pwm_10bp_space_new_binary_top$space <- gsub(".*_(.*)", "\\1", tib_pwm_10bp_space_new_binary_top$id)
tib_pwm_10bp_space_new_binary_top <- tib_pwm_10bp_space_new_binary_top %>%
  dplyr::select(-id)
tib_pwm_10bp_space_new_binary_top$cum_binding <- ave(tib_pwm_10bp_space_new_binary_top$binding, tib_pwm_10bp_space_new_binary_top$space,
                                                     FUN = sum)
tib_pwm_10bp_space_new_binary_top <- tib_pwm_10bp_space_new_binary_top %>%
  dplyr::select(-binding)
tib_pwm_10bp_space_new_binary_top <- unique(tib_pwm_10bp_space_new_binary_top)

# Identify spacers that were excluded from fimo script (due to 0 hits)
# (numeric-vs-character %in% works via implicit coercion to character)
space_id <- unique(tib_pwm_10bp_space_new_binary_top$space)
iteration <- 1:2000
space_nohit <- iteration[! iteration %in% space_id]

# Remove EcoRI/NheI sites
remove_10bp <- motif_space[grep(paste(ecori_nhei, collapse = "|"), motif_space$seq.text),]
remove_10bp <- unique(remove_10bp$y)
space_nohit <- space_nohit[!space_nohit %in% remove_10bp]
print(paste("we have ", length(space_nohit), " hits with no associated TF binding", sep = ""))
```
## Step 2: Truncate the list of possible spacings and take those with least TF binding
```{r out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# Keep only the full-length (10 bp) hit-free spacers and rename to
# seq.name/seq.text for fasta export.
spacers <- motif[motif$y %in% space_nohit,] %>%
  dplyr::select(y, space) %>% unique() %>%
  filter(nchar(space) == 10) %>%
  setnames(c("y", "space"), c("seq.name", "seq.text"))
spacers$seq.name <- paste(spacers$seq.name, "_10", sep = "")
# Build every truncation length 1-9 of each 10-mer (prefixes), encoding the
# length in the seq.name suffix.
spacers_9 <- spacers %>%
  mutate(seq.name = gsub("_10","_9", seq.name),
         seq.text = substr(seq.text,1,9))
spacers_8 <- spacers %>%
  mutate(seq.name = gsub("_10","_8", seq.name),
         seq.text = substr(seq.text,1,8))
spacers_7 <- spacers %>%
  mutate(seq.name = gsub("_10","_7", seq.name),
         seq.text = substr(seq.text,1,7))
spacers_6 <- spacers %>%
  mutate(seq.name = gsub("_10","_6", seq.name),
         seq.text = substr(seq.text,1,6))
spacers_5 <- spacers %>%
  mutate(seq.name = gsub("_10","_5", seq.name),
         seq.text = substr(seq.text,1,5))
spacers_4 <- spacers %>%
  mutate(seq.name = gsub("_10","_4", seq.name),
         seq.text = substr(seq.text,1,4))
spacers_3 <- spacers %>%
  mutate(seq.name = gsub("_10","_3", seq.name),
         seq.text = substr(seq.text,1,3))
spacers_2 <- spacers %>%
  mutate(seq.name = gsub("_10","_2", seq.name),
         seq.text = substr(seq.text,1,2))
spacers_1 <- spacers %>%
  mutate(seq.name = gsub("_10","_1", seq.name),
         seq.text = substr(seq.text,1,1))
spacers <- rbind(spacers, spacers_9, spacers_8, spacers_7, spacers_6, spacers_5, spacers_4,
                 spacers_3, spacers_2, spacers_1)

# Rebuild the motif-pair table; merge() has no shared columns with spacers,
# so this is a cross join (every spacer against every motif pair).
motif <- tf.df %>% dplyr::select(tf, motif1, motif2, affinity_pos1, affinity_pos2) %>% unique()
motif$tf <- paste(motif$tf, motif$affinity_pos1, motif$affinity_pos2, sep = "_")
spacers <- merge(spacers, motif)
# Keep an un-flanked copy for the final spacer selection further below.
space <- spacers

# Assemble sequence to test: 4 rim bases of the TF motifs and the spacer sequence
spacers$seq.text <- paste(substrRight(spacers$motif1, 4),
                          spacers$seq.text,
                          substr(spacers$motif2, 1, 4), sep = "")
spacers$seq.name <- paste(spacers$seq.name, spacers$tf, sep = "_")

# Write fasta file to run on FIMO script
spacers_export <- spacers
spacers_export <- spacers_export %>% dplyr::select(seq.name, seq.text) %>% unique()
#dat2fasta(spacers_export, outfile = "/DATA/usr/m.trauernicht/projects/tf_activity_reporter/data/trp53_model/fimo/spacing_new_trunc.fasta")
```
## Run FIMO script again
```{bash run fimo spacings trunc, eval = FALSE}
# motfn=/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/20170320_pwms_selected.meme
# odir=/home/m.trauernicht/mydata/projects/tf_activity_reporter/data/trp53_model/output/spacing_new_trunc_2
# query="/home/m.trauernicht/mydata/projects/tf_activity_reporter/data/trp53_model/fimo/spacing_new_trunc.fasta"
# nice -n 19 fimo --no-qvalue --thresh 1e-4 --verbosity 1 --o $odir $motfn $query
```
## load fimo results
We built a TF motif matrix using -log10 transformed FIMO scores. We used this feature encoding throughout the rest of this analysis, unless otherwise stated.
```{r build tf motif matrices db2: spacing_trunc, out.width= "100%", fig.align= "center", echo=FALSE}
# load motif Metadata --> PWM feature matrix
# (FIMO scores for the truncated spacer variants)
tib_pwm_10bp_space_new_trunc <- get_pwm_feature_matrix(motif_meta_fn = '/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/fc181127_curated_metadata_no_composite_filt.csv',
                                                       fimo_fn = '/home/m.trauernicht/mydata/projects/SuRE_deep_scan_trp53_gr/data/library_design/output/spacing_new_trunc_2/fimo.tsv',
                                                       db = 2)
```
## visualize fimo results
```{r out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
## We make a selection of spacers whose truncations also show no TF binding
# convert to binary (1 = any FIMO hit, 0 = none)
tib_pwm_10bp_space_new_trunc_binary <- tib_pwm_10bp_space_new_trunc %>% mutate_if(is.numeric, function(x) ifelse((x==0 | is.na(x)),0,1))
tib_pwm_10bp_space_new_trunc_binary_top <- tib_pwm_10bp_space_new_trunc_binary

# compute rowsums to get cumulative binding
tib_pwm_10bp_space_new_trunc_binary_top$binding <- rowSums(tib_pwm_10bp_space_new_trunc_binary_top[,2:ncol(tib_pwm_10bp_space_new_trunc_binary_top)])

# select only cumulative binding and id
tib_pwm_10bp_space_new_trunc_binary_top <- tib_pwm_10bp_space_new_trunc_binary_top %>%
  dplyr::select(id,binding)

# make third column with only the space_id - then sum up the binding scores per space_id
# (here the spacer id is the leading numeric field of the seq name)
tib_pwm_10bp_space_new_trunc_binary_top$space <- gsub("(^[0-9]{1,4})_.*", "\\1", tib_pwm_10bp_space_new_trunc_binary_top$id)
tib_pwm_10bp_space_new_trunc_binary_top <- tib_pwm_10bp_space_new_trunc_binary_top %>%
  dplyr::select(-id)
tib_pwm_10bp_space_new_trunc_binary_top$cum_binding <- ave(tib_pwm_10bp_space_new_trunc_binary_top$binding, tib_pwm_10bp_space_new_trunc_binary_top$space,
                                                           FUN = sum)
tib_pwm_10bp_space_new_trunc_binary_top <- tib_pwm_10bp_space_new_trunc_binary_top %>%
  dplyr::select(-binding)
tib_pwm_10bp_space_new_trunc_binary_top <- unique(tib_pwm_10bp_space_new_trunc_binary_top)

# Select spaces without TF hits
space_id <- unique(tib_pwm_10bp_space_new_trunc_binary_top$space)
iteration <- unique(gsub("(^[0-9]{1,4})_.*", "\\1", spacers$seq.name))
space_nohit <- iteration[! iteration %in% space_id]
# NOTE(review): this second filter is redundant -- space_nohit already
# excludes everything in ..._top$space -- but it is harmless.
select <- space_nohit[!space_nohit %in% tib_pwm_10bp_space_new_trunc_binary_top$space]

# Order to see which TF binds to them; keep the 8 least-bound spacer ids
# (the data frame is overwritten with a plain character vector here).
tib_pwm_10bp_space_new_trunc_binary_top <- tib_pwm_10bp_space_new_trunc_binary_top[order(tib_pwm_10bp_space_new_trunc_binary_top$cum_binding),]
tib_pwm_10bp_space_new_trunc_binary_top <- tib_pwm_10bp_space_new_trunc_binary_top$space[1:8]

# convert to matrix - leave out id
tib_pwm_10bp_space_new_trunc$space <- gsub("(^[0-9]{1,4})_.*", "\\1", tib_pwm_10bp_space_new_trunc$id)
tib_pwm_10bp_space_new_trunc_select <- tib_pwm_10bp_space_new_trunc[tib_pwm_10bp_space_new_trunc$space %in%
                                                                      tib_pwm_10bp_space_new_trunc_binary_top,]
tib_pwm_10bp_space_new_truncMatrix <- as.matrix(dplyr::select(tib_pwm_10bp_space_new_trunc_select,-id,-space))

# assign ids as rownames of matrix
rownames(tib_pwm_10bp_space_new_truncMatrix) <- tib_pwm_10bp_space_new_trunc_select$id
```
```{r, fig.height=10, fig.width=10, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
#remove columns with only 0s and plot heatmap
# (TF columns where no spacer has any hit are dropped to keep the plot readable)
heatmaply(tib_pwm_10bp_space_new_truncMatrix[, colSums(tib_pwm_10bp_space_new_truncMatrix != 0) > 0])
```
# Select spacers
```{r}
# Select sequences from original file
# Decode length and spacer id back out of the seq.name ("<id>_<length>").
space <- space %>%
  dplyr::select(seq.name, seq.text) %>% unique() %>%
  mutate(length = gsub(".*_(.*)", "\\1", seq.name),
         id = gsub("(.*)_.*", "\\1", seq.name)) %>%
  dplyr::select(-seq.name) %>% setnames("seq.text", "spacing")
# `select` (character vector of hit-free spacer ids from the previous chunk)
# shadows dplyr::select -- which is why select() calls here are
# namespace-qualified.
select <- union(select, tib_pwm_10bp_space_new_trunc_binary_top)
spacers_selected <- unique(space[space$id %in% select,])
```
# Generate 3 new 10bp distances - the distances used previously couldn't be used because a LEF1 motif was created with one of the GR motifs
```{r}
# Subselect one spacing and neighboring TF motifs, make rows for random spacings that will be created
# Here the candidate sequence sits between motif4 and the promoter.
motif <- tf.df %>% dplyr::select(tf, motif4, affinity_pos4, promoter, promoter_sequence) %>% unique()
motif$tf <- paste(motif$tf, motif$affinity_pos4, motif$promoter, sep = "_")
motif$space <- ""
iteration <- 1:500
# merge() with a bare vector yields a data.frame column named `y`.
motif <- merge(motif, iteration, all=T)

# Generate 500 random distance sequences between motif4 and the promoter
# GC content ~50%
set.seed(23423)
for (i in 1:500) {
  motif$space[motif$y ==i] <- sim.DNAseq(10, GCfreq = 0.5)
}
motif_space <- motif
motif_space$seq.name <- paste(motif_space$tf, motif_space$y, sep = "_")

# Assemble sequence to test: 4 rim bases of motif4/promoter and the spacer sequence
motif_space$seq.text <- paste(substrRight(motif_space$motif4, 4),
                              motif_space$space,
                              substr(motif_space$promoter_sequence, 1, 4), sep = "")

# Write fasta file to run on FIMO script
motif_space_export <- motif_space
motif_space_export <- motif_space_export %>% dplyr::select("seq.name", "seq.text") %>% unique()
#dat2fasta(motif_space_export, outfile = "/DATA/usr/m.trauernicht/projects/tf_activity_reporter/data/trp53_model/fimo/distance_new.fasta")
```
## Run FIMO script again
```{bash run fimo distances, eval = FALSE}
# motfn=/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/20170320_pwms_selected.meme
# odir=/home/m.trauernicht/mydata/projects/tf_activity_reporter/data/trp53_model/output/distance_new
# query="/home/m.trauernicht/mydata/projects/tf_activity_reporter/data/trp53_model/fimo/distance_new.fasta"
# nice -n 19 fimo --no-qvalue --thresh 1e-4 --verbosity 1 --o $odir $motfn $query
```
## load fimo results
We built a TF motif matrix using -log10 transformed FIMO scores. We used this feature encoding throughout the rest of this analysis, unless otherwise stated.
```{r build tf motif matrices db2: distance_new, out.width= "100%", fig.align= "center", echo=FALSE}
# load motif Metadata --> PWM feature matrix
# (FIMO scores for the candidate motif4-to-promoter distance sequences)
tib_pwm_10bp_dist_new <- get_pwm_feature_matrix(motif_meta_fn = '/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/fc181127_curated_metadata_no_composite_filt.csv',
                                                fimo_fn = '/home/m.trauernicht/mydata/projects/SuRE_deep_scan_trp53_gr/data/library_design/output/distance_new/fimo.tsv',
                                                db = 2)
```
## visualize fimo results
```{r out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
## We make a selection of 10bp distance sequences with no predicted TF binding
# convert to binary (1 = any FIMO hit, 0 = none)
tib_pwm_10bp_dist_new_binary <- tib_pwm_10bp_dist_new %>% mutate_if(is.numeric, function(x) ifelse((x==0 | is.na(x)),0,1))
tib_pwm_10bp_dist_new_binary_top <- tib_pwm_10bp_dist_new_binary

# compute rowsums to get cumulative binding
tib_pwm_10bp_dist_new_binary_top$binding <- rowSums(tib_pwm_10bp_dist_new_binary_top[,2:ncol(tib_pwm_10bp_dist_new_binary_top)])

# select only cumulative binding and id
tib_pwm_10bp_dist_new_binary_top <- tib_pwm_10bp_dist_new_binary_top %>%
  dplyr::select(id,binding)

# make third column with only the space_id - then sum up the binding scores per space_id
tib_pwm_10bp_dist_new_binary_top$space <- gsub(".*_(.*)", "\\1", tib_pwm_10bp_dist_new_binary_top$id)
tib_pwm_10bp_dist_new_binary_top <- tib_pwm_10bp_dist_new_binary_top %>%
  dplyr::select(-id)
tib_pwm_10bp_dist_new_binary_top$cum_binding <- ave(tib_pwm_10bp_dist_new_binary_top$binding, tib_pwm_10bp_dist_new_binary_top$space,
                                                    FUN = sum)
tib_pwm_10bp_dist_new_binary_top <- tib_pwm_10bp_dist_new_binary_top %>%
  dplyr::select(-binding)
tib_pwm_10bp_dist_new_binary_top <- unique(tib_pwm_10bp_dist_new_binary_top)

# Identify spacers that were excluded from fimo script (due to 0 hits)
space_id <- unique(tib_pwm_10bp_dist_new_binary_top$space)
iteration <- 1:500
space_nohit <- iteration[! iteration %in% space_id]

# Remove EcoRI/NheI sites
remove_10bp <- motif_space[grep(paste(ecori_nhei, collapse = "|"), motif_space$seq.text),]
remove_10bp <- unique(remove_10bp$y)
space_nohit <- space_nohit[!space_nohit %in% remove_10bp]
print(paste("we have ", length(space_nohit), " hits with no associated TF binding", sep = ""))
```
# Select distances
```{r}
# Select sequences from original file
motif <- motif %>%
  dplyr::select(space, y) %>% unique()
# Keep the first 3 hit-free distance sequences.
distance_selected <- motif[motif$y %in% space_nohit,]
distance_selected <- distance_selected$space[1:3]
```
# Add spacing at beginning to test postioning theory
## Generate 500 random 10bp sequences - truncate - test in combination with primer1 & motif1 - take top 3
```{r}
# Candidate sequences go between the constant 5' primer and motif1.
position <- tf.df %>% dplyr::select(primer1_seq, motif1, tf, affinity_pos1) %>% unique()
position$tf <- paste(position$tf, position$affinity_pos1, sep = "_")
position$space <- ""
iteration <- 1:500
# merge() with a bare vector yields a data.frame column named `y`.
position <- merge(position, iteration, all=T)

# Generate 500 random 10bp sequences in front of motif1
# GC content ~50%
set.seed(21424)
for (i in 1:500) {
  position$space[position$y ==i] <- sim.DNAseq(10, GCfreq = 0.5)
}
position$seq.name <- paste(position$tf, "_10", sep = "")
# Build every truncation length 1-9 (prefixes), encoding the length in the
# seq.name suffix.
position_9 <- position %>%
  mutate(seq.name = gsub("_10","_9", seq.name),
         space = substr(space,1,9))
position_8 <- position %>%
  mutate(seq.name = gsub("_10","_8", seq.name),
         space = substr(space,1,8))
position_7 <- position %>%
  mutate(seq.name = gsub("_10","_7", seq.name),
         space = substr(space,1,7))
position_6 <- position %>%
  mutate(seq.name = gsub("_10","_6", seq.name),
         space = substr(space,1,6))
position_5 <- position %>%
  mutate(seq.name = gsub("_10","_5", seq.name),
         space = substr(space,1,5))
position_4 <- position %>%
  mutate(seq.name = gsub("_10","_4", seq.name),
         space = substr(space,1,4))
position_3 <- position %>%
  mutate(seq.name = gsub("_10","_3", seq.name),
         space = substr(space,1,3))
position_2 <- position %>%
  mutate(seq.name = gsub("_10","_2", seq.name),
         space = substr(space,1,2))
position_1 <- position %>%
  mutate(seq.name = gsub("_10","_1", seq.name),
         space = substr(space,1,1))
position <- rbind(position, position_9, position_8, position_7, position_6, position_5, position_4,
                  position_3, position_2, position_1)
position_space <- position

# Assemble sequence to test: 4 rim bases of primer1/motif1 and the spacer sequence
position_space$seq.name <- paste(position_space$seq.name, position_space$y, sep = "_")
position_space$seq.text <- paste(substrRight(position_space$primer1_seq, 4),
                                 position_space$space,
                                 substr(position_space$motif1, 1, 4), sep = "")

# Write fasta file to run on FIMO script
position_space_export <- position_space
position_space_export <- position_space_export %>% dplyr::select(seq.name, seq.text) %>% unique()
#dat2fasta(position_space_export, outfile = "/DATA/usr/m.trauernicht/projects/tf_activity_reporter/data/trp53_model/fimo/position.fasta")
```
## Run FIMO script again
```{bash run fimo position spacings, eval = FALSE}
# motfn=/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/20170320_pwms_selected.meme
# odir=/home/m.trauernicht/mydata/projects/tf_activity_reporter/data/trp53_model/output/position
# query="/home/m.trauernicht/mydata/projects/tf_activity_reporter/data/trp53_model/fimo/position.fasta"
# nice -n 19 fimo --no-qvalue --thresh 1e-4 --verbosity 1 --o $odir $motfn $query
```
## load fimo results
We built a TF motif matrix using -log10 transformed FIMO scores. We used this feature encoding throughout the rest of this analysis, unless otherwise stated.
```{r build tf motif matrix position, out.width= "100%", fig.align= "center", echo=FALSE}
# Load motif metadata and build the PWM feature matrix (-log10 FIMO scores)
# for the candidate 3' position spacers.
# NOTE(review): the fasta above was written under tf_activity_reporter but the
# fimo.tsv is read from SuRE_deep_scan_trp53_gr -- confirm both point at the same run.
tib_pwm_position <- get_pwm_feature_matrix(motif_meta_fn = '/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/fc181127_curated_metadata_no_composite_filt.csv',
fimo_fn = '/home/m.trauernicht/mydata/projects/SuRE_deep_scan_trp53_gr/data/library_design/output/position/fimo.tsv',
db = 2)
```
## visualize fimo results
```{r out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
## Select candidate 3' spacers with the least predicted TF binding
# Convert FIMO scores to binary hit/no-hit (0 or NA -> 0, any score -> 1)
tib_pwm_position_binary <- tib_pwm_position %>% mutate_if(is.numeric, function(x) ifelse((x==0 | is.na(x)),0,1))
tib_pwm_position_binary_top <- tib_pwm_position_binary
# Row sums over all motif columns = number of TF motifs hitting each sequence
tib_pwm_position_binary_top$binding <- rowSums(tib_pwm_position_binary_top[,2:ncol(tib_pwm_position_binary_top)])
# Keep only the cumulative binding count and the sequence id
tib_pwm_position_binary_top <- tib_pwm_position_binary_top %>%
dplyr::select(id,binding)
# Extract the space_id (last "_"-separated field of id), then sum binding per space_id
tib_pwm_position_binary_top$space <- gsub(".*_(.*)", "\\1", tib_pwm_position_binary_top$id)
tib_pwm_position_binary_top <- tib_pwm_position_binary_top %>%
dplyr::select(-id)
tib_pwm_position_binary_top$cum_binding <- ave(tib_pwm_position_binary_top$binding, tib_pwm_position_binary_top$space,
FUN = sum)
tib_pwm_position_binary_top <- tib_pwm_position_binary_top %>%
dplyr::select(-binding)
tib_pwm_position_binary_top <- unique(tib_pwm_position_binary_top)
# Identify spacers that were excluded from the fimo output (zero hits anywhere)
space_id <- unique(tib_pwm_position_binary_top$space)
iteration <- 1:500
space_nohit <- iteration[! iteration %in% space_id]
# Drop spacers whose assembled test sequence contains an EcoRI/NheI site
remove_10bp <- position_space[grep(paste(ecori_nhei, collapse = "|"), position_space$seq.text),]
remove_10bp <- unique(remove_10bp$y)
space_nohit <- space_nohit[!space_nohit %in% remove_10bp]
print(paste("we have ", length(space_nohit), " hits with no associated TF binding", sep = ""))
# NOTE(review): space_nohit already excludes every space id present in the fimo
# output, so this filter looks like a no-op safeguard -- confirm that is intended
select <- space_nohit[!space_nohit %in% tib_pwm_position_binary_top$space]
# Order by cumulative binding; keep the two least-bound spacer ids as backups
tib_pwm_position_binary_top <- tib_pwm_position_binary_top[order(tib_pwm_position_binary_top$cum_binding),]
tib_pwm_position_binary_top <- tib_pwm_position_binary_top$space[1:2]
# Convert score table to a matrix for the heatmap - leave out id/space columns
tib_pwm_position$space <- gsub(".*_(.*)", "\\1", tib_pwm_position$id)
tib_pwm_position_select <- tib_pwm_position[tib_pwm_position$space %in%
tib_pwm_position_binary_top,]
tib_pwm_positionMatrix <- as.matrix(dplyr::select(tib_pwm_position_select,-id,-space))
# assign ids as rownames of matrix
rownames(tib_pwm_positionMatrix) <- tib_pwm_position_select$id
```
```{r, fig.height=10, fig.width=10, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# Plot FIMO scores for the selected spacers; columns (motifs) that are all-zero
# are removed first so the heatmap only shows motifs with at least one hit.
heatmaply(tib_pwm_positionMatrix[, colSums(tib_pwm_positionMatrix != 0) > 0])
```
# Select position spacers
```{r}
# Reduce the candidate table to (space, y, spacer length) per sequence;
# length is the last "_"-separated field of seq.name.
position <- position %>%
dplyr::select(seq.name, space, y) %>% unique() %>%
mutate(length = gsub(".*_(.*)", "\\1", seq.name)) %>%
dplyr::select(-seq.name)
# Final selection = no-hit spacers plus the two least-bound backup spacers
select <- union(select, tib_pwm_position_binary_top)
position_selected <- unique(position[position$y %in% select,])
```
## Add spacings to tf.df
```{r}
# Assign the selected neutral spacer sequences (one set per background) and
# the selected 3' position sequences to every reporter in tf.df.

## Prepare spacings: split the 9 selected spacers into 3 backgrounds of 3 each
spacers_selected$id <- 1:9
# BUG FIX: the original used `id == c(1,2,3)`, which recycles the right-hand
# vector and compares element-wise instead of testing membership; it only
# selected the intended rows by accident because id happened to be 1:9.
# `%in%` states the intent directly and is robust to row order.
spacers_selected_1 <- spacers_selected[spacers_selected$id %in% c(1,2,3),]
spacers_selected_2 <- spacers_selected[spacers_selected$id %in% c(4,5,6),]
spacers_selected_2$id <- 1:3
spacers_selected_3 <- spacers_selected[spacers_selected$id %in% c(7,8,9),]
spacers_selected_3$id <- 1:3
# Length-0 "spacer" rows so the j == 0 lookups below resolve to an empty string
zero <- data.frame(spacing = c("","",""),
                   length = c(0,0,0),
                   id = 1:3)
spacers_selected_1 <- rbind(spacers_selected_1, zero)
spacers_selected_2 <- rbind(spacers_selected_2, zero)
spacers_selected_3 <- rbind(spacers_selected_3, zero)

## Add spacings: background i gets spacer set i, matched on spacer length j
for (i in 1:3) {
  for (j in 0:10) {
    tf.df$space1[tf.df$background == i & tf.df$spacing == j] <-
      spacers_selected_1$spacing[spacers_selected_1$length == j & spacers_selected_1$id == i]
    tf.df$space2[tf.df$background == i & tf.df$spacing == j] <-
      spacers_selected_2$spacing[spacers_selected_2$length == j & spacers_selected_2$id == i]
    tf.df$space3[tf.df$background == i & tf.df$spacing == j] <-
      spacers_selected_3$spacing[spacers_selected_3$length == j & spacers_selected_3$id == i]
  }
}

## Add distances: one fixed distance sequence per background
for (i in 1:3) {
  tf.df$distance.seq[tf.df$background == i] <- distance_selected[i]
}

## Add 3' spacers
## Prepare spacings
# NOTE(review): assumes position_selected holds exactly 3 rows here -- confirm
position_selected$y <- 1:3
zero <- data.frame(space = c("","",""),
                   length = c(0,0,0),
                   y = 1:3)
position_selected <- rbind(position_selected, zero)
## Add spacings: matched on background i and position length j
for (i in 1:3) {
  for (j in 0:10) {
    tf.df$position_seq[tf.df$background == i & tf.df$position == j] <-
      position_selected$space[position_selected$length == j & position_selected$y == i]
  }
}
```
## Include published Trp53 & Gr reporters
```{r}
## O'Connell reporters - published response elements (REs), copied verbatim
trp53_re <- "TACAGAACATGTCTAAGCATGCTGTGCCTTGCCTGGACTTGCCTGGCCTTGCCTTGGG"
gr_re <- "GGGAACATTATGTCCTGTGGGAACAGTATGTCCTGAGGGAACATTATGTCCTGTGGGAACATTATGTCCTGT"
## Start from the motif-free reporter scaffolds in tf.df (Gr and Trp53 only)
tf.df.connell <- unique(tf.df[tf.df$position == 0 &
tf.df$affinity_pos1 == 0 &
tf.df$affinity_pos2 == 0 & tf.df$affinity_pos3 == 0 &
tf.df$affinity_pos4 == 0,]) %>%
filter(tf %in% c("Gr", "Trp53"))
# Keep one spacing variant per TF so each published RE appears once per scaffold
tf.df.connell <- tf.df.connell[tf.df.connell$spacing == 10 & tf.df.connell$tf == "Gr" |
tf.df.connell$spacing == 7 & tf.df.connell$tf == "Trp53",]
# Blank out all designed motifs/spacers; the published RE replaces them
tf.df.connell <- tf.df.connell %>%
mutate(motif1 = "",
space1 = "",
space2 = "",
space3 = "",
motif3 = "",
motif4 = "")
## Insert the published RE as motif2
tf.df.connell$motif2[tf.df.connell$tf == "Trp53"] <- trp53_re
tf.df.connell$motif2[tf.df.connell$tf == "Gr"] <- gr_re
## REs are shorter than mine - add 3' additional sequence as space1 to prevent PCR bias
### For Gr 60 bp, for Trp53 70 bp
### Generate random sequences - select those with least binding
tf.df.connell$position[tf.df.connell$tf == "Gr"] <- 60
tf.df.connell$position[tf.df.connell$tf == "Trp53"] <- 70
position <- tf.df.connell %>% dplyr::select(primer1_seq, motif2, tf) %>% unique()
position$space <- ""
iteration <- 1:500
position <- merge(position, iteration, all=T)
# Generate 500 random 70-bp candidate filler sequences, GC content ~50%
# (the merge above added column `y` holding the iteration index)
set.seed(2429)
for (i in 1:500) {
position$space[position$y ==i] <- sim.DNAseq(70, GCfreq = 0.5)
}
position$seq.name <- paste(position$tf, "_70", sep = "")
# Gr only needs 60 bp: truncate the candidates and rename accordingly
position[position$tf == "Gr",] <- position[position$tf == "Gr",] %>%
mutate(seq.name = gsub("_70","_60", seq.name),
space = substr(space,1,60))
position_space <- position
# Assemble sequence to test: 4 rim bases of the TF motifs and the spacer sequence
position_space$seq.name <- paste(position_space$seq.name, position_space$y, sep = "_")
position_space$seq.text <- paste(substrRight(position_space$primer1_seq, 4),
position_space$space,
substr(position_space$motif2, 1, 4), sep = "")
# Write fasta file to run on FIMO script
position_space_export <- position_space
position_space_export <- position_space_export %>% dplyr::select(seq.name, seq.text) %>% unique()
#dat2fasta(position_space_export, outfile = "/DATA/usr/m.trauernicht/projects/tf_activity_reporter/data/trp53_model/fimo/position_connell.fasta")
```
## Run FIMO script again
```{bash run fimo spacings, eval = FALSE}
# motfn=/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/20170320_pwms_selected.meme
# odir=/home/m.trauernicht/mydata/projects/tf_activity_reporter/data/trp53_model/output/position_connell
# query="/home/m.trauernicht/mydata/projects/tf_activity_reporter/data/trp53_model/fimo/position_connell.fasta"
# nice -n 19 fimo --no-qvalue --thresh 1e-4 --verbosity 1 --o $odir $motfn $query
```
## load fimo results
We built a TF motif matrix using -log10 transformed FIMO scores. We used this feature encoding throughout the rest of this analysis, unless otherwise stated.
```{r build tf motif matrix position connell, out.width= "100%", fig.align= "center", echo=FALSE}
# Load motif metadata and build the PWM feature matrix (-log10 FIMO scores)
# for the O'Connell-reporter filler-sequence candidates.
tib_pwm_position_connell <- get_pwm_feature_matrix(motif_meta_fn = '/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/fc181127_curated_metadata_no_composite_filt.csv',
fimo_fn = '/home/m.trauernicht/mydata/projects/SuRE_deep_scan_trp53_gr/data/library_design/output/position_connell/fimo.tsv',
db = 2)
```
## visualize fimo results
```{r out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
## Select the 8 least-bound filler-sequence candidates for the Connell reporters
# Convert FIMO scores to binary hit/no-hit (0 or NA -> 0, any score -> 1)
tib_pwm_position_connell_binary <- tib_pwm_position_connell %>% mutate_if(is.numeric, function(x) ifelse((x==0 | is.na(x)),0,1))
tib_pwm_position_connell_binary_top <- tib_pwm_position_connell_binary
# Row sums over all motif columns = number of TF motifs hitting each sequence
tib_pwm_position_connell_binary_top$binding <- rowSums(tib_pwm_position_connell_binary_top[,2:ncol(tib_pwm_position_connell_binary_top)])
# Keep only the cumulative binding count and the sequence id
tib_pwm_position_connell_binary_top <- tib_pwm_position_connell_binary_top %>%
dplyr::select(id,binding)
# Extract the space_id (last "_"-separated field of id), then sum binding per space_id
tib_pwm_position_connell_binary_top$space <- gsub(".*_(.*)", "\\1", tib_pwm_position_connell_binary_top$id)
tib_pwm_position_connell_binary_top <- tib_pwm_position_connell_binary_top %>%
dplyr::select(-id)
tib_pwm_position_connell_binary_top$cum_binding <- ave(tib_pwm_position_connell_binary_top$binding, tib_pwm_position_connell_binary_top$space,
FUN = sum)
tib_pwm_position_connell_binary_top <- tib_pwm_position_connell_binary_top %>%
dplyr::select(-binding)
tib_pwm_position_connell_binary_top <- unique(tib_pwm_position_connell_binary_top)
# Order by cumulative binding and keep the 8 least-bound space ids
tib_pwm_position_connell_binary_top <- tib_pwm_position_connell_binary_top[order(tib_pwm_position_connell_binary_top$cum_binding),]
tib_pwm_position_connell_binary_top <- tib_pwm_position_connell_binary_top$space[1:8]
# Convert score table to a matrix for the heatmap - leave out id/space columns
tib_pwm_position_connell$space <- gsub(".*_(.*)", "\\1", tib_pwm_position_connell$id)
tib_pwm_position_connell_select <- tib_pwm_position_connell[tib_pwm_position_connell$space %in%
tib_pwm_position_connell_binary_top,]
tib_pwm_position_connellMatrix <- as.matrix(dplyr::select(tib_pwm_position_connell_select,-id,-space))
# assign ids as rownames of matrix
rownames(tib_pwm_position_connellMatrix) <- tib_pwm_position_connell_select$id
```
```{r, fig.height=10, fig.width=10, out.width= "100%", fig.align= "center", echo=FALSE, warning= FALSE}
# Plot FIMO scores for the selected filler sequences; all-zero motif columns removed
heatmaply(tib_pwm_position_connellMatrix[, colSums(tib_pwm_position_connellMatrix != 0) > 0])
```
## Select Connell spacers
```{r}
## Add the selected filler sequences to the Connell reporter df
# Keep the 8 hand-picked candidate ids (chosen from the heatmap above)
position_selected <- position[position$y %in% c("34", "82", "91", "205", "227", "406", "445", "471"),] %>%
dplyr::select(space, tf, y) %>% unique()
# Re-key as (background 1..3) x (Gr, Trp53)
# NOTE(review): this assignment requires exactly 6 rows after unique() even
# though 8 ids were selected -- confirm the dedup really collapses to 6
position_selected$y <- c(1,1,2,2,3,3)
for (i in 1:3){
for (j in unique(tf.df.connell$tf)){
tf.df.connell$position_seq[tf.df.connell$background == i & tf.df.connell$tf == j] <-
position_selected$space[position_selected$tf == j & position_selected$y == i]
}
}
## Add Connell reporters to tf.df
tf.df <- rbind(tf.df, tf.df.connell)
```
## Check by FIMO if all spacings are inactive
```{r}
# Assemble the full reporter sequence and a descriptive name per oligo,
# then export them as fasta to screen all spacings for unwanted TF binding.
tf.df$seq.name <- paste(tf.df$tf, tf.df$promoter, "p", tf.df$position, "s", tf.df$spacing, "d", tf.df$distance, "bg",
tf.df$background, "a1", tf.df$affinity_pos1, "a2",
tf.df$affinity_pos2, "a3", tf.df$affinity_pos3, "a4", tf.df$affinity_pos4,
"bc", tf.df$oligo.barcode, sep = "_")
# Concatenate all design elements 5'->3' into the test sequence
tf.df$seq.text <- paste(tf.df$primer1_seq, tf.df$position_seq, tf.df$motif1, tf.df$space1, tf.df$motif2,
tf.df$space2, tf.df$motif3, tf.df$space3, tf.df$motif4, tf.df$distance.seq,
sep = "")
tf.df.export <- subset(tf.df, select = c(seq.name, seq.text))
# dat2fasta(tf.df.export, outfile = "/DATA/usr/m.trauernicht/projects/tf_activity_reporter/data/trp53_model/fimo/reporter_check.fasta")
```
## Run FIMO script again
```{bash run fimo db2_9, eval = FALSE}
# motfn=/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/20170320_pwms_selected.meme
# odir=/home/m.trauernicht/mydata/projects/tf_activity_reporter/data/trp53_model/output/reporter_check_2
# query=/home/m.trauernicht/mydata/projects/tf_activity_reporter/data/trp53_model/fimo/reporter_check.fasta
# nice -n 19 fimo --no-qvalue --thresh 1e-4 --verbosity 1 --o $odir $motfn $query
```
## load fimo results
We built a TF motif matrix using -log10 transformed FIMO scores. We used this feature encoding throughout the rest of this analysis, unless otherwise stated.
```{r build tf motif matrix reporter check, out.width= "100%", fig.align= "center", echo=FALSE}
# Load motif metadata and build the PWM feature matrix (-log10 FIMO scores)
# for the assembled reporter sequences.
tib_pwm_tf_check <- get_pwm_feature_matrix(motif_meta_fn = '/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/fc181127_curated_metadata_no_composite_filt.csv',
fimo_fn = '/home/m.trauernicht/mydata/projects/SuRE_deep_scan_trp53_gr/data/library_design/output/reporter_check/fimo.tsv',
db = 2)
```
```{r, fig.height=4, fig.width=4, out.width= "100%", fig.align= "center", echo=FALSE, eval=FALSE}
# Generate heatmaps in a loop for each TF and each affinity group
# NOTE(review): this character class matches a literal '.', '*', '|' or '_' and
# strips it plus everything after; there is no capture group so "\\1" is empty.
# It works as "truncate at first separator" -- confirm that is the intent.
tib_pwm_tf_check$TF <- gsub("[.*.|_].*","\\1",tib_pwm_tf_check$id)
# Keep minP reporters only
tib_pwm_tf_check <- tib_pwm_tf_check[grepl('minP',tib_pwm_tf_check$id),]
# Attach the four designed affinity levels per reporter (0 = high ... 4 = null)
tf.labels <- subset(tf.df, select = c(seq.name, affinity_pos1, affinity_pos2, affinity_pos3,
affinity_pos4))
names(tf.labels) <- c("id", "affinity_1", "affinity_2", "affinity_3", "affinity_4")
tib_pwm_tf_check <- merge(tib_pwm_tf_check, tf.labels)
## Compute affinity sum (0 = 4x high ... 16 = 4x null)
tib_pwm_tf_check$affinity <- tib_pwm_tf_check$affinity_1 + tib_pwm_tf_check$affinity_2 + tib_pwm_tf_check$affinity_3 + tib_pwm_tf_check$affinity_4
## Map each affinity sum to a human-readable label
labels <- data.frame(c("4xhigh", "3xhigh - 1xmed", "2xhigh - 2xmed", "1xhigh - 3xmed", "4xmed",
"3xmed - 1xlow", "2xmed - 2xlow", "1xmed - 3xlow", "4xlow", "3xlow - 1xvery_low",
"2xlow - 2xvery_low", "1xlow - 3xvery_low", "4xvery_low", "3xvery_low - 1xnull",
"2xvery_low - 2xnull", "1xvery_low - 3xnull", "4xnull"), c(0:16))
names(labels) <- c("label", "affinity")
for (i in 0:16) {
tib_pwm_tf_check$label[tib_pwm_tf_check$affinity == i] <- labels$label[labels$affinity == i]
}
tib_pwm_tf_check$label <- paste(tib_pwm_tf_check$TF, tib_pwm_tf_check$label, sep = "_")
## Move ids into rownames for heatmap visualization
tib_pwm_tf_check_2 <- unique(tib_pwm_tf_check) %>%
remove_rownames %>% column_to_rownames(var="id")
# Shared breaks so legends are comparable across heatmaps
myBreaks1 <- seq(0,10,0.1)
# One heatmap per TF/affinity-group label; all-zero motif columns removed
for (i in unique(tib_pwm_tf_check_2$label)) {
data <- tib_pwm_tf_check_2[tib_pwm_tf_check_2$label == i,]
data <- data %>% dplyr::select(-label, -TF, -affinity, -affinity_1, -affinity_2, -affinity_3, -affinity_4)
data <- data[,colSums(data != 0) > 0]
p <- pheatmap(as.matrix(data),
main = paste(i),
border_color = "#000000",
breaks = myBreaks1)
print(p)
}
```
```{r, fig.height=4, fig.width=4, out.width= "100%", fig.align= "center", echo=FALSE, eval=FALSE}
## Correlation of designed (SELEX-based) affinity vs. FIMO-predicted affinity
cor_sequences <- tib_pwm_tf_check %>% dplyr::select(TF, affinity, NR3C1, TP53)
# BUG FIX: the original used `affinity == c(0,4,8,12,16)`, which recycles the
# right-hand vector and compares it element-wise against the rows, silently
# dropping rows whose position does not line up with the recycled pattern.
# `%in%` keeps every row whose affinity sum is one of the five homogeneous
# groups, which is what the relabelling below expects.
cor_sequences <- cor_sequences[cor_sequences$affinity %in% c(0,4,8,12,16),]
# Map each homogeneous affinity sum to its group name
# (assignment of strings coerces the column to character, as before)
cor_sequences$affinity[cor_sequences$affinity == 0] <- "high"
cor_sequences$affinity[cor_sequences$affinity == 4] <- "med"
cor_sequences$affinity[cor_sequences$affinity == 8] <- "low"
cor_sequences$affinity[cor_sequences$affinity == 12] <- "very low"
cor_sequences$affinity[cor_sequences$affinity == 16] <- "null"
cor_sequences <- unique(cor_sequences)
# Order x-axis from strongest to weakest predicted binding
cor_sequences$affinity <- factor(cor_sequences$affinity ,levels = c("high", "med", "low", "very low", "null"))
### Gr: designed affinity group vs. FIMO score for the NR3C1 motif
ggplot(cor_sequences[cor_sequences$TF == "Gr",], aes(x = affinity, y = NR3C1)) +
  geom_bar(stat="identity") +
  xlab("Selex predicted binding site in reporter") + ylab("FIMO predicted affinity") +
  theme(text = element_text(size = 14)) +
  labs(title= paste("SELEX vs FIMO affinity, Gr")) +
  theme_classic()
### Trp53: designed affinity group vs. FIMO score for the TP53 motif
ggplot(cor_sequences[cor_sequences$TF == "Trp53",], aes(x = affinity, y = TP53)) +
  geom_bar(stat="identity") +
  xlab("Selex predicted binding site in reporter") + ylab("FIMO predicted affinity") +
  theme(text = element_text(size = 14)) +
  labs(title= paste("SELEX vs FIMO affinity, Trp53")) +
  theme_classic()
```
## Add barcodes
```{r out.width= "100%", fig.align= "center", echo=FALSE}
# Shuffle the pre-generated barcode pool (barc, defined earlier) reproducibly
# and assign one barcode per oligo.
set.seed(123)
barcodes <- sample(barc)
tf.df$barcode <- barcodes[1:nrow(tf.df)]
# Check if there are any duplicate barcodes (expected: 0)
paste("duplicate barcodes: ", nrow(tf.df[duplicated(tf.df$barcode),]), sep ="")
# Save for later export
tf.df.long.export <- tf.df
```
# Double check all generated reporters for any binding other than P53
```{r}
# Re-load the complete designed library and rebuild each Trp53 reporter's
# insert (motifs + spacers + distance) to double-check for off-target binding.
tf.df.check <- read.csv("/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/library_design/output/tf_df_complete.csv", sep = ";") %>%
dplyr::select(-X)
tf.df.check2 <- tf.df.check %>%
filter(tf == "Trp53") %>%
mutate(seq.text = paste(motif1, space1, motif2, space2, motif3, space3, motif4, distance.seq, sep = ""),
# Strip the promoter field and the trailing barcode index from the name
seq.name = gsub("(Gr|Trp53)_.*(_p_[0-9]{1,2}_.*)", "\\1\\2", seq.name),
seq.name = gsub("_bc_[0-9]{1}$", "", seq.name))
tf.df.check <- tf.df.check2 %>%
dplyr::select(seq.name, seq.text) %>%
unique()
dat2fasta(tf.df.check, outfile = "/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/library_design/fimo/tf_check_background.fasta")
```
# run fimo
```{r}
# motfn=/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/20170320_pwms_selected.meme
# odir=/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/library_design/fimo/tf_check
# query=/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/library_design/fimo/tf_check.fasta
# nice -n 19 fimo --no-qvalue --thresh 1e-4 --verbosity 1 --o $odir $motfn $query
```
```{r build tf motif matrix background check, out.width= "100%", fig.align= "center", echo=FALSE}
# Load the background-check FIMO scores as a PWM feature matrix
tib_pwm_tf_check <- get_pwm_feature_matrix(motif_meta_fn = '/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/fc181127_curated_metadata_no_composite_filt.csv',
fimo_fn = '/home/m.trauernicht/mydata/projects/SuRE_deep_scan_trp53_gr/data/library_design/fimo/tf_check/fimo.tsv',
db = 2)
# Join design annotations and keep one representative reporter per design
tib_pwm_tf_check <- merge(tib_pwm_tf_check, tf.df.check2, by.x = "id", by.y = "seq.name", all = T) %>%
filter(oligo.barcode == 1, position == 0, promoter == "minP")
# Label homogeneous affinity designs (0 = high ... 4 = null per position)
tib_pwm_tf_check$affinity_id <- "0_mixed_any"
tib_pwm_tf_check$affinity_id[tib_pwm_tf_check$affinity_pos1 == 0 & tib_pwm_tf_check$affinity_pos2 == 0 & tib_pwm_tf_check$affinity_pos3 == 4 & tib_pwm_tf_check$affinity_pos4 == 4] <- "6_high_start"
tib_pwm_tf_check$affinity_id[tib_pwm_tf_check$affinity_pos1 == 4 & tib_pwm_tf_check$affinity_pos2 == 4 & tib_pwm_tf_check$affinity_pos3 == 0 & tib_pwm_tf_check$affinity_pos4 == 0] <- "7_high_end"
tib_pwm_tf_check$affinity_id[tib_pwm_tf_check$affinity_pos1 == 3 & tib_pwm_tf_check$affinity_pos2 == 3 & tib_pwm_tf_check$affinity_pos3 == 3 & tib_pwm_tf_check$affinity_pos4 == 3] <- "2_very_low_only"
tib_pwm_tf_check$affinity_id[tib_pwm_tf_check$affinity_pos1 == 0 & tib_pwm_tf_check$affinity_pos2 == 0 & tib_pwm_tf_check$affinity_pos3 == 0 & tib_pwm_tf_check$affinity_pos4 == 0] <- "5_high_only"
tib_pwm_tf_check$affinity_id[tib_pwm_tf_check$affinity_pos1 == 1 & tib_pwm_tf_check$affinity_pos2 == 1 & tib_pwm_tf_check$affinity_pos3 == 1 & tib_pwm_tf_check$affinity_pos4 == 1] <- "4_med_only"
tib_pwm_tf_check$affinity_id[tib_pwm_tf_check$affinity_pos1 == 2 & tib_pwm_tf_check$affinity_pos2 == 2 & tib_pwm_tf_check$affinity_pos3 == 2 & tib_pwm_tf_check$affinity_pos4 == 2] <- "3_low_only"
tib_pwm_tf_check$affinity_id[tib_pwm_tf_check$affinity_pos1 == 4 & tib_pwm_tf_check$affinity_pos2 == 4 & tib_pwm_tf_check$affinity_pos3 == 4 & tib_pwm_tf_check$affinity_pos4 == 4] <- "1_null_only"
# NOTE(review): the final `== "2_very_low_only"` makes the preceding `!=`
# conditions redundant -- confirm only the very-low group is wanted here
tib_pwm_tf_check <- tib_pwm_tf_check %>%
filter(affinity_id != "0_mixed_any", affinity_id != "6_high_start", affinity_id != "7_high_end", affinity_id == "2_very_low_only")
# Move ids into rownames for the heatmap
tib_pwm_tf_check_2 <- tib_pwm_tf_check %>%
remove_rownames %>% column_to_rownames(var="id")
# Create breaks for heatmap (to make legend between heatmaps comparable)
myBreaks1 <- seq(0,8,0.08)
# Drop all-zero motif columns, then drop trailing annotation columns
tib_pwm_tf_check_2 <- tib_pwm_tf_check_2[,colSums(tib_pwm_tf_check_2 != 0) > 0]
# NOTE(review): hard-coded column 27 cutoff assumes a fixed column layout after
# the filter above -- fragile, verify if the motif database changes
tib_pwm_tf_check_2 <- tib_pwm_tf_check_2[,c(-27:-ncol(tib_pwm_tf_check_2))]
tib_pwm_tf_check_2 <- as.matrix(tib_pwm_tf_check_2)
pheatmap(tib_pwm_tf_check_2,
border_color = "#000000",
breaks = myBreaks1)
```
# Check-up & Visualizations
```{r out.width= "100%", fig.align= "center", echo=FALSE}
# Check-up: assemble the final oligo sequences and plot their length distribution
# Save intermediate df for later purpose
tf.array <- tf.df
# Full oligo: primer + insert + promoter + s1 primer + barcode + primer2
tf.df$seq.text <- paste(tf.df$primer1_seq, tf.df$position_seq, tf.df$motif1, tf.df$space1, tf.df$motif2,
tf.df$space2, tf.df$motif3, tf.df$space3, tf.df$motif4,
tf.df$distance.seq, tf.df$promoter_sequence, tf.df$s1_primer,
tf.df$barcode, tf.df$primer2_seq, sep = "")
# Insert-only variant used for the export below
tf.df_2 <- tf.df
tf.df_2$seq.text <- paste(tf.df_2$position_seq, tf.df_2$motif1, tf.df_2$space1, tf.df_2$motif2,
tf.df_2$space2, tf.df_2$motif3, tf.df_2$space3, tf.df_2$motif4,
tf.df_2$distance.seq, sep = "")
tf.df_2$seq.name <- paste(tf.df_2$tf, "p", tf.df_2$position, "s", tf.df_2$spacing, "d", tf.df_2$distance, "bg",
tf.df_2$background, "a1", tf.df_2$affinity_pos1, "a2",
tf.df_2$affinity_pos2, "a3", tf.df_2$affinity_pos3, "a4", tf.df_2$affinity_pos4, sep = "_")
# Count oligos that still contain an EcoRI/NheI site (expected: 0)
print(nrow(tf.df[grep(paste(ecori_nhei, collapse = "|"),tf.df$seq.text),]))
tf.df$nchar <- nchar(tf.df$seq.text)
# Histogram of oligo lengths with summary stats in the subtitle
ggplot(tf.df, aes(x = nchar)) +
geom_histogram(aes(y=..count..), colour="black", fill="#E69F00", binwidth = 5)+
xlab("Length Oligo (binwidth = 5)") + ylab("Frequency") +
labs(title = "Overview of oligo lengths in pool",
subtitle = paste("Total oligos =", nrow(tf.df), "|",
"max oligo length =", max(nchar(tf.df$seq.text)), "|",
"min oligo length = ", min(nchar(tf.df$seq.text)), "|",
"mean oligo length = ", round(mean(nchar(tf.df$seq.text))))) +
theme_classic()
```
## Exporting potential data.
```{r}
## Export the final oligo pool (name + insert sequence) as csv and fasta
tf.df.export <- tf.df_2 %>% dplyr::select(seq.name, seq.text) %>% unique()
# Sanity checks: both counts should be 0
paste("duplicate sequence names: ", nrow(tf.df.export[duplicated(tf.df.export$seq.name),]), sep ="")
paste("duplicate sequences: ", nrow(tf.df.export[duplicated(tf.df.export$seq.text),]), sep ="")
filename <- SetFileName("_oligo_pool_deep_RE_only", "mt")
# Write csv file
# NOTE(review): setwd() in a knitted chunk only affects that chunk -- consider
# absolute paths in write.csv/dat2fasta instead
setwd("/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/library_design/output/")
write.csv(tf.df.export, file = paste(filename,".csv", sep = ""), row.names = F)
# Write fasta file
setwd("/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/library_design/output/")
dat2fasta(tf.df.export, outfile = paste(filename,".fasta", sep = ""))
```
# Compare published backgrounds (Davis et al. 2019, biorxiv) with my new backgrounds
```{r out.width= "100%", fig.align= "center", echo=FALSE}
# Published Davis et al. background sequences (150 bp each)
bg41 <- "TGTTCAGAAGGGCCAGAAATGCCAAGGACTCAGGGGAGGAGAATTAAGTCAGAGAGTTTCATTACTGAGTGTTGTTTGACTTTGTTGTCACGGATTCATTTAACCATCTCTCTACCATGGTAAAAATGTGTATCCTATGTCCAGTATGAA"
bg52 <- "CCAGGGAAAGCAGTCGGTGAGACCAGGCACAGTAGGATAGTTAGTTAGCTACCACATGTAAAGCTGAGAGCAGATGGTGCCGTTGATATAGAGCGCAGGAATGTGCGTGTTTATGCGGGTGCGTTTCTGTGCGTGTGCGTGTGGAAACCA"
# Re-load my exported pool and keep only Trp53, position 0, spacing 7 reporters
tf.df_davis <- phylotools::read.fasta("/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/library_design/output/mt20210208_oligo_pool_deep_RE_only.fasta")
tf.df_davis <- tf.df_davis %>%
separate(col = seq.name, into = c("TF", "p", "position", "s", "spacing",
"d", "distance", "b", "background",
"a1", "aff1", "a2", "aff2", "a3", "aff3",
"a4", "aff4"), sep = "_") %>%
dplyr::select(-p, -s, -d, -b, -a1, -a2, -a3, -a4, -distance) %>%
filter(TF == "Trp53", position == 0, spacing == 7) %>%
unique()
# Slice the insert into its design elements by fixed coordinates
# (24 bp motifs separated by 7 bp spacers; positions assume spacing == 7)
tf.df_davis <- tf.df_davis %>%
mutate(motif1 = substr(seq.text, 0, 24),
space1 = substr(seq.text, 25, 31),
motif2 = substr(seq.text, 32, 55),
space2 = substr(seq.text, 56, 62),
motif3 = substr(seq.text, 63, 86),
space3 = substr(seq.text, 87, 93),
motif4 = substr(seq.text, 94, 117),
space4 = substr(seq.text, 118, 127))
# Variant with my spacers replaced by slices of the Davis bg41 background
tf.df_davis_bg_1 <- tf.df_davis %>%
filter(background == "1") %>%
mutate(space1 = substr(bg41, 50, 57),
space2 = substr(bg41, 58, 64),
space3 = substr(bg41, 65, 71),
space4 = substr(bg41, 72, 78)) %>%
mutate(background = "bg41")
# Same for bg52
tf.df_davis_bg_2 <- tf.df_davis %>%
filter(background == "2") %>%
mutate(space1 = substr(bg52, 50, 57),
space2 = substr(bg52, 58, 64),
space3 = substr(bg52, 65, 71),
space4 = substr(bg52, 72, 78)) %>%
mutate(background = "bg52")
tf.df_davis <- rbind(tf.df_davis, tf.df_davis_bg_1, tf.df_davis_bg_2)
# Reassemble name + sequence and export for FIMO screening
tf.df_davis <- tf.df_davis %>%
mutate(seq.name = paste(background, aff1, aff2, aff3, aff4, sep = "_"),
seq.text = paste(motif1, space1, motif2, space2, motif3, space3, motif4, space4, sep = "")) %>%
dplyr::select(seq.name, seq.text) %>%
unique()
#dat2fasta(tf.df_davis, outfile = "/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/library_design/fimo/background_check.fasta")
```
## Run FIMO script again
```{bash run fimo db2_11, eval = FALSE}
# motfn=/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/20170320_pwms_selected.meme
# odir=/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/library_design/fimo/background_check/
# query=/DATA/usr/m.trauernicht/projects/SuRE_deep_scan_trp53_gr/data/library_design/fimo/background_check.fasta
# nice -n 19 fimo --no-qvalue --thresh 1e-4 --verbosity 1 --o $odir $motfn $query
```
```{r build tf motif matrices db2: tf_bg_davis, out.width= "100%", fig.align= "center", echo=FALSE}
# Load motif metadata and build the PWM feature matrix (-log10 FIMO scores)
# for the background-comparison sequences.
tib_pwm_tf_bg_davis <- get_pwm_feature_matrix(motif_meta_fn = '/home/m.trauernicht/mydata/data/TFDB/natoli_update_2017/fc181127_curated_metadata_no_composite_filt.csv',
fimo_fn = '/DATA/usr/m.trauernicht/projects/P53_reporter_scan/data/library_design/fimo/background_check/fimo.tsv',
db = 2)
```
```{r, fig.height=4, fig.width=4, out.width= "100%", fig.align= "center", echo=FALSE, eval=FALSE}
# One heatmap per background
# NOTE(review): the character class matches a literal '.', '*' or '_' and strips
# it plus everything after; "\\1" is empty (no capture group). Effectively
# truncates the id at the first separator -- confirm intended.
tib_pwm_tf_bg_davis$bg <- gsub("[.*_].*","\\1",tib_pwm_tf_bg_davis$id)
tib_pwm_tf_bg_davis_2 <- tib_pwm_tf_bg_davis %>%
remove_rownames %>% column_to_rownames(var="id")
for (i in unique(tib_pwm_tf_bg_davis_2$bg)) {
data <- tib_pwm_tf_bg_davis_2[tib_pwm_tf_bg_davis_2$bg == i,]
# Drop all-zero motif columns (the character bg column always survives this test)
data <- data[,colSums(data != 0) > 0]
p <- pheatmap(as.matrix(data %>% dplyr::select(-bg)),
main = paste(i),
border_color = "#000000")
print(p)
}
```
## Compare specificity between my random backgrounds and 2 of the backgrounds used in Davis et al
```{r, fig.height=10, fig.width=10, out.width= "100%", fig.align= "center", echo=FALSE}
# Motif occurrences (binary): how many TFs bind each spacer, Davis backgrounds
# vs. my designed backgrounds (column name "MotifOccurencies" kept as-is)
tib_pwm_all_bg_binary <- tib_pwm_tf_bg_davis_2 %>% mutate_if(is.numeric, function(x) ifelse((x==0 | is.na(x)),0,1))
# Remove the intended P53-family motifs so only off-target binding is counted
tib_pwm_all_bg_binary <- tib_pwm_all_bg_binary %>%
dplyr::select(-TP53, -TP63, -TP73)
tib_pwm_all_bg_binary$MotifOccurencies <- rowSums(tib_pwm_all_bg_binary %>% dplyr::select(-bg))
# Label origin of each background ("bg" prefix marks the Davis sequences)
tib_pwm_all_bg_binary$type <- "my_background"
tib_pwm_all_bg_binary$type[grep("bg", tib_pwm_all_bg_binary$bg)] <- "davis_background"
colors_diverse <- c("#264653", "#2a9d8f", "#e9c46a", "#f4a261", "#e76f51")
# Boxplot of off-target motif counts per background, colored by origin
tib_pwm_all_bg_binary%>%
dplyr::select(bg,MotifOccurencies, type)%>%
ggplot(aes(bg, MotifOccurencies, fill=type))+
geom_boxplot() +
scale_fill_manual(values = colors_diverse)+
theme_pubr(border = T) +
coord_flip()
```
# Session Info
```{r}
# Reproducibility footer: elapsed time, working directory, date, package versions
paste("Run time: ",format(Sys.time()-StartTime))
getwd()
date()
sessionInfo()
```
<file_sep>---
title: "Systematic analysis of transcriptional activation from synthetic TP53 binding sites"
output: html_document
date: "2023-06-13"
---
[](https://zenodo.org/badge/latestdoi/298230427)
Website guide: The "Data Quality Checks" tab contains plots describing the pre-processing and quality assurance of the barcode counts. The "Detailed Reporter Activity Analysis" tab contains all figures generated for the manuscript.
---
**Introduction:**
It is unclear how TP53 binding site architecture relates to TF activity. To test this systematically, a library was designed in collaboration with the Bussemaker lab. Design features of TP53 reporters like binding site copy number, spacer length, or core promoter choice are reviewed.

The first designed library contains:
- 6,000 TF reporters, each with up to 4 TF binding sites, followed by a minP or minCMV and a barcode in the transcription unit
- 5 different TP53 motifs with different predicted binding affinities
- Large range of combinatorial binding affinity
- Spacer length between binding sites varied from 4-14 bp in 1 bp steps
- two different core promoters
- three different synthetic inactive spacer sequences
- 5 barcodes per TF reporter
All TF reporters were designed using FIMO. This way, the spacings were designed to be inactive, while the TF binding sites were ensured to be intact.
---
**Experimental setup:**
- Nucleofection into TP53-proficient MCF7 cells and TP53-KO MCF7 cells
- TP53 Stimulation with Nutlin-3a or vehicle control (DMSO)
- RNA isolation after 24h, followed by barcode-specific reverse transcription and sequencing library prep
- experiments performed in independent triplicates
---
**Activity quantification:**
- reporter activity = cDNA counts / pDNA counts
- take average across the 5 barcodes
- then take average across the 3 biological replicates
- calculate enrichment per condition tested over background reporter activity (core promoter-only reporters)
___
**Repository guide:**
- analysis folder: pre-processing of the barcode counts (barcode-preprocessing.Rmd) + analysis of barcode counts, linear modeling, figure generation (cDNA-processing.Rmd)
- files indicated with #_all# contain data from all three probed cell lines (MCF7, U2OS, A549) whereas the other files only contain the MCF7 data
- library_design folder: contains script that was used to generate the TP53 reporter sequences
- pDNA_insert_seq folder: contains scripts to analyze the full-length sequences of the plasmid pool that was used for transfections
- raw_data_analysis folder: contains the scripts that were used to extract and cluster the raw barcode counts
| 2d4661d49e02723dbc532ab147522470fbfc6afa | [
"HTML",
"Markdown",
"Python",
"R",
"RMarkdown"
] | 15 | HTML | mtrauernicht/P53_reporter_scan | 7ef50b22e008f33b7949293011640ab4898e191e | 21ed0aed62a09477fa2b337a698d35fea7c3f370 |
refs/heads/master | <file_sep>class CategoryController < ApplicationController
before_action :sidebar, only: [:show]
def show
article_ids = Pick.where(genre: params[:id]).group(:article_id).order('count_article_id DESC').limit(9).count(:article_id).keys
@picks = article_ids.collect{|key| Pick.where(article_id: key).order("body ASC").last }
@articles = Article.where(genre: params[:id]).order("id DESC").limit(5)
end
private
def sidebar
@side_picks = Pick.includes(:user).includes(:article).order("id DESC").limit(10)
respond_to do |format|
format.html
format.json
end
end
end
<file_sep>class AddAnonymousToPicks < ActiveRecord::Migration[5.0]
# Adds picks.anonymous (0 = signed, 1 = anonymous, judging from the app JS).
# NOTE(review): NOT NULL without a default will fail on a non-empty picks
# table under strict SQL modes — confirm a default/backfill isn't needed.
def change
add_column :picks, :anonymous, :integer, null: false
end
end
<file_sep>class Pick < ApplicationRecord
  require 'date'

  # Associations
  belongs_to :user
  belongs_to :article
  has_many :likes, dependent: :destroy

  # Returns the given user's Like for this pick, or nil if they haven't liked it.
  def like_user(user_id)
    likes.find_by(user_id: user_id)
  end

  # Validations
  validates :user_id, presence: true
  validates :article_id, presence: true
  validates :genre, presence: true

  # Human-readable category title for this pick's genre (genre is 1-based).
  def which_title
    title = ["エンタメ", "テクノロジー", "ビジネス", "政治・経済", "金融・マーケット", "キャリア・教育", "社会・スポーツ", "イノベーション"]
    num = self.genre
    return title[num - 1]
  end

  # Relative "time ago" label for when the pick was created.
  def how_pick_created_at
    relative_time_label(self.created_at)
  end

  # Relative "time ago" label for when the pick was last updated.
  def how_pick_updated_at
    relative_time_label(self.updated_at)
  end

  private

  # Shared formatter (previously duplicated verbatim in the two public methods):
  # absolute date once the time is 1+ days ago, otherwise hours/minutes/seconds ago.
  def relative_time_label(time)
    difference = DateTime.now.to_i - time.to_i
    seconds = difference % 60
    minutes = difference / 60 % 60
    hours = difference / 60 / 60 % 24
    days = difference / 60 / 60 / 24
    if days != 0
      return time.strftime("%Y年%m月%d日")
    elsif hours != 0
      return "#{hours}時間前"
    elsif minutes != 0
      return "#{minutes}分前"
    else
      return "#{seconds}秒前"
    end
  end
end
<file_sep>$(document).on("turbolinks:load", function(){
// Article-post ("contribution") button: open the modal in a clean state.
$("#contribution_btn, #contribution_menu_btn").on("click", function(){
$("#contribution").show();
$("#contribution_header").show();
$("#form_url, #form_image, #form_title, #form_source, #form_genre, #form_here").val("");
$("#form_url").attr("autofocus", true)
$("#contribution_bottom").hide();
$("#contribution_alert").hide();
// Pause the top-page carousels while the modal is open.
// NOTE(review): autoSlide / interval look like globals defined in another file — confirm.
clearInterval(autoSlide);
clearInterval(interval);
})
// Clicks inside the modal box must not bubble to the overlay (which closes it).
$("#contribution__box").on("click", function(e){
e.stopPropagation();
})
// Close the modal (overlay or close button) and restart carousels on list pages.
$("#contribution, #close__contribution").on("click", function(){
$("#contribution").hide();
$("#form_url, #form_image, #form_title, #form_source, #form_genre, #form_here").val("");
if( !window.location.href.match(/register|users/) ){
autoInterval();
if( location.href.match(/^http:\/\/172.16.58.3:3000\/*$/) != null || location.href.match(/^http:\/\/172.16.58.3:3000\/articles\/*$/) != null || location.href.match(/^http:\/\/localhost:3000\/*$/) != null || location.href.match(/^http:\/\/localhost:3000\/articles\/*$/) != null ){
autoSlideDo();
}
}
});
// Prevent Enter from submitting text fields (except inputs opting in via .allow_submit).
$(document).on("keypress", "input:not(.allow_submit)", function(e) {
return e.which !== 13;
});
// Article-submission feature.
// Judge whether a submitted article is appropriate (function defined below).
var appropriate_judgement = function( title, keywords, description, source ){
// 不適切でない語の消去
var exclusion = new RegExp(/wave|naver/ig);
var excluded_title = title.replace(exclusion, "");
var excluded_keywords = keywords.replace(exclusion, "");
var excluded_description = description.replace(exclusion, "");
var excluded_source = source.replace(exclusion, "");
// 調査対象の設定
var elements = [ excluded_title, excluded_keywords, excluded_description, excluded_source ];
var out_content = new RegExp(/エロ|えろ|無修正|XVIDEOS|FC2|porn|pin|fuck|ero|teen|jk|緊縛|調教|風俗|adult|近親相姦|隠し撮り|ハメ撮り|AV|アダルト|あだると|援助交際|援交|同人誌|セックス|せっくす|sex|えっち|エッチ|死にたい|自殺掲示板|幇助|消えたい|樹海|一緒に自殺|自殺サイト|一緒に死|濡れ場|濡れ|お宝|ヌード|ぬーど|ようつべ|youtube|you tube|tube|yourfile/i)
// 調査対象の判定
var alert = function( elements ){
for( i = 0, len = elements.length; i < len; i = i + 1 ){
if( elements[i].match( out_content ) !== null ){
return 1;
}
}
return 2;
}
// 判定結果のreturn
return alert( elements );
}
// 投稿された記事のジャンル判定
var which_genre = function( keywords, title, description ){
// 調査対象の設定
var elements = [ keywords, title, description ];
// 判定用のカテゴリーワード
var technology = [ "tech", "テック", "ネットワーク", "テクノロジー", "IoT", "AI", "人工知能", "機械学習", "IT", "衛星", "宇宙", "自動車産業", "特許", "シリコンバレー", "データセンター", "ec", "EC", "qr", "QR"," スマホ決済", "電子決済", "science", "iss", "検索エンジン", "電気自動車", "ev", "自動運転", "製薬" ]
var business = [ "business", "ビジネス", "トラベル", "商品", "産業", "環境", "世界の動き", "企業", "経営", "不振", "知財", "株価", "銘柄", "分析", "自動車産業", "年収", "給料", "電子マネー", "顧客", "電子商取引", "ec", "EC", "接客", "サービス", "小売", "プライベートブランド", "pb", "売上", "業界", "不動産", "人材育成", "輸出", "輸入", "価格", "手頃", "新製品", "新商品", "it", "儲け", "投資信託", "信託", "製薬", "事業承継", "ipo", "上場", "m&a", "買収" ]
var economics =[ "economics", "経済", "日経平均", "国際", "民法", "国会", "法相", "減税", "国税庁", "財務省", "年末調整", "インフレ", "デフレ", "内閣", "政治", "少子高齢化", "GDP", "トランプ", "北朝鮮", "朝鮮", "ミサイル", "財政", "政府", "歳出", "歳入", "予算", "輸出", "輸入", "投資信託", "信託", "銀行", "当局", "エコノミスト", "ミクロ", "マクロ", "極右", "極左", "訪日", "外国人観光客", "外国人旅行者", "交渉", "nafta", "NAFTA", "tpp", "規制", "戦争", "政権", "大統領", "首相", "大臣", "白人", "黒人", "人種", "自治体", "frb", "利上", "地政学", "マイナス金利", "赤字", "保険", "ipo", "上場", "m&a", "買収" ]
var market = [ "market", "経済", "市況", "市場", "日経平均", "経営", "戦略", "証券", "株価", "株式", "株主", "マーケット", "上場", "円高", "円安", "インフレ", "デフレ", "景気", "金融", "業界", "不動産", "決算", "信託", "銀行", "投資", "valu", "信用", '小幅', "反発", "回復", "為替", "利益確定", "ユーロ安", "ユーロ高", "上値", "下値", "地方銀行", "事業承継", "frb", "利上", "相場", "地政学", "エネルギー", "前場", "後場", "続落", "売り優勢", "輸出株", "大型株", "マイナス金利", "赤字", "保険", "ipo", "上場" ]
var education = [ "education", "carrier", "心", "こころ", "メンタル", "小学", "中学", "高校", "専門学校", "大学", "就活", "就職活動", "いじめ", "孤独", "介護", "病院", "教育", "勉強", "義務教育", "文部科学省", "キャリア", "ポストドクター", "研修医", "働き方改革", "働き方", "ライフ", "仕事", "しごと", "研究", "トラベル", "旅行", "育成", "戦争", "自治体", "留学", "エネルギー", "リモートワーク", "フリーランス", "時間管理", "人生設計", "親", "家族", "ワークスタイル", "ワークライフ", "職場", "社内", "上司", "スクール", "遺族", "筋トレ", "筋肉", "食事", "給食", "格差社会", "ビジネスパーソン", "若手社員", "自死", "自殺", "夫婦", "テクニック", "処世術", "できる人", "デキる", "コミュニケーション", "知見", "学ぶ", "マニュアル", "コラム", "ビジネス書", "ビジネス本", "職人", "インターン", "m&a", "買収", "農業", "研修", "新人", "新入社員" ]
var sports = [ "sports", "social", "スポーツ", "野球", "サッカー", "フットボール", "アメリカンフットボール", "アメフト", "ラグビー", "テニス", "卓球", "水泳", "陸上", "食事", "社会", "ゴルフ", "メジャー", "グルメ", "釣り", "観光", "人生設計", "カルチャー", "宗教", "仏教", "ヒンドゥー", "キリスト", "ユダヤ", "イスラム", "親", "家族", "ワークスタイル", "ワークライフ", "いじめ", "孤独", "介護", "病院", "夫婦", "自治体", "映画", "娯楽", "アニメ", "ゲーム", "漫画", "マンガ", "戦争", "相撲", "バスケ", "NBA", "NFL", "NHL", "F1", "f1", "nba", "nhl", "nfl", "バレー", "柔道", "スキー", "スケート", "格闘技", "プロレス", "ボクシング", "k1", "空手", "柔道", "剣道", "ダンス", "食べ物", "スイーツ", "フルーツ", "遺産", "南極", "北極", "寿命", "遺族", "動物", "生態系", "甲子園", "fc", "スペイン1部リーグ", "農業", "健康", "筋トレ", "筋肉トレーニング", "トレーニング", "スタジアム", "出産", "出生", "被曝", "被ばく" ]
var innovation = [ "tech", "innovation", "革新", "変革", "革命", "it", "イノベーション", "ブロックチェーン", "AI", "人工知能", "機械学習", "iPS", "山中", "スタートアップ", "ベンチャー", "電子商取引", "ec", "EC", "share", "シェア", "valu", "ビットコイン", "bit", "フリーランス", "ロボット", "省力", "テック", "医療", "ガン", "がん", "癌", "診断", "アグリテック", "農業", "破壊的", "創造的", "創業者", "精神", "マインド", "こころ", "心", "ココロ", "メンタル", "集中力", "分析", "立ち上げ", "立上", "機会", "未来", "sf", "フィクション", "格言", "名言", "リーダー" ]
var reg = [ technology, business, economics, market, education, sports, innovation ];
// 変数定義
var technology_num = 0;
var business_num = 0;
var economics_num = 0;
var market_num = 0;
var education_num = 0;
var sports_num = 0;
var innovation_num = 0;
var genre = [ technology_num, business_num, economics_num, market_num, education_num, sports_num, innovation_num ];
// genreの集計(判定)
for( i = 0, genre_len = genre.length; i < genre_len; i = i + 1 ){
for( j = 0, reg_len = reg[i].length; j < reg_len; j = j + 1 ){
var word = new RegExp(reg[i][j],"i")
for( k = 0, ele_len = elements.length; k < ele_len; k = k + 1 ){
if( elements[k].match(word) ){
genre[i] = genre[i] + 1
}
}
}
}
// genre = [0] の場合の足し算
var genre_sum = function( genre ){
sum = 0;
for( i = 0, len = genre.length; i < len; i = i + 1 ){
sum = sum + genre[i]
}
return sum;
}
// genre判定結果のreturn
if( genre_sum( genre ) === 0 ){
return 1;
}else{
return genre.indexOf(Math.max.apply(null,genre)) + 2;
}
}
// Async article submission: scrape the entered URL and preview/pre-fill the form.
$("#form_url").on("keyup", function(){
$("#form_image, #form_title, #form_source, #form_genre, #form_here").val("");
var input = $("#form_url").val();
if( input === "" | input === null | input === undefined ){
// Input is empty
$("#contribution_bottom").hide();
$("#contribution_alert").hide();
}else{
// Cross-domain scraping via Yahoo YQL
/**
 * jQuery.ajax mid - CROSS DOMAIN AJAX
 * ---
 * @author <NAME> (http://james.padolsey.com)
 * @version 0.11
 * @updated 12-JAN-10
 * ---
 * Note: Read the README!
 * ---
 * @info http://james.padolsey.com/javascript/cross-domain-requests-with-jquery/
 */
// Monkey-patch: route external GET requests through YQL as JSONP.
jQuery.ajax = (function(_ajax){
var protocol = location.protocol,
hostname = location.hostname,
exRegex = RegExp(protocol + '//' + hostname),
YQL = 'http' + (/^https/.test(protocol)?'s':'') + '://query.yahooapis.com/v1/public/yql?callback=?',
query = 'select * from htmlstring where url="{URL}" and xpath="*"';
function isExternal(url) {
return !exRegex.test(url) && /:\/\//.test(url);
}
return function(o) {
var url = o.url;
if ( /get/i.test(o.type) && !/json/i.test(o.dataType) && isExternal(url) ) {
// Manipulate options so that JSONP-x request is made to YQL
o.url = YQL;
o.dataType = 'json';
o.data = {
q: query.replace(
'{URL}',
url + (o.data ?
(/\?/.test(url) ? '&' : '?') + jQuery.param(o.data)
: '')
),
diagnostics: true,
env: 'store://datatables.org/alltableswithkeys',
format: 'xml'
};
// Since it's a JSONP request
// complete === success
if (!o.success && o.complete) {
o.success = o.complete;
delete o.complete;
}
o.success = (function(_success){
return function(data) {
if (_success) {
// Fake XHR callback.
_success.call(this, {
responseText: (data.results[0] || '')
// YQL screws with <script>s
// Get rid of them
.replace(/<script[^>]+?\/>|<script(.|\s)*?\/script>/gi, '')
}, 'success');
}
};
})(o.success);
}
return _ajax.apply(this, arguments);
};
})(jQuery.ajax);
$.ajax({
type: 'GET',
url: input,
}).done(function(data){
var result = "";
result = data.results[0]
if( result === undefined ){
// URL not found
$("#form_image, #form_title, #form_source, #form_genre, #form_here").val("");
$("#contribution_bottom").hide();
$("#contribution_alert").text("指定された URL のページが見つかりません");
$("#contribution_alert").show();
}else{
// URL found
// Working variables
var keywords = "";
var title = "";
var description = "";
var image = "";
var source = "";
// Scrape keywords
if( result.match(/(<meta content=")(.*)(" name="keywords"\/>)/) !== null ){
keywords = result.match(/(<meta content=")(.*?)(" .*)(name="keywords"\/>)/)[2]
}
// Scrape title (og:title, then twitter:title, then <title>)
if( result.match(/(<meta content=")(.*)(" property="og:title"\/>)/) !== null ){
title = result.match(/(<meta content=")(.*?)(" .*)(property="og:title"\/>)/)[2].replace(/&quot;/g, '"')
}else if( result.match(/(<meta content=")(.*)(" name="twitter:title"\/>)/) !== null ){
title = result.match(/(<meta content=")(.*?)(" .*)(name="twitter:title"\/>)/)[2].replace(/&quot;/g, '"')
}else if( result.match(/(<title>)(.*)(<\/title>)/) !== null ){
title = result.match(/(<title>)(.*)(<\/title>)/)[2].replace(/&quot;/g, '"')
}
// Scrape description
if( result.match(/(<meta content=")(.*)(" property="og:description"\/>)/) !== null ){
description = result.match(/(<meta content=")(.*?)(" .*)(property="og:description"\/>)/)[2].replace(/&quot;/g, '"')
}else if( result.match(/(<meta content=")(.*)(" name="twitter:description"\/>)/) !== null ){
description = result.match(/(<meta content=")(.*?)(" .*)(name="twitter:description"\/>)/)[2].replace(/&quot;/g, '"')
}else if( result.match(/(<meta content=")(.*)(" name="description"\/>)/) !== null ){
description = result.match(/(<meta content=")(.*?)(" .*)(name="description"\/>)/)[2].replace(/&quot;/g, '"')
}
// Scrape image
if( result.match(/(<meta content=")(.*)(" property="og:image"\/>)/) !== null ){
image = result.match(/(<meta content=")(.*?)(" .*)(property="og:image"\/>)/)[2]
}else if( result.match(/(<meta content=")(.*)(" name="twitter:image"\/>)/) !== null ){
image = result.match(/(<meta content=")(.*?)(" .*)(name="twitter:image"\/>)/)[2]
}
// Scrape source (site name)
if( result.match(/(<meta content=")(.*)(" property="og:site_name"\/>)/) !== null ){
source = result.match(/(<meta content=")(.*?)(" .*)(property="og:site_name"\/>)/)[2].replace(/&quot;/g, '"')
}else if( result.match(/(<meta content=")(.*)(" property="nordot:contentsholder_unit_name"\/>)/) !== null ){
source = result.match(/(<meta content=")(.*?)(" .*)(property="nordot:contentsholder_unit_name"\/>)/)[2].replace(/&quot;/g, '"')
}
var appropriate_article = appropriate_judgement( title, keywords, description, source );
if( appropriate_article === 1 ){
// Possibly an inappropriate site
$("#form_image, #form_title, #form_source, #form_genre, #form_here").val("");
$("#contribution_bottom").hide();
$("#contribution_alert").text("指定された URL は不適切なサイトの可能性があります");
$("#contribution_alert").show();
}else if( appropriate_article === 2 ){
// Not flagged as inappropriate
// Hide the modal's explanatory header
if( $("#contribution_header").css("display") === "block" ){
$("#contribution_header").slideUp("slow");
}
$("#contribution_alert").hide();
// Decide the genre
var genre = which_genre( keywords, title, description )
// URL exists; branch on which required fields are present
if( title === "" || title === null || title.match(/エラー/) !== null || title.match(/ERROR/i) !== null || title.match(/404/i) !== null || title.match(/Not Found/i) !== null || title.match(/見つかりません/i) !== null || title.match(/みつかりません/i) !== null || title.match(/登録/i) !== null ){
// Title not found (or looks like an error page)
$("#contribution_bottom").hide();
$("#contribution_alert").text("指定された URL のページが見つかりません");
$("#contribution_alert").show();
}else if( image === null || image === "" ){
// Title present but no image
// Render the scraped preview
$("#contribution_title").text(title);
if( $("#contribution_descroption").text(description) !== null ){
$("#contribution_descroption").text(description);
}
// Show the preview (full-width, image hidden)
$("#contribution_bottom").show();
$("#contribution_image").hide();
$(".bottom__article--detail").css({
"width": "100%",
"padding-left": "0"
})
// Fill the hidden form fields
$("#form_title").val(title);
// NOTE(review): (source !== "" || source !== null) is always true — the
// "No site name" fallback branch is unreachable; should likely be &&.
if( source !== "" || source !== null ){
$("#form_source").val(source);
}else{
$("#form_source").val("No site name");
}
$("#form_genre").val(genre);
$("#form_here").val(location.href)
}else{
// Both title and image present
// Render the scraped preview
$("#contribution_title").text(title);
if( $("#contribution_descroption").text(description) !== null ){
$("#contribution_descroption").text(description)
}
$("#contribution_image").attr('src', image)
// Show the preview (with image)
$("#contribution_bottom").show();
$("#contribution_image").show();
$(".bottom__article--detail").css({
"width": "383px",
"padding-left": "20px"
})
// Fill the hidden form fields
$("#form_title").val(title);
// NOTE(review): same always-true condition as above.
if( source !== "" || source !== null ){
$("#form_source").val(source);
}else{
$("#form_source").val("No site name");
}
$("#form_image").val(image);
$("#form_genre").val(genre);
$("#form_here").val(location.href)
}
}
}
}).fail(function(){
$("#form_image, #form_title, #form_source, #form_genre, #form_here").val("");
$("#contribution_bottom").hide();
$("#contribution_alert").text("指定された URL のページがない、もしくは YQL の API が止まっています。");
$("#contribution_alert").show();
// YQL is down, so force a reload to avoid breaking other APIs.
// Remove this block once YQL is working again.
$("#contribution__box").on("click", function(e){
e.stopPropagation();
})
$("#contribution, #close__contribution").on("click", function(){
location.reload()
});
})
}
});
})
<file_sep>class Article < ApplicationRecord
  require 'date'

  # Associations
  has_many :picks

  # Validations
  validates :title, presence: true
  validates :url, presence: true
  validates :genre, presence: true

  # Human-readable category label for this article's genre (1-based index).
  def which_title
    labels = ["エンタメ", "テクノロジー", "ビジネス", "政治・経済", "金融・マーケット", "キャリア・教育", "社会・スポーツ", "イノベーション"]
    labels[self.genre - 1]
  end

  # Relative "time ago" label for the article's creation time:
  # absolute date once it is 1+ days old, else hours / minutes / seconds ago.
  def how_article_time
    created = self.created_at
    delta = DateTime.now.to_i - created.to_i
    days = delta / 60 / 60 / 24
    hours = delta / 60 / 60 % 24
    minutes = delta / 60 % 60
    if days != 0
      created.strftime("%Y年%m月%d日")
    elsif hours != 0
      "#{hours}時間前"
    elsif minutes != 0
      "#{minutes}分前"
    else
      "#{delta % 60}秒前"
    end
  end
end
<file_sep>class UsersController < ApplicationController
# Public profile page: the user's picks plus the total likes their picks received.
def show
@user = User.find( params[:id] )
@picks = Pick.where( user_id: @user ).order( "id DESC" )
@likes = Like.where( pick_user_id: @user ).count
# binding.pry
end
# Renders the edit form (the view reads current_user).
# NOTE(review): no authentication/ownership guard visible here — confirm a
# Devise before_action protects :edit/:update elsewhere.
def edit
end
def update
user = User.find(current_user)
if user.update(user_params)
redirect_to user_path(current_user.id)
else
render :edit
end
end
private
# Strong parameters for profile updates.
def user_params
params.require(:user).permit(:last_name, :first_name, :email, :company, :position, :profile)
end
end
<file_sep>class PicksController < ApplicationController
# CRUD for a user's pick (comment) on an article. current_user.anonymous is
# bookkept throughout — it appears to be a remaining quota of anonymous picks
# (decremented when used, refunded when released); TODO confirm semantics.
def create
pick = Pick.new(pick_params)
pick.likes_count = 0
if pick.save
# Creating an anonymous pick consumes one anonymous slot.
if pick.anonymous == 1
current_user.anonymous -= 1
current_user.save
end
redirect_to article_path(params[:article_id])
else
# NOTE(review): this only evaluates root_path — no redirect/render happens; confirm intent.
root_path
end
end
def update
@pick = Pick.find(params[:id])
# binding.pry
# Body emptied while anonymous: drop anonymity and refund the slot.
if pick_params[:body].empty? && pick_params[:anonymous].to_i == 1
@pick.update(pick_params)
@pick.update(anonymous: 0)
current_user.anonymous += 1
current_user.save
# Switched anonymous -> signed: refund one slot.
elsif @pick.anonymous > pick_params[:anonymous].to_i
@pick.update(pick_params)
current_user.anonymous += 1
current_user.save
# Switched signed -> anonymous: consume one slot.
elsif @pick.anonymous < pick_params[:anonymous].to_i
@pick.update(pick_params)
current_user.anonymous -= 1
current_user.save
else
@pick.update(pick_params)
end
redirect_to article_path(params[:article_id])
end
def destroy
pick = Pick.find(destroy_params[:id])
if pick.destroy
# Deleting an anonymous pick refunds the slot.
if pick.anonymous == 1
current_user.anonymous += 1
current_user.save
end
redirect_to article_path(destroy_params[:article_id])
end
end
private
# Strong params; genre is copied from the parent article so pick queries can
# filter by genre without a join.
def pick_params
params.require(:pick).permit(:body, :anonymous).merge(genre: Article.find(params[:article_id]).genre, user_id: current_user.id, article_id: params[:article_id] )
end
def destroy_params
params.permit(:article_id, :id)
end
end
<file_sep>Rails.application.routes.draw do
# Devise mounted under /register with a custom registrations controller.
devise_for :users, {path: :register, controllers: { registrations: 'registrations' }}
root 'articles#index'
resources :articles, only: [:index, :show, :create] do
resources :picks, only: [:create, :update, :destroy]
end
resources :likes, only: [:create, :destroy]
resources :users, only: [:show, :edit, :update] do
resources :follows, only: [:index, :create, :destroy]
resources :followers, only: [:index]
end
# :id is a genre number, not a Category record id (see CategoryController#show).
resources :category, only: [:show]
end
<file_sep>$(document).on('turbolinks:load', function(){
// Hide the "everyone's comments" heading when there are no picks at all.
if( $("#all_picks").children(".pick_item").length === 0 ){
$("#main__picks--title").hide();
}
// Tab labels with counts: all / signed (data-anonymous=0) / anonymous (=1).
var all_picks_num = $("#all_picks").children(".pick_item").length;
var signed_picks_num = $("#all_picks").children(".pick_item[data-anonymous='0']").length;
var anonymous_picks_num = $("#all_picks").children(".pick_item[data-anonymous='1']").length;
$("#all_pick_tab").text("みんなのコメント一覧 " + all_picks_num + "件");
$("#signed_pick_tab").text("実名Picks " + signed_picks_num + "件");
$("#anonymous_pick_tab").text("匿名Picks " + anonymous_picks_num + "件");
// Tab switching between the three lists.
$(document).on("click", "#all_pick_tab, #signed_pick_tab, #anonymous_pick_tab", function(){
$(".this_tab").removeClass("this_tab");
$(this).addClass("this_tab");
$("#all_picks").children(".pick_item").hide();
if( $(this).attr("id") == "all_pick_tab" ){
$("#all_picks").children(".pick_item").show();
}else if( $(this).attr("id") == "signed_pick_tab" ){
$("#all_picks").children(".pick_item[data-anonymous='0']").show();
}else{
$("#all_picks").children(".pick_item[data-anonymous='1']").show();
}
})
// Count of picks that have no body text.
var pick_num = $(".display-picklist__link").length
$("#display-picklist-btn").text("他 " + pick_num + " 件の Pick を表示");
// Expand the list of body-less picks.
$("#display-picklist-btn").on("click", function(){
$(this).hide();
$("#display-picklist").show();
});
// Focus styling for the pick text field.
// NOTE(review): anonymous_initial_value is an undeclared (implicit global)
// variable and is never read afterwards — confirm it can be removed.
anonymous_initial_value = $("#anonymous_text_field").val()
$(document).on("focusin", "#text_field", function(){
$("#text_field").removeClass("text").addClass("focus_text");
$("#right_box").addClass("focus_right");
});
// Keep the submit-button label and anonymous toggle in sync with the text.
$(document).on("keyup", "#text_field", function(){
if( $("#text_field").val() == '' ){
$(".anonymous_zero").hide();
$("#anonymous_text_field").attr("value", 0)
$(".change_word").text("Pick")
$(".anonymous_off").hide()
$(".anonymous_on").hide()
}else{
$(".anonymous_zero").show();
if( $("#anonymous_text_field").val() == 0 ){
$(".change_word").text("Pick(実名)")
$(".anonymous_off").show()
}else{
$(".change_word").text("Pick(匿名)")
$(".anonymous_on").show()
}
}
})
$(document).on("focusout", "#text_field", function(){
if( $("#text_field").val() == '' ){
$("#text_field").removeClass("focus_text").addClass("text")
$("#right_box").removeClass("focus_right");
}
})
// Toggling the anonymous button.
$(document).on("click", ".anonymous_off", function(){
$(this).hide()
$(".anonymous_on").show();
$("#anonymous_text_field").attr("value", 1)
$(".change_word").text("Pick(匿名)")
});
$(document).on("click", ".anonymous_on", function(){
$(this).hide()
$(".anonymous_off").show()
$("#anonymous_text_field").attr("value", 0)
$(".change_word").text("Pick(実名)")
});
// Submit handling.
// Creating a new pick: anonymous picks require an extra confirmation modal.
$(document).on("click", "#create_pick", function(){
if( $("#anonymous_text_field").val() == 0 ){
$("#pick_form").submit();
}else{
$("#anonymous_confirmation_modal").show();
$("#anonymous_confirmation").text("上記事項、確認した上でPick")
$("#anonymous_cancel").on("click", function(){
$("#anonymous_confirmation_modal").hide();
});
$("#anonymous_confirmation").on("click", function(){
$("#pick_form").submit();
});
}
})
// Updating an existing pick: show the update-confirmation dialog (and the
// anonymous confirmation first when applicable).
$(document).on("click", "#confirmation_button", function(){
if( $("#anonymous_text_field").val() == 0 ){
$("#confirmation").show();
}else if( $("#text_field").val() == '' ){
$("#confirmation").show();
}else{
$("#anonymous_confirmation_modal").show();
$("#anonymous_confirmation").text("上記事項、確認しました")
$("#anonymous_cancel").on("click", function(){
$("#anonymous_confirmation_modal").hide();
});
$("#anonymous_confirmation").on("click", function(){
$("#anonymous_confirmation_modal").hide();
$("#confirmation").show();
});
}
})
$("#update_cancel").on("click", function(){
$("#confirmation").hide();
});
$("#update_confirmation").on("click", function(){
$("#pick_form").submit();
});
});
<file_sep>class FollowsController < ApplicationController
# Follow-list page for a user, plus the picks/likes shown in the profile header.
def index
@user = User.find( params[:user_id] )
@picks = Pick.where( user_id: @user ).order( "id DESC" )
@likes = Like.where( pick_user_id: @user ).count
@follow = @user.all_following
@follower = @user.followers
end
# Follow a user (AJAX) and respond with updated counts (see the JSON view).
def create
@following = User.find(follow_params[:opponent_id])
current_user.follow(@following)
@user = User.find(follow_params[:user_id])
@follow_num = @user.follow_count
@follower_num = @user.followers_count
respond_to do |format|
format.html
format.json
end
end
# Unfollow a user (AJAX).
def destroy
@followed = User.find(follow_params[:opponent_id])
current_user.stop_following(@followed)
@user = User.find(follow_params[:user_id])
@follow_num = @user.follow_count
@follower_num = @user.followers_count
respond_to do |format|
format.html
format.json
end
end
private
# opponent_id = the user being (un)followed; user_id = the profile page owner.
def follow_params
params.permit(:opponent_id, :user_id)
end
end
<file_sep>class LikesController < ApplicationController
# Like a pick (AJAX). pick_user_id denormalizes the pick's author so per-user
# like totals can be counted; first_or_initialize keeps the action idempotent.
def create
@pick_user_id = Pick.find( params[:pick_id] ).user_id
@like = Like.where( user_id: current_user.id, pick_id: params[:pick_id], pick_user_id: @pick_user_id ).first_or_initialize
if @like.save
@pick = Pick.find( params[:pick_id] )
# NOTE(review): nothing here increments likes_count — presumably a counter
# cache or callback elsewhere keeps it in sync; confirm.
render json: @pick, only: [:likes_count]
end
end
# Remove a like (AJAX).
def destroy
@like = current_user.likes.find_by( pick_id: params[:pick_id] )
if @like.destroy
@pick = Pick.find( params[:pick_id] )
render json: @pick, only: [:likes_count]
end
end
private
# NOTE(review): declared but unused — both actions read params[:pick_id] directly.
def like_params
params.permit(:pick_id)
end
end
<file_sep>class ApplicationController < ActionController::Base
  protect_from_forgery with: :exception
  before_action :configure_permitted_parameters, if: :devise_controller?
  # Fix: `after_filter` was deprecated in Rails 5.0 and removed in 5.1;
  # `after_action` is the supported name with identical behavior.
  after_action :store_location

  # Extra Devise fields permitted when a user updates their account.
  def configure_permitted_parameters
    devise_parameter_sanitizer.permit(:account_update, keys: [:first_name, :last_name, :company, :image, :position])
  end

  # After sign-in: return to the stored page unless the profile is still
  # incomplete (no first name), in which case send the user to the profile form.
  def after_sign_in_path_for(resource)
    if user_signed_in? && current_user.first_name?
      session[:previous_url] || root_path
    else
      edit_user_registration_path
    end
  end

  def after_sign_out_path_for(resource)
    root_path
  end

  # After a profile update, go back to where the user came from.
  def after_update_path_for(resource)
    session[:previous_url] || root_path
  end

  private

  # Remember the last non-auth, non-XHR page so Devise can redirect back to it.
  def store_location
    if (request.fullpath != "/register/sign_in" && \
        request.fullpath != "/register/sign_up" && \
        request.fullpath != "/register/edit" && \
        request.fullpath != "/register/password" && \
        !request.xhr?)
      session[:previous_url] = request.fullpath
    end
  end
end
<file_sep># json.userId current_user.id
# json.userFirst current_user.first_name
# json.userLast current_user.last_name
# json.userImage current_user.image
# json.userCompany current_user.company
# json.userPosition current_user.position
# json.body @pick.body
# json.yourselflike @like
# json.articleLike @pick.likes_count
# json.id @pick.id
<file_sep>$(document).on('turbolinks:load', function(){
// Clamp the profile blurb to 2 lines and reveal "read more" when it overflows
// (37px threshold presumably matches two rendered lines — confirm with CSS).
if( parseInt($("#profile").css("height")) > 37 ){
$(".information__profile").css({
'display': '-webkit-box',
'-webkit-box-orient': 'vertical',
'-webkit-line-clamp': '2',
'overflow': 'hidden'
})
$("#read_more").show();
}
// Expand the full profile and hide the link once clicked.
$(document).on('click', '#read_more', function(){
$("#profile").css("display", "block");
$(this).hide();
})
});
<file_sep>$(document).on('turbolinks:load', function(){
// Follow a user (AJAX).
$(document).on("click", ".follow_button__follow", function(e){
// Values carried on the clicked button.
e.preventDefault();
var opponent_id = $(this).data("id");
var page_user_id = $(this).data("here");
var this_user = $(e.currentTarget);
// POST to FollowsController#create; response carries the updated counts.
$.ajax({
url: '/users/' + page_user_id + '/follows',
type: 'POST',
data: {
opponent_id: opponent_id,
},
dataType: 'json'
}).done(function(data){
this_user.removeClass("follow_button__follow").addClass("follow_button__followed");
this_user.find(".word").text("フォロー中");
$("#follow_num").text(data.follow_num);
$("#follower_num").text(data.follower_num);
}).fail(function(){
});
});
// Unfollow a user (AJAX).
$(document).on("click", ".follow_button__followed", function(e){
// Values carried on the clicked button.
e.preventDefault();
var opponent_id = $(this).data("id");
var page_user_id = $(this).data("here");
var this_user = $(e.currentTarget);
// DELETE to FollowsController#destroy.
$.ajax({
url: '/users/' + page_user_id + '/follows/' + opponent_id,
type: 'DELETE',
data: {
opponent_id: opponent_id,
},
dataType: 'json'
}).done(function(data){
this_user.removeClass("follow_button__followed").addClass("follow_button__follow");
this_user.find(".word").text("フォロー");
$("#follow_num").text(data.follow_num);
$("#follower_num").text(data.follower_num);
}).fail(function(){
});
});
});
<file_sep>json.array! @side_picks do |pick|
# Sidebar pick payload: pick body plus enough user/article data to render a link.
json.id pick.id
json.body pick.body
json.userId pick.user.id
json.userFirst pick.user.first_name
json.userLast pick.user.last_name
json.userImage pick.user.image
json.articleId pick.article.id
json.articleTitle pick.article.title
json.anonymous pick.anonymous
end
<file_sep>class CreateLikes < ActiveRecord::Migration[5.0]
def change
create_table :likes do |t|
t.references :user, foreign_key: true
t.references :pick, foreign_key: true
# Denormalized author of the liked pick (used for per-user like totals).
t.integer :pick_user_id, null: false
t.timestamps
end
# One like per user per pick.
add_index :likes, [:user_id, :pick_id], unique: true
end
end
<file_sep>class FollowersController < ApplicationController
# Follower-list page; loads the same profile-header data as FollowsController#index.
def index
@user = User.find( params[:user_id] )
@picks = Pick.where( user_id: @user ).order( "id DESC" )
@likes = Like.where( pick_user_id: @user ).count
@follow = @user.all_following
@follower = @user.followers
end
end
<file_sep>$(document).on('turbolinks:load', function(){
// Sign-in modal open/close.
$("#sign_in, #sign_in_user_show").on("click", function(){
$("#popup_signin").show();
});
$("#popup_signin_sign").on("click", function(e){
e.stopPropagation();
});
$("#popup_signin, #close__in").on("click", function(){
$("#popup_signin").hide();
});
// Sign-up modal open/close.
$("#sign_up, #sign_up_side_logo, #sign_up_side_button, #sign_up_side_follow_button, #sign_up_user_show, #fav_sign_up, .fa_sign_up").on("click", function(){
$("#popup_signup").show();
});
$("#sign_up_show_text, #sign_up_show_submit").on("click", function(e){
$("#popup_signup").show();
return false;
});
$("#popup_signup_sign").on("click", function(e){
e.stopPropagation();
});
$("#popup_signup, #close__up").on("click", function(){
$("#popup_signup").hide();
});
// Switching between the sign-up and sign-in modals.
$("#change_up").on("click", function(){
$("#popup_signin").hide();
$("#popup_signup").show();
});
$("#change_in").on("click", function(){
$("#popup_signup").hide();
$("#popup_signin").show();
});
// Pick form inside the contribution modal: keep the label/toggle in sync.
$(document).on("keyup", "#contribution_text_field", function(){
if( $("#contribution_text_field").val() == '' ){
$(".modal_anonymous_zero").hide();
$("#modal_anonymous_text_field").attr("value", 0)
$("#modal_change_word").text("Pick")
$("#anonymous_modal_off").hide()
$("#anonymous_modal_on").hide()
}else{
$(".modal_anonymous_zero").show();
if( $("#modal_anonymous_text_field").val() == 0 ){
$("#modal_change_word").text("Pick(実名)")
$("#anonymous_modal_off").show()
}else{
$("#modal_change_word").text("Pick(匿名)")
$("#anonymous_modal_on").show()
}
}
})
// Toggling the anonymous button.
$("#anonymous_modal_off").on("click", function(){
$(this).hide()
$("#anonymous_modal_on").show();
$("#modal_anonymous_text_field").attr("value", 1)
$("#modal_change_word").text("Pick(匿名)")
});
$("#anonymous_modal_on").on("click", function(){
$(this).hide()
$("#anonymous_modal_off").show()
$("#modal_anonymous_text_field").attr("value", 0)
$("#modal_change_word").text("Pick(実名)")
});
// Submit: anonymous picks require an extra confirmation modal.
$("#modal_change_word").on("click", function(){
if( $("#modal_anonymous_text_field").val() == 0 ){
$("#contribution_form").submit();
}else{
$("#anonymous_confirmation_modal").show();
$("#anonymous_confirmation").text("上記事項、確認した上でPick")
$("#anonymous_cancel").on("click", function(){
$("#anonymous_confirmation_modal").hide();
});
$("#anonymous_confirmation").on("click", function(){
$("#contribution_form").submit();
});
}
})
});
<file_sep>class AddLikeToPicks < ActiveRecord::Migration[5.0]
# Adds a cached like total to picks (set to 0 on create by PicksController).
# NOTE(review): NOT NULL without a default will fail on a non-empty picks
# table under strict SQL modes — confirm a default of 0 isn't needed.
def change
add_column :picks, :likes_count, :integer, null: false
end
end
<file_sep>$(document).on('turbolinks:load', function(){
// Only bind on article-detail pages.
// NOTE(review): the LAN IP and localhost are hard-coded — this silently does
// nothing in other environments; confirm whether a path-only match suffices.
if( location.href.match(/^http:\/\/192.168.3.11:3000\/articles\/[0-9]+/) != null || location.href.match(/^http:\/\/localhost:3000\/articles\/[0-9]+/) != null ){
// Register a like (AJAX).
$(document).on("click", ".fav_off", function(e){
// Values carried on the clicked element.
var pick_id = $(this).data("id");
var this_pick = $(e.currentTarget);
// POST to LikesController#create; response carries the new likes_count.
$.ajax({
url: '/likes',
type: 'POST',
data: {
pick_id: pick_id
},
dataType: 'json'
}).done(function(data){
this_pick.removeClass("fav_off").addClass("fav_on");
this_pick.find(".num").text( data.likes_count );
}).fail(function(){
});
});
// Remove a like (AJAX).
$(document).on("click", ".fav_on", function(e){
// Values carried on the clicked element.
var pick_id = $(this).data("id");
var this_pick = $(e.currentTarget);
// DELETE to LikesController#destroy.
$.ajax({
url: '/likes/' + pick_id,
type: 'DELETE',
data: {
pick_id: pick_id
},
dataType: 'json'
}).done(function(data){
this_pick.removeClass("fav_on").addClass("fav_off");
this_pick.find(".num").text( data.likes_count );
}).fail(function(){
});
});
}
})
<file_sep>json.follow_num @follow_num
json.follower_num @follower_num
# Counts consumed by the follow/unfollow AJAX handlers (data.follow_num / data.follower_num).
<file_sep>require 'mechanize'
# One-shot scraper that seeds Article rows from newspicks.com category pages.
class Scraping
# Collects category links from the top-page menu and crawls each one.
# Genre numbering starts at 2 (1 appears to be entertainment, seeded elsewhere).
def self.category_link
category_links = []
agent = Mechanize.new
page = agent.get("https://newspicks.com/")
elements = page.search(".theme_menu .menu-item")
len = elements.length
num = 0
# NOTE(review): `num < len-1` skips the last menu item — confirm intentional.
while num < len-1
category_links << "https://newspicks.com" + elements[num][:href]
num += 1
end
genre = 2
category_links.each do |link|
technology_link(link, genre)
genre += 1
end
end
# Collects article links (paths starting with "/n") from one category page
# and scrapes each article. Despite the name, it handles every category.
def self.technology_link(link, genre)
article_links = []
agent = Mechanize.new
page = agent.get(link)
elements = page.search(".news-card a:first-of-type")
elements.each do |ele|
if ele.get_attribute('href').match(/^\/n/)
article_links << "https://newspicks.com" + ele.get_attribute('href')
end
end
article_links.each do |link|
get_details(link, genre)
end
end
# Scrapes one article page and upserts an Article keyed by title.
# The image URL is extracted from the inline background-image style.
def self.get_details(link, genre)
agent = Mechanize.new
page = agent.get(link)
element_image = page.at(".news-image")[:style].gsub(/background-image:url\(/, "").gsub(/\)/, "") if page.at(".news-image")
element_title = page.at("h1.title").inner_text if page.at("h1.title")
element_url = page.at(".read-more")[:href] if page.at(".read-more")
element_source = page.at(".meta .meta-block:first-child").inner_text if page.at(".meta .meta-block:first-child")
article = Article.where(title: element_title).first_or_initialize
article.genre = genre
article.image = element_image
article.url = element_url
article.source = element_source
article.save
end
end
<file_sep>class ArticlesController < ApplicationController
before_action :sidebar, only: [:index, :show]
# Front page: for each genre, surface the most-picked articles, each
# represented by one Pick (@hot covers every genre except entertainment).
def index
  @hot           = top_picks(Pick.where.not(genre: 1), 7)
  @entertainment = top_picks(Pick.where(genre: 1), 3)
  @technology    = top_picks(Pick.where(genre: 2), 3)
  @business      = top_picks(Pick.where(genre: 3), 3)
  @economic      = top_picks(Pick.where(genre: 4), 3)
  @market        = top_picks(Pick.where(genre: 5), 3)
  @education     = top_picks(Pick.where(genre: 6), 3)
  @sports        = top_picks(Pick.where(genre: 7), 3)
  @innovation    = top_picks(Pick.where(genre: 8), 3)
  @articles      = Article.order("id DESC").limit(8)
end

# For the given Pick scope, find the `limit` most-picked article ids and
# return one representative Pick (with :user/:article preloaded) per
# article. Extracted to replace nine copies of the same query pipeline.
def top_picks(scope, limit)
  ids = scope.group(:article_id).order('count_article_id DESC').limit(limit).count(:article_id).keys
  ids.collect { |key| Pick.includes(:user).includes(:article).where(article_id: key).order("body ASC").last }
end
private :top_picks
# Article detail page. @pick is the signed-in user's pick for this article
# (existing or freshly initialized) so the form can create or edit in place.
def show
if user_signed_in?
@pick = Pick.where(article_id: params[:id]).where(user_id: current_user.id).first_or_initialize
else
@pick = Pick.new
end
@article = Article.find(params[:id])
# Up to 7 newest articles in the same genre, excluding this one.
@related = Article.where(genre: @article[:genre]).where.not(id: params[:id]).order("id DESC").limit(7)
end
# Submit a pick for an article. If an article with the same title already
# exists we only upsert the current user's pick; otherwise the article is
# created first and the author's pick is attached to it.
def create
article = Article.where( title: contribution_params[:title] ).first_or_initialize
if article.id?
# Existing article: create or update this user's pick for it.
pick = Pick.where( user_id: current_user.id, article_id: article.id ).first_or_initialize
pick.body = contribution_params[:body]
pick.genre = contribution_params[:genre]
pick.anonymous = contribution_params[:anonymous]
pick.likes_count = 0
pick.save
# Posting anonymously spends one of the user's anonymous credits.
# NOTE(review): nothing here stops the counter going negative — confirm
# the form hides the option when current_user.anonymous is 0.
if contribution_params[:anonymous] == '1'
current_user.anonymous -= 1
current_user.save
end
else
# New article: persist it, then create the author's first pick.
article.title = contribution_params[:title]
article.url = contribution_params[:url]
article.image = contribution_params[:image]
article.source = contribution_params[:source]
article.genre = contribution_params[:genre]
if article.save
contribution_article = Article.where(title: contribution_params[:title])
pick = Pick.new( body: contribution_params[:body], genre: contribution_params[:genre], user_id: current_user.id, article_id: contribution_article[0].id, likes_count: 0, anonymous: contribution_params[:anonymous] )
pick.save
if contribution_params[:anonymous] == '1'
current_user.anonymous -= 1
current_user.save
end
end
end
# Bounce back to wherever the pick was submitted from (:here carries it).
redirect_to contribution_params[:here]
end
private
# Loads the 10 newest picks shown in the sidebar (runs as a before_action
# for index/show; also answers JSON for async refreshes).
def sidebar
@side_picks = Pick.includes(:user).includes(:article).order("id DESC").limit(10)
respond_to do |format|
format.html
format.json
end
end
# Strong parameters for pick/article submission. :here carries the return
# URL used by the post-create redirect.
def contribution_params
params.permit(:url, :image, :title, :source, :genre, :body, :anonymous, :here )
end
end
<file_sep># DB設計
## Users table
|Column |Type |Option |
|----------|-------|-----------------------|
|id |integer|null: false |
|first_name|string | |
|last_name |string | |
|mail |string |null: false |
|password |string |null: false |
|company |string | |
|position |string | |
|image |string | |
|profile |string | |
|anonymous |integer|null: false, default: 5|
### Association
* has_many :likes
* has_many :picks
* acts_as_followable
* acts_as_follower
### Add index
* :email, unique: true
* :reset_password_token, unique: true
## Picks table
|Column |Type |Option |
|-----------|-------|------------------------------|
|id |integer|null: false |
|body |text | |
|user_id |integer|null: false, foreign_key: true|
|article_id |integer|null: false, foreign_key: true|
|created_at |string |null: false |
|genre |integer|null: false |
|likes_count|integer|null: false |
|anonymous |integer|null: false |
### Association
* belongs_to :user
* belongs_to :article
* has_many :likes, dependent: :destroy
### Add index
* [:user_id, :article_id], unique: true
### Validation
* :user_id, presence: true
* :article_id, presence: true
* :genre, presence: true
## Likes table
|Column |Type |Option |
|------------|-------|------------------------------|
|id |integer|null: false |
|user_id |integer|null: false, foreign_key: true|
|pick_id |integer|null: false, foreign_key: true|
|pick_user_id|integer|null: false, foreign_key: true|
### Association
* belongs_to :pick, counter_cache: :likes_count
* belongs_to :user
### Add index
* [:user_id, :pick_id], unique: true
### Validation
* :user_id, uniqueness: { scope: [:pick_id] }
## Articles table
|Column |Type |Option |
|---------|-------|-----------|
|id |integer|null: false|
|title |string |null: false|
|image |string | |
|url |string |null: false|
|genre |integer|null: false|
|source |string |null: false|
### Association
* has_many :picks
### Validation
* :title, presence: true
* :url, presence: true
* :genre, presence: true
## Follows table (gem 'acts_as_follower')
|Column |Type |Option |
|---------------|-------|------------------------------|
|id |integer|null: false |
|followable_type|string |null: false, foreign_key: true|
|followable_id |integer|null: false |
|follower_type |string |null: false, foreign_key: true|
|follower_id |integer|null: false |
|blocked |TINYINT|null: false |
### Association
* belongs_to :followable, polymorphic: true
* belongs_to :follower, polymorphic: true
### Add index
* ["follower_id", "follower_type"], name: "fk_follows"
* ["followable_id", "followable_type"], name: "fk_followables"
* [:followable_id, :follower_id], unique: true
<file_sep>class Like < ApplicationRecord
# Each like bumps the parent pick's likes_count via counter_cache.
belongs_to :pick, counter_cache: :likes_count
belongs_to :user
# A user may like a given pick at most once.
validates :user_id, uniqueness: { scope: [:pick_id] }
end
<file_sep>class AddIndexToFollows < ActiveRecord::Migration[5.0]
def change
add_index :follows, [:followable_id, :follower_id], unique: true
end
end
| 2cd107c3a9f4db99e61d0f0e7673d603927374a9 | [
"JavaScript",
"Ruby",
"Markdown"
] | 27 | Ruby | nao37/Naos-Picks | eb5bc3c6ba9e8e8606d2890ffb79ccf737946a4a | a28721f97ba2dc94141da988b2391228c03cc04f |
refs/heads/master | <file_sep>import Vue from 'vue'
import Vuetify from 'vuetify'
import { BootstrapVue } from 'bootstrap-vue'

// Register both UI libraries on the global Vue instance so their
// components and directives are available app-wide.
Vue.use(Vuetify)
Vue.use(BootstrapVue)
| 5eb2cd4aa678ccb8a2aa33052bdfa5aa36acfca7 | [
"JavaScript"
] | 1 | JavaScript | Moyib/NuxtJS | 270320b7424989b6496d4f5ac0789c8090989760 | f6a1934105f36066a03f44893015584379ce4506 |
refs/heads/master | <file_sep>scipy
numpy
tensorflow==1.5
flask
gunicorn
flask_cors
gensim<file_sep># Random sentence generator

## How to install locally?
The *client* and the *Flask API* are derived from: https://github.com/guillaumegenthial/api_ner
The following requirements are needed:
- numpy
- tensorflow==1.5
- flask
- gunicorn
- gensim
To run the service:
- run *app.py*.
- open in your favorite browser the client stored in *client/client.html*.
- enjoy the service...
## How to deploy ?
- Heroku (with *requirements.txt*, *Procfile*, *runtime.txt*)
- ...
## Examples
- Input: *"i like"*
- Output: *"i like you , and i want you to know what i 've done ."*
<file_sep>import preprocess_helper
import tensorflow as tf
import numpy as np
import gensim
def get_model_api():
    """Load the trained language model once and return a closure that, given
    a seed string, samples a 30-token continuation and returns the sentence.
    """
    # 1. initialize model once and for all
    print("- Load vocabulary list")
    # NOTE(review): these three settings are never read below — the sizes
    # (20000 / 100) are repeated as literals further down; keep them in sync.
    vocabulary_size = 20000
    embedding_dimension = 100
    use_word2vec_emb = True
    vocab, generated_embeddings = preprocess_helper.load_frequent_words_and_embeddings("data/vocab_with_emb.txt")
    print("- Load checkpoint")
    checkpoint_file = tf.train.latest_checkpoint("model/pretrained_w2v_hidden_layer_1024/checkpoints/")
    graph = tf.Graph()
    with graph.as_default():
        session_conf = tf.ConfigProto(
            allow_soft_placement=True,
            log_device_placement=False)
        sess = tf.Session(config=session_conf)
        with sess.as_default():
            # Load the saved meta graph and restore variables
            print("- Restore the model")
            saver = tf.train.import_meta_graph("{}.meta".format(checkpoint_file))
            saver.restore(sess, checkpoint_file)
            # Get the placeholders from the graph by name
            inputs = graph.get_operation_by_name("inputs").outputs[0]
            vocab_embedding = graph.get_operation_by_name("vocab_embedding").outputs[0]
            discard_last_prediction = graph.get_operation_by_name("discard_last_prediction").outputs[0]
            # Tensors we want to evaluate
            probabilities = graph.get_operation_by_name("softmax_layer/Reshape_2").outputs[0]
            print("- Model successfully restored")
    # Construct the embedding matrix: prefer the pretrained word2vec vector
    # for each vocab token, falling back to the locally generated embedding.
    vocab_emb = np.zeros(shape=(20000, 100))
    w2v_model = gensim.models.KeyedVectors.load_word2vec_format("data/wordembeddings-dim100.word2vec", binary=False)
    for tok, idx in vocab.items():
        if tok in w2v_model.vocab:
            vocab_emb[idx] = w2v_model[tok]
        else:
            vocab_emb[idx] = generated_embeddings[tok]
    print("- Embedding done")
    def model_api(input_data):
        """
        Args:
            input_data: submitted to the API, raw string
        Returns:
            output_data: after some transformation, to be
                returned to the API
        """
        # 2. process input
        input_data = input_data.lower()
        # Map out-of-vocabulary words to the <unk> token and add <bos>.
        input_data_token = preprocess_helper.replace_unknown_words([input_data], vocab)
        input_data_token, _ = preprocess_helper.add_tokens_to_sentences(input_data_token,
                                                                        vocab,
                                                                        30,
                                                                        eos_token=False,
                                                                        pad_sentence=False)
        continuation_length = 30
        for cont in range(continuation_length):
            all_probabilities = sess.run([probabilities], {inputs: input_data_token,
                                                           vocab_embedding: vocab_emb,
                                                           discard_last_prediction: False})
            all_probabilities = np.squeeze(all_probabilities)
            # Keep only the distribution for the last time step.
            all_probabilities = all_probabilities[-1, :]
            # artificially set to zero the proba of the token <unk>
            # NOTE(review): 19996 is assumed to be the <unk> id — confirm it
            # matches the vocabulary file.
            all_probabilities[19996] = 0
            # sort and take the value of Nth largest one...
            # (top-10 sampling: zero out everything below the 10th-largest
            # probability, then renormalise before sampling)
            n = 20000 - 10
            sorted_proba = np.sort(all_probabilities)
            thresh = sorted_proba[n]
            all_probabilities[np.abs(all_probabilities) < thresh] = 0
            sum_all_probabilities = np.sum(all_probabilities)
            all_probabilities = all_probabilities / sum_all_probabilities
            # Sample the next token and append it to the running sequence.
            predicted_word = np.random.choice(20000, 1, p=all_probabilities)
            input_data_token = np.concatenate((input_data_token, [predicted_word]), axis=1)
        # Detokenise: reverse-lookup each id in the vocab dict (slow but
        # acceptable for 60 tokens per request).
        sentence = ''
        for i in range(len(input_data_token[0])):
            word = (list(vocab.keys())[list(vocab.values()).index(input_data_token[0][i])])
            sentence += word
            sentence += ' '
        # remove all the brackets sign
        sentence = sentence.replace("<bos>", "")
        sentence = sentence.replace("<eos>", "")
        sentence = sentence.replace("<pad>", "")
        sentence = sentence.replace("<", "")
        sentence = sentence.replace(">", "")
        print(sentence)
        # 5. return the output for the api
        return sentence
    return model_api
| 632dfba241f88c2de9739e01873f8939616a9de8 | [
"Markdown",
"Python",
"Text"
] | 3 | Text | guillaumejaume/sentence-generator-as-a-service | 18a7f4635437c85a8cd11f6c71ca4b6251fe916f | 17e32b52245bb1353cb43ae2aca81d49ff40dcd5 |
refs/heads/master | <file_sep><?php
$sLangName = "English";
$aLang = array(
'charset' => 'UTF-8',
'werde_countryspecificvat_countrytext' => 'Country Specific VAT ',
);<file_sep><?php
/**
* Metadata version
*/
$sMetadataVersion = '1.1';
/**
* Module information
*/
$aModule = array(
'id' => 'werde_countryspecificvat',
'title' => 'Country Specific VAT',
'description' => array(
'en' => 'Set different VAT Taxes for specific countries',
),
'thumbnail' => 'logo.png',
'version' => '1.0',
'author' => '<NAME>',
'url' => 'http://www.marjankolev.com',
'email' => '<EMAIL>',
// Extend class that will setting up entered VAT to any country
'extend' => array(
'oxvatselector' => 'werde/countryspecificvat/Application/models/modvatselector'
),
// Block in Admin->Countries, adding field for entering VAT for each country
'blocks' => array(
array(
'template' => 'country_main.tpl',
'block' => 'admin_country_main_form',
'file' => '/views/blocks/admin_country_main_form.tpl',
),
),
// Alter table oxcountry on activate (Adding field for Country Specific VAT)
'files' => array(
'countryspecificvat_events' => 'werde/countryspecificvat/admin/countryspecificvat_events.php',
),
// Alter table oxcountry on activate (Adding field for Country Specific VAT)
'events' => array(
'onActivate' => 'countryspecificvat_events::onActivate',
),
);<file_sep><?php
use OxidEsales\Eshop\Core\Registry;
use OxidEsales\Eshop\Core\DatabaseProvider;
use OxidEsales\Eshop\Core\DbMetaDataHandler;
use OxidEsales\Eshop\Core\Module\Module;
/**
 * Module lifecycle handlers: prepares the database schema when the
 * module is activated (wired up in metadata.php 'events').
 */
class countryspecificvat_events extends oxUbase{
    /**
     * Framework callback invoked on module activation.
     */
    public static function onActivate(){
        // Call function that is adding column when module is activated
        self::addColumn();
    }

    /**
     * Adds the per-country VAT column to oxcountry. Idempotent: the table
     * is only altered when the column does not exist yet.
     */
    protected static function addColumn(){
        $dbMetaDataHandler = oxNew(DbMetaDataHandler::class);
        $oxDb = DatabaseProvider::getDb();
        // Check if the field is not exist, only then alter the table
        if (!$dbMetaDataHandler->fieldExists('werde_country_specific_vat', 'oxcountry')) {
            $sql = "ALTER TABLE oxcountry ADD werde_country_specific_vat INT NOT NULL DEFAULT 0";
            $oxDb->execute($sql);
        }
    }
}<file_sep><?php
class modVatSelector extends modVatSelector_parent
{
/**
 * Returns the VAT for the given user. When the core selector has no VAT
 * for the user's active country, falls back to this module's per-country
 * value (oxcountry.werde_country_specific_vat).
 *
 * @param oxUser $oUser        given user object
 * @param bool   $blCacheReset reset cache
 *
 * @throws oxObjectException if wrong country
 * @return double | false
 */
public function getUserVat( oxUser $oUser, $blCacheReset = false )
{
    // Check if for the country VAT is defined.
    if ( ( $dVat = parent::getUserVat( $oUser, $blCacheReset ) ) === false ) {
        $countryId = $oUser->getActiveCountry();
        // Bind the country id as a query parameter instead of interpolating
        // it into the SQL string (the original was open to SQL injection).
        $sQLQuery = "SELECT werde_country_specific_vat FROM oxcountry WHERE oxid = ?";
        $countrySpecificVAT = (int) oxDb::getDb()->getOne($sQLQuery, array($countryId));
        if ($countrySpecificVAT) {
            $dVat = $countrySpecificVAT;
        }
    }
    return $dVat;
}
} | 5d1a81c90f0d4ac19c961a16a6baad3d2f63ffca | [
"PHP"
] | 4 | PHP | marjankolev/werde_countryspecificvat | 4b7203a467eebacace27550b299986d455f872cf | c4ed61ccb8f2d4c8c71eff6295cab884f31cb2d4 |
refs/heads/master | <file_sep>//import liabrary
import React,{Component} from 'react';
import ReactDOM from 'react-dom';
//create component
class Game extends Component {
render(){
return(
<div>
Hello from Game Component..
</div>
);
}
}
//export component
expor default Game;
<file_sep>//this code run automatically at client side
import React,{Component} from 'react';
import ReactDOM from 'react-dom';
import Game from './components/Game';
class App extends Component{
render(){
return(
//<Game />
<div>
Hello form main.js
</div>
);
}
};
Meteor.startup(()=>{
ReactDOM.render(<App/>,document.querySelector('.container'));
});
<file_sep># DemoProject
Get hands-on experience with GitHub functionality.
| c6e65e94dd16f0b75b8c50df18a3fec41f00c340 | [
"JavaScript",
"Markdown"
] | 3 | JavaScript | vitthalbhakad/DemoProject | 6294b45fd28d52945e39e4ae7e1359a85819eec5 | 98c7ed8c67c2108520aa834e9c0a7818c168fe3c |
refs/heads/master | <repo_name>RubenMorenoLeyva/eventGram<file_sep>/README.md
# eventGram
eventGram es un proyecto realizado por el equipo Talentum Malaga para el Concurso Equinox 2015 de 11paths.
Se trata de un bot de Telegram que permite realizar grupos entre personas que comparten el mismo ocio en su localidad.
Para su utilización, buscamos eventGram en el buscador de contactos de telegram y lo agregramos como contacto.
Una vez lo tengamos agregado, introducimos el comando /start o /help, y se desplegarán las funcionalidades del bot.
<file_sep>/event_bot.py
__author__ = 'Malaga-talentum'
import telebot
import sqlite3
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import csv
#path watchdog (csv file)
path=''
#put your token api bot
bot = telebot.TeleBot(token="")
list_tags = ['concert','theatre','football','basketball','science','exposition','conference','fair','circus']
class MyHandler(FileSystemEventHandler):
def on_modified(self, event):
#path watchdog (csv file)
if event.src_path == '':
list_news = send_event()
conn = sqlite3.connect("eventgram.db")
cursor = conn.cursor()
changes = cursor.execute("SELECT user_id FROM tags WHERE name=? and location=?",(list_news[0],list_news[1]))
for id in changes:
message = "There is a new event: "+ list_news[2]+ ".The event match your tag: " + list_news[0]+", "+list_news[1] +"."
bot.send_message(int(id[0]), message)
conn.close()
event_handler = MyHandler()
observer = Observer()
observer.schedule(event_handler, path=path, recursive=False)
observer.start()
def send_event():
#path where is csv file that contains events
with open("") as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
last_line = (row['tag'], row['location'],row['title'])
csvfile.close()
return last_line
@bot.message_handler(commands=['start', 'help'])
def send_welcome(message):
mess = "Welcome to EventGram!\nThe commands for user are:\n/subscribe tag location\n/unsubscribe tag location\nThe command for group is:\n/addfriends tag location link"
bot.reply_to(message, mess)
@bot.message_handler(commands=['subscribe'])
def subscribe_tag(message):
chat_id = message.chat.id
if chat_id > 0:
list = message.text.split()
if len(list) == 3:
tag = list[1]
location = list[2]
if list_tags.__contains__(tag):
conn = sqlite3.connect("eventgram.db")
c = conn.cursor()
c.execute("INSERT INTO tags (user_id, name, location) values (?,?,?)", (chat_id, tag,location))
conn.commit()
bot.reply_to(message,"Tag added")
conn.close()
else:
bot.reply_to(message, "Introduce a valid tag. Here are the valid tags: %s"%list_tags)
else:
bot.reply_to(message, "Invalid command. Example: /subscribe theatre madrid")
else:
bot.reply_to(message, "Groups cannot subscribe to events")
@bot.message_handler(commands=['addfriends'])
def addfriends_group(message):
chat_id = message.chat.id
if chat_id < 0:
list = message.text.split()
if len(list) == 4:
tag = list[1]
location = list[2]
link = list[3]
if list_tags.__contains__(tag):
conn = sqlite3.connect("eventgram.db")
cursor = conn.cursor()
result = cursor.execute("SELECT user_id FROM tags WHERE name=? and location=?",(tag,location))
for r in result:
mess = "You have been invited to the group " + message.chat.title + ". The tags are " + tag + ", " + location + ". Enjoy your new group!\n" + link
bot.send_message(int(r[0]),mess)
conn.close()
else:
bot.reply_to(message, "Introduce tag correcto: %s"%list_tags)
else:
bot.reply_to(message, "Invalid command. Example: /addfriends theatre madrid link")
else:
bot.reply_to(message, "Users cannot add friends. Create a group to add other people.")
@bot.message_handler(commands=['unsubscribe'])
def unsubscribe_tag(message):
chat_id = message.chat.id
if chat_id > 0:
list = message.text.split()
if len(list) == 3:
tag = list[1]
location = list[2]
if list_tags.__contains__(tag):
conn = sqlite3.connect("eventgram.db")
c = conn.cursor()
c.execute("DELETE FROM tags where user_id=? and name=? and location=?", (chat_id, tag, location,))
conn.commit()
bot.reply_to(message,"Tag removed")
conn.close()
else:
bot.reply_to(message, "Introduce a valid tag. Here are the valid tags: %s"%list_tags)
else:
bot.reply_to(message, "Invalid command. Example: /unsubscribe theatre madrid")
else:
bot.reply_to(message, "Groups cannot unsubscribe to events")
bot.polling()
| cd806293b49d33415cc7f3522663d38dba934b24 | [
"Markdown",
"Python"
] | 2 | Markdown | RubenMorenoLeyva/eventGram | e47664d6445767a3d6e272a48b02ffef4cc3e1b5 | 39e5ed2b90210a66ba1e180d581fe23755f8fd0b |
refs/heads/main | <repo_name>Swadreams/AngularJS-Weather-App<file_sep>/README.md
#Exercise on AngularJS Directives - (31/01/2019)
Live Website: https://swadreams.github.io/AngularJS-Weather-App
Getting Started
---------------
* Start the application by using index.html file.
----------------------------------------------------
* Folder Structure :
* directives :
* Contains reusable directives
* script :
* Contains js files of main and sub modules
* css :
* Contains css files

<file_sep>/directives/sunrise-sunset.js
'use strict';
const baseUrl = "http://api.openweathermap.org/data/2.5/weather?q=";
angular.module('sunrise-sunset',["clickDirectiveModule"])
.directive("sunriseSunset",["$http", function($http){
return {
restrict : "E",
template : `<div click-dir="testFunction(city)">
<div ng-bind='city' class='ind-city'> </div>
<table class="weather-table">
<thead> <th> Sunrise </th> <th> Sunset </th></thead>
<tbody> <tr><td> {{sunrise}} </td> <td> {{sunset}} </td> </tr>
</tbody></table>
<div>`,
scope : {
city : '@'
},
link : function(scope, element, attrs){
let url = baseUrl + scope.city + ",india&appid=3d8b309701a13f65b660fa2c64cdc517";
$http.get(url)
.then(function(response){
var data = response.data.sys;
scope.sunrise = new Date(data.sunrise * 1000).toLocaleTimeString()
scope.sunset = new Date(data.sunset * 1000).toLocaleTimeString();
}, function(error){
console.log("Error while calling API. Error : " + error);
});
scope.testFunction = function(city) {
alert(`You clicked on city ${city}. This function is called by custom click directive.`);
}
}
}
}]); <file_sep>/script/app.js
'use strict';
(function(){
angular.module("weatherApp",["ngRoute", "sunrise-sunset"])
.config(['$routeProvider', function($routeProvider){
$routeProvider
.when('/cities', {
template : `<section class="cities-weather"> <div class="item" ng-repeat="city in $ctrl.cities">
<sunrise-sunset city="{{city}}"> Test </sunrise-sunset> </div>
</section>`,
controller: 'weatherController as $ctrl'
});
$routeProvider.otherwise({redirectTo: '/cities'});
}])
.controller('weatherController', function(){
this.message = "Indian Cities Weather";
this.cities = [
"Pune",
"Mumbai",
"Delhi",
"Ahmedabad",
"Bengaluru"
];
})
})();<file_sep>/directives/click-dir.js
angular.module('clickDirectiveModule', [])
.directive('clickDir', btnClick);
function btnClick() {
return {
restrict: 'A',
scope: {
clickDir: '&'
},
link: function(scope, element, attrs){
element.on('click', function(e) {
scope.clickDir();
});
}
};
} | 171367d4ec36d662726c6c997d74e59f3b5a83fb | [
"Markdown",
"JavaScript"
] | 4 | Markdown | Swadreams/AngularJS-Weather-App | 5a78c2baa8537bda396bd45b5a0cf7a4bb5e0b87 | 2a7c8e87860b037c33a55cb7b0a9e983b1e3958a |
refs/heads/master | <file_sep>import React, { Component } from 'react';
import styled from 'styled-components';
const Button = styled.button`
color: ${(props) => props.color};
background-color: green;
border: 1px solid ${(props) => props.borderColor};
display: inline-block;
font-weight: normal;
text-align: center;
white-space: nowrap;
vertical-align: middle;
cursor: pointer;
user-select: none;
padding: 0.75rem 1.75rem;
font-size: 1rem;
border-radius: 0.3rem;
&:hover {
background-color: #21906d;
border-color: #21906d;
}
`;
export default class extends Component {
render() {
return <Button
onClick={this.props.onClick}
color={this.props.color || 'red'}
borderColor={this.props.outline || 'black'}>
{this.props.children}
</Button>
}
} | 0096d3fe660616f5425b2677c4f5c94c9e82dab9 | [
"JavaScript"
] | 1 | JavaScript | taonga/react-workshop | 395794abf157652e088daa4068aa445fd1efdd1b | 570adc96ca30b9336600464c844cd1a01fb2cedd |
refs/heads/master | <file_sep># exp-er-server
## Install
Exp ER Server requires node v6.0
$ git clone https://github.com/mbrooks/exp-er-server
$ cd exp-er-server
$ npm install
$ npm start
<file_sep>
const privateQuestions = [
{
question: 'Are you in need of immediate rescue? yes or no',
inputType: 'boolean',
},
{
question: 'How many people are with you? enter a valid number',
inputType: 'number',
},
{
question: 'Does you or someone in your group need immediate medical attention? yes or no',
inputType: 'boolean',
},
{
question: 'Please message any additional information',
inputType: 'string',
}
];
const QuestionStorage = {
getAll() {
return privateQuestions;
},
};
module.exports = QuestionStorage;
<file_sep>const http = require('http');
const config = require('config');
const log = require('./lib/log');
const app = require('./lib/server');
const chat = require('./lib/chat');

// Wrap the Express app in a raw HTTP server so the websocket chat layer
// can attach to the same socket, then start listening.
const server = http.createServer(app);
chat.start(server);
server.listen(config.server.port, () => {
  log.info(`App listening on port ${config.server.port}!`);
});
<file_sep>
global.privateAnswers = {};
const AnswerStorage = {
getAll() {
return global.privateAnswers;
},
get(room) {
return global.privateAnswers[room] || [];
},
getByQuestionNumber(room, questionNumber) {
return global.privateAnswers[room][questionNumber] || '';
},
add(room, data) {
if (!global.privateAnswers[room]) {
global.privateAnswers[room] = [];
}
global.privateAnswers[room].push(data);
},
};
module.exports = AnswerStorage;
<file_sep>global.privateMessages = {};
const MessageStorage = {
getAll() {
return global.privateMessages;
},
get(room) {
return global.privateMessages[room] || [];
},
add(room, data) {
if (!global.privateMessages[room]) {
global.privateMessages[room] = [];
}
global.privateMessages[room].push(data);
},
};
module.exports = MessageStorage;
<file_sep>const home = require('./controllers/home');
const chatLogs = require('./controllers/chatLogs');
const log = require('../lib/log');
module.exports = (app) => {
app.get('/', home.index);
app.get('/chatLogs', chatLogs.index);
// handle errors
app.use((err, req, res, next) => {
// if the error has a status, return the error with that status
if (err.status) {
return res.status(err.status).json({ message: err.message });
}
const validationError = /^ValidationError:.*/;
if (validationError.test(err.message)) {
return res.status(400).json({ message: err.message });
}
if (err.message) {
log.error(err.message);
}
if (err.stack) {
log.error(err.stack);
}
// error page
return res.status(500).json({ message: 'Internal Server Error' });
});
// assume 404 since no middleware responded
app.use((req, res) => {
res.status(404).json({ message: '404 Not found' });
});
};
<file_sep>const questionStorage = require('../state/questionStorage');
const answerStorage = require('../state/answerStorage');
const messageStorage = require('../state/MessageStorage');
const masterQuestionList = questionStorage.getAll();
const _ = require('lodash');
function calculatePriority(answers) {
let result = 'code 1';
console.log(answers);
if (answers[0] && answers[0].toLowerCase() === 'yes') {
result = 'code 3';
}
if (answers[1] && !isNaN(answers[1])) {
if (answers[1] > 30) {
result = 'code 3';
}
if (answers[1] > 5) {
result = 'code 2';
}
}
if (answers[2] && answers[2].toLowerCase() === 'yes') {
result = 'code 3';
}
return result;
}
exports.index = (req, res, next) => {
const messages = messageStorage.getAll();
const responses = [];
let caseNumber = 1000;
Object.keys(messages).forEach((key) => {
const questions = _.clone(masterQuestionList);
const answers = answerStorage.get(key);
// figure out things
const priority = calculatePriority(answers);
for (i = 0; i < questions.length; i++) {
questions[i].answer = answers[i];
}
// find coordinates
let latitude;
let longitude;
const coordinates = messages[key].find((message) => {
return message.latitude && message.longitude;
});
if (coordinates) {
latitude = coordinates.latitude;
longitude = coordinates.longitude;
}
const response = {
id: key,
caseNumber,
lastMessage: messages[key][messages[key].length - 1].message,
latitude,
longitude,
timestamp: messages[key][messages[key].length - 1].timestamp,
priority,
chatLogs: messages[key],
questions,
};
responses.push(response);
caseNumber = caseNumber + 1;
});
res.json(responses);
};
| 2eaf0a864c1d7eba9dfbab0364c32c20ae1ce8a7 | [
"Markdown",
"JavaScript"
] | 7 | Markdown | mbrooks/exp-er-server | 8b5d8f259846e70fb095b5ed8ed9064a6e7220ac | f5506b4b129a8e88787d5477478df6cd9e69cf8d |
refs/heads/master | <repo_name>Tanya-tech/competitive_programming_all<file_sep>/CodeChef/test.cpp
#include <iostream>
using namespace std;
// Sort an array containing only 0s, 1s and 2s per test case by counting
// occurrences (counting sort) — O(n) time, O(1) extra space.
// The original built `int ans[n] = {0}`: a variable-length array with an
// initializer, which is ill-formed (VLAs themselves are nonstandard C++);
// counting removes both VLAs entirely.
int main() {
    int t;
    cin >> t;
    while (t--) {
        int n;
        cin >> n;
        int count[3] = {0, 0, 0};
        for (int i = 0; i < n; i++) {
            int value;
            cin >> value;
            if (value >= 0 && value <= 2) count[value]++;
        }
        // Emit each value count[v] times, matching the original output
        // format of "value followed by a space", then a newline.
        for (int v = 0; v < 3; v++) {
            for (int i = 0; i < count[v]; i++) {
                cout << v << " ";
            }
        }
        cout << endl;
    }
    return 0;
}
#### This will have very few files, as LeetCode has its own online editor and submissions are made there.
#### Link to my leetcode profile : https://leetcode.com/kushuu_/<file_sep>/kickStart/test.py
s = "123456"
print(s[:3])
print("ok") | 8da5638e6af052faa9c0525b85116081f3813af5 | [
"Markdown",
"Python",
"C++"
] | 3 | C++ | Tanya-tech/competitive_programming_all | de5356328937dab1cd5b6f6dc0471191ae95e6e5 | d30596093fbe4a039544dbbf07f07136fa30f944 |
refs/heads/master | <repo_name>mbartoli/Militia<file_sep>/militia/tests/path-test.py
# Demonstrates two equivalent ways of resolving paths relative to this test
# file (two dirname() hops up from tests/) instead of the current working
# directory. NOTE: Python 2 syntax (`print` statement) in the original file.
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # This is your Project Root
CONFIG_PATH = os.path.join(BASE_DIR, 'tools/config.json')
#print CONFIG_PATH
# Same idea via string concatenation; unlike os.path.join this hard-codes
# the '/' separator.
confPath = os.path.dirname(os.path.dirname(__file__)) +'/config.json'
print confPath | d7d8bf74480f09ee01f00ae67f9ec6316cc04b21 | [
"Python"
] | 1 | Python | mbartoli/Militia | d7d29e6d6b891ec15406d852b35805cc48eacc3f | 32589370c14502799e2add24f77ea20ff23b230e |
refs/heads/master | <repo_name>AndreaFedeli/js-mail-dadi<file_sep>/dadi/script.js
var giocatore =prompt("Giocatore 1 lancia il dado");
var lancioG = Math.floor(Math.random()*6)+1;
var computer= prompt("Ora tocca al Computer")
var lancioC = Math.floor(Math.random()*6)+1;
if (lancioG>lancioC){
console.log("Giocatore 1 hai vinto");
}else {
console.log("Il Computer ha vinto");
}
console.log(lancioG);
console.log(lancioC);
<file_sep>/script.js
var listaMail = ["<EMAIL>" , "<EMAIL>" , "<EMAIL>" , "<EMAIL>"];
var mail = prompt("Inserisci la tua e-mail");
// Scan the list once and remember whether any entry matches. The original
// alerted inside the loop, so it fired "ERRORE" for every non-matching
// address even when the e-mail was present in the list.
var trovato = false;
for (var i = 0; i < listaMail.length; i++) {
  if (mail == listaMail[i]) {
    trovato = true;
    break;
  }
}
if (trovato) {
  alert("Credenziali corrette");
} else {
  alert("ERRORE");
}
| 1f0da850ca71eb0e8a83d74fc07fa736bdda4aea | [
"JavaScript"
] | 2 | JavaScript | AndreaFedeli/js-mail-dadi | 38fe41e70937182b4d1cb0f77a147e69fbee9cee | db18b2287a7dcde58b54db4f63eca44003e444fa |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.